diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..c4d5bae8ca5430c591a85a757161b0f4f09b55c9 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +refTools/evaluation/meteor/meteor-1.5.jar filter=lfs diff=lfs merge=lfs -text +refTools/evaluation/tokenizer/stanford-corenlp-3.4.1.jar filter=lfs diff=lfs merge=lfs -text diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..451824f8f0fc201563b5f31bc26b15d44eaeea32 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 mshukor + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index ff407fbaf4eff0c6a7678b8bd4e2c1da1b97d8f6..0369cc3358bb750801f147d41f161904afb5b7c6 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,151 @@ ---- -title: EP ALM -emoji: 🐢 -colorFrom: pink -colorTo: gray -sdk: gradio -sdk_version: 3.36.1 -app_file: app.py -pinned: false ---- - -Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +# eP-ALM: Efficient Perceptual Augmentation of Language Models + +


+
+Official implementation of the paper:
+- [eP-ALM: Efficient Perceptual Augmentation of Language Models](https://arxiv.org/abs/2303.11403)
+
+In this repo, you will find the pretrained models and the code to train and evaluate eP-ALM on Image/Video/Audio-Text tasks.
+
+## News
+
+* **[June-2023]** A new version of the paper is released on arXiv:
+  * We re-evaluate the models with greedy decoding.
+  * We add a comparison with SoTA.
+  * We add new experiments, including pretraining on CC3M and zero-shot evaluation (check the Appendix).
+* **[May-2023]** The code is optimized to train and evaluate with float16 mixed precision, using the accelerate library 🤗.
+* **[May-2023]** We found greedy decoding and beam search to be significantly better than multinomial/random sampling.
+* **[20-March-2023]** The paper is released on arXiv.
+* **[March-2023]** The paper is submitted for publication and currently under review.
+
+## Summary
+
+* [Introduction](#introduction)
+* [Download](#download)
+* [Installation](#installation)
+* [Evaluation](#evaluation)
+* [Accelerated Training 🤗](#accelerated-training)
+* [Training](#training)
+* [Citation](#citation)
+* [Acknowledgment](#acknowledgment)
+
+## Introduction
+
+Large Language Models (LLMs) have so far impressed the world with unprecedented capabilities that emerge at large scale. On the vision side, transformer models (i.e., ViT) are following the same trend, achieving the best performance on challenging benchmarks. With the abundance of such unimodal models, a natural question arises: do we also need to follow this trend to tackle multimodal tasks? In this work, we propose instead to direct effort toward efficient adaptation of existing models, and we propose to augment Language Models with perception. Existing approaches for adapting pretrained models to vision-language tasks still rely on several key components that hinder their efficiency. In particular, they train a large number of parameters, rely on large multimodal pretraining, use encoders (e.g., CLIP) trained on huge image-text datasets, and add significant inference overhead. In addition, most of these approaches focus on zero-shot and in-context learning, with little to no effort on direct finetuning. We investigate the minimal computational effort needed to adapt unimodal models to multimodal tasks and propose a new challenging setup, alongside different approaches, that efficiently adapts unimodal pretrained models. We show that by freezing more than 99% of total parameters, training only one linear projection layer, and prepending only one trainable token, our approach (dubbed eP-ALM) significantly outperforms other baselines on VQA and Captioning across Image, Video, and Audio modalities, following the proposed setup.
+
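+To make the recipe concrete, here is a minimal PyTorch sketch of the idea. It is an illustration under simplifying assumptions, not the official eP-ALM code (the actual model injects perceptual tokens from several encoder layers into several LM layers); it assumes `encoder` returns pooled features of size `enc_dim` and `lm` is a Hugging Face-style causal LM that accepts `inputs_embeds`:
+
+```python
+import torch
+import torch.nn as nn
+
+class PerceptualAdapter(nn.Module):
+    """Sketch: freeze the LM and the perceptual encoder; train only a
+    linear projection of perceptual features and one soft prompt token."""
+
+    def __init__(self, lm, encoder, enc_dim, lm_dim):
+        super().__init__()
+        self.lm, self.encoder = lm, encoder
+        for p in self.lm.parameters():       # LM stays frozen
+            p.requires_grad = False
+        for p in self.encoder.parameters():  # encoder stays frozen
+            p.requires_grad = False
+        self.proj = nn.Linear(enc_dim, lm_dim)                      # trained
+        self.soft_prompt = nn.Parameter(torch.zeros(1, 1, lm_dim))  # trained
+
+    def forward(self, pixels, input_ids):
+        feats = self.encoder(pixels)             # e.g. ViT [CLS] features, (B, enc_dim)
+        visual = self.proj(feats).unsqueeze(1)   # (B, 1, lm_dim)
+        tokens = self.lm.get_input_embeddings()(input_ids)
+        prompt = self.soft_prompt.expand(tokens.size(0), -1, -1)
+        return self.lm(inputs_embeds=torch.cat([prompt, visual, tokens], dim=1))
+```
+
+The trainable parameters in such a setup (one linear layer and one prompt token) are what the ">99% frozen" figure above refers to.
+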


+
+### Results
+
+> Comparison of eP-ALM with text-generation-based SoTA that train a significant number of parameters, including methods with large-scale pretraining. Best and next-best scores are shown in bold and underlined, respectively. FT: Finetuning. ZS: Zero-shot.
+


+
+> Qualitative results of eP-ALM: the model generates accurate answers and coherent descriptions of the image. Ground-truth answers are highlighted in green (results obtained with multinomial sampling and OPT350M).
+
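+Note that these qualitative examples use multinomial sampling, while the re-evaluated numbers (see News) use greedy/beam-search decoding, which we found significantly better. As a rough illustration of the two decoding modes with the Hugging Face `generate` API (the prompt and model name here are placeholders, not the exact eP-ALM inference code):
+
+```python
+from transformers import AutoTokenizer, AutoModelForCausalLM
+
+tokenizer = AutoTokenizer.from_pretrained("facebook/opt-350m")
+model = AutoModelForCausalLM.from_pretrained("facebook/opt-350m")
+inputs = tokenizer("Question: what is on the table? Answer:", return_tensors="pt")
+
+# multinomial/random sampling, as used for the qualitative figure above
+sampled = model.generate(**inputs, do_sample=True, top_p=0.9, max_new_tokens=10)
+
+# beam search (num_beams=1 would be pure greedy decoding)
+beam = model.generate(**inputs, do_sample=False, num_beams=3, max_new_tokens=10)
+
+print(tokenizer.decode(beam[0], skip_special_tokens=True))
+```
+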


+
+## Download
+
+### OPT Model
+First, you need to download the OPT models and tokenizers. You can use the following snippet (here for OPT-2.7B) to download them automatically:
+
+```python
+from transformers import AutoTokenizer, OPTModel
+tokenizer = AutoTokenizer.from_pretrained("facebook/opt-2.7b")
+model = OPTModel.from_pretrained("facebook/opt-2.7b")
+```
+
+### Pretrained Models
+We provide only the adaptation parameters (the linear connection and the Soft Prompt). You can download the following models:
+
+For eP-ALM_pt-L (with OPT-6.7B and ViT-L), trained with float16 mixed precision (`accelerate_training`):
+* VQA v2: [ePALM_pt_L](https://nuage.isir.upmc.fr/index.php/s/aSrCTKXsKQxAE72)
+* COCO Caption: [ePALM_pt_L](https://nuage.isir.upmc.fr/index.php/s/PNrytqFbJWJqdt3)
+* GQA: [ePALM_pt_L](https://nuage.isir.upmc.fr/index.php/s/9o7gk5gWY5ZNKLM)
+
+In the following, we provide the smaller models that were used to obtain the main results in the paper. Note that these models were trained with float32:
+* VQA v2: [ePALM](https://nuage.isir.upmc.fr/index.php/s/SMTJqfL62KC88z5)
+* COCO Caption: [ePALM](https://nuage.isir.upmc.fr/index.php/s/y9KZr9CEpe42443)
+* GQA: [ePALM](https://nuage.isir.upmc.fr/index.php/s/8rS84b4EH56CPZq)
+* MSR-VTT Video Caption: [ePALM](https://nuage.isir.upmc.fr/index.php/s/nCj7mz7NHgeYokP)
+* MSRVTT-QA Video QA: [ePALM](https://nuage.isir.upmc.fr/index.php/s/RysMQzH9sSf5b7P)
+* MSVD-QA: [ePALM](https://nuage.isir.upmc.fr/index.php/s/LCdLN3xg35jGCP2)
+* AudioCaps Audio Captioning: [ePALM](https://nuage.isir.upmc.fr/index.php/s/ZeeZc9zdFSgFTFC)
+
+### Data
+More details on the download and organization of the datasets can be found [here](docs/datasets.md).
+
+## Installation
+Main requirements:
+```
+python >= 3.8
+torch >= 1.12
+transformers >= 4.24
+accelerate >= 0.11.0
+```
+More details can be found [here](docs/installation.md).
+
+## Evaluation
+To evaluate the trained models, you can use the same scripts as in `run_scripts/` and just pass the best checkpoint path to the `--evaluate` argument.
+
+To visualize the results and test on your own images, you can use the notebook `ePALM.ipynb`.
+
+You should evaluate the model with the same script used for training (e.g., `run_scripts/accelerate_training` for models trained with accelerate).
+
+Note that you can evaluate models trained with float32 using `run_scripts/accelerate_training`, but you might obtain slightly different results (e.g., for captioning we obtain 102 CIDEr instead of the 97 reported in the paper).
+
+## Accelerated Training 🤗
+We optimized the code based on the [accelerate](https://github.com/huggingface/accelerate) library. Mainly, we train with mixed precision and keep the LM in float16; this roughly halves memory consumption and doubles training speed.
+
+For example, after specifying the path to the `config` file, `data_dir`, and `output_dir`, you can launch training of eP-ALM_pt-L on VQA v2 with:
+
+```
+sh run_scripts/accelerate/image/ePALM_pt_L_vqa_acc.sh
+```
+
+To resume training, pass the initialization checkpoint to the `--resume` argument.
+
+## Training
+The previous models were trained with float32 precision. You can launch training/evaluation of eP-ALM using the scripts in `run_scripts/float32`.
For example you can launch a training on VQA v2 from the following script: + +``` +sh run_scripts/float32/image/ePALM_vqa.sh +``` + +## Citation + +``` +@article{shukor2023ep, + title={eP-ALM: Efficient Perceptual Augmentation of Language Models}, + author={Shukor, Mustafa and Dancette, Corentin and Cord, Matthieu}, + journal={arXiv preprint arXiv:2303.11403}, + year={2023} +} +``` +## Acknowledgment + +Some code was borrowed from [timm](https://github.com/rwightman/pytorch-image-models), [transformers](https://github.com/huggingface/transformers), [TimeSformer](https://github.com/facebookresearch/TimeSformer), and [VL-Adapter](https://github.com/ylsung/VL_adapter). + + diff --git a/TimeSformer/.gitignore b/TimeSformer/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..3ea6368ece23c0bf0673a56a7dd0f92593e231e0 --- /dev/null +++ b/TimeSformer/.gitignore @@ -0,0 +1,143 @@ + + + + +# Docker file from Python is inspired from here : +# https://github.com/github/gitignore/blob/master/Python.gitignore + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +tests/report/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + + +# Cython debug symbols +cython_debug/ diff --git a/TimeSformer/CODE_OF_CONDUCT.md b/TimeSformer/CODE_OF_CONDUCT.md new file mode 100755 index 0000000000000000000000000000000000000000..0f7ad8bfc173eac554f0b6ef7c684861e8014bbe --- /dev/null +++ b/TimeSformer/CODE_OF_CONDUCT.md @@ -0,0 +1,5 @@ +# Code of Conduct + +Facebook has adopted a Code of Conduct that we expect project participants to adhere to. 
+Please read the [full text](https://code.fb.com/codeofconduct/) +so that you can understand what actions will and will not be tolerated. diff --git a/TimeSformer/CONTRIBUTING.md b/TimeSformer/CONTRIBUTING.md new file mode 100755 index 0000000000000000000000000000000000000000..d8efe19ded9398669b9c85721d92b18442d9f412 --- /dev/null +++ b/TimeSformer/CONTRIBUTING.md @@ -0,0 +1,25 @@ +# Contributing to TimeSformer + +## Pull Requests +We actively welcome your pull requests. + +1. Fork the repo and create your branch from `master`. +2. If you've added code that should be tested, add tests. +3. If you've changed APIs, update the documentation. +4. Ensure the test suite passes. +5. Make sure your code lints. +6. If you haven't already, complete the Contributor License Agreement ("CLA"). + +## Contributor License Agreement ("CLA") +In order to accept your pull request, we need you to submit a CLA. You only need +to do this once to work on any of Facebook's open source projects. + +Complete your CLA here: + +## Issues +We use GitHub issues to track public bugs. Please ensure your description is +clear and has sufficient instructions to be able to reproduce the issue. + +## License +By contributing to TimeSformer, you agree that your contributions will be licensed +under the [LICENSE.md](LICENSE.md) file in the root directory of this source tree. diff --git a/TimeSformer/LICENSE b/TimeSformer/LICENSE new file mode 100755 index 0000000000000000000000000000000000000000..6b28d560c9cbbb829dec5bf3eb23f8ddae46d697 --- /dev/null +++ b/TimeSformer/LICENSE @@ -0,0 +1,399 @@ +Attribution-NonCommercial 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. 
More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More_considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution-NonCommercial 4.0 International Public +License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution-NonCommercial 4.0 International Public License ("Public +License"). To the extent this Public License may be interpreted as a +contract, You are granted the Licensed Rights in consideration of Your +acceptance of these terms and conditions, and the Licensor grants You +such rights in consideration of benefits the Licensor receives from +making the Licensed Material available under these terms and +conditions. + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + e. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + f. 
Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + g. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + i. NonCommercial means not primarily intended for or directed towards + commercial advantage or monetary compensation. For purposes of + this Public License, the exchange of the Licensed Material for + other material subject to Copyright and Similar Rights by digital + file-sharing or similar means is NonCommercial provided there is + no payment of monetary compensation in connection with the + exchange. + + j. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + k. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + l. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part, for NonCommercial purposes only; and + + b. produce, reproduce, and Share Adapted Material for + NonCommercial purposes only. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. 
You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties, including when + the Licensed Material is used other than for NonCommercial + purposes. + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. 
for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database for NonCommercial purposes + only; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + +Section 7 -- Other Terms and Conditions. + + a. 
The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. diff --git a/TimeSformer/README.md b/TimeSformer/README.md new file mode 100755 index 0000000000000000000000000000000000000000..578320248488c74e5b22a32f0d7ef7fd2fb2c8b9 --- /dev/null +++ b/TimeSformer/README.md @@ -0,0 +1,248 @@ +# TimeSformer + +This is an official pytorch implementation of our ICML 2021 paper [Is Space-Time Attention All You Need for Video Understanding?](https://arxiv.org/pdf/2102.05095.pdf). In this repository, we provide PyTorch code for training and testing our proposed TimeSformer model. TimeSformer provides an efficient video classification framework that achieves state-of-the-art results on several video action recognition benchmarks such as Kinetics-400. + +If you find TimeSformer useful in your research, please use the following BibTeX entry for citation. 
+
+```BibTeX
+@inproceedings{gberta_2021_ICML,
+  author = {Gedas Bertasius and Heng Wang and Lorenzo Torresani},
+  title = {Is Space-Time Attention All You Need for Video Understanding?},
+  booktitle = {Proceedings of the International Conference on Machine Learning (ICML)},
+  month = {July},
+  year = {2021}
+}
+```
+
+# Model Zoo
+
+We provide TimeSformer models pretrained on the Kinetics-400 (K400), Kinetics-600 (K600), Something-Something-V2 (SSv2), and HowTo100M datasets.
+
+| name | dataset | # of frames | spatial crop | acc@1 | acc@5 | url |
+| --- | --- | --- | --- | --- | --- | --- |
+| TimeSformer | K400 | 8 | 224 | 77.9 | 93.2 | [model](https://www.dropbox.com/s/g5t24we9gl5yk88/TimeSformer_divST_8x32_224_K400.pyth?dl=0) |
+| TimeSformer-HR | K400 | 16 | 448 | 79.6 | 94.0 | [model](https://www.dropbox.com/s/6f0x172lpqy3oxt/TimeSformer_divST_16x16_448_K400.pyth?dl=0) |
+| TimeSformer-L | K400 | 96 | 224 | 80.6 | 94.7 | [model](https://www.dropbox.com/s/r1iuxahif3sgimo/TimeSformer_divST_96x4_224_K400.pyth?dl=0) |
+
+| name | dataset | # of frames | spatial crop | acc@1 | acc@5 | url |
+| --- | --- | --- | --- | --- | --- | --- |
+| TimeSformer | K600 | 8 | 224 | 79.1 | 94.4 | [model](https://www.dropbox.com/s/4h2qt41m2z3aqrb/TimeSformer_divST_8x32_224_K600.pyth?dl=0) |
+| TimeSformer-HR | K600 | 16 | 448 | 81.8 | 95.8 | [model](https://www.dropbox.com/s/ft1e92g2vhvxecv/TimeSformer_divST_16x16_448_K600.pyth?dl=0) |
+| TimeSformer-L | K600 | 96 | 224 | 82.2 | 95.6 | [model](https://www.dropbox.com/s/857rx6xeclxfhdg/TimeSformer_divST_96x4_224_K600.pyth?dl=0) |
+
+| name | dataset | # of frames | spatial crop | acc@1 | acc@5 | url |
+| --- | --- | --- | --- | --- | --- | --- |
+| TimeSformer | SSv2 | 8 | 224 | 59.1 | 85.6 | [model](https://www.dropbox.com/s/tybhuml57y24wpm/TimeSformer_divST_8_224_SSv2.pyth?dl=0) |
+| TimeSformer-HR | SSv2 | 16 | 448 | 61.8 | 86.9 | [model](https://www.dropbox.com/s/9t68uzk8w2fpfnv/TimeSformer_divST_16_448_SSv2.pyth?dl=0) |
+| TimeSformer-L | SSv2 | 64 | 224 | 62.0 | 87.5 | [model](https://www.dropbox.com/s/3f1rm2al8mhprwa/TimeSformer_divST_64_224_SSv2.pyth?dl=0) |
+
+| name | dataset | # of frames | spatial crop | single clip coverage | acc@1 | url |
+| --- | --- | --- | --- | --- | --- | --- |
+| TimeSformer | HowTo100M | 8 | 224 | 8.5s | 56.8 | [model](https://www.dropbox.com/s/9v8hcm88b9tc6ff/TimeSformer_divST_8x32_224_HowTo100M.pyth?dl=0) |
+| TimeSformer | HowTo100M | 32 | 224 | 34.1s | 61.2 | [model](https://www.dropbox.com/s/4roflx4q1gscu85/TimeSformer_divST_32x32_224_HowTo100M.pyth?dl=0) |
+| TimeSformer | HowTo100M | 64 | 448 | 68.3s | 62.2 | [model](https://www.dropbox.com/s/15bvqltl1j5vyp3/TimeSformer_divST_64x32_224_HowTo100M.pyth?dl=0) |
+| TimeSformer | HowTo100M | 96 | 224 | 102.4s | 62.6 | [model](https://www.dropbox.com/s/t2mzgahnfhgakma/TimeSformer_divST_96x32_224_HowTo100M.pyth?dl=0) |
+
+We note that these models were re-trained using a slightly different implementation than the one used in the paper. Therefore, there might be a small difference in performance compared to the results reported in the paper.
+
+You can load the pretrained models as follows:
+
+```python
+import torch
+from timesformer.models.vit import TimeSformer
+
+model = TimeSformer(img_size=224, num_classes=400, num_frames=8, attention_type='divided_space_time', pretrained_model='/path/to/pretrained/model.pyth')
+
+dummy_video = torch.randn(2, 3, 8, 224, 224) # (batch x channels x frames x height x width)
+
+pred = model(dummy_video) # (2, 400)
+```
+
+# Installation
+
+First, create a conda virtual environment and activate it:
+```
+conda create -n timesformer python=3.7 -y
+source activate timesformer
+```
+
+Then, install the following packages:
+
+- torchvision: `pip install torchvision` or `conda install torchvision -c pytorch`
+- [fvcore](https://github.com/facebookresearch/fvcore/): `pip install 'git+https://github.com/facebookresearch/fvcore'`
+- simplejson: `pip install simplejson`
+- einops: `pip install einops`
+- timm: `pip install timm`
+- PyAV: `conda install av -c conda-forge`
+- psutil: `pip install psutil`
+- scikit-learn: `pip install scikit-learn`
+- OpenCV: `pip install opencv-python`
+- tensorboard: `pip install tensorboard`
+
+Lastly, build the TimeSformer codebase by running:
+```
+git clone https://github.com/facebookresearch/TimeSformer
+cd TimeSformer
+python setup.py build develop
+```
+
+# Usage
+
+## Dataset Preparation
+
+Please use the dataset preparation instructions provided in [DATASET.md](timesformer/datasets/DATASET.md).
+
+## Training the Default TimeSformer
+
+Training the default TimeSformer, which uses divided space-time attention and operates on 8-frame clips cropped at 224x224 spatial resolution, can be done using the following command:
+
+```
+python tools/run_net.py \
+  --cfg configs/Kinetics/TimeSformer_divST_8x32_224.yaml \
+  DATA.PATH_TO_DATA_DIR path_to_your_dataset \
+  NUM_GPUS 8 \
+  TRAIN.BATCH_SIZE 8
+```
+You may need to pass the location of your dataset on the command line by adding `DATA.PATH_TO_DATA_DIR path_to_your_dataset`, or you can simply add
+
+```
+DATA:
+  PATH_TO_DATA_DIR: path_to_your_dataset
+```
+
+to the YAML config file, so that you do not need to pass it on the command line every time.
+
+## Using a Different Number of GPUs
+
+If you want to use a smaller number of GPUs, you need to modify the .yaml configuration files in [`configs/`](configs/). Specifically, you need to modify the NUM_GPUS, TRAIN.BATCH_SIZE, TEST.BATCH_SIZE, and DATA_LOADER.NUM_WORKERS entries in each configuration file. The BATCH_SIZE entry should be the same as or higher than the NUM_GPUS entry. In [`configs/Kinetics/TimeSformer_divST_8x32_224_4gpus.yaml`](configs/Kinetics/TimeSformer_divST_8x32_224_4gpus.yaml), we provide a sample configuration file for a 4-GPU setup.
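+The `KEY VALUE` pairs appended to `tools/run_net.py` override entries of the YAML config. As a rough sketch of that override mechanism (a minimal illustration using PyYAML, not the actual config code, which builds on a yacs-style CfgNode):
+
+```python
+import yaml
+
+def apply_overrides(cfg, opts):
+    """Merge dotted KEY VALUE pairs into a nested config dict."""
+    for key, value in zip(opts[0::2], opts[1::2]):
+        node = cfg
+        *parents, leaf = key.split(".")
+        for p in parents:                 # walk/create the nested sections
+            node = node.setdefault(p, {})
+        node[leaf] = value
+    return cfg
+
+with open("configs/Kinetics/TimeSformer_divST_8x32_224.yaml") as f:
+    cfg = yaml.safe_load(f)
+
+# mimics: python tools/run_net.py --cfg ... NUM_GPUS 4 TRAIN.BATCH_SIZE 4
+cfg = apply_overrides(cfg, ["NUM_GPUS", 4, "TRAIN.BATCH_SIZE", 4, "DATA_LOADER.NUM_WORKERS", 4])
+```
+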
+
+## Using Different Self-Attention Schemes
+
+If you want to experiment with different space-time self-attention schemes, e.g., space-only or joint space-time attention, use the following commands:
+
+```
+python tools/run_net.py \
+  --cfg configs/Kinetics/TimeSformer_spaceOnly_8x32_224.yaml \
+  DATA.PATH_TO_DATA_DIR path_to_your_dataset \
+  NUM_GPUS 8 \
+  TRAIN.BATCH_SIZE 8
+```
+
+and
+
+```
+python tools/run_net.py \
+  --cfg configs/Kinetics/TimeSformer_jointST_8x32_224.yaml \
+  DATA.PATH_TO_DATA_DIR path_to_your_dataset \
+  NUM_GPUS 8 \
+  TRAIN.BATCH_SIZE 8
+```
+
+## Training Different TimeSformer Variants
+
+If you want to train more powerful TimeSformer variants, e.g., TimeSformer-HR (operating on 16-frame clips sampled at 448x448 spatial resolution) and TimeSformer-L (operating on 96-frame clips sampled at 224x224 spatial resolution), use the following commands:
+
+```
+python tools/run_net.py \
+  --cfg configs/Kinetics/TimeSformer_divST_16x16_448.yaml \
+  DATA.PATH_TO_DATA_DIR path_to_your_dataset \
+  NUM_GPUS 8 \
+  TRAIN.BATCH_SIZE 8
+```
+
+and
+
+```
+python tools/run_net.py \
+  --cfg configs/Kinetics/TimeSformer_divST_96x4_224.yaml \
+  DATA.PATH_TO_DATA_DIR path_to_your_dataset \
+  NUM_GPUS 8 \
+  TRAIN.BATCH_SIZE 8
+```
+
+Note that for these models you will need a set of GPUs with ~32GB of memory.
+
+## Inference
+
+Use `TRAIN.ENABLE` and `TEST.ENABLE` to control whether training or testing is required for a given run. When testing, you also have to provide the path to the checkpoint via `TEST.CHECKPOINT_FILE_PATH`.
+```
+python tools/run_net.py \
+  --cfg configs/Kinetics/TimeSformer_divST_8x32_224_TEST.yaml \
+  DATA.PATH_TO_DATA_DIR path_to_your_dataset \
+  TEST.CHECKPOINT_FILE_PATH path_to_your_checkpoint \
+  TRAIN.ENABLE False
+```
+
+## Single-Node Training via Slurm
+
+To train TimeSformer via Slurm, please check out our single-node Slurm training script [`slurm_scripts/run_single_node_job.sh`](slurm_scripts/run_single_node_job.sh).
+
+## Multi-Node Training via Submitit
+
+Distributed training is available via Slurm and submitit:
+
+```
+pip install submitit
+```
+
+To train the TimeSformer model on Kinetics using 4 nodes with 8 GPUs each, use the following command:
+```
+python tools/submit.py --cfg configs/Kinetics/TimeSformer_divST_8x32_224.yaml --job_dir /your/job/dir/${JOB_NAME}/ --num_shards 4 --name ${JOB_NAME} --use_volta32
+```
+
+We provide a script for launching Slurm jobs in [`slurm_scripts/run_multi_node_job.sh`](slurm_scripts/run_multi_node_job.sh).
+
+## Finetuning
+
+To finetune from an existing PyTorch checkpoint, add the following lines on the command line, or add them to the YAML config:
+
+```
+TRAIN.CHECKPOINT_FILE_PATH path_to_your_PyTorch_checkpoint
+TRAIN.FINETUNE True
+```
+
+## HowTo100M Dataset Split
+
+If you want to experiment with the long-term video modeling task on HowTo100M, please download the train/test split files from [here](https://www.dropbox.com/sh/ttvsxwqypijjuda/AACmJx1CnddW6cVBoc21eSuva?dl=0).
+
+# Environment
+
+The code was developed using Python 3.7 on Ubuntu 20.04. For training, we used four GPU compute nodes, each containing 8 Tesla V100 GPUs (32 GPUs in total). Other platforms or GPU cards have not been fully tested.
+
+# License
+
+The majority of this work is licensed under the [CC-NC 4.0 International license](LICENSE).
However portions of the project are available under separate license terms: [SlowFast](https://github.com/facebookresearch/SlowFast) and [pytorch-image-models](https://github.com/rwightman/pytorch-image-models) are licensed under the Apache 2.0 license. + +# Contributing + +We actively welcome your pull requests. Please see [CONTRIBUTING.md](CONTRIBUTING.md) and [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md) for more info. + +# Acknowledgements + +TimeSformer is built on top of [PySlowFast](https://github.com/facebookresearch/SlowFast) and [pytorch-image-models](https://github.com/rwightman/pytorch-image-models) by [Ross Wightman](https://github.com/rwightman). We thank the authors for releasing their code. If you use our model, please consider citing these works as well: + +```BibTeX +@misc{fan2020pyslowfast, + author = {Haoqi Fan and Yanghao Li and Bo Xiong and Wan-Yen Lo and + Christoph Feichtenhofer}, + title = {PySlowFast}, + howpublished = {\url{https://github.com/facebookresearch/slowfast}}, + year = {2020} +} +``` + +```BibTeX +@misc{rw2019timm, + author = {Ross Wightman}, + title = {PyTorch Image Models}, + year = {2019}, + publisher = {GitHub}, + journal = {GitHub repository}, + doi = {10.5281/zenodo.4414861}, + howpublished = {\url{https://github.com/rwightman/pytorch-image-models}} +} +``` diff --git a/TimeSformer/configs/Kinetics/SLOWFAST_4x16_R50.yaml b/TimeSformer/configs/Kinetics/SLOWFAST_4x16_R50.yaml new file mode 100755 index 0000000000000000000000000000000000000000..9eeadf54ef11d7cd1d6b03812920f5dcfea9cdb9 --- /dev/null +++ b/TimeSformer/configs/Kinetics/SLOWFAST_4x16_R50.yaml @@ -0,0 +1,63 @@ +TRAIN: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 64 + EVAL_PERIOD: 10 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 32 + SAMPLING_RATE: 2 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 256 + INPUT_CHANNEL_NUM: [3, 3] +SLOWFAST: + ALPHA: 8 + BETA_INV: 8 + FUSION_CONV_CHANNEL_RATIO: 2 + FUSION_KERNEL_SZ: 5 +RESNET: + ZERO_INIT_FINAL_BN: True + WIDTH_PER_GROUP: 64 + NUM_GROUPS: 1 + DEPTH: 50 + TRANS_FUNC: bottleneck_transform + STRIDE_1X1: False + NUM_BLOCK_TEMP_KERNEL: [[3, 3], [4, 4], [6, 6], [3, 3]] + SPATIAL_STRIDES: [[1, 1], [2, 2], [2, 2], [2, 2]] + SPATIAL_DILATIONS: [[1, 1], [1, 1], [1, 1], [1, 1]] +NONLOCAL: + LOCATION: [[[], []], [[], []], [[], []], [[], []]] + GROUP: [[1, 1], [1, 1], [1, 1], [1, 1]] + INSTANTIATION: dot_product +BN: + USE_PRECISE_STATS: True + NUM_BATCHES_PRECISE: 200 +SOLVER: + BASE_LR: 0.8 + LR_POLICY: cosine + MAX_EPOCH: 196 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + WARMUP_EPOCHS: 34.0 + WARMUP_START_LR: 0.01 + OPTIMIZING_METHOD: sgd +MODEL: + NUM_CLASSES: 400 + ARCH: slowfast + MODEL_NAME: SlowFast + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 64 +DATA_LOADER: + NUM_WORKERS: 8 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . 
diff --git a/TimeSformer/configs/Kinetics/SLOWFAST_8x8_R101.yaml b/TimeSformer/configs/Kinetics/SLOWFAST_8x8_R101.yaml new file mode 100755 index 0000000000000000000000000000000000000000..72142be54b0d40d6476788da18a9a916287450e5 --- /dev/null +++ b/TimeSformer/configs/Kinetics/SLOWFAST_8x8_R101.yaml @@ -0,0 +1,63 @@ +TRAIN: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 64 + EVAL_PERIOD: 10 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 32 + SAMPLING_RATE: 2 + TRAIN_JITTER_SCALES: [256, 340] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 256 + INPUT_CHANNEL_NUM: [3, 3] +SLOWFAST: + ALPHA: 4 + BETA_INV: 8 + FUSION_CONV_CHANNEL_RATIO: 2 + FUSION_KERNEL_SZ: 5 +RESNET: + ZERO_INIT_FINAL_BN: True + WIDTH_PER_GROUP: 64 + NUM_GROUPS: 1 + DEPTH: 101 + TRANS_FUNC: bottleneck_transform + STRIDE_1X1: False + NUM_BLOCK_TEMP_KERNEL: [[3, 3], [4, 4], [6, 6], [3, 3]] + SPATIAL_STRIDES: [[1, 1], [2, 2], [2, 2], [2, 2]] + SPATIAL_DILATIONS: [[1, 1], [1, 1], [1, 1], [1, 1]] +NONLOCAL: + LOCATION: [[[], []], [[], []], [[], []], [[], []]] + GROUP: [[1, 1], [1, 1], [1, 1], [1, 1]] + INSTANTIATION: dot_product +BN: + USE_PRECISE_STATS: True + NUM_BATCHES_PRECISE: 200 +SOLVER: + BASE_LR: 0.8 ## 8 nodes + LR_POLICY: cosine + MAX_EPOCH: 196 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + WARMUP_EPOCHS: 34.0 + WARMUP_START_LR: 0.01 + OPTIMIZING_METHOD: sgd +MODEL: + NUM_CLASSES: 400 + ARCH: slowfast + MODEL_NAME: SlowFast + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 64 +DATA_LOADER: + NUM_WORKERS: 8 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . diff --git a/TimeSformer/configs/Kinetics/SLOWFAST_8x8_R50.yaml b/TimeSformer/configs/Kinetics/SLOWFAST_8x8_R50.yaml new file mode 100755 index 0000000000000000000000000000000000000000..09228517b002ae63014faad0cd018914bba128b5 --- /dev/null +++ b/TimeSformer/configs/Kinetics/SLOWFAST_8x8_R50.yaml @@ -0,0 +1,63 @@ +TRAIN: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 64 + EVAL_PERIOD: 10 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 32 + SAMPLING_RATE: 2 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 256 + INPUT_CHANNEL_NUM: [3, 3] +SLOWFAST: + ALPHA: 4 + BETA_INV: 8 + FUSION_CONV_CHANNEL_RATIO: 2 + FUSION_KERNEL_SZ: 7 +RESNET: + ZERO_INIT_FINAL_BN: True + WIDTH_PER_GROUP: 64 + NUM_GROUPS: 1 + DEPTH: 50 + TRANS_FUNC: bottleneck_transform + STRIDE_1X1: False + NUM_BLOCK_TEMP_KERNEL: [[3, 3], [4, 4], [6, 6], [3, 3]] + SPATIAL_STRIDES: [[1, 1], [2, 2], [2, 2], [2, 2]] + SPATIAL_DILATIONS: [[1, 1], [1, 1], [1, 1], [1, 1]] +NONLOCAL: + LOCATION: [[[], []], [[], []], [[], []], [[], []]] + GROUP: [[1, 1], [1, 1], [1, 1], [1, 1]] + INSTANTIATION: dot_product +BN: + USE_PRECISE_STATS: True + NUM_BATCHES_PRECISE: 200 +SOLVER: + BASE_LR: 0.8 + LR_POLICY: cosine + MAX_EPOCH: 196 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + WARMUP_EPOCHS: 34.0 + WARMUP_START_LR: 0.01 + OPTIMIZING_METHOD: sgd +MODEL: + NUM_CLASSES: 400 + ARCH: slowfast + MODEL_NAME: SlowFast + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 64 +DATA_LOADER: + NUM_WORKERS: 8 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . 
diff --git a/TimeSformer/configs/Kinetics/TimeSformer_divST_16x16_448.yaml b/TimeSformer/configs/Kinetics/TimeSformer_divST_16x16_448.yaml new file mode 100755 index 0000000000000000000000000000000000000000..de4b4ffe94bdfff8adf4d06af9f9635298c7050a --- /dev/null +++ b/TimeSformer/configs/Kinetics/TimeSformer_divST_16x16_448.yaml @@ -0,0 +1,45 @@ +TRAIN: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 16 + SAMPLING_RATE: 16 + TRAIN_JITTER_SCALES: [448, 512] + TRAIN_CROP_SIZE: 448 + TEST_CROP_SIZE: 448 + INPUT_CHANNEL_NUM: [3] +TIMESFORMER: + ATTENTION_TYPE: 'divided_space_time' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 400 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 +DATA_LOADER: + NUM_WORKERS: 8 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . diff --git a/TimeSformer/configs/Kinetics/TimeSformer_divST_8x32_224.yaml b/TimeSformer/configs/Kinetics/TimeSformer_divST_8x32_224.yaml new file mode 100755 index 0000000000000000000000000000000000000000..3c0e65c36d5648abc8ba19d17e0553aef8025b95 --- /dev/null +++ b/TimeSformer/configs/Kinetics/TimeSformer_divST_8x32_224.yaml @@ -0,0 +1,45 @@ +TRAIN: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 8 + SAMPLING_RATE: 32 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 224 + INPUT_CHANNEL_NUM: [3] +TIMESFORMER: + ATTENTION_TYPE: 'divided_space_time' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 400 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 +DATA_LOADER: + NUM_WORKERS: 8 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . 
diff --git a/TimeSformer/configs/Kinetics/TimeSformer_divST_8x32_224_4gpus.yaml b/TimeSformer/configs/Kinetics/TimeSformer_divST_8x32_224_4gpus.yaml new file mode 100755 index 0000000000000000000000000000000000000000..f494b973a31f300cdfa08a66b3761b851a1ec087 --- /dev/null +++ b/TimeSformer/configs/Kinetics/TimeSformer_divST_8x32_224_4gpus.yaml @@ -0,0 +1,45 @@ +TRAIN: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 4 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 8 + SAMPLING_RATE: 32 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 224 + INPUT_CHANNEL_NUM: [3] +TIMESFORMER: + ATTENTION_TYPE: 'divided_space_time' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 400 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 4 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 +DATA_LOADER: + NUM_WORKERS: 4 + PIN_MEMORY: True +NUM_GPUS: 4 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . diff --git a/TimeSformer/configs/Kinetics/TimeSformer_divST_8x32_224_TEST.yaml b/TimeSformer/configs/Kinetics/TimeSformer_divST_8x32_224_TEST.yaml new file mode 100755 index 0000000000000000000000000000000000000000..cc8accf5b4037c078808a3cf5e9a37ddc081f32f --- /dev/null +++ b/TimeSformer/configs/Kinetics/TimeSformer_divST_8x32_224_TEST.yaml @@ -0,0 +1,46 @@ +TRAIN: + ENABLE: False + DATASET: kinetics + BATCH_SIZE: 8 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 8 + SAMPLING_RATE: 32 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 224 + INPUT_CHANNEL_NUM: [3] +TIMESFORMER: + ATTENTION_TYPE: 'divided_space_time' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 400 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 + CHECKPOINT_FILE_PATH: '/checkpoint/gedas/jobs/timesformer/kinetics_400/TimeSformer_divST_8x32_224/checkpoints/checkpoint_epoch_00025.pyth' +DATA_LOADER: + NUM_WORKERS: 8 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . 
diff --git a/TimeSformer/configs/Kinetics/TimeSformer_divST_96x4_224.yaml b/TimeSformer/configs/Kinetics/TimeSformer_divST_96x4_224.yaml new file mode 100755 index 0000000000000000000000000000000000000000..976d5509b49001ae8fef1a6526c0df9c5cdb8a4c --- /dev/null +++ b/TimeSformer/configs/Kinetics/TimeSformer_divST_96x4_224.yaml @@ -0,0 +1,45 @@ +TRAIN: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 96 + SAMPLING_RATE: 4 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 224 + INPUT_CHANNEL_NUM: [3] +TIMESFORMER: + ATTENTION_TYPE: 'divided_space_time' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 400 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 +DATA_LOADER: + NUM_WORKERS: 8 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . diff --git a/TimeSformer/configs/Kinetics/TimeSformer_jointST_8x32_224.yaml b/TimeSformer/configs/Kinetics/TimeSformer_jointST_8x32_224.yaml new file mode 100755 index 0000000000000000000000000000000000000000..ea48c27b09d38be163976ff848b61529a2efe1e1 --- /dev/null +++ b/TimeSformer/configs/Kinetics/TimeSformer_jointST_8x32_224.yaml @@ -0,0 +1,45 @@ +TRAIN: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 8 + SAMPLING_RATE: 32 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 224 + INPUT_CHANNEL_NUM: [3] +TIMESFORMER: + ATTENTION_TYPE: 'joint_space_time' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 400 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 +DATA_LOADER: + NUM_WORKERS: 8 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . 
diff --git a/TimeSformer/configs/Kinetics/TimeSformer_spaceOnly_8x32_224.yaml b/TimeSformer/configs/Kinetics/TimeSformer_spaceOnly_8x32_224.yaml new file mode 100755 index 0000000000000000000000000000000000000000..b6c3e7b7b1a0a1da05ed822ddff7ef8f20d6e4fc --- /dev/null +++ b/TimeSformer/configs/Kinetics/TimeSformer_spaceOnly_8x32_224.yaml @@ -0,0 +1,45 @@ +TRAIN: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: /path/to/kinetics/ + NUM_FRAMES: 8 + SAMPLING_RATE: 32 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 224 + INPUT_CHANNEL_NUM: [3] +TIMESFORMER: + ATTENTION_TYPE: 'space_only' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 400 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: kinetics + BATCH_SIZE: 8 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 +DATA_LOADER: + NUM_WORKERS: 8 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . diff --git a/TimeSformer/configs/SSv2/SLOWFAST_16x8_R50.yaml b/TimeSformer/configs/SSv2/SLOWFAST_16x8_R50.yaml new file mode 100755 index 0000000000000000000000000000000000000000..cb7f859cca83d06c762ce3d5b9a555381cecbfa2 --- /dev/null +++ b/TimeSformer/configs/SSv2/SLOWFAST_16x8_R50.yaml @@ -0,0 +1,83 @@ +TRAIN: + ENABLE: True + DATASET: ssv2 + BATCH_SIZE: 16 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: " /path/to/ssv2/annotations/" + PATH_PREFIX: "/path/to/ssv2/frames/" + NUM_FRAMES: 64 + SAMPLING_RATE: 2 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 256 + INPUT_CHANNEL_NUM: [3, 3] + INV_UNIFORM_SAMPLE: True + RANDOM_FLIP: False + REVERSE_INPUT_CHANNEL: True +SLOWFAST: + ALPHA: 4 + BETA_INV: 8 + FUSION_CONV_CHANNEL_RATIO: 2 + FUSION_KERNEL_SZ: 7 +RESNET: + SPATIAL_STRIDES: [[1, 1], [2, 2], [2, 2], [2, 2]] + SPATIAL_DILATIONS: [[1, 1], [1, 1], [1, 1], [1, 1]] + ZERO_INIT_FINAL_BN: True + WIDTH_PER_GROUP: 64 + NUM_GROUPS: 1 + DEPTH: 50 + TRANS_FUNC: bottleneck_transform + STRIDE_1X1: False + NUM_BLOCK_TEMP_KERNEL: [[3, 3], [4, 4], [6, 6], [3, 3]] +NONLOCAL: + LOCATION: [[[], []], [[], []], [[], []], [[], []]] + GROUP: [[1, 1], [1, 1], [1, 1], [1, 1]] + INSTANTIATION: dot_product +BN: + USE_PRECISE_STATS: True + NUM_BATCHES_PRECISE: 200 + NORM_TYPE: sync_batchnorm + NUM_SYNC_DEVICES: 4 +SOLVER: + BASE_LR: 0.2 #8 nodes + LR_POLICY: cosine + MAX_EPOCH: 200 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + WARMUP_EPOCHS: 34.0 + WARMUP_START_LR: 0.01 + OPTIMIZING_METHOD: sgd + #SOLVER: + # BASE_LR: 0.03 + # LR_POLICY: steps_with_relative_lrs + # LRS: [1, 0.1, 0.01, 0.001, 0.0001, 0.00001] + # STEPS: [0, 14, 18] + # MAX_EPOCH: 22 + # MOMENTUM: 0.9 + # WEIGHT_DECAY: 1e-6 + # WARMUP_EPOCHS: 0.19 + # WARMUP_START_LR: 0.0001 + # OPTIMIZING_METHOD: sgd +MODEL: + NUM_CLASSES: 174 + ARCH: slowfast + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: ssv2 + BATCH_SIZE: 16 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 1 +DATA_LOADER: + NUM_WORKERS: 4 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . 
+#LOG_MODEL_INFO: False +LOG_MODEL_INFO: True diff --git a/TimeSformer/configs/SSv2/TimeSformer_divST_16_448.yaml b/TimeSformer/configs/SSv2/TimeSformer_divST_16_448.yaml new file mode 100755 index 0000000000000000000000000000000000000000..721ebb8ecc25639818bfb83310a091df1578cf8f --- /dev/null +++ b/TimeSformer/configs/SSv2/TimeSformer_divST_16_448.yaml @@ -0,0 +1,48 @@ +TRAIN: + ENABLE: True + DATASET: ssv2 + BATCH_SIZE: 8 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: " /path/to/ssv2/annotations/" + PATH_PREFIX: "/path/to/ssv2/frames/" + NUM_FRAMES: 16 + TRAIN_JITTER_SCALES: [448, 512] + TRAIN_CROP_SIZE: 448 + TEST_CROP_SIZE: 448 + INPUT_CHANNEL_NUM: [3] + INV_UNIFORM_SAMPLE: True + RANDOM_FLIP: False + REVERSE_INPUT_CHANNEL: True +TIMESFORMER: + ATTENTION_TYPE: 'divided_space_time' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 174 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: ssv2 + BATCH_SIZE: 8 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 +DATA_LOADER: + NUM_WORKERS: 4 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . diff --git a/TimeSformer/configs/SSv2/TimeSformer_divST_64_224.yaml b/TimeSformer/configs/SSv2/TimeSformer_divST_64_224.yaml new file mode 100755 index 0000000000000000000000000000000000000000..c212aabb750da540ae1b37112ee3251d7f0b5fee --- /dev/null +++ b/TimeSformer/configs/SSv2/TimeSformer_divST_64_224.yaml @@ -0,0 +1,48 @@ +TRAIN: + ENABLE: True + DATASET: ssv2 + BATCH_SIZE: 8 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: " /path/to/ssv2/annotations/" + PATH_PREFIX: "/path/to/ssv2/frames/" + NUM_FRAMES: 64 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 224 + INPUT_CHANNEL_NUM: [3] + INV_UNIFORM_SAMPLE: True + RANDOM_FLIP: False + REVERSE_INPUT_CHANNEL: True +TIMESFORMER: + ATTENTION_TYPE: 'divided_space_time' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 174 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: ssv2 + BATCH_SIZE: 8 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 +DATA_LOADER: + NUM_WORKERS: 4 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . 
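Across these variants, the knobs that actually change are `DATA.NUM_FRAMES`, the crop size, and `TIMESFORMER.ATTENTION_TYPE`; together they determine how many tokens the ViT backbone sees and how attention cost scales with that count. A rough, illustrative back-of-the-envelope comparison, assuming the ViT-Base/16 backbone named by `MODEL_NAME: vit_base_patch16_224` (16x16 patches) and ignoring the CLS token and constant factors:

```python
PATCH = 16  # ViT-B/16 patch size (assumption based on MODEL_NAME above)

def num_tokens(frames: int, crop: int) -> int:
    # One token per 16x16 patch, per frame.
    return frames * (crop // PATCH) ** 2

for frames, crop in [(8, 224), (16, 448), (64, 224), (96, 224)]:
    n = num_tokens(frames, crop)
    per_frame = (crop // PATCH) ** 2
    joint = n * n                          # joint space-time: every token attends to all tokens
    divided = n * frames + n * per_frame   # divided: temporal pass over T, then spatial pass over S
    print(f"{frames:>2} frames @ {crop}px -> {n:>6} tokens | "
          f"joint ~{joint:.1e} pairs, divided ~{divided:.1e} pairs")
```

The quadratic growth of the joint variant is consistent with why only the short 8-frame, 224-pixel setting ships with `joint_space_time`, while the 64/96-frame and 448-pixel settings above all use `divided_space_time`.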
diff --git a/TimeSformer/configs/SSv2/TimeSformer_divST_8_224.yaml b/TimeSformer/configs/SSv2/TimeSformer_divST_8_224.yaml new file mode 100755 index 0000000000000000000000000000000000000000..c6396660b869450a05ea78e104a9b7ca3cfc30a7 --- /dev/null +++ b/TimeSformer/configs/SSv2/TimeSformer_divST_8_224.yaml @@ -0,0 +1,48 @@ +TRAIN: + ENABLE: True + DATASET: ssv2 + BATCH_SIZE: 8 + EVAL_PERIOD: 5 + CHECKPOINT_PERIOD: 5 + AUTO_RESUME: True +DATA: + PATH_TO_DATA_DIR: " /path/to/ssv2/annotations/" + PATH_PREFIX: "/path/to/ssv2/frames/" + NUM_FRAMES: 8 + TRAIN_JITTER_SCALES: [256, 320] + TRAIN_CROP_SIZE: 224 + TEST_CROP_SIZE: 224 + INPUT_CHANNEL_NUM: [3] + INV_UNIFORM_SAMPLE: True + RANDOM_FLIP: False + REVERSE_INPUT_CHANNEL: True +TIMESFORMER: + ATTENTION_TYPE: 'divided_space_time' +SOLVER: + BASE_LR: 0.005 + LR_POLICY: steps_with_relative_lrs + STEPS: [0, 11, 14] + LRS: [1, 0.1, 0.01] + MAX_EPOCH: 15 + MOMENTUM: 0.9 + WEIGHT_DECAY: 1e-4 + OPTIMIZING_METHOD: sgd +MODEL: + MODEL_NAME: vit_base_patch16_224 + NUM_CLASSES: 174 + ARCH: vit + LOSS_FUNC: cross_entropy + DROPOUT_RATE: 0.5 +TEST: + ENABLE: True + DATASET: ssv2 + BATCH_SIZE: 8 + NUM_ENSEMBLE_VIEWS: 1 + NUM_SPATIAL_CROPS: 3 +DATA_LOADER: + NUM_WORKERS: 4 + PIN_MEMORY: True +NUM_GPUS: 8 +NUM_SHARDS: 1 +RNG_SEED: 0 +OUTPUT_DIR: . diff --git a/TimeSformer/environment.yml b/TimeSformer/environment.yml new file mode 100644 index 0000000000000000000000000000000000000000..a444dc63cc826ffe3fb0f556a2e62f87ea6b6d66 --- /dev/null +++ b/TimeSformer/environment.yml @@ -0,0 +1,26 @@ +name: timesformer +channels: +- pytorch +- conda-forge +- defaults +dependencies: +- python>3.7 +- jupyterlab +- pandas>=1.2 +- numpy>1.19 +- pytorch>=1.6 +- torchvision>=0.7 +- scikit-learn>=0.22 +- opencv>=4.2 +- pyyaml>=5.1 +- yacs>=0.1.6 +- einops>=0.3 +- tensorboard +- psutil +- tqdm +- matplotlib +- simplejson +- pip +- pip: + - fvcore + - av \ No newline at end of file diff --git a/TimeSformer/example.ipynb b/TimeSformer/example.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..fd4e5a4b3cfa5aeebc1345d9d55317d821d9919e --- /dev/null +++ b/TimeSformer/example.ipynb @@ -0,0 +1,84 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "08fe0c59", + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "\n", + "import torch\n", + "from timesformer.models.vit import TimeSformer" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "10239d32", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_file = Path.home()/'TimeSformer/models/TimeSformer_divST_8x32_224_K600.pyth'\n", + "model_file.exists()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "652fb03e", + "metadata": {}, + "outputs": [], + "source": [ + "model = TimeSformer(img_size=224, num_classes=600, num_frames=8, attention_type='divided_space_time', pretrained_model=str(model_file))\n", + "\n", + "dummy_video = torch.randn(2, 3, 8, 224, 224) # (batch x channels x frames x height x width)\n", + "\n", + "pred = model(dummy_video,) # (2, 600)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "83de13c5-791c-4db7-aba4-6d29ce88584e", + "metadata": {}, + "outputs": [], + "source": [ + "assert pred.shape == (2,600)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { 
+ "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/TimeSformer/setup.cfg b/TimeSformer/setup.cfg new file mode 100755 index 0000000000000000000000000000000000000000..c6589702511d1c61b26817b563b9f9451cfa8923 --- /dev/null +++ b/TimeSformer/setup.cfg @@ -0,0 +1,23 @@ +[isort] +line_length=100 +multi_line_output=4 +known_standard_library=numpy,setuptools +known_myself=timesformer +known_third_party=fvcore,av,torch,pycocotools,yacs,termcolor,scipy,simplejson,matplotlib,torchvision,yaml,tqdm,psutil,opencv-python,pandas,tensorboard,moviepy,sklearn,cv2 +no_lines_before=STDLIB,THIRDPARTY +sections=FUTURE,STDLIB,THIRDPARTY,myself,FIRSTPARTY,LOCALFOLDER +default_section=FIRSTPARTY + +[mypy] +python_version=3.6 +ignore_missing_imports = True +warn_unused_configs = True +disallow_untyped_defs = True +check_untyped_defs = True +warn_unused_ignores = True +warn_redundant_casts = True +show_column_numbers = True +follow_imports = silent +allow_redefinition = True +; Require all functions to be annotated +disallow_incomplete_defs = True diff --git a/TimeSformer/setup.py b/TimeSformer/setup.py new file mode 100755 index 0000000000000000000000000000000000000000..4086cba4c11c3caefe1d2a73c50e9e710d561be6 --- /dev/null +++ b/TimeSformer/setup.py @@ -0,0 +1,23 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +from setuptools import find_packages, setup + +setup( + name="timesformer", + version="1.0", + author="FBAI", + url="unknown", + description="TimeSformer", + keywords = [ + 'artificial intelligence', + 'attention mechanism', + 'transformers', + 'video classification', + ], + install_requires=[ + 'einops>=0.3', + 'torch>=1.6' + ], + extras_require={"tensorboard_video_visualization": ["moviepy"]}, + packages=find_packages(exclude=("configs", "tests")), +) diff --git a/TimeSformer/slurm_scripts/run_multi_node_job.sh b/TimeSformer/slurm_scripts/run_multi_node_job.sh new file mode 100755 index 0000000000000000000000000000000000000000..484fe5c6890b2a7b598f28cdc0eb93f3904bf4d8 --- /dev/null +++ b/TimeSformer/slurm_scripts/run_multi_node_job.sh @@ -0,0 +1,25 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
+# A script with a list of commands for submitting SLURM jobs + +#### Kinetics training +JOB_NAME=TimeSformer_divST_8x32_224 +python tools/submit.py --cfg configs/Kinetics/TimeSformer_divST_8x32_224.yaml --job_dir /your/job/dir/${JOB_NAME}/ --num_shards 4 --partition dev --comment "" --name ${JOB_NAME} --use_volta32 + +#JOB_NAME=TimeSformer_jointST_8x32_224 +#python tools/submit.py --cfg configs/Kinetics/TimeSformer_jointST_8x32_224.yaml --job_dir /your/job/dir/${JOB_NAME}/ --num_shards 4 --partition learnfair --comment "" --name ${JOB_NAME} --use_volta32 + +#JOB_NAME=TimeSformer_spaceOnly_8x32_224 +#python tools/submit.py --cfg configs/Kinetics/TimeSformer_spaceOnly_8x32_224.yaml --job_dir /your/job/dir/${JOB_NAME}/ --num_shards 4 --partition learnfair --comment "" --name ${JOB_NAME} --use_volta32 + +#### Kinetics inference +#JOB_NAME=TimeSformer_divST_8x32_224_TEST_3clips +#python tools/submit.py --cfg configs/Kinetics/TimeSformer_divST_8x32_224_TEST.yaml --job_dir /your/job/dir/${JOB_NAME}/ --num_shards 4 --partition dev --comment "" --name ${JOB_NAME} --use_volta32 + + +##### SSv2 training +#JOB_NAME=TimeSformer_divST_8_224 +#python tools/submit.py --cfg configs/SSv2/TimeSformer_divST_8_224.yaml --job_dir /your/job/dir/${JOB_NAME}/ --num_shards 4 --partition learnfair --comment "" --name ${JOB_NAME} --use_volta32 + +##### Sth-Sth_v2 inference +#JOB_NAME=TimeSformer_divST_8_224_TEST_3clips +#python tools/submit.py --cfg configs/SSv2/TimeSformer_divST_8_224_TEST.yaml --job_dir /your/job/dir/${JOB_NAME}/ --num_shards 4 --partition learnfair --comment "" --name ${JOB_NAME} --use_volta32 diff --git a/TimeSformer/slurm_scripts/run_single_node_job.sh b/TimeSformer/slurm_scripts/run_single_node_job.sh new file mode 100755 index 0000000000000000000000000000000000000000..40f5dc6294682a1dc59e2098912e97d03ff34af2 --- /dev/null +++ b/TimeSformer/slurm_scripts/run_single_node_job.sh @@ -0,0 +1,35 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +# A script with a list of commands for submitting SLURM jobs + +#SBATCH --job-name=timesformer +#SBATCH --mail-type=END,FAIL,REQUEUE +#SBATCH --mail-user=name@domain.com + +## %j is the job id, %u is the user id +#SBATCH --output=/path/to/output/logs/slog-%A-%a.out + +## filename for job standard error output (stderr) +#SBATCH --error=/path/to/error/logs/slog-%A-%a.err + +#SBATCH --array=1 +#SBATCH --partition=partition_of_your_choice +#SBATCH --nodes=1 -C volta32gb +#SBATCH --ntasks-per-node=1 +#SBATCH --gpus-per-node=8 +#SBATCH --cpus-per-task=80 +#SBATCH --mem=480GB +#SBATCH --signal=USR1@600 +#SBATCH --time=72:00:00 +#SBATCH --open-mode=append + +module purge +module load cuda/10.0 +module load NCCL/2.4.7-1-cuda.10.0 +module load cudnn/v7.4-cuda.10.0 +source activate timesformer + +WORKINGDIR=/path/to/TimeSformer +CURPYTHON=/path/to/python + +srun --label ${CURPYTHON} ${WORKINGDIR}/tools/run_net.py --cfg ${WORKINGDIR}/configs/Kinetics/TimeSformer_divST_8x32_224.yaml NUM_GPUS 8 TRAIN.BATCH_SIZE 8 + diff --git a/TimeSformer/timesformer/__init__.py b/TimeSformer/timesformer/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..6ba3c6083a2735979f6cf8a43e0118758775ed6a --- /dev/null +++ b/TimeSformer/timesformer/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
+ +from timesformer.utils.env import setup_environment + +setup_environment() diff --git a/TimeSformer/timesformer/config/__init__.py b/TimeSformer/timesformer/config/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..5c7f19c6c00a4ac3f2f2bc66f892e44bcbd72612 --- /dev/null +++ b/TimeSformer/timesformer/config/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. diff --git a/TimeSformer/timesformer/config/defaults.py b/TimeSformer/timesformer/config/defaults.py new file mode 100755 index 0000000000000000000000000000000000000000..4340c07fc7747ce22f30e98911b7de54b43d606c --- /dev/null +++ b/TimeSformer/timesformer/config/defaults.py @@ -0,0 +1,820 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Configs.""" +from fvcore.common.config import CfgNode +# ----------------------------------------------------------------------------- +# Config definition +# ----------------------------------------------------------------------------- +_C = CfgNode() + +# ---------------------------------------------------------------------------- # +# Batch norm options +# ---------------------------------------------------------------------------- # +_C.BN = CfgNode() + +# Precise BN stats. +_C.BN.USE_PRECISE_STATS = False + +# Number of samples use to compute precise bn. +_C.BN.NUM_BATCHES_PRECISE = 200 + +# Weight decay value that applies on BN. +_C.BN.WEIGHT_DECAY = 0.0 + +# Norm type, options include `batchnorm`, `sub_batchnorm`, `sync_batchnorm` +_C.BN.NORM_TYPE = "batchnorm" + +# Parameter for SubBatchNorm, where it splits the batch dimension into +# NUM_SPLITS splits, and run BN on each of them separately independently. +_C.BN.NUM_SPLITS = 1 + +# Parameter for NaiveSyncBatchNorm3d, where the stats across `NUM_SYNC_DEVICES` +# devices will be synchronized. +_C.BN.NUM_SYNC_DEVICES = 1 + + +# ---------------------------------------------------------------------------- # +# Training options. +# ---------------------------------------------------------------------------- # +_C.TRAIN = CfgNode() + +# If True Train the model, else skip training. +_C.TRAIN.ENABLE = True + +# Dataset. +_C.TRAIN.DATASET = "kinetics" + +## +_C.TRAIN.FINETUNE = False + +# Total mini-batch size. +_C.TRAIN.BATCH_SIZE = 64 + +# Evaluate model on test data every eval period epochs. +_C.TRAIN.EVAL_PERIOD = 10 + +# Save model checkpoint every checkpoint period epochs. +_C.TRAIN.CHECKPOINT_PERIOD = 10 + +# Resume training from the latest checkpoint in the output directory. +_C.TRAIN.AUTO_RESUME = True + +# Path to the checkpoint to load the initial weight. +_C.TRAIN.CHECKPOINT_FILE_PATH = "" + +# Checkpoint types include `caffe2` or `pytorch`. +_C.TRAIN.CHECKPOINT_TYPE = "pytorch" + +# If True, perform inflation when loading checkpoint. +_C.TRAIN.CHECKPOINT_INFLATE = False + +# If True, reset epochs when loading checkpoint. +_C.TRAIN.CHECKPOINT_EPOCH_RESET = False + +# If set, clear all layer names according to the pattern provided. +_C.TRAIN.CHECKPOINT_CLEAR_NAME_PATTERN = () # ("backbone.",) + +# ---------------------------------------------------------------------------- # +# Testing options +# ---------------------------------------------------------------------------- # +_C.TEST = CfgNode() + +# If True test the model, else skip the testing. +_C.TEST.ENABLE = True + +# Dataset for testing. +_C.TEST.DATASET = "kinetics" + +# Total mini-batch size +_C.TEST.BATCH_SIZE = 8 + +# Path to the checkpoint to load the initial weight. 
+_C.TEST.CHECKPOINT_FILE_PATH = ""
+
+# Number of clips to sample from a video uniformly for aggregating the
+# prediction results.
+_C.TEST.NUM_ENSEMBLE_VIEWS = 10
+
+# Number of crops to sample from a frame spatially for aggregating the
+# prediction results.
+_C.TEST.NUM_SPATIAL_CROPS = 3
+
+# Checkpoint types include `caffe2` or `pytorch`.
+_C.TEST.CHECKPOINT_TYPE = "pytorch"
+# Path for saving the prediction results file.
+_C.TEST.SAVE_RESULTS_PATH = ""
+# -----------------------------------------------------------------------------
+# ResNet options
+# -----------------------------------------------------------------------------
+_C.RESNET = CfgNode()
+
+# Transformation function.
+_C.RESNET.TRANS_FUNC = "bottleneck_transform"
+
+# Number of groups (1 for ResNet, larger than 1 for ResNeXt).
+_C.RESNET.NUM_GROUPS = 1
+
+# Width of each group (64 -> ResNet; 4 -> ResNeXt).
+_C.RESNET.WIDTH_PER_GROUP = 64
+
+# Apply ReLU in an inplace manner.
+_C.RESNET.INPLACE_RELU = True
+
+# Apply stride to 1x1 conv.
+_C.RESNET.STRIDE_1X1 = False
+
+# If true, initialize the gamma of the final BN of each block to zero.
+_C.RESNET.ZERO_INIT_FINAL_BN = False
+
+# Number of weight layers.
+_C.RESNET.DEPTH = 50
+
+# If the current block has more than NUM_BLOCK_TEMP_KERNEL blocks, use temporal
+# kernel of 1 for the rest of the blocks.
+_C.RESNET.NUM_BLOCK_TEMP_KERNEL = [[3], [4], [6], [3]]
+
+# Size of stride on different res stages.
+_C.RESNET.SPATIAL_STRIDES = [[1], [2], [2], [2]]
+
+# Size of dilation on different res stages.
+_C.RESNET.SPATIAL_DILATIONS = [[1], [1], [1], [1]]
+
+# ---------------------------------------------------------------------------- #
+# X3D options
+# See https://arxiv.org/abs/2004.04730 for details about X3D Networks.
+# ---------------------------------------------------------------------------- #
+_C.X3D = CfgNode()
+
+# Width expansion factor.
+_C.X3D.WIDTH_FACTOR = 1.0
+
+# Depth expansion factor.
+_C.X3D.DEPTH_FACTOR = 1.0
+
+# Bottleneck expansion factor for the 3x3x3 conv.
+_C.X3D.BOTTLENECK_FACTOR = 1.0
+
+# Dimensions of the last linear layer before classification.
+_C.X3D.DIM_C5 = 2048
+
+# Dimensions of the first 3x3 conv layer.
+_C.X3D.DIM_C1 = 12
+
+# Whether to scale the width of Res2; default is false.
+_C.X3D.SCALE_RES2 = False
+
+# Whether to use a BatchNorm (BN) layer before the classifier; default is false.
+_C.X3D.BN_LIN5 = False
+
+# Whether to use channelwise (=depthwise) convolution in the center (3x3x3)
+# convolution operation of the residual blocks.
+_C.X3D.CHANNELWISE_3x3x3 = True
+
+# -----------------------------------------------------------------------------
+# Nonlocal options
+# -----------------------------------------------------------------------------
+_C.NONLOCAL = CfgNode()
+
+# Index of each stage and block to add nonlocal layers.
+_C.NONLOCAL.LOCATION = [[[]], [[]], [[]], [[]]]
+
+# Number of groups for nonlocal for each stage.
+_C.NONLOCAL.GROUP = [[1], [1], [1], [1]]
+
+# Instantiation to use for the non-local layer.
+_C.NONLOCAL.INSTANTIATION = "dot_product"
+
+
+# Size of pooling layers used in Non-Local.
+_C.NONLOCAL.POOL = [
+    # Res2
+    [[1, 2, 2], [1, 2, 2]],
+    # Res3
+    [[1, 2, 2], [1, 2, 2]],
+    # Res4
+    [[1, 2, 2], [1, 2, 2]],
+    # Res5
+    [[1, 2, 2], [1, 2, 2]],
+]
+
+# -----------------------------------------------------------------------------
+# Model options
+# -----------------------------------------------------------------------------
+_C.MODEL = CfgNode()
+
+# Model architecture.
+_C.MODEL.ARCH = "slowfast" + +# Model name +_C.MODEL.MODEL_NAME = "SlowFast" + +# The number of classes to predict for the model. +_C.MODEL.NUM_CLASSES = 400 + +# Loss function. +_C.MODEL.LOSS_FUNC = "cross_entropy" + +# Model architectures that has one single pathway. +_C.MODEL.SINGLE_PATHWAY_ARCH = ["c2d", "i3d", "slow", "x3d"] + +# Model architectures that has multiple pathways. +_C.MODEL.MULTI_PATHWAY_ARCH = ["slowfast"] + +# Dropout rate before final projection in the backbone. +_C.MODEL.DROPOUT_RATE = 0.5 + +# Randomly drop rate for Res-blocks, linearly increase from res2 to res5 +_C.MODEL.DROPCONNECT_RATE = 0.0 + +# The std to initialize the fc layer(s). +_C.MODEL.FC_INIT_STD = 0.01 + +# Activation layer for the output head. +_C.MODEL.HEAD_ACT = "softmax" + + +# ----------------------------------------------------------------------------- +# SlowFast options +# ----------------------------------------------------------------------------- +_C.SLOWFAST = CfgNode() + +# Corresponds to the inverse of the channel reduction ratio, $\beta$ between +# the Slow and Fast pathways. +_C.SLOWFAST.BETA_INV = 8 + +# Corresponds to the frame rate reduction ratio, $\alpha$ between the Slow and +# Fast pathways. +_C.SLOWFAST.ALPHA = 8 + +# Ratio of channel dimensions between the Slow and Fast pathways. +_C.SLOWFAST.FUSION_CONV_CHANNEL_RATIO = 2 + +# Kernel dimension used for fusing information from Fast pathway to Slow +# pathway. +_C.SLOWFAST.FUSION_KERNEL_SZ = 5 + +####### TimeSformer Options +_C.TIMESFORMER = CfgNode() +_C.TIMESFORMER.ATTENTION_TYPE = 'divided_space_time' +_C.TIMESFORMER.PRETRAINED_MODEL = '' + +## MixUp parameters +_C.MIXUP = CfgNode() +_C.MIXUP.ENABLED = False +_C.MIXUP.ALPHA = 0.8 +_C.MIXUP.CUTMIX_ALPHA = 1.0 +_C.MIXUP.CUTMIX_MINMAX = None +_C.MIXUP.PROB = 1.0 +_C.MIXUP.SWITCH_PROB = 0.5 +_C.MIXUP.MODE = 'batch' + +_C.EMA = CfgNode() +_C.EMA.ENABLED = False + +# ----------------------------------------------------------------------------- +# Data options +# ----------------------------------------------------------------------------- +_C.DATA = CfgNode() + +# The path to the data directory. +_C.DATA.PATH_TO_DATA_DIR = "" + +# The separator used between path and label. +_C.DATA.PATH_LABEL_SEPARATOR = " " + +# Video path prefix if any. +_C.DATA.PATH_PREFIX = "" + +# The spatial crop size of the input clip. +_C.DATA.CROP_SIZE = 224 + +# The number of frames of the input clip. +_C.DATA.NUM_FRAMES = 8 + +# The video sampling rate of the input clip. +_C.DATA.SAMPLING_RATE = 8 + +# The mean value of the video raw pixels across the R G B channels. +_C.DATA.MEAN = [0.45, 0.45, 0.45] +# List of input frame channel dimensions. + +_C.DATA.INPUT_CHANNEL_NUM = [3, 3] + +# The std value of the video raw pixels across the R G B channels. +_C.DATA.STD = [0.225, 0.225, 0.225] + +# The spatial augmentation jitter scales for training. +_C.DATA.TRAIN_JITTER_SCALES = [256, 320] + +# The spatial crop size for training. +_C.DATA.TRAIN_CROP_SIZE = 224 + +# The spatial crop size for testing. +_C.DATA.TEST_CROP_SIZE = 256 + +# Input videos may has different fps, convert it to the target video fps before +# frame sampling. +_C.DATA.TARGET_FPS = 30 + +# Decoding backend, options include `pyav` or `torchvision` +_C.DATA.DECODING_BACKEND = "pyav" + +# if True, sample uniformly in [1 / max_scale, 1 / min_scale] and take a +# reciprocal to get the scale. If False, take a uniform sample from +# [min_scale, max_scale]. 
+_C.DATA.INV_UNIFORM_SAMPLE = False
+
+# If True, perform random horizontal flip on the video frames during training.
+_C.DATA.RANDOM_FLIP = True
+
+# If True, calculate the mAP as the metric.
+_C.DATA.MULTI_LABEL = False
+
+# Method to perform the ensemble; options include "sum" and "max".
+_C.DATA.ENSEMBLE_METHOD = "sum"
+
+# If True, revert the default input channel order (RGB <-> BGR).
+_C.DATA.REVERSE_INPUT_CHANNEL = False
+
+############
+_C.DATA.TEMPORAL_EXTENT = 8
+_C.DATA.DEIT_TRANSFORMS = False
+_C.DATA.COLOR_JITTER = 0.
+_C.DATA.AUTO_AUGMENT = ''
+_C.DATA.RE_PROB = 0.0
+
+# ---------------------------------------------------------------------------- #
+# Optimizer options
+# ---------------------------------------------------------------------------- #
+_C.SOLVER = CfgNode()
+
+# Base learning rate.
+_C.SOLVER.BASE_LR = 0.1
+
+# Learning rate policy (see utils/lr_policy.py for options and examples).
+_C.SOLVER.LR_POLICY = "cosine"
+
+# Final learning rate for the 'cosine' policy.
+_C.SOLVER.COSINE_END_LR = 0.0
+
+# Exponential decay factor.
+_C.SOLVER.GAMMA = 0.1
+
+# Step size for 'exp' and 'cos' policies (in epochs).
+_C.SOLVER.STEP_SIZE = 1
+
+# Steps for 'steps_' policies (in epochs).
+_C.SOLVER.STEPS = []
+
+# Learning rates for 'steps_' policies.
+_C.SOLVER.LRS = []
+
+# Maximum number of epochs.
+_C.SOLVER.MAX_EPOCH = 300
+
+# Momentum.
+_C.SOLVER.MOMENTUM = 0.9
+
+# Momentum dampening.
+_C.SOLVER.DAMPENING = 0.0
+
+# Nesterov momentum.
+_C.SOLVER.NESTEROV = True
+
+# L2 regularization.
+_C.SOLVER.WEIGHT_DECAY = 1e-4
+
+# Start the warm up from SOLVER.BASE_LR * SOLVER.WARMUP_FACTOR.
+_C.SOLVER.WARMUP_FACTOR = 0.1
+
+# Gradually warm up the SOLVER.BASE_LR over this number of epochs.
+_C.SOLVER.WARMUP_EPOCHS = 0.0
+
+# The start learning rate of the warm up.
+_C.SOLVER.WARMUP_START_LR = 0.01
+
+# Optimization method.
+_C.SOLVER.OPTIMIZING_METHOD = "sgd"
+
+# Base learning rate is linearly scaled with NUM_SHARDS.
+_C.SOLVER.BASE_LR_SCALE_NUM_SHARDS = False
+
+# ---------------------------------------------------------------------------- #
+# Misc options
+# ---------------------------------------------------------------------------- #
+
+# Number of GPUs to use (applies to both training and testing).
+_C.NUM_GPUS = 1
+
+# Number of machines to use for the job.
+_C.NUM_SHARDS = 1
+
+# The index of the current machine.
+_C.SHARD_ID = 0
+
+# Output basedir.
+_C.OUTPUT_DIR = "./tmp"
+
+# Note that non-determinism may still be present due to non-deterministic
+# operator implementations in GPU operator libraries.
+_C.RNG_SEED = 1
+
+# Log period in iters.
+_C.LOG_PERIOD = 10
+
+# If True, log the model info.
+_C.LOG_MODEL_INFO = False
+
+# Distributed backend.
+_C.DIST_BACKEND = "nccl"
+
+# Global batch size.
+_C.GLOBAL_BATCH_SIZE = 64
+
+# ---------------------------------------------------------------------------- #
+# Benchmark options
+# ---------------------------------------------------------------------------- #
+_C.BENCHMARK = CfgNode()
+
+# Number of epochs for data loading benchmark.
+_C.BENCHMARK.NUM_EPOCHS = 5
+
+# Log period in iters for data loading benchmark.
+_C.BENCHMARK.LOG_PERIOD = 100
+
+# If True, shuffle the dataloader each epoch during benchmark.
+_C.BENCHMARK.SHUFFLE = True
+
+
+# ---------------------------------------------------------------------------- #
+# Common train/test data loader options
+# ---------------------------------------------------------------------------- #
+_C.DATA_LOADER = CfgNode()
+
+# Number of data loader workers per training process.
+_C.DATA_LOADER.NUM_WORKERS = 8 + +# Load data to pinned host memory. +_C.DATA_LOADER.PIN_MEMORY = True + +# Enable multi thread decoding. +_C.DATA_LOADER.ENABLE_MULTI_THREAD_DECODE = False + + +# ---------------------------------------------------------------------------- # +# Detection options. +# ---------------------------------------------------------------------------- # +_C.DETECTION = CfgNode() + +# Whether enable video detection. +_C.DETECTION.ENABLE = False + +# Aligned version of RoI. More details can be found at slowfast/models/head_helper.py +_C.DETECTION.ALIGNED = True + +# Spatial scale factor. +_C.DETECTION.SPATIAL_SCALE_FACTOR = 16 + +# RoI tranformation resolution. +_C.DETECTION.ROI_XFORM_RESOLUTION = 7 + + +# ----------------------------------------------------------------------------- +# AVA Dataset options +# ----------------------------------------------------------------------------- +_C.AVA = CfgNode() + +# Directory path of frames. +_C.AVA.FRAME_DIR = "/mnt/fair-flash3-east/ava_trainval_frames.img/" + +# Directory path for files of frame lists. +_C.AVA.FRAME_LIST_DIR = ( + "/mnt/vol/gfsai-flash3-east/ai-group/users/haoqifan/ava/frame_list/" +) + +# Directory path for annotation files. +_C.AVA.ANNOTATION_DIR = ( + "/mnt/vol/gfsai-flash3-east/ai-group/users/haoqifan/ava/frame_list/" +) + +# Filenames of training samples list files. +_C.AVA.TRAIN_LISTS = ["train.csv"] + +# Filenames of test samples list files. +_C.AVA.TEST_LISTS = ["val.csv"] + +# Filenames of box list files for training. Note that we assume files which +# contains predicted boxes will have a suffix "predicted_boxes" in the +# filename. +_C.AVA.TRAIN_GT_BOX_LISTS = ["ava_train_v2.2.csv"] +_C.AVA.TRAIN_PREDICT_BOX_LISTS = [] + +# Filenames of box list files for test. +_C.AVA.TEST_PREDICT_BOX_LISTS = ["ava_val_predicted_boxes.csv"] + +# This option controls the score threshold for the predicted boxes to use. +_C.AVA.DETECTION_SCORE_THRESH = 0.9 + +# If use BGR as the format of input frames. +_C.AVA.BGR = False + +# Training augmentation parameters +# Whether to use color augmentation method. +_C.AVA.TRAIN_USE_COLOR_AUGMENTATION = False + +# Whether to only use PCA jitter augmentation when using color augmentation +# method (otherwise combine with color jitter method). +_C.AVA.TRAIN_PCA_JITTER_ONLY = True + +# Eigenvalues for PCA jittering. Note PCA is RGB based. +_C.AVA.TRAIN_PCA_EIGVAL = [0.225, 0.224, 0.229] + +# Eigenvectors for PCA jittering. +_C.AVA.TRAIN_PCA_EIGVEC = [ + [-0.5675, 0.7192, 0.4009], + [-0.5808, -0.0045, -0.8140], + [-0.5836, -0.6948, 0.4203], +] + +# Whether to do horizontal flipping during test. +_C.AVA.TEST_FORCE_FLIP = False + +# Whether to use full test set for validation split. +_C.AVA.FULL_TEST_ON_VAL = False + +# The name of the file to the ava label map. +_C.AVA.LABEL_MAP_FILE = "ava_action_list_v2.2_for_activitynet_2019.pbtxt" + +# The name of the file to the ava exclusion. +_C.AVA.EXCLUSION_FILE = "ava_val_excluded_timestamps_v2.2.csv" + +# The name of the file to the ava groundtruth. +_C.AVA.GROUNDTRUTH_FILE = "ava_val_v2.2.csv" + +# Backend to process image, includes `pytorch` and `cv2`. +_C.AVA.IMG_PROC_BACKEND = "cv2" + +# ---------------------------------------------------------------------------- # +# Multigrid training options +# See https://arxiv.org/abs/1912.00998 for details about multigrid training. 
+# ---------------------------------------------------------------------------- # +_C.MULTIGRID = CfgNode() + +# Multigrid training allows us to train for more epochs with fewer iterations. +# This hyperparameter specifies how many times more epochs to train. +# The default setting in paper trains for 1.5x more epochs than baseline. +_C.MULTIGRID.EPOCH_FACTOR = 1.5 + +# Enable short cycles. +_C.MULTIGRID.SHORT_CYCLE = False +# Short cycle additional spatial dimensions relative to the default crop size. +_C.MULTIGRID.SHORT_CYCLE_FACTORS = [0.5, 0.5 ** 0.5] + +_C.MULTIGRID.LONG_CYCLE = False +# (Temporal, Spatial) dimensions relative to the default shape. +_C.MULTIGRID.LONG_CYCLE_FACTORS = [ + (0.25, 0.5 ** 0.5), + (0.5, 0.5 ** 0.5), + (0.5, 1), + (1, 1), +] + +# While a standard BN computes stats across all examples in a GPU, +# for multigrid training we fix the number of clips to compute BN stats on. +# See https://arxiv.org/abs/1912.00998 for details. +_C.MULTIGRID.BN_BASE_SIZE = 8 + +# Multigrid training epochs are not proportional to actual training time or +# computations, so _C.TRAIN.EVAL_PERIOD leads to too frequent or rare +# evaluation. We use a multigrid-specific rule to determine when to evaluate: +# This hyperparameter defines how many times to evaluate a model per long +# cycle shape. +_C.MULTIGRID.EVAL_FREQ = 3 + +# No need to specify; Set automatically and used as global variables. +_C.MULTIGRID.LONG_CYCLE_SAMPLING_RATE = 0 +_C.MULTIGRID.DEFAULT_B = 0 +_C.MULTIGRID.DEFAULT_T = 0 +_C.MULTIGRID.DEFAULT_S = 0 + +# ----------------------------------------------------------------------------- +# Tensorboard Visualization Options +# ----------------------------------------------------------------------------- +_C.TENSORBOARD = CfgNode() + +# Log to summary writer, this will automatically. +# log loss, lr and metrics during train/eval. +_C.TENSORBOARD.ENABLE = False +# Provide path to prediction results for visualization. +# This is a pickle file of [prediction_tensor, label_tensor] +_C.TENSORBOARD.PREDICTIONS_PATH = "" +# Path to directory for tensorboard logs. +# Default to to cfg.OUTPUT_DIR/runs-{cfg.TRAIN.DATASET}. +_C.TENSORBOARD.LOG_DIR = "" +# Path to a json file providing class_name - id mapping +# in the format {"class_name1": id1, "class_name2": id2, ...}. +# This file must be provided to enable plotting confusion matrix +# by a subset or parent categories. +_C.TENSORBOARD.CLASS_NAMES_PATH = "" + +# Path to a json file for categories -> classes mapping +# in the format {"parent_class": ["child_class1", "child_class2",...], ...}. +_C.TENSORBOARD.CATEGORIES_PATH = "" + +# Config for confusion matrices visualization. +_C.TENSORBOARD.CONFUSION_MATRIX = CfgNode() +# Visualize confusion matrix. +_C.TENSORBOARD.CONFUSION_MATRIX.ENABLE = False +# Figure size of the confusion matrices plotted. +_C.TENSORBOARD.CONFUSION_MATRIX.FIGSIZE = [8, 8] +# Path to a subset of categories to visualize. +# File contains class names separated by newline characters. +_C.TENSORBOARD.CONFUSION_MATRIX.SUBSET_PATH = "" + +# Config for histogram visualization. +_C.TENSORBOARD.HISTOGRAM = CfgNode() +# Visualize histograms. +_C.TENSORBOARD.HISTOGRAM.ENABLE = False +# Path to a subset of classes to plot histograms. +# Class names must be separated by newline characters. +_C.TENSORBOARD.HISTOGRAM.SUBSET_PATH = "" +# Visualize top-k most predicted classes on histograms for each +# chosen true label. +_C.TENSORBOARD.HISTOGRAM.TOPK = 10 +# Figure size of the histograms plotted. 
+_C.TENSORBOARD.HISTOGRAM.FIGSIZE = [8, 8] + +# Config for layers' weights and activations visualization. +# _C.TENSORBOARD.ENABLE must be True. +_C.TENSORBOARD.MODEL_VIS = CfgNode() + +# If False, skip model visualization. +_C.TENSORBOARD.MODEL_VIS.ENABLE = False + +# If False, skip visualizing model weights. +_C.TENSORBOARD.MODEL_VIS.MODEL_WEIGHTS = False + +# If False, skip visualizing model activations. +_C.TENSORBOARD.MODEL_VIS.ACTIVATIONS = False + +# If False, skip visualizing input videos. +_C.TENSORBOARD.MODEL_VIS.INPUT_VIDEO = False + + +# List of strings containing data about layer names and their indexing to +# visualize weights and activations for. The indexing is meant for +# choosing a subset of activations outputed by a layer for visualization. +# If indexing is not specified, visualize all activations outputed by the layer. +# For each string, layer name and indexing is separated by whitespaces. +# e.g.: [layer1 1,2;1,2, layer2, layer3 150,151;3,4]; this means for each array `arr` +# along the batch dimension in `layer1`, we take arr[[1, 2], [1, 2]] +_C.TENSORBOARD.MODEL_VIS.LAYER_LIST = [] +# Top-k predictions to plot on videos +_C.TENSORBOARD.MODEL_VIS.TOPK_PREDS = 1 +# Colormap to for text boxes and bounding boxes colors +_C.TENSORBOARD.MODEL_VIS.COLORMAP = "Pastel2" +# Config for visualization video inputs with Grad-CAM. +# _C.TENSORBOARD.ENABLE must be True. +_C.TENSORBOARD.MODEL_VIS.GRAD_CAM = CfgNode() +# Whether to run visualization using Grad-CAM technique. +_C.TENSORBOARD.MODEL_VIS.GRAD_CAM.ENABLE = True +# CNN layers to use for Grad-CAM. The number of layers must be equal to +# number of pathway(s). +_C.TENSORBOARD.MODEL_VIS.GRAD_CAM.LAYER_LIST = [] +# If True, visualize Grad-CAM using true labels for each instances. +# If False, use the highest predicted class. +_C.TENSORBOARD.MODEL_VIS.GRAD_CAM.USE_TRUE_LABEL = False +# Colormap to for text boxes and bounding boxes colors +_C.TENSORBOARD.MODEL_VIS.GRAD_CAM.COLORMAP = "viridis" + +# Config for visualization for wrong prediction visualization. +# _C.TENSORBOARD.ENABLE must be True. +_C.TENSORBOARD.WRONG_PRED_VIS = CfgNode() +_C.TENSORBOARD.WRONG_PRED_VIS.ENABLE = False +# Folder tag to origanize model eval videos under. +_C.TENSORBOARD.WRONG_PRED_VIS.TAG = "Incorrectly classified videos." +# Subset of labels to visualize. Only wrong predictions with true labels +# within this subset is visualized. +_C.TENSORBOARD.WRONG_PRED_VIS.SUBSET_PATH = "" + + +# ---------------------------------------------------------------------------- # +# Demo options +# ---------------------------------------------------------------------------- # +_C.DEMO = CfgNode() + +# Run model in DEMO mode. +_C.DEMO.ENABLE = False + +# Path to a json file providing class_name - id mapping +# in the format {"class_name1": id1, "class_name2": id2, ...}. +_C.DEMO.LABEL_FILE_PATH = "" + +# Specify a camera device as input. This will be prioritized +# over input video if set. +# If -1, use input video instead. +_C.DEMO.WEBCAM = -1 + +# Path to input video for demo. +_C.DEMO.INPUT_VIDEO = "" +# Custom width for reading input video data. +_C.DEMO.DISPLAY_WIDTH = 0 +# Custom height for reading input video data. +_C.DEMO.DISPLAY_HEIGHT = 0 +# Path to Detectron2 object detection model configuration, +# only used for detection tasks. +_C.DEMO.DETECTRON2_CFG = "COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml" +# Path to Detectron2 object detection model pre-trained weights. 
+_C.DEMO.DETECTRON2_WEIGHTS = "detectron2://COCO-Detection/faster_rcnn_R_50_FPN_3x/137849458/model_final_280758.pkl" +# Threshold for choosing predicted bounding boxes by Detectron2. +_C.DEMO.DETECTRON2_THRESH = 0.9 +# Number of overlapping frames between 2 consecutive clips. +# Increase this number for more frequent action predictions. +# The number of overlapping frames cannot be larger than +# half of the sequence length `cfg.DATA.NUM_FRAMES * cfg.DATA.SAMPLING_RATE` +_C.DEMO.BUFFER_SIZE = 0 +# If specified, the visualized outputs will be written this a video file of +# this path. Otherwise, the visualized outputs will be displayed in a window. +_C.DEMO.OUTPUT_FILE = "" +# Frames per second rate for writing to output video file. +# If not set (-1), use fps rate from input file. +_C.DEMO.OUTPUT_FPS = -1 +# Input format from demo video reader ("RGB" or "BGR"). +_C.DEMO.INPUT_FORMAT = "BGR" +# Draw visualization frames in [keyframe_idx - CLIP_VIS_SIZE, keyframe_idx + CLIP_VIS_SIZE] inclusively. +_C.DEMO.CLIP_VIS_SIZE = 10 +# Number of processes to run video visualizer. +_C.DEMO.NUM_VIS_INSTANCES = 2 + +# Path to pre-computed predicted boxes +_C.DEMO.PREDS_BOXES = "" +# Whether to run in with multi-threaded video reader. +_C.DEMO.THREAD_ENABLE = False +# Take one clip for every `DEMO.NUM_CLIPS_SKIP` + 1 for prediction and visualization. +# This is used for fast demo speed by reducing the prediction/visualiztion frequency. +# If -1, take the most recent read clip for visualization. This mode is only supported +# if `DEMO.THREAD_ENABLE` is set to True. +_C.DEMO.NUM_CLIPS_SKIP = 0 +# Path to ground-truth boxes and labels (optional) +_C.DEMO.GT_BOXES = "" +# The starting second of the video w.r.t bounding boxes file. +_C.DEMO.STARTING_SECOND = 900 +# Frames per second of the input video/folder of images. +_C.DEMO.FPS = 30 +# Visualize with top-k predictions or predictions above certain threshold(s). +# Option: {"thres", "top-k"} +_C.DEMO.VIS_MODE = "thres" +# Threshold for common class names. +_C.DEMO.COMMON_CLASS_THRES = 0.7 +# Theshold for uncommon class names. This will not be +# used if `_C.DEMO.COMMON_CLASS_NAMES` is empty. +_C.DEMO.UNCOMMON_CLASS_THRES = 0.3 +# This is chosen based on distribution of examples in +# each classes in AVA dataset. +_C.DEMO.COMMON_CLASS_NAMES = [ + "watch (a person)", + "talk to (e.g., self, a person, a group)", + "listen to (a person)", + "touch (an object)", + "carry/hold (an object)", + "walk", + "sit", + "lie/sleep", + "bend/bow (at the waist)", +] +# Slow-motion rate for the visualization. The visualized portions of the +# video will be played `_C.DEMO.SLOWMO` times slower than usual speed. +_C.DEMO.SLOWMO = 1 + +def _assert_and_infer_cfg(cfg): + # BN assertions. + if cfg.BN.USE_PRECISE_STATS: + assert cfg.BN.NUM_BATCHES_PRECISE >= 0 + # TRAIN assertions. + assert cfg.TRAIN.CHECKPOINT_TYPE in ["pytorch", "caffe2"] + assert cfg.TRAIN.BATCH_SIZE % cfg.NUM_GPUS == 0 + + # TEST assertions. + assert cfg.TEST.CHECKPOINT_TYPE in ["pytorch", "caffe2"] + assert cfg.TEST.BATCH_SIZE % cfg.NUM_GPUS == 0 + assert cfg.TEST.NUM_SPATIAL_CROPS == 3 + + # RESNET assertions. + assert cfg.RESNET.NUM_GROUPS > 0 + assert cfg.RESNET.WIDTH_PER_GROUP > 0 + assert cfg.RESNET.WIDTH_PER_GROUP % cfg.RESNET.NUM_GROUPS == 0 + + # Execute LR scaling by num_shards. + if cfg.SOLVER.BASE_LR_SCALE_NUM_SHARDS: + cfg.SOLVER.BASE_LR *= cfg.NUM_SHARDS + + # General assertions. + assert cfg.SHARD_ID < cfg.NUM_SHARDS + return cfg + + +def get_cfg(): + """ + Get a copy of the default config. 
+    """
+    return _assert_and_infer_cfg(_C.clone())
diff --git a/TimeSformer/timesformer/datasets/DATASET.md b/TimeSformer/timesformer/datasets/DATASET.md
new file mode 100755
index 0000000000000000000000000000000000000000..a53c67cf0e5a779c64e6eddc92467d168769e397
--- /dev/null
+++ b/TimeSformer/timesformer/datasets/DATASET.md
@@ -0,0 +1,26 @@
+# Dataset Preparation
+
+## Kinetics
+
+The Kinetics dataset can be downloaded from the following [link](https://github.com/cvdfoundation/kinetics-dataset).
+
+After all the videos have been downloaded, resize them so that the short edge is 256 pixels, then prepare the csv files for the training, validation, and testing sets as `train.csv`, `val.csv`, `test.csv`. The format of each csv file is:
+
+```
+path_to_video_1 label_1
+path_to_video_2 label_2
+path_to_video_3 label_3
+...
+path_to_video_N label_N
+```
+
+## Something-Something V2
+1. Please download the dataset and annotations from the [dataset provider](https://20bn.com/datasets/something-something).
+
+2. Download the *frame lists* from the following links: ([train](https://dl.fbaipublicfiles.com/pyslowfast/dataset/ssv2/frame_lists/train.csv), [val](https://dl.fbaipublicfiles.com/pyslowfast/dataset/ssv2/frame_lists/val.csv)).
+
+3. Extract the frames at 30 FPS. (We used ffmpeg-4.1.3 with the command
+`ffmpeg -i "${video}" -r 30 -q:v 1 "${out_name}"`
+in our experiments.) Please put the frames in a structure consistent with the frame lists.
+
+Please put all annotation json files and the frame lists in the same folder, and set `DATA.PATH_TO_DATA_DIR` to that path. Set `DATA.PATH_PREFIX` to the path of the folder containing the extracted frames.
diff --git a/TimeSformer/timesformer/datasets/__init__.py b/TimeSformer/timesformer/datasets/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..bc647343301b9b518c24ff1f11fc7b7c5571b9c9
--- /dev/null
+++ b/TimeSformer/timesformer/datasets/__init__.py
@@ -0,0 +1,5 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+from .build import DATASET_REGISTRY, build_dataset  # noqa
+from .kinetics import Kinetics  # noqa
+from .ssv2 import Ssv2  # noqa
diff --git a/TimeSformer/timesformer/datasets/build.py b/TimeSformer/timesformer/datasets/build.py
new file mode 100755
index 0000000000000000000000000000000000000000..60e1850f4c645b570916e89e77bbae31d29b21ba
--- /dev/null
+++ b/TimeSformer/timesformer/datasets/build.py
@@ -0,0 +1,30 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+from fvcore.common.registry import Registry
+
+DATASET_REGISTRY = Registry("DATASET")
+DATASET_REGISTRY.__doc__ = """
+Registry for datasets.
+
+The registered object will be called with `obj(cfg, split)`.
+The call should return a `torch.utils.data.Dataset` object.
+"""
+
+
+def build_dataset(dataset_name, cfg, split):
+    """
+    Build a dataset, defined by `dataset_name`.
+    Args:
+        dataset_name (str): the name of the dataset to be constructed.
+        cfg (CfgNode): configs. Details can be found in
+            slowfast/config/defaults.py
+        split (str): the split of the data loader. Options include `train`,
+            `val`, and `test`.
+    Returns:
+        Dataset: a constructed dataset specified by dataset_name.
+    """
+    # Capitalize the first letter of dataset_name, since the dataset_name in
+    # configs may be in lowercase but the name of the dataset class should
+    # always start with an uppercase letter.
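+    # Usage sketch (lookup keys are illustrative): a dataset class decorated
+    # with `@DATASET_REGISTRY.register()`, such as the `Kinetics` and `Ssv2`
+    # classes imported in __init__.py above, is resolved here from the
+    # capitalized form of the config's lowercase dataset name, e.g.
+    # `build_dataset("kinetics", cfg, "train")` looks up the key "Kinetics".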
+ name = dataset_name.capitalize() + return DATASET_REGISTRY.get(name)(cfg, split) diff --git a/TimeSformer/timesformer/datasets/cv2_transform.py b/TimeSformer/timesformer/datasets/cv2_transform.py new file mode 100755 index 0000000000000000000000000000000000000000..0f6c9d95fbfba042bec5b808e5c619458395eec2 --- /dev/null +++ b/TimeSformer/timesformer/datasets/cv2_transform.py @@ -0,0 +1,796 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import math +import numpy as np +import cv2 + + +def clip_boxes_to_image(boxes, height, width): + """ + Clip the boxes with the height and width of the image size. + Args: + boxes (ndarray): bounding boxes to peform crop. The dimension is + `num boxes` x 4. + height (int): the height of the image. + width (int): the width of the image. + Returns: + boxes (ndarray): cropped bounding boxes. + """ + boxes[:, [0, 2]] = np.minimum( + width - 1.0, np.maximum(0.0, boxes[:, [0, 2]]) + ) + boxes[:, [1, 3]] = np.minimum( + height - 1.0, np.maximum(0.0, boxes[:, [1, 3]]) + ) + return boxes + + +def random_short_side_scale_jitter_list(images, min_size, max_size, boxes=None): + """ + Perform a spatial short scale jittering on the given images and + corresponding boxes. + Args: + images (list): list of images to perform scale jitter. Dimension is + `height` x `width` x `channel`. + min_size (int): the minimal size to scale the frames. + max_size (int): the maximal size to scale the frames. + boxes (list): optional. Corresponding boxes to images. Dimension is + `num boxes` x 4. + Returns: + (list): the list of scaled images with dimension of + `new height` x `new width` x `channel`. + (ndarray or None): the scaled boxes with dimension of + `num boxes` x 4. + """ + size = int(round(1.0 / np.random.uniform(1.0 / max_size, 1.0 / min_size))) + + height = images[0].shape[0] + width = images[0].shape[1] + if (width <= height and width == size) or ( + height <= width and height == size + ): + return images, boxes + new_width = size + new_height = size + if width < height: + new_height = int(math.floor((float(height) / width) * size)) + if boxes is not None: + boxes = [ + proposal * float(new_height) / height for proposal in boxes + ] + else: + new_width = int(math.floor((float(width) / height) * size)) + if boxes is not None: + boxes = [proposal * float(new_width) / width for proposal in boxes] + return ( + [ + cv2.resize( + image, (new_width, new_height), interpolation=cv2.INTER_LINEAR + ).astype(np.float32) + for image in images + ], + boxes, + ) + + +def scale(size, image): + """ + Scale the short side of the image to size. + Args: + size (int): size to scale the image. + image (array): image to perform short side scale. Dimension is + `height` x `width` x `channel`. + Returns: + (ndarray): the scaled image with dimension of + `height` x `width` x `channel`. + """ + height = image.shape[0] + width = image.shape[1] + if (width <= height and width == size) or ( + height <= width and height == size + ): + return image + new_width = size + new_height = size + if width < height: + new_height = int(math.floor((float(height) / width) * size)) + else: + new_width = int(math.floor((float(width) / height) * size)) + img = cv2.resize( + image, (new_width, new_height), interpolation=cv2.INTER_LINEAR + ) + return img.astype(np.float32) + + +def scale_boxes(size, boxes, height, width): + """ + Scale the short side of the box to size. + Args: + size (int): size to scale the image. + boxes (ndarray): bounding boxes to peform scale. 
The dimension is + `num boxes` x 4. + height (int): the height of the image. + width (int): the width of the image. + Returns: + boxes (ndarray): scaled bounding boxes. + """ + if (width <= height and width == size) or ( + height <= width and height == size + ): + return boxes + + new_width = size + new_height = size + if width < height: + new_height = int(math.floor((float(height) / width) * size)) + boxes *= float(new_height) / height + else: + new_width = int(math.floor((float(width) / height) * size)) + boxes *= float(new_width) / width + return boxes + + +def horizontal_flip_list(prob, images, order="CHW", boxes=None): + """ + Horizontally flip the list of image and optional boxes. + Args: + prob (float): probability to flip. + image (list): ilist of images to perform short side scale. Dimension is + `height` x `width` x `channel` or `channel` x `height` x `width`. + order (str): order of the `height`, `channel` and `width`. + boxes (list): optional. Corresponding boxes to images. + Dimension is `num boxes` x 4. + Returns: + (ndarray): the scaled image with dimension of + `height` x `width` x `channel`. + (list): optional. Corresponding boxes to images. Dimension is + `num boxes` x 4. + """ + _, width, _ = images[0].shape + if np.random.uniform() < prob: + if boxes is not None: + boxes = [flip_boxes(proposal, width) for proposal in boxes] + if order == "CHW": + out_images = [] + for image in images: + image = np.asarray(image).swapaxes(2, 0) + image = image[::-1] + out_images.append(image.swapaxes(0, 2)) + return out_images, boxes + elif order == "HWC": + return [cv2.flip(image, 1) for image in images], boxes + return images, boxes + + +def spatial_shift_crop_list(size, images, spatial_shift_pos, boxes=None): + """ + Perform left, center, or right crop of the given list of images. + Args: + size (int): size to crop. + image (list): ilist of images to perform short side scale. Dimension is + `height` x `width` x `channel` or `channel` x `height` x `width`. + spatial_shift_pos (int): option includes 0 (left), 1 (middle), and + 2 (right) crop. + boxes (list): optional. Corresponding boxes to images. + Dimension is `num boxes` x 4. + Returns: + cropped (ndarray): the cropped list of images with dimension of + `height` x `width` x `channel`. + boxes (list): optional. Corresponding boxes to images. Dimension is + `num boxes` x 4. + """ + + assert spatial_shift_pos in [0, 1, 2] + + height = images[0].shape[0] + width = images[0].shape[1] + y_offset = int(math.ceil((height - size) / 2)) + x_offset = int(math.ceil((width - size) / 2)) + + if height > width: + if spatial_shift_pos == 0: + y_offset = 0 + elif spatial_shift_pos == 2: + y_offset = height - size + else: + if spatial_shift_pos == 0: + x_offset = 0 + elif spatial_shift_pos == 2: + x_offset = width - size + + cropped = [ + image[y_offset : y_offset + size, x_offset : x_offset + size, :] + for image in images + ] + assert cropped[0].shape[0] == size, "Image height not cropped properly" + assert cropped[0].shape[1] == size, "Image width not cropped properly" + + if boxes is not None: + for i in range(len(boxes)): + boxes[i][:, [0, 2]] -= x_offset + boxes[i][:, [1, 3]] -= y_offset + return cropped, boxes + + +def CHW2HWC(image): + """ + Transpose the dimension from `channel` x `height` x `width` to + `height` x `width` x `channel`. + Args: + image (array): image to transpose. + Returns + (array): transposed image. 
+ """ + return image.transpose([1, 2, 0]) + + +def HWC2CHW(image): + """ + Transpose the dimension from `height` x `width` x `channel` to + `channel` x `height` x `width`. + Args: + image (array): image to transpose. + Returns + (array): transposed image. + """ + return image.transpose([2, 0, 1]) + + +def color_jitter_list( + images, img_brightness=0, img_contrast=0, img_saturation=0 +): + """ + Perform color jitter on the list of images. + Args: + images (list): list of images to perform color jitter. + img_brightness (float): jitter ratio for brightness. + img_contrast (float): jitter ratio for contrast. + img_saturation (float): jitter ratio for saturation. + Returns: + images (list): the jittered list of images. + """ + jitter = [] + if img_brightness != 0: + jitter.append("brightness") + if img_contrast != 0: + jitter.append("contrast") + if img_saturation != 0: + jitter.append("saturation") + + if len(jitter) > 0: + order = np.random.permutation(np.arange(len(jitter))) + for idx in range(0, len(jitter)): + if jitter[order[idx]] == "brightness": + images = brightness_list(img_brightness, images) + elif jitter[order[idx]] == "contrast": + images = contrast_list(img_contrast, images) + elif jitter[order[idx]] == "saturation": + images = saturation_list(img_saturation, images) + return images + + +def lighting_list(imgs, alphastd, eigval, eigvec, alpha=None): + """ + Perform AlexNet-style PCA jitter on the given list of images. + Args: + images (list): list of images to perform lighting jitter. + alphastd (float): jitter ratio for PCA jitter. + eigval (list): eigenvalues for PCA jitter. + eigvec (list[list]): eigenvectors for PCA jitter. + Returns: + out_images (list): the list of jittered images. + """ + if alphastd == 0: + return imgs + # generate alpha1, alpha2, alpha3 + alpha = np.random.normal(0, alphastd, size=(1, 3)) + eig_vec = np.array(eigvec) + eig_val = np.reshape(eigval, (1, 3)) + rgb = np.sum( + eig_vec * np.repeat(alpha, 3, axis=0) * np.repeat(eig_val, 3, axis=0), + axis=1, + ) + out_images = [] + for img in imgs: + for idx in range(img.shape[0]): + img[idx] = img[idx] + rgb[2 - idx] + out_images.append(img) + return out_images + + +def color_normalization(image, mean, stddev): + """ + Perform color normalization on the image with the given mean and stddev. + Args: + image (array): image to perform color normalization. + mean (float): mean value to subtract. + stddev (float): stddev to devide. + """ + # Input image should in format of CHW + assert len(mean) == image.shape[0], "channel mean not computed properly" + assert len(stddev) == image.shape[0], "channel stddev not computed properly" + for idx in range(image.shape[0]): + image[idx] = image[idx] - mean[idx] + image[idx] = image[idx] / stddev[idx] + return image + + +def pad_image(image, pad_size, order="CHW"): + """ + Pad the given image with the size of pad_size. + Args: + image (array): image to pad. + pad_size (int): size to pad. + order (str): order of the `height`, `channel` and `width`. + Returns: + img (array): padded image. + """ + if order == "CHW": + img = np.pad( + image, + ((0, 0), (pad_size, pad_size), (pad_size, pad_size)), + mode=str("constant"), + ) + elif order == "HWC": + img = np.pad( + image, + ((pad_size, pad_size), (pad_size, pad_size), (0, 0)), + mode=str("constant"), + ) + return img + + +def horizontal_flip(prob, image, order="CHW"): + """ + Horizontally flip the image. + Args: + prob (float): probability to flip. + image (array): image to pad. 
+ order (str): order of the `height`, `channel` and `width`. + Returns: + img (array): flipped image. + """ + assert order in ["CHW", "HWC"], "order {} is not supported".format(order) + if np.random.uniform() < prob: + if order == "CHW": + image = image[:, :, ::-1] + elif order == "HWC": + image = image[:, ::-1, :] + else: + raise NotImplementedError("Unknown order {}".format(order)) + return image + + +def flip_boxes(boxes, im_width): + """ + Horizontally flip the boxes. + Args: + boxes (array): box to flip. + im_width (int): width of the image. + Returns: + boxes_flipped (array): flipped box. + """ + + boxes_flipped = boxes.copy() + boxes_flipped[:, 0::4] = im_width - boxes[:, 2::4] - 1 + boxes_flipped[:, 2::4] = im_width - boxes[:, 0::4] - 1 + return boxes_flipped + + +def crop_boxes(boxes, x_offset, y_offset): + """ + Crop the boxes given the offsets. + Args: + boxes (array): boxes to crop. + x_offset (int): offset on x. + y_offset (int): offset on y. + """ + boxes[:, [0, 2]] = boxes[:, [0, 2]] - x_offset + boxes[:, [1, 3]] = boxes[:, [1, 3]] - y_offset + return boxes + + +def random_crop_list(images, size, pad_size=0, order="CHW", boxes=None): + """ + Perform random crop on a list of images. + Args: + images (list): list of images to perform random crop. + size (int): size to crop. + pad_size (int): padding size. + order (str): order of the `height`, `channel` and `width`. + boxes (list): optional. Corresponding boxes to images. + Dimension is `num boxes` x 4. + Returns: + cropped (ndarray): the cropped list of images with dimension of + `height` x `width` x `channel`. + boxes (list): optional. Corresponding boxes to images. Dimension is + `num boxes` x 4. + """ + # explicitly dealing processing per image order to avoid flipping images. + if pad_size > 0: + images = [ + pad_image(pad_size=pad_size, image=image, order=order) + for image in images + ] + + # image format should be CHW. + if order == "CHW": + if images[0].shape[1] == size and images[0].shape[2] == size: + return images, boxes + height = images[0].shape[1] + width = images[0].shape[2] + y_offset = 0 + if height > size: + y_offset = int(np.random.randint(0, height - size)) + x_offset = 0 + if width > size: + x_offset = int(np.random.randint(0, width - size)) + cropped = [ + image[:, y_offset : y_offset + size, x_offset : x_offset + size] + for image in images + ] + assert cropped[0].shape[1] == size, "Image not cropped properly" + assert cropped[0].shape[2] == size, "Image not cropped properly" + elif order == "HWC": + if images[0].shape[0] == size and images[0].shape[1] == size: + return images, boxes + height = images[0].shape[0] + width = images[0].shape[1] + y_offset = 0 + if height > size: + y_offset = int(np.random.randint(0, height - size)) + x_offset = 0 + if width > size: + x_offset = int(np.random.randint(0, width - size)) + cropped = [ + image[y_offset : y_offset + size, x_offset : x_offset + size, :] + for image in images + ] + assert cropped[0].shape[0] == size, "Image not cropped properly" + assert cropped[0].shape[1] == size, "Image not cropped properly" + + if boxes is not None: + boxes = [crop_boxes(proposal, x_offset, y_offset) for proposal in boxes] + return cropped, boxes + + +def center_crop(size, image): + """ + Perform center crop on input images. + Args: + size (int): size of the cropped height and width. + image (array): the image to perform center crop. 
+ """ + height = image.shape[0] + width = image.shape[1] + y_offset = int(math.ceil((height - size) / 2)) + x_offset = int(math.ceil((width - size) / 2)) + cropped = image[y_offset : y_offset + size, x_offset : x_offset + size, :] + assert cropped.shape[0] == size, "Image height not cropped properly" + assert cropped.shape[1] == size, "Image width not cropped properly" + return cropped + + +# ResNet style scale jittering: randomly select the scale from +# [1/max_size, 1/min_size] +def random_scale_jitter(image, min_size, max_size): + """ + Perform ResNet style random scale jittering: randomly select the scale from + [1/max_size, 1/min_size]. + Args: + image (array): image to perform random scale. + min_size (int): min size to scale. + max_size (int) max size to scale. + Returns: + image (array): scaled image. + """ + img_scale = int( + round(1.0 / np.random.uniform(1.0 / max_size, 1.0 / min_size)) + ) + image = scale(img_scale, image) + return image + + +def random_scale_jitter_list(images, min_size, max_size): + """ + Perform ResNet style random scale jittering on a list of image: randomly + select the scale from [1/max_size, 1/min_size]. Note that all the image + will share the same scale. + Args: + images (list): list of images to perform random scale. + min_size (int): min size to scale. + max_size (int) max size to scale. + Returns: + images (list): list of scaled image. + """ + img_scale = int( + round(1.0 / np.random.uniform(1.0 / max_size, 1.0 / min_size)) + ) + return [scale(img_scale, image) for image in images] + + +def random_sized_crop(image, size, area_frac=0.08): + """ + Perform random sized cropping on the given image. Random crop with size + 8% - 100% image area and aspect ratio in [3/4, 4/3]. + Args: + image (array): image to crop. + size (int): size to crop. + area_frac (float): area of fraction. + Returns: + (array): cropped image. + """ + for _ in range(0, 10): + height = image.shape[0] + width = image.shape[1] + area = height * width + target_area = np.random.uniform(area_frac, 1.0) * area + aspect_ratio = np.random.uniform(3.0 / 4.0, 4.0 / 3.0) + w = int(round(math.sqrt(float(target_area) * aspect_ratio))) + h = int(round(math.sqrt(float(target_area) / aspect_ratio))) + if np.random.uniform() < 0.5: + w, h = h, w + if h <= height and w <= width: + if height == h: + y_offset = 0 + else: + y_offset = np.random.randint(0, height - h) + if width == w: + x_offset = 0 + else: + x_offset = np.random.randint(0, width - w) + y_offset = int(y_offset) + x_offset = int(x_offset) + cropped = image[y_offset : y_offset + h, x_offset : x_offset + w, :] + assert ( + cropped.shape[0] == h and cropped.shape[1] == w + ), "Wrong crop size" + cropped = cv2.resize( + cropped, (size, size), interpolation=cv2.INTER_LINEAR + ) + return cropped.astype(np.float32) + return center_crop(size, scale(size, image)) + + +def lighting(img, alphastd, eigval, eigvec): + """ + Perform AlexNet-style PCA jitter on the given image. + Args: + image (array): list of images to perform lighting jitter. + alphastd (float): jitter ratio for PCA jitter. + eigval (array): eigenvalues for PCA jitter. + eigvec (list): eigenvectors for PCA jitter. + Returns: + img (tensor): the jittered image. + """ + if alphastd == 0: + return img + # generate alpha1, alpha2, alpha3. 
+ alpha = np.random.normal(0, alphastd, size=(1, 3)) + eig_vec = np.array(eigvec) + eig_val = np.reshape(eigval, (1, 3)) + rgb = np.sum( + eig_vec * np.repeat(alpha, 3, axis=0) * np.repeat(eig_val, 3, axis=0), + axis=1, + ) + for idx in range(img.shape[0]): + img[idx] = img[idx] + rgb[2 - idx] + return img + + +def random_sized_crop_list(images, size, crop_area_fraction=0.08): + """ + Perform random sized cropping on the given list of images. Random crop with + size 8% - 100% image area and aspect ratio in [3/4, 4/3]. + Args: + images (list): image to crop. + size (int): size to crop. + area_frac (float): area of fraction. + Returns: + (list): list of cropped image. + """ + for _ in range(0, 10): + height = images[0].shape[0] + width = images[0].shape[1] + area = height * width + target_area = np.random.uniform(crop_area_fraction, 1.0) * area + aspect_ratio = np.random.uniform(3.0 / 4.0, 4.0 / 3.0) + w = int(round(math.sqrt(float(target_area) * aspect_ratio))) + h = int(round(math.sqrt(float(target_area) / aspect_ratio))) + if np.random.uniform() < 0.5: + w, h = h, w + if h <= height and w <= width: + if height == h: + y_offset = 0 + else: + y_offset = np.random.randint(0, height - h) + if width == w: + x_offset = 0 + else: + x_offset = np.random.randint(0, width - w) + y_offset = int(y_offset) + x_offset = int(x_offset) + + croppsed_images = [] + for image in images: + cropped = image[ + y_offset : y_offset + h, x_offset : x_offset + w, : + ] + assert ( + cropped.shape[0] == h and cropped.shape[1] == w + ), "Wrong crop size" + cropped = cv2.resize( + cropped, (size, size), interpolation=cv2.INTER_LINEAR + ) + croppsed_images.append(cropped.astype(np.float32)) + return croppsed_images + + return [center_crop(size, scale(size, image)) for image in images] + + +def blend(image1, image2, alpha): + return image1 * alpha + image2 * (1 - alpha) + + +def grayscale(image): + """ + Convert the image to gray scale. + Args: + image (tensor): image to convert to gray scale. Dimension is + `channel` x `height` x `width`. + Returns: + img_gray (tensor): image in gray scale. + """ + # R -> 0.299, G -> 0.587, B -> 0.114. + img_gray = np.copy(image) + gray_channel = 0.299 * image[2] + 0.587 * image[1] + 0.114 * image[0] + img_gray[0] = gray_channel + img_gray[1] = gray_channel + img_gray[2] = gray_channel + return img_gray + + +def saturation(var, image): + """ + Perform color saturation on the given image. + Args: + var (float): variance. + image (array): image to perform color saturation. + Returns: + (array): image that performed color saturation. + """ + img_gray = grayscale(image) + alpha = 1.0 + np.random.uniform(-var, var) + return blend(image, img_gray, alpha) + + +def brightness(var, image): + """ + Perform color brightness on the given image. + Args: + var (float): variance. + image (array): image to perform color brightness. + Returns: + (array): image that performed color brightness. + """ + img_bright = np.zeros(image.shape).astype(image.dtype) + alpha = 1.0 + np.random.uniform(-var, var) + return blend(image, img_bright, alpha) + + +def contrast(var, image): + """ + Perform color contrast on the given image. + Args: + var (float): variance. + image (array): image to perform color contrast. + Returns: + (array): image that performed color contrast. 
+ """ + img_gray = grayscale(image) + img_gray.fill(np.mean(img_gray[0])) + alpha = 1.0 + np.random.uniform(-var, var) + return blend(image, img_gray, alpha) + + +def saturation_list(var, images): + """ + Perform color saturation on the list of given images. + Args: + var (float): variance. + images (list): list of images to perform color saturation. + Returns: + (list): list of images that performed color saturation. + """ + alpha = 1.0 + np.random.uniform(-var, var) + + out_images = [] + for image in images: + img_gray = grayscale(image) + out_images.append(blend(image, img_gray, alpha)) + return out_images + + +def brightness_list(var, images): + """ + Perform color brightness on the given list of images. + Args: + var (float): variance. + images (list): list of images to perform color brightness. + Returns: + (array): list of images that performed color brightness. + """ + alpha = 1.0 + np.random.uniform(-var, var) + + out_images = [] + for image in images: + img_bright = np.zeros(image.shape).astype(image.dtype) + out_images.append(blend(image, img_bright, alpha)) + return out_images + + +def contrast_list(var, images): + """ + Perform color contrast on the given list of images. + Args: + var (float): variance. + images (list): list of images to perform color contrast. + Returns: + (array): image that performed color contrast. + """ + alpha = 1.0 + np.random.uniform(-var, var) + + out_images = [] + for image in images: + img_gray = grayscale(image) + img_gray.fill(np.mean(img_gray[0])) + out_images.append(blend(image, img_gray, alpha)) + return out_images + + +def color_jitter(image, img_brightness=0, img_contrast=0, img_saturation=0): + """ + Perform color jitter on the given image. + Args: + image (array): image to perform color jitter. + img_brightness (float): jitter ratio for brightness. + img_contrast (float): jitter ratio for contrast. + img_saturation (float): jitter ratio for saturation. + Returns: + image (array): the jittered image. + """ + jitter = [] + if img_brightness != 0: + jitter.append("brightness") + if img_contrast != 0: + jitter.append("contrast") + if img_saturation != 0: + jitter.append("saturation") + + if len(jitter) > 0: + order = np.random.permutation(np.arange(len(jitter))) + for idx in range(0, len(jitter)): + if jitter[order[idx]] == "brightness": + image = brightness(img_brightness, image) + elif jitter[order[idx]] == "contrast": + image = contrast(img_contrast, image) + elif jitter[order[idx]] == "saturation": + image = saturation(img_saturation, image) + return image + + +def revert_scaled_boxes(size, boxes, img_height, img_width): + """ + Revert scaled input boxes to match the original image size. + Args: + size (int): size of the cropped image. + boxes (array): shape (num_boxes, 4). + img_height (int): height of original image. + img_width (int): width of original image. + Returns: + reverted_boxes (array): boxes scaled back to the original image size. + """ + scaled_aspect = np.min([img_height, img_width]) + scale_ratio = scaled_aspect / size + reverted_boxes = boxes * scale_ratio + return reverted_boxes diff --git a/TimeSformer/timesformer/datasets/decoder.py b/TimeSformer/timesformer/datasets/decoder.py new file mode 100755 index 0000000000000000000000000000000000000000..c7bb17da8989bba36ff65ecd33f3f141484221f9 --- /dev/null +++ b/TimeSformer/timesformer/datasets/decoder.py @@ -0,0 +1,392 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
+
+import math
+import numpy as np
+import random
+import torch
+import torchvision.io as io
+
+
+def temporal_sampling(frames, start_idx, end_idx, num_samples):
+    """
+    Given the start and end frame index, sample num_samples frames between
+    the start and end with equal interval.
+    Args:
+        frames (tensor): a tensor of video frames, dimension is
+            `num video frames` x `channel` x `height` x `width`.
+        start_idx (int): the index of the start frame.
+        end_idx (int): the index of the end frame.
+        num_samples (int): number of frames to sample.
+    Returns:
+        frames (tensor): a tensor of temporally sampled video frames,
+            dimension is `num clip frames` x `channel` x `height` x `width`.
+    """
+    index = torch.linspace(start_idx, end_idx, num_samples)
+    index = torch.clamp(index, 0, frames.shape[0] - 1).long()
+    frames = torch.index_select(frames, 0, index)
+    return frames
+
+
+def get_start_end_idx(video_size, clip_size, clip_idx, num_clips):
+    """
+    Sample a clip of size clip_size from a video of size video_size and
+    return the indices of the first and last frame of the clip. If clip_idx is
+    -1, the clip is randomly sampled; otherwise uniformly split the video into
+    num_clips clips, and select the start and end index of the clip_idx-th
+    video clip.
+    Args:
+        video_size (int): number of overall frames.
+        clip_size (int): size of the clip to sample from the frames.
+        clip_idx (int): if clip_idx is -1, perform random jitter sampling. If
+            clip_idx is larger than -1, uniformly split the video to num_clips
+            clips, and select the start and end index of the clip_idx-th video
+            clip.
+        num_clips (int): overall number of clips to uniformly sample from the
+            given video for testing.
+    Returns:
+        start_idx (float): the start frame index.
+        end_idx (float): the end frame index.
+    """
+    delta = max(video_size - clip_size, 0)
+    if clip_idx == -1:
+        # Random temporal sampling.
+        start_idx = random.uniform(0, delta)
+    else:
+        # Uniformly sample the clip with the given index.
+        start_idx = delta * clip_idx / num_clips
+    end_idx = start_idx + clip_size - 1
+    return start_idx, end_idx
+
+
+def pyav_decode_stream(
+    container, start_pts, end_pts, stream, stream_name, buffer_size=0
+):
+    """
+    Decode the video with the PyAV decoder.
+    Args:
+        container (container): PyAV container.
+        start_pts (int): the starting Presentation TimeStamp to fetch the
+            video frames.
+        end_pts (int): the ending Presentation TimeStamp of the decoded frames.
+        stream (stream): PyAV stream.
+        stream_name (dict): a dictionary of streams. For example, {"video": 0}
+            means video stream at stream index 0.
+        buffer_size (int): number of additional frames to decode beyond end_pts.
+    Returns:
+        result (list): list of frames decoded.
+        max_pts (int): max Presentation TimeStamp of the video sequence.
+    """
+    # Seeking in the stream is imprecise, so seek to an earlier PTS by a
+    # margin.
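+    # For instance, with a typical 1/30000 stream time base, the margin of
+    # 1024 pts used below is roughly a 34 ms safety window before the
+    # requested start.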
+    margin = 1024
+    seek_offset = max(start_pts - margin, 0)
+
+    container.seek(seek_offset, any_frame=False, backward=True, stream=stream)
+    frames = {}
+    buffer_count = 0
+    max_pts = 0
+    for frame in container.decode(**stream_name):
+        max_pts = max(max_pts, frame.pts)
+        if frame.pts < start_pts:
+            continue
+        if frame.pts <= end_pts:
+            frames[frame.pts] = frame
+        else:
+            buffer_count += 1
+            frames[frame.pts] = frame
+            if buffer_count >= buffer_size:
+                break
+    result = [frames[pts] for pts in sorted(frames)]
+    return result, max_pts
+
+
+def torchvision_decode(
+    video_handle,
+    sampling_rate,
+    num_frames,
+    clip_idx,
+    video_meta,
+    num_clips=10,
+    target_fps=30,
+    modalities=("visual",),
+    max_spatial_scale=0,
+):
+    """
+    If video_meta is not empty, perform temporal selective decoding to sample a
+    clip from the video with the TorchVision decoder. If video_meta is empty,
+    decode the entire video and update video_meta.
+    Args:
+        video_handle (bytes): raw bytes of the video file.
+        sampling_rate (int): frame sampling rate (interval between two sampled
+            frames).
+        num_frames (int): number of frames to sample.
+        clip_idx (int): if clip_idx is -1, perform random temporal
+            sampling. If clip_idx is larger than -1, uniformly split the
+            video to num_clips clips, and select the clip_idx-th video clip.
+        video_meta (dict): a dict containing VideoMetaData. Details can be
+            found in `pytorch/vision/torchvision/io/_video_opt.py`.
+        num_clips (int): overall number of clips to uniformly sample from the
+            given video.
+        target_fps (int): the input video may have a different fps; convert it
+            to the target fps.
+        modalities (tuple): tuple of modalities to decode. Currently only
+            `visual` is supported; `acoustic` is planned.
+        max_spatial_scale (int): the maximal resolution of the spatial shorter
+            edge size during decoding.
+    Returns:
+        frames (tensor): decoded frames from the video.
+        fps (float): the number of frames per second of the video.
+        decode_all_video (bool): if True, the entire video was decoded.
+    """
+    # Convert the bytes to a tensor.
+    video_tensor = torch.from_numpy(np.frombuffer(video_handle, dtype=np.uint8))
+
+    decode_all_video = True
+    video_start_pts, video_end_pts = 0, -1
+    # If video_meta is empty, fetch the meta data from the raw video and cache
+    # it for selective decoding on later accesses.
+    if len(video_meta) == 0:
+        meta = io._probe_video_from_memory(video_tensor)
+        video_meta["video_timebase"] = meta.video_timebase
+        video_meta["video_numerator"] = meta.video_timebase.numerator
+        video_meta["video_denominator"] = meta.video_timebase.denominator
+        video_meta["has_video"] = meta.has_video
+        video_meta["video_duration"] = meta.video_duration
+        video_meta["video_fps"] = meta.video_fps
+        video_meta["audio_timebase"] = meta.audio_timebase
+        video_meta["audio_numerator"] = meta.audio_timebase.numerator
+        video_meta["audio_denominator"] = meta.audio_timebase.denominator
+        video_meta["has_audio"] = meta.has_audio
+        video_meta["audio_duration"] = meta.audio_duration
+        video_meta["audio_sample_rate"] = meta.audio_sample_rate
+
+    fps = video_meta["video_fps"]
+    if (
+        video_meta["has_video"]
+        and video_meta["video_denominator"] > 0
+        and video_meta["video_duration"] > 0
+    ):
+        # Try selective decoding.
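+        # For example, a 30 fps stream with video_denominator 15360 gives
+        # pts_per_frame = 512, so frame index 60 maps to pts 30720.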
+        decode_all_video = False
+        clip_size = sampling_rate * num_frames / target_fps * fps
+        start_idx, end_idx = get_start_end_idx(
+            fps * video_meta["video_duration"], clip_size, clip_idx, num_clips
+        )
+        # Convert frame index to pts.
+        pts_per_frame = video_meta["video_denominator"] / fps
+        video_start_pts = int(start_idx * pts_per_frame)
+        video_end_pts = int(end_idx * pts_per_frame)
+
+    # Decode the raw video with the torchvision decoder.
+    v_frames, _ = io._read_video_from_memory(
+        video_tensor,
+        seek_frame_margin=1.0,
+        read_video_stream="visual" in modalities,
+        video_width=0,
+        video_height=0,
+        video_min_dimension=max_spatial_scale,
+        video_pts_range=(video_start_pts, video_end_pts),
+        video_timebase_numerator=video_meta["video_numerator"],
+        video_timebase_denominator=video_meta["video_denominator"],
+    )
+
+    if v_frames.shape == torch.Size([0]):
+        # Selective decoding failed; fall back to decoding the entire video.
+        decode_all_video = True
+        video_start_pts, video_end_pts = 0, -1
+        v_frames, _ = io._read_video_from_memory(
+            video_tensor,
+            seek_frame_margin=1.0,
+            read_video_stream="visual" in modalities,
+            video_width=0,
+            video_height=0,
+            video_min_dimension=max_spatial_scale,
+            video_pts_range=(video_start_pts, video_end_pts),
+            video_timebase_numerator=video_meta["video_numerator"],
+            video_timebase_denominator=video_meta["video_denominator"],
+        )
+
+    return v_frames, fps, decode_all_video
+
+
+def pyav_decode(
+    container, sampling_rate, num_frames, clip_idx, num_clips=10,
+    target_fps=30, start=None, end=None, duration=None, frames_length=None,
+):
+    """
+    Convert the video from its original fps to the target_fps. If the video
+    supports selective decoding (i.e., decoding information is available in
+    the video header), then perform temporal selective decoding and sample a
+    clip from the video with the PyAV decoder. If the video does not support
+    selective decoding, decode the entire video.
+
+    Args:
+        container (container): PyAV container.
+        sampling_rate (int): frame sampling rate (interval between two sampled
+            frames).
+        num_frames (int): number of frames to sample.
+        clip_idx (int): if clip_idx is -1, perform random temporal sampling. If
+            clip_idx is larger than -1, uniformly split the video to num_clips
+            clips, and select the clip_idx-th video clip.
+        num_clips (int): overall number of clips to uniformly sample from the
+            given video.
+        target_fps (int): the input video may have a different fps; convert it
+            to the target fps before frame sampling.
+        start (int): optional starting pts; when given with `end`, only this
+            range is decoded.
+        end (int): optional ending pts.
+        duration (float): optional video duration in seconds, used when the
+            stream itself does not report one.
+        frames_length (int): optional number of frames in the video.
+    Returns:
+        frames (tensor): decoded frames from the video. Returns None if no
+            video stream was found.
+        fps (float): the number of frames per second of the video.
+        decode_all_video (bool): if True, the entire video was decoded.
+    """
+    # Try to fetch the decoding information from the video header. Some videos
+    # do not support fetching the decoding information; in that case the
+    # stream duration is None.
+    fps = float(container.streams.video[0].average_rate)
+
+    orig_duration = duration
+    tb = float(container.streams.video[0].time_base)
+    frames_length = container.streams.video[0].frames
+    duration = container.streams.video[0].duration
+    if duration is None and orig_duration is not None:
+        duration = orig_duration / tb
+
+    if duration is None:
+        # If the decoding information could not be fetched, decode the entire
+        # video.
+        decode_all_video = True
+        video_start_pts, video_end_pts = 0, math.inf
+    else:
+        # Perform selective decoding.
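+        # Frame indices from get_start_end_idx are converted to stream pts
+        # below via timebase = duration / frames_length; e.g. a 300-frame
+        # stream whose duration is 153600 pts gives 512 pts per frame.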
+        decode_all_video = False
+        start_idx, end_idx = get_start_end_idx(
+            frames_length,
+            sampling_rate * num_frames / target_fps * fps,
+            clip_idx,
+            num_clips,
+        )
+        timebase = duration / frames_length
+        video_start_pts = int(start_idx * timebase)
+        video_end_pts = int(end_idx * timebase)
+
+    if start is not None and end is not None:
+        decode_all_video = False
+
+    frames = None
+    # If a video stream was found, fetch video frames from the video.
+    if container.streams.video:
+        if start is None and end is None:
+            video_frames, max_pts = pyav_decode_stream(
+                container,
+                video_start_pts,
+                video_end_pts,
+                container.streams.video[0],
+                {"video": 0},
+            )
+        else:
+            timebase = duration / frames_length
+            start_i = start
+            end_i = end
+            video_frames, max_pts = pyav_decode_stream(
+                container,
+                start_i,
+                end_i,
+                container.streams.video[0],
+                {"video": 0},
+            )
+        container.close()
+
+        frames = [frame.to_rgb().to_ndarray() for frame in video_frames]
+        frames = torch.as_tensor(np.stack(frames))
+
+    return frames, fps, decode_all_video
+
+
+def decode(
+    container,
+    sampling_rate,
+    num_frames,
+    clip_idx=-1,
+    num_clips=10,
+    video_meta=None,
+    target_fps=30,
+    backend="pyav",
+    max_spatial_scale=0,
+    start=None,
+    end=None,
+    duration=None,
+    frames_length=None,
+):
+    """
+    Decode the video and perform temporal sampling.
+    Args:
+        container (container): pyav container.
+        sampling_rate (int): frame sampling rate (interval between two sampled
+            frames).
+        num_frames (int): number of frames to sample.
+        clip_idx (int): if clip_idx is -1, perform random temporal
+            sampling. If clip_idx is larger than -1, uniformly split the
+            video to num_clips clips, and select the
+            clip_idx-th video clip.
+        num_clips (int): overall number of clips to uniformly
+            sample from the given video.
+        video_meta (dict): a dict containing VideoMetaData. Details can be
+            found in `pytorch/vision/torchvision/io/_video_opt.py`.
+        target_fps (int): the input video may have a different fps; convert it
+            to the target fps before frame sampling.
+        backend (str): decoding backend, one of `pyav` and `torchvision`. The
+            default is `pyav`.
+        max_spatial_scale (int): keep the aspect ratio and resize the frame so
+            that the shorter edge size is max_spatial_scale. Only used in the
+            `torchvision` backend.
+    Returns:
+        frames (tensor): decoded frames from the video.
+    """
+    # Currently two decoders are supported: 1) PyAV, and 2) TorchVision.
+    assert clip_idx >= -1, "Not valid clip_idx {}".format(clip_idx)
+    try:
+        if backend == "pyav":
+            frames, fps, decode_all_video = pyav_decode(
+                container,
+                sampling_rate,
+                num_frames,
+                clip_idx,
+                num_clips,
+                target_fps,
+                start,
+                end,
+                duration,
+                frames_length,
+            )
+        elif backend == "torchvision":
+            frames, fps, decode_all_video = torchvision_decode(
+                container,
+                sampling_rate,
+                num_frames,
+                clip_idx,
+                video_meta,
+                num_clips,
+                target_fps,
+                ("visual",),
+                max_spatial_scale,
+            )
+        else:
+            raise NotImplementedError(
+                "Unknown decoding backend {}".format(backend)
+            )
+    except Exception as e:
+        print("Failed to decode by {} with exception: {}".format(backend, e))
+        return None
+
+    # Return None if the frames were not decoded successfully.
+    if frames is None or frames.size(0) == 0:
+        return None
+
+    clip_sz = sampling_rate * num_frames / target_fps * fps
+    start_idx, end_idx = get_start_end_idx(
+        frames.shape[0],
+        clip_sz,
+        clip_idx if decode_all_video else 0,
+        num_clips if decode_all_video else 1,
+    )
+    # Perform temporal sampling from the decoded video.
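+    # For example, a fully decoded 64-frame video with clip_sz 32 and
+    # clip_idx -1 may yield (start_idx, end_idx) = (10.5, 41.5);
+    # temporal_sampling() then picks num_frames indices evenly spaced over
+    # that window (clamped and truncated to integers).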
+    frames = temporal_sampling(frames, start_idx, end_idx, num_frames)
+    return frames
diff --git a/TimeSformer/timesformer/datasets/kinetics.py b/TimeSformer/timesformer/datasets/kinetics.py
new file mode 100755
index 0000000000000000000000000000000000000000..5151d877d6da4e9c7949be26b1bfd4046e618305
--- /dev/null
+++ b/TimeSformer/timesformer/datasets/kinetics.py
@@ -0,0 +1,294 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+import os
+import random
+import torch
+import torch.utils.data
+from fvcore.common.file_io import PathManager
+
+import timesformer.utils.logging as logging
+
+from . import decoder as decoder
+from . import utils as utils
+from . import video_container as container
+from .build import DATASET_REGISTRY
+
+logger = logging.get_logger(__name__)
+
+
+@DATASET_REGISTRY.register()
+class Kinetics(torch.utils.data.Dataset):
+    """
+    Kinetics video loader. Construct the Kinetics video loader, then sample
+    clips from the videos. For training and validation, a single clip is
+    randomly sampled from every video with random cropping, scaling, and
+    flipping. For testing, multiple clips are uniformly sampled from every
+    video with uniform cropping. For uniform cropping, we take the left,
+    center, and right crop if the width is larger than the height, or take
+    the top, center, and bottom crop if the height is larger than the width.
+    """
+
+    def __init__(self, cfg, mode, num_retries=10):
+        """
+        Construct the Kinetics video loader with a given csv file. The format
+        of the csv file is:
+        ```
+        path_to_video_1 label_1
+        path_to_video_2 label_2
+        ...
+        path_to_video_N label_N
+        ```
+        Args:
+            cfg (CfgNode): configs.
+            mode (string): Options include `train`, `val`, or `test` mode.
+                For the train and val modes, the data loader will take data
+                from the train or val set, and sample one clip per video.
+                For the test mode, the data loader will take data from the
+                test set, and sample multiple clips per video.
+            num_retries (int): number of retries.
+        """
+        # Only support train, val, and test mode.
+        assert mode in [
+            "train",
+            "val",
+            "test",
+        ], "Split '{}' not supported for Kinetics".format(mode)
+        self.mode = mode
+        self.cfg = cfg
+
+        self._video_meta = {}
+        self._num_retries = num_retries
+        # For the training and validation modes, a single clip is sampled from
+        # every video. For the test mode, NUM_ENSEMBLE_VIEWS clips are sampled
+        # from every video, and NUM_SPATIAL_CROPS crops are taken spatially
+        # from every clip.
+        if self.mode in ["train", "val"]:
+            self._num_clips = 1
+        elif self.mode in ["test"]:
+            self._num_clips = (
+                cfg.TEST.NUM_ENSEMBLE_VIEWS * cfg.TEST.NUM_SPATIAL_CROPS
+            )
+
+        logger.info("Constructing Kinetics {}...".format(mode))
+        self._construct_loader()
+
+    def _construct_loader(self):
+        """
+        Construct the video loader.
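+
+        Each csv entry is replicated `self._num_clips` times so that, at test
+        time, every spatial/temporal view of a video gets its own dataset
+        index.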
+ """ + path_to_file = os.path.join( + self.cfg.DATA.PATH_TO_DATA_DIR, "{}.csv".format(self.mode) + ) + assert PathManager.exists(path_to_file), "{} dir not found".format( + path_to_file + ) + + self._path_to_videos = [] + self._labels = [] + self._spatial_temporal_idx = [] + with PathManager.open(path_to_file, "r") as f: + for clip_idx, path_label in enumerate(f.read().splitlines()): + assert ( + len(path_label.split(self.cfg.DATA.PATH_LABEL_SEPARATOR)) + == 2 + ) + path, label = path_label.split( + self.cfg.DATA.PATH_LABEL_SEPARATOR + ) + for idx in range(self._num_clips): + self._path_to_videos.append( + os.path.join(self.cfg.DATA.PATH_PREFIX, path) + ) + self._labels.append(int(label)) + self._spatial_temporal_idx.append(idx) + self._video_meta[clip_idx * self._num_clips + idx] = {} + assert ( + len(self._path_to_videos) > 0 + ), "Failed to load Kinetics split {} from {}".format( + self._split_idx, path_to_file + ) + logger.info( + "Constructing kinetics dataloader (size: {}) from {}".format( + len(self._path_to_videos), path_to_file + ) + ) + + def __getitem__(self, index): + """ + Given the video index, return the list of frames, label, and video + index if the video can be fetched and decoded successfully, otherwise + repeatly find a random video that can be decoded as a replacement. + Args: + index (int): the video index provided by the pytorch sampler. + Returns: + frames (tensor): the frames of sampled from the video. The dimension + is `channel` x `num frames` x `height` x `width`. + label (int): the label of the current video. + index (int): if the video provided by pytorch sampler can be + decoded, then return the index of the video. If not, return the + index of the video replacement that can be decoded. + """ + short_cycle_idx = None + # When short cycle is used, input index is a tupple. + if isinstance(index, tuple): + index, short_cycle_idx = index + + if self.mode in ["train", "val"]: + # -1 indicates random sampling. + temporal_sample_index = -1 + spatial_sample_index = -1 + min_scale = self.cfg.DATA.TRAIN_JITTER_SCALES[0] + max_scale = self.cfg.DATA.TRAIN_JITTER_SCALES[1] + crop_size = self.cfg.DATA.TRAIN_CROP_SIZE + if short_cycle_idx in [0, 1]: + crop_size = int( + round( + self.cfg.MULTIGRID.SHORT_CYCLE_FACTORS[short_cycle_idx] + * self.cfg.MULTIGRID.DEFAULT_S + ) + ) + if self.cfg.MULTIGRID.DEFAULT_S > 0: + # Decreasing the scale is equivalent to using a larger "span" + # in a sampling grid. + min_scale = int( + round( + float(min_scale) + * crop_size + / self.cfg.MULTIGRID.DEFAULT_S + ) + ) + elif self.mode in ["test"]: + temporal_sample_index = ( + self._spatial_temporal_idx[index] + // self.cfg.TEST.NUM_SPATIAL_CROPS + ) + # spatial_sample_index is in [0, 1, 2]. Corresponding to left, + # center, or right if width is larger than height, and top, middle, + # or bottom if height is larger than width. + spatial_sample_index = ( + ( + self._spatial_temporal_idx[index] + % self.cfg.TEST.NUM_SPATIAL_CROPS + ) + if self.cfg.TEST.NUM_SPATIAL_CROPS > 1 + else 1 + ) + min_scale, max_scale, crop_size = ( + [self.cfg.DATA.TEST_CROP_SIZE] * 3 + if self.cfg.TEST.NUM_SPATIAL_CROPS > 1 + else [self.cfg.DATA.TRAIN_JITTER_SCALES[0]] * 2 + + [self.cfg.DATA.TEST_CROP_SIZE] + ) + # The testing is deterministic and no jitter should be performed. + # min_scale, max_scale, and crop_size are expect to be the same. 
+ assert len({min_scale, max_scale}) == 1 + else: + raise NotImplementedError( + "Does not support {} mode".format(self.mode) + ) + sampling_rate = utils.get_random_sampling_rate( + self.cfg.MULTIGRID.LONG_CYCLE_SAMPLING_RATE, + self.cfg.DATA.SAMPLING_RATE, + ) + # Try to decode and sample a clip from a video. If the video can not be + # decoded, repeatly find a random video replacement that can be decoded. + for i_try in range(self._num_retries): + video_container = None + try: + video_container = container.get_video_container( + self._path_to_videos[index], + self.cfg.DATA_LOADER.ENABLE_MULTI_THREAD_DECODE, + self.cfg.DATA.DECODING_BACKEND, + ) + except Exception as e: + logger.info( + "Failed to load video from {} with error {}".format( + self._path_to_videos[index], e + ) + ) + # Select a random video if the current video was not able to access. + if video_container is None: + logger.warning( + "Failed to meta load video idx {} from {}; trial {}".format( + index, self._path_to_videos[index], i_try + ) + ) + if self.mode not in ["test"] and i_try > self._num_retries // 2: + # let's try another one + index = random.randint(0, len(self._path_to_videos) - 1) + continue + + # Decode video. Meta info is used to perform selective decoding. + frames = decoder.decode( + video_container, + sampling_rate, + self.cfg.DATA.NUM_FRAMES, + temporal_sample_index, + self.cfg.TEST.NUM_ENSEMBLE_VIEWS, + video_meta=self._video_meta[index], + target_fps=self.cfg.DATA.TARGET_FPS, + backend=self.cfg.DATA.DECODING_BACKEND, + max_spatial_scale=min_scale, + ) + + # If decoding failed (wrong format, video is too short, and etc), + # select another video. + if frames is None: + logger.warning( + "Failed to decode video idx {} from {}; trial {}".format( + index, self._path_to_videos[index], i_try + ) + ) + if self.mode not in ["test"] and i_try > self._num_retries // 2: + # let's try another one + index = random.randint(0, len(self._path_to_videos) - 1) + continue + + + label = self._labels[index] + + # Perform color normalization. + frames = utils.tensor_normalize( + frames, self.cfg.DATA.MEAN, self.cfg.DATA.STD + ) + + # T H W C -> C T H W. + frames = frames.permute(3, 0, 1, 2) + # Perform data augmentation. + frames = utils.spatial_sampling( + frames, + spatial_idx=spatial_sample_index, + min_scale=min_scale, + max_scale=max_scale, + crop_size=crop_size, + random_horizontal_flip=self.cfg.DATA.RANDOM_FLIP, + inverse_uniform_sampling=self.cfg.DATA.INV_UNIFORM_SAMPLE, + ) + + + if not self.cfg.MODEL.ARCH in ['vit']: + frames = utils.pack_pathway_output(self.cfg, frames) + else: + # Perform temporal sampling from the fast pathway. + frames = torch.index_select( + frames, + 1, + torch.linspace( + 0, frames.shape[1] - 1, self.cfg.DATA.NUM_FRAMES + + ).long(), + ) + + return frames, label, index, {} + else: + raise RuntimeError( + "Failed to fetch video after {} retries.".format( + self._num_retries + ) + ) + + def __len__(self): + """ + Returns: + (int): the number of videos in the dataset. + """ + return len(self._path_to_videos) diff --git a/TimeSformer/timesformer/datasets/loader.py b/TimeSformer/timesformer/datasets/loader.py new file mode 100755 index 0000000000000000000000000000000000000000..226e407f5ae3c6950aede4898ae69c79ef010f33 --- /dev/null +++ b/TimeSformer/timesformer/datasets/loader.py @@ -0,0 +1,134 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
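+
+# Usage sketch (illustrative only; assumes a fully populated CfgNode `cfg`):
+#
+#   train_loader = construct_loader(cfg, "train")
+#   for epoch in range(cfg.SOLVER.MAX_EPOCH):
+#       shuffle_dataset(train_loader, epoch)
+#       for inputs, labels, video_idx, meta in train_loader:
+#           ...  # forward/backward pass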
+ +"""Data loader.""" + +import itertools +import numpy as np +import torch +from torch.utils.data._utils.collate import default_collate +from torch.utils.data.distributed import DistributedSampler +from torch.utils.data.sampler import RandomSampler + +from timesformer.datasets.multigrid_helper import ShortCycleBatchSampler + +from . import utils as utils +from .build import build_dataset + + +def detection_collate(batch): + """ + Collate function for detection task. Concatanate bboxes, labels and + metadata from different samples in the first dimension instead of + stacking them to have a batch-size dimension. + Args: + batch (tuple or list): data batch to collate. + Returns: + (tuple): collated detection data batch. + """ + inputs, labels, video_idx, extra_data = zip(*batch) + inputs, video_idx = default_collate(inputs), default_collate(video_idx) + labels = torch.tensor(np.concatenate(labels, axis=0)).float() + + collated_extra_data = {} + for key in extra_data[0].keys(): + data = [d[key] for d in extra_data] + if key == "boxes" or key == "ori_boxes": + # Append idx info to the bboxes before concatenating them. + bboxes = [ + np.concatenate( + [np.full((data[i].shape[0], 1), float(i)), data[i]], axis=1 + ) + for i in range(len(data)) + ] + bboxes = np.concatenate(bboxes, axis=0) + collated_extra_data[key] = torch.tensor(bboxes).float() + elif key == "metadata": + collated_extra_data[key] = torch.tensor( + list(itertools.chain(*data)) + ).view(-1, 2) + else: + collated_extra_data[key] = default_collate(data) + + return inputs, labels, video_idx, collated_extra_data + + +def construct_loader(cfg, split, is_precise_bn=False): + """ + Constructs the data loader for the given dataset. + Args: + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + split (str): the split of the data loader. Options include `train`, + `val`, and `test`. 
+ """ + assert split in ["train", "val", "test"] + if split in ["train"]: + dataset_name = cfg.TRAIN.DATASET + batch_size = int(cfg.TRAIN.BATCH_SIZE / max(1, cfg.NUM_GPUS)) + shuffle = True + drop_last = True + elif split in ["val"]: + dataset_name = cfg.TRAIN.DATASET + batch_size = int(cfg.TRAIN.BATCH_SIZE / max(1, cfg.NUM_GPUS)) + shuffle = False + drop_last = False + elif split in ["test"]: + dataset_name = cfg.TEST.DATASET + batch_size = int(cfg.TEST.BATCH_SIZE / max(1, cfg.NUM_GPUS)) + shuffle = False + drop_last = False + + # Construct the dataset + dataset = build_dataset(dataset_name, cfg, split) + + if cfg.MULTIGRID.SHORT_CYCLE and split in ["train"] and not is_precise_bn: + # Create a sampler for multi-process training + sampler = utils.create_sampler(dataset, shuffle, cfg) + batch_sampler = ShortCycleBatchSampler( + sampler, batch_size=batch_size, drop_last=drop_last, cfg=cfg + ) + # Create a loader + loader = torch.utils.data.DataLoader( + dataset, + batch_sampler=batch_sampler, + num_workers=cfg.DATA_LOADER.NUM_WORKERS, + pin_memory=cfg.DATA_LOADER.PIN_MEMORY, + worker_init_fn=utils.loader_worker_init_fn(dataset), + ) + else: + # Create a sampler for multi-process training + sampler = utils.create_sampler(dataset, shuffle, cfg) + # Create a loader + loader = torch.utils.data.DataLoader( + dataset, + batch_size=batch_size, + shuffle=(False if sampler else shuffle), + sampler=sampler, + num_workers=cfg.DATA_LOADER.NUM_WORKERS, + pin_memory=cfg.DATA_LOADER.PIN_MEMORY, + drop_last=drop_last, + collate_fn=detection_collate if cfg.DETECTION.ENABLE else None, + worker_init_fn=utils.loader_worker_init_fn(dataset), + ) + return loader + + +def shuffle_dataset(loader, cur_epoch): + """ " + Shuffles the data. + Args: + loader (loader): data loader to perform shuffle. + cur_epoch (int): number of the current epoch. + """ + sampler = ( + loader.batch_sampler.sampler + if isinstance(loader.batch_sampler, ShortCycleBatchSampler) + else loader.sampler + ) + assert isinstance( + sampler, (RandomSampler, DistributedSampler) + ), "Sampler type '{}' not supported".format(type(sampler)) + # RandomSampler handles shuffling automatically + if isinstance(sampler, DistributedSampler): + # DistributedSampler shuffles data based on epoch + sampler.set_epoch(cur_epoch) diff --git a/TimeSformer/timesformer/datasets/multigrid_helper.py b/TimeSformer/timesformer/datasets/multigrid_helper.py new file mode 100755 index 0000000000000000000000000000000000000000..de9ba0c23071560ed8baee5fd0b70cfcf5ab9306 --- /dev/null +++ b/TimeSformer/timesformer/datasets/multigrid_helper.py @@ -0,0 +1,78 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Helper functions for multigrid training.""" + +import numpy as np +from torch._six import int_classes as _int_classes +from torch.utils.data.sampler import Sampler + + +class ShortCycleBatchSampler(Sampler): + """ + Extend Sampler to support "short cycle" sampling. + See paper "A Multigrid Method for Efficiently Training Video Models", + Wu et al., 2019 (https://arxiv.org/abs/1912.00998) for details. 
+ """ + + def __init__(self, sampler, batch_size, drop_last, cfg): + if not isinstance(sampler, Sampler): + raise ValueError( + "sampler should be an instance of " + "torch.utils.data.Sampler, but got sampler={}".format(sampler) + ) + if ( + not isinstance(batch_size, _int_classes) + or isinstance(batch_size, bool) + or batch_size <= 0 + ): + raise ValueError( + "batch_size should be a positive integer value, " + "but got batch_size={}".format(batch_size) + ) + if not isinstance(drop_last, bool): + raise ValueError( + "drop_last should be a boolean value, but got " + "drop_last={}".format(drop_last) + ) + self.sampler = sampler + self.drop_last = drop_last + + bs_factor = [ + int( + round( + ( + float(cfg.DATA.TRAIN_CROP_SIZE) + / (s * cfg.MULTIGRID.DEFAULT_S) + ) + ** 2 + ) + ) + for s in cfg.MULTIGRID.SHORT_CYCLE_FACTORS + ] + + self.batch_sizes = [ + batch_size * bs_factor[0], + batch_size * bs_factor[1], + batch_size, + ] + + def __iter__(self): + counter = 0 + batch_size = self.batch_sizes[0] + batch = [] + for idx in self.sampler: + batch.append((idx, counter % 3)) + if len(batch) == batch_size: + yield batch + counter += 1 + batch_size = self.batch_sizes[counter % 3] + batch = [] + if len(batch) > 0 and not self.drop_last: + yield batch + + def __len__(self): + avg_batch_size = sum(self.batch_sizes) / 3.0 + if self.drop_last: + return int(np.floor(len(self.sampler) / avg_batch_size)) + else: + return int(np.ceil(len(self.sampler) / avg_batch_size)) diff --git a/TimeSformer/timesformer/datasets/ssv2.py b/TimeSformer/timesformer/datasets/ssv2.py new file mode 100755 index 0000000000000000000000000000000000000000..172c14ca2f700effb6311131208ac557c2279341 --- /dev/null +++ b/TimeSformer/timesformer/datasets/ssv2.py @@ -0,0 +1,278 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import json +import numpy as np +import os +import random +from itertools import chain as chain +import torch +import torch.utils.data +from fvcore.common.file_io import PathManager + +import timesformer.utils.logging as logging + +from . import utils as utils +from .build import DATASET_REGISTRY + +logger = logging.get_logger(__name__) + + +@DATASET_REGISTRY.register() +class Ssv2(torch.utils.data.Dataset): + """ + Something-Something v2 (SSV2) video loader. Construct the SSV2 video loader, + then sample clips from the videos. For training and validation, a single + clip is randomly sampled from every video with random cropping, scaling, and + flipping. For testing, multiple clips are uniformaly sampled from every + video with uniform cropping. For uniform cropping, we take the left, center, + and right crop if the width is larger than height, or take top, center, and + bottom crop if the height is larger than the width. + """ + + def __init__(self, cfg, mode, num_retries=10): + """ + Load Something-Something V2 data (frame paths, labels, etc. ) to a given + Dataset object. The dataset could be downloaded from Something-Something + official website (https://20bn.com/datasets/something-something). + Please see datasets/DATASET.md for more information about the data format. + Args: + cfg (CfgNode): configs. + mode (string): Options includes `train`, `val`, or `test` mode. + For the train and val mode, the data loader will take data + from the train or val set, and sample one clip per video. + For the test mode, the data loader will take data from test set, + and sample multiple clips per video. + num_retries (int): number of retries for reading frames from disk. 
+ """ + # Only support train, val, and test mode. + assert mode in [ + "train", + "val", + "test", + ], "Split '{}' not supported for Something-Something V2".format(mode) + self.mode = mode + self.cfg = cfg + + self._video_meta = {} + self._num_retries = num_retries + # For training or validation mode, one single clip is sampled from every + # video. For testing, NUM_ENSEMBLE_VIEWS clips are sampled from every + # video. For every clip, NUM_SPATIAL_CROPS is cropped spatially from + # the frames. + if self.mode in ["train", "val"]: + self._num_clips = 1 + elif self.mode in ["test"]: + self._num_clips = ( + cfg.TEST.NUM_ENSEMBLE_VIEWS * cfg.TEST.NUM_SPATIAL_CROPS + ) + + logger.info("Constructing Something-Something V2 {}...".format(mode)) + self._construct_loader() + + def _construct_loader(self): + """ + Construct the video loader. + """ + # Loading label names. + with PathManager.open( + os.path.join( + self.cfg.DATA.PATH_TO_DATA_DIR, + "something-something-v2-labels.json", + ), + "r", + ) as f: + label_dict = json.load(f) + + # Loading labels. + label_file = os.path.join( + self.cfg.DATA.PATH_TO_DATA_DIR, + "something-something-v2-{}.json".format( + "train" if self.mode == "train" else "validation" + ), + ) + with PathManager.open(label_file, "r") as f: + label_json = json.load(f) + + self._video_names = [] + self._labels = [] + for video in label_json: + video_name = video["id"] + template = video["template"] + template = template.replace("[", "") + template = template.replace("]", "") + label = int(label_dict[template]) + self._video_names.append(video_name) + self._labels.append(label) + + path_to_file = os.path.join( + self.cfg.DATA.PATH_TO_DATA_DIR, + "{}.csv".format("train" if self.mode == "train" else "val"), + ) + assert PathManager.exists(path_to_file), "{} dir not found".format( + path_to_file + ) + + self._path_to_videos, _ = utils.load_image_lists( + path_to_file, self.cfg.DATA.PATH_PREFIX + ) + + assert len(self._path_to_videos) == len(self._video_names), ( + len(self._path_to_videos), + len(self._video_names), + ) + + + # From dict to list. + new_paths, new_labels = [], [] + for index in range(len(self._video_names)): + if self._video_names[index] in self._path_to_videos: + new_paths.append(self._path_to_videos[self._video_names[index]]) + new_labels.append(self._labels[index]) + + self._labels = new_labels + self._path_to_videos = new_paths + + # Extend self when self._num_clips > 1 (during testing). + self._path_to_videos = list( + chain.from_iterable( + [[x] * self._num_clips for x in self._path_to_videos] + ) + ) + self._labels = list( + chain.from_iterable([[x] * self._num_clips for x in self._labels]) + ) + self._spatial_temporal_idx = list( + chain.from_iterable( + [ + range(self._num_clips) + for _ in range(len(self._path_to_videos)) + ] + ) + ) + logger.info( + "Something-Something V2 dataloader constructed " + " (size: {}) from {}".format( + len(self._path_to_videos), path_to_file + ) + ) + + def __getitem__(self, index): + """ + Given the video index, return the list of frames, label, and video + index if the video frames can be fetched. + Args: + index (int): the video index provided by the pytorch sampler. + Returns: + frames (tensor): the frames of sampled from the video. The dimension + is `channel` x `num frames` x `height` x `width`. + label (int): the label of the current video. + index (int): the index of the video. + """ + short_cycle_idx = None + # When short cycle is used, input index is a tupple. 
+ if isinstance(index, tuple): + index, short_cycle_idx = index + + if self.mode in ["train", "val"]: #or self.cfg.MODEL.ARCH in ['resformer', 'vit']: + # -1 indicates random sampling. + spatial_sample_index = -1 + min_scale = self.cfg.DATA.TRAIN_JITTER_SCALES[0] + max_scale = self.cfg.DATA.TRAIN_JITTER_SCALES[1] + crop_size = self.cfg.DATA.TRAIN_CROP_SIZE + if short_cycle_idx in [0, 1]: + crop_size = int( + round( + self.cfg.MULTIGRID.SHORT_CYCLE_FACTORS[short_cycle_idx] + * self.cfg.MULTIGRID.DEFAULT_S + ) + ) + if self.cfg.MULTIGRID.DEFAULT_S > 0: + # Decreasing the scale is equivalent to using a larger "span" + # in a sampling grid. + min_scale = int( + round( + float(min_scale) + * crop_size + / self.cfg.MULTIGRID.DEFAULT_S + ) + ) + elif self.mode in ["test"]: + # spatial_sample_index is in [0, 1, 2]. Corresponding to left, + # center, or right if width is larger than height, and top, middle, + # or bottom if height is larger than width. + spatial_sample_index = ( + self._spatial_temporal_idx[index] + % self.cfg.TEST.NUM_SPATIAL_CROPS + ) + if self.cfg.TEST.NUM_SPATIAL_CROPS == 1: + spatial_sample_index = 1 + + min_scale, max_scale, crop_size = [self.cfg.DATA.TEST_CROP_SIZE] * 3 + # The testing is deterministic and no jitter should be performed. + # min_scale, max_scale, and crop_size are expect to be the same. + assert len({min_scale, max_scale, crop_size}) == 1 + else: + raise NotImplementedError( + "Does not support {} mode".format(self.mode) + ) + + label = self._labels[index] + + num_frames = self.cfg.DATA.NUM_FRAMES + video_length = len(self._path_to_videos[index]) + + + seg_size = float(video_length - 1) / num_frames + seq = [] + for i in range(num_frames): + start = int(np.round(seg_size * i)) + end = int(np.round(seg_size * (i + 1))) + if self.mode == "train": + seq.append(random.randint(start, end)) + else: + seq.append((start + end) // 2) + + frames = torch.as_tensor( + utils.retry_load_images( + [self._path_to_videos[index][frame] for frame in seq], + self._num_retries, + ) + ) + + # Perform color normalization. + frames = utils.tensor_normalize( + frames, self.cfg.DATA.MEAN, self.cfg.DATA.STD + ) + + # T H W C -> C T H W. + frames = frames.permute(3, 0, 1, 2) + frames = utils.spatial_sampling( + frames, + spatial_idx=spatial_sample_index, + min_scale=min_scale, + max_scale=max_scale, + crop_size=crop_size, + random_horizontal_flip=self.cfg.DATA.RANDOM_FLIP, + inverse_uniform_sampling=self.cfg.DATA.INV_UNIFORM_SAMPLE, + ) + #if not self.cfg.RESFORMER.ACTIVE: + if not self.cfg.MODEL.ARCH in ['vit']: + frames = utils.pack_pathway_output(self.cfg, frames) + else: + # Perform temporal sampling from the fast pathway. + frames = torch.index_select( + frames, + 1, + torch.linspace( + 0, frames.shape[1] - 1, self.cfg.DATA.NUM_FRAMES + + ).long(), + ) + return frames, label, index, {} + + def __len__(self): + """ + Returns: + (int): the number of videos in the dataset. + """ + return len(self._path_to_videos) diff --git a/TimeSformer/timesformer/datasets/transform.py b/TimeSformer/timesformer/datasets/transform.py new file mode 100755 index 0000000000000000000000000000000000000000..bd0e156317379405376ead99a12ecbd4042e2318 --- /dev/null +++ b/TimeSformer/timesformer/datasets/transform.py @@ -0,0 +1,459 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
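+
+# These functional transforms operate on clip tensors of shape
+# `num frames` x `channel` x `height` x `width`. A typical train-time chain
+# (a sketch of how the dataset loaders compose them, not a prescribed recipe):
+#
+#   frames, _ = random_short_side_scale_jitter(frames, min_size=256, max_size=320)
+#   frames, _ = random_crop(frames, size=224)
+#   frames, _ = horizontal_flip(prob=0.5, images=frames)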
+ +import math +import numpy as np +import torch + + +def random_short_side_scale_jitter( + images, min_size, max_size, boxes=None, inverse_uniform_sampling=False +): + """ + Perform a spatial short scale jittering on the given images and + corresponding boxes. + Args: + images (tensor): images to perform scale jitter. Dimension is + `num frames` x `channel` x `height` x `width`. + min_size (int): the minimal size to scale the frames. + max_size (int): the maximal size to scale the frames. + boxes (ndarray): optional. Corresponding boxes to images. + Dimension is `num boxes` x 4. + inverse_uniform_sampling (bool): if True, sample uniformly in + [1 / max_scale, 1 / min_scale] and take a reciprocal to get the + scale. If False, take a uniform sample from [min_scale, max_scale]. + Returns: + (tensor): the scaled images with dimension of + `num frames` x `channel` x `new height` x `new width`. + (ndarray or None): the scaled boxes with dimension of + `num boxes` x 4. + """ + if inverse_uniform_sampling: + size = int( + round(1.0 / np.random.uniform(1.0 / max_size, 1.0 / min_size)) + ) + else: + size = int(round(np.random.uniform(min_size, max_size))) + + height = images.shape[2] + width = images.shape[3] + if (width <= height and width == size) or ( + height <= width and height == size + ): + return images, boxes + new_width = size + new_height = size + if width < height: + new_height = int(math.floor((float(height) / width) * size)) + if boxes is not None: + boxes = boxes * float(new_height) / height + else: + new_width = int(math.floor((float(width) / height) * size)) + if boxes is not None: + boxes = boxes * float(new_width) / width + + return ( + torch.nn.functional.interpolate( + images, + size=(new_height, new_width), + mode="bilinear", + align_corners=False, + ), + boxes, + ) + + +def crop_boxes(boxes, x_offset, y_offset): + """ + Peform crop on the bounding boxes given the offsets. + Args: + boxes (ndarray or None): bounding boxes to peform crop. The dimension + is `num boxes` x 4. + x_offset (int): cropping offset in the x axis. + y_offset (int): cropping offset in the y axis. + Returns: + cropped_boxes (ndarray or None): the cropped boxes with dimension of + `num boxes` x 4. + """ + cropped_boxes = boxes.copy() + cropped_boxes[:, [0, 2]] = boxes[:, [0, 2]] - x_offset + cropped_boxes[:, [1, 3]] = boxes[:, [1, 3]] - y_offset + + return cropped_boxes + + +def random_crop(images, size, boxes=None): + """ + Perform random spatial crop on the given images and corresponding boxes. + Args: + images (tensor): images to perform random crop. The dimension is + `num frames` x `channel` x `height` x `width`. + size (int): the size of height and width to crop on the image. + boxes (ndarray or None): optional. Corresponding boxes to images. + Dimension is `num boxes` x 4. + Returns: + cropped (tensor): cropped images with dimension of + `num frames` x `channel` x `size` x `size`. + cropped_boxes (ndarray or None): the cropped boxes with dimension of + `num boxes` x 4. 
+ """ + if images.shape[2] == size and images.shape[3] == size: + return images, None + height = images.shape[2] + width = images.shape[3] + y_offset = 0 + if height > size: + y_offset = int(np.random.randint(0, height - size)) + x_offset = 0 + if width > size: + x_offset = int(np.random.randint(0, width - size)) + cropped = images[ + :, :, y_offset : y_offset + size, x_offset : x_offset + size + ] + + cropped_boxes = ( + crop_boxes(boxes, x_offset, y_offset) if boxes is not None else None + ) + + return cropped, cropped_boxes + + +def horizontal_flip(prob, images, boxes=None): + """ + Perform horizontal flip on the given images and corresponding boxes. + Args: + prob (float): probility to flip the images. + images (tensor): images to perform horizontal flip, the dimension is + `num frames` x `channel` x `height` x `width`. + boxes (ndarray or None): optional. Corresponding boxes to images. + Dimension is `num boxes` x 4. + Returns: + images (tensor): images with dimension of + `num frames` x `channel` x `height` x `width`. + flipped_boxes (ndarray or None): the flipped boxes with dimension of + `num boxes` x 4. + """ + if boxes is None: + flipped_boxes = None + else: + flipped_boxes = boxes.copy() + + if np.random.uniform() < prob: + images = images.flip((-1)) + + width = images.shape[3] + if boxes is not None: + flipped_boxes[:, [0, 2]] = width - boxes[:, [2, 0]] - 1 + + return images, flipped_boxes + + +def uniform_crop(images, size, spatial_idx, boxes=None): + """ + Perform uniform spatial sampling on the images and corresponding boxes. + Args: + images (tensor): images to perform uniform crop. The dimension is + `num frames` x `channel` x `height` x `width`. + size (int): size of height and weight to crop the images. + spatial_idx (int): 0, 1, or 2 for left, center, and right crop if width + is larger than height. Or 0, 1, or 2 for top, center, and bottom + crop if height is larger than width. + boxes (ndarray or None): optional. Corresponding boxes to images. + Dimension is `num boxes` x 4. + Returns: + cropped (tensor): images with dimension of + `num frames` x `channel` x `size` x `size`. + cropped_boxes (ndarray or None): the cropped boxes with dimension of + `num boxes` x 4. + """ + assert spatial_idx in [0, 1, 2] + height = images.shape[2] + width = images.shape[3] + + y_offset = int(math.ceil((height - size) / 2)) + x_offset = int(math.ceil((width - size) / 2)) + + if height > width: + if spatial_idx == 0: + y_offset = 0 + elif spatial_idx == 2: + y_offset = height - size + else: + if spatial_idx == 0: + x_offset = 0 + elif spatial_idx == 2: + x_offset = width - size + cropped = images[ + :, :, y_offset : y_offset + size, x_offset : x_offset + size + ] + + cropped_boxes = ( + crop_boxes(boxes, x_offset, y_offset) if boxes is not None else None + ) + + return cropped, cropped_boxes + + +def uniform_crop_2crops(images, size, spatial_idx, boxes=None): + """ + Perform uniform spatial sampling on the images and corresponding boxes. + Args: + images (tensor): images to perform uniform crop. The dimension is + `num frames` x `channel` x `height` x `width`. + size (int): size of height and weight to crop the images. + spatial_idx (int): 0, 1, or 2 for left, center, and right crop if width + is larger than height. Or 0, 1, or 2 for top, center, and bottom + crop if height is larger than width. + boxes (ndarray or None): optional. Corresponding boxes to images. + Dimension is `num boxes` x 4. 
+ Returns: + cropped (tensor): images with dimension of + `num frames` x `channel` x `size` x `size`. + cropped_boxes (ndarray or None): the cropped boxes with dimension of + `num boxes` x 4. + """ + assert spatial_idx in [0, 1, 2] + height = images.shape[2] + width = images.shape[3] + + + if height > width: + x_offset = 0 + if height > size * 2: + if spatial_idx == 0: + y_offset = int((height - size * 2) // 2) + elif spatial_idx == 1: + y_offset = int(height - size - ((height - size * 2) // 2)) + else: + if spatial_idx == 0: + y_offset = 0 + elif spatial_idx == 1: + y_offset = height - size + else: + y_offset = 0 + if width > size * 2: + if spatial_idx == 0: + x_offset = int((width - size * 2) // 2) + elif spatial_idx == 1: + x_offset = int(width - size - ((width - size * 2) // 2)) + else: + if spatial_idx == 0: + x_offset = 0 + elif spatial_idx == 1: + x_offset = width - size + + cropped = images[ + :, :, y_offset : y_offset + size, x_offset : x_offset + size + ] + + cropped_boxes = ( + crop_boxes(boxes, x_offset, y_offset) if boxes is not None else None + ) + + return cropped, cropped_boxes + +def clip_boxes_to_image(boxes, height, width): + """ + Clip an array of boxes to an image with the given height and width. + Args: + boxes (ndarray): bounding boxes to perform clipping. + Dimension is `num boxes` x 4. + height (int): given image height. + width (int): given image width. + Returns: + clipped_boxes (ndarray): the clipped boxes with dimension of + `num boxes` x 4. + """ + clipped_boxes = boxes.copy() + clipped_boxes[:, [0, 2]] = np.minimum( + width - 1.0, np.maximum(0.0, boxes[:, [0, 2]]) + ) + clipped_boxes[:, [1, 3]] = np.minimum( + height - 1.0, np.maximum(0.0, boxes[:, [1, 3]]) + ) + return clipped_boxes + + +def blend(images1, images2, alpha): + """ + Blend two images with a given weight alpha. + Args: + images1 (tensor): the first images to be blended, the dimension is + `num frames` x `channel` x `height` x `width`. + images2 (tensor): the second images to be blended, the dimension is + `num frames` x `channel` x `height` x `width`. + alpha (float): the blending weight. + Returns: + (tensor): blended images, the dimension is + `num frames` x `channel` x `height` x `width`. + """ + return images1 * alpha + images2 * (1 - alpha) + + +def grayscale(images): + """ + Get the grayscale for the input images. The channels of images should be + in order BGR. + Args: + images (tensor): the input images for getting grayscale. Dimension is + `num frames` x `channel` x `height` x `width`. + Returns: + img_gray (tensor): blended images, the dimension is + `num frames` x `channel` x `height` x `width`. + """ + # R -> 0.299, G -> 0.587, B -> 0.114. + img_gray = torch.tensor(images) + gray_channel = ( + 0.299 * images[:, 2] + 0.587 * images[:, 1] + 0.114 * images[:, 0] + ) + img_gray[:, 0] = gray_channel + img_gray[:, 1] = gray_channel + img_gray[:, 2] = gray_channel + return img_gray + + +def color_jitter(images, img_brightness=0, img_contrast=0, img_saturation=0): + """ + Perfrom a color jittering on the input images. The channels of images + should be in order BGR. + Args: + images (tensor): images to perform color jitter. Dimension is + `num frames` x `channel` x `height` x `width`. + img_brightness (float): jitter ratio for brightness. + img_contrast (float): jitter ratio for contrast. + img_saturation (float): jitter ratio for saturation. + Returns: + images (tensor): the jittered images, the dimension is + `num frames` x `channel` x `height` x `width`. 
+ """ + + jitter = [] + if img_brightness != 0: + jitter.append("brightness") + if img_contrast != 0: + jitter.append("contrast") + if img_saturation != 0: + jitter.append("saturation") + + if len(jitter) > 0: + order = np.random.permutation(np.arange(len(jitter))) + for idx in range(0, len(jitter)): + if jitter[order[idx]] == "brightness": + images = brightness_jitter(img_brightness, images) + elif jitter[order[idx]] == "contrast": + images = contrast_jitter(img_contrast, images) + elif jitter[order[idx]] == "saturation": + images = saturation_jitter(img_saturation, images) + return images + + +def brightness_jitter(var, images): + """ + Perfrom brightness jittering on the input images. The channels of images + should be in order BGR. + Args: + var (float): jitter ratio for brightness. + images (tensor): images to perform color jitter. Dimension is + `num frames` x `channel` x `height` x `width`. + Returns: + images (tensor): the jittered images, the dimension is + `num frames` x `channel` x `height` x `width`. + """ + alpha = 1.0 + np.random.uniform(-var, var) + + img_bright = torch.zeros(images.shape) + images = blend(images, img_bright, alpha) + return images + + +def contrast_jitter(var, images): + """ + Perfrom contrast jittering on the input images. The channels of images + should be in order BGR. + Args: + var (float): jitter ratio for contrast. + images (tensor): images to perform color jitter. Dimension is + `num frames` x `channel` x `height` x `width`. + Returns: + images (tensor): the jittered images, the dimension is + `num frames` x `channel` x `height` x `width`. + """ + alpha = 1.0 + np.random.uniform(-var, var) + + img_gray = grayscale(images) + img_gray[:] = torch.mean(img_gray, dim=(1, 2, 3), keepdim=True) + images = blend(images, img_gray, alpha) + return images + + +def saturation_jitter(var, images): + """ + Perfrom saturation jittering on the input images. The channels of images + should be in order BGR. + Args: + var (float): jitter ratio for saturation. + images (tensor): images to perform color jitter. Dimension is + `num frames` x `channel` x `height` x `width`. + Returns: + images (tensor): the jittered images, the dimension is + `num frames` x `channel` x `height` x `width`. + """ + alpha = 1.0 + np.random.uniform(-var, var) + img_gray = grayscale(images) + images = blend(images, img_gray, alpha) + + return images + + +def lighting_jitter(images, alphastd, eigval, eigvec): + """ + Perform AlexNet-style PCA jitter on the given images. + Args: + images (tensor): images to perform lighting jitter. Dimension is + `num frames` x `channel` x `height` x `width`. + alphastd (float): jitter ratio for PCA jitter. + eigval (list): eigenvalues for PCA jitter. + eigvec (list[list]): eigenvectors for PCA jitter. + Returns: + out_images (tensor): the jittered images, the dimension is + `num frames` x `channel` x `height` x `width`. + """ + if alphastd == 0: + return images + # generate alpha1, alpha2, alpha3. + alpha = np.random.normal(0, alphastd, size=(1, 3)) + eig_vec = np.array(eigvec) + eig_val = np.reshape(eigval, (1, 3)) + rgb = np.sum( + eig_vec * np.repeat(alpha, 3, axis=0) * np.repeat(eig_val, 3, axis=0), + axis=1, + ) + out_images = torch.zeros_like(images) + for idx in range(images.shape[1]): + out_images[:, idx] = images[:, idx] + rgb[2 - idx] + + return out_images + + +def color_normalization(images, mean, stddev): + """ + Perform color nomration on the given images. + Args: + images (tensor): images to perform color normalization. 
+            `num frames` x `channel` x `height` x `width`.
+        mean (list): mean values for normalization.
+        stddev (list): standard deviations for normalization.
+
+    Returns:
+        out_images (tensor): the normalized images, the dimension is
+            `num frames` x `channel` x `height` x `width`.
+    """
+    assert len(mean) == images.shape[1], "channel mean not computed properly"
+    assert (
+        len(stddev) == images.shape[1]
+    ), "channel stddev not computed properly"
+
+    out_images = torch.zeros_like(images)
+    for idx in range(len(mean)):
+        out_images[:, idx] = (images[:, idx] - mean[idx]) / stddev[idx]
+
+    return out_images
diff --git a/TimeSformer/timesformer/datasets/utils.py b/TimeSformer/timesformer/datasets/utils.py
new file mode 100755
index 0000000000000000000000000000000000000000..35a4ad645f3c5c199d316785ee9082ac48b47589
--- /dev/null
+++ b/TimeSformer/timesformer/datasets/utils.py
@@ -0,0 +1,380 @@
+#!/usr/bin/env python3
+
+import logging
+import numpy as np
+import os
+import random
+import time
+from collections import defaultdict
+import cv2
+import torch
+from fvcore.common.file_io import PathManager
+from torch.utils.data.distributed import DistributedSampler
+
+from . import transform as transform
+
+logger = logging.getLogger(__name__)
+
+
+def retry_load_images(image_paths, retry=10, backend="pytorch"):
+    """
+    Load images, with support for retrying on failed loads.
+
+    Args:
+        image_paths (list): paths of images needed to be loaded.
+        retry (int, optional): maximum number of load retries. Defaults to 10.
+        backend (str): `pytorch` or `cv2`.
+
+    Returns:
+        imgs (list): list of loaded images.
+    """
+    for i in range(retry):
+        imgs = []
+        for image_path in image_paths:
+            with PathManager.open(image_path, "rb") as f:
+                img_str = np.frombuffer(f.read(), np.uint8)
+                img = cv2.imdecode(img_str, flags=cv2.IMREAD_COLOR)
+            imgs.append(img)
+
+        if all(img is not None for img in imgs):
+            if backend == "pytorch":
+                imgs = torch.as_tensor(np.stack(imgs))
+            return imgs
+        else:
+            logger.warning("Reading failed. Will retry.")
+            time.sleep(1.0)
+        if i == retry - 1:
+            raise Exception("Failed to load images {}".format(image_paths))
+
+
+def get_sequence(center_idx, half_len, sample_rate, num_frames):
+    """
+    Sample frames among the corresponding clip.
+
+    Args:
+        center_idx (int): center frame idx for current clip
+        half_len (int): half of the clip length
+        sample_rate (int): sampling rate for sampling frames inside of the clip
+        num_frames (int): number of expected sampled frames
+
+    Returns:
+        seq (list): list of indexes of sampled frames in this clip.
+    """
+    seq = list(range(center_idx - half_len, center_idx + half_len, sample_rate))
+
+    for seq_idx in range(len(seq)):
+        if seq[seq_idx] < 0:
+            seq[seq_idx] = 0
+        elif seq[seq_idx] >= num_frames:
+            seq[seq_idx] = num_frames - 1
+    return seq
+
+
+def pack_pathway_output(cfg, frames):
+    """
+    Prepare output as a list of tensors. Each tensor corresponds to a
+    unique pathway.
+    Args:
+        cfg (CfgNode): configs. Details can be found in
+            slowfast/config/defaults.py
+        frames (tensor): frames of images sampled from the video. The
+            dimension is `channel` x `num frames` x `height` x `width`.
+    Returns:
+        frame_list (list): list of tensors with the dimension of
+            `channel` x `num frames` x `height` x `width`.
+    """
+    if cfg.DATA.REVERSE_INPUT_CHANNEL:
+        frames = frames[[2, 1, 0], :, :, :]
+    if cfg.MODEL.ARCH in cfg.MODEL.SINGLE_PATHWAY_ARCH:
+        frame_list = [frames]
+    elif cfg.MODEL.ARCH in cfg.MODEL.MULTI_PATHWAY_ARCH:
+        fast_pathway = frames
+        # Perform temporal sampling from the fast pathway.
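+        # (note) torch.linspace below picks frames.shape[1] // cfg.SLOWFAST.ALPHA
+        # evenly spaced temporal indices, e.g. with ALPHA = 4 roughly every
+        # 4th frame feeds the slow pathway.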
+        slow_pathway = torch.index_select(
+            frames,
+            1,
+            torch.linspace(
+                0, frames.shape[1] - 1, frames.shape[1] // cfg.SLOWFAST.ALPHA
+            ).long(),
+        )
+        frame_list = [slow_pathway, fast_pathway]
+    else:
+        raise NotImplementedError(
+            "Model arch {} is not in {}".format(
+                cfg.MODEL.ARCH,
+                cfg.MODEL.SINGLE_PATHWAY_ARCH + cfg.MODEL.MULTI_PATHWAY_ARCH,
+            )
+        )
+    return frame_list
+
+
+def spatial_sampling(
+    frames,
+    spatial_idx=-1,
+    min_scale=256,
+    max_scale=320,
+    crop_size=224,
+    random_horizontal_flip=True,
+    inverse_uniform_sampling=False,
+):
+    """
+    Perform spatial sampling on the given video frames. If spatial_idx is
+    -1, perform random scale, random crop, and random flip on the given
+    frames. If spatial_idx is 0, 1, or 2, perform spatial uniform sampling
+    with the given spatial_idx.
+    Args:
+        frames (tensor): frames of images sampled from the video. The
+            dimension is `num frames` x `channel` x `height` x `width`.
+        spatial_idx (int): if -1, perform random spatial sampling. If 0, 1,
+            or 2, perform left, center, right crop if width is larger than
+            height, and perform top, center, bottom crop if height is larger
+            than width.
+        min_scale (int): the minimal size of scaling.
+        max_scale (int): the maximal size of scaling.
+        crop_size (int): the size of height and width used to crop the
+            frames.
+        random_horizontal_flip (bool): whether to randomly flip the frames
+            horizontally when spatial_idx is -1.
+        inverse_uniform_sampling (bool): if True, sample uniformly in
+            [1 / max_scale, 1 / min_scale] and take a reciprocal to get the
+            scale. If False, take a uniform sample from [min_scale,
+            max_scale].
+    Returns:
+        frames (tensor): spatially sampled frames.
+    """
+    assert spatial_idx in [-1, 0, 1, 2]
+    if spatial_idx == -1:
+        frames, _ = transform.random_short_side_scale_jitter(
+            images=frames,
+            min_size=min_scale,
+            max_size=max_scale,
+            inverse_uniform_sampling=inverse_uniform_sampling,
+        )
+        frames, _ = transform.random_crop(frames, crop_size)
+        if random_horizontal_flip:
+            frames, _ = transform.horizontal_flip(0.5, frames)
+    else:
+        # The testing is deterministic and no jitter should be performed.
+        # min_scale, max_scale, and crop_size are expected to be the same.
+        #assert len({min_scale, max_scale, crop_size}) == 1
+        frames, _ = transform.random_short_side_scale_jitter(
+            frames, min_scale, max_scale
+        )
+        frames, _ = transform.uniform_crop(frames, crop_size, spatial_idx)
+    return frames
+
+def spatial_sampling_2crops(
+    frames,
+    spatial_idx=-1,
+    min_scale=256,
+    max_scale=320,
+    crop_size=224,
+    random_horizontal_flip=True,
+    inverse_uniform_sampling=False,
+):
+    """
+    Perform spatial sampling on the given video frames. If spatial_idx is
+    -1, perform random scale, random crop, and random flip on the given
+    frames. If spatial_idx is 0 or 1, perform spatial uniform sampling
+    with the given spatial_idx.
+    Args:
+        frames (tensor): frames of images sampled from the video. The
+            dimension is `num frames` x `channel` x `height` x `width`.
+        spatial_idx (int): if -1, perform random spatial sampling. If 0 or
+            1, take the corresponding one of two crops along the longer
+            side, as implemented by `transform.uniform_crop_2crops`.
+        min_scale (int): the minimal size of scaling.
+        max_scale (int): the maximal size of scaling.
+        crop_size (int): the size of height and width used to crop the
+            frames.
+        random_horizontal_flip (bool): whether to randomly flip the frames
+            horizontally when spatial_idx is -1.
+        inverse_uniform_sampling (bool): if True, sample uniformly in
+            [1 / max_scale, 1 / min_scale] and take a reciprocal to get the
+            scale. If False, take a uniform sample from [min_scale,
+            max_scale].
+    Returns:
+        frames (tensor): spatially sampled frames.
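+    Example (illustrative sketch; the input shape is an assumption):
+        >>> frames = torch.rand(8, 3, 256, 320)
+        >>> crop0 = spatial_sampling_2crops(frames, spatial_idx=0, crop_size=224)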
+ """ + assert spatial_idx in [-1, 0, 1, 2] + if spatial_idx == -1: + frames, _ = transform.random_short_side_scale_jitter( + images=frames, + min_size=min_scale, + max_size=max_scale, + inverse_uniform_sampling=inverse_uniform_sampling, + ) + frames, _ = transform.random_crop(frames, crop_size) + if random_horizontal_flip: + frames, _ = transform.horizontal_flip(0.5, frames) + else: + # The testing is deterministic and no jitter should be performed. + # min_scale, max_scale, and crop_size are expect to be the same. + #assert len({min_scale, max_scale, crop_size}) == 1 + frames, _ = transform.random_short_side_scale_jitter( + frames, min_scale, max_scale + ) + frames, _ = transform.uniform_crop_2crops(frames, crop_size, spatial_idx) + return frames + + +def as_binary_vector(labels, num_classes): + """ + Construct binary label vector given a list of label indices. + Args: + labels (list): The input label list. + num_classes (int): Number of classes of the label vector. + Returns: + labels (numpy array): the resulting binary vector. + """ + label_arr = np.zeros((num_classes,)) + + for lbl in set(labels): + label_arr[lbl] = 1.0 + return label_arr + + +def aggregate_labels(label_list): + """ + Join a list of label list. + Args: + labels (list): The input label list. + Returns: + labels (list): The joint list of all lists in input. + """ + all_labels = [] + for labels in label_list: + for l in labels: + all_labels.append(l) + return list(set(all_labels)) + + +def convert_to_video_level_labels(labels): + """ + Aggregate annotations from all frames of a video to form video-level labels. + Args: + labels (list): The input label list. + Returns: + labels (list): Same as input, but with each label replaced by + a video-level one. + """ + for video_id in range(len(labels)): + video_level_labels = aggregate_labels(labels[video_id]) + for i in range(len(labels[video_id])): + labels[video_id][i] = video_level_labels + return labels + + +def load_image_lists(frame_list_file, prefix="", return_list=False): + """ + Load image paths and labels from a "frame list". + Each line of the frame list contains: + `original_vido_id video_id frame_id path labels` + Args: + frame_list_file (string): path to the frame list. + prefix (str): the prefix for the path. + return_list (bool): if True, return a list. If False, return a dict. + Returns: + image_paths (list or dict): list of list containing path to each frame. + If return_list is False, then return in a dict form. + labels (list or dict): list of list containing label of each frame. + If return_list is False, then return in a dict form. 
+ """ + image_paths = defaultdict(list) + labels = defaultdict(list) + with PathManager.open(frame_list_file, "r") as f: + assert f.readline().startswith("original_vido_id") + for line in f: + row = line.split() + # original_vido_id video_id frame_id path labels + assert len(row) == 5 + video_name = row[0] + if prefix == "": + path = row[3] + else: + path = os.path.join(prefix, row[3]) + image_paths[video_name].append(path) + frame_labels = row[-1].replace('"', "") + if frame_labels != "": + labels[video_name].append( + [int(x) for x in frame_labels.split(",")] + ) + else: + labels[video_name].append([]) + + if return_list: + keys = image_paths.keys() + image_paths = [image_paths[key] for key in keys] + labels = [labels[key] for key in keys] + return image_paths, labels + return dict(image_paths), dict(labels) + + +def tensor_normalize(tensor, mean, std): + """ + Normalize a given tensor by subtracting the mean and dividing the std. + Args: + tensor (tensor): tensor to normalize. + mean (tensor or list): mean value to subtract. + std (tensor or list): std to divide. + """ + if tensor.dtype == torch.uint8: + tensor = tensor.float() + tensor = tensor / 255.0 + if type(mean) == list: + mean = torch.tensor(mean) + if type(std) == list: + std = torch.tensor(std) + tensor = tensor - mean + tensor = tensor / std + return tensor + + +def get_random_sampling_rate(long_cycle_sampling_rate, sampling_rate): + """ + When multigrid training uses a fewer number of frames, we randomly + increase the sampling rate so that some clips cover the original span. + """ + if long_cycle_sampling_rate > 0: + assert long_cycle_sampling_rate >= sampling_rate + return random.randint(sampling_rate, long_cycle_sampling_rate) + else: + return sampling_rate + + +def revert_tensor_normalize(tensor, mean, std): + """ + Revert normalization for a given tensor by multiplying by the std and adding the mean. + Args: + tensor (tensor): tensor to revert normalization. + mean (tensor or list): mean value to add. + std (tensor or list): std to multiply. + """ + if type(mean) == list: + mean = torch.tensor(mean) + if type(std) == list: + std = torch.tensor(std) + tensor = tensor * std + tensor = tensor + mean + return tensor + + +def create_sampler(dataset, shuffle, cfg): + """ + Create sampler for the given dataset. + Args: + dataset (torch.utils.data.Dataset): the given dataset. + shuffle (bool): set to ``True`` to have the data reshuffled + at every epoch. + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + Returns: + sampler (Sampler): the created sampler. + """ + sampler = DistributedSampler(dataset) if cfg.NUM_GPUS > 1 else None + + return sampler + + +def loader_worker_init_fn(dataset): + """ + Create init function passed to pytorch data loader. + Args: + dataset (torch.utils.data.Dataset): the given dataset. + """ + return None diff --git a/TimeSformer/timesformer/datasets/video_container.py b/TimeSformer/timesformer/datasets/video_container.py new file mode 100755 index 0000000000000000000000000000000000000000..dbe5af720369391c3e9f0eca04bd34dc91a9b1da --- /dev/null +++ b/TimeSformer/timesformer/datasets/video_container.py @@ -0,0 +1,31 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import av + + +def get_video_container(path_to_vid, multi_thread_decode=False, backend="pyav"): + """ + Given the path to the video, return the pyav video container. + Args: + path_to_vid (str): path to the video. + multi_thread_decode (bool): if True, perform multi-thread decoding. 
+        backend (str): decoder backend, options include `pyav` and
+            `torchvision`, default is `pyav`.
+    Returns:
+        container (container): video container.
+    """
+    if backend == "torchvision":
+        with open(path_to_vid, "rb") as fp:
+            container = fp.read()
+        return container
+    elif backend == "pyav":
+        #try:
+        container = av.open(path_to_vid)
+        if multi_thread_decode:
+            # Enable multiple threads for decoding.
+            container.streams.video[0].thread_type = "AUTO"
+        #except:
+        #    container = None
+        return container
+    else:
+        raise NotImplementedError("Unknown backend {}".format(backend))
diff --git a/TimeSformer/timesformer/models/__init__.py b/TimeSformer/timesformer/models/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..88e5842a59fe86d22f09f4e4bd0ddb1fa95d3c56
--- /dev/null
+++ b/TimeSformer/timesformer/models/__init__.py
@@ -0,0 +1,5 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+from .build import MODEL_REGISTRY, build_model  # noqa
+from .custom_video_model_builder import *  # noqa
+from .video_model_builder import ResNet, SlowFast  # noqa
diff --git a/TimeSformer/timesformer/models/batchnorm_helper.py b/TimeSformer/timesformer/models/batchnorm_helper.py
new file mode 100755
index 0000000000000000000000000000000000000000..fdb29acefa1192b678172936fe21021c863f16b9
--- /dev/null
+++ b/TimeSformer/timesformer/models/batchnorm_helper.py
@@ -0,0 +1,217 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+"""BatchNorm (BN) utility functions and custom batch-size BN implementations"""
+
+from functools import partial
+import torch
+import torch.distributed as dist
+import torch.nn as nn
+from torch.autograd.function import Function
+
+import timesformer.utils.distributed as du
+
+
+def get_norm(cfg):
+    """
+    Args:
+        cfg (CfgNode): model building configs, details are in the comments of
+            the config file.
+    Returns:
+        nn.Module: the normalization layer.
+    """
+    if cfg.BN.NORM_TYPE == "batchnorm":
+        return nn.BatchNorm3d
+    elif cfg.BN.NORM_TYPE == "sub_batchnorm":
+        return partial(SubBatchNorm3d, num_splits=cfg.BN.NUM_SPLITS)
+    elif cfg.BN.NORM_TYPE == "sync_batchnorm":
+        return partial(
+            NaiveSyncBatchNorm3d, num_sync_devices=cfg.BN.NUM_SYNC_DEVICES
+        )
+    else:
+        raise NotImplementedError(
+            "Norm type {} is not supported".format(cfg.BN.NORM_TYPE)
+        )
+
+
+class SubBatchNorm3d(nn.Module):
+    """
+    The standard BN layer computes stats across all examples in a GPU. In some
+    cases it is desirable to compute stats across only a subset of examples
+    (e.g., in multigrid training https://arxiv.org/abs/1912.00998).
+    SubBatchNorm3d splits the batch dimension into N splits and runs BN on
+    each of them separately, so that the stats are computed on each subset of
+    examples (1/N of the batch) independently. During evaluation, it
+    aggregates the stats from all splits into one BN.
+    """
+
+    def __init__(self, num_splits, **args):
+        """
+        Args:
+            num_splits (int): number of splits.
+            args (dict): other keyword arguments, forwarded to nn.BatchNorm3d.
+        """
+        super(SubBatchNorm3d, self).__init__()
+        self.num_splits = num_splits
+        num_features = args["num_features"]
+        # Keep only one set of weight and bias.
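+        # (note) affine is disabled in the inner BN layers below so that a
+        # single learnable weight/bias pair is shared across all splits.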
+ if args.get("affine", True): + self.affine = True + args["affine"] = False + self.weight = torch.nn.Parameter(torch.ones(num_features)) + self.bias = torch.nn.Parameter(torch.zeros(num_features)) + else: + self.affine = False + self.bn = nn.BatchNorm3d(**args) + args["num_features"] = num_features * num_splits + self.split_bn = nn.BatchNorm3d(**args) + + def _get_aggregated_mean_std(self, means, stds, n): + """ + Calculate the aggregated mean and stds. + Args: + means (tensor): mean values. + stds (tensor): standard deviations. + n (int): number of sets of means and stds. + """ + mean = means.view(n, -1).sum(0) / n + std = ( + stds.view(n, -1).sum(0) / n + + ((means.view(n, -1) - mean) ** 2).view(n, -1).sum(0) / n + ) + return mean.detach(), std.detach() + + def aggregate_stats(self): + """ + Synchronize running_mean, and running_var. Call this before eval. + """ + if self.split_bn.track_running_stats: + ( + self.bn.running_mean.data, + self.bn.running_var.data, + ) = self._get_aggregated_mean_std( + self.split_bn.running_mean, + self.split_bn.running_var, + self.num_splits, + ) + + def forward(self, x): + if self.training: + n, c, t, h, w = x.shape + x = x.view(n // self.num_splits, c * self.num_splits, t, h, w) + x = self.split_bn(x) + x = x.view(n, c, t, h, w) + else: + x = self.bn(x) + if self.affine: + x = x * self.weight.view((-1, 1, 1, 1)) + x = x + self.bias.view((-1, 1, 1, 1)) + return x + + +class GroupGather(Function): + """ + GroupGather performs all gather on each of the local process/ GPU groups. + """ + + @staticmethod + def forward(ctx, input, num_sync_devices, num_groups): + """ + Perform forwarding, gathering the stats across different process/ GPU + group. + """ + ctx.num_sync_devices = num_sync_devices + ctx.num_groups = num_groups + + input_list = [ + torch.zeros_like(input) for k in range(du.get_local_size()) + ] + dist.all_gather( + input_list, input, async_op=False, group=du._LOCAL_PROCESS_GROUP + ) + + inputs = torch.stack(input_list, dim=0) + if num_groups > 1: + rank = du.get_local_rank() + group_idx = rank // num_sync_devices + inputs = inputs[ + group_idx + * num_sync_devices : (group_idx + 1) + * num_sync_devices + ] + inputs = torch.sum(inputs, dim=0) + return inputs + + @staticmethod + def backward(ctx, grad_output): + """ + Perform backwarding, gathering the gradients across different process/ GPU + group. + """ + grad_output_list = [ + torch.zeros_like(grad_output) for k in range(du.get_local_size()) + ] + dist.all_gather( + grad_output_list, + grad_output, + async_op=False, + group=du._LOCAL_PROCESS_GROUP, + ) + + grads = torch.stack(grad_output_list, dim=0) + if ctx.num_groups > 1: + rank = du.get_local_rank() + group_idx = rank // ctx.num_sync_devices + grads = grads[ + group_idx + * ctx.num_sync_devices : (group_idx + 1) + * ctx.num_sync_devices + ] + grads = torch.sum(grads, dim=0) + return grads, None, None + + +class NaiveSyncBatchNorm3d(nn.BatchNorm3d): + def __init__(self, num_sync_devices, **args): + """ + Naive version of Synchronized 3D BatchNorm. + Args: + num_sync_devices (int): number of device to sync. + args (list): other arguments. 
+ """ + self.num_sync_devices = num_sync_devices + if self.num_sync_devices > 0: + assert du.get_local_size() % self.num_sync_devices == 0, ( + du.get_local_size(), + self.num_sync_devices, + ) + self.num_groups = du.get_local_size() // self.num_sync_devices + else: + self.num_sync_devices = du.get_local_size() + self.num_groups = 1 + super(NaiveSyncBatchNorm3d, self).__init__(**args) + + def forward(self, input): + if du.get_local_size() == 1 or not self.training: + return super().forward(input) + + assert input.shape[0] > 0, "SyncBatchNorm does not support empty inputs" + C = input.shape[1] + mean = torch.mean(input, dim=[0, 2, 3, 4]) + meansqr = torch.mean(input * input, dim=[0, 2, 3, 4]) + + vec = torch.cat([mean, meansqr], dim=0) + vec = GroupGather.apply(vec, self.num_sync_devices, self.num_groups) * ( + 1.0 / self.num_sync_devices + ) + + mean, meansqr = torch.split(vec, C) + var = meansqr - mean * mean + self.running_mean += self.momentum * (mean.detach() - self.running_mean) + self.running_var += self.momentum * (var.detach() - self.running_var) + + invstd = torch.rsqrt(var + self.eps) + scale = self.weight * invstd + bias = self.bias - mean * scale + scale = scale.reshape(1, -1, 1, 1, 1) + bias = bias.reshape(1, -1, 1, 1, 1) + return input * scale + bias diff --git a/TimeSformer/timesformer/models/build.py b/TimeSformer/timesformer/models/build.py new file mode 100755 index 0000000000000000000000000000000000000000..8e9e9db8a3748a012cd3ed6c603ebdd365bee448 --- /dev/null +++ b/TimeSformer/timesformer/models/build.py @@ -0,0 +1,54 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Model construction functions.""" + +import torch +from fvcore.common.registry import Registry + +MODEL_REGISTRY = Registry("MODEL") +MODEL_REGISTRY.__doc__ = """ +Registry for video model. + +The registered object will be called with `obj(cfg)`. +The call should return a `torch.nn.Module` object. +""" + + +def build_model(cfg, gpu_id=None): + """ + Builds the video model. + Args: + cfg (configs): configs that contains the hyper-parameters to build the + backbone. Details can be seen in slowfast/config/defaults.py. + gpu_id (Optional[int]): specify the gpu index to build model. + """ + if torch.cuda.is_available(): + assert ( + cfg.NUM_GPUS <= torch.cuda.device_count() + ), "Cannot use more GPU devices than available" + else: + assert ( + cfg.NUM_GPUS == 0 + ), "Cuda is not available. Please set `NUM_GPUS: 0 for running on CPUs." 
+
+    # Construct the model
+    name = cfg.MODEL.MODEL_NAME
+    model = MODEL_REGISTRY.get(name)(cfg)
+
+    if cfg.NUM_GPUS:
+        if gpu_id is None:
+            # Determine the GPU used by the current process
+            cur_device = torch.cuda.current_device()
+        else:
+            cur_device = gpu_id
+        # Transfer the model to the current GPU device
+        model = model.cuda(device=cur_device)
+
+
+    # Use multi-process data parallel model in the multi-gpu setting
+    if cfg.NUM_GPUS > 1:
+        # Make model replica operate on the current device
+        model = torch.nn.parallel.DistributedDataParallel(
+            module=model, device_ids=[cur_device], output_device=cur_device
+        )
+    return model
diff --git a/TimeSformer/timesformer/models/conv2d_same.py b/TimeSformer/timesformer/models/conv2d_same.py
new file mode 100755
index 0000000000000000000000000000000000000000..0a63bab902324f256b8e5ccdc19511ee81c6a7d5
--- /dev/null
+++ b/TimeSformer/timesformer/models/conv2d_same.py
@@ -0,0 +1,74 @@
+# Copyright 2020 Ross Wightman
+# Conv2d w/ Same Padding
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+import math
+from typing import List, Optional, Tuple
+#from .padding import pad_same, get_padding_value
+
+# Dynamically pad input x with 'SAME' padding for conv with specified args
+def pad_same(x, k: List[int], s: List[int], d: List[int] = (1, 1), value: float = 0):
+    ih, iw = x.size()[-2:]
+    pad_h, pad_w = get_same_padding(ih, k[0], s[0], d[0]), get_same_padding(iw, k[1], s[1], d[1])
+    if pad_h > 0 or pad_w > 0:
+        x = F.pad(x, [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2], value=value)
+    return x

+# Calculate asymmetric TensorFlow-like 'SAME' padding for a convolution
+def get_same_padding(x: int, k: int, s: int, d: int):
+    return max((math.ceil(x / s) - 1) * s + (k - 1) * d + 1 - x, 0)
+
+# Calculate symmetric padding for a convolution. Restored here (together with
+# is_static_pad below) since the `.padding` import above is commented out but
+# get_padding_value still depends on both helpers.
+def get_padding(kernel_size: int, stride: int = 1, dilation: int = 1, **_) -> int:
+    return ((stride - 1) + dilation * (kernel_size - 1)) // 2
+
+# Can TF 'SAME' padding for these args be realized statically?
+def is_static_pad(kernel_size: int, stride: int = 1, dilation: int = 1, **_):
+    return stride == 1 and (dilation * (kernel_size - 1)) % 2 == 0
+
+def get_padding_value(padding, kernel_size, **kwargs) -> Tuple[Tuple, bool]:
+    dynamic = False
+    if isinstance(padding, str):
+        # for any string padding, the padding will be calculated for you, one of three ways
+        padding = padding.lower()
+        if padding == 'same':
+            # TF compatible 'SAME' padding, has a performance and GPU memory allocation impact
+            if is_static_pad(kernel_size, **kwargs):
+                # static case, no extra overhead
+                padding = get_padding(kernel_size, **kwargs)
+            else:
+                # dynamic 'SAME' padding, has runtime/GPU memory overhead
+                padding = 0
+                dynamic = True
+        elif padding == 'valid':
+            # 'VALID' padding, same as padding=0
+            padding = 0
+        else:
+            # Default to PyTorch style 'same'-ish symmetric padding
+            padding = get_padding(kernel_size, **kwargs)
+    return padding, dynamic
+
+def conv2d_same(
+        x, weight: torch.Tensor, bias: Optional[torch.Tensor] = None, stride: Tuple[int, int] = (1, 1),
+        padding: Tuple[int, int] = (0, 0), dilation: Tuple[int, int] = (1, 1), groups: int = 1):
+    x = pad_same(x, weight.shape[-2:], stride, dilation)
+    return F.conv2d(x, weight, bias, stride, (0, 0), dilation, groups)
+
+
+class Conv2dSame(nn.Conv2d):
+    """ Tensorflow like 'SAME' convolution wrapper for 2D convolutions
+    """
+
+    def __init__(self, in_channels, out_channels, kernel_size, stride=1,
+                 padding=0, dilation=1, groups=1, bias=True):
+        super(Conv2dSame, self).__init__(
+            in_channels, out_channels, kernel_size, stride, 0, dilation, groups, bias)
+
+    def forward(self, x):
+        return conv2d_same(x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups)
+
+
+def create_conv2d_pad(in_chs, out_chs, kernel_size, **kwargs):
+    padding = kwargs.pop('padding', '')
+    kwargs.setdefault('bias', False)
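+    # Resolve 'same'/'valid'/numeric padding; fall back to the dynamically
+    # padded Conv2dSame only when static symmetric padding is impossible.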
+    padding, is_dynamic = get_padding_value(padding, kernel_size, **kwargs)
+    if is_dynamic:
+        return Conv2dSame(in_chs, out_chs, kernel_size, **kwargs)
+    else:
+        return nn.Conv2d(in_chs, out_chs, kernel_size, padding=padding, **kwargs)
diff --git a/TimeSformer/timesformer/models/custom_video_model_builder.py b/TimeSformer/timesformer/models/custom_video_model_builder.py
new file mode 100755
index 0000000000000000000000000000000000000000..1b500aa695ec2262886dcc08cb0ef910409b8535
--- /dev/null
+++ b/TimeSformer/timesformer/models/custom_video_model_builder.py
@@ -0,0 +1,4 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+
+"""A More Flexible Video models."""
diff --git a/TimeSformer/timesformer/models/features.py b/TimeSformer/timesformer/models/features.py
new file mode 100755
index 0000000000000000000000000000000000000000..5ef9e7a7f5f3b6beb0c2c0cc644f024d8f0ef056
--- /dev/null
+++ b/TimeSformer/timesformer/models/features.py
@@ -0,0 +1,266 @@
+# Copyright 2020 Ross Wightman
+
+from collections import OrderedDict, defaultdict
+from copy import deepcopy
+from functools import partial
+from typing import Dict, List, Tuple
+
+import torch
+import torch.nn as nn
+
+
+class FeatureInfo:
+
+    def __init__(self, feature_info: List[Dict], out_indices: Tuple[int]):
+        prev_reduction = 1
+        for fi in feature_info:
+            # sanity check the mandatory fields, there may be additional fields depending on the model
+            assert 'num_chs' in fi and fi['num_chs'] > 0
+            assert 'reduction' in fi and fi['reduction'] >= prev_reduction
+            prev_reduction = fi['reduction']
+            assert 'module' in fi
+        self.out_indices = out_indices
+        self.info = feature_info
+
+    def from_other(self, out_indices: Tuple[int]):
+        return FeatureInfo(deepcopy(self.info), out_indices)
+
+    def get(self, key, idx=None):
+        """ Get value by key at specified index (indices)
+        if idx == None, returns value for key at each output index
+        if idx is an integer, return value for that feature module index (ignoring output indices)
+        if idx is a list/tuple, return value for each module index (ignoring output indices)
+        """
+        if idx is None:
+            return [self.info[i][key] for i in self.out_indices]
+        if isinstance(idx, (tuple, list)):
+            return [self.info[i][key] for i in idx]
+        else:
+            return self.info[idx][key]
+
+    def get_dicts(self, keys=None, idx=None):
+        """ return info dicts for specified keys (or all if None) at specified indices (or out_indices if None)
+        """
+        if idx is None:
+            if keys is None:
+                return [self.info[i] for i in self.out_indices]
+            else:
+                return [{k: self.info[i][k] for k in keys} for i in self.out_indices]
+        if isinstance(idx, (tuple, list)):
+            return [self.info[i] if keys is None else {k: self.info[i][k] for k in keys} for i in idx]
+        else:
+            return self.info[idx] if keys is None else {k: self.info[idx][k] for k in keys}
+
+    def channels(self, idx=None):
+        """ feature channels accessor
+        """
+        return self.get('num_chs', idx)
+
+    def reduction(self, idx=None):
+        """ feature reduction (output stride) accessor
+        """
+        return self.get('reduction', idx)
+
+    def module_name(self, idx=None):
+        """ feature module name accessor
+        """
+        return self.get('module', idx)
+
+    def __getitem__(self, item):
+        return self.info[item]
+
+    def __len__(self):
+        return len(self.info)
+
+
+class FeatureHooks:
+    """ Feature Hook Helper
+    This module helps with the setup and extraction of hooks for extracting features from
+    internal nodes in a model by node name. This works quite well in eager Python but needs
+    redesign for TorchScript.
+ """ + + def __init__(self, hooks, named_modules, out_map=None, default_hook_type='forward'): + # setup feature hooks + modules = {k: v for k, v in named_modules} + for i, h in enumerate(hooks): + hook_name = h['module'] + m = modules[hook_name] + hook_id = out_map[i] if out_map else hook_name + hook_fn = partial(self._collect_output_hook, hook_id) + hook_type = h['hook_type'] if 'hook_type' in h else default_hook_type + if hook_type == 'forward_pre': + m.register_forward_pre_hook(hook_fn) + elif hook_type == 'forward': + m.register_forward_hook(hook_fn) + else: + assert False, "Unsupported hook type" + self._feature_outputs = defaultdict(OrderedDict) + + def _collect_output_hook(self, hook_id, *args): + x = args[-1] # tensor we want is last argument, output for fwd, input for fwd_pre + if isinstance(x, tuple): + x = x[0] # unwrap input tuple + self._feature_outputs[x.device][hook_id] = x + + def get_output(self, device) -> Dict[str, torch.tensor]: + output = self._feature_outputs[device] + self._feature_outputs[device] = OrderedDict() # clear after reading + return output + + +def _module_list(module, flatten_sequential=False): + # a yield/iter would be better for this but wouldn't be compatible with torchscript + ml = [] + for name, module in module.named_children(): + if flatten_sequential and isinstance(module, nn.Sequential): + # first level of Sequential containers is flattened into containing model + for child_name, child_module in module.named_children(): + combined = [name, child_name] + ml.append(('_'.join(combined), '.'.join(combined), child_module)) + else: + ml.append((name, name, module)) + return ml + + +def _get_feature_info(net, out_indices): + feature_info = getattr(net, 'feature_info') + if isinstance(feature_info, FeatureInfo): + return feature_info.from_other(out_indices) + elif isinstance(feature_info, (list, tuple)): + return FeatureInfo(net.feature_info, out_indices) + else: + assert False, "Provided feature_info is not valid" + + +def _get_return_layers(feature_info, out_map): + module_names = feature_info.module_name() + return_layers = {} + for i, name in enumerate(module_names): + return_layers[name] = out_map[i] if out_map is not None else feature_info.out_indices[i] + return return_layers + + +class FeatureDictNet(nn.ModuleDict): + """ Feature extractor with OrderedDict return + Wrap a model and extract features as specified by the out indices, the network is + partially re-built from contained modules. + There is a strong assumption that the modules have been registered into the model in the same + order as they are used. There should be no reuse of the same nn.Module more than once, including + trivial modules like `self.relu = nn.ReLU`. + Only submodules that are directly assigned to the model class (`model.feature1`) or at most + one Sequential container deep (`model.features.1`, with flatten_sequent=True) can be captured. 
+ All Sequential containers that are directly assigned to the original model will have their + modules assigned to this module with the name `model.features.1` being changed to `model.features_1` + Arguments: + model (nn.Module): model from which we will extract the features + out_indices (tuple[int]): model output indices to extract features for + out_map (sequence): list or tuple specifying desired return id for each out index, + otherwise str(index) is used + feature_concat (bool): whether to concatenate intermediate features that are lists or tuples + vs select element [0] + flatten_sequential (bool): whether to flatten sequential modules assigned to model + """ + def __init__( + self, model, + out_indices=(0, 1, 2, 3, 4), out_map=None, feature_concat=False, flatten_sequential=False): + super(FeatureDictNet, self).__init__() + self.feature_info = _get_feature_info(model, out_indices) + self.concat = feature_concat + self.return_layers = {} + return_layers = _get_return_layers(self.feature_info, out_map) + modules = _module_list(model, flatten_sequential=flatten_sequential) + remaining = set(return_layers.keys()) + layers = OrderedDict() + for new_name, old_name, module in modules: + layers[new_name] = module + if old_name in remaining: + # return id has to be consistently str type for torchscript + self.return_layers[new_name] = str(return_layers[old_name]) + remaining.remove(old_name) + if not remaining: + break + assert not remaining and len(self.return_layers) == len(return_layers), \ + f'Return layers ({remaining}) are not present in model' + self.update(layers) + + def _collect(self, x) -> (Dict[str, torch.Tensor]): + out = OrderedDict() + for name, module in self.items(): + x = module(x) + if name in self.return_layers: + out_id = self.return_layers[name] + if isinstance(x, (tuple, list)): + # If model tap is a tuple or list, concat or select first element + # FIXME this may need to be more generic / flexible for some nets + out[out_id] = torch.cat(x, 1) if self.concat else x[0] + else: + out[out_id] = x + return out + + def forward(self, x) -> Dict[str, torch.Tensor]: + return self._collect(x) + + +class FeatureListNet(FeatureDictNet): + """ Feature extractor with list return + See docstring for FeatureDictNet above, this class exists only to appease Torchscript typing constraints. + In eager Python we could have returned List[Tensor] vs Dict[id, Tensor] based on a member bool. + """ + def __init__( + self, model, + out_indices=(0, 1, 2, 3, 4), out_map=None, feature_concat=False, flatten_sequential=False): + super(FeatureListNet, self).__init__( + model, out_indices=out_indices, out_map=out_map, feature_concat=feature_concat, + flatten_sequential=flatten_sequential) + + def forward(self, x) -> (List[torch.Tensor]): + return list(self._collect(x).values()) + + +class FeatureHookNet(nn.ModuleDict): + """ FeatureHookNet + Wrap a model and extract features specified by the out indices using forward/forward-pre hooks. + If `no_rewrite` is True, features are extracted via hooks without modifying the underlying + network in any way. + If `no_rewrite` is False, the model will be re-written as in the + FeatureList/FeatureDict case by folding first to second (Sequential only) level modules into this one. 
+ FIXME this does not currently work with Torchscript, see FeatureHooks class + """ + def __init__( + self, model, + out_indices=(0, 1, 2, 3, 4), out_map=None, out_as_dict=False, no_rewrite=False, + feature_concat=False, flatten_sequential=False, default_hook_type='forward'): + super(FeatureHookNet, self).__init__() + assert not torch.jit.is_scripting() + self.feature_info = _get_feature_info(model, out_indices) + self.out_as_dict = out_as_dict + layers = OrderedDict() + hooks = [] + if no_rewrite: + assert not flatten_sequential + if hasattr(model, 'reset_classifier'): # make sure classifier is removed? + model.reset_classifier(0) + layers['body'] = model + hooks.extend(self.feature_info.get_dicts()) + else: + modules = _module_list(model, flatten_sequential=flatten_sequential) + remaining = {f['module']: f['hook_type'] if 'hook_type' in f else default_hook_type + for f in self.feature_info.get_dicts()} + for new_name, old_name, module in modules: + layers[new_name] = module + for fn, fm in module.named_modules(prefix=old_name): + if fn in remaining: + hooks.append(dict(module=fn, hook_type=remaining[fn])) + del remaining[fn] + if not remaining: + break + assert not remaining, f'Return layers ({remaining}) are not present in model' + self.update(layers) + self.hooks = FeatureHooks(hooks, model.named_modules(), out_map=out_map) + + def forward(self, x): + for name, module in self.items(): + x = module(x) + out = self.hooks.get_output(x.device) + return out if self.out_as_dict else list(out.values()) diff --git a/TimeSformer/timesformer/models/head_helper.py b/TimeSformer/timesformer/models/head_helper.py new file mode 100755 index 0000000000000000000000000000000000000000..9e36ae1e296f0281b86efe5946082ce675fba58f --- /dev/null +++ b/TimeSformer/timesformer/models/head_helper.py @@ -0,0 +1,235 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""ResNe(X)t Head helper.""" + +import torch +import torch.nn as nn + +class ResNetBasicHead(nn.Module): + """ + ResNe(X)t 3D head. + This layer performs a fully-connected projection during training, when the + input size is 1x1x1. It performs a convolutional projection during testing + when the input size is larger than 1x1x1. If the inputs are from multiple + different pathways, the inputs will be concatenated after pooling. + """ + + def __init__( + self, + dim_in, + num_classes, + pool_size, + dropout_rate=0.0, + act_func="softmax", + ): + """ + The `__init__` method of any subclass should also contain these + arguments. + ResNetBasicHead takes p pathways as input where p in [1, infty]. + + Args: + dim_in (list): the list of channel dimensions of the p inputs to the + ResNetHead. + num_classes (int): the channel dimensions of the p outputs to the + ResNetHead. + pool_size (list): the list of kernel sizes of p spatial temporal + poolings, temporal pool kernel size, spatial pool kernel size, + spatial pool kernel size in order. + dropout_rate (float): dropout rate. If equal to 0.0, perform no + dropout. + act_func (string): activation function to use. 'softmax': applies + softmax on the output. 'sigmoid': applies sigmoid on the output. + """ + super(ResNetBasicHead, self).__init__() + assert ( + len({len(pool_size), len(dim_in)}) == 1 + ), "pathway dimensions are not consistent." 
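+        # One pooling module is created per pathway; the pooled outputs are
+        # concatenated channel-wise before the shared linear projection.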
+        self.num_pathways = len(pool_size)
+
+        for pathway in range(self.num_pathways):
+            if pool_size[pathway] is None:
+                avg_pool = nn.AdaptiveAvgPool3d((1, 1, 1))
+            else:
+                avg_pool = nn.AvgPool3d(pool_size[pathway], stride=1)
+            self.add_module("pathway{}_avgpool".format(pathway), avg_pool)
+
+        if dropout_rate > 0.0:
+            self.dropout = nn.Dropout(dropout_rate)
+        # Perform FC in a fully convolutional manner. The FC layer will be
+        # initialized with a different std compared to convolutional layers.
+        self.projection = nn.Linear(sum(dim_in), num_classes, bias=True)
+
+        # Softmax for evaluation and testing.
+        if act_func == "softmax":
+            self.act = nn.Softmax(dim=4)
+        elif act_func == "sigmoid":
+            self.act = nn.Sigmoid()
+        else:
+            raise NotImplementedError(
+                "{} is not supported as an activation "
+                "function.".format(act_func)
+            )
+
+    def forward(self, inputs):
+        assert (
+            len(inputs) == self.num_pathways
+        ), "Input tensor does not contain {} pathways".format(self.num_pathways)
+        pool_out = []
+        for pathway in range(self.num_pathways):
+            m = getattr(self, "pathway{}_avgpool".format(pathway))
+            pool_out.append(m(inputs[pathway]))
+        x = torch.cat(pool_out, 1)
+        # (N, C, T, H, W) -> (N, T, H, W, C).
+        x = x.permute((0, 2, 3, 4, 1))
+        # Perform dropout.
+        if hasattr(self, "dropout"):
+            x = self.dropout(x)
+        x = self.projection(x)
+
+        # Perform fully convolutional inference.
+        if not self.training:
+            x = self.act(x)
+            x = x.mean([1, 2, 3])
+
+        x = x.view(x.shape[0], -1)
+        return x
+
+
+class X3DHead(nn.Module):
+    """
+    X3D head.
+    This layer performs a fully-connected projection during training, when the
+    input size is 1x1x1. It performs a convolutional projection during testing
+    when the input size is larger than 1x1x1. If the inputs are from multiple
+    different pathways, the inputs will be concatenated after pooling.
+    """
+
+    def __init__(
+        self,
+        dim_in,
+        dim_inner,
+        dim_out,
+        num_classes,
+        pool_size,
+        dropout_rate=0.0,
+        act_func="softmax",
+        inplace_relu=True,
+        eps=1e-5,
+        bn_mmt=0.1,
+        norm_module=nn.BatchNorm3d,
+        bn_lin5_on=False,
+    ):
+        """
+        The `__init__` method of any subclass should also contain these
+        arguments.
+        X3DHead takes a 5-dim feature tensor (BxCxTxHxW) as input.
+
+        Args:
+            dim_in (float): the channel dimension C of the input.
+            num_classes (int): the channel dimensions of the output.
+            pool_size (float): a single entry list of kernel size for
+                spatiotemporal pooling for the TxHxW dimensions.
+            dropout_rate (float): dropout rate. If equal to 0.0, perform no
+                dropout.
+            act_func (string): activation function to use. 'softmax': applies
+                softmax on the output. 'sigmoid': applies sigmoid on the
+                output.
+            inplace_relu (bool): if True, calculate the relu on the original
+                input without allocating new memory.
+            eps (float): epsilon for batch norm.
+            bn_mmt (float): momentum for batch norm. Note that BN momentum in
+                PyTorch = 1 - BN momentum in Caffe2.
+            norm_module (nn.Module): nn.Module for the normalization layer. The
+                default is nn.BatchNorm3d.
+            bn_lin5_on (bool): if True, perform normalization on the features
+                before the classifier.
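+        Example (illustrative sketch; the dimensions are assumptions):
+            >>> head = X3DHead(dim_in=192, dim_inner=432, dim_out=2048,
+            ...                num_classes=400, pool_size=None)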
+ """ + super(X3DHead, self).__init__() + self.pool_size = pool_size + self.dropout_rate = dropout_rate + self.num_classes = num_classes + self.act_func = act_func + self.eps = eps + self.bn_mmt = bn_mmt + self.inplace_relu = inplace_relu + self.bn_lin5_on = bn_lin5_on + self._construct_head(dim_in, dim_inner, dim_out, norm_module) + + def _construct_head(self, dim_in, dim_inner, dim_out, norm_module): + + self.conv_5 = nn.Conv3d( + dim_in, + dim_inner, + kernel_size=(1, 1, 1), + stride=(1, 1, 1), + padding=(0, 0, 0), + bias=False, + ) + self.conv_5_bn = norm_module( + num_features=dim_inner, eps=self.eps, momentum=self.bn_mmt + ) + self.conv_5_relu = nn.ReLU(self.inplace_relu) + + if self.pool_size is None: + self.avg_pool = nn.AdaptiveAvgPool3d((1, 1, 1)) + else: + self.avg_pool = nn.AvgPool3d(self.pool_size, stride=1) + + self.lin_5 = nn.Conv3d( + dim_inner, + dim_out, + kernel_size=(1, 1, 1), + stride=(1, 1, 1), + padding=(0, 0, 0), + bias=False, + ) + if self.bn_lin5_on: + self.lin_5_bn = norm_module( + num_features=dim_out, eps=self.eps, momentum=self.bn_mmt + ) + self.lin_5_relu = nn.ReLU(self.inplace_relu) + + if self.dropout_rate > 0.0: + self.dropout = nn.Dropout(self.dropout_rate) + # Perform FC in a fully convolutional manner. The FC layer will be + # initialized with a different std comparing to convolutional layers. + self.projection = nn.Linear(dim_out, self.num_classes, bias=True) + + # Softmax for evaluation and testing. + if self.act_func == "softmax": + self.act = nn.Softmax(dim=4) + elif self.act_func == "sigmoid": + self.act = nn.Sigmoid() + else: + raise NotImplementedError( + "{} is not supported as an activation" + "function.".format(self.act_func) + ) + + def forward(self, inputs): + # In its current design the X3D head is only useable for a single + # pathway input. + assert len(inputs) == 1, "Input tensor does not contain 1 pathway" + x = self.conv_5(inputs[0]) + x = self.conv_5_bn(x) + x = self.conv_5_relu(x) + x = self.avg_pool(x) + + x = self.lin_5(x) + if self.bn_lin5_on: + x = self.lin_5_bn(x) + x = self.lin_5_relu(x) + + # (N, C, T, H, W) -> (N, T, H, W, C). + x = x.permute((0, 2, 3, 4, 1)) + # Perform dropout. + if hasattr(self, "dropout"): + x = self.dropout(x) + x = self.projection(x) + + # Performs fully convlutional inference. + if not self.training: + x = self.act(x) + x = x.mean([1, 2, 3]) + + x = x.view(x.shape[0], -1) + return x diff --git a/TimeSformer/timesformer/models/helpers.py b/TimeSformer/timesformer/models/helpers.py new file mode 100755 index 0000000000000000000000000000000000000000..cf5ee1dd12217847254ae6bb5cbffb0656e176e0 --- /dev/null +++ b/TimeSformer/timesformer/models/helpers.py @@ -0,0 +1,360 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
+# Copyright 2020 Ross Wightman +# Modified model creation / weight loading / state_dict helpers + +import logging +import os +import math +from collections import OrderedDict +from copy import deepcopy +from typing import Callable + +import torch +import torch.nn as nn +import torch.utils.model_zoo as model_zoo +import torch.nn.functional as F + +from timesformer.models.features import FeatureListNet, FeatureDictNet, FeatureHookNet +from timesformer.models.conv2d_same import Conv2dSame +from timesformer.models.linear import Linear + + +_logger = logging.getLogger(__name__) + +def load_state_dict(checkpoint_path, use_ema=False): + if checkpoint_path and os.path.isfile(checkpoint_path): + checkpoint = torch.load(checkpoint_path, map_location='cpu') + state_dict_key = 'state_dict' + if isinstance(checkpoint, dict): + if use_ema and 'state_dict_ema' in checkpoint: + state_dict_key = 'state_dict_ema' + if state_dict_key and state_dict_key in checkpoint: + new_state_dict = OrderedDict() + for k, v in checkpoint[state_dict_key].items(): + # strip `module.` prefix + name = k[7:] if k.startswith('module') else k + new_state_dict[name] = v + state_dict = new_state_dict + elif 'model_state' in checkpoint: + state_dict_key = 'model_state' + new_state_dict = OrderedDict() + for k, v in checkpoint[state_dict_key].items(): + # strip `model.` prefix + name = k[6:] if k.startswith('model') else k + new_state_dict[name] = v + state_dict = new_state_dict + else: + state_dict = checkpoint + _logger.info("Loaded {} from checkpoint '{}'".format(state_dict_key, checkpoint_path)) + return state_dict + else: + _logger.error("No checkpoint found at '{}'".format(checkpoint_path)) + raise FileNotFoundError() + + +def load_checkpoint(model, checkpoint_path, use_ema=False, strict=True): + state_dict = load_state_dict(checkpoint_path, use_ema) + model.load_state_dict(state_dict, strict=strict) + + +def resume_checkpoint(model, checkpoint_path, optimizer=None, loss_scaler=None, log_info=True): + resume_epoch = None + if os.path.isfile(checkpoint_path): + checkpoint = torch.load(checkpoint_path, map_location='cpu') + if isinstance(checkpoint, dict) and 'state_dict' in checkpoint: + if log_info: + _logger.info('Restoring model state from checkpoint...') + new_state_dict = OrderedDict() + for k, v in checkpoint['state_dict'].items(): + name = k[7:] if k.startswith('module') else k + new_state_dict[name] = v + model.load_state_dict(new_state_dict) + + if optimizer is not None and 'optimizer' in checkpoint: + if log_info: + _logger.info('Restoring optimizer state from checkpoint...') + optimizer.load_state_dict(checkpoint['optimizer']) + + if loss_scaler is not None and loss_scaler.state_dict_key in checkpoint: + if log_info: + _logger.info('Restoring AMP loss scaler state from checkpoint...') + loss_scaler.load_state_dict(checkpoint[loss_scaler.state_dict_key]) + + if 'epoch' in checkpoint: + resume_epoch = checkpoint['epoch'] + if 'version' in checkpoint and checkpoint['version'] > 1: + resume_epoch += 1 # start at the next epoch, old checkpoints incremented before save + + if log_info: + _logger.info("Loaded checkpoint '{}' (epoch {})".format(checkpoint_path, checkpoint['epoch'])) + else: + model.load_state_dict(checkpoint) + if log_info: + _logger.info("Loaded checkpoint '{}'".format(checkpoint_path)) + return resume_epoch + else: + _logger.error("No checkpoint found at '{}'".format(checkpoint_path)) + raise FileNotFoundError() + + +def load_pretrained(model, cfg=None, num_classes=1000, in_chans=3, filter_fn=None, 
img_size=224, num_frames=8, num_patches=196, attention_type='divided_space_time', pretrained_model="", strict=True): + if cfg is None: + cfg = getattr(model, 'default_cfg') + if cfg is None or 'url' not in cfg or not cfg['url']: + _logger.warning("Pretrained model URL is invalid, using random initialization.") + return + + if len(pretrained_model) == 0: + state_dict = model_zoo.load_url(cfg['url'], progress=False, map_location='cpu') + else: + try: + state_dict = load_state_dict(pretrained_model)['model'] + except: + state_dict = load_state_dict(pretrained_model) + + + if filter_fn is not None: + state_dict = filter_fn(state_dict) + + if in_chans == 1: + conv1_name = cfg['first_conv'] + _logger.info('Converting first conv (%s) pretrained weights from 3 to 1 channel' % conv1_name) + conv1_weight = state_dict[conv1_name + '.weight'] + conv1_type = conv1_weight.dtype + conv1_weight = conv1_weight.float() + O, I, J, K = conv1_weight.shape + if I > 3: + assert conv1_weight.shape[1] % 3 == 0 + # For models with space2depth stems + conv1_weight = conv1_weight.reshape(O, I // 3, 3, J, K) + conv1_weight = conv1_weight.sum(dim=2, keepdim=False) + else: + conv1_weight = conv1_weight.sum(dim=1, keepdim=True) + conv1_weight = conv1_weight.to(conv1_type) + state_dict[conv1_name + '.weight'] = conv1_weight + elif in_chans != 3: + conv1_name = cfg['first_conv'] + conv1_weight = state_dict[conv1_name + '.weight'] + conv1_type = conv1_weight.dtype + conv1_weight = conv1_weight.float() + O, I, J, K = conv1_weight.shape + if I != 3: + _logger.warning('Deleting first conv (%s) from pretrained weights.' % conv1_name) + del state_dict[conv1_name + '.weight'] + strict = False + else: + _logger.info('Repeating first conv (%s) weights in channel dim.' % conv1_name) + repeat = int(math.ceil(in_chans / 3)) + conv1_weight = conv1_weight.repeat(1, repeat, 1, 1)[:, :in_chans, :, :] + conv1_weight *= (3 / float(in_chans)) + conv1_weight = conv1_weight.to(conv1_type) + state_dict[conv1_name + '.weight'] = conv1_weight + + + classifier_name = cfg['classifier'] + if num_classes == 1000 and cfg['num_classes'] == 1001: + # special case for imagenet trained models with extra background class in pretrained weights + classifier_weight = state_dict[classifier_name + '.weight'] + state_dict[classifier_name + '.weight'] = classifier_weight[1:] + classifier_bias = state_dict[classifier_name + '.bias'] + state_dict[classifier_name + '.bias'] = classifier_bias[1:] + elif num_classes != state_dict[classifier_name + '.weight'].size(0): + #print('Removing the last fully connected layer due to dimensions mismatch ('+str(num_classes)+ ' != '+str(state_dict[classifier_name + '.weight'].size(0))+').', flush=True) + # completely discard fully connected for all other differences between pretrained and created model + del state_dict[classifier_name + '.weight'] + del state_dict[classifier_name + '.bias'] + strict = False + + + ## Resizing the positional embeddings in case they don't match + if num_patches + 1 != state_dict['pos_embed'].size(1): + pos_embed = state_dict['pos_embed'] + cls_pos_embed = pos_embed[0,0,:].unsqueeze(0).unsqueeze(1) + other_pos_embed = pos_embed[0,1:,:].unsqueeze(0).transpose(1, 2) + new_pos_embed = F.interpolate(other_pos_embed, size=(num_patches), mode='nearest') + new_pos_embed = new_pos_embed.transpose(1, 2) + new_pos_embed = torch.cat((cls_pos_embed, new_pos_embed), 1) + state_dict['pos_embed'] = new_pos_embed + + ## Resizing time embeddings in case they don't match + if 'time_embed' in state_dict and num_frames 
!= state_dict['time_embed'].size(1): + time_embed = state_dict['time_embed'].transpose(1, 2) + new_time_embed = F.interpolate(time_embed, size=(num_frames), mode='nearest') + state_dict['time_embed'] = new_time_embed.transpose(1, 2) + + ## Initializing temporal attention + if attention_type == 'divided_space_time': + new_state_dict = state_dict.copy() + for key in state_dict: + if 'blocks' in key and 'attn' in key: + new_key = key.replace('attn','temporal_attn') + if not new_key in state_dict: + new_state_dict[new_key] = state_dict[key] + else: + new_state_dict[new_key] = state_dict[new_key] + if 'blocks' in key and 'norm1' in key: + new_key = key.replace('norm1','temporal_norm1') + if not new_key in state_dict: + new_state_dict[new_key] = state_dict[key] + else: + new_state_dict[new_key] = state_dict[new_key] + state_dict = new_state_dict + + ## Loading the weights + model.load_state_dict(state_dict, strict=False) + + +def extract_layer(model, layer): + layer = layer.split('.') + module = model + if hasattr(model, 'module') and layer[0] != 'module': + module = model.module + if not hasattr(model, 'module') and layer[0] == 'module': + layer = layer[1:] + for l in layer: + if hasattr(module, l): + if not l.isdigit(): + module = getattr(module, l) + else: + module = module[int(l)] + else: + return module + return module + + +def set_layer(model, layer, val): + layer = layer.split('.') + module = model + if hasattr(model, 'module') and layer[0] != 'module': + module = model.module + lst_index = 0 + module2 = module + for l in layer: + if hasattr(module2, l): + if not l.isdigit(): + module2 = getattr(module2, l) + else: + module2 = module2[int(l)] + lst_index += 1 + lst_index -= 1 + for l in layer[:lst_index]: + if not l.isdigit(): + module = getattr(module, l) + else: + module = module[int(l)] + l = layer[lst_index] + setattr(module, l, val) + + +def adapt_model_from_string(parent_module, model_string): + separator = '***' + state_dict = {} + lst_shape = model_string.split(separator) + for k in lst_shape: + k = k.split(':') + key = k[0] + shape = k[1][1:-1].split(',') + if shape[0] != '': + state_dict[key] = [int(i) for i in shape] + + new_module = deepcopy(parent_module) + for n, m in parent_module.named_modules(): + old_module = extract_layer(parent_module, n) + if isinstance(old_module, nn.Conv2d) or isinstance(old_module, Conv2dSame): + if isinstance(old_module, Conv2dSame): + conv = Conv2dSame + else: + conv = nn.Conv2d + s = state_dict[n + '.weight'] + in_channels = s[1] + out_channels = s[0] + g = 1 + if old_module.groups > 1: + in_channels = out_channels + g = in_channels + new_conv = conv( + in_channels=in_channels, out_channels=out_channels, kernel_size=old_module.kernel_size, + bias=old_module.bias is not None, padding=old_module.padding, dilation=old_module.dilation, + groups=g, stride=old_module.stride) + set_layer(new_module, n, new_conv) + if isinstance(old_module, nn.BatchNorm2d): + new_bn = nn.BatchNorm2d( + num_features=state_dict[n + '.weight'][0], eps=old_module.eps, momentum=old_module.momentum, + affine=old_module.affine, track_running_stats=True) + set_layer(new_module, n, new_bn) + if isinstance(old_module, nn.Linear): + num_features = state_dict[n + '.weight'][1] + new_fc = Linear( + in_features=num_features, out_features=old_module.out_features, bias=old_module.bias is not None) + set_layer(new_module, n, new_fc) + if hasattr(new_module, 'num_features'): + new_module.num_features = num_features + new_module.eval() + parent_module.eval() + + return new_module + + +def 
adapt_model_from_file(parent_module, model_variant): + adapt_file = os.path.join(os.path.dirname(__file__), 'pruned', model_variant + '.txt') + with open(adapt_file, 'r') as f: + return adapt_model_from_string(parent_module, f.read().strip()) + + +def default_cfg_for_features(default_cfg): + default_cfg = deepcopy(default_cfg) + # remove default pretrained cfg fields that don't have much relevance for feature backbone + to_remove = ('num_classes', 'crop_pct', 'classifier') # add default final pool size? + for tr in to_remove: + default_cfg.pop(tr, None) + return default_cfg + + +def build_model_with_cfg( + model_cls: Callable, + variant: str, + pretrained: bool, + default_cfg: dict, + model_cfg: dict = None, + feature_cfg: dict = None, + pretrained_strict: bool = True, + pretrained_filter_fn: Callable = None, + **kwargs): + pruned = kwargs.pop('pruned', False) + features = False + feature_cfg = feature_cfg or {} + + if kwargs.pop('features_only', False): + features = True + feature_cfg.setdefault('out_indices', (0, 1, 2, 3, 4)) + if 'out_indices' in kwargs: + feature_cfg['out_indices'] = kwargs.pop('out_indices') + + model = model_cls(**kwargs) if model_cfg is None else model_cls(cfg=model_cfg, **kwargs) + model.default_cfg = deepcopy(default_cfg) + + if pruned: + model = adapt_model_from_file(model, variant) + + # for classification models, check class attr, then kwargs, then default to 1k, otherwise 0 for feats + num_classes_pretrained = 0 if features else getattr(model, 'num_classes', kwargs.get('num_classes', 1000)) + if pretrained: + load_pretrained( + model, + num_classes=num_classes_pretrained, in_chans=kwargs.get('in_chans', 3), + filter_fn=pretrained_filter_fn, strict=pretrained_strict) + + if features: + feature_cls = FeatureListNet + if 'feature_cls' in feature_cfg: + feature_cls = feature_cfg.pop('feature_cls') + if isinstance(feature_cls, str): + feature_cls = feature_cls.lower() + if 'hook' in feature_cls: + feature_cls = FeatureHookNet + else: + assert False, f'Unknown feature class {feature_cls}' + model = feature_cls(model, **feature_cfg) + model.default_cfg = default_cfg_for_features(default_cfg) # add back default_cfg + + return model diff --git a/TimeSformer/timesformer/models/linear.py b/TimeSformer/timesformer/models/linear.py new file mode 100755 index 0000000000000000000000000000000000000000..940ec8c8bbd8c0a91d6da6e029276df4e8edd36c --- /dev/null +++ b/TimeSformer/timesformer/models/linear.py @@ -0,0 +1,13 @@ +""" Linear layer (alternate definition) +""" +import torch +import torch.nn.functional as F +from torch import nn as nn + +class Linear(nn.Linear): + def forward(self, input: torch.Tensor) -> torch.Tensor: + if torch.jit.is_scripting(): + bias = self.bias.to(dtype=input.dtype) if self.bias is not None else None + return F.linear(input, self.weight.to(dtype=input.dtype), bias=bias) + else: + return F.linear(input, self.weight, self.bias) diff --git a/TimeSformer/timesformer/models/losses.py b/TimeSformer/timesformer/models/losses.py new file mode 100755 index 0000000000000000000000000000000000000000..caa42ddd55ce8a15cae2b38144f82a6f9c9eb7d0 --- /dev/null +++ b/TimeSformer/timesformer/models/losses.py @@ -0,0 +1,22 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Loss functions.""" + +import torch.nn as nn + +_LOSSES = { + "cross_entropy": nn.CrossEntropyLoss, + "bce": nn.BCELoss, + "bce_logit": nn.BCEWithLogitsLoss, +} + + +def get_loss_func(loss_name): + """ + Retrieve the loss given the loss name. 
+    Args:
+        loss_name (string): the name of the loss to use.
+    """
+    if loss_name not in _LOSSES.keys():
+        raise NotImplementedError("Loss {} is not supported".format(loss_name))
+    return _LOSSES[loss_name]
diff --git a/TimeSformer/timesformer/models/nonlocal_helper.py b/TimeSformer/timesformer/models/nonlocal_helper.py
new file mode 100755
index 0000000000000000000000000000000000000000..06a12587cd9372753d3fad44f178fbe3c617a760
--- /dev/null
+++ b/TimeSformer/timesformer/models/nonlocal_helper.py
@@ -0,0 +1,147 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+"""Non-local helper."""
+
+import torch
+import torch.nn as nn
+
+
+class Nonlocal(nn.Module):
+    """
+    Builds Non-local Neural Networks as a generic family of building
+    blocks for capturing long-range dependencies. A Non-local Network
+    computes the response at a position as a weighted sum of the
+    features at all positions. This building block can be plugged into
+    many computer vision architectures.
+    More details in the paper: https://arxiv.org/pdf/1711.07971.pdf
+    """
+
+    def __init__(
+        self,
+        dim,
+        dim_inner,
+        pool_size=None,
+        instantiation="softmax",
+        zero_init_final_conv=False,
+        zero_init_final_norm=True,
+        norm_eps=1e-5,
+        norm_momentum=0.1,
+        norm_module=nn.BatchNorm3d,
+    ):
+        """
+        Args:
+            dim (int): number of dimensions of the input.
+            dim_inner (int): number of dimensions inside the Non-local block.
+            pool_size (list): the kernel size of spatial-temporal pooling:
+                temporal pool kernel size, spatial pool kernel size, spatial
+                pool kernel size, in order. By default pool_size is None,
+                in which case no pooling is used.
+            instantiation (string): supports two different instantiation methods:
+                "dot_product": normalizing correlation matrix with L2.
+                "softmax": normalizing correlation matrix with Softmax.
+            zero_init_final_conv (bool): if True, zero-initialize the final
+                convolution of the Non-local block.
+            zero_init_final_norm (bool): if True, zero-initialize the final
+                batch norm of the Non-local block.
+            norm_module (nn.Module): nn.Module for the normalization layer. The
+                default is nn.BatchNorm3d.
+        """
+        super(Nonlocal, self).__init__()
+        self.dim = dim
+        self.dim_inner = dim_inner
+        self.pool_size = pool_size
+        self.instantiation = instantiation
+        self.use_pool = (
+            False
+            if pool_size is None
+            else any(size > 1 for size in pool_size)
+        )
+        self.norm_eps = norm_eps
+        self.norm_momentum = norm_momentum
+        self._construct_nonlocal(
+            zero_init_final_conv, zero_init_final_norm, norm_module
+        )
+
+    def _construct_nonlocal(
+        self, zero_init_final_conv, zero_init_final_norm, norm_module
+    ):
+        # Three convolution heads: theta, phi, and g.
+        self.conv_theta = nn.Conv3d(
+            self.dim, self.dim_inner, kernel_size=1, stride=1, padding=0
+        )
+        self.conv_phi = nn.Conv3d(
+            self.dim, self.dim_inner, kernel_size=1, stride=1, padding=0
+        )
+        self.conv_g = nn.Conv3d(
+            self.dim, self.dim_inner, kernel_size=1, stride=1, padding=0
+        )
+
+        # Final convolution output.
+        self.conv_out = nn.Conv3d(
+            self.dim_inner, self.dim, kernel_size=1, stride=1, padding=0
+        )
+        # Zero-initialize the final convolution output.
+        self.conv_out.zero_init = zero_init_final_conv
+
+        # TODO: change the name to `norm`
+        self.bn = norm_module(
+            num_features=self.dim,
+            eps=self.norm_eps,
+            momentum=self.norm_momentum,
+        )
+        # Zero-initialize the final bn.
+        self.bn.transform_final_bn = zero_init_final_norm
+
+        # Optionally add spatial-temporal pooling.
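The pooling configured just below shrinks only the key/value side of the non-local block: `theta` keeps full resolution while `phi` and `g` are computed on the pooled tensor, so the affinity matrix narrows accordingly. A minimal sketch of that cost saving, with toy sizes and an assumed 2x2x2 pool (not values from this repo):

```python
import torch
import torch.nn as nn

# Toy sizes; a 2x2x2 max pool is assumed here purely for illustration.
N, C, T, H, W = 1, 8, 8, 14, 14
x = torch.randn(N, C, T, H, W)
pool = nn.MaxPool3d(kernel_size=[2, 2, 2], stride=[2, 2, 2], padding=[0, 0, 0])

P_full = T * H * W                         # positions seen by theta
P_pooled = pool(x).flatten(2).shape[-1]    # positions seen by phi / g
print(P_full, P_pooled)                    # 1568 196
# Affinity entries per sample: full P x P vs. P x (P / 8) with pooling.
print(P_full * P_full, P_full * P_pooled)  # 2458624 307328
```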
+ if self.use_pool: + self.pool = nn.MaxPool3d( + kernel_size=self.pool_size, + stride=self.pool_size, + padding=[0, 0, 0], + ) + + def forward(self, x): + x_identity = x + N, C, T, H, W = x.size() + + theta = self.conv_theta(x) + + # Perform temporal-spatial pooling to reduce the computation. + if self.use_pool: + x = self.pool(x) + + phi = self.conv_phi(x) + g = self.conv_g(x) + + theta = theta.view(N, self.dim_inner, -1) + phi = phi.view(N, self.dim_inner, -1) + g = g.view(N, self.dim_inner, -1) + + # (N, C, TxHxW) * (N, C, TxHxW) => (N, TxHxW, TxHxW). + theta_phi = torch.einsum("nct,ncp->ntp", (theta, phi)) + # For original Non-local paper, there are two main ways to normalize + # the affinity tensor: + # 1) Softmax normalization (norm on exp). + # 2) dot_product normalization. + if self.instantiation == "softmax": + # Normalizing the affinity tensor theta_phi before softmax. + theta_phi = theta_phi * (self.dim_inner ** -0.5) + theta_phi = nn.functional.softmax(theta_phi, dim=2) + elif self.instantiation == "dot_product": + spatial_temporal_dim = theta_phi.shape[2] + theta_phi = theta_phi / spatial_temporal_dim + else: + raise NotImplementedError( + "Unknown norm type {}".format(self.instantiation) + ) + + # (N, TxHxW, TxHxW) * (N, C, TxHxW) => (N, C, TxHxW). + theta_phi_g = torch.einsum("ntg,ncg->nct", (theta_phi, g)) + + # (N, C, TxHxW) => (N, C, T, H, W). + theta_phi_g = theta_phi_g.view(N, self.dim_inner, T, H, W) + + p = self.conv_out(theta_phi_g) + p = self.bn(p) + return x_identity + p diff --git a/TimeSformer/timesformer/models/operators.py b/TimeSformer/timesformer/models/operators.py new file mode 100755 index 0000000000000000000000000000000000000000..85474d44a2836a50d451a8c6df971cf3efdba5b9 --- /dev/null +++ b/TimeSformer/timesformer/models/operators.py @@ -0,0 +1,81 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Custom operators.""" + +import torch +import torch.nn as nn + + +class Swish(nn.Module): + """Swish activation function: x * sigmoid(x).""" + + def __init__(self): + super(Swish, self).__init__() + + def forward(self, x): + return SwishEfficient.apply(x) + + +class SwishEfficient(torch.autograd.Function): + """Swish activation function: x * sigmoid(x).""" + + @staticmethod + def forward(ctx, x): + result = x * torch.sigmoid(x) + ctx.save_for_backward(x) + return result + + @staticmethod + def backward(ctx, grad_output): + x = ctx.saved_variables[0] + sigmoid_x = torch.sigmoid(x) + return grad_output * (sigmoid_x * (1 + x * (1 - sigmoid_x))) + + +class SE(nn.Module): + """Squeeze-and-Excitation (SE) block w/ Swish: AvgPool, FC, Swish, FC, Sigmoid.""" + + def _round_width(self, width, multiplier, min_width=8, divisor=8): + """ + Round width of filters based on width multiplier + Args: + width (int): the channel dimensions of the input. + multiplier (float): the multiplication factor. + min_width (int): the minimum width after multiplication. + divisor (int): the new width should be dividable by divisor. + """ + if not multiplier: + return width + + width *= multiplier + min_width = min_width or divisor + width_out = max( + min_width, int(width + divisor / 2) // divisor * divisor + ) + if width_out < 0.9 * width: + width_out += divisor + return int(width_out) + + def __init__(self, dim_in, ratio, relu_act=True): + """ + Args: + dim_in (int): the channel dimensions of the input. + ratio (float): the channel reduction ratio for squeeze. + relu_act (bool): whether to use ReLU activation instead + of Swish (default). 
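The pair of `einsum` calls in `Nonlocal.forward` above is ordinary dot-product attention over all T*H*W positions. A standalone sketch of the softmax instantiation with toy sizes and no convolutions (shapes only, not the module's exact code path):

```python
import torch
import torch.nn.functional as F

N, C, T, H, W = 2, 8, 4, 7, 7
theta = torch.randn(N, C, T * H * W)  # queries, flattened over space-time
phi = torch.randn(N, C, T * H * W)    # keys
g = torch.randn(N, C, T * H * W)      # values

# (N, C, P) x (N, C, P) -> (N, P, P): affinity between every pair of positions.
aff = torch.einsum("nct,ncp->ntp", theta, phi)
aff = F.softmax(aff * C ** -0.5, dim=2)  # scaled softmax normalization

# (N, P, P) x (N, C, P) -> (N, C, P), then unflatten back to (N, C, T, H, W).
out = torch.einsum("ntg,ncg->nct", aff, g).view(N, C, T, H, W)
print(out.shape)  # torch.Size([2, 8, 4, 7, 7])
```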
+ divisor (int): the new width should be dividable by divisor. + """ + super(SE, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool3d((1, 1, 1)) + dim_fc = self._round_width(dim_in, ratio) + self.fc1 = nn.Conv3d(dim_in, dim_fc, 1, bias=True) + self.fc1_act = nn.ReLU() if relu_act else Swish() + self.fc2 = nn.Conv3d(dim_fc, dim_in, 1, bias=True) + + self.fc2_sig = nn.Sigmoid() + + def forward(self, x): + x_in = x + for module in self.children(): + x = module(x) + return x_in * x diff --git a/TimeSformer/timesformer/models/optimizer.py b/TimeSformer/timesformer/models/optimizer.py new file mode 100755 index 0000000000000000000000000000000000000000..86ccbb324ef09cabd99eed6c1cdc2a0aa6de6638 --- /dev/null +++ b/TimeSformer/timesformer/models/optimizer.py @@ -0,0 +1,99 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Optimizer.""" + +import torch + +import timesformer.utils.lr_policy as lr_policy + + +def construct_optimizer(model, cfg): + """ + Construct a stochastic gradient descent or ADAM optimizer with momentum. + Details can be found in: + Herbert Robbins, and Sutton Monro. "A stochastic approximation method." + and + Diederik P.Kingma, and Jimmy Ba. + "Adam: A Method for Stochastic Optimization." + + Args: + model (model): model to perform stochastic gradient descent + optimization or ADAM optimization. + cfg (config): configs of hyper-parameters of SGD or ADAM, includes base + learning rate, momentum, weight_decay, dampening, and etc. + """ + # Batchnorm parameters. + bn_params = [] + # Non-batchnorm parameters. + non_bn_parameters = [] + for name, p in model.named_parameters(): + if "bn" in name: + bn_params.append(p) + else: + non_bn_parameters.append(p) + # Apply different weight decay to Batchnorm and non-batchnorm parameters. + # In Caffe2 classification codebase the weight decay for batchnorm is 0.0. + # Having a different weight decay on batchnorm might cause a performance + # drop. + optim_params = [ + {"params": bn_params, "weight_decay": cfg.BN.WEIGHT_DECAY}, + {"params": non_bn_parameters, "weight_decay": cfg.SOLVER.WEIGHT_DECAY}, + ] + # Check all parameters will be passed into optimizer. + assert len(list(model.parameters())) == len(non_bn_parameters) + len( + bn_params + ), "parameter size does not match: {} + {} != {}".format( + len(non_bn_parameters), len(bn_params), len(list(model.parameters())) + ) + + if cfg.SOLVER.OPTIMIZING_METHOD == "sgd": + return torch.optim.SGD( + optim_params, + lr=cfg.SOLVER.BASE_LR, + momentum=cfg.SOLVER.MOMENTUM, + weight_decay=cfg.SOLVER.WEIGHT_DECAY, + dampening=cfg.SOLVER.DAMPENING, + nesterov=cfg.SOLVER.NESTEROV, + ) + elif cfg.SOLVER.OPTIMIZING_METHOD == "adam": + return torch.optim.Adam( + optim_params, + lr=cfg.SOLVER.BASE_LR, + betas=(0.9, 0.999), + eps=1e-08, + weight_decay=cfg.SOLVER.WEIGHT_DECAY, + ) + elif cfg.SOLVER.OPTIMIZING_METHOD == "adamw": + return torch.optim.AdamW( + optim_params, + lr=cfg.SOLVER.BASE_LR, + betas=(0.9, 0.999), + eps=1e-08, + weight_decay=cfg.SOLVER.WEIGHT_DECAY, + ) + else: + raise NotImplementedError( + "Does not support {} optimizer".format(cfg.SOLVER.OPTIMIZING_METHOD) + ) + + +def get_epoch_lr(cur_epoch, cfg): + """ + Retrieves the lr for the given epoch (as specified by the lr policy). + Args: + cfg (config): configs of hyper-parameters of ADAM, includes base + learning rate, betas, and weight decays. + cur_epoch (float): the number of epoch of the current training stage. 
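`construct_optimizer` above keys the BN/non-BN split purely on the substring "bn" appearing in the parameter name. A toy illustration of the resulting two parameter groups; the module, names, and decay values below are made up for the sketch and are not the repo's defaults:

```python
import torch
import torch.nn as nn

class Toy(nn.Module):
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv3d(3, 8, 3, padding=1)
        self.bn = nn.BatchNorm3d(8)  # attribute name contains "bn"
        self.fc = nn.Linear(8, 2)

model = Toy()
bn_params = [p for n, p in model.named_parameters() if "bn" in n]
other = [p for n, p in model.named_parameters() if "bn" not in n]

# Two param groups with different weight decay, mirroring construct_optimizer.
optimizer = torch.optim.SGD(
    [{"params": bn_params, "weight_decay": 0.0},
     {"params": other, "weight_decay": 1e-4}],
    lr=0.1, momentum=0.9,
)
print(len(optimizer.param_groups[0]["params"]),
      len(optimizer.param_groups[1]["params"]))  # 2 4
```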
+ """ + return lr_policy.get_lr_at_epoch(cfg, cur_epoch) + + +def set_lr(optimizer, new_lr): + """ + Sets the optimizer lr to the specified value. + Args: + optimizer (optim): the optimizer using to optimize the current network. + new_lr (float): the new learning rate to set. + """ + for param_group in optimizer.param_groups: + param_group["lr"] = new_lr diff --git a/TimeSformer/timesformer/models/resnet_helper.py b/TimeSformer/timesformer/models/resnet_helper.py new file mode 100755 index 0000000000000000000000000000000000000000..cd930ce0c3bf6c0d93c7be724b5afacb1897571e --- /dev/null +++ b/TimeSformer/timesformer/models/resnet_helper.py @@ -0,0 +1,745 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Video models.""" + +import torch +import torch.nn as nn + +from timesformer.models.nonlocal_helper import Nonlocal +from timesformer.models.operators import SE, Swish + +from torch import einsum +from einops import rearrange, reduce, repeat +import torch.nn.functional as F +from torch.nn.modules.module import Module +# from torch.nn.modules.linear import _LinearWithBias +from torch.nn.modules.activation import MultiheadAttention + +import numpy as np + +def get_trans_func(name): + """ + Retrieves the transformation module by name. + """ + trans_funcs = { + "bottleneck_transform": BottleneckTransform, + "basic_transform": BasicTransform, + "x3d_transform": X3DTransform, + } + assert ( + name in trans_funcs.keys() + ), "Transformation function '{}' not supported".format(name) + return trans_funcs[name] + + + + +class BasicTransform(nn.Module): + """ + Basic transformation: Tx3x3, 1x3x3, where T is the size of temporal kernel. + """ + + def __init__( + self, + dim_in, + dim_out, + temp_kernel_size, + stride, + dim_inner=None, + num_groups=1, + stride_1x1=None, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + norm_module=nn.BatchNorm3d, + block_idx=0, + ): + """ + Args: + dim_in (int): the channel dimensions of the input. + dim_out (int): the channel dimension of the output. + temp_kernel_size (int): the temporal kernel sizes of the first + convolution in the basic block. + stride (int): the stride of the bottleneck. + dim_inner (None): the inner dimension would not be used in + BasicTransform. + num_groups (int): number of groups for the convolution. Number of + group is always 1 for BasicTransform. + stride_1x1 (None): stride_1x1 will not be used in BasicTransform. + inplace_relu (bool): if True, calculate the relu on the original + input without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + """ + super(BasicTransform, self).__init__() + self.temp_kernel_size = temp_kernel_size + self._inplace_relu = inplace_relu + self._eps = eps + self._bn_mmt = bn_mmt + self._construct(dim_in, dim_out, stride, norm_module) + + def _construct(self, dim_in, dim_out, stride, norm_module): + # Tx3x3, BN, ReLU. + self.a = nn.Conv3d( + dim_in, + dim_out, + kernel_size=[self.temp_kernel_size, 3, 3], + stride=[1, stride, stride], + padding=[int(self.temp_kernel_size // 2), 1, 1], + bias=False, + ) + self.a_bn = norm_module( + num_features=dim_out, eps=self._eps, momentum=self._bn_mmt + ) + self.a_relu = nn.ReLU(inplace=self._inplace_relu) + # 1x3x3, BN. 
+ self.b = nn.Conv3d( + dim_out, + dim_out, + kernel_size=[1, 3, 3], + stride=[1, 1, 1], + padding=[0, 1, 1], + bias=False, + ) + self.b_bn = norm_module( + num_features=dim_out, eps=self._eps, momentum=self._bn_mmt + ) + + self.b_bn.transform_final_bn = True + + def forward(self, x): + x = self.a(x) + x = self.a_bn(x) + x = self.a_relu(x) + + x = self.b(x) + x = self.b_bn(x) + return x + + +class X3DTransform(nn.Module): + """ + X3D transformation: 1x1x1, Tx3x3 (channelwise, num_groups=dim_in), 1x1x1, + augmented with (optional) SE (squeeze-excitation) on the 3x3x3 output. + T is the temporal kernel size (defaulting to 3) + """ + + def __init__( + self, + dim_in, + dim_out, + temp_kernel_size, + stride, + dim_inner, + num_groups, + stride_1x1=False, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + dilation=1, + norm_module=nn.BatchNorm3d, + se_ratio=0.0625, + swish_inner=True, + block_idx=0, + ): + """ + Args: + dim_in (int): the channel dimensions of the input. + dim_out (int): the channel dimension of the output. + temp_kernel_size (int): the temporal kernel sizes of the middle + convolution in the bottleneck. + stride (int): the stride of the bottleneck. + dim_inner (int): the inner dimension of the block. + num_groups (int): number of groups for the convolution. num_groups=1 + is for standard ResNet like networks, and num_groups>1 is for + ResNeXt like networks. + stride_1x1 (bool): if True, apply stride to 1x1 conv, otherwise + apply stride to the 3x3 conv. + inplace_relu (bool): if True, calculate the relu on the original + input without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + dilation (int): size of dilation. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + se_ratio (float): if > 0, apply SE to the Tx3x3 conv, with the SE + channel dimensionality being se_ratio times the Tx3x3 conv dim. + swish_inner (bool): if True, apply swish to the Tx3x3 conv, otherwise + apply ReLU to the Tx3x3 conv. + """ + super(X3DTransform, self).__init__() + self.temp_kernel_size = temp_kernel_size + self._inplace_relu = inplace_relu + self._eps = eps + self._bn_mmt = bn_mmt + self._se_ratio = se_ratio + self._swish_inner = swish_inner + self._stride_1x1 = stride_1x1 + self._block_idx = block_idx + self._construct( + dim_in, + dim_out, + stride, + dim_inner, + num_groups, + dilation, + norm_module, + ) + + def _construct( + self, + dim_in, + dim_out, + stride, + dim_inner, + num_groups, + dilation, + norm_module, + ): + (str1x1, str3x3) = (stride, 1) if self._stride_1x1 else (1, stride) + + # 1x1x1, BN, ReLU. + self.a = nn.Conv3d( + dim_in, + dim_inner, + kernel_size=[1, 1, 1], + stride=[1, str1x1, str1x1], + padding=[0, 0, 0], + bias=False, + ) + self.a_bn = norm_module( + num_features=dim_inner, eps=self._eps, momentum=self._bn_mmt + ) + self.a_relu = nn.ReLU(inplace=self._inplace_relu) + + # Tx3x3, BN, ReLU. 
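Throughout these transforms the temporal padding is `temp_kernel_size // 2`, which keeps the temporal length unchanged for odd kernels while the stride only downsamples H and W. A quick shape check with assumed toy sizes:

```python
import torch
import torch.nn as nn

temp_kernel_size, stride = 3, 2
conv = nn.Conv3d(
    16, 16,
    kernel_size=[temp_kernel_size, 3, 3],
    stride=[1, stride, stride],             # stride the spatial dims only
    padding=[temp_kernel_size // 2, 1, 1],  # "same" padding in time
    bias=False,
)
x = torch.randn(1, 16, 8, 56, 56)  # (N, C, T, H, W)
print(conv(x).shape)               # torch.Size([1, 16, 8, 28, 28]) -- T preserved
```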
+ self.b = nn.Conv3d( + dim_inner, + dim_inner, + [self.temp_kernel_size, 3, 3], + stride=[1, str3x3, str3x3], + padding=[int(self.temp_kernel_size // 2), dilation, dilation], + groups=num_groups, + bias=False, + dilation=[1, dilation, dilation], + ) + self.b_bn = norm_module( + num_features=dim_inner, eps=self._eps, momentum=self._bn_mmt + ) + + # Apply SE attention or not + use_se = True if (self._block_idx + 1) % 2 else False + if self._se_ratio > 0.0 and use_se: + self.se = SE(dim_inner, self._se_ratio) + + if self._swish_inner: + self.b_relu = Swish() + else: + self.b_relu = nn.ReLU(inplace=self._inplace_relu) + + # 1x1x1, BN. + self.c = nn.Conv3d( + dim_inner, + dim_out, + kernel_size=[1, 1, 1], + stride=[1, 1, 1], + padding=[0, 0, 0], + bias=False, + ) + self.c_bn = norm_module( + num_features=dim_out, eps=self._eps, momentum=self._bn_mmt + ) + self.c_bn.transform_final_bn = True + + def forward(self, x): + for block in self.children(): + x = block(x) + return x + +class BottleneckTransform(nn.Module): + """ + Bottleneck transformation: Tx1x1, 1x3x3, 1x1x1, where T is the size of + temporal kernel. + """ + + def __init__( + self, + dim_in, + dim_out, + temp_kernel_size, + stride, + dim_inner, + num_groups, + stride_1x1=False, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + dilation=1, + norm_module=nn.BatchNorm3d, + block_idx=0, + ): + """ + Args: + dim_in (int): the channel dimensions of the input. + dim_out (int): the channel dimension of the output. + temp_kernel_size (int): the temporal kernel sizes of the first + convolution in the bottleneck. + stride (int): the stride of the bottleneck. + dim_inner (int): the inner dimension of the block. + num_groups (int): number of groups for the convolution. num_groups=1 + is for standard ResNet like networks, and num_groups>1 is for + ResNeXt like networks. + stride_1x1 (bool): if True, apply stride to 1x1 conv, otherwise + apply stride to the 3x3 conv. + inplace_relu (bool): if True, calculate the relu on the original + input without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + dilation (int): size of dilation. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + """ + super(BottleneckTransform, self).__init__() + self.temp_kernel_size = temp_kernel_size + self._inplace_relu = inplace_relu + self._eps = eps + self._bn_mmt = bn_mmt + self._stride_1x1 = stride_1x1 + self._construct( + dim_in, + dim_out, + stride, + dim_inner, + num_groups, + dilation, + norm_module, + ) + + def _construct( + self, + dim_in, + dim_out, + stride, + dim_inner, + num_groups, + dilation, + norm_module, + ): + (str1x1, str3x3) = (stride, 1) if self._stride_1x1 else (1, stride) + + #print(str1x1, str3x3) + + # Tx1x1, BN, ReLU. + self.a = nn.Conv3d( + dim_in, + dim_inner, + kernel_size=[self.temp_kernel_size, 1, 1], + stride=[1, str1x1, str1x1], + padding=[int(self.temp_kernel_size // 2), 0, 0], + bias=False, + ) + self.a_bn = norm_module( + num_features=dim_inner, eps=self._eps, momentum=self._bn_mmt + ) + self.a_relu = nn.ReLU(inplace=self._inplace_relu) + + # 1x3x3, BN, ReLU. 
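X3D's Tx3x3 convolution above is channelwise (`groups=num_groups` with `num_groups == dim_inner` in the channelwise configuration), which is where most of its parameter savings come from relative to a dense convolution. Comparing parameter counts directly, with an assumed channel width of 64:

```python
import torch.nn as nn

dim = 64
dense = nn.Conv3d(dim, dim, [3, 3, 3], padding=1, bias=False)
depthwise = nn.Conv3d(dim, dim, [3, 3, 3], padding=1, bias=False, groups=dim)

count = lambda m: sum(p.numel() for p in m.parameters())
print(count(dense))      # 64 * 64 * 27 = 110592
print(count(depthwise))  # 64 * 1 * 27  = 1728, i.e. dim-times fewer weights
```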
+ self.b = nn.Conv3d( + dim_inner, + dim_inner, + [1, 3, 3], + stride=[1, str3x3, str3x3], + padding=[0, dilation, dilation], + groups=num_groups, + bias=False, + dilation=[1, dilation, dilation], + ) + self.b_bn = norm_module( + num_features=dim_inner, eps=self._eps, momentum=self._bn_mmt + ) + self.b_relu = nn.ReLU(inplace=self._inplace_relu) + + # 1x1x1, BN. + self.c = nn.Conv3d( + dim_inner, + dim_out, + kernel_size=[1, 1, 1], + stride=[1, 1, 1], + padding=[0, 0, 0], + bias=False, + ) + self.c_bn = norm_module( + num_features=dim_out, eps=self._eps, momentum=self._bn_mmt + ) + self.c_bn.transform_final_bn = True + + def forward(self, x): + # Explicitly forward every layer. + # Branch2a. + x = self.a(x) + x = self.a_bn(x) + x = self.a_relu(x) + + # Branch2b. + x = self.b(x) + x = self.b_bn(x) + x = self.b_relu(x) + + # Branch2c + x = self.c(x) + x = self.c_bn(x) + return x + + +class ResBlock(nn.Module): + """ + Residual block. + """ + + def __init__( + self, + dim_in, + dim_out, + temp_kernel_size, + stride, + trans_func, + dim_inner, + num_groups=1, + stride_1x1=False, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + dilation=1, + norm_module=nn.BatchNorm3d, + block_idx=0, + drop_connect_rate=0.0, + ): + """ + ResBlock class constructs redisual blocks. More details can be found in: + Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun. + "Deep residual learning for image recognition." + https://arxiv.org/abs/1512.03385 + Args: + dim_in (int): the channel dimensions of the input. + dim_out (int): the channel dimension of the output. + temp_kernel_size (int): the temporal kernel sizes of the middle + convolution in the bottleneck. + stride (int): the stride of the bottleneck. + trans_func (string): transform function to be used to construct the + bottleneck. + dim_inner (int): the inner dimension of the block. + num_groups (int): number of groups for the convolution. num_groups=1 + is for standard ResNet like networks, and num_groups>1 is for + ResNeXt like networks. + stride_1x1 (bool): if True, apply stride to 1x1 conv, otherwise + apply stride to the 3x3 conv. + inplace_relu (bool): calculate the relu on the original input + without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + dilation (int): size of dilation. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + drop_connect_rate (float): basic rate at which blocks are dropped, + linearly increases from input to output blocks. + """ + super(ResBlock, self).__init__() + self._inplace_relu = inplace_relu + self._eps = eps + self._bn_mmt = bn_mmt + self._drop_connect_rate = drop_connect_rate + self._construct( + dim_in, + dim_out, + temp_kernel_size, + stride, + trans_func, + dim_inner, + num_groups, + stride_1x1, + inplace_relu, + dilation, + norm_module, + block_idx, + ) + + def _construct( + self, + dim_in, + dim_out, + temp_kernel_size, + stride, + trans_func, + dim_inner, + num_groups, + stride_1x1, + inplace_relu, + dilation, + norm_module, + block_idx, + ): + # Use skip connection with projection if dim or res change. 
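As the comment above says, `branch1` is only created when the shape changes; otherwise the identity path is used, which is why `forward` later guards on `hasattr(self, "branch1")`. A toy version of that pattern, with illustrative layer choices that simplify the real block:

```python
import torch
import torch.nn as nn

class TinyResBlock(nn.Module):
    # Illustrative only -- mirrors ResBlock's "projection iff shape changes" logic.
    def __init__(self, dim_in, dim_out, stride=1):
        super().__init__()
        if dim_in != dim_out or stride != 1:
            self.branch1 = nn.Conv3d(dim_in, dim_out, 1,
                                     stride=[1, stride, stride], bias=False)
        self.branch2 = nn.Conv3d(dim_in, dim_out, 3, padding=1,
                                 stride=[1, stride, stride], bias=False)

    def forward(self, x):
        f_x = self.branch2(x)
        shortcut = self.branch1(x) if hasattr(self, "branch1") else x
        return torch.relu(shortcut + f_x)

x = torch.randn(1, 16, 4, 32, 32)
print(TinyResBlock(16, 32, stride=2)(x).shape)  # torch.Size([1, 32, 4, 16, 16])
```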
+ if (dim_in != dim_out) or (stride != 1): + self.branch1 = nn.Conv3d( + dim_in, + dim_out, + kernel_size=1, + stride=[1, stride, stride], + padding=0, + bias=False, + dilation=1, + ) + self.branch1_bn = norm_module( + num_features=dim_out, eps=self._eps, momentum=self._bn_mmt + ) + self.branch2 = trans_func( + dim_in, + dim_out, + temp_kernel_size, + stride, + dim_inner, + num_groups, + stride_1x1=stride_1x1, + inplace_relu=inplace_relu, + dilation=dilation, + norm_module=norm_module, + block_idx=block_idx, + ) + self.relu = nn.ReLU(self._inplace_relu) + + def _drop_connect(self, x, drop_ratio): + """Apply dropconnect to x""" + keep_ratio = 1.0 - drop_ratio + mask = torch.empty( + [x.shape[0], 1, 1, 1, 1], dtype=x.dtype, device=x.device + ) + mask.bernoulli_(keep_ratio) + x.div_(keep_ratio) + x.mul_(mask) + return x + + def forward(self, x): + f_x = self.branch2(x) + if self.training and self._drop_connect_rate > 0.0: + f_x = self._drop_connect(f_x, self._drop_connect_rate) + if hasattr(self, "branch1"): + x = self.branch1_bn(self.branch1(x)) + f_x + else: + x = x + f_x + x = self.relu(x) + return x + + +class ResStage(nn.Module): + """ + Stage of 3D ResNet. It expects to have one or more tensors as input for + single pathway (C2D, I3D, Slow), and multi-pathway (SlowFast) cases. + More details can be found here: + + Christoph Feichtenhofer, Haoqi Fan, Jitendra Malik, and Kaiming He. + "SlowFast networks for video recognition." + https://arxiv.org/pdf/1812.03982.pdf + """ + + def __init__( + self, + dim_in, + dim_out, + stride, + temp_kernel_sizes, + num_blocks, + dim_inner, + num_groups, + num_block_temp_kernel, + nonlocal_inds, + nonlocal_group, + nonlocal_pool, + dilation, + instantiation="softmax", + trans_func_name="bottleneck_transform", + stride_1x1=False, + inplace_relu=True, + norm_module=nn.BatchNorm3d, + drop_connect_rate=0.0, + ): + """ + The `__init__` method of any subclass should also contain these arguments. + ResStage builds p streams, where p can be greater or equal to one. + Args: + dim_in (list): list of p the channel dimensions of the input. + Different channel dimensions control the input dimension of + different pathways. + dim_out (list): list of p the channel dimensions of the output. + Different channel dimensions control the input dimension of + different pathways. + temp_kernel_sizes (list): list of the p temporal kernel sizes of the + convolution in the bottleneck. Different temp_kernel_sizes + control different pathway. + stride (list): list of the p strides of the bottleneck. Different + stride control different pathway. + num_blocks (list): list of p numbers of blocks for each of the + pathway. + dim_inner (list): list of the p inner channel dimensions of the + input. Different channel dimensions control the input dimension + of different pathways. + num_groups (list): list of number of p groups for the convolution. + num_groups=1 is for standard ResNet like networks, and + num_groups>1 is for ResNeXt like networks. + num_block_temp_kernel (list): extent the temp_kernel_sizes to + num_block_temp_kernel blocks, then fill temporal kernel size + of 1 for the rest of the layers. + nonlocal_inds (list): If the tuple is empty, no nonlocal layer will + be added. If the tuple is not empty, add nonlocal layers after + the index-th block. + dilation (list): size of dilation for each pathway. + nonlocal_group (list): list of number of p nonlocal groups. Each + number controls how to fold temporal dimension to batch + dimension before applying nonlocal transformation. 
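`_drop_connect` above implements stochastic depth: at train time each sample's residual branch is zeroed with probability `drop_ratio`, and survivors are rescaled by `1/keep_ratio` so the expected value is unchanged. A standalone check of that property (this sketch uses out-of-place ops instead of the in-place `div_`/`mul_` above, but the math is the same):

```python
import torch

def drop_connect(x, drop_ratio):
    keep_ratio = 1.0 - drop_ratio
    mask = torch.empty([x.shape[0], 1, 1, 1, 1], dtype=x.dtype, device=x.device)
    mask.bernoulli_(keep_ratio)    # keep each sample with prob. keep_ratio
    return x / keep_ratio * mask   # rescale survivors -> E[out] == x

x = torch.ones(10000, 1, 1, 1, 1)
out = drop_connect(x, drop_ratio=0.2)
print(out.mean())                 # ~1.0: expectation preserved
print((out == 0).float().mean())  # ~0.2: fraction of dropped samples
```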
+ https://github.com/facebookresearch/video-nonlocal-net. + instantiation (string): different instantiation for nonlocal layer. + Supports two different instantiation method: + "dot_product": normalizing correlation matrix with L2. + "softmax": normalizing correlation matrix with Softmax. + trans_func_name (string): name of the the transformation function apply + on the network. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + drop_connect_rate (float): basic rate at which blocks are dropped, + linearly increases from input to output blocks. + """ + super(ResStage, self).__init__() + assert all( + ( + num_block_temp_kernel[i] <= num_blocks[i] + for i in range(len(temp_kernel_sizes)) + ) + ) + self.num_blocks = num_blocks + self.nonlocal_group = nonlocal_group + self._drop_connect_rate = drop_connect_rate + self.temp_kernel_sizes = [ + (temp_kernel_sizes[i] * num_blocks[i])[: num_block_temp_kernel[i]] + + [1] * (num_blocks[i] - num_block_temp_kernel[i]) + for i in range(len(temp_kernel_sizes)) + ] + assert ( + len( + { + len(dim_in), + len(dim_out), + len(temp_kernel_sizes), + len(stride), + len(num_blocks), + len(dim_inner), + len(num_groups), + len(num_block_temp_kernel), + len(nonlocal_inds), + len(nonlocal_group), + } + ) + == 1 + ) + self.num_pathways = len(self.num_blocks) + self._construct( + dim_in, + dim_out, + stride, + dim_inner, + num_groups, + trans_func_name, + stride_1x1, + inplace_relu, + nonlocal_inds, + nonlocal_pool, + instantiation, + dilation, + norm_module, + ) + + def _construct( + self, + dim_in, + dim_out, + stride, + dim_inner, + num_groups, + trans_func_name, + stride_1x1, + inplace_relu, + nonlocal_inds, + nonlocal_pool, + instantiation, + dilation, + norm_module, + ): + for pathway in range(self.num_pathways): + for i in range(self.num_blocks[pathway]): + # Retrieve the transformation function. + trans_func = get_trans_func(trans_func_name) + # Construct the block. + res_block = ResBlock( + dim_in[pathway] if i == 0 else dim_out[pathway], + dim_out[pathway], + self.temp_kernel_sizes[pathway][i], + stride[pathway] if i == 0 else 1, + trans_func, + dim_inner[pathway], + num_groups[pathway], + stride_1x1=stride_1x1, + inplace_relu=inplace_relu, + dilation=dilation[pathway], + norm_module=norm_module, + block_idx=i, + drop_connect_rate=self._drop_connect_rate, + ) + self.add_module("pathway{}_res{}".format(pathway, i), res_block) + if i in nonlocal_inds[pathway]: + nln = Nonlocal( + dim_out[pathway], + dim_out[pathway] // 2, + nonlocal_pool[pathway], + instantiation=instantiation, + norm_module=norm_module, + ) + self.add_module( + "pathway{}_nonlocal{}".format(pathway, i), nln + ) + + def forward(self, inputs): + output = [] + for pathway in range(self.num_pathways): + x = inputs[pathway] + for i in range(self.num_blocks[pathway]): + m = getattr(self, "pathway{}_res{}".format(pathway, i)) + x = m(x) + if hasattr(self, "pathway{}_nonlocal{}".format(pathway, i)): + nln = getattr( + self, "pathway{}_nonlocal{}".format(pathway, i) + ) + b, c, t, h, w = x.shape + if self.nonlocal_group[pathway] > 1: + # Fold temporal dimension into batch dimension. + x = x.permute(0, 2, 1, 3, 4) + x = x.reshape( + b * self.nonlocal_group[pathway], + t // self.nonlocal_group[pathway], + c, + h, + w, + ) + x = x.permute(0, 2, 1, 3, 4) + x = nln(x) + if self.nonlocal_group[pathway] > 1: + # Fold back to temporal dimension. 
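The fold/unfold around the nonlocal call in `ResStage.forward` trades temporal extent for batch size, so attention is computed within groups of `t / nonlocal_group` frames. A shape-only sketch of the round trip, with a group size of 2 assumed:

```python
import torch

b, c, t, h, w = 2, 8, 4, 7, 7
group = 2
x = torch.randn(b, c, t, h, w)

# Fold: (b, c, t, h, w) -> (b * group, c, t / group, h, w)
folded = (
    x.permute(0, 2, 1, 3, 4)
     .reshape(b * group, t // group, c, h, w)
     .permute(0, 2, 1, 3, 4)
)
print(folded.shape)  # torch.Size([4, 8, 2, 7, 7])

# Unfold: invert the same permute/reshape to recover the original layout.
restored = (
    folded.permute(0, 2, 1, 3, 4)
          .reshape(b, t, c, h, w)
          .permute(0, 2, 1, 3, 4)
)
print(torch.equal(restored, x))  # True
```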
+ x = x.permute(0, 2, 1, 3, 4) + x = x.reshape(b, t, c, h, w) + x = x.permute(0, 2, 1, 3, 4) + output.append(x) + + return output diff --git a/TimeSformer/timesformer/models/stem_helper.py b/TimeSformer/timesformer/models/stem_helper.py new file mode 100755 index 0000000000000000000000000000000000000000..691d9463286f86e5ea6185bf96711b18340bf7fd --- /dev/null +++ b/TimeSformer/timesformer/models/stem_helper.py @@ -0,0 +1,279 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""ResNe(X)t 3D stem helper.""" + +import torch.nn as nn + + +def get_stem_func(name): + """ + Retrieves the stem module by name. + """ + trans_funcs = {"x3d_stem": X3DStem, "basic_stem": ResNetBasicStem} + assert ( + name in trans_funcs.keys() + ), "Transformation function '{}' not supported".format(name) + return trans_funcs[name] + + +class VideoModelStem(nn.Module): + """ + Video 3D stem module. Provides stem operations of Conv, BN, ReLU, MaxPool + on input data tensor for one or multiple pathways. + """ + + def __init__( + self, + dim_in, + dim_out, + kernel, + stride, + padding, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + norm_module=nn.BatchNorm3d, + stem_func_name="basic_stem", + ): + """ + The `__init__` method of any subclass should also contain these + arguments. List size of 1 for single pathway models (C2D, I3D, Slow + and etc), list size of 2 for two pathway models (SlowFast). + + Args: + dim_in (list): the list of channel dimensions of the inputs. + dim_out (list): the output dimension of the convolution in the stem + layer. + kernel (list): the kernels' size of the convolutions in the stem + layers. Temporal kernel size, height kernel size, width kernel + size in order. + stride (list): the stride sizes of the convolutions in the stem + layer. Temporal kernel stride, height kernel size, width kernel + size in order. + padding (list): the paddings' sizes of the convolutions in the stem + layer. Temporal padding size, height padding size, width padding + size in order. + inplace_relu (bool): calculate the relu on the original input + without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + stem_func_name (string): name of the the stem function applied on + input to the network. + """ + super(VideoModelStem, self).__init__() + + assert ( + len( + { + len(dim_in), + len(dim_out), + len(kernel), + len(stride), + len(padding), + } + ) + == 1 + ), "Input pathway dimensions are not consistent." + self.num_pathways = len(dim_in) + self.kernel = kernel + self.stride = stride + self.padding = padding + self.inplace_relu = inplace_relu + self.eps = eps + self.bn_mmt = bn_mmt + # Construct the stem layer. 
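`VideoModelStem` (like `ResStage` above) registers one submodule per pathway with `add_module` and fetches it back with `getattr` -- the standard PyTorch pattern for a variable number of named children. A toy version of the same pattern, with made-up layer choices:

```python
import torch
import torch.nn as nn

class MultiPathway(nn.Module):
    # Toy version of the "pathway{}_stem" registration pattern.
    def __init__(self, dims_in, dim_out):
        super().__init__()
        self.num_pathways = len(dims_in)
        for pathway, dim_in in enumerate(dims_in):
            self.add_module(
                "pathway{}_stem".format(pathway),
                nn.Conv3d(dim_in, dim_out, 1),
            )

    def forward(self, x):  # x is a list with one tensor per pathway
        for pathway in range(self.num_pathways):
            m = getattr(self, "pathway{}_stem".format(pathway))
            x[pathway] = m(x[pathway])
        return x

model = MultiPathway([3, 3], 8)
out = model([torch.randn(1, 3, 4, 8, 8), torch.randn(1, 3, 2, 8, 8)])
print([t.shape for t in out])
```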
+ self._construct_stem(dim_in, dim_out, norm_module, stem_func_name) + + def _construct_stem(self, dim_in, dim_out, norm_module, stem_func_name): + trans_func = get_stem_func(stem_func_name) + + for pathway in range(len(dim_in)): + stem = trans_func( + dim_in[pathway], + dim_out[pathway], + self.kernel[pathway], + self.stride[pathway], + self.padding[pathway], + self.inplace_relu, + self.eps, + self.bn_mmt, + norm_module, + ) + self.add_module("pathway{}_stem".format(pathway), stem) + + def forward(self, x): + assert ( + len(x) == self.num_pathways + ), "Input tensor does not contain {} pathway".format(self.num_pathways) + for pathway in range(len(x)): + m = getattr(self, "pathway{}_stem".format(pathway)) + x[pathway] = m(x[pathway]) + return x + + +class ResNetBasicStem(nn.Module): + """ + ResNe(X)t 3D stem module. + Performs spatiotemporal Convolution, BN, and Relu following by a + spatiotemporal pooling. + """ + + def __init__( + self, + dim_in, + dim_out, + kernel, + stride, + padding, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + norm_module=nn.BatchNorm3d, + ): + """ + The `__init__` method of any subclass should also contain these arguments. + + Args: + dim_in (int): the channel dimension of the input. Normally 3 is used + for rgb input, and 2 or 3 is used for optical flow input. + dim_out (int): the output dimension of the convolution in the stem + layer. + kernel (list): the kernel size of the convolution in the stem layer. + temporal kernel size, height kernel size, width kernel size in + order. + stride (list): the stride size of the convolution in the stem layer. + temporal kernel stride, height kernel size, width kernel size in + order. + padding (int): the padding size of the convolution in the stem + layer, temporal padding size, height padding size, width + padding size in order. + inplace_relu (bool): calculate the relu on the original input + without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + """ + super(ResNetBasicStem, self).__init__() + self.kernel = kernel + self.stride = stride + self.padding = padding + self.inplace_relu = inplace_relu + self.eps = eps + self.bn_mmt = bn_mmt + # Construct the stem layer. + self._construct_stem(dim_in, dim_out, norm_module) + + def _construct_stem(self, dim_in, dim_out, norm_module): + self.conv = nn.Conv3d( + dim_in, + dim_out, + self.kernel, + stride=self.stride, + padding=self.padding, + bias=False, + ) + self.bn = norm_module( + num_features=dim_out, eps=self.eps, momentum=self.bn_mmt + ) + self.relu = nn.ReLU(self.inplace_relu) + self.pool_layer = nn.MaxPool3d( + kernel_size=[1, 3, 3], stride=[1, 2, 2], padding=[0, 1, 1] + ) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.relu(x) + x = self.pool_layer(x) + return x + + +class X3DStem(nn.Module): + """ + X3D's 3D stem module. + Performs a spatial followed by a depthwise temporal Convolution, BN, and Relu following by a + spatiotemporal pooling. + """ + + def __init__( + self, + dim_in, + dim_out, + kernel, + stride, + padding, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + norm_module=nn.BatchNorm3d, + ): + """ + The `__init__` method of any subclass should also contain these arguments. + + Args: + dim_in (int): the channel dimension of the input. Normally 3 is used + for rgb input, and 2 or 3 is used for optical flow input. 
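The X3D stem factorizes the usual TxKxK stem convolution into a spatial 1xKxK convolution followed by a depthwise temporal Tx1x1 convolution (as built in `_construct_stem` just below). A rough comparison of parameter cost against the joint kernel, with assumed toy sizes:

```python
import torch.nn as nn

dim_in, dim_out, k, t = 3, 24, 3, 5
joint = nn.Conv3d(dim_in, dim_out, (t, k, k), padding=(t // 2, 1, 1), bias=False)
spatial = nn.Conv3d(dim_in, dim_out, (1, k, k), padding=(0, 1, 1), bias=False)
temporal = nn.Conv3d(dim_out, dim_out, (t, 1, 1), padding=(t // 2, 0, 0),
                     bias=False, groups=dim_out)  # depthwise in time

count = lambda m: sum(p.numel() for p in m.parameters())
print(count(joint))                      # 24 * 3 * 45 = 3240
print(count(spatial) + count(temporal))  # 648 + 120 = 768
```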
+ dim_out (int): the output dimension of the convolution in the stem + layer. + kernel (list): the kernel size of the convolution in the stem layer. + temporal kernel size, height kernel size, width kernel size in + order. + stride (list): the stride size of the convolution in the stem layer. + temporal kernel stride, height kernel size, width kernel size in + order. + padding (int): the padding size of the convolution in the stem + layer, temporal padding size, height padding size, width + padding size in order. + inplace_relu (bool): calculate the relu on the original input + without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + """ + super(X3DStem, self).__init__() + self.kernel = kernel + self.stride = stride + self.padding = padding + self.inplace_relu = inplace_relu + self.eps = eps + self.bn_mmt = bn_mmt + # Construct the stem layer. + self._construct_stem(dim_in, dim_out, norm_module) + + def _construct_stem(self, dim_in, dim_out, norm_module): + self.conv_xy = nn.Conv3d( + dim_in, + dim_out, + kernel_size=(1, self.kernel[1], self.kernel[2]), + stride=(1, self.stride[1], self.stride[2]), + padding=(0, self.padding[1], self.padding[2]), + bias=False, + ) + self.conv = nn.Conv3d( + dim_out, + dim_out, + kernel_size=(self.kernel[0], 1, 1), + stride=(self.stride[0], 1, 1), + padding=(self.padding[0], 0, 0), + bias=False, + groups=dim_out, + ) + + self.bn = norm_module( + num_features=dim_out, eps=self.eps, momentum=self.bn_mmt + ) + self.relu = nn.ReLU(self.inplace_relu) + + def forward(self, x): + x = self.conv_xy(x) + x = self.conv(x) + x = self.bn(x) + x = self.relu(x) + return x diff --git a/TimeSformer/timesformer/models/video_model_builder.py b/TimeSformer/timesformer/models/video_model_builder.py new file mode 100755 index 0000000000000000000000000000000000000000..3edb7e7fce69337b73f798f2d8c8d16f75296f61 --- /dev/null +++ b/TimeSformer/timesformer/models/video_model_builder.py @@ -0,0 +1,780 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Video models.""" + +import math +import torch +import torch.nn as nn + +import timesformer.utils.weight_init_helper as init_helper +from timesformer.models.batchnorm_helper import get_norm + +from . import head_helper, resnet_helper, stem_helper +from .build import MODEL_REGISTRY + +import math +from torch.nn import ReplicationPad3d +from torch import einsum +from einops import rearrange, reduce, repeat +import copy + + +import numpy as np +from timesformer.models.vit import vit_base_patch16_224 + +# Number of blocks for different stages given the model depth. +_MODEL_STAGE_DEPTH = {50: (3, 4, 6, 3), 101: (3, 4, 23, 3)} + +# Basis of temporal kernel sizes for each of the stage. +_TEMPORAL_KERNEL_BASIS = { + "c2d": [ + [[1]], # conv1 temporal kernel. + [[1]], # res2 temporal kernel. + [[1]], # res3 temporal kernel. + [[1]], # res4 temporal kernel. + [[1]], # res5 temporal kernel. + ], + "c2d_nopool": [ + [[1]], # conv1 temporal kernel. + [[1]], # res2 temporal kernel. + [[1]], # res3 temporal kernel. + [[1]], # res4 temporal kernel. + [[1]], # res5 temporal kernel. + ], + "i3d": [ + [[5]], # conv1 temporal kernel. + [[3]], # res2 temporal kernel. + [[3, 1]], # res3 temporal kernel. + [[3, 1]], # res4 temporal kernel. + [[1, 3]], # res5 temporal kernel. 
+ ], + "i3d_nopool": [ + [[5]], # conv1 temporal kernel. + [[3]], # res2 temporal kernel. + [[3, 1]], # res3 temporal kernel. + [[3, 1]], # res4 temporal kernel. + [[1, 3]], # res5 temporal kernel. + ], + "slow": [ + [[1]], # conv1 temporal kernel. + [[1]], # res2 temporal kernel. + [[1]], # res3 temporal kernel. + [[3]], # res4 temporal kernel. + [[3]], # res5 temporal kernel. + ], + "slowfast": [ + [[1], [5]], # conv1 temporal kernel for slow and fast pathway. + [[1], [3]], # res2 temporal kernel for slow and fast pathway. + [[1], [3]], # res3 temporal kernel for slow and fast pathway. + [[3], [3]], # res4 temporal kernel for slow and fast pathway. + [[3], [3]], # res5 temporal kernel for slow and fast pathway. + ], + "x3d": [ + [[5]], # conv1 temporal kernels. + [[3]], # res2 temporal kernels. + [[3]], # res3 temporal kernels. + [[3]], # res4 temporal kernels. + [[3]], # res5 temporal kernels. + ], +} + +_POOL1 = { + "c2d": [[2, 1, 1]], + "c2d_nopool": [[1, 1, 1]], + "i3d": [[2, 1, 1]], + "i3d_nopool": [[1, 1, 1]], + "slow": [[1, 1, 1]], + "slowfast": [[1, 1, 1], [1, 1, 1]], + "x3d": [[1, 1, 1]], +} + + +class FuseFastToSlow(nn.Module): + """ + Fuses the information from the Fast pathway to the Slow pathway. Given the + tensors from Slow pathway and Fast pathway, fuse information from Fast to + Slow, then return the fused tensors from Slow and Fast pathway in order. + """ + + def __init__( + self, + dim_in, + fusion_conv_channel_ratio, + fusion_kernel, + alpha, + eps=1e-5, + bn_mmt=0.1, + inplace_relu=True, + norm_module=nn.BatchNorm3d, + ): + """ + Args: + dim_in (int): the channel dimension of the input. + fusion_conv_channel_ratio (int): channel ratio for the convolution + used to fuse from Fast pathway to Slow pathway. + fusion_kernel (int): kernel size of the convolution used to fuse + from Fast pathway to Slow pathway. + alpha (int): the frame rate ratio between the Fast and Slow pathway. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + inplace_relu (bool): if True, calculate the relu on the original + input without allocating new memory. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + """ + super(FuseFastToSlow, self).__init__() + self.conv_f2s = nn.Conv3d( + dim_in, + dim_in * fusion_conv_channel_ratio, + kernel_size=[fusion_kernel, 1, 1], + stride=[alpha, 1, 1], + padding=[fusion_kernel // 2, 0, 0], + bias=False, + ) + self.bn = norm_module( + num_features=dim_in * fusion_conv_channel_ratio, + eps=eps, + momentum=bn_mmt, + ) + self.relu = nn.ReLU(inplace_relu) + + def forward(self, x): + x_s = x[0] + x_f = x[1] + fuse = self.conv_f2s(x_f) + fuse = self.bn(fuse) + fuse = self.relu(fuse) + x_s_fuse = torch.cat([x_s, fuse], 1) + return [x_s_fuse, x_f] + + +@MODEL_REGISTRY.register() +class SlowFast(nn.Module): + """ + SlowFast model builder for SlowFast network. + + Christoph Feichtenhofer, Haoqi Fan, Jitendra Malik, and Kaiming He. + "SlowFast networks for video recognition." + https://arxiv.org/pdf/1812.03982.pdf + """ + + def __init__(self, cfg): + """ + The `__init__` method of any subclass should also contain these + arguments. + Args: + cfg (CfgNode): model building configs, details are in the + comments of the config file. 
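`FuseFastToSlow` above matches the two pathways' temporal lengths with a temporal convolution of stride `alpha`, then concatenates along channels, so the Slow pathway's next stage sees `dim_in + dim_in * fusion_conv_channel_ratio` channels. A shape check with hypothetical sizes (alpha=8, ratio=2, kernel=7 -- not necessarily this repo's configs):

```python
import torch
import torch.nn as nn

dim_fast, ratio, fusion_kernel, alpha = 8, 2, 7, 8
conv_f2s = nn.Conv3d(
    dim_fast, dim_fast * ratio,
    kernel_size=[fusion_kernel, 1, 1],
    stride=[alpha, 1, 1],              # downsample Fast's T by alpha
    padding=[fusion_kernel // 2, 0, 0],
    bias=False,
)
x_slow = torch.randn(1, 64, 4, 56, 56)     # T = 4
x_fast = torch.randn(1, 8, 32, 56, 56)     # T = 32 = 4 * alpha
fuse = conv_f2s(x_fast)                    # -> (1, 16, 4, 56, 56)
print(torch.cat([x_slow, fuse], 1).shape)  # torch.Size([1, 80, 4, 56, 56])
```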
+ """ + super(SlowFast, self).__init__() + self.norm_module = get_norm(cfg) + self.enable_detection = cfg.DETECTION.ENABLE + self.num_pathways = 2 + + self._construct_network(cfg) + init_helper.init_weights( + self, cfg.MODEL.FC_INIT_STD, cfg.RESNET.ZERO_INIT_FINAL_BN + ) + + + def _construct_network(self, cfg): + """ + Builds a SlowFast model. The first pathway is the Slow pathway and the + second pathway is the Fast pathway. + Args: + cfg (CfgNode): model building configs, details are in the + comments of the config file. + """ + assert cfg.MODEL.ARCH in _POOL1.keys() + pool_size = _POOL1[cfg.MODEL.ARCH] + assert len({len(pool_size), self.num_pathways}) == 1 + assert cfg.RESNET.DEPTH in _MODEL_STAGE_DEPTH.keys() + + (d2, d3, d4, d5) = _MODEL_STAGE_DEPTH[cfg.RESNET.DEPTH] + + num_groups = cfg.RESNET.NUM_GROUPS + width_per_group = cfg.RESNET.WIDTH_PER_GROUP + dim_inner = num_groups * width_per_group + out_dim_ratio = ( + cfg.SLOWFAST.BETA_INV // cfg.SLOWFAST.FUSION_CONV_CHANNEL_RATIO + ) + + temp_kernel = _TEMPORAL_KERNEL_BASIS[cfg.MODEL.ARCH] + + self.s1 = stem_helper.VideoModelStem( + dim_in=cfg.DATA.INPUT_CHANNEL_NUM, + dim_out=[width_per_group, width_per_group // cfg.SLOWFAST.BETA_INV], + kernel=[temp_kernel[0][0] + [7, 7], temp_kernel[0][1] + [7, 7]], + stride=[[1, 2, 2]] * 2, + padding=[ + [temp_kernel[0][0][0] // 2, 3, 3], + [temp_kernel[0][1][0] // 2, 3, 3], + ], + norm_module=self.norm_module, + ) + self.s1_fuse = FuseFastToSlow( + width_per_group // cfg.SLOWFAST.BETA_INV, + cfg.SLOWFAST.FUSION_CONV_CHANNEL_RATIO, + cfg.SLOWFAST.FUSION_KERNEL_SZ, + cfg.SLOWFAST.ALPHA, + norm_module=self.norm_module, + ) + + self.s2 = resnet_helper.ResStage( + dim_in=[ + width_per_group + width_per_group // out_dim_ratio, + width_per_group // cfg.SLOWFAST.BETA_INV, + ], + dim_out=[ + width_per_group * 4, + width_per_group * 4 // cfg.SLOWFAST.BETA_INV, + ], + dim_inner=[dim_inner, dim_inner // cfg.SLOWFAST.BETA_INV], + temp_kernel_sizes=temp_kernel[1], + stride=cfg.RESNET.SPATIAL_STRIDES[0], + num_blocks=[d2] * 2, + num_groups=[num_groups] * 2, + num_block_temp_kernel=cfg.RESNET.NUM_BLOCK_TEMP_KERNEL[0], + nonlocal_inds=cfg.NONLOCAL.LOCATION[0], + nonlocal_group=cfg.NONLOCAL.GROUP[0], + nonlocal_pool=cfg.NONLOCAL.POOL[0], + instantiation=cfg.NONLOCAL.INSTANTIATION, + trans_func_name=cfg.RESNET.TRANS_FUNC, + dilation=cfg.RESNET.SPATIAL_DILATIONS[0], + norm_module=self.norm_module, + ) + self.s2_fuse = FuseFastToSlow( + width_per_group * 4 // cfg.SLOWFAST.BETA_INV, + cfg.SLOWFAST.FUSION_CONV_CHANNEL_RATIO, + cfg.SLOWFAST.FUSION_KERNEL_SZ, + cfg.SLOWFAST.ALPHA, + norm_module=self.norm_module, + ) + + for pathway in range(self.num_pathways): + pool = nn.MaxPool3d( + kernel_size=pool_size[pathway], + stride=pool_size[pathway], + padding=[0, 0, 0], + ) + self.add_module("pathway{}_pool".format(pathway), pool) + + self.s3 = resnet_helper.ResStage( + dim_in=[ + width_per_group * 4 + width_per_group * 4 // out_dim_ratio, + width_per_group * 4 // cfg.SLOWFAST.BETA_INV, + ], + dim_out=[ + width_per_group * 8, + width_per_group * 8 // cfg.SLOWFAST.BETA_INV, + ], + dim_inner=[dim_inner * 2, dim_inner * 2 // cfg.SLOWFAST.BETA_INV], + temp_kernel_sizes=temp_kernel[2], + stride=cfg.RESNET.SPATIAL_STRIDES[1], + num_blocks=[d3] * 2, + num_groups=[num_groups] * 2, + num_block_temp_kernel=cfg.RESNET.NUM_BLOCK_TEMP_KERNEL[1], + nonlocal_inds=cfg.NONLOCAL.LOCATION[1], + nonlocal_group=cfg.NONLOCAL.GROUP[1], + nonlocal_pool=cfg.NONLOCAL.POOL[1], + instantiation=cfg.NONLOCAL.INSTANTIATION, + 
trans_func_name=cfg.RESNET.TRANS_FUNC, + dilation=cfg.RESNET.SPATIAL_DILATIONS[1], + norm_module=self.norm_module, + ) + self.s3_fuse = FuseFastToSlow( + width_per_group * 8 // cfg.SLOWFAST.BETA_INV, + cfg.SLOWFAST.FUSION_CONV_CHANNEL_RATIO, + cfg.SLOWFAST.FUSION_KERNEL_SZ, + cfg.SLOWFAST.ALPHA, + norm_module=self.norm_module, + ) + + self.s4 = resnet_helper.ResStage( + dim_in=[ + width_per_group * 8 + width_per_group * 8 // out_dim_ratio, + width_per_group * 8 // cfg.SLOWFAST.BETA_INV, + ], + dim_out=[ + width_per_group * 16, + width_per_group * 16 // cfg.SLOWFAST.BETA_INV, + ], + dim_inner=[dim_inner * 4, dim_inner * 4 // cfg.SLOWFAST.BETA_INV], + temp_kernel_sizes=temp_kernel[3], + stride=cfg.RESNET.SPATIAL_STRIDES[2], + num_blocks=[d4] * 2, + num_groups=[num_groups] * 2, + num_block_temp_kernel=cfg.RESNET.NUM_BLOCK_TEMP_KERNEL[2], + nonlocal_inds=cfg.NONLOCAL.LOCATION[2], + nonlocal_group=cfg.NONLOCAL.GROUP[2], + nonlocal_pool=cfg.NONLOCAL.POOL[2], + instantiation=cfg.NONLOCAL.INSTANTIATION, + trans_func_name=cfg.RESNET.TRANS_FUNC, + dilation=cfg.RESNET.SPATIAL_DILATIONS[2], + norm_module=self.norm_module, + ) + self.s4_fuse = FuseFastToSlow( + width_per_group * 16 // cfg.SLOWFAST.BETA_INV, + cfg.SLOWFAST.FUSION_CONV_CHANNEL_RATIO, + cfg.SLOWFAST.FUSION_KERNEL_SZ, + cfg.SLOWFAST.ALPHA, + norm_module=self.norm_module, + ) + + self.s5 = resnet_helper.ResStage( + dim_in=[ + width_per_group * 16 + width_per_group * 16 // out_dim_ratio, + width_per_group * 16 // cfg.SLOWFAST.BETA_INV, + ], + dim_out=[ + width_per_group * 32, + width_per_group * 32 // cfg.SLOWFAST.BETA_INV, + ], + dim_inner=[dim_inner * 8, dim_inner * 8 // cfg.SLOWFAST.BETA_INV], + temp_kernel_sizes=temp_kernel[4], + stride=cfg.RESNET.SPATIAL_STRIDES[3], + num_blocks=[d5] * 2, + num_groups=[num_groups] * 2, + num_block_temp_kernel=cfg.RESNET.NUM_BLOCK_TEMP_KERNEL[3], + nonlocal_inds=cfg.NONLOCAL.LOCATION[3], + nonlocal_group=cfg.NONLOCAL.GROUP[3], + nonlocal_pool=cfg.NONLOCAL.POOL[3], + instantiation=cfg.NONLOCAL.INSTANTIATION, + trans_func_name=cfg.RESNET.TRANS_FUNC, + dilation=cfg.RESNET.SPATIAL_DILATIONS[3], + norm_module=self.norm_module, + ) + + if cfg.DETECTION.ENABLE: + self.head = head_helper.ResNetRoIHead( + dim_in=[ + width_per_group * 32, + width_per_group * 32 // cfg.SLOWFAST.BETA_INV, + ], + num_classes=cfg.MODEL.NUM_CLASSES, + pool_size=[ + [ + cfg.DATA.NUM_FRAMES + // cfg.SLOWFAST.ALPHA + // pool_size[0][0], + 1, + 1, + ], + [cfg.DATA.NUM_FRAMES // pool_size[1][0], 1, 1], + ], + resolution=[[cfg.DETECTION.ROI_XFORM_RESOLUTION] * 2] * 2, + scale_factor=[cfg.DETECTION.SPATIAL_SCALE_FACTOR] * 2, + dropout_rate=cfg.MODEL.DROPOUT_RATE, + act_func=cfg.MODEL.HEAD_ACT, + aligned=cfg.DETECTION.ALIGNED, + ) + else: + head = head_helper.ResNetBasicHead( + dim_in=[ + width_per_group * 32, + width_per_group * 32 // cfg.SLOWFAST.BETA_INV, + ], + num_classes=cfg.MODEL.NUM_CLASSES, + pool_size=[None, None] + if cfg.MULTIGRID.SHORT_CYCLE + else [ + [ + cfg.DATA.NUM_FRAMES + // cfg.SLOWFAST.ALPHA + // pool_size[0][0], + cfg.DATA.TRAIN_CROP_SIZE // 32 // pool_size[0][1], + cfg.DATA.TRAIN_CROP_SIZE // 32 // pool_size[0][2], + ], + [ + cfg.DATA.NUM_FRAMES // pool_size[1][0], + cfg.DATA.TRAIN_CROP_SIZE // 32 // pool_size[1][1], + cfg.DATA.TRAIN_CROP_SIZE // 32 // pool_size[1][2], + ], + ], # None for AdaptiveAvgPool3d((1, 1, 1)) + dropout_rate=cfg.MODEL.DROPOUT_RATE, + act_func=cfg.MODEL.HEAD_ACT, + ) + self.head_name = "head{}".format(cfg.TASK) + self.add_module(self.head_name, head) + + def forward(self, x, bboxes=None): + x = 
self.s1(x) + x = self.s1_fuse(x) + x = self.s2(x) + x = self.s2_fuse(x) + for pathway in range(self.num_pathways): + pool = getattr(self, "pathway{}_pool".format(pathway)) + x[pathway] = pool(x[pathway]) + x = self.s3(x) + x = self.s3_fuse(x) + x = self.s4(x) + x = self.s4_fuse(x) + x = self.s5(x) + + head = getattr(self, self.head_name) + if self.enable_detection: + x = head(x, bboxes) + else: + x = head(x) + + return x + + +@MODEL_REGISTRY.register() +class ResNet(nn.Module): + """ + ResNet model builder. It builds a ResNet like network backbone without + lateral connection (C2D, I3D, Slow). + + Christoph Feichtenhofer, Haoqi Fan, Jitendra Malik, and Kaiming He. + "SlowFast networks for video recognition." + https://arxiv.org/pdf/1812.03982.pdf + + Xiaolong Wang, Ross Girshick, Abhinav Gupta, and Kaiming He. + "Non-local neural networks." + https://arxiv.org/pdf/1711.07971.pdf + """ + + def __init__(self, cfg): + """ + The `__init__` method of any subclass should also contain these + arguments. + + Args: + cfg (CfgNode): model building configs, details are in the + comments of the config file. + """ + super(ResNet, self).__init__() + self.norm_module = get_norm(cfg) + self.enable_detection = cfg.DETECTION.ENABLE + self.num_pathways = 1 + self._construct_network(cfg) + init_helper.init_weights( + self, cfg.MODEL.FC_INIT_STD, cfg.RESNET.ZERO_INIT_FINAL_BN + ) + + def _construct_network(self, cfg): + """ + Builds a single pathway ResNet model. + + Args: + cfg (CfgNode): model building configs, details are in the + comments of the config file. + """ + assert cfg.MODEL.ARCH in _POOL1.keys() + pool_size = _POOL1[cfg.MODEL.ARCH] + assert len({len(pool_size), self.num_pathways}) == 1 + assert cfg.RESNET.DEPTH in _MODEL_STAGE_DEPTH.keys() + + (d2, d3, d4, d5) = _MODEL_STAGE_DEPTH[cfg.RESNET.DEPTH] + + num_groups = cfg.RESNET.NUM_GROUPS + width_per_group = cfg.RESNET.WIDTH_PER_GROUP + dim_inner = num_groups * width_per_group + + temp_kernel = _TEMPORAL_KERNEL_BASIS[cfg.MODEL.ARCH] + + self.s1 = stem_helper.VideoModelStem( + dim_in=cfg.DATA.INPUT_CHANNEL_NUM, + dim_out=[width_per_group], + kernel=[temp_kernel[0][0] + [7, 7]], + stride=[[1, 2, 2]], + padding=[[temp_kernel[0][0][0] // 2, 3, 3]], + norm_module=self.norm_module, + ) + + self.s2 = resnet_helper.ResStage( + dim_in=[width_per_group], + dim_out=[width_per_group * 4], + dim_inner=[dim_inner], + temp_kernel_sizes=temp_kernel[1], + stride=cfg.RESNET.SPATIAL_STRIDES[0], + num_blocks=[d2], + num_groups=[num_groups], + num_block_temp_kernel=cfg.RESNET.NUM_BLOCK_TEMP_KERNEL[0], + nonlocal_inds=cfg.NONLOCAL.LOCATION[0], + nonlocal_group=cfg.NONLOCAL.GROUP[0], + nonlocal_pool=cfg.NONLOCAL.POOL[0], + instantiation=cfg.NONLOCAL.INSTANTIATION, + trans_func_name=cfg.RESNET.TRANS_FUNC, + stride_1x1=cfg.RESNET.STRIDE_1X1, + inplace_relu=cfg.RESNET.INPLACE_RELU, + dilation=cfg.RESNET.SPATIAL_DILATIONS[0], + norm_module=self.norm_module, + ) + + for pathway in range(self.num_pathways): + pool = nn.MaxPool3d( + kernel_size=pool_size[pathway], + stride=pool_size[pathway], + padding=[0, 0, 0], + ) + self.add_module("pathway{}_pool".format(pathway), pool) + + self.s3 = resnet_helper.ResStage( + dim_in=[width_per_group * 4], + dim_out=[width_per_group * 8], + dim_inner=[dim_inner * 2], + temp_kernel_sizes=temp_kernel[2], + stride=cfg.RESNET.SPATIAL_STRIDES[1], + num_blocks=[d3], + num_groups=[num_groups], + num_block_temp_kernel=cfg.RESNET.NUM_BLOCK_TEMP_KERNEL[1], + nonlocal_inds=cfg.NONLOCAL.LOCATION[1], + nonlocal_group=cfg.NONLOCAL.GROUP[1], + 
nonlocal_pool=cfg.NONLOCAL.POOL[1], + instantiation=cfg.NONLOCAL.INSTANTIATION, + trans_func_name=cfg.RESNET.TRANS_FUNC, + stride_1x1=cfg.RESNET.STRIDE_1X1, + inplace_relu=cfg.RESNET.INPLACE_RELU, + dilation=cfg.RESNET.SPATIAL_DILATIONS[1], + norm_module=self.norm_module, + ) + + self.s4 = resnet_helper.ResStage( + dim_in=[width_per_group * 8], + dim_out=[width_per_group * 16], + dim_inner=[dim_inner * 4], + temp_kernel_sizes=temp_kernel[3], + stride=cfg.RESNET.SPATIAL_STRIDES[2], + num_blocks=[d4], + num_groups=[num_groups], + num_block_temp_kernel=cfg.RESNET.NUM_BLOCK_TEMP_KERNEL[2], + nonlocal_inds=cfg.NONLOCAL.LOCATION[2], + nonlocal_group=cfg.NONLOCAL.GROUP[2], + nonlocal_pool=cfg.NONLOCAL.POOL[2], + instantiation=cfg.NONLOCAL.INSTANTIATION, + trans_func_name=cfg.RESNET.TRANS_FUNC, + stride_1x1=cfg.RESNET.STRIDE_1X1, + inplace_relu=cfg.RESNET.INPLACE_RELU, + dilation=cfg.RESNET.SPATIAL_DILATIONS[2], + norm_module=self.norm_module, + ) + + self.s5 = resnet_helper.ResStage( + dim_in=[width_per_group * 16], + dim_out=[width_per_group * 32], + dim_inner=[dim_inner * 8], + temp_kernel_sizes=temp_kernel[4], + stride=cfg.RESNET.SPATIAL_STRIDES[3], + num_blocks=[d5], + num_groups=[num_groups], + num_block_temp_kernel=cfg.RESNET.NUM_BLOCK_TEMP_KERNEL[3], + nonlocal_inds=cfg.NONLOCAL.LOCATION[3], + nonlocal_group=cfg.NONLOCAL.GROUP[3], + nonlocal_pool=cfg.NONLOCAL.POOL[3], + instantiation=cfg.NONLOCAL.INSTANTIATION, + trans_func_name=cfg.RESNET.TRANS_FUNC, + stride_1x1=cfg.RESNET.STRIDE_1X1, + inplace_relu=cfg.RESNET.INPLACE_RELU, + dilation=cfg.RESNET.SPATIAL_DILATIONS[3], + norm_module=self.norm_module, + ) + + if self.enable_detection: + self.head = head_helper.ResNetRoIHead( + dim_in=[width_per_group * 32], + num_classes=cfg.MODEL.NUM_CLASSES, + pool_size=[[cfg.DATA.NUM_FRAMES // pool_size[0][0], 1, 1]], + resolution=[[cfg.DETECTION.ROI_XFORM_RESOLUTION] * 2], + scale_factor=[cfg.DETECTION.SPATIAL_SCALE_FACTOR], + dropout_rate=cfg.MODEL.DROPOUT_RATE, + act_func=cfg.MODEL.HEAD_ACT, + aligned=cfg.DETECTION.ALIGNED, + ) + else: + head = head_helper.ResNetBasicHead( + dim_in=[width_per_group * 32], + num_classes=cfg.MODEL.NUM_CLASSES, + pool_size=[None, None] + if cfg.MULTIGRID.SHORT_CYCLE + else [ + [ + cfg.DATA.NUM_FRAMES // pool_size[0][0], + cfg.DATA.TRAIN_CROP_SIZE // 32 // pool_size[0][1], + cfg.DATA.TRAIN_CROP_SIZE // 32 // pool_size[0][2], + ] + ], # None for AdaptiveAvgPool3d((1, 1, 1)) + dropout_rate=cfg.MODEL.DROPOUT_RATE, + act_func=cfg.MODEL.HEAD_ACT, + ) + self.head_name = "head{}".format(cfg.TASK) + self.add_module(self.head_name, head) + + def forward(self, x, bboxes=None): + x = self.s1(x) + x = self.s2(x) + for pathway in range(self.num_pathways): + pool = getattr(self, "pathway{}_pool".format(pathway)) + x[pathway] = pool(x[pathway]) + x = self.s3(x) + x = self.s4(x) + x = self.s5(x) + + head = getattr(self, self.head_name) + if self.enable_detection: + x = head(x, bboxes) + else: + x = head(x) + return x + + +@MODEL_REGISTRY.register() +class X3D(nn.Module): + """ + X3D model builder. It builds a X3D network backbone, which is a ResNet. + + Christoph Feichtenhofer. + "X3D: Expanding Architectures for Efficient Video Recognition." + https://arxiv.org/abs/2004.04730 + """ + + def __init__(self, cfg): + """ + The `__init__` method of any subclass should also contain these + arguments. + + Args: + cfg (CfgNode): model building configs, details are in the + comments of the config file. 
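Illustrative aside (not part of the vendored file): the single-pathway stages being built here follow the standard ResNet channel plan, with `dim_out` growing as `width_per_group * {4, 8, 16, 32}`. A quick sanity check of the implied layout, assuming `_MODEL_STAGE_DEPTH[50] == (3, 4, 6, 3)` and `WIDTH_PER_GROUP = 64` as in the usual configs.

```python
# Stage depths d2..d5 and output widths for an assumed ResNet-50 backbone.
width_per_group = 64
depths = (3, 4, 6, 3)                    # assumed _MODEL_STAGE_DEPTH[50]
for stage, (blocks, mult) in enumerate(zip(depths, (4, 8, 16, 32)), start=2):
    print(f"s{stage}: {blocks} blocks, dim_out = {width_per_group * mult}")
# s2: 3 blocks, dim_out = 256 ... s5: 3 blocks, dim_out = 2048
```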
+ """ + super(X3D, self).__init__() + self.norm_module = get_norm(cfg) + self.enable_detection = cfg.DETECTION.ENABLE + self.num_pathways = 1 + + exp_stage = 2.0 + self.dim_c1 = cfg.X3D.DIM_C1 + + self.dim_res2 = ( + self._round_width(self.dim_c1, exp_stage, divisor=8) + if cfg.X3D.SCALE_RES2 + else self.dim_c1 + ) + self.dim_res3 = self._round_width(self.dim_res2, exp_stage, divisor=8) + self.dim_res4 = self._round_width(self.dim_res3, exp_stage, divisor=8) + self.dim_res5 = self._round_width(self.dim_res4, exp_stage, divisor=8) + + self.block_basis = [ + # blocks, c, stride + [1, self.dim_res2, 2], + [2, self.dim_res3, 2], + [5, self.dim_res4, 2], + [3, self.dim_res5, 2], + ] + self._construct_network(cfg) + init_helper.init_weights( + self, cfg.MODEL.FC_INIT_STD, cfg.RESNET.ZERO_INIT_FINAL_BN + ) + + def _round_width(self, width, multiplier, min_depth=8, divisor=8): + """Round width of filters based on width multiplier.""" + if not multiplier: + return width + + width *= multiplier + min_depth = min_depth or divisor + new_filters = max( + min_depth, int(width + divisor / 2) // divisor * divisor + ) + if new_filters < 0.9 * width: + new_filters += divisor + return int(new_filters) + + def _round_repeats(self, repeats, multiplier): + """Round number of layers based on depth multiplier.""" + multiplier = multiplier + if not multiplier: + return repeats + return int(math.ceil(multiplier * repeats)) + + def _construct_network(self, cfg): + """ + Builds a single pathway X3D model. + + Args: + cfg (CfgNode): model building configs, details are in the + comments of the config file. + """ + assert cfg.MODEL.ARCH in _POOL1.keys() + assert cfg.RESNET.DEPTH in _MODEL_STAGE_DEPTH.keys() + + (d2, d3, d4, d5) = _MODEL_STAGE_DEPTH[cfg.RESNET.DEPTH] + + num_groups = cfg.RESNET.NUM_GROUPS + width_per_group = cfg.RESNET.WIDTH_PER_GROUP + dim_inner = num_groups * width_per_group + + w_mul = cfg.X3D.WIDTH_FACTOR + d_mul = cfg.X3D.DEPTH_FACTOR + dim_res1 = self._round_width(self.dim_c1, w_mul) + + temp_kernel = _TEMPORAL_KERNEL_BASIS[cfg.MODEL.ARCH] + + self.s1 = stem_helper.VideoModelStem( + dim_in=cfg.DATA.INPUT_CHANNEL_NUM, + dim_out=[dim_res1], + kernel=[temp_kernel[0][0] + [3, 3]], + stride=[[1, 2, 2]], + padding=[[temp_kernel[0][0][0] // 2, 1, 1]], + norm_module=self.norm_module, + stem_func_name="x3d_stem", + ) + + # blob_in = s1 + dim_in = dim_res1 + for stage, block in enumerate(self.block_basis): + dim_out = self._round_width(block[1], w_mul) + dim_inner = int(cfg.X3D.BOTTLENECK_FACTOR * dim_out) + + n_rep = self._round_repeats(block[0], d_mul) + prefix = "s{}".format( + stage + 2 + ) # start w res2 to follow convention + + s = resnet_helper.ResStage( + dim_in=[dim_in], + dim_out=[dim_out], + dim_inner=[dim_inner], + temp_kernel_sizes=temp_kernel[1], + stride=[block[2]], + num_blocks=[n_rep], + num_groups=[dim_inner] + if cfg.X3D.CHANNELWISE_3x3x3 + else [num_groups], + num_block_temp_kernel=[n_rep], + nonlocal_inds=cfg.NONLOCAL.LOCATION[0], + nonlocal_group=cfg.NONLOCAL.GROUP[0], + nonlocal_pool=cfg.NONLOCAL.POOL[0], + instantiation=cfg.NONLOCAL.INSTANTIATION, + trans_func_name=cfg.RESNET.TRANS_FUNC, + stride_1x1=cfg.RESNET.STRIDE_1X1, + norm_module=self.norm_module, + dilation=cfg.RESNET.SPATIAL_DILATIONS[stage], + drop_connect_rate=cfg.MODEL.DROPCONNECT_RATE + * (stage + 2) + / (len(self.block_basis) + 1), + ) + dim_in = dim_out + self.add_module(prefix, s) + + if self.enable_detection: + NotImplementedError + else: + spat_sz = int(math.ceil(cfg.DATA.TRAIN_CROP_SIZE / 32.0)) + self.head = 
head_helper.X3DHead( + dim_in=dim_out, + dim_inner=dim_inner, + dim_out=cfg.X3D.DIM_C5, + num_classes=cfg.MODEL.NUM_CLASSES, + pool_size=[cfg.DATA.NUM_FRAMES, spat_sz, spat_sz], + dropout_rate=cfg.MODEL.DROPOUT_RATE, + act_func=cfg.MODEL.HEAD_ACT, + bn_lin5_on=cfg.X3D.BN_LIN5, + ) + + def forward(self, x, bboxes=None): + for module in self.children(): + x = module(x) + return x diff --git a/TimeSformer/timesformer/models/vit.py b/TimeSformer/timesformer/models/vit.py new file mode 100755 index 0000000000000000000000000000000000000000..e3f8670fd06a70c39b5fb0aeedd706ab1e637dc9 --- /dev/null +++ b/TimeSformer/timesformer/models/vit.py @@ -0,0 +1,351 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +# Copyright 2020 Ross Wightman +# Modified Model definition + +import torch +import torch.nn as nn +from functools import partial +import math +import warnings +import torch.nn.functional as F +import numpy as np + +from timesformer.models.vit_utils import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD +from timesformer.models.helpers import load_pretrained +from timesformer.models.vit_utils import DropPath, to_2tuple, trunc_normal_ + +from .build import MODEL_REGISTRY +from torch import einsum +from einops import rearrange, reduce, repeat + +def _cfg(url='', **kwargs): + return { + 'url': url, + 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, + 'crop_pct': .9, 'interpolation': 'bicubic', + 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, + 'first_conv': 'patch_embed.proj', 'classifier': 'head', + **kwargs + } + + +default_cfgs = { + 'vit_base_patch16_224': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_224-80ecf9dd.pth', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + ), +} + +class Mlp(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0., with_qkv=True): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim ** -0.5 + self.with_qkv = with_qkv + if self.with_qkv: + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + self.attn_drop = nn.Dropout(attn_drop) + + def forward(self, x): + B, N, C = x.shape + if self.with_qkv: + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] + else: + qkv = x.reshape(B, N, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3) + q, k, v = qkv, qkv, qkv + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + if self.with_qkv: + x = self.proj(x) + x = self.proj_drop(x) + return x + +class Block(nn.Module): + + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., + drop_path=0.1, act_layer=nn.GELU, 
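Illustrative aside (not part of the vendored file): `X3D._round_width` above implements the multiple-of-`divisor` channel rounding used as widths expand stage by stage. A standalone re-run of that rule, assuming the common default `cfg.X3D.DIM_C1 = 12` and the file's `exp_stage = 2.0`.

```python
def round_width(width, multiplier, min_depth=8, divisor=8):
    """Same rounding rule as X3D._round_width above."""
    if not multiplier:
        return width
    width *= multiplier
    new_filters = max(min_depth, int(width + divisor / 2) // divisor * divisor)
    if new_filters < 0.9 * width:
        new_filters += divisor
    return int(new_filters)

dim = 12                                  # assumed cfg.X3D.DIM_C1
for stage in ("res2", "res3", "res4", "res5"):
    dim = round_width(dim, 2.0)           # exp_stage = 2.0
    print(stage, dim)                     # 24, 48, 96, 192
```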
norm_layer=nn.LayerNorm, attention_type='divided_space_time'): + super().__init__() + self.attention_type = attention_type + assert(attention_type in ['divided_space_time', 'space_only','joint_space_time']) + + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop) + + ## Temporal Attention Parameters + if self.attention_type == 'divided_space_time': + self.temporal_norm1 = norm_layer(dim) + self.temporal_attn = Attention( + dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop) + self.temporal_fc = nn.Linear(dim, dim) + + ## drop path + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + + def forward(self, x, B, T, W): + num_spatial_tokens = (x.size(1) - 1) // T + H = num_spatial_tokens // W + + if self.attention_type in ['space_only', 'joint_space_time']: + x = x + self.drop_path(self.attn(self.norm1(x))) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + elif self.attention_type == 'divided_space_time': + ## Temporal + xt = x[:,1:,:] + xt = rearrange(xt, 'b (h w t) m -> (b h w) t m',b=B,h=H,w=W,t=T) + res_temporal = self.drop_path(self.temporal_attn(self.temporal_norm1(xt))) + res_temporal = rearrange(res_temporal, '(b h w) t m -> b (h w t) m',b=B,h=H,w=W,t=T) + res_temporal = self.temporal_fc(res_temporal) + xt = x[:,1:,:] + res_temporal + + ## Spatial + init_cls_token = x[:,0,:].unsqueeze(1) + cls_token = init_cls_token.repeat(1, T, 1) + cls_token = rearrange(cls_token, 'b t m -> (b t) m',b=B,t=T).unsqueeze(1) + xs = xt + xs = rearrange(xs, 'b (h w t) m -> (b t) (h w) m',b=B,h=H,w=W,t=T) + xs = torch.cat((cls_token, xs), 1) + res_spatial = self.drop_path(self.attn(self.norm1(xs))) + + ### Taking care of CLS token + cls_token = res_spatial[:,0,:] + cls_token = rearrange(cls_token, '(b t) m -> b t m',b=B,t=T) + cls_token = torch.mean(cls_token,1,True) ## averaging for every frame + res_spatial = res_spatial[:,1:,:] + res_spatial = rearrange(res_spatial, '(b t) (h w) m -> b (h w t) m',b=B,h=H,w=W,t=T) + res = res_spatial + x = xt + + ## Mlp + x = torch.cat((init_cls_token, x), 1) + torch.cat((cls_token, res), 1) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + +class PatchEmbed(nn.Module): + """ Image to Patch Embedding + """ + def __init__(self, img_size=224, patch_size=16, in_chans=3, embed_dim=768): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + num_patches = (img_size[1] // patch_size[1]) * (img_size[0] // patch_size[0]) + self.img_size = img_size + self.patch_size = patch_size + self.num_patches = num_patches + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size) + + def forward(self, x): + B, C, T, H, W = x.shape + x = rearrange(x, 'b c t h w -> (b t) c h w') + x = self.proj(x) + W = x.size(-1) + x = x.flatten(2).transpose(1, 2) + return x, T, W + + +class VisionTransformer(nn.Module): + """ Vision Transformere + """ + def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12, + num_heads=12, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop_rate=0., attn_drop_rate=0., + drop_path_rate=0.1, hybrid_backbone=None, norm_layer=nn.LayerNorm, num_frames=8, attention_type='divided_space_time', 
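Illustrative aside (not part of the vendored file): the divided space-time attention in `Block.forward` above hinges on two `einops` reshapes. A shape walk-through with dummy sizes, assuming a 14x14 patch grid over 8 frames.

```python
import torch
from einops import rearrange

B, T, H, W, m = 2, 8, 14, 14, 768
x = torch.randn(B, 1 + H * W * T, m)     # CLS token + space-time patch tokens

# Temporal attention: every spatial location attends over its T frames.
xt = rearrange(x[:, 1:, :], 'b (h w t) m -> (b h w) t m', h=H, w=W, t=T)
print(xt.shape)                          # torch.Size([392, 8, 768])

# Spatial attention: every frame attends over its H*W locations.
xs = rearrange(xt, '(b h w) t m -> (b t) (h w) m', b=B, h=H, w=W)
print(xs.shape)                          # torch.Size([16, 196, 768])
```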
dropout=0.): + super().__init__() + self.attention_type = attention_type + self.depth = depth + self.dropout = nn.Dropout(dropout) + self.num_classes = num_classes + self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models + self.patch_embed = PatchEmbed( + img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) + num_patches = self.patch_embed.num_patches + + ## Positional Embeddings + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches+1, embed_dim)) + self.pos_drop = nn.Dropout(p=drop_rate) + if self.attention_type != 'space_only': + self.time_embed = nn.Parameter(torch.zeros(1, num_frames, embed_dim)) + self.time_drop = nn.Dropout(p=drop_rate) + + ## Attention Blocks + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, self.depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + Block( + dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale, + drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[i], norm_layer=norm_layer, attention_type=self.attention_type) + for i in range(self.depth)]) + self.norm = norm_layer(embed_dim) + + # Classifier head + self.head = nn.Linear(embed_dim, num_classes) if num_classes > 0 else nn.Identity() + + trunc_normal_(self.pos_embed, std=.02) + trunc_normal_(self.cls_token, std=.02) + self.apply(self._init_weights) + + ## initialization of temporal attention weights + if self.attention_type == 'divided_space_time': + i = 0 + for m in self.blocks.modules(): + m_str = str(m) + if 'Block' in m_str: + if i > 0: + nn.init.constant_(m.temporal_fc.weight, 0) + nn.init.constant_(m.temporal_fc.bias, 0) + i += 1 + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'pos_embed', 'cls_token', 'time_embed'} + + def get_classifier(self): + return self.head + + def reset_classifier(self, num_classes, global_pool=''): + self.num_classes = num_classes + self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity() + + def forward_features(self, x): + B = x.shape[0] + x, T, W = self.patch_embed(x) + cls_tokens = self.cls_token.expand(x.size(0), -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + ## resizing the positional embeddings in case they don't match the input at inference + if x.size(1) != self.pos_embed.size(1): + pos_embed = self.pos_embed + cls_pos_embed = pos_embed[0,0,:].unsqueeze(0).unsqueeze(1) + other_pos_embed = pos_embed[0,1:,:].unsqueeze(0).transpose(1, 2) + P = int(other_pos_embed.size(2) ** 0.5) + H = x.size(1) // W + other_pos_embed = other_pos_embed.reshape(1, x.size(2), P, P) + new_pos_embed = F.interpolate(other_pos_embed, size=(H, W), mode='nearest') + new_pos_embed = new_pos_embed.flatten(2) + new_pos_embed = new_pos_embed.transpose(1, 2) + new_pos_embed = torch.cat((cls_pos_embed, new_pos_embed), 1) + x = x + new_pos_embed + else: + x = x + self.pos_embed + x = self.pos_drop(x) + + + ## Time Embeddings + if self.attention_type != 'space_only': + cls_tokens = x[:B, 0, :].unsqueeze(1) + x = x[:,1:] + x = rearrange(x, '(b t) n m -> (b n) t m',b=B,t=T) + ## Resizing time embeddings in case they don't match + if T != self.time_embed.size(1): + 
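Illustrative aside (not part of the vendored file): the resize branch in `forward_features` above re-samples the learned positional table when the token grid at inference differs from the one seen in training. The same idea in isolation, with hypothetical sizes; the reshape path is slightly simplified relative to the code above.

```python
import torch
import torch.nn.functional as F

m, P, H, W = 768, 14, 16, 16             # trained grid P x P, new grid H x W
pos_embed = torch.randn(1, 1 + P * P, m)

cls_pos = pos_embed[:, :1, :]
grid = pos_embed[:, 1:, :].transpose(1, 2).reshape(1, m, P, P)
grid = F.interpolate(grid, size=(H, W), mode='nearest')  # 'nearest', as above
new_pos = torch.cat([cls_pos, grid.flatten(2).transpose(1, 2)], dim=1)
print(new_pos.shape)                     # torch.Size([1, 257, 768])
```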
time_embed = self.time_embed.transpose(1, 2) + new_time_embed = F.interpolate(time_embed, size=(T), mode='nearest') + new_time_embed = new_time_embed.transpose(1, 2) + x = x + new_time_embed + else: + x = x + self.time_embed + x = self.time_drop(x) + x = rearrange(x, '(b n) t m -> b (n t) m',b=B,t=T) + x = torch.cat((cls_tokens, x), dim=1) + + ## Attention blocks + for blk in self.blocks: + x = blk(x, B, T, W) + + ### Predictions for space-only baseline + if self.attention_type == 'space_only': + x = rearrange(x, '(b t) n m -> b t n m',b=B,t=T) + x = torch.mean(x, 1) # averaging predictions for every frame + + x = self.norm(x) + return x[:, 0] + + def forward(self, x): + x = self.forward_features(x) + x = self.head(x) + return x + +def _conv_filter(state_dict, patch_size=16): + """ convert patch embedding weight from manual patchify + linear proj to conv""" + out_dict = {} + for k, v in state_dict.items(): + if 'patch_embed.proj.weight' in k: + if v.shape[-1] != patch_size: + patch_size = v.shape[-1] + v = v.reshape((v.shape[0], 3, patch_size, patch_size)) + out_dict[k] = v + return out_dict + +@MODEL_REGISTRY.register() +class vit_base_patch16_224(nn.Module): + def __init__(self, cfg, **kwargs): + super(vit_base_patch16_224, self).__init__() + self.pretrained=True + patch_size = 16 + self.model = VisionTransformer(img_size=cfg.DATA.TRAIN_CROP_SIZE, num_classes=cfg.MODEL.NUM_CLASSES, patch_size=patch_size, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), drop_rate=0., attn_drop_rate=0., drop_path_rate=0.1, num_frames=cfg.DATA.NUM_FRAMES, attention_type=cfg.TIMESFORMER.ATTENTION_TYPE, **kwargs) + + self.attention_type = cfg.TIMESFORMER.ATTENTION_TYPE + self.model.default_cfg = default_cfgs['vit_base_patch16_224'] + self.num_patches = (cfg.DATA.TRAIN_CROP_SIZE // patch_size) * (cfg.DATA.TRAIN_CROP_SIZE // patch_size) + pretrained_model=cfg.TIMESFORMER.PRETRAINED_MODEL + if self.pretrained: + load_pretrained(self.model, num_classes=self.model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=_conv_filter, img_size=cfg.DATA.TRAIN_CROP_SIZE, num_patches=self.num_patches, attention_type=self.attention_type, pretrained_model=pretrained_model) + + def forward(self, x): + x = self.model(x) + return x + +@MODEL_REGISTRY.register() +class TimeSformer(nn.Module): + def __init__(self, img_size=224, patch_size=16, num_classes=400, num_frames=8, attention_type='divided_space_time', pretrained_model='', **kwargs): + super(TimeSformer, self).__init__() + self.pretrained=True + self.model = VisionTransformer(img_size=img_size, num_classes=num_classes, patch_size=patch_size, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), drop_rate=0., attn_drop_rate=0., drop_path_rate=0.1, num_frames=num_frames, attention_type=attention_type, **kwargs) + + self.attention_type = attention_type + self.model.default_cfg = default_cfgs['vit_base_patch'+str(patch_size)+'_224'] + self.num_patches = (img_size // patch_size) * (img_size // patch_size) + if self.pretrained: + load_pretrained(self.model, num_classes=self.model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=_conv_filter, img_size=img_size, num_frames=num_frames, num_patches=self.num_patches, attention_type=self.attention_type, pretrained_model=pretrained_model) + def forward(self, x): + x = self.model(x) + return x diff --git a/TimeSformer/timesformer/models/vit_utils.py b/TimeSformer/timesformer/models/vit_utils.py new file 
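Illustrative aside (not part of the vendored file): typical standalone usage of the `TimeSformer` wrapper defined at the end of `vit.py` above, following the upstream TimeSformer README; the checkpoint path is a placeholder.

```python
import torch
from timesformer.models.vit import TimeSformer

model = TimeSformer(img_size=224, num_classes=400, num_frames=8,
                    attention_type='divided_space_time',
                    pretrained_model='/path/to/pretrained/model.pyth')
dummy_video = torch.randn(2, 3, 8, 224, 224)   # (batch, channels, frames, H, W)
pred = model(dummy_video)                      # shape (2, 400)
```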
mode 100755 index 0000000000000000000000000000000000000000..d11a163a50807b68fc2dd3de150384f72e8a00c6 --- /dev/null +++ b/TimeSformer/timesformer/models/vit_utils.py @@ -0,0 +1,164 @@ +# Copyright 2020 Ross Wightman +# Various utility functions + +import torch +import torch.nn as nn +from functools import partial +import math +import warnings +import torch.nn.functional as F + +from timesformer.models.helpers import load_pretrained +from .build import MODEL_REGISTRY +from itertools import repeat +from collections import abc as container_abcs +# from torch._six import container_abcs + +DEFAULT_CROP_PCT = 0.875 +IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406) +IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225) +IMAGENET_INCEPTION_MEAN = (0.5, 0.5, 0.5) +IMAGENET_INCEPTION_STD = (0.5, 0.5, 0.5) +IMAGENET_DPN_MEAN = (124 / 255, 117 / 255, 104 / 255) +IMAGENET_DPN_STD = tuple([1 / (.0167 * 255)] * 3) + +def _no_grad_trunc_normal_(tensor, mean, std, a, b): + def norm_cdf(x): + # Computes standard normal cumulative distribution function + return (1. + math.erf(x / math.sqrt(2.))) / 2. + + if (mean < a - 2 * std) or (mean > b + 2 * std): + warnings.warn("mean is more than 2 std from [a, b] in nn.init.trunc_normal_. " + "The distribution of values may be incorrect.", + stacklevel=2) + + with torch.no_grad(): + # Values are generated by using a truncated uniform distribution and + # then using the inverse CDF for the normal distribution. + # Get upper and lower cdf values + l = norm_cdf((a - mean) / std) + u = norm_cdf((b - mean) / std) + + # Uniformly fill tensor with values from [l, u], then translate to + # [2l-1, 2u-1]. + tensor.uniform_(2 * l - 1, 2 * u - 1) + + # Use inverse cdf transform for normal distribution to get truncated + # standard normal + tensor.erfinv_() + + # Transform to proper mean, std + tensor.mul_(std * math.sqrt(2.)) + tensor.add_(mean) + + # Clamp to ensure it's in the proper range + tensor.clamp_(min=a, max=b) + return tensor + +def trunc_normal_(tensor, mean=0., std=1., a=-2., b=2.): + # type: (Tensor, float, float, float, float) -> Tensor + r"""Fills the input Tensor with values drawn from a truncated + normal distribution. The values are effectively drawn from the + normal distribution :math:`\mathcal{N}(\text{mean}, \text{std}^2)` + with values outside :math:`[a, b]` redrawn until they are within + the bounds. The method used for generating the random values works + best when :math:`a \leq \text{mean} \leq b`. 
+ Args: + tensor: an n-dimensional `torch.Tensor` + mean: the mean of the normal distribution + std: the standard deviation of the normal distribution + a: the minimum cutoff value + b: the maximum cutoff value + Examples: + >>> w = torch.empty(3, 5) + >>> nn.init.trunc_normal_(w) + """ + return _no_grad_trunc_normal_(tensor, mean, std, a, b) + +# From PyTorch internals +def _ntuple(n): + def parse(x): + if isinstance(x, container_abcs.Iterable): + return x + return tuple(repeat(x, n)) + return parse +to_2tuple = _ntuple(2) + +# Calculate symmetric padding for a convolution +def get_padding(kernel_size: int, stride: int = 1, dilation: int = 1, **_) -> int: + padding = ((stride - 1) + dilation * (kernel_size - 1)) // 2 + return padding + +def get_padding_value(padding, kernel_size, **kwargs): + dynamic = False + if isinstance(padding, str): + # for any string padding, the padding will be calculated for you, one of three ways + padding = padding.lower() + if padding == 'same': + # TF compatible 'SAME' padding, has a performance and GPU memory allocation impact + if is_static_pad(kernel_size, **kwargs): + # static case, no extra overhead + padding = get_padding(kernel_size, **kwargs) + else: + # dynamic 'SAME' padding, has runtime/GPU memory overhead + padding = 0 + dynamic = True + elif padding == 'valid': + # 'VALID' padding, same as padding=0 + padding = 0 + else: + # Default to PyTorch style 'same'-ish symmetric padding + padding = get_padding(kernel_size, **kwargs) + return padding, dynamic + +# Calculate asymmetric TensorFlow-like 'SAME' padding for a convolution +def get_same_padding(x: int, k: int, s: int, d: int): + return max((int(math.ceil(x // s)) - 1) * s + (k - 1) * d + 1 - x, 0) + + +# Can SAME padding for given args be done statically? +def is_static_pad(kernel_size: int, stride: int = 1, dilation: int = 1, **_): + return stride == 1 and (dilation * (kernel_size - 1)) % 2 == 0 + + +# Dynamically pad input x with 'SAME' padding for conv with specified args +#def pad_same(x, k: List[int], s: List[int], d: List[int] = (1, 1), value: float = 0): +def pad_same(x, k, s, d=(1, 1), value= 0): + ih, iw = x.size()[-2:] + pad_h, pad_w = get_same_padding(ih, k[0], s[0], d[0]), get_same_padding(iw, k[1], s[1], d[1]) + if pad_h > 0 or pad_w > 0: + x = F.pad(x, [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2], value=value) + return x + +def adaptive_pool_feat_mult(pool_type='avg'): + if pool_type == 'catavgmax': + return 2 + else: + return 1 + +def drop_path(x, drop_prob: float = 0., training: bool = False): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). + This is the same as the DropConnect impl I created for EfficientNet, etc networks, however, + the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper... + See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for + changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use + 'survival rate' as the argument. + """ + if drop_prob == 0. 
or not training: + return x + keep_prob = 1 - drop_prob + shape = (x.shape[0],) + (1,) * (x.ndim - 1) # work with diff dim tensors, not just 2D ConvNets + random_tensor = keep_prob + torch.rand(shape, dtype=x.dtype, device=x.device) + random_tensor.floor_() # binarize + output = x.div(keep_prob) * random_tensor + return output + +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). + """ + def __init__(self, drop_prob=None): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + + def forward(self, x): + return drop_path(x, self.drop_prob, self.training) diff --git a/TimeSformer/timesformer/utils/__init__.py b/TimeSformer/timesformer/utils/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..5c7f19c6c00a4ac3f2f2bc66f892e44bcbd72612 --- /dev/null +++ b/TimeSformer/timesformer/utils/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. diff --git a/TimeSformer/timesformer/utils/ava_eval_helper.py b/TimeSformer/timesformer/utils/ava_eval_helper.py new file mode 100755 index 0000000000000000000000000000000000000000..6bad2043cb2ba509c9c52606942c6f0026aa8583 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_eval_helper.py @@ -0,0 +1,304 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +############################################################################## +# +# Based on: +# -------------------------------------------------------- +# ActivityNet +# Copyright (c) 2015 ActivityNet +# Licensed under The MIT License +# [see https://github.com/activitynet/ActivityNet/blob/master/LICENSE for details] +# -------------------------------------------------------- + +"""Helper functions for AVA evaluation.""" + +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) +import csv +import logging +import numpy as np +import pprint +import time +from collections import defaultdict +from fvcore.common.file_io import PathManager +import timesformer.utils.distributed as du + +from timesformer.utils.ava_evaluation import ( + object_detection_evaluation, + standard_fields, +) + +logger = logging.getLogger(__name__) + + +def make_image_key(video_id, timestamp): + """Returns a unique identifier for a video id & timestamp.""" + return "%s,%04d" % (video_id, int(timestamp)) + + +def read_csv(csv_file, class_whitelist=None, load_score=False): + """Loads boxes and class labels from a CSV file in the AVA format. + CSV file format described at https://research.google.com/ava/download.html. + Args: + csv_file: A file object. + class_whitelist: If provided, boxes corresponding to (integer) class labels + not in this set are skipped. + Returns: + boxes: A dictionary mapping each unique image key (string) to a list of + boxes, given as coordinates [y1, x1, y2, x2]. 
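Illustrative aside (not part of the vendored file): `drop_path` above zeroes the residual branch for whole *samples* and rescales the survivors so the expectation is unchanged. A numeric view of the floor-based binarization trick it uses.

```python
import torch

torch.manual_seed(0)
x = torch.ones(8, 4)                     # 8 samples in the batch
keep_prob = 0.75
# keep_prob + U[0, 1) then floor() yields a 0/1 mask that is 1 w.p. keep_prob.
mask = (keep_prob + torch.rand(8, 1)).floor_()
out = x / keep_prob * mask               # kept rows become 4/3, dropped rows 0
print(mask.squeeze().tolist())
```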
+ labels: A dictionary mapping each unique image key (string) to a list of + integer class lables, matching the corresponding box in `boxes`. + scores: A dictionary mapping each unique image key (string) to a list of + score values lables, matching the corresponding label in `labels`. If + scores are not provided in the csv, then they will default to 1.0. + """ + boxes = defaultdict(list) + labels = defaultdict(list) + scores = defaultdict(list) + with PathManager.open(csv_file, "r") as f: + reader = csv.reader(f) + for row in reader: + assert len(row) in [7, 8], "Wrong number of columns: " + row + image_key = make_image_key(row[0], row[1]) + x1, y1, x2, y2 = [float(n) for n in row[2:6]] + action_id = int(row[6]) + if class_whitelist and action_id not in class_whitelist: + continue + score = 1.0 + if load_score: + score = float(row[7]) + boxes[image_key].append([y1, x1, y2, x2]) + labels[image_key].append(action_id) + scores[image_key].append(score) + return boxes, labels, scores + + +def read_exclusions(exclusions_file): + """Reads a CSV file of excluded timestamps. + Args: + exclusions_file: A file object containing a csv of video-id,timestamp. + Returns: + A set of strings containing excluded image keys, e.g. "aaaaaaaaaaa,0904", + or an empty set if exclusions file is None. + """ + excluded = set() + if exclusions_file: + with PathManager.open(exclusions_file, "r") as f: + reader = csv.reader(f) + for row in reader: + assert len(row) == 2, "Expected only 2 columns, got: " + row + excluded.add(make_image_key(row[0], row[1])) + return excluded + + +def read_labelmap(labelmap_file): + """Read label map and class ids.""" + + labelmap = [] + class_ids = set() + name = "" + class_id = "" + with PathManager.open(labelmap_file, "r") as f: + for line in f: + if line.startswith(" name:"): + name = line.split('"')[1] + elif line.startswith(" id:") or line.startswith(" label_id:"): + class_id = int(line.strip().split(" ")[-1]) + labelmap.append({"id": class_id, "name": name}) + class_ids.add(class_id) + return labelmap, class_ids + + +def evaluate_ava_from_files(labelmap, groundtruth, detections, exclusions): + """Run AVA evaluation given annotation/prediction files.""" + + categories, class_whitelist = read_labelmap(labelmap) + excluded_keys = read_exclusions(exclusions) + groundtruth = read_csv(groundtruth, class_whitelist, load_score=False) + detections = read_csv(detections, class_whitelist, load_score=True) + run_evaluation(categories, groundtruth, detections, excluded_keys) + + +def evaluate_ava( + preds, + original_boxes, + metadata, + excluded_keys, + class_whitelist, + categories, + groundtruth=None, + video_idx_to_name=None, + name="latest", +): + """Run AVA evaluation given numpy arrays.""" + + eval_start = time.time() + + detections = get_ava_eval_data( + preds, + original_boxes, + metadata, + class_whitelist, + video_idx_to_name=video_idx_to_name, + ) + + logger.info("Evaluating with %d unique GT frames." % len(groundtruth[0])) + logger.info( + "Evaluating with %d unique detection frames" % len(detections[0]) + ) + + write_results(detections, "detections_%s.csv" % name) + write_results(groundtruth, "groundtruth_%s.csv" % name) + + results = run_evaluation(categories, groundtruth, detections, excluded_keys) + + logger.info("AVA eval done in %f seconds." 
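Illustrative aside (not part of the vendored file): the AVA CSV layout consumed by `read_csv` above, with one made-up row. Note that a box comes in as `x1, y1, x2, y2` but is stored as `[y1, x1, y2, x2]`.

```python
import csv
import io

row = "vid001,0902,0.07,0.14,0.98,0.96,12,0.87"   # video, sec, box, action, score
video_id, ts, x1, y1, x2, y2, action_id, score = next(csv.reader(io.StringIO(row)))
image_key = "%s,%04d" % (video_id, int(ts))       # same format as make_image_key
print(image_key)                                  # vid001,0902
print([float(v) for v in (y1, x1, y2, x2)])       # [0.14, 0.07, 0.96, 0.98]
```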
% (time.time() - eval_start)) + return results["PascalBoxes_Precision/mAP@0.5IOU"] + + +def run_evaluation( + categories, groundtruth, detections, excluded_keys, verbose=True +): + """AVA evaluation main logic.""" + + pascal_evaluator = object_detection_evaluation.PascalDetectionEvaluator( + categories + ) + + boxes, labels, _ = groundtruth + + gt_keys = [] + pred_keys = [] + + for image_key in boxes: + if image_key in excluded_keys: + logging.info( + ( + "Found excluded timestamp in ground truth: %s. " + "It will be ignored." + ), + image_key, + ) + continue + pascal_evaluator.add_single_ground_truth_image_info( + image_key, + { + standard_fields.InputDataFields.groundtruth_boxes: np.array( + boxes[image_key], dtype=float + ), + standard_fields.InputDataFields.groundtruth_classes: np.array( + labels[image_key], dtype=int + ), + standard_fields.InputDataFields.groundtruth_difficult: np.zeros( + len(boxes[image_key]), dtype=bool + ), + }, + ) + + gt_keys.append(image_key) + + boxes, labels, scores = detections + + for image_key in boxes: + if image_key in excluded_keys: + logging.info( + ( + "Found excluded timestamp in detections: %s. " + "It will be ignored." + ), + image_key, + ) + continue + pascal_evaluator.add_single_detected_image_info( + image_key, + { + standard_fields.DetectionResultFields.detection_boxes: np.array( + boxes[image_key], dtype=float + ), + standard_fields.DetectionResultFields.detection_classes: np.array( + labels[image_key], dtype=int + ), + standard_fields.DetectionResultFields.detection_scores: np.array( + scores[image_key], dtype=float + ), + }, + ) + + pred_keys.append(image_key) + + metrics = pascal_evaluator.evaluate() + + if du.is_master_proc(): + pprint.pprint(metrics, indent=2) + return metrics + + +def get_ava_eval_data( + scores, + boxes, + metadata, + class_whitelist, + verbose=False, + video_idx_to_name=None, +): + """ + Convert our data format into the data format used in official AVA + evaluation. + """ + + out_scores = defaultdict(list) + out_labels = defaultdict(list) + out_boxes = defaultdict(list) + count = 0 + for i in range(scores.shape[0]): + video_idx = int(np.round(metadata[i][0])) + sec = int(np.round(metadata[i][1])) + + video = video_idx_to_name[video_idx] + + key = video + "," + "%04d" % (sec) + batch_box = boxes[i].tolist() + # The first is batch idx. + batch_box = [batch_box[j] for j in [0, 2, 1, 4, 3]] + + one_scores = scores[i].tolist() + for cls_idx, score in enumerate(one_scores): + if cls_idx + 1 in class_whitelist: + out_scores[key].append(score) + out_labels[key].append(cls_idx + 1) + out_boxes[key].append(batch_box[1:]) + count += 1 + + return out_boxes, out_labels, out_scores + + +def write_results(detections, filename): + """Write prediction results into official formats.""" + start = time.time() + + boxes, labels, scores = detections + with PathManager.open(filename, "w") as f: + for key in boxes.keys(): + for box, label, score in zip(boxes[key], labels[key], scores[key]): + f.write( + "%s,%.03f,%.03f,%.03f,%.03f,%d,%.04f\n" + % (key, box[1], box[0], box[3], box[2], label, score) + ) + + logger.info("AVA results wrote to %s" % filename) + logger.info("\ttook %d seconds." 
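Illustrative aside (not part of the vendored file): the index shuffle `[0, 2, 1, 4, 3]` in `get_ava_eval_data` above converts detector rows, assumed here to be `[batch_idx, x1, y1, x2, y2]`, into AVA's `[batch_idx, y1, x1, y2, x2]` ordering before the batch index is dropped.

```python
batch_box = [7, 0.10, 0.20, 0.30, 0.40]   # assumed [idx, x1, y1, x2, y2]
reordered = [batch_box[j] for j in [0, 2, 1, 4, 3]]
print(reordered)                          # [7, 0.2, 0.1, 0.4, 0.3]
print(reordered[1:])                      # what gets kept: [y1, x1, y2, x2]
```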
% (time.time() - start)) diff --git a/TimeSformer/timesformer/utils/ava_evaluation/README.md b/TimeSformer/timesformer/utils/ava_evaluation/README.md new file mode 100755 index 0000000000000000000000000000000000000000..3fe4626c550747a442768784e55516f23aab0354 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/README.md @@ -0,0 +1 @@ +The code under this folder is from the official [ActivityNet repo](https://github.com/activitynet/ActivityNet). diff --git a/TimeSformer/timesformer/utils/ava_evaluation/__init__.py b/TimeSformer/timesformer/utils/ava_evaluation/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TimeSformer/timesformer/utils/ava_evaluation/ava_action_list_v2.1_for_activitynet_2018.pbtxt.txt b/TimeSformer/timesformer/utils/ava_evaluation/ava_action_list_v2.1_for_activitynet_2018.pbtxt.txt new file mode 100755 index 0000000000000000000000000000000000000000..5e2c485682830919a09300ac851e6b0e4bdf3efb --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/ava_action_list_v2.1_for_activitynet_2018.pbtxt.txt @@ -0,0 +1,240 @@ +item { + name: "bend/bow (at the waist)" + id: 1 +} +item { + name: "crouch/kneel" + id: 3 +} +item { + name: "dance" + id: 4 +} +item { + name: "fall down" + id: 5 +} +item { + name: "get up" + id: 6 +} +item { + name: "jump/leap" + id: 7 +} +item { + name: "lie/sleep" + id: 8 +} +item { + name: "martial art" + id: 9 +} +item { + name: "run/jog" + id: 10 +} +item { + name: "sit" + id: 11 +} +item { + name: "stand" + id: 12 +} +item { + name: "swim" + id: 13 +} +item { + name: "walk" + id: 14 +} +item { + name: "answer phone" + id: 15 +} +item { + name: "carry/hold (an object)" + id: 17 +} +item { + name: "climb (e.g., a mountain)" + id: 20 +} +item { + name: "close (e.g., a door, a box)" + id: 22 +} +item { + name: "cut" + id: 24 +} +item { + name: "dress/put on clothing" + id: 26 +} +item { + name: "drink" + id: 27 +} +item { + name: "drive (e.g., a car, a truck)" + id: 28 +} +item { + name: "eat" + id: 29 +} +item { + name: "enter" + id: 30 +} +item { + name: "hit (an object)" + id: 34 +} +item { + name: "lift/pick up" + id: 36 +} +item { + name: "listen (e.g., to music)" + id: 37 +} +item { + name: "open (e.g., a window, a car door)" + id: 38 +} +item { + name: "play musical instrument" + id: 41 +} +item { + name: "point to (an object)" + id: 43 +} +item { + name: "pull (an object)" + id: 45 +} +item { + name: "push (an object)" + id: 46 +} +item { + name: "put down" + id: 47 +} +item { + name: "read" + id: 48 +} +item { + name: "ride (e.g., a bike, a car, a horse)" + id: 49 +} +item { + name: "sail boat" + id: 51 +} +item { + name: "shoot" + id: 52 +} +item { + name: "smoke" + id: 54 +} +item { + name: "take a photo" + id: 56 +} +item { + name: "text on/look at a cellphone" + id: 57 +} +item { + name: "throw" + id: 58 +} +item { + name: "touch (an object)" + id: 59 +} +item { + name: "turn (e.g., a screwdriver)" + id: 60 +} +item { + name: "watch (e.g., TV)" + id: 61 +} +item { + name: "work on a computer" + id: 62 +} +item { + name: "write" + id: 63 +} +item { + name: "fight/hit (a person)" + id: 64 +} +item { + name: "give/serve (an object) to (a person)" + id: 65 +} +item { + name: "grab (a person)" + id: 66 +} +item { + name: "hand clap" + id: 67 +} +item { + name: "hand shake" + id: 68 +} +item { + name: "hand wave" + id: 69 +} +item { + name: "hug (a person)" + id: 70 +} +item { + name: "kiss (a person)" + id: 72 +} +item { + name: "lift (a 
person)" + id: 73 +} +item { + name: "listen to (a person)" + id: 74 +} +item { + name: "push (another person)" + id: 76 +} +item { + name: "sing to (e.g., self, a person, a group)" + id: 77 +} +item { + name: "take (an object) from (a person)" + id: 78 +} +item { + name: "talk to (e.g., self, a person, a group)" + id: 79 +} +item { + name: "watch (a person)" + id: 80 +} diff --git a/TimeSformer/timesformer/utils/ava_evaluation/label_map_util.py b/TimeSformer/timesformer/utils/ava_evaluation/label_map_util.py new file mode 100755 index 0000000000000000000000000000000000000000..bd5e62de7561b9f3e881f63abfc3d75b61919d2e --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/label_map_util.py @@ -0,0 +1,187 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Label map utility functions.""" + +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) +import logging + +# from google.protobuf import text_format +# from google3.third_party.tensorflow_models.object_detection.protos import string_int_label_map_pb2 + + +def _validate_label_map(label_map): + """Checks if a label map is valid. + + Args: + label_map: StringIntLabelMap to validate. + + Raises: + ValueError: if label map is invalid. + """ + for item in label_map.item: + if item.id < 1: + raise ValueError("Label map ids should be >= 1.") + + +def create_category_index(categories): + """Creates dictionary of COCO compatible categories keyed by category id. + + Args: + categories: a list of dicts, each of which has the following keys: + 'id': (required) an integer id uniquely identifying this category. + 'name': (required) string representing category name + e.g., 'cat', 'dog', 'pizza'. + + Returns: + category_index: a dict containing the same entries as categories, but keyed + by the 'id' field of each category. + """ + category_index = {} + for cat in categories: + category_index[cat["id"]] = cat + return category_index + + +def get_max_label_map_index(label_map): + """Get maximum index in label map. + + Args: + label_map: a StringIntLabelMapProto + + Returns: + an integer + """ + return max([item.id for item in label_map.item]) + + +def convert_label_map_to_categories( + label_map, max_num_classes, use_display_name=True +): + """Loads label map proto and returns categories list compatible with eval. + + This function loads a label map and returns a list of dicts, each of which + has the following keys: + 'id': (required) an integer id uniquely identifying this category. + 'name': (required) string representing category name + e.g., 'cat', 'dog', 'pizza'. + We only allow class into the list if its id-label_id_offset is + between 0 (inclusive) and max_num_classes (exclusive). + If there are several items mapping to the same id in the label map, + we will only keep the first one in the categories list. 
+ + Args: + label_map: a StringIntLabelMapProto or None. If None, a default categories + list is created with max_num_classes categories. + max_num_classes: maximum number of (consecutive) label indices to include. + use_display_name: (boolean) choose whether to load 'display_name' field + as category name. If False or if the display_name field does not exist, + uses 'name' field as category names instead. + Returns: + categories: a list of dictionaries representing all possible categories. + """ + categories = [] + list_of_ids_already_added = [] + if not label_map: + label_id_offset = 1 + for class_id in range(max_num_classes): + categories.append( + { + "id": class_id + label_id_offset, + "name": "category_{}".format(class_id + label_id_offset), + } + ) + return categories + for item in label_map.item: + if not 0 < item.id <= max_num_classes: + logging.info( + "Ignore item %d since it falls outside of requested " + "label range.", + item.id, + ) + continue + if use_display_name and item.HasField("display_name"): + name = item.display_name + else: + name = item.name + if item.id not in list_of_ids_already_added: + list_of_ids_already_added.append(item.id) + categories.append({"id": item.id, "name": name}) + return categories + + +def load_labelmap(path): + """Loads label map proto. + + Args: + path: path to StringIntLabelMap proto text file. + Returns: + a StringIntLabelMapProto + """ + with open(path, "r") as fid: + label_map_string = fid.read() + label_map = string_int_label_map_pb2.StringIntLabelMap() + try: + text_format.Merge(label_map_string, label_map) + except text_format.ParseError: + label_map.ParseFromString(label_map_string) + _validate_label_map(label_map) + return label_map + + +def get_label_map_dict(label_map_path, use_display_name=False): + """Reads a label map and returns a dictionary of label names to id. + + Args: + label_map_path: path to label_map. + use_display_name: whether to use the label map items' display names as keys. + + Returns: + A dictionary mapping label names to id. + """ + label_map = load_labelmap(label_map_path) + label_map_dict = {} + for item in label_map.item: + if use_display_name: + label_map_dict[item.display_name] = item.id + else: + label_map_dict[item.name] = item.id + return label_map_dict + + +def create_category_index_from_labelmap(label_map_path): + """Reads a label map and returns a category index. + + Args: + label_map_path: Path to `StringIntLabelMap` proto text file. + + Returns: + A category index, which is a dictionary that maps integer ids to dicts + containing categories, e.g. + {1: {'id': 1, 'name': 'dog'}, 2: {'id': 2, 'name': 'cat'}, ...} + """ + label_map = load_labelmap(label_map_path) + max_num_classes = max(item.id for item in label_map.item) + categories = convert_label_map_to_categories(label_map, max_num_classes) + return create_category_index(categories) + + +def create_class_agnostic_category_index(): + """Creates a category index with a single `object` class.""" + return {1: {"id": 1, "name": "object"}} diff --git a/TimeSformer/timesformer/utils/ava_evaluation/metrics.py b/TimeSformer/timesformer/utils/ava_evaluation/metrics.py new file mode 100755 index 0000000000000000000000000000000000000000..0094216fd38a23dc2f51e3bdfba4f427bd22f617 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/metrics.py @@ -0,0 +1,154 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Functions for computing metrics like precision, recall, and CorLoc."""
+from __future__ import division
+import numpy as np
+
+
+def compute_precision_recall(scores, labels, num_gt):
+    """Compute precision and recall.
+
+    Args:
+      scores: A float numpy array representing detection scores.
+      labels: A boolean numpy array representing true/false positive labels.
+      num_gt: Number of ground truth instances.
+
+    Raises:
+      ValueError: if the input is not of the correct format.
+
+    Returns:
+      precision: Fraction of positive instances over detected ones. This value
+        is None if no ground truth labels are present.
+      recall: Fraction of detected positive instances over all positive
+        instances. This value is None if no ground truth labels are present.
+    """
+    if (
+        not isinstance(labels, np.ndarray)
+        or labels.dtype != bool
+        or len(labels.shape) != 1
+    ):
+        raise ValueError("labels must be single dimension bool numpy array")
+
+    if not isinstance(scores, np.ndarray) or len(scores.shape) != 1:
+        raise ValueError("scores must be single dimension numpy array")
+
+    if num_gt < np.sum(labels):
+        raise ValueError(
+            "Number of true positives must be smaller than num_gt."
+        )
+
+    if len(scores) != len(labels):
+        raise ValueError("scores and labels must be of the same size.")
+
+    if num_gt == 0:
+        return None, None
+
+    sorted_indices = np.argsort(scores)
+    sorted_indices = sorted_indices[::-1]
+    labels = labels.astype(int)
+    true_positive_labels = labels[sorted_indices]
+    false_positive_labels = 1 - true_positive_labels
+    cum_true_positives = np.cumsum(true_positive_labels)
+    cum_false_positives = np.cumsum(false_positive_labels)
+    precision = cum_true_positives.astype(float) / (
+        cum_true_positives + cum_false_positives
+    )
+    recall = cum_true_positives.astype(float) / num_gt
+    return precision, recall
+
+
+def compute_average_precision(precision, recall):
+    """Compute Average Precision according to the definition in VOCdevkit.
+
+    Precision is modified to ensure that it does not decrease as recall
+    decreases.
+
+    Args:
+      precision: A float [N, 1] numpy array of precisions.
+      recall: A float [N, 1] numpy array of recalls.
+
+    Raises:
+      ValueError: if the input is not of the correct format.
+
+    Returns:
+      average_precision: The area under the precision recall curve. NaN if
+        precision and recall are None.
+    """
+    if precision is None:
+        if recall is not None:
+            raise ValueError("If precision is None, recall must also be None")
+        return np.nan
+
+    if not isinstance(precision, np.ndarray) or not isinstance(
+        recall, np.ndarray
+    ):
+        raise ValueError("precision and recall must be numpy array")
+    if not np.issubdtype(precision.dtype, np.floating) or not np.issubdtype(
+        recall.dtype, np.floating
+    ):
+        raise ValueError("input must be float numpy array.")
+    if len(precision) != len(recall):
+        raise ValueError("precision and recall must be of the same size.")
+    if not precision.size:
+        return 0.0
+    if np.amin(precision) < 0 or np.amax(precision) > 1:
+        raise ValueError("Precision must be in the range of [0, 1].")
+    if np.amin(recall) < 0 or np.amax(recall) > 1:
+        raise ValueError("recall must be in the range of [0, 1].")
+    if not all(recall[i] <= recall[i + 1] for i in range(len(recall) - 1)):
+        raise ValueError("recall must be a non-decreasing array")
+
+    recall = np.concatenate([[0], recall, [1]])
+    precision = np.concatenate([[0], precision, [0]])
+
+    # Preprocess precision to be a non-decreasing array
+    for i in range(len(precision) - 2, -1, -1):
+        precision[i] = np.maximum(precision[i], precision[i + 1])
+
+    indices = np.where(recall[1:] != recall[:-1])[0] + 1
+    average_precision = np.sum(
+        (recall[indices] - recall[indices - 1]) * precision[indices]
+    )
+    return average_precision
+
+
+def compute_cor_loc(
+    num_gt_imgs_per_class, num_images_correctly_detected_per_class
+):
+    """Compute CorLoc according to the definition in the following paper.
+
+    https://www.robots.ox.ac.uk/~vgg/rg/papers/deselaers-eccv10.pdf
+
+    Returns nans if there are no ground truth images for a class.
+
+    Args:
+      num_gt_imgs_per_class: 1D array, representing number of images containing
+        at least one object instance of a particular class.
+      num_images_correctly_detected_per_class: 1D array, representing number of
+        images in which at least one object instance of a particular class is
+        correctly detected.
+
+    Returns:
+      corloc_per_class: A float numpy array representing the corloc score of
+        each class.
+    """
+    # Divide by zero expected for classes with no gt examples.
+    with np.errstate(divide="ignore", invalid="ignore"):
+        return np.where(
+            num_gt_imgs_per_class == 0,
+            np.nan,
+            num_images_correctly_detected_per_class / num_gt_imgs_per_class,
+        )
diff --git a/TimeSformer/timesformer/utils/ava_evaluation/np_box_list.py b/TimeSformer/timesformer/utils/ava_evaluation/np_box_list.py
new file mode 100755
index 0000000000000000000000000000000000000000..18744b0344183f42f6e20328f35fcaf52b61efb6
--- /dev/null
+++ b/TimeSformer/timesformer/utils/ava_evaluation/np_box_list.py
@@ -0,0 +1,143 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Numpy BoxList classes and functions."""
+
+from __future__ import (
+    absolute_import,
+    division,
+    print_function,
+    unicode_literals,
+)
+import numpy as np
+
+
+class BoxList(object):
+    """Box collection.
+
+    BoxList represents a list of bounding boxes as numpy array, where each
+    bounding box is represented as a row of 4 numbers,
+    [y_min, x_min, y_max, x_max]. It is assumed that all bounding boxes within
+    a given list correspond to a single image.
+
+    Optionally, users can add additional related fields (such as
+    objectness/classification scores).
+    """
+
+    def __init__(self, data):
+        """Constructs box collection.
+
+        Args:
+          data: a numpy array of shape [N, 4] representing box coordinates
+
+        Raises:
+          ValueError: if bbox data is not a numpy array
+          ValueError: if invalid dimensions for bbox data
+        """
+        if not isinstance(data, np.ndarray):
+            raise ValueError("data must be a numpy array.")
+        if len(data.shape) != 2 or data.shape[1] != 4:
+            raise ValueError("Invalid dimensions for box data.")
+        if data.dtype != np.float32 and data.dtype != np.float64:
+            raise ValueError(
+                "Invalid data type for box data: float is required."
+            )
+        if not self._is_valid_boxes(data):
+            raise ValueError(
+                "Invalid box data. data must be a numpy array of "
+                "N*[y_min, x_min, y_max, x_max]"
+            )
+        self.data = {"boxes": data}
+
+    def num_boxes(self):
+        """Return number of boxes held in the collection."""
+        return self.data["boxes"].shape[0]
+
+    def get_extra_fields(self):
+        """Return all non-box fields."""
+        return [k for k in self.data.keys() if k != "boxes"]
+
+    def has_field(self, field):
+        return field in self.data
+
+    def add_field(self, field, field_data):
+        """Add data to a specified field.
+
+        Args:
+          field: a string parameter used to specify a related field to be
+            accessed.
+          field_data: a numpy array of [N, ...] representing the data
+            associated with the field.
+        Raises:
+          ValueError: if the field already exists or the dimension of the
+            field data does not match the number of boxes.
+        """
+        if self.has_field(field):
+            raise ValueError("Field " + field + " already exists")
+        if len(field_data.shape) < 1 or field_data.shape[0] != self.num_boxes():
+            raise ValueError("Invalid dimensions for field data")
+        self.data[field] = field_data
+
+    def get(self):
+        """Convenience function for accessing box coordinates.
+
+        Returns:
+          a numpy array of shape [N, 4] representing box corners
+        """
+        return self.get_field("boxes")
+
+    def get_field(self, field):
+        """Accesses data associated with the specified field in the box collection.
+
+        Args:
+          field: a string parameter used to specify a related field to be
+            accessed.
+
+        Returns:
+          a numpy 1-d array representing data of an associated field
+
+        Raises:
+          ValueError: if invalid field
+        """
+        if not self.has_field(field):
+            raise ValueError("field {} does not exist".format(field))
+        return self.data[field]
+
+    def get_coordinates(self):
+        """Get corner coordinates of boxes.
+
+        Returns:
+          a list of 4 1-d numpy arrays [y_min, x_min, y_max, x_max]
+        """
+        box_coordinates = self.get()
+        y_min = box_coordinates[:, 0]
+        x_min = box_coordinates[:, 1]
+        y_max = box_coordinates[:, 2]
+        x_max = box_coordinates[:, 3]
+        return [y_min, x_min, y_max, x_max]
+
+    def _is_valid_boxes(self, data):
+        """Check whether data fulfills the format of N*[ymin, xmin, ymax, xmax].
+ + Args: + data: a numpy array of shape [N, 4] representing box coordinates + + Returns: + a boolean indicating whether all ymax of boxes are equal or greater than + ymin, and all xmax of boxes are equal or greater than xmin. + """ + if data.shape[0] > 0: + for i in range(data.shape[0]): + if data[i, 0] > data[i, 2] or data[i, 1] > data[i, 3]: + return False + return True diff --git a/TimeSformer/timesformer/utils/ava_evaluation/np_box_list_ops.py b/TimeSformer/timesformer/utils/ava_evaluation/np_box_list_ops.py new file mode 100755 index 0000000000000000000000000000000000000000..6a85b29285f7d70b3e537e1ad569c539c409b045 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/np_box_list_ops.py @@ -0,0 +1,593 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Bounding Box List operations for Numpy BoxLists. + +Example box operations that are supported: + * Areas: compute bounding box areas + * IOU: pairwise intersection-over-union scores +""" +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) +import numpy as np + +from . import np_box_list, np_box_ops + + +class SortOrder(object): + """Enum class for sort order. + + Attributes: + ascend: ascend order. + descend: descend order. + """ + + ASCEND = 1 + DESCEND = 2 + + +def area(boxlist): + """Computes area of boxes. + + Args: + boxlist: BoxList holding N boxes + + Returns: + a numpy array with shape [N*1] representing box areas + """ + y_min, x_min, y_max, x_max = boxlist.get_coordinates() + return (y_max - y_min) * (x_max - x_min) + + +def intersection(boxlist1, boxlist2): + """Compute pairwise intersection areas between boxes. + + Args: + boxlist1: BoxList holding N boxes + boxlist2: BoxList holding M boxes + + Returns: + a numpy array with shape [N*M] representing pairwise intersection area + """ + return np_box_ops.intersection(boxlist1.get(), boxlist2.get()) + + +def iou(boxlist1, boxlist2): + """Computes pairwise intersection-over-union between box collections. + + Args: + boxlist1: BoxList holding N boxes + boxlist2: BoxList holding M boxes + + Returns: + a numpy array with shape [N, M] representing pairwise iou scores. + """ + return np_box_ops.iou(boxlist1.get(), boxlist2.get()) + + +def ioa(boxlist1, boxlist2): + """Computes pairwise intersection-over-area between box collections. + + Intersection-over-area (ioa) between two boxes box1 and box2 is defined as + their intersection area over box2's area. Note that ioa is not symmetric, + that is, IOA(box1, box2) != IOA(box2, box1). + + Args: + boxlist1: BoxList holding N boxes + boxlist2: BoxList holding M boxes + + Returns: + a numpy array with shape [N, M] representing pairwise ioa scores. + """ + return np_box_ops.ioa(boxlist1.get(), boxlist2.get()) + + +def gather(boxlist, indices, fields=None): + """Gather boxes from BoxList according to indices and return new BoxList. 
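The box-level operations above compose directly with `BoxList`. A small sketch (assumed import path as before) showing `area`, `iou`, and the asymmetry of `ioa`:

```python
import numpy as np

from timesformer.utils.ava_evaluation import np_box_list, np_box_list_ops

a = np_box_list.BoxList(np.array([[0.0, 0.0, 1.0, 1.0]], dtype=np.float32))
b = np_box_list.BoxList(np.array([[0.0, 0.0, 1.0, 1.0],
                                  [0.5, 0.5, 1.0, 1.0]], dtype=np.float32))

print(np_box_list_ops.area(a))     # [1.]
print(np_box_list_ops.iou(a, b))   # [[1.   0.25]]
# IOA normalizes by the *second* argument's areas, hence the asymmetry:
print(np_box_list_ops.ioa(a, b))   # [[1. 1.]]
print(np_box_list_ops.ioa(b, a))   # [[1.  ]
                                   #  [0.25]]
```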
+ + By default, gather returns boxes corresponding to the input index list, as + well as all additional fields stored in the boxlist (indexing into the + first dimension). However one can optionally only gather from a + subset of fields. + + Args: + boxlist: BoxList holding N boxes + indices: a 1-d numpy array of type int_ + fields: (optional) list of fields to also gather from. If None (default), + all fields are gathered from. Pass an empty fields list to only gather + the box coordinates. + + Returns: + subboxlist: a BoxList corresponding to the subset of the input BoxList + specified by indices + + Raises: + ValueError: if specified field is not contained in boxlist or if the + indices are not of type int_ + """ + if indices.size: + if np.amax(indices) >= boxlist.num_boxes() or np.amin(indices) < 0: + raise ValueError("indices are out of valid range.") + subboxlist = np_box_list.BoxList(boxlist.get()[indices, :]) + if fields is None: + fields = boxlist.get_extra_fields() + for field in fields: + extra_field_data = boxlist.get_field(field) + subboxlist.add_field(field, extra_field_data[indices, ...]) + return subboxlist + + +def sort_by_field(boxlist, field, order=SortOrder.DESCEND): + """Sort boxes and associated fields according to a scalar field. + + A common use case is reordering the boxes according to descending scores. + + Args: + boxlist: BoxList holding N boxes. + field: A BoxList field for sorting and reordering the BoxList. + order: (Optional) 'descend' or 'ascend'. Default is descend. + + Returns: + sorted_boxlist: A sorted BoxList with the field in the specified order. + + Raises: + ValueError: if specified field does not exist or is not of single dimension. + ValueError: if the order is not either descend or ascend. + """ + if not boxlist.has_field(field): + raise ValueError("Field " + field + " does not exist") + if len(boxlist.get_field(field).shape) != 1: + raise ValueError("Field " + field + "should be single dimension.") + if order != SortOrder.DESCEND and order != SortOrder.ASCEND: + raise ValueError("Invalid sort order") + + field_to_sort = boxlist.get_field(field) + sorted_indices = np.argsort(field_to_sort) + if order == SortOrder.DESCEND: + sorted_indices = sorted_indices[::-1] + return gather(boxlist, sorted_indices) + + +def non_max_suppression( + boxlist, max_output_size=10000, iou_threshold=1.0, score_threshold=-10.0 +): + """Non maximum suppression. + + This op greedily selects a subset of detection bounding boxes, pruning + away boxes that have high IOU (intersection over union) overlap (> thresh) + with already selected boxes. In each iteration, the detected bounding box with + highest score in the available pool is selected. + + Args: + boxlist: BoxList holding N boxes. Must contain a 'scores' field + representing detection scores. All scores belong to the same class. + max_output_size: maximum number of retained boxes + iou_threshold: intersection over union threshold. + score_threshold: minimum score threshold. Remove the boxes with scores + less than this value. Default value is set to -10. A very + low threshold to pass pretty much all the boxes, unless + the user sets a different score threshold. 
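How `gather` and `sort_by_field` interact with extra fields, in a short sketch under the same import assumption:

```python
import numpy as np

from timesformer.utils.ava_evaluation import np_box_list, np_box_list_ops

boxlist = np_box_list.BoxList(np.array([[0.0, 0.0, 1.0, 1.0],
                                        [0.0, 0.0, 0.5, 0.5],
                                        [0.5, 0.5, 1.0, 1.0]], dtype=np.float32))
boxlist.add_field("scores", np.array([0.3, 0.9, 0.6], dtype=np.float32))

# Keep boxes 0 and 2; extra fields are gathered along the first dimension too.
subset = np_box_list_ops.gather(boxlist, np.array([0, 2]))
print(subset.get_field("scores"))  # [0.3 0.6]

# Reorder by detection score, highest first.
ranked = np_box_list_ops.sort_by_field(boxlist, "scores")
print(ranked.get_field("scores"))  # [0.9 0.6 0.3]
```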
+ + Returns: + a BoxList holding M boxes where M <= max_output_size + Raises: + ValueError: if 'scores' field does not exist + ValueError: if threshold is not in [0, 1] + ValueError: if max_output_size < 0 + """ + if not boxlist.has_field("scores"): + raise ValueError("Field scores does not exist") + if iou_threshold < 0.0 or iou_threshold > 1.0: + raise ValueError("IOU threshold must be in [0, 1]") + if max_output_size < 0: + raise ValueError("max_output_size must be bigger than 0.") + + boxlist = filter_scores_greater_than(boxlist, score_threshold) + if boxlist.num_boxes() == 0: + return boxlist + + boxlist = sort_by_field(boxlist, "scores") + + # Prevent further computation if NMS is disabled. + if iou_threshold == 1.0: + if boxlist.num_boxes() > max_output_size: + selected_indices = np.arange(max_output_size) + return gather(boxlist, selected_indices) + else: + return boxlist + + boxes = boxlist.get() + num_boxes = boxlist.num_boxes() + # is_index_valid is True only for all remaining valid boxes, + is_index_valid = np.full(num_boxes, 1, dtype=bool) + selected_indices = [] + num_output = 0 + for i in range(num_boxes): + if num_output < max_output_size: + if is_index_valid[i]: + num_output += 1 + selected_indices.append(i) + is_index_valid[i] = False + valid_indices = np.where(is_index_valid)[0] + if valid_indices.size == 0: + break + + intersect_over_union = np_box_ops.iou( + np.expand_dims(boxes[i, :], axis=0), boxes[valid_indices, :] + ) + intersect_over_union = np.squeeze(intersect_over_union, axis=0) + is_index_valid[valid_indices] = np.logical_and( + is_index_valid[valid_indices], + intersect_over_union <= iou_threshold, + ) + return gather(boxlist, np.array(selected_indices)) + + +def multi_class_non_max_suppression( + boxlist, score_thresh, iou_thresh, max_output_size +): + """Multi-class version of non maximum suppression. + + This op greedily selects a subset of detection bounding boxes, pruning + away boxes that have high IOU (intersection over union) overlap (> thresh) + with already selected boxes. It operates independently for each class for + which scores are provided (via the scores field of the input box_list), + pruning boxes with score less than a provided threshold prior to + applying NMS. + + Args: + boxlist: BoxList holding N boxes. Must contain a 'scores' field + representing detection scores. This scores field is a tensor that can + be 1 dimensional (in the case of a single class) or 2-dimensional, which + which case we assume that it takes the shape [num_boxes, num_classes]. + We further assume that this rank is known statically and that + scores.shape[1] is also known (i.e., the number of classes is fixed + and known at graph construction time). + score_thresh: scalar threshold for score (low scoring boxes are removed). + iou_thresh: scalar threshold for IOU (boxes that that high IOU overlap + with previously selected boxes are removed). + max_output_size: maximum number of retained boxes per class. + + Returns: + a BoxList holding M boxes with a rank-1 scores field representing + corresponding scores for each box with scores sorted in decreasing order + and a rank-1 classes field representing a class label for each box. + Raises: + ValueError: if iou_thresh is not in [0, 1] or if input boxlist does not have + a valid scores field. 
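A quick sanity check of the greedy single-class NMS above (assumed imports as before): the second box overlaps the top-scoring one with IoU 0.81 and is suppressed, while the disjoint box survives.

```python
import numpy as np

from timesformer.utils.ava_evaluation import np_box_list, np_box_list_ops

boxlist = np_box_list.BoxList(np.array(
    [[0.0, 0.0, 1.0, 1.0],     # top score, selected first
     [0.0, 0.0, 0.9, 0.9],     # IoU 0.81 with the first -> suppressed
     [0.0, 0.0, 0.1, 0.1]],    # disjoint -> kept
    dtype=np.float32))
boxlist.add_field("scores", np.array([0.9, 0.8, 0.7], dtype=np.float32))

kept = np_box_list_ops.non_max_suppression(
    boxlist, max_output_size=10, iou_threshold=0.5, score_threshold=0.0)
print(kept.num_boxes())  # 2
```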
+ """ + if not 0 <= iou_thresh <= 1.0: + raise ValueError("thresh must be between 0 and 1") + if not isinstance(boxlist, np_box_list.BoxList): + raise ValueError("boxlist must be a BoxList") + if not boxlist.has_field("scores"): + raise ValueError("input boxlist must have 'scores' field") + scores = boxlist.get_field("scores") + if len(scores.shape) == 1: + scores = np.reshape(scores, [-1, 1]) + elif len(scores.shape) == 2: + if scores.shape[1] is None: + raise ValueError( + "scores field must have statically defined second " "dimension" + ) + else: + raise ValueError("scores field must be of rank 1 or 2") + num_boxes = boxlist.num_boxes() + num_scores = scores.shape[0] + num_classes = scores.shape[1] + + if num_boxes != num_scores: + raise ValueError("Incorrect scores field length: actual vs expected.") + + selected_boxes_list = [] + for class_idx in range(num_classes): + boxlist_and_class_scores = np_box_list.BoxList(boxlist.get()) + class_scores = np.reshape(scores[0:num_scores, class_idx], [-1]) + boxlist_and_class_scores.add_field("scores", class_scores) + boxlist_filt = filter_scores_greater_than( + boxlist_and_class_scores, score_thresh + ) + nms_result = non_max_suppression( + boxlist_filt, + max_output_size=max_output_size, + iou_threshold=iou_thresh, + score_threshold=score_thresh, + ) + nms_result.add_field( + "classes", np.zeros_like(nms_result.get_field("scores")) + class_idx + ) + selected_boxes_list.append(nms_result) + selected_boxes = concatenate(selected_boxes_list) + sorted_boxes = sort_by_field(selected_boxes, "scores") + return sorted_boxes + + +def scale(boxlist, y_scale, x_scale): + """Scale box coordinates in x and y dimensions. + + Args: + boxlist: BoxList holding N boxes + y_scale: float + x_scale: float + + Returns: + boxlist: BoxList holding N boxes + """ + y_min, x_min, y_max, x_max = np.array_split(boxlist.get(), 4, axis=1) + y_min = y_scale * y_min + y_max = y_scale * y_max + x_min = x_scale * x_min + x_max = x_scale * x_max + scaled_boxlist = np_box_list.BoxList( + np.hstack([y_min, x_min, y_max, x_max]) + ) + + fields = boxlist.get_extra_fields() + for field in fields: + extra_field_data = boxlist.get_field(field) + scaled_boxlist.add_field(field, extra_field_data) + + return scaled_boxlist + + +def clip_to_window(boxlist, window): + """Clip bounding boxes to a window. + + This op clips input bounding boxes (represented by bounding box + corners) to a window, optionally filtering out boxes that do not + overlap at all with the window. + + Args: + boxlist: BoxList holding M_in boxes + window: a numpy array of shape [4] representing the + [y_min, x_min, y_max, x_max] window to which the op + should clip boxes. 
+ + Returns: + a BoxList holding M_out boxes where M_out <= M_in + """ + y_min, x_min, y_max, x_max = np.array_split(boxlist.get(), 4, axis=1) + win_y_min = window[0] + win_x_min = window[1] + win_y_max = window[2] + win_x_max = window[3] + y_min_clipped = np.fmax(np.fmin(y_min, win_y_max), win_y_min) + y_max_clipped = np.fmax(np.fmin(y_max, win_y_max), win_y_min) + x_min_clipped = np.fmax(np.fmin(x_min, win_x_max), win_x_min) + x_max_clipped = np.fmax(np.fmin(x_max, win_x_max), win_x_min) + clipped = np_box_list.BoxList( + np.hstack([y_min_clipped, x_min_clipped, y_max_clipped, x_max_clipped]) + ) + clipped = _copy_extra_fields(clipped, boxlist) + areas = area(clipped) + nonzero_area_indices = np.reshape( + np.nonzero(np.greater(areas, 0.0)), [-1] + ).astype(np.int32) + return gather(clipped, nonzero_area_indices) + + +def prune_non_overlapping_boxes(boxlist1, boxlist2, minoverlap=0.0): + """Prunes the boxes in boxlist1 that overlap less than thresh with boxlist2. + + For each box in boxlist1, we want its IOA to be more than minoverlap with + at least one of the boxes in boxlist2. If it does not, we remove it. + + Args: + boxlist1: BoxList holding N boxes. + boxlist2: BoxList holding M boxes. + minoverlap: Minimum required overlap between boxes, to count them as + overlapping. + + Returns: + A pruned boxlist with size [N', 4]. + """ + intersection_over_area = ioa(boxlist2, boxlist1) # [M, N] tensor + intersection_over_area = np.amax( + intersection_over_area, axis=0 + ) # [N] tensor + keep_bool = np.greater_equal(intersection_over_area, np.array(minoverlap)) + keep_inds = np.nonzero(keep_bool)[0] + new_boxlist1 = gather(boxlist1, keep_inds) + return new_boxlist1 + + +def prune_outside_window(boxlist, window): + """Prunes bounding boxes that fall outside a given window. + + This function prunes bounding boxes that even partially fall outside the given + window. See also ClipToWindow which only prunes bounding boxes that fall + completely outside the window, and clips any bounding boxes that partially + overflow. + + Args: + boxlist: a BoxList holding M_in boxes. + window: a numpy array of size 4, representing [ymin, xmin, ymax, xmax] + of the window. + + Returns: + pruned_corners: a tensor with shape [M_out, 4] where M_out <= M_in. + valid_indices: a tensor with shape [M_out] indexing the valid bounding boxes + in the input tensor. + """ + + y_min, x_min, y_max, x_max = np.array_split(boxlist.get(), 4, axis=1) + win_y_min = window[0] + win_x_min = window[1] + win_y_max = window[2] + win_x_max = window[3] + coordinate_violations = np.hstack( + [ + np.less(y_min, win_y_min), + np.less(x_min, win_x_min), + np.greater(y_max, win_y_max), + np.greater(x_max, win_x_max), + ] + ) + valid_indices = np.reshape( + np.where(np.logical_not(np.max(coordinate_violations, axis=1))), [-1] + ) + return gather(boxlist, valid_indices), valid_indices + + +def concatenate(boxlists, fields=None): + """Concatenate list of BoxLists. + + This op concatenates a list of input BoxLists into a larger BoxList. It also + handles concatenation of BoxList fields as long as the field tensor shapes + are equal except for the first dimension. + + Args: + boxlists: list of BoxList objects + fields: optional list of fields to also concatenate. By default, all + fields from the first BoxList in the list are included in the + concatenation. 
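A short illustration of the difference between clipping and pruning (same assumed imports): `clip_to_window` truncates partially-outside boxes and drops only the zero-area leftovers, while `prune_outside_window` removes any box that is not fully inside.

```python
import numpy as np

from timesformer.utils.ava_evaluation import np_box_list, np_box_list_ops

window = np.array([0.0, 0.0, 1.0, 1.0])
boxlist = np_box_list.BoxList(np.array(
    [[-0.2, -0.2, 0.5, 0.5],   # partially outside -> clipped
     [1.5, 1.5, 2.0, 2.0]],    # fully outside -> zero area after clipping
    dtype=np.float32))

clipped = np_box_list_ops.clip_to_window(boxlist, window)
print(clipped.get())  # [[0.  0.  0.5 0.5]]

pruned, valid = np_box_list_ops.prune_outside_window(boxlist, window)
print(pruned.num_boxes())  # 0 -- neither box lies fully inside the window
```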
+ + Returns: + a BoxList with number of boxes equal to + sum([boxlist.num_boxes() for boxlist in BoxList]) + Raises: + ValueError: if boxlists is invalid (i.e., is not a list, is empty, or + contains non BoxList objects), or if requested fields are not contained in + all boxlists + """ + if not isinstance(boxlists, list): + raise ValueError("boxlists should be a list") + if not boxlists: + raise ValueError("boxlists should have nonzero length") + for boxlist in boxlists: + if not isinstance(boxlist, np_box_list.BoxList): + raise ValueError( + "all elements of boxlists should be BoxList objects" + ) + concatenated = np_box_list.BoxList( + np.vstack([boxlist.get() for boxlist in boxlists]) + ) + if fields is None: + fields = boxlists[0].get_extra_fields() + for field in fields: + first_field_shape = boxlists[0].get_field(field).shape + first_field_shape = first_field_shape[1:] + for boxlist in boxlists: + if not boxlist.has_field(field): + raise ValueError("boxlist must contain all requested fields") + field_shape = boxlist.get_field(field).shape + field_shape = field_shape[1:] + if field_shape != first_field_shape: + raise ValueError( + "field %s must have same shape for all boxlists " + "except for the 0th dimension." % field + ) + concatenated_field = np.concatenate( + [boxlist.get_field(field) for boxlist in boxlists], axis=0 + ) + concatenated.add_field(field, concatenated_field) + return concatenated + + +def filter_scores_greater_than(boxlist, thresh): + """Filter to keep only boxes with score exceeding a given threshold. + + This op keeps the collection of boxes whose corresponding scores are + greater than the input threshold. + + Args: + boxlist: BoxList holding N boxes. Must contain a 'scores' field + representing detection scores. + thresh: scalar threshold + + Returns: + a BoxList holding M boxes where M <= N + + Raises: + ValueError: if boxlist not a BoxList object or if it does not + have a scores field + """ + if not isinstance(boxlist, np_box_list.BoxList): + raise ValueError("boxlist must be a BoxList") + if not boxlist.has_field("scores"): + raise ValueError("input boxlist must have 'scores' field") + scores = boxlist.get_field("scores") + if len(scores.shape) > 2: + raise ValueError("Scores should have rank 1 or 2") + if len(scores.shape) == 2 and scores.shape[1] != 1: + raise ValueError( + "Scores should have rank 1 or have shape " + "consistent with [None, 1]" + ) + high_score_indices = np.reshape( + np.where(np.greater(scores, thresh)), [-1] + ).astype(np.int32) + return gather(boxlist, high_score_indices) + + +def change_coordinate_frame(boxlist, window): + """Change coordinate frame of the boxlist to be relative to window's frame. + + Given a window of the form [ymin, xmin, ymax, xmax], + changes bounding box coordinates from boxlist to be relative to this window + (e.g., the min corner maps to (0,0) and the max corner maps to (1,1)). + + An example use case is data augmentation: where we are given groundtruth + boxes (boxlist) and would like to randomly crop the image to some + window (window). In this case we need to change the coordinate frame of + each groundtruth box to be relative to this new window. + + Args: + boxlist: A BoxList object holding N boxes. + window: a size 4 1-D numpy array. + + Returns: + Returns a BoxList object with N boxes. 
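To see the window arithmetic of `change_coordinate_frame` in action (sketch, assumed imports as above): a box identical to the crop window maps to the unit box.

```python
import numpy as np

from timesformer.utils.ava_evaluation import np_box_list, np_box_list_ops

# A crop window and a groundtruth box that exactly coincides with it.
window = np.array([0.25, 0.25, 0.75, 0.75])
boxlist = np_box_list.BoxList(
    np.array([[0.25, 0.25, 0.75, 0.75]], dtype=np.float32))

relative = np_box_list_ops.change_coordinate_frame(boxlist, window)
print(relative.get())  # [[0. 0. 1. 1.]] -- the box spans the whole window
```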
+ """ + win_height = window[2] - window[0] + win_width = window[3] - window[1] + boxlist_new = scale( + np_box_list.BoxList( + boxlist.get() - [window[0], window[1], window[0], window[1]] + ), + 1.0 / win_height, + 1.0 / win_width, + ) + _copy_extra_fields(boxlist_new, boxlist) + + return boxlist_new + + +def _copy_extra_fields(boxlist_to_copy_to, boxlist_to_copy_from): + """Copies the extra fields of boxlist_to_copy_from to boxlist_to_copy_to. + + Args: + boxlist_to_copy_to: BoxList to which extra fields are copied. + boxlist_to_copy_from: BoxList from which fields are copied. + + Returns: + boxlist_to_copy_to with extra fields. + """ + for field in boxlist_to_copy_from.get_extra_fields(): + boxlist_to_copy_to.add_field( + field, boxlist_to_copy_from.get_field(field) + ) + return boxlist_to_copy_to + + +def _update_valid_indices_by_removing_high_iou_boxes( + selected_indices, is_index_valid, intersect_over_union, threshold +): + max_iou = np.max(intersect_over_union[:, selected_indices], axis=1) + return np.logical_and(is_index_valid, max_iou <= threshold) diff --git a/TimeSformer/timesformer/utils/ava_evaluation/np_box_mask_list.py b/TimeSformer/timesformer/utils/ava_evaluation/np_box_mask_list.py new file mode 100755 index 0000000000000000000000000000000000000000..14bd8eb107028b910001b91096e95ab9157776e3 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/np_box_mask_list.py @@ -0,0 +1,73 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Numpy BoxMaskList classes and functions.""" + +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) +import numpy as np + +from . import np_box_list + + +class BoxMaskList(np_box_list.BoxList): + """Convenience wrapper for BoxList with masks. + + BoxMaskList extends the np_box_list.BoxList to contain masks as well. + In particular, its constructor receives both boxes and masks. Note that the + masks correspond to the full image. + """ + + def __init__(self, box_data, mask_data): + """Constructs box collection. + + Args: + box_data: a numpy array of shape [N, 4] representing box coordinates + mask_data: a numpy array of shape [N, height, width] representing masks + with values are in {0,1}. The masks correspond to the full + image. The height and the width will be equal to image height and width. + + Raises: + ValueError: if bbox data is not a numpy array + ValueError: if invalid dimensions for bbox data + ValueError: if mask data is not a numpy array + ValueError: if invalid dimension for mask data + """ + super(BoxMaskList, self).__init__(box_data) + if not isinstance(mask_data, np.ndarray): + raise ValueError("Mask data must be a numpy array.") + if len(mask_data.shape) != 3: + raise ValueError("Invalid dimensions for mask data.") + if mask_data.dtype != np.uint8: + raise ValueError( + "Invalid data type for mask data: uint8 is required." 
+ ) + if mask_data.shape[0] != box_data.shape[0]: + raise ValueError( + "There should be the same number of boxes and masks." + ) + self.data["masks"] = mask_data + + def get_masks(self): + """Convenience function for accessing masks. + + Returns: + a numpy array of shape [N, height, width] representing masks + """ + return self.get_field("masks") diff --git a/TimeSformer/timesformer/utils/ava_evaluation/np_box_mask_list_ops.py b/TimeSformer/timesformer/utils/ava_evaluation/np_box_mask_list_ops.py new file mode 100755 index 0000000000000000000000000000000000000000..be16a0a1daec081420afd8c1b8a33b6635723c95 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/np_box_mask_list_ops.py @@ -0,0 +1,428 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Operations for np_box_mask_list.BoxMaskList. + +Example box operations that are supported: + * Areas: compute bounding box areas + * IOU: pairwise intersection-over-union scores +""" +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) +import numpy as np + +from . import np_box_list_ops, np_box_mask_list, np_mask_ops + + +def box_list_to_box_mask_list(boxlist): + """Converts a BoxList containing 'masks' into a BoxMaskList. + + Args: + boxlist: An np_box_list.BoxList object. + + Returns: + An np_box_mask_list.BoxMaskList object. + + Raises: + ValueError: If boxlist does not contain `masks` as a field. + """ + if not boxlist.has_field("masks"): + raise ValueError("boxlist does not contain mask field.") + box_mask_list = np_box_mask_list.BoxMaskList( + box_data=boxlist.get(), mask_data=boxlist.get_field("masks") + ) + extra_fields = boxlist.get_extra_fields() + for key in extra_fields: + if key != "masks": + box_mask_list.data[key] = boxlist.get_field(key) + return box_mask_list + + +def area(box_mask_list): + """Computes area of masks. + + Args: + box_mask_list: np_box_mask_list.BoxMaskList holding N boxes and masks + + Returns: + a numpy array with shape [N*1] representing mask areas + """ + return np_mask_ops.area(box_mask_list.get_masks()) + + +def intersection(box_mask_list1, box_mask_list2): + """Compute pairwise intersection areas between masks. + + Args: + box_mask_list1: BoxMaskList holding N boxes and masks + box_mask_list2: BoxMaskList holding M boxes and masks + + Returns: + a numpy array with shape [N*M] representing pairwise intersection area + """ + return np_mask_ops.intersection( + box_mask_list1.get_masks(), box_mask_list2.get_masks() + ) + + +def iou(box_mask_list1, box_mask_list2): + """Computes pairwise intersection-over-union between box and mask collections. + + Args: + box_mask_list1: BoxMaskList holding N boxes and masks + box_mask_list2: BoxMaskList holding M boxes and masks + + Returns: + a numpy array with shape [N, M] representing pairwise iou scores. 
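A minimal sketch of `BoxMaskList` and the mask-level operations (assumed import path as before); note that masks must be `uint8`, full-image sized, with values in {0, 1}:

```python
import numpy as np

from timesformer.utils.ava_evaluation import np_box_mask_list, np_box_mask_list_ops

boxes = np.array([[0.0, 0.0, 2.0, 2.0]], dtype=np.float32)
masks = np.zeros((1, 4, 4), dtype=np.uint8)  # full-image uint8 masks in {0, 1}
masks[0, :2, :2] = 1

bml = np_box_mask_list.BoxMaskList(box_data=boxes, mask_data=masks)
print(np_box_mask_list_ops.area(bml))      # [4.] -- mask area in pixels
print(np_box_mask_list_ops.iou(bml, bml))  # [[1.]]
```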
+ """ + return np_mask_ops.iou( + box_mask_list1.get_masks(), box_mask_list2.get_masks() + ) + + +def ioa(box_mask_list1, box_mask_list2): + """Computes pairwise intersection-over-area between box and mask collections. + + Intersection-over-area (ioa) between two masks mask1 and mask2 is defined as + their intersection area over mask2's area. Note that ioa is not symmetric, + that is, IOA(mask1, mask2) != IOA(mask2, mask1). + + Args: + box_mask_list1: np_box_mask_list.BoxMaskList holding N boxes and masks + box_mask_list2: np_box_mask_list.BoxMaskList holding M boxes and masks + + Returns: + a numpy array with shape [N, M] representing pairwise ioa scores. + """ + return np_mask_ops.ioa( + box_mask_list1.get_masks(), box_mask_list2.get_masks() + ) + + +def gather(box_mask_list, indices, fields=None): + """Gather boxes from np_box_mask_list.BoxMaskList according to indices. + + By default, gather returns boxes corresponding to the input index list, as + well as all additional fields stored in the box_mask_list (indexing into the + first dimension). However one can optionally only gather from a + subset of fields. + + Args: + box_mask_list: np_box_mask_list.BoxMaskList holding N boxes + indices: a 1-d numpy array of type int_ + fields: (optional) list of fields to also gather from. If None (default), + all fields are gathered from. Pass an empty fields list to only gather + the box coordinates. + + Returns: + subbox_mask_list: a np_box_mask_list.BoxMaskList corresponding to the subset + of the input box_mask_list specified by indices + + Raises: + ValueError: if specified field is not contained in box_mask_list or if the + indices are not of type int_ + """ + if fields is not None: + if "masks" not in fields: + fields.append("masks") + return box_list_to_box_mask_list( + np_box_list_ops.gather( + boxlist=box_mask_list, indices=indices, fields=fields + ) + ) + + +def sort_by_field( + box_mask_list, field, order=np_box_list_ops.SortOrder.DESCEND +): + """Sort boxes and associated fields according to a scalar field. + + A common use case is reordering the boxes according to descending scores. + + Args: + box_mask_list: BoxMaskList holding N boxes. + field: A BoxMaskList field for sorting and reordering the BoxMaskList. + order: (Optional) 'descend' or 'ascend'. Default is descend. + + Returns: + sorted_box_mask_list: A sorted BoxMaskList with the field in the specified + order. + """ + return box_list_to_box_mask_list( + np_box_list_ops.sort_by_field( + boxlist=box_mask_list, field=field, order=order + ) + ) + + +def non_max_suppression( + box_mask_list, + max_output_size=10000, + iou_threshold=1.0, + score_threshold=-10.0, +): + """Non maximum suppression. + + This op greedily selects a subset of detection bounding boxes, pruning + away boxes that have high IOU (intersection over union) overlap (> thresh) + with already selected boxes. In each iteration, the detected bounding box with + highest score in the available pool is selected. + + Args: + box_mask_list: np_box_mask_list.BoxMaskList holding N boxes. Must contain + a 'scores' field representing detection scores. All scores belong to the + same class. + max_output_size: maximum number of retained boxes + iou_threshold: intersection over union threshold. + score_threshold: minimum score threshold. Remove the boxes with scores + less than this value. Default value is set to -10. A very + low threshold to pass pretty much all the boxes, unless + the user sets a different score threshold. 
+ + Returns: + an np_box_mask_list.BoxMaskList holding M boxes where M <= max_output_size + + Raises: + ValueError: if 'scores' field does not exist + ValueError: if threshold is not in [0, 1] + ValueError: if max_output_size < 0 + """ + if not box_mask_list.has_field("scores"): + raise ValueError("Field scores does not exist") + if iou_threshold < 0.0 or iou_threshold > 1.0: + raise ValueError("IOU threshold must be in [0, 1]") + if max_output_size < 0: + raise ValueError("max_output_size must be bigger than 0.") + + box_mask_list = filter_scores_greater_than(box_mask_list, score_threshold) + if box_mask_list.num_boxes() == 0: + return box_mask_list + + box_mask_list = sort_by_field(box_mask_list, "scores") + + # Prevent further computation if NMS is disabled. + if iou_threshold == 1.0: + if box_mask_list.num_boxes() > max_output_size: + selected_indices = np.arange(max_output_size) + return gather(box_mask_list, selected_indices) + else: + return box_mask_list + + masks = box_mask_list.get_masks() + num_masks = box_mask_list.num_boxes() + + # is_index_valid is True only for all remaining valid boxes, + is_index_valid = np.full(num_masks, 1, dtype=bool) + selected_indices = [] + num_output = 0 + for i in range(num_masks): + if num_output < max_output_size: + if is_index_valid[i]: + num_output += 1 + selected_indices.append(i) + is_index_valid[i] = False + valid_indices = np.where(is_index_valid)[0] + if valid_indices.size == 0: + break + + intersect_over_union = np_mask_ops.iou( + np.expand_dims(masks[i], axis=0), masks[valid_indices] + ) + intersect_over_union = np.squeeze(intersect_over_union, axis=0) + is_index_valid[valid_indices] = np.logical_and( + is_index_valid[valid_indices], + intersect_over_union <= iou_threshold, + ) + return gather(box_mask_list, np.array(selected_indices)) + + +def multi_class_non_max_suppression( + box_mask_list, score_thresh, iou_thresh, max_output_size +): + """Multi-class version of non maximum suppression. + + This op greedily selects a subset of detection bounding boxes, pruning + away boxes that have high IOU (intersection over union) overlap (> thresh) + with already selected boxes. It operates independently for each class for + which scores are provided (via the scores field of the input box_list), + pruning boxes with score less than a provided threshold prior to + applying NMS. + + Args: + box_mask_list: np_box_mask_list.BoxMaskList holding N boxes. Must contain a + 'scores' field representing detection scores. This scores field is a + tensor that can be 1 dimensional (in the case of a single class) or + 2-dimensional, in which case we assume that it takes the + shape [num_boxes, num_classes]. We further assume that this rank is known + statically and that scores.shape[1] is also known (i.e., the number of + classes is fixed and known at graph construction time). + score_thresh: scalar threshold for score (low scoring boxes are removed). + iou_thresh: scalar threshold for IOU (boxes that that high IOU overlap + with previously selected boxes are removed). + max_output_size: maximum number of retained boxes per class. + + Returns: + a box_mask_list holding M boxes with a rank-1 scores field representing + corresponding scores for each box with scores sorted in decreasing order + and a rank-1 classes field representing a class label for each box. + Raises: + ValueError: if iou_thresh is not in [0, 1] or if input box_mask_list does + not have a valid scores field. 
+ """ + if not 0 <= iou_thresh <= 1.0: + raise ValueError("thresh must be between 0 and 1") + if not isinstance(box_mask_list, np_box_mask_list.BoxMaskList): + raise ValueError("box_mask_list must be a box_mask_list") + if not box_mask_list.has_field("scores"): + raise ValueError("input box_mask_list must have 'scores' field") + scores = box_mask_list.get_field("scores") + if len(scores.shape) == 1: + scores = np.reshape(scores, [-1, 1]) + elif len(scores.shape) == 2: + if scores.shape[1] is None: + raise ValueError( + "scores field must have statically defined second " "dimension" + ) + else: + raise ValueError("scores field must be of rank 1 or 2") + + num_boxes = box_mask_list.num_boxes() + num_scores = scores.shape[0] + num_classes = scores.shape[1] + + if num_boxes != num_scores: + raise ValueError("Incorrect scores field length: actual vs expected.") + + selected_boxes_list = [] + for class_idx in range(num_classes): + box_mask_list_and_class_scores = np_box_mask_list.BoxMaskList( + box_data=box_mask_list.get(), mask_data=box_mask_list.get_masks() + ) + class_scores = np.reshape(scores[0:num_scores, class_idx], [-1]) + box_mask_list_and_class_scores.add_field("scores", class_scores) + box_mask_list_filt = filter_scores_greater_than( + box_mask_list_and_class_scores, score_thresh + ) + nms_result = non_max_suppression( + box_mask_list_filt, + max_output_size=max_output_size, + iou_threshold=iou_thresh, + score_threshold=score_thresh, + ) + nms_result.add_field( + "classes", np.zeros_like(nms_result.get_field("scores")) + class_idx + ) + selected_boxes_list.append(nms_result) + selected_boxes = np_box_list_ops.concatenate(selected_boxes_list) + sorted_boxes = np_box_list_ops.sort_by_field(selected_boxes, "scores") + return box_list_to_box_mask_list(boxlist=sorted_boxes) + + +def prune_non_overlapping_masks(box_mask_list1, box_mask_list2, minoverlap=0.0): + """Prunes the boxes in list1 that overlap less than thresh with list2. + + For each mask in box_mask_list1, we want its IOA to be more than minoverlap + with at least one of the masks in box_mask_list2. If it does not, we remove + it. If the masks are not full size image, we do the pruning based on boxes. + + Args: + box_mask_list1: np_box_mask_list.BoxMaskList holding N boxes and masks. + box_mask_list2: np_box_mask_list.BoxMaskList holding M boxes and masks. + minoverlap: Minimum required overlap between boxes, to count them as + overlapping. + + Returns: + A pruned box_mask_list with size [N', 4]. + """ + intersection_over_area = ioa( + box_mask_list2, box_mask_list1 + ) # [M, N] tensor + intersection_over_area = np.amax( + intersection_over_area, axis=0 + ) # [N] tensor + keep_bool = np.greater_equal(intersection_over_area, np.array(minoverlap)) + keep_inds = np.nonzero(keep_bool)[0] + new_box_mask_list1 = gather(box_mask_list1, keep_inds) + return new_box_mask_list1 + + +def concatenate(box_mask_lists, fields=None): + """Concatenate list of box_mask_lists. + + This op concatenates a list of input box_mask_lists into a larger + box_mask_list. It also + handles concatenation of box_mask_list fields as long as the field tensor + shapes are equal except for the first dimension. + + Args: + box_mask_lists: list of np_box_mask_list.BoxMaskList objects + fields: optional list of fields to also concatenate. By default, all + fields from the first BoxMaskList in the list are included in the + concatenation. 
+ + Returns: + a box_mask_list with number of boxes equal to + sum([box_mask_list.num_boxes() for box_mask_list in box_mask_list]) + Raises: + ValueError: if box_mask_lists is invalid (i.e., is not a list, is empty, or + contains non box_mask_list objects), or if requested fields are not + contained in all box_mask_lists + """ + if fields is not None: + if "masks" not in fields: + fields.append("masks") + return box_list_to_box_mask_list( + np_box_list_ops.concatenate(boxlists=box_mask_lists, fields=fields) + ) + + +def filter_scores_greater_than(box_mask_list, thresh): + """Filter to keep only boxes and masks with score exceeding a given threshold. + + This op keeps the collection of boxes and masks whose corresponding scores are + greater than the input threshold. + + Args: + box_mask_list: BoxMaskList holding N boxes and masks. Must contain a + 'scores' field representing detection scores. + thresh: scalar threshold + + Returns: + a BoxMaskList holding M boxes and masks where M <= N + + Raises: + ValueError: if box_mask_list not a np_box_mask_list.BoxMaskList object or + if it does not have a scores field + """ + if not isinstance(box_mask_list, np_box_mask_list.BoxMaskList): + raise ValueError("box_mask_list must be a BoxMaskList") + if not box_mask_list.has_field("scores"): + raise ValueError("input box_mask_list must have 'scores' field") + scores = box_mask_list.get_field("scores") + if len(scores.shape) > 2: + raise ValueError("Scores should have rank 1 or 2") + if len(scores.shape) == 2 and scores.shape[1] != 1: + raise ValueError( + "Scores should have rank 1 or have shape " + "consistent with [None, 1]" + ) + high_score_indices = np.reshape( + np.where(np.greater(scores, thresh)), [-1] + ).astype(np.int32) + return gather(box_mask_list, high_score_indices) diff --git a/TimeSformer/timesformer/utils/ava_evaluation/np_box_ops.py b/TimeSformer/timesformer/utils/ava_evaluation/np_box_ops.py new file mode 100755 index 0000000000000000000000000000000000000000..1d7400562dfa29da4c5801c2e525683dfdefe045 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/np_box_ops.py @@ -0,0 +1,108 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Operations for [N, 4] numpy arrays representing bounding boxes. + +Example box operations that are supported: + * Areas: compute bounding box areas + * IOU: pairwise intersection-over-union scores +""" +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) +import numpy as np + + +def area(boxes): + """Computes area of boxes. + + Args: + boxes: Numpy array with shape [N, 4] holding N boxes + + Returns: + a numpy array with shape [N*1] representing box areas + """ + return (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1]) + + +def intersection(boxes1, boxes2): + """Compute pairwise intersection areas between boxes. 
+ + Args: + boxes1: a numpy array with shape [N, 4] holding N boxes + boxes2: a numpy array with shape [M, 4] holding M boxes + + Returns: + a numpy array with shape [N*M] representing pairwise intersection area + """ + [y_min1, x_min1, y_max1, x_max1] = np.split(boxes1, 4, axis=1) + [y_min2, x_min2, y_max2, x_max2] = np.split(boxes2, 4, axis=1) + + all_pairs_min_ymax = np.minimum(y_max1, np.transpose(y_max2)) + all_pairs_max_ymin = np.maximum(y_min1, np.transpose(y_min2)) + intersect_heights = np.maximum( + np.zeros(all_pairs_max_ymin.shape), + all_pairs_min_ymax - all_pairs_max_ymin, + ) + all_pairs_min_xmax = np.minimum(x_max1, np.transpose(x_max2)) + all_pairs_max_xmin = np.maximum(x_min1, np.transpose(x_min2)) + intersect_widths = np.maximum( + np.zeros(all_pairs_max_xmin.shape), + all_pairs_min_xmax - all_pairs_max_xmin, + ) + return intersect_heights * intersect_widths + + +def iou(boxes1, boxes2): + """Computes pairwise intersection-over-union between box collections. + + Args: + boxes1: a numpy array with shape [N, 4] holding N boxes. + boxes2: a numpy array with shape [M, 4] holding N boxes. + + Returns: + a numpy array with shape [N, M] representing pairwise iou scores. + """ + intersect = intersection(boxes1, boxes2) + area1 = area(boxes1) + area2 = area(boxes2) + union = ( + np.expand_dims(area1, axis=1) + + np.expand_dims(area2, axis=0) + - intersect + ) + return intersect / union + + +def ioa(boxes1, boxes2): + """Computes pairwise intersection-over-area between box collections. + + Intersection-over-area (ioa) between two boxes box1 and box2 is defined as + their intersection area over box2's area. Note that ioa is not symmetric, + that is, IOA(box1, box2) != IOA(box2, box1). + + Args: + boxes1: a numpy array with shape [N, 4] holding N boxes. + boxes2: a numpy array with shape [M, 4] holding N boxes. + + Returns: + a numpy array with shape [N, M] representing pairwise ioa scores. + """ + intersect = intersection(boxes1, boxes2) + areas = np.expand_dims(area(boxes2), axis=0) + return intersect / areas diff --git a/TimeSformer/timesformer/utils/ava_evaluation/np_mask_ops.py b/TimeSformer/timesformer/utils/ava_evaluation/np_mask_ops.py new file mode 100755 index 0000000000000000000000000000000000000000..4f3ec219e520acee373ca44ef8e688bdba020fa7 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/np_mask_ops.py @@ -0,0 +1,130 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Operations for [N, height, width] numpy arrays representing masks. + +Example mask operations that are supported: + * Areas: compute mask areas + * IOU: pairwise intersection-over-union scores +""" +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) +import numpy as np + +EPSILON = 1e-7 + + +def area(masks): + """Computes area of masks. 
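The raw-array ops need no `BoxList` wrapper at all. A small check of the pairwise math (assumed imports as above):

```python
import numpy as np

from timesformer.utils.ava_evaluation import np_box_ops

boxes1 = np.array([[0.0, 0.0, 2.0, 2.0]], dtype=np.float32)
boxes2 = np.array([[1.0, 1.0, 3.0, 3.0],
                   [0.0, 0.0, 2.0, 2.0]], dtype=np.float32)

print(np_box_ops.intersection(boxes1, boxes2))  # [[1. 4.]]
print(np_box_ops.iou(boxes1, boxes2))           # [[0.14285714 1.]], i.e. 1/7 and 1
```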
+ + Args: + masks: Numpy array with shape [N, height, width] holding N masks. Masks + values are of type np.uint8 and values are in {0,1}. + + Returns: + a numpy array with shape [N*1] representing mask areas. + + Raises: + ValueError: If masks.dtype is not np.uint8 + """ + if masks.dtype != np.uint8: + raise ValueError("Masks type should be np.uint8") + return np.sum(masks, axis=(1, 2), dtype=np.float32) + + +def intersection(masks1, masks2): + """Compute pairwise intersection areas between masks. + + Args: + masks1: a numpy array with shape [N, height, width] holding N masks. Masks + values are of type np.uint8 and values are in {0,1}. + masks2: a numpy array with shape [M, height, width] holding M masks. Masks + values are of type np.uint8 and values are in {0,1}. + + Returns: + a numpy array with shape [N*M] representing pairwise intersection area. + + Raises: + ValueError: If masks1 and masks2 are not of type np.uint8. + """ + if masks1.dtype != np.uint8 or masks2.dtype != np.uint8: + raise ValueError("masks1 and masks2 should be of type np.uint8") + n = masks1.shape[0] + m = masks2.shape[0] + answer = np.zeros([n, m], dtype=np.float32) + for i in np.arange(n): + for j in np.arange(m): + answer[i, j] = np.sum( + np.minimum(masks1[i], masks2[j]), dtype=np.float32 + ) + return answer + + +def iou(masks1, masks2): + """Computes pairwise intersection-over-union between mask collections. + + Args: + masks1: a numpy array with shape [N, height, width] holding N masks. Masks + values are of type np.uint8 and values are in {0,1}. + masks2: a numpy array with shape [M, height, width] holding N masks. Masks + values are of type np.uint8 and values are in {0,1}. + + Returns: + a numpy array with shape [N, M] representing pairwise iou scores. + + Raises: + ValueError: If masks1 and masks2 are not of type np.uint8. + """ + if masks1.dtype != np.uint8 or masks2.dtype != np.uint8: + raise ValueError("masks1 and masks2 should be of type np.uint8") + intersect = intersection(masks1, masks2) + area1 = area(masks1) + area2 = area(masks2) + union = ( + np.expand_dims(area1, axis=1) + + np.expand_dims(area2, axis=0) + - intersect + ) + return intersect / np.maximum(union, EPSILON) + + +def ioa(masks1, masks2): + """Computes pairwise intersection-over-area between box collections. + + Intersection-over-area (ioa) between two masks, mask1 and mask2 is defined as + their intersection area over mask2's area. Note that ioa is not symmetric, + that is, IOA(mask1, mask2) != IOA(mask2, mask1). + + Args: + masks1: a numpy array with shape [N, height, width] holding N masks. Masks + values are of type np.uint8 and values are in {0,1}. + masks2: a numpy array with shape [M, height, width] holding N masks. Masks + values are of type np.uint8 and values are in {0,1}. + + Returns: + a numpy array with shape [N, M] representing pairwise ioa scores. + + Raises: + ValueError: If masks1 and masks2 are not of type np.uint8. 
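And the mask counterparts, where areas are pixel counts and `EPSILON` guards the divisions (sketch, same import assumption):

```python
import numpy as np

from timesformer.utils.ava_evaluation import np_mask_ops

m1 = np.zeros((1, 4, 4), dtype=np.uint8)
m1[0, :2, :] = 1   # 8 pixels: the top two rows
m2 = np.zeros((1, 4, 4), dtype=np.uint8)
m2[0, :, :2] = 1   # 8 pixels: the left two columns; 2x2 overlap with m1

print(np_mask_ops.area(m1))     # [8.]
print(np_mask_ops.iou(m1, m2))  # [[0.33333334]] -- 4 / (8 + 8 - 4)
print(np_mask_ops.ioa(m1, m2))  # [[0.5]]        -- 4 / area(m2)
```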
+ """ + if masks1.dtype != np.uint8 or masks2.dtype != np.uint8: + raise ValueError("masks1 and masks2 should be of type np.uint8") + intersect = intersection(masks1, masks2) + areas = np.expand_dims(area(masks2), axis=0) + return intersect / (areas + EPSILON) diff --git a/TimeSformer/timesformer/utils/ava_evaluation/object_detection_evaluation.py b/TimeSformer/timesformer/utils/ava_evaluation/object_detection_evaluation.py new file mode 100755 index 0000000000000000000000000000000000000000..7a54021a6cbb9efae874eebe03522d19c7f6f004 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/object_detection_evaluation.py @@ -0,0 +1,824 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""object_detection_evaluation module. + +ObjectDetectionEvaluation is a class which manages ground truth information of a +object detection dataset, and computes frequently used detection metrics such as +Precision, Recall, CorLoc of the provided detection results. +It supports the following operations: +1) Add ground truth information of images sequentially. +2) Add detection result of images sequentially. +3) Evaluate detection metrics on already inserted detection results. +4) Write evaluation result into a pickle file for future processing or + visualization. + +Note: This module operates on numpy boxes and box lists. +""" + +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) +import collections +import logging +import numpy as np +from abc import ABCMeta, abstractmethod + +from . import label_map_util, metrics, per_image_evaluation, standard_fields + + +class DetectionEvaluator(object): + """Interface for object detection evalution classes. + + Example usage of the Evaluator: + ------------------------------ + evaluator = DetectionEvaluator(categories) + + # Detections and groundtruth for image 1. + evaluator.add_single_groundtruth_image_info(...) + evaluator.add_single_detected_image_info(...) + + # Detections and groundtruth for image 2. + evaluator.add_single_groundtruth_image_info(...) + evaluator.add_single_detected_image_info(...) + + metrics_dict = evaluator.evaluate() + """ + + __metaclass__ = ABCMeta + + def __init__(self, categories): + """Constructor. + + Args: + categories: A list of dicts, each of which has the following keys - + 'id': (required) an integer id uniquely identifying this category. + 'name': (required) string representing category name e.g., 'cat', 'dog'. + """ + self._categories = categories + + @abstractmethod + def add_single_ground_truth_image_info(self, image_id, groundtruth_dict): + """Adds groundtruth for a single image to be used for evaluation. + + Args: + image_id: A unique string/integer identifier for the image. + groundtruth_dict: A dictionary of groundtruth numpy arrays required + for evaluations. 
+ """ + pass + + @abstractmethod + def add_single_detected_image_info(self, image_id, detections_dict): + """Adds detections for a single image to be used for evaluation. + + Args: + image_id: A unique string/integer identifier for the image. + detections_dict: A dictionary of detection numpy arrays required + for evaluation. + """ + pass + + @abstractmethod + def evaluate(self): + """Evaluates detections and returns a dictionary of metrics.""" + pass + + @abstractmethod + def clear(self): + """Clears the state to prepare for a fresh evaluation.""" + pass + + +class ObjectDetectionEvaluator(DetectionEvaluator): + """A class to evaluate detections.""" + + def __init__( + self, + categories, + matching_iou_threshold=0.5, + evaluate_corlocs=False, + metric_prefix=None, + use_weighted_mean_ap=False, + evaluate_masks=False, + ): + """Constructor. + + Args: + categories: A list of dicts, each of which has the following keys - + 'id': (required) an integer id uniquely identifying this category. + 'name': (required) string representing category name e.g., 'cat', 'dog'. + matching_iou_threshold: IOU threshold to use for matching groundtruth + boxes to detection boxes. + evaluate_corlocs: (optional) boolean which determines if corloc scores + are to be returned or not. + metric_prefix: (optional) string prefix for metric name; if None, no + prefix is used. + use_weighted_mean_ap: (optional) boolean which determines if the mean + average precision is computed directly from the scores and tp_fp_labels + of all classes. + evaluate_masks: If False, evaluation will be performed based on boxes. + If True, mask evaluation will be performed instead. + + Raises: + ValueError: If the category ids are not 1-indexed. + """ + super(ObjectDetectionEvaluator, self).__init__(categories) + self._num_classes = max([cat["id"] for cat in categories]) + if min(cat["id"] for cat in categories) < 1: + raise ValueError("Classes should be 1-indexed.") + self._matching_iou_threshold = matching_iou_threshold + self._use_weighted_mean_ap = use_weighted_mean_ap + self._label_id_offset = 1 + self._evaluate_masks = evaluate_masks + self._evaluation = ObjectDetectionEvaluation( + num_groundtruth_classes=self._num_classes, + matching_iou_threshold=self._matching_iou_threshold, + use_weighted_mean_ap=self._use_weighted_mean_ap, + label_id_offset=self._label_id_offset, + ) + self._image_ids = set([]) + self._evaluate_corlocs = evaluate_corlocs + self._metric_prefix = (metric_prefix + "_") if metric_prefix else "" + + def add_single_ground_truth_image_info(self, image_id, groundtruth_dict): + """Adds groundtruth for a single image to be used for evaluation. + + Args: + image_id: A unique string/integer identifier for the image. + groundtruth_dict: A dictionary containing - + standard_fields.InputDataFields.groundtruth_boxes: float32 numpy array + of shape [num_boxes, 4] containing `num_boxes` groundtruth boxes of + the format [ymin, xmin, ymax, xmax] in absolute image coordinates. + standard_fields.InputDataFields.groundtruth_classes: integer numpy array + of shape [num_boxes] containing 1-indexed groundtruth classes for the + boxes. + standard_fields.InputDataFields.groundtruth_difficult: Optional length + M numpy boolean array denoting whether a ground truth box is a + difficult instance or not. This field is optional to support the case + that no boxes are difficult. + standard_fields.InputDataFields.groundtruth_instance_masks: Optional + numpy array of shape [num_boxes, height, width] with values in {0, 1}. 
+ + Raises: + ValueError: On adding groundtruth for an image more than once. Will also + raise error if instance masks are not in groundtruth dictionary. + """ + if image_id in self._image_ids: + raise ValueError("Image with id {} already added.".format(image_id)) + + groundtruth_classes = ( + groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_classes + ] + - self._label_id_offset + ) + # If the key is not present in the groundtruth_dict or the array is empty + # (unless there are no annotations for the groundtruth on this image) + # use values from the dictionary or insert None otherwise. + if standard_fields.InputDataFields.groundtruth_difficult in groundtruth_dict.keys() and ( + groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_difficult + ].size + or not groundtruth_classes.size + ): + groundtruth_difficult = groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_difficult + ] + else: + groundtruth_difficult = None + if not len(self._image_ids) % 1000: + logging.warn( + "image %s does not have groundtruth difficult flag specified", + image_id, + ) + groundtruth_masks = None + if self._evaluate_masks: + if ( + standard_fields.InputDataFields.groundtruth_instance_masks + not in groundtruth_dict + ): + raise ValueError( + "Instance masks not in groundtruth dictionary." + ) + groundtruth_masks = groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_instance_masks + ] + self._evaluation.add_single_ground_truth_image_info( + image_key=image_id, + groundtruth_boxes=groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_boxes + ], + groundtruth_class_labels=groundtruth_classes, + groundtruth_is_difficult_list=groundtruth_difficult, + groundtruth_masks=groundtruth_masks, + ) + self._image_ids.update([image_id]) + + def add_single_detected_image_info(self, image_id, detections_dict): + """Adds detections for a single image to be used for evaluation. + + Args: + image_id: A unique string/integer identifier for the image. + detections_dict: A dictionary containing - + standard_fields.DetectionResultFields.detection_boxes: float32 numpy + array of shape [num_boxes, 4] containing `num_boxes` detection boxes + of the format [ymin, xmin, ymax, xmax] in absolute image coordinates. + standard_fields.DetectionResultFields.detection_scores: float32 numpy + array of shape [num_boxes] containing detection scores for the boxes. + standard_fields.DetectionResultFields.detection_classes: integer numpy + array of shape [num_boxes] containing 1-indexed detection classes for + the boxes. + standard_fields.DetectionResultFields.detection_masks: uint8 numpy + array of shape [num_boxes, height, width] containing `num_boxes` masks + of values ranging between 0 and 1. + + Raises: + ValueError: If detection masks are not in detections dictionary. + """ + detection_classes = ( + detections_dict[ + standard_fields.DetectionResultFields.detection_classes + ] + - self._label_id_offset + ) + detection_masks = None + if self._evaluate_masks: + if ( + standard_fields.DetectionResultFields.detection_masks + not in detections_dict + ): + raise ValueError( + "Detection masks not in detections dictionary." 
+ ) + detection_masks = detections_dict[ + standard_fields.DetectionResultFields.detection_masks + ] + self._evaluation.add_single_detected_image_info( + image_key=image_id, + detected_boxes=detections_dict[ + standard_fields.DetectionResultFields.detection_boxes + ], + detected_scores=detections_dict[ + standard_fields.DetectionResultFields.detection_scores + ], + detected_class_labels=detection_classes, + detected_masks=detection_masks, + ) + + def evaluate(self): + """Compute evaluation result. + + Returns: + A dictionary of metrics with the following fields - + + 1. summary_metrics: + 'Precision/mAP@IOU': mean average precision at + the specified IOU threshold. + + 2. per_category_ap: category specific results with keys of the form + 'PerformanceByCategory/mAP@IOU/category'. + """ + ( + per_class_ap, + mean_ap, + _, + _, + per_class_corloc, + mean_corloc, + ) = self._evaluation.evaluate() + pascal_metrics = { + self._metric_prefix + + "Precision/mAP@{}IOU".format( + self._matching_iou_threshold + ): mean_ap + } + if self._evaluate_corlocs: + pascal_metrics[ + self._metric_prefix + + "Precision/meanCorLoc@{}IOU".format( + self._matching_iou_threshold + ) + ] = mean_corloc + category_index = label_map_util.create_category_index(self._categories) + for idx in range(per_class_ap.size): + if idx + self._label_id_offset in category_index: + display_name = ( + self._metric_prefix + + "PerformanceByCategory/AP@{}IOU/{}".format( + self._matching_iou_threshold, + category_index[idx + self._label_id_offset]["name"], + ) + ) + pascal_metrics[display_name] = per_class_ap[idx] + + # Optionally add CorLoc metrics.classes + if self._evaluate_corlocs: + display_name = ( + self._metric_prefix + + "PerformanceByCategory/CorLoc@{}IOU/{}".format( + self._matching_iou_threshold, + category_index[idx + self._label_id_offset]["name"], + ) + ) + pascal_metrics[display_name] = per_class_corloc[idx] + + return pascal_metrics + + def clear(self): + """Clears the state to prepare for a fresh evaluation.""" + self._evaluation = ObjectDetectionEvaluation( + num_groundtruth_classes=self._num_classes, + matching_iou_threshold=self._matching_iou_threshold, + use_weighted_mean_ap=self._use_weighted_mean_ap, + label_id_offset=self._label_id_offset, + ) + self._image_ids.clear() + + +class PascalDetectionEvaluator(ObjectDetectionEvaluator): + """A class to evaluate detections using PASCAL metrics.""" + + def __init__(self, categories, matching_iou_threshold=0.5): + super(PascalDetectionEvaluator, self).__init__( + categories, + matching_iou_threshold=matching_iou_threshold, + evaluate_corlocs=False, + metric_prefix="PascalBoxes", + use_weighted_mean_ap=False, + ) + + +class WeightedPascalDetectionEvaluator(ObjectDetectionEvaluator): + """A class to evaluate detections using weighted PASCAL metrics. + + Weighted PASCAL metrics computes the mean average precision as the average + precision given the scores and tp_fp_labels of all classes. In comparison, + PASCAL metrics computes the mean average precision as the mean of the + per-class average precisions. + + This definition is very similar to the mean of the per-class average + precisions weighted by class frequency. However, they are typically not the + same as the average precision is not a linear function of the scores and + tp_fp_labels. 
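+
+    For intuition (illustrative numbers, not from a real run): if class A
+    contributes 99 scored detections and class B only 1, the weighted variant
+    computes a single AP over the pooled 100 detections, so class A dominates
+    the result, while the unweighted variant averages the two per-class APs
+    equally.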
+ """ + + def __init__(self, categories, matching_iou_threshold=0.5): + super(WeightedPascalDetectionEvaluator, self).__init__( + categories, + matching_iou_threshold=matching_iou_threshold, + evaluate_corlocs=False, + metric_prefix="WeightedPascalBoxes", + use_weighted_mean_ap=True, + ) + + +class PascalInstanceSegmentationEvaluator(ObjectDetectionEvaluator): + """A class to evaluate instance masks using PASCAL metrics.""" + + def __init__(self, categories, matching_iou_threshold=0.5): + super(PascalInstanceSegmentationEvaluator, self).__init__( + categories, + matching_iou_threshold=matching_iou_threshold, + evaluate_corlocs=False, + metric_prefix="PascalMasks", + use_weighted_mean_ap=False, + evaluate_masks=True, + ) + + +class WeightedPascalInstanceSegmentationEvaluator(ObjectDetectionEvaluator): + """A class to evaluate instance masks using weighted PASCAL metrics. + + Weighted PASCAL metrics computes the mean average precision as the average + precision given the scores and tp_fp_labels of all classes. In comparison, + PASCAL metrics computes the mean average precision as the mean of the + per-class average precisions. + + This definition is very similar to the mean of the per-class average + precisions weighted by class frequency. However, they are typically not the + same as the average precision is not a linear function of the scores and + tp_fp_labels. + """ + + def __init__(self, categories, matching_iou_threshold=0.5): + super(WeightedPascalInstanceSegmentationEvaluator, self).__init__( + categories, + matching_iou_threshold=matching_iou_threshold, + evaluate_corlocs=False, + metric_prefix="WeightedPascalMasks", + use_weighted_mean_ap=True, + evaluate_masks=True, + ) + + +class OpenImagesDetectionEvaluator(ObjectDetectionEvaluator): + """A class to evaluate detections using Open Images V2 metrics. + + Open Images V2 introduce group_of type of bounding boxes and this metric + handles those boxes appropriately. + """ + + def __init__( + self, categories, matching_iou_threshold=0.5, evaluate_corlocs=False + ): + """Constructor. + + Args: + categories: A list of dicts, each of which has the following keys - + 'id': (required) an integer id uniquely identifying this category. + 'name': (required) string representing category name e.g., 'cat', 'dog'. + matching_iou_threshold: IOU threshold to use for matching groundtruth + boxes to detection boxes. + evaluate_corlocs: if True, additionally evaluates and returns CorLoc. + """ + super(OpenImagesDetectionEvaluator, self).__init__( + categories, + matching_iou_threshold, + evaluate_corlocs, + metric_prefix="OpenImagesV2", + ) + + def add_single_ground_truth_image_info(self, image_id, groundtruth_dict): + """Adds groundtruth for a single image to be used for evaluation. + + Args: + image_id: A unique string/integer identifier for the image. + groundtruth_dict: A dictionary containing - + standard_fields.InputDataFields.groundtruth_boxes: float32 numpy array + of shape [num_boxes, 4] containing `num_boxes` groundtruth boxes of + the format [ymin, xmin, ymax, xmax] in absolute image coordinates. + standard_fields.InputDataFields.groundtruth_classes: integer numpy array + of shape [num_boxes] containing 1-indexed groundtruth classes for the + boxes. + standard_fields.InputDataFields.groundtruth_group_of: Optional length + M numpy boolean array denoting whether a groundtruth box contains a + group of instances. + + Raises: + ValueError: On adding groundtruth for an image more than once. 
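+
+        Example (illustrative values only; `evaluator` stands for an instance
+        of this class and numpy is assumed imported as np):
+            evaluator.add_single_ground_truth_image_info(
+                image_id="image_0",
+                groundtruth_dict={
+                    standard_fields.InputDataFields.groundtruth_boxes:
+                        np.array([[10.0, 10.0, 50.0, 50.0]], dtype=np.float32),
+                    standard_fields.InputDataFields.groundtruth_classes:
+                        np.array([1], dtype=int),
+                    standard_fields.InputDataFields.groundtruth_group_of:
+                        np.array([False]),
+                },
+            )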
+ """ + if image_id in self._image_ids: + raise ValueError("Image with id {} already added.".format(image_id)) + + groundtruth_classes = ( + groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_classes + ] + - self._label_id_offset + ) + # If the key is not present in the groundtruth_dict or the array is empty + # (unless there are no annotations for the groundtruth on this image) + # use values from the dictionary or insert None otherwise. + if standard_fields.InputDataFields.groundtruth_group_of in groundtruth_dict.keys() and ( + groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_group_of + ].size + or not groundtruth_classes.size + ): + groundtruth_group_of = groundtruth_dict[ + standard_fields.InputDataFields.groundtruth_group_of + ] + else: + groundtruth_group_of = None + if not len(self._image_ids) % 1000: + logging.warn( + "image %s does not have groundtruth group_of flag specified", + image_id, + ) + self._evaluation.add_single_ground_truth_image_info( + image_id, + groundtruth_dict[standard_fields.InputDataFields.groundtruth_boxes], + groundtruth_classes, + groundtruth_is_difficult_list=None, + groundtruth_is_group_of_list=groundtruth_group_of, + ) + self._image_ids.update([image_id]) + + +ObjectDetectionEvalMetrics = collections.namedtuple( + "ObjectDetectionEvalMetrics", + [ + "average_precisions", + "mean_ap", + "precisions", + "recalls", + "corlocs", + "mean_corloc", + ], +) + + +class ObjectDetectionEvaluation(object): + """Internal implementation of Pascal object detection metrics.""" + + def __init__( + self, + num_groundtruth_classes, + matching_iou_threshold=0.5, + nms_iou_threshold=1.0, + nms_max_output_boxes=10000, + use_weighted_mean_ap=False, + label_id_offset=0, + ): + if num_groundtruth_classes < 1: + raise ValueError( + "Need at least 1 groundtruth class for evaluation." + ) + + self.per_image_eval = per_image_evaluation.PerImageEvaluation( + num_groundtruth_classes=num_groundtruth_classes, + matching_iou_threshold=matching_iou_threshold, + ) + self.num_class = num_groundtruth_classes + self.use_weighted_mean_ap = use_weighted_mean_ap + self.label_id_offset = label_id_offset + + self.groundtruth_boxes = {} + self.groundtruth_class_labels = {} + self.groundtruth_masks = {} + self.groundtruth_is_difficult_list = {} + self.groundtruth_is_group_of_list = {} + self.num_gt_instances_per_class = np.zeros(self.num_class, dtype=int) + self.num_gt_imgs_per_class = np.zeros(self.num_class, dtype=int) + + self._initialize_detections() + + def _initialize_detections(self): + self.detection_keys = set() + self.scores_per_class = [[] for _ in range(self.num_class)] + self.tp_fp_labels_per_class = [[] for _ in range(self.num_class)] + self.num_images_correctly_detected_per_class = np.zeros(self.num_class) + self.average_precision_per_class = np.empty(self.num_class, dtype=float) + self.average_precision_per_class.fill(np.nan) + self.precisions_per_class = [] + self.recalls_per_class = [] + self.corloc_per_class = np.ones(self.num_class, dtype=float) + + def clear_detections(self): + self._initialize_detections() + + def add_single_ground_truth_image_info( + self, + image_key, + groundtruth_boxes, + groundtruth_class_labels, + groundtruth_is_difficult_list=None, + groundtruth_is_group_of_list=None, + groundtruth_masks=None, + ): + """Adds groundtruth for a single image to be used for evaluation. + + Args: + image_key: A unique string/integer identifier for the image. 
+ groundtruth_boxes: float32 numpy array of shape [num_boxes, 4] + containing `num_boxes` groundtruth boxes of the format + [ymin, xmin, ymax, xmax] in absolute image coordinates. + groundtruth_class_labels: integer numpy array of shape [num_boxes] + containing 0-indexed groundtruth classes for the boxes. + groundtruth_is_difficult_list: A length M numpy boolean array denoting + whether a ground truth box is a difficult instance or not. To support + the case that no boxes are difficult, it is by default set as None. + groundtruth_is_group_of_list: A length M numpy boolean array denoting + whether a ground truth box is a group-of box or not. To support + the case that no boxes are groups-of, it is by default set as None. + groundtruth_masks: uint8 numpy array of shape + [num_boxes, height, width] containing `num_boxes` groundtruth masks. + The mask values range from 0 to 1. + """ + if image_key in self.groundtruth_boxes: + logging.warn( + "image %s has already been added to the ground truth database.", + image_key, + ) + return + + self.groundtruth_boxes[image_key] = groundtruth_boxes + self.groundtruth_class_labels[image_key] = groundtruth_class_labels + self.groundtruth_masks[image_key] = groundtruth_masks + if groundtruth_is_difficult_list is None: + num_boxes = groundtruth_boxes.shape[0] + groundtruth_is_difficult_list = np.zeros(num_boxes, dtype=bool) + self.groundtruth_is_difficult_list[ + image_key + ] = groundtruth_is_difficult_list.astype(dtype=bool) + if groundtruth_is_group_of_list is None: + num_boxes = groundtruth_boxes.shape[0] + groundtruth_is_group_of_list = np.zeros(num_boxes, dtype=bool) + self.groundtruth_is_group_of_list[ + image_key + ] = groundtruth_is_group_of_list.astype(dtype=bool) + + self._update_ground_truth_statistics( + groundtruth_class_labels, + groundtruth_is_difficult_list.astype(dtype=bool), + groundtruth_is_group_of_list.astype(dtype=bool), + ) + + def add_single_detected_image_info( + self, + image_key, + detected_boxes, + detected_scores, + detected_class_labels, + detected_masks=None, + ): + """Adds detections for a single image to be used for evaluation. + + Args: + image_key: A unique string/integer identifier for the image. + detected_boxes: float32 numpy array of shape [num_boxes, 4] + containing `num_boxes` detection boxes of the format + [ymin, xmin, ymax, xmax] in absolute image coordinates. + detected_scores: float32 numpy array of shape [num_boxes] containing + detection scores for the boxes. + detected_class_labels: integer numpy array of shape [num_boxes] containing + 0-indexed detection classes for the boxes. + detected_masks: np.uint8 numpy array of shape [num_boxes, height, width] + containing `num_boxes` detection masks with values ranging + between 0 and 1. + + Raises: + ValueError: if the number of boxes, scores and class labels differ in + length. + """ + if len(detected_boxes) != len(detected_scores) or len( + detected_boxes + ) != len(detected_class_labels): + raise ValueError( + "detected_boxes, detected_scores and " + "detected_class_labels should all have same lengths. 
Got" + "[%d, %d, %d]" % len(detected_boxes), + len(detected_scores), + len(detected_class_labels), + ) + + if image_key in self.detection_keys: + logging.warn( + "image %s has already been added to the detection result database", + image_key, + ) + return + + self.detection_keys.add(image_key) + if image_key in self.groundtruth_boxes: + groundtruth_boxes = self.groundtruth_boxes[image_key] + groundtruth_class_labels = self.groundtruth_class_labels[image_key] + # Masks are popped instead of look up. The reason is that we do not want + # to keep all masks in memory which can cause memory overflow. + groundtruth_masks = self.groundtruth_masks.pop(image_key) + groundtruth_is_difficult_list = self.groundtruth_is_difficult_list[ + image_key + ] + groundtruth_is_group_of_list = self.groundtruth_is_group_of_list[ + image_key + ] + else: + groundtruth_boxes = np.empty(shape=[0, 4], dtype=float) + groundtruth_class_labels = np.array([], dtype=int) + if detected_masks is None: + groundtruth_masks = None + else: + groundtruth_masks = np.empty(shape=[0, 1, 1], dtype=float) + groundtruth_is_difficult_list = np.array([], dtype=bool) + groundtruth_is_group_of_list = np.array([], dtype=bool) + ( + scores, + tp_fp_labels, + ) = self.per_image_eval.compute_object_detection_metrics( + detected_boxes=detected_boxes, + detected_scores=detected_scores, + detected_class_labels=detected_class_labels, + groundtruth_boxes=groundtruth_boxes, + groundtruth_class_labels=groundtruth_class_labels, + groundtruth_is_difficult_list=groundtruth_is_difficult_list, + groundtruth_is_group_of_list=groundtruth_is_group_of_list, + detected_masks=detected_masks, + groundtruth_masks=groundtruth_masks, + ) + + for i in range(self.num_class): + if scores[i].shape[0] > 0: + self.scores_per_class[i].append(scores[i]) + self.tp_fp_labels_per_class[i].append(tp_fp_labels[i]) + + def _update_ground_truth_statistics( + self, + groundtruth_class_labels, + groundtruth_is_difficult_list, + groundtruth_is_group_of_list, + ): + """Update grouth truth statitistics. + + 1. Difficult boxes are ignored when counting the number of ground truth + instances as done in Pascal VOC devkit. + 2. Difficult boxes are treated as normal boxes when computing CorLoc related + statitistics. + + Args: + groundtruth_class_labels: An integer numpy array of length M, + representing M class labels of object instances in ground truth + groundtruth_is_difficult_list: A boolean numpy array of length M denoting + whether a ground truth box is a difficult instance or not + groundtruth_is_group_of_list: A boolean numpy array of length M denoting + whether a ground truth box is a group-of box or not + """ + for class_index in range(self.num_class): + num_gt_instances = np.sum( + groundtruth_class_labels[ + ~groundtruth_is_difficult_list + & ~groundtruth_is_group_of_list + ] + == class_index + ) + self.num_gt_instances_per_class[class_index] += num_gt_instances + if np.any(groundtruth_class_labels == class_index): + self.num_gt_imgs_per_class[class_index] += 1 + + def evaluate(self): + """Compute evaluation result. + + Returns: + A named tuple with the following fields - + average_precision: float numpy array of average precision for + each class. 
+            mean_ap: mean average precision of all classes, float scalar
+            precisions: List of precisions, each precision is a float numpy
+                array
+            recalls: List of recalls, each recall is a float numpy array
+            corlocs: float numpy array of per-class CorLoc scores
+            mean_corloc: mean CorLoc score over all classes, float scalar
+        """
+        if (self.num_gt_instances_per_class == 0).any():
+            logging.info(
+                "The following classes have no ground truth examples: %s",
+                np.squeeze(np.argwhere(self.num_gt_instances_per_class == 0))
+                + self.label_id_offset,
+            )
+
+        if self.use_weighted_mean_ap:
+            all_scores = np.array([], dtype=float)
+            all_tp_fp_labels = np.array([], dtype=bool)
+
+        for class_index in range(self.num_class):
+            if self.num_gt_instances_per_class[class_index] == 0:
+                continue
+            if not self.scores_per_class[class_index]:
+                scores = np.array([], dtype=float)
+                tp_fp_labels = np.array([], dtype=bool)
+            else:
+                scores = np.concatenate(self.scores_per_class[class_index])
+                tp_fp_labels = np.concatenate(
+                    self.tp_fp_labels_per_class[class_index]
+                )
+            if self.use_weighted_mean_ap:
+                all_scores = np.append(all_scores, scores)
+                all_tp_fp_labels = np.append(all_tp_fp_labels, tp_fp_labels)
+            precision, recall = metrics.compute_precision_recall(
+                scores,
+                tp_fp_labels,
+                self.num_gt_instances_per_class[class_index],
+            )
+            self.precisions_per_class.append(precision)
+            self.recalls_per_class.append(recall)
+            average_precision = metrics.compute_average_precision(
+                precision, recall
+            )
+            self.average_precision_per_class[class_index] = average_precision
+
+        self.corloc_per_class = metrics.compute_cor_loc(
+            self.num_gt_imgs_per_class,
+            self.num_images_correctly_detected_per_class,
+        )
+
+        if self.use_weighted_mean_ap:
+            num_gt_instances = np.sum(self.num_gt_instances_per_class)
+            precision, recall = metrics.compute_precision_recall(
+                all_scores, all_tp_fp_labels, num_gt_instances
+            )
+            mean_ap = metrics.compute_average_precision(precision, recall)
+        else:
+            mean_ap = np.nanmean(self.average_precision_per_class)
+        mean_corloc = np.nanmean(self.corloc_per_class)
+        return ObjectDetectionEvalMetrics(
+            self.average_precision_per_class,
+            mean_ap,
+            self.precisions_per_class,
+            self.recalls_per_class,
+            self.corloc_per_class,
+            mean_corloc,
+        )
diff --git a/TimeSformer/timesformer/utils/ava_evaluation/per_image_evaluation.py b/TimeSformer/timesformer/utils/ava_evaluation/per_image_evaluation.py
new file mode 100755
index 0000000000000000000000000000000000000000..ea0f4fe73349502152645e01dd2aac69dc8ab64d
--- /dev/null
+++ b/TimeSformer/timesformer/utils/ava_evaluation/per_image_evaluation.py
@@ -0,0 +1,453 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Evaluate Object Detection result on a single image.
+
+Annotate each detected result as a true positive or a false positive according
+to a predefined IOU ratio. Non Maximum Suppression is used by default. Multi-class
+detection is supported by default.
+Based on the settings, per image evaluation is either performed on boxes or
+on object masks.
+"""
+from __future__ import (
+    absolute_import,
+    division,
+    print_function,
+    unicode_literals,
+)
+import numpy as np
+
+from . import (
+    np_box_list,
+    np_box_list_ops,
+    np_box_mask_list,
+    np_box_mask_list_ops,
+)
+
+
+class PerImageEvaluation(object):
+    """Evaluate detection result of a single image."""
+
+    def __init__(self, num_groundtruth_classes, matching_iou_threshold=0.5):
+        """Initializes PerImageEvaluation with evaluation parameters.
+
+        Args:
+            num_groundtruth_classes: Number of ground truth object classes
+            matching_iou_threshold: A ratio of area intersection to union, which
+                is the threshold to consider whether a detection is a true
+                positive or not
+        """
+        self.matching_iou_threshold = matching_iou_threshold
+        self.num_groundtruth_classes = num_groundtruth_classes
+
+    def compute_object_detection_metrics(
+        self,
+        detected_boxes,
+        detected_scores,
+        detected_class_labels,
+        groundtruth_boxes,
+        groundtruth_class_labels,
+        groundtruth_is_difficult_list,
+        groundtruth_is_group_of_list,
+        detected_masks=None,
+        groundtruth_masks=None,
+    ):
+        """Evaluates detections as being tp, fp or ignored from a single image.
+
+        The evaluation is done in two stages:
+        1. All detections are matched to non group-of boxes; true positives are
+           determined and detections matched to difficult boxes are ignored.
+        2. Detections that are determined as false positives are matched against
+           group-of boxes and ignored if matched.
+
+        Args:
+            detected_boxes: A float numpy array of shape [N, 4], representing N
+                regions of detected object regions.
+                Each row is of the format [y_min, x_min, y_max, x_max]
+            detected_scores: A float numpy array of shape [N, 1], representing
+                the confidence scores of the detected N object instances.
+            detected_class_labels: An integer numpy array of shape [N, 1],
+                representing the class labels of the detected N object instances.
+            groundtruth_boxes: A float numpy array of shape [M, 4], representing M
+                regions of object instances in ground truth
+            groundtruth_class_labels: An integer numpy array of shape [M, 1],
+                representing M class labels of object instances in ground truth
+            groundtruth_is_difficult_list: A boolean numpy array of length M
+                denoting whether a ground truth box is a difficult instance or not
+            groundtruth_is_group_of_list: A boolean numpy array of length M
+                denoting whether a ground truth box has a group-of tag
+            detected_masks: (optional) A uint8 numpy array of shape
+                [N, height, width]. If not None, the metrics will be computed
+                based on masks.
+            groundtruth_masks: (optional) A uint8 numpy array of shape
+                [M, height, width].
+
+        Returns:
+            scores: A list of C float numpy arrays. Each numpy array is of
+                shape [K, 1], representing K scores detected with object class
+                label c
+            tp_fp_labels: A list of C boolean numpy arrays. Each numpy array
+                is of shape [K, 1], representing K True/False positive labels of
+                object instances detected with class label c
+        """
+        (
+            detected_boxes,
+            detected_scores,
+            detected_class_labels,
+            detected_masks,
+        ) = self._remove_invalid_boxes(
+            detected_boxes,
+            detected_scores,
+            detected_class_labels,
+            detected_masks,
+        )
+        scores, tp_fp_labels = self._compute_tp_fp(
+            detected_boxes=detected_boxes,
+            detected_scores=detected_scores,
+            detected_class_labels=detected_class_labels,
+            groundtruth_boxes=groundtruth_boxes,
+            groundtruth_class_labels=groundtruth_class_labels,
+            groundtruth_is_difficult_list=groundtruth_is_difficult_list,
+            groundtruth_is_group_of_list=groundtruth_is_group_of_list,
+            detected_masks=detected_masks,
+            groundtruth_masks=groundtruth_masks,
+        )
+
+        return scores, tp_fp_labels
+
+    def _compute_tp_fp(
+        self,
+        detected_boxes,
+        detected_scores,
+        detected_class_labels,
+        groundtruth_boxes,
+        groundtruth_class_labels,
+        groundtruth_is_difficult_list,
+        groundtruth_is_group_of_list,
+        detected_masks=None,
+        groundtruth_masks=None,
+    ):
+        """Labels true/false positives of detections of an image across all classes.
+
+        Args:
+            detected_boxes: A float numpy array of shape [N, 4], representing N
+                regions of detected object regions.
+                Each row is of the format [y_min, x_min, y_max, x_max]
+            detected_scores: A float numpy array of shape [N, 1], representing
+                the confidence scores of the detected N object instances.
+            detected_class_labels: An integer numpy array of shape [N, 1],
+                representing the class labels of the detected N object instances.
+            groundtruth_boxes: A float numpy array of shape [M, 4], representing M
+                regions of object instances in ground truth
+            groundtruth_class_labels: An integer numpy array of shape [M, 1],
+                representing M class labels of object instances in ground truth
+            groundtruth_is_difficult_list: A boolean numpy array of length M
+                denoting whether a ground truth box is a difficult instance or not
+            groundtruth_is_group_of_list: A boolean numpy array of length M
+                denoting whether a ground truth box has a group-of tag
+            detected_masks: (optional) A np.uint8 numpy array of shape
+                [N, height, width]. If not None, the scores will be computed
+                based on masks.
+            groundtruth_masks: (optional) A np.uint8 numpy array of shape
+                [M, height, width].
+
+        Returns:
+            result_scores: A list of float numpy arrays. Each numpy array is of
+                shape [K, 1], representing K scores detected with object class
+                label c
+            result_tp_fp_labels: A list of boolean numpy arrays. Each numpy array
+                is of shape [K, 1], representing K True/False positive labels of
+                object instances detected with class label c
+
+        Raises:
+            ValueError: If detected masks is not None but groundtruth masks are
+                None, or the other way around.
+        """
+        if detected_masks is not None and groundtruth_masks is None:
+            raise ValueError(
+                "Detected masks are available but groundtruth masks are not."
+            )
+        if detected_masks is None and groundtruth_masks is not None:
+            raise ValueError(
+                "Groundtruth masks are available but detected masks are not."
+            )
+
+        result_scores = []
+        result_tp_fp_labels = []
+        for i in range(self.num_groundtruth_classes):
+            groundtruth_is_difficult_list_at_ith_class = groundtruth_is_difficult_list[
+                groundtruth_class_labels == i
+            ]
+            groundtruth_is_group_of_list_at_ith_class = groundtruth_is_group_of_list[
+                groundtruth_class_labels == i
+            ]
+            (
+                gt_boxes_at_ith_class,
+                gt_masks_at_ith_class,
+                detected_boxes_at_ith_class,
+                detected_scores_at_ith_class,
+                detected_masks_at_ith_class,
+            ) = self._get_ith_class_arrays(
+                detected_boxes,
+                detected_scores,
+                detected_masks,
+                detected_class_labels,
+                groundtruth_boxes,
+                groundtruth_masks,
+                groundtruth_class_labels,
+                i,
+            )
+            scores, tp_fp_labels = self._compute_tp_fp_for_single_class(
+                detected_boxes=detected_boxes_at_ith_class,
+                detected_scores=detected_scores_at_ith_class,
+                groundtruth_boxes=gt_boxes_at_ith_class,
+                groundtruth_is_difficult_list=groundtruth_is_difficult_list_at_ith_class,
+                groundtruth_is_group_of_list=groundtruth_is_group_of_list_at_ith_class,
+                detected_masks=detected_masks_at_ith_class,
+                groundtruth_masks=gt_masks_at_ith_class,
+            )
+            result_scores.append(scores)
+            result_tp_fp_labels.append(tp_fp_labels)
+        return result_scores, result_tp_fp_labels
+
+    def _get_overlaps_and_scores_box_mode(
+        self,
+        detected_boxes,
+        detected_scores,
+        groundtruth_boxes,
+        groundtruth_is_group_of_list,
+    ):
+        """Computes overlaps and scores between detected and groundtruth boxes.
+
+        Args:
+            detected_boxes: A numpy array of shape [N, 4] representing detected
+                box coordinates
+            detected_scores: A 1-d numpy array of length N representing
+                classification scores
+            groundtruth_boxes: A numpy array of shape [M, 4] representing ground
+                truth box coordinates
+            groundtruth_is_group_of_list: A boolean numpy array of length M
+                denoting whether a ground truth box has a group-of tag. If a
+                groundtruth box is a group-of box, every detection matching this
+                box is ignored.
+
+        Returns:
+            iou: A float numpy array of size [num_detected_boxes, num_gt_boxes].
+                If gt_non_group_of_boxlist.num_boxes() == 0 it will be None.
+            ioa: A float numpy array of size [num_detected_boxes, num_gt_boxes].
+                If gt_group_of_boxlist.num_boxes() == 0 it will be None.
+            scores: The score of the detected boxlist.
+            num_boxes: Number of non-maximum suppressed detected boxes.
+        """
+        detected_boxlist = np_box_list.BoxList(detected_boxes)
+        detected_boxlist.add_field("scores", detected_scores)
+        gt_non_group_of_boxlist = np_box_list.BoxList(
+            groundtruth_boxes[~groundtruth_is_group_of_list]
+        )
+        iou = np_box_list_ops.iou(detected_boxlist, gt_non_group_of_boxlist)
+        scores = detected_boxlist.get_field("scores")
+        num_boxes = detected_boxlist.num_boxes()
+        return iou, None, scores, num_boxes
+
+    def _compute_tp_fp_for_single_class(
+        self,
+        detected_boxes,
+        detected_scores,
+        groundtruth_boxes,
+        groundtruth_is_difficult_list,
+        groundtruth_is_group_of_list,
+        detected_masks=None,
+        groundtruth_masks=None,
+    ):
+        """Labels boxes detected with the same class from the same image as tp/fp.
+
+        Args:
+            detected_boxes: A numpy array of shape [N, 4] representing detected
+                box coordinates
+            detected_scores: A 1-d numpy array of length N representing
+                classification scores
+            groundtruth_boxes: A numpy array of shape [M, 4] representing ground
+                truth box coordinates
+            groundtruth_is_difficult_list: A boolean numpy array of length M
+                denoting whether a ground truth box is a difficult instance or
+                not. If a groundtruth box is difficult, every detection matching
+                this box is ignored.
+ groundtruth_is_group_of_list: A boolean numpy array of length M denoting + whether a ground truth box has group-of tag. If a groundtruth box + is group-of box, every detection matching this box is ignored. + detected_masks: (optional) A uint8 numpy array of shape + [N, height, width]. If not None, the scores will be computed based + on masks. + groundtruth_masks: (optional) A uint8 numpy array of shape + [M, height, width]. + + Returns: + Two arrays of the same size, containing all boxes that were evaluated as + being true positives or false positives; if a box matched to a difficult + box or to a group-of box, it is ignored. + + scores: A numpy array representing the detection scores. + tp_fp_labels: a boolean numpy array indicating whether a detection is a + true positive. + """ + if detected_boxes.size == 0: + return np.array([], dtype=float), np.array([], dtype=bool) + + ( + iou, + _, + scores, + num_detected_boxes, + ) = self._get_overlaps_and_scores_box_mode( + detected_boxes=detected_boxes, + detected_scores=detected_scores, + groundtruth_boxes=groundtruth_boxes, + groundtruth_is_group_of_list=groundtruth_is_group_of_list, + ) + + if groundtruth_boxes.size == 0: + return scores, np.zeros(num_detected_boxes, dtype=bool) + + tp_fp_labels = np.zeros(num_detected_boxes, dtype=bool) + is_matched_to_difficult_box = np.zeros(num_detected_boxes, dtype=bool) + is_matched_to_group_of_box = np.zeros(num_detected_boxes, dtype=bool) + + # The evaluation is done in two stages: + # 1. All detections are matched to non group-of boxes; true positives are + # determined and detections matched to difficult boxes are ignored. + # 2. Detections that are determined as false positives are matched against + # group-of boxes and ignored if matched. + + # Tp-fp evaluation for non-group of boxes (if any). + if iou.shape[1] > 0: + groundtruth_nongroup_of_is_difficult_list = groundtruth_is_difficult_list[ + ~groundtruth_is_group_of_list + ] + max_overlap_gt_ids = np.argmax(iou, axis=1) + is_gt_box_detected = np.zeros(iou.shape[1], dtype=bool) + for i in range(num_detected_boxes): + gt_id = max_overlap_gt_ids[i] + if iou[i, gt_id] >= self.matching_iou_threshold: + if not groundtruth_nongroup_of_is_difficult_list[gt_id]: + if not is_gt_box_detected[gt_id]: + tp_fp_labels[i] = True + is_gt_box_detected[gt_id] = True + else: + is_matched_to_difficult_box[i] = True + + return ( + scores[~is_matched_to_difficult_box & ~is_matched_to_group_of_box], + tp_fp_labels[ + ~is_matched_to_difficult_box & ~is_matched_to_group_of_box + ], + ) + + def _get_ith_class_arrays( + self, + detected_boxes, + detected_scores, + detected_masks, + detected_class_labels, + groundtruth_boxes, + groundtruth_masks, + groundtruth_class_labels, + class_index, + ): + """Returns numpy arrays belonging to class with index `class_index`. + + Args: + detected_boxes: A numpy array containing detected boxes. + detected_scores: A numpy array containing detected scores. + detected_masks: A numpy array containing detected masks. + detected_class_labels: A numpy array containing detected class labels. + groundtruth_boxes: A numpy array containing groundtruth boxes. + groundtruth_masks: A numpy array containing groundtruth masks. + groundtruth_class_labels: A numpy array containing groundtruth class + labels. + class_index: An integer index. + + Returns: + gt_boxes_at_ith_class: A numpy array containing groundtruth boxes labeled + as ith class. + gt_masks_at_ith_class: A numpy array containing groundtruth masks labeled + as ith class. 
+ detected_boxes_at_ith_class: A numpy array containing detected boxes + corresponding to the ith class. + detected_scores_at_ith_class: A numpy array containing detected scores + corresponding to the ith class. + detected_masks_at_ith_class: A numpy array containing detected masks + corresponding to the ith class. + """ + selected_groundtruth = groundtruth_class_labels == class_index + gt_boxes_at_ith_class = groundtruth_boxes[selected_groundtruth] + if groundtruth_masks is not None: + gt_masks_at_ith_class = groundtruth_masks[selected_groundtruth] + else: + gt_masks_at_ith_class = None + selected_detections = detected_class_labels == class_index + detected_boxes_at_ith_class = detected_boxes[selected_detections] + detected_scores_at_ith_class = detected_scores[selected_detections] + if detected_masks is not None: + detected_masks_at_ith_class = detected_masks[selected_detections] + else: + detected_masks_at_ith_class = None + return ( + gt_boxes_at_ith_class, + gt_masks_at_ith_class, + detected_boxes_at_ith_class, + detected_scores_at_ith_class, + detected_masks_at_ith_class, + ) + + def _remove_invalid_boxes( + self, + detected_boxes, + detected_scores, + detected_class_labels, + detected_masks=None, + ): + """Removes entries with invalid boxes. + + A box is invalid if either its xmax is smaller than its xmin, or its ymax + is smaller than its ymin. + + Args: + detected_boxes: A float numpy array of size [num_boxes, 4] containing box + coordinates in [ymin, xmin, ymax, xmax] format. + detected_scores: A float numpy array of size [num_boxes]. + detected_class_labels: A int32 numpy array of size [num_boxes]. + detected_masks: A uint8 numpy array of size [num_boxes, height, width]. + + Returns: + valid_detected_boxes: A float numpy array of size [num_valid_boxes, 4] + containing box coordinates in [ymin, xmin, ymax, xmax] format. + valid_detected_scores: A float numpy array of size [num_valid_boxes]. + valid_detected_class_labels: A int32 numpy array of size + [num_valid_boxes]. + valid_detected_masks: A uint8 numpy array of size + [num_valid_boxes, height, width]. + """ + valid_indices = np.logical_and( + detected_boxes[:, 0] < detected_boxes[:, 2], + detected_boxes[:, 1] < detected_boxes[:, 3], + ) + detected_boxes = detected_boxes[valid_indices] + detected_scores = detected_scores[valid_indices] + detected_class_labels = detected_class_labels[valid_indices] + if detected_masks is not None: + detected_masks = detected_masks[valid_indices] + return [ + detected_boxes, + detected_scores, + detected_class_labels, + detected_masks, + ] diff --git a/TimeSformer/timesformer/utils/ava_evaluation/standard_fields.py b/TimeSformer/timesformer/utils/ava_evaluation/standard_fields.py new file mode 100755 index 0000000000000000000000000000000000000000..ea30a05c88b609fd6295940444865a7c3897d6b7 --- /dev/null +++ b/TimeSformer/timesformer/utils/ava_evaluation/standard_fields.py @@ -0,0 +1,225 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + +"""Contains classes specifying naming conventions used for object detection. + + +Specifies: + InputDataFields: standard fields used by reader/preprocessor/batcher. + DetectionResultFields: standard fields returned by object detector. + BoxListFields: standard field used by BoxList + TfExampleFields: standard fields for tf-example data format (go/tf-example). +""" + + +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + + +class InputDataFields(object): + """Names for the input tensors. + + Holds the standard data field names to use for identifying input tensors. This + should be used by the decoder to identify keys for the returned tensor_dict + containing input tensors. And it should be used by the model to identify the + tensors it needs. + + Attributes: + image: image. + original_image: image in the original input size. + key: unique key corresponding to image. + source_id: source of the original image. + filename: original filename of the dataset (without common path). + groundtruth_image_classes: image-level class labels. + groundtruth_boxes: coordinates of the ground truth boxes in the image. + groundtruth_classes: box-level class labels. + groundtruth_label_types: box-level label types (e.g. explicit negative). + groundtruth_is_crowd: [DEPRECATED, use groundtruth_group_of instead] + is the groundtruth a single object or a crowd. + groundtruth_area: area of a groundtruth segment. + groundtruth_difficult: is a `difficult` object + groundtruth_group_of: is a `group_of` objects, e.g. multiple objects of the + same class, forming a connected group, where instances are heavily + occluding each other. + proposal_boxes: coordinates of object proposal boxes. + proposal_objectness: objectness score of each proposal. + groundtruth_instance_masks: ground truth instance masks. + groundtruth_instance_boundaries: ground truth instance boundaries. + groundtruth_instance_classes: instance mask-level class labels. + groundtruth_keypoints: ground truth keypoints. + groundtruth_keypoint_visibilities: ground truth keypoint visibilities. + groundtruth_label_scores: groundtruth label scores. + groundtruth_weights: groundtruth weight factor for bounding boxes. + num_groundtruth_boxes: number of groundtruth boxes. + true_image_shapes: true shapes of images in the resized images, as resized + images can be padded with zeros. 
+ """ + + image = "image" + original_image = "original_image" + key = "key" + source_id = "source_id" + filename = "filename" + groundtruth_image_classes = "groundtruth_image_classes" + groundtruth_boxes = "groundtruth_boxes" + groundtruth_classes = "groundtruth_classes" + groundtruth_label_types = "groundtruth_label_types" + groundtruth_is_crowd = "groundtruth_is_crowd" + groundtruth_area = "groundtruth_area" + groundtruth_difficult = "groundtruth_difficult" + groundtruth_group_of = "groundtruth_group_of" + proposal_boxes = "proposal_boxes" + proposal_objectness = "proposal_objectness" + groundtruth_instance_masks = "groundtruth_instance_masks" + groundtruth_instance_boundaries = "groundtruth_instance_boundaries" + groundtruth_instance_classes = "groundtruth_instance_classes" + groundtruth_keypoints = "groundtruth_keypoints" + groundtruth_keypoint_visibilities = "groundtruth_keypoint_visibilities" + groundtruth_label_scores = "groundtruth_label_scores" + groundtruth_weights = "groundtruth_weights" + num_groundtruth_boxes = "num_groundtruth_boxes" + true_image_shape = "true_image_shape" + + +class DetectionResultFields(object): + """Naming conventions for storing the output of the detector. + + Attributes: + source_id: source of the original image. + key: unique key corresponding to image. + detection_boxes: coordinates of the detection boxes in the image. + detection_scores: detection scores for the detection boxes in the image. + detection_classes: detection-level class labels. + detection_masks: contains a segmentation mask for each detection box. + detection_boundaries: contains an object boundary for each detection box. + detection_keypoints: contains detection keypoints for each detection box. + num_detections: number of detections in the batch. + """ + + source_id = "source_id" + key = "key" + detection_boxes = "detection_boxes" + detection_scores = "detection_scores" + detection_classes = "detection_classes" + detection_masks = "detection_masks" + detection_boundaries = "detection_boundaries" + detection_keypoints = "detection_keypoints" + num_detections = "num_detections" + + +class BoxListFields(object): + """Naming conventions for BoxLists. + + Attributes: + boxes: bounding box coordinates. + classes: classes per bounding box. + scores: scores per bounding box. + weights: sample weights per bounding box. + objectness: objectness score per bounding box. + masks: masks per bounding box. + boundaries: boundaries per bounding box. + keypoints: keypoints per bounding box. + keypoint_heatmaps: keypoint heatmaps per bounding box. + """ + + boxes = "boxes" + classes = "classes" + scores = "scores" + weights = "weights" + objectness = "objectness" + masks = "masks" + boundaries = "boundaries" + keypoints = "keypoints" + keypoint_heatmaps = "keypoint_heatmaps" + + +class TfExampleFields(object): + """TF-example proto feature names for object detection. + + Holds the standard feature names to load from an Example proto for object + detection. + + Attributes: + image_encoded: JPEG encoded string + image_format: image format, e.g. "JPEG" + filename: filename + channels: number of channels of image + colorspace: colorspace, e.g. "RGB" + height: height of image in pixels, e.g. 462 + width: width of image in pixels, e.g. 581 + source_id: original source of the image + object_class_text: labels in text format, e.g. ["person", "cat"] + object_class_label: labels in numbers, e.g. [16, 8] + object_bbox_xmin: xmin coordinates of groundtruth box, e.g. 
10, 30 + object_bbox_xmax: xmax coordinates of groundtruth box, e.g. 50, 40 + object_bbox_ymin: ymin coordinates of groundtruth box, e.g. 40, 50 + object_bbox_ymax: ymax coordinates of groundtruth box, e.g. 80, 70 + object_view: viewpoint of object, e.g. ["frontal", "left"] + object_truncated: is object truncated, e.g. [true, false] + object_occluded: is object occluded, e.g. [true, false] + object_difficult: is object difficult, e.g. [true, false] + object_group_of: is object a single object or a group of objects + object_depiction: is object a depiction + object_is_crowd: [DEPRECATED, use object_group_of instead] + is the object a single object or a crowd + object_segment_area: the area of the segment. + object_weight: a weight factor for the object's bounding box. + instance_masks: instance segmentation masks. + instance_boundaries: instance boundaries. + instance_classes: Classes for each instance segmentation mask. + detection_class_label: class label in numbers. + detection_bbox_ymin: ymin coordinates of a detection box. + detection_bbox_xmin: xmin coordinates of a detection box. + detection_bbox_ymax: ymax coordinates of a detection box. + detection_bbox_xmax: xmax coordinates of a detection box. + detection_score: detection score for the class label and box. + """ + + image_encoded = "image/encoded" + image_format = "image/format" # format is reserved keyword + filename = "image/filename" + channels = "image/channels" + colorspace = "image/colorspace" + height = "image/height" + width = "image/width" + source_id = "image/source_id" + object_class_text = "image/object/class/text" + object_class_label = "image/object/class/label" + object_bbox_ymin = "image/object/bbox/ymin" + object_bbox_xmin = "image/object/bbox/xmin" + object_bbox_ymax = "image/object/bbox/ymax" + object_bbox_xmax = "image/object/bbox/xmax" + object_view = "image/object/view" + object_truncated = "image/object/truncated" + object_occluded = "image/object/occluded" + object_difficult = "image/object/difficult" + object_group_of = "image/object/group_of" + object_depiction = "image/object/depiction" + object_is_crowd = "image/object/is_crowd" + object_segment_area = "image/object/segment/area" + object_weight = "image/object/weight" + instance_masks = "image/segmentation/object" + instance_boundaries = "image/boundaries/object" + instance_classes = "image/segmentation/object/class" + detection_class_label = "image/detection/label" + detection_bbox_ymin = "image/detection/bbox/ymin" + detection_bbox_xmin = "image/detection/bbox/xmin" + detection_bbox_ymax = "image/detection/bbox/ymax" + detection_bbox_xmax = "image/detection/bbox/xmax" + detection_score = "image/detection/score" diff --git a/TimeSformer/timesformer/utils/benchmark.py b/TimeSformer/timesformer/utils/benchmark.py new file mode 100755 index 0000000000000000000000000000000000000000..7e6dbb11eb27f478f11748d7ba43e2f119aaeab6 --- /dev/null +++ b/TimeSformer/timesformer/utils/benchmark.py @@ -0,0 +1,103 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Functions for benchmarks. +""" + +import numpy as np +import pprint +import torch +import tqdm +from fvcore.common.timer import Timer + +import timesformer.utils.logging as logging +import timesformer.utils.misc as misc +from timesformer.datasets import loader +from timesformer.utils.env import setup_environment + +logger = logging.get_logger(__name__) + + +def benchmark_data_loading(cfg): + """ + Benchmark the speed of data loading in PySlowFast. 
+    Args:
+        cfg (CfgNode): configs. Details can be found in
+            lib/config/defaults.py
+    """
+    # Set up environment.
+    setup_environment()
+    # Set random seed from configs.
+    np.random.seed(cfg.RNG_SEED)
+    torch.manual_seed(cfg.RNG_SEED)
+
+    # Setup logging format.
+    logging.setup_logging(cfg.OUTPUT_DIR)
+
+    # Print config.
+    logger.info("Benchmark data loading with config:")
+    logger.info(pprint.pformat(cfg))
+
+    timer = Timer()
+    dataloader = loader.construct_loader(cfg, "train")
+    logger.info(
+        "Initialized loader in {:.2f} seconds.".format(timer.seconds())
+    )
+    # Total batch size across different machines.
+    batch_size = cfg.TRAIN.BATCH_SIZE * cfg.NUM_SHARDS
+    log_period = cfg.BENCHMARK.LOG_PERIOD
+    epoch_times = []
+    # Test for a few epochs.
+    for cur_epoch in range(cfg.BENCHMARK.NUM_EPOCHS):
+        timer = Timer()
+        timer_epoch = Timer()
+        iter_times = []
+        if cfg.BENCHMARK.SHUFFLE:
+            loader.shuffle_dataset(dataloader, cur_epoch)
+        for cur_iter, _ in enumerate(tqdm.tqdm(dataloader)):
+            if cur_iter > 0 and cur_iter % log_period == 0:
+                iter_times.append(timer.seconds())
+                ram_usage, ram_total = misc.cpu_mem_usage()
+                logger.info(
+                    "Epoch {}: {} iters ({} videos) in {:.2f} seconds. "
+                    "RAM Usage: {:.2f}/{:.2f} GB.".format(
+                        cur_epoch,
+                        log_period,
+                        log_period * batch_size,
+                        iter_times[-1],
+                        ram_usage,
+                        ram_total,
+                    )
+                )
+                timer.reset()
+        epoch_times.append(timer_epoch.seconds())
+        ram_usage, ram_total = misc.cpu_mem_usage()
+        logger.info(
+            "Epoch {}: in total {} iters ({} videos) in {:.2f} seconds. "
+            "RAM Usage: {:.2f}/{:.2f} GB.".format(
+                cur_epoch,
+                len(dataloader),
+                len(dataloader) * batch_size,
+                epoch_times[-1],
+                ram_usage,
+                ram_total,
+            )
+        )
+        logger.info(
+            "Epoch {}: on average every {} iters ({} videos) take {:.2f}/{:.2f} "
+            "(avg/std) seconds.".format(
+                cur_epoch,
+                log_period,
+                log_period * batch_size,
+                np.mean(iter_times),
+                np.std(iter_times),
+            )
+        )
+    logger.info(
+        "On average every epoch ({} videos) takes {:.2f}/{:.2f} "
+        "(avg/std) seconds.".format(
+            len(dataloader) * batch_size,
+            np.mean(epoch_times),
+            np.std(epoch_times),
+        )
+    )
diff --git a/TimeSformer/timesformer/utils/bn_helper.py b/TimeSformer/timesformer/utils/bn_helper.py
new file mode 100755
index 0000000000000000000000000000000000000000..c584e7d326a87259f35dea970820a88132a6acd1
--- /dev/null
+++ b/TimeSformer/timesformer/utils/bn_helper.py
@@ -0,0 +1,76 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+"""bn helper."""
+
+import itertools
+import torch
+
+
+@torch.no_grad()
+def compute_and_update_bn_stats(model, data_loader, num_batches=200):
+    """
+    Compute and update the batch norm stats to make them more precise. During
+    training both the bn stats and the weights change after every iteration,
+    so the bn stats cannot precisely reflect the latest stats of the current
+    model. Here the bn stats are recomputed without changing the weights, to
+    make the running mean and running var more precise.
+    Args:
+        model (model): the model used to compute and update the bn stats.
+        data_loader (dataloader): dataloader used to provide inputs.
+        num_batches (int): number of batches used to compute the stats.
+    """
+
+    # Prepares all the bn layers.
+    bn_layers = [
+        m
+        for m in model.modules()
+        if any(
+            (
+                isinstance(m, bn_type)
+                for bn_type in (
+                    torch.nn.BatchNorm1d,
+                    torch.nn.BatchNorm2d,
+                    torch.nn.BatchNorm3d,
+                )
+            )
+        )
+    ]
+
+    # In order to make the running stats only reflect the current batch, the
+    # momentum is disabled.
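+    # (Forcing momentum to 1.0 below makes every forward pass overwrite the
+    # running stats with the current batch's stats; the loop further down then
+    # averages those per-batch stats over `num_batches` batches with the
+    # online-mean update avg_k = avg_{k-1} + (x_k - avg_{k-1}) / k.)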
+ # bn.running_mean = (1 - momentum) * bn.running_mean + momentum * batch_mean + # Setting the momentum to 1.0 to compute the stats without momentum. + momentum_actual = [bn.momentum for bn in bn_layers] + for bn in bn_layers: + bn.momentum = 1.0 + + # Calculates the running iterations for precise stats computation. + running_mean = [torch.zeros_like(bn.running_mean) for bn in bn_layers] + running_square_mean = [torch.zeros_like(bn.running_var) for bn in bn_layers] + + for ind, (inputs, _, _) in enumerate( + itertools.islice(data_loader, num_batches) + ): + # Forwards the model to update the bn stats. + if isinstance(inputs, (list,)): + for i in range(len(inputs)): + inputs[i] = inputs[i].float().cuda(non_blocking=True) + else: + inputs = inputs.cuda(non_blocking=True) + model(inputs) + + for i, bn in enumerate(bn_layers): + # Accumulates the bn stats. + running_mean[i] += (bn.running_mean - running_mean[i]) / (ind + 1) + # $E(x^2) = Var(x) + E(x)^2$. + cur_square_mean = bn.running_var + bn.running_mean ** 2 + running_square_mean[i] += ( + cur_square_mean - running_square_mean[i] + ) / (ind + 1) + + for i, bn in enumerate(bn_layers): + bn.running_mean = running_mean[i] + # Var(x) = $E(x^2) - E(x)^2$. + bn.running_var = running_square_mean[i] - bn.running_mean ** 2 + # Sets the precise bn stats. + bn.momentum = momentum_actual[i] diff --git a/TimeSformer/timesformer/utils/c2_model_loading.py b/TimeSformer/timesformer/utils/c2_model_loading.py new file mode 100755 index 0000000000000000000000000000000000000000..794adae09b3013480d9b5a52fdd962a0bc51d495 --- /dev/null +++ b/TimeSformer/timesformer/utils/c2_model_loading.py @@ -0,0 +1,119 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Caffe2 to PyTorch checkpoint name converting utility.""" + +import re + + +def get_name_convert_func(): + """ + Get the function to convert Caffe2 layer names to PyTorch layer names. + Returns: + (func): function to convert parameter name from Caffe2 format to PyTorch + format. 
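+    Example (illustrative; the expected output mirrors the comment on the
+    corresponding rule below):
+        convert = get_name_convert_func()
+        convert("res4_0_branch1_w")  # -> "s4.pathway0_res0.branch1.weight"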
+ """ + pairs = [ + # ------------------------------------------------------------ + # 'nonlocal_conv3_1_theta_w' -> 's3.pathway0_nonlocal3.conv_g.weight' + [ + r"^nonlocal_conv([0-9]+)_([0-9]+)_(.*)", + r"s\1.pathway0_nonlocal\2_\3", + ], + # 'theta' -> 'conv_theta' + [r"^(.*)_nonlocal([0-9]+)_(theta)(.*)", r"\1_nonlocal\2.conv_\3\4"], + # 'g' -> 'conv_g' + [r"^(.*)_nonlocal([0-9]+)_(g)(.*)", r"\1_nonlocal\2.conv_\3\4"], + # 'phi' -> 'conv_phi' + [r"^(.*)_nonlocal([0-9]+)_(phi)(.*)", r"\1_nonlocal\2.conv_\3\4"], + # 'out' -> 'conv_out' + [r"^(.*)_nonlocal([0-9]+)_(out)(.*)", r"\1_nonlocal\2.conv_\3\4"], + # 'nonlocal_conv4_5_bn_s' -> 's4.pathway0_nonlocal3.bn.weight' + [r"^(.*)_nonlocal([0-9]+)_(bn)_(.*)", r"\1_nonlocal\2.\3.\4"], + # ------------------------------------------------------------ + # 't_pool1_subsample_bn' -> 's1_fuse.conv_f2s.bn.running_mean' + [r"^t_pool1_subsample_bn_(.*)", r"s1_fuse.bn.\1"], + # 't_pool1_subsample' -> 's1_fuse.conv_f2s' + [r"^t_pool1_subsample_(.*)", r"s1_fuse.conv_f2s.\1"], + # 't_res4_5_branch2c_bn_subsample_bn_rm' -> 's4_fuse.conv_f2s.bias' + [ + r"^t_res([0-9]+)_([0-9]+)_branch2c_bn_subsample_bn_(.*)", + r"s\1_fuse.bn.\3", + ], + # 't_pool1_subsample' -> 's1_fuse.conv_f2s' + [ + r"^t_res([0-9]+)_([0-9]+)_branch2c_bn_subsample_(.*)", + r"s\1_fuse.conv_f2s.\3", + ], + # ------------------------------------------------------------ + # 'res4_4_branch_2c_bn_b' -> 's4.pathway0_res4.branch2.c_bn_b' + [ + r"^res([0-9]+)_([0-9]+)_branch([0-9]+)([a-z])_(.*)", + r"s\1.pathway0_res\2.branch\3.\4_\5", + ], + # 'res_conv1_bn_' -> 's1.pathway0_stem.bn.' + [r"^res_conv1_bn_(.*)", r"s1.pathway0_stem.bn.\1"], + # 'conv1_xy_w_momentum' -> 's1.pathway0_stem.conv_xy.' + [r"^conv1_xy(.*)", r"s1.pathway0_stem.conv_xy\1"], + # 'conv1_w_momentum' -> 's1.pathway0_stem.conv.' + [r"^conv1_(.*)", r"s1.pathway0_stem.conv.\1"], + # 'res4_0_branch1_w' -> 'S4.pathway0_res0.branch1.weight' + [ + r"^res([0-9]+)_([0-9]+)_branch([0-9]+)_(.*)", + r"s\1.pathway0_res\2.branch\3_\4", + ], + # 'res_conv1_' -> 's1.pathway0_stem.conv.' + [r"^res_conv1_(.*)", r"s1.pathway0_stem.conv.\1"], + # ------------------------------------------------------------ + # 'res4_4_branch_2c_bn_b' -> 's4.pathway0_res4.branch2.c_bn_b' + [ + r"^t_res([0-9]+)_([0-9]+)_branch([0-9]+)([a-z])_(.*)", + r"s\1.pathway1_res\2.branch\3.\4_\5", + ], + # 'res_conv1_bn_' -> 's1.pathway0_stem.bn.' + [r"^t_res_conv1_bn_(.*)", r"s1.pathway1_stem.bn.\1"], + # 'conv1_w_momentum' -> 's1.pathway0_stem.conv.' + [r"^t_conv1_(.*)", r"s1.pathway1_stem.conv.\1"], + # 'res4_0_branch1_w' -> 'S4.pathway0_res0.branch1.weight' + [ + r"^t_res([0-9]+)_([0-9]+)_branch([0-9]+)_(.*)", + r"s\1.pathway1_res\2.branch\3_\4", + ], + # 'res_conv1_' -> 's1.pathway0_stem.conv.' + [r"^t_res_conv1_(.*)", r"s1.pathway1_stem.conv.\1"], + # ------------------------------------------------------------ + # pred_ -> head.projection. + [r"pred_(.*)", r"head.projection.\1"], + # '.b_bn_fc' -> '.se.fc' + [r"(.*)b_bn_fc(.*)", r"\1se.fc\2"], + # conv_5 -> head.conv_5. + [r"conv_5(.*)", r"head.conv_5\1"], + # conv_5 -> head.conv_5. 
+        [r"lin_5(.*)", r"head.lin_5\1"],
+        # '.bn_b' -> '.bias'
+        [r"(.*)bn.b\Z", r"\1bn.bias"],
+        # '.bn_s' -> '.weight'
+        [r"(.*)bn.s\Z", r"\1bn.weight"],
+        # '_bn_rm' -> '.running_mean'
+        [r"(.*)bn.rm\Z", r"\1bn.running_mean"],
+        # '_bn_riv' -> '.running_var'
+        [r"(.*)bn.riv\Z", r"\1bn.running_var"],
+        # '_b' -> '.bias'
+        [r"(.*)[\._]b\Z", r"\1.bias"],
+        # '_w' -> '.weight'
+        [r"(.*)[\._]w\Z", r"\1.weight"],
+    ]
+
+    def convert_caffe2_name_to_pytorch(caffe2_layer_name):
+        """
+        Convert the caffe2_layer_name to pytorch format by applying the list
+        of regular expressions.
+        Args:
+            caffe2_layer_name (str): caffe2 layer name.
+        Returns:
+            (str): pytorch layer name.
+        """
+        for source, dest in pairs:
+            caffe2_layer_name = re.sub(source, dest, caffe2_layer_name)
+        return caffe2_layer_name
+
+    return convert_caffe2_name_to_pytorch
diff --git a/TimeSformer/timesformer/utils/checkpoint.py b/TimeSformer/timesformer/utils/checkpoint.py
new file mode 100755
index 0000000000000000000000000000000000000000..c2be628dfb26c1b04b6b6f129856557018c514c4
--- /dev/null
+++ b/TimeSformer/timesformer/utils/checkpoint.py
@@ -0,0 +1,570 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+"""Functions that handle saving and loading of checkpoints."""
+
+import copy
+import numpy as np
+import os
+import pickle
+from collections import OrderedDict
+import torch
+from fvcore.common.file_io import PathManager
+
+import timesformer.utils.distributed as du
+import timesformer.utils.logging as logging
+from timesformer.utils.c2_model_loading import get_name_convert_func
+import torch.nn.functional as F
+
+logger = logging.get_logger(__name__)
+
+
+def make_checkpoint_dir(path_to_job):
+    """
+    Creates the checkpoint directory (if not present already).
+    Args:
+        path_to_job (string): the path to the folder of the current job.
+    """
+    checkpoint_dir = os.path.join(path_to_job, "checkpoints")
+    # Create the checkpoint dir from the master process
+    if du.is_master_proc() and not PathManager.exists(checkpoint_dir):
+        try:
+            PathManager.mkdirs(checkpoint_dir)
+        except Exception:
+            pass
+    return checkpoint_dir
+
+
+def get_checkpoint_dir(path_to_job):
+    """
+    Get the path for storing checkpoints.
+    Args:
+        path_to_job (string): the path to the folder of the current job.
+    """
+    return os.path.join(path_to_job, "checkpoints")
+
+
+def get_path_to_checkpoint(path_to_job, epoch):
+    """
+    Get the full path to a checkpoint file.
+    Args:
+        path_to_job (string): the path to the folder of the current job.
+        epoch (int): the epoch number for the checkpoint.
+    """
+    name = "checkpoint_epoch_{:05d}.pyth".format(epoch)
+    return os.path.join(get_checkpoint_dir(path_to_job), name)
+
+
+def get_last_checkpoint(path_to_job):
+    """
+    Get the last checkpoint from the checkpointing folder.
+    Args:
+        path_to_job (string): the path to the folder of the current job.
+    """
+
+    d = get_checkpoint_dir(path_to_job)
+    names = PathManager.ls(d) if PathManager.exists(d) else []
+    names = [f for f in names if "checkpoint" in f]
+    assert len(names), "No checkpoints found in '{}'.".format(d)
+    # Sort the checkpoints by epoch.
+    name = sorted(names)[-1]
+    return os.path.join(d, name)
+
+
+def has_checkpoint(path_to_job):
+    """
+    Determines if the given directory contains a checkpoint.
+    Args:
+        path_to_job (string): the path to the folder of the current job.
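+
+        Example (illustrative): `has_checkpoint("/path/to/job")` returns True
+            iff "/path/to/job/checkpoints" contains a file whose name includes
+            "checkpoint".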
+    """
+    d = get_checkpoint_dir(path_to_job)
+    files = PathManager.ls(d) if PathManager.exists(d) else []
+    return any("checkpoint" in f for f in files)
+
+
+def is_checkpoint_epoch(cfg, cur_epoch, multigrid_schedule=None):
+    """
+    Determine if a checkpoint should be saved on the current epoch.
+    Args:
+        cfg (CfgNode): configs to save.
+        cur_epoch (int): current number of epoch of the model.
+        multigrid_schedule (List): schedule for multigrid training.
+    """
+    if cur_epoch + 1 == cfg.SOLVER.MAX_EPOCH:
+        return True
+    if multigrid_schedule is not None:
+        prev_epoch = 0
+        for s in multigrid_schedule:
+            if cur_epoch < s[-1]:
+                period = max(
+                    (s[-1] - prev_epoch) // cfg.MULTIGRID.EVAL_FREQ + 1, 1
+                )
+                return (s[-1] - 1 - cur_epoch) % period == 0
+            prev_epoch = s[-1]
+
+    return (cur_epoch + 1) % cfg.TRAIN.CHECKPOINT_PERIOD == 0
+
+
+def save_checkpoint(path_to_job, model, optimizer, epoch, cfg):
+    """
+    Save a checkpoint.
+    Args:
+        model (model): model whose weights are saved to the checkpoint.
+        optimizer (optim): optimizer to save the historical state.
+        epoch (int): current number of epoch of the model.
+        cfg (CfgNode): configs to save.
+    """
+    # Save checkpoints only from the master process.
+    if not du.is_master_proc(cfg.NUM_GPUS * cfg.NUM_SHARDS):
+        return
+    # Ensure that the checkpoint dir exists.
+    PathManager.mkdirs(get_checkpoint_dir(path_to_job))
+    # Omit the DDP wrapper in the multi-gpu setting.
+    sd = model.module.state_dict() if cfg.NUM_GPUS > 1 else model.state_dict()
+    normalized_sd = sub_to_normal_bn(sd)
+
+    # Record the state.
+    checkpoint = {
+        "epoch": epoch,
+        "model_state": normalized_sd,
+        "optimizer_state": optimizer.state_dict(),
+        "cfg": cfg.dump(),
+    }
+    # Write the checkpoint.
+    path_to_checkpoint = get_path_to_checkpoint(path_to_job, epoch + 1)
+    with PathManager.open(path_to_checkpoint, "wb") as f:
+        torch.save(checkpoint, f)
+    return path_to_checkpoint
+
+
+def inflate_weight(state_dict_2d, state_dict_3d):
+    """
+    Inflate 2D model weights in state_dict_2d to the 3D model weights in
+    state_dict_3d. The details can be found in:
+    Joao Carreira, and Andrew Zisserman.
+    "Quo vadis, action recognition? a new model and the kinetics dataset."
+    Args:
+        state_dict_2d (OrderedDict): a dict of parameters from a 2D model.
+        state_dict_3d (OrderedDict): a dict of parameters from a 3D model.
+    Returns:
+        state_dict_inflated (OrderedDict): a dict of inflated parameters.
+    """
+    state_dict_inflated = OrderedDict()
+    for k, v2d in state_dict_2d.items():
+        assert k in state_dict_3d.keys()
+        v3d = state_dict_3d[k]
+        # Inflate the weight of 2D conv to 3D conv.
+        if len(v2d.shape) == 4 and len(v3d.shape) == 5:
+            logger.info(
+                "Inflate {}: {} -> {}: {}".format(k, v2d.shape, k, v3d.shape)
+            )
+            # Dimensions need to match.
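+            # I3D-style inflation (illustrative): repeating the 2D kernel
+            # T times along a new temporal axis and dividing by T keeps
+            # the output unchanged on temporally constant input, e.g. a
+            # (64, 3, 7, 7) kernel with T=8 becomes (64, 3, 8, 7, 7).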
+            try:
+                assert v2d.shape[-2:] == v3d.shape[-2:]
+                assert v2d.shape[:2] == v3d.shape[:2]
+                v3d = (
+                    v2d.unsqueeze(2).repeat(1, 1, v3d.shape[2], 1, 1) / v3d.shape[2]
+                )
+            except Exception:
+                # Repo modification: the channel dimensions differ, so
+                # inflate what is available and zero-pad the remaining
+                # input channels.
+                temp = (
+                    v2d.unsqueeze(2).repeat(1, 1, v3d.shape[2], 1, 1) / v3d.shape[2]
+                )
+                v3d = torch.zeros(v3d.shape)
+                v3d[:, : v2d.shape[1], :, :, :] = temp
+
+        elif v2d.shape == v3d.shape:
+            v3d = v2d
+        else:
+            logger.info(
+                "Unexpected {}: {} -|> {}: {}".format(
+                    k, v2d.shape, k, v3d.shape
+                )
+            )
+        state_dict_inflated[k] = v3d.clone()
+    return state_dict_inflated
+
+
+def load_checkpoint(
+    path_to_checkpoint,
+    model,
+    data_parallel=True,
+    optimizer=None,
+    inflation=False,
+    convert_from_caffe2=False,
+    epoch_reset=False,
+    clear_name_pattern=(),
+):
+    """
+    Load the checkpoint from the given file. If inflation is True, inflate the
+    2D Conv weights from the checkpoint to 3D Conv.
+    Args:
+        path_to_checkpoint (string): path to the checkpoint to load.
+        model (model): model to load the weights from the checkpoint.
+        data_parallel (bool): if true, model is wrapped by
+            torch.nn.parallel.DistributedDataParallel.
+        optimizer (optim): optimizer to load the historical state.
+        inflation (bool): if True, inflate the weights from the checkpoint.
+        convert_from_caffe2 (bool): if True, load the model from caffe2 and
+            convert it to pytorch.
+        epoch_reset (bool): if True, reset #train iterations from the checkpoint.
+        clear_name_pattern (string): if given, this (sub)string will be cleared
+            from a layer name if it can be matched.
+    Returns:
+        (int): the number of training epoch of the checkpoint.
+    """
+    assert PathManager.exists(
+        path_to_checkpoint
+    ), "Checkpoint '{}' not found".format(path_to_checkpoint)
+    logger.info("Loading network weights from {}.".format(path_to_checkpoint))
+
+    # Account for the DDP wrapper in the multi-gpu setting.
+    try:
+        ms = model.module if data_parallel else model
+    except AttributeError:
+        ms = model
+
+    if convert_from_caffe2:
+        with PathManager.open(path_to_checkpoint, "rb") as f:
+            caffe2_checkpoint = pickle.load(f, encoding="latin1")
+        state_dict = OrderedDict()
+        name_convert_func = get_name_convert_func()
+        for key in caffe2_checkpoint["blobs"].keys():
+            converted_key = name_convert_func(key)
+            converted_key = c2_normal_to_sub_bn(converted_key, ms.state_dict())
+            if converted_key in ms.state_dict():
+                c2_blob_shape = caffe2_checkpoint["blobs"][key].shape
+                model_blob_shape = ms.state_dict()[converted_key].shape
+
+                # Expand shape dims if they differ (e.g. for converting
+                # linear to conv params).
+                if len(c2_blob_shape) < len(model_blob_shape):
+                    c2_blob_shape += (1,) * (
+                        len(model_blob_shape) - len(c2_blob_shape)
+                    )
+                    caffe2_checkpoint["blobs"][key] = np.reshape(
+                        caffe2_checkpoint["blobs"][key], c2_blob_shape
+                    )
+                # Load BN stats to Sub-BN.
+                if (
+                    len(model_blob_shape) == 1
+                    and len(c2_blob_shape) == 1
+                    and model_blob_shape[0] > c2_blob_shape[0]
+                    and model_blob_shape[0] % c2_blob_shape[0] == 0
+                ):
+                    caffe2_checkpoint["blobs"][key] = np.concatenate(
+                        [caffe2_checkpoint["blobs"][key]]
+                        * (model_blob_shape[0] // c2_blob_shape[0])
+                    )
+                    c2_blob_shape = caffe2_checkpoint["blobs"][key].shape
+
+                if c2_blob_shape == tuple(model_blob_shape):
+                    state_dict[converted_key] = torch.tensor(
+                        caffe2_checkpoint["blobs"][key]
+                    ).clone()
+                    logger.info(
+                        "{}: {} => {}: {}".format(
+                            key,
+                            c2_blob_shape,
+                            converted_key,
+                            tuple(model_blob_shape),
+                        )
+                    )
+                else:
+                    logger.warning(
+                        "!! 
{}: {} does not match {}: {}".format(
+                            key,
+                            c2_blob_shape,
+                            converted_key,
+                            tuple(model_blob_shape),
+                        )
+                    )
+            else:
+                if not any(
+                    prefix in key for prefix in ["momentum", "lr", "model_iter"]
+                ):
+                    logger.warning(
+                        "!! {}: cannot be converted, got {}".format(
+                            key, converted_key
+                        )
+                    )
+        diff = set(ms.state_dict()) - set(state_dict)
+        diff = {d for d in diff if "num_batches_tracked" not in d}
+        if len(diff) > 0:
+            logger.warning("Not loaded {}".format(diff))
+        ms.load_state_dict(state_dict, strict=False)
+        epoch = -1
+    else:
+        # Load the checkpoint on CPU to avoid GPU mem spike.
+        with PathManager.open(path_to_checkpoint, "rb") as f:
+            checkpoint = torch.load(f, map_location="cpu")
+        try:
+            model_state_dict_3d = (
+                model.module.state_dict() if data_parallel else model.state_dict()
+            )
+            checkpoint["model_state"] = normal_to_sub_bn(
+                checkpoint["model_state"], model_state_dict_3d
+            )
+        except AttributeError:
+            model_state_dict_3d = model.state_dict()
+            checkpoint["model_state"] = normal_to_sub_bn(
+                checkpoint["model_state"], model_state_dict_3d
+            )
+
+        if inflation:
+            # Try to inflate the model.
+            inflated_model_dict = inflate_weight(
+                checkpoint["model_state"], model_state_dict_3d
+            )
+            ms.load_state_dict(inflated_model_dict, strict=False)
+        else:
+            if clear_name_pattern:
+                for item in clear_name_pattern:
+                    model_state_dict_new = OrderedDict()
+                    for k in checkpoint["model_state"]:
+                        if item in k:
+                            k_re = k.replace(item, "")
+                            model_state_dict_new[k_re] = checkpoint[
+                                "model_state"
+                            ][k]
+                            logger.info("renaming: {} -> {}".format(k, k_re))
+                        else:
+                            model_state_dict_new[k] = checkpoint["model_state"][
+                                k
+                            ]
+                    checkpoint["model_state"] = model_state_dict_new
+
+            pre_train_dict = checkpoint["model_state"]
+            model_dict = ms.state_dict()
+            # Interpolate the temporal position embedding when the
+            # checkpoint and the model use different numbers of frames.
+            if 'model.time_embed' in pre_train_dict:
+                k = 'model.time_embed'
+                v = pre_train_dict[k]
+                v = v[0, :, :].unsqueeze(0).transpose(1, 2)
+                new_v = F.interpolate(v, size=(model_dict[k].size(1)), mode='nearest')
+                pre_train_dict[k] = new_v.transpose(1, 2)
+
+            # Match pre-trained weights that have same shape as current model.
+            pre_train_dict_match = {
+                k: v
+                for k, v in pre_train_dict.items()
+                if k in model_dict and v.size() == model_dict[k].size()
+            }
+            # Weights that do not have match from the pre-trained model.
+            not_load_layers = [
+                k
+                for k in model_dict.keys()
+                if k not in pre_train_dict_match.keys()
+            ]
+            # Log weights that are not loaded with the pre-trained weights.
+            if not_load_layers:
+                for k in not_load_layers:
+                    logger.info("Network weights {} not loaded.".format(k))
+            # Load pre-trained weights.
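+            # With strict=False, the keys without a shape-matched
+            # counterpart (listed above) keep their random initialization,
+            # which is how a 2D-pretrained checkpoint can partially seed a
+            # video model.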
+            ms.load_state_dict(pre_train_dict_match, strict=False)
+            epoch = -1
+
+        # Load the optimizer state (commonly not done when fine-tuning).
+        if "epoch" in checkpoint.keys() and not epoch_reset:
+            epoch = checkpoint["epoch"]
+            if optimizer:
+                optimizer.load_state_dict(checkpoint["optimizer_state"])
+        else:
+            epoch = -1
+    return epoch
+
+
+def sub_to_normal_bn(sd):
+    """
+    Convert the Sub-BN parameters to normal BN parameters in a state dict.
+    There are two copies of BN layers in a Sub-BN implementation: `bn.bn` and
+    `bn.split_bn`. `bn.split_bn` is used during training and
+    "compute_precise_bn". Before saving or evaluation, its stats are copied to
+    `bn.bn`. We rename `bn.bn` to `bn` and store it to be consistent with normal
+    BN layers.
+    Args:
+        sd (OrderedDict): a dict of parameters which might contain Sub-BN
+            parameters.
+    Returns:
+        new_sd (OrderedDict): a dict with Sub-BN parameters reshaped to
+            normal parameters.
+    """
+    new_sd = copy.deepcopy(sd)
+    modifications = [
+        ("bn.bn.running_mean", "bn.running_mean"),
+        ("bn.bn.running_var", "bn.running_var"),
+        ("bn.split_bn.num_batches_tracked", "bn.num_batches_tracked"),
+    ]
+    to_remove = ["bn.bn.", ".split_bn."]
+    for key in sd:
+        for before, after in modifications:
+            if key.endswith(before):
+                new_key = key.split(before)[0] + after
+                new_sd[new_key] = new_sd.pop(key)
+
+        for rm in to_remove:
+            if rm in key and key in new_sd:
+                del new_sd[key]
+
+    for key in new_sd:
+        if key.endswith("bn.weight") or key.endswith("bn.bias"):
+            if len(new_sd[key].size()) == 4:
+                assert all(d == 1 for d in new_sd[key].size()[1:])
+                new_sd[key] = new_sd[key][:, 0, 0, 0]
+
+    return new_sd
+
+
+def c2_normal_to_sub_bn(key, model_keys):
+    """
+    Convert a BN layer name to a Sub-BN layer name if the model contains
+    Sub-BNs.
+    Args:
+        key (string): a layer name from the checkpoint.
+        model_keys (iterable): layer names of the target model state dict.
+    Returns:
+        (string): the converted layer name.
+    """
+    if "bn.running_" in key:
+        if key in model_keys:
+            return key
+
+        new_key = key.replace("bn.running_", "bn.split_bn.running_")
+        if new_key in model_keys:
+            return new_key
+    # Non-BN keys (and unmatched BN keys) are returned unchanged.
+    return key
+
+
+def normal_to_sub_bn(checkpoint_sd, model_sd):
+    """
+    Convert BN parameters to Sub-BN parameters if model contains Sub-BNs.
+    Args:
+        checkpoint_sd (OrderedDict): source dict of parameters.
+        model_sd (OrderedDict): target dict of parameters.
+    Returns:
+        new_sd (OrderedDict): converted dict of parameters.
+    """
+    for key in model_sd:
+        if key not in checkpoint_sd:
+            if "bn.split_bn." in key:
+                load_key = key.replace("bn.split_bn.", "bn.")
+                bn_key = key.replace("bn.split_bn.", "bn.bn.")
+                checkpoint_sd[key] = checkpoint_sd.pop(load_key)
+                checkpoint_sd[bn_key] = checkpoint_sd[key]
+
+    for key in model_sd:
+        if key in checkpoint_sd:
+            model_blob_shape = model_sd[key].shape
+            c2_blob_shape = checkpoint_sd[key].shape
+
+            if (
+                len(model_blob_shape) == 1
+                and len(c2_blob_shape) == 1
+                and model_blob_shape[0] > c2_blob_shape[0]
+                and model_blob_shape[0] % c2_blob_shape[0] == 0
+            ):
+                before_shape = checkpoint_sd[key].shape
+                checkpoint_sd[key] = torch.cat(
+                    [checkpoint_sd[key]]
+                    * (model_blob_shape[0] // c2_blob_shape[0])
+                )
+                logger.info(
+                    "{} {} -> {}".format(
+                        key, before_shape, checkpoint_sd[key].shape
+                    )
+                )
+    return checkpoint_sd
+
+
+def load_test_checkpoint(cfg, model):
+    """
+    Loading checkpoint logic for testing.
+    """
+    # Load a checkpoint to test if applicable.
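+    # Resolution order: TEST.CHECKPOINT_FILE_PATH first, then the last
+    # checkpoint in OUTPUT_DIR, then TRAIN.CHECKPOINT_FILE_PATH; otherwise
+    # the randomly initialized weights are kept.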
+ if cfg.TEST.CHECKPOINT_FILE_PATH != "": + # If no checkpoint found in MODEL_VIS.CHECKPOINT_FILE_PATH or in the current + # checkpoint folder, try to load checkpoint from + # TEST.CHECKPOINT_FILE_PATH and test it. + load_checkpoint( + cfg.TEST.CHECKPOINT_FILE_PATH, + model, + cfg.NUM_GPUS > 1, + None, + inflation=False, + convert_from_caffe2=cfg.TEST.CHECKPOINT_TYPE == "caffe2", + ) + elif has_checkpoint(cfg.OUTPUT_DIR): + last_checkpoint = get_last_checkpoint(cfg.OUTPUT_DIR) + load_checkpoint(last_checkpoint, model, cfg.NUM_GPUS > 1) + elif cfg.TRAIN.CHECKPOINT_FILE_PATH != "": + # If no checkpoint found in TEST.CHECKPOINT_FILE_PATH or in the current + # checkpoint folder, try to load checkpoint from + # TRAIN.CHECKPOINT_FILE_PATH and test it. + load_checkpoint( + cfg.TRAIN.CHECKPOINT_FILE_PATH, + model, + cfg.NUM_GPUS > 1, + None, + inflation=False, + convert_from_caffe2=cfg.TRAIN.CHECKPOINT_TYPE == "caffe2", + ) + else: + logger.info( + "Unknown way of loading checkpoint. Using with random initialization, only for debugging." + ) + + +def load_train_checkpoint(cfg, model, optimizer): + """ + Loading checkpoint logic for training. + """ + if cfg.TRAIN.AUTO_RESUME and has_checkpoint(cfg.OUTPUT_DIR): + last_checkpoint = get_last_checkpoint(cfg.OUTPUT_DIR) + logger.info("Load from last checkpoint, {}.".format(last_checkpoint)) + checkpoint_epoch = load_checkpoint( + last_checkpoint, model, cfg.NUM_GPUS > 1, optimizer + ) + start_epoch = checkpoint_epoch + 1 + elif cfg.TRAIN.CHECKPOINT_FILE_PATH != "": + logger.info("Load from given checkpoint file.") + checkpoint_epoch = load_checkpoint( + cfg.TRAIN.CHECKPOINT_FILE_PATH, + model, + cfg.NUM_GPUS > 1, + optimizer, + inflation=cfg.TRAIN.CHECKPOINT_INFLATE, + convert_from_caffe2=cfg.TRAIN.CHECKPOINT_TYPE == "caffe2", + epoch_reset=cfg.TRAIN.CHECKPOINT_EPOCH_RESET, + clear_name_pattern=cfg.TRAIN.CHECKPOINT_CLEAR_NAME_PATTERN, + ) + start_epoch = checkpoint_epoch + 1 + else: + start_epoch = 0 + + return start_epoch diff --git a/TimeSformer/timesformer/utils/distributed.py b/TimeSformer/timesformer/utils/distributed.py new file mode 100755 index 0000000000000000000000000000000000000000..b511a846eec5592d01aba73e011b94ad879e4612 --- /dev/null +++ b/TimeSformer/timesformer/utils/distributed.py @@ -0,0 +1,308 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Distributed helpers.""" + +import functools +import logging +import pickle +import torch +import torch.distributed as dist + +_LOCAL_PROCESS_GROUP = None + + +def all_gather(tensors): + """ + All gathers the provided tensors from all processes across machines. + Args: + tensors (list): tensors to perform all gather across all processes in + all machines. + """ + + gather_list = [] + output_tensor = [] + world_size = dist.get_world_size() + for tensor in tensors: + tensor_placeholder = [ + torch.ones_like(tensor) for _ in range(world_size) + ] + dist.all_gather(tensor_placeholder, tensor, async_op=False) + gather_list.append(tensor_placeholder) + for gathered_tensor in gather_list: + output_tensor.append(torch.cat(gathered_tensor, dim=0)) + return output_tensor + + +def all_reduce(tensors, average=True): + """ + All reduce the provided tensors from all processes across machines. + Args: + tensors (list): tensors to perform all reduce across all processes in + all machines. + average (bool): scales the reduced tensor by the number of overall + processes across all machines. 
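+
+    Example (illustrative): with 4 processes each holding t = torch.ones(1),
+    all_reduce([t]) leaves t == torch.ones(1) (the sum 4 is scaled by
+    1 / world_size), while average=False would leave t == 4 * torch.ones(1).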
+    """
+
+    for tensor in tensors:
+        dist.all_reduce(tensor, async_op=False)
+    if average:
+        world_size = dist.get_world_size()
+        for tensor in tensors:
+            tensor.mul_(1.0 / world_size)
+    return tensors
+
+
+def init_process_group(
+    local_rank,
+    local_world_size,
+    shard_id,
+    num_shards,
+    init_method,
+    dist_backend="nccl",
+):
+    """
+    Initializes the default process group.
+    Args:
+        local_rank (int): the rank on the current local machine.
+        local_world_size (int): the world size (number of processes running) on
+            the current local machine.
+        shard_id (int): the shard index (machine rank) of the current machine.
+        num_shards (int): number of shards for distributed training.
+        init_method (string): supporting three different methods for
+            initializing process groups:
+            "file": use shared file system to initialize the groups across
+            different processes.
+            "tcp": use tcp address to initialize the groups across different
+            processes.
+        dist_backend (string): backend to use for distributed training. Options
+            include gloo, mpi and nccl, the details can be found here:
+            https://pytorch.org/docs/stable/distributed.html
+    """
+    # Sets the GPU to use.
+    torch.cuda.set_device(local_rank)
+    # Initialize the process group.
+    # Global rank, e.g. shard_id=1 with 8 processes per machine and
+    # local_rank=3 gives proc_rank 11.
+    proc_rank = local_rank + shard_id * local_world_size
+    world_size = local_world_size * num_shards
+    dist.init_process_group(
+        backend=dist_backend,
+        init_method=init_method,
+        world_size=world_size,
+        rank=proc_rank,
+    )
+
+
+def is_master_proc(num_gpus=8):
+    """
+    Determines if the current process is the master process.
+    """
+    if torch.distributed.is_initialized():
+        return dist.get_rank() % num_gpus == 0
+    else:
+        return True
+
+
+def is_root_proc():
+    """
+    Determines if the current process is the root process.
+    """
+    if torch.distributed.is_initialized():
+        return dist.get_rank() == 0
+    else:
+        return True
+
+
+def get_world_size():
+    """
+    Get the size of the world.
+    """
+    if not dist.is_available():
+        return 1
+    if not dist.is_initialized():
+        return 1
+    return dist.get_world_size()
+
+
+def get_rank():
+    """
+    Get the rank of the current process.
+    """
+    if not dist.is_available():
+        return 0
+    if not dist.is_initialized():
+        return 0
+    return dist.get_rank()
+
+
+def synchronize():
+    """
+    Helper function to synchronize (barrier) among all processes when
+    using distributed training.
+    """
+    if not dist.is_available():
+        return
+    if not dist.is_initialized():
+        return
+    world_size = dist.get_world_size()
+    if world_size == 1:
+        return
+    dist.barrier()
+
+
+@functools.lru_cache()
+def _get_global_gloo_group():
+    """
+    Return a process group based on the gloo backend, containing all the
+    ranks. The result is cached.
+    Returns:
+        (group): pytorch dist group.
+    """
+    if dist.get_backend() == "nccl":
+        return dist.new_group(backend="gloo")
+    else:
+        return dist.group.WORLD
+
+
+def _serialize_to_tensor(data, group):
+    """
+    Serialize arbitrary picklable data to a ByteTensor. Note that only the
+    `gloo` and `nccl` backends are supported.
+    Args:
+        data (data): data to be serialized.
+        group (group): pytorch dist group.
+    Returns:
+        tensor (ByteTensor): the serialized tensor.
+ """ + + backend = dist.get_backend(group) + assert backend in ["gloo", "nccl"] + device = torch.device("cpu" if backend == "gloo" else "cuda") + + buffer = pickle.dumps(data) + if len(buffer) > 1024 ** 3: + logger = logging.getLogger(__name__) + logger.warning( + "Rank {} trying to all-gather {:.2f} GB of data on device {}".format( + get_rank(), len(buffer) / (1024 ** 3), device + ) + ) + storage = torch.ByteStorage.from_buffer(buffer) + tensor = torch.ByteTensor(storage).to(device=device) + return tensor + + +def _pad_to_largest_tensor(tensor, group): + """ + Padding all the tensors from different GPUs to the largest ones. + Args: + tensor (tensor): tensor to pad. + group (group): pytorch dist group. + Returns: + list[int]: size of the tensor, on each rank + Tensor: padded tensor that has the max size + """ + world_size = dist.get_world_size(group=group) + assert ( + world_size >= 1 + ), "comm.gather/all_gather must be called from ranks within the given group!" + local_size = torch.tensor( + [tensor.numel()], dtype=torch.int64, device=tensor.device + ) + size_list = [ + torch.zeros([1], dtype=torch.int64, device=tensor.device) + for _ in range(world_size) + ] + dist.all_gather(size_list, local_size, group=group) + size_list = [int(size.item()) for size in size_list] + + max_size = max(size_list) + + # we pad the tensor because torch all_gather does not support + # gathering tensors of different shapes + if local_size != max_size: + padding = torch.zeros( + (max_size - local_size,), dtype=torch.uint8, device=tensor.device + ) + tensor = torch.cat((tensor, padding), dim=0) + return size_list, tensor + + +def all_gather_unaligned(data, group=None): + """ + Run all_gather on arbitrary picklable data (not necessarily tensors). + + Args: + data: any picklable object + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. + + Returns: + list[data]: list of data gathered from each rank + """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group) == 1: + return [data] + + tensor = _serialize_to_tensor(data, group) + + size_list, tensor = _pad_to_largest_tensor(tensor, group) + max_size = max(size_list) + + # receiving Tensor from all ranks + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) + for _ in size_list + ] + dist.all_gather(tensor_list, tensor, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + + return data_list + + +def init_distributed_training(cfg): + """ + Initialize variables needed for distributed training. + """ + if cfg.NUM_GPUS <= 1: + return + num_gpus_per_machine = cfg.NUM_GPUS + num_machines = dist.get_world_size() // num_gpus_per_machine + for i in range(num_machines): + ranks_on_i = list( + range(i * num_gpus_per_machine, (i + 1) * num_gpus_per_machine) + ) + pg = dist.new_group(ranks_on_i) + if i == cfg.SHARD_ID: + global _LOCAL_PROCESS_GROUP + _LOCAL_PROCESS_GROUP = pg + + +def get_local_size() -> int: + """ + Returns: + The size of the per-machine process group, + i.e. the number of processes per machine. + """ + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size(group=_LOCAL_PROCESS_GROUP) + + +def get_local_rank() -> int: + """ + Returns: + The rank of the current process within the local (per-machine) process group. 
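+        Illustrative: with 2 machines of 8 processes each, global rank 11
+        corresponds to local rank 3.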
+ """ + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + assert _LOCAL_PROCESS_GROUP is not None + return dist.get_rank(group=_LOCAL_PROCESS_GROUP) diff --git a/TimeSformer/timesformer/utils/env.py b/TimeSformer/timesformer/utils/env.py new file mode 100755 index 0000000000000000000000000000000000000000..0d836a345886e22736ff8fc58a3f59dd0d35b2ca --- /dev/null +++ b/TimeSformer/timesformer/utils/env.py @@ -0,0 +1,14 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Set up Environment.""" + +import timesformer.utils.logging as logging + +_ENV_SETUP_DONE = False + + +def setup_environment(): + global _ENV_SETUP_DONE + if _ENV_SETUP_DONE: + return + _ENV_SETUP_DONE = True diff --git a/TimeSformer/timesformer/utils/logging.py b/TimeSformer/timesformer/utils/logging.py new file mode 100755 index 0000000000000000000000000000000000000000..4c6d9664029ff321acdc25b57beacb92a6b09ad6 --- /dev/null +++ b/TimeSformer/timesformer/utils/logging.py @@ -0,0 +1,95 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Logging.""" + +import atexit +import builtins +import decimal +import functools +import logging +import os +import sys +import simplejson +from fvcore.common.file_io import PathManager + +import timesformer.utils.distributed as du + + +def _suppress_print(): + """ + Suppresses printing from the current process. + """ + + def print_pass(*objects, sep=" ", end="\n", file=sys.stdout, flush=False): + pass + + builtins.print = print_pass + + +@functools.lru_cache(maxsize=None) +def _cached_log_stream(filename): + io = PathManager.open(filename, "a", buffering=1024) + atexit.register(io.close) + return io + + +def setup_logging(output_dir=None): + """ + Sets up the logging for multiple processes. Only enable the logging for the + master process, and suppress logging for the non-master processes. + """ + # Set up logging format. + _FORMAT = "[%(levelname)s: %(filename)s: %(lineno)4d]: %(message)s" + + if du.is_master_proc(): + # Enable logging for the master process. + logging.root.handlers = [] + else: + # Suppress logging for non-master processes. + _suppress_print() + + logger = logging.getLogger() + logger.setLevel(logging.DEBUG) + logger.propagate = False + plain_formatter = logging.Formatter( + "[%(asctime)s][%(levelname)s] %(filename)s: %(lineno)3d: %(message)s", + datefmt="%m/%d %H:%M:%S", + ) + + if du.is_master_proc(): + ch = logging.StreamHandler(stream=sys.stdout) + ch.setLevel(logging.DEBUG) + ch.setFormatter(plain_formatter) + logger.addHandler(ch) + + if output_dir is not None and du.is_master_proc(du.get_world_size()): + filename = os.path.join(output_dir, "stdout.log") + fh = logging.StreamHandler(_cached_log_stream(filename)) + fh.setLevel(logging.DEBUG) + fh.setFormatter(plain_formatter) + logger.addHandler(fh) + + +def get_logger(name): + """ + Retrieve the logger with the specified name or, if name is None, return a + logger which is the root logger of the hierarchy. + Args: + name (string): name of the logger. + """ + return logging.getLogger(name) + + +def log_json_stats(stats): + """ + Logs json stats. + Args: + stats (dict): a dictionary of statistical information to log. 
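+    Example (illustrative):
+        >>> log_json_stats({"split": "val", "top1_err": 23.456789})
+        # emits: json_stats: {"split": "val", "top1_err": 23.45679}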
+ """ + stats = { + k: decimal.Decimal("{:.5f}".format(v)) if isinstance(v, float) else v + for k, v in stats.items() + } + json_stats = simplejson.dumps(stats, sort_keys=True, use_decimal=True) + logger = get_logger(__name__) + logger.info("json_stats: {:s}".format(json_stats)) diff --git a/TimeSformer/timesformer/utils/lr_policy.py b/TimeSformer/timesformer/utils/lr_policy.py new file mode 100755 index 0000000000000000000000000000000000000000..9e2393be89b5b5c8e34ffa3494f3398f833ec50c --- /dev/null +++ b/TimeSformer/timesformer/utils/lr_policy.py @@ -0,0 +1,87 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Learning rate policy.""" + +import math + + +def get_lr_at_epoch(cfg, cur_epoch): + """ + Retrieve the learning rate of the current epoch with the option to perform + warm up in the beginning of the training stage. + Args: + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + cur_epoch (float): the number of epoch of the current training stage. + """ + lr = get_lr_func(cfg.SOLVER.LR_POLICY)(cfg, cur_epoch) + # Perform warm up. + if cur_epoch < cfg.SOLVER.WARMUP_EPOCHS: + lr_start = cfg.SOLVER.WARMUP_START_LR + lr_end = get_lr_func(cfg.SOLVER.LR_POLICY)( + cfg, cfg.SOLVER.WARMUP_EPOCHS + ) + alpha = (lr_end - lr_start) / cfg.SOLVER.WARMUP_EPOCHS + lr = cur_epoch * alpha + lr_start + return lr + + +def lr_func_cosine(cfg, cur_epoch): + """ + Retrieve the learning rate to specified values at specified epoch with the + cosine learning rate schedule. Details can be found in: + Ilya Loshchilov, and Frank Hutter + SGDR: Stochastic Gradient Descent With Warm Restarts. + Args: + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + cur_epoch (float): the number of epoch of the current training stage. + """ + assert cfg.SOLVER.COSINE_END_LR < cfg.SOLVER.BASE_LR + return ( + cfg.SOLVER.COSINE_END_LR + + (cfg.SOLVER.BASE_LR - cfg.SOLVER.COSINE_END_LR) + * (math.cos(math.pi * cur_epoch / cfg.SOLVER.MAX_EPOCH) + 1.0) + * 0.5 + ) + + +def lr_func_steps_with_relative_lrs(cfg, cur_epoch): + """ + Retrieve the learning rate to specified values at specified epoch with the + steps with relative learning rate schedule. + Args: + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + cur_epoch (float): the number of epoch of the current training stage. + """ + ind = get_step_index(cfg, cur_epoch) + return cfg.SOLVER.LRS[ind] * cfg.SOLVER.BASE_LR + + +def get_step_index(cfg, cur_epoch): + """ + Retrieves the lr step index for the given epoch. + Args: + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + cur_epoch (float): the number of epoch of the current training stage. + """ + steps = cfg.SOLVER.STEPS + [cfg.SOLVER.MAX_EPOCH] + for ind, step in enumerate(steps): # NoQA + if cur_epoch < step: + break + return ind - 1 + + +def get_lr_func(lr_policy): + """ + Given the configs, retrieve the specified lr policy function. + Args: + lr_policy (string): the learning rate policy to use for the job. 
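+
+    Example (illustrative): get_lr_func("cosine") returns lr_func_cosine,
+    so with BASE_LR=0.1, COSINE_END_LR=0.0 and MAX_EPOCH=100, epoch 50
+    yields 0.1 * (cos(pi * 50 / 100) + 1) / 2 = 0.05.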
+ """ + policy = "lr_func_" + lr_policy + if policy not in globals(): + raise NotImplementedError("Unknown LR policy: {}".format(lr_policy)) + else: + return globals()[policy] diff --git a/TimeSformer/timesformer/utils/meters.py b/TimeSformer/timesformer/utils/meters.py new file mode 100755 index 0000000000000000000000000000000000000000..5b91ad300855cea2fca55c78517f58f969df4c3b --- /dev/null +++ b/TimeSformer/timesformer/utils/meters.py @@ -0,0 +1,596 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Meters.""" + +import datetime +import numpy as np +import os +from collections import defaultdict, deque +import torch +from fvcore.common.timer import Timer +from sklearn.metrics import average_precision_score + +import timesformer.utils.logging as logging +import timesformer.utils.metrics as metrics +import timesformer.utils.misc as misc + +logger = logging.get_logger(__name__) + + +class TestMeter(object): + """ + Perform the multi-view ensemble for testing: each video with an unique index + will be sampled with multiple clips, and the predictions of the clips will + be aggregated to produce the final prediction for the video. + The accuracy is calculated with the given ground truth labels. + """ + + def __init__( + self, + num_videos, + num_clips, + num_cls, + overall_iters, + multi_label=False, + ensemble_method="sum", + ): + """ + Construct tensors to store the predictions and labels. Expect to get + num_clips predictions from each video, and calculate the metrics on + num_videos videos. + Args: + num_videos (int): number of videos to test. + num_clips (int): number of clips sampled from each video for + aggregating the final prediction for the video. + num_cls (int): number of classes for each prediction. + overall_iters (int): overall iterations for testing. + multi_label (bool): if True, use map as the metric. + ensemble_method (str): method to perform the ensemble, options + include "sum", and "max". + """ + + self.iter_timer = Timer() + self.data_timer = Timer() + self.net_timer = Timer() + self.num_clips = num_clips + self.overall_iters = overall_iters + self.multi_label = multi_label + self.ensemble_method = ensemble_method + # Initialize tensors. + self.video_preds = torch.zeros((num_videos, num_cls)) + if multi_label: + self.video_preds -= 1e10 + + self.video_labels = ( + torch.zeros((num_videos, num_cls)) + if multi_label + else torch.zeros((num_videos)).long() + ) + self.clip_count = torch.zeros((num_videos)).long() + self.topk_accs = [] + self.stats = {} + + # Reset metric. + self.reset() + + def reset(self): + """ + Reset the metric. + """ + self.clip_count.zero_() + self.video_preds.zero_() + if self.multi_label: + self.video_preds -= 1e10 + self.video_labels.zero_() + + def update_stats(self, preds, labels, clip_ids): + """ + Collect the predictions from the current batch and perform on-the-flight + summation as ensemble. + Args: + preds (tensor): predictions from the current batch. Dimension is + N x C where N is the batch size and C is the channel size + (num_cls). + labels (tensor): the corresponding labels of the current batch. + Dimension is N. + clip_ids (tensor): clip indexes of the current batch, dimension is + N. 
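+
+        Illustrative: with num_clips=10, clip ids 0-9 all map to video 0
+        via clip_id // num_clips, and their predictions are accumulated
+        into video_preds[0] by summation (or element-wise max).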
+ """ + for ind in range(preds.shape[0]): + vid_id = int(clip_ids[ind]) // self.num_clips + if self.video_labels[vid_id].sum() > 0: + assert torch.equal( + self.video_labels[vid_id].type(torch.FloatTensor), + labels[ind].type(torch.FloatTensor), + ) + self.video_labels[vid_id] = labels[ind] + if self.ensemble_method == "sum": + self.video_preds[vid_id] += preds[ind] + elif self.ensemble_method == "max": + self.video_preds[vid_id] = torch.max( + self.video_preds[vid_id], preds[ind] + ) + else: + raise NotImplementedError( + "Ensemble Method {} is not supported".format( + self.ensemble_method + ) + ) + self.clip_count[vid_id] += 1 + + def log_iter_stats(self, cur_iter): + """ + Log the stats. + Args: + cur_iter (int): the current iteration of testing. + """ + eta_sec = self.iter_timer.seconds() * (self.overall_iters - cur_iter) + eta = str(datetime.timedelta(seconds=int(eta_sec))) + stats = { + "split": "test_iter", + "cur_iter": "{}".format(cur_iter + 1), + "eta": eta, + "time_diff": self.iter_timer.seconds(), + } + logging.log_json_stats(stats) + + def iter_tic(self): + """ + Start to record time. + """ + self.iter_timer.reset() + self.data_timer.reset() + + def iter_toc(self): + """ + Stop to record time. + """ + self.iter_timer.pause() + self.net_timer.pause() + + def data_toc(self): + self.data_timer.pause() + self.net_timer.reset() + + def finalize_metrics(self, ks=(1, 5)): + """ + Calculate and log the final ensembled metrics. + ks (tuple): list of top-k values for topk_accuracies. For example, + ks = (1, 5) correspods to top-1 and top-5 accuracy. + """ + if not all(self.clip_count == self.num_clips): + logger.warning( + "clip count {} ~= num clips {}".format( + ", ".join( + [ + "{}: {}".format(i, k) + for i, k in enumerate(self.clip_count.tolist()) + ] + ), + self.num_clips, + ) + ) + + self.stats = {"split": "test_final"} + if self.multi_label: + map = get_map( + self.video_preds.cpu().numpy(), self.video_labels.cpu().numpy() + ) + self.stats["map"] = map + else: + num_topks_correct = metrics.topks_correct( + self.video_preds, self.video_labels, ks + ) + topks = [ + (x / self.video_preds.size(0)) * 100.0 + for x in num_topks_correct + ] + + assert len({len(ks), len(topks)}) == 1 + for k, topk in zip(ks, topks): + self.stats["top{}_acc".format(k)] = "{:.{prec}f}".format( + topk, prec=2 + ) + logging.log_json_stats(self.stats) + + +class ScalarMeter(object): + """ + A scalar meter uses a deque to track a series of scaler values with a given + window size. It supports calculating the median and average values of the + window, and also supports calculating the global average. + """ + + def __init__(self, window_size): + """ + Args: + window_size (int): size of the max length of the deque. + """ + self.deque = deque(maxlen=window_size) + self.total = 0.0 + self.count = 0 + + def reset(self): + """ + Reset the deque. + """ + self.deque.clear() + self.total = 0.0 + self.count = 0 + + def add_value(self, value): + """ + Add a new scalar value to the deque. + """ + self.deque.append(value) + self.count += 1 + self.total += value + + def get_win_median(self): + """ + Calculate the current median value of the deque. + """ + return np.median(self.deque) + + def get_win_avg(self): + """ + Calculate the current average value of the deque. + """ + return np.mean(self.deque) + + def get_global_avg(self): + """ + Calculate the global mean value. + """ + return self.total / self.count + + +class TrainMeter(object): + """ + Measure training stats. 
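+
+    Iteration logs report window medians (window = cfg.LOG_PERIOD) from
+    the ScalarMeters, while epoch logs report exact averages computed
+    from the accumulated totals and sample counts.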
+ """ + + def __init__(self, epoch_iters, cfg): + """ + Args: + epoch_iters (int): the overall number of iterations of one epoch. + cfg (CfgNode): configs. + """ + self._cfg = cfg + self.epoch_iters = epoch_iters + self.MAX_EPOCH = cfg.SOLVER.MAX_EPOCH * epoch_iters + self.iter_timer = Timer() + self.data_timer = Timer() + self.net_timer = Timer() + self.loss = ScalarMeter(cfg.LOG_PERIOD) + self.loss_total = 0.0 + self.lr = None + # Current minibatch errors (smoothed over a window). + self.mb_top1_err = ScalarMeter(cfg.LOG_PERIOD) + self.mb_top5_err = ScalarMeter(cfg.LOG_PERIOD) + # Number of misclassified examples. + self.num_top1_mis = 0 + self.num_top5_mis = 0 + self.num_samples = 0 + self.output_dir = cfg.OUTPUT_DIR + self.extra_stats = {} + self.extra_stats_total = {} + self.log_period = cfg.LOG_PERIOD + + def reset(self): + """ + Reset the Meter. + """ + self.loss.reset() + self.loss_total = 0.0 + self.lr = None + self.mb_top1_err.reset() + self.mb_top5_err.reset() + self.num_top1_mis = 0 + self.num_top5_mis = 0 + self.num_samples = 0 + + for key in self.extra_stats.keys(): + self.extra_stats[key].reset() + self.extra_stats_total[key] = 0.0 + + def iter_tic(self): + """ + Start to record time. + """ + self.iter_timer.reset() + self.data_timer.reset() + + def iter_toc(self): + """ + Stop to record time. + """ + self.iter_timer.pause() + self.net_timer.pause() + + def data_toc(self): + self.data_timer.pause() + self.net_timer.reset() + + def update_stats(self, top1_err, top5_err, loss, lr, mb_size, stats={}): + """ + Update the current stats. + Args: + top1_err (float): top1 error rate. + top5_err (float): top5 error rate. + loss (float): loss value. + lr (float): learning rate. + mb_size (int): mini batch size. + """ + self.loss.add_value(loss) + self.lr = lr + self.loss_total += loss * mb_size + self.num_samples += mb_size + + if not self._cfg.DATA.MULTI_LABEL: + # Current minibatch stats + self.mb_top1_err.add_value(top1_err) + self.mb_top5_err.add_value(top5_err) + # Aggregate stats + self.num_top1_mis += top1_err * mb_size + self.num_top5_mis += top5_err * mb_size + + for key in stats.keys(): + if key not in self.extra_stats: + self.extra_stats[key] = ScalarMeter(self.log_period) + self.extra_stats_total[key] = 0.0 + self.extra_stats[key].add_value(stats[key]) + self.extra_stats_total[key] += stats[key] * mb_size + + def log_iter_stats(self, cur_epoch, cur_iter): + """ + log the stats of the current iteration. + Args: + cur_epoch (int): the number of current epoch. + cur_iter (int): the number of current iteration. 
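+
+        The reported ETA is the current iteration time multiplied by the
+        number of iterations remaining in the whole run (self.MAX_EPOCH
+        is stored in iterations, not epochs).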
+ """ + if (cur_iter + 1) % self._cfg.LOG_PERIOD != 0: + return + eta_sec = self.iter_timer.seconds() * ( + self.MAX_EPOCH - (cur_epoch * self.epoch_iters + cur_iter + 1) + ) + eta = str(datetime.timedelta(seconds=int(eta_sec))) + stats = { + "_type": "train_iter", + "epoch": "{}/{}".format(cur_epoch + 1, self._cfg.SOLVER.MAX_EPOCH), + "iter": "{}/{}".format(cur_iter + 1, self.epoch_iters), + "dt": self.iter_timer.seconds(), + "dt_data": self.data_timer.seconds(), + "dt_net": self.net_timer.seconds(), + "eta": eta, + "loss": self.loss.get_win_median(), + "lr": self.lr, + "gpu_mem": "{:.2f}G".format(misc.gpu_mem_usage()), + } + if not self._cfg.DATA.MULTI_LABEL: + stats["top1_err"] = self.mb_top1_err.get_win_median() + stats["top5_err"] = self.mb_top5_err.get_win_median() + for key in self.extra_stats.keys(): + stats[key] = self.extra_stats_total[key] / self.num_samples + logging.log_json_stats(stats) + + def log_epoch_stats(self, cur_epoch): + """ + Log the stats of the current epoch. + Args: + cur_epoch (int): the number of current epoch. + """ + eta_sec = self.iter_timer.seconds() * ( + self.MAX_EPOCH - (cur_epoch + 1) * self.epoch_iters + ) + eta = str(datetime.timedelta(seconds=int(eta_sec))) + stats = { + "_type": "train_epoch", + "epoch": "{}/{}".format(cur_epoch + 1, self._cfg.SOLVER.MAX_EPOCH), + "dt": self.iter_timer.seconds(), + "dt_data": self.data_timer.seconds(), + "dt_net": self.net_timer.seconds(), + "eta": eta, + "lr": self.lr, + "gpu_mem": "{:.2f}G".format(misc.gpu_mem_usage()), + "RAM": "{:.2f}/{:.2f}G".format(*misc.cpu_mem_usage()), + } + if not self._cfg.DATA.MULTI_LABEL: + top1_err = self.num_top1_mis / self.num_samples + top5_err = self.num_top5_mis / self.num_samples + avg_loss = self.loss_total / self.num_samples + stats["top1_err"] = top1_err + stats["top5_err"] = top5_err + stats["loss"] = avg_loss + for key in self.extra_stats.keys(): + stats[key] = self.extra_stats_total[key] / self.num_samples + logging.log_json_stats(stats) + + +class ValMeter(object): + """ + Measures validation stats. + """ + + def __init__(self, max_iter, cfg): + """ + Args: + max_iter (int): the max number of iteration of the current epoch. + cfg (CfgNode): configs. + """ + self._cfg = cfg + self.max_iter = max_iter + self.iter_timer = Timer() + self.data_timer = Timer() + self.net_timer = Timer() + # Current minibatch errors (smoothed over a window). + self.mb_top1_err = ScalarMeter(cfg.LOG_PERIOD) + self.mb_top5_err = ScalarMeter(cfg.LOG_PERIOD) + # Min errors (over the full val set). + self.min_top1_err = 100.0 + self.min_top5_err = 100.0 + # Number of misclassified examples. + self.num_top1_mis = 0 + self.num_top5_mis = 0 + self.num_samples = 0 + self.all_preds = [] + self.all_labels = [] + self.output_dir = cfg.OUTPUT_DIR + self.extra_stats = {} + self.extra_stats_total = {} + self.log_period = cfg.LOG_PERIOD + + def reset(self): + """ + Reset the Meter. + """ + self.iter_timer.reset() + self.mb_top1_err.reset() + self.mb_top5_err.reset() + self.num_top1_mis = 0 + self.num_top5_mis = 0 + self.num_samples = 0 + self.all_preds = [] + self.all_labels = [] + + for key in self.extra_stats.keys(): + self.extra_stats[key].reset() + self.extra_stats_total[key] = 0.0 + + def iter_tic(self): + """ + Start to record time. + """ + self.iter_timer.reset() + self.data_timer.reset() + + def iter_toc(self): + """ + Stop to record time. 
+ """ + self.iter_timer.pause() + self.net_timer.pause() + + def data_toc(self): + self.data_timer.pause() + self.net_timer.reset() + + def update_stats(self, top1_err, top5_err, mb_size, stats={}): + """ + Update the current stats. + Args: + top1_err (float): top1 error rate. + top5_err (float): top5 error rate. + mb_size (int): mini batch size. + """ + self.mb_top1_err.add_value(top1_err) + self.mb_top5_err.add_value(top5_err) + self.num_top1_mis += top1_err * mb_size + self.num_top5_mis += top5_err * mb_size + self.num_samples += mb_size + + for key in stats.keys(): + if key not in self.extra_stats: + self.extra_stats[key] = ScalarMeter(self.log_period) + self.extra_stats_total[key] = 0.0 + self.extra_stats[key].add_value(stats[key]) + self.extra_stats_total[key] += stats[key] * mb_size + + + def update_predictions(self, preds, labels): + """ + Update predictions and labels. + Args: + preds (tensor): model output predictions. + labels (tensor): labels. + """ + # TODO: merge update_prediction with update_stats. + self.all_preds.append(preds) + self.all_labels.append(labels) + + def log_iter_stats(self, cur_epoch, cur_iter): + """ + log the stats of the current iteration. + Args: + cur_epoch (int): the number of current epoch. + cur_iter (int): the number of current iteration. + """ + if (cur_iter + 1) % self._cfg.LOG_PERIOD != 0: + return + eta_sec = self.iter_timer.seconds() * (self.max_iter - cur_iter - 1) + eta = str(datetime.timedelta(seconds=int(eta_sec))) + stats = { + "_type": "val_iter", + "epoch": "{}/{}".format(cur_epoch + 1, self._cfg.SOLVER.MAX_EPOCH), + "iter": "{}/{}".format(cur_iter + 1, self.max_iter), + "time_diff": self.iter_timer.seconds(), + "eta": eta, + "gpu_mem": "{:.2f}G".format(misc.gpu_mem_usage()), + } + if not self._cfg.DATA.MULTI_LABEL: + stats["top1_err"] = self.mb_top1_err.get_win_median() + stats["top5_err"] = self.mb_top5_err.get_win_median() + for key in self.extra_stats.keys(): + stats[key] = self.extra_stats[key].get_win_median() + logging.log_json_stats(stats) + + def log_epoch_stats(self, cur_epoch): + """ + Log the stats of the current epoch. + Args: + cur_epoch (int): the number of current epoch. + """ + stats = { + "_type": "val_epoch", + "epoch": "{}/{}".format(cur_epoch + 1, self._cfg.SOLVER.MAX_EPOCH), + "time_diff": self.iter_timer.seconds(), + "gpu_mem": "{:.2f}G".format(misc.gpu_mem_usage()), + "RAM": "{:.2f}/{:.2f}G".format(*misc.cpu_mem_usage()), + } + if self._cfg.DATA.MULTI_LABEL: + stats["map"] = get_map( + torch.cat(self.all_preds).cpu().numpy(), + torch.cat(self.all_labels).cpu().numpy(), + ) + else: + top1_err = self.num_top1_mis / self.num_samples + top5_err = self.num_top5_mis / self.num_samples + self.min_top1_err = min(self.min_top1_err, top1_err) + self.min_top5_err = min(self.min_top5_err, top5_err) + + stats["top1_err"] = top1_err + stats["top5_err"] = top5_err + stats["min_top1_err"] = self.min_top1_err + stats["min_top5_err"] = self.min_top5_err + + for key in self.extra_stats.keys(): + stats[key] = self.extra_stats_total[key] / self.num_samples + + logging.log_json_stats(stats) + + +def get_map(preds, labels): + """ + Compute mAP for multi-label case. + Args: + preds (numpy tensor): num_examples x num_classes. + labels (numpy tensor): num_examples x num_classes. + Returns: + mean_ap (int): final mAP score. 
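+
+    Note (illustrative): label columns that are all zero are dropped
+    before scoring, so a class absent from the evaluated split does not
+    drag the mean down.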
+    """
+
+    logger.info("Getting mAP for {} examples".format(preds.shape[0]))
+
+    # Drop classes that never appear in the ground-truth labels.
+    preds = preds[:, ~(np.all(labels == 0, axis=0))]
+    labels = labels[:, ~(np.all(labels == 0, axis=0))]
+    aps = [0]
+    try:
+        aps = average_precision_score(labels, preds, average=None)
+    except ValueError:
+        print(
+            "Average precision requires a sufficient number of samples \
+            in a batch which are missing in this sample."
+        )
+
+    mean_ap = np.mean(aps)
+    return mean_ap
diff --git a/TimeSformer/timesformer/utils/metrics.py b/TimeSformer/timesformer/utils/metrics.py
new file mode 100755
index 0000000000000000000000000000000000000000..ce1451bbb4afaa5b8c7074fd3553742f2a717b80
--- /dev/null
+++ b/TimeSformer/timesformer/utils/metrics.py
@@ -0,0 +1,94 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+"""Functions for computing metrics."""
+
+import torch
+import numpy as np
+
+def topks_correct(preds, labels, ks):
+    """
+    Given the predictions, labels, and a list of top-k values, compute the
+    number of correct predictions for each top-k value.
+
+    Args:
+        preds (array): array of predictions. Dimension is batch size
+            N x ClassNum.
+        labels (array): array of labels. Dimension is batch size N.
+        ks (list): list of top-k values. For example, ks = [1, 5] corresponds
+            to top-1 and top-5.
+
+    Returns:
+        topks_correct (list): list of numbers, where the `i`-th entry
+            corresponds to the number of top-`ks[i]` correct predictions.
+    """
+    assert preds.size(0) == labels.size(
+        0
+    ), "Batch dim of predictions and labels must match"
+    # Find the top max_k predictions for each sample
+    _top_max_k_vals, top_max_k_inds = torch.topk(
+        preds, max(ks), dim=1, largest=True, sorted=True
+    )
+    # (batch_size, max_k) -> (max_k, batch_size).
+    top_max_k_inds = top_max_k_inds.t()
+    # (batch_size, ) -> (max_k, batch_size).
+    rep_max_k_labels = labels.view(1, -1).expand_as(top_max_k_inds)
+    # (i, j) = 1 if top i-th prediction for the j-th sample is correct.
+    top_max_k_correct = top_max_k_inds.eq(rep_max_k_labels)
+    # Compute the number of topk correct predictions for each k.
+    topks_correct = [top_max_k_correct[:k, :].float().sum() for k in ks]
+    return topks_correct
+
+
+def topk_errors(preds, labels, ks):
+    """
+    Computes the top-k error for each k.
+    Args:
+        preds (array): array of predictions. Dimension is N.
+        labels (array): array of labels. Dimension is N.
+        ks (list): list of ks to calculate the top accuracies.
+    """
+    num_topks_correct = topks_correct(preds, labels, ks)
+    return [(1.0 - x / preds.size(0)) * 100.0 for x in num_topks_correct]
+
+
+def topk_accuracies(preds, labels, ks):
+    """
+    Computes the top-k accuracy for each k.
+    Args:
+        preds (array): array of predictions. Dimension is N.
+        labels (array): array of labels. Dimension is N.
+        ks (list): list of ks to calculate the top accuracies.
+    """
+    num_topks_correct = topks_correct(preds, labels, ks)
+    return [(x / preds.size(0)) * 100.0 for x in num_topks_correct]
+
+def multitask_topks_correct(preds, labels, ks=(1,)):
+    """
+    Args:
+        preds: tuple(torch.FloatTensor), each tensor should be of shape
+            [batch_size, class_count], class_count can vary on a per task basis, i.e.
+            outputs[i].shape[1] can be different from outputs[j].shape[1].
+        labels: tuple(torch.LongTensor), each tensor should be of shape [batch_size]
+        ks: tuple(int), compute accuracy at top-k for the values of k specified
+            in this parameter.
+    Returns:
+        tuple(float), same length as ks, with the corresponding accuracy@k.
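+
+        Illustrative: a sample counts toward top-k only when every task's
+        label appears in that task's top-k (per-position hits are summed
+        across tasks and thresholded at task_count).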
+ """ + max_k = int(np.max(ks)) + task_count = len(preds) + batch_size = labels[0].size(0) + all_correct = torch.zeros(max_k, batch_size).type(torch.ByteTensor) + if torch.cuda.is_available(): + all_correct = all_correct.cuda() + for output, label in zip(preds, labels): + _, max_k_idx = output.topk(max_k, dim=1, largest=True, sorted=True) + # Flip batch_size, class_count as .view doesn't work on non-contiguous + max_k_idx = max_k_idx.t() + correct_for_task = max_k_idx.eq(label.view(1, -1).expand_as(max_k_idx)) + all_correct.add_(correct_for_task) + + multitask_topks_correct = [ + torch.ge(all_correct[:k].float().sum(0), task_count).float().sum(0) for k in ks + ] + + return multitask_topks_correct diff --git a/TimeSformer/timesformer/utils/misc.py b/TimeSformer/timesformer/utils/misc.py new file mode 100755 index 0000000000000000000000000000000000000000..abdd6677d0528166773b33876dff8f56d275542f --- /dev/null +++ b/TimeSformer/timesformer/utils/misc.py @@ -0,0 +1,372 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import json +import logging +import math +import numpy as np +import os +from datetime import datetime +import psutil +import torch +from fvcore.common.file_io import PathManager +from fvcore.nn.activation_count import activation_count +from fvcore.nn.flop_count import flop_count +from matplotlib import pyplot as plt +from torch import nn + +import timesformer.utils.logging as logging +import timesformer.utils.multiprocessing as mpu +from timesformer.datasets.utils import pack_pathway_output +from timesformer.models.batchnorm_helper import SubBatchNorm3d + +logger = logging.get_logger(__name__) + + +def check_nan_losses(loss): + """ + Determine whether the loss is NaN (not a number). + Args: + loss (loss): loss to check whether is NaN. + """ + if math.isnan(loss): + raise RuntimeError("ERROR: Got NaN losses {}".format(datetime.now())) + + +def params_count(model, ignore_bn=False): + """ + Compute the number of parameters. + Args: + model (model): model to count the number of parameters. + """ + if not ignore_bn: + return np.sum([p.numel() for p in model.parameters()]).item() + else: + count = 0 + for m in model.modules(): + if not isinstance(m, nn.BatchNorm3d): + for p in m.parameters(recurse=False): + count += p.numel() + return count + + +def gpu_mem_usage(): + """ + Compute the GPU memory usage for the current device (GB). + """ + if torch.cuda.is_available(): + mem_usage_bytes = torch.cuda.max_memory_allocated() + else: + mem_usage_bytes = 0 + return mem_usage_bytes / 1024 ** 3 + + +def cpu_mem_usage(): + """ + Compute the system memory (RAM) usage for the current device (GB). + Returns: + usage (float): used memory (GB). + total (float): total memory (GB). + """ + vram = psutil.virtual_memory() + usage = (vram.total - vram.available) / 1024 ** 3 + total = vram.total / 1024 ** 3 + + return usage, total + + +def _get_model_analysis_input(cfg, use_train_input): + """ + Return a dummy input for model analysis with batch size 1. The input is + used for analyzing the model (counting flops and activations etc.). + Args: + cfg (CfgNode): configs. Details can be found in + lib/config/defaults.py + use_train_input (bool): if True, return the input for training. Otherwise, + return the input for testing. + + Returns: + inputs: the input for model analysis. 
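+
+        Illustrative: for a 'vit' cfg with DATA.NUM_FRAMES=8 and
+        DATA.TRAIN_CROP_SIZE=224, this returns a tuple holding one tensor
+        of shape (1, 3, 8, 224, 224) moved to the GPU.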
+ """ + rgb_dimension = 3 + if use_train_input: + input_tensors = torch.rand( + rgb_dimension, + cfg.DATA.NUM_FRAMES, + cfg.DATA.TRAIN_CROP_SIZE, + cfg.DATA.TRAIN_CROP_SIZE, + ) + else: + input_tensors = torch.rand( + rgb_dimension, + cfg.DATA.NUM_FRAMES, + cfg.DATA.TEST_CROP_SIZE, + cfg.DATA.TEST_CROP_SIZE, + ) + if not cfg.MODEL.ARCH in ['resformer', 'vit']: + model_inputs = pack_pathway_output(cfg, input_tensors) + for i in range(len(model_inputs)): + model_inputs[i] = model_inputs[i].unsqueeze(0) + if cfg.NUM_GPUS: + model_inputs[i] = model_inputs[i].cuda(non_blocking=True) + + else: + model_inputs = input_tensors.cuda(non_blocking=True).unsqueeze(0) + + # If detection is enabled, count flops for one proposal. + if cfg.DETECTION.ENABLE: + bbox = torch.tensor([[0, 0, 1.0, 0, 1.0]]) + if cfg.NUM_GPUS: + bbox = bbox.cuda() + inputs = (model_inputs, bbox) + else: + inputs = (model_inputs,) + return inputs + + +def get_model_stats(model, cfg, mode, use_train_input): + """ + Compute statistics for the current model given the config. + Args: + model (model): model to perform analysis. + cfg (CfgNode): configs. Details can be found in + lib/config/defaults.py + mode (str): Options include `flop` or `activation`. Compute either flop + (gflops) or activation count (mega). + use_train_input (bool): if True, compute statistics for training. Otherwise, + compute statistics for testing. + + Returns: + float: the total number of count of the given model. + """ + assert mode in [ + "flop", + "activation", + ], "'{}' not supported for model analysis".format(mode) + if mode == "flop": + model_stats_fun = flop_count + elif mode == "activation": + model_stats_fun = activation_count + + # Set model to evaluation mode for analysis. + # Evaluation mode can avoid getting stuck with sync batchnorm. + model_mode = model.training + model.eval() + inputs = _get_model_analysis_input(cfg, use_train_input) + count_dict, *_ = model_stats_fun(model, inputs) + count = sum(count_dict.values()) + model.train(model_mode) + return count + + +def log_model_info(model, cfg, use_train_input=True): + """ + Log info, includes number of parameters, gpu usage, gflops and activation count. + The model info is computed when the model is in validation mode. + Args: + model (model): model to log the info. + cfg (CfgNode): configs. Details can be found in + lib/config/defaults.py + use_train_input (bool): if True, log info for training. Otherwise, + log info for testing. + """ + logger.info("Model:\n{}".format(model)) + logger.info("Params: {:,}".format(params_count(model))) + logger.info("Mem: {:,} MB".format(gpu_mem_usage())) + logger.info( + "Flops: {:,} G".format( + get_model_stats(model, cfg, "flop", use_train_input) + ) + ) + logger.info( + "Activations: {:,} M".format( + get_model_stats(model, cfg, "activation", use_train_input) + ) + ) + logger.info("nvidia-smi") + os.system("nvidia-smi") + + +def is_eval_epoch(cfg, cur_epoch, multigrid_schedule): + """ + Determine if the model should be evaluated at the current epoch. + Args: + cfg (CfgNode): configs. Details can be found in + lib/config/defaults.py + cur_epoch (int): current epoch. + multigrid_schedule (List): schedule for multigrid training. 
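+
+        Illustrative: with TRAIN.EVAL_PERIOD=5 and no multigrid schedule,
+        evaluation runs after epochs 4, 9, 14, ... and always on the
+        final epoch.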
+ """ + if cur_epoch + 1 == cfg.SOLVER.MAX_EPOCH: + return True + if multigrid_schedule is not None: + prev_epoch = 0 + for s in multigrid_schedule: + if cur_epoch < s[-1]: + period = max( + (s[-1] - prev_epoch) // cfg.MULTIGRID.EVAL_FREQ + 1, 1 + ) + return (s[-1] - 1 - cur_epoch) % period == 0 + prev_epoch = s[-1] + + return (cur_epoch + 1) % cfg.TRAIN.EVAL_PERIOD == 0 + + +def plot_input(tensor, bboxes=(), texts=(), path="./tmp_vis.png"): + """ + Plot the input tensor with the optional bounding box and save it to disk. + Args: + tensor (tensor): a tensor with shape of `NxCxHxW`. + bboxes (tuple): bounding boxes with format of [[x, y, h, w]]. + texts (tuple): a tuple of string to plot. + path (str): path to the image to save to. + """ + tensor = tensor.float() + tensor = tensor - tensor.min() + tensor = tensor / tensor.max() + f, ax = plt.subplots(nrows=1, ncols=tensor.shape[0], figsize=(50, 20)) + for i in range(tensor.shape[0]): + ax[i].axis("off") + ax[i].imshow(tensor[i].permute(1, 2, 0)) + # ax[1][0].axis('off') + if bboxes is not None and len(bboxes) > i: + for box in bboxes[i]: + x1, y1, x2, y2 = box + ax[i].vlines(x1, y1, y2, colors="g", linestyles="solid") + ax[i].vlines(x2, y1, y2, colors="g", linestyles="solid") + ax[i].hlines(y1, x1, x2, colors="g", linestyles="solid") + ax[i].hlines(y2, x1, x2, colors="g", linestyles="solid") + + if texts is not None and len(texts) > i: + ax[i].text(0, 0, texts[i]) + f.savefig(path) + + +def frozen_bn_stats(model): + """ + Set all the bn layers to eval mode. + Args: + model (model): model to set bn layers to eval mode. + """ + for m in model.modules(): + if isinstance(m, nn.BatchNorm3d): + m.eval() + + +def aggregate_sub_bn_stats(module): + """ + Recursively find all SubBN modules and aggregate sub-BN stats. + Args: + module (nn.Module) + Returns: + count (int): number of SubBN module found. + """ + count = 0 + for child in module.children(): + if isinstance(child, SubBatchNorm3d): + child.aggregate_stats() + count += 1 + else: + count += aggregate_sub_bn_stats(child) + return count + + +def launch_job(cfg, init_method, func, daemon=False): + """ + Run 'func' on one or more GPUs, specified in cfg + Args: + cfg (CfgNode): configs. Details can be found in + lib/config/defaults.py + init_method (str): initialization method to launch the job with multiple + devices. + func (function): job to run on GPU(s) + daemon (bool): The spawned processes’ daemon flag. If set to True, + daemonic processes will be created + """ + if cfg.NUM_GPUS > 1: + torch.multiprocessing.spawn( + mpu.run, + nprocs=cfg.NUM_GPUS, + args=( + cfg.NUM_GPUS, + func, + init_method, + cfg.SHARD_ID, + cfg.NUM_SHARDS, + cfg.DIST_BACKEND, + cfg, + ), + daemon=daemon, + ) + else: + func(cfg=cfg) + + +def get_class_names(path, parent_path=None, subset_path=None): + """ + Read json file with entries {classname: index} and return + an array of class names in order. + If parent_path is provided, load and map all children to their ids. + Args: + path (str): path to class ids json file. + File must be in the format {"class1": id1, "class2": id2, ...} + parent_path (Optional[str]): path to parent-child json file. + File must be in the format {"parent1": ["child1", "child2", ...], ...} + subset_path (Optional[str]): path to text file containing a subset + of class names, separated by newline characters. + Returns: + class_names (list of strs): list of class names. 
+ class_parents (dict): a dictionary where key is the name of the parent class + and value is a list of ids of the children classes. + subset_ids (list of ints): list of ids of the classes provided in the + subset file. + """ + try: + with PathManager.open(path, "r") as f: + class2idx = json.load(f) + except Exception as err: + print("Fail to load file from {} with error {}".format(path, err)) + return + + max_key = max(class2idx.values()) + class_names = [None] * (max_key + 1) + + for k, i in class2idx.items(): + class_names[i] = k + + class_parent = None + if parent_path is not None and parent_path != "": + try: + with PathManager.open(parent_path, "r") as f: + d_parent = json.load(f) + except EnvironmentError as err: + print( + "Fail to load file from {} with error {}".format( + parent_path, err + ) + ) + return + class_parent = {} + for parent, children in d_parent.items(): + indices = [ + class2idx[c] for c in children if class2idx.get(c) is not None + ] + class_parent[parent] = indices + + subset_ids = None + if subset_path is not None and subset_path != "": + try: + with PathManager.open(subset_path, "r") as f: + subset = f.read().split("\n") + subset_ids = [ + class2idx[name] + for name in subset + if class2idx.get(name) is not None + ] + except EnvironmentError as err: + print( + "Fail to load file from {} with error {}".format( + subset_path, err + ) + ) + return + + return class_names, class_parent, subset_ids diff --git a/TimeSformer/timesformer/utils/multigrid.py b/TimeSformer/timesformer/utils/multigrid.py new file mode 100755 index 0000000000000000000000000000000000000000..d96c6d268919afdf038748cbc3823959ff6f90fe --- /dev/null +++ b/TimeSformer/timesformer/utils/multigrid.py @@ -0,0 +1,239 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Helper functions for multigrid training.""" + +import numpy as np + +import timesformer.utils.logging as logging + +logger = logging.get_logger(__name__) + + +class MultigridSchedule(object): + """ + This class defines multigrid training schedule and update cfg accordingly. + """ + + def init_multigrid(self, cfg): + """ + Update cfg based on multigrid settings. + Args: + cfg (configs): configs that contains training and multigrid specific + hyperparameters. Details can be seen in + slowfast/config/defaults.py. + Returns: + cfg (configs): the updated cfg. + """ + self.schedule = None + # We may modify cfg.TRAIN.BATCH_SIZE, cfg.DATA.NUM_FRAMES, and + # cfg.DATA.TRAIN_CROP_SIZE during training, so we store their original + # value in cfg and use them as global variables. + cfg.MULTIGRID.DEFAULT_B = cfg.TRAIN.BATCH_SIZE + cfg.MULTIGRID.DEFAULT_T = cfg.DATA.NUM_FRAMES + cfg.MULTIGRID.DEFAULT_S = cfg.DATA.TRAIN_CROP_SIZE + + if cfg.MULTIGRID.LONG_CYCLE: + self.schedule = self.get_long_cycle_schedule(cfg) + cfg.SOLVER.STEPS = [0] + [s[-1] for s in self.schedule] + # Fine-tuning phase. + cfg.SOLVER.STEPS[-1] = ( + cfg.SOLVER.STEPS[-2] + cfg.SOLVER.STEPS[-1] + ) // 2 + cfg.SOLVER.LRS = [ + cfg.SOLVER.GAMMA ** s[0] * s[1][0] for s in self.schedule + ] + # Fine-tuning phase. 
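+            # (Illustrative note, not from the source: the list of learning
+            # rates gains one extra entry here, repeating the next-to-last
+            # rate, so that it lines up with cfg.SOLVER.STEPS, which gained an
+            # extra boundary when 0 was prepended above.)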
+ cfg.SOLVER.LRS = cfg.SOLVER.LRS[:-1] + [ + cfg.SOLVER.LRS[-2], + cfg.SOLVER.LRS[-1], + ] + + cfg.SOLVER.MAX_EPOCH = self.schedule[-1][-1] + + elif cfg.MULTIGRID.SHORT_CYCLE: + cfg.SOLVER.STEPS = [ + int(s * cfg.MULTIGRID.EPOCH_FACTOR) for s in cfg.SOLVER.STEPS + ] + cfg.SOLVER.MAX_EPOCH = int( + cfg.SOLVER.MAX_EPOCH * cfg.MULTIGRID.EPOCH_FACTOR + ) + return cfg + + def update_long_cycle(self, cfg, cur_epoch): + """ + Before every epoch, check if long cycle shape should change. If it + should, update cfg accordingly. + Args: + cfg (configs): configs that contains training and multigrid specific + hyperparameters. Details can be seen in + slowfast/config/defaults.py. + cur_epoch (int): current epoch index. + Returns: + cfg (configs): the updated cfg. + changed (bool): do we change long cycle shape at this epoch? + """ + base_b, base_t, base_s = get_current_long_cycle_shape( + self.schedule, cur_epoch + ) + if base_s != cfg.DATA.TRAIN_CROP_SIZE or base_t != cfg.DATA.NUM_FRAMES: + + cfg.DATA.NUM_FRAMES = base_t + cfg.DATA.TRAIN_CROP_SIZE = base_s + cfg.TRAIN.BATCH_SIZE = base_b * cfg.MULTIGRID.DEFAULT_B + + bs_factor = ( + float(cfg.TRAIN.BATCH_SIZE / cfg.NUM_GPUS) + / cfg.MULTIGRID.BN_BASE_SIZE + ) + + if bs_factor < 1: + cfg.BN.NORM_TYPE = "sync_batchnorm" + cfg.BN.NUM_SYNC_DEVICES = int(1.0 / bs_factor) + elif bs_factor > 1: + cfg.BN.NORM_TYPE = "sub_batchnorm" + cfg.BN.NUM_SPLITS = int(bs_factor) + else: + cfg.BN.NORM_TYPE = "batchnorm" + + cfg.MULTIGRID.LONG_CYCLE_SAMPLING_RATE = cfg.DATA.SAMPLING_RATE * ( + cfg.MULTIGRID.DEFAULT_T // cfg.DATA.NUM_FRAMES + ) + logger.info("Long cycle updates:") + logger.info("\tBN.NORM_TYPE: {}".format(cfg.BN.NORM_TYPE)) + if cfg.BN.NORM_TYPE == "sync_batchnorm": + logger.info( + "\tBN.NUM_SYNC_DEVICES: {}".format(cfg.BN.NUM_SYNC_DEVICES) + ) + elif cfg.BN.NORM_TYPE == "sub_batchnorm": + logger.info("\tBN.NUM_SPLITS: {}".format(cfg.BN.NUM_SPLITS)) + logger.info("\tTRAIN.BATCH_SIZE: {}".format(cfg.TRAIN.BATCH_SIZE)) + logger.info( + "\tDATA.NUM_FRAMES x LONG_CYCLE_SAMPLING_RATE: {}x{}".format( + cfg.DATA.NUM_FRAMES, cfg.MULTIGRID.LONG_CYCLE_SAMPLING_RATE + ) + ) + logger.info( + "\tDATA.TRAIN_CROP_SIZE: {}".format(cfg.DATA.TRAIN_CROP_SIZE) + ) + return cfg, True + else: + return cfg, False + + def get_long_cycle_schedule(self, cfg): + """ + Based on multigrid hyperparameters, define the schedule of a long cycle. + Args: + cfg (configs): configs that contains training and multigrid specific + hyperparameters. Details can be seen in + slowfast/config/defaults.py. + Returns: + schedule (list): Specifies a list long cycle base shapes and their + corresponding training epochs. + """ + + steps = cfg.SOLVER.STEPS + + default_size = float( + cfg.DATA.NUM_FRAMES * cfg.DATA.TRAIN_CROP_SIZE ** 2 + ) + default_iters = steps[-1] + + # Get shapes and average batch size for each long cycle shape. 
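+        # (Illustrative sketch, not from the source: for a hypothetical factor
+        # pair (t_factor, s_factor) = (0.25, 0.5) with DATA.NUM_FRAMES = 32 and
+        # DATA.TRAIN_CROP_SIZE = 224, base_t = 8 and base_s = 112, so the
+        # relative batch size is round(32 * 224 ** 2 / (8 * 112 ** 2)) = 16;
+        # smaller input shapes train with proportionally larger batches.)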
+ avg_bs = [] + all_shapes = [] + for t_factor, s_factor in cfg.MULTIGRID.LONG_CYCLE_FACTORS: + base_t = int(round(cfg.DATA.NUM_FRAMES * t_factor)) + base_s = int(round(cfg.DATA.TRAIN_CROP_SIZE * s_factor)) + if cfg.MULTIGRID.SHORT_CYCLE: + shapes = [ + [ + base_t, + cfg.MULTIGRID.DEFAULT_S + * cfg.MULTIGRID.SHORT_CYCLE_FACTORS[0], + ], + [ + base_t, + cfg.MULTIGRID.DEFAULT_S + * cfg.MULTIGRID.SHORT_CYCLE_FACTORS[1], + ], + [base_t, base_s], + ] + else: + shapes = [[base_t, base_s]] + + # (T, S) -> (B, T, S) + shapes = [ + [int(round(default_size / (s[0] * s[1] * s[1]))), s[0], s[1]] + for s in shapes + ] + avg_bs.append(np.mean([s[0] for s in shapes])) + all_shapes.append(shapes) + + # Get schedule regardless of cfg.MULTIGRID.EPOCH_FACTOR. + total_iters = 0 + schedule = [] + for step_index in range(len(steps) - 1): + step_epochs = steps[step_index + 1] - steps[step_index] + + for long_cycle_index, shapes in enumerate(all_shapes): + cur_epochs = ( + step_epochs * avg_bs[long_cycle_index] / sum(avg_bs) + ) + + cur_iters = cur_epochs / avg_bs[long_cycle_index] + total_iters += cur_iters + schedule.append((step_index, shapes[-1], cur_epochs)) + + iter_saving = default_iters / total_iters + + final_step_epochs = cfg.SOLVER.MAX_EPOCH - steps[-1] + + # We define the fine-tuning phase to have the same amount of iteration + # saving as the rest of the training. + ft_epochs = final_step_epochs / iter_saving * avg_bs[-1] + + schedule.append((step_index + 1, all_shapes[-1][2], ft_epochs)) + + # Obtrain final schedule given desired cfg.MULTIGRID.EPOCH_FACTOR. + x = ( + cfg.SOLVER.MAX_EPOCH + * cfg.MULTIGRID.EPOCH_FACTOR + / sum(s[-1] for s in schedule) + ) + + final_schedule = [] + total_epochs = 0 + for s in schedule: + epochs = s[2] * x + total_epochs += epochs + final_schedule.append((s[0], s[1], int(round(total_epochs)))) + print_schedule(final_schedule) + return final_schedule + + +def print_schedule(schedule): + """ + Log schedule. + """ + logger.info("Long cycle index\tBase shape\tEpochs") + for s in schedule: + logger.info("{}\t{}\t{}".format(s[0], s[1], s[2])) + + +def get_current_long_cycle_shape(schedule, epoch): + """ + Given a schedule and epoch index, return the long cycle base shape. + Args: + schedule (configs): configs that contains training and multigrid specific + hyperparameters. Details can be seen in + slowfast/config/defaults.py. + cur_epoch (int): current epoch index. + Returns: + shapes (list): A list describing the base shape in a long cycle: + [batch size relative to default, + number of frames, spatial dimension]. + """ + for s in schedule: + if epoch < s[-1]: + return s[1] + return schedule[-1][1] diff --git a/TimeSformer/timesformer/utils/multiprocessing.py b/TimeSformer/timesformer/utils/multiprocessing.py new file mode 100755 index 0000000000000000000000000000000000000000..d88f5044328a84427d441aef20c8a395fbdc0710 --- /dev/null +++ b/TimeSformer/timesformer/utils/multiprocessing.py @@ -0,0 +1,61 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Multiprocessing helpers.""" + +import torch + + +def run( + local_rank, + num_proc, + func, + init_method, + shard_id, + num_shards, + backend, + cfg, + output_queue=None, +): + """ + Runs a function from a child process. + Args: + local_rank (int): rank of the current process on the current machine. + num_proc (int): number of processes per machine. + func (function): function to execute on each of the process. + init_method (string): method to initialize the distributed training. 
+            TCP initialization: requiring a network address reachable from all
+            processes followed by the port.
+            Shared file-system initialization: makes use of a file system that
+            is shared and visible from all machines. The URL should start with
+            file:// and contain a path to a non-existent file on a shared file
+            system.
+        shard_id (int): the rank of the current machine.
+        num_shards (int): number of overall machines for the distributed
+            training job.
+        backend (string): three distributed backends ('nccl', 'gloo', 'mpi') are
+            supported, each with different capabilities. Details can be found
+            here:
+            https://pytorch.org/docs/stable/distributed.html
+        cfg (CfgNode): configs. Details can be found in
+            slowfast/config/defaults.py
+        output_queue (queue): can optionally be used to return values from the
+            master process.
+    """
+    # Initialize the process group.
+    world_size = num_proc * num_shards
+    rank = shard_id * num_proc + local_rank
+
+    try:
+        torch.distributed.init_process_group(
+            backend=backend,
+            init_method=init_method,
+            world_size=world_size,
+            rank=rank,
+        )
+    except Exception as e:
+        raise e
+
+    torch.cuda.set_device(local_rank)
+    ret = func(cfg)
+    if output_queue is not None and local_rank == 0:
+        output_queue.put(ret)
diff --git a/TimeSformer/timesformer/utils/parser.py b/TimeSformer/timesformer/utils/parser.py
new file mode 100755
index 0000000000000000000000000000000000000000..cc6958d04bb6cc52254de9fbc2f4914ea01d82ea
--- /dev/null
+++ b/TimeSformer/timesformer/utils/parser.py
@@ -0,0 +1,93 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+"""Argument parser functions."""
+
+import argparse
+import sys
+
+import timesformer.utils.checkpoint as cu
+from timesformer.config.defaults import get_cfg
+
+
+def parse_args():
+    """
+    Parse the following arguments for a default parser for PySlowFast users.
+    Args:
+        shard_id (int): shard id for the current machine. Starts from 0 to
+            num_shards - 1. If a single machine is used, then set shard id to 0.
+        num_shards (int): number of shards used by the job.
+        init_method (str): initialization method to launch the job with multiple
+            devices. Options include TCP or shared file-system for
+            initialization. Details can be found in
+            https://pytorch.org/docs/stable/distributed.html#tcp-initialization
+        cfg (str): path to the config file.
+        opts (argument): provide additional options from the command line;
+            they overwrite the config loaded from the file.
+    """
+    parser = argparse.ArgumentParser(
+        description="Provide SlowFast video training and testing pipeline."
+    )
+    parser.add_argument(
+        "--shard_id",
+        help="The shard id of the current node; starts from 0 to num_shards - 1",
+        default=0,
+        type=int,
+    )
+    parser.add_argument(
+        "--num_shards",
+        help="Number of shards used by the job",
+        default=1,
+        type=int,
+    )
+    parser.add_argument(
+        "--init_method",
+        help="Initialization method, includes TCP or shared file-system",
+        default="tcp://localhost:9999",
+        type=str,
+    )
+    parser.add_argument(
+        "--cfg",
+        dest="cfg_file",
+        help="Path to the config file",
+        default="configs/Kinetics/SLOWFAST_4x16_R50.yaml",
+        type=str,
+    )
+    parser.add_argument(
+        "opts",
+        help="See slowfast/config/defaults.py for all options",
+        default=None,
+        nargs=argparse.REMAINDER,
+    )
+    if len(sys.argv) == 1:
+        parser.print_help()
+    return parser.parse_args()
+
+
+def load_config(args):
+    """
+    Given the arguments, load and initialize the configs.
+ Args: + args (argument): arguments includes `shard_id`, `num_shards`, + `init_method`, `cfg_file`, and `opts`. + """ + # Setup cfg. + cfg = get_cfg() + # Load config from cfg. + if args.cfg_file is not None: + cfg.merge_from_file(args.cfg_file) + # Load config from command line, overwrite config from opts. + if args.opts is not None: + cfg.merge_from_list(args.opts) + + # Inherit parameters from args. + if hasattr(args, "num_shards") and hasattr(args, "shard_id"): + cfg.NUM_SHARDS = args.num_shards + cfg.SHARD_ID = args.shard_id + if hasattr(args, "rng_seed"): + cfg.RNG_SEED = args.rng_seed + if hasattr(args, "output_dir"): + cfg.OUTPUT_DIR = args.output_dir + + # Create the checkpoint dir. + cu.make_checkpoint_dir(cfg.OUTPUT_DIR) + return cfg diff --git a/TimeSformer/timesformer/utils/weight_init_helper.py b/TimeSformer/timesformer/utils/weight_init_helper.py new file mode 100755 index 0000000000000000000000000000000000000000..e57275f54c310c92fb27a49458c69fba6048b87a --- /dev/null +++ b/TimeSformer/timesformer/utils/weight_init_helper.py @@ -0,0 +1,43 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Utility function for weight initialization""" + +import torch.nn as nn +from fvcore.nn.weight_init import c2_msra_fill + + +def init_weights(model, fc_init_std=0.01, zero_init_final_bn=True): + """ + Performs ResNet style weight initialization. + Args: + fc_init_std (float): the expected standard deviation for fc layer. + zero_init_final_bn (bool): if True, zero initialize the final bn for + every bottleneck. + """ + for m in model.modules(): + if isinstance(m, nn.Conv3d): + """ + Follow the initialization method proposed in: + {He, Kaiming, et al. + "Delving deep into rectifiers: Surpassing human-level + performance on imagenet classification." + arXiv preprint arXiv:1502.01852 (2015)} + """ + c2_msra_fill(m) + elif isinstance(m, nn.BatchNorm3d): + if ( + hasattr(m, "transform_final_bn") + and m.transform_final_bn + and zero_init_final_bn + ): + batchnorm_weight = 0.0 + else: + batchnorm_weight = 1.0 + if m.weight is not None: + m.weight.data.fill_(batchnorm_weight) + if m.bias is not None: + m.bias.data.zero_() + if isinstance(m, nn.Linear): + m.weight.data.normal_(mean=0.0, std=fc_init_std) + if m.bias is not None: + m.bias.data.zero_() diff --git a/TimeSformer/timesformer/visualization/__init__.py b/TimeSformer/timesformer/visualization/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..5c7f19c6c00a4ac3f2f2bc66f892e44bcbd72612 --- /dev/null +++ b/TimeSformer/timesformer/visualization/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. diff --git a/TimeSformer/timesformer/visualization/tensorboard_vis.py b/TimeSformer/timesformer/visualization/tensorboard_vis.py new file mode 100755 index 0000000000000000000000000000000000000000..5c1130c03715a04f2078c4810a2414cdb4f81c10 --- /dev/null +++ b/TimeSformer/timesformer/visualization/tensorboard_vis.py @@ -0,0 +1,428 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
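+
+# Illustrative usage sketch (not part of the original file): the writer below
+# is typically created once on the master process, fed scalar dictionaries
+# during training, and closed at the end, e.g.:
+#
+#     writer = TensorboardWriter(cfg)
+#     writer.add_scalars({"Train/loss": 0.73, "Train/lr": 1e-4}, global_step=100)
+#     writer.close()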
+ +import logging as log +import math +import os +import matplotlib.pyplot as plt +import torch +from torch.utils.tensorboard import SummaryWriter +from torchvision.utils import make_grid + +import timesformer.utils.logging as logging +import timesformer.visualization.utils as vis_utils +from timesformer.utils.misc import get_class_names + +logger = logging.get_logger(__name__) +log.getLogger("matplotlib").setLevel(log.ERROR) + + +class TensorboardWriter(object): + """ + Helper class to log information to Tensorboard. + """ + + def __init__(self, cfg): + """ + Args: + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + """ + # class_names: list of class names. + # cm_subset_classes: a list of class ids -- a user-specified subset. + # parent_map: dictionary where key is the parent class name and + # value is a list of ids of its children classes. + # hist_subset_classes: a list of class ids -- user-specified to plot histograms. + ( + self.class_names, + self.cm_subset_classes, + self.parent_map, + self.hist_subset_classes, + ) = (None, None, None, None) + self.cfg = cfg + self.cm_figsize = cfg.TENSORBOARD.CONFUSION_MATRIX.FIGSIZE + self.hist_figsize = cfg.TENSORBOARD.HISTOGRAM.FIGSIZE + + if cfg.TENSORBOARD.LOG_DIR == "": + log_dir = os.path.join( + cfg.OUTPUT_DIR, "runs-{}".format(cfg.TRAIN.DATASET) + ) + else: + log_dir = os.path.join(cfg.OUTPUT_DIR, cfg.TENSORBOARD.LOG_DIR) + + self.writer = SummaryWriter(log_dir=log_dir) + logger.info( + "To see logged results in Tensorboard, please launch using the command \ + `tensorboard --port= --logdir {}`".format( + log_dir + ) + ) + + if cfg.TENSORBOARD.CLASS_NAMES_PATH != "": + if cfg.DETECTION.ENABLE: + logger.info( + "Plotting confusion matrix is currently \ + not supported for detection." + ) + ( + self.class_names, + self.parent_map, + self.cm_subset_classes, + ) = get_class_names( + cfg.TENSORBOARD.CLASS_NAMES_PATH, + cfg.TENSORBOARD.CATEGORIES_PATH, + cfg.TENSORBOARD.CONFUSION_MATRIX.SUBSET_PATH, + ) + + if cfg.TENSORBOARD.HISTOGRAM.ENABLE: + if cfg.DETECTION.ENABLE: + logger.info( + "Plotting histogram is not currently \ + supported for detection tasks." + ) + if cfg.TENSORBOARD.HISTOGRAM.SUBSET_PATH != "": + _, _, self.hist_subset_classes = get_class_names( + cfg.TENSORBOARD.CLASS_NAMES_PATH, + None, + cfg.TENSORBOARD.HISTOGRAM.SUBSET_PATH, + ) + + def add_scalars(self, data_dict, global_step=None): + """ + Add multiple scalars to Tensorboard logs. + Args: + data_dict (dict): key is a string specifying the tag of value. + global_step (Optinal[int]): Global step value to record. + """ + if self.writer is not None: + for key, item in data_dict.items(): + self.writer.add_scalar(key, item, global_step) + + def plot_eval(self, preds, labels, global_step=None): + """ + Plot confusion matrices and histograms for eval/test set. + Args: + preds (tensor or list of tensors): list of predictions. + labels (tensor or list of tensors): list of labels. + global step (Optional[int]): current step in eval/test. + """ + if not self.cfg.DETECTION.ENABLE: + cmtx = None + if self.cfg.TENSORBOARD.CONFUSION_MATRIX.ENABLE: + cmtx = vis_utils.get_confusion_matrix( + preds, labels, self.cfg.MODEL.NUM_CLASSES + ) + # Add full confusion matrix. + add_confusion_matrix( + self.writer, + cmtx, + self.cfg.MODEL.NUM_CLASSES, + global_step=global_step, + class_names=self.class_names, + figsize=self.cm_figsize, + ) + # If a list of subset is provided, plot confusion matrix subset. 
+ if self.cm_subset_classes is not None: + add_confusion_matrix( + self.writer, + cmtx, + self.cfg.MODEL.NUM_CLASSES, + global_step=global_step, + subset_ids=self.cm_subset_classes, + class_names=self.class_names, + tag="Confusion Matrix Subset", + figsize=self.cm_figsize, + ) + # If a parent-child classes mapping is provided, plot confusion + # matrices grouped by parent classes. + if self.parent_map is not None: + # Get list of tags (parent categories names) and their children. + for parent_class, children_ls in self.parent_map.items(): + tag = ( + "Confusion Matrices Grouped by Parent Classes/" + + parent_class + ) + add_confusion_matrix( + self.writer, + cmtx, + self.cfg.MODEL.NUM_CLASSES, + global_step=global_step, + subset_ids=children_ls, + class_names=self.class_names, + tag=tag, + figsize=self.cm_figsize, + ) + if self.cfg.TENSORBOARD.HISTOGRAM.ENABLE: + if cmtx is None: + cmtx = vis_utils.get_confusion_matrix( + preds, labels, self.cfg.MODEL.NUM_CLASSES + ) + plot_hist( + self.writer, + cmtx, + self.cfg.MODEL.NUM_CLASSES, + self.cfg.TENSORBOARD.HISTOGRAM.TOPK, + global_step=global_step, + subset_ids=self.hist_subset_classes, + class_names=self.class_names, + figsize=self.hist_figsize, + ) + + def add_video(self, vid_tensor, tag="Video Input", global_step=None, fps=4): + """ + Add input to tensorboard SummaryWriter as a video. + Args: + vid_tensor (tensor): shape of (B, T, C, H, W). Values should lie + [0, 255] for type uint8 or [0, 1] for type float. + tag (Optional[str]): name of the video. + global_step(Optional[int]): current step. + fps (int): frames per second. + """ + self.writer.add_video(tag, vid_tensor, global_step=global_step, fps=fps) + + def plot_weights_and_activations( + self, + weight_activation_dict, + tag="", + normalize=False, + global_step=None, + batch_idx=None, + indexing_dict=None, + heat_map=True, + ): + """ + Visualize weights/ activations tensors to Tensorboard. + Args: + weight_activation_dict (dict[str, tensor]): a dictionary of the pair {layer_name: tensor}, + where layer_name is a string and tensor is the weights/activations of + the layer we want to visualize. + tag (Optional[str]): name of the video. + normalize (bool): If True, the tensor is normalized. (Default to False) + global_step(Optional[int]): current step. + batch_idx (Optional[int]): current batch index to visualize. If None, + visualize the entire batch. + indexing_dict (Optional[dict]): a dictionary of the {layer_name: indexing}. + where indexing is numpy-like fancy indexing. + heatmap (bool): whether to add heatmap to the weights/ activations. + """ + for name, array in weight_activation_dict.items(): + if batch_idx is None: + # Select all items in the batch if batch_idx is not provided. + batch_idx = list(range(array.shape[0])) + if indexing_dict is not None: + fancy_indexing = indexing_dict[name] + fancy_indexing = (batch_idx,) + fancy_indexing + array = array[fancy_indexing] + else: + array = array[batch_idx] + add_ndim_array( + self.writer, + array, + tag + name, + normalize=normalize, + global_step=global_step, + heat_map=heat_map, + ) + + def flush(self): + self.writer.flush() + + def close(self): + self.writer.flush() + self.writer.close() + + +def add_confusion_matrix( + writer, + cmtx, + num_classes, + global_step=None, + subset_ids=None, + class_names=None, + tag="Confusion Matrix", + figsize=None, +): + """ + Calculate and plot confusion matrix to a SummaryWriter. + Args: + writer (SummaryWriter): the SummaryWriter to write the matrix to. + cmtx (ndarray): confusion matrix. 
+ num_classes (int): total number of classes. + global_step (Optional[int]): current step. + subset_ids (list of ints): a list of label indices to keep. + class_names (list of strs, optional): a list of all class names. + tag (str or list of strs): name(s) of the confusion matrix image. + figsize (Optional[float, float]): the figure size of the confusion matrix. + If None, default to [6.4, 4.8]. + + """ + if subset_ids is None or len(subset_ids) != 0: + # If class names are not provided, use class indices as class names. + if class_names is None: + class_names = [str(i) for i in range(num_classes)] + # If subset is not provided, take every classes. + if subset_ids is None: + subset_ids = list(range(num_classes)) + + sub_cmtx = cmtx[subset_ids, :][:, subset_ids] + sub_names = [class_names[j] for j in subset_ids] + + sub_cmtx = vis_utils.plot_confusion_matrix( + sub_cmtx, + num_classes=len(subset_ids), + class_names=sub_names, + figsize=figsize, + ) + # Add the confusion matrix image to writer. + writer.add_figure(tag=tag, figure=sub_cmtx, global_step=global_step) + + +def plot_hist( + writer, + cmtx, + num_classes, + k=10, + global_step=None, + subset_ids=None, + class_names=None, + figsize=None, +): + """ + Given all predictions and all true labels, plot histograms of top-k most + frequently predicted classes for each true class. + + Args: + writer (SummaryWriter object): a tensorboard SummaryWriter object. + cmtx (ndarray): confusion matrix. + num_classes (int): total number of classes. + k (int): top k to plot histograms. + global_step (Optional[int]): current step. + subset_ids (list of ints, optional): class indices to plot histogram. + mapping (list of strings): names of all classes. + figsize (Optional[float, float]): the figure size of the confusion matrix. + If None, default to [6.4, 4.8]. + """ + if subset_ids is None or len(subset_ids) != 0: + if subset_ids is None: + subset_ids = set(range(num_classes)) + else: + subset_ids = set(subset_ids) + # If class names are not provided, use their indices as names. + if class_names is None: + class_names = list(range(num_classes)) + + for i in subset_ids: + pred = cmtx[i] + hist = vis_utils.plot_topk_histogram( + class_names[i], + torch.Tensor(pred), + k, + class_names, + figsize=figsize, + ) + writer.add_figure( + tag="Top {} predictions by classes/{}".format( + k, class_names[i] + ), + figure=hist, + global_step=global_step, + ) + + +def add_ndim_array( + writer, + array, + name, + nrow=None, + normalize=False, + global_step=None, + heat_map=True, +): + """ + Visualize and add tensors of n-dimentionals to a Tensorboard SummaryWriter. Tensors + will be visualized as a 2D grid image. + Args: + writer (SummaryWriter): Tensorboard SummaryWriter. + array (tensor): tensor to visualize. + name (str): name of the tensor. + nrow (Optional[int]): number of 2D filters in each row in the grid image. + normalize (bool): whether to normalize when we have multiple 2D filters. + Default to False. + global_step (Optional[int]): current step. + heat_map (bool): whether to add heat map to 2D each 2D filters in array. 
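+
+    Example (illustrative, with hypothetical names): for an activation tensor
+    `feat` of shape (8, 16, 16), each of the 8 slices is rendered as a 16x16
+    heat map and packed into a single image grid:
+        add_ndim_array(writer, feat, "layer1/activations", global_step=0)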
+ """ + if array is not None and array.ndim != 0: + if array.ndim == 1: + reshaped_array = array.unsqueeze(0) + if nrow is None: + nrow = int(math.sqrt(reshaped_array.size()[1])) + reshaped_array = reshaped_array.view(-1, nrow) + if heat_map: + reshaped_array = add_heatmap(reshaped_array) + writer.add_image( + name, + reshaped_array, + global_step=global_step, + dataformats="CHW", + ) + else: + writer.add_image( + name, + reshaped_array, + global_step=global_step, + dataformats="HW", + ) + elif array.ndim == 2: + reshaped_array = array + if heat_map: + heatmap = add_heatmap(reshaped_array) + writer.add_image( + name, heatmap, global_step=global_step, dataformats="CHW" + ) + else: + writer.add_image( + name, + reshaped_array, + global_step=global_step, + dataformats="HW", + ) + else: + last2_dims = array.size()[-2:] + reshaped_array = array.view(-1, *last2_dims) + if heat_map: + reshaped_array = [ + add_heatmap(array_2d).unsqueeze(0) + for array_2d in reshaped_array + ] + reshaped_array = torch.cat(reshaped_array, dim=0) + else: + reshaped_array = reshaped_array.unsqueeze(1) + if nrow is None: + nrow = int(math.sqrt(reshaped_array.size()[0])) + img_grid = make_grid( + reshaped_array, nrow, padding=1, normalize=normalize + ) + writer.add_image(name, img_grid, global_step=global_step) + + +def add_heatmap(tensor): + """ + Add heatmap to 2D tensor. + Args: + tensor (tensor): a 2D tensor. Tensor value must be in [0..1] range. + Returns: + heatmap (tensor): a 3D tensor. Result of applying heatmap to the 2D tensor. + """ + assert tensor.ndim == 2, "Only support 2D tensors." + # Move tensor to cpu if necessary. + if tensor.device != torch.device("cpu"): + arr = tensor.cpu() + else: + arr = tensor + arr = arr.numpy() + # Get the color map by name. + cm = plt.get_cmap("viridis") + heatmap = cm(arr) + heatmap = heatmap[:, :, :3] + # Convert (H, W, C) to (C, H, W) + heatmap = torch.Tensor(heatmap).permute(2, 0, 1) + return heatmap diff --git a/TimeSformer/timesformer/visualization/utils.py b/TimeSformer/timesformer/visualization/utils.py new file mode 100755 index 0000000000000000000000000000000000000000..445b7638d9a9897a865072fc97be9a83e4b696bb --- /dev/null +++ b/TimeSformer/timesformer/visualization/utils.py @@ -0,0 +1,374 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import itertools +import numpy as np +import matplotlib.pyplot as plt +import torch +from sklearn.metrics import confusion_matrix + +import timesformer.utils.logging as logging +from timesformer.datasets.utils import pack_pathway_output, tensor_normalize + +logger = logging.get_logger(__name__) + + +def get_confusion_matrix(preds, labels, num_classes, normalize="true"): + """ + Calculate confusion matrix on the provided preds and labels. + Args: + preds (tensor or lists of tensors): predictions. Each tensor is in + in the shape of (n_batch, num_classes). Tensor(s) must be on CPU. + labels (tensor or lists of tensors): corresponding labels. Each tensor is + in the shape of either (n_batch,) or (n_batch, num_classes). + num_classes (int): number of classes. Tensor(s) must be on CPU. + normalize (Optional[str]) : {‘true’, ‘pred’, ‘all’}, default="true" + Normalizes confusion matrix over the true (rows), predicted (columns) + conditions or all the population. If None, confusion matrix + will not be normalized. 
+ Returns: + cmtx (ndarray): confusion matrix of size (num_classes x num_classes) + """ + if isinstance(preds, list): + preds = torch.cat(preds, dim=0) + if isinstance(labels, list): + labels = torch.cat(labels, dim=0) + # If labels are one-hot encoded, get their indices. + if labels.ndim == preds.ndim: + labels = torch.argmax(labels, dim=-1) + # Get the predicted class indices for examples. + preds = torch.flatten(torch.argmax(preds, dim=-1)) + labels = torch.flatten(labels) + cmtx = confusion_matrix( + labels, preds, labels=list(range(num_classes)), normalize=normalize + ) + return cmtx + + +def plot_confusion_matrix(cmtx, num_classes, class_names=None, figsize=None): + """ + A function to create a colored and labeled confusion matrix matplotlib figure + given true labels and preds. + Args: + cmtx (ndarray): confusion matrix. + num_classes (int): total number of classes. + class_names (Optional[list of strs]): a list of class names. + figsize (Optional[float, float]): the figure size of the confusion matrix. + If None, default to [6.4, 4.8]. + + Returns: + img (figure): matplotlib figure. + """ + if class_names is None or type(class_names) != list: + class_names = [str(i) for i in range(num_classes)] + + figure = plt.figure(figsize=figsize) + plt.imshow(cmtx, interpolation="nearest", cmap=plt.cm.Blues) + plt.title("Confusion matrix") + plt.colorbar() + tick_marks = np.arange(len(class_names)) + plt.xticks(tick_marks, class_names, rotation=45) + plt.yticks(tick_marks, class_names) + + # Use white text if squares are dark; otherwise black. + threshold = cmtx.max() / 2.0 + for i, j in itertools.product(range(cmtx.shape[0]), range(cmtx.shape[1])): + color = "white" if cmtx[i, j] > threshold else "black" + plt.text( + j, + i, + format(cmtx[i, j], ".2f") if cmtx[i, j] != 0 else ".", + horizontalalignment="center", + color=color, + ) + + plt.tight_layout() + plt.ylabel("True label") + plt.xlabel("Predicted label") + + return figure + + +def plot_topk_histogram(tag, array, k=10, class_names=None, figsize=None): + """ + Plot histogram of top-k value from the given array. + Args: + tag (str): histogram title. + array (tensor): a tensor to draw top k value from. + k (int): number of top values to draw from array. + Defaut to 10. + class_names (list of strings, optional): + a list of names for values in array. + figsize (Optional[float, float]): the figure size of the confusion matrix. + If None, default to [6.4, 4.8]. + Returns: + fig (matplotlib figure): a matplotlib figure of the histogram. 
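+
+    Example (illustrative): plot the 5 classes most often predicted for true
+    class 0 of a confusion matrix `cmtx`:
+        fig = plot_topk_histogram("class 0", torch.Tensor(cmtx[0]), k=5)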
+ """ + val, ind = torch.topk(array, k) + + fig = plt.Figure(figsize=figsize, facecolor="w", edgecolor="k") + + ax = fig.add_subplot(1, 1, 1) + + if class_names is None: + class_names = [str(i) for i in ind] + else: + class_names = [class_names[i] for i in ind] + + tick_marks = np.arange(k) + width = 0.75 + ax.bar( + tick_marks, + val, + width, + color="orange", + tick_label=class_names, + edgecolor="w", + linewidth=1, + ) + + ax.set_xlabel("Candidates") + ax.set_xticks(tick_marks) + ax.set_xticklabels(class_names, rotation=-45, ha="center") + ax.xaxis.set_label_position("bottom") + ax.xaxis.tick_bottom() + + y_tick = np.linspace(0, 1, num=10) + ax.set_ylabel("Frequency") + ax.set_yticks(y_tick) + y_labels = [format(i, ".1f") for i in y_tick] + ax.set_yticklabels(y_labels, ha="center") + + for i, v in enumerate(val.numpy()): + ax.text( + i - 0.1, + v + 0.03, + format(v, ".2f"), + color="orange", + fontweight="bold", + ) + + ax.set_title(tag) + + fig.set_tight_layout(True) + + return fig + + +class GetWeightAndActivation: + """ + A class used to get weights and activations from specified layers from a Pytorch model. + """ + + def __init__(self, model, layers): + """ + Args: + model (nn.Module): the model containing layers to obtain weights and activations from. + layers (list of strings): a list of layer names to obtain weights and activations from. + Names are hierarchical, separated by /. For example, If a layer follow a path + "s1" ---> "pathway0_stem" ---> "conv", the layer path is "s1/pathway0_stem/conv". + """ + self.model = model + self.hooks = {} + self.layers_names = layers + # eval mode + self.model.eval() + self._register_hooks() + + def _get_layer(self, layer_name): + """ + Return a layer (nn.Module Object) given a hierarchical layer name, separated by /. + Args: + layer_name (str): the name of the layer. + """ + layer_ls = layer_name.split("/") + prev_module = self.model + for layer in layer_ls: + prev_module = prev_module._modules[layer] + + return prev_module + + def _register_single_hook(self, layer_name): + """ + Register hook to a layer, given layer_name, to obtain activations. + Args: + layer_name (str): name of the layer. + """ + + def hook_fn(module, input, output): + self.hooks[layer_name] = output.clone().detach() + + layer = get_layer(self.model, layer_name) + layer.register_forward_hook(hook_fn) + + def _register_hooks(self): + """ + Register hooks to layers in `self.layers_names`. + """ + for layer_name in self.layers_names: + self._register_single_hook(layer_name) + + def get_activations(self, input, bboxes=None): + """ + Obtain all activations from layers that we register hooks for. + Args: + input (tensors, list of tensors): the model input. + bboxes (Optional): Bouding boxes data that might be required + by the model. + Returns: + activation_dict (Python dictionary): a dictionary of the pair + {layer_name: list of activations}, where activations are outputs returned + by the layer. + """ + input_clone = [inp.clone() for inp in input] + if bboxes is not None: + preds = self.model(input_clone, bboxes) + else: + preds = self.model(input_clone) + + activation_dict = {} + for layer_name, hook in self.hooks.items(): + # list of activations for each instance. + activation_dict[layer_name] = hook + + return activation_dict, preds + + def get_weights(self): + """ + Returns weights from registered layers. + Returns: + weights (Python dictionary): a dictionary of the pair + {layer_name: weight}, where weight is the weight tensor. 
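+
+        Example (illustrative, assuming the model exposes a layer reachable
+        via the hierarchical name "s1/pathway0_stem/conv"):
+            gwa = GetWeightAndActivation(model, ["s1/pathway0_stem/conv"])
+            weights = gwa.get_weights()
+            activations, preds = gwa.get_activations([input_tensor])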
+ """ + weights = {} + for layer in self.layers_names: + cur_layer = get_layer(self.model, layer) + if hasattr(cur_layer, "weight"): + weights[layer] = cur_layer.weight.clone().detach() + else: + logger.error( + "Layer {} does not have weight attribute.".format(layer) + ) + return weights + + +def get_indexing(string): + """ + Parse numpy-like fancy indexing from a string. + Args: + string (str): string represent the indices to take + a subset of from array. Indices for each dimension + are separated by `,`; indices for different dimensions + are separated by `;`. + e.g.: For a numpy array `arr` of shape (3,3,3), the string "1,2;1,2" + means taking the sub-array `arr[[1,2], [1,2]] + Returns: + final_indexing (tuple): the parsed indexing. + """ + index_ls = string.strip().split(";") + final_indexing = [] + for index in index_ls: + index_single_dim = index.split(",") + index_single_dim = [int(i) for i in index_single_dim] + final_indexing.append(index_single_dim) + + return tuple(final_indexing) + + +def process_layer_index_data(layer_ls, layer_name_prefix=""): + """ + Extract layer names and numpy-like fancy indexing from a string. + Args: + layer_ls (list of strs): list of strings containing data about layer names + and their indexing. For each string, layer name and indexing is separated by whitespaces. + e.g.: [layer1 1,2;2, layer2, layer3 150;3,4] + layer_name_prefix (Optional[str]): prefix to be added to each layer name. + Returns: + layer_name (list of strings): a list of layer names. + indexing_dict (Python dict): a dictionary of the pair + {one_layer_name: indexing_for_that_layer} + """ + + layer_name, indexing_dict = [], {} + for layer in layer_ls: + ls = layer.split() + name = layer_name_prefix + ls[0] + layer_name.append(name) + if len(ls) == 2: + indexing_dict[name] = get_indexing(ls[1]) + else: + indexing_dict[name] = () + return layer_name, indexing_dict + + +def process_cv2_inputs(frames, cfg): + """ + Normalize and prepare inputs as a list of tensors. Each tensor + correspond to a unique pathway. + Args: + frames (list of array): list of input images (correspond to one clip) in range [0, 255]. + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + """ + inputs = torch.from_numpy(np.array(frames)).float() / 255 + inputs = tensor_normalize(inputs, cfg.DATA.MEAN, cfg.DATA.STD) + # T H W C -> C T H W. + inputs = inputs.permute(3, 0, 1, 2) + # Sample frames for num_frames specified. + index = torch.linspace(0, inputs.shape[1] - 1, cfg.DATA.NUM_FRAMES).long() + inputs = torch.index_select(inputs, 1, index) + inputs = pack_pathway_output(cfg, inputs) + inputs = [inp.unsqueeze(0) for inp in inputs] + return inputs + + +def get_layer(model, layer_name): + """ + Return the targeted layer (nn.Module Object) given a hierarchical layer name, + separated by /. + Args: + model (model): model to get layers from. + layer_name (str): name of the layer. + Returns: + prev_module (nn.Module): the layer from the model with `layer_name` name. + """ + layer_ls = layer_name.split("/") + prev_module = model + for layer in layer_ls: + prev_module = prev_module._modules[layer] + + return prev_module + + +class TaskInfo: + def __init__(self): + self.frames = None + self.id = -1 + self.bboxes = None + self.action_preds = None + self.num_buffer_frames = 0 + self.img_height = -1 + self.img_width = -1 + self.crop_size = -1 + self.clip_vis_size = -1 + + def add_frames(self, idx, frames): + """ + Add the clip and corresponding id. + Args: + idx (int): the current index of the clip. 
+ frames (list[ndarray]): list of images in "BGR" format. + """ + self.frames = frames + self.id = idx + + def add_bboxes(self, bboxes): + """ + Add correspondding bounding boxes. + """ + self.bboxes = bboxes + + def add_action_preds(self, preds): + """ + Add the corresponding action predictions. + """ + self.action_preds = preds diff --git a/TimeSformer/tools/benchmark.py b/TimeSformer/tools/benchmark.py new file mode 100755 index 0000000000000000000000000000000000000000..b9abff2749f72b1849d6aaf25bb7be3320fbce47 --- /dev/null +++ b/TimeSformer/tools/benchmark.py @@ -0,0 +1,24 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +A script to benchmark data loading. +""" + +import timesformer.utils.logging as logging +from timesformer.utils.benchmark import benchmark_data_loading +from timesformer.utils.misc import launch_job +from timesformer.utils.parser import load_config, parse_args + +logger = logging.get_logger(__name__) + + +def main(): + args = parse_args() + cfg = load_config(args) + + launch_job( + cfg=cfg, init_method=args.init_method, func=benchmark_data_loading + ) + + +if __name__ == "__main__": + main() diff --git a/TimeSformer/tools/run_net.py b/TimeSformer/tools/run_net.py new file mode 100755 index 0000000000000000000000000000000000000000..8c4ec28ad50cd31672d234512e6ccc8341fa508e --- /dev/null +++ b/TimeSformer/tools/run_net.py @@ -0,0 +1,44 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +"""Wrapper to train and test a video classification model.""" +from timesformer.utils.misc import launch_job +from timesformer.utils.parser import load_config, parse_args + +from tools.test_net import test +from tools.train_net import train + + +def get_func(cfg): + train_func = train + test_func = test + return train_func, test_func + +def main(): + """ + Main function to spawn the train and test process. + """ + args = parse_args() + if args.num_shards > 1: + args.output_dir = str(args.job_dir) + cfg = load_config(args) + + train, test = get_func(cfg) + + # Perform training. + if cfg.TRAIN.ENABLE: + launch_job(cfg=cfg, init_method=args.init_method, func=train) + + # Perform multi-clip testing. + if cfg.TEST.ENABLE: + launch_job(cfg=cfg, init_method=args.init_method, func=test) + + # Perform model visualization. 
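+    # (Note: the `visualize` function used below is expected to come from a
+    # separate visualization module, as in PySlowFast's tools/visualization.py;
+    # it is not imported at the top of this file.)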
+ if cfg.TENSORBOARD.ENABLE and ( + cfg.TENSORBOARD.MODEL_VIS.ENABLE + or cfg.TENSORBOARD.WRONG_PRED_VIS.ENABLE + ): + launch_job(cfg=cfg, init_method=args.init_method, func=visualize) + + +if __name__ == "__main__": + main() diff --git a/TimeSformer/tools/submit.py b/TimeSformer/tools/submit.py new file mode 100755 index 0000000000000000000000000000000000000000..39c90f7d439520ebdba62a0003013694466ec60f --- /dev/null +++ b/TimeSformer/tools/submit.py @@ -0,0 +1,199 @@ +import argparse +import os +from pathlib import Path +import shutil +import submitit +import multiprocessing +import sys + +import torch +import timesformer.utils.checkpoint as cu +import timesformer.utils.multiprocessing as mpu +from timesformer.utils.misc import launch_job +from timesformer.utils.parser import load_config + +from tools.run_net import get_func + +def parse_args(): + parser = argparse.ArgumentParser( + "Submitit for onestage training", add_help=False + ) + parser.add_argument( + "--num_gpus", + help="Number of GPUs", + default=8, + type=int, + ) + parser.add_argument( + "--num_shards", + help="Number of Nodes", + default=1, + type=int, + ) + parser.add_argument( + "--partition", default="learnfair", type=str, help="Partition where to submit" + ) + parser.add_argument("--timeout", default=60 * 72, type=int, help="Duration of the job") + parser.add_argument("--cfg", dest="cfg_file", help="Path to the config file", + default="configs/test_R50_8GPU.yaml", type=str) + parser.add_argument( + "--job_dir", default="", type=str, help="Job dir. Leave empty for automatic." + ) + parser.add_argument( + "--name", default="", type=str, help="Job dir. Leave empty for automatic." + ) + parser.add_argument( + "--resume-from", + default="", + type=str, + help=( + "Weights to resume from (.*pth file) or a file (last_checkpoint) that contains " + + "weight file name from the same directory" + ), + ) + parser.add_argument("--resume-job", default="", type=str, help="resume training from the job") + parser.add_argument("--use_volta32", action='store_true', help="Big models? Use this") + parser.add_argument("--postfix", default="experiment", type=str, help="Postfix of the jobs") + parser.add_argument("--mail", default="", type=str, + help="Email this user when the job finishes if specified") + parser.add_argument('--comment', default="", type=str, + help='Comment to pass to scheduler, e.g. priority message') + parser.add_argument( + "opts", + help="See lib/config/defaults.py for all options", + default=None, + nargs=argparse.REMAINDER, + ) + return parser.parse_args() + + +def get_shared_folder() -> Path: + user = os.getenv("USER") + if Path("/checkpoint/").is_dir(): + p = Path(f"/checkpoint/{user}/experiments") + p.mkdir(exist_ok=True) + return p + raise RuntimeError("No shared folder available") + + +def launch(shard_id, num_shards, cfg, init_method): + os.environ["NCCL_MIN_NRINGS"] = "8" + + print ("Pytorch version: ", torch.__version__) + + cfg.SHARD_ID = shard_id + cfg.NUM_SHARDS = num_shards + + print([ + shard_id, num_shards, cfg + ]) + + train, test = get_func(cfg) + # Launch job. 
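+    # (Illustrative note, not from the source: each submitit task calls this
+    # once per machine; launch_job then spawns cfg.NUM_GPUS processes on that
+    # machine, so the distributed world size is NUM_GPUS * num_shards.)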
+ if cfg.TRAIN.ENABLE: + launch_job(cfg=cfg, init_method=init_method, func=train) + + if cfg.TEST.ENABLE: + launch_job(cfg=cfg, init_method=init_method, func=test) + + +class Trainer(object): + def __init__(self, args): + self.args = args + + def __call__(self): + + socket_name = os.popen("ip r | grep default | awk '{print $5}'").read().strip('\n') + print("Setting GLOO and NCCL sockets IFNAME to: {}".format(socket_name)) + os.environ["GLOO_SOCKET_IFNAME"] = socket_name + # not sure if the next line is really affect anything + os.environ["NCCL_SOCKET_IFNAME"] = socket_name + + + hostname_first_node = os.popen( + "scontrol show hostnames $SLURM_JOB_NODELIST" + ).read().split("\n")[0] + dist_url = "tcp://{}:12399".format(hostname_first_node) + print("We will use the following dist url: {}".format(dist_url)) + + self._setup_gpu_args() + results = launch( + shard_id=self.args.machine_rank, + num_shards=self.args.num_shards, + cfg=load_config(self.args), + init_method=dist_url, + ) + return results + + def checkpoint(self): + import submitit + + job_env = submitit.JobEnvironment() + slurm_job_id = job_env.job_id + if self.args.resume_job == "": + self.args.resume_job = slurm_job_id + print("Requeuing ", self.args) + empty_trainer = type(self)(self.args) + return submitit.helpers.DelayedSubmission(empty_trainer) + + def _setup_gpu_args(self): + import submitit + + job_env = submitit.JobEnvironment() + print(self.args) + + self.args.machine_rank = job_env.global_rank + print(f"Process rank: {job_env.global_rank}") + + +def main(): + args = parse_args() + + if args.name == "": + cfg_name = os.path.splitext(os.path.basename(args.cfg_file))[0] + args.name = '_'.join([cfg_name, args.postfix]) + + assert args.job_dir != "" + + args.output_dir = str(args.job_dir) + args.job_dir = Path(args.job_dir) / "%j" + + # Note that the folder will depend on the job_id, to easily track experiments + #executor = submitit.AutoExecutor(folder=Path(args.job_dir) / "%j", slurm_max_num_timeout=30) + executor = submitit.AutoExecutor(folder=args.job_dir, slurm_max_num_timeout=30) + + # cluster setup is defined by environment variables + num_gpus_per_node = args.num_gpus + nodes = args.num_shards + partition = args.partition + timeout_min = args.timeout + kwargs = {} + if args.use_volta32: + kwargs['slurm_constraint'] = 'volta32gb,ib4' + if args.comment: + kwargs['slurm_comment'] = args.comment + + executor.update_parameters( + mem_gb=60 * num_gpus_per_node, + gpus_per_node=num_gpus_per_node, + tasks_per_node=1, + cpus_per_task=10 * num_gpus_per_node, + nodes=nodes, + timeout_min=timeout_min, # max is 60 * 72 + slurm_partition=partition, + slurm_signal_delay_s=120, + **kwargs + ) + + + print(args.name) + executor.update_parameters(name=args.name) + + trainer = Trainer(args) + job = executor.submit(trainer) + + print("Submitted job_id:", job.job_id) + + +if __name__ == "__main__": + main() diff --git a/TimeSformer/tools/test_net.py b/TimeSformer/tools/test_net.py new file mode 100755 index 0000000000000000000000000000000000000000..99fc3f330dccfa39b001dfed98b936e99f6a9ee1 --- /dev/null +++ b/TimeSformer/tools/test_net.py @@ -0,0 +1,199 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
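+
+# Illustrative note (not part of the original file): with
+# TEST.NUM_ENSEMBLE_VIEWS = 10 temporal clips and TEST.NUM_SPATIAL_CROPS = 3
+# crops per clip, every video contributes 10 x 3 = 30 clip-level predictions
+# that are ensembled into one video-level prediction, which is why test()
+# below asserts that the dataset size is divisible by that product.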
+ +"""Multi-view test a video classification model.""" + +import numpy as np +import os +import pickle +import torch +from fvcore.common.file_io import PathManager +import cv2 +from einops import rearrange, reduce, repeat +import scipy.io + +import timesformer.utils.checkpoint as cu +import timesformer.utils.distributed as du +import timesformer.utils.logging as logging +import timesformer.utils.misc as misc +import timesformer.visualization.tensorboard_vis as tb +from timesformer.datasets import loader +from timesformer.models import build_model +from timesformer.utils.meters import TestMeter + +logger = logging.get_logger(__name__) + + +@torch.no_grad() +def perform_test(test_loader, model, test_meter, cfg, writer=None): + """ + For classification: + Perform mutli-view testing that uniformly samples N clips from a video along + its temporal axis. For each clip, it takes 3 crops to cover the spatial + dimension, followed by averaging the softmax scores across all Nx3 views to + form a video-level prediction. All video predictions are compared to + ground-truth labels and the final testing performance is logged. + For detection: + Perform fully-convolutional testing on the full frames without crop. + Args: + test_loader (loader): video testing loader. + model (model): the pretrained video model to test. + test_meter (TestMeter): testing meters to log and ensemble the testing + results. + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + writer (TensorboardWriter object, optional): TensorboardWriter object + to writer Tensorboard log. + """ + # Enable eval mode. + model.eval() + test_meter.iter_tic() + + for cur_iter, (inputs, labels, video_idx, meta) in enumerate(test_loader): + if cfg.NUM_GPUS: + # Transfer the data to the current GPU device. + if isinstance(inputs, (list,)): + for i in range(len(inputs)): + inputs[i] = inputs[i].cuda(non_blocking=True) + else: + inputs = inputs.cuda(non_blocking=True) + + # Transfer the data to the current GPU device. + labels = labels.cuda() + video_idx = video_idx.cuda() + for key, val in meta.items(): + if isinstance(val, (list,)): + for i in range(len(val)): + val[i] = val[i].cuda(non_blocking=True) + else: + meta[key] = val.cuda(non_blocking=True) + test_meter.data_toc() + + if cfg.DETECTION.ENABLE: + # Compute the predictions. + preds = model(inputs, meta["boxes"]) + ori_boxes = meta["ori_boxes"] + metadata = meta["metadata"] + + preds = preds.detach().cpu() if cfg.NUM_GPUS else preds.detach() + ori_boxes = ( + ori_boxes.detach().cpu() if cfg.NUM_GPUS else ori_boxes.detach() + ) + metadata = ( + metadata.detach().cpu() if cfg.NUM_GPUS else metadata.detach() + ) + + if cfg.NUM_GPUS > 1: + preds = torch.cat(du.all_gather_unaligned(preds), dim=0) + ori_boxes = torch.cat(du.all_gather_unaligned(ori_boxes), dim=0) + metadata = torch.cat(du.all_gather_unaligned(metadata), dim=0) + + test_meter.iter_toc() + # Update and log stats. + test_meter.update_stats(preds, ori_boxes, metadata) + test_meter.log_iter_stats(None, cur_iter) + else: + # Perform the forward pass. + preds = model(inputs) + + # Gather all the predictions across all the devices to perform ensemble. + if cfg.NUM_GPUS > 1: + preds, labels, video_idx = du.all_gather( + [preds, labels, video_idx] + ) + if cfg.NUM_GPUS: + preds = preds.cpu() + labels = labels.cpu() + video_idx = video_idx.cpu() + + test_meter.iter_toc() + # Update and log stats. 
+            test_meter.update_stats(
+                preds.detach(), labels.detach(), video_idx.detach()
+            )
+            test_meter.log_iter_stats(cur_iter)
+
+        test_meter.iter_tic()
+
+    # Log epoch stats and print the final testing results.
+    if not cfg.DETECTION.ENABLE:
+        all_preds = test_meter.video_preds.clone().detach()
+        all_labels = test_meter.video_labels
+        if cfg.NUM_GPUS:
+            all_preds = all_preds.cpu()
+            all_labels = all_labels.cpu()
+        if writer is not None:
+            writer.plot_eval(preds=all_preds, labels=all_labels)
+
+        if cfg.TEST.SAVE_RESULTS_PATH != "":
+            save_path = os.path.join(cfg.OUTPUT_DIR, cfg.TEST.SAVE_RESULTS_PATH)
+
+            # Save the ensembled predictions together with the ground-truth
+            # labels.
+            with PathManager.open(save_path, "wb") as f:
+                pickle.dump([all_preds, all_labels], f)
+
+            logger.info(
+                "Successfully saved prediction results to {}".format(save_path)
+            )
+
+    test_meter.finalize_metrics()
+    return test_meter
+
+
+def test(cfg):
+    """
+    Perform multi-view testing on the pretrained video model.
+    Args:
+        cfg (CfgNode): configs. Details can be found in
+            slowfast/config/defaults.py
+    """
+    # Set up environment.
+    du.init_distributed_training(cfg)
+    # Set random seed from configs.
+    np.random.seed(cfg.RNG_SEED)
+    torch.manual_seed(cfg.RNG_SEED)
+
+    # Setup logging format.
+    logging.setup_logging(cfg.OUTPUT_DIR)
+
+    # Print config.
+    logger.info("Test with config:")
+    logger.info(cfg)
+
+    # Build the video model and print model statistics.
+    model = build_model(cfg)
+    if du.is_master_proc() and cfg.LOG_MODEL_INFO:
+        misc.log_model_info(model, cfg, use_train_input=False)
+
+    cu.load_test_checkpoint(cfg, model)
+
+    # Create video testing loaders.
+    test_loader = loader.construct_loader(cfg, "test")
+    logger.info("Testing model for {} iterations".format(len(test_loader)))
+
+    assert (
+        len(test_loader.dataset)
+        % (cfg.TEST.NUM_ENSEMBLE_VIEWS * cfg.TEST.NUM_SPATIAL_CROPS)
+        == 0
+    )
+    # Create meters for multi-view testing.
+    test_meter = TestMeter(
+        len(test_loader.dataset)
+        // (cfg.TEST.NUM_ENSEMBLE_VIEWS * cfg.TEST.NUM_SPATIAL_CROPS),
+        cfg.TEST.NUM_ENSEMBLE_VIEWS * cfg.TEST.NUM_SPATIAL_CROPS,
+        cfg.MODEL.NUM_CLASSES,
+        len(test_loader),
+        cfg.DATA.MULTI_LABEL,
+        cfg.DATA.ENSEMBLE_METHOD,
+    )
+
+    # Set up writer for logging to Tensorboard format.
+    if cfg.TENSORBOARD.ENABLE and du.is_master_proc(
+        cfg.NUM_GPUS * cfg.NUM_SHARDS
+    ):
+        writer = tb.TensorboardWriter(cfg)
+    else:
+        writer = None
+
+    # Perform multi-view test on the entire dataset.
+    test_meter = perform_test(test_loader, model, test_meter, cfg, writer)
+    if writer is not None:
+        writer.close()
diff --git a/TimeSformer/tools/train_net.py b/TimeSformer/tools/train_net.py
new file mode 100755
index 0000000000000000000000000000000000000000..9149e0ab0c8a6a35762cc317acaccc6c309cc5c9
--- /dev/null
+++ b/TimeSformer/tools/train_net.py
@@ -0,0 +1,512 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
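+
+# Illustrative note (not part of the original file): train_epoch below can
+# emulate a large global batch via gradient accumulation. For example, with
+# cfg.GLOBAL_BATCH_SIZE = 256, cfg.NUM_SHARDS = 1, and cfg.TRAIN.BATCH_SIZE = 64,
+# num_iters = 256 // 64 = 4, so gradients from 4 consecutive iterations are
+# accumulated, divided by 4, and applied in a single optimizer.step().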
+ +"""Train a video classification model.""" + +import numpy as np +import pprint +import torch +from fvcore.nn.precise_bn import get_bn_modules, update_bn_stats + +import timesformer.models.losses as losses +import timesformer.models.optimizer as optim +import timesformer.utils.checkpoint as cu +import timesformer.utils.distributed as du +import timesformer.utils.logging as logging +import timesformer.utils.metrics as metrics +import timesformer.utils.misc as misc +import timesformer.visualization.tensorboard_vis as tb +from timesformer.datasets import loader +from timesformer.models import build_model +from timesformer.utils.meters import TrainMeter, ValMeter +from timesformer.utils.multigrid import MultigridSchedule + +from timm.data import Mixup +from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy + +logger = logging.get_logger(__name__) + + +def train_epoch( + train_loader, model, optimizer, train_meter, cur_epoch, cfg, writer=None +): + """ + Perform the video training for one epoch. + Args: + train_loader (loader): video training loader. + model (model): the video model to train. + optimizer (optim): the optimizer to perform optimization on the model's + parameters. + train_meter (TrainMeter): training meters to log the training performance. + cur_epoch (int): current epoch of training. + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + writer (TensorboardWriter, optional): TensorboardWriter object + to writer Tensorboard log. + """ + # Enable train mode. + model.train() + train_meter.iter_tic() + data_size = len(train_loader) + + cur_global_batch_size = cfg.NUM_SHARDS * cfg.TRAIN.BATCH_SIZE + num_iters = cfg.GLOBAL_BATCH_SIZE // cur_global_batch_size + + for cur_iter, (inputs, labels, _, meta) in enumerate(train_loader): + # Transfer the data to the current GPU device. + if cfg.NUM_GPUS: + if isinstance(inputs, (list,)): + for i in range(len(inputs)): + inputs[i] = inputs[i].cuda(non_blocking=True) + else: + inputs = inputs.cuda(non_blocking=True) + labels = labels.cuda() + for key, val in meta.items(): + if isinstance(val, (list,)): + for i in range(len(val)): + val[i] = val[i].cuda(non_blocking=True) + else: + meta[key] = val.cuda(non_blocking=True) + + # Update the learning rate. + lr = optim.get_epoch_lr(cur_epoch + float(cur_iter) / data_size, cfg) + optim.set_lr(optimizer, lr) + + train_meter.data_toc() + + # Explicitly declare reduction to mean. + if not cfg.MIXUP.ENABLED: + loss_fun = losses.get_loss_func(cfg.MODEL.LOSS_FUNC)(reduction="mean") + else: + mixup_fn = Mixup( + mixup_alpha=cfg.MIXUP.ALPHA, cutmix_alpha=cfg.MIXUP.CUTMIX_ALPHA, cutmix_minmax=cfg.MIXUP.CUTMIX_MINMAX, prob=cfg.MIXUP.PROB, switch_prob=cfg.MIXUP.SWITCH_PROB, mode=cfg.MIXUP.MODE, + label_smoothing=0.1, num_classes=cfg.MODEL.NUM_CLASSES) + hard_labels = labels + inputs, labels = mixup_fn(inputs, labels) + loss_fun = SoftTargetCrossEntropy() + + if cfg.DETECTION.ENABLE: + preds = model(inputs, meta["boxes"]) + else: + preds = model(inputs) + + # Compute the loss. + loss = loss_fun(preds, labels) + + if cfg.MIXUP.ENABLED: + labels = hard_labels + + # check Nan Loss. + misc.check_nan_losses(loss) + + + if cur_global_batch_size >= cfg.GLOBAL_BATCH_SIZE: + # Perform the backward pass. + optimizer.zero_grad() + loss.backward() + # Update the parameters. 
+            optimizer.step()
+        else:
+            if cur_iter == 0:
+                optimizer.zero_grad()
+            loss.backward()
+            if (cur_iter + 1) % num_iters == 0:
+                for p in model.parameters():
+                    p.grad /= num_iters
+                optimizer.step()
+                optimizer.zero_grad()
+
+        if cfg.DETECTION.ENABLE:
+            if cfg.NUM_GPUS > 1:
+                loss = du.all_reduce([loss])[0]
+            loss = loss.item()
+
+            # Update and log stats.
+            train_meter.update_stats(None, None, None, loss, lr)
+            # write to tensorboard format if available.
+            if writer is not None:
+                writer.add_scalars(
+                    {"Train/loss": loss, "Train/lr": lr},
+                    global_step=data_size * cur_epoch + cur_iter,
+                )
+
+        else:
+            top1_err, top5_err = None, None
+            if cfg.DATA.MULTI_LABEL:
+                # Gather all the predictions across all the devices.
+                if cfg.NUM_GPUS > 1:
+                    [loss] = du.all_reduce([loss])
+                loss = loss.item()
+            else:
+                # Compute the errors.
+                num_topks_correct = metrics.topks_correct(preds, labels, (1, 5))
+                top1_err, top5_err = [
+                    (1.0 - x / preds.size(0)) * 100.0 for x in num_topks_correct
+                ]
+                # Gather all the predictions across all the devices.
+                if cfg.NUM_GPUS > 1:
+                    loss, top1_err, top5_err = du.all_reduce(
+                        [loss, top1_err, top5_err]
+                    )
+
+                # Copy the stats from GPU to CPU (sync point).
+                loss, top1_err, top5_err = (
+                    loss.item(),
+                    top1_err.item(),
+                    top5_err.item(),
+                )
+
+            # Update and log stats.
+            train_meter.update_stats(
+                top1_err,
+                top5_err,
+                loss,
+                lr,
+                inputs[0].size(0)
+                * max(
+                    cfg.NUM_GPUS, 1
+                ),  # If running on CPU (cfg.NUM_GPUS == 0), use 1 to represent 1 CPU.
+            )
+            # write to tensorboard format if available.
+            if writer is not None:
+                writer.add_scalars(
+                    {
+                        "Train/loss": loss,
+                        "Train/lr": lr,
+                        "Train/Top1_err": top1_err,
+                        "Train/Top5_err": top5_err,
+                    },
+                    global_step=data_size * cur_epoch + cur_iter,
+                )
+
+        train_meter.iter_toc()  # measure allreduce for this meter
+        train_meter.log_iter_stats(cur_epoch, cur_iter)
+        train_meter.iter_tic()
+
+    # Log epoch stats.
+    train_meter.log_epoch_stats(cur_epoch)
+    train_meter.reset()
+
+
+@torch.no_grad()
+def eval_epoch(val_loader, model, val_meter, cur_epoch, cfg, writer=None):
+    """
+    Evaluate the model on the val set.
+    Args:
+        val_loader (loader): data loader to provide validation data.
+        model (model): model to evaluate the performance.
+        val_meter (ValMeter): meter instance to record and calculate the metrics.
+        cur_epoch (int): number of the current epoch of training.
+        cfg (CfgNode): configs. Details can be found in
+            slowfast/config/defaults.py
+        writer (TensorboardWriter, optional): TensorboardWriter object
+            to write Tensorboard logs.
+    """
+
+    # Evaluation mode enabled. The running stats would not be updated.
+    model.eval()
+    val_meter.iter_tic()
+
+    for cur_iter, (inputs, labels, _, meta) in enumerate(val_loader):
+        if cfg.NUM_GPUS:
+            # Transfer the data to the current GPU device.
+            if isinstance(inputs, (list,)):
+                for i in range(len(inputs)):
+                    inputs[i] = inputs[i].cuda(non_blocking=True)
+            else:
+                inputs = inputs.cuda(non_blocking=True)
+            labels = labels.cuda()
+            for key, val in meta.items():
+                if isinstance(val, (list,)):
+                    for i in range(len(val)):
+                        val[i] = val[i].cuda(non_blocking=True)
+                else:
+                    meta[key] = val.cuda(non_blocking=True)
+        val_meter.data_toc()
+
+        if cfg.DETECTION.ENABLE:
+            # Compute the predictions.
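+            # Detection (AVA-style) evaluation: predictions, original boxes,
+            # and metadata are moved to the CPU and, when running multi-GPU,
+            # concatenated with all_gather_unaligned, since each process may
+            # hold a different number of boxes.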
+            preds = model(inputs, meta["boxes"])
+            ori_boxes = meta["ori_boxes"]
+            metadata = meta["metadata"]
+
+            if cfg.NUM_GPUS:
+                preds = preds.cpu()
+                ori_boxes = ori_boxes.cpu()
+                metadata = metadata.cpu()
+
+            if cfg.NUM_GPUS > 1:
+                preds = torch.cat(du.all_gather_unaligned(preds), dim=0)
+                ori_boxes = torch.cat(du.all_gather_unaligned(ori_boxes), dim=0)
+                metadata = torch.cat(du.all_gather_unaligned(metadata), dim=0)
+
+            val_meter.iter_toc()
+            # Update and log stats.
+            val_meter.update_stats(preds, ori_boxes, metadata)
+
+        else:
+            preds = model(inputs)
+
+            if cfg.DATA.MULTI_LABEL:
+                if cfg.NUM_GPUS > 1:
+                    preds, labels = du.all_gather([preds, labels])
+            else:
+                # Compute the errors.
+                num_topks_correct = metrics.topks_correct(preds, labels, (1, 5))
+
+                # Combine the errors across the GPUs.
+                top1_err, top5_err = [
+                    (1.0 - x / preds.size(0)) * 100.0 for x in num_topks_correct
+                ]
+                if cfg.NUM_GPUS > 1:
+                    top1_err, top5_err = du.all_reduce([top1_err, top5_err])
+
+                # Copy the errors from GPU to CPU (sync point).
+                top1_err, top5_err = top1_err.item(), top5_err.item()
+
+                val_meter.iter_toc()
+                # Update and log stats.
+                val_meter.update_stats(
+                    top1_err,
+                    top5_err,
+                    inputs[0].size(0)
+                    * max(
+                        cfg.NUM_GPUS, 1
+                    ),  # If running on CPU (cfg.NUM_GPUS == 0), use 1 to represent 1 CPU.
+                )
+                # write to tensorboard format if available.
+                if writer is not None:
+                    writer.add_scalars(
+                        {"Val/Top1_err": top1_err, "Val/Top5_err": top5_err},
+                        global_step=len(val_loader) * cur_epoch + cur_iter,
+                    )
+
+            val_meter.update_predictions(preds, labels)
+
+        val_meter.log_iter_stats(cur_epoch, cur_iter)
+        val_meter.iter_tic()
+
+    # Log epoch stats.
+    val_meter.log_epoch_stats(cur_epoch)
+    # write to tensorboard format if available.
+    if writer is not None:
+        if cfg.DETECTION.ENABLE:
+            writer.add_scalars(
+                {"Val/mAP": val_meter.full_map}, global_step=cur_epoch
+            )
+        else:
+            all_preds = [pred.clone().detach() for pred in val_meter.all_preds]
+            all_labels = [
+                label.clone().detach() for label in val_meter.all_labels
+            ]
+            if cfg.NUM_GPUS:
+                all_preds = [pred.cpu() for pred in all_preds]
+                all_labels = [label.cpu() for label in all_labels]
+            writer.plot_eval(
+                preds=all_preds, labels=all_labels, global_step=cur_epoch
+            )
+
+    val_meter.reset()
+
+
+def calculate_and_update_precise_bn(loader, model, num_iters=200, use_gpu=True):
+    """
+    Update the stats in bn layers by calculating precise stats.
+    Args:
+        loader (loader): data loader to provide training data.
+        model (model): model to update the bn stats.
+        num_iters (int): number of iterations to compute and update the bn stats.
+        use_gpu (bool): whether to use GPU or not.
+    """
+
+    def _gen_loader():
+        for inputs, *_ in loader:
+            if use_gpu:
+                if isinstance(inputs, (list,)):
+                    for i in range(len(inputs)):
+                        inputs[i] = inputs[i].cuda(non_blocking=True)
+                else:
+                    inputs = inputs.cuda(non_blocking=True)
+            yield inputs
+
+    # Update the bn stats.
+    update_bn_stats(model, _gen_loader(), num_iters)
+
+
+def build_trainer(cfg):
+    """
+    Build training model and its associated tools, including optimizer,
+    dataloaders and meters.
+    Args:
+        cfg (CfgNode): configs. Details can be found in
+            slowfast/config/defaults.py
+    Returns:
+        model (nn.Module): training model.
+        optimizer (Optimizer): optimizer.
+        train_loader (DataLoader): training data loader.
+        val_loader (DataLoader): validation data loader.
+        precise_bn_loader (DataLoader): training data loader for computing
+            precise BN.
+        train_meter (TrainMeter): tool for measuring training stats.
+ val_meter (ValMeter): tool for measuring validation stats. + """ + # Build the video model and print model statistics. + model = build_model(cfg) + if du.is_master_proc() and cfg.LOG_MODEL_INFO: + misc.log_model_info(model, cfg, use_train_input=True) + + # Construct the optimizer. + optimizer = optim.construct_optimizer(model, cfg) + + # Create the video train and val loaders. + train_loader = loader.construct_loader(cfg, "train") + val_loader = loader.construct_loader(cfg, "val") + + precise_bn_loader = loader.construct_loader( + cfg, "train", is_precise_bn=True + ) + # Create meters. + train_meter = TrainMeter(len(train_loader), cfg) + val_meter = ValMeter(len(val_loader), cfg) + + return ( + model, + optimizer, + train_loader, + val_loader, + precise_bn_loader, + train_meter, + val_meter, + ) + + +def train(cfg): + """ + Train a video model for many epochs on train set and evaluate it on val set. + Args: + cfg (CfgNode): configs. Details can be found in + slowfast/config/defaults.py + """ + # Set up environment. + du.init_distributed_training(cfg) + # Set random seed from configs. + np.random.seed(cfg.RNG_SEED) + torch.manual_seed(cfg.RNG_SEED) + + # Setup logging format. + logging.setup_logging(cfg.OUTPUT_DIR) + + # Init multigrid. + multigrid = None + if cfg.MULTIGRID.LONG_CYCLE or cfg.MULTIGRID.SHORT_CYCLE: + multigrid = MultigridSchedule() + cfg = multigrid.init_multigrid(cfg) + if cfg.MULTIGRID.LONG_CYCLE: + cfg, _ = multigrid.update_long_cycle(cfg, cur_epoch=0) + # Print config. + logger.info("Train with config:") + logger.info(pprint.pformat(cfg)) + + # Build the video model and print model statistics. + model = build_model(cfg) + if du.is_master_proc() and cfg.LOG_MODEL_INFO: + misc.log_model_info(model, cfg, use_train_input=True) + + # Construct the optimizer. + optimizer = optim.construct_optimizer(model, cfg) + + # Load a checkpoint to resume training if applicable. + if not cfg.TRAIN.FINETUNE: + start_epoch = cu.load_train_checkpoint(cfg, model, optimizer) + else: + start_epoch = 0 + cu.load_checkpoint(cfg.TRAIN.CHECKPOINT_FILE_PATH, model) + + # Create the video train and val loaders. + train_loader = loader.construct_loader(cfg, "train") + val_loader = loader.construct_loader(cfg, "val") + + precise_bn_loader = ( + loader.construct_loader(cfg, "train", is_precise_bn=True) + if cfg.BN.USE_PRECISE_STATS + else None + ) + + train_meter = TrainMeter(len(train_loader), cfg) + val_meter = ValMeter(len(val_loader), cfg) + + # set up writer for logging to Tensorboard format. + if cfg.TENSORBOARD.ENABLE and du.is_master_proc( + cfg.NUM_GPUS * cfg.NUM_SHARDS + ): + writer = tb.TensorboardWriter(cfg) + else: + writer = None + + # Perform the training loop. + logger.info("Start epoch: {}".format(start_epoch + 1)) + + for cur_epoch in range(start_epoch, cfg.SOLVER.MAX_EPOCH): + if cfg.MULTIGRID.LONG_CYCLE: + cfg, changed = multigrid.update_long_cycle(cfg, cur_epoch) + if changed: + ( + model, + optimizer, + train_loader, + val_loader, + precise_bn_loader, + train_meter, + val_meter, + ) = build_trainer(cfg) + + # Load checkpoint. + if cu.has_checkpoint(cfg.OUTPUT_DIR): + last_checkpoint = cu.get_last_checkpoint(cfg.OUTPUT_DIR) + assert "{:05d}.pyth".format(cur_epoch) in last_checkpoint + else: + last_checkpoint = cfg.TRAIN.CHECKPOINT_FILE_PATH + logger.info("Load from {}".format(last_checkpoint)) + cu.load_checkpoint( + last_checkpoint, model, cfg.NUM_GPUS > 1, optimizer + ) + + # Shuffle the dataset. + loader.shuffle_dataset(train_loader, cur_epoch) + + # Train for one epoch. 
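+        # Each epoch below runs one full pass over train_loader; on
+        # checkpoint/eval epochs this is followed by an optional precise-BN
+        # recomputation, a checkpoint save, and a validation pass.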
+        train_epoch(
+            train_loader, model, optimizer, train_meter, cur_epoch, cfg, writer
+        )
+
+        is_checkp_epoch = cu.is_checkpoint_epoch(
+            cfg,
+            cur_epoch,
+            None if multigrid is None else multigrid.schedule,
+        )
+        is_eval_epoch = misc.is_eval_epoch(
+            cfg, cur_epoch, None if multigrid is None else multigrid.schedule
+        )
+
+        # Compute precise BN stats.
+        if (
+            (is_checkp_epoch or is_eval_epoch)
+            and cfg.BN.USE_PRECISE_STATS
+            and len(get_bn_modules(model)) > 0
+        ):
+            calculate_and_update_precise_bn(
+                precise_bn_loader,
+                model,
+                min(cfg.BN.NUM_BATCHES_PRECISE, len(precise_bn_loader)),
+                cfg.NUM_GPUS > 0,
+            )
+        _ = misc.aggregate_sub_bn_stats(model)
+
+        # Save a checkpoint.
+        if is_checkp_epoch:
+            cu.save_checkpoint(cfg.OUTPUT_DIR, model, optimizer, cur_epoch, cfg)
+        # Evaluate the model on validation set.
+        if is_eval_epoch:
+            eval_epoch(val_loader, model, val_meter, cur_epoch, cfg, writer)
+
+    if writer is not None:
+        writer.close()
diff --git a/TimeSformer/tools/visualization.py b/TimeSformer/tools/visualization.py
new file mode 100755
index 0000000000000000000000000000000000000000..21e0c728952c931810d9e0c6a757c3b396a53d2c
--- /dev/null
+++ b/TimeSformer/tools/visualization.py
@@ -0,0 +1,346 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+
+import numpy as np
+import pickle
+import torch
+import tqdm
+from fvcore.common.file_io import PathManager
+
+import slowfast.datasets.utils as data_utils
+import slowfast.utils.checkpoint as cu
+import slowfast.utils.distributed as du
+import slowfast.utils.logging as logging
+import slowfast.utils.misc as misc
+import slowfast.visualization.tensorboard_vis as tb
+from slowfast.datasets import loader
+from slowfast.models import build_model
+from slowfast.visualization.gradcam_utils import GradCAM
+from slowfast.visualization.prediction_vis import WrongPredictionVis
+from slowfast.visualization.utils import (
+    GetWeightAndActivation,
+    process_layer_index_data,
+)
+from slowfast.visualization.video_visualizer import VideoVisualizer
+
+logger = logging.get_logger(__name__)
+
+
+def run_visualization(vis_loader, model, cfg, writer=None):
+    """
+    Run model visualization (weights, activations and model inputs) and visualize
+    them on Tensorboard.
+    Args:
+        vis_loader (loader): video visualization loader.
+        model (model): the video model to visualize.
+        cfg (CfgNode): configs. Details can be found in
+            slowfast/config/defaults.py
+        writer (TensorboardWriter, optional): TensorboardWriter object
+            to write Tensorboard logs.
+    """
+    n_devices = cfg.NUM_GPUS * cfg.NUM_SHARDS
+    prefix = "module/" if n_devices > 1 else ""
+    # Get a list of selected layer names and indexing.
+    layer_ls, indexing_dict = process_layer_index_data(
+        cfg.TENSORBOARD.MODEL_VIS.LAYER_LIST, layer_name_prefix=prefix
+    )
+    logger.info("Start Model Visualization.")
+    # Register hooks for activations.
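+    # GetWeightAndActivation wraps the model so that get_activations() returns
+    # both the predictions and the intermediate features of the layers named
+    # in cfg.TENSORBOARD.MODEL_VIS.LAYER_LIST.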
+ model_vis = GetWeightAndActivation(model, layer_ls) + + if writer is not None and cfg.TENSORBOARD.MODEL_VIS.MODEL_WEIGHTS: + layer_weights = model_vis.get_weights() + writer.plot_weights_and_activations( + layer_weights, tag="Layer Weights/", heat_map=False + ) + + video_vis = VideoVisualizer( + cfg.MODEL.NUM_CLASSES, + cfg.TENSORBOARD.CLASS_NAMES_PATH, + cfg.TENSORBOARD.MODEL_VIS.TOPK_PREDS, + cfg.TENSORBOARD.MODEL_VIS.COLORMAP, + ) + if n_devices > 1: + grad_cam_layer_ls = [ + "module/" + layer + for layer in cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.LAYER_LIST + ] + else: + grad_cam_layer_ls = cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.LAYER_LIST + + if cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.ENABLE: + gradcam = GradCAM( + model, + target_layers=grad_cam_layer_ls, + data_mean=cfg.DATA.MEAN, + data_std=cfg.DATA.STD, + colormap=cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.COLORMAP, + ) + logger.info("Finish drawing weights.") + global_idx = -1 + for inputs, labels, _, meta in tqdm.tqdm(vis_loader): + if cfg.NUM_GPUS: + # Transfer the data to the current GPU device. + if isinstance(inputs, (list,)): + for i in range(len(inputs)): + inputs[i] = inputs[i].cuda(non_blocking=True) + else: + inputs = inputs.cuda(non_blocking=True) + labels = labels.cuda() + for key, val in meta.items(): + if isinstance(val, (list,)): + for i in range(len(val)): + val[i] = val[i].cuda(non_blocking=True) + else: + meta[key] = val.cuda(non_blocking=True) + + if cfg.DETECTION.ENABLE: + activations, preds = model_vis.get_activations( + inputs, meta["boxes"] + ) + else: + activations, preds = model_vis.get_activations(inputs) + if cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.ENABLE: + if cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.USE_TRUE_LABEL: + inputs, preds = gradcam(inputs, labels=labels) + else: + inputs, preds = gradcam(inputs) + if cfg.NUM_GPUS: + inputs = du.all_gather_unaligned(inputs) + activations = du.all_gather_unaligned(activations) + preds = du.all_gather_unaligned(preds) + if isinstance(inputs[0], list): + for i in range(len(inputs)): + for j in range(len(inputs[0])): + inputs[i][j] = inputs[i][j].cpu() + else: + inputs = [inp.cpu() for inp in inputs] + preds = [pred.cpu() for pred in preds] + else: + inputs, activations, preds = [inputs], [activations], [preds] + + boxes = [None] * max(n_devices, 1) + if cfg.DETECTION.ENABLE and cfg.NUM_GPUS: + boxes = du.all_gather_unaligned(meta["boxes"]) + boxes = [box.cpu() for box in boxes] + + if writer is not None: + total_vids = 0 + for i in range(max(n_devices, 1)): + cur_input = inputs[i] + cur_activations = activations[i] + cur_batch_size = cur_input[0].shape[0] + cur_preds = preds[i] + cur_boxes = boxes[i] + for cur_batch_idx in range(cur_batch_size): + global_idx += 1 + total_vids += 1 + if ( + cfg.TENSORBOARD.MODEL_VIS.INPUT_VIDEO + or cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.ENABLE + ): + for path_idx, input_pathway in enumerate(cur_input): + if cfg.TEST.DATASET == "ava" and cfg.AVA.BGR: + video = input_pathway[ + cur_batch_idx, [2, 1, 0], ... + ] + else: + video = input_pathway[cur_batch_idx] + + if not cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.ENABLE: + # Permute to (T, H, W, C) from (C, T, H, W). 
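+                                # Without Grad-CAM the clip is still
+                                # mean/std-normalized, so it is also reverted
+                                # with the dataset statistics before drawing;
+                                # the Grad-CAM branch below already yields
+                                # displayable frames in (T, C, H, W) layout.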
+                                video = video.permute(1, 2, 3, 0)
+                                video = data_utils.revert_tensor_normalize(
+                                    video, cfg.DATA.MEAN, cfg.DATA.STD
+                                )
+                            else:
+                                # Permute from (T, C, H, W) to (T, H, W, C)
+                                video = video.permute(0, 2, 3, 1)
+                            bboxes = (
+                                None if cur_boxes is None else cur_boxes[:, 1:]
+                            )
+                            cur_prediction = (
+                                cur_preds
+                                if cfg.DETECTION.ENABLE
+                                else cur_preds[cur_batch_idx]
+                            )
+                            video = video_vis.draw_clip(
+                                video, cur_prediction, bboxes=bboxes
+                            )
+                            video = (
+                                torch.from_numpy(np.array(video))
+                                .permute(0, 3, 1, 2)
+                                .unsqueeze(0)
+                            )
+                            writer.add_video(
+                                video,
+                                tag="Input {}/Pathway {}".format(
+                                    global_idx, path_idx + 1
+                                ),
+                            )
+                    if cfg.TENSORBOARD.MODEL_VIS.ACTIVATIONS:
+                        writer.plot_weights_and_activations(
+                            cur_activations,
+                            tag="Input {}/Activations: ".format(global_idx),
+                            batch_idx=cur_batch_idx,
+                            indexing_dict=indexing_dict,
+                        )
+
+
+def perform_wrong_prediction_vis(vis_loader, model, cfg):
+    """
+    Visualize video inputs with wrong predictions on Tensorboard.
+    Args:
+        vis_loader (loader): video visualization loader.
+        model (model): the video model to visualize.
+        cfg (CfgNode): configs. Details can be found in
+            slowfast/config/defaults.py
+    """
+    wrong_prediction_visualizer = WrongPredictionVis(cfg=cfg)
+    for batch_idx, (inputs, labels, _, _) in tqdm.tqdm(enumerate(vis_loader)):
+        if cfg.NUM_GPUS:
+            # Transfer the data to the current GPU device.
+            if isinstance(inputs, (list,)):
+                for i in range(len(inputs)):
+                    inputs[i] = inputs[i].cuda(non_blocking=True)
+            else:
+                inputs = inputs.cuda(non_blocking=True)
+            labels = labels.cuda()
+
+        # Some models modify the original input.
+        inputs_clone = [inp.clone() for inp in inputs]
+
+        preds = model(inputs)
+
+        if cfg.NUM_GPUS > 1:
+            preds, labels = du.all_gather([preds, labels])
+            if isinstance(inputs_clone, (list,)):
+                inputs_clone = du.all_gather(inputs_clone)
+            else:
+                inputs_clone = du.all_gather([inputs_clone])[0]
+
+        if cfg.NUM_GPUS:
+            # Transfer the data to the current CPU device.
+            labels = labels.cpu()
+            preds = preds.cpu()
+            if isinstance(inputs_clone, (list,)):
+                for i in range(len(inputs_clone)):
+                    inputs_clone[i] = inputs_clone[i].cpu()
+            else:
+                inputs_clone = inputs_clone.cpu()
+
+        # If using CPU (NUM_GPUS = 0), 1 represents 1 CPU.
+        n_devices = max(cfg.NUM_GPUS, 1)
+        for device_idx in range(1, n_devices + 1):
+            wrong_prediction_visualizer.visualize_vid(
+                video_input=inputs_clone,
+                labels=labels,
+                preds=preds.detach().clone(),
+                batch_idx=device_idx * batch_idx,
+            )
+
+    logger.info(
+        "Class indices with wrong predictions: {}".format(
+            sorted(wrong_prediction_visualizer.wrong_class_prediction)
+        )
+    )
+    wrong_prediction_visualizer.clean()
+
+
+def visualize(cfg):
+    """
+    Perform layer weights and activations visualization on the model.
+    Args:
+        cfg (CfgNode): configs. Details can be found in
+            slowfast/config/defaults.py
+    """
+    if cfg.TENSORBOARD.ENABLE and (
+        cfg.TENSORBOARD.MODEL_VIS.ENABLE
+        or cfg.TENSORBOARD.WRONG_PRED_VIS.ENABLE
+    ):
+        # Set up environment.
+        du.init_distributed_training(cfg)
+        # Set random seed from configs.
+        np.random.seed(cfg.RNG_SEED)
+        torch.manual_seed(cfg.RNG_SEED)
+
+        # Setup logging format.
+        logging.setup_logging(cfg.OUTPUT_DIR)
+
+        # Print config.
+        logger.info("Model Visualization with config:")
+        logger.info(cfg)
+
+        # Build the video model and print model statistics.
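+        # The model is built once, switched to eval mode, and loaded from the
+        # test checkpoint, so the visualizations reflect exactly the weights
+        # that are evaluated.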
+ model = build_model(cfg) + model.eval() + if du.is_master_proc() and cfg.LOG_MODEL_INFO: + misc.log_model_info(model, cfg, use_train_input=False) + + cu.load_test_checkpoint(cfg, model) + + # Create video testing loaders. + vis_loader = loader.construct_loader(cfg, "test") + + if cfg.DETECTION.ENABLE: + assert cfg.NUM_GPUS == cfg.TEST.BATCH_SIZE or cfg.NUM_GPUS == 0 + + # Set up writer for logging to Tensorboard format. + if du.is_master_proc(cfg.NUM_GPUS * cfg.NUM_SHARDS): + writer = tb.TensorboardWriter(cfg) + else: + writer = None + if cfg.TENSORBOARD.PREDICTIONS_PATH != "": + assert not cfg.DETECTION.ENABLE, "Detection is not supported." + logger.info( + "Visualizing class-level performance from saved results..." + ) + if writer is not None: + with PathManager.open( + cfg.TENSORBOARD.PREDICTIONS_PATH, "rb" + ) as f: + preds, labels = pickle.load(f, encoding="latin1") + + writer.plot_eval(preds, labels) + + if cfg.TENSORBOARD.MODEL_VIS.ENABLE: + if cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.ENABLE: + assert ( + not cfg.DETECTION.ENABLE + ), "Detection task is currently not supported for Grad-CAM visualization." + if cfg.MODEL.ARCH in cfg.MODEL.SINGLE_PATHWAY_ARCH: + assert ( + len(cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.LAYER_LIST) == 1 + ), "The number of chosen CNN layers must be equal to the number of pathway(s), given {} layer(s).".format( + len(cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.LAYER_LIST) + ) + elif cfg.MODEL.ARCH in cfg.MODEL.MULTI_PATHWAY_ARCH: + assert ( + len(cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.LAYER_LIST) == 2 + ), "The number of chosen CNN layers must be equal to the number of pathway(s), given {} layer(s).".format( + len(cfg.TENSORBOARD.MODEL_VIS.GRAD_CAM.LAYER_LIST) + ) + else: + raise NotImplementedError( + "Model arch {} is not in {}".format( + cfg.MODEL.ARCH, + cfg.MODEL.SINGLE_PATHWAY_ARCH + + cfg.MODEL.MULTI_PATHWAY_ARCH, + ) + ) + logger.info( + "Visualize model analysis for {} iterations".format( + len(vis_loader) + ) + ) + # Run visualization on the model + run_visualization(vis_loader, model, cfg, writer) + if cfg.TENSORBOARD.WRONG_PRED_VIS.ENABLE: + logger.info( + "Visualize Wrong Predictions for {} iterations".format( + len(vis_loader) + ) + ) + perform_wrong_prediction_vis(vis_loader, model, cfg) + + if writer is not None: + writer.close() diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/accelerate_training/__init__.py b/accelerate_training/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/accelerate_training/audio_caption.py b/accelerate_training/audio_caption.py new file mode 100644 index 0000000000000000000000000000000000000000..3c1422afcc55ee12bb140cf26324d3a6dd223519 --- /dev/null +++ b/accelerate_training/audio_caption.py @@ -0,0 +1,481 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + + +import os, sys +sys.path.append(os.path.abspath('.')) # ~/ep-alm + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage +from models.utils import filter_state, filter_msg, exclude_list + +from transformers import AutoTokenizer + +import utils + +from dataset.audio_caption import get_loader +from scheduler 
import create_scheduler +from optim import create_optimizer + + +from accelerate import Accelerator + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config, accelerator=None): + # train + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ", accelerator=accelerator) + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + + append_eos_token = config.get('append_eos_token', False) + eos_token = tokenizer.eos_token + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + + + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + + text = batch["sent"] + + if append_eos_token: + text = [t.replace(eos_token, '') + eos_token for t in text] + + + + + + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + + targets = text_input.input_ids.masked_fill(text_input.input_ids == tokenizer.pad_token_id, -100) + + + answer_output = model(image=image, + text=text_input, + labels = targets, + return_dict = True, + mode='train', + reduction='none', + ) + + loss = answer_output.loss + loss = loss.sum()/image.size(0) + loss = loss*lm_loss_weight + + optimizer.zero_grad() + accelerator.backward(loss) + optimizer.step() + + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + if prompt_lr is not None: + metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"]) + if epoch==0 and i%step_size==0 and i<=warmup_iterations: + scheduler.step(i//step_size) + + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + accelerator.print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + + + +@torch.no_grad() +def evaluation(model, data_loader, tokenizer, device, config, accelerator=None): + # test + model.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + header = 'Generate Caption test result:' + print_freq = 50 + + + predictions = [] + targets = [] + + + + pad_token = tokenizer.pad_token + eos_token = tokenizer.eos_token + + num_beams = config.get('num_beams', 1) + do_sample = config.get('do_sample', True) + accelerator.print("num_beams", num_beams, "do_sample", do_sample) + + for n, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + text = ['' for q in image] + + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + out = model(image=image, text=text_input, mode='generate', return_dict=True, max_length=30, + do_sample=do_sample, num_beams=num_beams) + out_decode = [] + for i, o in enumerate(out): + try: + + res = tokenizer.decode(o) + response = res.split('')[1].replace(pad_token, '').replace('', '').replace(eos_token, '') # skip_special_tokens=True + except TypeError: + accelerator.print(o) + response = ' ' + out_decode.append(response) + 
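+        # Decoded captions from every batch are accumulated and scored once at
+        # the end by the loader's evaluator; scores are returned as a
+        # 'Valid/<metric>' dict (e.g. 'Valid/CIDEr', used for model selection).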
+ + predictions.extend(out_decode) + + if 'targets' in batch: + targets.extend(batch['targets']) + + + + + evaluator = data_loader.evaluator + eval_results = evaluator.evaluate(predictions, targets) + + + wandb_log_dict = {} + + for score_name, score in eval_results.items(): + wandb_log_dict[f'Valid/{score_name}'] = score + + + accelerator.print(wandb_log_dict) + + return wandb_log_dict + + + +def main(args, config): + if 'XDG_CACHE_HOME' in os.environ: + os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch' + else: + os.environ['TORCH_HOME'] = '~/.cache/torch' + args.distributed = False + accelerator = Accelerator() + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + start_epoch = 0 + max_epoch = config['schedular']['epochs'] + warmup_steps = config['schedular']['warmup_epochs'] + + accelerator.print(args, config) + + + tokenizer = AutoTokenizer.from_pretrained(args.text_model, use_fast=False, local_files_only=True) + + + + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + else: + num_tasks = None + global_rank = None + + + ######### + num_workers = config.get('num_workers', 4) + train_topk = config.get('train_topk', -1) + valid_topk = config.get('valid_topk', -1) + data_dir = args.data_dir + + args.image_size = config.get('image_res', 224) + args.use_data_augmentation = True + + black_image = config.get('black_image', False) + + accelerator.print("black image:", black_image) + + # audio + args.melbins = config.get('melbins', 128) + args.target_length = config.get('target_length', 1024) + args.num_tries = config.get('num_tries', 1) + + args.skip_norm = config.get('skip_norm', True) + args.norm_mean = config.get('norm_mean', None) + args.norm_std = config.get('norm_std', None) + args.noise = config.get('noise', False) + + args.freqm_p = config.get('freqm_p', 48) + args.timem_p = config.get('timem_p', 192) + + + train_split = config.get('train_split', 'train') + val_split = config.get('val_split', 'val') + test_split = config.get('test_split', 'test') + + + train_loader = get_loader( + args, + split=train_split, mode='train', batch_size=config['batch_size_train'], + distributed=args.distributed, + workers=num_workers, + topk=train_topk, + data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True, black_image=black_image + ) + + accelerator.print('# len train loader:', len(train_loader)) + accelerator.print(f'Building val loader') + val_loader = get_loader( + args, + split=val_split, mode='val', batch_size=config['batch_size_test'], + distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True, black_image=black_image + ) + accelerator.print('# len val loader:', len(val_loader)) + + accelerator.print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + + accelerator.print('# len test loader:', len(test_loader)) + + #### Model #### + accelerator.print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + vision_model_name = config.get('vision_model_name', args.vision_model) + + model = ePALM(opt_model_name 
= args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + low_cpu=args.low_cpu + ) + + + model = model.to(device) + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + accelerator.print('\tInitial other params params lr: %f' % optimizer.param_groups[0]['lr']) + accelerator.print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = create_scheduler(arg_sche, optimizer) + + best_epoch = 0 + best_valid = 0 + + if args.checkpoint: + + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + msg = model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint from %s'%args.checkpoint) + accelerator.print(msg) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + accelerator.print(checkpoint.keys()) + for p in optimizer.param_groups: # not necessay after torch 1.12.1 + p['capturable'] = True + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + accelerator.print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + + freeze_whole_model(model) + unfreeze_parameters(model, config) + print_trainable_params_percentage(model) + + val_evaluator = val_loader.evaluator + test_evaluator = test_loader.evaluator + task = val_loader.task + device = accelerator.device + + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler = accelerator.prepare( + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler + ) + model = model.to(device) + + test_loader.evaluator = test_evaluator + val_loader.evaluator = val_evaluator + + test_loader.task = task + val_loader.task = task + + + accelerator.print("Start training") + start_time = time.time() + + + for epoch in range(start_epoch, max_epoch): + if epoch>0: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, + lr_scheduler, config, accelerator=accelerator) + + if args.evaluate: + break + + + valid_results = evaluation(model, val_loader, tokenizer, device, config, accelerator=accelerator) + + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + ## avoid memory issue with accelerator.get_state_dict + state_dict = accelerator.unwrap_model(model) + state_dict = state_dict.state_dict() + state_dict = filter_state(state_dict, exclude_list) # filter_state(model_without_ddp.state_dict(), exclude_list) + if state_dict is not None: + for k in state_dict: + if state_dict[k].dtype == torch.float16: + state_dict[k] = state_dict[k].float() + + + save_obj = { + 'model': state_dict, + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler.state_dict(), + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 
'best_epoch': best_epoch, + } + + + if args.save_best: + valid_score = valid_results['Valid/CIDEr'] + + if valid_score > best_valid or epoch == 0: + best_valid = valid_score + best_epoch = epoch + accelerator.print("Save best epoch:", best_epoch) + + save_obj['best_valid'] = best_valid + save_obj['best_epoch'] = best_epoch + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + + dist.barrier() + + + + + + ### test best model + if not args.evaluate: + checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint for test from %s'%os.path.join(args.output_dir, 'checkpoint_best.pth')) + accelerator.print(msg) + print("best_epoch", checkpoint['best_epoch'], "best_valid", checkpoint['best_valid']) + print("best_epoch", best_epoch, "best_valid", best_valid) + vqa_result = evaluation(model, test_loader, tokenizer, device, config, accelerator=accelerator) + + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + accelerator.print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', action='store_true') + + parser.add_argument('--save_best', action='store_true') + + parser.add_argument('--image_dir', default='/data/mshukor/data') + + + parser.add_argument('--low_cpu', action='store_true') + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/accelerate_training/caption.py b/accelerate_training/caption.py new file mode 100644 index 0000000000000000000000000000000000000000..67483823efd58eb46da3e1b934f05f23c04d90a5 --- /dev/null +++ b/accelerate_training/caption.py @@ -0,0 +1,476 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + +import os, sys +sys.path.append(os.path.abspath('.')) # ~/ep-alm + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, 
print_trainable_params_percentage +from models.utils import filter_state, filter_msg, exclude_list + +from transformers import AutoTokenizer + + +import utils + + + +from dataset.caption import get_loader +from scheduler import create_scheduler +from optim import create_optimizer + +from accelerate import Accelerator + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config, accelerator=None): + # train + model.train() + + + metric_logger = utils.MetricLogger(delimiter=" ", accelerator=accelerator) + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + append_eos_token = config.get('append_eos_token', False) + eos_token = tokenizer.eos_token + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + + + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + + text = batch["sent"] + + if append_eos_token: + text = [t.replace(eos_token, '') + eos_token for t in text] + + + + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + + targets = text_input.input_ids.masked_fill(text_input.input_ids == tokenizer.pad_token_id, -100) + + + answer_output = model(image=image, + text=text_input, + labels = targets, + return_dict = True, + mode='train', + reduction='none', + ) + + loss = answer_output.loss + loss = loss.sum()/image.size(0) + loss = loss*lm_loss_weight + + optimizer.zero_grad() + accelerator.backward(loss) + optimizer.step() + + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + if prompt_lr is not None: + metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"]) + if epoch==0 and i%step_size==0 and i<=warmup_iterations: + scheduler.step(i//step_size) + + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + accelerator.print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + + + +@torch.no_grad() +def evaluation(model, data_loader, tokenizer, device, config, accelerator=None, max_length=30) : + # test + model.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + header = 'Generate Caption test result:' + print_freq = 50 + + + predictions = [] + targets = [] + + pad_token = tokenizer.pad_token + eos_token = tokenizer.eos_token + + num_beams = config.get('num_beams', 1) + do_sample = config.get('do_sample', True) + accelerator.print("num_beams", num_beams, "do_sample", do_sample, "max_length", max_length) + + for n, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + text = ['' for q in image] + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + out = model(image=image, text=text_input, mode='generate', return_dict=True, max_length=max_length, + do_sample=do_sample, num_beams=num_beams) + out_decode = [] + for i, o in 
enumerate(out):
+            try:
+                res = tokenizer.decode(o)
+                response = res.split('')[1].replace(pad_token, '').replace('', '').replace(eos_token, '') # skip_special_tokens=True
+            except (TypeError, IndexError):
+                accelerator.print(o)
+                response = ' '
+
+            out_decode.append(response)
+
+        predictions.extend(out_decode)
+
+        if 'targets' in batch:
+            targets.extend(batch['targets'])
+
+    evaluator = data_loader.evaluator
+    eval_results = evaluator.evaluate(predictions, targets)
+
+    wandb_log_dict = {}
+
+    for score_name, score in eval_results.items():
+        wandb_log_dict[f'Valid/{score_name}'] = score
+
+    accelerator.print(wandb_log_dict)
+
+    return wandb_log_dict
+
+
+def main(args, config):
+    if 'XDG_CACHE_HOME' in os.environ:
+        os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch'
+    else:
+        os.environ['TORCH_HOME'] = '~/.cache/torch'
+
+    args.distributed = False
+
+    accelerator = Accelerator()
+
+    device = torch.device(args.device)
+
+    # fix the seed for reproducibility
+    seed = args.seed + utils.get_rank()
+    torch.manual_seed(seed)
+    np.random.seed(seed)
+    random.seed(seed)
+    cudnn.benchmark = True
+
+    start_epoch = 0
+    max_epoch = config['schedular']['epochs']
+    warmup_steps = config['schedular']['warmup_epochs']
+
+    accelerator.print(args, config)
+
+    #### Dataset ####
+    tokenizer_name = config.get('tokenizer_name', args.text_model)
+
+    if 'opt' in tokenizer_name:
+        tokenizer = AutoTokenizer.from_pretrained(args.text_model, use_fast=False, local_files_only=True)
+    else:
+        raise NotImplementedError
+
+    if args.distributed:
+        num_tasks = utils.get_world_size()
+        global_rank = utils.get_rank()
+    else:
+        num_tasks = None
+        global_rank = None
+
+    val_split = config.get('val_split', 'val')
+    test_split = config.get('test_split', 'test')
+
+    #########
+    num_workers = config.get('num_workers', 4)
+    train_topk = config.get('train_topk', -1)
+    valid_topk = config.get('valid_topk', -1)
+    data_dir = args.data_dir
+
+    args.image_size = config.get('image_res', 224)
+    args.use_data_augmentation = True
+
+    train_loader = get_loader(
+        args,
+        split='train', mode='train', batch_size=config['batch_size_train'],
+        distributed=args.distributed,
+        workers=num_workers,
+        topk=train_topk,
+        data_dir=data_dir,
+        local_rank=global_rank, world_size=num_tasks, verbose=True
+    )
+
+    # if gpu == 0:
+    accelerator.print('# len train loader:', len(train_loader))
+    accelerator.print(f'Building val loader')
+    val_loader = get_loader(
+        args,
+        split=val_split, mode='val', batch_size=config['batch_size_test'],
+        distributed=False,
+        workers=4,
+        topk=valid_topk, data_dir=data_dir,
+        local_rank=global_rank, world_size=num_tasks, verbose=True
+    )
+    accelerator.print('# len val loader:', len(val_loader))
+
+    accelerator.print(f'Building test loader')
+    test_loader = get_loader(
+        args,
+        split=test_split, mode='val', batch_size=config['batch_size_test'],
+        distributed=False,
+        workers=4,
+        topk=valid_topk, data_dir=data_dir,
+        local_rank=global_rank, world_size=num_tasks, verbose=True
+    )
+
+    accelerator.print('# len test loader:', len(test_loader))
+
+    #### Model ####
+    accelerator.print("Creating model")
+
+    start_layer_idx = config.get('start_layer_idx', 0)
+    end_layer_idx = config.get('end_layer_idx', 0)
+
+    vision_model_name = config.get('vision_model_name', args.vision_model)
+
+    model = ePALM(opt_model_name = args.text_model,
+                  vision_model_name = vision_model_name,
+                  use_vis_prefix = True,
+                  start_layer_idx = start_layer_idx,
+                  end_layer_idx = end_layer_idx,
+                  return_hidden_state_vision = True,
config=config, + low_cpu=args.low_cpu + ) + + + + model = model.to(device) + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + accelerator.print('\tInitial other params params lr: %f' % optimizer.param_groups[0]['lr']) + accelerator.print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = create_scheduler(arg_sche, optimizer) + + best_epoch = 0 + best_valid = 0 + + if args.checkpoint: + + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + msg = model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + + accelerator.print('load checkpoint from %s'%args.checkpoint) + accelerator.print(msg) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + accelerator.print(checkpoint.keys()) + for p in optimizer.param_groups: # not necessay after torch 1.12.1 + p['capturable'] = True + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + accelerator.print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + + freeze_whole_model(model) + unfreeze_parameters(model, config) + print_trainable_params_percentage(model) + + + val_evaluator = val_loader.evaluator + test_evaluator = test_loader.evaluator + task = val_loader.task + device = accelerator.device + + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler = accelerator.prepare( + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler + ) + model = model.to(device) + + test_loader.evaluator = test_evaluator + val_loader.evaluator = val_evaluator + + test_loader.task = task + val_loader.task = task + + accelerator.print("Start training") + start_time = time.time() + + + max_length = config.get("max_length", 30) + print("max length", max_length) + for epoch in range(start_epoch, max_epoch): + if epoch>0: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, lr_scheduler, + config, accelerator=accelerator) + + if args.evaluate: + break + + + valid_results = evaluation(model, val_loader, tokenizer, device, config, accelerator=accelerator, max_length=max_length) + + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + + ## avoid memory issue with accelerator.get_state_dict + state_dict = accelerator.unwrap_model(model) + state_dict = state_dict.state_dict() + state_dict = filter_state(state_dict, exclude_list) # + if state_dict is not None: + for k in state_dict: + if state_dict[k].dtype == torch.float16: + state_dict[k] = state_dict[k].float() + + + save_obj = { + 'model': state_dict, + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler.state_dict(), + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + + if args.save_best: + valid_score = valid_results['Valid/CIDEr'] + + if valid_score > best_valid or epoch == 0: + 
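+                # The first epoch, or a new best validation CIDEr, becomes the
+                # reference and rewrites checkpoint_best.pth just below.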
best_valid = valid_score + best_epoch = epoch + accelerator.print("Save best epoch:", best_epoch) + + save_obj['best_valid'] = best_valid + save_obj['best_epoch'] = best_epoch + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + + dist.barrier() + + + + + + ### test best model + if not args.evaluate: + checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint for test from %s'%os.path.join(args.output_dir, 'checkpoint_best.pth')) + accelerator.print(msg) + print("best_epoch", checkpoint['best_epoch'], "best_valid", checkpoint['best_valid']) + print("best_epoch", best_epoch, "best_valid", best_valid) + + vqa_result = evaluation(model, test_loader, tokenizer, device, config, accelerator=accelerator, max_length=max_length) + dist.barrier() + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + accelerator.print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', action='store_true') + + parser.add_argument('--save_best', action='store_true') + + parser.add_argument('--image_dir', default='/data/mshukor/data') + + parser.add_argument('--low_cpu', action='store_true') + + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/accelerate_training/gqa.py b/accelerate_training/gqa.py new file mode 100644 index 0000000000000000000000000000000000000000..842e1419b79ef388db5bcaba20ed271e9f232436 --- /dev/null +++ b/accelerate_training/gqa.py @@ -0,0 +1,552 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + + +import os, sys +sys.path.append(os.path.abspath('.')) # ~/ep-alm + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage +from models.utils import filter_state, filter_msg, exclude_list + +from transformers 
import AutoTokenizer + + +import utils +from tqdm import tqdm + + +from dataset.gqa import get_loader +from scheduler import create_scheduler +from optim import create_optimizer + +from accelerate import Accelerator + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config, accelerator=None): + # train + model.train() + + + metric_logger = utils.MetricLogger(delimiter=" ", accelerator=accelerator) + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + connector_lr = config_optim.connector_lr if hasattr(config_optim, 'connector_lr') else None + vis_lr = config_optim.vis_lr if hasattr(config_optim, 'vis_lr') else None + text_lr = config_optim.text_lr if hasattr(config_optim, 'text_lr') else None + + accelerator.print(vis_lr, text_lr, connector_lr, len(optimizer.param_groups)) + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + special_answer_token = config.get('special_answer_token', None) + + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch['images'].to(device,non_blocking=True) + + question = batch['sent'] + + answer = batch['answers'] + + + questions_answers = [] + + + if special_answer_token is not None: + questions_answers += [question[i] + "?" 
+ special_answer_token + answer[i].replace('[SEP]','') + eos_token for i in range(len(question))] + else: + questions_answers += [question[i] + "" + answer[i].replace('[SEP]','') + eos_token for i in range(len(question))] + + questions_answers_input = tokenizer(questions_answers, padding='longest', return_tensors="pt").to(device) + answer_targets = questions_answers_input.input_ids.masked_fill(questions_answers_input.input_ids == tokenizer.pad_token_id, -100) + + images = image + + answer_output = model(image=images, + text=questions_answers_input, + labels = answer_targets, + return_dict = True, + mode='train', + reduction='none', + ) + + loss = answer_output.loss + loss = loss.sum()/image.size(0) + loss = loss*lm_loss_weight + + optimizer.zero_grad() + accelerator.backward(loss) + optimizer.step() + + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + if prompt_lr is not None: + metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"]) + + if i % print_freq == 0: + lrs = [g["lr"] for g in optimizer.param_groups] + accelerator.print(lrs) + + if epoch==0 and i%step_size==0 and i<=warmup_iterations: + if scheduler is not None: + scheduler.step(i//step_size) + # gather the stats from all processes + metric_logger.synchronize_between_processes() + accelerator.print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + + +@torch.no_grad() +def predict(model, loader, tokenizer, device, dump_path=None, verbose=False, distributed=False, + special_answer_token=None, special_eo_answer_token=None, config=None, accelerator=None): + model.eval() + eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token + pad_token = tokenizer.pad_token + + num_beams = config.get('num_beams', 1) + do_sample = config.get('do_sample', True) + max_length = config.get('max_length', 30) + accelerator.print("num_beams", num_beams, "do_sample", do_sample, "max_length", max_length) + with torch.no_grad(): + quesid2ans = {} + if verbose: + pbar = tqdm(total=len(loader), ncols=120, desc="Prediction") + for i, batch in enumerate(loader): + + + image = batch['images'].to(device,non_blocking=True) + + question = batch['sent'] + + question_id = batch['question_ids'] + + if special_answer_token is not None: + question = [q+'?'+special_answer_token for q in question] + else: + question = [q+eos_token for q in question] + + question_input = tokenizer(question, padding='longest', return_tensors="pt").to(device) + + out = model(image=image, text=question_input, mode='generate', return_dict=True, max_length=max_length, + do_sample=do_sample, num_beams=num_beams) + + + + + for ques_id, o in zip(question_id, out): + o_list = o.tolist() + try: + if special_answer_token is not None: + response = tokenizer.decode(o_list).split(special_answer_token)[1].replace(pad_token, '').replace('', '').replace(eos_token, '') # skip_special_tokens=True + else: + response = tokenizer.decode(o_list).split('')[2].replace(pad_token, '').replace('', '').replace(eos_token, '') # skip_special_tokens=True + except TypeError: + accelerator.print(o_list) + response = ' ' + + ques_id = int(ques_id) + quesid2ans[ques_id] = response + + if verbose: + pbar.update(1) + if verbose: + pbar.close() + + if distributed: + dist.barrier() + + qid2ans_list = utils.all_gather(quesid2ans) + if verbose: + quesid2ans = {} + for qid2ans in qid2ans_list: + for k, v in qid2ans.items(): + quesid2ans[k] = v 
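+        # After the gather, the main process holds one merged
+        # question-id -> answer dict covering all ranks; it can optionally be
+        # dumped to disk below for offline scoring.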
+ + if dump_path is not None: + evaluator = loader.evaluator + evaluator.dump_result(quesid2ans, dump_path) + + return quesid2ans + + + + +def evaluate(model, data_loader, tokenizer, device, + distributed=False, special_answer_token=None, special_eo_answer_token=None, config=None, accelerator=None): + verbose = utils.is_main_process() + + quesid2ans = predict(model, data_loader, tokenizer, device, verbose=verbose, + distributed=distributed, special_answer_token=special_answer_token, + special_eo_answer_token=special_eo_answer_token, config=config, accelerator=accelerator) + + evaluator = data_loader.evaluator + + acc_dict = {} + topk_score = evaluator.evaluate(quesid2ans, normalize_answer=True) + acc_dict['topk_score'] = topk_score + return acc_dict + + + + + + + + +def main(args, config): + if 'XDG_CACHE_HOME' in os.environ: + os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch' + else: + os.environ['TORCH_HOME'] = '~/.cache/torch' + + args.distributed = False + + + accelerator = Accelerator() + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + start_epoch = 0 + max_epoch = config['schedular']['epochs'] + warmup_steps = config['schedular']['warmup_epochs'] + + + #### Dataset #### + + accelerator.print("Creating dataset") + + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + else: + num_tasks = None + global_rank = None + + num_workers = config.get('num_workers', 4) + train_topk = config.get('train_topk', -1) + valid_topk = config.get('valid_topk', -1) + data_dir = args.data_dir + + args.image_size = config.get('image_res', 224) + args.use_data_augmentation = True + + train_split = config.get('train_split', 'train') + val_split = config.get('val_split', 'valid') + test_split = config.get('test_split', 'testdev') + + train_loader = get_loader( + args, + split=train_split, mode='train', batch_size=config['batch_size_train'], + distributed=args.distributed, + workers=num_workers, + topk=train_topk, + data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + args.raw_label = False + accelerator.print('# len train loader:', len(train_loader)) + accelerator.print(f'Building val loader') + val_loader = get_loader( + args, + split=val_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + accelerator.print('# len val loader:', len(val_loader)) + + accelerator.print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + + accelerator.print('# len test loader:', len(test_loader)) + + + + #### Model #### + accelerator.print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + vision_model_name = config.get('vision_model_name', args.vision_model) + tokenizer_name = config.get('tokenizer_name', args.text_model) + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + 
return_hidden_state_vision = True, + config=config, + low_cpu=args.low_cpu + ) + + + model = model.to(device) + + + if 'opt' in tokenizer_name: + tokenizer = AutoTokenizer.from_pretrained(tokenizer_name, use_fast=False, local_files_only=True) + else: + raise NotImplementedError + + + special_answer_token = config.get('special_answer_token', None) + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + if special_answer_token is not None: + special_tokens_dict = {'additional_special_tokens': [special_answer_token]} + if special_eo_answer_token is not None: + special_tokens_dict['additional_special_tokens'] += [special_eo_answer_token] + + tokenizer.add_special_tokens(special_tokens_dict) + accelerator.print("Adding special token:", special_tokens_dict) + accelerator.print(tokenizer) + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config['optimizer']) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + accelerator.print('\tInitial other params lr: %f' % optimizer.param_groups[0]['lr']) + accelerator.print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = create_scheduler(arg_sche, optimizer) + + + if args.checkpoint: + + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + msg = model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint from %s'%args.checkpoint) + accelerator.print(msg) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + accelerator.print(checkpoint.keys()) + for p in optimizer.param_groups: # not necessary after torch 1.12.1 + p['capturable'] = True + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + accelerator.print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + freeze_whole_model(model) + unfreeze_parameters(model, config) + print_trainable_params_percentage(model) + + device = accelerator.device + + val_evaluator = val_loader.evaluator + test_evaluator = test_loader.evaluator + task = val_loader.task + + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler = accelerator.prepare( + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler + ) + model = model.to(device) + + test_loader.evaluator = test_evaluator + val_loader.evaluator = val_evaluator + + test_loader.task = task + val_loader.task = task + + + accelerator.print("Start training") + start_time = time.time() + + best_valid = 0.
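+ # best_valid/best_epoch track checkpoint selection; the best checkpoint is reloaded for the final test below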
+ best_epoch = 0 + + for epoch in range(start_epoch, max_epoch): + if epoch>0: + if lr_scheduler is not None: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, + lr_scheduler, config, accelerator=accelerator) + + if args.evaluate: + break + + score_dict = evaluate(model, val_loader, tokenizer, device, distributed=args.distributed, + special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token, + config=config, accelerator=accelerator) + + accelerator.print(score_dict) + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + if lr_scheduler is None: + lr_scheduler_state_dict = {} + else: + lr_scheduler_state_dict = lr_scheduler.state_dict() + + ## avoid memory issue with accelerator.get_state_dict + state_dict = accelerator.unwrap_model(model) + state_dict = state_dict.state_dict() + state_dict = filter_state(state_dict, exclude_list) # filter_state(model_without_ddp.state_dict(), exclude_list) + if state_dict is not None: + for k in state_dict: + if state_dict[k].dtype == torch.float16: + state_dict[k] = state_dict[k].float() + + + save_obj = { + 'model': state_dict, + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler_state_dict, + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + + + + if args.save_best: + valid_score = score_dict['topk_score'] * 100. + if valid_score > best_valid or epoch == 0: + best_valid = valid_score + best_epoch = epoch + + save_obj['best_valid'] = best_valid + save_obj['best_epoch'] = best_epoch + + accelerator.print("save best epoch:", best_epoch) + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + dist.barrier() + + verbose = utils.is_main_process() + + + ### test best model + if not args.evaluate: + checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint for test from %s'%os.path.join(args.output_dir, 'checkpoint_best.pth')) + accelerator.print(msg) + + quesid2ans = predict(model, test_loader, tokenizer, device, verbose=verbose, + distributed=args.distributed, special_answer_token=special_answer_token, + special_eo_answer_token=special_eo_answer_token, config=config, accelerator=accelerator) + + evaluator = test_loader.evaluator + score_dict = evaluator.evaluate(quesid2ans, normalize_answer=True) + + accelerator.print("Test accuracy:", score_dict) + + + + if args.distributed: + dist.barrier() + exit() + + + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + accelerator.print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', 
default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', action='store_true') + + parser.add_argument('--save_best', action='store_true') + + parser.add_argument('--low_cpu', action='store_true') + + + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/accelerate_training/video_caption.py b/accelerate_training/video_caption.py new file mode 100644 index 0000000000000000000000000000000000000000..dd6d5ea74626561c5ae3d1ba0dbed81380bc10a7 --- /dev/null +++ b/accelerate_training/video_caption.py @@ -0,0 +1,509 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + + +import os, sys +sys.path.append(os.path.abspath('.')) # ~/ep-alm + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage +from models.utils import filter_state, filter_msg, exclude_list + +from transformers import AutoTokenizer + + +import utils + + + +from dataset.video_caption import get_loader +from scheduler import create_scheduler +from optim import create_optimizer +from models.utils import filter_state, filter_msg, exclude_list + +from accelerate import Accelerator + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config, accelerator=None): + # train + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ", accelerator=accelerator) + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + append_eos_token = config.get('append_eos_token', False) + eos_token = tokenizer.eos_token + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + + + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + + text = batch["sent"] + + if append_eos_token: + text = [t.replace(eos_token, '') + eos_token for t in text] + + + + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + + targets = 
text_input.input_ids.masked_fill(text_input.input_ids == tokenizer.pad_token_id, -100) + + + answer_output = model(image=image, + text=text_input, + labels = targets, + return_dict = True, + mode='train', + reduction='none', + ) + + loss = answer_output.loss + loss = loss.sum()/image.size(0) + loss = loss*lm_loss_weight + + optimizer.zero_grad() + accelerator.backward(loss) + optimizer.step() + + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + if prompt_lr is not None: + metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"]) + if epoch==0 and i%step_size==0 and i<=warmup_iterations: + scheduler.step(i//step_size) + + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + accelerator.print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + + + +@torch.no_grad() +def evaluation(model, data_loader, tokenizer, device, config, max_length=30, accelerator=None): + # test + model.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + header = 'Generate Caption test result:' + print_freq = 50 + + + + predictions = [] + targets = [] + + + + pad_token = tokenizer.pad_token + eos_token = tokenizer.eos_token + + num_beams = config.get('num_beams', 1) + do_sample = config.get('do_sample', True) + accelerator.print("num_beams", num_beams, "do_sample", do_sample) + + for n, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + # prime OPT generation with its '</s>' BOS token + text = ['</s>' for q in image] + + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + out = model(image=image, text=text_input, mode='generate', return_dict=True, + max_length=max_length, do_sample=do_sample, num_beams=num_beams) + out_decode = [] + for i, o in enumerate(out): + try: + + res = tokenizer.decode(o) + response = res.split('</s>')[1].replace(pad_token, '').replace('</s>', '').replace(eos_token, '') # skip_special_tokens=True + except TypeError: + accelerator.print(o) + response = ' ' + + + out_decode.append(response) + + + predictions.extend(out_decode) + + if 'targets' in batch: + targets.extend(batch['targets']) + + + + + evaluator = data_loader.evaluator + eval_results = evaluator.evaluate(predictions, targets) + + + wandb_log_dict = {} + + for score_name, score in eval_results.items(): + wandb_log_dict[f'Valid/{score_name}'] = score + + + accelerator.print(wandb_log_dict) + + + return wandb_log_dict + + + +def main(args, config): + if 'XDG_CACHE_HOME' in os.environ: + os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch' + else: + os.environ['TORCH_HOME'] = '~/.cache/torch' + args.distributed = False + accelerator = Accelerator() + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + start_epoch = 0 + max_epoch = config['schedular']['epochs'] + warmup_steps = config['schedular']['warmup_epochs'] + + accelerator.print(args, config) + + + tokenizer = AutoTokenizer.from_pretrained(args.text_model, use_fast=False, local_files_only=True) + + special_answer_token = config.get('special_answer_token', None) + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + if special_answer_token is not None: + special_tokens_dict = {'additional_special_tokens': [special_answer_token]} + if
special_eo_answer_token is not None: + special_tokens_dict['additional_special_tokens'] += [special_eo_answer_token] + + tokenizer.add_special_tokens(special_tokens_dict) + accelerator.print("Adding special token:", special_tokens_dict) + accelerator.print(tokenizer) + + + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + else: + num_tasks = None + global_rank = None + + + ######### + max_length = args.max_gen_length + + num_workers = config.get('num_workers', 4) + train_topk = config.get('train_topk', -1) + valid_topk = config.get('valid_topk', -1) + data_dir = args.data_dir + + args.image_size = config.get('image_res', 224) + args.use_data_augmentation = True + + black_image = config.get('black_image', False) + + accelerator.print("black image:", black_image) + + + + # video + args.num_frames = config.get('num_frames', 4) + args.as_images = config.get('as_images', True) + args.num_tries = config.get('num_tries', 1) + args.sample_type = config.get('sample_type', 'rand') + + + train_split = config.get('train_split', 'train') + val_split = config.get('val_split', 'val') + test_split = config.get('test_split', 'test') + + + + train_loader = get_loader( + args, + split=train_split, mode='train', batch_size=config['batch_size_train'], + distributed=args.distributed, + workers=num_workers, + topk=train_topk, + data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True, black_image=black_image + ) + + accelerator.print('# len train loader:', len(train_loader)) + accelerator.print(f'Building val loader') + val_loader = get_loader( + args, + split=val_split, mode='val', batch_size=config['batch_size_test'], + distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True, black_image=black_image + ) + accelerator.print('# len val loader:', len(val_loader)) + + accelerator.print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + + accelerator.print('# len test loader:', len(test_loader)) + + #### Model #### + accelerator.print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + vision_model_name = config.get('vision_model_name', args.vision_model) + + vision_model_name = config.get('vision_model_name', args.vision_model) + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + low_cpu=args.low_cpu + ) + + + model = model.to(device) + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + accelerator.print('\tInitial other params params lr: %f' % optimizer.param_groups[0]['lr']) + accelerator.print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = create_scheduler(arg_sche, optimizer) + + best_epoch = 0 + best_valid = 0 + + + + if args.checkpoint: + + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + msg = 
model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint from %s'%args.checkpoint) + accelerator.print(msg) + if 'best_valid' in checkpoint: + accelerator.print("load best valid {} at epoch {}".format(checkpoint['best_valid'] , checkpoint['best_epoch'] )) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + accelerator.print(checkpoint.keys()) + for p in optimizer.param_groups: # not necessay after torch 1.12.1 + p['capturable'] = True + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + accelerator.print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + + freeze_whole_model(model) + unfreeze_parameters(model, config) + print_trainable_params_percentage(model) + + val_evaluator = val_loader.evaluator + test_evaluator = test_loader.evaluator + task = val_loader.task + # if args.use_accelerate: + device = accelerator.device + + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler = accelerator.prepare( + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler + ) + model_without_ddp = model.module + model = model.to(device) + + test_loader.evaluator = test_evaluator + val_loader.evaluator = val_evaluator + + test_loader.task = task + val_loader.task = task + + + accelerator.print("Start training") + start_time = time.time() + + + for epoch in range(start_epoch, max_epoch): + if epoch>0: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, lr_scheduler, + config, accelerator=accelerator) + + if args.evaluate: + break + + + valid_results = evaluation(model, val_loader, tokenizer, device, config, max_length=max_length, accelerator=accelerator) + + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + ## avoid memory issue with accelerator.get_state_dict + state_dict = accelerator.unwrap_model(model) + state_dict = state_dict.state_dict() + state_dict = filter_state(state_dict, exclude_list) # filter_state(model_without_ddp.state_dict(), exclude_list) + if state_dict is not None: + for k in state_dict: + if state_dict[k].dtype == torch.float16: + state_dict[k] = state_dict[k].float() + + # state_dict = accelerator.get_state_dict(model) + + save_obj = { + 'model': state_dict, + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler.state_dict(), + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + + if args.save_best: + valid_score = valid_results['Valid/CIDEr'] + + if valid_score > best_valid or epoch == 0: + best_valid = valid_score + best_epoch = epoch + accelerator.print("Save best epoch:", best_epoch) + + save_obj['best_valid'] = best_valid + save_obj['best_epoch'] = best_epoch + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + # else: + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + + dist.barrier() + + + + + + # else: + ### test best model + if not args.evaluate: + checkpoint = 
torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint for test from %s'%args.checkpoint) + accelerator.print(msg) + print("best_epoch", checkpoint['best_epoch'], "best_valid", checkpoint['best_valid']) + print("best_epoch", best_epoch, "best_valid", best_valid) + vqa_result = evaluation(model, test_loader, tokenizer, device, config, max_length=max_length, accelerator=accelerator) + + # result_file = save_result(vqa_result, args.result_dir, 'vqa_result_epoch%d'%epoch) + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + accelerator.print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', action='store_true') + + parser.add_argument('--save_best', action='store_true') + + parser.add_argument('--image_dir', default='/data/mshukor/data') + parser.add_argument('--max_gen_length', default=30, type=int, help='max_gen_length') + + + parser.add_argument('--low_cpu', action='store_true') + + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/accelerate_training/video_qa.py b/accelerate_training/video_qa.py new file mode 100644 index 0000000000000000000000000000000000000000..f42f1b700695b58e38ff93f6321fa15bc4d2081e --- /dev/null +++ b/accelerate_training/video_qa.py @@ -0,0 +1,571 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + +import os, sys +sys.path.append(os.path.abspath('.')) # ~/ep-alm + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage +from models.utils import filter_state, filter_msg, exclude_list + +from transformers import AutoTokenizer + + +import utils +import re +from tqdm import tqdm + +from dataset.video_vqa import get_loader +from scheduler import create_scheduler +from optim import create_optimizer + + +from models.utils import filter_state, filter_msg, exclude_list + +from accelerate import 
Accelerator + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config, accelerator=None): + # train + model.train() + + + metric_logger = utils.MetricLogger(delimiter=" ", accelerator=accelerator) + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + connector_lr = config_optim.connector_lr if hasattr(config_optim, 'connector_lr') else None + vis_lr = config_optim.vis_lr if hasattr(config_optim, 'vis_lr') else None + text_lr = config_optim.text_lr if hasattr(config_optim, 'text_lr') else None + + accelerator.print(vis_lr, text_lr, connector_lr, len(optimizer.param_groups)) + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + special_answer_token = config.get('special_answer_token', None) + + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch['images'].to(device,non_blocking=True) + + question = batch['sent'] + + answer = batch['answers'] + + + questions_answers = [] + + + if special_answer_token is not None: + questions_answers += [question[i] + "?" + special_answer_token + answer[i].replace('[SEP]','') + eos_token for i in range(len(question))] + else: + questions_answers += [question[i] + "" + answer[i].replace('[SEP]','') + eos_token for i in range(len(question))] + + questions_answers_input = tokenizer(questions_answers, padding='longest', return_tensors="pt").to(device) + answer_targets = questions_answers_input.input_ids.masked_fill(questions_answers_input.input_ids == tokenizer.pad_token_id, -100) + + images = image + + + + answer_output = model(image=images, + text=questions_answers_input, + labels = answer_targets, + return_dict = True, + mode='train', + reduction='none', + ) + + loss = answer_output.loss + loss = loss.sum()/image.size(0) + loss = loss*lm_loss_weight + + optimizer.zero_grad() + accelerator.backward(loss) + optimizer.step() + + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + if prompt_lr is not None: + metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"]) + + if i % print_freq == 0: + lrs = [g["lr"] for g in optimizer.param_groups] + accelerator.print(lrs) + + + if epoch==0 and i%step_size==0 and i<=warmup_iterations: + if scheduler is not None: + scheduler.step(i//step_size) + # gather the stats from all processes + metric_logger.synchronize_between_processes() + accelerator.print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + + +@torch.no_grad() +def predict(model, loader, tokenizer, device, dump_path=None, verbose=False, distributed=False, + special_answer_token=None, special_eo_answer_token=None, config=None, accelerator=None): + model.eval() + eos_token = tokenizer.eos_token if special_eo_answer_token is None else 
special_eo_answer_token + pad_token = tokenizer.pad_token + + num_beams = config.get('num_beams', 1) + do_sample = config.get('do_sample', True) + accelerator.print("num_beams", num_beams, "do_sample", do_sample) + + with torch.no_grad(): + quesid2ans = {} + if verbose: + pbar = tqdm(total=len(loader), ncols=120, desc="Prediction") + for i, batch in enumerate(loader): + + + image = batch['images'].to(device,non_blocking=True) + + question = batch['sent'] + + question_id = batch['question_ids'] + + if special_answer_token is not None: + question = [q+'?'+special_answer_token for q in question] + else: + question = [q+eos_token for q in question] + + question_input = tokenizer(question, padding='longest', return_tensors="pt").to(device) + + out = model(image=image, text=question_input, mode='generate', return_dict=True, max_length=30, + do_sample=do_sample, num_beams=num_beams) + + + + + for ques_id, o in zip(question_id, out): + o_list = o.tolist() + try: + if special_answer_token is not None: + response = tokenizer.decode(o_list).split(special_answer_token)[1].replace(pad_token, '').replace('</s>', '').replace(eos_token, '') # skip_special_tokens=True + else: + response = tokenizer.decode(o_list).split('</s>')[2].replace(pad_token, '').replace('</s>', '').replace(eos_token, '') # skip_special_tokens=True + except TypeError: + accelerator.print(o_list) + response = ' ' + + ques_id = ques_id + quesid2ans[ques_id] = response + + if verbose: + pbar.update(1) + if verbose: + pbar.close() + + if distributed: + dist.barrier() + + qid2ans_list = utils.all_gather(quesid2ans) + if verbose: + quesid2ans = {} + for qid2ans in qid2ans_list: + for k, v in qid2ans.items(): + quesid2ans[k] = v + + if dump_path is not None: + evaluator = loader.evaluator + evaluator.dump_result(quesid2ans, dump_path) + + return quesid2ans + + + + +def evaluate(model, data_loader, tokenizer, device, + distributed=False, special_answer_token=None, special_eo_answer_token=None, config=None, accelerator=None): + verbose = utils.is_main_process() + + + quesid2ans = predict(model, data_loader, tokenizer, device, verbose=verbose, + distributed=distributed, special_answer_token=special_answer_token, + special_eo_answer_token=special_eo_answer_token, config=config, accelerator=accelerator) + + evaluator = data_loader.evaluator + + acc_dict = {} + topk_score = evaluator.evaluate(quesid2ans, normalize_answer=True) + acc_dict['topk_score'] = topk_score + return acc_dict + + + + + + +def main(args, config): + + if 'XDG_CACHE_HOME' in os.environ: + os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch' + else: + os.environ['TORCH_HOME'] = '~/.cache/torch' + args.distributed = False + + + accelerator = Accelerator() + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + start_epoch = 0 + max_epoch = config['schedular']['epochs'] + warmup_steps = config['schedular']['warmup_epochs'] + + + #### Dataset #### + + accelerator.print("Creating dataset") + + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + else: + num_tasks = None + global_rank = None + + num_workers = config.get('num_workers', 4) + train_topk = config.get('train_topk', -1) + valid_topk = config.get('valid_topk', -1) + data_dir = args.data_dir + + args.image_size = config.get('image_res', 224) + args.use_data_augmentation = True + + + # video + args.num_frames =
config.get('num_frames', 4) + args.as_images = config.get('as_images', True) + args.num_tries = config.get('num_tries', 1) + args.sample_type = config.get('sample_type', 'rand') + + train_split = config.get('train_split', 'train') + val_split = config.get('val_split', 'val') + test_split = config.get('test_split', 'test') + + + train_loader = get_loader( + args, + split=train_split, mode='train', batch_size=config['batch_size_train'], + distributed=args.distributed, + workers=num_workers, + topk=train_topk, + data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + args.raw_label = False + accelerator.print('# len train loader:', len(train_loader)) + accelerator.print(f'Building val loader') + val_loader = get_loader( + args, + split=val_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + accelerator.print('# len val loader:', len(val_loader)) + + accelerator.print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + + accelerator.print('# len test loader:', len(test_loader)) + + + #### Model #### + accelerator.print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + + vision_model_name = config.get('vision_model_name', args.vision_model) + tokenizer_name = config.get('tokenizer_name', args.text_model) + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + low_cpu=args.low_cpu + ) + + + model = model.to(device) + + # tokenizer + if 'opt' in tokenizer_name: + tokenizer = AutoTokenizer.from_pretrained(tokenizer_name, use_fast=False, local_files_only=True) + else: + raise NotImplementedError + + + special_answer_token = config.get('special_answer_token', None) + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + if special_answer_token is not None: + special_tokens_dict = {'additional_special_tokens': [special_answer_token]} + if special_eo_answer_token is not None: + special_tokens_dict['additional_special_tokens'] += [special_eo_answer_token] + + tokenizer.add_special_tokens(special_tokens_dict) + accelerator.print("Adding special token:", special_tokens_dict) + accelerator.print(tokenizer) + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config['optimizer']) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + accelerator.print('\tInitial other params lr: %f' % optimizer.param_groups[0]['lr']) + accelerator.print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = create_scheduler(arg_sche, optimizer) + + + if args.checkpoint: + + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + + msg = model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint from %s'%args.checkpoint) + accelerator.print(msg) + + if 'best_valid' in checkpoint: + # report the best validation score stored in the loaded checkpoint + accelerator.print("load best valid {} at epoch {}".format(checkpoint['best_valid'] , checkpoint['best_epoch'] )) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + accelerator.print(checkpoint.keys()) + + for p in optimizer.param_groups: # not necessary after torch 1.12.1 + p['capturable'] = True + + + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + accelerator.print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + freeze_whole_model(model) + unfreeze_parameters(model, config) + print_trainable_params_percentage(model) + + val_evaluator = val_loader.evaluator + test_evaluator = test_loader.evaluator + task = val_loader.task + device = accelerator.device + + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler = accelerator.prepare( + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler + ) + model = model.to(device) + + test_loader.evaluator = test_evaluator + val_loader.evaluator = val_evaluator + + test_loader.task = task + val_loader.task = task + + + + accelerator.print("Start training") + start_time = time.time() + + best_valid = 0. + best_epoch = 0 + + for epoch in range(start_epoch, max_epoch): + if epoch>0: + if lr_scheduler is not None: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, lr_scheduler, config, accelerator=accelerator) + + if args.evaluate: + break + + score_dict = evaluate(model, val_loader, tokenizer, device, distributed=args.distributed, + special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token, + config=config, accelerator=accelerator) + accelerator.print(score_dict) + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + if lr_scheduler is None: + lr_scheduler_state_dict = {} + else: + lr_scheduler_state_dict = lr_scheduler.state_dict() + + + ## avoid memory issue with accelerator.get_state_dict + state_dict = accelerator.unwrap_model(model) + state_dict = state_dict.state_dict() + state_dict = filter_state(state_dict, exclude_list) # filter_state(model_without_ddp.state_dict(), exclude_list) + if state_dict is not None: + for k in state_dict: + if state_dict[k].dtype == torch.float16: + state_dict[k] = state_dict[k].float() + + + save_obj = { + 'model': state_dict, + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler_state_dict, + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + + + + if args.save_best: + valid_score = score_dict['topk_score'] * 100.
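+ # checkpoint selection below keeps the epoch with the highest top-k validation accuracy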
+ if valid_score > best_valid or epoch == 0: + best_valid = valid_score + best_epoch = epoch + + save_obj['best_valid'] = best_valid + save_obj['best_epoch'] = best_epoch + + accelerator.print("save best epoch:", best_epoch) + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + dist.barrier() + + + verbose = utils.is_main_process() + + + ### test best model + if not args.evaluate: + checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint for test from %s'%os.path.join(args.output_dir, 'checkpoint_best.pth')) + accelerator.print(msg) + print("best_epoch", checkpoint['best_epoch'], "best_valid", checkpoint['best_valid']) + print("best_epoch", best_epoch, "best_valid", best_valid) + + quesid2ans = predict(model, test_loader, tokenizer, device, verbose=verbose, + distributed=args.distributed, special_answer_token=special_answer_token, + special_eo_answer_token=special_eo_answer_token, config=config, accelerator=accelerator) + + evaluator = test_loader.evaluator + score_dict = evaluator.evaluate(quesid2ans, normalize_answer=True) + + accelerator.print("Test accuracy:", score_dict) + + + if args.distributed: + dist.barrier() + exit() + + + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + accelerator.print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', action='store_true') + + parser.add_argument('--save_best', action='store_true') + + parser.add_argument('--low_cpu', action='store_true') + + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/accelerate_training/vqa.py b/accelerate_training/vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..faf138931ddf12a691d89cb7e08615b10efc33c5 --- /dev/null +++ b/accelerate_training/vqa.py @@ -0,0 +1,589 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn 
as cudnn +import torch.distributed as dist + +import os, sys +sys.path.append(os.path.abspath('.')) # ~/ep-alm + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage +from models.utils import filter_state, filter_msg, exclude_list + +from transformers import AutoTokenizer +import utils +from dataset.vqa import get_loader +from scheduler import create_scheduler +from optim import create_optimizer + +from tqdm import tqdm + +from accelerate import Accelerator + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config, accelerator=None): + # train + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ") + + + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + connector_lr = config_optim.connector_lr if hasattr(config_optim, 'connector_lr') else None + vis_lr = config_optim.vis_lr if hasattr(config_optim, 'vis_lr') else None + text_lr = config_optim.text_lr if hasattr(config_optim, 'text_lr') else None + + accelerator.print(vis_lr, text_lr, connector_lr, len(optimizer.param_groups)) + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + special_answer_token = config.get('special_answer_token', None) + + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + + image = batch['images'].to(device,non_blocking=True) + + question = batch['sent'] + + answer = batch['answers'] + + + questions_answers = [] + + + if special_answer_token is not None: + questions_answers += [question[i] + "?" 
+ special_answer_token + answer[i].replace('[SEP]','') + eos_token for i in range(len(question))] + else: + questions_answers += [question[i] + "</s>" + answer[i].replace('[SEP]','') + eos_token for i in range(len(question))] + + + questions_answers_input = tokenizer(questions_answers, padding='longest', return_tensors="pt").to(device) + answer_targets = questions_answers_input.input_ids.masked_fill(questions_answers_input.input_ids == tokenizer.pad_token_id, -100) + + images = image + + + + answer_output = model(image=images, + text=questions_answers_input, + labels = answer_targets, + return_dict = True, + mode='train', + reduction='none', + ) + + + + loss = answer_output.loss + loss = loss.sum()/image.size(0) + loss = loss*lm_loss_weight + optimizer.zero_grad() + + accelerator.backward(loss) + + optimizer.step() + + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + if prompt_lr is not None: + metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"]) + + if i % print_freq == 0: + lrs = [g["lr"] for g in optimizer.param_groups] + accelerator.print(lrs) + + if epoch==0 and i%step_size==0 and i<=warmup_iterations: + if scheduler is not None: + scheduler.step(i//step_size) + # gather the stats from all processes + metric_logger.synchronize_between_processes() + accelerator.print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + + +@torch.no_grad() +def predict(model, loader, tokenizer, device, dump_path=None, verbose=False, distributed=False, special_answer_token=None, + special_eo_answer_token=None, accelerator=None): + model.eval() + eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token + pad_token = tokenizer.pad_token + print('pad_token', pad_token) + + + # note: generation settings are read from the module-level config loaded in __main__ + num_beams = config.get('num_beams', 1) + do_sample = config.get('do_sample', True) + max_length = config.get('max_length', 30) + accelerator.print("num_beams", num_beams, "do_sample", do_sample, "max_length", max_length) + + with torch.no_grad(): + quesid2ans = {} + if verbose: + pbar = tqdm(total=len(loader), ncols=120, desc="Prediction") + for i, batch in enumerate(loader): + + + image = batch['images'].to(device,non_blocking=True) + + question = batch['sent'] + + question_id = batch['question_ids'] + + if special_answer_token is not None: + question = [q+'?'+special_answer_token for q in question] + else: + question = [q+eos_token for q in question] + + question_input = tokenizer(question, padding='longest', return_tensors="pt").to(device) + out = model(image=image, text=question_input, mode='generate', return_dict=True, max_length=max_length, + do_sample=do_sample, num_beams=num_beams) + + + + for ques_id, o in zip(question_id, out): + o_list = o.tolist() + try: + if special_answer_token is not None: + response = tokenizer.decode(o_list).split(special_answer_token)[1].replace(pad_token, '').replace('</s>', '').replace(eos_token, '') # skip_special_tokens=True + else: + response = tokenizer.decode(o_list).split('</s>')[2].replace(pad_token, '').replace('</s>', '').replace(eos_token, '') # skip_special_tokens=True + except TypeError: + accelerator.print(o_list) + response = ' ' + ques_id = int(ques_id) + quesid2ans[ques_id] = response + + + if verbose: + pbar.update(1) + if verbose: + pbar.close() + + if distributed: + dist.barrier() + + qid2ans_list = utils.all_gather(quesid2ans) + if verbose: + quesid2ans = {} + for qid2ans in qid2ans_list: + for k, v in
qid2ans.items(): + quesid2ans[k] = v + + if dump_path is not None: + evaluator = loader.evaluator + evaluator.dump_result(quesid2ans, dump_path) + + return quesid2ans + + + + +def evaluate(model, data_loader, tokenizer, device, + distributed=False, special_answer_token=None, special_eo_answer_token=None, accelerator=None): + verbose = utils.is_main_process() + + + quesid2ans = predict(model, data_loader, tokenizer, device, verbose=verbose, + distributed=distributed, special_answer_token=special_answer_token, + special_eo_answer_token=special_eo_answer_token, accelerator=accelerator) + + evaluator = data_loader.evaluator + score_dict = evaluator.evaluate(quesid2ans) + + + acc_dict = evaluator.evaluate_raw(quesid2ans) + topk_score = evaluator.evaluate(quesid2ans) + acc_dict['topk_score'] = topk_score + return acc_dict + + + + + + + + +def main(args, config): + if 'XDG_CACHE_HOME' in os.environ: + os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch' + else: + os.environ['TORCH_HOME'] = '~/.cache/torch' + args.distributed = False + + + accelerator = Accelerator() + + + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + start_epoch = 0 + max_epoch = config['schedular']['epochs'] + warmup_steps = config['schedular']['warmup_epochs'] + + accelerator.print(args) + #### Dataset #### + + + + accelerator.print("Creating dataset") + + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + else: + num_tasks = None + global_rank = None + + num_workers = config.get('num_workers', 4) + train_topk = config.get('train_topk', -1) + valid_topk = config.get('valid_topk', -1) + data_dir = args.data_dir + + args.image_size = config.get('image_res', 224) + args.use_data_augmentation = True + + black_image = config.get('black_image', False) + + accelerator.print("black image:", black_image) + + train_split = config.get('train_split', 'karpathy_train') + val_split = config.get('val_split', 'karpathy_val') + test_split = config.get('test_split', 'karpathy_test') + + balanced_data = config.get('balanced_data', False) + + seed = config.get('seed', 42) + + train_loader = get_loader( + args, + split=train_split, mode='train', batch_size=config['batch_size_train'], + distributed=args.distributed, + workers=num_workers, + topk=train_topk, + data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, + verbose=True, black_image=black_image,balanced_data=balanced_data,seed=seed, + ) + + args.raw_label = False + accelerator.print('# len train loader:', len(train_loader)) + accelerator.print(f'Building val loader') + val_loader = get_loader( + args, + split=val_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, + verbose=True, black_image=black_image, seed=seed + ) + accelerator.print('# len val loader:', len(val_loader)) + + accelerator.print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=-1,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, + verbose=True, black_image=black_image, seed=seed + ) + + + accelerator.print('# len test loader:', len(test_loader)) + + + + + #### Model #### + accelerator.print("Creating model") + + 
start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + + vision_model_name = config.get('vision_model_name', args.vision_model) + tokenizer_name = config.get('tokenizer_name', args.text_model) + + if 'opt' in tokenizer_name: + tokenizer = AutoTokenizer.from_pretrained(tokenizer_name, use_fast=False, local_files_only=True) + else: + raise NotImplementedError + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + low_cpu=args.low_cpu + ) + + special_answer_token = config.get('special_answer_token', None) + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + if special_answer_token is not None: + special_tokens_dict = {'additional_special_tokens': [special_answer_token]} + if special_eo_answer_token is not None: + special_tokens_dict['additional_special_tokens'] += [special_eo_answer_token] + + tokenizer.add_special_tokens(special_tokens_dict) + accelerator.print("Adding special token:", special_tokens_dict) + accelerator.print(tokenizer) + + + freeze_whole_model(model) + unfreeze_parameters(model, config) + + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config['optimizer']) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + accelerator.print('\tInitial other params lr: %f' % optimizer.param_groups[0]['lr']) + accelerator.print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = create_scheduler(arg_sche, optimizer) + + best_valid = 0.
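+ # best_valid/best_epoch are updated during training when --save_best is set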
+ best_epoch = 0 + + if args.checkpoint: + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + + msg = model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint from %s'%args.checkpoint) + accelerator.print(msg) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + accelerator.print(checkpoint.keys()) + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + accelerator.print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + + + + print_trainable_params_percentage(model) + val_evaluator = val_loader.evaluator + test_evaluator = test_loader.evaluator + task = val_loader.task + device = accelerator.device + + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler = accelerator.prepare( + model, optimizer, train_loader, val_loader, test_loader, lr_scheduler + ) + model_without_ddp = model.module + model = model.to(device) + + test_loader.evaluator = test_evaluator + val_loader.evaluator = val_evaluator + + test_loader.task = task + val_loader.task = task + + + + accelerator.print("Start training") + start_time = time.time() + + + + for epoch in range(start_epoch, max_epoch): + if epoch>0: + if lr_scheduler is not None: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, + lr_scheduler, config, accelerator=accelerator) + + if args.evaluate: + break + + score_dict = evaluate(model, val_loader, tokenizer, device, distributed=args.distributed, + special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token, accelerator=accelerator) + accelerator.print(score_dict) + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + if lr_scheduler is None: + lr_scheduler_state_dict = {} + else: + lr_scheduler_state_dict = lr_scheduler.state_dict() + + save_obj = { + 'model': filter_state(model_without_ddp.state_dict(), exclude_list), + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler_state_dict, + 'config': config, + 'epoch': epoch, + 'best_valid': score_dict['overall'], + 'best_epoch': epoch, + } + + if args.save_best: + valid_score = score_dict['topk_score'] * 100. 
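+ # note: best-checkpoint selection below uses the raw VQA accuracy ('overall'), not the top-k score computed above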
+ valid_score_raw = score_dict['overall'] + if valid_score_raw > best_valid or epoch == 0: + best_valid = valid_score_raw + best_epoch = epoch + + accelerator.print("save best epoch:", best_epoch) + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + dist.barrier() + + if lr_scheduler is None: + lr_scheduler_state_dict = {} + else: + lr_scheduler_state_dict = lr_scheduler.state_dict() + save_obj = { + 'model': filter_state(model_without_ddp.state_dict(), exclude_list), + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler_state_dict, + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + verbose = utils.is_main_process() + + + ### test best model + if not args.evaluate: + checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + accelerator.print('load checkpoint for test from', args.output_dir, 'checkpoint_best.pth') + accelerator.print(msg) + + quesid2ans = predict(model, test_loader, tokenizer, device, verbose=verbose, + distributed=args.distributed, special_answer_token=special_answer_token, + special_eo_answer_token=special_eo_answer_token, accelerator=accelerator) + + evaluator = test_loader.evaluator + score_dict = evaluator.evaluate(quesid2ans) + + + acc_dict_all = evaluator.evaluate_raw(quesid2ans) + acc_dict_answerable = evaluator.evaluate_raw(quesid2ans, is_topk_optimal=True) + acc_dict_unanswerable = evaluator.evaluate_raw(quesid2ans, is_topk_optimal=False) + + wandb_log_dict = {} + wandb_log_dict['Test/overall'] = acc_dict_all['overall'] + wandb_log_dict['Test/topk_optimal'] = acc_dict_answerable['overall'] + wandb_log_dict['Test/topk_not_optimal'] = acc_dict_unanswerable['overall'] + + for qtype, score in acc_dict_all['perQuestionType'].items(): + wandb_log_dict[f'Test_Qtypes/{qtype}'] = score + for atype, score in acc_dict_all['perAnswerType'].items(): + if atype == 'yes/no': + atype = 'yes_no' + wandb_log_dict[f'Test_Atypes/{atype}'] = score + + accelerator.print(wandb_log_dict) + accelerator.print('best epoch:', best_epoch) + + + if args.distributed: + dist.barrier() + exit() + + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + accelerator.print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', 
action='store_true') + + parser.add_argument('--save_best', action='store_true') + + parser.add_argument('--low_cpu', action='store_true') + + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/app.py b/app.py new file mode 100644 index 0000000000000000000000000000000000000000..f669725424f2fc862f919353b9bf7b4c7006b88b --- /dev/null +++ b/app.py @@ -0,0 +1,168 @@ +import os + +os.system('cd TimeSformer;' + 'python setup.py build develop; cd ..') + +os.system('ls -l') + + +import torch +from torchvision import transforms + +from transformers import AutoTokenizer + +from PIL import Image +import json + +from models.epalm import ePALM + +# import ruamel_yaml as yaml +from ruamel.yaml import YAML + +import gradio as gr + + +yaml = YAML(typ='safe') + + + +use_cuda = torch.cuda.is_available() +device = torch.device('cuda') if use_cuda else torch.device('cpu') + +## Load model + +config = 'configs/image/ePALM_caption.yaml' +# config = yaml.load(open(config, 'r'), Loader=yaml.Loader) +config = yaml.load(open(config, 'r')) + + +text_model = 'facebook/opt-2.7b' +vision_model_name = 'vit_base_patch16_224' + +# text_model = 'facebook/opt-6.7b' +# vision_model_name = 'vit_large_patch16_224' + + +start_layer_idx = 19 +end_layer_idx = 31 +low_cpu = True +model = ePALM(opt_model_name=text_model, + vision_model_name=vision_model_name, + use_vis_prefix=True, + start_layer_idx=start_layer_idx, + end_layer_idx=end_layer_idx, + return_hidden_state_vision=True, + config=config, + low_cpu=low_cpu +) +print("Model Built") +model.to(device) + + +checkpoint_path = 'checkpoints/float32/ePALM_caption/checkpoint_best.pth' +# checkpoint_path = '/data/mshukor/logs/eplam/models/accelerate/ePALM_pt_L_acc_caption/checkpoint_best.pth' +checkpoint = torch.load(checkpoint_path, map_location='cpu') +state_dict = checkpoint['model'] +msg = model.load_state_dict(state_dict, strict=False) + + +## Load tokenizer +tokenizer = AutoTokenizer.from_pretrained(text_model, use_fast=False) +eos_token = tokenizer.eos_token +pad_token = tokenizer.pad_token + + + + +image_size = 224 +normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + +transform = transforms.Compose([ + transforms.Resize((image_size,image_size),interpolation=Image.BICUBIC), + transforms.ToTensor(), + normalize, + ]) + + + + +do_sample=False +num_beams=3 +max_length=30 + + + + +def inference(image, audio, video, task_type, instruction): + + if task_type == 'Image Captioning': + text = [''] + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + else: + raise NotImplementedError + + if "Video" in task_type: + pass + elif "Audio" in task_type: + pass + else: + image = transform(image) + image = image.to(device,non_blocking=True).unsqueeze(0) + + + + + with torch.autocast(device_type='cuda', dtype=torch.float16, enabled=True): + + out = model(image=image, text=text_input, mode='generate', return_dict=True, max_length=max_length, + do_sample=do_sample, num_beams=num_beams) + + for i, o in 
enumerate(out): + res = tokenizer.decode(o) + response = res.split('')[1].replace(pad_token, '').replace('', '').replace(eos_token, '') # skip_special_tokens=True + + return response + + +inputs = [gr.inputs.Image(type='pil'), gr.Audio(source="upload", type="filepath"), gr.Video(source="upload", type="filepath"), gr.inputs.Radio(choices=['Image Captioning', 'Video Captioning', 'Audio Captioning', "Visual Question Answering", "Visual Grounding", "General", "General Video"], type="value", default="Image Captioning", label="Task"), gr.inputs.Textbox(lines=1, label="Instruction")] +outputs = ['text'] +examples = [ + ['examples/images/soccer.jpg', None, None, 'Image Captioning', None], + ['examples/images/ski.jpg', None, None, 'Visual Question Answering', 'what does the woman wearing black do?'], + ['examples/images/banana.jpg', None, None, 'Visual Grounding', 'the detached banana'], + ['examples/images/skateboard.jpg', None, None, 'General', 'which region does the text " a yellow bird " describe?'], + ['examples/images/baseball.jpg', None, None, 'General', 'what color is the left car?'], + [None, None, 'examples/videos/video7014.mp4', 'Video Captioning', None], + [None, None, 'examples/videos/video7017.mp4', 'Video Captioning', None], + [None, None, 'examples/videos/video7019.mp4', 'Video Captioning', None], + [None, None, 'examples/videos/video7021.mp4', 'Video Captioning', None], + [None, None, 'examples/videos/video7021.mp4', 'General Video', "What is this sport?"], + [None, 'examples/audios/6cS0FsUM-cQ.wav', None, 'Audio Captioning', None], + [None, 'examples/audios/AJtNitYMa1I.wav', None, 'Audio Captioning', None], +] + +title = "eP-ALM" +description = "Gradio Demo for eP-ALM: " +article = "

Paper | Github Repo

" + +io = gr.Interface(fn=inference, inputs=inputs, outputs=outputs, + title=title, description=description, article=article, examples=examples, cache_examples=False) +io.launch() \ No newline at end of file diff --git a/checkpoints/accelerate/ePALM_pt_L_acc_caption/checkpoint_best.pth b/checkpoints/accelerate/ePALM_pt_L_acc_caption/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..2b43a7959558865bdafb1ef08e1c67d48ca59a24 --- /dev/null +++ b/checkpoints/accelerate/ePALM_pt_L_acc_caption/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:625971e73ef39a50977b4591455a2b4f09ffa1901a96811c44a84e5911982752 +size 50879681 diff --git a/checkpoints/accelerate/ePALM_pt_L_acc_gqa/checkpoint_best.pth b/checkpoints/accelerate/ePALM_pt_L_acc_gqa/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..e702c576cdeaaf49614c5102df41e2e63d0867ad --- /dev/null +++ b/checkpoints/accelerate/ePALM_pt_L_acc_gqa/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9c7d03bfd6ca76459d21ec26d34f4e1445dccfc391a94a395b0e20268808bd97 +size 50879681 diff --git a/checkpoints/accelerate/ePALM_pt_L_acc_vqa/checkpoint_best.pth b/checkpoints/accelerate/ePALM_pt_L_acc_vqa/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..2ed31880cac188460d291a7f3b74b7c6abc83c80 --- /dev/null +++ b/checkpoints/accelerate/ePALM_pt_L_acc_vqa/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:262adc093fef6a66888e86d5765836e06e149a98ebd26b9c9f229461abcd92de +size 50879681 diff --git a/checkpoints/float32/ePALM_audio_caption/checkpoint_best.pth b/checkpoints/float32/ePALM_audio_caption/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..60acbaac05e5346b01c6f95f06b967c05d98edec --- /dev/null +++ b/checkpoints/float32/ePALM_audio_caption/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3f416992fe4b04840d123b23ec90f051c287f6b3347cf332e11326c6dbe077f +size 299420441 diff --git a/checkpoints/float32/ePALM_audio_caption/config.yaml b/checkpoints/float32/ePALM_audio_caption/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3579cdeff8f638e3df79148ba139731600118d97 --- /dev/null +++ b/checkpoints/float32/ePALM_audio_caption/config.yaml @@ -0,0 +1,38 @@ +all_vis_tokens: false +append_eos_token: true +batch_size_test: 64 +batch_size_train: 8 +dataset_name: audiocaps +end_layer_idx: 31 +freqm_p: 24 +image_res: 224 +injected_hidden_states: 6 +lm_loss_weight: 0.1 +melbins: 128 +modality: audio +noise: false +norm_mean: -4.2677393 +norm_std: 4.5689974 +num_tries: 8 +num_workers: 4 +optimizer: {lr: 2e-05, opt: adamW, prompt_lr: 1e-05, weight_decay: 0.02} +pretrained_model: /gpfswork/rech/dyf/ugz83ue/.cache/torch/hub/checkpoints/audioset_10_10_0.4593.pth +prompt_len: 10 +prompt_tuning: true +replace_added_tokens: true +schedular: {cooldown_epochs: 0, decay_rate: 1, epochs: 30, lr: 2e-05, min_lr: 1e-06, + sched: cosine, scheduler_groups: 0, warmup_epochs: 4, warmup_lr: 1e-05} +shift_labels: false +skip_norm: false +start_layer_idx: 19 +target_length: 1024 +test_split: audiocaps_caption_test +timem_p: 96 +train_split: audiocaps_caption_train +unfreeze_text_layer_norm: false +unfreeze_vision_layer_norm: false +use_cache: false +use_vis_prefix: true +val_split: audiocaps_caption_val +vision_model_name: ast +warm_up: true diff --git 
a/checkpoints/float32/ePALM_caption/checkpoint_best.pth b/checkpoints/float32/ePALM_caption/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..0e4d08c1afe30aa80ebe7f56a45a2a5a4c5da724 --- /dev/null +++ b/checkpoints/float32/ePALM_caption/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19cf35889daf2e20284ff334ecf63c8e226a44d5bbf22dde368e593b66ce8b3e +size 299419097 diff --git a/checkpoints/float32/ePALM_caption/config.yaml b/checkpoints/float32/ePALM_caption/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..39ab1684992ff086d1dc96fe5c6c251f42c475af --- /dev/null +++ b/checkpoints/float32/ePALM_caption/config.yaml @@ -0,0 +1,21 @@ +append_eos_token: true +batch_size_test: 64 +batch_size_train: 16 +end_layer_idx: 31 +image_res: 224 +injected_hidden_states: 6 +lm_loss_weight: 0.1 +num_workers: 4 +optimizer: {lr: 2e-05, opt: adamW, prompt_lr: 1e-05, weight_decay: 0.02} +prompt_len: 10 +prompt_tuning: true +replace_added_tokens: true +schedular: {cooldown_epochs: 0, decay_rate: 1, epochs: 8, lr: 2e-05, min_lr: 1e-06, + sched: cosine, scheduler_groups: 0, warmup_epochs: 4, warmup_lr: 1e-05} +shift_labels: false +start_layer_idx: 19 +unfreeze_text_layer_norm: false +unfreeze_vision_layer_norm: false +use_cache: false +use_vis_prefix: true +warm_up: true diff --git a/checkpoints/float32/ePALM_gqa/checkpoint_best.pth b/checkpoints/float32/ePALM_gqa/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..5bcc79ce3ddfc2736596ea0292a00580016e29fd --- /dev/null +++ b/checkpoints/float32/ePALM_gqa/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d3dbbe0581762284cbcef7c4cf5bf5458bfa01a8393696792eba2e2c07df779 +size 299418969 diff --git a/checkpoints/float32/ePALM_gqa/config.yaml b/checkpoints/float32/ePALM_gqa/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8af4cd3b2f678229bde1ac3ed037e4deceeb263e --- /dev/null +++ b/checkpoints/float32/ePALM_gqa/config.yaml @@ -0,0 +1,26 @@ +batch_size_test: 128 +batch_size_train: 64 +connector_per_text_layer: false +end_layer_idx: 31 +image_res: 224 +injected_hidden_states: 6 +k_test: 64 +lm_loss_weight: 0.1 +num_workers: 4 +optimizer: {lr: 2e-05, opt: adamW, prompt_lr: 1e-05, weight_decay: 0.02} +prompt_len: 10 +prompt_tuning: true +replace_added_tokens: true +schedular: {cooldown_epochs: 0, decay_rate: 1, epochs: 8, lr: 2e-05, min_lr: 1e-06, + sched: cosine, scheduler_groups: 0, warmup_epochs: 4, warmup_lr: 1e-05} +special_answer_token: +start_layer_idx: 19 +test_split: valid +text_step: 1 +train_split: train +unfreeze_text_layer_norm: false +unfreeze_vision_layer_norm: false +use_cache: false +use_vis_prefix: true +val_split: valid +warm_up: true diff --git a/checkpoints/float32/ePALM_video_caption_msrvtt/checkpoint_best.pth b/checkpoints/float32/ePALM_video_caption_msrvtt/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..6f70dd3413490ab75d6a7c3c1adeef2725765bcb --- /dev/null +++ b/checkpoints/float32/ePALM_video_caption_msrvtt/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:396f4bf1cc936ae0da9c33c64acdf0f39e0d648ab1e5f6c203d428b533b93353 +size 299420569 diff --git a/checkpoints/float32/ePALM_video_caption_msrvtt/config.yaml b/checkpoints/float32/ePALM_video_caption_msrvtt/config.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..fc879e125dbbb8ad906e882ca3b46257ac019844 --- /dev/null +++ b/checkpoints/float32/ePALM_video_caption_msrvtt/config.yaml @@ -0,0 +1,33 @@ +all_vis_tokens: false +append_eos_token: true +as_images: false +batch_size_test: 8 +batch_size_train: 16 +dataset_name: msrvtt +end_layer_idx: 31 +image_res: 224 +injected_hidden_states: 6 +lm_loss_weight: 0.1 +modality: video +num_frames: 16 +num_tries: 1 +num_workers: 4 +optimizer: {lr: 2e-05, opt: adamW, prompt_lr: 1e-05, weight_decay: 0.02} +pretrained_model: /gpfswork/rech/dyf/ugz83ue/.cache/torch/hub/checkpoints/TimeSformer_divST_8x32_224_K600.pyth +prompt_len: 10 +prompt_tuning: true +replace_added_tokens: true +sample_type: rand +schedular: {cooldown_epochs: 0, decay_rate: 1, epochs: 26, lr: 2e-05, min_lr: 1e-06, + sched: cosine, scheduler_groups: 0, warmup_epochs: 4, warmup_lr: 1e-05} +shift_labels: false +start_layer_idx: 19 +test_split: msrvtt_caption_test +train_split: msrvtt_caption_train7k +unfreeze_text_layer_norm: false +unfreeze_vision_layer_norm: false +use_cache: false +use_vis_prefix: true +val_split: msrvtt_caption_test +vision_model_name: timesformer +warm_up: true diff --git a/checkpoints/float32/ePALM_video_qa_msrvtt/checkpoint_best.pth b/checkpoints/float32/ePALM_video_qa_msrvtt/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..142fb36faec7f5d4c44b8369a617c73e7a4c921d --- /dev/null +++ b/checkpoints/float32/ePALM_video_qa_msrvtt/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:840c8ab56bd51d4ecc5ebddf364eb569ca459bc56e9da99aa7b197b3fa62ca79 +size 299419737 diff --git a/checkpoints/float32/ePALM_video_qa_msrvtt/config.yaml b/checkpoints/float32/ePALM_video_qa_msrvtt/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..32838ac712033a22e7a28bb2f1d43977737932f3 --- /dev/null +++ b/checkpoints/float32/ePALM_video_qa_msrvtt/config.yaml @@ -0,0 +1,35 @@ +all_vis_tokens: false +as_images: false +batch_size_test: 64 +batch_size_train: 8 +connector_per_text_layer: false +dataset_name: msrvtt +end_layer_idx: 31 +image_res: 224 +injected_hidden_states: 6 +k_test: 64 +lm_loss_weight: 0.1 +modality: video +num_frames: 8 +num_tries: 1 +num_workers: 4 +optimizer: {lr: 2e-05, opt: adamW, prompt_lr: 1e-05, weight_decay: 0.02} +pretrained_model: /gpfswork/rech/dyf/ugz83ue/.cache/torch/hub/checkpoints/TimeSformer_divST_8x32_224_K600.pyth +prompt_len: 10 +prompt_tuning: true +replace_added_tokens: true +sample_type: rand +schedular: {cooldown_epochs: 0, decay_rate: 1, epochs: 16, lr: 2e-05, min_lr: 1e-06, + sched: cosine, scheduler_groups: 0, warmup_epochs: 4, warmup_lr: 1e-05} +special_answer_token: +start_layer_idx: 19 +test_split: msrvtt_vqa_test +text_step: 1 +train_split: msrvtt_vqa_train +unfreeze_text_layer_norm: false +unfreeze_vision_layer_norm: false +use_cache: false +use_vis_prefix: true +val_split: msrvtt_vqa_val +vision_model_name: timesformer +warm_up: true diff --git a/checkpoints/float32/ePALM_video_qa_msvd/checkpoint_best.pth b/checkpoints/float32/ePALM_video_qa_msvd/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..aff72394103ff49ae63364e60785413b462c3f36 --- /dev/null +++ b/checkpoints/float32/ePALM_video_qa_msvd/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0c40fb28fc03c76e04b4ec231fcc77d814e75a658e356f013a2a1a3545bf6489 +size 299419737 diff --git 
a/checkpoints/float32/ePALM_video_qa_msvd/config.yaml b/checkpoints/float32/ePALM_video_qa_msvd/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a05fdc117de0e6d1da0a5bf8daf0b222cbde7df2 --- /dev/null +++ b/checkpoints/float32/ePALM_video_qa_msvd/config.yaml @@ -0,0 +1,35 @@ +all_vis_tokens: false +as_images: false +batch_size_test: 64 +batch_size_train: 8 +connector_per_text_layer: false +dataset_name: msvd +end_layer_idx: 31 +image_res: 224 +injected_hidden_states: 6 +k_test: 64 +lm_loss_weight: 0.1 +modality: video +num_frames: 8 +num_tries: 1 +num_workers: 4 +optimizer: {lr: 2e-05, opt: adamW, prompt_lr: 1e-05, weight_decay: 0.02} +pretrained_model: /gpfswork/rech/dyf/ugz83ue/.cache/torch/hub/checkpoints/TimeSformer_divST_8x32_224_K600.pyth +prompt_len: 10 +prompt_tuning: true +replace_added_tokens: true +sample_type: rand +schedular: {cooldown_epochs: 0, decay_rate: 1, epochs: 26, lr: 2e-05, min_lr: 1e-06, + sched: cosine, scheduler_groups: 0, warmup_epochs: 4, warmup_lr: 1e-05} +special_answer_token: +start_layer_idx: 19 +test_split: msvd_vqa_test +text_step: 1 +train_split: msvd_vqa_train +unfreeze_text_layer_norm: false +unfreeze_vision_layer_norm: false +use_cache: false +use_vis_prefix: true +val_split: msvd_vqa_val +vision_model_name: timesformer +warm_up: true diff --git a/checkpoints/float32/ePALM_vqa/checkpoint_best.pth b/checkpoints/float32/ePALM_vqa/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..8ad58d64985badb9ebabe1ee6d6ea68ce143b0a5 --- /dev/null +++ b/checkpoints/float32/ePALM_vqa/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed245e9f9395937cc5297f6415ec89880fdcc13d48cc79f988c829f4632543a8 +size 299418969 diff --git a/checkpoints/float32/ePALM_vqa/config.yaml b/checkpoints/float32/ePALM_vqa/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..30ad5c5f723d4dad6a2dd46297e91bfa0b5565d9 --- /dev/null +++ b/checkpoints/float32/ePALM_vqa/config.yaml @@ -0,0 +1,23 @@ +batch_size_test: 64 +batch_size_train: 64 +connector_per_text_layer: false +end_layer_idx: 31 +image_res: 224 +injected_hidden_states: 6 +k_test: 64 +lm_loss_weight: 0.1 +num_workers: 4 +optimizer: {lr: 2e-05, opt: adamW, prompt_lr: 1e-05, weight_decay: 0.02} +prompt_len: 10 +prompt_tuning: true +replace_added_tokens: true +schedular: {cooldown_epochs: 0, decay_rate: 1, epochs: 8, lr: 2e-05, min_lr: 1e-06, + sched: cosine, scheduler_groups: 0, warmup_epochs: 4, warmup_lr: 1e-05} +special_answer_token: +start_layer_idx: 19 +text_step: 1 +unfreeze_text_layer_norm: false +unfreeze_vision_layer_norm: false +use_cache: false +use_vis_prefix: true +warm_up: true diff --git a/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_noeostk-checkpoint.yaml b/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_noeostk-checkpoint.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5bc0a894adc1d900b2d953d66d1bcbcb4211518a --- /dev/null +++ b/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_noeostk-checkpoint.yaml @@ -0,0 +1,38 @@ +image_res: 224 +batch_size_train: 64 + +batch_size_test: 64 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02} +schedular: {sched: cosine, lr: 2e-5, epochs: 8, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 11 +end_layer_idx: 23 + +injected_hidden_states: 1 + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False 
+unfreeze_vision_layer_norm: False + + +num_workers: 4 + +# train_topk: 50 +# valid_topk: 50 + + +replace_added_tokens: True + + +use_cache: False + +shift_labels: False + +append_eos_token: True \ No newline at end of file diff --git a/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_noeostk_6cls_l11_23-checkpoint.yaml b/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_noeostk_6cls_l11_23-checkpoint.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_noeostk_6cls_l11_23_adapt_lr1e5-checkpoint.yaml b/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_noeostk_6cls_l11_23_adapt_lr1e5-checkpoint.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_shiftinput_noeostk_tprompt-checkpoint.yaml b/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_shiftinput_noeostk_tprompt-checkpoint.yaml new file mode 100644 index 0000000000000000000000000000000000000000..659ecbf86cac948f1f12759148c276faaceac349 --- /dev/null +++ b/configs/.ipynb_checkpoints/visopt_caption_reptok_nocache_shiftinput_noeostk_tprompt-checkpoint.yaml @@ -0,0 +1,41 @@ +image_res: 224 +batch_size_train: 64 + +batch_size_test: 64 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02} +schedular: {sched: cosine, lr: 2e-5, epochs: 8, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 11 +end_layer_idx: 23 + +injected_hidden_states: 1 + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + +num_workers: 4 + +# train_topk: 50 +# valid_topk: 50 + + +replace_added_tokens: True + + +use_cache: False + +shift_labels: True + +append_eos_token: True + + +task_prompt: 'Describe the image:' \ No newline at end of file diff --git a/configs/.ipynb_checkpoints/visopt_vqa-checkpoint.yaml b/configs/.ipynb_checkpoints/visopt_vqa-checkpoint.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a6665c4974d225f378518a4d447398ba7bb5e38a --- /dev/null +++ b/configs/.ipynb_checkpoints/visopt_vqa-checkpoint.yaml @@ -0,0 +1,37 @@ +train_file: ['vqa_train.json', + 'vqa_val.json', + ] #'vg_qa.json' + +test_file: ['vqa_test.json'] +answer_list: 'answer_list.json' + +vqa_root: 'coco/' #train2014/ +vg_root: 'visual_genome/' #image/ + +image_res: 224 +batch_size_train: 64 +batch_size_test: 4 +k_test: 128 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02} +schedular: {sched: cosine, lr: 2e-5, epochs: 8, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 11 +end_layer_idx: 23 + +injected_hidden_states: 1 + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + + + + \ No newline at end of file diff --git a/configs/audio/ePALM_audio_caption.yaml b/configs/audio/ePALM_audio_caption.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5685f4e3e76f47e81f36125f428929ba1ec31c15 --- /dev/null +++ b/configs/audio/ePALM_audio_caption.yaml @@ -0,0 +1,75 @@ +image_res: 224 +batch_size_train: 8 + +batch_size_test: 64 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02, prompt_lr: 1e-5} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-5, epochs: 
30, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + +num_workers: 4 + + + + +replace_added_tokens: True + + +use_cache: False + +shift_labels: False + +append_eos_token: True + +num_beams: 3 +do_sample: False + +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + + +modality: 'audio' +dataset_name: 'audiocaps' + +train_split: 'audiocaps_caption_train' +val_split: 'audiocaps_caption_val' +test_split: 'audiocaps_caption_test' + + +melbins: 128 +target_length: 1024 +num_tries: 8 + +skip_norm: False +norm_mean: -4.2677393 +norm_std: 4.5689974 +noise: False + +freqm_p: 24 +timem_p: 96 + + + + + +all_vis_tokens: False + + +vision_model_name: 'ast' +pretrained_model: '/gpfswork/rech/dyf/ugz83ue/.cache/torch/hub/checkpoints/audioset_10_10_0.4593.pth' \ No newline at end of file diff --git a/configs/image/ePALM_caption.yaml b/configs/image/ePALM_caption.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a8ed0d4e60cbc0544aae56ccd565987b9e65ddf0 --- /dev/null +++ b/configs/image/ePALM_caption.yaml @@ -0,0 +1,45 @@ +image_res: 224 +batch_size_train: 16 + +batch_size_test: 64 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02, prompt_lr: 1e-5} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-5, epochs: 8, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + +num_workers: 4 + + + + +replace_added_tokens: True + + +use_cache: False + +shift_labels: False + +append_eos_token: True + +num_beams: 3 +do_sample: False + +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + diff --git a/configs/image/ePALM_gqa.yaml b/configs/image/ePALM_gqa.yaml new file mode 100644 index 0000000000000000000000000000000000000000..18599b1dc2cd2bd61e9acc4adc48a95acbded248 --- /dev/null +++ b/configs/image/ePALM_gqa.yaml @@ -0,0 +1,49 @@ + +image_res: 224 +batch_size_train: 64 + +batch_size_test: 128 +k_test: 64 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02, prompt_lr: 1e-5} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-5, epochs: 8, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + +train_split: 'train' +val_split: 'valid' +test_split: 'test' + + +num_workers: 4 + +special_answer_token: '' + +replace_added_tokens: True + + +use_cache: False + +connector_per_text_layer: False + +text_step: 1 + +num_beams: 3 +do_sample: False +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + diff --git a/configs/image/ePALM_pt_L_caption.yaml b/configs/image/ePALM_pt_L_caption.yaml new file mode 100644 index 0000000000000000000000000000000000000000..03eb431891a1d73c110ecc66a2b19a607f6255d8 --- /dev/null +++ b/configs/image/ePALM_pt_L_caption.yaml @@ -0,0 +1,46 @@ +image_res: 224 +batch_size_train: 2 + +batch_size_test: 16 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-4, weight_decay: 0.02, prompt_lr: 1e-3} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-4, epochs: 
35, min_lr: 1e-5, decay_rate: 1, warmup_lr: 1e-4, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 +shared_connector: True + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + +num_workers: 4 + + + +replace_added_tokens: True + + +use_cache: False + +shift_labels: False + +append_eos_token: True + +num_beams: 3 +do_sample: False +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + +mlp: False + diff --git a/configs/image/ePALM_pt_L_gqa.yaml b/configs/image/ePALM_pt_L_gqa.yaml new file mode 100644 index 0000000000000000000000000000000000000000..aa0095346bf5390f5f6969f757dac9597b6dbb02 --- /dev/null +++ b/configs/image/ePALM_pt_L_gqa.yaml @@ -0,0 +1,53 @@ + + +image_res: 224 +batch_size_train: 8 + +batch_size_test: 16 +k_test: 64 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-4, weight_decay: 0.02, prompt_lr: 1e-3} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-4, epochs: 35, min_lr: 1e-5, decay_rate: 1, warmup_lr: 1e-4, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 +shared_connector: True + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + +num_workers: 4 + +special_answer_token: '' + +replace_added_tokens: True + + +use_cache: False + +connector_per_text_layer: False + +text_step: 1 + +num_beams: 3 +do_sample: False +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + +mlp: False + + +train_split: 'train' +val_split: 'valid' +test_split: 'test' \ No newline at end of file diff --git a/configs/image/ePALM_pt_L_vqa.yaml b/configs/image/ePALM_pt_L_vqa.yaml new file mode 100644 index 0000000000000000000000000000000000000000..acffb4c555136a96167f444abd00031402f64428 --- /dev/null +++ b/configs/image/ePALM_pt_L_vqa.yaml @@ -0,0 +1,52 @@ + + +image_res: 224 +batch_size_train: 2 + +batch_size_test: 16 +k_test: 64 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-4, weight_decay: 0.02, prompt_lr: 1e-3} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-4, epochs: 35, min_lr: 1e-5, decay_rate: 1, warmup_lr: 1e-4, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 +shared_connector: True + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + +num_workers: 4 + +special_answer_token: '' + +replace_added_tokens: True + + +use_cache: False + +connector_per_text_layer: False + +text_step: 1 + +num_beams: 3 +do_sample: False +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + +mlp: False + +train_split: 'train_standard' +val_split: 'karpathy_val' +test_split: 'karpathy_val' # val_standard \ No newline at end of file diff --git a/configs/image/ePALM_vqa.yaml b/configs/image/ePALM_vqa.yaml new file mode 100644 index 0000000000000000000000000000000000000000..04ddb14ac99028c312caa1261eb0aca45cd89fca --- /dev/null +++ b/configs/image/ePALM_vqa.yaml @@ -0,0 +1,46 @@ + +image_res: 224 +batch_size_train: 64 + +batch_size_test: 64 +k_test: 64 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02, prompt_lr: 1e-5} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-5, epochs: 8, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 + +lm_loss_weight: 0.1 + 
+unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + + +num_workers: 4 + +special_answer_token: '' + +replace_added_tokens: True + + +use_cache: False + +connector_per_text_layer: False + +text_step: 1 + +num_beams: 3 +do_sample: False +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + diff --git a/configs/video/ePALM_video_caption_msrvtt.yaml b/configs/video/ePALM_video_caption_msrvtt.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1c1c633fc85b5b6f2f2cdc173b46465dd00ba9aa --- /dev/null +++ b/configs/video/ePALM_video_caption_msrvtt.yaml @@ -0,0 +1,63 @@ +image_res: 224 +batch_size_train: 16 + +batch_size_test: 8 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02, prompt_lr: 1e-5} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-5, epochs: 26, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + +num_workers: 4 + + + + +replace_added_tokens: True + + +use_cache: False + +shift_labels: False + +append_eos_token: True + +num_beams: 3 +do_sample: False +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + + +# video +modality: 'video' +dataset_name: 'msrvtt' + +train_split: 'msrvtt_caption_train7k' +val_split: 'msrvtt_caption_test' +test_split: 'msrvtt_caption_test' + + +num_frames: 16 +sample_type: 'rand' +num_tries: 1 +as_images: False +all_vis_tokens: False + + +vision_model_name: 'timesformer' +pretrained_model: '/gpfswork/rech/dyf/ugz83ue/.cache/torch/hub/checkpoints/TimeSformer_divST_8x32_224_K600.pyth' \ No newline at end of file diff --git a/configs/video/ePALM_video_qa_msrvtt.yaml b/configs/video/ePALM_video_qa_msrvtt.yaml new file mode 100644 index 0000000000000000000000000000000000000000..daebbe3353fb57a014645ee1d30fe09e0d39c7fd --- /dev/null +++ b/configs/video/ePALM_video_qa_msrvtt.yaml @@ -0,0 +1,64 @@ +image_res: 224 +batch_size_train: 8 + +batch_size_test: 64 +k_test: 64 + +warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02, prompt_lr: 1e-5} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-5, epochs: 26, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + + +num_workers: 4 + +special_answer_token: '' + +replace_added_tokens: True + + +use_cache: False + +connector_per_text_layer: False + +text_step: 1 + +num_beams: 3 +do_sample: False +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + + +# video +modality: 'video' +dataset_name: 'msrvtt' + +train_split: 'msrvtt_vqa_train' +val_split: 'msrvtt_vqa_val' +test_split: 'msrvtt_vqa_test' + + +num_frames: 8 +sample_type: 'rand' +num_tries: 1 +as_images: False +all_vis_tokens: False + + +vision_model_name: 'timesformer' +pretrained_model: '/gpfswork/rech/dyf/ugz83ue/.cache/torch/hub/checkpoints/TimeSformer_divST_8x32_224_K600.pyth' diff --git a/configs/video/ePALM_video_qa_msvd.yaml b/configs/video/ePALM_video_qa_msvd.yaml new file mode 100644 index 0000000000000000000000000000000000000000..810e5b759c17ccee08533b9a17a57ccedbb59098 --- /dev/null +++ b/configs/video/ePALM_video_qa_msvd.yaml @@ -0,0 +1,63 @@ +image_res: 224 +batch_size_train: 8 + +batch_size_test: 64 +k_test: 64 + 
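
These task configs all share the same injection hyperparameters (`start_layer_idx: 19`, `end_layer_idx: 31`, `injected_hidden_states: 6`, `prompt_len: 10`). As a rough orientation for how such keys could drive the model, here is a hedged sketch of the visual-prefix connector they parameterize; the class name, dimensions, and [CLS]-selection rule are illustrative assumptions, not the repo's exact implementation:

```python
import torch.nn as nn

class VisualPrefixConnector(nn.Module):
    # Sketch only: project the [CLS] token of the last N vision-encoder
    # layers (N = injected_hidden_states) into the LM hidden size, so that
    # OPT layers start_layer_idx..end_layer_idx can prepend them as a prefix.
    def __init__(self, vis_dim=768, lm_dim=2560, injected_hidden_states=6):
        super().__init__()
        self.proj = nn.Linear(vis_dim, lm_dim)  # the single trainable projection
        self.num_injected = injected_hidden_states

    def forward(self, vision_hidden_states):
        # vision_hidden_states: list of (B, tokens, vis_dim), one per ViT layer
        cls_tokens = [h[:, :1, :] for h in vision_hidden_states[-self.num_injected:]]
        return [self.proj(c) for c in cls_tokens]  # each (B, 1, lm_dim)
```

Each projected token would replace the previously injected one at the next injection point, so the inference overhead stays at a single extra token in the prepended sequence.
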
+warm_up: True + + + +optimizer: {opt: adamW, lr: 2e-5, weight_decay: 0.02, prompt_lr: 1e-5} +schedular: {sched: cosine, scheduler_groups: 0 , lr: 2e-5, epochs: 26, min_lr: 1e-6, decay_rate: 1, warmup_lr: 1e-5, warmup_epochs: 4, cooldown_epochs: 0} + +use_vis_prefix: True +start_layer_idx: 19 +end_layer_idx: 31 + +injected_hidden_states: 6 + +lm_loss_weight: 0.1 + +unfreeze_text_layer_norm: False +unfreeze_vision_layer_norm: False + + +num_workers: 4 + +special_answer_token: '' + +replace_added_tokens: True + + +use_cache: False + +connector_per_text_layer: False + +text_step: 1 + +num_beams: 3 +do_sample: False +# Prompt tuning +prompt_tuning: True +prompt_len: 10 + + +# video +modality: 'video' +dataset_name: 'msvd' + +train_split: 'msvd_vqa_train' +val_split: 'msvd_vqa_val' +test_split: 'msvd_vqa_test' + + +num_frames: 8 +sample_type: 'rand' +num_tries: 1 +as_images: False +all_vis_tokens: False + + +vision_model_name: 'timesformer' +pretrained_model: '/gpfswork/rech/dyf/ugz83ue/.cache/torch/hub/checkpoints/TimeSformer_divST_8x32_224_K600.pyth' diff --git a/dataset/.ipynb_checkpoints/__init__-checkpoint.py b/dataset/.ipynb_checkpoints/__init__-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..59e5256b2fa670940e39ae71dfc0a5d3be3f5caa --- /dev/null +++ b/dataset/.ipynb_checkpoints/__init__-checkpoint.py @@ -0,0 +1,73 @@ +from dataset.randaugment import RandomAugment +from torch.utils.data import DataLoader + +from .vqa import vqa_dataset + +import torch +from torch import nn +from torchvision import transforms + +from PIL import Image + + +def create_dataset(dataset, config, data_dir='/data/mshukor/data'): + + normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + + pretrain_transform = transforms.Compose([ + transforms.RandomResizedCrop(config['image_res'],scale=(0.2, 1.0), interpolation=Image.BICUBIC), + transforms.RandomHorizontalFlip(), + RandomAugment(2,7,isPIL=True,augs=['Identity','AutoContrast','Equalize','Brightness','Sharpness', + 'ShearX', 'ShearY', 'TranslateX', 'TranslateY', 'Rotate']), + transforms.ToTensor(), + normalize, + ]) + train_transform = transforms.Compose([ + transforms.RandomResizedCrop(config['image_res'],scale=(0.5, 1.0), interpolation=Image.BICUBIC), + transforms.RandomHorizontalFlip(), + RandomAugment(2,7,isPIL=True,augs=['Identity','AutoContrast','Equalize','Brightness','Sharpness', + 'ShearX', 'ShearY', 'TranslateX', 'TranslateY', 'Rotate']), + transforms.ToTensor(), + normalize, + ]) + test_transform = transforms.Compose([ + transforms.Resize((config['image_res'],config['image_res']),interpolation=Image.BICUBIC), + transforms.ToTensor(), + normalize, + ]) + + if dataset=='vqa': + train_dataset = vqa_dataset(config['train_file'], train_transform, config['vqa_root'], config['vg_root'], split='train') + vqa_test_dataset = vqa_dataset(config['test_file'], test_transform, config['vqa_root'], config['vg_root'], split='test', answer_list=config['answer_list']) + return train_dataset, vqa_test_dataset + + +def create_loader(datasets, samplers, batch_size, num_workers, is_trains, collate_fns): + loaders = [] + for dataset,sampler,bs,n_worker,is_train,collate_fn in zip(datasets,samplers,batch_size,num_workers,is_trains,collate_fns): + if is_train: + shuffle = (sampler is None) + drop_last = True + else: + shuffle = False + drop_last = False + loader = DataLoader( + dataset, + batch_size=bs, + num_workers=n_worker, + pin_memory=True, + sampler=sampler, + shuffle=shuffle, + 
collate_fn=collate_fn, + drop_last=drop_last, + ) + loaders.append(loader) + return loaders + + +def create_sampler(datasets, shuffles, num_tasks, global_rank): + samplers = [] + for dataset,shuffle in zip(datasets,shuffles): + sampler = torch.utils.data.DistributedSampler(dataset, num_replicas=num_tasks, rank=global_rank, shuffle=shuffle) + samplers.append(sampler) + return samplers \ No newline at end of file diff --git a/dataset/.ipynb_checkpoints/caption-checkpoint.py b/dataset/.ipynb_checkpoints/caption-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..18b677cbeedcba9117438a6ab8f78b3cb3400eb8 --- /dev/null +++ b/dataset/.ipynb_checkpoints/caption-checkpoint.py @@ -0,0 +1,612 @@ +from torch.utils.data import DataLoader, Dataset, Sampler +from pathlib import Path +from collections import defaultdict +import json +import random +from multiprocessing import Pool +import h5py +import pickle +import math +from tqdm import tqdm +import torch +import numpy as np +from copy import deepcopy +from PIL import Image + +from torch.utils.data.distributed import DistributedSampler + +# from transformers import T5TokenizerFast, BartTokenizer +# from tokenization import VLT5TokenizerFast + +# from vis_encoder import _transform +# from vqa_raw_data import augmentation_transform + +from dataset.randaugment import RandomAugment + +import torch +from torch import nn +from torchvision import transforms + +import os +import re +# project_dir = Path(__file__).resolve().parent.parent # VLT5 +# workspace_dir = project_dir.parent +# dataset_dir = workspace_dir.joinpath('datasets/').resolve() +# coco_dir = dataset_dir.joinpath('COCO') +# vg_dir = dataset_dir.joinpath('VG') +# coco_img_dir = coco_dir.joinpath('images/') +# coco_feature_dir = coco_dir.joinpath('features') + + +class COCOCaptionFineTuneDataset(Dataset): + def __init__(self, split='karpathy_train', raw_dataset=None, rank=-1, topk=-1, verbose=True, args=None, mode='train', + data_dir='/data/mshukor/data'): + super().__init__() + + self.raw_dataset = raw_dataset + self.topk = topk + self.verbose = verbose + self.args = args + + self.args.BUTD100 = False + + self.mode = mode + + dataset_dir = Path(data_dir) + coco_dir = dataset_dir.joinpath('COCO') + vg_dir = dataset_dir.joinpath('VG') + coco_img_dir = coco_dir.joinpath('images/') + coco_feature_dir = coco_dir.joinpath('features') + + + # Loading datasets to data + self.source = split + if self.verbose: + print('Data source: ', self.source) + + + normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + + + self.train_transform = transforms.Compose([ + transforms.RandomResizedCrop(args.image_size,scale=(0.5, 1.0), interpolation=Image.BICUBIC), + transforms.RandomHorizontalFlip(), + RandomAugment(2,7,isPIL=True,augs=['Identity','AutoContrast','Equalize','Brightness','Sharpness', + 'ShearX', 'ShearY', 'TranslateX', 'TranslateY', 'Rotate']), + transforms.ToTensor(), + normalize, + ]) + self.test_transform = transforms.Compose([ + transforms.Resize((args.image_size,args.image_size),interpolation=Image.BICUBIC), + transforms.ToTensor(), + normalize, + ]) + + # if self.args.tokenizer is None: + # self.args.tokenizer = self.args.backbone + + # if 't5' in self.args.tokenizer: + # if self.args.use_vision: + # self.tokenizer = VLT5TokenizerFast.from_pretrained( + # args.backbone, + # # max_length=self.args.max_text_length, + # do_lower_case=self.args.do_lower_case) + # else: + # self.tokenizer = T5TokenizerFast.from_pretrained( + 
# args.backbone, + # # max_length=self.args.max_text_length, + # do_lower_case=self.args.do_lower_case) + # elif 'bart' in self.args.tokenizer: + # self.tokenizer = BartTokenizer.from_pretrained( + # args.backbone, + # # max_length=self.args.max_text_length, + # do_lower_case=self.args.do_lower_case) + + # additional_special_tokens = [f'' for i in range(100-1, -1, -1)] + \ + # [f'' for i in range(100-1, -1, -1)] + # special_tokens_dict = {'additional_special_tokens': additional_special_tokens} + # num_added_toks = self.tokenizer.add_special_tokens(special_tokens_dict) + + # if self.args.oscar_tags: + # # Load VG Classes + # vg_classes = [] + # with open(vg_dir.joinpath('objects_vocab.txt')) as f: + # for obj in f.readlines(): + # vg_classes.append(obj.split(',')[0].lower().strip()) + # self.vg_classes = vg_classes + + data_info_path = dataset_dir.joinpath('COCO/dataset_coco.json') + with open(data_info_path) as f: + karpathy_data = json.load(f) + + split_rename = { + 'train': 'train', + 'restval': 'train', + 'val': 'val', + 'test': 'test' + } + + n_images = 0 + + data = [] + for datum in karpathy_data['images']: + re_split = split_rename[datum['split']] + if re_split != self.source.split('_')[-1]: + continue + + if re_split == 'train': + for d in datum['sentences']: + # if self.args.BUTD100: + # img_id = str(int(datum['filename'].split('.')[0].split('_')[-1])) + # else: + img_id = datum['filename'].split('.')[0] + new_datum = { + 'img_id': img_id, + 'sent': d['raw'].strip(), + 'targets': [d['raw'].strip() for d in datum['sentences']], + 'is_train': True, + } + data.append(new_datum) + else: + # if self.args.BUTD100: + # img_id = str( + # int(datum['filename'].split('.')[0].split('_')[-1])) + # else: + img_id = datum['filename'].split('.')[0] + new_datum = { + 'img_id': img_id, + # 'sent': d['raw'], + 'targets': [d['raw'].strip() for d in datum['sentences']], + 'is_train': False, + } + data.append(new_datum) + + n_images += 1 + + if self.verbose: + print(f"{self.source} has {n_images} images") + print(f"Loaded {len(data)} data from", split) + + # self.n_gpus = torch.cuda.device_count() + + # self.rank = rank + + if isinstance(self.topk, float) and (0 < self.topk <= 1): + used_samples = int(self.topk * len(data)) + data = random.sample(data, used_samples) + if self.verbose: + print(f"Use only {len(data)} data") + + elif self.topk > 0: + data = data[:int(self.topk)] + if self.verbose: + print(f"Use only {len(data)} data") + + self.data = data + + if self.verbose: + print("# all sentences:", len(self.data)) + + # self.n_boxes = args.n_boxes + + self.image_size = self.args.image_size + + if mode == "train" and self.args.use_data_augmentation: + self.transform = self.train_transform + else: + self.transform = self.test_transform + + self.source_to_h5 = {} + + # if self.args.max_n_boxes == 36: + self.source_to_h5.update({ + 'train2014': coco_img_dir.joinpath(f'train2014'), + 'val2014': coco_img_dir.joinpath(f'val2014'), + }) + + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + + out_dict = {} + out_dict['args'] = self.args + + datum = self.data[idx] + + ###### Image ###### + # if self.args.use_vision: + img_id = datum['img_id'] + out_dict['img_id'] = img_id + + + if self.args.BUTD100: + source = self.source + else: + if 'train' in img_id: + source = 'train2014' + elif 'val' in img_id: + source = 'val2014' + + path = self.source_to_h5[source].joinpath(f"{img_id}.jpg") + + image = Image.open(path).convert('RGB') + + + out_dict["image"] = self.transform(image) + + # 
out_dict['n_boxes'] = self.args.n_boxes + + + ###### Text ##### + # if self.args.no_prefix: + # input_text = '' + # input_ids = [] + + # else: + # if self.args.prefix is None: + # prefix = f'{self.args.prompt}' + # elif self.args.prefix == 'span': + # prefix = "span prediction:" + # elif self.args.prefix == 'denoise': + # prefix = "denoise text: " + # elif self.args.prefix == 'mask': + # if 'bart' in self.args.tokenizer: + # prefix = "" + + # input_tokens = [prefix] + + # # if self.args.oscar_tags: + # # prefix = f'describe image with tags:' + # # input_tokens = [prefix] + # # obj_ids = f[f'{img_id}/obj_id'][()] + # # for obj_id in obj_ids: + # # obj = self.vg_classes[obj_id] + # # if obj not in input_tokens: + # # input_tokens.append(obj) + # input_text = ' '.join(input_tokens) + + # # if 't5' in self.args.tokenizer: + # # input_ids = self.tokenizer.encode( + # # input_text, + # # max_length=self.args.max_text_length, truncation=True) + # # elif 'bart' in self.args.tokenizer: + # # input_ids = self.tokenizer.encode( + # # input_text, + # # max_length=self.args.max_text_length, truncation=True) + # # else: + # # input_ids = self.tokenizer.convert_tokens_to_ids( + # # self.tokenizer.tokenize(input_text)[:self.args.max_text_length - 1] + ['[SEP]']) + + # out_dict['input_text'] = input_text + + # out_dict['input_ids'] = torch.LongTensor(input_ids) + # out_dict['input_length'] = len(input_ids) + if datum['is_train']: + sent = datum['sent'].strip() + # if 't5' in self.args.tokenizer: + # target_ids = self.tokenizer.encode(sent, max_length=self.args.gen_max_length, truncation=True) + # elif 'bart' in self.args.tokenizer: + # target_ids = self.tokenizer.encode(sent, max_length=self.args.gen_max_length, truncation=True) + + # assert len(target_ids) <= self.args.gen_max_length, len(target_ids) + out_dict['sent'] = sent + # out_dict['target_ids'] = torch.LongTensor(target_ids) + # out_dict['target_length'] = len(target_ids) + + if 'targets' in datum: + out_dict['targets'] = datum['targets'] + + + return out_dict + + def collate_fn(self, batch): + batch_entry = {} + + B = len(batch) + + # S_W_L = max(entry['input_length'] for entry in batch) + # input_ids = torch.ones(B, S_W_L, dtype=torch.long) * self.tokenizer.pad_token_id + + # if self.args.no_prefix: + # assert input_ids.size() == (B, 0) + + # if self.args.use_vision: + # pass + + if 'target_ids' in batch[0]: + T_W_L = max(entry['target_length'] for entry in batch) + target_ids = torch.ones(B, T_W_L, dtype=torch.long) * self.tokenizer.pad_token_id + + # sentences = [] + + targets = [] + img_ids = [] + img_paths = [] + input_text = [] + images = [] + sents = [] + + for i, entry in enumerate(batch): + # input_ids[i, :entry['input_length']] = entry['input_ids'] + + # if self.args.use_vision: + # n_boxes = entry['n_boxes'] + images.append(entry['image']) + img_ids.append(entry['img_id']) + # img_paths.append(entry['img_path']) + + if 'target_ids' in entry: + target_ids[i, :entry['target_length']] = entry['target_ids'] + + # if 'input_text' in entry: + # input_text.append(entry['input_text']) + + # sentences.append(entry['sent']) + + if 'targets' in entry: + targets.append(entry['targets']) + if 'sent' in entry: + sents.append(entry['sent']) + # batch_entry['input_ids'] = input_ids + # if 'target_ids' in batch[0]: + # word_mask = target_ids != self.tokenizer.pad_token_id + # target_ids[~word_mask] = -100 + # batch_entry['target_ids'] = target_ids + + # if self.args.use_vision: + batch_entry['images'] = torch.stack(images) + batch_entry['img_id'] = 
img_ids + batch_entry['img_paths'] = img_paths + if 'sent' in entry: + batch_entry['sent'] = sents + + # batch_entry['sent'] = sentences + + # batch_entry['input_text'] = input_text + + batch_entry['targets'] = targets + + batch_entry['task'] = 'caption' + + return batch_entry + + +def pre_caption(caption,max_words): + caption = re.sub( + r"([,.'!?\"()*#:;~])", + '', + caption.lower(), + ).replace('-', ' ').replace('/', ' ').replace('', 'person') + + caption = re.sub( + r"\s{2,}", + ' ', + caption, + ) + caption = caption.rstrip('\n') + caption = caption.strip(' ') + + #truncate caption + caption_words = caption.split(' ') + if len(caption_words)>max_words: + caption = ' '.join(caption_words[:max_words]) + + return caption + +class CCDataset(Dataset): + def __init__(self, split='CC', raw_dataset=None, rank=-1, topk=-1, verbose=True, args=None, mode='train', + data_dir='/data/mshukor/data', config_dir='/data/mshukor/data/cc3m.json', max_words=30): + super().__init__() + + self.raw_dataset = raw_dataset + self.topk = topk + self.verbose = verbose + self.args = args + + + self.mode = mode + + + data = [] + ann_files = [config_dir] + ann_file = [] + for p in ann_files: + ann_file.append(os.path.join(args.data_json_dir, p)) + + for f in ann_file: + tmp = json.load(open(f,'r')) + data += tmp + print('size of', f, len(tmp)) + print(len(data)) + self.max_words = max_words + for e in data: + e['image'] = os.path.join(data_dir, ('/').join(e['image'].split('/')[4:])) + + # Loading datasets to data + self.source = split + if self.verbose: + print('Data source: ', self.source) + + + normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + + + self.train_transform = transforms.Compose([ + transforms.RandomResizedCrop(args.image_size,scale=(0.5, 1.0), interpolation=Image.BICUBIC), + transforms.RandomHorizontalFlip(), + RandomAugment(2,7,isPIL=True,augs=['Identity','AutoContrast','Equalize','Brightness','Sharpness', + 'ShearX', 'ShearY', 'TranslateX', 'TranslateY', 'Rotate']), + transforms.ToTensor(), + normalize, + ]) + self.test_transform = transforms.Compose([ + transforms.Resize((args.image_size,args.image_size),interpolation=Image.BICUBIC), + transforms.ToTensor(), + normalize, + ]) + + + if isinstance(self.topk, float) and (0 < self.topk <= 1): + used_samples = int(self.topk * len(data)) + data = random.sample(data, used_samples) + if self.verbose: + print(f"Use only {len(data)} data") + + elif self.topk > 0: + data = data[:int(self.topk)] + if self.verbose: + print(f"Use only {len(data)} data") + + self.data = data + + if self.verbose: + print("# all sentences:", len(self.data)) + + + self.image_size = self.args.image_size + + if mode == "train" and self.args.use_data_augmentation: + self.transform = self.train_transform + else: + self.transform = self.test_transform + + + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + + out_dict = {} + out_dict['args'] = self.args + + datum = self.data[idx] + + if type(datum['caption']) == list: + caption = pre_caption(random.choice(datum['caption']), self.max_words) + else: + caption = pre_caption(datum['caption'], self.max_words) + + ###### Image ###### + image = Image.open(datum['image']).convert('RGB') + + img_id = datum['image'].split('/')[-1].split('.')[0] + out_dict['img_id'] = img_id + + + out_dict["image"] = self.transform(image) + + out_dict['sent'] = caption + + out_dict['targets'] = caption + + + return out_dict + + def collate_fn(self, batch): + batch_entry = {} + + B 
= len(batch) + + + if 'target_ids' in batch[0]: + T_W_L = max(entry['target_length'] for entry in batch) + target_ids = torch.ones(B, T_W_L, dtype=torch.long) * self.tokenizer.pad_token_id + + + targets = [] + img_ids = [] + img_paths = [] + input_text = [] + images = [] + sents = [] + + for i, entry in enumerate(batch): + + images.append(entry['image']) + img_ids.append(entry['img_id']) + + if 'target_ids' in entry: + target_ids[i, :entry['target_length']] = entry['target_ids'] + + if 'targets' in entry: + targets.append(entry['targets']) + if 'sent' in entry: + sents.append(entry['sent']) + + + # if self.args.use_vision: + batch_entry['images'] = torch.stack(images) + batch_entry['img_id'] = img_ids + batch_entry['img_paths'] = img_paths + if 'sent' in entry: + batch_entry['sent'] = sents + + batch_entry['targets'] = targets + + batch_entry['task'] = 'caption' + + return batch_entry + +def get_loader(args, split='train', mode='train', + batch_size=32, workers=4, distributed=False, gpu=0, + topk=-1, data_dir='/data/mshukor/data', local_rank=None, world_size=None, verbose=False, config_dir=None): + + # if 'mscoco' in split: + # verbose = (gpu == 0) + + if 'CC' in split: + dataset = CCDataset(split, data_dir=data_dir, mode=mode, topk=topk, args=args, verbose=verbose, rank=gpu, config_dir=config_dir) + else: + dataset = COCOCaptionFineTuneDataset( + split, + # raw_dataset=_dset, + rank=gpu, + topk=topk, + verbose=verbose, + args=args, + mode=mode, data_dir=data_dir) + + + if distributed and mode == 'train': + train_sampler = DistributedSampler(dataset, num_replicas=world_size, rank=local_rank) + # train_sampler = DistributedSampler(dataset) + # train_sampler = RandomNonreplacmentSampler(dataset, dataset.n_iter) + else: + train_sampler = None + if mode == 'train': + loader = DataLoader( + dataset, batch_size=batch_size, shuffle=(train_sampler is None), + num_workers=workers, pin_memory=True, sampler=train_sampler, + collate_fn=dataset.collate_fn) + else: + loader = DataLoader( + dataset, + batch_size=batch_size, shuffle=False, + num_workers=workers, pin_memory=True, + sampler=None, + collate_fn=dataset.collate_fn, + drop_last=False) + + if verbose: + loader.evaluator = COCOCaptionEvaluator() + + loader.task = 'caption' + + return loader + + + +class COCOCaptionEvaluator: + def __init__(self): + import language_evaluation + self.evaluator = language_evaluation.CocoEvaluator(verbose=False) + + + def evaluate(self, predicts, answers): + + results = self.evaluator.run_evaluation(predicts, answers) + + return results \ No newline at end of file diff --git a/dataset/.ipynb_checkpoints/randaugment-checkpoint.py b/dataset/.ipynb_checkpoints/randaugment-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..1fbfa736ec24b849997e264231180f71b4a27c3d --- /dev/null +++ b/dataset/.ipynb_checkpoints/randaugment-checkpoint.py @@ -0,0 +1,341 @@ +#albef +import cv2 +import numpy as np + + +## aug functions +def identity_func(img): + return img + + +def autocontrast_func(img, cutoff=0): + ''' + same output as PIL.ImageOps.autocontrast + ''' + n_bins = 256 + + def tune_channel(ch): + n = ch.size + cut = cutoff * n // 100 + if cut == 0: + high, low = ch.max(), ch.min() + else: + hist = cv2.calcHist([ch], [0], None, [n_bins], [0, n_bins]) + low = np.argwhere(np.cumsum(hist) > cut) + low = 0 if low.shape[0] == 0 else low[0] + high = np.argwhere(np.cumsum(hist[::-1]) > cut) + high = n_bins - 1 if high.shape[0] == 0 else n_bins - 1 - high[0] + if high <= low: + table = np.arange(n_bins) + 
diff --git a/dataset/__init__.py b/dataset/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dataset/audio_caption.py b/dataset/audio_caption.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6a50d0c3a6709335631816437eb11594a49f192
--- /dev/null
+++ b/dataset/audio_caption.py
@@ -0,0 +1,375 @@
+from torch.utils.data import DataLoader, Dataset
+from pathlib import Path
+import json
+import random
+
+import torch
+import numpy as np
+
+from torch.utils.data.distributed import DistributedSampler
+
+import re
+
+import torchaudio
+
+class AudioCaptionFineTuneDataset(Dataset):
+    def __init__(self, split='karpathy_train', raw_dataset=None, rank=-1, topk=-1, verbose=True, args=None, mode='train',
+                 data_dir='/data/mshukor/data', black_image=False):
+        super().__init__()
+
+        self.raw_dataset = raw_dataset
+        self.topk = topk
+        self.verbose = verbose
+        self.args = args
+
+        self.mode = mode
+
+        data_dir = Path(data_dir)
+        dataset_dir = data_dir.joinpath('annotation')
+        coco_img_dir = data_dir.joinpath('audios')
+
+        self.black_image = black_image
+
+        self.source = split
+        if self.verbose:
+            print('Data source: ', self.source)
+
+        # audio
+        self.melbins = args.melbins
+        self.target_length = args.target_length
+        self.num_tries = args.num_tries  # 2
+        self.freqm_p = args.freqm_p
+        self.timem_p = args.timem_p
+
+        self.skip_norm = args.skip_norm
+        self.norm_mean = args.norm_mean
+        self.norm_std = args.norm_std
+        self.noise = args.noise
+
+        self.freqm = torchaudio.transforms.FrequencyMasking(self.freqm_p)
+        self.timem = torchaudio.transforms.TimeMasking(self.timem_p)
+
+        data_info_path = dataset_dir.joinpath(split+'.json')
+        with open(data_info_path) as f:
+            karpathy_data = json.load(f)
+
+        n_images = 0
+
+        data = []
+        for datum in karpathy_data:
+
+            if 'train' in split:
+                caption = datum['caption']
+                if isinstance(caption, list):
+                    for d in caption:
+
+                        img_id = ".".join(datum['audio'].split('.')[:-1])
+                        new_datum = {
+                            'img_id': img_id,
+                            'sent': d.strip(),
+                            'targets': [k.strip() for k in caption],
+                            'is_train': True,
+                            'audio': datum['audio'],
+                        }
+                        data.append(new_datum)
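+                # each caption in the list becomes its own training example: a clip
+                # with N reference captions yields N entries sharing the same img_id
+                # and the full 'targets' list (assumed AudioCaps/Clotho-style
+                # annotations where datum['caption'] may be a str or a list of str)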
+ else: + img_id = ".".join(datum['audio'].split('.')[:-1]) + new_datum = { + 'img_id': img_id, + 'sent': caption.strip(), + 'targets': caption.strip(), + 'is_train': True, + 'audio': datum['audio'], + } + data.append(new_datum) + else: + caption = datum['caption'] + if not isinstance(caption, list): + caption = [caption] + img_id = ".".join(datum['audio'].split('.')[:-1]) + new_datum = { + 'img_id': img_id, + 'targets': [d.strip() for d in caption], + 'is_train': False, + 'audio': datum['audio'], + } + data.append(new_datum) + + n_images += 1 + + if self.verbose: + print(f"{self.source} has {n_images} images") + print(f"Loaded {len(data)} data from", split) + + + + if isinstance(self.topk, float) and (0 < self.topk <= 1): + used_samples = int(self.topk * len(data)) + data = random.sample(data, used_samples) + if self.verbose: + print(f"Use only {len(data)} data") + + elif self.topk > 0: + data = data[:int(self.topk)] + if self.verbose: + print(f"Use only {len(data)} data") + + self.data = data + + if self.verbose: + print("# all sentences:", len(self.data)) + + + self.image_size = self.args.image_size + + + self.source_to_h5 = {} + + self.source_to_h5.update({ + 'all': coco_img_dir, + }) + + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + + out_dict = {} + out_dict['args'] = self.args + + for i in range(self.num_tries): + + try: + datum = self.data[idx] + + ###### Image ###### + img_id = datum['img_id'] + out_dict['img_id'] = img_id + + audio = datum['audio'] + path = str(self.source_to_h5['all'].joinpath(f"{audio}")) + + waveform, sr = torchaudio.load(path) + waveform = waveform - waveform.mean() + + # audio + fbank = torchaudio.compliance.kaldi.fbank(waveform, htk_compat=True, sample_frequency=sr, use_energy=False, + window_type='hanning', num_mel_bins=self.melbins, dither=0.0, + frame_shift=10) + + except Exception as e: + print(i, path) + idx = random.randint(0, len(self) - 1) + print( + f"Caught exception {e} when loading audio {path}, " + f"randomly sample a new audio as replacement" + ) + continue + + + + + + n_frames = fbank.shape[0] + + p = self.target_length - n_frames + + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + fbank = m(fbank) + elif p < 0: + fbank = fbank[0:self.target_length, :] + + + + + # SpecAug, not do for eval set + + fbank = torch.transpose(fbank, 0, 1) + # this is just to satisfy new torchaudio version, which only accept [1, freq, time] + fbank = fbank.unsqueeze(0) + + + if self.mode == 'train': + if self.freqm_p != 0: + fbank = self.freqm(fbank) + if self.timem_p != 0: + fbank = self.timem(fbank) + # squeeze it back, it is just a trick to satisfy new torchaudio version + fbank = fbank.squeeze(0) + fbank = torch.transpose(fbank, 0, 1) + + + # normalize the input for both training and test + if not self.skip_norm: + fbank = (fbank - self.norm_mean) / (self.norm_std * 2) + # skip normalization the input if you are trying to get the normalization stats. 
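+            # (assumed workflow: run once with args.skip_norm set to collect mean/std
+            # statistics of the raw fbanks, then hard-code them as norm_mean/norm_std
+            # and train with normalization enabled)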
+ else: + pass + + if self.mode == 'train' and self.noise == True: + fbank = fbank + torch.rand(fbank.shape[0], fbank.shape[1]) * np.random.rand() / 10 + fbank = torch.roll(fbank, np.random.randint(-10, 10), 0) + + + + + out_dict["image"] = fbank + + + + if self.black_image: + out_dict["image"] = torch.zeros_like(out_dict["image"]) + + + if datum['is_train']: + sent = datum['sent'].strip() + + out_dict['sent'] = sent + + + if 'targets' in datum: + out_dict['targets'] = datum['targets'] + + + return out_dict + + def collate_fn(self, batch): + batch_entry = {} + + B = len(batch) + + + + if 'target_ids' in batch[0]: + T_W_L = max(entry['target_length'] for entry in batch) + target_ids = torch.ones(B, T_W_L, dtype=torch.long) * self.tokenizer.pad_token_id + + + targets = [] + img_ids = [] + img_paths = [] + input_text = [] + images = [] + sents = [] + + for i, entry in enumerate(batch): + + + images.append(entry['image']) + img_ids.append(entry['img_id']) + + if 'target_ids' in entry: + target_ids[i, :entry['target_length']] = entry['target_ids'] + + + + if 'targets' in entry: + targets.append(entry['targets']) + if 'sent' in entry: + sents.append(entry['sent']) + + # if self.args.use_vision: + batch_entry['images'] = torch.stack(images) + batch_entry['img_id'] = img_ids + batch_entry['img_paths'] = img_paths + if 'sent' in entry: + batch_entry['sent'] = sents + + + + batch_entry['targets'] = targets + + batch_entry['task'] = 'caption' + + return batch_entry + + +def pre_caption(caption,max_words): + caption = re.sub( + r"([,.'!?\"()*#:;~])", + '', + caption.lower(), + ).replace('-', ' ').replace('/', ' ').replace('', 'person') + + caption = re.sub( + r"\s{2,}", + ' ', + caption, + ) + caption = caption.rstrip('\n') + caption = caption.strip(' ') + + #truncate caption + caption_words = caption.split(' ') + if len(caption_words)>max_words: + caption = ' '.join(caption_words[:max_words]) + + return caption + + + +def get_loader(args, split='train', mode='train', + batch_size=32, workers=4, distributed=False, gpu=0, + topk=-1, data_dir='/data/mshukor/data', local_rank=None, world_size=None, verbose=False, + config_dir=None, black_image=False): + + + + dataset = AudioCaptionFineTuneDataset( + split, + rank=gpu, + topk=topk, + verbose=verbose, + args=args, + mode=mode, data_dir=data_dir, black_image=black_image) + + + if distributed and mode == 'train': + train_sampler = DistributedSampler(dataset, num_replicas=world_size, rank=local_rank) + else: + train_sampler = None + if mode == 'train': + loader = DataLoader( + dataset, batch_size=batch_size, shuffle=(train_sampler is None), + num_workers=workers, pin_memory=True, sampler=train_sampler, + collate_fn=dataset.collate_fn) + else: + loader = DataLoader( + dataset, + batch_size=batch_size, shuffle=False, + num_workers=workers, pin_memory=True, + sampler=None, + collate_fn=dataset.collate_fn, + drop_last=False) + + if verbose: + loader.evaluator = COCOCaptionEvaluator() + + loader.task = 'caption' + + return loader + + + +class COCOCaptionEvaluator: + def __init__(self): + import language_evaluation + self.evaluator = language_evaluation.CocoEvaluator(verbose=False) + + + def evaluate(self, predicts, answers): + + results = self.evaluator.run_evaluation(predicts, answers) + + return results \ No newline at end of file diff --git a/dataset/caption.py b/dataset/caption.py new file mode 100644 index 0000000000000000000000000000000000000000..80fa7f50c18f1c4c9821dacbd6ce521a33c270c1 --- /dev/null +++ b/dataset/caption.py @@ -0,0 +1,325 @@ +from 
torch.utils.data import DataLoader, Dataset +from pathlib import Path +import json +import random +from multiprocessing import Pool +import torch +from PIL import Image + +from torch.utils.data.distributed import DistributedSampler + + + +from dataset.randaugment import RandomAugment + +import torch +from torchvision import transforms + +import os +import re + + + +class COCOCaptionFineTuneDataset(Dataset): + def __init__(self, split='karpathy_train', raw_dataset=None, rank=-1, topk=-1, verbose=True, args=None, mode='train', + data_dir='/data/mshukor/data', black_image=False): + super().__init__() + + self.raw_dataset = raw_dataset + self.topk = topk + self.verbose = verbose + self.args = args + + self.args.BUTD100 = False + + self.mode = mode + + dataset_dir = Path(data_dir) + coco_dir = dataset_dir.joinpath('COCO') + vg_dir = dataset_dir.joinpath('VG') + coco_img_dir = coco_dir.joinpath('images/') + coco_feature_dir = coco_dir.joinpath('features') + + self.black_image = black_image + + # Loading datasets to data + self.source = split + if self.verbose: + print('Data source: ', self.source) + + + normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + + + self.train_transform = transforms.Compose([ + transforms.RandomResizedCrop(args.image_size,scale=(0.5, 1.0), interpolation=Image.BICUBIC), + transforms.RandomHorizontalFlip(), + RandomAugment(2,7,isPIL=True,augs=['Identity','AutoContrast','Equalize','Brightness','Sharpness', + 'ShearX', 'ShearY', 'TranslateX', 'TranslateY', 'Rotate']), + transforms.ToTensor(), + normalize, + ]) + self.test_transform = transforms.Compose([ + transforms.Resize((args.image_size,args.image_size),interpolation=Image.BICUBIC), + transforms.ToTensor(), + normalize, + ]) + + + + data_info_path = dataset_dir.joinpath('COCO/dataset_coco.json') + with open(data_info_path) as f: + karpathy_data = json.load(f) + + split_rename = { + 'train': 'train', + 'restval': 'train', + 'val': 'val', + 'test': 'test' + } + + n_images = 0 + + data = [] + for datum in karpathy_data['images']: + re_split = split_rename[datum['split']] + if re_split != self.source.split('_')[-1]: + continue + + if re_split == 'train': + for d in datum['sentences']: + + img_id = datum['filename'].split('.')[0] + new_datum = { + 'img_id': img_id, + 'sent': d['raw'].strip(), + 'targets': [d['raw'].strip() for d in datum['sentences']], + 'is_train': True, + } + data.append(new_datum) + else: + + img_id = datum['filename'].split('.')[0] + new_datum = { + 'img_id': img_id, + # 'sent': d['raw'], + 'targets': [d['raw'].strip() for d in datum['sentences']], + 'is_train': False, + } + data.append(new_datum) + + n_images += 1 + + if self.verbose: + print(f"{self.source} has {n_images} images") + print(f"Loaded {len(data)} data from", split) + + + + if isinstance(self.topk, float) and (0 < self.topk <= 1): + used_samples = int(self.topk * len(data)) + data = random.sample(data, used_samples) + if self.verbose: + print(f"Use only {len(data)} data") + + elif self.topk > 0: + data = data[:int(self.topk)] + if self.verbose: + print(f"Use only {len(data)} data") + + self.data = data + + if self.verbose: + print("# all sentences:", len(self.data)) + + + self.image_size = self.args.image_size + + if mode == "train" and self.args.use_data_augmentation: + self.transform = self.train_transform + else: + self.transform = self.test_transform + + self.source_to_h5 = {} + + self.source_to_h5.update({ + 'train2014': coco_img_dir.joinpath(f'train2014'), + 'val2014': 
coco_img_dir.joinpath(f'val2014'), + }) + + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + + out_dict = {} + out_dict['args'] = self.args + + datum = self.data[idx] + + ###### Image ###### + img_id = datum['img_id'] + out_dict['img_id'] = img_id + + + if self.args.BUTD100: + source = self.source + else: + if 'train' in img_id: + source = 'train2014' + elif 'val' in img_id: + source = 'val2014' + + path = self.source_to_h5[source].joinpath(f"{img_id}.jpg") + + image = Image.open(path).convert('RGB') + + + out_dict["image"] = self.transform(image) + + if self.black_image: + out_dict["image"] = torch.zeros_like(out_dict["image"]) + + if datum['is_train']: + sent = datum['sent'].strip() + + out_dict['sent'] = sent + + + if 'targets' in datum: + out_dict['targets'] = datum['targets'] + + + return out_dict + + def collate_fn(self, batch): + batch_entry = {} + + B = len(batch) + + + + if 'target_ids' in batch[0]: + T_W_L = max(entry['target_length'] for entry in batch) + target_ids = torch.ones(B, T_W_L, dtype=torch.long) * self.tokenizer.pad_token_id + + + targets = [] + img_ids = [] + img_paths = [] + input_text = [] + images = [] + sents = [] + + for i, entry in enumerate(batch): + + images.append(entry['image']) + img_ids.append(entry['img_id']) + + if 'target_ids' in entry: + target_ids[i, :entry['target_length']] = entry['target_ids'] + + + + if 'targets' in entry: + targets.append(entry['targets']) + if 'sent' in entry: + sents.append(entry['sent']) + + + batch_entry['images'] = torch.stack(images) + batch_entry['img_id'] = img_ids + batch_entry['img_paths'] = img_paths + if 'sent' in entry: + batch_entry['sent'] = sents + + + + batch_entry['targets'] = targets + + batch_entry['task'] = 'caption' + + return batch_entry + + +def pre_caption(caption,max_words): + caption = re.sub( + r"([,.'!?\"()*#:;~])", + '', + caption.lower(), + ).replace('-', ' ').replace('/', ' ').replace('', 'person') + + caption = re.sub( + r"\s{2,}", + ' ', + caption, + ) + caption = caption.rstrip('\n') + caption = caption.strip(' ') + + #truncate caption + caption_words = caption.split(' ') + if len(caption_words)>max_words: + caption = ' '.join(caption_words[:max_words]) + + return caption + + + +def get_loader(args, split='train', mode='train', + batch_size=32, workers=4, distributed=False, gpu=0, + topk=-1, data_dir='/data/mshukor/data', local_rank=None, world_size=None, verbose=False, + config_dir=None, black_image=False): + + + + + dataset = COCOCaptionFineTuneDataset( + split, + # raw_dataset=_dset, + rank=gpu, + topk=topk, + verbose=verbose, + args=args, + mode=mode, data_dir=data_dir, black_image=black_image) + + + if distributed and mode == 'train': + train_sampler = DistributedSampler(dataset, num_replicas=world_size, rank=local_rank) + else: + train_sampler = None + if mode == 'train': + loader = DataLoader( + dataset, batch_size=batch_size, shuffle=(train_sampler is None), + num_workers=workers, pin_memory=True, sampler=train_sampler, + collate_fn=dataset.collate_fn) + else: + loader = DataLoader( + dataset, + batch_size=batch_size, shuffle=False, + num_workers=workers, pin_memory=True, + sampler=None, + collate_fn=dataset.collate_fn, + drop_last=False) + + if verbose: + loader.evaluator = COCOCaptionEvaluator() + + loader.task = 'caption' + + return loader + + + +class COCOCaptionEvaluator: + def __init__(self): + import language_evaluation + self.evaluator = language_evaluation.CocoEvaluator(verbose=False) + + + def evaluate(self, predicts, answers): + + results = 
self.evaluator.run_evaluation(predicts, answers) + + return results \ No newline at end of file diff --git a/dataset/gqa.py b/dataset/gqa.py new file mode 100644 index 0000000000000000000000000000000000000000..ee1caa371e33d0936446ff5df26bf8eed25db3d2 --- /dev/null +++ b/dataset/gqa.py @@ -0,0 +1,480 @@ +## from VL-Adapter + +from torch.utils.data import DataLoader, Dataset +from pathlib import Path +import json +import random +import torch +import numpy as np + +from torchvision import transforms + + +from torch.utils.data.distributed import DistributedSampler + + +from dataset.randaugment import RandomAugment + + + +from PIL import Image +import re + + +project_dir = Path(__file__).resolve().parent.parent # VLT5 +workspace_dir = project_dir.parent + + + +class GQAFineTuneDataset(Dataset): + def __init__(self, split='train,valid', raw_dataset=None, rank=-1, topk=-1, verbose=True, args=None, mode='train', data_dir=None): + super().__init__() + + self.raw_dataset = raw_dataset + self.topk = topk + self.verbose = verbose + self.args = args + + self.mode = mode + + normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + + + self.train_transform = transforms.Compose([ + transforms.RandomResizedCrop(args.image_size,scale=(0.5, 1.0), interpolation=Image.BICUBIC), + transforms.RandomHorizontalFlip(), + RandomAugment(2,7,isPIL=True,augs=['Identity','AutoContrast','Equalize','Brightness','Sharpness', + 'ShearX', 'ShearY', 'TranslateX', 'TranslateY', 'Rotate']), + transforms.ToTensor(), + normalize, + ]) + self.test_transform = transforms.Compose([ + transforms.Resize((args.image_size,args.image_size),interpolation=Image.BICUBIC), + transforms.ToTensor(), + normalize, + ]) + + ### dataset paths + dataset_dir = Path(data_dir) + vg_dir = dataset_dir.joinpath('VG') + gqa_dir = dataset_dir.joinpath('GQA') + gqa_img_dir = gqa_dir.joinpath('images/') + vg_img_dir = vg_dir.joinpath('VG_100K/') + + vizwiz_img_dir = dataset_dir.joinpath('vizwiz') + self.sources = split.split(',') + if self.verbose: + print('Data sources: ', self.sources) + + + + self.img_ids_to_source = {} + data_info_dicts = [] + for source in self.sources: + data_info_path = dataset_dir.joinpath(f'GQA/{source}.json') + with open(data_info_path) as f: + _data_info_dicts = json.load(f) + for _d in _data_info_dicts: + self.img_ids_to_source[_d['img_id']] = source + _d['source'] = source + + data_info_dicts.extend(_data_info_dicts) + if self.verbose: + print(f"Loaded {len(_data_info_dicts)} data from", source) + + data = data_info_dicts + + + if isinstance(self.topk, float) and (0 < self.topk <= 1): + used_samples = int(self.topk * len(data)) + data = random.sample(data, used_samples) + if self.verbose: + print(f"Use only {len(data)} data") + + elif self.topk > 0: + data = data[:int(self.topk)] + if self.verbose: + print(f"Use only {len(data)} data") + + self.data = data + + if self.verbose: + print("# all sentences:", len(self.data)) + + + self.image_size = self.args.image_size + + if mode == "train" and self.args.use_data_augmentation: + self.transform = self.train_transform + else: + self.transform = self.test_transform + + self.source_to_featname = { + 'train': 'others', + 'valid': 'others', + 'submit': 'testdev', + + 'testdev': 'testdev', + 'val_vg': 'others', + 'train_vg': 'others', + + 'train_vizwiz': 'train_vizwiz', + 'val_vizwiz': 'val_vizwiz' + } + + self.featname_to_h5 = { + 'others': vg_img_dir, + 'testdev': gqa_img_dir, + 'train_vizwiz': vizwiz_img_dir.joinpath('train'), + 
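+            # vizwiz images are stored in split-specific subfolders (train/, val/),
+            # unlike the GQA/VG sources, which each use a single flat image directory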
'val_vizwiz': vizwiz_img_dir.joinpath('val'), + } + + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + + out_dict = {} + out_dict['args'] = self.args + + datum = self.data[idx] + + img_id = datum['img_id'] + out_dict['img_id'] = img_id + + source = self.img_ids_to_source[img_id] + + featname = self.source_to_featname[source] + + + + + path = self.featname_to_h5[featname].joinpath(f"{img_id}.jpg") + + image = Image.open(path).convert("RGB") + + out_dict["image"] = self.transform(image) + + ###### Text ##### + + + if 'sent' in datum: + sent = datum['sent'] + elif 'question' in datum: + sent = datum['question'] + + question_id = datum['question_id'] + out_dict['question_id'] = question_id + + out_dict['sent'] = sent + + + if 'label' in datum: + label = datum['label'] + out_dict['label'] = label + + # https://github.com/airsplay/lxmert/blob/master/src/pretrain/lxmert_pretrain.py#L191 + answers = [] + scores = [] + for a, s in label.items(): + answers.append(a) + scores.append(s) + + score_sum = sum(scores) + + if score_sum == 0: + answer = '' + score = 0. + else: + prob = [score / score_sum for score in scores] + choice = np.random.multinomial(1, prob).argmax() + answer = answers[choice] + score = scores[choice] + assert len(answer) > 0, (sent, label, choice, answer) + + out_dict['answer'] = answer + out_dict['score'] = score + out_dict['all_answers'] = answers + + + return out_dict + + + def collate_fn(self, batch): + batch_entry = {} + + + B = len(batch) + + + sentences = [] + question_ids = [] + answers = [] + all_answers = [] + all_answers_tokenized = [] + best_answers_tokenized = [] + img_ids = [] + img_paths = [] + labels = [] + scores = [] + + images = [] + + for i, entry in enumerate(batch): + + images.append(entry["image"]) + + + sentences.append(entry['sent']) + question_ids.append(entry['question_id']) + if 'answer' in entry: + answers.append(entry['answer']) + if 'all_answers' in entry: + all_answers.append(entry['all_answers']) + + if 'score' in entry: + scores.append(entry['score']) + + if 'label' in entry: + labels.append(entry['label']) + + + + batch_entry['images'] = torch.stack(images) + + + batch_entry['sent'] = sentences + batch_entry['question_ids'] = question_ids + batch_entry['answers'] = answers + batch_entry['all_answers'] = all_answers + + batch_entry['scores'] = torch.FloatTensor(scores) + batch_entry['labels'] = labels + + batch_entry['task'] = 'gqa' + + return batch_entry + + +def get_loader(args, split='train', mode='train', + batch_size=32, workers=4, distributed=False, gpu=0, + topk=-1, verbose=None, data_dir='/data/mshukor/data', local_rank=None, world_size=None): + + + _dset = GQADataset(split, verbose, data_dir=data_dir) + + dataset = GQAFineTuneDataset( + split, + raw_dataset=_dset, + rank=gpu, + topk=topk, + verbose=verbose, + args=args, + mode=mode, data_dir=data_dir) + + if distributed: + sampler = DistributedSampler(dataset, num_replicas=world_size, rank=local_rank) + else: + sampler = None + + + if mode == 'train': + loader = DataLoader( + dataset, batch_size=batch_size, shuffle=(sampler is None), + num_workers=workers, pin_memory=True, sampler=sampler, + collate_fn=dataset.collate_fn) + else: + loader = DataLoader( + dataset, + batch_size=batch_size, + num_workers=workers, pin_memory=True, + sampler=sampler, + shuffle=None if (sampler is not None) else False, + collate_fn=dataset.collate_fn, + drop_last=False) + + loader.evaluator = GQAEvaluator(_dset) + loader.task = 'gqa' + + return loader + + +class GQADataset: + """ + 
A GQA data example in json file: + { + "img_id": "2375429", + "label": { + "pipe": 1.0 + }, + "question_id": "07333408", + "sent": "What is on the white wall?" + } + """ + + def __init__(self, splits: str, verbose=True, data_dir='/data/mshukor/data'): + self.name = splits + self.splits = splits.split(',') + + dataset_dir = Path(data_dir) + + gqa_dir = dataset_dir.joinpath('GQA') + + + # Loading datasets to data + self.data = [] + for split in self.splits: + self.data.extend(json.load(open(gqa_dir.joinpath("%s.json" % split)))) + if verbose: + print("Load %d data from split(s) %s." % + (len(self.data), self.name)) + + # List to dict (for evaluation and others) + self.id2datum = { + int(datum['question_id']): datum + for datum in self.data + } + + # Answers + self.ans2label = json.load(open(gqa_dir.joinpath("trainval_ans2label.json"))) + self.label2ans = json.load(open(gqa_dir.joinpath("trainval_label2ans.json"))) + assert len(self.ans2label) == len(self.label2ans) + for ans, label in self.ans2label.items(): + assert self.label2ans[label] == ans + + @property + def num_answers(self): + return len(self.ans2label) + + def __len__(self): + return len(self.data) + + +class GQAEvaluator: + def __init__(self, dataset: GQADataset): + self.dataset = dataset + + """https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py""" + + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", "couldnt": "couldn't", \ + "couldn'tve": "couldn't've", "couldnt've": "couldn't've", "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", \ + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", "hed": "he'd", "hed've": "he'd've", \ + "he'dve": "he'd've", "hes": "he's", "howd": "how'd", "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", \ + "Im": "I'm", "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", "itll": "it'll", "let's": "let's", \ + "maam": "ma'am", "mightnt": "mightn't", "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", \ + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", "oclock": "o'clock", "oughtnt": "oughtn't", \ + "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", \ + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", \ + "somebody'd": "somebodyd", "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", "somebodyll": "somebody'll", \ + "somebodys": "somebody's", "someoned": "someone'd", "someoned've": "someone'd've", "someone'dve": "someone'd've", \ + "someonell": "someone'll", "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", \ + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", "thered": "there'd", "thered've": "there'd've", \ + "there'dve": "there'd've", "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", \ + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", "twas": "'twas", "wasnt": "wasn't", \ + "wed've": "we'd've", "we'dve": "we'd've", "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", \ + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", "wheres": "where's", "whereve": 
"where've", \ + "whod": "who'd", "whod've": "who'd've", "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", "whyll": "why'll", \ + "whyre": "why're", "whys": "why's", "wont": "won't", "wouldve": "would've", "wouldnt": "wouldn't", "wouldnt've": "wouldn't've", \ + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", "yall'd've": "y'all'd've", \ + "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", \ + "youll": "you'll", "youre": "you're", "youve": "you've"} + + self.manualMap = { 'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + def evaluate(self, quesid2ans: dict, normalize_answer=False): + score = 0. + for quesid, ans in quesid2ans.items(): + datum = self.dataset.id2datum[quesid] + label = datum['label'] + if normalize_answer: + ans = self.normalize_answer(ans) + new_label = {self.normalize_answer(l): label[l] for l in label} + else: + new_label = label + + if ans in new_label: + # print(ans, new_label) + score += new_label[ans] + return score / len(quesid2ans) + + def normalize_answer(self, resAns): + resAns = resAns.replace('\n', ' ') + resAns = resAns.replace('\t', ' ') + resAns = resAns.strip() + resAns = self.processPunctuation(resAns) + resAns = self.processDigitArticle(resAns) + resAns = resAns.replace(',', '') + return resAns + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", + outText, + re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText + + def dump_result(self, quesid2ans: dict, path): + """ + Dump the result to a GQA-challenge submittable json file. + GQA json file submission requirement: + results = [result] + result = { + "questionId": str, # Note: it's a actually an int number but the server requires an str. + "prediction": str + } + :param quesid2ans: A dict mapping question id to its predicted answer. + :param path: The file path to save the json file. 
+ :return: + """ + with open(path, 'w') as f: + result = [] + for ques_id, ans in quesid2ans.items(): + result.append({ + 'questionId': ques_id, + 'prediction': ans + }) + json.dump(result, f, indent=4, sort_keys=True) diff --git a/dataset/randaugment.py b/dataset/randaugment.py new file mode 100644 index 0000000000000000000000000000000000000000..1fbfa736ec24b849997e264231180f71b4a27c3d --- /dev/null +++ b/dataset/randaugment.py @@ -0,0 +1,341 @@ +#albef +import cv2 +import numpy as np + + +## aug functions +def identity_func(img): + return img + + +def autocontrast_func(img, cutoff=0): + ''' + same output as PIL.ImageOps.autocontrast + ''' + n_bins = 256 + + def tune_channel(ch): + n = ch.size + cut = cutoff * n // 100 + if cut == 0: + high, low = ch.max(), ch.min() + else: + hist = cv2.calcHist([ch], [0], None, [n_bins], [0, n_bins]) + low = np.argwhere(np.cumsum(hist) > cut) + low = 0 if low.shape[0] == 0 else low[0] + high = np.argwhere(np.cumsum(hist[::-1]) > cut) + high = n_bins - 1 if high.shape[0] == 0 else n_bins - 1 - high[0] + if high <= low: + table = np.arange(n_bins) + else: + scale = (n_bins - 1) / (high - low) + offset = -low * scale + table = np.arange(n_bins) * scale + offset + table[table < 0] = 0 + table[table > n_bins - 1] = n_bins - 1 + table = table.clip(0, 255).astype(np.uint8) + return table[ch] + + channels = [tune_channel(ch) for ch in cv2.split(img)] + out = cv2.merge(channels) + return out + + +def equalize_func(img): + ''' + same output as PIL.ImageOps.equalize + PIL's implementation is different from cv2.equalize + ''' + n_bins = 256 + + def tune_channel(ch): + hist = cv2.calcHist([ch], [0], None, [n_bins], [0, n_bins]) + non_zero_hist = hist[hist != 0].reshape(-1) + step = np.sum(non_zero_hist[:-1]) // (n_bins - 1) + if step == 0: return ch + n = np.empty_like(hist) + n[0] = step // 2 + n[1:] = hist[:-1] + table = (np.cumsum(n) // step).clip(0, 255).astype(np.uint8) + return table[ch] + + channels = [tune_channel(ch) for ch in cv2.split(img)] + out = cv2.merge(channels) + return out + + +def rotate_func(img, degree, fill=(0, 0, 0)): + ''' + like PIL, rotate by degree, not radians + ''' + H, W = img.shape[0], img.shape[1] + center = W / 2, H / 2 + M = cv2.getRotationMatrix2D(center, degree, 1) + out = cv2.warpAffine(img, M, (W, H), borderValue=fill) + return out + + +def solarize_func(img, thresh=128): + ''' + same output as PIL.ImageOps.posterize + ''' + table = np.array([el if el < thresh else 255 - el for el in range(256)]) + table = table.clip(0, 255).astype(np.uint8) + out = table[img] + return out + + +def color_func(img, factor): + ''' + same output as PIL.ImageEnhance.Color + ''' + ## implementation according to PIL definition, quite slow + # degenerate = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)[:, :, np.newaxis] + # out = blend(degenerate, img, factor) + # M = ( + # np.eye(3) * factor + # + np.float32([0.114, 0.587, 0.299]).reshape(3, 1) * (1. 
- factor) + # )[np.newaxis, np.newaxis, :] + M = ( + np.float32([ + [0.886, -0.114, -0.114], + [-0.587, 0.413, -0.587], + [-0.299, -0.299, 0.701]]) * factor + + np.float32([[0.114], [0.587], [0.299]]) + ) + out = np.matmul(img, M).clip(0, 255).astype(np.uint8) + return out + + +def contrast_func(img, factor): + """ + same output as PIL.ImageEnhance.Contrast + """ + mean = np.sum(np.mean(img, axis=(0, 1)) * np.array([0.114, 0.587, 0.299])) + table = np.array([( + el - mean) * factor + mean + for el in range(256) + ]).clip(0, 255).astype(np.uint8) + out = table[img] + return out + + +def brightness_func(img, factor): + ''' + same output as PIL.ImageEnhance.Contrast + ''' + table = (np.arange(256, dtype=np.float32) * factor).clip(0, 255).astype(np.uint8) + out = table[img] + return out + + +def sharpness_func(img, factor): + ''' + The differences the this result and PIL are all on the 4 boundaries, the center + areas are same + ''' + kernel = np.ones((3, 3), dtype=np.float32) + kernel[1][1] = 5 + kernel /= 13 + degenerate = cv2.filter2D(img, -1, kernel) + if factor == 0.0: + out = degenerate + elif factor == 1.0: + out = img + else: + out = img.astype(np.float32) + degenerate = degenerate.astype(np.float32)[1:-1, 1:-1, :] + out[1:-1, 1:-1, :] = degenerate + factor * (out[1:-1, 1:-1, :] - degenerate) + out = out.astype(np.uint8) + return out + + +def shear_x_func(img, factor, fill=(0, 0, 0)): + H, W = img.shape[0], img.shape[1] + M = np.float32([[1, factor, 0], [0, 1, 0]]) + out = cv2.warpAffine(img, M, (W, H), borderValue=fill, flags=cv2.INTER_LINEAR).astype(np.uint8) + return out + + +def translate_x_func(img, offset, fill=(0, 0, 0)): + ''' + same output as PIL.Image.transform + ''' + H, W = img.shape[0], img.shape[1] + M = np.float32([[1, 0, -offset], [0, 1, 0]]) + out = cv2.warpAffine(img, M, (W, H), borderValue=fill, flags=cv2.INTER_LINEAR).astype(np.uint8) + return out + + +def translate_y_func(img, offset, fill=(0, 0, 0)): + ''' + same output as PIL.Image.transform + ''' + H, W = img.shape[0], img.shape[1] + M = np.float32([[1, 0, 0], [0, 1, -offset]]) + out = cv2.warpAffine(img, M, (W, H), borderValue=fill, flags=cv2.INTER_LINEAR).astype(np.uint8) + return out + + +def posterize_func(img, bits): + ''' + same output as PIL.ImageOps.posterize + ''' + out = np.bitwise_and(img, np.uint8(255 << (8 - bits))) + return out + + +def shear_y_func(img, factor, fill=(0, 0, 0)): + H, W = img.shape[0], img.shape[1] + M = np.float32([[1, 0, 0], [factor, 1, 0]]) + out = cv2.warpAffine(img, M, (W, H), borderValue=fill, flags=cv2.INTER_LINEAR).astype(np.uint8) + return out + + +def cutout_func(img, pad_size, replace=(0, 0, 0)): + replace = np.array(replace, dtype=np.uint8) + H, W = img.shape[0], img.shape[1] + rh, rw = np.random.random(2) + pad_size = pad_size // 2 + ch, cw = int(rh * H), int(rw * W) + x1, x2 = max(ch - pad_size, 0), min(ch + pad_size, H) + y1, y2 = max(cw - pad_size, 0), min(cw + pad_size, W) + out = img.copy() + out[x1:x2, y1:y2, :] = replace + return out + + +### level to args +def enhance_level_to_args(MAX_LEVEL): + def level_to_args(level): + return ((level / MAX_LEVEL) * 1.8 + 0.1,) + return level_to_args + + +def shear_level_to_args(MAX_LEVEL, replace_value): + def level_to_args(level): + level = (level / MAX_LEVEL) * 0.3 + if np.random.random() > 0.5: level = -level + return (level, replace_value) + + return level_to_args + + +def translate_level_to_args(translate_const, MAX_LEVEL, replace_value): + def level_to_args(level): + level = (level / MAX_LEVEL) * float(translate_const) 
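+        # RandAugment-style magnitude mapping: level/MAX_LEVEL in [0, 1] scales to
+        # at most translate_const pixels; the sign is flipped with probability 0.5
+        # on the next line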
+ if np.random.random() > 0.5: level = -level + return (level, replace_value) + + return level_to_args + + +def cutout_level_to_args(cutout_const, MAX_LEVEL, replace_value): + def level_to_args(level): + level = int((level / MAX_LEVEL) * cutout_const) + return (level, replace_value) + + return level_to_args + + +def solarize_level_to_args(MAX_LEVEL): + def level_to_args(level): + level = int((level / MAX_LEVEL) * 256) + return (level, ) + return level_to_args + + +def none_level_to_args(level): + return () + + +def posterize_level_to_args(MAX_LEVEL): + def level_to_args(level): + level = int((level / MAX_LEVEL) * 4) + return (level, ) + return level_to_args + + +def rotate_level_to_args(MAX_LEVEL, replace_value): + def level_to_args(level): + level = (level / MAX_LEVEL) * 30 + if np.random.random() < 0.5: + level = -level + return (level, replace_value) + + return level_to_args + + +func_dict = { + 'Identity': identity_func, + 'AutoContrast': autocontrast_func, + 'Equalize': equalize_func, + 'Rotate': rotate_func, + 'Solarize': solarize_func, + 'Color': color_func, + 'Contrast': contrast_func, + 'Brightness': brightness_func, + 'Sharpness': sharpness_func, + 'ShearX': shear_x_func, + 'TranslateX': translate_x_func, + 'TranslateY': translate_y_func, + 'Posterize': posterize_func, + 'ShearY': shear_y_func, +} + +translate_const = 10 +MAX_LEVEL = 10 +replace_value = (128, 128, 128) +arg_dict = { + 'Identity': none_level_to_args, + 'AutoContrast': none_level_to_args, + 'Equalize': none_level_to_args, + 'Rotate': rotate_level_to_args(MAX_LEVEL, replace_value), + 'Solarize': solarize_level_to_args(MAX_LEVEL), + 'Color': enhance_level_to_args(MAX_LEVEL), + 'Contrast': enhance_level_to_args(MAX_LEVEL), + 'Brightness': enhance_level_to_args(MAX_LEVEL), + 'Sharpness': enhance_level_to_args(MAX_LEVEL), + 'ShearX': shear_level_to_args(MAX_LEVEL, replace_value), + 'TranslateX': translate_level_to_args( + translate_const, MAX_LEVEL, replace_value + ), + 'TranslateY': translate_level_to_args( + translate_const, MAX_LEVEL, replace_value + ), + 'Posterize': posterize_level_to_args(MAX_LEVEL), + 'ShearY': shear_level_to_args(MAX_LEVEL, replace_value), +} + + +class RandomAugment(object): + + def __init__(self, N=2, M=10, isPIL=False, augs=[]): + self.N = N + self.M = M + self.isPIL = isPIL + if augs: + self.augs = augs + else: + self.augs = list(arg_dict.keys()) + + def get_random_ops(self): + sampled_ops = np.random.choice(self.augs, self.N) + return [(op, 0.5, self.M) for op in sampled_ops] + + def __call__(self, img): + if self.isPIL: + img = np.array(img) + ops = self.get_random_ops() + for name, prob, level in ops: + if np.random.random() > prob: + continue + args = arg_dict[name](level) + img = func_dict[name](img, *args) + return img + + +if __name__ == '__main__': + a = RandomAugment() + img = np.random.randn(32, 32, 3) + a(img) \ No newline at end of file diff --git a/dataset/utils.py b/dataset/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..93bda3a729b2bc4ab9977328eeb9342c28ba5c8c --- /dev/null +++ b/dataset/utils.py @@ -0,0 +1,217 @@ +# https://github.com/salesforce/ALBEF + +import re + +def pre_question(question,max_ques_words): + question = re.sub( + r"([,.'!?\"()*#:;~])", + '', + question.lower(), + ).replace('-', ' ').replace('/', ' ') + question = question.rstrip(' ') + + #truncate question + question_words = question.split(' ') + if len(question_words)>max_ques_words: + question = ' '.join(question_words[:max_ques_words]) + + return question + + +def 
+
+def pre_caption(caption,max_words):
+    caption = re.sub(
+        r"([,.'!?\"()*#:;~])",
+        '',
+        caption.lower(),
+    ).replace('-', ' ').replace('/', ' ').replace('<person>', 'person')
+
+    caption = re.sub(
+        r"\s{2,}",
+        ' ',
+        caption,
+    )
+    caption = caption.rstrip('\n')
+    caption = caption.strip(' ')
+
+    #truncate caption
+    caption_words = caption.split(' ')
+    if len(caption_words)>max_words:
+        caption = ' '.join(caption_words[:max_words])
+
+    return caption
+
+
+from vqaTools.vqaEval import VQAEval
+from refTools.evaluation.refEvaluation import RefEvaluation
+
+import json
+import os
+import numpy as np
+import torch
+import torch.distributed as dist
+import torch.nn.functional as F
+
+import utils
+from tqdm import tqdm
+
+
+def vqa_eval(vqa, result_file, test_ques_path):
+    vqaRes = vqa.loadRes(result_file, test_ques_path)
+    # create vqaEval object by taking vqa and vqaRes
+    vqaEval = VQAEval(vqa, vqaRes, n=2)  # n is precision of accuracy (number of places after decimal), default is 2
+    # evaluate results
+    vqaEval.evaluate()
+
+    # print accuracies
+    print("\n")
+    print("Overall Accuracy is: %.02f\n" % (vqaEval.accuracy['overall']))
+    print("Per Answer Type Accuracy is the following:")
+    for ansType in vqaEval.accuracy['perAnswerType']:
+        print("%s : %.02f" % (ansType, vqaEval.accuracy['perAnswerType'][ansType]))
+    print("\n")
+
+    return vqaEval
+
+
+
+def collect_result(result, result_dir, filename, is_json=True, is_list=True):
+    if is_json:
+        result_file = os.path.join(result_dir, '%s_rank%d.json'%(filename,utils.get_rank()))
+        final_result_file = os.path.join(result_dir, '%s.json'%filename)
+        json.dump(result,open(result_file,'w'))
+    else:
+        result_file = os.path.join(result_dir, '%s_rank%d.pth'%(filename,utils.get_rank()))
+        final_result_file = os.path.join(result_dir, '%s.pth'%filename)
+        torch.save(result,result_file)
+
+    dist.barrier()
+
+    result = None
+    if utils.is_main_process():
+        # combine results from all processes
+        if is_list:
+            result = []
+        else:
+            result = {}
+        for rank in range(utils.get_world_size()):
+            if is_json:
+                result_file = os.path.join(result_dir, '%s_rank%d.json'%(filename,rank))
+                res = json.load(open(result_file,'r'))
+            else:
+                result_file = os.path.join(result_dir, '%s_rank%d.pth'%(filename,rank))
+                res = torch.load(result_file)
+            if is_list:
+                result += res
+            else:
+                result.update(res)
+
+    return result
+
+
+def save_result(result, result_dir, filename, is_json=True, is_list=True):
+    if is_json:
+        result_file = os.path.join(result_dir, '%s_rank%d.json'%(filename,utils.get_rank()))
+        final_result_file = os.path.join(result_dir, '%s.json'%filename)
+        json.dump(result,open(result_file,'w'))
+    else:
+        result_file = os.path.join(result_dir, '%s_rank%d.pth'%(filename,utils.get_rank()))
+        final_result_file = os.path.join(result_dir, '%s.pth'%filename)
+        torch.save(result,result_file)
+
+    dist.barrier()
+
+    if utils.is_main_process():
+        # combine results from all processes
+        if is_list:
+            result = []
+        else:
+            result = {}
+        for rank in range(utils.get_world_size()):
+            if is_json:
+                result_file = os.path.join(result_dir, '%s_rank%d.json'%(filename,rank))
+                res = json.load(open(result_file,'r'))
+            else:
+                result_file = os.path.join(result_dir, '%s_rank%d.pth'%(filename,rank))
+                res = torch.load(result_file)
+            if is_list:
+                result += res
+            else:
+                result.update(res)
+        if is_json:
+            json.dump(result,open(final_result_file,'w'))
+        else:
+            torch.save(result,final_result_file)
+
+        print('result file saved to %s'%final_result_file)
+    dist.barrier()
+    return final_result_file
+
+
+
+def 
grounding_eval(results,dets,cocos,refer,alpha,mask_size=24): + + correct_A_d, correct_B_d, correct_val_d = 0, 0, 0 + correct_A, correct_B, correct_val = 0, 0, 0 + num_A,num_B,num_val = 0,0,0 + + for res in tqdm(results): + + ref_id = res['ref_id'] + ref = refer.Refs[ref_id] + ref_box = refer.refToAnn[ref_id]['bbox'] + image = refer.Imgs[ref['image_id']] + + mask = res['pred'].cuda().view(1,1,mask_size,mask_size) + mask = F.interpolate(mask,size = (image['height'],image['width']), mode='bicubic').squeeze() + + # rank detection boxes + max_score = 0 + for det in dets[str(ref['image_id'])]: + score = mask[int(det[1]):int(det[1]+det[3]),int(det[0]):int(det[0]+det[2])] + area = det[2]*det[3] + score = score.sum() / area**alpha + if score>max_score: + pred_box = det[:4] + max_score = score + + IoU_det = computeIoU(ref_box, pred_box) + + if ref['split']=='testA': + num_A += 1 + if IoU_det >= 0.5: + correct_A_d += 1 + elif ref['split']=='testB': + num_B += 1 + if IoU_det >= 0.5: + correct_B_d += 1 + elif ref['split']=='val': + num_val += 1 + if IoU_det >= 0.5: + correct_val_d += 1 + + eval_result = {'val_d':correct_val_d/num_val,'testA_d':correct_A_d/num_A,'testB_d':correct_B_d/num_B} + + for metric, acc in eval_result.items(): + print(f'{metric}: {acc:.3f}') + + return eval_result + + + +# IoU function +def computeIoU(box1, box2): + # each box is of [x1, y1, w, h] + inter_x1 = max(box1[0], box2[0]) + inter_y1 = max(box1[1], box2[1]) + inter_x2 = min(box1[0]+box1[2]-1, box2[0]+box2[2]-1) + inter_y2 = min(box1[1]+box1[3]-1, box2[1]+box2[3]-1) + + if inter_x1 < inter_x2 and inter_y1 < inter_y2: + inter = (inter_x2-inter_x1+1)*(inter_y2-inter_y1+1) + else: + inter = 0 + union = box1[2]*box1[3] + box2[2]*box2[3] - inter + return float(inter)/union + + + \ No newline at end of file diff --git a/dataset/video_caption.py b/dataset/video_caption.py new file mode 100644 index 0000000000000000000000000000000000000000..1d0eece61dd5362a7294d7c93c8a6df9ed80d597 --- /dev/null +++ b/dataset/video_caption.py @@ -0,0 +1,348 @@ +from torch.utils.data import DataLoader, Dataset +from pathlib import Path +import json +import random +import torch +from PIL import Image + +from torch.utils.data.distributed import DistributedSampler + + + +import torch +from torchvision import transforms + +import re + + +from dataset.video_utils import VIDEO_READER_FUNCS + +class MSRVTTCaptionFineTuneDataset(Dataset): + def __init__(self, split='karpathy_train', raw_dataset=None, rank=-1, topk=-1, verbose=True, args=None, mode='train', + data_dir='/data/mshukor/data', black_image=False): + super().__init__() + + self.raw_dataset = raw_dataset + self.topk = topk + self.verbose = verbose + self.args = args + + self.mode = mode + + data_dir = Path(data_dir) + dataset_dir = data_dir.joinpath('annotation') + coco_img_dir = data_dir.joinpath('videos/all') + + self.black_image = black_image + + self.source = split + if self.verbose: + print('Data source: ', self.source) + + + + # video + self.num_frames = args.num_frames # 4 + self.video_reader = VIDEO_READER_FUNCS['decord'] + self.as_images = args.as_images # True + self.num_tries = args.num_tries # 2 + self.sample_type = args.sample_type # 'rand' + + normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + + + type_transform = transforms.Lambda(lambda x: x.float().div(255.0)) + + + + self.train_transform = transforms.Compose([ + transforms.RandomResizedCrop(args.image_size,scale=(0.5, 1.0), interpolation=Image.BICUBIC), + 
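+            # NOTE: these transforms are applied to the decoded uint8 frame
+            # tensor of shape (T, C, H, W); type_transform then rescales to
+            # [0, 1] floats before the CLIP-statistics normalization below.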
transforms.RandomHorizontalFlip(), + transforms.RandAugment(), + type_transform, + normalize, + ]) + self.test_transform = transforms.Compose([ + transforms.Resize((args.image_size,args.image_size),interpolation=Image.BICUBIC), + type_transform, + normalize, + ]) + + + data_info_path = dataset_dir.joinpath(split+'.json') + with open(data_info_path) as f: + karpathy_data = json.load(f) + + + n_images = 0 + + data = [] + for datum in karpathy_data: + + + if 'train' in split : + caption = datum['caption'] + if isinstance(caption, list): + for d in caption: + + img_id = ".".join(datum['video'].split('.')[:-1]) + new_datum = { + 'img_id': img_id, + 'sent': d.strip(), + 'targets': [k.strip() for k in caption], + 'is_train': True, + 'video': datum['video'], + } + data.append(new_datum) + else: + img_id = ".".join(datum['video'].split('.')[:-1]) + new_datum = { + 'img_id': img_id, + 'sent': caption.strip(), + 'targets': caption.strip(), + 'is_train': True, + 'video': datum['video'], + } + data.append(new_datum) + else: + caption = datum['caption'] + if not isinstance(caption, list): + caption = [caption] + img_id = ".".join(datum['video'].split('.')[:-1]) + new_datum = { + 'img_id': img_id, + 'targets': [d.strip() for d in caption], + 'is_train': False, + 'video': datum['video'], + } + data.append(new_datum) + + n_images += 1 + + if self.verbose: + print(f"{self.source} has {n_images} images") + print(f"Loaded {len(data)} data from", split) + + + + if isinstance(self.topk, float) and (0 < self.topk <= 1): + used_samples = int(self.topk * len(data)) + data = random.sample(data, used_samples) + if self.verbose: + print(f"Use only {len(data)} data") + + elif self.topk > 0: + data = data[:int(self.topk)] + if self.verbose: + print(f"Use only {len(data)} data") + + self.data = data + + if self.verbose: + print("# all sentences:", len(self.data)) + + + self.image_size = self.args.image_size + + if mode == "train" and self.args.use_data_augmentation: + self.transform = self.train_transform + else: + self.transform = self.test_transform + + self.source_to_h5 = {} + + self.source_to_h5.update({ + 'all': coco_img_dir, + }) + + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + + out_dict = {} + out_dict['args'] = self.args + + for i in range(self.num_tries): + + try: + datum = self.data[idx] + + ###### Image ###### + img_id = datum['img_id'] + out_dict['img_id'] = img_id + + video = datum['video'] + path = str(self.source_to_h5['all'].joinpath(f"{video}")) + + + max_num_frames = self.max_num_frames if hasattr(self, "max_num_frames") else -1 + frames, frame_indices, video_duration = self.video_reader( + path, self.num_frames, self.sample_type, max_num_frames=max_num_frames + ) + + except Exception as e: + print(i, path) + idx = random.randint(0, len(self) - 1) + print( + f"Caught exception {e} when loading video {path}, " + f"randomly sample a new video as replacement" + ) + continue + + + + + out_dict["image"] = self.transform(frames) + + + + if self.black_image: + out_dict["image"] = torch.zeros_like(out_dict["image"]) + + if not self.as_images: + out_dict["image"] = out_dict["image"].permute(1, 0, 2, 3) # -> CTHW + + + if datum['is_train']: + sent = datum['sent'].strip() + + out_dict['sent'] = sent + + + if 'targets' in datum: + out_dict['targets'] = datum['targets'] + + + return out_dict + + def collate_fn(self, batch): + batch_entry = {} + + B = len(batch) + + + + if 'target_ids' in batch[0]: + T_W_L = max(entry['target_length'] for entry in batch) + target_ids = 
torch.ones(B, T_W_L, dtype=torch.long) * self.tokenizer.pad_token_id
+
+
+        targets = []
+        img_ids = []
+        img_paths = []
+        input_text = []
+        images = []
+        sents = []
+
+        for i, entry in enumerate(batch):
+
+
+            images.append(entry['image'])
+            img_ids.append(entry['img_id'])
+
+            if 'target_ids' in entry:
+                target_ids[i, :entry['target_length']] = entry['target_ids']
+
+
+
+            if 'targets' in entry:
+                targets.append(entry['targets'])
+            if 'sent' in entry:
+                sents.append(entry['sent'])
+
+        # if self.args.use_vision:
+        batch_entry['images'] = torch.stack(images)
+        batch_entry['img_id'] = img_ids
+        batch_entry['img_paths'] = img_paths
+        if 'sent' in entry:
+            batch_entry['sent'] = sents
+
+
+
+        batch_entry['targets'] = targets
+
+        batch_entry['task'] = 'caption'
+
+        return batch_entry
+
+
+def pre_caption(caption,max_words):
+    caption = re.sub(
+        r"([,.'!?\"()*#:;~])",
+        '',
+        caption.lower(),
+    ).replace('-', ' ').replace('/', ' ').replace('<person>', 'person')
+
+    caption = re.sub(
+        r"\s{2,}",
+        ' ',
+        caption,
+    )
+    caption = caption.rstrip('\n')
+    caption = caption.strip(' ')
+
+    #truncate caption
+    caption_words = caption.split(' ')
+    if len(caption_words)>max_words:
+        caption = ' '.join(caption_words[:max_words])
+
+    return caption
+
+
+
+def get_loader(args, split='train', mode='train',
+               batch_size=32, workers=4, distributed=False, gpu=0,
+               topk=-1, data_dir='/data/mshukor/data', local_rank=None, world_size=None, verbose=False,
+               config_dir=None, black_image=False):
+
+
+
+    dataset = MSRVTTCaptionFineTuneDataset(
+        split,
+        rank=gpu,
+        topk=topk,
+        verbose=verbose,
+        args=args,
+        mode=mode, data_dir=data_dir, black_image=black_image)
+
+
+    if distributed and mode == 'train':
+        train_sampler = DistributedSampler(dataset, num_replicas=world_size, rank=local_rank)
+    else:
+        train_sampler = None
+    if mode == 'train':
+        loader = DataLoader(
+            dataset, batch_size=batch_size, shuffle=(train_sampler is None),
+            num_workers=workers, pin_memory=True, sampler=train_sampler,
+            collate_fn=dataset.collate_fn)
+    else:
+        loader = DataLoader(
+            dataset,
+            batch_size=batch_size, shuffle=False,
+            num_workers=workers, pin_memory=True,
+            sampler=None,
+            collate_fn=dataset.collate_fn,
+            drop_last=False)
+
+    if verbose:
+        loader.evaluator = COCOCaptionEvaluator()
+
+    loader.task = 'caption'
+
+    return loader
+
+
+
+class COCOCaptionEvaluator:
+    def __init__(self):
+        import language_evaluation
+        self.evaluator = language_evaluation.CocoEvaluator(verbose=False)
+
+
+    def evaluate(self, predicts, answers):
+
+        results = self.evaluator.run_evaluation(predicts, answers)
+
+        return results
\ No newline at end of file
diff --git a/dataset/video_utils.py b/dataset/video_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..a38945ba18880b2afc9337d7226f51528d8d622e
--- /dev/null
+++ b/dataset/video_utils.py
@@ -0,0 +1,124 @@
+"""
+Modified from https://github.com/m-bain/frozen-in-time/blob/22a91d78405ec6032fdf521ae1ff5573358e632f/base/base_dataset.py
+"""
+import random
+import av
+import decord
+import torch
+import numpy as np
+import math
+# decord.bridge.set_bridge("torch")
+
+from decord.bridge import to_torch
+
+def pts_to_secs(pts: int, time_base: float, start_pts: int) -> float:
+    """
+    Converts a presentation timestamp (pts) with the given time base and start_pts offset to seconds.
+
+    Returns:
+        time_in_seconds (float): The corresponding time in seconds.
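+        E.g. pts=90000 with time_base=1/90000 and start_pts=0 gives 1.0 (hypothetical values).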
+ + https://github.com/facebookresearch/pytorchvideo/blob/main/pytorchvideo/data/utils.py#L54-L64 + """ + if pts == math.inf: + return math.inf + + return int(pts - start_pts) * time_base + + +def get_pyav_video_duration(video_reader): + video_stream = video_reader.streams.video[0] + video_duration = pts_to_secs( + video_stream.duration, + video_stream.time_base, + video_stream.start_time + ) + return float(video_duration) + + +def get_frame_indices_by_fps(): + pass + + +def get_frame_indices(num_frames, vlen, sample='rand', fix_start=None, input_fps=1, max_num_frames=-1): + if sample in ["rand", "middle"]: + acc_samples = min(num_frames, vlen) + # split the video into `acc_samples` intervals, and sample from each interval. + intervals = np.linspace(start=0, stop=vlen, num=acc_samples + 1).astype(int) + ranges = [] + for idx, interv in enumerate(intervals[:-1]): + ranges.append((interv, intervals[idx + 1] - 1)) + if sample == 'rand': + try: + frame_indices = [random.choice(range(x[0], x[1])) for x in ranges] + except: + frame_indices = np.random.permutation(vlen)[:acc_samples] + frame_indices.sort() + frame_indices = list(frame_indices) + elif fix_start is not None: + frame_indices = [x[0] + fix_start for x in ranges] + elif sample == 'middle': + frame_indices = [(x[0] + x[1]) // 2 for x in ranges] + else: + raise NotImplementedError + + if len(frame_indices) < num_frames: # padded with last frame + padded_frame_indices = [frame_indices[-1]] * num_frames + padded_frame_indices[:len(frame_indices)] = frame_indices + frame_indices = padded_frame_indices + elif "fps" in sample: # fps0.5, sequentially sample frames at 0.5 fps + output_fps = float(sample[3:]) + duration = float(vlen) / input_fps + delta = 1 / output_fps # gap between frames, this is also the clip length each frame represents + frame_seconds = np.arange(0 + delta / 2, duration + delta / 2, delta) + frame_indices = np.around(frame_seconds * input_fps).astype(int) + frame_indices = [e for e in frame_indices if e < vlen] + if max_num_frames > 0 and len(frame_indices) > max_num_frames: + frame_indices = frame_indices[:max_num_frames] + # frame_indices = np.linspace(0 + delta / 2, duration + delta / 2, endpoint=False, num=max_num_frames) + else: + raise ValueError + return frame_indices + + +def read_frames_av(video_path, num_frames, sample='rand', fix_start=None, max_num_frames=-1): + reader = av.open(video_path) + frames = [torch.from_numpy(f.to_rgb().to_ndarray()) for f in reader.decode(video=0)] + vlen = len(frames) + duration = get_pyav_video_duration(reader) + fps = vlen / float(duration) + frame_indices = get_frame_indices( + num_frames, vlen, sample=sample, fix_start=fix_start, + input_fps=fps, max_num_frames=max_num_frames + ) + frames = torch.stack([frames[idx] for idx in frame_indices]) # (T, H, W, C), torch.uint8 + frames = frames.permute(0, 3, 1, 2) # (T, C, H, W), torch.uint8 + return frames, frame_indices, duration + +# decord.bridge.set_bridge("torch") +def read_frames_decord(video_path, num_frames, sample='rand', fix_start=None, max_num_frames=-1): + video_reader = decord.VideoReader(video_path, num_threads=1) + vlen = len(video_reader) + fps = video_reader.get_avg_fps() + duration = vlen / float(fps) + frame_indices = get_frame_indices( + num_frames, vlen, sample=sample, fix_start=fix_start, + input_fps=fps, max_num_frames=max_num_frames + ) + frames = video_reader.get_batch(frame_indices) # (T, H, W, C), torch.uint8 + frames = to_torch(frames) + # try: + # print(type(frames)) + # frames = frames.asnumpy() + # 
frames = torch.from_numpy(frames) + # except: + # print("expt", type(frames)) + # pass + frames = frames.permute(0, 3, 1, 2) # (T, C, H, W), torch.uint8 + return frames, frame_indices, duration + + +VIDEO_READER_FUNCS = { + 'av': read_frames_av, + 'decord': read_frames_decord +} diff --git a/dataset/video_vqa.py b/dataset/video_vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..7a11d1f9a5d4e8bdb5e4f36e68462c11cc50044f --- /dev/null +++ b/dataset/video_vqa.py @@ -0,0 +1,474 @@ +## from VL-Adapter + +from torch.utils.data import DataLoader, Dataset +from pathlib import Path +import json +import random +import torch +import numpy as np + +from torchvision import transforms + + +from torch.utils.data.distributed import DistributedSampler + + +from PIL import Image +import re + +from dataset.video_utils import VIDEO_READER_FUNCS + + +project_dir = Path(__file__).resolve().parent.parent # VLT5 +workspace_dir = project_dir.parent + + + +class MSRVTTVQAFineTuneDataset(Dataset): + def __init__(self, split='train,valid', raw_dataset=None, rank=-1, topk=-1, verbose=True, args=None, mode='train', data_dir=None): + super().__init__() + + self.raw_dataset = raw_dataset + self.topk = topk + self.verbose = verbose + self.args = args + + self.mode = mode + + data_dir = Path(data_dir) + dataset_dir = data_dir.joinpath('annotation') + coco_img_dir = data_dir.joinpath('videos/all') + + # video + self.num_frames = args.num_frames # 4 + self.video_reader = VIDEO_READER_FUNCS['decord'] + self.as_images = args.as_images # True + self.num_tries = args.num_tries # 2 + self.sample_type = args.sample_type # 'rand' + + + normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + + + type_transform = transforms.Lambda(lambda x: x.float().div(255.0)) + + + + self.train_transform = transforms.Compose([ + transforms.RandomResizedCrop(args.image_size,scale=(0.5, 1.0), interpolation=Image.BICUBIC), + transforms.RandomHorizontalFlip(), + transforms.RandAugment(), + type_transform, + normalize, + ]) + self.test_transform = transforms.Compose([ + transforms.Resize((args.image_size,args.image_size),interpolation=Image.BICUBIC), + type_transform, + normalize, + ]) + + + + # Loading datasets to data + self.sources = split.split(',') + if self.verbose: + print('Data sources: ', self.sources) + + + + data_info_path = dataset_dir.joinpath(split+'.json') + with open(data_info_path) as f: + karpathy_data = json.load(f) + + + + data = karpathy_data + + + + if isinstance(self.topk, float) and (0 < self.topk <= 1): + used_samples = int(self.topk * len(data)) + data = random.sample(data, used_samples) + if self.verbose: + print(f"Use only {len(data)} data") + + elif self.topk > 0: + data = data[:int(self.topk)] + if self.verbose: + print(f"Use only {len(data)} data") + + self.data = data + + if self.verbose: + print("# all sentences:", len(self.data)) + + + self.image_size = self.args.image_size + + if mode == "train" and self.args.use_data_augmentation: + self.transform = self.train_transform + else: + self.transform = self.test_transform + + self.source_to_h5 = {} + self.source_to_h5.update({ + 'all': coco_img_dir, + }) + + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + + out_dict = {} + out_dict['args'] = self.args + + datum = self.data[idx] + + + ###### Image ###### + + for i in range(self.num_tries): + + try: + datum = self.data[idx] + + ###### Image ###### + video = datum['video'] + out_dict['img_id'] = video.split('.')[0] + + 
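+                # e.g. (hypothetical) video = "video7010.mp4" gives img_id "video7010"
+                # and path "<data_dir>/videos/all/video7010.mp4"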
path = str(self.source_to_h5['all'].joinpath(f"{video}")) + + + max_num_frames = self.max_num_frames if hasattr(self, "max_num_frames") else -1 + frames, frame_indices, video_duration = self.video_reader( + path, self.num_frames, self.sample_type, max_num_frames=max_num_frames + ) + + except Exception as e: + print(i, path) + idx = random.randint(0, len(self) - 1) + print( + f"Caught exception {e} when loading video {path}, " + f"randomly sample a new video as replacement" + ) + continue + + + out_dict["image"] = self.transform(frames) + if not self.as_images: + out_dict["image"] = out_dict["image"].permute(1, 0, 2, 3) # -> CTHW + + ###### Text ##### + + + if 'sent' in datum: + sent = datum['sent'] + elif 'question' in datum: + sent = datum['question'] + + question_id = datum['question_id'] + out_dict['question_id'] = question_id + + out_dict['sent'] = sent + + + if 'label' in datum: + label = datum['label'] + out_dict['label'] = label + + # https://github.com/airsplay/lxmert/blob/master/src/pretrain/lxmert_pretrain.py#L191 + answers = [] + scores = [] + for a, s in label.items(): + answers.append(a) + scores.append(s) + + score_sum = sum(scores) + + if score_sum == 0: + answer = '' + score = 0. + else: + prob = [score / score_sum for score in scores] + choice = np.random.multinomial(1, prob).argmax() + answer = answers[choice] + score = scores[choice] + assert len(answer) > 0, (sent, label, choice, answer) + + out_dict['answer'] = answer + out_dict['score'] = score + out_dict['all_answers'] = answers + + + + return out_dict + + + def collate_fn(self, batch): + batch_entry = {} + + args = batch[0]['args'] + + B = len(batch) + + + + sentences = [] + question_ids = [] + answers = [] + all_answers = [] + labels = [] + scores = [] + + images = [] + + for i, entry in enumerate(batch): + + images.append(entry["image"]) + + + sentences.append(entry['sent']) + question_ids.append(entry['question_id']) + if 'answer' in entry: + answers.append(entry['answer']) + if 'all_answers' in entry: + all_answers.append(entry['all_answers']) + + if 'score' in entry: + scores.append(entry['score']) + + if 'label' in entry: + labels.append(entry['label']) + + + batch_entry['images'] = torch.stack(images) + + + batch_entry['sent'] = sentences + batch_entry['question_ids'] = question_ids + batch_entry['answers'] = answers + batch_entry['all_answers'] = all_answers + + batch_entry['scores'] = torch.FloatTensor(scores) + batch_entry['labels'] = labels + + batch_entry['task'] = 'gqa' + + return batch_entry + + +def get_loader(args, split='train', mode='train', + batch_size=32, workers=4, distributed=False, gpu=0, + topk=-1, verbose=None, data_dir='/data/mshukor/data', local_rank=None, world_size=None): + + + + _dset = MSRVTTVQADataset(split, verbose, data_dir=data_dir) + + dataset = MSRVTTVQAFineTuneDataset( + split, + raw_dataset=_dset, + rank=gpu, + topk=topk, + verbose=verbose, + args=args, + mode=mode, data_dir=data_dir) + + if distributed: + sampler = DistributedSampler(dataset, num_replicas=world_size, rank=local_rank) + else: + sampler = None + + + if mode == 'train': + loader = DataLoader( + dataset, batch_size=batch_size, shuffle=(sampler is None), + num_workers=workers, pin_memory=True, sampler=sampler, + collate_fn=dataset.collate_fn) + else: + loader = DataLoader( + dataset, + batch_size=batch_size, + num_workers=workers, pin_memory=True, + sampler=sampler, + shuffle=None if (sampler is not None) else False, + collate_fn=dataset.collate_fn, + drop_last=False) + + loader.evaluator = 
MSRVTTVQAQAEvaluator(_dset) + loader.task = 'msrvttvqa' + + return loader + + +class MSRVTTVQADataset: + """ + A GQA data example in json file: + { + "video": "2375429.mp4", + "label": { + "pipe": 1.0 + }, + "question_id": "07333408", + "sent": "What is on the white wall?" + } + """ + + def __init__(self, splits: str, verbose=True, data_dir='/data/mshukor/data'): + self.name = splits + self.splits = splits.split(',') + + data_dir = Path(data_dir) + dataset_dir = data_dir.joinpath('annotation') + + + + + # Loading datasets to data + self.data = [] + for split in self.splits: + self.data.extend(json.load(open(dataset_dir.joinpath("%s.json" % split)))) + if verbose: + print("Load %d data from split(s) %s." % + (len(self.data), self.name)) + + # List to dict (for evaluation and others) + self.id2datum = { + datum['question_id']: datum + for datum in self.data + } + + + + def __len__(self): + return len(self.data) + + +class MSRVTTVQAQAEvaluator: + def __init__(self, dataset: MSRVTTVQADataset): + self.dataset = dataset + + """https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py""" + + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", "couldnt": "couldn't", \ + "couldn'tve": "couldn't've", "couldnt've": "couldn't've", "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", \ + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", "hed": "he'd", "hed've": "he'd've", \ + "he'dve": "he'd've", "hes": "he's", "howd": "how'd", "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", \ + "Im": "I'm", "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", "itll": "it'll", "let's": "let's", \ + "maam": "ma'am", "mightnt": "mightn't", "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", \ + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", "oclock": "o'clock", "oughtnt": "oughtn't", \ + "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", \ + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", \ + "somebody'd": "somebodyd", "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", "somebodyll": "somebody'll", \ + "somebodys": "somebody's", "someoned": "someone'd", "someoned've": "someone'd've", "someone'dve": "someone'd've", \ + "someonell": "someone'll", "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", \ + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", "thered": "there'd", "thered've": "there'd've", \ + "there'dve": "there'd've", "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", \ + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", "twas": "'twas", "wasnt": "wasn't", \ + "wed've": "we'd've", "we'dve": "we'd've", "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", \ + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", "wheres": "where's", "whereve": "where've", \ + "whod": "who'd", "whod've": "who'd've", "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", "whyll": "why'll", \ + "whyre": "why're", "whys": "why's", "wont": "won't", "wouldve": "would've", 
"wouldnt": "wouldn't", "wouldnt've": "wouldn't've", \ + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", "yall'd've": "y'all'd've", \ + "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", \ + "youll": "you'll", "youre": "you're", "youve": "you've"} + + self.manualMap = { 'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + def evaluate(self, quesid2ans: dict, normalize_answer=False): + score = 0. + for quesid, ans in quesid2ans.items(): + datum = self.dataset.id2datum[quesid] + label = datum['label'] + if normalize_answer: + ans = self.normalize_answer(ans) + new_label = {self.normalize_answer(l): label[l] for l in label} + else: + new_label = label + + if ans in new_label: + score += new_label[ans] + return score / len(quesid2ans) + + def normalize_answer(self, resAns): + resAns = resAns.replace('\n', ' ') + resAns = resAns.replace('\t', ' ') + resAns = resAns.strip() + resAns = self.processPunctuation(resAns) + resAns = self.processDigitArticle(resAns) + resAns = resAns.replace(',', '') + return resAns + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", + outText, + re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText + + def dump_result(self, quesid2ans: dict, path): + """ + Dump the result to a GQA-challenge submittable json file. + GQA json file submission requirement: + results = [result] + result = { + "questionId": str, # Note: it's a actually an int number but the server requires an str. + "prediction": str + } + :param quesid2ans: A dict mapping question id to its predicted answer. + :param path: The file path to save the json file. 
+ :return: + """ + with open(path, 'w') as f: + result = [] + for ques_id, ans in quesid2ans.items(): + result.append({ + 'questionId': ques_id, + 'prediction': ans + }) + json.dump(result, f, indent=4, sort_keys=True) diff --git a/dataset/vqa.py b/dataset/vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..85b168d332dfb283fb60c676f5ac81e0fd40188a --- /dev/null +++ b/dataset/vqa.py @@ -0,0 +1,662 @@ +from torch.utils.data import Dataset +import numpy as np + +import random + +import re + +import torch +from torchvision import transforms + +from PIL import Image +import json + +from dataset.randaugment import RandomAugment + + + +def pre_question(question,max_ques_words): + question = re.sub( + r"([,.'!?\"()*#:;~])", + '', + question.lower(), + ).replace('-', ' ').replace('/', ' ') + question = question.rstrip(' ') + + #truncate question + question_words = question.split(' ') + if len(question_words)>max_ques_words: + question = ' '.join(question_words[:max_ques_words]) + + return question + + + +############################################### +# https://raw.githubusercontent.com/ylsung/VL_adapter/545fcbbdbbaec4c442de35567f6ae477ff4e8265/VL-T5/src/vqa_raw_data.py + + +from torch.utils.data import DataLoader, Dataset +from pathlib import Path +import json +import random +from tqdm import tqdm +import torch +import numpy as np +import re +from PIL import Image + +from torch.utils.data.distributed import DistributedSampler + + +# {'what': 0.0, 'what color are the': 0.0, 'are there': 0.0, 'where is the': 0.0, 'how many': 0.0, 'what type of': 0.0, 'is the': 0.0, 'do': 0.0, 'are the': 0.0, 'none of the above': 0.0, 'are these': 0.0, 'is this a': 0.0, 'is the woman': 0.0, 'what color is the': 0.0, 'was': 0.0, 'what brand': 0.0, 'what is the': 0.0, 'what room is': 0.0, 'does this': 0.0, 'who is': 0.0, 'what are the': 0.0, 'where are the': 0.0, 'can you': 0.0, 'are': 0.0, 'what is the person': 0.0, 'has': 0.0, 'are they': 0.0, 'what is on the': 0.0, 'what is the man': 0.0, 'what kind of': 0.0, 'is the man': 0.0, 'is': 0.0, 'what is': 0.0, 'is that a': 0.0, 'what are': 0.0, 'is this': 0.0, 'what is in the': 0.0, 'how': 0.0, 'is there': 0.0, 'which': 0.0, 'is it': 0.0, 'how many people are': 0.0, 'what color is': 0.0, 'are there any': 0.0, 'what time': 0.0, 'what is the woman': 0.0, 'is there a': 0.0, 'is this person': 0.0, 'does the': 0.0, 'why': 0.0, 'what is the color of the': 0.0, 'what does the': 0.0, 'is this an': 0.0, 'is he': 0.0, 'what animal is': 0.0, 'how many people are in': 0.0, 'what color': 0.0, 'what is the name': 0.0, 'why is the': 0.0, 'do you': 0.0, 'could': 0.0, 'what sport is': 0.0, 'what is this': 0.0, 'is the person': 0.0, 'what number is': 0.0} +class VQAFineTuneDataset(Dataset): + def __init__(self, split='train', raw_dataset=None, rank=-1, topk=-1, verbose=True, args=None, mode='train', + data_dir=None, black_image=False, balanced_data=False, seed=42): + super().__init__() + + dataset_dir = Path(data_dir) + coco_dir = dataset_dir.joinpath('COCO') + vg_dir = dataset_dir.joinpath('VG') + coco_img_dir = coco_dir.joinpath('images/') + coco_feature_dir = coco_dir.joinpath('features') + vqa_dir = dataset_dir.joinpath('vqa') + self.seed = seed + self.raw_dataset = raw_dataset + self.topk = topk + self.verbose = verbose + self.args = args + + self.mode = mode + + self.black_image = black_image + self.balanced_data = balanced_data + normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + + + 
self.train_transform = transforms.Compose([ + transforms.RandomResizedCrop(args.image_size,scale=(0.5, 1.0), interpolation=Image.BICUBIC), + transforms.RandomHorizontalFlip(), + RandomAugment(2,7,isPIL=True,augs=['Identity','AutoContrast','Equalize','Brightness','Sharpness', + 'ShearX', 'ShearY', 'TranslateX', 'TranslateY', 'Rotate']), + transforms.ToTensor(), + normalize, + ]) + self.test_transform = transforms.Compose([ + transforms.Resize((args.image_size,args.image_size),interpolation=Image.BICUBIC), + transforms.ToTensor(), + normalize, + ]) + + + # Loading datasets to data + self.sources = split.split(',') + if self.verbose: + print('Data sources: ', self.sources) + + + + self.answer_normalizer = VQAEvaluator() + + self.img_ids_to_source = {} + data_info_dicts = [] + for source in self.sources: + data_info_path = dataset_dir.joinpath(f'vqa/{source}.json') + with open(data_info_path) as f: + _data_info_dicts = json.load(f) + for _d in _data_info_dicts: + if 'vg_qa_full' == source: + self.img_ids_to_source[_d['img_id']] = 'vg' + elif 'train2014' in _d['img_id']: + self.img_ids_to_source[_d['img_id']] = 'train2014' + elif 'val2014' in _d['img_id']: + self.img_ids_to_source[_d['img_id']] = 'val2014' + elif 'test2014' in _d['img_id']: + self.img_ids_to_source[_d['img_id']] = 'test2014' + else: + self.img_ids_to_source[_d['img_id']] = source + _d['source'] = source + + data_info_dicts.extend(_data_info_dicts) + if self.verbose: + print(f"Loaded {len(_data_info_dicts)} data from", source) + + data = data_info_dicts + + + if isinstance(self.topk, float) and (0 < self.topk <= 1): + used_samples = int(self.topk * len(data)) + random.seed(self.seed) + data = random.sample(data, used_samples) + if self.verbose: + print(f"Use only {len(data)} data") + + elif self.topk > 0: + random.seed(self.seed) + random.shuffle(data) + if self.balanced_data and mode == 'train': + print("create balanced data") + qtype_2_data = {} + for d in data: + qtype = d['question_type'] + if qtype not in qtype_2_data: + qtype_2_data[qtype] = [d] + else: + qtype_2_data[qtype].append(d) + qtype_len = self.topk//(len(qtype_2_data.keys())) + print(qtype_len, "sample per question type") + new_data = [] + for k, v in qtype_2_data.items(): + new_data+=v[:qtype_len] + + data = new_data + + data = data[:int(self.topk)] + if self.verbose: + print(f"Use only {len(data)} data", data[:2]) + + self.data = data + + if self.verbose: + print("# all sentences:", len(self.data)) + + + self.image_size = self.args.image_size #eval(self.args.image_size) + + if mode == "train" and self.args.use_data_augmentation: + self.transform = self.train_transform + else: + self.transform = self.test_transform + + self.source_to_h5 = { + 'train2014': coco_img_dir.joinpath(f'train2014'), + 'val2014': coco_img_dir.joinpath(f'val2014'), + 'test2014': coco_img_dir.joinpath(f'test2014'), + } + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + + out_dict = {} + out_dict['args'] = self.args + + datum = self.data[idx] + + ###### Image ###### + img_id = datum['img_id'] + out_dict['img_id'] = img_id + + source = self.img_ids_to_source[img_id] + + path = self.source_to_h5[source].joinpath(f"{img_id}.jpg") + + image = Image.open(path).convert('RGB') + + out_dict["image"] = self.transform(image) + + if self.black_image: + out_dict["image"] = torch.zeros_like(out_dict["image"]) + + + ###### Text ##### + if 'sent' in datum: + sent = datum['sent'] + elif 'question' in datum: + sent = datum['question'] + + + question_id = datum['question_id'] + 
out_dict['question_id'] = question_id + + + out_dict['sent'] = sent + + + if 'is_topk_optimal' in datum: + out_dict['is_topk_optimal'] = datum['is_topk_optimal'] + + if 'label' in datum: + label = datum['label'] + out_dict['label'] = label + + + if self.args.raw_label: + + # 10 raw answers + # ex) 'answers': [{'answer': 'net', 'answer_confidence': 'maybe', 'answer_id': 1}, + # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 2}, + # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 3}, + # {'answer': 'netting', 'answer_confidence': 'yes', 'answer_id': 4}, + # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 5}, + # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 6}, + # {'answer': 'mesh', 'answer_confidence': 'maybe', 'answer_id': 7}, + # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 8}, + # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 9}, + # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 10}], + + answers = datum['answers'] + answer = random.choice(answers)['answer'] + + if self.args.answer_normalize: + answer = self.answer_normalizer.normalize_answer(answer) + + score = int(len(answers) > 0) + + out_dict['answer'] = answer + out_dict['score'] = score + out_dict['all_answers'] = [a['answer'] for a in answers] + + + else: + # https://github.com/airsplay/lxmert/blob/master/src/pretrain/lxmert_pretrain.py#L191 + + answers = [] + scores = [] + for a, s in label.items(): + answers.append(a) + scores.append(s) + + score_sum = sum(scores) + + if score_sum == 0: + answer = '' + score = 0. + else: + prob = [score / score_sum for score in scores] + choice = np.random.multinomial(1, prob).argmax() + answer = answers[choice] + score = scores[choice] + assert len(answer) > 0, (sent, label, choice, answer) + + out_dict['answer'] = answer + out_dict['score'] = score + out_dict['all_answers'] = answers + + + return out_dict + + + def collate_fn(self, batch): + batch_entry = {} + + args = batch[0]['args'] + + B = len(batch) + + + sentences = [] + question_ids = [] + answers = [] + all_answers = [] + img_ids = [] + img_paths = [] + labels = [] + scores = [] + is_topk_optimal = [] + images = [] + + for i, entry in enumerate(batch): + + images.append(entry["image"]) + + + + sentences.append(entry['sent']) + question_ids.append(entry['question_id']) + if 'answer' in entry: + answers.append(entry['answer']) + if 'all_answers' in entry: + all_answers.append(entry['all_answers']) + if 'score' in entry: + scores.append(entry['score']) + + if 'label' in entry: + labels.append(entry['label']) + + if 'is_topk_optimal' in entry: + is_topk_optimal.append(entry['is_topk_optimal']) + + + batch_entry['sent'] = sentences + batch_entry['question_ids'] = question_ids + batch_entry['answers'] = answers + batch_entry['all_answers'] = all_answers + batch_entry['scores'] = torch.FloatTensor(scores) + batch_entry['labels'] = labels + + batch_entry['args'] = args + batch_entry['task'] = 'vqa' + batch_entry['images'] = torch.stack(images) + + return batch_entry + + +def get_loader(args, split='karpathy_train', mode='train', + batch_size=32, workers=4, distributed=False, gpu=0, topk=-1, + verbose=False, data_dir='/data/mshukor/data', local_rank=None, + world_size=None, black_image=False, balanced_data=False, seed=42): + + + _dset = VQADataset(split, verbose, data_dir=data_dir) + + dataset = VQAFineTuneDataset( + split, + raw_dataset=_dset, + rank=gpu, + topk=topk, + verbose=verbose, + args=args, + mode=mode, data_dir=data_dir, black_image=black_image, + 
balanced_data=balanced_data, seed=seed) + + if distributed: + sampler = DistributedSampler(dataset, num_replicas=world_size, rank=local_rank) + else: + sampler = None + + if mode == 'train': + loader = DataLoader( + dataset, batch_size=batch_size, shuffle=(sampler is None), + num_workers=workers, pin_memory=True, sampler=sampler, + collate_fn=dataset.collate_fn) + else: + loader = DataLoader( + dataset, + batch_size=batch_size, + num_workers=workers, pin_memory=True, + sampler=sampler, + shuffle=None if (sampler is not None) else False, + collate_fn=dataset.collate_fn, + drop_last=False) + + if verbose: + loader.evaluator = VQAEvaluator(_dset) + + loader.task = 'vqa' + + return loader + + +class VQADataset: + """ + A VQA data example in json file: + { + "answer_type": "other", + "img_id": "COCO_train2014_000000458752", + "label": { + "net": 1 + }, + "question_id": 458752000, + "question_type": "what is this", + "sent": "What is this photo taken looking through?" + } + """ + + def __init__(self, splits: str, verbose=True, data_dir='/data/mshukor/data'): + self.name = splits + self.splits = splits.split(',') + + dataset_dir = Path(data_dir) + vqa_dir = dataset_dir.joinpath('vqa') + + with open(dataset_dir.joinpath(f'vqa/v2_mscoco_train2014_annotations.json')) as f: + train2014_data = json.load(f) + with open(dataset_dir.joinpath(f'vqa/v2_mscoco_val2014_annotations.json')) as f: + val2014_data = json.load(f) + train2014_id2datum = {} + for datum in train2014_data['annotations']: + qid = datum['question_id'] + train2014_id2datum[qid] = datum + val2014_id2datum = {} + for datum in val2014_data['annotations']: + qid = datum['question_id'] + val2014_id2datum[qid] = datum + self.id2datum_gt = {**train2014_id2datum, **val2014_id2datum} + + # Loading datasets + self.data = [] + for split in self.splits: + self.data.extend( + json.load(open(vqa_dir.joinpath("%s.json" % split)))) + + if verbose: + print("Load %d data from split(s) %s." 
% + (len(self.data), self.name)) + + # Convert list to dict (for evaluation) + self.id2datum = { + datum['question_id']: datum + for datum in self.data + } + + # Topk Answers + self.ans2label = json.load( + open(vqa_dir.joinpath("trainval_ans2label.json"))) + self.label2ans = json.load( + open(vqa_dir.joinpath("trainval_label2ans.json"))) + assert len(self.ans2label) == len(self.label2ans) + + if verbose: + print('# Answers:', len(self.ans2label)) + + @property + def num_answers(self): + return len(self.ans2label) + + def __len__(self): + return len(self.data) + + +class VQAEvaluator: + def __init__(self, dataset: VQADataset = None): + self.dataset = dataset + + """https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py""" + + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", "couldnt": "couldn't", \ + "couldn'tve": "couldn't've", "couldnt've": "couldn't've", "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", \ + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", "hed": "he'd", "hed've": "he'd've", \ + "he'dve": "he'd've", "hes": "he's", "howd": "how'd", "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", \ + "Im": "I'm", "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", "itll": "it'll", "let's": "let's", \ + "maam": "ma'am", "mightnt": "mightn't", "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", \ + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", "oclock": "o'clock", "oughtnt": "oughtn't", \ + "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", \ + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", \ + "somebody'd": "somebodyd", "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", "somebodyll": "somebody'll", \ + "somebodys": "somebody's", "someoned": "someone'd", "someoned've": "someone'd've", "someone'dve": "someone'd've", \ + "someonell": "someone'll", "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", \ + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", "thered": "there'd", "thered've": "there'd've", \ + "there'dve": "there'd've", "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", \ + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", "twas": "'twas", "wasnt": "wasn't", \ + "wed've": "we'd've", "we'dve": "we'd've", "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", \ + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", "wheres": "where's", "whereve": "where've", \ + "whod": "who'd", "whod've": "who'd've", "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", "whyll": "why'll", \ + "whyre": "why're", "whys": "why's", "wont": "won't", "wouldve": "would've", "wouldnt": "wouldn't", "wouldnt've": "wouldn't've", \ + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", "yall'd've": "y'all'd've", \ + "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", \ + "youll": "you'll", "youre": "you're", "youve": "you've"} + + 
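+        # manualMap / articles below implement the official VQA answer
+        # normalization: spelled-out numbers are mapped to digits and bare
+        # articles are dropped, e.g. "two dogs" -> "2 dogs", "a cat" -> "cat"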
self.manualMap = { 'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + self.n = 2 + + def evaluate(self, quesid2ans: dict, normalize_answer=False): + score = 0. + for quesid, ans in quesid2ans.items(): + datum = self.dataset.id2datum[quesid] + label = datum['label'] + if ans in label: + score += label[ans] + return score / len(quesid2ans) + + def dump_result(self, quesid2ans: dict, path): + """ + Dump results to a json file, which could be submitted to the VQA online evaluation. + VQA json file submission requirement: + results = [result] + result = { + "question_id": int, + "answer": str + } + :param quesid2ans: dict of quesid --> ans + :param path: The desired path of saved file. + """ + with open(path, 'w') as f: + result = [] + for ques_id, ans in quesid2ans.items(): + result.append({ + 'question_id': ques_id, + 'answer': ans + }) + json.dump(result, f, indent=4, sort_keys=True) + + def evaluate_raw(self, quesid2ans: dict, is_topk_optimal=None): + """https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py""" + + gts = self.dataset.id2datum_gt + + self.accuracy = {} + self.evalQA = {} + self.evalQuesType = {} + self.evalAnsType = {} + + accQA = [] + accQuesType = {} + accAnsType = {} + + # print("Computing accuracy") + + for quesId, resAns in tqdm(quesid2ans.items(), total=len(quesid2ans), ncols=80): + + quesId = int(quesId) + + datum = self.dataset.id2datum[quesId] + + if is_topk_optimal is None: + pass + elif 'is_topk_optimal' in datum: + if datum['is_topk_optimal'] != is_topk_optimal: + continue + + resAns = resAns.replace('\n', ' ') + resAns = resAns.replace('\t', ' ') + resAns = resAns.strip() + resAns = self.processPunctuation(resAns) + resAns = self.processDigitArticle(resAns) + + gtAcc = [] + gtAnswers = [ans['answer'] for ans in gts[quesId]['answers']] + if len(set(gtAnswers)) > 1: + for ansDic in gts[quesId]['answers']: + ansDic['answer'] = self.processPunctuation(ansDic['answer']) + for gtAnsDatum in gts[quesId]['answers']: + otherGTAns = [item for item in gts[quesId]['answers'] if item!=gtAnsDatum] + matchingAns = [item for item in otherGTAns if item['answer']==resAns] + acc = min(1, float(len(matchingAns))/3) + gtAcc.append(acc) + quesType = gts[quesId]['question_type'] + ansType = gts[quesId]['answer_type'] + avgGTAcc = float(sum(gtAcc))/len(gtAcc) + accQA.append(avgGTAcc) + if quesType not in accQuesType: + accQuesType[quesType] = [] + accQuesType[quesType].append(avgGTAcc) + if ansType not in accAnsType: + accAnsType[ansType] = [] + accAnsType[ansType].append(avgGTAcc) + + self.setEvalQA(quesId, avgGTAcc) + self.setEvalQuesType(quesId, quesType, avgGTAcc) + self.setEvalAnsType(quesId, ansType, avgGTAcc) + + + if len(accQA) == 0: + return { + 'overall': 0, + 'perQuestionType': {}, + 'perAnswerType': {} + } + else: + self.setAccuracy(accQA, accQuesType, accAnsType) + + return self.accuracy + + def normalize_answer(self, resAns): + resAns = resAns.replace('\n', ' ') + resAns = resAns.replace('\t', ' ') + resAns = resAns.strip() + resAns = self.processPunctuation(resAns) + resAns = self.processDigitArticle(resAns) + resAns = 
resAns.replace(',', '') + return resAns + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", + outText, + re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText + + def setEvalQA(self, quesId, acc): + self.evalQA[quesId] = round(100*acc, self.n) + + def setEvalQuesType(self, quesId, quesType, acc): + if quesType not in self.evalQuesType: + self.evalQuesType[quesType] = {} + self.evalQuesType[quesType][quesId] = round(100*acc, self.n) + + def setEvalAnsType(self, quesId, ansType, acc): + if ansType not in self.evalAnsType: + self.evalAnsType[ansType] = {} + self.evalAnsType[ansType][quesId] = round(100*acc, self.n) + + def setAccuracy(self, accQA, accQuesType, accAnsType): + self.accuracy['overall'] = round(100*float(sum(accQA))/len(accQA), self.n) + self.accuracy['perQuestionType'] = {quesType: round(100*float(sum(accQuesType[quesType]))/len(accQuesType[quesType]), self.n) for quesType in accQuesType} + self.accuracy['perAnswerType'] = {ansType: round(100*float(sum(accAnsType[ansType]))/len(accAnsType[ansType]), self.n) for ansType in accAnsType} + diff --git a/dataset/vqa_eval.py b/dataset/vqa_eval.py new file mode 100644 index 0000000000000000000000000000000000000000..5d69cceadba7cad09330b5a40c12a6d24cf0210a --- /dev/null +++ b/dataset/vqa_eval.py @@ -0,0 +1,691 @@ +# https://github.com/ylsung/VL_adapter/blob/545fcbbdbbaec4c442de35567f6ae477ff4e8265/VL-T5/src/vqa_raw_data.py#L468 + +from torch.utils.data import DataLoader, Dataset, Sampler +from pathlib import Path +from collections import defaultdict +import json +import random +from multiprocessing import Pool +import h5py +import pickle +import math +from tqdm import tqdm +import torch +import numpy as np +from copy import deepcopy +import re +from PIL import Image + + +# from torch.utils.data.distributed import DistributedSampler + +# from transformers import T5TokenizerFast, BartTokenizer +# from tokenization import VLT5TokenizerFast +# from vis_encoder import _transform + +# from torchvision.transforms import ( +# Compose, Resize, CenterCrop, ToTensor, Normalize, RandomCrop, RandomHorizontalFlip, RandomErasing +# ) + + +project_dir = Path(__file__).resolve().parent.parent # VLT5 +workspace_dir = project_dir.parent +# dataset_dir = workspace_dir.joinpath('datasets/').resolve() +# coco_dir = dataset_dir.joinpath('COCO') +# vg_dir = dataset_dir.joinpath('VG') +# coco_img_dir = coco_dir.joinpath('images/') +# coco_feature_dir = coco_dir.joinpath('clip_features') +# vqa_dir = dataset_dir.joinpath('vqa') + + +# def augmentation_transform(image_size): +# return Compose([ +# Resize(image_size, interpolation=Image.BICUBIC), +# RandomHorizontalFlip(), +# RandomCrop(image_size, padding=int(image_size[0]*0.0625), padding_mode='reflect'), +# lambda image: image.convert("RGB"), +# ToTensor(), +# Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)), +# RandomErasing(), +# ]) + +# class 
VQAFineTuneDataset(Dataset): +# def __init__(self, split='train', raw_dataset=None, rank=-1, topk=-1, verbose=True, args=None, mode='train'): +# super().__init__() + +# self.raw_dataset = raw_dataset +# self.topk = topk +# self.verbose = verbose +# self.args = args + +# self.mode = mode + +# # Loading datasets to data +# self.sources = split.split(',') +# if self.verbose: +# print('Data sources: ', self.sources) + +# if 't5' in self.args.backbone: +# if self.args.use_vision: +# self.tokenizer = VLT5TokenizerFast.from_pretrained( +# args.backbone, +# max_length=self.args.max_text_length, +# do_lower_case=self.args.do_lower_case) +# else: +# self.tokenizer = T5TokenizerFast.from_pretrained( +# args.backbone, +# max_length=self.args.max_text_length, +# do_lower_case=self.args.do_lower_case) + +# elif 'bart' in self.args.backbone: +# self.tokenizer = BartTokenizer.from_pretrained( +# args.backbone, +# # max_length=self.args.max_text_length, +# do_lower_case=self.args.do_lower_case) + +# if args.use_vis_order_embedding: +# additional_special_tokens = [f'' for i in range(100-1, -1, -1)] + \ +# [f'' for i in range(100-1, -1, -1)] +# special_tokens_dict = {'additional_special_tokens': additional_special_tokens} +# num_added_toks = self.tokenizer.add_special_tokens(special_tokens_dict) + +# self.answer_normalizer = VQAEvaluator() + +# self.img_ids_to_source = {} +# data_info_dicts = [] +# for source in self.sources: +# data_info_path = dataset_dir.joinpath(f'vqa/{source}.json') +# with open(data_info_path) as f: +# _data_info_dicts = json.load(f) +# for _d in _data_info_dicts: +# if 'vg_qa_full' == source: +# self.img_ids_to_source[_d['img_id']] = 'vg' +# elif 'train2014' in _d['img_id']: +# self.img_ids_to_source[_d['img_id']] = 'train2014' +# elif 'val2014' in _d['img_id']: +# self.img_ids_to_source[_d['img_id']] = 'val2014' +# elif 'test2014' in _d['img_id']: +# self.img_ids_to_source[_d['img_id']] = 'test2014' +# else: +# self.img_ids_to_source[_d['img_id']] = source +# _d['source'] = source + +# data_info_dicts.extend(_data_info_dicts) +# if self.verbose: +# print(f"Loaded {len(_data_info_dicts)} data from", source) + +# data = data_info_dicts + +# self.n_gpus = torch.cuda.device_count() + +# self.rank = rank + +# if isinstance(self.topk, float) and (0 < self.topk <= 1): +# used_samples = int(self.topk * len(data)) +# data = random.sample(data, used_samples) +# if self.verbose: +# print(f"Use only {len(data)} data") + +# elif self.topk > 0: +# data = data[:int(self.topk)] +# if self.verbose: +# print(f"Use only {len(data)} data") + +# self.data = data + +# if self.verbose: +# print("# all sentences:", len(self.data)) + +# self.n_boxes = args.n_boxes + +# self.image_size = eval(self.args.image_size) + +# if mode == "train" and self.args.use_data_augmentation: +# self.transform = augmentation_transform(self.image_size) +# else: +# self.transform = _transform(self.image_size) + +# self.source_to_h5 = { +# 'train2014': coco_img_dir.joinpath(f'train2014'), +# 'val2014': coco_img_dir.joinpath(f'val2014'), +# 'test2014': coco_img_dir.joinpath(f'test2014'), +# } + +# def __len__(self): +# return len(self.data) + +# def __getitem__(self, idx): + +# out_dict = {} +# out_dict['args'] = self.args + +# datum = self.data[idx] + +# ###### Image ###### +# img_id = datum['img_id'] +# out_dict['img_id'] = img_id + +# source = self.img_ids_to_source[img_id] + +# path = self.source_to_h5[source].joinpath(f"{img_id}.jpg") + +# image = Image.open(path) + +# out_dict["image"] = self.transform(image) + +# # boxes = 
torch.zeros(feats.shape[0], 4) # (L, 4) +# # out_dict['boxes'] = boxes + +# ###### Text ##### +# # caption = datum['caption'] +# if 'sent' in datum: +# sent = datum['sent'] +# elif 'question' in datum: +# sent = datum['question'] + +# input_ids = self.tokenizer.encode(f'{self.args.prompt}{sent}{self.args.post_prompt}', max_length=20, truncation=True) + +# question_id = datum['question_id'] +# out_dict['question_id'] = question_id + + +# out_dict['sent'] = sent +# out_dict['input_ids'] = torch.LongTensor(input_ids) +# out_dict['input_length'] = len(input_ids) +# # out_dict['target_ids'] = torch.LongTensor(target_ids) +# # out_dict['target_length'] = len(target_ids) + +# if 'is_topk_optimal' in datum: +# out_dict['is_topk_optimal'] = datum['is_topk_optimal'] + +# if 'label' in datum: +# label = datum['label'] +# out_dict['label'] = label + +# # 3129 topk answers +# if self.args.classifier: +# target = torch.zeros(self.raw_dataset.num_answers) +# for ans, score in label.items(): +# target[self.raw_dataset.ans2label[ans]] = score +# out_dict['target'] = target + +# elif self.args.raw_label: + +# # 10 raw answers +# # ex) 'answers': [{'answer': 'net', 'answer_confidence': 'maybe', 'answer_id': 1}, +# # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 2}, +# # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 3}, +# # {'answer': 'netting', 'answer_confidence': 'yes', 'answer_id': 4}, +# # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 5}, +# # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 6}, +# # {'answer': 'mesh', 'answer_confidence': 'maybe', 'answer_id': 7}, +# # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 8}, +# # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 9}, +# # {'answer': 'net', 'answer_confidence': 'yes', 'answer_id': 10}], + +# answers = datum['answers'] +# answer = random.choice(answers)['answer'] + +# if self.args.answer_normalize: +# answer = self.answer_normalizer.normalize_answer(answer) + +# score = int(len(answers) > 0) + +# out_dict['answer'] = answer +# out_dict['score'] = score +# out_dict['all_answers'] = [a['answer'] for a in answers] + +# target_ids = self.tokenizer.encode(answer, max_length=10, truncation=True) + +# out_dict['target_ids'] = torch.LongTensor(target_ids) +# out_dict['target_length'] = len(target_ids) + +# else: +# # https://github.com/airsplay/lxmert/blob/master/src/pretrain/lxmert_pretrain.py#L191 + +# answers = [] +# scores = [] +# for a, s in label.items(): +# answers.append(a) +# scores.append(s) + +# score_sum = sum(scores) + +# if score_sum == 0: +# answer = '' +# score = 0. 
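+#                 # all soft label scores are zero for this question, so fall
+#                 # back to an empty target answer (the else branch below samples
+#                 # one answer with probability proportional to its VQA score)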
+# else: +# prob = [score / score_sum for score in scores] +# choice = np.random.multinomial(1, prob).argmax() +# answer = answers[choice] +# score = scores[choice] +# assert len(answer) > 0, (sent, label, choice, answer) + +# out_dict['answer'] = answer +# out_dict['score'] = score +# out_dict['all_answers'] = answers + + +# target_ids = self.tokenizer.encode(answer, max_length=10, truncation=True) + +# out_dict['target_ids'] = torch.LongTensor(target_ids) +# out_dict['target_length'] = len(target_ids) + +# return out_dict + + +# def collate_fn(self, batch): +# batch_entry = {} + +# args = batch[0]['args'] + +# B = len(batch) + +# S_W_L = max(entry['input_length'] for entry in batch) +# input_ids = torch.ones(B, S_W_L, dtype=torch.long) * self.tokenizer.pad_token_id + +# if 'target' in batch[0]: +# # targets = [] +# targets = torch.zeros(B, len(batch[0]['target']), dtype=torch.float) +# if 'target_ids' in batch[0]: +# T_W_L = max(entry['target_length'] for entry in batch) +# target_ids = torch.ones(B, T_W_L, dtype=torch.long) * self.tokenizer.pad_token_id + +# sentences = [] +# question_ids = [] +# answers = [] +# all_answers = [] +# img_ids = [] +# img_paths = [] +# labels = [] +# scores = [] +# is_topk_optimal = [] +# images = [] + +# for i, entry in enumerate(batch): +# input_ids[i, :entry['input_length']] = entry['input_ids'] + +# images.append(entry["image"]) +# # img_ids.append(entry['img_id']) +# # img_paths.append(entry['img_path']) + +# if 'target_ids' in entry: +# target_ids[i, :entry['target_length']] = entry['target_ids'] + +# if 'target' in entry: +# targets[i] += entry['target'] +# # targets.append(entry['target']) + +# sentences.append(entry['sent']) +# question_ids.append(entry['question_id']) +# if 'answer' in entry: +# answers.append(entry['answer']) +# if 'all_answers' in entry: +# all_answers.append(entry['all_answers']) +# if 'score' in entry: +# scores.append(entry['score']) + +# if 'label' in entry: +# labels.append(entry['label']) + +# if 'is_topk_optimal' in entry: +# is_topk_optimal.append(entry['is_topk_optimal']) + +# batch_entry['input_ids'] = input_ids +# if 'target_ids' in batch[0]: +# word_mask = target_ids != self.tokenizer.pad_token_id +# target_ids[~word_mask] = -100 +# batch_entry['target_ids'] = target_ids +# if 'target' in batch[0]: +# # targets = torch.stack(targets, dim=0) +# batch_entry['targets'] = targets + +# # batch_entry['img_id'] = img_ids +# # batch_entry['img_paths'] = img_paths + +# batch_entry['sent'] = sentences +# batch_entry['question_ids'] = question_ids +# batch_entry['answers'] = answers +# batch_entry['all_answers'] = all_answers +# batch_entry['scores'] = torch.FloatTensor(scores) +# batch_entry['labels'] = labels + +# batch_entry['args'] = args +# batch_entry['task'] = 'vqa' +# batch_entry['images'] = torch.stack(images) + +# return batch_entry + + +# def get_loader(args, split='karpathy_train', mode='train', +# batch_size=32, workers=4, distributed=False, gpu=0, topk=-1): + +# verbose = (gpu == 0) + +# _dset = VQADataset(split, verbose) + +# dataset = VQAFineTuneDataset( +# split, +# raw_dataset=_dset, +# rank=gpu, +# topk=topk, +# verbose=verbose, +# args=args, +# mode=mode) + +# if distributed: +# sampler = DistributedSampler(dataset) +# else: +# sampler = None + +# if mode == 'train': +# loader = DataLoader( +# dataset, batch_size=batch_size, shuffle=(sampler is None), +# num_workers=workers, pin_memory=True, sampler=sampler, +# collate_fn=dataset.collate_fn) +# else: +# loader = DataLoader( +# dataset, +# 
batch_size=batch_size, +# num_workers=workers, pin_memory=True, +# sampler=sampler, +# shuffle=None if (sampler is not None) else False, +# collate_fn=dataset.collate_fn, +# drop_last=False) + +# if verbose: +# loader.evaluator = VQAEvaluator(_dset) + +# loader.task = 'vqa' + +# return loader + + +class VQADataset: + """ + A VQA data example in json file: + { + "answer_type": "other", + "img_id": "COCO_train2014_000000458752", + "label": { + "net": 1 + }, + "question_id": 458752000, + "question_type": "what is this", + "sent": "What is this photo taken looking through?" + } + """ + + def __init__(self, splits: str, verbose=True, data_dir=None): + self.name = splits + self.splits = splits.split(',') + + dataset_dir = Path(data_dir) + coco_dir = dataset_dir.joinpath('COCO') + vg_dir = dataset_dir.joinpath('VG') + coco_img_dir = coco_dir.joinpath('images/') + coco_feature_dir = coco_dir.joinpath('features') + vqa_dir = dataset_dir.joinpath('vqa') + + + with open(dataset_dir.joinpath(f'vqa/v2_mscoco_train2014_annotations.json')) as f: + train2014_data = json.load(f) + with open(dataset_dir.joinpath(f'vqa/v2_mscoco_val2014_annotations.json')) as f: + val2014_data = json.load(f) + train2014_id2datum = {} + for datum in train2014_data['annotations']: + qid = datum['question_id'] + train2014_id2datum[qid] = datum + val2014_id2datum = {} + for datum in val2014_data['annotations']: + qid = datum['question_id'] + val2014_id2datum[qid] = datum + self.id2datum_gt = {**train2014_id2datum, **val2014_id2datum} + + # Loading datasets + self.data = [] + for split in self.splits: + self.data.extend( + json.load(open(vqa_dir.joinpath("%s.json" % split)))) + + if verbose: + print("Load %d data from split(s) %s." % + (len(self.data), self.name)) + + # Convert list to dict (for evaluation) + self.id2datum = { + datum['question_id']: datum + for datum in self.data + } + + # Topk Answers + self.ans2label = json.load( + open(vqa_dir.joinpath("trainval_ans2label.json"))) + self.label2ans = json.load( + open(vqa_dir.joinpath("trainval_label2ans.json"))) + assert len(self.ans2label) == len(self.label2ans) + + if verbose: + print('# Answers:', len(self.ans2label)) + + @property + def num_answers(self): + return len(self.ans2label) + + def __len__(self): + return len(self.data) + + +class VQAEvaluator: + def __init__(self, dataset: VQADataset = None): + self.dataset = dataset + + """https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py""" + + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", "couldnt": "couldn't", \ + "couldn'tve": "couldn't've", "couldnt've": "couldn't've", "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", \ + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", "hed": "he'd", "hed've": "he'd've", \ + "he'dve": "he'd've", "hes": "he's", "howd": "how'd", "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", \ + "Im": "I'm", "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", "itll": "it'll", "let's": "let's", \ + "maam": "ma'am", "mightnt": "mightn't", "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", \ + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", "oclock": "o'clock", "oughtnt": "oughtn't", \ + "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", \ + 
"she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", \ + "somebody'd": "somebodyd", "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", "somebodyll": "somebody'll", \ + "somebodys": "somebody's", "someoned": "someone'd", "someoned've": "someone'd've", "someone'dve": "someone'd've", \ + "someonell": "someone'll", "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", \ + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", "thered": "there'd", "thered've": "there'd've", \ + "there'dve": "there'd've", "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", \ + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", "twas": "'twas", "wasnt": "wasn't", \ + "wed've": "we'd've", "we'dve": "we'd've", "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", \ + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", "wheres": "where's", "whereve": "where've", \ + "whod": "who'd", "whod've": "who'd've", "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", "whyll": "why'll", \ + "whyre": "why're", "whys": "why's", "wont": "won't", "wouldve": "would've", "wouldnt": "wouldn't", "wouldnt've": "wouldn't've", \ + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", "yall'd've": "y'all'd've", \ + "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", \ + "youll": "you'll", "youre": "you're", "youve": "you've"} + + self.manualMap = { 'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + self.n = 2 + + def evaluate(self, quesid2ans: dict): + score = 0. + for quesid, ans in quesid2ans.items(): + datum = self.dataset.id2datum[quesid] + label = datum['label'] + if ans in label: + score += label[ans] + return score / len(quesid2ans) + + def dump_result(self, quesid2ans: dict, path): + """ + Dump results to a json file, which could be submitted to the VQA online evaluation. + VQA json file submission requirement: + results = [result] + result = { + "question_id": int, + "answer": str + } + :param quesid2ans: dict of quesid --> ans + :param path: The desired path of saved file. 
+ """ + with open(path, 'w') as f: + result = [] + for ques_id, ans in quesid2ans.items(): + result.append({ + 'question_id': ques_id, + 'answer': ans + }) + json.dump(result, f, indent=4, sort_keys=True) + + def evaluate_raw(self, quesid2ans: dict, is_topk_optimal=None): + """https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py""" + + gts = self.dataset.id2datum_gt + + self.accuracy = {} + self.evalQA = {} + self.evalQuesType = {} + self.evalAnsType = {} + + accQA = [] + accQuesType = {} + accAnsType = {} + + # print("Computing accuracy") + + for quesId, resAns in tqdm(quesid2ans.items(), total=len(quesid2ans), ncols=80): + + quesId = int(quesId) + + # datum = self.dataset.id2datum[quesId] + + # if is_topk_optimal is None: + # pass + # elif 'is_topk_optimal' in datum: + # if datum['is_topk_optimal'] != is_topk_optimal: + # continue + + resAns = resAns.replace('\n', ' ') + resAns = resAns.replace('\t', ' ') + resAns = resAns.strip() + resAns = self.processPunctuation(resAns) + resAns = self.processDigitArticle(resAns) + + gtAcc = [] + gtAnswers = [ans['answer'] for ans in gts[quesId]['answers']] + + + if len(set(gtAnswers)) > 1: + for ansDic in gts[quesId]['answers']: + ansDic['answer'] = self.processPunctuation(ansDic['answer']) + for gtAnsDatum in gts[quesId]['answers']: + otherGTAns = [item for item in gts[quesId]['answers'] if item!=gtAnsDatum] + matchingAns = [item for item in otherGTAns if item['answer']==resAns] + acc = min(1, float(len(matchingAns))/3) + gtAcc.append(acc) + quesType = gts[quesId]['question_type'] + ansType = gts[quesId]['answer_type'] + avgGTAcc = float(sum(gtAcc))/len(gtAcc) + accQA.append(avgGTAcc) + if quesType not in accQuesType: + accQuesType[quesType] = [] + accQuesType[quesType].append(avgGTAcc) + if ansType not in accAnsType: + accAnsType[ansType] = [] + accAnsType[ansType].append(avgGTAcc) + + self.setEvalQA(quesId, avgGTAcc) + self.setEvalQuesType(quesId, quesType, avgGTAcc) + self.setEvalAnsType(quesId, ansType, avgGTAcc) + + + if len(accQA) == 0: + return { + 'overall': 0, + 'perQuestionType': {}, + 'perAnswerType': {} + } + else: + self.setAccuracy(accQA, accQuesType, accAnsType) + + return self.accuracy + + def normalize_answer(self, resAns): + resAns = resAns.replace('\n', ' ') + resAns = resAns.replace('\t', ' ') + resAns = resAns.strip() + resAns = self.processPunctuation(resAns) + resAns = self.processDigitArticle(resAns) + resAns = resAns.replace(',', '') + return resAns + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", + outText, + re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText + + def setEvalQA(self, quesId, acc): + self.evalQA[quesId] = round(100*acc, self.n) + + def setEvalQuesType(self, quesId, quesType, acc): + if quesType not in self.evalQuesType: + self.evalQuesType[quesType] = {} + self.evalQuesType[quesType][quesId] = round(100*acc, self.n) + + def setEvalAnsType(self, quesId, 
ansType, acc):
+        if ansType not in self.evalAnsType:
+            self.evalAnsType[ansType] = {}
+        self.evalAnsType[ansType][quesId] = round(100*acc, self.n)
+
+    def setAccuracy(self, accQA, accQuesType, accAnsType):
+        self.accuracy['overall']         = round(100*float(sum(accQA))/len(accQA), self.n)
+        self.accuracy['perQuestionType'] = {quesType: round(100*float(sum(accQuesType[quesType]))/len(accQuesType[quesType]), self.n) for quesType in accQuesType}
+        self.accuracy['perAnswerType']   = {ansType:  round(100*float(sum(accAnsType[ansType]))/len(accAnsType[ansType]), self.n) for ansType in accAnsType}
+
diff --git a/docs/datasets.md b/docs/datasets.md
new file mode 100644
index 0000000000000000000000000000000000000000..afea5b7bcff92e83bb1cc6a38fa02ec4b93cc7a9
--- /dev/null
+++ b/docs/datasets.md
@@ -0,0 +1,72 @@
+
+### Data
+You can download the data JSON files from [data](https://nuage.isir.upmc.fr/index.php/s/ACRfZgaZTp9boZ8).
+These files contain annotations coming from several public datasets, and their use is bound by the corresponding licenses.
+
+To download the images, videos, and audio files, please go to the website of each dataset.
+
+
+Our `data/` directory is organized as follows:
+```
+data/
+    vqa/
+        karpathy_train.json
+        karpathy_val.json
+        karpathy_test.json
+
+        val_standard.json
+        train_standard.json
+
+    COCO/
+        dataset_coco.json
+
+        images/
+            train2014
+            val2014
+    GQA/
+        train.json
+        testdev.json
+        valid.json
+
+        images/
+            n356822.jpg
+            ...
+    VG/
+        VG_100K/
+
+    audiocaps/
+        annotation/
+            audiocaps_caption_train
+            audiocaps_caption_val
+            audiocaps_caption_test
+        audios/
+            train/
+                --L22BmDI6E.wav
+                ...
+            test/
+            val/
+
+    MSRVTT/
+        annotation/
+            msrvtt_caption_train7k
+            msrvtt_caption_test
+            msrvtt_vqa_train
+            msrvtt_vqa_val
+            msrvtt_vqa_test
+
+        videos/
+            all/
+                video1000.mp4
+                ...
+
+    MSVD/
+        annotation/
+            msvd_vqa_train.json
+            msvd_vqa_val.json
+            msvd_vqa_test.json
+
+        videos/
+            all/
+                00jrXRMlZOY_0_10.avi
+                ...
+```
\ No newline at end of file
diff --git a/docs/installation.md b/docs/installation.md
new file mode 100644
index 0000000000000000000000000000000000000000..8740281ca7b0d9fdea18e0b8bff04ee49ba72cff
--- /dev/null
+++ b/docs/installation.md
@@ -0,0 +1,33 @@
+## Installation
+
+Main requirements:
+```
+python >= 3.8+
+torch >= 1.12+
+transformers >= 4.24+
+accelerate >= 0.11.0
+```
+
+We recommend creating a conda environment for this project:
+```
+conda create --name epalm python=3.8
+conda activate epalm
+```
+Additional dependencies can be found in `requirements.txt`.
+
+
+To run video tasks, install the dependencies in [TimeSformer](https://github.com/facebookresearch/TimeSformer) (mainly fvcore and simplejson), then install it from `./TimeSformer`:
+```
+cd TimeSformer
+python setup.py build develop
+```
+
+For caption evaluation (CIDEr, BLEU ...) 
you need to install the following packages: +``` +conda install -c bioconda perl-xml-libxml +conda install -c conda-forge openjdk + +pip install git+https://github.com/bckim92/language-evaluation.git +python -c "import language_evaluation; language_evaluation.download('coco')" + +``` \ No newline at end of file diff --git a/ePALM.ipynb b/ePALM.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..b3158c8e4a81c8c34d1619338249aa328a74edd1 --- /dev/null +++ b/ePALM.ipynb @@ -0,0 +1,539 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# eP-ALM: Efficient Perceptual Augmentation of Language Models\n", + "\n", + "This notebook contains some scripts to run the inference with eP-ALM on VQA and Captioning. You can similarly implement the inference on video and audio modalities.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Import" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [], + "source": [ + "import re\n", + "\n", + "import torch\n", + "from torch import nn\n", + "from torchvision import transforms\n", + "\n", + "\n", + "from transformers import AutoModelForCausalLM, AutoTokenizer, AutoConfig\n", + "from accelerate import Accelerator\n", + "from models.opt import OPTModel, OPTConfig, OPTForCausalLM\n", + "import models.vit \n", + "\n", + "from PIL import Image\n", + "import json \n", + "import os\n", + "\n", + "from torchvision import transforms\n", + "\n", + "import argparse \n", + "from transformers.tokenization_utils_base import BatchEncoding\n", + "from models.epalm import ePALM\n", + "\n", + "import os\n", + "import numpy as np\n", + "\n", + "from transformers import AutoTokenizer\n", + "\n", + "import ruamel_yaml as yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Utils" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/tmp/ipykernel_1746605/3827487731.py:5: DeprecationWarning: BICUBIC is deprecated and will be removed in Pillow 10 (2023-07-01). Use Resampling.BICUBIC instead.\n", + " transforms.Resize((image_size,image_size),interpolation=Image.BICUBIC),\n", + "/data/mshukor/envs/opt/lib/python3.8/site-packages/torchvision/transforms/transforms.py:329: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. 
Please use InterpolationMode enum.\n", + " warnings.warn(\n" + ] + } + ], + "source": [ + "image_size = 224\n", + "normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))\n", + "\n", + "transform = transforms.Compose([\n", + " transforms.Resize((image_size,image_size),interpolation=Image.BICUBIC),\n", + " transforms.ToTensor(),\n", + " normalize,\n", + " ]) " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Captioning" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "os.environ['TORCH_HOME'] = '/home/mshukor/.cache/torch'\n", + "\n", + "\n", + "# config = '/home/mshukor/ep-alm/configs/image/ePALM_pt_L_caption.yaml'\n", + "# config = yaml.load(open(config, 'r'), Loader=yaml.Loader)\n", + "\n", + "config = '/home/mshukor/ep-alm/configs/image/ePALM_caption.yaml'\n", + "config = yaml.load(open(config, 'r'), Loader=yaml.Loader)\n", + "\n", + "\n", + "text_model = 'facebook/opt-2.7b' \n", + "vision_model_name = 'vit_base_patch16_224'\n", + "\n", + "# text_model = 'facebook/opt-6.7b' \n", + "# vision_model_name = 'vit_large_patch16_224'\n", + "\n", + "\n", + "start_layer_idx = 19\n", + "end_layer_idx = 31\n", + "low_cpu = True \n", + "model = ePALM(opt_model_name=text_model, \n", + " vision_model_name=vision_model_name, \n", + " use_vis_prefix=True, \n", + " start_layer_idx=start_layer_idx, \n", + " end_layer_idx=end_layer_idx, \n", + " return_hidden_state_vision=True, \n", + " config=config,\n", + " low_cpu=low_cpu\n", + ")\n", + "print(\"done\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "device = torch.device(\"cuda\")\n", + "model.to(device)\n", + "print(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 96, + "metadata": {}, + "outputs": [], + "source": [ + "checkpoint_path = '/data/mshukor/logs/eplam/models/float32/ePALM_caption/checkpoint_best.pth'\n", + "# checkpoint_path = '/data/mshukor/logs/eplam/models/accelerate/ePALM_pt_L_acc_caption/checkpoint_best.pth'\n", + "checkpoint = torch.load(checkpoint_path, map_location='cpu') \n", + "state_dict = checkpoint['model']\n", + "msg = model.load_state_dict(state_dict,strict=False) " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "tokenizer = AutoTokenizer.from_pretrained(text_model, use_fast=False)\n", + "eos_token = tokenizer.eos_token\n", + "pad_token = tokenizer.pad_token" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Eval" + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAfQAAAF3CAIAAADckC6rAAAKMWlDQ1BJQ0MgUHJvZmlsZQAAeJydlndUU9kWh8+9N71QkhCKlNBraFICSA29SJEuKjEJEErAkAAiNkRUcERRkaYIMijggKNDkbEiioUBUbHrBBlE1HFwFBuWSWStGd+8ee/Nm98f935rn73P3Wfvfda6AJD8gwXCTFgJgAyhWBTh58WIjYtnYAcBDPAAA2wA4HCzs0IW+EYCmQJ82IxsmRP4F726DiD5+yrTP4zBAP+flLlZIjEAUJiM5/L42VwZF8k4PVecJbdPyZi2NE3OMErOIlmCMlaTc/IsW3z2mWUPOfMyhDwZy3PO4mXw5Nwn4405Er6MkWAZF+cI+LkyviZjg3RJhkDGb+SxGXxONgAoktwu5nNTZGwtY5IoMoIt43kA4EjJX/DSL1jMzxPLD8XOzFouEiSniBkmXFOGjZMTi+HPz03ni8XMMA43jSPiMdiZGVkc4XIAZs/8WRR5bRmyIjvYODk4MG0tbb4o1H9d/JuS93aWXoR/7hlEH/jD9ld+mQ0AsKZltdn6h21pFQBd6wFQu/2HzWAvAIqyvnUOfXEeunxeUsTiLGcrq9zcXEsBn2spL+jv+p8Of0NffM9Svt3v5WF485M4knQxQ143bmZ6pkTEyM7icPkM5p+H+B8H/nUeFhH8JL6IL5RFRMumTCBMlrVbyBOIBZlChkD4n5r4D8P+pNm5lona+BHQllgCpSEaQH4eACgqESAJe2Qr0O99C8ZHA/nNi9GZmJ37z4L+fVe4TP7IFiR/jmNHRDK4ElHO7Jr8WgI0IABFQAPqQBvoAxPABLbAEbgAD+ADAkEoiARxYDHgghSQAUQgFxSAtaAYlIKtYCeoBnWgETSDNnAYdIFj4DQ4By6By2AE3AFSMA6egCnwCsxAEISFyBAVUod0IEPIHLKFWJAb5AMFQxFQHJQIJUNCSAIVQOugUqgcqobqoWboW+godBq6AA1Dt6BRaBL6FXoHIzAJpsFasBFsBbNgTzgIjoQXwcnwMjgfLoK3wJVwA3wQ7oRPw5fgEVgKP4GnEYAQETqiizARFsJGQpF4JAkRIauQEqQCaUDakB6kH7mKSJGnyFsUBkVFMVBMlAvKHxWF4qKWoVahNqOqUQdQnag+1FXUKGoK9RFNRmuizdHO6AB0LDoZnYsuRlegm9Ad6LPoEfQ4+hUGg6FjjDGOGH9MHCYVswKzGbMb0445hRnGjGGmsVisOtYc64oNxXKwYmwxtgp7EHsSewU7jn2DI+J0cLY4X1w8TogrxFXgWnAncFdwE7gZvBLeEO+MD8Xz8MvxZfhGfA9+CD+OnyEoE4wJroRIQiphLaGS0EY4S7hLeEEkEvWITsRwooC4hlhJPEQ8TxwlviVRSGYkNimBJCFtIe0nnSLdIr0gk8lGZA9yPFlM3kJuJp8h3ye/UaAqWCoEKPAUVivUKHQqXFF4pohXNFT0VFysmK9YoXhEcUjxqRJeyUiJrcRRWqVUo3RU6YbStDJV2UY5VDlDebNyi/IF5UcULMWI4kPhUYoo+yhnKGNUhKpPZVO51HXURupZ6jgNQzOmBdBSaaW0b2iDtCkVioqdSrRKnkqNynEVKR2hG9ED6On0Mvph+nX6O1UtVU9Vvuom1TbVK6qv1eaoeajx1UrU2tVG1N6pM9R91NPUt6l3qd/TQGmYaYRr5Grs0Tir8XQObY7LHO6ckjmH59zWhDXNNCM0V2ju0xzQnNbS1vLTytKq0jqj9VSbru2hnaq9Q/uE9qQOVcdNR6CzQ+ekzmOGCsOTkc6oZPQxpnQ1df11Jbr1uoO6M3rGelF6hXrtevf0Cfos/ST9Hfq9+lMGOgYhBgUGrQa3DfGGLMMUw12G/YavjYyNYow2GHUZPTJWMw4wzjduNb5rQjZxN1lm0mByzRRjyjJNM91tetkMNrM3SzGrMRsyh80dzAXmu82HLdAWThZCiwaLG0wS05OZw2xljlrSLYMtCy27LJ9ZGVjFW22z6rf6aG1vnW7daH3HhmITaFNo02Pzq62ZLde2xvbaXPJc37mr53bPfW5nbse322N3055qH2K/wb7X/oODo4PIoc1h0tHAMdGx1vEGi8YKY21mnXdCO3k5rXY65vTW2cFZ7HzY+RcXpkuaS4vLo3nG8/jzGueNueq5clzrXaVuDLdEt71uUnddd457g/sDD30PnkeTx4SnqWeq50HPZ17WXiKvDq/XbGf2SvYpb8Tbz7vEe9CH4hPlU+1z31fPN9m31XfKz95vhd8pf7R/kP82/xsBWgHcgOaAqUDHwJWBfUGkoAVB1UEPgs2CRcE9IXBIYMj2kLvzDecL53eFgtCA0O2h98KMw5aFfR+OCQ8Lrwl/GGETURDRv4C6YMmClgWvIr0iyyLvRJlESaJ6oxWjE6Kbo1/HeMeUx0hjrWJXxl6K04gTxHXHY+Oj45vipxf6LNy5cDzBPqE44foi40V5iy4s1licvvj4EsUlnCVHEtGJMYktie85oZwGzvTSgKW1S6e4bO4u7hOeB28Hb5Lvyi/nTyS5JpUnPUp2Td6ePJninlKR8lTAFlQLnqf6p9alvk4LTduf9ik9Jr09A5eRmHFUSBGmCfsytTPzMoezzLOKs6TLnJftXDYlChI1ZUPZi7K7xTTZz9SAxESyXjKa45ZTk/MmNzr3SJ5ynjBvYLnZ8k3LJ/J9879egVrBXdFboFuwtmB0pefK+lXQqqWrelfrry5aPb7Gb82BtYS1aWt/KLQuLC98uS5mXU+RVtGaorH1futbixWKRcU3NrhsqNuI2ijYOLhp7qaqTR9LeCUXS61LK0rfb+ZuvviVzVeVX33akrRlsMyhbM9WzFbh1uvb3LcdKFcuzy8f2x6yvXMHY0fJjpc7l+y8UGFXUbeLsEuyS1oZXNldZVC1tep9dUr1SI1XTXutZu2m2te7ebuv7PHY01anVVda926vYO/Ner/6zgajhop9mH05+x42Rjf2f836urlJo6m06cN+4X7pgYgDfc2Ozc0tmi1lrXCrpHXyYMLBy994f9Pdxmyrb6e3lx4ChySHHn+b+O31w0GHe4+wjrR9Z/hdbQe1o6QT6lzeOdWV0iXtjusePhp4tLfHpafje8vv9x/TPVZzXOV42QnCiaITn07mn5w+lXXq6enk02O9S3rvnIk9c60vvG/wbNDZ8+d8z53p9+w/ed71/LELzheOXmRd7LrkcKlzwH6g4wf7HzoGHQY7hxyHui87Xe4Znjd84or7ldNXva+euxZw7dLI/JHh61HXb95IuCG9ybv56Fb6ree3c27P3FlzF3235J7SvYr7mvcbfjT9sV3qID0+6j068GDBgztj3LEnP2X/9H686CH5YcWEzkTzI9tHxyZ9Jy8/Xvh4/EnWk5mnxT8r/1z7zOTZd794/DIwFTs1/lz0/NOvm1+ov9j/0u5l73TY9P1XGa9mXpe8UX9z4C3rbf+7mHcTM7nvse8rP5h+6PkY9PHup4xPn34D94Tz+6TMXDkAAQAASURBVHicnP
15uC3ZUR2Ir4jYOzPPOXd+c82zpCpJpQGBZiQQSAgQGDBgM+PGbmNMY7DbtN3A72eD8Qg2BgzG2GBjG8RohBASQgOaS3NVqUpVqvnVm4c7nSEz946I/mPnue8JrDbq/N736ta9756TZ+feMaxYsYJ++OtuFUhOCiBWAcJmRkTBAguY2d3dDQAREZG7ugIACYPd3YkoCLG6ERNzJnd3ESFzcgSOZiZERG4EY1N3EuaciYQgROTu6gZAApEDADlfeTsqt6EchIiSZSISCYHKrSJSyLUcX6m++lu/p7rtFSkLSwM4qIFHcYI7hNVNyICAv8Dl7ssvZszjRTr9qz/xclzYgtcGIEeTuXClblk75gAX4dj1M6YKALE7Ac7uDnOAnbOZuSvA7MJE7soww7CYRKRuZT2Z2Z3K7w7fAZW7Kguk6syh/MjMBKScYc7ORuyAwYURwR2UDeU1jZHNyJ1BEcGZes3OROZRAnJyM40QCjByIwDq5mQhsmZ3dyFmZjdzo8BMRFmyq6mqiMCcmdlBROpinolcJLqRuoF6kEYfGcHdvaySUfkU2ROUmhCTqQUGjAHO2SkSRN0J4lAzC0HMMsBlexCMmYmIhpdydzeCo2whOXimZhmuMdZWLncJZE5GDCdisBs52MiNlcDixOrupgDEAYeRGzMDbGbuXvakwpGUiEIlqgqgPB0GJe3LuzMHZiaHu7ujTymEAEBVmTmEwESmcOhy7zkzL+/cPABgUsCI2FHenUXLP3M3hYj0OQGIse7NxY1Jmd0JCiKFu/ScKhFTdXd2iBAAEenVxCDEYDZ3BQmIQQlZCMwgByBlEzp5IAOP3CnFjI7G1/H3/ui7FJkhxVb8RU7Z/8vpMyfxdsFtQyvzi+/7w5/+Z09v7wcRJFNVB4xgNtgld3d3EoY5AHY2Qps11hXnnoTNlifahw1M3h88YIMSwM5uxFKOnJgZc4CaMKBGITrXH/rk2cdOzc5PL1x3uP76L72hT0RGRAEeAIAyYIATMSd0UVfbeGibbnzWtSduODGdtWtra94lTCqWUTSrR6uXfG8fZ2+/+ZmT9a3ch+nOTnRf4VUi6mUqE6PQiI6iVYhom51IlXerHnfNO5+ukJoQVJ0mTdawc+o8ckuHOaxsdk9POTjBtBKJzJ40KEZcsQJk7m6mgNNwOWBmEJGyL92JmYXYFM6iqqoqoEDMDnIwByU1qJJlhlH56Cg7khnE7mwQJ3YmZ5CXXcE+bCMfvmBmmLsaO5EBaq5GzoSqTfOY9Oxu/64/fhMuPUQh9t6LG8OBDMoQmAPOAH+++4x53GYdhWu++A1/b+aPMUbEGSEx1WrZYTGMhausXcqLGEYEAxkAcsCVHEzE5KTOcAYV+0jMJOzCAPvyIDCIQWywPrsabFj5YuIPlsKdyuEpRg2AkfFV1+AMnAEMb0dUfj2UdQcy3NwDSwRHFoI5kwtfvexEJAwGkSMIMTnIQMYMFhiZWS7HqdxGuVsjNmLixGxEbpbNDCCmhjFSuA2vXvza4LVERBjlRdgBMDkIAphDyZ3Yyqk3yw41eFk3BwPsTtlNnYgEEojkz9gXU2KOwrUpNAMuzBEeBBQU4mAtUYxnhgUQZ7Os2d2EULk7QSXoEO6oEZGIMMjd6aonBWB4d3MzixICS2Ahh2vxKX5guMvGJiK3YrN0+UyH2GK5sLxclmHdDG5wdQO5WlbNDmVBECJy1U4kgZK7GcgMpg4gBkQETR4QmzBiD2SRvcodSEHEAJbxxmA3lSpFzGBlVoISPDgFBxp14TitQjXtTr74td8Mg3n6fM/X1ddVO1yJ1aSvtCZsP/aOP3ly56Ihtl2X3DJDGUrsEoxFiZV4iIGCFAfPzFEo52zwclKGBYc61AEnNpApzIwAdrirW4YHeLBk7Kx9cmjO2chSWrgmwEEmEgF245wNgLvCO3hHUCIiNOYjqmtjdhbP3rcJ5nWsAoLUTeRYhcCx8sBNiIcma3Ws5pbDqGJms1xVwSxzJT17JU2sJOceFAhBLYVA7pS63EgF8y71HGPZNn3fj0Yj4caUutyzMjywsSFAYlC3pDlIRYSDHXb1xcwiUiwzOQAGGO7mBCo7y2DOIIa4DrEeCTOzhOE8BD8IRdVdiVwIg/sAyvMAu7ti8MxKzuWnxQKSkivIISCWhjQb2UOf/vTD7/+jgIsGdTPzPnmr6AAjAn8+hn04q4AZJESAn/H877nlhXcptpkDseVsak5EJVKr65qZTXW5MgbTwb6bkrmAIrEQlw9eLoOTcIkuXc3VxMFAAJGDHDCH6dXLX/zTsCbk7sp8JXgpx4NpsOi6fILFKBZvIcSAlbiWidicTFXVyYyNiABzMpARe/k3y3jzyoswgxmQkkyQiIiUJIzMLOfsVsEromhO6hmUQD04mV8xW8v1cXYcpCblbzKHMxGJUPEl5R1Zyl25Bzg7SSASL3kGCQm0ZAWDI+eDDKwYZYPnnImofHx371yNAR6cCjvIFKrMoVhrQnb05aag0YstYBZilGCnLAj58jwsnx3gXlwQlzzj4AMCoMjGTgwWYhke3+Aq6CBL9uILiAglyhp2Jg4c+bBiBHcz05K0EJzRE3LZYgZaemtUjEhOUIIWV13+LAO44jCNHWUbBCSiJO5iEGOxIFoHazKEpE+ZuUvjY/HZX/BXQE6ff/BUvMif/47Z3B0caPr4h+77yAcNlYMDV8tIiIWIASFiZzIikhL0XDkFzDAdbPqBUWImIiNzqZyEiAKF4MMjY4YZyo5mImEGYFCDs5AIixA53KAZ7uTGwhOmhigKpCydUmfUThdzz25dDhxHVU1EfU5qqFbGMdaRq+SAiAhMtes6aiKA/d2d8WhEdTDSelzTqCYXz0kqMXhWcnfTubuTRZCoWxyNlRCaqu97ZnYmt9AvrO0XbByIgwkZGQkMni378mEDZTuVKKPsah42YgkSi8Gl4KArcSNAzig7VTOXYEkzsvHyjzvB6OAwl1MhGLaXE4jIqRwKwhCqlP+XkvqVo2V5Aa56pzFlovqDH/zg3gNvH3N2MoMxExO7WdlCB0frL3gRkTOLmVpiVC957Y/39UMVT8wgEfAAiywOUrhYdvPOyZyGqAc4ODPuUBykqweG2Jy8mINhVdW9mJ6yKc1yiUSIaGmqrpit4SrvaAMcMdz5QQB+VdSPkk/AmNk0qSV3VWj5LSouAQfHnACwQ0BkTs5CHFiIaIlqDE+t/AEAEnN3FCOZHcnJiJ0FcHaLniIXj7J0GACMBl9VFqDcJDngTiTL91JclbuAaQgtHcXyEw137jakLAeXuwNGnN2zIzlnsLlnR+9I5rT0B8qOpacxMycSZgaMkMurm7kpCMLMZua6hAXUynodrLO7EzE5DhYKy4hhafrt4JsMIgK5D/b7qg145SMYsIRrABt+3fngwQFQ1YKpMjMyym5wGQIyAjQpwYKQu+acmQlwdQ1VkGUWWKwnMUBuruxBVMiJnMSNvYf38JxlT1SquL536eyzX/U1VVjvkAL/f8Nkrth3HzBfC3D3Gtj+1HvefHq2NwlrZH00ieDgFNzFTAysL
m7iVp5IznlweJoLSFiyOrMMzezGcGcndtVUNj4BZuV5gySUzRCEiokDQBJLKBRjLKtkZtnU3aMIcUecAMAD5cotuJGZNSTjUI249pRDCJ12o421teNbOWeJAeJgCqMqh85EobYqTbu9V7GMRiO4lUMtIlBkB4UAcWYOVLubanIKrXu9vpqytimXpCovEgcRESTv2sRRgxg1FKKLpVwTjUNAWly9GzEkhsOpLOcNS1igBG4HQcRBQEHmkeXPwgXMlYTIcmCklo+1fMHLt6ODlxrM3BJEHfYfkxOcYATyLnPo5yZMO9PF+97ydrr8IESz5+Xm0YKbYolOfH6XJxFAu8PXvfoFX/aGvdmnV1c2htuTcuYLAMoxxrIy5d6GexeGcIbnIY42wGhpFq8cfiYSJmElZFCxMoM1+HOHpZh7IjrArw5WmIgEJX5Rgx+sZMmZBlCSWQzkIGGKQlFIGEaeHGBYiUEPrJIt3UNJMgZP70vfO7gl1YMoPoSheFK+c2DUgMF6Dq9J5VG6MZEzloZ+ec5d4T6UL9iHMFiG3VL2f0ndmNlhZiVzuuKlhvDZSz5B7AcbdXm3XpNEIzI1s+xmBBe2wOaduzoCvIZXJRBkURHBVWb0AG8pecbVxp2XG/jgE9FVCUpxOYDB1N0PbvUArgGuzoc+a9MeuHUzI7AbymMFUHIqGEwjIzAHdzXKWMYH5uwQphoUiGtHNERbguklaT54WO6unDW4MRl7Fs1sxmZsxOZmldVpvPjCl387rCQuVz7s53MdGAED4FDAs4swLj/ygY996r6IKvepIjJoCSWWi1BKEH6w2cxQrDwA9UzsyxLIYJTMjB3MLKUC6CbDGWF4cBIiZTEAntWsPNYIiim7O8yMmctRNQwbQVVLaAJcwX6Z0XULV21n0xBYhFbXxtPplNWlilzTeDymCl3smo1J0zTzU5f2z148tnU4J2tzCmMh8poiEQVp+pyNWwnuFkRCl7t6vIpJlLWG6yihykxgEvXeFALkRE7DSXACiFgigpg41eUJLZ/0VYgqkZOA5cpeJHLzTFiih4CaeSmVwTr3zj3Dyh8lT+zKABGGQ3glEldoQaXJrwSQGLxIiXb8ysFgB5dMjAlZEdFplOrxkxc/9c43cb7ccFLNagoYi5XYeXly/qJXVjDXMDgL0L/ky//pyrU27bYDx1A5SN0AJxaVioSrqzf3gVkxMw5CQhAc2F8BCVHkSOYHpSEicmKDm5kRRGQo2S2RarNStMEQVpAsbR8BDB7eXYbH4UMyxFR+VBDwpePhcjwOFkRArLREEuCuRvZnPlF502Il4Twg3wXkGT5deTV2K0EVubsjOTpQ9s++yqcusMCBcyqvhwEKERAxBYKUkLl8UfGAd2dXh4GcYUTkUNCVZ31gT2FMiIQID+7EHAkRJuVOl7tcmAIVjIUDlsXDAjWZGVPFLO7wUr5jKg+uWPyDgz1kIYSCn+CKNf+s60qe6leyuquNkRUgfrD4vnwQ5dgKERF7gYlEAjOXSJ9IiIQQiSuiIZ51dxCxCBGyZRKISPas0BBYNQEwd/MhaaahlgNCEleGkhs5hAJTJFTBV1D79uXTt77s+YfX73Knitg/O2f6i12fnWZBi09JImTnPvonb97bR+NA7nqKHrUnKJEylJHJyxceuNSlASsBH2RIba/ECkyQAUnzrGXBjGDlLARxJneSgc6gFKjUVJdp0+AbytMxcKkdujZkFQDj3iVBkpAyeQfjUIE5hLBo58KEpGOR1dXVlPNkdRLr0KUFjxHqyBDdn2+trVeTSQnmLFAytYVBxIUpsHrraq7sRrEay6iWWmbdjKsYqyaltFgscjb1TEJd22ZNbJSNNXtWBsWYwb0pV5FYQRlLqG8JwbuzSSBmLtDn4GzdCcbkxQwZCOzEbpbLrhUEdjkIpsBkpM7LzNQLCGNOdhCEXp1fF+iVCra6dNSAM5O6kTOSSsWU2RYJUe754D2n73sf6X7NcMADnMxd3f48wPe/uGLJ3DQ6G5mz3PTKr/3hnfxYVUtgd0+xCiKiqjlndRMKDCFn8pIPWjEPBxFcOTlGUHhJ95ypLMJQMTYEDKf3IFkZ4v0BqvLlWukSFRF3d/ABLlOM7LK+RDQAuMPHd/eErJ5VtWD9RMQSparJmQeqkgzgGJMRQhiCYRpu1M1MLfkSQxvWa6jDZ1BHnIaw3shN4BU8DkU5GtL/A4Tal3gUPrtWPDgA/qwCox/wdsicLEMLcl9yvGJbyVWG8ICHPMdRrBiAZbSATF6SQi+ouYONJBNTBAUiIh4qdgC7hXJvHGSw2nRwVwdPqtz5MoFzPUBRSqpT/lztB/mq+GmZyQ3P6yAfKgWK8p+yLQ5wUSLwsE9KckQAOGqh+pDRwGDhgR4CGEgdmXyoTTIUyz15sEPcQGDRmiyyRXIhgEmZM1MmB3G935x56Wv+JjInmcMj0ed9vvBnPZ+7K8hqdBc+8a5HH3iolkkiZ/Gsquqm0GUZ48Ax6BXSR7HIywTCh4D9ah7aUNkeSqumbgaFKZuSZlpiAyIikYmcTBkOgTNESgoYUl7CktwTJ2IDw4nhFbwSrxkgopQ0hCqltLK2mnOuQi1VrckqYSfqPa2Mq5qjJifIZGsjpS6EqgqR68gxVFRBgnOODVg8ELOzmY0mWx7YUiLLmYzrSGYR3Kp5VGeez+cdFiyunHMFrsDojZVqrpBsSGvJllvT3NWR3NXMAGNewjLszAiE8iPmUHJJIg+BYcQO8YGQQNmQzbIqdHkIyyH3AvOVugctzfrw4HkZxzmEWIhBDleQCWpHy15T7yzKHth0L9EH/uh39PzT7NkIyXPvPQEHKcJf/GLuLEFjgldMNbvf9rzvf8azj87m2wCL1O5JrWWKMTSFLVCi8gI8C4orZIIfpNPLlyYSzj7wLga2iBmpsTkFKVH5QXg7GKpl+RTLynP57eIM+KqqsR+UxAuEVaDnZYDsgUmYQZGY1WGkqtnBLITC6ylpxGC/SsZw4M4LcB9ZiAjOhShVDBOxM7M7MQfmQBQAsIDFQAlMBXA8CA/JnG2IkIcw/0pEP5i8goCxQERKcV7VzQziHIkjQdwJZiZCzBxo2JyyrD+DFJRBWYKDsnnvSDFSpRAMBQA1y26JPAkMHTFADI+EWjgyw7zDkrdaXrZkMAWi8aucd1miAUEGrv6+D2VSv4LIlYoLAUwcSulM1YyYJQQQ6QGSTnRg0dwd5jEGBplldyUiEabyYkHds5kSWDgws5Ep+pxIKLoFzQgyEkRLYJPBIPIQavjgJkhFTdyZFK6gzMGoyhzNU+pna7ccv+WmrwABkBKZfb7n639ykQHG6fIH3/Em7dnRLeB9yCuBSZmNxDm4BJfgxIVYkYfkfohgmAxuYDAdBO9DNmwmoEpCchgLOAxVDvJAiGRMQbPDWd2YWS0RO4uZZ3cVISeHcEpJjQ6q9ebsFtzEXQk90aKhipM3Upui7VM1HtWTFYpVa7ZST8g8VDE2MYAmcYREMqpzIFSBDCKRYjACwCllJ+3ygohE6tx3IHOPYFpc3s7TadXUi9STWmq7
LCBxc5rP2l7nHDwQoiNAgtQsAWwUtCIJgeuIwOYAkwSAg1Uh14KaUJnLgDVTFeLY3SMLE0WmIG6kCNxTJlYSKDRZyuRZQByYQkWxVBELp81AoAAKAKubUwZlkMEZzqQODyEECNQtK5HWgpqUeuRMEdEyaecwUSRtXE6fu/SBt/w75KeCK5NXYJgrt0tH9Wd24eeMONxrihCA4GQFtd9+6Tf8R4wf5xC5VbesnNUYkqO7VyD23jVXCPAK8IDeFqKREonKFe6EETmMs7EN4A0YIVrgnh3I7sksFxx7iMSRxYJlYq/dIjxkdREy6z2nAQQxBcwI5CyFflvKthSIIhDEmI1rDqQkUmdjZ1HPjgS0KbQae5NspCAZYMogQKikChAoiIiEFZ4ZYiLMhCi0xpi4ibvnXmG1iysW4irOLDE7CLEywUKjEaOgTDAEoypLQgmLEcRZVQETBgGVZCiym4tb6F0p9tQGZIEQY7DYVXCJxJ6CIAKcPRv1xslJSSirMo0810xBYmBuTAXZXZBVzUDOQZgYQ2VWyXMy7d0LMCXwyF5BsvmiBMVwVZ9r7L2O6sqiIimnBbkIRrmHMPdD5UOYkS2ZUKeqFChEZ3GIGYG54I1m5kkZROa1N6JSOK3SmRlyVriDklNPnJgBJpNMwdyJfew5ujuLKvVsjXDDLMIs7pJdMlEWIYIZWQ5Cpp15BpkToCru0cGqBi0VITaDR+YKJYbgkJOqq2q/Pl47v/3gF3/p33CCYl+sJpgrMFQAzFG4mtldFQOi2kE7qC7PoMExFN6zAiAF9e4uCOc++aZHTp6xICH7yAN3VYJZyBWUrc/eKxKRh2KM4MiJE9c0YUQiOHpBauDCEZQqMVaHshMSVEG1ebDMhZmD4B6zBdWg1pVcDbCUOncfkmMmgUR2JwNp4MZIx4CD1TBUXJBAGQx3Ujar+8bcVEZ1lazjEVcjllVu1kIrKaw2zKGq11PHEqpmFAQdk5vAQlsFI2EVj0FSVJMdF8acQ0Ja2WRxv0Tbj53pLnVRU7b9ToM7fDxLjL7vKWgf+5DFeQgzjUBmyHAiSIbDQ12pat+2wlFiMFAVOfWl40lIuOu6vteqajyEEr+YOzkFBCgqDwlWUBcRGXJYAD4UOujq9pxSoXAV0BIJ8mXiW3jIDC8Jpjt6p2BGFZUHVMqPzAQzd3hgvv/h09e9/203veKb4IeSt4Eio/kcNvxzYoVEMMtOziXnVOeweuzQnS98zTe+93f+x5H1OxedxZLFJVlkr6JxzoHrzNLHlpRSD6pZc4IgIzOJk4KExEEkicrbO+CFR16IMcROgJa6p8HduLTo9CIE7RkKZwqxzSk2NbOCydyNHBhyHUWhcMPI2Y0I7p4BDEhIeWIQlNVmg1Gh6g413BJPguEL77z0XolzIHNzAZmDoLYgYc+JGCTqyiGyWecWAGFRYuQMYod3EJHA87YHe90ECq6qIGnSqASnOSciDTEuupYtxNgktZhTFSdT7DV5JGSzuGgsIicCsTAJ92pOLiKM1kpHwUAuKqhIL0GyLkIUYqScAJWKkmr0iomdeGijcwoGVS2I1nK/mpkVYmboxxbmqo07UZyyC/qGqXNeMe0lUKiSaSJGVdc550hUSaXZcp/rUUwpjaKk1LLXcCzhHAPM4HBoZFKVQDPfNWikWh31SsNZNbszC1cOM1fNTgQYE1sVxYPnrE6uRhxq1fYALHKwDTgPqmhwB2HgaQ1okrkONVTm0mHnADlcQk+ezU0kwqkOdZ+NmXf2tleuP/qCF3+dAkwjZygCAjIcZuQo590IDIi5kgtx7TJUQwQgy8jiQSyU8IpMcwxgYPHAB975vjRLXIlZZhIhItXIYQFW4kIns4JNkjqzCCvUPTucvDKNDFcHcq/ukMAxMHMeCAVkWUomLcxEcAxtWuThANsSDgATCE6UU/AQSESi+0LdetfMDDIWAAO+IxLcPasBqalrIsqpC0yBbdRUSGiUmNGsrc7nqXLKiSTUImQW4FEkGimBVbNQI3HiqcPAX4DlXurgIRD08oXza5OVOlTTvX0KAseibUmCuwPcpb5nDWaZiK1gq0RgL0lwRTTvO9UuisQo7FAzZVBKIVZkrn3iyFUVVDVZWyO6ewhSLDHYVdWYpWwu/6zwOBD3rsvMnNycgGGjCR/knYWn5ESDiecC7AOFewSDs0kpRYotwQpnZ+bg0ZO/54/fcvzm25obX5HNXBr3nlAREbzsr7/IlZlZlSAAZWICAtPaC7/kH3763rctzuwL1tiS0pwZsZpIZtVdEe07MiJ4G2Otc1ZRZs6ag5QirBUExc3MlrzPsr0McIaQgYhB7nBzgpIToXJ4VgnOLMlUPTPH+V4bK+IrJQq6ilFRjlOBGIjYvYBdwVyo18yg5E4OIncYgXggGjLYHQ4YOZE0MLiXzswhsnZQZs/BYRCwkyUHiAJZLWE2N5GwQIY4mDnpiHmak0gMIRCC9ayaSCh7T/WCqfLMkEAUNXmQRkS8gk0Fo7Eaj3on6jrKAaTE1WjkaimZmTkxMdzJvCZQKai6k5eSr3OUKntvZqkFcwUBg0iqubVCxMXcERNTZsvkAvfi6EolEWZEzNpxq5kDt00dcxfcADGikXuraozAqNXVLVFlsFQ1oV0s3CnUVdfnEEI2JY5QLuRecuRsZigWjDqriYgoh1GXWkpaUdW2uRYnBlxTMpCHEKpI7jCvzBeO5CBiK02/LMFT9oOghZy5sBWg2ZbF3oPaBjtMpRSKYcuyrLuDwIi9KUO4wDVQtxzrZvfi+ed82esjHUn9nEOEJYUKV3AGSzHglCHmEIJQdjBAZJBCLarIuNIqRTDM0bNkcNcnTKJ/5kPveOzxk7GqPXAigmmMQZOyp4jIbuxMylRsKsEJC/XAMNMQgmrPJEvygtcySurGptqLSM45mdahVtUD4gZ4WSylodUAKEBqLqTc3rknH5B1B1S59yZwN3R7CQ1cBnYDHKMxg9iy17Ga7u8SNMaQM1cMF3EWJlsZT+TI0Xa/TfN5HSdwNvMupxBLFx5bShRgCnKBEWCoOISQZomTBmI2ghICuq7ru46r4BDi4CQKDZGFmJ2QaYC6yY2NU6FbAIBVVUhJNacgNTy7azG2ZOX5UxROqu7uTAwidwIJ2M09OA05WeHyDj0awkvb6sM+Iyy7N+DmyzotETmIMSCaMGK6wmgkEy11kmKzSN0Kr5fIY86zub33zb/1mu88Wo+fmxUuXiqPtHzx/6VpdxcCMcEdB8SHnBCrm77oK/+3N//8z12zcmix6Cqb7PdzDmkRtJIm5LTOuwBkZdz3bbMqWYkDUiKpKPUg4dKFFKjYJhq8rrm7sytTLiR3DHU5tyXIm107yyoIVZX7NKmCV4E9A3rwicpLmRmN2d3dMpnTsuBmZBJqVNz3OTAXxFiGG0g80CtzKZuru0PNFaWxCeJQqr00ffQtK0dmCQ7AxrFWjRVZl+La6hzcMomlqGqhqVy1qWsziw6zJCJmzhEpWZBVtSw1wRIsE4eASES6d8xX46XdM0cmCWHksXMLbYK1qbMcJTRNE0T
MTNMiBspUAQaygkGXak6xVYGgqhXVcFjqiAgWRhTAZCyFwVWi3MZZBzYjiBCIAYIDiiq5VHVr1uYWYqEa56QiHZkKBXcQSYzc5bmZxyj7O4vReMyRc85VJPdcS5VStmiWMpXePHcJxEzq6pyyNTkZIUcJDDGzukolyGEOhVkURFGkC7DmHpYmmxjkLHAKsXF3UMlj3eE0VBQbv8r1w9nNzRAZOhQAfEnKARHBEDkQUT9PzMwUaqpthss+f9mXfI+5SRgrIBmQ4HaFf+xEywZH9+yRZYgTnJxZ2ZTNYE2qUsyMpvOUaTKJGZfv+cS77lHVGIKlxEiqCmJ2uFMHdS9dHnlolSBSdyEvJKic+hBNtRWucyKyRc0xhJC5g7omIzCxWUFPSs2vLJGrM8nStgEw06WV4IoQNAeYmzJzTpacNBCsiLOUrjdRM4BjjIDmLpFVUWhvb69qYhhJ6l0DRqNxmzKEZVTF8cZituthyvXga2MtJlk1B1QiEcyWQCowYuYMDyTdNLFbIIa5ptzBhSbBad71zNVi0VrKzhoOYr0Scg8XmRqCCBMs5xzAVQiUyZSlhEIkEok8a4KTVKGcJfNMVJ4nooiZJzgTgaSkiG5Dz+VBcckJQ/vJUEmTgxSxkJTAxARNECkpNqEQacgJRktcGMxgCAgo9jIzSRTc//Bj17znD+58zS3s4x4VuxKJQwk4CN7/Xwz9snQGMxAFswxoqNxzvPOuv/vw89925sMnm/FhhYVIFDXkKusiHjp0z0OfvtTvVlWMTl3XKWI5aczcq4UQVFWE2gQzQ3GxA3MZAMZBSsW5FA5LywwZde4SYyZXczf1rIc31nPXTvt+GYgRgMgiIuSouCuxiQw9Yj6QMrOXsiRT6HMqhVAzm9nQpoSBkDcYgiYqDGxUuPwi4q7CVAOL1E6aTes1pUUcjbvUky2aMJ5T1xGaehI6jabGQNWMASGq66jWeZ+JnXMgEunOs9SmBaRQh41Go/Pnz3/Hd7/g2S943oP3Pv2ff+mX1zdW8mLX2vr4xvGbnnn7bL4z6y6dOfuULlp2RKkEMktF6mfgBZYCo1lGqElSaMLKaHM2m7l2UULOiCXUKO7APbsN2hiy5CYutwEVTLxxlX3LXNe1sAlCb1mkRY2BdAinwJESXISaa2989pNPP1bVCp4nLfE+icSePXMPWCVBmIVYVXOfYhPb1G4eOeRdt799uRk3hFw5pVCllMyGk9L37u5gct8LcQh7CDBPROQmSRKMRCKM4M5gNQUow5YUHwcBzGBncuX6qt1+wH2i8SQuFoukfb0aq6pq25aZQwjPeOGth488B8Zm6kFQmXh2qyLbsvWqFHbNQQDYGHANpfYKuIsLmEGd+NOMmdClCEW//5EP/MajO58cra6QmbizZVVTV69YhEhTye9dTUAKd7DlNGIWrs0sjoNaB4o5uUA81Narq2ru6ybmucVYi2hSJXceOs98CAWIsgnIlsSE8j2Cs0iSoLEiwCRQmmuhnvkSjigqWMROZCLRVIugDTncqFeL1nZKOqkatqTarK3JWHpXiRWh4lq0dVINo5goQ5Gzhko8pYpCZyW0cA0c3Bd7c0upCeKO3GVnipGs7bmi8Wh1urNnvYdxDJ17dAQdCJ4GOJsRJAZLKiAiyWoSqaoq65KqM7uwDDm/RAf6hdYCFld3Jjd2M8siRiUeH+DLoRHJUaB5XLV+DmDZI0BXNlkRu/CSWbtnUMkaDxAVyyWuGiqUPgSmkMgxZ5VsTVN94J0fv/b6d6w/69UVGqfsMELEFdZ2QSE+F0qTC/cXhUPigQrDlcwxeuVXf9+vP/S3yW5VmVad9Gnd6PLksN9/5qk3f+ayH14P2y0W1MoaW/JBJgvZEEPU7CLCjmX5wRVEttSK0s4JpSnIidgDQxjQAM9aS5W6fOON11+8dPbRp/bVM6q1AUEHisJPUfLKWQbOCUC2pNMxeZ4xM4iJLA2KAgKQFFalD40YRDQIwYjAHIoQpMsmAncWeOa+Dqt9PyeCSOWuZk5Suy0ijfsOxh15HsXG3V1auEgwtR1VZaoDN227qKroqWf0vlT+UvVYpdOn9vu19//wofUTJzbe/uDFXdprxpt7eyfvvGPvhp2dnfl8t9OTZy8qhwTjOLdSsgEOttyQfKhK1iKSNW72u0XLg9yYmPUQVjiEneBqZF7FiJxEhCBD/ypK3FCa5jKyroyail1EwN6shUs76PteNR0ECm4WQvjBb3nhmfP28BMXt6f9vO/jqG7zPHO2AkxjyOdFBGqqWkXaW6TDh/fHkS5fvohQEwlppjDS1Bb6jTsBZOTMnHqLUVSVUBORoyOGZhjU3UViUUeILO4ujGzZll0O5REHYocGHiLHoca1bL41DSGEPrV1EBY7YOMcf5gff/T2URXdD+e4CND18TUr48lKzQWExiLLItXmVVWFKvaQi93eZdvzymJgdP18Pm/bDtOZznYXaT7tTT32yR47n+jQYc+XgwdSZ6hISCrm1IyjdTOFgynnHFjcAaIu9ROLsRL3PBrXXdfVsRqNagZuPXx8pWKdLkg8qVcxwlQtFwjuIKArGQA7KJAZmeUBMiYC3Fx75J6zhwG4IQNlCgg9WQjB3VUzgQpREFBVjTJyyyKxT712FmOIQSFOQS2lWAcEZety3wdx8wwLKLyK6JVUrow+p9RWE0nQismNQlNrn/J02ggPyjkxigTNWdu8sblu2ZFcMkuWIByElwx0A5Gbm7mLiAPgwGTe9+bJo4Q6eEqqCWIhBDOHKzwISeLMNLDfocYQTgYzqkKJEsxsQNULV1qvdJwOFxNAYHe3JcxeqrOggsYMz7Jg004kQKDkpYXEzTEwxojZnSKRqXNlSBbe99bffN11N/Nqo6gHwswQvHMhCH4Oy46SWGJYHWYpyZyQaM7YOvp1d77q9z/51o9PqqPJ83hi7zt1/sIji+tvesYNx6dTF8MYI0OtdVc6m7UAwcxsIbDDWcy42I4hVWaCs0HK3mNHCeZLYSIhh0ipn11z5PBqg8d2z28e2krac+9LpRpScgExk5ObWGm4WFY+StSmGE0Kb1VV4wHTDpCrdBpYrmr4EkANAYBGIfAg5dblIzwP4nv1BuVedJoZMaHak7gRdH3cq6dYbV682HIVMjrMoPDxeNOJu4VCwVjdn3abR47t706FJC0W40nVVJK9W11rPvWofPD+83feWnG9cWanrWda69H3f3L+nvlT461RmJA0q4GYERiVplzXpppARuTqBmcHk5ASZxIzmy5SQC0c+76PdTBrAgdXY+OyITO095DQQwfGf0naiTwQY4FqxAvY6V3X7ETUqXLE1qQxq81ziYfILeX54dX1N77/kQ98+L5mbcOAQNGmicMog9QshAoAlMwsEJeAMXf7HI4/eiGHMLc8Wcwrp1CNbRwqWORSGTUQSXYzsxhqclVXeAjM7tHViaKTmJtA1BO7CZEjC0np7iFQgUqhKLQliBbKhy1BG3YUkThRGBpRtjREXUGq3Zw+9d4dda7jGdLAoxjjk4JFFSm4MFHRdSCiUHrP2LMiWyQNls
SS58JZrJqsW5lW5okmdUN5brHpO1BKTFXqekYWkZTc4FW2Gmtt7tVdNYhIaUnNOV62UZ72VT1Ku4tx00Sxu6694eL50/snzz7n6OHNhkniPC2lTr20bxZIxkEooKg7iNVdC+g8mAUnd0VCUPI+wyi7mVALdMGgB8xUiDDzACT0ncYJEUFVLXE/SzpdcNJRopT3Vg5tee4unn50/ZrjK6PN/Vkn1tc+JqmyLTSnKCNQjdzrYhGCkHeEGtSYoJvuoV80K6uz+ZxI0rzTCaM0uwVZLGaRI/VexzpUYIaYu5ExSIiCiZCbOYlkmCWtWIRJVZNYjBFIpSm6KK/WQdzgRq7KUTQngzNzDyMRURsaB6iE8U4Ogy5bNwYKakFmQgjZEmBaCh3goW2aCa6AmAu5EltpN3EXBOTSugInoaEXw5UpMsWKkvWxivtPnfJPvfsPnvP6ayCHCuVmeQ0FpM95OZfKLrOZ90zBHQXQCAGO+OIv/fsPf+R1srPS19UoYGe/3/bj0VK9vphuN5mZeMGLauGJBeYlNZFsmZn63Ks5mTPEqJCLCVa0iqSoZ8CdAWVzpgwKHeomVhW3/eKxJ7bH4xVNgFalA6UIaKmbM4ehQC3ulg2OQnYpnDsXh5oHdjXnwp7P7m45lMiXiKA+oJHMbJnNcwhBzZwpm7IHzfl5t4xe+byvPHXq1G+99Xeuu+YZX/GaV8wXT8/crt+49tGzT/3uO9++srWR55d/6kf+zjve9laN9SRurq3eePr03oXtk3c//9r57Jwnufbo7f/xt974yhc+78jW4dR3J0899amHHxitjuu6btNe6qrL2/lFz7n1peGa226fbO9cPncWh4+vP/zUox/4xEeb0VqfMpH2PoujyC7ZjCAUAruZKZGDVPo+I6ysjHLfuVoValJy6hqJISLJINVAROSZVCZVpaqG0hgFtUREgYnX+vlOuPO248+85RnzC1sSe2rmJ0/qfU/eG2uGaBCPIWhvpHbi6KGKdGNlxE0NsKtl7utRbLtO4CGYDg3OTmRZe1cjqZv6smdN042bj93wRa+8IYTqwYcvPTV9pGvVkUXInTQ7waUKgLJkjrDcu7t5hqIOVaapG4VQFcYBB1aFiFEulflSHXUyH/qKC9ferPCsiIiFiYLQkE/nnKvA7saOQIkCraw0C6yuukXd1kmTUhyFDXelGJwDmXFWdlBkBCHioIoSbVZE4MopInTWtVlXhNZgRgwhodnFLo6IQWDhWDVmmR1NFYlUmBVO5GyIkVOfBQ5CRVPiSKQrTdXEvLU6Or4aznz60k5Y2Zm3G+O673Ooxl3Xj0WyKkyXjP5l54e7q2mmogpO8CVMzcxiYsbcm6u5ZxcKpBZUeyc3JXNmFglEoppVvR41KRszQ21UTR57+IlT5544tHJYtFu76Zo4lqcfenIlLMaTOLU0bfdGm7GbdyKVWs9jz13iLubEbEhd79ynPKcgfU9tawINhH7REklSBBqnnNu2bVOvnWpfzS5PVypmDjD0EiwGAmn2rOJJKJGrD5SmwjknrmAs7k0IDFjOBcrtfdHzAmLOpKpmCEwEjQxGkmxsXlT9AGMQcyDlUDNgy/PDHITcKCURlhAkxCCx1CjcBw1IAEFIKECDZ3F18qGplbw0eDhB3RLc3Kbq8+wg15x4VI3u+eAHL9//JsdcHB06UOvIIFXjq+RT/txFg5YFwExFY4QBsCiMzU2au175hr/e5icbyDTZodVxJXubwU9MjlVqFXtGHcUmIdSIDVXikTwwoiNIGFdMIlKEmoRk6NUEE2ewIbgHZDehgITNZiWuxSS5RZqlzkIloTF3R1Ly0pOgOiCG2VLmpAZXBHATKigDxCwxRngSNsupCsSWhUwpc8UwI4Kxk7CZQQc+sqILFatmgkfy6BYor63Ed9/zqZc+W77ri8cXH0tf+6XPv/Pw5V//vd9/z3vfcWTl3mtwKs1cap3uWHvuI/c//LH7PvPA/R99z/e/4fovv+vs3/mGF5z82Hs++qcf+fDHP/zyLzgzfeLij3zb3c8fPfrdX3Ht17/g2IXtPR6JpWrhc7A/8MjDP/4Df/k5Jy7e/+Hf+vRDHz955o++6mVznj7F68fj7MTm2rPX19eDrF7cW+lmKxwoNg0wHsUT1WRztyX0KzfdePtWWL90WePoSBwd2985Ua9fP66OTmf1qfPccT1KGz3WZrPVqr62Irl4caGyuVbfeGFPp12UatXIunbU5VUfV08/flqm9//yj3/BM47Ndx595z/6W8d/7K98Td5eqKxB18+eWqFuQ3izRrjv/p29/ni7vX5+v1VywebFMzGnTdIb987z7sxJJFay8BxrroUrxO29dE284Y3/9If/yd/6grq/99yTb//JH7jhtXfffOnC6enu2rnLa5a3WDBS9Sj7e+3elHcuVvt9DmgCr7iEpy/v7exOgl57+qxyrnb38+zcCueq78PlXVZNbGpOOWcI+pgBZFBpoQKzMTk7kZNb0pxNs6lEKfoExtybAZ66ELp21rX7vtLOKfey39pe9raHznM/6/o+d2opube+P22ni9QmTj2nlrq5dq3Oc56lnpWmnc6Yeu8XxPscWaiaxBATIzILCRCbTISIGD1KoOwVheg+Fg1EAd4qLbLnnNx43ldg3Z1dvpzHxDyuK3DlKtL1KxV3ZgF1ZCGCsxFn5J5ScrXkYPOKJbK4kxIbC4gERFAOY6KJiDZxRbNr6huvmQMym3IdqypEy8nVIjcZxKZqwKjyDjsPp6c+3N3/nic+9cFz9/zJxx578uzH3vuZB979xH0fevBTTz35wCceefiJBx75+EOffvcn7n3gwc88+smLTz6x/dCF04982rDYzZcubT916anHtN0z759++pS6L3Z2u9ZTbtEn0l5zv9i/qH3KKZlZk9Fs0zDwgZYe6goUDjvoqKZhbkRW1c5JigwrUFgxKDHAwFxcap5gqJc7k7uzsRdIfBklD9rbBeh0V1V2YhJSL9M1bHgJB8PhbOxaGj5YhAAxMwzVmlLgdy7BCAmAwHGQOctKQXKfMqV3v/2db7juLj30vJFNFpZH0pqui1g2l/81cebPRPQNCMA0YO2Zd3///Xf/5pl7do8cOYTQplyBdH2TJhdlsZMpUOCQTc2RzQAuzUE+TAUAhMvClPJy4fYHW3byEykb2DSgQ4ZmKEWriIpwSCIwvCqJl9PAYceyQaA0lAwtkOVvR2nkjjFKCGamTgEC967PdazUTU1hLkONGhkmEl0BmNugmgBzM6SOd3f4zKUMztccWhlLe2jjJWnz+r/z0+88vEVHrjvW7/VrR+MP/du3rR47rjvSnZsuRv4ffvcDP//PX61b159fbKbzF97wfW+31fSOe977O+/62F+/6Zr33ffeldVVaWdeu/SjGv7U5amspoef9v/2B7G6xfvpyv6/ef9Ks3LDePbz//g7PvHxD/2rt3z8jmPH/sH//kO//Zvv/PUP/f7a4eqXf+Q7fv+9f/gzv/rwd37517/k+Wu//a73ftVrr73r1rv/wY//3Dd90xd/40u/5Rfe8sv3PHDy2
7/qxV9wy0v/0S//1zP55OtecMPf/tpv+tGf+7UPPXDxB771WyzvnT/zwA99++vuuWf+a+/8HyeOH5paTvCK08w3/vsfPv4Pvo/f8t4H3v3e7a31e37p5370v/y3X7pY6Vd90Stef/er3/ieP37o3Ce/9Lm3/+WX3Xjv4w/edCh96jN3fvLkyRfeFV5516s/8tSj+/uPv+LZr//Ifd1vvfteGp1fYZknG1fNiNFtV2/8+R9+7DPv/6s/+Aty7ZHzly9u53c945k3PPP2677tDa9ouuPv+PRH3/exJ/uw0p3ef+6dL3z+zeON4/LWd24/de5JW9n3+eb3fc2XbG1MF/NTWxtf9La3f/QLX3rn5uTYu+5//+lL55971x0PnTn7yLknDzWThaAXXcsrXe6osJ3MysQFJ2Q3MUBCjKyqOauIFJyBiDp2QmmahcEY7DAGU0KfNTMMlNwqgAy95r6QBcr+L9aGijilERxgYspqTu5mbEE8EecYmFxM1foujGpW7jO6rgPEC+2FGiNn1NEyhCVYlzXnXsK4qThYUsi83/faOFTEVZeSCOW8cBNmDiQOdyklBoibHwiOkjIwdE8DtVHIHjDYwZ5yz9RHoladvI5RRPq+TymJRIMGQzCIwxUGtS67uQVM98maqp7VF884aU5xZ3P9xk88cvbUZ84dTRtRD20/vedx/8jq7Aif6wyXz88u+4XJ0XF7sj+uG1j044TpzuW9xYJQdYtOVlfaRT+9gO3d+UT3xtWR/ct9m/rQR6YltbMwiQ7QZxFaqggWeXstTQ9WYD9Ckdcgd3YOFIoPOIBuaWArlobpgVZ2BdqWUgOVA0SzhM6gYWjHgNHjSqV0kKIcxOdQ6lHLFxzUX0sIPxi2BM/m2QpsqqqR44XTFz/6tt+UfDKbRpbWnLU3z/L5S5WaOSiRr3lW5a2Xf8W/4vXHA2w27SaTVSLqu1m32BdBJcF7QNi5QExeVHDLRxWlmL1SBDNRDWZBUSmMzAKUDZEkBCZit7To2p6yeuY+IyVFspgcPXWlF4yuov0M5b/y9ZIAQ0t6QIy1GbIaQCIhmZWDPPTyiAydw2aD/rC5qsoygS3ykKpec7j1xqPntrfDGA+e/KiMn/iVH7vtvn/2xb/+I39PpCJa1HXqORy95sim+aXpye/9ztc88vFPvf+T8wefuuevffHkbT952wd/4Tu+63VfpTz6939w+vG+/he/9q4/fag/tL6mNg7WGM3bsT9w6uz2/uXXvoo/+kc/8Krb7xiPV05epHsvXvrgg2c3V+e/9ft/+si59NGHnp607/qR777xaLN++uGL937kwY9/YPfE6vo3f+21P/Yz//FDj8/+/X/7+D0feeTQ4aPvf/9jN1x38q2//eFHzvb33veJv/qV1d/6yjv68/HjH7/04KnPvPNPH/7Lr33drdf4L/zX3/v9D+39m597y/E1WeOjmoP4jJM2HKheHD5xKLe47drD3/nXvvrv/tC3fd8P/eN7Ltvly+GJe+/9ojvpD9/0hx+6/9z9H3v49S899Av//n/8yM+99Rf/ydfdsdH9/C+97zUvPfLYxz/662++9xd+9Zd+9Htu+jt/6QUXzvZxtBXhwfP5y7tfeMe1123qD/+L365uvX1t02657cinL5x5/yc++eGPnLymxhaf/Z3f+uP1zesvnL30/X/5a/7t337DG9/2B299+4f/yffe+dzrrz336f6//8R3vOyO9F//x5t+9j99YH1j+6GT98Xu3GvuOv77v/LRV951y0uePT516lOxkl72Y9Sx8dynGGcWHEiNllIzfCC+aC7tIyUvH/LColfARFIkntXFOCJ0TD0ABKGKlbJRR7zPZi7moi5OAg5E5K6qCg8EIRLiYABzCCIxCDzGGKtRVTW1iIRKJ2OvInug2HisuKosBiXyWBkLZEyxIoFX4JVYd3M7c343UZhNu+QEF6KgJuax9A05CTmziXgwhzLAHofdreapiEcPYsfkHqCkpWuC2BkFSzCzVEUOofi/vDRZhZ0mxgIJmUKGZEjObD5rtup+sUPz3nvODst24XJ3rs+XL+3vX572O7s4ZXsP96cf2bvwlD/5kQuPP37h8SfPnnns3Pnzl8/sTp986PQTn3jy3KkLl06dP/mZ02cuXFzkdndnOuv6FtNkuru/t1ikxaK7Anz7VfIXtLwwNEAIY5AuYR8ktmloOl0a8WVZcmnWMYgLCg9y4RDBIBgLJkKEFQVa4yXc6U62FKUr1qoMUWCnQQNSijTHgBV46U9cankPwWrp2XGG81C0KS0tilE9vv/j953/yB+HcEktBR6j6g/QmP85LPM5LhZyKBmYkQ3Hrn3Ns1/+pXv96Vo2xk2dWtU0DqGKASmlUuckyIGmCg0q3lSkgJUtkxbyrzIyw1kgwSUQB3DJecSzKmUSJqpB4qQU1APA1VCMpcJnXnK7ARIuupK+HPpThIVp+VlTqbkzSQjMrJqIXIRESD2r5xLsH/gG5gMRL9Zsx5q142sr41CvHeFf+5NHvun/964v+ev/+V/9+r9+9cvxL77/DZefvsAyGqWwaGU6RjXH177qRf/tNz80uTl/3798y9f/6Fu++X//uQce/U8//j0v/euvetHpS080oTnb53696b3dC+aM/Yr/02/8wemnpp0f+bbv+5Pf+71/94t//5m/8INfcXh0ouLJ1oT3pmMfTw6tVOPm8JveNfud99zzz/7e96ZzKa3d8NATJ//Wt3/1e99+ep+qFz//2Ne+/vY3fPmrv/Y1r+vm3YXLE2GujlQ82vyhf/3mV3zx9V///GeemXeX9rYqpb/yhlf901962/E7Trz2y+546Stf+NIXv+i5d2zu7E6reCQQktt0lq5Z2zoU+a5rasmP/Off/JkvfOUdP/S1/1u6jMtx/Jkz+x1P1tcnU58D/O1f9SW/8k/+/m++8a0ffPCR5vDamYvdhR2aHNl67yPjn/rFd37rV7/gWGzn08tNrAxuLZp6XkW9+cYTab5DmRSV+ng7jXhETzyyf2p7d7LV7LWLW9a3vvc7X/M9/8c/7NaPf+Dxxb/95d/+2R/96m/+sttvOn78b/79X9lOI7pm/C9/9WMXEtqZPXHx/LOec931R6/7G//sd/L4RGhGLVWeUCWCjSTxUuhmSbhyU0c2GDylZArm4SAXps2gI1SkhgsHQ93VOYOMFKTwzj2b55zZOFtR7xwGSCX1PqNXMxv00dRNzcAUQohRZr0loszWmvbmUjlCMnjSPsYIw7iRumFVHTeRqa/g2i/UtYfN+tR2tmjBzptbk5WVkZmRG0tiSUwHQptwJy2E0MK9Zji5FvnCQh2GFyVjD+gtQa6cAs0Ey0zaNBWAMitDRIZJYZbVrTBrncpKkiF0TPVkpZ+2nIEEHof9+ba3bcu9d9RJe36ysyDLM13sUX+5t1kz7fj8Y+dlZqradf7ph05uX9BLp6fbJ+c7p9Llvfmi37988UKbeou+6Nq93WnX9trZFXm5gxCvOGSoDdJWy9FLhfJPQJE8BEDCEgMFUVxtFq9MISi10AOjP9hQumJJS6xd
DHv526BOttTyBYwKBCgSRaRIAjmVCa6l1RNX/foVu+ksEC4aTNmUg+RsbdvD6/e+/U/70/dEaS3PEpi99aFJ/fOy7+aoSaDOEZngX/jFP7F56zj3O4KuCkLK8MpcjEAx2DBtgtzJFMv/XZaLixriIBnozhSUJZMYm8EMGWSBtQoRRW/Ug1MAsZsYQuH8D/6UluqDMmgYMA2UiJL1lHknKPwlJ4YIF75X9kEP3fKgeCWxKrgZByHhrFqqEwaHcHZb3ew9doE6dHJ4pb7mmsObd93wf//7T9//ULph0q9wUGdlrIr33f7tR264647rHr90cnVj9fDWketuv/HRRfV3f/ocH6LNyr1zCZtNoDotpKlHbbsSk/d+z0MX58mPNZN4Aj/+Zv/yv/kLG+HpX/vJbztar+9fTKbp5vXq9IXdaw43191+/O/8/9/1Na898oqX3HBoc0yz9oZrt26+fjPP+8888PTv/trH77yNP/7ht13aP9tMqpW1Zv/S/q033XLm3OKf//zv/vS//EvPGB1+1vo4Or/g1hO3Htrcntnv/tEHT336qS+6s3vw4w83aytpNoMEBGl9EcK0GYWTJ/v/9oef+Sf/4fSZUw/94//jBbds6qmz5zc21rY2uO18vhcr4fsf+vBHP/zm59/5gvWVI73thYbvvuXabnd+7MTmR5/eWTnEN240bbI51Hw0Wh09eOrJhZ+/fetGSR01KznRCJxJrcPGNWsbkxNtl+ez9rpjTTdfnJzVR6u1m69r7j05XcvhVXcfme7tr990i62M1urVXef9WerQt3bx3/zjb/+lf/cHR+64ZTJibbV2ZPJLBI6ptKgsY/bPHmYESAzF/oYQnCnnDHAiz/DEyEIqlBk9rLVMpmX/WPbgJAboMN6rEL6yacqah2AhhBglUIkfQa5QwAgZ5KqpT21OvSmxB1UQmpzItEKuy3yCnMido7gq2JkQSGJVB4oVJIzWRrNZ9/TpMzEES9nMYghkLiLRYE4Z7hh6OsnRmwoJHUyVcKLlsRUDm1ZcRiK7QducNVBVD4Nw/Sr5VXclVXFjdTGIWzQrwvkg4SBKAomULdaNqYSOBJTFFnXajm1yIvc29GzTKeYMG82qXsO+e6WNZe47Tfsh7xn3TJC27/e2251Zu7u/37d935t2jt6WZn2IyMq44aIGHoYb/SzpXQpOkYkduU85ZxGRGA6mEBNdZUOu8hlFNrawrEq0vuQOD8jMYC7ZrUgOLZ3EwMrCFVShAAslb4hL1ebBZwiWIy9glou0fwH0NXuMUbMxhwsX23ve8iZuP8MhkSV1B9KB/uVf2L5bn82H9qM5NI7W73zZV37LZGWvCqgrdkwLehSrKqVU8huiYTF9OZiwNLAU2d6DqUYMdyIbqrdLvMWK8HjIlrIvVLKxZGUzA+UDz3Qg4zncpZkX+U0uU6/9aglydw1hWC7VNEiiF3jUmUiu9pkAlr1NoThi1fzEGe76Wqmbsz15mTpdnH7y3Iue/UUve/GJt//JH53dGc8XEyVwtAtnpi+463buNXRNu+/nzk41896lva95zatW2v6d975n89Bh4kueuxGt7eepxqat6rquN+u13Z1z66s4JJsrG6MHTtVvfu+lZxw/9o1fNEkLaL11cV/qjfHO+fnOpdOs+KF/8FM/95PfeLxnNzx4/0df/8pnyk6zLzGt+5lTerGtrNryTnrvROr509vjOPrvv3Puwc/86U/80FftX35slvTi/gOv/cI79h643Ew2z0737vuMX2x1JVSjYFCWTupqfdHlznVKLU5UzQ3rf/KmTjaxeajJ2xfrPksfpoS1CavhE4/O3vyRR29+1vpNx1d9Buzt3nrdSqLt7ZOnXnbbC+aX8MSli2E1RifrbX0z3H+S/+sfvf27v+uFPMdsjkYWp5449/Kbn3fLsUOX5xev39ykgM2VZrFY9PqZ1770lpNnLsxnKrrZj/hnfvFPNrZmd59Y3T7fm3Q+mx09cbhNrOf04x/7vX//M39z/WIznS3CiCkzM8bRKwTuFVxa3mUpWABmiAxTJENkIkopqaqBs5mQEPEwzdidC0GZ3AM7exkYWZNUEgAoDzN+nX0wBYNoNfWWcs5MLkIhhBBCVcW6qfIsNTJppEaXpdeNerw5HudFFwPDPEQ31aS5qgVmG6tNHIVmJEwW4BXJ/t78ibOXdmazC2cXVb3CTONRI1xry00Ya05CzMWXkJVpMWXqC9TYAS8tXjgQFnZiEo4xoEz3VUOiUQxMoW37wfO5m1kokwaECwO0SKMbmTOM0NGcm4zcq+Yc+7CxNt1N46lJtb5bTS+HBed+NVeU2FKPxIvR3kJ3pfNqUmOF5/N2Mc1VrpDF1TR3oSbTGjZepLy/v99mhVPuTFtldzVoGZg5mG/iq8woyvSDYrJFiJbznN3JrFRXPDAVgY4yki0MUxdKuzHoz1+DFQItpyHDHOQOK1RbcsjS25dY3rLCiEHCxERuSupCV7zOVTdchGeN4FGCMGBkKZMjCOfcjprq0/c/8ci73xz0QqAOoYEnd8Pno0btFupAJBnu8DWlFoSto195/NprpIqzxXQ0qjfWVwE3zbRsx2IMEy2G8Jkpe87kSpwxJLO5JIZCGVYQFTawAapkZpIRovNIOWpQDwoW9/pg1h0XT7BMjIZihln5Pi/LFaoWQhCJqqopi0iUqigMEyAUSjY0CH+ijLEtsUBRmIBrDoyved1dW3W69mh7LE/++Q/8zWvHG6+69a6f+ZEv+9lf/Mmf+E9P/+xPf+Mbf+zr0uVz8yqtIn7rNz17ZXz+7O6lFz/7xd/zV76yw/a3fs2rvudbbvyr3/+Dn9qvsF5PFxrrwwtKYw4iYrnr+260klAjIX/rN7zs5Xec+M6vf+nXvf5Fpxcfuue+Tx+9hp/7LBw5knTa3X7n8de9/MtuvPuan/rtJz704Q988Ssb28R/+PWPffDBP/y3P/XyL731rpvXb9g61PX93jNuue6O29eJfZL9ptuv/eJX3C3XhW/6oTc973l7m5vrGONH//mvfvmrx9/3Xa+5bu34kc3JtdfEqqLdbj6NSrSQMEt7s+sP3zpan3StX1fdedfKDT/8w9/4xv/0G++7/+y1N959/Iatqkm8kN5mh9bXbr7p+lOP7T99+f7n33mn74bV69fm24uwf/Qvfckr/9Zfe/XP/8pvXDYeU10pFjxPUzpx84l/8FPvkNWH/sXf/4aVtLIxuv2uO+/4wW/5ip2zF9tFuu0Zh+fn8lc+7zUvfsHr/tK3/Ksf/+HXPevo+qqu/4sf+65/+fO/8cBp+1f/8Td+7v9+/R0htNtrdxw68oN/9VsevO+pV7/uy//Pn7z//fe+8bf+zUvvWLuju2ges3BNaTHPoa8mJQbKOR9UswYfDzNN5YgdYO4FT6QMV7NspFZsIpmzUs7Wk/WCVKb4ununBy9Y8kUjS5a6nIiFiKrAkRjmqU0ppZwzc5hNU7/Qyaie1PV8NnVLji7A6qDjMVJvOWFlNQa2pqpHSA0Zi7WcZ6kzquu4hhyMQZGzL1KeJW9j5aDslMukxgA
nqLGXOZwRgkGfarAnB/Pli/JjCIGNy4hjyx6FUi6FqyHYF0JRXVdhZ8lF8hCuyOqW3MgnXK+jqy0xrVXNkY0LT57dIEfbZ8mpsbo3TqTM4zxZVH5u9YzGuSaDOXnKpkgubqYpkyVkBLEc533Xyz5xTp6kqbJ5l5R+8tueUx4YM5djHAqHh4cUA8vszEvOrgM6v2SJKjNH4Zx1qXupIkXhmt2WkyhkmM9X3oLIDUTOZQaO0VIm1DVydSU2BqzIJLKHHAvfA1JCyOEnFJiWE4h8UNBmYMAhSuNfUXtoU09ELmkcKgkrkPxVf+Xr1u76hs7GDWVDDTARw8swuf/lZWZMnCmHHFQdtUrv+z/wXc+cVcfZ+/EoPn52+/zevLUADdk7dzewEw/STcVJ5EQcyrBQmKMMRCUJHFPXV2UgiCkxZ1OSSMk4cNE5chtmX3lWjpJNi9sofAZmtjzALMs5NUPFIsaoSX1QhcDBYSYis0w+qD64cEopkrhaIfgEjp7NzCAEMhG645oV37m0y/3mZPK6V7zo13//I7fcfuLS00+8+9HF1ur4eTcc9rx971ParcTRLLzijrDO7X9//+Xv+rZXrqf52z768F3P2rrnntOPbdebR8ZoJQZb+JSlDim4q4WcOBLUp/0v/dg3P/7YyT9930duecahMxcWn/jUkxdp5UU33nzLibxzYefDpxdf+sJnbNneJy5NP/k0tp8++Y/++ov/3X//5Ln6WHvqiefftvHs26/rRC+duvDUxfSs517zghPNH334vtav+4ovusF3tz/89PSdn5i+8hZ92Rfe8rP/5eH5hDdk/1XPu+mGI4dOzXcvPjG/78mdvLZmbadcgRdHjzZ/6bl3bE22bbE1s3B4svvQ0xd++Q+e2HzWidfceuiVd17/1g8+eM+pi6+645mvuXPzE09v/8qfPPbi28ZfeNNN7/7kQ9/2+hc9cn63Tfyy5xy+dOrkP/zFj9L116HviWYxbu5au+nYn4dN2/urX37zyuYREVodNfc98Njb7n38b3ztlx8b94/t7d19/M43f/ijP//GT77hVTe/4iXHZR63t3d/5i2fOHL9bZeefvwbX3HNTTfdtLPX3XXrDb/5ux85eu3md7zhuf/6je/743c+8q+//yVf8pVf+w3f+zPbHST2hhWn1imxxTK2kAKolNMtm1kIUuw+EYsUlheJSM4mIiGwQ5mJHWoJgEtw9+A8jnXqexFOyNnSarPW932X01V4d+FQcDCsNCFWMl2Yuq6uVFWI2zvTUAmAURXzwjUtrrvpyFNP7QjJ6lpVBbl0qessHz7UdLO5WKXBV0Pc7XSv62uipg7TnNpF2tvXu6+JX3LbDb7IvXAFph45FADYynT6TK7ZxUp7duYgynD3UmRQkMGlx2SlPnlu9rYPnJlHbG9vv+jOw1/6smP7F3Ndj9zdciJGIIYhJ8uRG682z4aVuYA7YlapydF1i42vubXLK4/+wYM3Xis3fdWL/vi/vO84sHvzqXAe/Xo9mu6e2L+tlViZPHbiyae3njixF6998sbNa2+sXnp9Xqw8/gcfP9pEUWflRcobr75ucsuJj/3xgxcXH3vOS29c27g7X5hcesfDm1Wgf/qddw9QSeHhQ8pRVzaQLM1EMLNhRjBp0QcX8qKiVrgTo9C0qTc2oSE/A7MxyrSlInnIzmagYXw8UNgchUOyZJqLiCtYiYMYWUYGIMQwd1jR/BiQE4GLu3KgIBAjU8tgYhCTZOhg+NzhS+ifpWGe96ledYPfcPzWL/vW76at52SvgxUt3SrmMDR1QZQ/p5l3BxEcGXDSCOkyYr+Yft+3XZuqW1aOBOsXT53c38516x1lW5hHltxrkCqbJnMbjKkdkFELZ99JmAWLLCLM3vWLGKOqggNQDDckDIuQVYlksrI63dsmSJmfZ3CSoraPYICwWkY2Ek4EdY/Ekcg0BSqhvJUSRTawk1BwM8BEBPBsxhRA6mXl3RkIDDhnt9mCzDJXHqPM9vea8WTeJ47V+spI1ef7rRAmoyiB+6z7+zMHb25MUjfb3t5fWz/SztrRWFZX626uTuDldAUhdqiAOtdmtX7sY6f+5f/53VUt3/9//erktkhdNd6sY4x7e3td206a0Wg0WszmXddNxuPQOEK8dHF6/NgRt6mbtF2Yz1pQqhteWVnZ35kuZvPDhzZBunNhAY8rm9KsxJ19y7k9dLhGDm226d4MSZm5acZ1XTNlQtfCmjjO2XZ3d7uuq0ejrACCUX9061DFsr+7tz/b39raCpW0bdvvt03TrG2tnTt3Tjhurm1cPH+hqqPJaLa/VwU+cvxI0pw0NVXU3DtHstTEpu3owvYld63rul8Yr2JjdW26t5/bvqnqftGPm8nGxsbZnXOmKkQpd0dPHE+W6hAvnD9PbpPJ6nR3Oh6Pm6re3d3eOrS2Mh6t5ESVUC1nL+1yXMsJEgiUCwZYyFEwL9pHZkZGECYqSkQD+ODuQmbmTVVjiQeWiiKqQElHsSKh3pOZeaaaq9UJ7e4vcmaSAAFgqs7OPVEVUIuPxrHv5u4+biZ1qLb3d0lXmNKJYzWMzl7Y2zh8aHe23898fU3WJ+Mzp/Zi0zQj8pQscRhpjKPZdA6hqomX96aXdjqg7vbyFzyTXnXtNbkbg4nQscVEo+iduxdssZDBiMQUOSQsNYXKXHh2kHNC36w2p8+0b/vQ0zMJ+xf2vuDOta96yfWXdrsqBiI3g4OsGERXiI9ztXGuqhei0gcWc4F5xzuHXvs8lclnfveeW195qxw/8bFffN+t166fP/r0fug1XDq0ffhYf2NsZbqSHzrx+Kw+dcuFa489dTw+a1x9wS39/uqp33vg+MpKzDNFZ7Sy8ppDkxMn3v22T89mn3zuC6/fOHxrTcce/+PHj2ElREURHSnN+FokuWhojFyC5mW43bJjd4C9wUTDxEWnNrX1qFK31PVFssQV5OIwEXJ3zWZkIpGGEdtKRI7SGDioPhLIXAlUbJNJiSGvDAgeOp58GJltORd12+TKpUGHCQpzK8PkCSSgYS60u7uppSih72ki1WdOPnzNB9/27C/Z5ObGRCKehMSYXbOEgKIb/zkFxQaR7wAG0KcsMUZuM6ML852dvFFtrqzm/e29SRz3SuNoSTPF0Humitk8uAuQXMwMXvAptmE2BOWaB0tNwatomcpU9VCJF9F2IiIKwkm9TZ1wLMkjEUEzyMjARWBJi1ImEUiWko+9CsAOYjgCuZOqg0WqlFN29ihkSGZmwiALQ3rlZWJ8L0W6k8aTAshlYTQbqzHWDYvESG0Stmo1KNxcPefgOLY+dorap7oaT46OnGRSjYnIukHwhJjdzAFndvPerAnred4fOdJ88BPve+7dz9i6jg8fO9Tva5bk2q6v1LTalDGHK+vNGo9h5IQ+56NH15B74VHb9XW0laON+8jUybC1ueYbk5Q7Nzpy/RZsIPuuTUhk1VoTkxj56KEtuBMh+wKYAUI+rlVS2xPj6OGjhRFWKEZwMbP5YrGytTHZXNOUYLbWjHUyUfgsLTaPHTJDm/Xw9cfNMoQ3Dj
RlwFGSITQBi3Q5jRYn8eGh43tzfnlznL4cmDg3sHqwvWklabGyA5Pp6/uFi/uFgd7XdiNpvNxnFsY9qOQ4xpf39/e7OB6ShDY5YYQiAu5uhM3Ev2ppvpeDuU/ni+9/ibzx/9yEt7B4vLzWUbW4LkqGYSMAkJArBVUyQhgoEXJMyiBFTvTJhIumKFsAM2rYWPa/Ur1MAsBUJHpohTH8fQUcQIo0Mm1qxjsdLNtrOZeINk3Zd/6fdtsb/qB5wLMQ+Z08kcnlgxKm69jrP9BShv8s3Dg3TQLt5/8nGOuZtr0RKpMSw6ytHRCRxtFDxSHEpmagiiqjaAhGSJA2FpeHOgw5XNSjVA1xL+zufv0Uw43rbjNq6G2CaZKffJcpCQSULBg02IjjmCErcaHHFvOGjzSKVTNAIPHgwFXAx0ICTGYOx3JlW3qaMBQKk2D3fL9O41gCP/QNDKDswLE9a99sGtUmnRK2XL7uLnqz7YDUbpJ20ukiO7q5u5u+z4z0TkbrD7j4YC7uhGNTPKvIpqDO0TL2+yHgFAKajqMTLi5NBBpGEcmWsAKKqLuUYMRORgJqg7Jcy0W9QdDJvqkZ5ANAAO5mjBk5lkVXHb21tsV2vVKYbKtM6ncWdtnfKqRVXdIiIyZ3RsSZFjcCQXM3EzRQMEczSTmu+7y+bexV2BCiAGDz6UW3ZfpL0Pf//xTA+pC1e3N3v7CzBhN0ASUTRWh1JE1M3BiQEsMLZdQIcUGRy4Sat+CCE8e/bswenp8UHX9z1Ct8mwODoMiZ4++XDYbFV11nTL5TLn7XbYzufzhlHyGinZTqxmZszMsRGRFHOAfW/yMJSLj8KXvvSlv/SXf+bBa+3N+XubHGNMcZ9GW7tjw8HKOragpUejGLBJjRNYUdMpaqyu3kQVKDuxhgjMEOvkEgDqPLyddwD9yclsNaJiZhugOCpyR6bNoL48xsOT+az73NXF8PTsm/M5xoSA+sUfevvll968uuhzQY5JIVcRwW7fMJxApXXnr0RZ/ESfEqZITpjk3BNJhVFH65rWzEbftvMkkG+3q/FSxDah9YPlQRP3DBoBGC0TdLElKbnIlik1bVQtamJuMbSJIGHX327Wq5svfPH4f/uFf+Pn/5tv/MN/+A8dbLFsiowx4ZgH3qkVRB0RiWMNISLEdtHmPGgZZ20bce/29nazWoM9xZPDxf7h+gY0z5j6iHbvcHl7O1xeXB8fH4P7rO36vm9CVFEkb5om54zIwEGKuCtEBpGmCVCBbGhd065ut96Vw8X9F98+e/ntl+ZpUW8Qqt1XsyBTLEHNG4sAATEqIDOiIVb1HTIj1HO2ABKKuQMS1ahMJ6JiajXJBUhVeddG4wBSKEATwM2b0BzcXJzbgpPC3/+d755db5ddd6ljCjYOQ2FJe6/umfdGSKnt0ma7GbIgwLvPPsxjd5tXWWTII3JwKADg4v2wYh1G9zYFDAo6uJgwGIhFAjbGgDpKFxxyxVXUrgMioilVthXopkmbtr9pb54e6KuXB8FwXiDp0JMsV+18e4/LPpoigHKvztr0QYQUHZMDJsIRNCgaA4Ao98HGAruWiO9YLohYNJhNkphd5ga4u4dPGlbRHbGKJglNtMKS0FzMEJmRxHMth4kCIoqYuzpCyRmRiUiREAV3s28GcDcTBQZTrfNAoppiqrXVAAAC4KiAHCDgLr+shr9MocZg7jgMU91d12oz0wLMCGBSiqoW5hjC9Ht3q0g119RGkCJ65Q4DIITdUsJqNpJvNKPTPLUUjCKVvKl5BDYpWGsvyxERmLJa4IgKTb3y1NtIewgIruCDqjg5MoKbC/IUkgVqO10SuFogV4uOjFzmMX74jQ+phEWXnqwuTxbLeWhy2SIRikUMWyulFDEzJXFghhACsaFZS8kNY9NkFUzh7Oz83unJ/mwu4y3B8nbr+yeHTvmjDz/OveShdPODxTxtt7eqfnBwgjCA5YbRkO/efyJWdxcjQMozTNsXZ/1i/tK//T//l376jz3c3D69fOqpu4c0omUQStw4KJo31I6lNDHtzdo83mz7pwJ5Pp/v7e9v+7ZoqVcb81TsiBkjINUsm51BghDIJW8TcrGuS8fn59812cxnR9S1EWNGatp2e/s8e3n0xTewlO2z8N6z1e/91vNhjb/8i//gc1/41Z/7C3/s9OTB+fmKQ4v4yWnT5MfW2pv7hEMS7+JpKpjTteq+qlNkNA5NM7pKGZYHe9e3+ju//Xjom/MoKIcNxJYvD/Yev/n6wb2TV3ycaRnL6BwDplRKASsRE2sjTWO2dRQCbFsw6fqb0fj2L/3VH/qxn375v/x//qMP3n88W7RKQ5NmUjCEImJISISmkwKCkUbIMYZoUIYxNOnw+ODm6nZcDy/yx/cewf7p6fWFSa+IA3o5WHbMfHl+dXrv2ETqnTJqcbUQWYSyKiNRDK6Scw4hCNyA7FF00YxO+/P5ps8UpdP40beevfz2S4PeAkDRhtEJRiCslqSd+gUUFAhcsMYqIRoxmKGDqGrAOOlMaxyNKhMbQ5biznVSJSJNjExMiFqS49bhViWI5GbuSrrYX/ydL7/z0eXQLZrt4KNu99LyckBfn0ffHLaz57CmXrhoRt2KHIX0/tkFh73r1UVsZxDSkHNsPWDjIV2vb+8xx4zWa+xIEDA26DmoM0FWFYZC1s/MSRjwE7jdemMjOgoz0WYMGwLeRjXCbpiRHkZIo/thHwt0hRJL4RG2qHuIV/NzGp/cp4fBul7WkTKgEWrQ2OZlO2LYbm6JqAbITdM7AACXmt85tZhhQgU4hVF1Evj94MMJ0AwUdjkpiIxGbH0t253QKzPAHc0V4qCluKUQyKraAYFDqFhFIERUdUaajI6wU8UgI6LCbmZrVV5pEyTaTMWIiCi7RZhcsgIADMGUlMRBEN2QhiEThxCYGFgZAJyBiBCddkC0Wr4BAFXbN9XkLWcDVe1xsQY/nMeiq8JSwEYOAZQzGA6DegEACBHBeSziSs4B1+CBqY1dYFUC90r870wF3MTcLBGRCgAIoDMG9ygwYhjBG6dsZjMOl+9t6WJ2fDg7u7k+mS8jx82YA8eso4GrmbqjR0IsKIjQBEyMgJRCBPOu4aLFsXl2dn50uN90Cd0djzdjPz/dU/ZnH70YNtnM2mU3b2bj2DPRYsYu67ZtS/HiRsROaKYMzgZknBFD52XMH31Y/vgf/xP/+l/+mW52cfPifaZmNm8wXwEYpVgMyLetNwK8oXLv4GCz/uiXfvnX33vnxWrFUng2k5dfDn/kZ/65e8efvbq8odCCY70MWBvg4gZI1W9TaukMBspQgBDKjEJDBxslTp2hGJDjxhOUgPNOxxfvvPTqD//6r//+i2fDCCkcNOgHv/M7Nx+8/+Wf/dnX/+hP//HzbRhWYwAKiQrkohgcEiHVESkvRt0C9JFbNAbTmjQEQES7hHc3Igpu6FvycnDYrjYvfut3vr44fvW73/zdo9nBy4/2Bri3KUc3K/7m16+enfzq5z712XZxf+iDibamEUV4bygSE7oO4M5ISK7qyDly4zY7f//9Vx8c/of
/m7/49/67r/3Cf/urHDG1uRZbGBQBEZOb1BgUI6CCRE4hqKqMOYS0t3ew2WyGnJ8+fTqO5WDvfg9JxttAWSUfLoNaenp2ef/+KcgmoFrAIpTzeLS/v15v1zkzM7tBdeJIQh/YIhgDQHbFlvq8WjRLHPXie+cP3lyMoApc3IwbBMAqlt9pEVxRoILTzMxIgEAxUCJi4pILKEQIUCMOILh7SLDKRNi0KGLaxhiUkJHVKIxF29GCN4IkaYTT5Wv/6Lfe+d7NRSRPwDnI/tFeNBQpYtSPG/MQxzJKGNX3w6z3G4nWUVuYxJlDVk4AFNEteJExprnjCoOwkRUFBqXRNHKwAhoAFMkJtbPb/ZtWD9nboKjWGBWEjBqq6CqNdLx6eDsbEHvX23vPj4v3Pa5bX5wdn3seFtukvGYMbU45jAPdzn3uZkorisnFGJgCewkPzw7S9iB8fH5DGHASXdZDZT18lmlx5x0Ot2amkH4SVjldxwATCRF2MNraDkMzbNHQdGo9TyW2U/BSwBSBSIP6FAwNHsnrHLXC3BGd61nB0137Bb8vzkEBrQ0BBGBAdDCzSpszGyeRft0YnOrO4q7m6oAixuQYhBjCXW+HEGvkKxgABCBzwUnpFXZ4D3MOVuR2GNVj4sAYJG9syK0mdy9q0+6ApOYZNAq6uxEAOQMmwoBTnWIW3MG8mKlDhBpqrRC5HVWQDAKBEWoMjvXJF11z83hz+fH5a/dfurnoF00bGGs/umgh4pJHB8yjoGFWg8CBMURCgBiDm8U2jQJIzfnzs5O92cneot/clpS2Q5kd7AWKH37w4bDemFkza5r5bLu6ionnXaeSmXkYhqZpTLR4HxGjzczHElRxbJrm4gnN5qf/wf/6T33pxx6urs9uL8qsXaqNmp1DI6ZsEaEnWmTe5BEePjj42j/9xV/4hd9WO3E8BKKDowSJv/rNzVe++Qv/6v/kT779xZ8+O78lHhiTytDNyabPpB75qjORq3qKwJ18zOv7p3urPhWVEOYiYzBbX390c/X89mbbj/bqm1/80tuf/tvf+Q2Se2gu2i+XS9PDv/5ffONXf/edf/Nf+YlHL/3k0xdjP5YmxrYahRQ1shciu02EAMlBjZKghTobmKJpJvuruQfwSItvfO1bzz4+v12vM1xdXVz6Lf3u7/z+6Z/7PKYnc6L2wXGfD5/fdme/9969e9/+0htfivFko7gti1kjjRUCU1KCuTu6j4iIPjMl8yGl2Wbd4/Dsz/6Ft37ohx/8l3/9n373O+8vD7LnQMgOo/kQsHFDpAJuoYmqbiYpRUcc+tGQ9veXm9vVattf2pW7nh4/6FeLzXrFhDmPy8XcfX119uL09NRVQjCATJRuNrfzxUJWVooCB9USMRpODaoqokPExBwaWt1uj0+WT58/b7tw/Km963EduMFdmSgizEwIqopA6JBNojMACQE6RKlpnBQCqBvqNPlAMtzd4wZaT9g1XRvIgYFx1vc9OLuwRF0eH//9L//jLd62bTteP3342mdWG7ilcbM6dyNx6yU71cLIVLVroxuoIscwqKqbKqUUiZSZy5CpCCm5e3Zn8xiIHKlY8QII5uDAZmxQCtoQR4BJ2H3HbsFaS/q2U/7Ui2Nwenb8nU072DxnusTrk+T+ZL9v+nU75G7bPbh6OOshhlmyppENGwA2WNhBgXhrEHmz3nuKTR9GQQKrbYmaYVujlgPEOqxC2U08a2+I7/aAKTQVJn4bAxAa7cJU1cnAjTiCA0MFL00kYCQOCoFQybDCUQkdCMGIy2Q4Bt7hbqqOeKdSqP9q0+IOkWuOtUP1BtZcdQjODpM7yT1VDxCCC017FRGZAiKrFgCoUApDQPA75RAAOAQzMtfvh+0BILApmdgIYBZi22S5NAQBIYuKYIxg4FaxvxO0VL3KWzEARARCFxcVc0OvSkcnUwdmdwxckAwUnLD46KpgyMBqniLJdbl+//alg3sXF8/bdDiLrF4cQVWBsKias4mDoROqehtCjMiIbRNcNMagMAI1j58+uXe0f9BGGHLkbtPr/PCAmB8/fjz0PYHP57N22d1s14t5CCGY5xAmkIuqgukyxb4USyv0ljEFaM4+Kj/64z/9V/7Kn2rnw9XZUwaadclsYGamaOgcjAFZ5gbDaHz/paPf+Mf/5Dd+2b70w395cbA8Ojwc1qsPP/z2d9/7RjPfj/i5n/+b/xR1WHaLg+UrIcbIh9vhtg3xbiAxnauwhi8rEDEyIop4itHMCI1Dm9eD3JTxunzw7jotjlZrOdrb+/yPvPTOO8+GbZSBiG5iA4eHr7z/7f5//9d+7U//hQ/+5J/5I8NwtF3HbGNDEplUlCmhNarrFJciZLRqwkxMdo323VAe0R0zc3Z/4+3PfuHHfvzJR+v/75d/cbV5/uqbDxyPn3y8OT5w5v5yM9zI2cHBy6L77374/scf//0vfOa11+6/zWnZ58JMYG1EIhZVdZ+Zg+MAnFNIovWW5efPnp3ea/79//Dn/u7P//4v/PyvNjObdd0w3rad9/2WsHWL7kMRIwrVFyYiTRtVdexXs9kCgIYy3N5capGj40cN7G03q4bdZTxadreAZ88uT+6dug+ITmYhhHW/XXSzla0Hk9A0ZSzuEIgR1d1CVRObB6TFYra+ubq/PL74aNPtdd1xHMuAEM08xmjiVQ1BiEw8jiMiC6I7kAFADcRCF6j3MRE6Vnoemqg7KnoxbYNzqD5oN3ABDUTM3LSwHUwVCunVmMOCy7p3uf4X/sWfuD2f/b/+u79lqRHVzI5NVMoOPGQnjgcHBx+fXXQpNk13dbNWdQBoWlYt7i0iI1F1OyLCLk0FCGKwPDFCgBjYQZRxu1R4DqQIbA6OFQuNbihBGkWfj3h0e3y5PwsFbmZ5HW8fXuwh5U27RtlGk6gzGSSnNWJRREUXdAQFyIYBsItWPPj5Mqe0DcuuRayRKLvM0rv5auUKUBUIuEHFMu8sS76bLSEiojqjK4FPcFSjCgdGN9IJGOloNv08u6iTA1X9iVd8q7oxpru5qLvDnfIMGoBJvV6ni7Vya1DcADkgsFVXEpiZBQgOAqRVu4JOgApgTV0KauEfK+eWEZGQ70a+u02ringaD2qo3++9ugE6GnPTeEDG1FSrXuTiIOBG6MBiiuZB0RED4EQLDwz1wjNXVSF1jeaZKZoFRDTMBOAWNBS3kSkgWSkaI4OhiSNiC+G9bz85bJdlzIFnzMWdHdxNVI2QpZgpihoiF5e2TUSArjG2fd8fLPcAYHQ8e3FxcrA4OoiepZTYCy0PToj46bMnt7e3TNDNZ13XXFydL5bLEJwZy6ipaUSEOaqWEIJsqJ3FXseYvF9Jf5X+yl/5V//En3nr+uKj2xuZdZ2DqOQmzksZjNbuiKYIRhhd9fTg+N1vfePLf/fr/8v/1X+cDja349q1ZeOjBwde2tuLoSzascx/+9dv/uSf+Ewu2zTbGy1DCqpSD1kEaIiEjIhmHogAoFqgs4wUQtM0uQyXN18Jdtnt4dl3++2QXvvcYSlnvuU33ny4f9j++q+8H+O+FpK+qF2f7B
9ZWP7t/+r57/3af/vn/uLbb//4j643Tb8ScWOIxEE5p/YojxAadWlAPFBFRu/OpkQViVdrBkDc9E8/87kHMf2J//rn/15e2/GjQ5VNr14kjAKzJL56h5r5/vFnNpv01W/fnj97/7Nv7u8fvrruVUlApBi6xRgRuXfsvagqAxBRkmINN+PKh+3jP/+v/NhnP/fGf/af/cLl9UfzRRpG4Ni6ClFwj2S7Uy8ZoAUKjGimUmzvcGE3ebvJpr3B0+Oj+xSPt9fPEqcybJaLTmB4fvni3vFRCnPD0bREwFLG+Xxu/eDuiuSiHlHcyN2q+KC2+wNojuM4Hi723v36k7d+8kHbWRFljiJi5tVaqKqOysxoWMAdnKDK2tDA3AyrYI6qLA0JEIjMKlXVEAkoEDm4iVkAEMwptGNeR26Y5464OJy/uHp2ME/7h/s/80d/9Ff+7gctz877MwpkoKPrYm/RP3vWLhJxKyLuipwHKUPJhuCGHGBS4yAKBTEAQ3ZkBwMsaFQ7nwCIAmAIwqCG1HdgXKg0BgAo5KGiohxt5P2ApeFVtNm91XHqPex7VDIEA1xs92dDEI4a5Xz/Smz18s3C0PtmzGFopWMURkBTF1gnu+nsMHtoUuUqAznVuheAHIEIPmHln64HcKyL7Peb7ndTaXZ0rDafyYs0gfkJudZYtdaeuBwWgWrUPNrdwQAdg7RTmTvdGJP2sI6nzBUAd68JACCJAxFSBQ1D1c76NFklYAcn9ypUQwdD4FpS1U0CzYFrV8p8N9S1O31G9S1RwPrBOSIBOvv/j63/jLIsu84DwW3OOffe58JlpKnM8g5AFQpAFQASIAwBESJBECIJCRCtRFGGkmakkWkjiVotqldrZnrUvTTTmmGvaWl1j7y4mqToQZAERRYgCiAKtgqFAlBVmZU2IjLci2euOWfvPT/OfZEJtd6KlSby5Y337rt3n32+/RkTQyqI5tp4ch7NiDW20CZmVtCoknmN+TRliwQDQiZUhN6KvZ80GziF7I2c57gAAJ05B+gJtDMHBaNL1gnAuCqvv3CzgiEiLJbN5uZWTPNowoQiQkRdm0yp62KO8kI05yiwY4IuNlVVJFMQOJw2k2ptczQwaaLhIkk1HoOLe7duT0+OAcCVhR+Ew8P99fE4OJ8sSlTPIUah1aIupuiUDYfe7+4utzYf+Jv/4Ifuvb/bufFCCOtl6VWXAOJ86FJDTOx8086qMDZQ4xqbamNo//gXPv74E99zJIfdjYPKFWDWSn14dHLpkXPu0W55As998cp3f+hHKdTT+V6cz4flZmHrwKaryRQDr3Z4BuYBE7GmtCirIkUB4EG19rUvT2fz4+XS3z6gcjSoisntmwcXtkfLnW9iKu69d/3q9UUUMvHOQV3XoZQz58KyHv9vP/vVBx9+/nu//80PP/q6LlZ1Y0kWwO3xcndQbizrwaBYF9wjHQIY9RkMkDlnpkapZk+aCobxwe7egw9ufeQH3/drv/5JvCd2CUEL1SVqB9qcObt5PDt/cHLf+lql5Hfqo70vvfL6h68//uAT89kZ9JmOJgrLGBPhwJEDNTFFC94lAAWuUtKbN67c/9DGf/ff/fj/9r988rnPfXlzm0yTGJBERC8EOSMwX/8GoqaDqli2ulgsRsOJpiZ2sV4s923nzNb5wWStnc3QV6IynoRu2h0cH21vniMvpVlMksBS6sZVeTydUXCcPAGCIFDv2QAGgJjarqgG88UCpJuUk6tfvfW6t99fF03qIGkyIO3LDYsqs0sglG3dycyA1cw0sq28Zk1NbaWvdM6llMeTjGaWQBGYiQjNKAo2TQw4llYH64O47Fjk/Mbw0vbFwuneretoIMZtbLiEnb3l0a22KCux+mQxje1x4bnr0pHOOxVmUAWD1nnK9JCYQBXAnEonJi4gAqkmIC+ZfmhGkJxao1oXbR3qQTPsLxBDQ2CFZOitUXMnpTjASV1WSWJHQhtFLFCL+3fOhe4kSNGVbednoV6qJkQ8GS2bYu7lDEBInFycKw8YaSg0jMH5ImBGfcwAwCHmIsdEK5YLrXAFA0NcDTZz49z3AajeG/RMSkbLmIYAoVkFamiABIZJ76R63NGRnxIQAQGKOhff/KNNKSdbsua2iDNHsHcIAILAiNyPAZQAgNEQOQoCEKIDYlBbEZRdtAhAPjs/Y05Jp9xbnL4MXlXYvOnL7BozspzbAPmi04Jh1kZmDgXUSzEzZaRWyIAVk6kAdaAGpIxZntSPq5E1q8OyF5IEFTNLSAbmyKg3zc+bEmBGiE1tIMNyeHz9oDu2tZFfNPOiqlJsEBy4pIkUUEVEVJKZoZmSo6LwhWM0c8xFqAyB2e/e3iOC7c11SLFL2IoPwxEg7R/sncyWROQLNxwP5ifTycZa4VhT8hxijOhYVTC7/RASUVGGmNrrV6ff9vb3/IW/+MFk1/dvyXC0GWNUAeag1ih25JnAqbhqsGaJjOtW3YVzxW98/Jd3bqYn3rYRKmraIN6izAajtdetPX395S+e7Nx0EAs788lPfOLcRXjg3vtStNlyanD7zPnHpNcS5UKaP24UaxEw73hUQBWixO31dYSt6cn05s5sPNkej2Frg+sZvXZ1F9OFg/2b2+fXbu2fNEvsuoWDygHUqQGtiiGPR5tff235H3/6U69/w5c/+L1vefCRRyfjwWwaP/1b/+b93/Xt95x9+vb+zBdDUVnZpNDpKB4AyI8EozrwWKU23Lh9eP7+ydu/403Pf+MbMdpxO9saozWlFWS+nQxk77A4lmh26MsFUPvsc9/YvX377U8/Le29poIESBZCaVKikWHnycfYILIYEM8RcVSuHe/NQ6V/6a991+/+5gP//J99YjjRYtghVtKiZKiX+qlA3o+2besCaYNtY2vr4+n0qOsSQLvXXT5zz/2j9a2TwwPADrSdjMJ8Fvd298+cGTry5jBJRKS2Xg4H5Vw6MtCEgMrYO55mhITJCbTVuGoXsSx8WvjrLx1cesPWEpNnp0gxipkyezPrkrL1YaMECAbJFMAcoWbLX+uTIQyzio00Z2MjglF2awEgM0VC8h0CM3AnM5Whs/jMk/e9/oGzD9+3vX/r+PqtK4JyMlvASEe+qJdGwJAigBp1585feO3aflFWXddl83E1YKfYt6oYgCGT+jypAKCRqjeK1pmSEogRADtQMpAQF9ViY7kB5jNSktMDWUkpuogCJSO9NpqlVtdml8apcErLIIvyWKCliEohsa+H3cZsWrV0UjhBZQMjrwSCgmyDdnTf7r1+Ka5g3/Njeq+MDMEgUw8d0grrACAwNAuw+gfETAPTXLIzkwR7IrwyAqIlyyMWAFAjBgAyQiPwjNqbkSmsQA+jTgtCMlQzISSj3GIggxE5M+FsBIqQ04sS+pVvuJHjrAInoogRcuQuosGpUCsMmUUs55UCGjEwc9bcnu5U+oYwx+ygERj2wYL950Hg2LT03pp5CMEHOlFB1QQmoKtzwykLsxCcYmuoCqxAgMqo5sxMDHgVO4aIqrLSqytpAjQzQiZFBYuVL3Sejl492lq/UDcnYFSGkKTxH
FLsRJyqiSgaaBLng4BxIO8J1UjNIasZO7d/cGjg7tkozBbofdew84Pgw8HB/mJWJ9PhoBpV5XI+H1dV8Jw16CqR0ADUe4yxA2JCZu/nx/FkVv3oj//I937//ce7r2D0VWWCzvmOMUjsvC/ZUdNGH5hQomh27J4Ug2s3vvZ7z351vPHAlVeff+Dx7fNn14/2b57fPHP58tc+85nfnh40RzuDD3/kY3/ur51/7fK1G9deIgPAZudgvn3+Das9ZU//zxcYIlEwSRoVHJWSxDk2Sl1cXLjv0suXb4LW587g1hnX1Pvztrl9fPv1jz905uGHvvTCq1tn1h0tpPHTgy61KQzXUO1ot5tsbDzy+gfe+LEfLGiP3e7+/uXPfOYrGvX2tcn/8DPPfsd37v/gRz92++CAwyg3RQB9RljO6FNhMSaOEQ58CKxBGxqW1R95+m3H9fTlm7eu777mufEBGFnbL53fOlQ+u+gakGXbTQ0uvvCNvSRfefpNRQgXzKo2HvnQKqiloJpQyXufUmInqsm5SpIMK9+h7uzcfNcHLl16+If+n//DL5wc0/oZjdSUrmy6lplPLfw4P0yRudPUtnFjY206ncUYLcHh/s72mYtUVt1SqmJA0q6N/NHxYlm3g6KkEErjpmnYs5BV7LvYqiHk/kaEON9Q6pnVQGIqy7CYnwzK8eGNg/GgGtwTBEBUGckAJYqaARMrRTIFDAKglrC3fxK1fF8ycPa6sMzAM+sZfah5+gagAuZNxJaddN63PkDbzR9+cP2p199z6fxGVZ757U98erK9tvvVr3MZIrfdsu1CPR6O91p0VKTY1XUbfGmoQJw0J1WALwgAxJTYeyUyIQBHnERNzBQIuRUlyMRu1DzvN1YX59VU6LyLBSEBgCAEAxbGPkaKl2W3dPUGQ3XMjSNhaQJcPX9r+xjD4TmUsmw3dzd3NwZ7o2bjsAgsVqRGnFL0pGVHjaRidFJBR86BQBZBImbvUjQ0NMKqn1iiZagbABhZKPXovFru3oGNAJx6AQGn2aE56w8Q2XkCUcmWW6RgRIJo1JEQIQIjKFmfV6ZJAiMzmhFo3uaSATJxdFF7Tqj1pAQjAKBo+WMWNGaSbFBGNCIBY0QPoDkdDYwAmIyTKqED1mzcw+SjCLhklhmQsLJCM0Mw77LPMBOu6HYAbK2ksgiz2PhiHdScoTSdLJvkhiYmqQ+iICQEQNEmKxstKy9ZAFXUIFvbJ0QgciBGpGaCFtgQgQE4CXewLL0PUHz9pcvni3va7qiNtjZYQ1DvixRr65KRExFElKjOBUB0jMikmgofSAwAvPcni+Vi2Zw/e7HESM4dLhvvx87B7PBms1iokKt8WZbLxaIsfOFd23YuMACgGLGBdD1JgVkNjw6nhOf+i7/9g088sX14cxfMs1fmwrRRBYGmKqrYSQItCzJNlsj5aIKTavvG9U9fu3781rd92ICvv3b50//7L6+dDa3NLlw8/9JXXj26Xcxqets7Xv+93//I5auvvOXt5++/WN587ZtLnR8fHL3ucda+b1+N3DGzZbSL5ojNNFrjghM178Ji3p2799Izz8irX/v6bO92PYXJRnnt5sHxkatgun3f4fbm+mLahhBaq6SoH3/qaXfyjcfecPHRxy+ZSVn5waD+1V/51LVrR8tmrV6mtnPDwfZ7vvcDTz19jkeFW26KdpAHaxlzQwAGIiK3JDPkAlJIST0OnIXl/FbcGF44s4HdiLtNIkq6aBtfNzbBvaPFTmuj1jMST8Zd4uG1G3Lrxq8+87Y3PfTg43E+VC3NauQlU4FJwJgYkDxYMKidbyF5Z8iMh7d3Hrj//N/7+3/uZ/9f/+769etnzg+bpnMcMjJD6JJ2Zuacs7YdVuy8zuu0bOrhuFzO23ahMpua8pmzFxkctDMyZLStzeGt3RPYoGFVQKvM3GhKaoGYvBMREFAFSYpAub6LdUwlmJjG4AdJuq3J+t4r03u2JuxDykYkaozkPLcpKhOJoqEgIqtPKKaJerm79VS9PAU0RGRPRmYAklUuDoHYwDxx7DgJY8AkPrXy4MPnz25undseP/vpb966dXwSOz8sjqbzjtKgCMwtpMIEY8uEQ1NS1LZbtB0JADs0Q2bqs3pAohmiOTDpoiSkgECcDBmCQzbsIopgFEAxSkCNawSUDVfxbpCh044ipDQWagZc1UO0mDgerh9uzHxIZ9qgbFh2HQJdPFi/PRze2JpdWI5bJ01oG+4EySHkeIj54IQGB9g5BwXRKkKTrA9cBgDKFMNsiAicd1dmVgoBgBGCI0R3Z+DJvUWwmSkS8GrQKi2i97m0ARoKOCUSNM4oP4L21gOEQAGyMpPIiHrJJxmieikQBTJBwlx2zzYzKkrIVl0ZvWY1EkVzUGS8CIAMCoRT/j04QjQx7IMU1cQ7VPHQmyTcMV0wM2MlcCYZ93cZ72Akcq4qnIhU5RgUBGnpl0gT1KSGQETmKKmBdSkaQGhRCmdsnQgTSjJPrhVRjGjEgJoEkVVyKqehausjJkbtnPNV4W59eXdNJzJIi8N4Zmsje+ODgqlXdCklEzQBM44kYDYIpWkqAmuy4IORnTTt3sH8nnObPiyJiqNFh8XADYbTg8OT42VSDYNibX28XEyLCryHpG1wARTMxHcEBURKgM4Je7TbB0ebm2/66b/z4arEw1vXQ0HsQtM0wYGkDkgc+2QdV6igAuLYiwqajSeja9e+9NnPXZmMH906M3zk0fvpHW9ocPDKSy/94af/4JvPv7y+vr62Rc3eHkOYHS/T3E662XA0GG5sS9p6/NzrYjdwBQPlHb8iQF6XFSgYgQATSS9mNrTEqKWk4XD45FveOL99/cqVV+vWL5uRxvK5L+zeszO4dC+/7oGt51+4NV8un3zd2vd8aOP5Tw0//anPXd/Zm2zyK69c3rneMW933YU2Lc5snvFe23p++dXPOj+aHm5sb29MJuthMqZQgjkWj2bLdnmyaAsqmMuua70jAp+kcWXheG33xheqC68brdlD5tQS0Nqrl3fvuffSg/eUe1+//NwXd5a83iWlsyVPQE6kWFz4/V95fu8tr7zvvd9bn4RWxZgUPBQiMXmkpEosQE5TUCAPTaeDQTG+fXRzNB799N//8f/P//QLX/nKN7bPoBiJJURiQ8deAFWMiqJuY2A39tQmkbodDoeiixTd/PioJNrcPDO3KnXoMGLXnD072dndx61zRVGwxtDFABS7VFblYl5z8DGpEjlkAEhd8o4NBAkICb22XRKDYuj2nj84/6b7E6ZAyZJ25EE7p6CWsIfQVVRXsDFIbgNXvh5AJIYqKoqKTIJBHXtURImpKkEtkTLbEmjd1AfT45Pmvnesv/iVva9evnLj1pESSxc7aSfIRNR07XoFw27Y4lTb5drm5rSuGRwX1EoCZLDoGBuCNcWUpGRfNLSwOSI5RFYfISanDl1MwMYWJYSQKJJJMqgH1Po6dBNvsiArEoKttzYrY7EIXZeARLbUl52fVnQysfHcEOJ4Nul00XAiC2xJ3PrcHY7qvUHivfXD9frcsB6KKyKCjzBbmx5u
3rx445zzrup3uNZ3rJmky8iITCvEps+5VqFilYm6epgZAdztnLRig+W2vsoHt94mPdsDGfcmwwigBr3frwGtEnDIzFxfYUXBmDL+9i0W84B6SroHIO4rvmhOeMFTJk8ep5qZMRGa5XBGydLm3nzq1GVBAWBF6MwrEiCQ3oWlMlqnxIE70aIq8wuSprMkHCYptmggWqsBuqARmX2yeDpIyBv3HGBKyKe2sf1IECCTUEPnCSPAsHBtu9s2R93WZHB0PB2NRnkgxphpWCAiMSkidjGyd+Q4BK8SC+dSE6uqUlQAd3z79vntjcJ7BFgm9d6jK2bH0/l8bgihCsPhcDmdlpUvC1aJaGQgaMZEMah33roSGKgqr147essb3/83/tb3Hd9+ZVlTEQaqSUSKwifpgi+IVgRPNVBzPqQuFt4TltPpa7//ya8+/sgH53NBldlsWlZcjNu3v+P1916Y/NovfaKNSuRMoGlOkNkXBTpWgK3tbSLOWH/U1M/h4c4FSQDEedqWFW2ASKqG6DrVtY1wcFiH7fUL1Vlp4etfuzE/rqHYurwzO+yOgs3aadxe27z92tH/72d/21zo0vjVr80xUJPWjHA+j5MhOz+qG3VVLEd4bU9e+OpN6V4ajfzasBoM3XB9tn12raq2traKRx9+7Pzm6w6WR0zmnBOJjj0Dz+fzhx9++IVf+/Ta+MWCJ+VgY7E8KYpw8f5t54rpnCcPX3rXubWXXjq6sdMtd4+30+bR9DC6sDG49PJzh0e7/+57vv+7nZ5vms5THVPhuQRNZGpaIQTRpfdtTCMu2pSWAxrZAhb+lb/xX//gP/sXv/W7v/H5c2fJugTcJhKEkjBBFwVCH1JKVJa+7WJXL8eT4Xy27Np6tpgr8+bm5nQaU0rsg6Z4ZnNrsZgXfoyICB4AmKHrutFotFjUqsm5IJJMzXsvoCrqnEsiAOaCzxvNNtLeK/v3PnF21nRUsBeJiRWSy6S0nFVskLOVc01R1cyHNsxe4ojUUz5UVQBRc3kBBI6CxCgKiKiq3vF0vjia189/7SUOsOym5y49dHzlWkxkhuSsi0Yjl9olbTh1cHQ0jSmZZ1PhTLbNJHpTEFWwZUqt2IBNEhiX0RZE4AElNiFgm4w9mCmxUzAlvxg1Lc8mbhsUB+qSdaIHRYkxDYaSEre+5c29dTFqR22HR43jMo2HzWDcFpWOQaHROJnBYn00HR4rTrSYtf54uDwLah7FKJjnZATIblCtwWrEiXcTso17UmD2QDgVLq2iVlcR0qc1nlcQKNwxxAEF4lO2DfWT7t4ieEUmUTjlsCudTlMBelZiX8czNwIVgXuiDKiZILICmd2p8WIGqLT6hqL6vAL1nJyc3gMAkFaD4ryVxjtv5rRW9Jz6XO7zspGlUqA0GpVtE6vNoaESQdd1IErKYJjFoQCW+VIAIGJJTBWIMqcIAICZ8dTaN7Ok4dSEh4OpWgUcB1pcfuHq5mRY1w0RDcqq7WoAUNOuS2CoqsBOVF1VdClWzqumQIQGZaiY0cDt3DoYlNXQO0YA9RFi8GW9bA/291WVGdfW1pqmKQI7Rk2ipp78KjUFOksQgYiZq+s3Dt/5zvf/5b/8PdP9lwEASRETISA6ACA2RSEMZh2YsDFTAUCFIwA5szH4V//2c+PRQ6JQjJaTzTExOgdt29bT2/fce/7h11361LOf3aZLZP4Njz3Rxg6IFLCNnfceEVUtinBWoAPlT2S1NAKAGJopENHpms6IgK6qqq0tZ2bbG2vXr94Mrnjzm56851FeNNPDg5OD3aUSH57MWgFfVD7Q1tpkb/ewnlrpNgobDifh5u7VEHR94tJMy5Hcd3Y4q8p6uWZQNtq183R8PHjt1S7q4XR6cPHCF//Wf/kXynLStDGnancSg6M2tVtnth688P5Xv/579z50HLkdDs7FWBIOnbMuVcs6eExPvWH0hjfOYntwcGO23KqOphLb2fbm1tHO4pd+/te//098YG34wOJY0XeoJQB7zwocpfXsVCrgBAykhEqAIFLv7Lz0oz/83o3x8Ff+7WfXt8x4aTAEUOcQ1SmxmSETIUVNwbsuxdQsRqPxsXSdpHhyVA0G48lkdnIsgqWnZNIynJycrK+vE3kiUBGHGFNbVcV8LpmUTMSqKnmGp5KtNB07EU1mZdhY3t6v9wpe8x0IGZRcdIikCS1b6fV7aehJMsDZWbofCGeLfcvGUIqAqEp9m6QKnp1KyokZKbWDwcQPJ5/8/ecW0ty6fZCAXn7lSh27alQJxC4hByaAQMW0aztjUay7tvJlPqTGlNDcAL1nRo+oHlQtqQIF6dKcFBQGnbTI5ayLxBDYSVRIxt6BpLpwXUh6kpK5EoIxIiSIZDTvhEzWl2snArfWT9ZVeenM2IfGr9U2ii1ijSAh+Yf3LhCsH42/CNguybd+YeHEdMymoqhOQdmBd6PhYFXKbEU8yFU7lza7q5r3Au9++NlX9hU73lYsGvqWEpllhKvFlld9MWIfVkanxT3TMU9dHmVV5vphQMpVku4c2ATyfKB37KOMY61KMJvZaUp9X+cxM3AhF4XTCwdAT1/1yimKVisQgOgqAF4VwWUUIPLauGrbdq0sRVtgEhFAFm2IUaKxBUXoohBR1NbQ5bWlV94CGSgiARiZYc8p7bcjiKisyRTJDwfw6ud2h8Wos65t3fraSEQy1JhiDoXuR8FJkve+LIOqloVnAETLlX338IAcbp1ZJ40AlAz8aNTU3Xw+tyTsaX1zrW1rBihKrykCEaETyanWoAqeGBEdu1s3bn3vH/3In/nz37F36wWWylzpnEqaO+/BWBSDD6lrLcfTrfRiYJQgTYbV7z37Gzev6YWLfrq8GuXooUfeJKIqUhRlTHB82Gysb62vr8/n9etef/+le9e6ps2kvaRkZiklRPTOmSqgKfamFr1RVAa7NadXE6Jhpr4ikImQ29rsbr32h6nFt7zlmQuXtj/77Gd2b/irrx4spxIGY1eVEbRygBjRmuli/sYn33r/PY8/cP/4zFYI1ebzL77y8Y9/+ZWvf2MwQLbx7PhkbWNtekJ13J0MJtoRUiKsSl+uX7gUm+nhgbv34TKmCKDOua7VqGZmR0dH73zPm278672bV1947Ak3XywH5ThA1Ogszay0Wjrvh9B654tLjylis38LvvTcwobF2tZgdkS//nP//oM/+M7R5qOzaXBlNIMugeGSKJgMEKXy2jTILiQUM1AZDx1Pb9/4yA+87ezG1v/3n/z8cLDmODnHdb0gGhBASqnwQUxExHsaDorFvO5is76+fnBwwAi7u7vnz58vq+FiPi+I27oZDavZfHl8dLK9fW65mFFmmMfIHkejwfHxyaAoJSYUFjXnXCcpsAMAVXXOQUpmNiyqWy8f3PvMfRFrIEq6RKyI8pxPM+oGq6gm6CWOvaMJQ7+JN1MBUzaCFe3aUFXNQQboiJwlCWWxf7i4cetmMfAny7ZutDMfBoNZswyFM8NsozAOxX7dNZ16V7ADBQvOedGURBGMgFFMwEidoZADWk/aqFCwJRVYizFHS5CT0r1TQrF
kZNIYTsvpPTgHmkRYioFjlg4Qc6C07m1MF+7GoHXKEwAok28D7K516biliB5YCYdtcX5WzIdjtqZzYhAZnCIDdGhgJK6FQr0bDcKqWKue9qqYSfe5ttppGwtGhH3KDGQf9xWCAZBWhZLursJGAgCZMomrT2WFb6zkUiuqOxoBxjwiyyvz6piAPWx+ipwrWG+wppkTCT12ZAiKaunuMB1dXQGnvAroYRyAPFVfdfo9R2hlBkUGYmY5/C9ZwhyPCeobXBuWGrX0pUhCxM4koRFI3+5jfteIQABOwZJlpg0QIBKhmiGSKRBQ5qKaWU8CArAOaRCK5cmtdrHXnb+nOp5pURQAkE0uJZmoAhIytU3jiTEBsnn2gOqJzIyZuAy3b09jZ2fPTERa56qk6IrQRjk5mS8XixDCcFKl2EqM1WCIqOSYiAh6C2UjU1UvJQZ35eqNH/7Yn/ron3zzzrVvADguwAglgeOxSkKK3rMIMIcEERlIe7KEgKmmajD5vd97fnPjqfsevPD8C88NByWCBVYUH1NrprHDJ974yJe/+Nw02ZNvvL8IipaDlaHwPlM7KF+S+RrrpQmsOXYM0JQBIIdW9xcbmaigxOPF9S988Wuzm/XXvnKj2rp69frRwxfffrSY7++FjYkfjMCV7WIZ58dJO0hYTPdv/5kfGn/Hu0cvv1yfzE6gfeXNT51781ve99KLD33q91/96pdvHU3TA4/Z3/2Zn9i9qjtHUwVpE7gQR2W1sTHeOsuzxXI+nyOWaqZqzjkkI/KitmhufPCPveuXfnHx0pev3//I9Hg6q8LIOe9x0mry4aKa4xCXHWlzEwXOnm8uPeg+/eyt7e2twsfZweBf/NPf+e4P1w8/8vT+/oJ9SRwAWLQx6ggHqU1F8EnFE4sRGndJEd2Na1fe/u6zxfCH/+nPfoLZTA+c82bOLHrvk0YCdI5EIjOPR+XRvFPCzfWNg4MDLujWzZsXL14cDMfdbFoUpUg3GY/3D6cnJyeDqugaMYnB+S7GoqBhWegqH4IyedoMXVblGyI64jbOQih0WexdPjzz8MZcF4bqoBDIieWIYGqY7wxDIDBVMSBiMjBJAnkHfJc3yaqLMgA0EUJLKZmoI2Z0dQsHh9odzNEZojVtdzKLYTg0axSSJTMyZx2DT9Km2AzLohUxIBN1jhUVSZREWqMCG4mteGtbLhqj4HSQJAUu0TrvjESIKRkncwAWOHh2+8M2sQQtI3ScCkWHgAr1wKj1Xc31HNKsXA6Rz55MitbPh3I4aQe12MIrOoCklgytc4mEAVoUD2kCVJtVyWnLTdE6NucGlUfs4zhWXEDOLmCn3e4pMx0A0KRHb/qIj764o4VVI7wC03v7vPYOLHNqxbWyBMmNVi62mY+IePdxcjMrZobO7kSk3oF9+qX8tHPPaSNKdrf3Wt8T59+0L+7UM+77d3HKp7NvZUNqHxWCACAQkHoTwsQ6HhcoOBoMTRMiptQpCHOR2pYcxNgkcMwcExBVIrO8ucmVnVdHBlQ0WLEtxcAAs1kaOUou4isvHN6zvX0yXTgMa4OijksiSikvpSiSMumIiEJwzjNoqooixuicK8rBvGlOFstzZ7a8QzRNahxKIzvZP1os6qSyNhgTaNssJ5MJaN8axyhElHOsSNFxiTC8/NrlH/mxn/zIn3j6xmtf91wSOQgEKSEpcwABM0Ygs5a9aspM2TxYB41pPBldfvW1p9/87um8ffHFl6bT2Xve/UzqlkVJhl47YALn5MrL31wupm94/TMqOB6eadNiBZr16UKmmlJyzmU1W+7vKAOGKxkaEaeU+g4E0EDOnn/wN//587/8y1cffOj1jz/2zFNvfnyyjg89umnJnv3UV3//d1946YWdYWXDgShQSgHRNibnfuZv/dt3vPu+j/7Yd9xz4d758b17t2YA7aMPn3394xf39xa//1vXPv6rf/DXn/+7f/vv/8B3vfvp3YNOwCN4S4uUjpa3i1X+ABJxvkjM1EwBOMY4GMw/+rE/9qu/8B8/97tXL9y72Dhz29RVw9vkq0JQhZNqoKrCYWe6e7vbvqeajGxxUpzQtCw2gnvg13/huQ999OixRz64s9dwSIQBYUAcASKEIklkUzBAgsiikJC5CGu3rh0885aza//FR//7//5fuZLLEmPTkifvXJ0Sub4F67rOez8aFPNZPRiMttY3ZtMTRNzf3z9/4QIOhrFbgggYrq+Nb+8fBr9dFBV0FlNyjtu2rQbVctGs0hFIRJxnEzVTZmeizBw81E0aDAaHNw6r0cBvsUChGtUwW/gb9apCy5RqPaXw9W0dQG8nlW9yA0BVRnBEROAdJUFVzXpmRJuMN3b36/HmYHawN6zKxaIlxyl1qsn7INospTFl7xyaA0wA4BELcnOp2QEoEoH3aA16QhEamj218cab+4vb/vIy1tIFgiYVWDrwVC27Ogkg82S9WMxraU29Ww6kmCbCNOCi1aU6Q+MOA8dAOlxOUjc93ppunVuu+chO/SDKIEqhUck584rso1TN+my4KyCNp9bXBJ3GyWK4XLrFurKhd0Wo7C5LdADqjXhOu+q7EHYA0JXZKa4ePe6Jd+Cx1XhQAUC1Z9Se1lkzY8Acp5eB7lOOimneZN9pwwHAzOVFH/oIU+nDcQHBHFLKwti8IyczRVAQLrxZn4N69xsxJUNVMlRBA0YEI8W7TgAoYg7y4gzjMGB/nDyoNyHAjqWoCABG1RCTOnQaE+dYVlUgFMvAUW7e+5PpGU0MEVTVRMXAr+QFaH2Xi0gISMbjQfnN514bupGQdjWsb7hoXT5OSgmZRDWJkRkyJZWiKlQTM0lKRQiAaAa3dg42RiMPOnAVu7DsEgd/dHQUl41qGq+NXOHq2fTM+pqqKhkaETlGBLIs9XUuEPrXrl3+sR/+iR/842+6fuVrlR8zAbsU28RMIZBKTcyIhYoye7TOsUdDNAMkAHCOSOyFLz0v0Q3K6uSk/dD3fPd4KGAiGBUOEavUSeng5tXb2vn7771n48w4FNTOhaiPQsTMzkZkOt1OZsn5ap3P1kCAWU9mYD0SSO5wevyu97/t3d/1rrWNMfEyOL+c1Yf7t0jlPe++/53vev2LXz743d/8wjde+iYP2lA1A5w0Oj1376Of+8zyK1/4+W9/17l3vPuph1/3COL4eB8X8/3JZPLRn3jyQx978td++aWf/umff/93fvljP/yR6WFnRi50aENTBGoRWEQNjYhNjRGVCIwQ1mfH86K8+SM/8Z0vffX65557/iuf30N/MDqTCl5sri3Xz1Rc2Xw6paRHR9O4PHPt2swV4+GGzI7uub1zvHF2Gmj7l/7lC+//kL3jPX90d5c1RbPEWHZxaUUMVKQ2MkO2wXaOzUQSjMbVjet79z98/u/8zI/8g//2n6CFqtQYW7FIp1goEynFGH1ROk91XQ+Hw7Is62YRm8Xh0f7axpa2bfCDul4A2/rG+OjoYHvrbFEUIgKARNh1XVEUdV1DHiz1PIKscDZDEFPCACAxtmuDycHVo3tGFzpKSTp2QVek51MJDgAYCiEDoGgObi
UFsL4FJABgJRTL7t+rThLFwBdB5jMg61JUo0W9NMWtjbNHyz1KjfOGaqaefLdMqSodLEkS+YLjIo6LMBwMDuraEFDMEbCnpMBJmrkNi+YDz7zh5muv/8QrP7d9YeDo0QrxyuH1eftaI1fOjR4+O3rd9Hi+d+vq5MzRsN2oti61JyizAOISOtEOidjCkrqN1m/Wo4ONQUPITUneGbMpr80mFE3NoyVFbD1MGn/u8CEJrcCBBweGBAOj2FapwWVlFRk7cj5vnI1WPreKACv+yh3soq96iVxvV209em6ZLAmnHbdmZVO+Spw6OrWj6bnegIiJEhiS4uo+zQ04KkXE/odqHy1nAAjkVmNIMkholGnsBj2nOJtpM6CiGrrc2Oamj4xklaSM7AxVSVCBDQgYkAWAcYXxUS9PzfiUM8foMnsWXe+JxogJW/SkqoUPJhENVCIRZQOvJAAWEFhUkUShTqkTEVQ1VUZDJHbOEZrmODdF5FXsECIil+7ooJntp/Nn1vcOTzYmYwaqbc7GkvozH2Nk8vlEYSAhJUemys4Fdgp4+/bt4IfjUVVAtC51icpBmM6ms9kCYqrKYjgeL2ZHa6MBGqQuchH6xFLM21r13ovA3s7OD/3JH/noR99y87VvrhcDgWiAjjziQpDzzgkxOYeqisqgJbuEQKKMhCnFtfHoxpVrh7ePp7er8Ya9731PjieaYsuMSSuhLkC2SZm/9NKXHn30MfIn9z/wxPH8lqfCekv9nO0lAODutAtGcIq/5U2YgoEJMrMYIKIAMFHbzYfrFVhny70YaY5CnpwWGOT27aPgjp56Zu1Nb/vAK998++/+5le+/sKVm4uDquKyPL54/1rTPPqpZ29++lO/cvH8mXe889Kb3/66+x64bxHbnf1WFf7kjzzxJ37wnf/ul37vX/6L3/7wh78jlJOuK5CAOLKZADsXorSYiRamaAiEItEHlRRu3nrl3kdGD77+vbd25i+9eP2FF6+2hwfzcPj55fWz62ei0OXri3c89fYLF2R6JNf2dnde2fuzf/q7zm1t/ZOf/XTYmG26h//dv3rplVenH/jwtw3LDe2GXduVPkRC04iOBQuxRC6SmEUHLnapDaO1nYODixerv/t3fur/8X//+WVzUgVWsxBC09XOORExBGaO0g0GVdPJYrGYDAdg0sRuPj8x589unVkcHDOzWgyFi7HYPzg6d3YzhNB2nZnlZIcQQtdEBWMiEQEwRJdUnHOxjY4HRWjbOoXKQY23XztZuzSIRCZmALziHhhCthtARJWsXWVEFFVDZGbBU50KkhEZmpmASGJg7FIUQHTctvVgPOyk8zx2fkFYizYiVlBgNtXOTNWRL3066dpGFCGKBe9ZQcyI2BEyEnA/3/Eb5de+efIfPvPJ73vPH/2PL65/5JmPfvGLryJ+84+86wd/43f+YHTPI+9884d2Xt7Zfsx/9criP77y8R/94Ifnx1tfOvzd5uaCILSqwRGoV6pLIOH6/O1yGe5ZFoexXBiMUM23/p6DkakTFwxmIUERHbNtzGEx3/CRt2dnh82a+ZMFkDgFbUoZkILzTECYhfXJFBCBFQgLcb0vCuaCqgBMiGzagyorpy1iRzkx1IwNsh7JTgcdnsnATPpN0SqBNSQHaODubpgBADiVp100cQYuzEyIHKitcuU9AEPOUaaSTFezloyzsCHlgQzlPj777PbzW7BsBcqnexQiQtBISNZj7blOZFjJ0AwcO3Q5mMcBEdoQy4DUdLQ5jEQKyWkbLQk78sQoQkhLlYSggMEIxDn0SRKTOPKqqhZFyQFXCBKDuhZcxLYsgmttMUkbX3rxq2uTjXrZeQDypqYhFRHaJIroulodDzrpVKMvHHtvIqULmjoypeAPj08WjZ7fZEJUGsxiGo6qOunhwTzVEQZhc320nB2VzgFyRDDPYgnJAbNYAjQyYR68duX2Rz72w3/8o2+6fuM1Hwp0BG1fx9kPTVtTJPaEqIqsRKBIClJG34gDSuoAEe1rr3zl1s7+2UuPvPc736G0nDe1cywxEXRFxBZ2h+XWC1/+xuy4nUzO3v/wQ12cehsKJudYRNlyAyDOOTXoLRC0F7Jk+QMAsPos/0qrzoBRQRJzJbUoGDovLgZiMFKMTij4EgAP92qD2cULo7/0V951+/bjn/mPh1/4/PPXru6czJbsj7cvbqhuHS3SL/78td/8jWuX7nff9o5Lb37LE2tb9x0fHLbx4Ac/9tTR4SPz+dx5JkygCOTEkiGqxHwGEiQA71yl0oIPzgewVKIzSW3a2xi1T78ZX//4U3/w6a985lO7P/pDP/7Qww/87M/+07/y13/i/scvdbcO3/19k1ev7L7wB7f/93/1mx/702/+u//wB/7ZP/nsrdeub5+/98orx7/0bz79hjdtve4Nry+2HpwtHDQLRnJkIgvPPiZAdMoAwIFFZD4O5f7+8b2PnP0//fUP/4//t19zMC0HElPrIYARAZDTJDJmihYtEJC7vTwZjSc0W3Z1Yp0uC+cnIc5GWDtZztbHw53mYLaMZRm8N4viiOq2LaqxdFGNVIGMHSEKqiXLvRVpUgCGppXRYG16Mh3Mg59wTJg0IhMioyBD9hc0QGbm2HWM4H1IqXFMzWJOWAUoPBCaJoalxPWiZCEKYilJs4xyjIgEvmItuQJuTctBAQXD9uRc3R20hmhaumFjC8NizBNndRu7UaBFrAXMM0cGFETr2CdFQA+zg+U733bhvW//0C8//7/c0mdP5s8wpp1p1c6XXOy96w3v+ve/97ufvv3L5zbaP/9d/+ClK+cXs6NvvHZQji9CdYVqDalzDjvo0Ap0ktBRgkl0x2NZuHp9PgbWzsLBWsLmxnZ9kWEjAbLHDmaDpnzw2iNOEvqYwqGi42iNP3Gus7hp0Tv2LpdFot47pYeGEVY8s34mtko67akj/UZ4NR0lvEOT+dZ/5dNqmX+jvoP+T7mVfS/m+5Em9Cm3uS8kRs60COjjS0gNCAgsAeJdW/N+cCpEkBlAdmfYksEWs560Q33LyUCrOJ38LKBTUEhBEKCfg/bPUTSAQgHNgSt8aHFJrFETISphNiBVEW+kyC1ZKxZtZR4GEA0AiQE9gndRhNC1hMxUUJFSkvW1cOtr+10NGxvV0fRobW1MhLGLpKaESC62gsipi44Q2AVyqsaAKXZVUTjnUpdOjo43tjaRc1C4DkejGOPh0dRAkHFjMk5d54nKwpuIpjwnIXOAljJWG8rRlSt7H/q+D/7oj7zn1q2vM+f8V3WFyxLHpJJTOrF3d+ntRwmB/dKDR20NXDUY7+99/bnPfobhvscePCPdDHE4GA5iqsFQcJEkug6b7vrvPfuFk7h26fWPj9c263krriuY27ZzHJCYCMAkdpEYBJgyhEVo0DulZGsKBLA+PswAAY0BEUDIgQmqmnNF6sR7DwZRl0w+UzCcL+eLer6oy7L60Acf/eAHnrxxY/Glz7/84ldfuXFzp+0WzvFg44yjePVq+8o3bv7Cz7385NOTd733yccfe2J6sg/ox+OBaocZjkIirAwRICGaJAw+i
NaabjO3XT07vj33bgjmd2/v3Lp1azy6NBneR3CwtTb+y/+X9196cPGZZ3/ub/5Xf3yyOdzbOTCv9XTvwqXhgz+2/kc//Kb/8PvPii3/8l//3s9+9rXf/LXnlktAjJ/53YOvPPfrT7xt+83PvDMMHjg8mIYwFoEUjR0bKDtrlhoKD8QpQhEG169ffd2Tj//1//qD//B//NcQCsUOXSQsDVzU1rnBMjaIREDj4KCL2ixGk8H+/qEq7+8dnNk+5wtPIHHBXZe2xpODk2kIGwAYfNl1XXAuNnVVVXXdgiiC5XmDL1zqEnvuUmRPvvLdMqXUFT4c7h6cH5ytLSIAGUhM7F0yU5GCuOk6KgrHjEDLph4MBjt7u+9973t12l5/8XPF1v1oEQA8U7IEjhwCIMXUs7aIaDgceO+9pyTd2mQwGg+qtbV270BUPDH3wyzzbbSFRMPosPBuYzI+nC/bFDFZcN5734jGtisH+MqNk//pE//0oIhnaFDYoGtf+45n7tu7dfTClRfe857HquEFmz64O30J08bIu1k3v3Dvo5fti1i9VhxvdDDGBI46gYZjWTNVaihutAwcB04HrFJX7f7aAbvj84uLKIyWEsVExUixMEjUoQ0iY2GzxNq447JGTgkAHPMdk6yeMbKyRQfIIWbZLbIvdiyMiKc+wKdFmRFPvcBghcCsCjGsYsnuKvMrnMTw9M/ZmN2tnnjqF587MlbSuwQrgNkcXjNXkk9XCDQBwMTcb9NWS06O081h8LYqtf1SRJxNxzLvHvEOrx9XhR5xZXJpTAaI0mq9RMPCSxeJsY2dIOQ4D+mVDiuZFlqTmTZ5zoxgAETkwEwScUEIpAzCSeqyKOI8Xn1l9/zW+fnxvAyFY1RLBpLQcu525n4REZgUzqGBZ4ckjkHAylBcvXp9NKwGwTkPhuBccM4dHR4tljNGGo5KMoUUy0FwhCkJoWNyolGNTGqHxXAyvHx59s7v+M6f/LPv373+EjEUnsHUsnDEEhLlCQGAZtIhYj6BJAAm5EBKX03bdP4e98nf+WK3HL3tnc8s9HDerhdlzbFA6CQuZ8d1107PbZe/9duf3701+ot/9f/8zDObt65eZvIcWocT56q2bYkgpsTMRVF2XQcEiEgrCznJkaZ2ennZqa7OEEHJyBCQmVRVpCtK37ZzM2NmJMsK68zPE9Gu647aVqTdOFf8wI88/uH0xO5ufeXK4de+enlnZ2/v5hxQXWjr5fB3Pz777V//+GNPPPvn/tKPbayf7brYD1QUESmZMruUOmZGKEzkYP/V6fHLjPXR7fr4+BjRx8gp6bl7th57BCcbe6++fGXjTPG1F2fPPXd8dnt8a/fZK1dOXBirq7VrN9Yf0PUHzNa/8wPPHB7t1t3eW94+eeSx7/rkr3/tGy+8vHt47dzWQ3/4a/YHH//Vd3zXW9/01NNd1zJ7AUVCBOhSV46GqatVwTlnyqNq89aNq0+88eKf+6mP/M//+Bc3th15kQbIaUCOsSOmjFZKF0vHi7aLbbe2tracTi3y9Ohk6/wZxcSptK4rGYuSj46Ozm2fX9Zz51ySjphBknOUJCGSihBxNkZXNQOMKszMnDF63za62Fvy+VK7xEAGKiIKRkYmwMaxjhQI0UJwTVdP1tf/8LnnZJkeOb9+HBclU0kEagAWAYIRAnVdzHZSqe1CCMzoEQDToHIM2ERxHlDNooEn6JJVVLMAEUUbK6MQSIIUg0fpqdtqZiGUXd216HWYgre4WLgNe/H2V37/41/8yz/w9586+p5f/Q+/+H3v+BOXtn9sfaPYOzl54eDZ91z8qU///qt/+NpvvK2wLT0TGZnbrhMNwAJANUNVtJtrnQGE29vX16Zl4gmBXxYo3II1jFZGjuTMNEENDjBzo4WPJ8vZqD67v07o0Pu+uJ8W5Tvlt0djICfwMbJAdtO/M746BdMzE8YAAFhXnTibAjCuKrcRkvXPJIME3zLEzCtBbv5OS/7pqpOfQegAzXjlt5edfYkUCCHnZvch9ARqjGaZdgt4CriDELKhKhKZ3hEtUdYoGYAa9vrmftEiT9mfs4+jAkAgo8Jjk2IE9IPCOnNkIkLkSCVbTYpzqhZTRKVKSVUNWSGjiUaUzTPNqQfEGH2BgAhM1agovvL5lyseojAahNKZqYqIKRFZxEyoh36bZeRYJHrMlhdWFOX+4VFSOTPZBExEpQr4qpzNFk3TmUAxLEIVuqb2gTxTko4ImLH/+WBIg1DpK984eOqpt//1v/nH93a/RsjOecuen+xBMwC+Wrkw48mwktoiGTCticykxUFVHBzsfeLjL377Wz8yne74QswOVMrZSd0txVFkOhoNm9/85Jf2p+f/r//wpwbl1d/6xd9BPj9fHDUnx5vn+Omn315Vo6SpCCwJUkrOW9Q7156tJGy5awcAMMFevJovZvYe2jYyOxUMYdg10fHYe9/Gk65NzhUGQOjAIHDRdclViOKW826+2FGMZVU++Za1t377u6GF+Xy5WMj+QTc96ubzedMuRMTxEMxLSiEEIBWI2QFFBMgHAAfmFGQ82MZu2SyO3vymx77y5Rc3N4blsDs43NFol1965WS+W1Vbw0kxcDxfDObQXbdbTbMcj5vNdYxLfOXal3H44hueeu9svlkNJl2S3VvtaOw/8qNP37ryxsuv3HriqXvDwF+/NhsODxy6Ni3AQShcFxMiM5UiCZholXuGqax8cev63ru+496T2x/4uZ/75a3tQUy1Zy8SmSg7cQNjEvFFOWA/O6nX1iaxWnZLreeLxdQNJuPoBJTq5mQwqKbdfD6fV4OibZfMqAJJYnAB2WUuFhKl1HnvNSXngqolS845TbHrulE5me3Px5sBgZOKUY9uG0I0YB9EYr6DknSAjGbj8bjWRUwtGWEUF0oGKZQIiNBlAyJmj5hSSiH4ovCFwwhalN6FcmdnR3EWCnRcODAxNNQx0bJz5mEJijFuYa+a6xtP76IkVfYYYqQiETPIIPy/f+Uf6UiXE/sXz/5MUd232177F7//Pz+69ZbjvRuvHlwdnXMf/8y/nnW+vBBn+14Gh9xpjBiCFwGwopCuLuJicNSlE3XF4Xrt2tqlSbkcNGWp1nlABRO2QXILFMPW48hc4wRqKm6fuVLM48bROR8DijnOkwG8c6sgZpt5zeFAiNnBCghBevpB7oLz3QyICJkAk5++MvvOM428TGQWpJmRZZ+n3rjttN02AwLfD1pP0fNVKwgADjAbA2lPrVHK5urmCQkM9c5qZGi5l+xtKg2UwJAIkVVym69kOS2JMg2Ge+dhWo3mbeWJmvks0hN0sjpG1YWibrtgfuSLRQONxRiTV+5MzYyNDFEJhADESAAVwHEH6tC8QUaQFY1zlgggYETrBoPicOdkfhvuPbexv78/GJTO9S46YBTF0Dh/SsTUaSzLECGhIzNJokURoqTj6XRzc9PMgnMxWfBlVDucngAAM1dVFWNbBO8DKoiZAfkuJWZEJkhdqMLt3eWDD77up//Oj9++9QUHYy5JYnKOAEBEnCOHJCL5lstdIeYFFvOJR7EpF4O2
XZzfGvzyL37+kUff9/Drn2zqM/X0q8c7R5tbZweDrXm8ldzim9+4frAbpstz7/qupy9f+8zlL7/20ANPvOU7nnz++T/kdR+xPd7fHUwOky2Hwy3EDZEqmSdWBDU1O6VEmNGKloTAgD1SlLdKsdXgyqQxFJzSYjApD/f30jxubGyMRqMYJXZChDG1PsDA8/7t1zY3LgzLceykGviYmuZo3sDcBwiBqbDz9693bQrhjGgM7I4Plm3bFkWRUvLeAWcAk4lDSkk0eW9oFooqWXJFXcstP1y4sS3bfV+ZuWIxT2vrZwaD0bXLh9ubG5MHhvNF+8YnH6u7adPAct4ONt1g/Mi8uXLtxmfuvfh9sXWlj6HwlMLJ7YONTb5438XZfB5jvPd+7JpJ28ayHKSUYifsvCkAKCoR+Cit96yYxBANRoPy1rVrH/kTT50czX/zE7+/fWHUttG7QhU0W46rInAm4JZVmM2OwriS1KU6Lo5PQghlWTYpqmOLaXNjbf/2cVkFZjaAHOpiZsRg0bz32bNMVc1Qk7jgLSb0FgpXL1sRcxjm+yfrZzcba1XFEyNwFDWPmsk2CGKaF3XvfLtYLmJLVDIiMHUeO7OIqWKuskLQiHqfQ3TOOc/5VTlfdEmOjk82t4PzFFWDKQA0mooQjhpF9Ko5Dgic48bECBMql05BskFV4SQmlwJKkY4W0yJCKP3MZPfk1eDDzODZvc9w6cMQNKXr0+spAYdiL7R7xe2z7VjBiUACcqZVy/PSptWUm2OAjegskU26sHayPq/2a7/wKpyCOogUDZnMOeEIzlBubpwcja49vDvZ7LaQ2Sk4I1wBMSvRB2Ym4t0dvOUttwM0ymg458QluONYmxGSXJMRQJUQesvIfliKarAyBAHSTGPNSwoCEEiGxU6Le//jM7fdnbIyscdbAHrOIpAR9t65oKgZi+W+1vcUdqRsU2akIA4JyXLqJhgbwinKRP0PITgNBrF8JLOc+WOAAN5Rc9KSL4MLdVTVrmuTWYgkAMwJTFUJmFkUUic58a5fOUBdhpQQiETVFUVWZSUSuvLS7tZke7mce8/9pkVABQAxRWVVTYaMAsbM2V6DHRmIDyH46urVq6PxoCg9Z7YPlcx+Nl/GGFFtbTLS2BlIURY5TcZWqgJEE0ll4Rfzjmzzp/+bP3t88jwkj16SgncEAMQISpoEOev7LXvyIRCDQfYgQgQAH3DZtJ4HXU2xrT74/d85r6/Ol3uHB9LcaC+Hr775re7ylZ3ZVN7/3p+4fXB9Wsvh7s0nv+Nt73rb+6bdN48Wr77+DU91zdfr+WJtsl7X7XKmltJ4LQAruhqkhNWn1TNyFSTTVa2v7whg1vtnMDkzY3RdG0ejtZvXd3Z3D4bDcSja1167tT7ZVNXxeBRjHAzKF174CuHWfHl4z8Ut4XZnf962cbmcP/DAfbcPT9jpcOSvXHn1ngv3HzcQ2JsqlWzQITGRSyln9rKJb2NkBz5YjDVhWZbj4+ns6Ojl8fE+IjYNzeYUYzveEJUmqW2t+aOj9sJ52T4vZ8Rdvv55czocbBVF28RF4ffTcd02g+L+aNKpUu0BxQQI0ObHB6YueG7qzlRDKJo6MZeOUXPoYs6vNw3BqaqqIAEzt92yCKMrV6/+0E+8e+/w4POff2njDDZdZBxAUIua6QeIFjUWk9CeNF1nw2Fx0jQiMp+erG+dcUUQGli9NLPReHB0dLS9vVXXNaHLWRCqKRRORXtnjmTMDGAxRiKXA+d8EZqmqapqdnwiI8EBE5qIBGIiimoO1JBUxHsPqIQY246JRtUAU3LGGKPzAEYFKndiAQBVNcUYVRNSIAbVNBgP64OjehlNtCgKoxRVHaMwgpl4UJdaA2liBdwZtJKg94W1pBHZExGTA4Y4PzKcWOGwjaPhmmCtsSMHFaFiJIQNIIEuESgPgBKYp4jNuN0bddvHxMxiDpWBmshWiFcbDroOAIpogzRGCOsLuOZod3I8uT0X2Coit6HxgAZBtCNz9VBunnmtw0NutxKhMiVo6A5ETgicvdax5/sjGqHmL+zHlpiDCwnyWBVXMg0lBIeImNk3qyf3x2QERiBGIgImc0ToEZyhM3SEBWEBGBAcAzKgQ8q/OiRH6KinXSMiA7qexnr6MDJlVEZlsGymTv0DEPN3+tWC0RizML33FctGVNbXCMYVfJ//P6IQKzEQEZNn8kjOiF3wrUYoCvauQA+SzFC554Omfk4AbEr5rlIgJTYlMENNYADggZ1zoNGkFRDvw/6NWTcH72C5bIqiCGXZNK0poHHXJkNIKtm32ky896bJMTlD52lYDU+mcxUYDQYI4p1jC4Uv2zbOZlNE8w6KQKJdUQQwMU2ESMAAlHsrJEvqjg6b/+bv/aR3h02TqmFlCI7MVFQ6M/Ge2SGgesacXkiADAzmUN2p9KxpbVhJVc6vXHnZ+fTs7/3K8d5yPLj3vd/z3X/mr/zoQ4+//ef/zRcfuu+9jz32ure9a/utb7/41rdd/GM/8N2X7h3euPHJKy999ubVK8vuaDpnpeG8BsBi+8y5zc0zRB6RbTUFyQ/K8/B+pt9rw1Zf1n9J3sypQaoG4fqNK2988xvL0r3w/E2iMy+/evjCizcvv3b46it7w+G527frd73/ra7Cl69cfe3a9OsvH5298IZP/cFXv/7K3vS42btpn//MbYtbRwcnw5FHSr5gteQLJxKZc5MhziMiem9g0YQYhqAjsk3p7r3y8saX/lDqxWbTqGiYz8P+jsZmqHG8c7MdTSo/8CdLOzhpujScn6DZceXKYGux7gJtxGV969ofDkrfgJGg525UkNQYyFUOOEGpIwCIUcqyVE0EYKaOEEDBIlE0bcHUOc/sui4WYeA8kI5297/55//SD9x76QKZTQbFqBRnLVnjMIEJExBRbLtBVXBSQK0mg0akTTKbTYvCOw7eV6mL1cAByHxRe1cRsWoWlAESiWUVCMDKiCO74OUbxzkiAhEpoVoczDiBI69mIsIIlBPtTLMrnBkmMUMwhFnXahFqEPO+adsQAqHLG7p+qILIzJlpwYzEyBQKXxSewSRv/NU6zn42oBuKVcudWiy5613pswP+qcDeLEnHDcmEEUGidZC6zrQTpqVCUA/KiYC8pggaC5QOMAEvAGoMcLh+1IYThADaVkkRuPUcnQinOtSRl2cPtiftmchN8l3HOh0ltdY4Jrf0SETJIwlqU7U3N2911f5kORmmCQCBUV1Fd1rc775bAACybKQHK3JU9B30BvscMVglWqCSoQHRinKMaKcGXatHLsMEp+2+rbLrT8EZy6Yr0Lsn6qk7o4Dm/G7MeVCn9Jv8ykExmwwRZudIMQLM+v6c9dUHexMRoKpldURef8io34L0Zd2o5/KjGQkZ91QQICMmNVUBxhgjCZCCMSUDQSLFQiCCRSBkQDVIagKErs0XY/ZkVVQCBnOAXTKiiFYaJoDiyjde2xxuLpfz4EtA7trkfCFJ8v/OEwvvKaUYQkAVj+SQg/fkLKW0v3dwZmvDMRIoiBJXALBcLpumcY42NtYWi0UxCOwIRVC
NiQWRKKtXparCi9+c/q3/6scfeihcv3a1qtbq2DrSTJEty6Jt22TiHIGomTD1HxD0e57sfiYCVg1H0NSvXH71s5+9cmtv9sM/9MceefS+zk5SG5fzg/e//80PP/TI4eEJJ76xc7tuukHBi7Tz/B++sLj8wuMPPmDrY9NmfbQNoUQzT93lV79y332PAA0BgomZnV5dqx3WqS1Sj/dlThQiquVoX0Mx896fnMxe9/o3P/e554MfXLrv3s3NMzdu7rz+idfd2rm6tjVuU/3Qo/f96i/89sWLFymVqvGec+uXX/36e971XmYPbfTe7r94YTo/Onv+PDKJhy5p4YokDaPrui4EL9aqiZGyZSifEKmVqR/C9rmzRwfBYPzsv58vl8vxcOSZZvObjz1+r8ri5p6MB9WNGwej0eh4fjIZlpPq7M5LU5tEadPGVrl3fVGf6M0br5z76CGGEYtB55SCcwJgxoqgIkJQEkNMrfMA0HHW95I3rQBaxUjokxgzM/ukZGlWlQPRM4Wb/52//SP/5B//wrBqRA9r8aI0X8RFK22bzJCUSy5cwbN27gbDVLdtl4iaoijKctAsW+9919XjyfDoaFFuDYkcMxtgCKFuOmYnklsrkhRb0KooIYtIAUy18KFt46AcHs0OB+slMucEbUoycE5Is3Ul9uoHJaLO4kDQKxCgdHFYVt2ihooBsVBQE5OUY3lEhAhC6UVkuWykk8I7MsmcOjNJBo6ojY0bDtuD+QAdtrGLlpKqKjIhGCOZKBowYAeuMReXdRi4ZZFI6jJhZAWuGuxMVMWWCpWvwGLnEkmh0gKgRa5LbcNBuRwwuTacAA4G7SBhRHV7k3kZ8dzhpHbJdXFRVUU7WJa3b433H5gNhCtQjtqMO47B7a/tvLr1yiCmc9OLXtfBHAnXQd1pQecVh+OUxwIA2JtlAxKQIcKpjhLgbubKKqT81H8Lsd+f59qaZ5cGluF7NDAFQiS4u/6rEYAm5l6YczrlFRAGhhWjOZOVMhMGM5ZmZkgr85kMxsmdY5/yHI0UFRCZSLF3JCBTBEFzebNohL1kNcP+Vmb9W7+KqOTlAAIdHO9ROSDXtpS62dw3gg5q1JAcGLSAyaxgJ11UtK6JaRDYF5g6x5BcmLcSDUo2RNeYDEM4eGl/aGvkQ1wsxhOffUxFRMwyuwATtdIVRYGObRV7BagKsQhrO3v7XDpXOs/I6IXYCp7Od6ZNct4G1aAGCBUPrZI0dRgMQzRlBxDBAIpq+Mprex/5vj/2vvc+dfW1Lw/KCVJy2JkA8wAgxZTQMQKiIpJTREBmYeWo1DA6jFxy2WpbDQuoD7/8lReeffaVC+ee/Nt/6yexON7df80FxiiOi8PDwwv3jMzky1/a3RhPHn38Uqcy3Tm+fW3n3vP3t74qg9cUZ+3emIYGcHBSU9goBtvLumPKmFDO3wGF/tJF4xW3ShVRQVBzM4BGmAj6ZBxVNRmW4dueeRMAKEaV+K53PonIi5PioXsvnRwePHDp3nu2L/rAKSXnSExijCEEVc27R1O8RzfMUtckMs9IoEYYDAADiClACaZsEDmxFSAOER3y7Gh+6YEHf/Iv/dl/87/+znCSPvbd74kdfOJXP/n0W7+jk+728f4jbzjXdY0j29zcPFN3qIAG88UsqIcKnRtv3wN4LvliltJaAUWOVyaHrahpKtB1RtEZCZAZOYTeoBTJeVFjUlUk84bgGEyFANXEh7IMJYFoPLlw4fwP/8g7nv3N/7BWXpxJQ2xtW9dtt2jwcKoHi26ewBwPddA1cX1jMNtfSOvmi3ayUfBgPcWj2NhkMghBDmcH57Y2l3MxTcSEiOhQVbIfQPDBGWgrxplul6M3lJ3NZTHk0B0sh9XmDDrnWVMUS6LAzqkqmCgoMyY1Ime+VekyyS2qECGbhWzzYaFLkqBOWLEhUJSO0VObTBjFtEMoGCsAzVUGtGDXipLAsqWNM1WExbKOAsmwBJKkUg6qtlsqDarUJa2tcC21uPTexYSGAogGpEBVxGUpDNa0ofIiRl3JoJACcCyKK1vdG+tGbbNI1BRp6U7QJqV1i6Gl/XRcHodmw/nR1hz3FpOdcXX90i26LZf2HhQObGXj7WBj9+bG9SLB9tH2xskljAFcVLFZmZVEvets34Hd3ct/a3ecy/63tON3cx8V70yTTx+UPdd7Ns2qmBtKZjLedTBEJjK8qyXPR1aErCrHFbZy+noMDMGBKfTuj70fDuG3LFCr3wnQwLjfGRjoqYsZYo+1Qz/nvfst4ArX7d0r86rmfIowGAxSSgwOgcQwqXlwNUAEJEBH1Ggyz9qKJVEzASiASDSwSg/0a6dgEGWGu9cPJuWZrm7KUOUqlFFxOx3uIpZlWdf1YDAAMCRTtcKXZfBt27b1YmNjg5EMXRNlbXMyndfTOVHbFX48qgbz7hjdWAhAKnMGJKBsCoqdd+X0ZPHoI4//2J9632vXvzYcDFAqVWEu0RmaEbNBxjysX6kBAdGKSJGdjQFSct2sazbXN2/vffVzn/3ql794/O3f/v7v/r6nTuZX0sKq4GMHRRgKdIy4aGZn79n+zo2tL33pK3u3bz711BPHh1Mwv7F5oawq4SH7taaZNa1bLpfLZXfvpQe7VggdoKrqKn4daHUR5klMHgJYP1q/Q8sF6OG2bAWcUocoZCSUJOVTDQ888EB2rVksFkTOOsnfNzMV6aAzMxMl4txDIGnOIkUANMlsX17xawkIEGLywGAUzQQYnHPT2c2zF87+jb/3QeJiOp85r0+97cMIhYCRT524qvDBSDuJMSY19E4BUWsCIwvIrksNULU8biTOXREaU5HOgQFjlk05QCRUFVtNu0/dWFUUgPqzlidOjgKy81gyDKuRYtEtl2972xtcSi99+cXtIdV1LTYwHM6bdnsTDo+Lg+PjWWuShotmxl7L0qWmkWVHIa4Nz6oE78v5fD4cFdPj2Wy+9I4chKZry7JcNDURqoL3XmLqMRPAHEAKoETknOu6yL5ourZbttV6aFPtiFXUBd/fDwBoIGqmKtnsK2ODKNjnQijm2EmE7Lyd8Vj25Jin81k5qI5Opj3VilDACueTiUfuJA0nlTtaSJQYU9vEsxtb02YeU0QDZud8tr/VSkoF0yQenDEvY3QVqwlaCmioSzLuKhcxmiaOVrR+zkqAgikWcLB21Oytl/GseXaAnAg0DOdr43JzWdV7xWIQF2eP7jcdXDxej+Wlmxsv76zNMN2457CNePblS6/ub15R6rbmGw8d3l+13BF6Q0GtNJPK/w/sQwAgsDuS/bua9KzxRDxlwpxWdrlj/9v/r1zHgbI1wIr7mAXFdwFAd8anZgi0sgNacWnADJGxn8rq6Y/up2k9XUNXwR3ZGF0IT4vR6n3l13WXoRituDzYq05W3BoEhhx2nB85HMqy1CWHjQDRou5GgxGIEjkxzY1E7CI5jwwpiTGJoAA2bRquTeaOkigio4ImQ1EiMLNOaa0sjl+eWstWJIniyxJB1VTVcvKvSE4Z9iBWOJ9HC0TIyETELuzv3CpDUYXgHAM6X5bLNi0Wi1
a7YHFjrZzPTipnvmgjITvQpISeMJqxK4OKW8yav/8zf7qNtwpvkpgRVZWYNKXCq/QfCvas/wxlAcQOKo+gDUJw4Dcu6Kvf+PTvfPyzR0fnPvKxj73prdt7O5cDrXtqRJN3IUpCUnRI5pq6dS584APv+/KXn//cZ74wn8/JcDDeqlNTlpUr/IjGgZMoFcUYyYhIk2ZHoNyjf0u6QF6UEQ1sZTIKKzVcXkatL/iUJ3yWwagQyq7rMgwYY0Q0733PyOrBSeCchglI2URpBQGZmarAqW9Ev6ComqbeTp6ZXdd1REycmm6+vj5+6aWX5tOTBx5+gJmdCw0o2pQ8t11HQLsny51be9tnt86e3UzSAgAgKxSeUGVmJIbcdvuVL73zsWksz7oAULHNkQhJVjH1droXNzMEsxyxsLrpHGEZOAQXGEaDIRomhdEg1Mu9b3vHk8v5yXL/6vrZ0EnXdLEIsjlyA15uj8bTJh5OyzCnnaPd4XBy0i6l6xZzHg+6ohpqMpFjRiyKYrGoz57bbBc5gSCVPjSxyxUgmVJOmxXr+Q79CTeH1CVhV8z2T86tnWvVwKEp52an//jIkSmSR0QBy9qCTJ9gQALLunrMZt0Z9gHJ+bFk0MQuAa5P1jTtCDh2oakX41EwpBhbN1JQjZ06RlUTjdRnPwGAOm8AQERt8ItumlQL1rm1WkBU8ktYD/cvk/liL2mdLFqrBB0zdN51sXbATqtRM5KBTKu4sUxLZ6AQhNVsMh9vlGdPhq/WNDt37FhMKVbN6NLR+Vm5dzw8mpZXlWM92L+9sdcBbU0vnJtuUiojhMYZGCupb7077dCN8/1w2usoQeY+ooHyypKW3R2jGOn1/LCqg3qHZXOHDq+nq4bkVrvnLGreJtwN75yaBMGq61rV8bttIE8fudDftSatgplWsljI9panLElcpcX+p0c6pekD9AhVfxL6Ke4damZ/S6uh1XVbFBt5k9DFtpNEDrHw2goIKIESOEBUB64aneXbi1kAIrNoIqJe1aJFJAoGnTu6Md9Y21w2C+aA3NNDV50jWlZaAnRdNxxWgClPi51z3ofFomlTPHvmLEgyQGE3HA4ODg60qwd1HK6fnaPRoBgVVUy1N7WowDmYkNAp0uDlb+79V//lXz13gXavTYejoaFT7HxIBsLss/NUD1X1KtDeN3vgh1FjEix9GlXds7/1qVdeXlw4+76P/fA7xhvtzs5O4IGl1vlSrREhhOg8xphAC0dBU3Pj5tUn3vCYUzVIVy5/3RXICMRiVhukZdOub2zUdcPOxZS8CyLxlESb935oK5oW9GvyXReF5q0pI+Xr+e7rB8yYWVdcPUmJGTO7ol8yTHrT03xs66/w1fWQt5uEiLEPFuh7yh56RAikaJYtCcUYzCMO5yfGdj4u3c3jq2U52Ns9WF/fdBz294+fefuThng8183zkxe+cbUo3XA42t892hwPbh/Oq2GJZJT8xub4KO6z81vjCQESElh2rr5LvNH/iQBs5Y/d916rASMGj8MyVIXz7JiQlJzjKI1onDXHz7zjjb/5S4dtMwNonPNBBsQ8mrR2IhuectR96kY3TrrRxnhxfACRpvPp5mjb+eClatt6UBV1XTd1x86jmqSuqMoYIzqMsc1EButtqMBzzkkTMw2eTxZxUg6XJ/P6eBE2fBtjcBWY5g1+jDGzmAHySJaTQS9hB+zbOsyKSyfSgmie4DrnmMn7ok1CPjTN3BMjWhO74aBQNDZWlNZqFEwJvHem3DQNEZCRJdVkzpsimNlcb2wVa89ceOyV2VerIgHxYod+6Dv/wn3j9/7Gl//wxeN/w4WxKFprQA2XBS5dpAt84UNP/1Ro7vnE85/cHX7p3sXSpUoACZxiPWxxox6dTKCItL33ANoQfR11FuowmW0tC/BwdPnc9ejGsypdPORHdrdH3X3G1DGWnZUaWt9uzPiulryHJTKSfQdsufvX0+omq77+DmfxW2pltmxWM82Qdv4yAENDulP5zSzD2aeP0wPmDghAKfsyZmdgNMAEmPo/gwIaQFopjAiAwByYu+vFECmR5oUc/4/v5RT86WM68D+7kHzL2wMAJEhRy8AMZiYx1qpJVcWSgnjPjlnbiL2szrGHpqkpR+IZGCC5wsiLknd488qRdk5BVIiZjS0l6YnA0osNsr1D9vsNzveaMWZmf3BwMByPiJmZgakofEqdxia2jXlOwVOUDzxzXwUzpyWSqHMITKiFGxL6neuHH/gj73/3ex7cvXF5bbwuCZAcoiElAgvOiRizd+gcOgY+PVeoGNNcNbGTEPizn3kFugd+6GM/+eGPvpXDrenxgcdAWKHXThcqQ3DivY9tCq4gYEfoiNbXxq+++uqt26+eO1898sj5eT11wQ98gcksgcbRybGOBue6FglDn79DK70Fnn6CCH0Il2UBIYCe6ucg97Ar62ZYUaHydSAiSGYghsrMzJwndSvkIk8++qs9rTpHMwPL/o6MwM6YldiIFJ2SAw7kCwoALqbWBRHpSD3TqOt0Y2NNy/iHX3pxPHnkGy8fnz3/1LKZ1LGg0nVxgdo9eOHiza9frWTI7eD6KycHe3Lr5sHmxgXp1gKfVSi+/tWD57+wf+XyTjkasRGKiuaUSoRk6HyOpyBy/YtEpt6ajdAUQJmxCG5QhjIwMwJhF6XVrpM6iSVxdRup1Cff/vadQ5m3eDKfNk1zcnLiAnLlTSCQDly67/ym9ylZqqpC20VsYt3MiiIQBSKPpKNhOZ3OQghE5L1PXSxK75AyVHKK6/brkfYQMRE5TzHGga9me9OgHhTNRCzjZKZg0scxgRmYJBA9LU0JzMAJco4JyoSc/IOYyXs/r5tiONnbPwLk3qAQ0QhFoqE558wlh5SiKZiqFqEKocy0blUgDyqgCr6gAfMDkzOpRaOgrQwHEfHGRrU4M9TS0gC9KjaGyga0SGZL1uBn5zxubq1tDs4cTeJONXfkEUgYwTUkKUQX4vDC4VoVR503wSHLcH0xeGTn/D1H56u4AbS1LObbM7r/9gOjduS1dclKFeJYl8vdM7eORtddprfr6ibJwCWs4k7ILCtLewPl1afAgGamd/qjOw/UlXsjGPRO26u/3IFITrEayPdMjiY6fQYikmn+2E4fZICofRcCmlMt8FSBav1A1RBABRHQ6O4O7s5xcEX16blAuLIygG99jasFBvFbe30EQCDTKFXpRFsAiBp7MxNVZGq6DhUHLiR0i7pxXA6HBTJ0FhmBDTVaJFHWQA4W3fHNk83RmeXyyLkBcW+HCUCQRUbQa+MEpSyDacqvwznnQjieTkVhrSqipOB8cKEoiqOjo7ZpVHX7wpnrN3b/6p/67ocvzHZ2l9ObANEFzlNG30nXNmFt4H/qL37P7d2Xy1C23dyFwkyJXEoNI0UxQMZvWeMZQDN4lQAZgrOubeZPPPV4NRrGdu/k0IqiJN+AgegSjZwftDJHIhP2roxd9J5iXMbUjkbnJOFkffv23sFwUC3bbrKx/dxzX3jyiTc5LnkQACClzntWNTMlx2qaww97Ct0KV++b5rs/KsQco
W2mhJTxPQDovSTMVDWEECXlVVMQzcx7HyUSZeYcrTZwdIq7rE4F5UKzghpz0ieeQjaw2lcSIKEZdEhEaEUJpdD3fPB9r12+/O3veINYPHfP9je/cWXnxuFTTz1SjnjvYOeN3/am55577qGHH3303LnDg+nWGefJXzgbDo72yyGdu+dSii1Kl1QAwKFrTAA0kFM5pbH1r+L0VKD2FxIjeUeld55JEVJKvQ+1JmZuoyA6Qz2aHl96+Px9rzz2za99YW1CGpdNl6SDKKSEPmBVBaX0hoe2v/S13XG17mgpUZfLebVV+VBgNImLwWBQL09mJ4vBsGzbGsAcuTq1zjlVUxCHvb2VSMxZnEQklkJw7aIeVEXT2vJgOdwatBbNLKW04uihiALkTN2oq1tcVYFJwMgMNAtOzMyI8t4LiCApJsWYgJijygiLwmEbl+OBZ0ZmBgKPFDtMUZjZiBlJk+ZwaV+AgiHTOG3KNLLEcjKa86EiRK+/9sInvumnr00Fx8PpyS7TaHP0Fup40b1GNFqHatYdTOPxKNS47BoM7WBLFpVoi4SMDATOOMRqdFJYKApFE4guNk7Krnh458Ki2DxeO5g04ZGb9643Wx2LxtBRV8kywWbj062NWbWs73S4/0kPfqeDPkUqv/WBp9iHrUZWfVoT5lCPrGvKzuinJbNHYwCykjX/RVHNVsGoPRkxE910BRdCT4NRvKtJBzuN5Vb8ViAoT2G/pQfHntCZ79X8nbsINXfyoe5uJfpqYdbzfPIZyVdQ03T9QJV909YAWURrWR9JxAqwqOeRcXMy+Mb0KClqTMGRc8ymnlCcc2azK3Uwp06k8VWmeCUBoBz3C3dABs3tJjuXUgohhFCIyHQ6Ha2Ny2wCbwTE9WI5m83aLo3WNveOu2ceuf9jH3jia1969o0P3Xd4dGPWnBXuCkrLWqth2Ltx8DN/72+keAsAgNCTj6kpSq+Kjnxugdk7085WH3QmH2WLHOccYacdMo2NdTqtPQ/YtzFGwEGC2nvUCF2smRwiROkYPDkHZBzAsFgsm/P3nLvyB1fX7980Tew8kLt48eKiaYfrYxMlAkBBRARzHNquplPKLfTnx/oEZTQFyDEPiJBxdSIEYN+bQgMQ5Ew2EDRApv4iVM36UgCIkggo371qAtg/zXA1PwUFICQkY0MBsKhyB/Sz/oCo5h0wsAoGDjHV7H3dNFtbW2fPcrvYf/i+s20nCZRw8eC9W4/ddz93aejck0881MT6297xRhJDixc2NhYxoRnC8uLFrWRBdFn4oSy36uaEkYDBJ4qgCQwZbEWLAeijMlZ3NBqII2SmwMQEZpaiASpCQsrcOGLGJC2jBy1mJ3tPv/0NL774jb2DZZcO2VnsTERMgBy2oGy2Pa7u2RgczaAYrM+XCwC3WCzKcqBCsWNNOhoXx0ezwbAEAOecmbJDQ87Xt+acNlFj7jOHCRXFAXQkXYpVGM4PFsP1AbIyk5lp7gbVwExFgUyFJOWxApEBmrKZU0CHubqo6qlQkb3DctDsz10Y7+/PuQhM0LVtWA/eE7GISJ264IoUDV1A13Vdp6qggGyIyF4RjYimetIqJximlj3oAAcnJ92H3v2jD43fsbMM//zf/6P7Ni59+OkfG5dvWc5x7+i1+++/+PVvXvvkl/8lltFOTh6/eP8Tb38dvtTs/vuv6CIyCEJpoIhJCBrXjOIJwFrDUqg5QyMp26KIYdTw/bKRYNz6btgNWgwajqDdMlAFaX1dQkt3V+1vhUT+Mw9cZSrd/YC7tq93P/MUZkE1MiADzkQLy1zVzPGzFXyZJ4T9ApC3wrbSr5rqiuZIaASnQA8QKqLiKoiUABQwIamZmEkOycvPhnxbf8tDAMBOu+P//PvV/+T/5H0uGHVNLMvSzIBwuagtGSM6X1KEYE4QGumcpxDcS9e+eXX3NvoQKCBAp10iM1BoW5X66MZyFMK8PnHgyRRM0UBVk4iIrJQElqORDCQXI2Zm7xbzWsAGgwFpRADnC+eLpum6JhIGX4655v/1H/21wh0Vm5Mz2+MHzg/IDtGgbXQ44deuH3339733qacvnRwLgJJnUe+Lsos1oRIpY0KnYh0REQOSZZIAgObTK01SQfPQWcsaCjKxeSIgLJVachgTKKBz3jNbh+jVCGPiLpkYA/lOOgq2fX69Gri2W6ytTQxo69wFo6IRdjZ2MIwdgXkwyvFAd0Nq3wKvEfZ5tEiQdQlAeUunCGI5j+TOh8ve5WkbESlamzrgXsmMyPn7AGC9akYBMufG1FJG4dSSqppJyGx2FUgRVUilRCyYEIKIILRm4t1QO+/doG3bedMYumWXBFMCbKLz1RCLdqnaQJrNjiFFiiIxtilOlyfaimhszc3mdWymXmFx0nYwc0iGmEQcgUdKkBIamFgm0UoUEMWUL/KVYzYygUNAU1UVUzVU1S7VZtZFbJrGoKubWbu06UkSXL7jO9/3lW8sb9zmqzuL3f3l9CTVc380nXeIbSJbHj1879hwTgWjKSrWbcNs3nvHlSUcVN451zRNURSSc1OHw0xXX93fCqiwSg1SASQC1OCxk47Ja2vNrAbQjL2AGaiB5lEKokHqeWWGamjqANgUVXnVk63qDDJjCGG+bAeD0a1bu+O1CRCaaGCXVEUkSSsiTdOUZWmGTD5JRETnHPZWIOIDEaNqKp036SqfxgULQs3WhrRorxWGVi+tse98+/dt4pPz44OT+vPnzg6otiqsdTpqrU1SXLx48TxWxXmm9aErHKJYQlUwiynIyXgusgjJDbQzlM5AXTKfAJsyDpGKAlJohxGS4Ql3o4QirmWCUsNaMyLRoAgK0czYCDUCpKQmpMk6sIgGIATiQIHESPrTCgCKYISn+DuiGIlgJ9gpKqqA9AOoXIoFMCEaOEGXHItDMzGIBIlNQQ2ioYoZmpIZ97NEcIiFgOQfpHlwlVntar14KfuUWQQTUFz5CPCpk2++gBTQTARBkBJBIlBSI8jMdVbjrBQyTaiS45dW5miA2pc2VAMBnjYnbjLYmnUNOgzgDH3LnUhS4tYnxUjei6FXZV9EZs8YKSazIQQHOMfIg7J9rVNO6spCB47VHMSUzCyqCaAypr7dICKgTBRDHATn0Ux0NpttTsbORGlMDoswXLSzRX2i6tY3y73r+3/hh//og9/2mIfi3Hjtvnu2Hru0OVlvfELveXncnZ+c+TM//pFbu9+EYICVChJHhFQ4BItmBuwImGSIiqQTsJRkUYVJVQwK5gGXgEN0UaKUWDB15pDYsVZRl86okOATeWYRUUUG9sJOu6oQhwqijFoVYWdnb23A4GOrWBVjjCDd8dqEC9RkR+QadqKaAJxzpSREYEtE6Mx69zoChJzWRWr9lkeQEmBr0BFEVvbgHJJAMhRAATBNkltFAvTAzgiighKjU0iImEX7aFn8gsCYLJkhQ/Do2Ix7z7K+9DMRE6ERGCVTAUFTQqcWolErSVHMNDjHCEAqJAmMTAMKpQ4FPAmbeufASIyAPFPJVJJHBHZGZSiQfJOUPaOBUEJGRYgEHSipY3VluV76sDau
NtcnwY9URmpsqEieGUNwRASKqVXpUlPHZRMX9bKpbbbo5vV8GXVeu1ZCRzXicP/o+NE3XHjzm998cmiMHoRR1xJF4IFFMMFlXTiFS9uFpPlkbaB1it3ypFm6yhunyF5inEwmJ/OpGiKXBE5jWwZElP8/XX8ebVl21geC37D3Pufc6U0xTzkqMzWmSEloAA0gYSxsY8DGxmUoDyy72samDXbj7lp2V7mqa1V3L3e7l42NhzJQ7sbGuMBtszAgA0IDoFkgpTJTOWdGRsSLePOdzjl77+/7+o9974tIddVdEbki77rv3Pvu2fvb3/AbnHMgysyAfI/iqWFHZB6QK3Im/aipT/ZndRopghqKrlK9hCZoiKwqncfeLGF0jJZByKNzSU0RwVzXLZ1zOTEAObaRRIG2BWanPhKwi771hpKMsosKjWsmmy4fekrGADHGnDM637OZoWUWI1IAJlGdJ/a07RHYWYPUt5AgI/G5zTMTumSSvnZw42d/918eH32GUULtE/R+gZON6ulXX/tHv/mPYPjC5iPnLTm0TcUOvAG7XvIrF0+eu3RwEJ7vqaoViYJmb7BEZfUnoAjmiAXImdXlZAtRQPx9N+4ft9ccQifaGwABZxBUIGJENjUyt2qfkALJSp4Cq1IIl778vQm7KoDaSjZSYI14fF1NQHf7IIl0tQ1sBUsXINM1T7kMSw1KnugA8+ua8qt+CRkUeU8QVLB7AS33znvxtMAgOmVUnn4SMFh76xmsPswKPgeAICvbjrt4agCQXvYPpw+yc2gSU9tFZGAkdChZ723yMLELHrvSGyUwSyLmyIHDqLPDxbgZxtgRgfOoKkAkQqtB6opBhuWwolXqwUToqvr4ZOqcq6rKzJBy413XT9uliRhWwmG0NVj8n370T+jh9XP37SRpZ+3Rtctn3nhw/KX5VNz27u3d/+b//FeW3R6heXKmTJgJvYkpCDEgBhNGInZKlgVO2G0OuZ0efeHWra/52p8/+5bh1ltkRrUXQV0KOjQPTmzh3RANVJRcQFJEyLGvvFcLSNZ30fmAoGAYexmEYeXPx3y4uQHSd/t3bpgZ+93h+Mxw81rfJSJnoMQx5eicY/YaYuyzc6GgSwVUISOag1CW58qWEU4XwGpFnA51ABDuuU2ni7UwHwncvYUsAhsoqpUG8bq/s5KxBwTT4g6wRqSsrmxFvQdXpL6VX6OqrjRM1RQA11NgMgQgLKCv14+LVMtbmZmxYyLKMTnHqE1JaySlOmxUg3revfjK7U9ShPlybjq4ePnBMzsXTJrFggW72kLRw0iIaWU33aMkA4/AzpHzzjnHDkuxIp1WNFmeHHzfn3jn3u0XUjrG5oS0UyQ0YyBStSCA/dmd7b0jTRVzbdpre7IYNaFpmthJTNDU3rObzU8m4+3Yt2BUeSa0RdeXNyIiVSnpvJl5RyLCzIzY98lXLucc28gek5lzbu2giWWW4BhdqS0BAchMzdBEybtyF4rbeymCmdGFwfH+3sWLGymllBKiYxfKy8xRqEJMKZDFGJlr752IDBtny54IJCbnC8kkqzKxDhHiMi3SEABnXZeqmQXjwqFSJdpYdm4Om7fo9oOUPOTaV8kwxlj5Qdd10/7m1n0PHF0e2c1jJah7buY2Ojt5CW/mM6+Nu51Lx1cUGCEbmYFTIXFnAKPyApBcHgbgDNFlSw0E6cOCnVbOFIAcEq7kclcpdEbIiGQKYiqSEBmJ0fw92O+7wxqzlV/CeoeQmUFRlCpdylW8XM9ktTBIDcCXOViRoAQSZ85MwFaYh/XeS/fyYWFtjAtQHKVP32LdMV8LUto9DXQtkuMoa1+/e8I7ghmprT/e6lJg63dSXGX3q7QCkZVOOnHea+6dL9K73iG3KkTk0YmIiTIgosXUARMVkQMCMBDV4MLyYJaXMN6q+zTzNQObJCWsVKwA8s2MgNBOxRjQTA2YQwWGJyez8WTiHGeJlas0IQeYzxfa98Nz2y+9svu3f/i76guzkxsn4+H2+a32cH4nHi7PnZucO+k/99U77//gh97+jmu3bj/jwCOqYxOxIgAKYAAZzCM4AAXsEorTEKy79drv3dz9/fFg3E6PXzj6jSsX9s9f/PZF6tl3AR0AqiZiUmo1lRkmShRmdMFnUeSUcmTPSNkximRmNxzVQjDAC5I3LZHjrp/32tL2zsOzviNyRQ1cFep6EGMUiQiOwZGhmYBlRKx9E3NG5DXmr5ivr25oObtR7fUMNUK8e09P4zsAgBGthDcQCnLeyiFRggoi4imw0tQKwnq1XGHdeV9LItjay7u0/cWs8DAQkU1PP5KuDop7pkfrS5bTqMhKSzYDCSFoTmLS05RcE/xY2hu//+XfeerJF0TG/TxW42GmRPTVzQm98Y2PPvrG98S8LTYlF9BMTcCk4FqZhqUAcp6Cc1gOkZRMNZkR18tusbkFf+QPf/AXf+GXxxsuBKwgJkEAL6EyYkWZ1C4uqi/dztUg5JPWGXdt31QjimbAKaXxeLR/eDgZg2RjhzF1pitRJ1VAZCh4zjJcLRxWIgQssDEPYXm0GG+MwLKt0J2rqZnByp4B7RQlQaBWdGRPYxSuBAe1qaqTk2nTDPdv71/c2vSezSznXLugks2MmbsUa06WDQCZWSSF0KjNUAEd+oCK5r1vexGt4nzxgcc++pZ+0Pjw1RvPpb297kJX0Sgt80n/2nZ17o2Xtk9OPnitudpi55uNee+mfTpTp6qu2xxmqTp7hvy1c+nWgXNkbcZow7TpcNvJQe+yQd9zXSmCOmUQF9mwUgc2yKSRFUiSS73o9Uu3N4/j5vy8waYDAFE2payAJkX2KGn2jI4cIHGxtyEELb0wXUd2OE1S7tktJZ7e/ffp3TrdOSsxXb0nWmJhIBQ+Q8m4VrOwFQ7B1IBPF/2976hchq33yAuXpoytprXw+ki+Ou5P5SkJ70HU3D01EFQRTqlw5ddcMVrNiEhSP+1iM2pIW+K1+ljKRorETGwmaMgOTTGmpMoFoAYGwCQxOsSD3eNBPer7RETMLDkjkomCFSUfNDNyKGKOqLTdFdTMfKiOj6cA0NShZD0EQ67d0dEt0owVM208cGH4N/5337n/1OcnGw8kqN1wJ1RuVMuFcXNl88zTw+UP/tBH9w9eIiWiANYaGqAz60+dtgyKvGrO0gNO6jo/+9S/X8yvnz+7E2Ux3hhKgun+72O1s3n+nctlHDIgBHApqVTOC1rtq5QS+6A5mxIACmhoBrlPIoYIRE7V6rpOsQVQIrbQnrv6UJLsQr3ErlJIKRcVlLLlvatF0IEBQI6xqUJKCYhiK85VBFkA1il1OaXu6s4D02lGgiU7LswekG9YzKXIhdWlVuB+K5LWCKVIPE2lccV4ex24FgAKrKwsv7t1AK2AZwanm2I13zczNFrjCMqHKdtMi9G2c24lbwwmIuPRKDrUdJ5teXjjs68+89TuLZF04bDr3vrAm597ca/T6RNPvEG74yc/99Un/+DJ7/7+72+GD3dd7NvEzJ49M4sYIgNG1SxJo2rp15fFb67KsvRVmE6nb33igee//rabrz0
zHnWd97X0lDsCn8V6idAfPnRl+OTLd6rJRKpZjriY+7pW77mPdeymk61xCH65XNZNbRolR+9dGSyd1jq6TstUhJhRUUScc5ql8fVi1g5jw97FnItsYQn8AFC6dmXjF4VxW2eZiHjXqU0FDerKDYYb09kdBAprwlpBwa1m9SKGYJQYnWRDMjCez5eFRKMKZQol2ZiS5Aga3/Pgg3F2fnNzE1v/2t4nL9Zhoeexdp947pfPfdPZR7ff9saP/LFbt251bRzVzZAhhLzV1EeuaB/H7Wo4PX/uWX5yGzg7qxHvuzHZOnr4pUubLc81mwaKlMzMCw1zM6+WBo6MnZKCofK06m6dOby59ZKLuNVuMJoT6ubz1CXuk0o2BrcSPyNjSmRQV84HHTRUeU8kVpTT1w2Qu5HUVvg8VVtrLpb8/RtDfzl1i2cRmhjIasEbgXLkU2pomZVxQb+40/Pk3seagKp4V32sbCxZ06VOU/hym2VVJKzAOaoGhIaMJva6bWmlemUjuYeRWy7FzLFdTBddqBiWuetS6nPwVWqBGVTLmAIQwdSQwDsGAQEjBTPNkBk5TWOaptHQLZdL55wKqTChqcgak2FlpIGODVfuho7QV0EUjo6n4/GYmVXFOSaimS5jBum7rSvnrr9y8x/+N3/rzJb72Kc/fuUhe+SxJ4zB8uxk//oyb+3uHv7gD/zpjUk8OY7BNQBgFmJqq7oy6RAN0ZsGwAwYAQCh3mzy089+9vbRjavntzUmVDlaSMJN6MTPvnL+4rkUtltNGCOjG202L77w0vXrNzY2Nh5//K23dm+eO7NZhhcxS9cl7ytmZLTXXntta3PnySe/+tAbHlksFjtnticTvnXjlUsXr97avTWd7Z09d/+gZtHeTImqdplOTvYuX7mQZckYHLt521VVRQTosmJ/z3IzAEUq9eVdPDXi6QIDMpC7EfZ1SYPh2p5llRuWIAt0j93YPcv6rqjR6i1KGmNamiyvey0QrkzHTvOP131oBMBipY3rgRGAre0tESmmFConmp988sm941s7jRu52fPPfP3GTc4wvHBl03aXz7/6YmuYM/7Bl5/9wHuvnRlfO5mn//jvfvFbP/jtZ85fG5/ZbpO2/aJxoXbORIndalRZlnqZMCAbRE/GLImbLh9+y3e89T/+LzcStCMjcg4CuXpAwTvPtcHVs5fObr/hn/4vn944f/7g1m3u47JfjJst6lWzy1lHg+He/vHly1fbtCz8I2bM2RAppeScKxLWa/hpyaWIGSRlFHNCy6PF+NxWhKyqtKJAaondiEjFdxFfl7FZESy31exdLXvPSG4+X5zdahyHsqOBSBU8kaI6AGOHzhgpZyUiFTRFZkwZCZmZcs5mWOMw+/knr79688l/kefDupncnu227uA3nz2JOEwbr92aH/2rT/+DB3Y+eHJycmXn3HuufNd8kYjh63ee0Y72W6TJ7q2Fuzl9cg/63fGrm9MRwUCc4wXsLIezDT0Oh104qMyz1JnAULrQgbGQOOFEKiA+hSAs9UJ0yfm8z0MkcDHJdN71yQM6ADZwYMiOomZWVEltn9hB2+LOBjeh2LdDsUc53SEGYERkJccBMAF0ACqgDK/fBustJAZcKmIEAEHgovCkgIi6PjPuDjMB8/rHywXL0jeQdTp/2m2H09Bo3/CmpXxTRF7BolfW2giCJcqXX6H8HK1Pi9Xf02PMHPG0Xxq6iotKmYECFnSsiAeOaKoFiQ8EwASBuPRkNKmoDkM1Pzge4CBJLGSanJWARNS0gK0xm3nPqhqCM1Fmh6jOuRCq2WyWcxyNzmlO5MhxNRjxrTt7aS6VayTj1lh/4Hvfm5ev3U7dVz7zse3JlQvDUcVbtw8On3ypG196yx/68BM3dr9WVRsiCdAQK+dBLTIFADBlBAeAZh2Rczzou8P9g6+e2Rxp1makMYXjO/jUV4/mu/nKw9bn6w88NnKKQKlrI4Ofnyy/4yMf/drTz3zq05+7cvniU0++0HXdaDQ6OllUVUPEk8n47M7GztaVp596DnH48ks3g9ucnhwfHR9curz1iU8/vbm1dWs/991Jzt3B4V7T1LUP165d+/KXvwpAu/sHqpD69OCD950dVX1qFY3Zc6Z7gqSts2lVMwK4t6bEVStc7o2u60QPV2Tc9SI6Dehieu/r12vvrpsjlJC9ytfBENc17t23sCK5BPQNSdLdq637MLDqyCB5il0f2Ctq8IwGs5PjHNMbrr3x+a/9mh913/Tms296zM/j1sd/58Xp3LFHtNoTY+6kX27u6NbOxtnlmz/xy59sNqpL1849/OY3n794P/Gw7SM4cwbF4oDISdkGRookolVVq0RPQbNcubj5re95x+71FzbO4mYz3BxMNgdb1dATJwfG6L7rQ2d+54svP707G0w2FsujvtVhtVlVlcOhWqqbUFe+bRch1IK9ijhPOeO6RC6CcKvvLmd1zhGjSCKHfd/XdTM9OBltbnnPSaJqkR5h0VKeFSh2Aa1yLlIVQGYmasVvBBFVhZlv3749GIw0xdpXiJhNHdgKE5WBKswq5BXEigCkIeecCdBx6FMfmpELPucMCMSDp/f2Dw9ebiOkY9ggYoNffuE1RRkMaYzufY/9yaPdCzZ49cGzb3dhvIwpefrMa7//Rfk9IeQz+Pnnnv7Np56swNdbdCVfORdd7BErZle7dOBqYgxVItNslakCJh/AhCgDmxEgKkpyXfbz89PRaHEf2maCReGweYRAXMbAJppNBJSAivgJmvrUmwpDyTYAUO8ymE6D/L3/v8Krvv5hhfeEAEUtssBwDXS1OQTAnPCacLSuoNEATt+OvuGap2andg9NCUvWffcZXA27bCWNYqepGOKqGW9quj7wbd0hBQBjMCODfDqhVSXC2fGs4nHlqg6RAzskkGKvlxx7QevWtQIBemISVF8KkeyQtMvdyXLsJsu45OJFZojkyleESGDIqFS6M1qEVASJPAczO55Nh8MhohEjATH7+XyKEYigGTQpxstnw/H0a8Pt8bXqkc++8Cu/+blP/dH3ftd4cN/OaGt+OPve7/0esKl3jaRsaEBq1lc+pCRMvtwjRAWwAv0jlleuf3pU6SiM2u6kl0jV6L6H6qtX8mvP0ejMG85dvHRw+xmHzebOJefIUajc8NOf+uxovLWzdTV17Hk7YTub6QP3Xa6qJmf94he/+Nh3f5doOj7Zf8+7v/Wzf/DkffcNRPrlMld+J3cLBBjXlFJrWjs4tzXZmi5u9LIYjCaLBS/ntL29rb67fPFi15945qzksFLIBVFnd+/9aei8KzSE65j7v/VYo9VXHXCEAqGHckuKX+s6NUFEMFGAghO9t1jVe1bs60rPYut4T8fAShjXe7SPYLVRBAByzsxMjACYUmKmc+fOTUZjnX7+TW86dzylzl986fqrn//i1zJuQEOcvWrcHM/f9MbBmR2sgydIQtM3v+Wi58lsGr/08d8dbP7ew29++MKVx+r6QlwmJGN0iFBAB6tsqd7wSAS1d4EYh83gez/6oZeeuTr2qW7QORIwM0maHLrlItcD+Ykf+d
miyvLHGMCiH6uq+LxxgIbxUlnjhsvNL56o9KSfBaTWd7i2vzVBGhUBUh4QmEYgQycAWcx4+fuCtYwRFOnFVRUIGShABgOmkCcOmZGa6YLOAmSBA5tSaEWIGhwNxIYuAk2uj6JwjUY8kaM5xMnNcqnabK715kR5/av2Xf5MvWP8Tv/ZPn/3Nf/X4Gv2rT9z67q/8swc33ti6dJaCQIGCWKBZQhFxrKoBDJgLVXWAeZkSCQAUURVQBB2RqCAYk899AyUVSgj+JG/I4z8QAC0MjbLQMxdaZGBqZowFpVCUBEwe0QGac8mi01GKwziVqhy+8eru0c7exSsrB4d3qmqlLM9vbFz+9Q/9xwv3D2tCigigFEsmFh35qi0q1zSpaWbrGx6m7uDoqB4MW/AXHnuqWN5q57EkFhMTAEJAZQIVNDTiqLiYYZoBGohCzn4zAJItLUzj2RIkRDRHZtal4JwjdP8vWX8WZNuVngdi/7DW2nufITNv5r24A3ABFIYq1EDWwGKxWBKLpCiKaolSdKu7JcvNkBQtq8Nvdoef/ORwhB3h8LPDD+3uCI8SW+pWa6AkqkuUKJFVJbIG1jygABTme3GHHM85e++1/sEPa5+TCegEIiNvZuIMe/jXv77/GzyXQBgrt5hxijYFRWZxC4xoDoYE0yDZEaDaI23ds+suZwf9k6A6cCdKNigKTuRih0BOAh4Cq/WKm4CZAVNo2pCzGSOqatelKEpEA+ucyIIrIzNmyJ0hMMXEg26wsHNhDwvMMyUnidC47Pf50YvX77xwcPubq2883ry7PFzcPfwswvGef/Fg/0mTs/Xqp59+7rM65P3byzwMqZ2F/bJnz3/4w7/41R/9vusbt+d33rj33nw2mzUHv//lP4wHj37pk3/+F5/6/Pfv//AjTy334829br+51h5cv6G9Nbz4ubu/HsLs1eHHt27fXLa33nj7+y8980kdn/6jt/8nx5la7yAkQEBoUJAkhEBOJuKuUYLBrGggIgUF4egRuCC6ujgwAKd0NJvd6Ps1JVcx0WxWF0nLOReB0Mwm8AvArCIhfS6D5LGMOfA8C2SJSNEgEIVArAA26iGnTlH64myA2axEH+/ewMEloycaEbEAB18yCIOwC8gIXhinfo0cQC0QBkZwYcbZrDs8OthszhF9yFmdNkNNLIq78cuuAakyYyIa88oUO172fQYi9TF1kblRBAWHolGo9RCcmRkD0JbxdcVQrLrI7nJBL/eV7g5QL9M4DYvMh2HI/TB1+g4mdVpl1bcLwZg5ELu7q9QwB1dxq8FjYcfnwVo0ITmwezRngwQeAKND8G3SDfBujTFiGIM4a4ptOz88WCz17PGj+/d+8NbjWTp8/UdvbS7s4Zk8Pt/0q4cdjKgqDpD2eHld036aP7138PRydrBsrnE3t9jeff6F0b03ocRKqqQAumW5GMDE1XJHdVbnncQcwNxVvMB/8MAtun21YdyhVdMxRayDVGaeEAMAIPRAzgGZvK4W5DEuB9MPf/y5d1//ym/91V/5nX/98v/89dXvvfL2//X/9m9+5sUXo59dDMezw7lhgKF0uobJPAArm89gG1yNE2qwzWSt/AIECgCw45vW7VtV3lcaANWwWZoIWRUeqF2tV9yMGJFFiphG4hSg65pBJKS29JvYzFVolJWDHJ/c6/bX6/7erTvPHNx+Fpr41tuv/dynPhGdS0bqnhB6inHYDCcQ4vnIr79T3r4HbXf35EQfP368d/2JleBLn/j5m9fv9uebJkUDC5wqHaDOexARHOvIwxAqrktETESITIQcCQMA1HwlgCrOCldTZ6dvECqpYbcp2f1qt+zVUa2Z7U791r9zh2hNPzea/p4Mk2Hr1DimaiZOV3dIVbVDZhYCMVLiwIA553o7Rg7o4Cr1RE+TXjRqmFOKjKSOaqog6BYhS8GghGWR0o1FQys7pNmNODs+efgnr375mQ895RK+9v0/3ts/vLF84ds/etn93oef/NnXX3vlnUc/eObpJ9958PrJ+ErTQUh3Rt30tlGH6/t3tI+5AHbXSXE+n5+thlfv/XDv9uxr3/nGvQc/fvHF248e21f+5B988rMvodz6d1/5R08/e4T69Ov3vsG54Kg4AhUO4AFQFEZzIUBTR5PAgFA8FEYmcwiJLIQxWQvuhkDMUVW1pOX8zrA+HUtGTFYlgU6AKjKUYqmZW6WKg6mjS86MjAje2xhS2FOMReeGjWsgsuBewEeTw3mrTGvDlAKM4yi5jesn9xerMQgtIoI6Ao7u6EDVsJw4EqKDEVFskg9EMRBBMa9Rc+Dxiet3z85/KCIiPIL3eZztMQcytdoHVqUJM8pEX3SOM7O02WTCBEBFV+28CVSg0vKqgR0QghI6mIuYmxOGyMGNzIQBQ0qSy1Xtxu7BgOZeQaHJOc8m9uLuT93VJ3eHqflDMHLwiaHhgbG4q9ZF1IEBnNSUKAAZoJk7gdtkRoU+jcodYGIoV4hStQB4cnh0/MraL24f3fj6l75+8uajD92+OZyNj8/7QdKTzx14Q2tqVz5beIdpD9LNLM4RUUts03wm1wTePHkUQ9N0S9UemcaSa6k1s+oFtn0Dl0cCL2FWw4oIAiDEesywitdpO0mrpgt4OV5DRAw0BQkhuF0CtGZGgdHQHRxcJ09YYkCXnGLbpGd/8KUv/YXf+syf/UufP191JeF/9jef+6VPPPWv/tnv/eznvkixzXLWYERrHQyqe++EWDoAMIJPRikTBGE4+ea4e+V/Vs4mIlZDXXAFI4MA0zSrfmpy2L1v2iqiHMxDCGbQNCkxpxSUmFKLqxU3fHExMlM/5KzmsPnQsx/+/d977e//D//d3/mv/tNbt49KBpH9Mp434Frk1IvTYgZ7mwcn+6mjOfnGhwtcHt249+j8uZc+s394t98M+7PlOvccJtN5RL4EwN0AnKUCKVYXLUUDYjMHA6qiPNBpW+LkutuqXjECez+64u47wtgHkHScDNTqdHra8tb1YLdaXK7xAMU8OzB6ipgQ1KfD6nU/hQygjkZEagJGzAxuRMHdFAECUGBEQQdSD2TBsZRcMmAUVZ1GV1D/LIDpvJ3/8NWfPvPyg/YavHn6RpY2tLycL3/8xh8tF9d/+Ze+eHp27/Hj7/7aL3/x7Hz1pW/9P3/2U5+DYfm9175996mjdsjLW3eMNptxwADf/MHvfPLjfwnoM+8++Enh3/30pz9tw/6pffuZ5158fHL8xV/5TH/evvr692/d3X/6mf/y1Ve+e+P67Dd+/a+88/b92D3c21ucM4CjKkWKgVWB3AAUydwwUNCEMutpgUUD0ThYS4QshY2oQ03gKpJDdFM271I7y/0QmbEOpQEIxMFK9qZJQOww6TPFylCwYQbI5gPhAYe9onPHoEbECCqqKqox8TiCeULMQ/ATyp+ZY4LwsCQMbSQS8IAqviZfgAGiEyKYuhZ3RSZHtaojQwsxFXURW8z3jw4Xp+cDUdr0mlUjBwCpe2kiIhQiCITCQKhq6m4Co0UEddLgA8RFWxyDQTEzRwWouklyIFEzF7EK4dWb2+yDirur
D9wy39GQzaE23Q6IKC4ETEToYCYOWqHMyYuD2WCiaDJRjayb2PGBxSd3DgcFtx2FA9DqSyKy13JUedjE7oKIYNwtZl//h/+2GU8e7NHy+t6nfuFmPgfEVbcIyudE1wmWgkk4cFxwPNAMxI2JEsNisVjurQ0KPVjfPNwjyeYWiQ2QGd3ddjQZNASqDojugBRrGbt6ZBDRd7aRONUAmHRe9W9qC2+wI5gy1M3Q1cNLhJV245X5BgiVZkgoDhDx1Bezpz7/O//qT+49Ov1f/+2//rk/9ev//p//t1/90v/3mU98eP/OEyAteIAIPfYtEGBNZ0FHQneocjcPU+2ukzwAIAOcTITqz6de3sncQl2PqnIMAbdlfKp0CODbDHUAwGp770SEXhwUQlQ1VCVuc15lGRMv1uv1Yv/gzbdOOMx+62//rbvP37w4f8yYiA+W16714+PSlG4TAGcP31sZhbVtusTmsNhvNxo//fO/fHTjmYvVwETiRsjo/D5+V+03CAkwmyLWBFxEdwJAN3bPjsxhV38DMTNrLkyEbgTVMGjyc2fmCkphuOzWAUD9slVHIjSvv7iKue++Xr1a6kOqPyuh4rQrdTD06I5u00+QPEQin+zGtmN6CMWiQgIgJCdX8IIwknfobBGDeqgB9koOwawYAoAU5OZwjMPYwMvnr2j28azMGpCC+RTad8lQHfi1738zMmwS/P6PXmkprB3uv2nNzB5lePDmu9KkmNqHF6f/5gd/P3QYO4kY77/2al7NUzw7eet75vGNV8b1iosN777Ngb7er/s3VtrgfH2BzRyGsqlwtCuAWsDgWixACERjvlARhAC2CHlOWjJnLNyfXLRdxMNZNgloTRfM3KwQdS5t0y5zHqpOwafbztT6cRybvUShcZ8MEVV1hOyYQAYOwb3E9tB17kAAFCKIlY2NmIhiGEeMoR3XPSvMpNxexKEsPbYBB4gduLFFRYg6EgVAV1VCREZzFjeMLCKBAxMBZg4BSRDy7SduXqzeVrVqQB/YsU4Yt4wuQmCEyB4i1QwTQ8hWSh73m/ZDd44ojxTQwEW1Jlx4NagGAELmEELwyRjL0aneubQ1D7jaaNQbpvqqJAi7ZoScd8dyYvJsrcwBubqAOU18wlruyaHGawDUYj6p9dARMSDRJFWzSek+dfE4wUcESJiYEbBdX/if+rP/eXOtAc7/+O/9vaMWTo4vmGaz1iKv7jfvyLB+9A6Xi359Y3V4cNbtXU+LW0oBSMq4vr63yOu3bx82qaOcR3AzICAai1H1wJxozoZold5X9xCXZX3asNC2FZtYjlfWxO0xnLwCsf7EzCbB0/vXzirQA8fq32I4bZdIvWXfGJ3m8y/82V/9+h99/Qd/9P/597d/+5/9/f/m+t7sIx///MHzLyElG0ZucLR1aBJqhRS2g1OvT+sKXLM4aKp1Vs0gwRBA0QhrZTcEwAARUQEJgQl8MhObyhtORbP2q2gIAmCVSA6TBiICBREhsJyLeW6a7vjx6dnFieF5v7ZnXrgx2zt8+623jw5vrPphI7DYv+EG64vhdHUuoqs+ZmG1xjw/8cTerSeOnnv2I22a50FTjFl6FWhia6Mj67TOmPmVMX4sgAha5zZTXiA7ORavI3rkaTWqD3G5eorr2ULEyhTA7SB0S/d1u7Lx2v0vuqWV7gCc3cKDTKaq4uQeDFqnVjFmg4bqDqiu/GZQQ/vMpI2hbWIk7HUQAhVHd4lU2ApYAA9AjBhcQ7EmJbQwbdkRyQHJGXF0dPeApAIxUDJAyynCAS4tArh48IKOAMFcEgXAjmNWLmVsYzJFjy0jGGoCErW4iBwIG+eArFyssWYDzWzAgYP1eeC9graHIH253x5cI4/nF8dx2QyqEBKru4OaFZUwWI4Y0Fg2mQ1TTMePVz9+9cen4UPXZ80C5amI9sZ7cO0g/MzeuutnDSNpTEk0mEMIoeW9C3pgIubo4ICVDVnGvEY4jKHr/RzcAFhcNLsDIxu5mWVsrlFqoeIIZGjKoxxC2vfmMQTscLwQU9onmTd5xCWFgGaKATxAZmgU0NSKV5YLBSdQw+LYqEVKqKiqk0sAuJa8XCyuLecPHp8zJ8ljolkAGN0B2d2ZuUuNgwBIE8nEIzCWQmLR4BMffuLBwx9uHo2ivTCKT7G/5saAI5oyWjF3LZV05DV8j65wut5H3kLEamDroFbj5beNku8saKoGl6i2OeIGROjsNqlYDQTUANBVtmYJtZySS22QoUa9AUClkFcV6G6N2Q4JpGRrfNWz3v3ok2/88OW/9//7x/celk9/+mPUnMH84PF6g8Pq7JVTeGY2X6Qf//jHty7u+c07R/AsxRmEfQNJTSjjah7l9pN756shizIHczRwigGRQGvwgiHw+6Sz2y6MYMfMr2ki1UBmogvubnWCSQSEVvMfgpmZWiUbImyfxbbMot0ZsIkwXg9J9hk6u+Zmb/7tH7761//aX/nNv/CFt+4/3FvOAzUb6wFWTReyDyGEmBOEuiVSmGB7RlcAQJ8o+eBkqLv3yYyq4KAVWlY1QA8hiCv5dr3fXhoAboiTdq7i85PozNwpBRIXRQYOpE6sBtBvNjn3qZm98fq7ucBicXsc+2y+evMdysv7765HONnfXwz5wTicXj9Yvndx9t7D44BzHeHo6Ch14cUPv/jk03ezuhVFVyRo26aYDf2QsNlV1ulypTqDV+W64wSsK69iJZqXqrxTQGARUbeJyuJTqjURuaPCxNrajU/gCruxXuq78wU6gW9EpNP1Ax/46ju6DoCaC6AiKNXZ1ISV1d9BxbnUYoyEgQgV3IHNjAzZIToGx9oOGAIS5IgK3g+WFhKUwF0MiMhTHQIT+mCemDAIxJAYE7qCm6PFJmoemTlxsGGktikKRQxj24+ZEAOpQ24IovPoOBSfz1PBdcMMfWm8hFgMhtguZCx78wDGHINYCbqHboF4MWuhqGMHYBIQERzRBEOgqMUAhJtYRi9jmaX4wtN3Q3tt/2DhuOkCrPcM05z2Z/Mm4Dj0m6Jmqe1UtZTMPGvbxbg6RWzdytQDkxXZiBwSJ/etOs1FiyMkZwMVDUVxBlxleGgoYH7quaOIxgjUeA6ig8k8DSHNSoyiIcbGFNHVSMl18IBmRBhYAyq4qZWIENwZGzECRyY0JzSiEEzx8GD/7Owsi7iWNjXoYPVOdYxEXUeORICEHsljHG9en43Z9xZPfPiFJ47PDbBLOGd0UCPAbQ4DVn55jKHv85izqnIT3BwEEclNPwDL4BYpNvCKP6pbtXygrfYa0KDSKiYTCEJXMCMwAvJKHDQkAEwsIu7OMexwSWYGLzAZy2yr24Rdt5XuSVT7aXNHIIqzRROGf/Tf/7333nxwuHfwxV/7wv7tp2OzR7DmBZ4dP7h47e2yPgtHex/60DP3Xr33/bPXfuH6M3D+OMTBQUwGJjg4Ojx79F7FkYaikxtP4DyWUB29YILdGWtDwLJbfq4w3KYyvvvXdi0EcERgQnXfUuC2qMxUFy4nrrgVLsKWLe5bYMeQHIpZv98a5PHl77z81/73f2Esem3/qPD
xULDhYNIZEqGBU2Ib617KDWpKANiuCAEa1LAVZ0dGcHBxCDXgTQEJK0fTYAroMETyy2XnqiOuXjazgDgxBouoFQuri77twpAz6Hj86Lybd9/73ssU0kc/9plhkMPrRjGWO/mpJ176P/0f/o/uZ8++8NS1o+X8Wvf9l3/sj+PHn/t0aNq9o/2bT91YLBZl8M261CoI5KpGTmjQtslNvUZ+1Q0Fbg+deXAEmDp3dXSsNgJOFalCJApFqmTXkBSRwASprqlODCoO5sBbE4IrqAszu1xOUz5wv1z9yQ6fqbaawGRIGSyjRWQhSHUEtW32zQwrOxYxUCw5J08VJkJEUMtgQmiMSATuoI4EbIQBzZljoKDqog6OyFXIBksAGKSsswPT2goaEghkxy6AF0bOiierzYJTf2YFhggAQm0zt7LmAga4QYvqveCYzU827SKeZYnoiXk9aGhiGUpAgiEhukh2dkDPspHsXeqIKGJcrc4FAhG4uDuFaMGiDz6OYIrYUOhCaFJs9kPbrrw/H0407c8BsH/4KLVNaSh1bS5nQBAimCh4F2g+wjERo4uquBOiFxuHYaiM7/pQ1ZzdTZHEuEDK2dE7dGdCdCyIkwXTiJjbaDJyABMMIcf4rFIjovNlcFsTRI/M6oPIct7MW2rR5qRdsIBglleupeRiyXC2GvushdPSTazw3mI2n6WLswHBZimRT9Oe6hgQmJvoANA0cTZrzfX6PKCS2iywKlmILVhhcZfiaEZmFDBwKEZOK5EQQksEhKUUtxIxXcXKr16dPm2hptz3CkCoOrPXw2WEQIBbHwJEJEIEQweuij6DunoD82ozFBkR0c2rG9wshro21EpWL2AHcFeq1A4GAJFKmY7NfL58681XvvPVf3PraPnSr33O0xOE18ayXucHCdKD18+vHx49+9LHE47Hj+4/eHC+uHH0rW+9fufeepb6sT9/9tlnIZimUKA1cR0thMSqzOigpd80ITqY77bnlTtYo0ImHVnFiAygRlg4eLjSoFW5KSBSdQwOyAY+rc44CfR9UmhM/ieAgFPShZHXJwCdtC0gATYl3t6/881vfG/v4Pozz9/+yRuv77UdyYGQoZw3aaYK0Uk5rcoqUDdRfdR8glSAmciszkIRGZDREUDBg5qHkBBEVdEdA5pppRe70wRmogEQOKIBhorR+FTpkRjY0YnIVYEdiFcXQ5e6i3UfW3LH4+OTlNJif2+97tUKRDNPAejN115+6flnbhw+8ZWvfPcVefe94zd/6Ytf/JXf+kJgDIFcabMeTx9vgJwTsARjkGpuZMzmRQdK4MKERFC9jdwRCRmJJouY+vErygXgRkoGMInpqmueIyh4bYJ2aMzlnq1WafOr4Mzu664o1znKDquhLSEStr0/gqNZRecDYHJqFNJo2CKA86TSu1w/CEMIQUQAkruLKUGAugOzLTefCAgUrbgFIndSpBE1m5oBMbYhAmtRIEr1xjYVD9I0QNpIzIgEBhYKjLjHLUWxyEuIEksB5SzapDVbKg26uuJ8L9j5GE1mxJLmm3HNZgcHMykEKk0DIJhNIW58iNxqotSk4lnIlqabxSzltROgBQfkELHdlGMvFrlhU3Tq+7WtToO3PZZhM7ogwIoIPHBPzjFLbLilMW3oaM5Nyvki8BJoOVoPENmD2SigoBlH8QYB3Sw4GdAIFvv1yezgCfXVRjplCh6ICyOTeI8ZITddp22cZx573zhB0fn8KM/3KFyLwAslh7QhEYtlmH/2s3/6mbv9H/3bvx8kNt3B6fH45r2HgxTp9p8Abd/+yY2nbl175sl1Wp1rHAr1MJ/P9eiI3zvRZddxkA0EichDjzEAEzgk4jZFJxcXwWxAHDr0zaLZ0zNdNJj9IjM5GikGTNkLmbp7AfUUx5KzCoMjsYA6oqvt8JarYCIimgMiT3kzxEBuoCkyg6E5IqqZuLUx1Ag6yw4YFL1xQAcnhBhLkYiAamjR3Yp5CE3yZHZCGNwdKFBgMwWY2DjFPDFDKTXZI6W2C/jl3/ufLs7eeOL6hxaHz4jPx/U4jPeHzabpbF3KraOjp+7smVmgeCPeIbRn7j5JKXiyR8NZN+vWaJp9c37atq1qTB0PeRVCVkvuHQYTzu6dg1PdSYPWRtvB0TeIDYbGzQ3VXRmDKVYlF+6oNVTxdavJsNtmH+EKGltJ74YOAHW+amasiMxOpjAxstmdiKTXqP3BUfjDf/PlL/7qJ9bSp5SMDJkSouC8GuubA6o0aYFQcbbGWQ3EwQMm8GDYI0SYRsQFABAYPAYYXEZ0jtCYg/noCIpMrogMHhCnkL06QXCtQXDBaWtQU3WZNsYY0N2zrJntvM9ZdKXZFBQP9vc3o4u1Hlu34uViMb92lt945sXFzRuz3vZPz9NvfuSvfuazn3vvwcnQFzOlOtCMgDXsnsENmaJHU83IAIJBU311Q/NKNjIHZCA0rd+TATiZo6MaIpApMg1OADAMhViR2AECsBErah05161bXcTUkRhKKeDBzMwlhFAGVTdHRnfCKCUbAl5B8upCXneoRDSW0cAQp1muGpYgGptg6BzEzN2LY5EVu4BHHWQ2I2AG4OiOjsLOig6klKvNKrqjABMyiHH2U6In21B0UDbQ0ZU7DRoCSA9uozGkMcgsXCO/UCwpBABD5DKCuqYuiIDDIKlzoS7RACN7isUoSJstdUxOsYMQUmwjaJkxE0NAQioUKMSYvUQiyrMmoYMrA+JsFIltSZhUck3jncVWcgkbRnEE4LpeKRJ3HfMeLEJsU9BGBFSc2Aw8xC7SrGSnJlKwLA6uIaCjQwQTAyuOqsEMRSFn2yAyElupLMkE2FCAcRwLUwAECI7kOk2gtJh7E5QhM7VtvsgtyeAbTssmzUaX2WJ/U3omi3CNPXzsZ2/e8G++9s3XTh+s3n0XRE+RrWsPht6PQr59YzE8BslxwQM3luxixIuL4L10XfdExxuOGhGimptTiHVk7gYYeOpuMCC0ltGARjsd5Ww0nLVLj5FjcAQFVIcKPPqkQfXKsdt16HXPaO9rQ67sLus0l97XyQCAVx8CoGrAWwFeIqqeSI6gXkNhEczVoGkwq/RDXswac3A3dyUKbiE1XGRdsjZNqwWYImOlDbgGV08Hs73Tx2//3lf/1f5scfPJz8UWR1m//dNHR4d32jY99cLd2WzWNA2Aj2Ovqpi46+aMdPx4ffv2HTNNKbn78claihLR2A+hRdlIDDMn9qJuYk6EqUZ7uznWrTw4M5kZWevAmgUpuCNzo6LMQb2QXzLhcOrdK6ROu+YRrmzStyvnlc7QAWLdnEPwajHlhmDgoSGkdHrc//jl7/+t/+o/PTs7a2NyK+aKhMToun1ahKqT9epzBQQY0HwyY4KuOlIBgDtPJnqU3Rxq0QdFgGpIgegKhlAAtXa+DuA1qRLj9vMwgKHzjh/LnEJAcej7zVAyqDEzOBd1sVIUwBydHQFjUPDHx+cxzI7PNmL8a3/uN+Z717/5J9959Oj06OjaU0/dGcaenEIIDoQGIhmJ3nrzrWefezbEqEVSaMYxT9qrHZWFJj
cMrN4OW2eZLQXejdjqPANYsjISGEQMttM7bW+EyiLbCg8n67HdwGkq30jqWodLgA4OZpN2ZNfp76ho9X9U011TLyJtSogOWAgZjMEjECMacPXUIhFhZgCToo1REAylwmbowR1QCHKl7SCJ6TAMmDgErO4jyDGEEJKJbMbRFWHQDbk0IWmpg2UsFocyrvLIzE0bN6NoVspotdfgEFATY28BzZ1YkcZBxpLdkWODvbUUGybJLgZF85DzPBFoSE07jjk25JQHQwE3JKtbSrYATTBHL2Ylo1oWFNFxHEGk6QDASjaiwA2XUqJqiBhDihSHzQoHW+41Mc3LuGFOsfQuWIDQPRh5dskaGyIK7qNp3R61bdue9cdNmoMzUSQid1AgJSpEjCGl5NAhs8AmhtALHl67Tc0Si0MdmSHDkD/6kTv7afX2vW+cPPIFHn38bpfasrc/JxDi0X2x3tj1T70gMFurhAEpljina+OjUW5Fn4U2JMLGvYc8E9JEIoZubWrmXdem6F5Eh7wuJg4xiSekDtX3Y0xr8dqN19JrvJWS465e70q5uRkabKHhXSW6ekVe4u9b0gQgitsHOJSI6KjFhMCNEIB2Rd/JDHTodX/JChkJ3VwdmIMUMO1SiLkfmxAYQyklMg6lD01adOF73/n9N1598+mnPvr626+vw2ln4fq1Jz5284W7d++qngL4epXHflTwpompiaWUzWYVEwdiWY+pCeYSY2MKs+WeA4zjOK7PA7WEQU2kWAyhFAkU1AXBiambtWa23mwqWtWmfdWiIGqladoi5khATl7jqC6PWC30Xskn9Y7fHvgJu/kAMuvgAIoA5uzANcPEoVbYnPOdm3f+4N9+8+7Tt+7evfvqq680gZgq7BN2HKdacwwUt6olRAZAR6+uCIBc6b7by8C8EowBKrIEKAAAHhEAXJEqZxMBKooEgOSOl+5pDoiE5pUEiaE1YgdUAweyOsoEBSUxy+bFBIEJTN0hpH7sn/7QCwhwenryhV99abMe9GJz/97xL/3KF175yet/8ic/vnHj9unx46z9k0/e2vTnT9+9s5hfe/nlt9+71//0p6/effqW27BcdIv5Nbhkr9Yd0sRJnwwVp2kIgjsAKZKDGgoajJs+MpPTxNndLox1NyaqVTGEiAjMgDKZGNJucOqoPuk/pm1ZlUrAhM5tT3FN7LPibvU+MzMwYAwGaCATP9VABQ1IUQGgbVPkQETibmYiDhZt6/WNleMKyO5kHprQXxSE2DRNZitFTL2iQ2Lq7lmKKoaAEUOXmEOz2WwYkYjVpeWIiE7YQcEQjMJmGNGQACIyqgGTmTEiRWQCAGgwBuRiMpu3JtYXTSkhGKofzK9JLk3EcTMkTj6moWRK4KRhIHcv6IgUgrNoNVWBGKMBmAUpwOCgLFpy1hAIGKSom/pozSJ1M2hTcBfXzThkCo7KbCT1XCCSIqqJZiqT3yxaAA/mMbb7OBwPCKCYiCKDFQNVBo0GaL3yTAyiFXIZRz48evLGtWvrrPNuHsmzcsn6xJ7g8Orp41UrNF+tGo/JJD9e5ZMH3XI+2IbWVqw5Y3F/L/BR0+qghSOXZXoSr71LHDgt4lCCu7lFJAIzd/Wcs4n2hACiVgowMaplNW66pRRrmogIKllECN2RAZQcEIAIuY6TKdCkOmRXICLzy8bkaqe5q0QTIO4oboa1Sd9Svswr9Qq3duFITsSAFWcBZkYGNRsHJQwmaw7RXQEYsS8Fu7RE9LaNRXqHCDEMtl4uDrWcfO+b/56kPVje/fLXvnv76Rc+9emfuX54NI79rIt/+JV//slPfvrBg/M7t58BWM3bxXvvPc6jNk1zdHRIrKKDOctoiLjq1wDhT77zY2R64YUXmGdmZjbGOCN0wNQ0msu6CY2ZqZavff2bh4fXX3zxxX4YY4wP33s0m8cYITmUMphBTLFoH6nbsQR36xxux9iwKzzTkdzu9yvGgbvaD2BOO8FhZSIhmpuBzBZ7X/qX/+43/uIX+9WqCSkQg8u0368QxGTUDuA6JZ9snxWdEAgJ3Qs4uQdEAMzVmg0s7bwfr87SwZCwqTNVd6/Qh0+22rBNkquNPNYFXNT6zTiQMyMzBiKf5hOoZlM7wTVLDMgAqTJiw+GNmxfr3lRSQ/P54gff/8mbb7x969atw8ODl1/+0fPPf+j119+6c+dOSu0ff+0bbu3jk3MVJOKfvPLan/uNX704HWHLcJ80wDjldNAVlou7V6mJuyNhIHD3fj3EUK/YCuVPzIoKePiWA4OIqNPzmNmUWHCF2Dqp3C9vk92eGHYt/PbdQahN/tbhwxDMoDoyubuA1EH9KON8PjfTgKG4A0BgRmRHNZwECehGCOzAaimlR/35ONZsKRQBVQ0hqI9mZmgxpsCt4WgmQGHQkSKZO3iJTSDVUlTVBzf3gsiUWimi6uDWdR1kcTMKIURyFXRvOBBy23HkeDGubXslLxezzWodGkWe6QYRUDUTAEpQCUOOrk4eNENoPYizIhQQcxUDM0O12GFgcMD5rCUMRXMk7tpGm3ZAd7W2TW1orGzMBpXR2Y1NgxRVRxAcC0GBkbQ6Sqo7u7sBclrGphmHAkgMofY7RuxghY2Sg5NCRgORBGbPv3gL2dAQmQzQy7igcnuPQz7XDDo2gbAA85xh0bqxt8s86MGTt60/JhDyFrqZhLNyQiYp92PXncxpmQKRawAAisaGXmASboCY69SrMCKThHkXoRmHs3uPHjzuPnowBmN3N0V0Qg+EIjA5Qhlddtm2Gw35++7tHU9795f1+8tbBSMnUKkWifUrY80XgkoMcDWabjQEE8MgIuv1JsbDyckALISghbomlLLi4KbKIRZVBDo8PHznjR9+52vfvXX4witvvbvK43/yn/+1u8/fPn3w3sXZGlTbprn/7moxP3fy85/+OHgYh3fvP3h87eDGer1+/gV85503jq4fjKtRpIx5/elPf3qzGR4drxeLxde//r2PvvTSm2++LiL7e0cnJ6cxhibRjScOT8/OX3jhha985Zs//wuf//a3vvu9773SdbPFYvHy66997nOfknFTZXAxxjHn1EaXy0N3ZRXE9x/OXel/f88Ol+AM1w6RSSstt5pVCczny7fffvfBgwef/8WfPz4+jrGpve/OyIEoTFQmcEQmR/BpAQHYlgGoFJr6E6tNPXgAJEBGFHdHj+4OKDg5cbk7UBUO12dwAmAEDRN4gZXW7USO1QO9oslgjlL/aeIkaA5A6OgMVreD5oYZIJkWn+Q4lMf+4x/7yLsP3v3TX/x5FVctn/m5j+/v75+dvXf3qRsqcHT92vXr18/Ozp5//sXj48fPPf+hcTAHBUQEnoYCAARWaUBwZbndHXMCIwAjdYUhj8jglTdaZU3uhKQEYCgmrkYUAChrEb8ckFbl13QSycmpmmQS8ftZCQ5b1zDf2v9Wx8O6BKgqBwIn8AgeAAbzkXjit4VIIqIatncrWrGpkXIANEc3QEVTBofStm2TupWvHZwcRKwCD4hT6p4KSTaDcRwMjCtJzIiGPIo5EK/7MRSICUJEMQXA2ES1f
H6+ImuHIcfosy4WKaAa2dCKc9CxX6aurNfBiBq82JwYm3k8v7iIabnOBuaeEUXJQ196zBwG1DUFUwBAJZCaBkhYD8aKIM46KzT2Bu5NtwgxFOe8GkKK6Dz2w4VZm3CxN8u2Rg+AAcFpMlNGMwNRUDQFM3A1B1Ukh6bplqf9SQwJQzIsbhIhqimOUgyCJ4PhYtAypufvdNeW41Bgr9lz8KLC3n/41uxa48OmmV1vHl/0J8oKLYNcv304XEgGHF1/7/d/9zd/839xsbkXC9EM1iXxAlWXXk7fGt8+gifnwS1gp7gJ5CqVr4FgSAFqn+ZoCk3C2aJdl/Xqom8ay6KRncUCx9pcqIu7AlaXUQN3nAo1mpmjq6qH97tqvF+IsSv3AdBt6tNDpNxPo9dJZ1RzK7TUTbFKBgQMXLEJUGCis7OLEJ4w8AABQF2NPKkWYHUk8vmwKQfX9kK0P/rDf/3uT0+ffvKlH/zk1Y9+6lO/+MXPnJ89ePjOu4kjuYaGVmenP/vJjw65fOu7P/riF3/lwbvvtG37+c999lvf+c7Pf+7nvvnNbwLQo4dnt5+4iWRme6vVikMahuGXf/mXHzy8/9VvfOPFF18M3Lz8ymsvfeRDq/Vjdf/J62/PUnznnXtFDZyWy/2Li4uf/OSVw2vXu/15yYbehMh5LOiUYgMKgFapjx9YGi+Z7FdAGHw/C+Py5wCA4DDd+QYOTqhmojePbv+D3/6nH/vE89cPjx49PCZsJjNaJKzzkpr+rAoAhAG8chmriwZtKwwYEKAhyuSSDbECwm4I025hK5IFA2cGqRPxGic3vaDbZBIB5OBok7gft+qzqoWtVosIAMBQ3aEdp04fDCtJtr5BdHRzR2Y20WFc3X3q5no1AMaU2qbp8th//OMfFR2l4J1bt4c8Hh4euNv1oyNELKVMO0qoH6ASkC7PwpWDTfVAMAI4EOGYZRwEGM2FCLxmKQIDkZtearaubGEBwFwCJwLc2qNOPb9Oh2H3l5eNUX0EJPHJG4Coktp9q3SleqYc1L1AVccRqCpvGcOiqqroZFvL1WoKUrXMBKaQ3QMBEwYzYaZAMSCZWV10pZgWSCGkGBNG7fvYJDNDcOSYNQfkxWwRcurzGtE8S4wRLDdIRHFwghDJ0Ys1SKlrUkpjP3CkNF8EZw5IkVabc47cpWgjszuZRmTLiGOgjC7eXOzljZ6fw7CGAExOaHlq2H30slmXs/Prj1RkY+u+wwZqkFtM8+XBtettaJO3QViqKM/GYlSCB4VIhGzqVdajBMU9mThUS3czUbdicT6/xg9PAFDE0NDdFVxcLBCYytALweiw1/EztzuVdYr7VPsXufjIk4vow4NHxs18vVodP9YQ2v60xNj4gtHWD09Pr998YvXw7f/Xf/e7/8Xf+Ssn734fBi9AbLmwLEIHcmawfiLiyjGTBoUBgRBCzVV0U1UK1KSQAhvCenN8uj47OfPrN5Yc03zRlDEThdrVqFe5ggugXF5sQESojORYVV1u/oFuczvx3zWku1IOAMRcrNT1o6ioGxJ51bTs+NpUjQSwulRGpkePHgE9ZwaIkdHUJAYzF2JScYDy5J0b995+/V/+i38S6KBN1+4/Xv/lv/of7x/hvXuvNmGZODpuOAQzjmFx7QC6RXruQ0+99+Dk7p0nulnjPn74hduB+l/4+U+cnZ533bx6YImOgVhVfvmXPo9Yzk4fPn3r1u2jo3/5u//q85///LX9xbK1rpt/++EPn3n++etP3Fgu59/59jdu3rz5oWc/cvvW9RAChWrsi1I8hCBS6sFhckSe/MRqY+7gbsC03adOBWJXcj5wkGsRmv7AHMEDslcVfEwxdl/74z/5G7/1lx8+fNg0jakHZvNa1sls0ikgbbcF0zeVdTPxXLaUfLhk5Vcu/8RxpC2rHcAJ0ADVsTof1xViIv8DQnXL4Yn2U0E5BERGt2pzj4RUj8NEpzEEQjJwMKvvzhAI0ziOKTVFxB0ciAMh4ubCEAIHVO1VNYSk6q6RWEopaEFz9W4Pbsp01UF9e6wd3CZ7nWo3UjdSVfcLCFWwdrEZ133pFsFNCbyYgQO6gbOaArF73ZayE5p7DYk1s4DbBtysYmK+7dbNrEocYDuvmmwDKpdWp/6dCM3V6g3i7lCc0KvllyIAglkppfIYIgfwqGZEXO9bBzSoWQHbLk2JA/Tr1ekp5H6UVC0/yEFdzd0jB4Gs2WecyNXVETlnYUYCRs3RuVECteNViR03s84oE1FiMlUpEElTjDlnLNrMUhMJSWLyWYfjZp0xQuSh5BDbNiYbhXVzEPZWZxIkSJ+Dsm7a80dy/CBJsdyXQDFU5XoARGMydafghByCgeTCoKil5h+4ZQYFsxiDYGGXGDo3pH7sUrsJJ5mKkSqIgQlLIRUYWNutWQ+5i6qqYLe/DyEWw6LeEMcYUGPAZn/WhWv9eHFhtJhzd/smiq0DXOs4FgTM5UM3uiWu7z9cPx4ic37qaG9V7ifXdt7EUMwweLffJLvAv/iXf+H//H/5Z6+/9nMv3L3x6OwE0XhcIGUEu3lwIyhoq2WjIxgEAmVAUjdUR4TIFAMGFDLr3zvZHI83FnFP7ECVHg06BJuUqAhMCARqFadzZChTytcHOostbLmty//B3K/+ZpJfq2GoKJYjgjgUMGc0BK6eqI6BCRHUiiEGbhgDABwfHzOzAVXpCyMDINFMizctHex3X/mD3/uDL331xQ99bIT5zaef+MVf+tR6dbp6DB01iKYQEZfFhNBE8nK2l4ccAz57+wl3FxFE3p/fGsfRUe/cPBSRosRM7pWyFlRtuNgc7R0u717vh/Nf+TOf2987HMdx3i3c/cMfeW6xnJ0cP2qb9Jmf+6RqKbm/desAwDQXYrQKtrrEhGJqWk0WFYFrr0dYvUzQQbfBJh8s5h84sPV7hWqxgASIQMUdA3LTvvHaW+vzzWd+7uNvv/1uCCnGRkofU8hFkJEQXbW2jHVpmWr2ZDOwZWcigis4IUZ3nWanyAgRcARIAIBU3B0wIqBDQUR38slnHByswjNTzQRz8GoLCVMKUfW+d2Rwd6qWB2ikbk7AwV3RnAkdSQBEKaZOJBNRjadQdzcJ6MAkJSNTTEEkB2Ynt2rQxFJ9SRCQiOt+xbfzTNjW8QqsXz3000I4qTopRT47GTd9Xh5EV3XzgESIZqYmqkZITsa1uUQQEa11F6dx6LRVJdqCY9P8vE5cYTtERcTa7UjRamVQ884Mp3ErTdoxAwwACB7rDAMR521nJjFxkWn3sNkMB5aSMRu4m4BXHA/I52Dz+Xw2W7j4CHnd583FRiQys6sDUB6l78c0juTKbolnRcp61NHWakbA6I4GyoOhAYwaPKs5sRQF4JIDmOReuq5Bble5Vy2pjafnxYsHd5ci6mneXVxkdYuyZwPaptNNHE7bB28Pw0oIFmf9+YJxX+3Zg1kQQsPGMWtQdzDh0l1b+cHYXQ/N88OwiakFwq7rVDIcLPc6NR8JLauNmhmJQrceT7RYw23WTSZVVVIEZTUarUTCQczA0DlZ42oj7s3aG4/ON0t1
MF3EGJs0W3R9efhenJ3Qam/RkIxMNOqsnUeBDcD87uGmi8Mb51HStZtLhjJerG22twRjhrCYxfle+/D0+Fxz112/Mf/wp//UN/6H//53/nf/9X+JeEq0/yhfzKxb6ZrHOfN6PsMmJuY4yhiodSGOVCc9yGpNC4Rysd5b3FjeJEGRUY5uzeloHVsX8RgSKkZxMSdFUwwegrgBuxpgMMqGnLAtIAWMeNIcXe0v61cRiTEiOTCbiyEUsDbwKXh1DWzIbdS2mQU8HUOXNxfXmn2qK6bENrQiK2vC3jKePzo2mYekDKMIcJq5qqFBysu9xf/42//we9/64d1nPrzx7lf+o888efvW2emJiLZdKqWAaiBXj8QT1Sy7YAzFPQ+jE9Wwu5KFgA1g3RcgDDBKmQVAoKxVaOq+3D9UG1I3j7zMZYiBEJqsuthvRCWkIKal19ohDaXSSALpZOqFwGbA6BTBdAIGDJEAFKZxF1Q0d4vnqhkjuatXFAR3+nVzAEePPhPsBYUsugoHGHO+defm/+Mf/KNP/9yLKS4reVFtg4xSfR0q3EEM5ggYicAIUK1OYgAJzd0U6htJPpm7OcD06kjKhu55W6ABQc3ALVrMQE6ml9MDqhz5bRITgaEhsDsxcQXSq4N9pb8CAlFwBWYz7wHZA6kbIpIAJh/HdYydG5mPMTb9xrq2FRwAHENwdzUn5C12oXWDAMoOwcABzGnSvkGNI3c1NTckiIhsbgamVZoNTgABSby3Piz25i8/PlYbGuCirkyGWRXIsUGyAAOBus8RCCEhmziYY+BByl7b5OICqOBcHYDUmAmRRGQ3V4fqPemeOBGTcB9CdK3PE1xDbbnBCKwDFxONqBEIfHBAIsiaKTSiaqBmRKCRcRVGIE2B0T0JkHlgSOqhDf0o1vcasqmIIhVOFNaWDwIJDoY4b9q9uXsG7E1jf33v8PHJBXMqMKAKmTuEtp1DEkqgaI4aELgJRtF7oNzv7ZGyFRgp+HK+KFkFx739fVVd9zmUKCJaFMb5cKG2Ij0Lx+/Ret3kMUUrB7F/ac6dQyyczzchiaC71JE0kANKAR1MbOCczYQccs6y2SAolh5mQWFUgBTbNjWgSg4z5wsUd0NkhGCmZgCm7sUsTDCnu6qLgCiAczcHW62GvG7K8taNbp50tTlfr52wI1QHwCaZxjZZxFE3zbNhdZhnzRg3b90v/bGGtLw262V9QO0oao7Ktn6AS7duyP0b9wvHj4XmXbn/4KdvL47fYgu3b1wrthFgpxXyiILBYzJUiCjSJ+2QyMGtLJu4H5wD+PUmOIsLu1vi63NqUCMVs4LYqUsVdxgaEdWITLfJYKRt27EQABQprIFjNfySKx1Qxdwv23Z3p7pHFQ0xMHFlvO3CV4nIZUXBKLDk0jiFGMZxJA5UtEE+fvhoHLIDCTpGFjSjQuiHs/b//X//b47vr5+8/uRskf7SX/tNGy9OTo+1SNs2ogKIMUQTiQQwacGVCBHQXMyNkSsGgQBISK5ACOAFIkQdXCLEUnLbsIuqbJKX9ZDny30tdnZ2srh+Exk8bxgTAG0j1ujyaKBhlcRsH3W+FqECz85bN12k6pdoNd6HwcGsTjARDaCyWWxHja+Hl0DRPHIwhxST5AEd2cK3v/a1/+1//b85eXxcSSg4YS6mSAAEaA5OE7TugLYFsStCQeiT8Rza1NozbOFdc3Czij9MLSk70JS1DgQVLacd+btah9fLggAQQSuWbQa0TV4knKq/V+kNZUBmTqUUd2dK7uZhsGHWpkORAcAZO8klJQPqoVRT7kvvnekQAU+0HZ9se6v7kFbzdrTavVchMAAVH+qEFKlKDxzQjdxhoeDO6dHxMTJAMikyijHN3L0Gx0guEAmIC0JHlLNUhappIcQmpWEY3N0NnbYZeFZHxRNh7Orm7AOe71uq6nRdkTmZkjomBIcilsGUOWdpmoamTNCC3KgbQqAs0IjVPRC5oRmgOeioyWKi0DLmwWJH7QFgpw0HMDCFRKnlaOXCjNq2EZeT4wuAWPoSu47RU0in5xcNOocQmERVFEAUkI1oiKWJ1KSkhmKeEGGUCAFo/6JYUvSR+8I0QlrPyrrrH8Lp47w+A1FbhOEm5hsBD0Pb2wapxYQIEpQMI7qAqLJhpEgQVAA6DQFRDLDs7bVtDAg2m3UbuQgh4OgAHtoYYgzJN9lJKveMaMrKkckZ1Xxr8wYqloukDKYxNO1Ff5w0xhGaWff4+J0+e+bZOPbzWYohZPOQvA3RV5trXbtYwXK+5+cPu7fe0vvvwfy63747nzHTQ3ONXVsUVg9Xy9m8cz15722X2YeOnnj6+vrv/rd/93/16z8f9MyCMXeyaImdUigqguImyBGDdSYMjiCHy+7J/eU+upVxNM+8Jgud06aUmfu4cfQ2OilDtWtVcEFXV1VXhaoUB/FqDw0AAZiITWHn5451RHjlGjUzrPI7RDMQkbaZ0nsjcfFSTWCYOSK7Q7EIgOaDW7FA0HApm8Dx9OL05GITm8azBCZXTwRmuulzmHUQ9SOf+Njnv/iZ1fo9BHaRFIK7l6LMLCIBUFyxygjBHQpOW1pXLNNwqmIUu0GfM5QcEw+aU4xoqBAkuNB8uWxOHz38d1/+d1/81T8DlfFA0aHA1AlOw8RpWEruW37Irry7OxKD22WWJrlXABtCTUNHpMkVGIAgbPnUW9ro5ECAxUdAUHNALJox0l639/3v/QjJPvrxl9566w0KnFVDCHUMThM5HRAJ0AGtzuF4kip53XdViJwMFAsSg4PARNBzr7EkVXdF7pMPWsXWQaF6PwKAw6TIr0v8dFVY5ZJvR/G0LehWczx2B89N1B1rMoaKqyFxx9H6YQhJiUVyZAohwjAMTMHpCphFCFPakQNidfNC8q3bPjHqtMX0q+cBKve3EkXMQQGwGj04O5eW47033iIoqBzIHJigUCQA0FIigo3CAiE2BJyrtYa5uSE6M176DDvtECHfOgfAZOm8++elONnAibb8d3d30ACIlVQ2hb6yUzAAg8jsKqLm7m42jiXktLRIVQbBAVDrOKVC6Ck1MViXcMhpY1mtpHbmch6QmFlKkb6UVRZCTCxiuRiIE4dhPcy7ZrW+GHI+LYUYUMElQLFASOS5lBgCIORiRdHMmqZbbYZixmBoeSyBZc7roj0/Pg7vvrbK/djE/SZ1B8NwexxvIDCF04thPk9WgN1jimFEAXRHC8xEKOa95w3KIiARZTfP2jRdMTdTGrM1s1FUi2nWMp47QrtH3M3cj7fiHausBIQAPvFV63TEIJiGXDAXTrTHfHbRnzbLo4ePz8fRMLWPLs7EbZ5AR+VmxkxexhuL5uYR7919rsz55HS05c8c8ie9aS5ymc0WbaKEWqwEJ77QMyFHaz7yQt7ojVvd7bT67uO37n/8k8/eXUB/rllIUAmQZBxW5syBAV2KdIiZYUZ+mGg8P31rXI+oPXIz6Ohs5BstT7XrwaCbJfO1exKZZqQMqFDN+mDQAgAiknMWgYZiCOwVlN8+rnBmdk5Y7/ttnVJUTlhDsTq+ujszxxj7Yv2
QD/ea524vPV+EFDW4lBSofbTMPpzfuLEcV2cNhSl0u5kVKn/17/yt9Zkczvf6/iQFQABuOjMr6imlGKPlEjkgaqREVO9uR0TkiIhh2xjVnD/bvm1GAE1K2ib2Qo5xb9ZokdXwaPNo/OPf//Lnf/FX7tx+5vzxezGaum6Dn3RXTmoQxDRUnNiEU+NVx5G7uxe21QTQqC4PiECIZldG05X6v9sT1FIKpjnEzhQRUXQE18P95d//8v/467/2RdOBA4TAokAISIju4ZK96o51VTY3NARwr2ZkCFZ584Qgk3VKpawAT/FwuFVg8dVZCwBMgsuJmuOwa+NpujwMlaoEFFjVOJIZivvkgA9eSfHFOBBLccKAnAk2HKNqFNWQEBE1z5gYaNiMq8R76gK+tWSc1tD6hthBq9++uddlyeoIFMANHKemuJLZQdmngSO518WeDJygqA3gfPro4triUGV0g8Rz13MCETc1bNJsvFijg4epVTAztcKIAQnUdjaQcIVUdqWFv5yR485QqE4ldAo6rmUHnIQQEYK7uTOHaejiQBiIIARChxhjFgdHKQ5K4K5qqCDkBAiGkQO2PNhGbM7MpgUKBEtpS9lCxDLmhveuLZcrFQcMLaMixyiA82ZGqABw0LaLFDBaKUWVAqcUCZlWmx4QYgpV2UambUL3SM6QxXNXbF5ygFM+f5fOHodZQWiXS9C7snmGkQBOoeSALeCgSKUIIg8xREct6kUCEgKpKyJ0i4a4MYTYzNx9nQ0AUmry6E1ZS9G2bVMCQgWPwQAzmjIiOqiDOGQzMYgAwV2rBkGdENgAi4IotrzfxjfELs4fp4u43N+f3X/82KCJAUouqUm5ZMh48yjeutYy7GVaeDkvMpwO4zhbei9ZRFzW5+ujg64JWoriKATdYLjKQ8ry+Hj+7NMf2W9/8vu/8+/+9t/4jdXZSR+XIBcOXdOZWAFo0BHJCFicNjZcO2q9sfsnp6RIEADbja/ZiAftIJ0/OGFvZ7GNhaQLNeMxYChg6EqAjEykZlbEmCPAWOu2qsZ4JTDerxQOh6vX8SWMQBRjLHkaH1WtMxEbinhRy/PUfPqlZ3k4MZOKWQTu3jsYqD+7ffTkOY+Rm+IWrQNszDMJHO3N8tA3qfG0HG0wM3XixGKGiE3TMFIHYRtN54CTdbABTtmtV6iGCAQAbIJtl4N61r3ZAtr18YMfh3f67tHbb63x5z/zhY+/9LHV5tFTT+w1XVpfrByiWd1EX6ZATKvGleVtV5p31HXfMiC3h8undv1KIg8hOjT1abd/u107vUFoAINZQQrjOO43kfP6L//GX0omt/f3zDGEkMvAzGBaW/56smpweWX0qhuAAQdmJIJAxEgMiF4J7DV8o5YYd9cCNXgSwLdqhmq/ZepI4GgTqd3Iya/Y0ZvZ5JsCqKpcs1m3zG4zq032xjJCdneuE1ML7mRMDhJCGkfgmQH27tjxEsnc2HECxAwnJNDdxRAACdBMAJCgzhSxEJjZdCYmPZGZTalJvvsPadIvslkTLgZ/9/5J4JCtB8JS+sCt6uCIGJrRIINZ9JFkhpXxDbVwV6+h+gGvDlfrGfetbzAiEm0HqhPshljtD6i+sek5WT0gsE1QXUG3QE41+TKGSO6m6gBIgcGDkjqaWHELIODogdiF+j4CYcmUrTBjasCMKAoxjyqGYI4xxiamjSoCacX3zBR0OZuXnGfz6MQOyCE4mDpY1UKwSYCsHsXZPcaYmK0faXQ2xvW8mM/7xVs/2rz5xvra/LYWOM+nd8RfAn4K3FhOWTuILVLGDWlD7GME34wBJ09wElMWS0KQIWRY4mmo9qwIdW7f0QQ4ZKLYJW0HJ43UsquUgWhrf1RNQtHc0IHd5fL2QKotjIFLg5loM5T9jh6cPzxdC4cOVNhQsTGPwfun9pon5o2VLtOypDwHw6Iz7mzDjLjXxFIG85l4dB3U3ZlkFM3ada00vrlYPfnUnZ/91FPf/uPHZ+cATTOuT9iBeRYpCLNpUAdwQQ4B017Jz6Z2/fj+uz+91/F+7ss5phEtGaexGNIzHw6F4yYVHTfXwl7tGkwIwcBqVAAQESCP4yalhJinrBm6TJDY9uy7vukK1d2QAu5qSkphHIqZEYO4FVOKgWtPyHy2yS+/9QDzuYhB9IaBqXl8OtKP3/jIZz5xct4HRic3GNrIWgpCMTOKnkdRS7EWIHOeQsUdmA2smEIhBiQGosnQzCY7lffz4ryqB7Ex0ZZimh0/evPVb//ufhr0QdkPe8cXs7t/+oXHvgq0fuON+y//5N4zzz938+i6mZkLTG3vjope/MoiB1uUeqsS2vVruyjaWtDRCavD2m5pxLoP3/XIFdlHMy2ICuzjOFy7dvSt730fyK/feuLevfcoBAAyBTGrChrB4oa1LzETn4LygrszVmkw2dZs3mvYYzW2BfAp2sLcnYlh26dX4mqFYjDEqRgSVL1nlThvlzetB6cuBLzbCNTdElPY8nSauBTJgVyLIKRIs1xWwJtILWHUeQtoogQw00zIAxFXcKUi+9NRMq/2vDWIFAC2TCQUUKthCY5VxG9Wq6HV6r+txS7uap5BZotZ3vTR1kcHHTWWGtZc1Bhd226xys2b7xy7SYghhgCE4zgaAgbOfW7btAtc3WrxrlJoJmNU3z5g29q7T1mVdaXdLf/ZLFafHwAw0yLgSlu1lNUPIQphUvwiMIX6eqCO1VPLDNggsRIF5qhlzAbiScAKSH2rDj6UvM5Dzjmg9RlZSMZMXVytVjE4xwAAOeca02aO4IpCrlakmHZ5KMwOLSHR5ny0MYq4DKLn/Pi1R/feKjeeuK2bdbMablHzM7bGNp4jshGNDAAh+j4vh5BdaR44pjFkU0auPF53cKGxHy9WG9QZkYpkAAghOfrYj+oSwQWaBN5gopbGERmUGAEqja0CoOygDlpr/fY+BFMwcDc1k8JDMwunj/teT/rBZs0yKXPI/dg2XZDN5rkn2jsH6fS4D+1ht09pRBtXeXPOM+43GQuCNjHGWYuBCgcUl1x689g05LpRCA3raj1/4aVPfesr/+LN11+5+zO38KwY+Zh73wRVhIAphIKKpqMMi8hdQ2+dnr53Pi5YW9HEbm5j65tl02/sbpw3peksRd7LUupJcpPaySADmIupmUmxKg4QEAeF6VbxXdmq39cd5e5XisDO9V4RySkl942q4jZXI3AKloKPbjwof+PH7wRU4mhsKrmhZsz2sLz1qys86SVRdtY2hPOzx00TiT2PpZulIhIpFPOEaCoAEJggkBTNqhwDAIpX8ZBiTckE7Pyy8k7+HUDuWAJFGW2MJxfvvvXv/8UN9ncfFIrt6Wpz4dB0/qPvfe/s4YOIsxtP3m2Wi+PNGgC2tua2K+40tee7XkyxOgTY5d+AUx1FXi4JeJnDNz0Dyq4IbpdMAAAHIWzcPSQ6X4+Lo9k//J3/+e7dm483+eFq0zQNAVVBuWiBLZiOXqFzm9YVBwxsagQOIIbgAOhEAIbb9/kBeqsZVfipnmiAOk+eRshTOIlVUanhtBTVjw+XOxgs2+S5iuNN2w
VEtROmFiAQOOHK/Z7jw5Lf25xrkdXR9Wfm6YWSZ2ZDCEFKpJDdatorKWyNJ2EytwBE8lBfov6uBa6QB2whsmk1U3NCrTZiCmZm7gqeyya2i0frzS98/O7NG2mQ85hCClAwYlkv5kff+cnZ/XsnRtEVosdiOkqNF0YAiLERN9mtmVeWtCvr/SXIvt3VKflu5WPES4/8OkEyIHAEVDAh0+iBiFIMzCgGMTbVlwYAVBUM3dCRAczQnUTMkH2+1xTtaQGsXT6/2KyHwPuIG2J0cQBzJmWMbWODuFDAlGYtz9m4Z7K2bRyI22A6knvrBE5Y9xCRnTQ6NW0U4FzEMQXoSBf7A91/K54+7G8+fYib830KB4nvWD9rZ4OKilPA2AR3LVJGMHBvB58X3oCFhoOMGxsLQ0AOJcZhNsvN3kW7ZyaIHmPszZijtnG9Xi/mSTOvB2tOx6ODZpYaQhstmxdEc3dTNGczEDMF3V4kYGZ1ro0q4jIb2ijYOOSVtctlEUbAQI23UcrmpVvLm7Nw8nDVQzOjjUlaLkgi7Nky9zCbt8gZSZCwYzUwKeoFY5qBRUegwI21pO9drOTw4JknX7j2xz/4YXNncc1jJkcQAHIjtAwqATkSiMJsngICjdK2BwO37AxqixB66UOBObZ5fQFRuc1wsTHfqw4w4g5gNSove92NMcUA7LXhYuZiyildRV2uth6XNwxAzU52Q1BLKe22pdt2Hg0tRJRh1e09oTIwRS+oG8HYETeJ/d477w2bPgQODBmK6NT3afYYFjX/z1EIWwOsFMxcshu7YQhBVKmOEb2CrwYUiHh0qxl5sFUVIpI7pixKsNy/9r1/8o+fwlUvt/7RV+89/eEnf3Hv2q0je/UP/+7xa4/nh3c/9uufG4jDWsbKf+A698Sp2lGAKb16eurJFbhiZrs6TljNfAEAeNqt+9aP0KHKOgGumBNUQN8VgJg4lDH3OgTu1mv93nd/8jf/xt94fLJCbsGDubpXG4lkvpWOgoFDVS7VkpqduQ7aJkEPTEwZw4pZ7c5onQoYBK2fbbtCONQ5oU7stC1z3BEcqJYlB5osy7cW5DtmkddV1w0rrBwSgKn2R4eLhw/f+ObX//X64sKVjx/MQqOLo7c//JE3P/LhL6xWLRG7G0E2JPBw6apfzRUsV0Ok6fBWE7OJpDS9NPl2JUBGASd0rg75wIg1KD5AO2vmY3++XKS9RZrZvjuijhaZjCLyo0fHo7gTmSlwmPZJFalDDCHkUab3pApbTR9sscoKywBA5RcgeYWpdvcR1bz6erjdO4+sjmiMwYkEXEALIRRrm9C27cnFeUxsZsx1UzLZ1BAwoFAAiI7owmjcrMbT/RCJQgyQuDRkqnWBA2ZW9EHHwImZ0R3JmT1GVAbTPIzKIQpRsTEREjVorqomHigaWXRI2IybkgdvbZFPuX8sP/qJDyLdLMg7J08112+SzaBf7s03FtJgcxezvEbvzSyLt+1eiNdab5G/nz0UNxUhZYrYjxfI18ZiKg54QiERt16ECUQ3rc9njOhxzOvAad3zUMbF3Badp2bp/hCNzDKhQSkupeE5+QDoBqpSbzIYlFpCKg12G7VgNDiPeUiLWSIKgmOw8rG9a08mX/ePOm/304IIm6BxKKQUY9vDOQQFwiamYbUeOTgwuDUNwYWWVSYCZHA4RvRsF+1p+/nnXvjSl7997zu3bn30TosFE/VZCMXAA6CqATceyhG5F7kwWeI6IhpHZZ4xczNnwHly0r5gDGG2ce3sQmUjxhaCFQNr2CSBr8waBHVRMsYAAhDYhwI7+tqkKqUdGANwiSQC1lhv01wWy2Xt2cETqJkXokVIAUvQYsy8GVBZIzGFxhRW5Sw26WJ1fu/d4c4z3Xi2bmg50ojoYMpEiKMbAiR0cBAnysxgbs7VK8TdFRAJCKfEAnMDK+iCYJGaMQs6pBDMLWufUhKxOI+vffurfvoAP7SH0n/mrj/RCpaHyxLKOTz/gp6ufnJ87048+tnRCnncCoJ4qo9uZkq1wZ2w8sqIUHerA9SpgtSMNEYHAHFAVHCjSn6vkkIUF5wADgQAA0N0jCjauGakgL65cXTtD/7t127fvnnzzv6rr70bqtQQKqtRJh3sbuSI1f2FgKCYkrluZTV1IrzFCOrsDQCcHQjZEQSdraaJVPFTHe86gE6dKu6WLkYkA89sbMiKKBYQrcpZ3ZGp+v67k6maay3AyF5O8J//8395/+Jkf39/8xCRbzTX+am7N84v6O23333t1Z+eXcw++bO/kqtLXkQvyQWRNwqF6EAyhiBkjZqLTxIlAEAnRBxtrH2wex201gujOCd3dxUiqkGDrsDMo/fXm/T7f/yd196+/+YJJRbUbpQLQxoF1/n8dJ1TwNVmbOcLDyBrNEVXJ3ID6bp4drYOSFVpWknAO1Rq1w5dDldryCIwM4AaOmAMxTQopUgONFoGsMixCDUpeIZInWtpQzfkDMUR0XxMIYp6UMMebQAhpKa0S8AWvECCGUoeN8azzpUQsZ0DhKiUKYKZcJplDxcXPjc7PV3Pulk7Ey05UJQSxkE8sLq0wINftIYgDMnPZaOArDAHvjjV8zR2Aco5uTVnp3L6Gj5+ywzb1PKylKcbvhkuipiEdpPZNusl2LX92Wg59XLIXTmgdel7W4XF4gfn+XWKwaQPHXlvRSHgTHIbeaHGFj0xqg69acDQxWQyClvoNy0oWU6LbrZMpQwxiUF2R6jGPY4IXCXXiICSg1Ii6F3MgJkNfCjjHjqSa2lSR4jqCg7ZB3z+Jh9if/rmw6aJoUi/0cPbT+N6/ZgfmuaYsCgIA7L3lBfzBeXeIwnwkIfzPsvgiRkRyaJDVpZx2HTzg9Tge++989PZ4vbzB6Z9m5ph6FNcmLOxKQwOlNpGfbh15+bhYYy8cGJEzD56jOQUszy+WEPRxhg8hVSLMmjxyq12EnMNmKRI17Qu5xVw5xDF+t2w6CqeqLpVwOwq/RXlKiIy887n2opUeV7t5lS1TY15cXdVAecYmxiDGbzzzjsfevHu4CsD48pD3mLW7gYTm9nADFWr5BN3jEwmB6vkHvfKJa/2HihljE3rauNQYoxMsYhl0EVcPnj7R9cPgxWnbJ/98JPD4w0dzefX8WJcZ4emS5uLx4fXckNJ0AG31O6pkFTlGGzdbnW6gXe1dYo8BUPYmjoB1Pddu3WfiBu1QQeYElN9+kUlGlaFEWvGbrb/pS996Vf+zBfOzzYf2Et9YCO1hWrrM6DDZBmAO5Kl7lbricBBjuBuE5oF5oJThuMlqHHlJaZdGVD1TjRGI6vrRZWewCSJUqu0kh0iMX2+bHFJf/Pv/Cc/+dZrX/nq18sirs7z/GLxtZ9+6xe+8GdOTh7v7z3xh//mx88//zNtO+tXUF0uEYWpcVvk3DctjIPxlqVTb1ufthoAhJWaY37FFdIITKeD4V5tlAxIzceMY05f+/pro/SnF2SqDiMwuZwkTiqkBQQQAplZK57dVZWZ1JS2etRyJZNydy/URwi8PbGXP3R3I3ICZXQmJDf24o7mB94kLwGcScyLc
ysGyElNQgjuHkIapa+0HQ4BnNVVytjMAhiVNcy7RD7UqRMzM4HkAR0QjJlFLIRU+mymjDBPraRShrHbbw4O5pvNJjQxUBpl3GtaVW3gUEVCEyB6NyoCQYz9xaAc42Y2OqHQ7Hz28I18fmKzvaWZfk7wAGyl5uJFeo2USuMt50FsyKMO2XHuNhtgblTC4rUz+GoenmuPQqJw3p8MPna8kNHMSLOKClu1DbA2zMBD0IQqbYubIsghhDSI98crLUPXdL716qxO0/XEIKIDeKBcVN1NHc1RHYsGdYGiRgYIHtAdXFDDYtYePXFwdnLs+/s9BXaQ5cHQhP2jvYjd6uJkvTkvmoESAFtWyWlxgFB5xxTiPEKThGwEAU6sSdyyxEU754M4jKc/vvdOc/taYqnnSRSDo6MhScImsIvIe4/O+j6prYexAEDjIYOp6sGyG2zlwSj5qBvvOykaCC1YsWJObogQ62WXAjHjWIYUmuAQ6DJrkIjctYYiUd2cXrmCfWtul0tRt7Zt1+t1JZaWUgAgpUREAKXv+66LXoAImVgdiMgUYoyvvPLKr//5j5/AI3NBIySftDBe7d8dai9cecmTM7YzIjNbtRh3J0Aimsbp7kRBzUoWpkpHw1LM3NN8sXn8YHzvUXet2zy4CM3iIgssMEEeRwgRMUZGsxTbRVqf9SHEas2EW3gHKhyDl0SR+ooViDHQSoS/sv5NxHG4tBPzy2/tEvvaVecarmmAYiWm7tGDi/feu/9Lv/SFs7Mz/g+MCqbTBDuO+WXa9XTUtg+AuqwjOmglmWxNEiZhLCJapWxewjX1XU0/8d25mN5wqKvcNIyZlEaIMAVAT2EldVqDAFCyJOJHj46f+ujNXzt48uFPv/vaa/D9t+BP/ek/pwYXm3NwmHX7v/3bv/1bf+u/iM1SbUAg8LbvN6EdQyQpxBzdZDpouwM5vWWqatVqaeY2rZtk1SHfqrGBqiOjijZNvP/u2Xv3T2/cjgagHg16L5GsGTBkVQOQUqIjA4YQXAYXRUZ355qJ5F5vil0ZgSldFncr/u5Xu3NNRG6OVKGuyhZVgFCXK/e6CYSA1XIyEgIwppRWfY6xycOYGlIrDlLEQxOkUBut5QAFlSkRmBY29pJBhQlCRCclTIiUUgCAAKSS511nyY1Kgdwuu6KyGXp0xRTJ3VxMoS9qAxgwkhMYos/mOXG3Pi+xNKdv6ZtvKqe0z+NTQ4ZAa80zhyQxQzNYLpQPofXEIcY9kQ0BMaSsB8Ae8LVBmsSfbSiUjW4XauFgRXNoQjIOSIMN7tIRuoFgRLIgYohqGR1KViLfW3QhkAMOo08XuYthdd0EAAgGpBOtVh0VwQmETMW0BGIFczOnlIOmJ4+auBo3A1Lsqhc3BVQvo4yDmxFnAE4NYijZU2pNkLMBGQUWhsFkHLOTU2D2HAxAOCtz5y3K2cXD3LePz9ZPPtGuVudAlPYndyAZpQsYLAYEyYoYmtjEJjESIraMUmzWzdbH98E4KAfpmjRHZMBqLKXMmI1VAFxr+GeKbFtJzm5wevXh7jsTQ9wReK0yDKGoqHrbtmfnZ2iuoCLioqFLHAOVMgxD06WsMuMUAquUivVcv3bw+muvbtYGxByCWoFJRVj7ze0tIVab4gl3ADecZnkT6o24dUY0ADeFEFPlPVEM4kZE4NglPH77niL0wIvl/uZsM5/v4YJWx/dD2kMGKUozb2d7QzYIl+5p23glwhqkirytxFYZzFuV1E6TUqkmuxt7VygRp04drrT/V2dxCA6iwjEVzbeu3/mn/+RLL330ub397uTkJMRLItP2pNTnunyGnZnJ1CSCb8ur18ru7pOj7URp120pMqo+jhV8qxM/xwpQVweU+iKwpfBPS5dXjMisWhN7xR1q3K/hdvPiCG1rDFGGpr8gALpxm9JcmsPV17/8r5z3D5+Yg5UQ6N4D+MpXvvoX/9yff+ct6mYDMcYQxcTdCQNA9TmoMwPf4tX1n7T9lqqFNZgD7ZxN0XeWxegiee/o2te/9vow9AjNmDM5Io6g6hBUUXHiXDFSjNEcc87uBgLuHmMCAJEPjMQvt1YwAZiXNX13ysjRVLkWbQOuZvaAwuIoDKweEiJ5BBlJraAFdxHxKbU4qmYQXy72Blu5BXAkD+Mmi+XrN5aq61LGBS4YYdamtQiRKSgalqwo2lB00Bh5s8lN01n0i/WqmXV5FFWdd52ISCkQnCmVTcmGEkFdkkEoWMZWNzoe86uvnK7O92eLQzx5+9nDa4sZpYJNagPRoFqyHYTuCQTNmdhbphjaaFqINtgLIaPew/zZ2eLQ16GbNRenfQAUMQJVzUMZCjbzhHvz1r0EwIABI8bUetkshTmwORaj2aLp2jiO95xOkRSJrFzK6xHB0MWUAgIBEFc3vMAsVsgDMCB6jCYZS4H9zo6WuDk/NZ6pl0QEzmxjFxKOm5SCKIwjqKlAcdeDo4MUDRFF3NzEBYCYo6Ehgih7JZxaNuqbSMf3T+e37jx6952Xnv3ZkXi0TdFxRoBGLS2RhCIM69WibUqOaAVJXaXEJZATgzMJSoiGQZXEoWgRN3Zj8ErxHACNOY7jyEQBae9geXG2mlG7VhXBmkWXc96Vhp0L0gcAATOLFEYpKaVd2oSqDn1Os5YCQ6CsAgDEVUuiXRfNoJRx1i0fPX509njVzbt+vWpDfF8GlG/jRgnNvfa59bfuXkQAteqnrt5R7k6IKjm1aewLMwM4M439wMQP793fW0ZupZTEjL08wjKjDoGBkyMF9xysQNZIcUJcLhc7myp8rSoTf6ay9MBdCZ0MnSYQxqZ5aZ2bIu4+znaOse1UdpDHdFQDRXdCxK6bf/UPv/zX/5d/8WJ1YmhX5E6X7Emscvv3ddv11CBte3eorrBT8QFmtkqN3M5h0R0r1A5wNTWksk6qaUsti27bGr+jHyBOmLc7IvDWEQDf954MHXmMGxBoGU0RYoawWOJHn00L5lff9cJo2PayufXks9/51stP37n1sZ/57PH9U2QEMKY2y4gkaAkmL7P3EVS82t7W9QYhVLNGRHByjFA5RWpUUVhTMG27/Vd/cj9EI44yZERzJSJ3Ex01hCCmCm4Ru9SUUUQKANSgvbadTa5f5Lz1099VeUSEynvd7m53y+2EbZlRTMZejXwdpqAPcEcKBg7ElVPvhAxIBE3TYMnuzkRqCOD73Q0EK3LuOfRiozi1IJhD5QEAR+aCCgAYtJsn5kIOMaVsZxwCJkoYnTwiz9ouiwPAbLZIsUbRGwIxR250mVgZwTvoAQ1M2tYX793rNxfdIjR3PB80833J15AGyDNrN5ssjd+W4bbP72tZMqXBs/XHbKOVubcCDsSPN/ww+zXh06Gnt967QA8syBgUQ5P20CIxD4KgwUfejKYYpC/jOvdCfe8iNA5Wsg29SvEUWpym/7a1kcPpvnW1QIIuLu5u6js7fBUEGk257ZjYJce9PYouQ9VSExvYaCAWTFyB1a1obru4WDb7e93B/ixFiGwDoYXAzGzkRc0km46uBV1CEoxIQcUpdEzdxXq1On3809fe2oy1
yxtl6JvYXKz7TF7cLMRBqWAzUrc2XlvKg+pG8kaGXgVCaGZKsXArxYrKJPeBrQhX0RTqwA3Qmy7Vlo1jsC1aenUveQVq8MuvE+TqpZQQQgpJpCoTMau4IQZGRFU18BgbDggo6NJEbgM2CVT7V195cznfqxf/7hVxW0rqTYtXHlf80i/L+vSeHRm5ZutoEURE88RBRLpZyzTTfr3fRigOANakbn//YNnuLWYplkDaNhGRDNZd16pd4qc1p4qIiHHidCPWnCNEJEBCQAJGRKrp9bA7coQwxZjsHlMB8KlgTi9BRFTnwxRYxBaLxdtvvLlaPf65z/zMxdmK+H1Djvd9c6V8XOkiAQCYKDDvFDSIyESXfI0t4EZEBGw+7UcIgSd9rxHYdin64PM7cnUkZyImCoyMMM3V7YOLDQBwkllIaJyZmkVq2YON3uT9ZX90VAhUfEgpZTlbLm/9s3/6R6/89I+vHV0znbs3ouumCejkoJWn49vxzO407VRru7dKAIyVs4pYgyF9kmylJsjZ5vUfvnzjWhMAIyfmiNQgcBOZGJCnxjxAAA59ETMDrCaO2DTNOJQtGuZbgPdyt1fnrLvPPr2B+oisCN6EjK5oApgZhJAtsBOhI5p4GS1bSB7aenl0XVP1Fu4eY4yhUQ9WADWOG99scrdoZvNoNqBTm9I4StWNE5GCNl0zyogRiwohVQTV0Zgx5xxCaFNi5jKM41BUXA06agC512KELQUeJV+M/abgurz7xsWj9+SGhZ/x/iN59VSgBaGO6wPFiLBIcLfvn4oxznDOgCEqhwvwTaQNwTl6xgDKYwkIvMKxINHv/uu3I95KGEoZRX1YG+SgRY2bkwt5dJ7P13K67i/GfLrJp+syKp5vymoom17OTtdDr0yNjELobqbFwNmsoqns7i0QloJidZMpBhncI5uOpQzgzaxpEd2A9/aXZVQtqcHI2YMTMBYx95jB1qUfIGNSTg4sSL7ZbMoonTBn9SJmRgFTiDPArlgsA7uZDQHcIA7gdMDvrB6Llj/48h/PF/tmCppRbcyCLVU3YlV/5af3f/L68Y/eOP7eT8++/9b6R6+PP/rp6tXXzt5652JYlQY9Wo4+dvO9wAnA1ArU/DYPZlOIjKsEQkFFhiJjjHHXBV9tN7bKrx3dbWtL4FQNDIio67ra7MN2aNiGNhCD+bDpGSlyaJuUmsAIMSB5OdhrX3359RBCmOqfIzoQX9aRGqUyBUWZuZiZgU42Kn653myVNygiIQQATKkBqBlj1HXNT1/5AYRNnxwO9oZoOGtsZD/JOc8CHJQNWrGAwa13V0jVrdsJkJEYiQkZobK2A1JAYmAGRnQiCITAQERIHpCIkRADQkQItNUDVYMvRsKJiI0VpyHAmnPIzMxFRUEPDg6++pU//OQnXmwiSsEQwtZ4xOorTv+Bc0BkQgZkqi6QyMSMXgcmEwI+/VfBCfCpXtfCzsgBGciRgNAYgQkIjdAQoZJ5duvTpLpEZKw5qvXsTM4ABkSARExEAYkrt4aImQcI5tqUcQkg6mtzaJhBxVs15zgsl5gCxxidw+LaU7/zj7/53R//+/0bxVwIGxUBMJzC5+wqTghXZhp11drBNe6Vqar1nRuCAZnBbL58643jx4/eu7bXlnFAL+gQ2BndVJBRCYpDIG5CRPNecg2VNJO2bcFriB1sRbi7su7ul0vOf7jNraspAFBAhUk9ixy2DjkAakwEaq5AgGg+jiMA5JwR3bQ0KQROjOF8cyI6ohgpM1KIBigNNXlUpqjVLzSwI2r1sGNQn/bfTdM0MTCYaY5NMLDQhCZGQmxCxBoyhcWhAAAoWQmyIlsn6Ger1+avvwpwgk+dbZ5TnaHnPDIzte2A8WIY20A35otZs3duuE720O2toKvA13B2SPMicIzwTjGZQbNIm71A+/Pw9tuHP/jOw5//5HLTPzTiSLHBzpn3ZGMIB9f3ZikV6bkNHBsbnYgMx5TmxUEsEw9ZT0Y7jlu5NiKTefVSYEQDr8RzHxSdGorRMRgg5JxLjDGGTvUiBEhxrv2YKbINycyM0oxnTdPGOLbSAhrHvBE3cwicWGFAhg0qIDcUGNy1jGWgRN1ssY8oGMgHybp3bX8ePOhIY3u+Wm/6cnx8eng9Das1NKAEEkxHTJxOzx9uTi7EOo7RgzERQCFSQDAzBgIOjoSxHfOqyAgQEBBc3BDQODh4FFQUR8QQOMaYR3FC1ZoGMB2lnRUBbPf4H3hU5oCqxhhXJkSk4KUUEUsxpJRKyXko3mpINGs69WwOXdc4+XLRvfn665orp2VC1mGy+5q4w5W643bZAMOUPjPxWPDKrsJFiYgcxK3qunPOy4P/P13/HSxplt0HYsfcez+T7tny1dXVfnpmerzDEIMhPEASBIklCYqSqI0QCcVGrLTSxkqhWIUUWmm1oT8Yq5VW2qDE4FJcegAECAIcYAaD8X7QM9PdM+1t+Xr1XJrP3HvPOfrjy/eqMBvK6KjIys7OzvzMuef+zs/Mbty4fufad6alUTAqRTu10uLIH9yT/gACaDWtLpypu35Fal4hQ8c8O51hKtigDDITJMYThMSG1CIaaGoDveZk4IqIYKwglBGGVNk1KI8GNKQswdqw+36RAgnBWxJ2+L3vPfs/+pu/upjPy2KSrB/K2Y+353q6r4E12I8wtNrEa/eYYUcxvENEiBzRMEDQdYIFAKqhp2H2aKYISoTDBGYwbx+OM9HaxB0A2Bkokg6kQwTiNbYNBoAMaLSOBhjk48gVQmI2yxyknFZbq9W8S3bcMvjxxhhWRzF2mfyoqJuc+orPffGL3ybo3/3UB+aHYArsEDWIxdO6+eCpJ2aQgQux/o4GQIgEaiaIQEgCQwghlfX02Zdel6pskTI5JQPzBC0iJgRHXhVQFNgNAl2fpR3oMZInZRkHiMaMBszwQcrjevTyZzokOCEyMTOpkdowxPPEzpCVzEyKRJBjxhEWaOaMnUjK0bvCIQGqaCqKAgAcc9d3e0erK4+Vfb/EXJi6ZqlljayEit65PuaYJakkkcJBKEsgEzOPaKZ919W+6FYrx7hKHbmQO4WkAQkkS0rOUZdicH5jUprKarHIXeFlc3lH3n69b1t9ZjT+QNCV9nfbVHJxd6VTX4mugJI0VJQb11f7+94mUDZG+/0qAJAHB9j3It4fm1jbHWVdMh92ndMSv/pSfvzdqa4l9UbdQZ7bdDVzDj1bvHa4n/N4ysBsrkiScl64SHfuNltnr4YLszQ5TsVBYTWE1McmqjmHuW9FenYjoyrnpEKgrNaJxUyOuKewWkqn0a98CuRqlswSkonTAFQGV7NTVfZIIfuiH/tgAJ32YQpmkFMkxxg4S+uwYs6SKSYyRshK6izqsWIbj6vaN1H6eeeClx7HZRXqYtfJs1/+zsd+8eOlWy1js4UuZ1a2w65xpX/40R3yW8jkmRC5BJCgGcLWZPLiD98peZJdK4YqrQ8klpOxEBiCiBByT5CBiNhBtABSGrSpKke560Syc2vnlpTScHU+KFLFkzg3RBQgBuraWFXVIVJSQVQRXvWRuSB
CI20lR3GEjjFmxbIQkxIgTSb+9tv3br292rxcpqZlKgEAMakqkjMBoGhrlVImAwKGU/qwGdPAYlrPNfHEKEpcRk5JKSecbE7v3nn1lWe/dKYaqaZQTCRDK33pZ2Bc7Uh/3GV65K39W37id86VC+nYGcSRYUJgPakdiApEDlk1A2QiUgUiJHI5R0Qguq9IYkBFBYLEsh67rmF4RTIgVEAUGWKtB0DMOScpg7HFrqwmb791A6R5+umnb+8fGvYOBqoQAvB6jGmGaMTMZmkA1YmMAETAMhEPSthhpoq4ZtMTkwkMX2dYBAhoWDo9JgFACDQYb2gGgiFnl3Cd7AxIaAAMoFmN1/xQRDqpdEAk0RBRAWzYnqggGaKJOiiyZnSIFqqmE0cJel8WRlU+XFk1Hpmm+dFeUdbeOefbgFc/9/nXMtoH3v2RozuEoMotKhP6LC2SEVZgApggAYKwAwCSYThy4v1iYBSKmDxwywoMLUPhoXj9+dd2a8eSAmdIqpSTeU+QtANF7RXU2CEjgWLOKoPVl2JVliklEUFg0zUe9OBicwJj2ikgNtw+fR/LsgSBFMAXkCQKsCGZCTowJMbgUAF7wSwWBcS4VOsx9xuz8Z2jo5LdMqfc94BQYtWL1HWZFirQlxUmsLRkX+Smj6k1xyMKqTQPMUFeATgvnpwYKOpsmY5y6VLUgmskNBPwqGrqAMg5ZuhzJvDB660lrarchf6OvfrD5Mv6z0H3Xq8g7ajyIVrjoM1xSXrGwWVf18Zv5cU9ZyRpVPi87KfOU1nutVERuPIQ48j04mh65/Dejc49rqVbNot5615+lXfK6WFTn999+KCIVb1ZFFEFoHSM7tBJUTI5bmNT8fnZFPzVhQ9jCRn8UmXuOUlWBxLIQLMTY1PLkR2yE7WoUpo6EUSDgkrIVW7vWOp9GA9JMdJ5hLg1yRJr9k5RZ5Np4UtNEIoauIy0KqvN5fE8YDByqRNSCFCJFWq9wnFRV4eJGpWCCCSr5apE1cyMwG6yuRnxrQhanb2weOvFvNp79YXX3v+hh7p0NF+5stpaxLjo8Vw1ffhi9sXYESMAsUc0IIuWNrfytTf7midBZmh3JGJsM5oGx8kkmTKwZOcIMmAeZj5m3rkEEWVtWzhcrKcBNyfX6P3AsOFFIhJRAMs5V/WImde9dpbURzedMDsiAmIRaTqZjbxRIkPgLIhm4r28/NIrP/Pkh47a1ZBLCXjyIZYHGvGJdddJez58HwI1g6G/hDWjAxGRLSipVCnq7s7m3rUXv//1z082eDoBdBj7du9OuzndAoCsK4TxtJhdu320tXXunbduBK547CV2jrytp6Z8etPSA6jUA1uXU1sxBjS0U6SVwAANiWHw7LO1HcLaooWZFXDd5CKYrD8nK57d3f3D3/9H73//+0NZrD8LjbwfkBY9mQcMYHuSYTMBhsSgwA6HeL9hmUGCBxp9U2V2603N8MpwFNfK1oGXb2BGhoPHJN4nfgLc32c4s/vDkPsHRI1cWke2IoEhkgMABCytSTk7QED0YYvdYwxz84lCKxEFOsFcjn1sxyAOjdQ5oGZjOvrmV1+p/ejJR5+ZH7pQppyACUVKQjDtAVUzeVdk6U+O+2B0rAR0arNcOzDlLL1p4Wt3e//44N7haMRADaJjqBTUs5Ca5UKF29QPDnTIbtX2UWXYBQ0Q02KxOL1HTn/7j/319MXTpn7Yx+QcOfC6iycCUDA1QwWnosqgcDpoFQBNWYloGGv1KfexDwQ+UFZBBkOhyveNUCrDzCQvc+/MhCgDOtWcTRiAixYHLa2tJSmImHNyyOKIiQDIshICKalkS8mXU+K8PFxGqGJP6Ra99cJhbKfvreTDO9v59p2G1BgnYezBTUbFpsm4YJ/lbrto0c6U08ponrp9yOQCZR27qhEBs7HD3aIYY3/Fjb5h8lrlnMv7DVx44ZU8Kv2fvnr8l392tDOpjwzrIkhGRvKBRTCVwRAEJ4mrchODI0mM0Dsy6TVmIOezrZL2YB65RnOWoQy+x4SOvfdFYYTizSbOjxh6iUlAMjvH46n1xymLx1R01NVuLFl/74/+9K3rRXSb5WZl7GzZHc8P5/N5zJiEowIQueBWx0fve/Lif/C3Prp6+61yCcjJsAuY0U0D07A/N23PFlWJ1vf9lVC8aqs2VO+8ffPJJy6Foh+cKono8LA7X86uXKgNhmsO1AzYoZV9itvbXn3jfaXSmgPQJDmCFmAMqiDKCIiCxghqA1NQEjuXYK0yH6xLTrH1YbOp6+v7/gxzXegNVDWlxMxlWXZdVzg/ADU5Z+eCd0WMKcYonrMWTNmZR1QwdARbO9Urr7zyi+6nEI1pCPMERDcIeohB5QRg/TPk7nWyMJ80lUNdR0TwftmvPFYPXz77/POf/9ELX9/eOrM12mmaO02rAPrwww9XXN/au3HUduceemw5n0eWcnt2cHB0az+dHxfgNDNwHAqUITA9cOsa4UkwA9w3OEAEZDohXyMiAwgQIvBpHQVwSCdjPSRSEUOEU+BrGGwYQuz6V1986e/+nf/J/uEhMpqqQ06yBo5PmkQY/itgN1DxTbOZMfvBVcvW1r6nc8YBNkJAQ8PBhWBdjO5DPQ/6Op/oVH+MjH/yA+EB+vaD8Ajimhs86AJO6egO0KBkU0Uxc4y7hi7bXe+TtVAVIUHsm7i1s7mcd6Jdb8boi8J5Dp/73LfqcvPszpV+ieTZNDkKquA8qQJBoZaR10HhgOvLGBDMgI01Z8nt0JSIluNp+eyX3jRtNraKNjagnpGSRaVM4AZzEkmZHLH3wNR0XVZhppT7ui5V85C5MbQy94eoD1jC/dhy+ABig72ksqwMFCSL0yGQ0kwQeViSVRVBU0pqRo6cOSS3toHCofeSnMU05CSRUzFyGCW2ibxjD5QtS2JwfZ+cc9oJMjHXAMts6tDpkIuwbhhC13WK6H0xxDM4By44zZJsOYqu7qvmiEI73rvZJSl3i/ywK/YPbmfL4Eoz7EFLthlogEQx3+06mI1KhdXhomV/aALBTQGStvvaNMpjqA5bMMOVNBs4rqX74WLh/uJPbP2DP+7fPCifvlI1eviV5279pY8+8r5HiogtqgdMgBEsMEPO0fmaUTRFzbnwAa1pVgeWV2VRxXjcNweevJgdLo9XUYpqGsmkWeVVWsyXx0u5u18IZL+VAVe3924v23GUVbs6hzi6e3Rwff/C5mRmaT+CuNIdNsuDFW+cmVbe1cEr2u5kBy5sDcTD0agySIhQ0mPFJKTVO/uvfX9Lz29tTjoFs9THa1Gs4JE6TgSctUY3C5Pt7K+jO4gGq8MfvfTaRz569bg9Pl+OfYGrFq8vdFyraEJXOOfUMuVglnK2uHB3+lLLAK4z6nMfh/5Lsw2im6FGQFYb/K8CQQvoGByjwdAmlGU5NCmq6gYG9FoTdJoSua7yTAQqIpJSquu6WS6Hazrn3LWxrBytQ2R6s1EfZVwRmOKamS5FEW7fuXH35jyUAQxAxQwZXRZBUkJ3IlEBM9BTmgkiAYMmssEwxQbEBpEkI5V6dmv8D/6b/xfkw0
sP7TBrltsJ2t1z4+l4smzs7dtHrty4cPVhZo4Rzu7ONLcPP/zwnduvhmKq2cgAHQ5G5KdMckQkQIFBDGmIMty3RIQ4JIkCrG25DGwAoXHNucb1EkXrBMNhnvbgLl6JXEp5Mpu+/toroyI88sgj79y+CS7UIeR25XwwA33AUXI4F8mAEAnN8XoYbgqARGs8Xk/miqew/unygwCga5MYQF3LcQdLRUSkgc7+4zsVABiMpNbL6unW6qS8e4DTqbsOBwxAMzFQmVPLZBJ75zT1DcpxhU0MDCkzQTUb3b113CvUdaW9BixIRaE5d+bKV77xrV/8edoYX2haE43MWBRl1yNhATB43A/HVYEG27aTJV8zE1MB0aQqp3nZeRq9+sIb47ExU+CRIjKbI4sZ2RXzrotJgRDYBV/mpF3XqWbHvunSdr05KLdPljEdOF0P1vfTs3P651Dc6YR2RM6JagjBF8EQUM0VzkzWttUCiCZrp/j1Wj6qJznfYbc+L8PEWCJC5aiw0YbzPfbL6CauLjBn7uZK6pOu2FPbJuid956IyDtRA1FGR5ZSHwsfRCTHZAghOFcEERFSp3J0BHFZcarau3R0wOe4/AioZJxnyZ49sjfJZKt+vsHcg4CrXF13q0wC4P2coc0ysUntihvL/VWw4J12OYzr290SwJ+t9ZPgvtP27ul378Ln3jhuKunKizt4a2//1tH5T57dbVJEIaJCJDiuBgqMY2Lpk3LC2nvPyHVdAkrKvQO3OesNHQBcPtPmrFmYHPca4/l5aq1PedU4yVvlyLsqnZULT7rxKunl8b6rqEbeGR2Mi0oDaGKn8td++ukqbI5m05t7+9euL87uzFJKIrZaNuPJ5tvXbo0m46Io39o78LCdsTjzwY/cuBvfDHnipqPe5tATB4IqoxKadPOVz8dxdfkjT95ov/fOiwfoJtf3Dh45fAhH/u69I7c9BSivz2WCDrk6uNcsF23hmbD0HttV2m7dKzfandkZiwqJu2iAnhxBTuhEUcRIzZlFQFAyJDJU9QCBci/e+wFnN7O1LQHTqf3A/cv65Fa2E8+Ztm3rugaAAY5MKaWU6lFJxM45EYlJ2qQb41J1BVYFRzKExFt8+Uev/cSnHt/f33NMhB4RTwwmT3RDp7YEJwFB7BwqP4gJqCqiSW4f3j333/zf/v4br1776Z/66EiW7fIWn61qDft7zZ19d2v/5oc/9D5TDH50eHzgZyH2bRGmxWR81Ovtg/zQ47urbqW45pDf779O/kans8v723Aa3CkBEEDXf12TNU/bN7jPOD9ZFgBIHzAEF8kbs63f/Mq/+uhHPyoizEzEfd8H74A452wqgPQgux+JzURPtK8ntcVUFJlOYZMTTZUBEtKABK2/2DCEBwM8EWM9UKHuozp0IkM9PSAnT+/73duJXwUZAMIAyetAD4cEwEAGyMyo2JI1BRNqNS3QeWiytmm1uzW6dWt+uBdH05lKY4bEnKUvitE3vvXdn/6pn3B+h63KOcXUIngzdR5F1sQpOBEWwMlkggL0PWRNxbh85bXXH3rozNFevHP99mTik2RkxyhoiZgDhSSWDPoswASI3vtmETWLqQokQiqKouu6ASQBUTrhCZ3eDg8+P2VDDhvZtYE7KhWcLaeUYxA1JmIDyWpAPCiziRyimqEOcbAnaqkQfEo9EZgBY0ZAE+h6KcAzeaSM0PhQGhg7MBBAQQRDICdZUkyd84MLBnrvEHtT7SGHsrCkiMqDEj3maai7FcQeY+vyTb11LXktLlJhTXcACc0bYCdCwUitdAV4hGODiXNtN06WCyfIM49e0kMuRWDnwq6vvFnPq6LyJXrLfortBSVidfOFXj1Ld+bxtWuLnQuzm4dHz7700my2OnvGiZj3rKrIARFwSL5mIfQOHaFoUkNPZRDmikp0DIYm4CCAkGRkNFQrXDWaOHCFDxaIgUpA957LjyVKag76BXt96vJZVQumqXe+FsFVspzjvG3brXGeXTWDPe+LlJJuCtL+7gQVV4jtQzPD0E0qEc2bZ33PfZ/33EbYlciujLlppAEL2+c2nt2dvvJmf3N1/WOfeubLf/qvw+bmvYP5nTeuffqn33NwtDclMwop92N1RAjYzwqcVLVZY6FuBM7UUtF8OvKSIEDdyQHAEFjhAMxQsqqsheNguCZ7AALXRbfsRmGEiKfqO1UFIjkpDaePB4v7YGLb9/1kMimqKqUUPBtI0/dT3CiLOqWYckpqbZfUSmQaxozM3oObTMJz33/+Z37uIwhHRIZgIskxJ5FheAcAhoxkZGDrhDXIKODo/hwTUUzIu6tndv7b//c/feWla5/8+Ie61eFxc7C9tbV/G+J81abSVe7JRx5KzTvO7VSzSx0FWe1X1ZaDUZv8zqV3WaiEiwQLb/6++aPBqXEA3W/QCHFNu4I/YyFPAMCIp1RNHCas6+gJOy3z9ECJHG6rEMJysXjrjTf/6q/8ytHqmByjEgEDWRZBROZBpw5yfxdluBZnDq8wAKgpMjzIF8STr60nNX3YEuHpInG6hCEDGALboKFFAzA6qfI/1sUjIpF/wMUe6IGEqSHFlMwAmJGiGINDxeCxXeSkPQXsYxIFj+Uw5Sgmtrtx/rVXr93ZO5yM66JkpFGKfRPz8li/9vVvf/rP/ULXIKBDUkI0EBEDqxC6B8w6T9p24NYaZF/6GlPemsy2Zhtf/+JbfVztbFet9mCJSXMCFedDueiOFCipDMIGM+y7ZIpkkFKqqsp7f3Q0Nx20AKRyf9fyY5j7g837sOVd+y95Rs+qGc0EzGiA2vLaylRUWREdMxOwnti+97EdVZVz0PSdghICRGemSAqmXUypk2LktYcGfYzRU5U65Sp0vYaaHIojds4hsCPfLJrYFn3fhxAKpr7vANATp9wDBwRu+hxX2s8Vjke3X+/6pj4rqYb8RqAJUt/E6Wgmfc5tayahHOekyi4eLcoy9M7art0q63Gn0yZLbfuLVXZlzN0qd94B7M83w7hOq2x4T9N5Cq4qtp586PC1P00I7kqxM9vpF732DUHHVWmIKmwArSmjMpkIhcGsSUBohFzhol30fbyzutHrEp0PYepCtbl5YWv3UjlizFF7SS0tl02Kt6HfXx63q9hQGpvSzuxMCCqSgXuBbNJzucJjF6wUUyyykJKKMw8lLJplYAcEBNFXTsTMJIxNaTFfrhjJhxDI2AvxSiM5iiXAKLQi4dL27NK5cRXy7/3mn3zkmQuIxWg6nTn70fPf/fMfnz60sWRZQhHMCfVZW52WNY+c5rtg2qFsTWjTzbZ5pW0mZHA9sdjgD2gskm1IJRvMx0/gVoeoKK70ja4GnCHnPHDef+x6vY/Irp+ggDEORj2Uc55Op3t7ewUiIHRdF2PywQ9vTjl3mbqodVmc0m2MY1W4a++8fv3a/ng67fsjXDeZgsimZJrWBl5GJ0InQIBsBiAAZIrEoKpGUNfVC8/94E+++u1Pfuzj8+O7JcNounm4WJzbvnBtb3X14ohzoy3Q+KFi86yGoItjjOgQbt3aozKNtuvL5y9ZikPbjcADkDHAJ
id+t/zA0SBd284x2J+JzbNBjw8nfML1vxKj+y2zqDpcx2QPlXQ6nfzwhz/c3tk6e/HC2zffcr7yHAREIDKvA5VPPTuJCNf/X4KBRwMwzFxPd133a/HQr9uJuxrAWrgwzBFOuvIBs0bkU47piWBVT+r1KdxxYpePaGs3eWBEOP1sOx1VDJxRR4PyyMhxBeZBlakbT2l+qJqscoWq9k0bXXziXee2zuCdO4tFe+iw8m7sIJf19K3XbnzJvvyzP/PLi6NVTuI8EJAZquQHiumanThMhx2WiGSaDWBjcwQy/tHzr0ynnlQIxVQJcK1kRstCkg0UgMH7EGOOMZuZkUnWejRKkvu+H3gEg5/Xj21k76/wJ+IyOEHemLnv+2JSZlUAqOu6noxNcs45FOtMMZFspwIOSEP34Jzz7Iio79sYY3AWKi+ZwMB7LmsfXWolSdRAvjEVQVBCC32zAuDcZzIFNQQe1vCcs1kYsjBJjUVCUYplSZZTT1j2XZLDVOrm3UO3v6QzyI/VxaxviHIrFkruNYMImoZRmWIOzi8qGxf18XIVIZ+p6rJvQWFzvHEMuQgWxQAlVH7T9Ay7jdLd6gGtOGIt1Nw9kYtbBv2qG8/efvPgyXdXL794vFzZ/tGd7bMTTaxI7AagSssRJD3shI4pHEtaNa0sks/VI2eeevIjH7n68GPj0YZZa3D7YO/W4X5HWJW1t97TtjO/qusnpv5M2/hFPG73Xj7eb29cf+7O0byg6aT0zrkkHS6QPLYWFcmiAoBoIpdtH7330bKZhOD6fsXMipCEQgAEpwpmmZUQDDAriylD9sY5CwjcOljdLcbFWzeXh0cvX9jaXh7c2X3swneu5S9+8/Wf/fB2I8faeO+LII6wkKgRV4SCqcgjw8Vqo5rN9+e7MwaCjIktaDYw8YxJejMldZBZKYMiEToyAkAz78MaFyYSkbIsT6dGiPigA8HJHAlxndMGImLGfd+XZSkqRjhkwjddO+bgnBOWruuruuj7WAVn1Jk6zGCYgh/7EJ/7wYs/83MfadsDZFxHD9OggHnAYICBbF1tUQ3UgAR0MFXPjnFSlf/lf/dbV3eeghXTKI1HHOR4d6d0IW14Pm7yzsOXclW3sDGyLcnZj6Zs3qTfujA66uTS1fObW6MmNVZ6zPcjjx9oVA0feO00x/4Ex+D7tBG0dZyzyYkQ+vRT1ACQCVRtPVBda2HKuvrud7/7gQ98IEokx4S4d/vOztkznalXwJPKPswGY4xt25/d3mm6LsY8mIMP0UinC/OD/eM6wecUyBoWi2FfhEgEMgTQnbS/ayDupGwhKq1D74bPXB+f083csN6YrvNe1p9OYCYAlgzBk0pmJTJfualB4WnRrKKzsZqPsWekDL6P2B605cgeeix0/YXDvUVzeMgQ2mUzG++++srb4/GXP/HxnzzYmw9zaARG7sAGCoAMy6SaITIiuOiIVVCTQlHzzVurt95488qlKvYLZGEip56CIPSr1aLtIPVxWJtCCIs2JckKNlh4lmW5XDSAQ1zrUCL//3brDz7WexoiUSmZybuUUtv2TdnVzjGzagYLg3POOsTDsmoGJ2iKYMzctivJ0TEDZDPzRQZCSaFvlQtXbeLqsB+VGz0t1ICAENF7ADNj1H6guAEAqIL3PoTgc46SU0pMHlRy7MfjUWxSv8raArUzOxrdfW3J2V0ORqvuwOQ80SESZTyar5Zoo6lv++W4GidOtaSUaVbWJVGJeTQpk8G1rnmO3C2UlS+Wyzzi0Sj35yzuFulS7bdBtxmOk7hmOdq4WGyVMebudg+fvviutP/c9fm+L7tpg1ZOxhu7wa2WtLgrGWK6E2Ol0iOz2zh/7son3vWpD7z752DCzeELr7zy7a+/860btw8ODptOGqFgmhXERBkdmFMLjouSi0tnL053Hrt89ZGnn3ovxIM777y+d+2GGQJoMmIUssDKYsk8Eno2sVCZpgIxCWkmcmWGSGBkBOJi1uC9qJiJJzIlgDrhwnvTjCW5Pt9+/ZW+KGbWzetqw8Xi6Ohw5+L07OvdD16+/cH37bhuDvW0lyKzKa0gkjdIpEaRjqwHbuNiYXDR16pL1EJo4UgjKWQgI0RjSMiGjCqoyMgYsyUjH9gqizExUEaNOQVCBRHCQhgcPOihMRR9gCGZx5idZEtZR8x1VbfdKhQVYm6a1Ww6qosp5MViNU9RF8tuc7KLUR1kY5fEI+pmiT989gc/+UsfVAyg6s0cjBfWs/UAzkzR0MhMUYAMAZFJoADTrFLaKsetSbk54v/q7/0/33idH3uCNnfj2VGxUdSm+fbe4WGfy8s7u7tnt7a2c04iTQqHOXJVTqmiO3dNfLEzGQc3EysBmkIxSWR0DkkMop14rwgIKwrCwK83BHBrpAN4SF9gWHN+xEhVPfPJWNbMiAwQmIhiymXhJQpgoWDkFNCtFnLt7Tf/1q//zcPD4ypMfvijV8p6cvT2jdlsE7BdrVaz2aYhPPeDH/zqr/7qZz//J+fOnfvu91/+4Mc+8MXP/slTjz+9uzm+dfPm4eHhRz/54dXicFB9wUm4CAAikdpgnnwCrLMBGIBlQWIyMzRUVWQe+ntSBgTgYaB9f7YsqCZraRsZKDIiislJUuMwyR0s1AlRYcguN85qmbTaOr9Y3ZL2MEDV5C5j1wKqjbKmApG1WvQ9QXJOzp4rl7W7e3ulKQDw7uTiD3/w4pOPPj4azWLbMZISZg2ak+dA6hCxh4hsNDgPOxHwhhh1tRUe//KzXwtVNu4N1Rkik6KhgWARzfquNxAk82XlfNHcmWtsTXOKUpXBMccumSCsA1nX0qSTpQ5McSCwDew3AEATJJ/MnAJlQTIYY8odeSTVoBLY9ZhL50hV2BKACUCWITbboQMOyCV5FfMZOvZAhuOixqyI0hwvxhuFD7miohc6zt1ogjwBXmlMmEuw7E2SQVI4iYiGnFLZA/SN+RLRO8xCYlVwCJCFUuvzCjTDtReae0t8T00XjXqzhSMUKUXAhzNhfD4lcO4AtZNc9AVonmmaBZPknMTbyF9bwtslifox01lJjzFU2oTS3VzK1w7SgfF7AD9VFzNvbnHUnr88nla3b4LrlA9uLj/07ke+8PxrfPFD04fONtBZkEbaN5v5wnxYtKO6YtJL58PToyuf+MT/HM6PXvn+f31444tfffYHby4v6GRWO6tGO+rOotDYbFpXGayTKKKVHznyB0eHL929Hm6/9Z3vRTY4e2ZnYzRpvLLRZPPMTtsu5vPOuqIeWWTuoxEK+UAaqVuQKTAbMqKCKJs3EY3sKVkCAkNcWUaHTC6oT0bJcKcavXmzv35Xp2dLtb3jLOWYjufRjaebZzZeeTO/ea197CLrqvSbodNYARaEq1Rg2fquENf3WdRo1UX2ZU4+YF6CZQUxVWMFAGIgsrU7Lp14YBkYOOdcFWQppQ9dE1VVB8rBAISrPdj3nW75BfUkRy3l1IvU43F97949glpFNctisZiNR+Q4OJ9T7Irx4arfHLNYJIeSrFcs6/Gt2zfeevH2w4/urOZ7AK63VQiUhzZs+JJr
HbyCEqJGn1KCovYiMgrl4qj9e//FPzw87H/y008+vK0p7R/349sLmatwvX3uwvbWbAeAV8cy3dgQL/v7q93dixhGTunsmdm167eQ0nhSpNx6R6YAwalRMkhZejnp0VVxTQm9b+MFAGiAPrAZog1eBbRmt8LpqBPXlHMyAAFzzqcYCQDJQDXneObMua988Vvnzp2bbm7cuHVz5EPbtj/9cz//pS9+VSRNx+Pcy6ievvPOOwR8dHDs0S8OF8UIJjU9duXMud16tlHdvN2+/0NPr+Z7TGGo7IOQaejEbSDJwzAJGJ7gEPnNjJLyQLX0zg2+W1nlRCEMAIBE9zF6QuJ1k36Cz5gN5KX1tsbWrTsoACEksELNyJtgC+guXX76Rz9YVcXtUI4hmeSmycmBP14ui4CIBYS86lsVKcvphYvj/Zt3U3Ova8O42P3D3//M/+Bv//W2c4II0CFWofCaBZGiCPHgVo2MDAgxig/BURSx559/cTwuU27QAwOymukQ/+v7vhdLjE5MnfNdF2NMqsDsJKWqGqeUHuxv1vaoDxhyDAz7B6EYWG9qbQ24M1Hw2URNLWPOKlkBENgY1RE6QtCMxmjgDFUk54wGdV0jYlEU4sSptn1HllFHSM4zEAFx8oWmPkFZlyNMKiqiWYZR+ZDFNrQaBpCz9n0kIkQI6A0GfYk2q15al1amTVi9Tsc9X2J5FOLY8shToXE7ucvl6La0R5DYoIw6Ye5zCpRm5EdWtj1MvF1T/znDRS3vKdxZwvEqB1StuCSoUN4zCkDlDxfpu3T8+wGficG1Kx9q2tjQO7dGkJtobuPi5s+cvTg+R4fNcZuUPdzr5jc7HVP7S+89+8jV4+tv3irszCMPxdWb//mbn/+qFIuyeqycnH1yu5yMJtfvhhtL/9TV7S3F2IY7smhSe9QusgjAUkT6nJz3Iy5dKLwPdxbp+r3bpQdH/l4DaWNr++KoaI9Whz1JqDyQivaT3veBNyALWYeS2FedFhkiqwMzIs5rySKakHOh73tmgxydurJc3r1+YBlGZQFpYmbZoqf0J5/51qavQp2++9LND1/YvgnXpD2zu7nRRV12KWB2zdjoWFhd9k6Nch6PUA3EFd4VyGTKRqiA+cTwGoyQULIgETKaZkUrRsXqaDUKJa+ZZMREavcjxB7caa4vX0JVJQYysCxNs6zrGpk0ZWAys7Ztx+N6MDGxLMlSJyQYNCOZgJlYNgDH/Xe/9oOnnv6Lc+mdC6aJBcCcIRgo2pqsBydQMebErso5uUSTsfuHv/V7r72y94lPflzx8F4vu9OQUzo6ase7O9Np7YrJ4WK+sbE529xYrZYZYgih65fQxTNbuzfeegvAnnzqkZRa50myCAzqJdCTOeqJH6LzdAKzGOmJ9QogiKkMvgIMg/sWwuCstfbRXVPTbT3JZPREOrS5iKiGRSi/9a1v/Pmf/om+782MHFfj6gtf+MLR4fGlSxfu3b27WCwmk0nXLC+eP3vj2tvTSXX+/Pnnf/Q8Aa0OFvUj9csvv7xsOkIHxgo2nMT7OMtJ0OvJuXuQywia1+ScGKP3fijnjHRq6P5gwQIAUIH7aM9Q2lEByU7ePbyR1sF7bNjGSFw4AAKYz+dVMbr4xIffeuVLTLEoJCcshFqIo1EhStB21nNOjopCfcrYjM44lBGrrI701s3+y59/9lM/+xOH+13JGybHbTZHXkAGCBERvS9yFEDwrkgpbu+e+eFzry2OF+fPeVVNRGhkBGoJiHqhRZ8zGidh79lXy0UXs+oJ7WY0GjVN92CCzYBM/hh2t54JDaAWMxhpzgDgvUux87WT4QZUM4XBL5UIHZGgEKAxRITCU8KcmQkJAFLuB6iNnS1TgwCWNbYZUMta2ItkVHDjMUCyrlmG0Si2alI6RymZ6uDLqTFJOYQkC2lUIkIDTbkMlSqkmCWrB0/RpWN3cJTVuaehqCXdM9oJfBlgRgSUsOkmZUmVg6wbLtSucEnNp9hBUbpXNX1LZLOafix0oKi5U9boQ2faiahZ7Xyp8NERfDhufc/RN33n7vIqvndr9Y3y5vfu/eqvfOyxj1xczlpQBV/vy7LaGkm7974L/ce53RqFx3bnb//oxY3puQbG89XB3vyNW7yzWbx/0R998r1PpD597ns/XLjdzdHWc6/vtxCszbMaxuMxUu5j3ysgO3SubXsL2C3auiwL8sQKntRSr8c/ut0UXDy8M710ZbyY79/sj7MTo1UtiZXQyLmghkk6yBac72nFzEPclxlqFiIH7ArIPWTnq+Uq3h4dvbmKGcNxXHgv7aLJ3m2G2fy4pZGjMV+/aa/YrJocTN3RKy/faWF09QObx81d6GvjlB1k4H3frQI4lwG6BCiyToUX5Ww69Fm23qGrqhoQMkBSkcR1meHYQJgpqyKEYRw43DMP8hDgAYTRVEkZUEWk6+JoMq6qKi5T5SrRpEoxZh9C3xGoxHYJI5f7XPpgJuw4W06ats5uvfbyKweHbVGOJWUMXjozc4BpoMicQJvruWKRS1FT0d2dnc9+9g/feOed/9X/+j/87rf+aFrOHz7z0IjaVdO/2Uud6mm1efPWO2402Sx2b+/fdoyb04l3bu/u3boe31U4PD587zPvZgRBTCk59gRsMSIqrW29hgWFhixGGJDqoeifyH7o/sDZhokZGqCRIaoO9d3wAVdLtYxEOWVDVSByxY2bd2/euvPe973/8PgIiefL5uGHHz44ODp79mwovCv47OSML92jTzxS1/VqtZCURrV78tH33t2bX77yWNem8WjLoLyzN9/Z2UhxOdjg4pruPihsAZB1AN8B4aTyD205IIkqMYuqAep6RGxIA1/STleCkz3f6d5lvcyTrpf9BwodDBm3TDVgf29/fzqe3bt770cvPj/bGn3sz334sXd9/M1XXhA5HE/NhUw9JeR50yQMltKoKFKKZI7ME8dQt9vTHTsXZ5Pz3/7q1yYb9u73fqw5iqV3ZeFTzGsnUVDmEGNk8lnUMUnqAl34ztefG48ZICNikTkrGZkau1C1i2i98zlm1dJ7AOpizCpEFmNfVoGZu66zP/s4GU48eCjglDR8+vowXBXTUHkjRQUiMmI9sVMyU2NTg2zmyQlgMhAF5x0BqqppLhz3IjlpklSFAglA8hCMTqzszTIyOqp7cCaZTUcMiwySM3giDp56B0wMHsGpRkk5ixQBk8TFYoWIJtDOO22mecmrBsm6qXINFM3d7aJIMudQgYqSwKyPm9W4TgoxQfCphXvc3F75z0dHpbxrvh+5AGiD0IqlFhwlBDJ1lvv+sGmjtbUrr2KdtXd//PKb3TfHs90z/4v/+INXH6vvLI4OlsUTF84dHV83wRqLrVG8Wu1v1C75rb1+49xjf4PGo+7eq59/u906/6Fnv/NHEm9/6L185wbM+713X5r2lt5ZLO9pMQt1vSttil3uU0rsXEEoGR05LFl5XnEAyAkSIS4igigzVZokdK/sHd+9t3FmWsMo3sUfHRc32wRkFVgw6DmImbEEEhJOzKiSnGMRQV27OBVaNNqFqjjs4/ka3kLoIRQ
QnIZiY1zu4NF1AB9bpCIXmm999vn5038+x4r3XtTjb9I1rsPH9iBGB5UhRInzza1rRVLiIDYC6E9dcwFIB+aGOXQDKcB7H6NlU0cMolh4qnyfk3eu67vs1REpyqnfwANX8PohomtsF1hFUSAnGI+m+4t7SKZZ1GvX96OtDd81/WohfRF7i4U6T6IgAqIhpeRK25/f++KXv/erf/kTt26/VrgZowgYAa8DkEwVEQYTLiAFY5eRiuO+WZH87/6L/9P+9Vd2Z+1uGVx7TJu7igh449btg8fe9Uy96Ip6dHiwHFfl5sbG0cHBZDK5fOnhvk+95aff/cRsNum6qGrMwUyRDWVtVEaIbGJDNg0aDia7g4niA7cuDzl2AznPLObMwABCQGYga0b5mqFkhDZE0akBoYidO3vuM3/wh+fOX3I+tF3PvgBTQ9rd3c1Zm6bZ3N4a6gsRLBbHzOyLYv/wMPhJlFRt1DHG0bgYj2vjYhEbjw50wLuRDVXUTBHWTgNmiggnCue1p1UXewDw3icZiLADprwuW/cHp6dkytP9HJqqrYO2Bw3tSWkbYj1ArdfkC3fu4oXVUfvay2/M6tmtG2+99vrkmac++OiTH14c3Dncf320udLV0klFULYisQuSYEh86VMaz0ZFAQe39qej/P73bG1Mrnzrq9+++vgTfrNcHAO1GkKZLauod04kIYKaIUHKq+lGuHHt4NrbN2ebYiq4zoU1VQUmNV62OWUdqClFUXV933WdWiakbN3GxoWcNae1DcbpttVOcsMfuCnu3yZmNrCRmEhEMkpZOlFBQARSBFEVBCHNCBPzjOCTeGZIRgVissG6zjM5R2p5jQsZiioqG+QhqZO8AECfZOTVj7DPrXIJAMBEYpqACRWzIuScDVQygqJoRqCsKacUSs/k+3lMrWnPe7cWILMrnFWXC+akkgkiV4uRK5Y2DcFJ9o7GgduuX2gSxN46hfHXtZ2V0/eMnOOFh6LmtFhZYbghMGJufV5BzOS5CH2glgx9unysbqcqHvdPn3tqV0nvLZtJVZErmnS4d+/gwjnedqurm48YvAsvnSkCQNsvdJWPbly/d6uA+fztF971CDcxfPftw/dfKSYjv4z47FvLezJxo2C6v+xKRAAmC0iWWcnAyJl4afPa6I5QSRQSM7pAqME0iyEf6eHq1mq3fnhrchn4JfTfAdKkvWQKEojFSFvtZ2EcY1+4EZoJZle4xbKJqi7k7VDPuy6w2w6VtjGaD8hRDw38aHvav3GzcpVhOxoTFPW9t5QaKMeydZWWL2y/9fX46LmN+uG+6JCBl6rGSGk6oqLXHF0wyY5AlOKDJhgIakDeOeAmxSzi2REiOS6mVX93NQkTGPxy0ROqoJyqW+HP4oxDA5PNPPHA3OqadjKbHvqjIXA997FzUZTKatR3bUp6uOjYj+8eriSrAiqAgjmXg/ef/6Ov/eIvfNr5IvWxYibtEQpYMzR4XS4MEFC9sgPrre37n/35Xy4s/+Cr/+7p8xuHyxzOXF1Vs7v7i4/83PtvvfPON775JcJyZ9apiW7MPHlAZ+Tvzed1XV+6cKEIoe8iIgdXiIjznHNyQ2MFpmZiOni6KBjrMNG9Pw2AQbOkRgPt3AhVDQeJFaoNnMqTDCYzMUBFJSNy5JyI9CmPJptf+fLX/vpf+5W27V0oyXlVVbGYhZEK57NAisn5kFJidjFL4X1Zjk0xWupiDsQSIwIbm5EOazoAMLEayNA+nyhmDYYpytrQHwFyEmYPAF3MzKx2EjO03jKtTXPWXBRVAn4geVGJCAgQAZhhvRqvf+/Jjktzlmk9vru6u2qOynrysY/9xObO7sFxNxuPCf3m5rlX3/xWNQ5N00OXKa1GbvbqtbubZ0e752ePPvr488+9tDW5+MT7Hrr+1ttdPj5/aTx+c/FP/uE/+9t/529tbO4sjpd934ayGhphVS2Kout751wXFxc3r372975IKI6SCYBhT4mRJEfvymXbt7nrSIyt5oKYm8Uq5zysf45cWdbz+fKkVT/lHa3VbMOvXO9SHujiT4u7Y04SOTAVLml3kqdrYqoghmSEEW3kHACe6tqGq2lYQYNjk+yZUQgJEFGyU43sAB0hmJpXiK4GohKpLets2KmqCKZIiRMRMHkiIiBVc84XaM77pA2YeVc0yy6vkG3aNb5rcQuPfzZUU+S5CzeWrWF5HO1u6h8KhaU4JZhMRvOmaTHL2PUe2JfXpIDK3r/tNpcxQFWPsx6q+XCGq8pxp7nI4Dkk73tDz6kpi77TZW/uf/arP81jf7w8pHExK2eyOrqx2G8hzUJOGurJ7hy8r6bjrQtxde32/o8cdu/c68Ed3jhM1945eOrh7ZfeORrPUkzEHqqo77u48dwcDhooedRBBnI62OGrOVJ1qqRJshmLCaLlnE20YEdAyz6tPNdYuQzEkkbLG+klvz8d1Q+PdGtRPNf4a1ZRwjL1DRoQjJaU+yaWJeWcgYkdtsIxxnnGM+j6FibjsmAH1gNLZ32W8Jd/7eLbL7/JCV2dJmfC4V7vS+ruts8979/7M2G2A6/vv32puPzyZ+Xyr7nW75c5RFFZxtRiXUBCE8Oub0VSlJyyyRoLZWBy5BKZia6tvQFRKRlUs/HR7SNEZO+zanG6Hvw4LfCENrNOIQBlMkJT7bpuOp3W41G7mAfHIhZjWi5Xk3ERisKg73OKQMsOVIkROKBjI8c+8NGt/a9/7Tuf/Mknju7ejcB5sFHENcoxVBkEYsJEljuritD1iZO+8sNvbji1vi/PPeS3LmvGh6+cKzeunHeTxereRz78saN7h33floU/OLx38eJFcnjpysPOOYkppWTrEfOaAsGMOsTUAQgMrS4ashq6NROT7SSgERQMMSGQwdC/IzLRWpw5qBIHzdNQ6IaoZra1jYJqmtST1195vWuW73/f04eLpRhpVgVg5whAUnbOo0rwDgA8eVMLHGKfvC8MEY0ZWbIisSMvSbxHUEM4BbEAEQkZ72trUdVO3cWHUjVA5568al5HM64FagDrukWAoCKGDGvzmVP78sEg6WTwflrY8YRZAlBX1Ve+9NX58bKahun2eLa1KVou5nfffuP4oQtXnv3ud97zzKOXLl26cfPavYO3NjY+/vu//3vPfOh9H/v4T/RtJMCf/clL86OjCHbhkfdqRomLX/gLj337G9/5b//rf/7X/vpf3L54+fBoqZoBAIxKH4bKHrOE4O7eaL/3veemU04pMZGKkILDTCYYYXXcSZfZDInYFX22pmlFxBGn3M9ms5S1bVuA+1sZVSViPBHZ3T9CJw9HkGxtTU1EOWkYl+qIpDi5m2R946haphSsAx05EFRykEnVq3rADg0U0ZCscB5TjwoIQGQqHKNgAPXqA3gjNcfqHJIxxi7WwGDgfKgnJdFybZwA1vep7yBjzCJdBlXL0KcVaFOmfdq72bZL9lgcCjvpzhVQOlpwhpIRy3uqS5DbXTs9FpfNGzpRdqhtePFwry/L8qjrVwtVzCpVD5dGNoGc+i4BOgNG3O/TvbZfVR6OUwUhz0Zuel5ev74Yc1
lkun7jzaV6perxjTPntrC2uDGaJCoiUFos2+7oe9dezNG/+NbdjcqeeGh750J5ZlMff6jgPkifF+A+f5NuJJ5MCrWoFACrBJkzluRVMRMnVAJisg1N5jWq9RnBe0FtY+/8SLTvYleBjT2QcSIznsOyd0WY5fdQ3FrmNxq3QKoYg1i7zLlTzanXlD2H2OfgfdLEkveOD2suk+A7h6uMrlB1JAVPIJEqLSIVW6Of/Pc3//AfvNUebldUzl+N957MxSWxyzZ/bV5Ed/iKle8u+9z3AIs+ghQcvGBka2zYna1lNIiMls0QnPOgMfYZEYmcCYKqZB2PR3Mi1YyIOSV1nhytIYUHNCz3S4YBECFZljjc4mq5j+14Nl0dH6kiUlCBtu+nkzqEInZRJOXY1iM2AU1KZp4d+KQ5n9kqP/uZz3760x92tp/Es8dog+ewMiIMwcI08E6o8LOUW3SViLz+xveunp0B+/rc4xKr3XNny0KgO9yahYPNnaOD6zTafOzRK47xUcCqqBExa9KUmQqVyI4IUEVOdibGZusc6SE6ztgUGekkmW4YkJKZrMmGntXWwPwwVMtmYsonAdbrw0VrKZYjyr2KimbbOb/1m//id5968tFR7fcOtSiLLiYaJMQGzrmUEnnIOSIisxcRG7o5TYmATBkY2GXSDOI8o0QbiK8AoKJmAOsQooHvAia4zg1kAFAF5+ju3btlWZ47d24+n9dVEUJo21UX88msZUgdOiXaICIB4NqPc6j9BnJ/O3eyz1v78zsEd/bM+RRvHDf969ffHm9tgTaA9rGPf+QLn/3q3TuLH/7oneNl2j1z4UMff+Yrf/Ls+z/woU9/+qPX3j4Ifpa1FUg8GmPklR4Ze/b1uCp++hd+evbtzX/xr/71L/3qLz3+xNN7d4+CKwgpxsSOs0rs9crVC3/w28/FGNmzxmDWMyMKKyQsikWDi95WWYmIhdAVfcxDIJF3qDlPpxt9zCklBBYR59Ya4JNo8h9vdx58DE3DAGF5z4KqcJrSSDjQ2pkYAFBVDJEZ1Rlq7C1FpOScc86toT9EzxxTFlHLDqEg7oGlbYEUALWNfc3gAE1LTYVDE2mzqFgc2DJD547AAIKIfYyqI89gWTBZf2TNofUrjr2OR/FGD7ex38hW+GLS55p1jmlpnhNVYbY0ajGNGc+THy9jKvvDTOeIz7Rto1BTLquqRoeVtjmD4LjaWJAdtotj0Tye1lnL0hASK7g33jx29bgvu+t3e+C6mFiAstVubyFPn6m3R7jXRZPt2UZ9+429LutD08nlj0vu9upwJOMU+7SYs5iqhhmUP/VE+KO3tOsrsrkYsrYBCACygiDqMDAbYvaUmUNApTAwlyw4ztqa9b72ksqFYu3Fg5pyLBdZfO4Lgsd2cGtlP7qDdxvXklTmDbxan4wpQgyZemYpTDvNPiSmpluOfcCqIFjmhIbt3t4qRfDodKk37uXFKk6Vlz5NIizmFbX7Tzw1vnevXfUz/Xa8fBlz5XInktF6DlRoEh8CaUrJgyAhuGjaZ64mGZ0wBxlbsVy0czPJ4M1D8OQqrnYmaa8bldWyt6Si7CsMPSeV+8FMp7yLNFijGDIzAqoYAi4XzdlzBTpUZsecYhsbjHFEYcRhKa1bHKXtnXGTl0aUDR0yZEyQy2m48drhD55987FntpYHt0uZ0GB9yCySmRUdppzRV6aCjDl1s53w8ovfnJbA5UW++szRnEeQp177PpkLvnQf+fiH9w6uUx8Ob9+6+sijKi6LAUYiJIQhR1GzoCMgU7DBCQfJDY5oSGjBUlYyR1mYKGIkHygronQKTKFwmGJmJOYCBvI/Aw1jYLLYW1kUoh0AEHpTBMwxZio4JhlVs5Krr3/7i7/xG39nvhREMYluWFKIRAURnCNjwsHRHu4PMHNKzhjMGRiAoKKiimVkNOUh0JuIBncgQUUikHWOBREJiIABoZAZ9tsXJm+9dfSVb33pU5/+2O/968+d2d557zOXNjbPpb7JORNWxAJkmkvjlGN2RKBDyKoZiQIBExms6e+EpqC6zjN3aqntnnzikSefejRJfvXVl9985c2Hrzx0/uLOH/3h565du/mX/+ovvPziG89/77mt7Ve//uVwY6/92Z9/5vqNO9PZ2b4zBPDG2lvGvvCVCmjq26jM+OFPfmxjd+Nzn/kMmz388EPLecpWK3QMGaUa1bk9rr79nWenM4Pcs5aRXHbmEnswA9/FuEwNO0PFYjRVs/l8gaAI2sdY1jW7sDqcg7GaMa95MoR0QnI/RWPwATgFE3IwajWidxjFmHBWS2zZHJZoSdWcAZpoUhn5ggwcYYaYyAdyrM5TrVxI1wBo5UqkIJTAMCOzaQYzsR4Jcp46pNTXo6CS1AA6YcakEYBqX3WruaSQlQyiR0MQzzguR7dXbSgcmrFRbKzrqoTBGnVc1ON+RgVMM1u10rASakqXYuehdimFEqacskooazG6A9IHJFdWG/joCNw8T8befD121CJ0qsgeSgPo5znnmjHLTlohF2YqmJHNZU5H7arv0qgczUYzHyB2XSjy1kgStu/s2aMPX33hhy8vF3/uyasfn/ry2rXPLGMRhJqVAPpR6FsVhtnvv9At+fK0dDmKo+yw6AQiJ84BQImQmG3YMRkRkXEGUjBhRFRlXZP/OI88qnEWUnUaASiTM4eSzJILweHWrv7cjn9zYS8fwSIdyxE5KALknrhYgTkxH1FBU44KmlKmCZqTRY+bnh1x0wpwaa7tmi7QjBk7zZZCdyzats0SZ7v46mp1bnq+u71avl2N3j1XMIQg2IVQguwTzwCzqgB4IgAiMe+4Ho/qLnU25GQSeO+DBWZG5ygUG2fP3L79uqMaiFQVkmRVcnRKdX+wSWHm0yZl+HNwiOy6uLW1dffuPQvG7HPOy0WzsbFRV2OJq5T6vvdVWc/ncwDKGsaOO+nbRmZn+Ld/9w/+jx/7D/fSdcc5OXBEqmoI3hcxRu/KGCMG6rVxvrAm3X37+sVHPzSaPtkvktx7aefJ9y5i8sU4x8YwFxZqPFOMMwR37cb1i2cva87mB1GmKQxeaZJSKoInA4lpkGjRqRp1EB8hIpMwgE7YBFliY3VVmjQQwbmQUkIXQa1g33d9CAEAcsLal5qUiIrCx5jRlEyxrGLXzcb18X77G//Z//7eYRiNLi6bFsAZOoU8wBlIPNg1SlaiIdADmVlFDSSUhYopKqzBkLVwYQCET84UGKjBmpqhg0H9APqus6/RAUIs1bUffObpz/7+169feurX/72/9Vu/+c//5T997eMfe9dT770aimnMmi2iCnHLUgkREeUc1dR7zpIhSVmWcMqYtPUihGhgxB77vm+apk/REJ544qnLFy/duXs7d7icN+/70NPC/PQH3/vRP/fRZ7/9/O0XX55O3T/9h3+4uTGebuov/fLP1tV232XnssjgsGbOOUJu+wYJHn308VC4L335T6Ybf6UIY7bM6MUoaXtm9/K/+71vzJvD8+dLSdE4w7BvIc0KXZ8Xqx7UQI2ZHbpeNOc8OOUBwGg06rruNGJp7bdzA
m3Bmu1+/6Y4YUwBmmURIPTMqY9hXCKRIycZVUSzqhEjmAUzE80B2TnvmBFYBVxJfVxMJlNgEtMuxZQSWFITJiICj4SiaObZOccS+yi58IQggACWASGrtdILAwQlhlBVOYsBp2hHxwsK1Pcp11Gj72MHre7faO+9QZ26EUPrQJZGKhshVTn1LGFcYmxLcoWRa2Nh4LsckFfWmuXrh92Npb0HawjFYinouybH4y4he/Yug3U5Y1HASmvRKfBuSQI0j7DIiTphSTrxI+/waHlw897e4WI+mZy9dv344LhL1B0cv1Xkm2+9+Zlvv/D/eOHF3+r6NOZgYZp8q4X7wQ1/s3NT7y9MtpWrQyuEqg61JUWjoCMlhsG2DWStsaPhLPZkHVomzc64MFdqWVklJSWvxpaRVhnnWVaaOomLQqPrV6vVcXP7uLnLq0fe7f72h+yvf5B++eP5/Y/brDJoc9dbl6Rbcu1CHYBJrE+wBMIdUAfcx+CwWUnKATinJFXB0+l4FTumiBLSXbdamW2GflKnvGKc3HsenYyFLApTwUzgEQE1aT65s7MyoKt8OatGm84F55whKFhSkUHuDy4hjs9u96gi4oJXVY/Enk4L+oNkgAf5XqcUguFftV0cj6frax1VVbuu1yTOl6H0aNqtoucgpkAoCsmEfeWMdibhzRdf/NZXfrize7aF1kyyJFUNvmi6nsipqvfeiZo1ZRneeuX726WbjaZt/9r1l37v4vZOXVYKTsQ4eCOLWabV7ODgwJelq4o7t66NxgU5zmKEblgwmDmEkLOYQWAHojD4kyEjeDQHMOTOkQeoUDCZZXAhITSEhcIYonp2HBi8NX3jnCNXJC0cBjLwbGg5xx4NPBUMhZig0uZ44/f/6DO39xdN43/rt//N7rntPkpOpoDZNEqOkrNaGg6uArFj74AQmQApJ7l/CgiNkGhQznFWzarD1E4kiSTVbCZ5MObHU+92AkUTABTQOqv9wq986h//5r/43T/64m/8J//JEx/+wBe+8s6/+8zzq2buuXFmhS8RxtnC4HTIzI6DKBAG74ucVQ1PL4b71waIgKHjrFIUhfd+cTwHgIevXG1uH7sYu/nizVfeuHn91te+/M3vffeHm7Ot8Qj/xt/4S0899QiYhoJSXhLL6YeyQ7GcNYUQUsxd1z905dFf+/d+PSU0oGxdn3pA5wKuFvTtrz67OXGYk0VgMxT1iQ1V2C+iLNsIimwQXFCg5XKZ+jjYPHjvJ5NJs1rknE4Rlwd+2n0BwYNlfXiDB0oqwxwnWXbjoh9gQB5+xnq6M9hFnHZFp8KoYWodu0TkTu2MVLOZEQ2Rj2qiliz1OScd1gAj7LgQx5kYrZToU48sUKgnyF3TOOeY/aJpY1ZyzphwxbnlMtSoLq4K1NolrRUsdTpMmwhDxTPPbr4sshbOGC0bRce9dsHymbp8/NL5cxN3oczjEkYSnXYTNEI/DcWmKwtEFB2H4FOehBDYWXDXWd9J7RGIuUCNeBe4a9vlqhOwnTM7Z87t3rm7eOaR905mF28sd8vtj2xfwrsHL86bUVWd53G9Pz+6dbQ6P9o9vE2fv3H2D248+g9fvHRDdkHAoctcM/vEkrxk65LpEJ0gIiKJT9RlBW0wTgkqwGAIGSVDk2zlE7pMBMjoTUJOmCR3YkX2WdiKQi3lfHy42nvz8NWF6LXbu+nwI7vzv/6e9m9ebZ7Z5pFHiLHbl9iTIWJFBbS5qNvsrOuoGld7t+bd0rxHQEbkvu+RSDih49XboD0saf7YJyYxLkdb1LydV2/VPkArqXQjRzKIj7IKIqtqBlEwX9bEwQCCr6tqXFQTFwogRMe+LEJZMHIYj0c7m0P+hohCEoGBR3efJ/Ngqz5ciD/m9t51MYtsbW3lHIe35T42TcdcEJFzru9j1+WqKM0EmDAJCEboVjFOz4z/5b/6d1WxY6JGaLiOj3FcADoFFFOHJYrL0i3iK1V13Nx54eidz5/Z8sWZK10vFWVKC1BJ2aKZqzw4d3Q439zYXqVu72DfuYDoBTCLAXogVgU1VMNeVGnw8yJEBiBTWDsvImTpTHtHaMjk6xyLPovfSJOx996vVgJQz6ZbRXCxWwVvilkgCaihIy4BXco6xAMxusWq+8lP/1TbHQR//MlPvHu+OAAjMHJrU53TEoCmMKS75ZxTEgAiHwRwEJXrA4XGraOykYjWgR2Gp5LaIbQaTxK1aHgzkHp3eERf+OL3H3niyUcfe/z733vnX/6Lf/dLf+kXk7Pbd+Tb336JsGAIObGYj7AcbLOQib1ThRizDtAcDuAwn14hJ4VJ2DkzSSmhgXMOABaLxZXHHnrsPVcPDufk3cbGme8//+onfvpDf+HXfvHyxQt37734V/7an/sbv/5rEokZiUUHdZmts8AG87nBPG656pCK0WQ6rHzksO/brc2zX/3CD1LXbI9H0sW1lZsNbBWNiqvOsqCZATnyIYm0q1ZkiOZI0+l0CKLRLA82Lg/8rj+TpXd6CgDAVBQMHYsm8lhMKmRi9maGyOT41NmNiDwxIqolYuEg4FTE6moLbZwkD4rZ4diCPuAqSsTsVSGJkGMFQEaUHFla32U2Z1ihlQVwCSF4QkfosuS6Gm1szRRzKByrG4ELQpoL4tHGiC+dLzcn1UZdb03o4tClu/QAAQAASURBVKwcO4dET21s/MRo8yoUW2AhiZmZ93kU2krnuT1ezJ/cmOwUdJhgtlFf2JiW4zAm3t3cqLxjwLIM6MmPSiycq7wjAAbwxoWVtbrG2kUUMLcxmgTgc5Oto+O9e8dHexcu5GNZ3V5uvLs8mm398ZdeHW+c//mn714/WP3RD3eXtHWkdKOZ++nGRKus5bFbTZgYctSOzBFVxoWBU+2NGZhY/UniiYFYCwkQADMRoTESMjgyS7kQFHRZLTlDNIGMosXSUkpSucXUebaY8XjZmLeqvoh33trPq83N0cWRf6hd3BiN3qzKFxo96IGWiB4S9rRR16pLoZgUOBZd7hzTspem15Qsp3S8sOlOtThc1cdjnR2OLsBrvW4kH8xd/3b/6OXQuiTReXZ9kho4J0NkVIQhuogxSpfbToWNA7uAXBg7dsEFZwCYtW1Xm2c39/beCr4cMlcVjOB+4NzpfXt6KZ9W+dOK3/dxtWq3trb29u6EUCJijLFpmsnGhNiTTzn3i8Vic3sac0I1MctZ0VvMVFb0zjvXv/qVFz7+iYdu3LgxmUxyFgBwJ/5lZrIwqcrqYP8VxJYqMt2XhNtXPxm9LzSBJO99BiqwBNSldJsXzt+7dWdra+vsQ5dfffX1x1xZ1+NGxOv6Fo2SVdWhMzMVKxhssOVdTyNRwRTBwsxyJlSmOkUtiliNpm+8s3rtnbhYkUhder14Nj90sdjdnomkMHZd16UYTYMmBsih4phaac2hP25XZRXObIe/95//H9jl4+OmKuuhQeN1Ro9lNbPsiekkAZGIBAxODsXpeVFVADI1gsEsfiCtDOpSBGAw8gimpjiQgYwZEJGZk6XzD2184zvXKP/EJMxssvrmn3zh0s7or/6lT//bf/PF
118vRhvPf/KTH2hXEfpVRSMzI88qambk/DCtHZzmTi4JBjjptQclV85E7rQ+DraJc+muPvbko0+8q+2aF37w6ofe957NrfL2/q2Llx55/vm9N15/O/ja8Sj1gGDMmDUB4kD88N6LCDJ5DikrGOTcApAZsdMQfLtyX/7KNyebeNws2XHGPoMHAjExwaaPyyYO7EPnSyXfNP3JggGINplMjg7nKvcv+Af//O818uvbYThNKSZjLH3oV0s3DUIqIJ5oYKwQ4iB9PW3VYfDpX2uaZRh++qI8nstwzQ8eaUN8IIEmQyNG59B5Mc2AJsP8FpwBqOXeslLvIBk1sRVJRPUwDG+6dn68ylULmSEUq8PIS398Ww7vNK6lZr6aUJE1nhlz5ahPQihv312+ezybjVyXesdYQuBOAkLFBOTLDvUoTufFN/ZXT20XUzXHhH3M0STbiMEyNGhtBa3kVd9n4rOtJQ77Xb4RW2pi66CqQt3FJnG6sbcHOCqqsjm+16zc5uXtH9x4461b23X9xO75Z26lZz73/Vkz2R5vn/nOdbjRbI8gAB04f3un76YuOKJRSYUPo5JqaEpiHxgG6x8iQ5ZsloGJNBms03c0sNYORt4mTmpOnlQk9ZIzax9kVcgi0PXjfSrDvO/mFlZhwzzPXBXme3rv6Mz5nYj5eH47NXsTmDW3nvT7f2Xr4NPbulk5S2KtaXUWsFSn5pzrm5yWnWdPRDkW86NUOO+gin171NC9W5YboJ1y+iG+tVhWk2LxZmzeqUZFmWJGcOiB0GlGUENTGiIXte+7edfO+65NXT8Q2pzzIXgzSbnrlquDvT2uQ2YBzSEEfWDL+aCR6WlZP3mCJ1LttW1k17SqOpttNk3DDgW079uj48VkssnOuxCatu26OBlNc+oFCVlBPSpgxq2t8p//899PVtX1pO/jAMV0KeKJbZnzaHpvuffsDDj46tUb14vNp8vtS5Z7AOrNrwwF0GFC7bN1zteO6Pb1G+PJ9kNXHrv21tux7QC9GBpSE2PKwq5IZm1KhpQEo6ioid2/bxHZ8tBTh6RGpVodfv+Pf/iFLxy/cyseLo7bfPuwOfjOD278zh+8+pv/9vtf+taP/s3vfvmFF94o6hF7yNqS11W7AuRAVdOvNrc3/uSPv/Dh9777/O7u4f5yiAhXVRMFNRMFAAZ0SCct/JBOnnLfWRYQPYlaspMjn9fPsliW050+Mg3ZHQB4wmT5M31oqYXT/KlP/sw//cd/cHzcZ+B6cubLX/3e408+9KlPvb8I9PYrh9//5usgWtd14d1QBJkRmE4oIUhECvJAeNaw+1lfJ0RueHF4ImLMDIhtA8fHe6XDKxfOaDzMTVcAA/mLFx8HqIswVhgmmSqShv10llMqDiNi1zUPLnLB103TnTlz/nN/+A3pllUhoEbkTBJmQPTEoMZtk1PMaEDMFKo+y2q1yDmbaR/bzc1NM8s5n2bXnB6rB1v1B1958D1DZw1q2XKYVhkSquW1+fN9U5rhk7MpGCE4FZZMA0V1vriX5HigyjjHOefTlcPMollSSSnlnPNgBUGQkxgHJUdEuQfJLitpRu1yVYaUMhE5dn0bzawqKhOyZWMSlguPqxIjAjNhURvPSje2YAkr5q1qugzTH/ZxL0UREcs+wNjxFoeRkjfWLL4Oz2w7CPKtlCeefNuGot4N6AvwJYeCdgt/JupDFmYGgWwUJhUVTpVSorIce6gg9gYyT8u91cGb129vb1/curjVNPeevHIpxlxU57am8PIbz/7W167F2aVJRdgtrDQqWnTY8JhsSxy03K005ehjdLFXTbE1cTi4yKlkywLDLcKA4yCjQis2D2BKKbuUfNf7jB0iknpOAbVw4orM4zT69Q/9wmrvJR/g5p0fzo9XQtZykwsdeRjBwZVz/SHevRWXi8ODacgyX66OHiv3/+KF7sMX2HwDylZspabzIOrIZZWcwDvqj81blUxj21WhkIzacJfAmv6ZD2wkvmMEpYX959XF2rwBmgMzNlUwzYSAioxONYNlkz42x6vlYWyWse9zH3NKXdO2q2Wfuyxd5FzNRjkmci7nTPfdB/AkSe7PXND//Y6eAEVkfrzc3d0FgJwzO0wWD46PFDgUIxccACzmK8/BgbCIaYqKnWZ1UE/dnZtvf+53vrZ75kLO6r3POYfg0LERikhNk+W9W0m6qMXBkTZpcu7S+3LOLptkdUWJwESWMaPD2ldO/dUrD9+6dWuxv7p05sqTTzzxoxd+OCmn7J0Os9Lgk4qqhhDMTA0ygD4YD21kQKNiCIz1oE1dT7/xrYPX37El3MnzV/Phm6vrr/QHr5vcLEfN8fHhC99/a3f70tWrV1PfxdRVZdCsBRWUAyI652ej3R/+6Y8+9ZOfuHd05IupZlEQIGQfkNgAB6B/MGEeYO6U4+CdgmjwZyv7sBLQMC0a3A/AFM0IgQkdk2MlVMIBTmHvkE9/YLdq5lvb4+2zxcVHx5tn4LEnzj39noe/+NlvdMvlU4/vPnLpwmL/+Ct//OVnv/nawf6qHk9EpOk7VT1ZNuCk7R38i9YbBgVUsAEbHAraemHwLknOaMJG1XiRdLK78f6Pfmj/eHG8WJKHcxfPhbJYNitEQY5IGVTJ8fBLyfmYdSCohBAMJOXeuaACTTvfmp25fX359a989+x2mZOWZalRnTin5MWp6irqsusNlMC8D0rctb223XCcEW1jY7parWTgkZ4oclVPzOLhxyeo8GchSkUgg9xHF3wxqZnZO0fsT2Cc++GrJ+uiQ2Q5dXcAGL4JgRFaCEFVjRARxUxhsBpDSfm062JmRGbLLeduLBFTmWyaZMToS1DNRDT4eDt0Hst+kUuczopp4Xzh69hbUdLmZrk9c1e2/aSkQEbMwK5ZthDckcFeJ+PEm4JbhHVB6rWFlAvEzWrlu4u7/Jcvzl4N7kZg2PZ+VKYgG4RksRg7KKwsfeFsVpee4YDzyufzl2ef+PDDrksdsgeXYy4fPX8Zm7ttxsl0pO3BZDa9eXtvawZtbDZ2N9PB3dnOWelTbL2Qme6IuYjHHrxCFhy53HNAMSWfVdFxLagOlJgBaGDxMnoP6nFgjmFGiAZRQEFNUbJgyhUXCM4YBSgaQiqKfuPXf+avvnPnTyr5hf/xbzz2v/nP/qO5e3S2/bgd7+0t+nPTSxfqcR9tvy1TXfZe61RRJm3Nbn14NNpt4btuc297u27vpgn6PiUKnKOEgMv9NvXCHp0D8FCzpD0KflTdgHf+sNnKm23s61Asb7T52AEL+dzP1U0yqpllh4NPmbOMRBbFmFKKqmyWk0pGUYQ8uNMWPgjbeHu62N/zNMomFXq4n5dteBIv9mBZf/ASHxIqiFzbthsbG7PZ7PDocDoZiwiY3ds/3Noad/0iVF6Ttcu2Kot2uTKCsqhInPTHwPVDD2//9u98/lM///6yGqmmnBOzQ7Sco/desG17ufT4r7bCFzyNt29X9VbOgagDE4i5RMpC2QcS3+x1y+XBpcvj1Wr14g9evDm9tXO27PrmD373D37iZz66sTHtuq5t28I7IkYFRDI
iMsUBnTv5sQDQ9o7L0OtyOh0f7LVvvPG6KzHHhOMqrXoEKUeluLB3fO/Rqzt/8ed/yZstV/sIUMBEOvYU2KnEFHO/u3P+83/4Nevtmfe969qdO+brgH0EPgmVJkRAJBFJmgvH09kk5xxjrOtaxRbHc5FEjtfyLgZEGsT+AMR435PyREpKigiEYoag6wxAUQIkosxORLrm+Kd+6mOSe6dtczQ/uH18yMfTzfFLL+6dPX+5SUtHxeNXz3/761967IMfOHd2p6omTdNJzs45Q3SIxkSDdQ2A0alOlYjc4EE/oEAxKQwYjihgH/syBJfiyjv3yKWHsqD5nBMExjCpYhdTBIJAlFPORVHEmAczAO997BpG8swppdwbE6XUTScP/7N//Juqml3bNEp5AaKFedWkhE1u5ituujiknTjnElCOEVWccyl3o9FIwZq2bdvOOXcKngwxiXRiawwwRHGdvrK+EcxscOq3nF3tjCmn6MAZAaA6IhOBtYv/OhfXRBUVfUZCM21jX1cbZdg6jm/nnAdYZhAT2xB0rqhZCLAqQpbYdR2xlUXI5CjDCDn10GZdsWLmSpzzNBgnAABTYAx9p13q7wEfHxz3t9KNa0hQ63IhgPcK200peWqbfuSqmYNRc1iJbRW7s0mRFkfVskvOMITKhZRMu2XbUdbwgUK+elt+eyK/MQW1g2RlyrHwvmwhJ2w058DtclUzL22VUjq6a0TkLsy2D/oFRdpi6o+Ot3brtmn6tmPAyXi2Odm4c++142b80OXz31n8YF+5BFPHRoyQHPQFshmqDaSH2mdBRFVgx8ksGAKyt84Nxl4FNn2MxiK5BwuiPYLVQdree5/INHCUSWcONRbM2ooDTxX0/p3/4P/+f7340FPff/Gb7/7TyS9/4if/uz99zRr4T//9v/OjN//R2+/c2l/ODg6P33791tEhZozj8c54oz67s1PWsV8+VKbZdv3KxdnLz2uDuCEQs6AvSVJcvFYyM0hMpcecFGFl/c7+2Ve+vBeXHMqQclJrbdo3B+2MNgoIC+8wIVoDLFG9ASE7AOxTArSEPoaESVM/FyoBYlBGIsOOjgOU4LbrnqxKEMqi7fqCynXDDmxqRDjsEx8o7muDwMFcUCQPd/JqtdrZPnN8tIBMjCC5Xa38xuZ0Y2Pz8N7tjLBsm2K6nSU6wJw6RAIsEZwqJrj3T/7xN/+X//GvvPX6C6NqLJBEu8K73JtYvvTQh9o+jks2lDOXH84xEUVVcj5kBEEmdNTJ1vbo97/0uf07sL1VHBzcG9er/XvF2+/U+wfHzuNXvvTts2fPv//9z0zGZZZORAzQMaMKM0vOhA7QmSgzEVBkZIhs6Ll++52Xm/nx7vnLc12AL5VlMqsj92S1LvQj73lKpbl3eFSGCtSIBTAJiKHLrLNx/c4bb/5f/s//xZVLV47myuwVtFN2BoSkAkQMRAqQLTnn/uk/+eOn3vV0KNWHVXdUOB49/t5tLouuS47RM4lmMNHBlApNvAM1UmMbghQHKREkNUI2FQVVVWJSMEZTNGWyhuToxsHtN27dWN6+fnj14qNntupx7M/thBt3Xvvk+9/3u5/5ht/Ye997dr/7nZdfq158+iMf2Nq5EJdLROtUoBgVIin3RILsVAAUU+4nk9Hdg+Nbt+984AMfuHfvHhKMRrVIyqLBj2/f3d/YqPq+ZUerdrk521ot2xx7j8Xx0TLFjhk3tzeSGHIFbXf73v758+cXq+WoHt29vRfYj0ZV7EUxAeU+d5ONne899+p3vv3ymbPl4jhihgQQBeaSPfDR6siN6yZlIGSzcjRlVx8dzpumUUSFHCU9tHu1b/t21a9NjmF9qZ8kI/4Y8n4/d54dmlmK0ZuRjRrrtndnycRDQZY6M0LISohkJoUj8EzAGSB7cpyNOUJgMO9Wq+4gxK2iHA++TzZIwNUYIRMVppocWFCIvuA+pzq4nLDwyVMZSTQU1SR0CgjSaU6mANpEIYNMXRFKUiFl7iAtysUhNH32IY+B93NzpWt3RqOjpJl5i3AT9fIj9c6ZEG806WjlOSWHhszoIPicY4zqxr5aySrHv3DO/39X927whcdgmcFqKsQylGho1KpvcWTes5v0MSuXVdXG3j127sxzL13HagcxUd9UcnZzV199860rFy5X3B3nFW3gnTvX7Vb9vg8+8+VXbkE1zX0iBQfoAIlITJWHfBtcW2WgIhEDEDFi06Ux18FBQ21/cXbGB8qaKqTc2/z4AJLFYiKZVrk5JikTxByRtUuJqMhZHVgE8qPl9etH5SX6+1/+rbOT8ZMXtt956+2v/eny7/4P/6v/6d/91Gf/YNV1JbFompq/p1ZlGJUjPnv+4rvf+57Z5jQvH59ttlFfVeeV+j4mcuxCuHnnHrqi6WJdOM2JDHarM4dvd6ujXBYjcYY5m8ZZmFa6wUqineMEtuoTCRTkRo4KDiNClKaTHAPlNgEaefHWZFeUZoKGYow+ZEmj2XTnyvnDN/eq0ainvJa3nXQoQ/8FJ67WDzxMVcz0VOg0n88nk8lkMmmW86qqEDhLPDw8PLO7wa5UjX3fdk07q2fz48O6rM2si72hsXfnzu788ec/88u//MGzZzaWy8ZxIEw5KzOmFM2SmhqUmlGACEoEKxzFlF3wzJAkAkCKi6ffc/lfPftvAj92ZmdbTcpyI2PauTjpoqSD46+/8IN71177wEc+sLG1RcTZFECSgUMkg8CIFoVM0ITJQYFiKTG6osnNaDoCZNHkuuTZxT7PNqaaC/KEjNJn7z0AOOfWPDamnLMp9p2OxuNz5zcvXtnq48LMUhL2pUqPiEqmoEPLxsxVVf3cL/7kP/knvzOdbPzSrzzVy5vNkl78EV698tT22YvtcpWHvQXhiaOMQn6gkTxp9xCR19SnIWwEQA0RDAwTlMT98vjo5ss725Ppzpnp1ealV+7c/BGOHQTPFU5uXW9m4+3nvvej8v0P353fGsXqtW9996mnr8wefqLrsAShNM8QiBkGWzE0dDiwf27dvLtYNHdvHb786puEvDGbLReN9z6le1Xtj+fXkWw23rp+fW9SN8fHx2fOTd/11MOf/+Pf3tra+PCHP/z1r3/10qWH7t47Oru7eevW/vHhMGY8XK3mjz5yUSQjIxoKtCZWhzO/8y/+7WgckSB1iZlTFsecBA6OjzgUZhb7lpHYB+dC16Uu9pKyqmTrZ5MZIq5Wy+Ho5ZzxxMn4x2D3U+/fU/QcjFVEFcizSPJVKKfjVc7snWMfUyIVSzkUpQA2y2XbuVxQVkMb1AjmkLwnBGeIYpoJgKksCjZ1sBaER80BFcgpmCpUwfU5mWLf99OyjH1fjEN0ZCZEZGTZGTnO0NZj732ZusgoG7Mwv5sm9TjNaNNhP0IkOO/98WFzbrq7ycfc485ksiHLzREjFK++uBgtx2NK5EdoGmPqXA5GaEYOS085iM/8WOf+ZhzX7aLdDDtdOKAeGc2ZiYXaOy5C4qSpgCIEi7EdlZ6+9OIr4M+QIKrfmO0eH7Qb4/GocLFvxuwmoZ4W0w8+8/hoNBlPNsYOA8DY+Z
FzdfDsnDrWMuTgxbKonv6TVLJpNlm2CpRiPG6jdVa1GVIfg4Rtmly6eO6RRx5+9MrVSxc2MrVlWc7cuA6hKjwQGnMyQHKpVxJqJI9nHDrLNWIH+8t45qnqH/3bv3f27Cf+2b+M2U/KLVdujqotqOoL9WgyrbAEeuflV/7gd/715/7wczff4ItnngyldL0weeecZum7dLxY9REMuED0vlBVF3n+TgYpjKnTLClsjs6ubvKzn7sVZCsUs64xh0XOGcypeeaK/Lga7Y63L26duRLqisXYuDVUD5ayR0qm49H2xtmdyWyzHE22rpzNrB5I2U5jCh6cJj2IxvwY+HiKz7Rtu1gszp7bXU+TxFLfr1bLvpPJdJudI4L50bFm8xAkiuYcnHcEKinFZnM7/H/+/j+bTC+YCTGYOkLO0juuAByzT0mYvQpkTW1cpdQF50BNVRmhKIr5cfvUEx968l0XilEsR7RoFvPuXp+6HP2o2uLR+JM//fHRdjg4mjNPTLEugkb2xpAMfdmbRWBBF/wIEjtDhGI02Tiar/YPe1eM9o8OY9atyTgUDojYQrNc1tNi98wmJPO+WKeQkwHoIIYiomwA7DZ3qv/tf/ofiRgYOk8mgI5lbURl3q8Jcymlnd3u1/7a5aeeqv/VP/7Bqz/aMb955bHLL7/63EsvvTSZjlUy2tCrOAATEBQltZOMbtN1LOF6JSYD1DWrdeBLAqgaZ6e8Xe43R5IOL03dhy6erUO63fTXjuJrt5uvvHjNT6t3P7Q75eqxJx+6uHv+4I5/7rs33372exPogithkPEaxAymTOiixFCV+/MjkXT27PaLL/3g4SvnL186T4TL1dwRVRPb2K4PDw93Ns9fv3ZH/n90/XewZlt2H4atsPc+53zx5s798nsz82YwwCAMAglQBAhRDIIgQyyTRdO2JFOWrbJll8oS/YdZZSuUlUBSBqvAAAEUk0WQSBwiDQBiMMiTE95782Ln7pu+eM7ZYa3lP87tnsZIvnWrq+93v+p7+/v2Xnvt3/oF0Qcn91589eZ2u+n69eHh7tWrl7/82peHIlt5Pj1ZTsaz8bhuu2Ufz1NZq2Z2phaLQteWq1eu/9LP/+57t96a79Y5RTdEzZiUlE0VPY9n49JHEzVDxx7Qb7pusI4ZlvHh4WG7XXddR0NWAdEw7XgsQGWiQSf8tbL+9OIfJgpGLJrrWV3o4nyNJTskAmyquu97BWXn0DE6BgIsSsA8TP7MStbAoR6NNPaxb5PmTksmyCqgWgEBEGABAFM2EAbQArX3iYTQgiLEAsTCCKpjoiBEPVJ2qK6P+ugsJgydxQerbSp86+H6zHKI66tZNaWHpS9Rq4KbdfsI+LVze/0rsVtU9219ty/3ktzepONO1j1sFrmshFuyrdrC1idny2vH7/uL1c2/1Ox8pD6hPvWS29yve+ml9CWut3nbl7bHPmLsPail5GaViSXPkEjfa89nMDvs9eru5eevHfWr24H6R49ssh8eLU6WqdsZ7x73xXkXSyGEYoUKePQSe2VDGJiBOGREGoCZInkH0eVCnhPFVXaTarciuXr9+dHeGK0cjMu9e184XrQPWgtWCSsRO2EUEUDvXDFl8p6gjaUG9y271z7/5meeefbZz/zS7dc/ed9X+zu7QUxVlygE2RU7cTYH3lgejacUU3l098HP3/nYH/ueV28c7R4/2BJjcOScL1mRKGXTJPUcBbCAbdYdJgghCFjuZeoZLTql9qG4GbNTI4w5eU3OkmhW0z5FMBZyFOq6mezs0vHpWfEUbY04cfW4YSJ0SaJaOTk/c1C4htTHqqlym54Q3Z6aBf0h8sCTx4cB4DDZJ6Lz8/Od3Wf293cXi8UozNFKSXmx3Fy5fOirIOr7Ni+Xy0sHh6vluWMqKsBkomY2n/nXX3vjn//s7/7ZH/zwe7feaNwBUQbGUtQ5llKG8qdWxqOmafbidhtLBiRHIZceHbIPDx/0f+LP/sVcWu+a7bYfjb1lQHMAxY9qIkcEBqn0G8Q6J2OfnQZBUDL2NSpKlpyIcSIlA7t60nz+M1959EhmOweCZ92Gc5Kq8hA5ChWQl1+6UVduowLgBt0KwDAPQybXdfGZ527+7R/9sVc//GqofS5kJhQKu8HPa/BqGcqHDM372btnY+i+5zuam1evfewX79y+P//Sl/pXP3jtM597fdTsPHPjUte3hJYlM/NgK3PRTj5O0bo4ni9swtDMGEnNBjsDIyuQqfLcjuoK43K5aB/lXL8yxYNxfXC0T5bU+92GXds9vPPoq49O/sh3fCRrvP3WsWxsefpb7//oq3VzlEpf+5GkAfbMFTdSACy8+v73OeeODnaHU2VnPjs8GJmZG6EKfcM3vO/00aPnnttNkur6KrO/dLTXtcuX3/cMEI3nYbs3mjSTuobJeL5pN9NpvX/07KNHD186uJH7KJLEHKLOpjtnJ/Ff/NwvXLk6soJsAUyKCjKBQSmxmY7FdLvdhlAjex9GbRc367aUwoi5xP39Xe/cw9XqyTzpYtR0kTdFT632r2cTDHZAqhZ8JUUMymx/HlGrqipFuyQVgIGpSlWFvu998AOPvmHnnEPSgfPjXMiFS7KSUnCeiJAHc6MLATIhMjnVjEP6ZclV8N0m783GBYuxE9OiBkagSAoVMgSOlqnyYsWRgxz6xXbip+tE/cMuJ2cxvlq7myiHNRAX8qFLvYJUVgcm30BwhtlCLQ1h9gqVq8CZGSANTLvNuHv//+aG3pRbv7KBXxM5f+SaKXJlxlyMmQ3RTDx7JJ8jGYgnNVACHSn5VgIwWuqv717+ge//nkcP7774yodf+aYPfO6tLz1s2z+4fXupZdmXs9Vq0/fbGHMpAgZMVDlEIFTDQfcBavbY7wjMrGJmBgohcL3fjOahxHjeinzqK5/5tV/89Ouff/jgThyPXqx1H4rQOBOoA22CRxXHmCUlKFvpl6mKOSxS7OPxN3/wuc/+yqfv/c6dvWYyrrmPD6x0mGb9WnPpStSUt7FwVs4FELnx01FN//JXPnN+3DbjCRimlEzKQDBwjI5xVFcni5V5l6xkVaMsolz8eOQKdkxp7Ku6EuENe9o/2hctiMAoZrF0y+3q4fr83urkXrfc9oQW6H1Xrn/zczeixrWJZWg3i7PlwxxbKKVonh3Oe+kcuqF5+bpW5UnD8uS6+nSVH0r8QDM4Pz+/dOmKmZmJ85RS2mza5Wrrq5FzwXvf9tsuddWkjlqMKZaMzjtf5dTdeObo7/34//fh/TSd7CLllAqQ97VkjcwegHKOs9ns1nuPfvM3vnB61hO7etzcu3cONvGhpmCjmTdwBrM+22qTjh+g4a5r/GQ+mTj2GV1hEnYMxMXXvmqaxMq1q3wobYsq3qGrWYICgKv0dLl+7fX7h1euotO9vb2qqcNszIPJ+mRi3g7255qiYgZFBEa8UKJdqKKAmOrPf+HL3/cn/tXjs3Nk5+sGAFTL4yPgD9+BFBHXGHX78PRD7+MXXlrtHqUw9W/d6tjv3757wqEqJSFa5TwroCISA134DA/kJTQgeSpBYngEEA3Qg
IGkpKYeOZxgBI7qadyDzGfN3tg/uvOgW9r2uLzz3vLNByve3zs42v/l3/itUYPv/9B+88zk9ml558sPat9X3uXYmmYEQVBTRbWmqku2GFMINZNnRHY4HjfVqDIZRj1w7crVuqr25nsAFLtUV9P1qicMIuhcvbNzUFXV/v4+ubS/N2P2oNXlo+dB66aeqSLzuOu2h4eH/+gnfolJPIeUSlV7AxpEnkQkgEZ0vlx7XxWDqhmp8XKziTF6doPgdmdn7/z8fGjSB3v3i4b9cXCVXqTDPz47/3BxH0akg8Kr2anqeeNcAABgqpvRhQUYmWrxjiRlAPLee+8FTJDUEE0UAdVBNlBVR74KVQhOgcVq55E4A9LAhwIYohMdsSqk1DvlYtA60sqxYMjokFoUMcUqF1sVyMzoXUEomt28Go1prOo+tH/08qhBl6/tTXea8Qs7k2cdve9gb3cU1tIua4hV1YwnzrjBsBtGO1xVhg4QPEeT0XX3/h88fPDVu3d/+Cz/JrotN82hea4aDjWDA0FRTOoKelVM1QSqsWbfzq4FShgrxdqobGON1b3t4r/6yZ9bjac/+clf/Xsf+6Tqs9/64ed26jNfMPixVM47xwYeEbPkmIroKvdSOSBUMFEVGSzuDAHQQIySkjmLXbq2/9x3f8tHd+qcVqcfePHlP/Onvuvmc/Tu6s1f/8ofcKUHTd5u8zp1XU6pRGJAUybwDEymuOr6zXkOaXr1/u/mL/9GcvVhrEZtvyWbglGXF1hBNi5YC1FJHBwQNiKSJEXpXV2dr2DRtuqJK6/OKaMydnE7ndURdNtFBddJzGQFpPRaEY6rkYEz5KaeAgCCk8SrZdcbKKISqvVoa9ZlKCtsT/sYu/UKurSD9SuXLulmrVZyKcEZAWjJVEqJqZ425C+8CeFxIvDT3ICnK/sAypvZUNMHyHj47mKxUNVLl692qS+m7LDkbrNdsaub0ZSdM4Tj0xPv/WOlqwO1nMWDrxusJ+Vv/sg/ONh/Yd0tgq9VMJbsXa3iwJyZxdTu7e0sl0vAoEj37j/65Cc+9fnP3n50T9786oP1Zvkvf/nXV+cn+zvT27feNYR7D5ef+fytN947/+wXbr93/+HZpv/qm2ebbRVFN2335htnALP799enp9tmMh8U/2YCVsxhNR79/qfeJneIXo2jggOGxXo1Go2GJ43G48061dVUwDxhKYXYOxeYvCma2aUrl3/h5391NBo9//zzm3btKhBTIk8YLqbWyDBUpUFdXEq0Ldcu9ePNcnP14HIoB9NqbzI6Ah7df3QeU6nreig9pahzQcDEQAxMgYwQ6OmAkadRtcGKgDIFdr2WsLeXKxeL5LWOdXS+oQcnbeTR7fPtV+88fO9h28HuG7eO3Wj2/lc/cHyev/LW8sGy3b129fOvv/sLP/eb/aZjArU+l61INhOVjJKQilmRCyeJGsxt+6iCjkNJvUjuYgSou1ZRgSnFXryvci5StGTJfdpuu7ZtVWy73W63277vc84x5nabTULbra5cvfTLP/+FT/3el3d2J6vNUiS3fcpqplhybmOqJ+M+5lIUyTkfnK/Wbdu1EVABLEva3dszxfVqOxyxMFB4h/jTi5frD+VyPHkNn7yeIoUAASxqmV3azaAGLEYI5L0nx4OlR/DeswvkG1drVsllcE8CHoRMzvvgyNV1LSnnmEAuSK+KF3KHnITMOSSRYopZSt1wLooFCBClgJoBCFI0U6JJXVmC4GoGUy1h1CSBbczn6/V7p8tFtuXJ4t7Z5s6ihWX54un6k7fPFxtYnizLdjMnB2t/7156+3x7P7t3o570uu1tnUtMYpucerl3u3/tRx+s/6WnPLVg5FIPMS/8ZrVdLzfduus3fd+l2OeUCipLZ6k17Hn9sCP11oFETm4n5Kne1zdPtqvYPXr99hubMtK4/+Lu7jc+e3RYaciIjvu+M1O1YgTMHpQxAXVfA46HIoVGiOiQChTEsSSsJ/rOnbc++RtfPJg+869993e8cKVenH3mvXc/vVpGtbEKOnTOvPeBnI+i5nwBBVBUoVJe2r80qmlnd/UbP/WJH/8nX5qMD4SKSAI1JgIpVGoQzrknCwxGbIDZTIic2CampJg4+MVik5MpUCmaciGiUVNdPtg926y9r6BIyVkNxDj22ky49G1RAJK+bxGNED2RSa8IiiAgRZNCEUlmYlKKdrWp9qnN0qc4HgZApe/7vhTLOVsu1qmgup2w3qyqqoLHab9fh7x/Heb4pKOxx/6RIiLFTs8W+/v7vqacc2AHKH3fbTadr2cuBM8B1DbLzWw8yV0fmFBKxQSKm/Vy/3D0qc9++p//zCeef/ZDbWoVlWmUytAugfe+67rROFy9dpjy+u23bt2+ff87/8i3zufV5z77pfUi/cEX3nzfyx8psTShuXR4sLtHm/WDnZ2dzXYlo/rhJv3e579ycPXyb3/6Myk2x4/6bde++cadr7557907D4DrzSYF35gACU6mO7/1228vl83OznWEGqFerzaHh5cu7ey1m9V4PLac0fDdWydvvns8nx8NvupEzBSAgyoActM0H/sXP0MsRM65cSqZGQE9MpkZwoV7VMmqAh597QNTQHLGTSrNlasHzz9fX7vWVGHz6ofeR2wpJQVCcIhMDo3EEMwM1S48whAKmhAokhEDELIbNEYX71oAUA5Avhr52UGpXZg2zle7e6O9vd3d2XTa0NF88syVSyBlOp3cvvfo1nvt3XNZQ1iehjdeP967dnOxgd/8xOeZRmIAxGqcCzoXSkmAhViJIYTQddHMvPdqUjIjV1yhUh9l5QOaosORDyyWAQRNGVFEvPehqnJCpso5QtKuXxEbeRdjBMztOvwPP/6xnT3ftm3OXdvl5arbtl273cY+K6HxIAXimMtkupsKrLebooIIMXWhcvV0vFqtzZ4Q9mFoU54Eszy5qT5BvZ5GKQeCewghpVSP63p/Kmx1NWmmM9dUqlDQ5CLKy85PTr/xg9/4yvMv5VjqUHtyRM6h8977umpGs1K0zdGTBzVkL2bGVEpRETYwsyxghAAqBmrmasLgEoAD9CKUY0ERZ1JshK6wJKWcvaErknKbPPrUtm2XS3Rxm8Bk3caxVZchnPQd1uqraqvVtqthmwnWxJuKtMO8yttFXq9k07nUu9RqK1RqLeFwZGOM0oqmTd8TETRK5Bx6z1VwIyIPwKKowjHGksRXk7YFGhnTCMc7R5Rls1726y5YyOjnPEXK4xmKjX11+fLlan3ycEKjVy9ff+HyJTaNOfWqUS1U48GT+kl5ImAcHMnNRi5o6prRNIzG1RQNcXf23Hd+9E99+H3T5erNhyW+cXb21qO33zhbPmzrKcEMKpcMRYuKOIomEJyxfeX+O0fXdk5/7ezW75xdm026rpNEljZUMXtCz33etH3v60axRwjJYgEWLDGBmXlqLKnlUqFP26ixOOSKgvTp0u6utN1y23piS8UhlQIpYhaspw6klGxikFKZTGaakQmqCkkLaCYwYgCmKNILdso1ikgWT8XVibEDKSVWZEQE5gAI1NBICUY7I6aL
yd6w6J8+HZ/IJofFLaIAyOxEZPDOBUDnvKput9u+T/tH+0WklMJgXbc9OVuYYT0eBfQOeH22qJ2fTaaWcyAilRCCSClFn33+8O/+2N97cCdOp2ODlCWxE6VeNKvCKOyUiKNmvLPXdOv+xrWbzSRO95cvvW929crOCy8+++D8rd2jg9V2VXmEjDcvH5X+7MalS5d3Js9d3X/x2aNb773+LR/58GtffaNY5kqb/dHu0ezocI9LmlQ+9p0hhMn41u3TR/fqw4OXVeFo/+VxfW0y2cnFStvu7e1Np+MAMK0nfYR3b50EPxaRqqoGMgwYMntVPT47/Y/+b//+//bf/gsPH52YhqaeGhAzq0XnBoE+AlDgEDioatd1To1wPN8/AD6Y7OzuXxqvzsLuzjPb7enVa0ebzYbIi7ECYXBJMwE6JH7cqiuSAclj6c3jekSPLwrYu56dM9GSoZoduskslhXkNqZO/WS5hYenG22mD87a8/PuwfGWbH6yWXVtYvFb27gG77z3yI333/nqyc9/7BOzyWHKaFA7mvStea7BHFgA8yLgnNeBPsjEDtRApAIYs6/LMMAUKpqJCICY2RQd1ypYinkfcjGwyjRUYaaCarlAvHHjmR/5az9JuK0aKSWajAAdOVQYpJwymc9Pzk5zFiIaTcaKtNqsY5+LJEZTLTs7O0TQti0AqJqqDbromCMTD8qHJ/dUu7B2YXysycILgpD6wEn6a8/e5HFwdVXXo6oZhaoJIXDlFKwe1yml2WS6Ol+cnZw3oRYRFEO7UICrqoAVMQMoBOR8xY6yeDEGRO+E0XkqRVWVHKoheE6aByPTKAU8csUoqQH0BizqJAfUxjODN2RqSjVXP+F6VqmUmaP9CV06mE4Pqr19vrFTPayq+W6oQ8aJ03FDlGejMGkm82Z0ZTLbnc1c4xsfmmrkwngnzEf1gatCMxqFyWgyrV0VLAFBLlwsKFRYWCQQT4IE2lqfKtJx6FCtrpyvq4nH5dmDNgoE8b4WKwSlp1jblXU6/cI7dbte7M1e/O5vn3/6ndurXo4fPOx9Ley9Jm/bUjBAjVrUCMETcModN1XK6swpMqujyElUcvvC7nx/pg8fvLY5PutherI5y1iDo7EwAG1pbRIEjLDKfe/QPPjNZiuOnjsY//6/eOtznzw+mh7E1AVPXYp1HbZZmW29Of9X/9x3XLp8+ON/42dmk92tFEQug5keomlllhFBQfyoatsNi2uCy9KNJiM/Hr/x5T8Y7czXpXMBKDtmOIv97mi8D+G0bD3yNskaIQOBpggxlkpECMaleICCRWvkbNGMRdEcZYMa4zJJAp1RgyUhUYUKDMXEUFWhGY1tfp6Wm9FkfL7e+iyeoEAxJDR6Qoh8TPUdqrwQUc55WKyl5AH2PT89u3T98qJepZRGrq4Jct8eH58cXrokdZSIBeju8eLoYEa5c1XoYzHtqzArKY7rsjMb/Vc//Lf+q//2/5TaFWKFmhEFiQuQQGug168fJEsf+egHQFV1MkCcpSTn3LcffXMsue/TlSvXHDFP/f7+s6oK4BFtb3o0yCz3v/k5RQI7CKHWG9M+QStbZHY8Lro10898+T418+XqUYHcpbvb/g4ZeTfJzd5J7EajONkdc9HmsIpdFiRs6oJgAK6pSp+IvWcfYx5Pr7hR2Ur2DZVSGJ2oKk1YvfOl5MwMAmZGIYTT8zcRYz3Z74CTpJm3GzvCafGlN0I1HV+5trtqu6PDa5LOADuwiVIgsFKkcv6JapQYHTuORQGMTEABB7CdQdDpzLSwgeciCEfXXr2f387LJRi7TesMRqNJyd1oFMw7bLemOvJBCISgDrtSEMg9eHh689pLX/z8p1KWH/o3//Ty/KE58CH0QhWhqoENqwYVdMiEUtFqMFjWQSEEiiqYnnjomhmiErIqipBSRg8i6LgryfswWncPn7353M/85Guf+ezvv/Ts1Ry3TdOst1vvvBbMmI1xNJpslhsQn8G8d5PpfLXJq8WqSGKmNvWj8XQ+2z05ORliuQaGERGVUpjQQEzRyIgYLzJUtZQCAOCcAwzECTRqmYZRv21d8H4seZWKn7ScWcR5nxxip84kxkwYcAy3H713YzLZmXuxbORcidBMo7lpM69pHzAH4POUSilSWXGhB80QHGqxjKROFAQzslOVWNgTmnjxLmExSiAqVSmbDuuiWTwgcKGeG5OO1utRSUtJY1nnZbLWxK/dcbd5/mD+sF+juU93ebe29yeNeVsT1E0Ykfexky6Wsse+n5bE1MRQehWLLYsNsyVmBtSSrGiWYlLiuGk42DZ1CEwFnQ/nAmMPWjL0seeKcs6LxaqUUgXn2ZkqqIGYGJXU93F95+Gd8zYfn/fzyQH2cX83PHtt18Oay8abAYXCrnPQWmiZ21DWsDXPoJjUejLjTONgVXn26Mqf++if/r/+uT878g9+6ld++bffunfn2Bw3gRWMe+s62G5zFUV7l1ZxLYa90HFfwmR3Woff+qm3P/0rt/enR32KCZME5royqqdNvdqsP/rHv0Xmp0t4b7Jf5ViCakWB0EwzsAGbkPUg0TjHTMAOSTQj4qZrb9+5x1UjxQVqgjYGfRfjHjYfvH5wHNMmpU2fi3LJxoRV8KAYqDGzYpotG4ihKgIgkmcCBTMHyKaWCyuhqA0ekFKyFlUhMxPNVsb7s04jAHgEEaHgOFT4VBeDjzMK4LGB1BMs8snVdTAOW50vbty4ISJZZeAfbLeb2HWj8Q57R4Rt2/Z9appm27YKAMYikYBT76dzevvdL/7E3/mFa9deybBB8qr0eDZIwY9KUSnUb3K3LVo4R+i2JXaQom26TUpJVBWhiOVsKVsRLEVzgSyaUkmppAJSUBVSl8o2BSBH5Iwtdjuj0dtfvbPqNIxdtFOj8/PFo1E12W6UsAIrJgQp9B0WJaw8+dAmlSwOvAlILhd9mUhwFGMHWtwQZoUEhIjoAIBFrSAaMxMBoLAz0TjamUHxmPO4xqhdsxM++M2TVz6w5Go7no4+99n3ihiAgky1ICsNgVDFAJEFkMiBEZgZO2QC4sEXcgjYUgTSKFY01Il8KgHDzvWXPjh95rq37dHE9kL/jc9fGam2p4vNcuVDSLJatWc5x5SSWlq1S3DVsk0Plg8nO/Nv/MgrfXlUKFHtom2Icxmi2XEgqg3mCBcDGwFTNAUT02EQBoSDYOiJYZFaMRDAYsONxDTL2HwuqT+Y77/19vof//2feP7mjdS3qBb7vnZBYq6YA3l0Xr07X68QoEY3rsbbUhaLhVzIRAsAHB0dLRarkuRpAtjTHIGhSX/CiRwe8d4Pr2MREZFBLFbAZns72XHLTTNtJmNPoYYMFNfmBRFCCMSgCMBODUWM2SsFqoNVpdmp5wdX94/ev2qBawzsCDDnDGZSCgCUnEXEythyI8lpdiAeCpqAw8AO0FWENnFUFTdlx9pzhaYE5jxXUsAKWQE0J0mS+TqOfMeTyrmJezt3p8aHYTIFfCeV+waxxjCZzNFliiUbuHHxZwK6LhwDSyp+1c/Bs0hAJRWTnFLPtTOH1DiqG0m27c3
RuAa30W5j7bwA9NkxWXC/n7eUc1EZyEaCZgSIZkQBoTLLYv3J6qzePXr3wSm75pkrN+7dPY49sFWsjjAwOG9Mgp4cK1jMHsmBg6gjpIqE+jpEqMq4An+4k37vc5/44lsPCKuGd301Ymc1oMvKwXqKiqOtoZfJjCZisLFuvoduFX/lb3721mfbS+OrJSIGCqMmS+lTybkAIjhs5n62W987vrV7MI05moAVASnMKLkMFuKenBYAo8AOQCUlAPVVSCLOVyVtJMPh0c7B/uyDL1/98AsHm8XZnfOHrUErmoSK0mhUl9g7F/quKBgRENEFL2jImAErZAqiqAWsywmDU0Jhc4+HbIjIRIRQVMJ8QiOfUj/xtZj0qZgZoWN2TxNmnhAJni70T0s8Simr1QYRr1671nXdwK3stu3ZySm52lcNB6oqf3J8RujNIOfcpwxApSRQSFGfuXn0cz/3s7/3O3f39i/3uQN0jEHLhaEYkguOBkvh4UrBzFyxDfmlqDgwRIAuSBSuGr4Ec2BOwSF6REZwggDKDGg6UGaL5/DGW/fV0bJdhYZm4xEJmtLlq1eXm8X+7v7udISWxnWlFKJSQkGXmarBgtbhBQeReUhHYgfojEHRkBSQmQO7i5pCOLjmOudi34Hm4heKC8dror4azbYJRN20nm+61nk/3d197Y2353v7AFp5ZNNhHgtGQA6BHYfhSyVW+hpvZxgLDikeoakLqBITQy6tIEz2r12++YF1tsXq/Pz89OrlKwdHl6OVrbTj6tLB7pGvKw4WdRVqirlUVbPM0hV879aDZjwRcr3WClPi2hAHS20EgsfoqIk+ofTooJJFM4LHnjj2pIkedCkARmpRJKN5hACj7DquD370r/+P81kNWDgwBy+qMUd21MZegMejyWa5ceQVQZ2b7uwuTxebdTucMTHHw8NDJurbru+jPUXzvVjPRgj8hDWAX/dhZoRGpGo1exFRNj+risre/uHlvYPv+YZv+tf/tR84eOVD85sfmDeHFAIgEjMzi0IxQK4AkHwxdMTjUE1cVYdqIhq2rRA5VKucZ0AmDyrMxOSjbMRawEiQCIUBUVHEUFJKCbKNVLmzuuhYgQGG6Jiu65AAKe/uVtMJjMfA5BlpPgkHAXcrdN58sLqCj8wn7+VuFaqd4B7B+lHJ7EZ7E6qpH7mQQKuJp9K6IGHsHmpMY59qb+MmMmnwAihqZsieYKd2NZkmqP1055Knuq+BJ7gzcm/G5ivcUEpJABTVUM3ME6uqCoBoLLEradV2t+7cD1X9W5/6zKboqKm2XedCpWyFE7EElGAxW1szjDCwVVmhOFTKIH3nFknLgXPf98oLu97efvTua+tbv3n/jZ/7zO9+5u37jzb2MC66OnYJXakYTicApfRrUK3zzf1m+ZmTX/3RT+Fp3UzGBbO63Pf9ZtliwRrRo7VlhVB/5dPvhjjfd9fuvbNkN1VfvA+lFDIiIwcBilDJDaGYAhMzD9iCFpFiORuhA+SojnR8Y3f3zvGdW2vptrSJGBX7Uuq6RlADQZVctgOda1BHmBkjDSihMhqqQ1DVLibHwQiVUdnYIYENjHZkGnZiczDt+k1dBc++lywKDGhF8H9ChcTH3nVPNsnTrX3s+sXZcm9vbzQex5RE87huNsvlydnZaDyvQkMOmfnB/bOd+QE6I+eQpKk8sZB6yeXqzdFf++G/u16M6rFXgRS1Dp7BwKLAQCRTAxHNakVJzUy1MHsCVoXBGRWAhph5gWFzotFj03NDMxMm9L6YFsBsyjWdLlbb5EeOIcvJyer2wxMNbpva0+P7e/Nx129OHp064/VquW3PsrWIWLnpkIuNiAKIiKKKQ83Fi3TbylXB1+wDAFoRMURyhF4U1VwVRovVkohimqtVTbNPsMt+4utKBEtiKvWdW6v5bPd8FRernitQIWYEI0LnXGD27Gpy3rnAXCG5wRJ9uETTE4P3YEWzCgWYsPjKSd8+enD7nf3nrn/wOz7yvm/7xmXp7y9O1916vrvruOliHE1mk2rireEScmuIDKzkm/H08ue//O46FvIuSwKyPvdPNwE2xIQDqn2thX/SL1/4rcPQRD+9igAAgSgQM6lQWbabF2++/KN/45++d/fWZN60ud3mLpNmUHOEjoupEotB7JKIcAjj6ez45KzfxAt2AEjt/d7ufLPZxJiG0IInP/Hr2AFfxx0Yqv/QNBUwMPIcYsl+XGUHi3axPr93cn58v3340jPzP/r+D1yaXakmO81414XG+8oFb4SAyIxmSigMPPL7s/GVg/3L7WYzbSY7oyPJRTR7JkRyRGY2tCdASI6rKtQVedZQS9UoUE+Vt4Cq2IxmmyzFGIRNjKosEKsmIAeEarWQbsu5rbZnadm1y+0mt1Y2ENb5qoZ8ei7LNC3u18r2rtS8yveKvraSLy+kRMC+wRwkMXdeeyTw3owj5k3ql1237iVh16U+aSqQZRn73PZqCO3iPJ6fVcBQysj8799e/5P29EV2NJ5MihggOs9gmHNGtKyqllxwXdZSJHUJqVhwD9tub77DiJ6J0FKRJE7AcxVqEMvZg7MiimI+FyjOhQb5cFT/sQ+9bLT4R5/++Gfun0If9hxV89loxhm64iexr0cWKqBeIEJp9qvnbrjDTff7//0Xf/un73B1gDtV1F4xeVaHVFODBUopqUTtqXbYL7pf+Aef+5Wf/ApjEEvFfErJiLOYsVuVrXrcWuywzZbQKTgopYAAGXryaJAEmhmt19srR/tffefWG/fa0z510a+2JZaMDnOJzMhIqoV9lgKI/PQAjQERzRt6gaCoKea2d4Bc1IklhgKGZoxYwGSwJcw6OZhiRW2/dc4NVuBPVvmTgv51hf7JE4YO6MnHarVq2/by5culDIp8cM6dnBynJM14h4jQcRFartudnVkqGa2XXIqYYNJiZER+9df/2/9+f/6KkpKXnGUAQtkJgkfwYA7BM1VgzpQJAxQYrE7ogtytInmInH7qP/K133P43cyM2alq04zfee/YVzNDOj09HdfVJIQANB3P+iJuPK7r+tKl/arG7/rOb/ymb35lb7e5evmyJitWgIEIRMTVjbFTcORq7wIBq1yY/2URImIfhmsWOSaigRwvIOTJuxGP/DZh1nHJOHIztNFsf+falVm/LusFn5/H+8envpoBNooAxMSV8xVxYO/ViDgAMtPg4eUYkI0RkZGISLVmqCs0kJUP6v00uN1rl25uVwuE/PIrz/7pH/i+b/zm5196ZedoH6ksSliutw9A1vszmNUyaTTHtt1K7lrEMfrdX/v13xs1Mwe5lJULpApPMA0zG26QRogKw+eg4SWDx66KF36WRhc3GwVSIESfJBqkGPMzzx393E996nd/6wvXbk5L3ni2OoTU9WSAKjFGAKg4HJ+eWwgQgkdvvSzOltuSDIQIY4rXr9/s+7RabgCAnyrcTy9g+JrTgH59H4OoYKUUQss5FpTRfCwoQceywPXx9jOf/ezt21+dkAWqemjEAnGTBXISBVPVopkICKaucpevH+4dXkaY3b1zAgAptU9kU4hYsjhPIp
JTQahSRlEv6vtkOZMCS4EIxDXHAtjMVgRbz+vEqmQoAsBVbUC9lMVyue1izJnActuT0Rp0SYSuGovcRCqlvFIHZfuph4/i+PA5U4urOxA+1eO7vpsRh6ZtDvLEVX2XB/0EERVJTeW9I8/kwMiUZU5Rpyk1MXNDo2moreyG0acX6SeMP+j0e7F3MCSYkDMTzyHGyBU6dgjZzNBCqAajjKiaTtfdUkc9kCOacFj3CubYi3E3MX+eY3LGjhxqSlJRlTp4//jo3/ye79rKnZ/45Mc3XT2tdrrc9eA9bAVp4khSCzxddstq5A7Hc4jl9K2T3/+1+/de76b15PDAtnHrXO20oqEvUxQtIVQFLVQj6deMTovkrGCVCjryRTZmAYy7EhW6a89em8zGXHGW/Ojtezn1zA2FKucsRUXMBMbzmqtqGuRs+eDtR+tMI5+LOagoNA070u12M51Oh8Y/SqeD1FwvlCxZh4LF5SLEBrtctqUge4AhzgFBLxa3GJipR+8YobLZ5d3le2ezeu6RSinkMISQtTzRNz3d9TxNLRjq+xNrmlLK+eni6PKlw4ODxWJhReq6Niv379578cUXbLq3PD/3dVgul/Vovn8wO3tw4lxxVSglgnrt/f4le+ONL/3Yj/7C/+7/+H23br02CnOwxOxLiWSOycPA22EiJDMxYwSiAe0YtivqwANkxgF5NqPHJxYAgAeyIkzI7AlVhW+/d5ZSU2DlvcR22fiRebdZb+c7YxeyN3v5lcvzWQO6vnE4HdWUux5wy/4CuRpgVkXyzGCoCoA4tG+mSoUE0EzZEUhBBPaISln7yWyc8qpy85TOM6TAXey7GEeTnSZM+w9/ePKV107v34UQ5sdn6+eec+qWjmpTZnbEoZiCoZRChqbDkWEKeuGuNLxZAOhM+lJKnu9Mv/rOo1//9c/+kT/60ctXwi6PN+161S98XV26Or92c7cUjT2cPzo5XS3UF6xSTFr7mUvzh3f6N95549HpH8x2Ln31jbNX33dy5eBgsaKEVuHFGObJOiGDpxrli2r5ta4Z/9CTh5cQAEpqsaq6Pl07mt564/xv/91/cu2ZHSorUkN2KWeHxIyqCmbznb1H5yvnQlYdVxNfN8ePztu2IzMzzbnM5zt109y/+yDG6Jw3MyR80osM/crFTeOp5Q1gw8QCH09XRUvl6z5HNwnU+Ky5BMlM8xoc9NP55APPvPz8c6/+2D/9+bu5L5ksdRnYUgTCEELwWTmEsRMnBXG+f6mezgs8yNYPQ2VXOTAajhYkIyNVzTH2ndMG3KB+YgLivElOxRTXcUs+ZCkxKWVomonjyFSFEMThdC+EHe4I84rm051tyTJB6UEyhZp2GslKBex7ePTZQ/6ZnP/UaPYNLqPKGkNGXJwef+ef/8DDWw+635fp3HEOK4iOw3jsB2Vc7ssoOEQEctlFyVy7nVplrpIq96kUP+71j4fpd4dQibhuvXZYaRF26MmDV0BDQAMy1caHSePQ4Mqlcd+3e3XVluyNK1cFxxnWZhsykRI7meaKzRmblWzKzoyvTHZ/6E98F8lbP/2rv/RIrjSh2tiyeHEm5za2sq1EWVzk7fxqnR/EW7/++tufW54vcjQ3PtpJKcs2TsmzNp1LpSQBKGLMrJYROMboLGz7tatBckB2HGKJfe0aROxz3t+dftNH3/fw+AFxNZvNRdPV+e4bX/mD5XI5baaEIVpEJu+oqipIPmq8v1jGEjxJg0Fd3B03PthitXZcI5OYAXEuAOiBA2CP5AANzAQMACKogCljb6UHscBSCB07Q0TD4AWIGXDQeKErpR/tjtYnGxAYVdW27wqJQ35sRvr19r9P78yv7d1Bauvcer1umubw8HC1Whlin1PtQ5F4//7969evt9tkJbKDk+PlM89edtwwFAUhV7FhYGq35dlnj37+5//5i+/f/yN/9IP3b9+q60DgkI0IAJQYELGYIKEjQi1G3sAEBETVCBnQMRLxY68tfUoUioiMDpyYQYlpPKvvPTw/WSRs/EtXr7x7+1ZrEMZhC+3+pdHe2D9z/TBMKJtut+tJE+L2GCHt7MwAlMmLiENyzJLy8NqUnH1whmZEMlxfgIlZEdgGFQqJIToUVfINWtVJrpopSNqmtvagVGJuHVe+6l54aYquPj6WlEzVaIB8mAG5GAB6AGNCYAADtHwR9gyPk/AQECD3WtWNpNLm9saNw2/+xld/8Rc//p3f853PHkx8ZaEOptS1xSyPx2NCffWlG7dOxp/56mcNUJD7vDma2h/93pe+t/nwl7705u/89ptnx/KpT3/lT/7JP+5dHahkKDysC72YkCoAqYGRmSle8GfUzC76EDITe3ztu3BDA7DKXKZ5NVts7T/9L3782lETbJMQkamPfQiBCEuOMcZLV689OltkUDSqXWiq0clys4mdqjrDTApkly5dOT0933at9yGm6J1/eg08tWy/9sjgw/ykwRc1BQ3sGJUYm51xhmImXsBTAWBoZquW7t9dPDxPuLvrV5WUlpkNXS8wnCMpFe+2MY66tly/MiEHJUctpXJeAJMUIBpm46VkJiR0HoJDNfUA6r1nX5BS5SCog2jU+H65YtgNaOSLYZESVVLgIIVMQ98iNH6zsm6LZ9tSSYitrJdtTZNN6Z8ZwXazHo92RtvNzcK/l+HHffnBvflHKR1pvzrX/et1v033Pr0p7ZhpE3jk+qJglfd93zNjyRnZA0KS5LoiqADrufFXc/ylZf6CNv9mqP6Y7xYJe6nc/u5eH3XTrmtX5y6HyhlZVmP2ElPpu3pvb7HcKo4vzWfPXN9/996b7z6KgcbZJ6w37Xo7cdMq7FrunCYUDoXYqqquadX+hT/5/Q7+4K/91K/lyaXgc6s68nMfuwTdyKKadlpZxVOf3/74O5//+KNuy2HkJrtz7gqsOkb143E2ifnMB99JFCWqPBgQgWeUrNlp7WdIRYs4YgNPDpSSCJpZkY5gQ7J660tvjuu9KrCNjQlQTYulIl0Xx9NRaCqEnOPx6VmpJo1tTx368bweh5FztFgvCXioJqqKFDxTUkVCcN4gGwE5IgURZSVQcwZZVVVJLxxiQUwJwXkR8cSeXM4iCOQQyVfjUTzup6MxPI70xcdM6qfanItu3Z4artrj8RQiDiKm9WI5GdVXrly5d++ec2SixLrarB8+Wl46uHp68pZXK8k/uL+8eWXv9u17yI0hMGnMK6a9HLeXr9Nf/29/4urVv3Lt+qxdChiquMC+5IJMIYRSshT0xIgGrgxouxoSKIKzQcJtMvhxM7KADR6XBiAioUI17Dd5p5refvioM3cwH7/78KzQaKcKNy8fHB42+weT1WpRuMAauXFV1aRYAtYBRk09E9uguYq9SmG0gUmHoJXnXAwu7GTBEyOzOMqlBKvJVSomJoBMBCkD8Rh9zklMJ1ZGWXNTgdjG4TSnzDydzk1h3m623abam17P6cFgCikizjszIMcE6BxAKQgX6diKMPiJmVnlWDBzVWsCp+mPftvzlw6aX/vEl08Oxt/wDc/tj0O76h240Wj09puvXbt27fax/
O7vf2V0OFqvutW6DoEW5yeP1v36XuyX8/lod0Hrz31hwf73vv/7PiBrTK445vDkRFEjMAAog0cLgAIaqMLjJtmGuWu5YF49TpHGooKwu9/83//DH3eGkz1dLbTxLlLhpipmpqVIPLx25WyzWfR9g2jkIIQ25+1iJV00MGVKqb9549mU0nbbmoGaMvFwv3z6uvAEg3n68T/UrKgUK1XwOWeuXTOuN9qF4EADaNxuciM7P/3TP/3cyx8+Xctqse5TJynFGM2p956cG8BSsoil7jdbhxoISRgLldgzM6AN1ghGFkIoQ+Rg7kGl5Gx9qpx36kAQSI0wCRgKBmnbdmcSQLoycYwZAYr0TJa1B/Ql96rBtnbeLy/LqKLxQ1KqLDBWhaXoMm4+4KtJKJfMfTHjP1/Ht3aab1/qN122j/zF2euffjA+Hf/BdDPTOiRpjKxoAkVyCBC869soUnYbN6rHRfPdmP9pXz6p9kxd/YeOn1fbkIBg4ERXdw+vTiqVxE1Te1WwXh3EDeS+UL/QzRt3e/M7pw9aB/NNKjevXDezc1mkzSJEIK68A0/nQFs2ZqgEOHlL5/F//cf+5OHs3l//+Z/tm5mCoSkrWS+eTUF68MUcVDBx5Xd+7DOv/VIfRgeTnRFDiP2WOFczX40qMqicV0JBAG1IbFqJBymKxYdEVvlac5vb5BkYFY1ANWU0qWuH3cL+5S+9ffqomo4vsafkHCQyqRWCsiFK5TwJQkkaZbltQ0NAZk0zm0zHpMB4suk7k5hEt2k+nngGT0Kpn0BwfgrOEQasnDlBIwTnCIQAwRAxGzIqYRQEpjBIvZjZgMWQApszziHl7fzKyHzapL6a1JRzsSeybMTHUTX42J/ga92NyBPcZqCyCFif04NHJ/OdvelsJxfokqmC9u3J8d3ebLx7TbFyrnRFj8+X165dLmVLaKVoES+YulwywO6l6X/2n/2dPu3wKAm1iCa6NQeC2OXMzpGzqH0hUHFojpU8sSII6UCLNKwUnQBmHZIqi2pxjAGcaUGS0Gjq4+m9M1fi5nxdYtLUvfr+mwcHIVS2PF9D4ZHz0VIqMZdeEDoxZoK8cWBAoAxKaOwuUq2RIiK5IRgEgV0myqZWpCYW0layVYxMHjwoFguLOOmzx3qvuMo3dRjNMtQxzw0hU2MCuTXyeHjl6tmy72SdHRZXChZPiCUGzAyRMNFgC0nE7AAQFRgZkQhRSUgdZSbi7N17J8sbz7188/qh0/FmI8HhZFy5UW2O5pP9N9989PD03vRqkysWqmqvZbU6GF/u+3LtBll1x5rFi984+uj3HH3xC6//4i/83uSAg/eoLYqA1UrcYVLH+ljlP5jJIxipoikjKHQGYsjknSIQMio6GWXkS9cOf/g//5mHj27vHtF6i9TwUvsKwIMGQEmlGe1t29xu+9pxD4S+8m589ui8S8kIkK2Nm4PdS3VoFmfnJSUCKKUMNjKlZDV5sp4R0TlGhIvy+pTdKRGLaAatuBlYrPXhJFFRBzwERrlQzXbGR3uTq1cv3bgxbuqcs+VzLC2RAoAgOkxBesNQzEeBvF29/cYfPLh7ezatI5TiIloP4LioWTaUkiNAQLVIRRQrrGoNcZn6Phnitvcxk59OUwuWaOZ1jLYj6KJAVRWE6XTsQ+XJO6TxTnBgVaD98aRh34lwHWTK1Ditq9290cZhmYyfn42vNOVPX977of0JSv4nIfyE8U/9Cn/1DX20j3M6bMLUeWi8uzkaT2sY7/JBY/uT8up3P3v4vnrl9LNIfzPCDwPfnfn/1V7zfx6PXw3cBnGGY4bKmZt4PbrmOpmfb2B/Or+3PO3MJo62pp5HQNSnxXIV3nfp0qsvNr/zxc996fbWN3653bKrG1d3Il0bRwyKI3bZ9cUCpU7/yAsfOFm//sv/7LcjXGmp8zB1FoxagdSC6wJQ7lF4n90///FPLd92l/bqFEs1cyWFTSfNxFkpcRvRtEchGqU2cUYOk22KyLgb6s16Eyrfdd1AjC0x5ZyrqjJAQDLKQOYcm4Pbd+4HCKMRKxUpvQiqQEoFAAldCEElR1XimtT6dd/4GgA3SRTjzuX5vfeWOzv7W+2ZfCnFDFOMzOPReC+KGsbSbYkIUGkYk5oZak6iAoZGLgBZBHPkK3YASM4ZgqI6grztzbwhNpd21/eXMwzZeyyCIQykw8EGEp4SNMFThtdPup7haQM403Xd6enp0dFRSkmLlGyO2Uxvvfv2Sy+9ON/Z365PfEnLhQZXHcx3jxdn1WgsYiISKsqCwIus1f/z//Gj/+V//X9ZltdVs6Ii92jknEt57WhaV/NUejNAMEPi4fdRBUQqIIjMLHJhFDWY25gh+hKFKHHNpFYWudQ7u7ldYwUvvnBld9cT+1LKark8PDxcrnu1aDwpgtNRjZpGFZpZlkotD5gtiAzXlwE8NjUAUMALuxICuugQjQBEFRHByPs6+HEnwuBUjIlEcbXKTNNm5PqSEX09que7KiuwIhm0qec5gQESkigSQC4GPDTKRIRP37dEipk5ZgEDI+dQciqW64rbbkFeq1F17+GD61dvvP7aF7nuioivxl3dbqU3Tqg6HVeFYNmWl67enM9fPls+GH8wLjcPlWw2pueufuvf+1sfc8Lf/6e/Nds8JWHXmcYKKwKfqAXiJ8RIRTDHqCYAoKwWkUZ9j1WDqY8EVZ+Pr9x49h/82G9/6vOffua53dV26d2IcxhBn6V471Msjng+njx4+NARF5Gqqryv1uv1tu9U1UyzpCpUu7u76/W67/uhanvvBhsZVQH7n0lJfdK5P5myDiN3RjRSSYXGPlS+g1yxK4pN7dCHph5rhrjpP//p39einpNAEUtFMhoNGeiDbQGaUc5lsz6Td83MpbqZWe3rLWS6EHM9xjlRzdA7x2SmJad2ZxqC59LHigJoLGJIhYNXDTHFYhrLkFruK18DuKK8XaPbOs2VJFv3kLA8LFplV29jm+V0XA63QcnubLYVIwgs+yVl/c7x7pGXN07yDz9cgBt5yV7PDhyPil7h7RU39hG7jYrCWmX5mdtbT+9sPfLqKsoPuukHCo9QleOpJJBah3urqhOdx/PNtb3pg5N7tnPZaw4eMzcEUnJBM99USWSbKmRenZw6GoPkyrmkBlmDo945gX7ECWzS1tJuy0cvPfdHPrTz93/1n6z5OhQmJDFQ1II6dZ6j7Ig/7vlgRz//sc/r2/bi9Wvt+qxO1elq0VQWNMPG+ZmrXyIe11WYSkmxT2cPt7AMdQKecA99mIy60jsOqhpLR0zOuz4lVAPATEJIRVVirHzNErbtShEdeZGh1kDOmdBlsZgiOXOeD/ZG58ebtutwWnWx3X92N8w1vaVI0lMSERElrgSMQuObqetaJO8ziHYIxaxczIgML2ghyECodrGOFAwcC5NDmjU1xPRQtiYoBm6/gbNl6TJXVcnbnLP3/slA/wleOVQxe+wu+6S+Dxj9cB6UUs5Pz5qmOTo6unXrXQ9eFBxrjP3tW/euXTuMcY2as6vOz7vrV/Z2p+NHm01dTQKDKih0lZuMD+j4/rv/
6V/9u//Ff/Pv3br/B5aJpHJBcok1T0UwxhbJD0cZghohGeFAuwbQC0sKYAe5qPfeLvzRyHkyKwgVwkTBZU3k6OrlwxdfuGay2Sy6LDDfbZIuhIqHEVFlxH2O+yMfnAC5ZEpK+NinYXA9HSwDSxYA4CevGw4njno3ZhG1wo6tiIEgm0FWqiVlZldEYzJy1eY8zXamDfvz5Xqye0jecvSj0ej4bHk4nap1gGpU5CJqbyhJWeXCQ1hVzYDIiWaxUgCxlIDsmQgoqxSJ3/LRD//WL39+PJp/8YtfvPpcByXu73/ovZN7sFUpxRF5JCXLDmiT2rQ8sLDbHMwbN5v4xfLk8v7Nw70r/8ofO/3YT39yMqq+4499WDClrozDtOQ2ydZcIMk4NASqqMOAF1UVCVFHpu2o8SU5NVAs1569/o//x1//yX/2Gy+9sG9xOwsTg7LtEjknXpOVvsTDvf2H58cRtGRj78bNrO/TarV+bFBqiHj16tVSynq9HhqRx2tVB80UIj412n0yy7Wva1mGfxAAnMMM6vYmGUUJScCHCshXYSwpb1ebjZ6gAbuL6RSIOhAgsKFsIyOY5KKmZoicl+dvOxvPx14zSrn4uahGQExeAIihLcmjK2yTWagnAV3pE3g27xiMjcxXvl0K+0ZsSURqESyrRR/AIDfVLOdFH3WpnswXS+ciu54qkwqx6mXE4hlPrN8RnhQMLMfQHae411V/nD3VfJy2vefzns4y3SL3VvLWFy3iiRBKVbvRIt3E5k+P8HluXmAIJbaSO3AAQdQHjcakHsjUPVqcnTzS0az5we//vvXpW18x9+5Z9joRAe8NTT15ge7Tr3+l6+c3rr3U2ub+a8dMZM5QoSHfa27FlLTycbmSDx49/33f9eI//+TPtnY1FSSIRbxJqWuaUoMJihYhrCaYYvfeZ5fTcnh2/1a9Px9dcdXuJdvr5qOj3d15DhBrStBhn0Y2Ubx809rlWX/8xc32K2fe6hjUAxhQzDFULuXsiZCsiFZV7bVIRqagRUpOzGQo7DyqYzJEVLFcclMFERmCcB36nZ2dR3fXZnW7Tate6r40N8BXIZVcTaumaYgQEIE0aq95YAuFup61HXgnBlFNGMiwGLHhQB4zY3QKxqTMXPku9pOqOdo/ePeNN0UElMXMnI72J93tzXTUmPeD9SMiPq4X9jQm8/RfntT3J73PYFFwfHx85cqVw8PD89OFiLBDU10ul1Xt9vcO+yVoJ6B698HDmzevtlm23baejNkFKpxzFzu9dHny5luf/q//83/8H/6VH7p/566voWTvaSrSExlyFTV6qu3xNhlA9mF0B/Z4lxoraJby2B6KSEtBrXem77x7cnL7/s6lG+LrnZ2dbtuixpw1ZW9L8KOaWGMxdtG7BlSCn8XYh9rhBRMFh2xeBcQn/OjH3CGFixA8HRimlpEMh7moc0ZY25i7Dj0DctcXE5jtzmJOlZu1qdTjurfeA23as2uXX+o6NExZEoIoKrACAMOQAqqAAsRAJAYlFzPz3hE6NWVGR2iqORdFIHQiWtX+aIfE8HNv39q7eXQwvfTo4TLKxrvi/Ci1HSO64Pu42b00n+1PN330UJHrZvX1SXWjyObe7ZM/+wP/inP5X37887OD8sEPf8D6cSkFCAmDoV5kFhsikKGZooAhohZm1lxAsCOa9dv2hVeu/OLPfemf/f2PP//iQZQuF/EuggyzWO+Iu217+fBos15rETJipNlkp0vl5PhsMBVAhCT9tSvXqqq6f/dBzvnJuTus2yIFH/vtfB22/gRvtMfLyEwRmZljjGEawqQR0nFds1I9HRuwmcZui5ZBJFR1ShGMVCIAoSmYgeWizsw9lmy5GLNhG1wy3VbhyhBUiYiOvqaiGtDEgB6yQiw1c2q36JQILCmHOolWvsqtOnTMLiAwxBBqYkAoWbcCfagtjDjPKS95zG2KdLRfP881Ljd+BnPv25j3aRRNUnBdsp3gn8XRFsraaUBW1Ss1z5uws19JF0FgrHkE1VqFJ2Ndbj1QqUJVlAlXJG/mSJ3sjaa+clKik+zBCaKBGIP76un9ZjyNG33nK+vv/bYPbRfn52dnSDmCU4cGsF1FdK2rRw+X6f3Xj87uvRtVKdTeFUrWAPaSxHnHVat2VO/9me969ef+5T8+0V2zGqkHIjLlUEvJYpZKdJU30H6tJcxe/l++XwTHezfdyNiZovpyFJNuS2qjypYgprnnEcFJt92K0Wh07btH+ZXxa794V87JTZgNRsGnnN1FuIQN6e9StohM4Dw5QEnakWMwr1YQQRVVFYGRzCA750j08pWjvmujWAFsu2IWuo3UOzNXJ8mlmU6Cp2JFFNRKESNVQyT2zgiRDc3wwtdO4YLv5ZxD4LpyiGyEs51533YH4+CI37n1XofimE0xolEpzc6oO+u07V1T6bYrpXjvnXPDFhqgyaenT09K+ZONMfT7Q8VPfTw+Pr527Xrs+7bdbLvYNCO1fHZ2Utd1PTmi/KjvOmZ/+86jZ65dfnT2sOv7SR0YqiRtHZrYpWdf2PnEb/6L6m/Qv/cf/OsPH91hToDGMFaKSVbMu2YXqIg8Zv3jEDX32HReRIBQRJidmSEn0MYkAsobb/5BM5qBhdlsJia5uN35gZQzpHB8uh5bqCdU1xS8cxbHo3HRLARUhNEJKOLX8jHQ6MJZk9iG4LvhwCFkhsFwHUERsZTiXCDHFTQOXSowm+xl3ZgU8KgFSu7rehIhC1Rdr7t7s5Ozh10/unTkqBKJbhhjO3ZQDMFMBdEBXaiImfyT+xSzs5LL8N55X3J2yLULm5O1r5rR2KW3J7/z6XLtYBmc4Hg7rqd93IYQBm+4HT+5enBtyjt6MaKuRAUweZxTyHcfvfv9/9r3rNf6qx//VGnpI9/yTWfLNbEnlpKi48rsgvlYQI1EwQCAlQHUuyA6SmX5zHOHv/Erb/+tv/lTN2/sp3SexYeK+7jGMjMoPpS47nd3d7uu27YdIhdA1zQF3eL8ZNCdImKf+t357s7OzvGj0xjTwFQhGgK7EwHhBUp20YoMq1dVntw+B+Nre+ygx0xApqVMDo/8ZGw1VwLEPuaskiT3SIKkgJByh8jEWDtIQirDWWsCpgDeDECE2BwDA5uV7VLTnGunCxhycb+2mQAQ0QNLX1BdSjEweFe7irFoToCcYypVGWkRSIGE3XAxEJjP9uvRMVO9XsTJRPukaLUU2BquxFIx11sO2HfdFBXUFZW20KqUs67fIZy4MNEE2mczrcODNt492zY+pHa7X7sXfJ0X7arPWUTBzCFIyYb7VM90dK79O7kjgR3FfeIeiMRQ0Vjp/la62M7n4c7Z6tFZ+I4Pfce/9X3fCW7hRts+rzZpkaGgQZF4ujgHHF+e77B0bAhqYqrgxlVQ9Kl3edV82ze88Mkv/Pqq7OaC2a9NQ4nA5HrpU9a0liuzyY3DMXUm3m3LdvZMNX1mblTnyOt1tTqnfiHSddCXsW8qJ57L/t7IN5tOcpSO1S+Xqkf1N/zQNb+/LEsEKCKZETyxqdZ1HUuOJb3yoZcOb457XZl
ZlsTkTT3IkDINxKCqwQeRPHwSSsrtG2+/u439NrXOOe8wtxGAChdNoEnYtKg4wr7vCcfsR814Nmom5HyoaxdQWZUNyAQMiQgdIjMgkQuuGo2nvmqmuzt10+TYS04m6gdjPFA2UIJ6f9LmDaoMIPXQ1+BjKdNQRp/+eLoPeuzpoY83j66Xq9OT80uXLw/1IucMlvu+v3/v0TbCeL5TNTUw9SmfHB9f2jtomianHqGt3K5qAsLlef/Sywcf/6Vf+okf++WbN14QI0MSTIhcublJ/4cuEIPlABAZDRjFRa7I41hRLZJ6NqTZbLI6Wy/XuMXqvUd3JgRhuunK6WL9CDy8/d672674sANWG4yWp/12IdPRPpFjz67y3tOgixkkGkSO2TM5ulD/AwAMBr8IPKiuHDcIFbNnH4AwpgSok+lYcn18sjQlUd5ucTI6rMKEANVwXI/HzdQIp7uTg/0dRMygQGzAKSMimwmTqhYjFL2YajN7ZmbyAFBUmIiIxLSY+qp2rtJiD+89bDd2vti88sozJ48e3Llz8q3f+u0vXX8hL9P2zvrVGx949vCFF6+89IHr75/xpNv2GbSoKQJgRvS5tGoCZefh6YMf+gvff/XKsx//pd/50hf+YL47E8zFlKFSASmmAoPaXIewBVXRDFgVtW17vn90+KXPnv7Yj/7C8y8duNARsUOBlGtomlAh8Tq24/EEkdbbXtAlcn48CZPZ6Xrdtt3w1sccqypcvnx5uVhvt+2TA35Yn0w8nPFPPuBrbBl7AiQOjqeI9OTu1cb+6Prl8cHOkI+XY+q6bb9Z5rhgLGTGxgMDOVSAGNkBMRIBoSFiAbxA51Bj7pNmBZJcyMw0HR4eqoGIXCSvqOFFvhIUUzHiMOn6LMix4LbL674D1LrxKVrlqr7N7SZpdoTOh4v177zPCu1GJKEodpaKa6xyt7v0uc3qbVfu9Hi66Nk09m3OCXOsi1bsJPDt0m6M2eiqC5e79JzSsxz2iJntvM+3LK+ZYp8LEfpAAoIcPGxlHWl9NPb76FMvj7LcknJicaNZVb2Qi6m7fxZA2m948fpn77xtdzbk03xyEF0RaTfZwElK6NWTD+8+2H77hz5y/Y17xzklYfZ1p8BsGruTNfz57/2m9fqrn3v7kVWHEdri3dyyVpNNuxbK1+YHH7nyUtXEX/r8b6x1VGxCVLq1IECNxTJOqwoIUw3ShWnQviSzyiped72lpq6kydbZxrnAq4LT8MIPvv+dX7vVvxuZPKNDQBJD0VEV+r5f9vHD3/nK7Tu/VxF7CUnF+zAZ1X3pbMDgkIhYrQxNXjZ+dLoupUYwxqAFCIQ09OsWg5i4SsPOfAwcA/nUFSRvykzOAJh91UwQuTdRyqkUZLJigykSGKhql7r5aIzIm3ajqQcrqkpZhBBRHaghRSl+5vqZS12sQkOPQ/WGXfGkPf+6i+2Tbw3e3E8gkqGLXy7OmtHlS5ev3r17V0oSAWafYndyfHzzmSvNbG+zOJuMqs12Kwh7R4dnJ7dSbLJtyfMwk1yt5JUPHP7iz39ci/z7/4c/f+/e64bCgEZah9pMnzp7AJ7IlRSYSFUJ8SI5TcUxIzX1aHJ6fv67v/2lsxVcf/7Z2cyV8/N7d2Ec6jurU0S+evXlB8cnXRuT9P2ye+b6Dcl6693j2Xw6Glex73vLgQMBAqEqILGCETvEQuiHOSsa0RCUdAHvKpMiMjlWISVHSPuzKWH78ORUxXnXjJv6+Ph4PGmCqwRFcwEo3rlQ+b7vTHOX0BmmolKQSCsCg4Rkhl87UDUXAB0chlVViXSoYoiS84P7x7lNOfXNGDtlKmff+g3Xj4/73/rE5//U9397XMuO26tp1vcLJFQmYOdqKZYcaEneOTOLRAjas2fi8ODOo3/nL//gj/zwP/yZn/rNqP1HvvUbHjzc+pCceUJEHdp3JgAFNhNDyGWbsn/m+Wd/+zfe+Bv/9c9evVkVOWsyRAGhmI3YqhFH6Lb7o/mkqh8+OCYfnLEoTarpyclic74oJasqEDpH165f2Ww2Z2fnYPwEZlG1i0hIMLCv8b7+5wCZizvoAOMMtxZf0ezy7ka7GCO0xkjorK5IrTgskov3FTAhEmkKTKpycR0nMxu80sxM0IjZPBloUsC69iml80dn3ldIGIJzjg2Gcw8NtGL0jiAZF3biVKxqmMghohYYVaAxO2i2m4SBCgM6A4CUEgcGtGZU13X2VRpNdTQm2MoOoSdQh0Kume5NvPYmIwdTZ5J1WvG+qx7EvOz1bFqnhg6FdtEmo9Hdh6spzU9d2+Q8rcMGYAm5RvAAylxklIOeaF6rkpb94EfeuSJIxmqE5Ikc8yjK4tFq++ufPndNxa5gamejZj6e7Abu2+0W4sgzwzibfvnuvaN9fvn5o+2tuFny2HlF9GR16V545pBh9YnPvEHzw9VmhfWIsq4FULr3Xz0c+fGmj5+6+9n3Tk6g2g1awC0ww0E1Z1TRTgLE0ho4l4UDpqyB3MG0Xq0368Q9OmNTqxqJ2bY2m7sq4PbRS9//zJ2fPj07OQMjBqp8HVNCMu/ovXfuf+A795//0ME7n12MQ0DTnBPWRoQGkKIghJJ1mHOZkaCqRVUQyJ6wcj6mlnjmjJt5091pp1zt7+5kuRusRvB1ZUmzorRdykpJEKBWC4aDtekFxyuwq52n4HE6HflqWo98kVVOCUEB0DstRVEZLLNHLUA4uroXXzuF8Dhd7DFH+GsQxFNK7qf/HDYMMw/8s8e27+nk+PzK1Uv7B/P79+8GN2Zzoqnfbu7ePb167WhnJuvzB+Zo3fV0ujg8PLz74BzZi1FOhZ1YsfPz+MyzO7/8iz9fB/eX//c/dOf251U8wxhULkxAnkL/zQwBpRTiJ4FqZXCdrarKePubn3j7t3/3taPrlzHYyNt6edqMxqfHZ7jrp7P9kvHdW3dnu6OskYhfffXKpcsz0Vw6XJ2tSMejHa9cIA3MOV8kO0QVG4zDhhEq6CCIIQRUNDAC80xBNauggoUQREuRPB2h7uws1in2Zbk68ZVTocUi7x5MSVtSAaNusx43s9in1PtCtG3NuUC9NNO6xIzsizp2RUQI0DlCZEBVUWYqQEzgiHJKZHi4t+8PHTMDa3G8XW23225+kD73qdc+9vOfW3anr7xy0FHOKJWrcwErxsMioZFzJqkiSIxZzAFaSZ4wnpwu/tK/9+d/5K/997/8sd8f19MX3v/M8WKlVBDRAAwQDIhI0YpYRdp18eqNg9/9zUf/zf/7Jy9dxZyXIqLFKymYY7OaLW7bcVM3k+bs7JwqH7O6UB1duvrw0cmmbVPJJuKrsGk3r7z0UjOqXrv1uuMq5zIs2qGxEBFR4QtcGy5MbAAGTtHQOujjAGERKaUMQ6a23xxe2V+Xrosb11SjQtkDkwGUgLVj49qLxlB755z0VLs6saQNlKJkSo+doxBVBQMJmxI6AYhq7IMjB0BmimjskJGYSMGhUcaI5M6368tHtYDEmCbjWqXkjiVBPakgqphDJ+pSBjJtCF0IDVFQMJFMLElzsHHlYNsTVQjO9UogsZ
MminRJSVFMC8Gj2FobUSg4QE3bVU4CCwi7vTh0k5BrrXLOxraDPEkQnTu1qMlmvjfhxbqDKtyoq4nA/V7fsvwajrc5tpaMHRXekAGylzGVkI2lmo63aI/6ftPSN7908yoUi5K8Ju26dnX7vC9SPzg+sekoSQyEfaq+7dVvunGEP/lbn5Ddg01bnJtJTCOWZw52Xrq01/XpnQe33z2+e9abr+dTT/s7s0t+Oh/vNE3jyEFGFI/FsQZB1JQZ4qgxtJxNM5mqZirC1rpKwrR00RET17FNV35wb/xsve2zEBTTqqqAOoC6yfbm5/rD63vAhYpjAiLY9uh95bgupZDL5AaXUadaoAhLQHCOHAmKEbjKiqRKQgjebNt1D09v2zp3bek7u/Pg/rrrK6LSdhRGXPlm5DlMCB2ZdxY3Ih2ooyppRZPL49HNvZ33f/u3/fFrVy/vzXbIACkbJNUcOKALbOqYRbSqfLlWpW3kpjYHlpOBAGFgx8zDNfZp5P1J/06Po/iePGc4DNrt6vT0dD472tu9JgplaNGsbxdn7aJ34wM33+UaPWq77M9X7cHRnuY+AIyIvBUfoC8YY/fS8zs/+5P/9G/8d//w8jMfEvZFIiIzhwEcISDPbhg8CLMxKmTkAe5FZJ3Nd269u/qbf+Pjv/X7r48Pr696UIHt6cN8vvAp1RWwU+LYx4cvPjM7GOGHnr36J779w9f2x5hSWnUOaT6fPjg5vfPeKi5nSiFZUlUHCEW8cyLJpAYgZv84mkNtMIRCBEtEoMOrZWiqKODAj6qd+XgWU2e1F25qt1dj2Gl0jCYK0TdasKkmWk1pfqU6vHneTcLkMrrd87UWqwQqU8eomonRMXhVMouMCBJQvQcCRURy7InINw4q7C22nRwfny/WK5VcVqu6prvHD+6/t37v7VUINRqzoqMM2oElR95Qigq6bA4FWICLOkFwwfd9artb//Zf/rPmm3/0Tz5xct5OZyMTwgKivaAYOcBCmtnctuMrzzzzW79797/4L//W4TVGblW8qTdARGM0730v2QJOd6bbdZtFk2Cox1RV664/Pz+PXWcpG5WU+qODg6pqbr1zVwVzzgZipkRIhDknAKCLOo4XcxkEIlQVMx36DyDMkgeDHAJrXBVjDOORP6zR0rQZVQrKGqh4LqPGxhMZj21c6eHEH1Zul+yg8TsNZsGSRFQLgaoScyHIBuqKoTd2itFrTZYStOPZ7sCWKcKAXIQAvcA2qREaooWqycYmVHtfSunViU+iXntiX+LybHfnKCBXBUasaEU5jP0UQNSow4jsWrYoaUhoS1FO2s0qxUW7XpYi2jUEmz6XTqrOWuk5xP1R/SGgbwM4Il4AvLHe3mrj3V6g00adlThBm1W2x3aAbm7dlOqmtN9yUH/oyuytnH+kbf9fcfuTVj8oS/R2paquAzow39RuWnn20OeegXKRAlbAL/vs/XQ62u03OaUiCuLC8aPF1fdfAn0zdZsxj6z4SeNVy6e/8tqoOVpvVB0V1BvTq88fTu8tH73+6NgHB6AKVgMdTnfrykvK5z5rX5JIVONmrCkSQ583IXiPbnCP1VxURNAJEgIgoRkYsKHEVIpCUdWFPfc9V3N3p7wX61m16TZOKvViYPfu3L35/he9Z00iIuxCKSVnqiofQkipOGYiLlmd96KdaI6lr0Ng41wKMqsAJOMKEogTnLkZhK0GMLxTQ95pqu35gjSTZYXKs5+M0nmp6tr3JwKF0AFoqh1Ct4glHm+Wv/vp4+X63nq97pPkbF7RBZ9S8eSKXRj7GsJsNlsdn1HqA/repCrADNnk/5+cDx/H1jyZSj0Bc9SKKW5Wa0S8dOmSSFmvFkRYSgFK9+6/J3a4f3C4LiihF0snx+1VDNcODh+cHONo3EaryDUsJrrays2Xdz/+ix9fL9N//Ff/4v1772mJNdVEXhWQC6KBkBmYRceVWlJNhAwATT39h//wZz/xq1/YvXp1Z3dX0sPnn7n86ivXr12Zh5o23eZ0eQgAo7qaTJ87PDx8681bn/3cVwBmsSPRbrLTFCtZu2eeP0ypnJy8N8o3ZrMpEq02D8fj5oLMzo8v+GCAg4iXzExNkLlocQ6J2AxUAJlF5N7Jyfq0O5jsh7FbwLZdbl0TIEgXU1EEcuYmzfSwL1WtZXO62Wm6OmiyuHtp0nePRo1PqSAA+2Aq4ARNzKpciEPJWtQMCQYvBAMDBVUlT0yyH2qAuqjMd8cf/OAHPv+5177Yv/HW63c/+P7nL11xq8Um4BQYzQhBK8UBbRjA6iFUiok0z7Ssum2cTau/9Jf+jb/9d/7+X/tv/s5//J/8Rx7PM/cOGjaIaZlx7FxV2u0zz+79wse++rf/zv/w7DPTEhNblWHlwwiyAaCIqvTO8d7u0eJ8EWNUQET03gu6k4fHm802OO/ruu03o1F95cq1e/fubbdbRGCmUspgE/919C0AGBr5r43cH4MwzBycF82iufEjQYoQn725B5odg/dIBOwGK190zjkTxIsrQDbKYmpEyF23UWUmApNcSskZFAnNQQUAORUKjgOjYc7x7Py+qxxSHISAjgBAEZGRBDD0mZJklZZ0BOAVQMyJJ4IkeTJpxiOwFJswyqUlrwUhJkkZwTwz+8o5r5drt53b5jyBwSHCxDPWza6SC+hq9ugpUQdahbpfr2oME69bzytpFGycNRJoKRFEXJhPRpgbTWg9sspu38rhvO/okvN3J9XHFut3e73i6n975J9FnaYmhJBzpkCuorryUHsViRWqC1VRlZIbB6bw+t1bIrFgcYxGhCj3tov2jaWbXcoAyZs3KmC3Trbu8KXTh6sFrEM1ffXKpe783U+9+14Rt+PnXJGhNt575LZt7y/PIiiwr4kVMwRYxZRiqtE3zdihVMhmJCICSuwJ0Cmpqg2bVtEU+5QNiZwn9r2tX/quG7ft9vLWkt3EKuQCBm57tkR1k53x9kF03qkZAw1250SOCQk9GKiVlMrh0W7f5bbrzCznDBc3O2tXUs0nmWTkrOZRgYVr6qaemb178vC1cVWjVaXvNiV3UBXPVDfaZcZRTBb2Z7GUej5/5uVnz+6fz+o978rGdHF6Mizx4hiliKkjRCMCNUBF9ZWbXJ9u3zvdGe0nV4ZxGNc16Nc8I59U+ac3zP8UqCGiIppSXK1WdV3fuHHj3XdS27bMjtlS3p6eUu3H0+nV7eZEbDFWOjte7MzHV69dvnt6WoWRSlHQ1Itwt13BtZv4iV//5e1faf6Tv/pv9e2trl0FPymSPDrCupSuqilFFEMfRooKwIaxL4s/+r2vfsNHXjrYPVq2q5s3rzdVHbetd9hteyB69vJIhJmrmPPDe3dTaseT5vWvvrG3sxvTpppURYXZAUAzrg/5KGcAcn0vu7tXum5bMjWjRoo5p0gDuWSA+oe2mbRIUQlc9TEhMpBL2U5Pl1n9aD4djeuT1XLVbibTWd9rWY53poc+KFdTh3mxjVUTtuf3D3eqnJJZW1fecXGGWrL3oWg27JFGJSfvKylkULIWopokmhlR0CJDApEWI+CCEZMN6k0ES
Nvlh9737LXrB5/51Osf+9lP/Lt/+X/hw4OYusB1zplYOkAkFBF7LHQwNQBUOw9VkuIX5/2163v/xg/8iX/6z37hl3/xF/7MD3y3rSvRPpH6MEl9m1K+/vylf/YPPvETP/5Tz718GWxRuSonI2AAUZKKa+mSd3Z4uHd8vFIjMVCmph4Vg7OTs826Y3A55yIlhPD88y8eHx9vt1vvfYxfc2x/orZ7ej705LtPkHcivJi3oxvwdw7+fLvauTKudsyyec+hghA8oIqICuSBU8kwOOcIaAYoSigARqagqIjmiB2hI2ItZiV4cDTwURXUHI9CqEU3ZuYDE4GRDdEiRMQUhlBFJwJ9tEmlrNaKNIECrmKZEFBDgrpuy2yvoVADIgdHnoC1aCIMjrGYcAURrC1cVU6KrrPGrAu14DHHbpX6WHMQOKhnMfdfWa8tO8+OMFaIHDHQGFDP23YWoClwvG2f+4Em3dzGtw5Xn9xOQ/qHJ/HX1vhixf/G2F4MXLWSa81CJpDJAIRMFE2YLZe+mPYpFVUizH3uCh1vsrhJLmZFETgWiiZ3T/uUfePC8IaVYn30aGmyU7307MtTdg8ePDjZYMaxOd8TQAix5JPF+el61atkYmBPVkXTVdz0sRUzqeqIDAWCMQomtR4gDR6tYCwCMFjOoQEYccyG5Ih9cq21VR7D1e++7GbokFWVFZgcAK4X2/F0lECQaKiMRNT3fd8nsyFqQwwEoFy6dGiPueREREax70FVzflZNnRZXK/Fs1mPMc1me0eKsOkjuQod1yPfWyzWb9cbK52H1tvGcSGsq3BV8+Xp9FJVz/Z2Dw4OdufTOgQAtsGb1LMzu1C3MoJDLJrH+7Wf1eebxahuRORxy/aHQJinC/rXERKe3lHs0MxKSiePjtvN9vq1m009FoWigqBayt2797fbON+/hGHknAPnF9u0WbfXD/aobFGKcSCHzoPk7ercnnl++tnPfew/+g9+pOLr051JG/vgpwbcp9Z7r+KJPKGUUkoxNUaoc6G9vYNnn3/2YIdffOZSyt3J8rwzW/WlADlu+h5ilC6mruuZ/dHB/pWre9euzVyIL7x4HdRSJwxVKXZ6fFb5IOVR6k+8z21/SiyjcQVgLshgg3XRGw4ZBWiICGTeBaJQNfM2yTbrOqMbX3LBty0/eiSLU5A0Xp3D2UlXjWeL/piCbLsztbizs7PtNpNpANkMY06VXEpBDoZOlJkaEGJkMC4ZUkreVcvz/itffkuMTRkpFMGuL4B+PNkB9MGPXN2IUahGjoP33Mf1aAR/6s98dDp1P/Lf/SORqqq4qBCbGQ820QYAhENyIRmgmqvASA0coT8+uf/SK1f/nX/3B773T3xwvd0gbQZ6X9dvx83ohRee+//88C/+w7/38Vc/eJ1xxRgA1HlgbgCVmWPqnOPDw8PNaltKSVnQ1aGaivHifLPd9GamVoiQGF588eXT09PVamWiKaUBN38y23/6Yjn0GE8//mQBD3yqUjIKBqpW21UztZsvHDHbeNxUtReRvu/7LsUkQITktsXaQl2BrkAcuECgqsVsoLgPmL7lnFULMw/SQjQjcgBgCmisaqUkNWP+mkgQbcghACvStx0zO4WQFBRT49SMnDmHBVxSySBZBRGLdGCGSMGPQXzqc0xdLpsVJuMsYosIGymmOfVxON2r/x9dfxpsWZadh2Fr2Hufc+7wpnwv55qru7q7Co2BIAEOIDgApkhCHE1Jlk2FHA5btMQwTQVthS3RtoK2qAg7bAUpS5ZsUaZIkZREWRYBcDDBQSAmkcTU3Wg0uuaqzMrhjXc65+y91+Af51V2doO6PzLyZma8fHlz73XW+tY37LTPunMCCDKOS4Bx7NuRI4SEcV45ZatsKy7nZRyQN9m3pp3i6Ue7u7//X0i//od0bP/8ufz8rP09i/Q/b+izlHS3q2HEnUsso21Fdycv7IcmuEId3S1E8VDFiTwxZnR2BSPajkcHt0oZr4ahEEWIFFmrYAWxMKrMZw0EjuG+52F1drnLugMKNANVst068HZz5bWkyKOJONjkKpFFRDjOh1KVkoARqLOLkYsVBCFgpOtQCDCn6M7ARhMbr4oSuhlDsBhH1dTRq7/11V/60XeXerjTbeAGHLarkRkBTEQQSNxMPaVWVd1BrWbp9xazm7duPXny5OrqikPr7kzsjpPz1dCv95ZzbGIZRmA1pUghtV5ts1ze3FyOVbX0OcxvvPnZly42Dzjx6vyxEg1u5+urFJbanz796GkMh1e2Ol9Lrhd1yvdQJSenCR9Hd+dr61gnQtW6uH/0ePuwjuNsudgNW6wyta7Pd+7/Xc0RPFtvXqvjAyKK1IcPH77wwgt3795974MPVDQGFi2E/ujJAw/3bxy/fHXxgerA4OvzdQS/eTA/XW1zNm9C63HeLarlcaSX7hxcXvzcv/I//bf+1J/+43df5E8efRzDIobg6FaR2IjA0dRqlUpEGGK/dWJ0ZilmHkNMiRFcJev55SW1NxFK02VgcQ+lWJeaEMI7bz863N9jNycDy4zcdQjQz9NeLeaNEDiCqIymTIQh8qf/cHr2/DOzxDMxRWYRmO/dutyOhqAEXdcVGbFaN49NhNSl0022Wb+3H1NTt1c7y80ItGg7r9ushoSRY6nZjRAAMJoQooPNBTJhAad5F/ttDx6/8zve+vDx091m23VdHsecc9M0TYjn5+cN03x/7+rqan++2G3W4vLGG6+N/Wa9yv/8v/A7/8//9v/97bff/8LnX62yiUyIMVUkQjMyUAIwJzFzwtorh+BQQ7Jaebety8UNMAk4Zu0ogIybvb3YdPt/4l/99z985+PXPr9cby6YI0IAVGRzVKJYhhwC7O8v1pthu8ujVuA2tXsOdHZ2PvaDu6tVRFCrn/nsa2M/bFbrmquoNClNnnrE4J+yc7+F1vWMFADPBYqpqiM4aODoGIF3r3/2XtMalGYnA2FAY+bohKq1VjMTC+Hab/M5B2yYtN+AAG4ISCimCm4OjMlUi2kIYXJidVSHkQgJEcBVHY2IEZXAvJomDgyAhkRk7pFoFHcre81McikXOeFMcoloPo4hzAGAQBfdjDDEGALXmIomP9iPy6ijB0p2U7gNtGzxFrWLrO9pjdS1FGg/XeZRm3SOYQ+tg3E+azRjh7hz28aAgQF0FFssGvnp8aM/8Pc0zv9mz78S0z87w88LDWIYx/3ISvPtrKjEGQVWe/LOFTE7Bc7VkRs1wBgQ0cWboOI1Q91K3ZS6HkcMHCNm160Iplm1WM1G0gFx0Mbxdnv08sXVd5/Mf9fdNmH9GKJTWC6cF9h0mKzAKJ4BK8JgPiCqsSkJkoAmINCwE9oBDkiGzBAmN6ZKoCEAAKABwOSsi4gM6KBRYmEwhFoo3OoO35iPsouhRUQw19EDEwDxp2YsRCwiogLgRJhCYuZxHD/55NT9m9aVTdO4IysWbe68zp//4rKO25Dm3SKCrr3OIySk9Wb3ds4Pnz762tknj+dwY0kHs+ZgXYX3Dm6/+tLB8eG9F+8glY8ef3Xdn56dPV1fbkCYITFFByW4
VvFNSCKBu05EOsYZ33r51rZukCGEZFXcbWp2vql8f/p6vi2CZ5mrTuDXSn1VrbU+fPiJI77yyishJFVHdLVxzLuzJ0+3m+HkhTcWBwfsQoir1TBUu33zeBYwYDBxxhBSA1ZlHI72lkCP/vj/4t/8xz/z8WuvfoE4T2wHoEFEqk7pRxwCxciqOTWOuAXnCGHOqZXiY48qq03/7gePYkqxSxzCbLbIowVsAnNAu3F0pLWIDsNwGRNkHR0wVxjrNs14LH1VE0UDadpALKZwrV6iqbgzABCQSyRAYs2aU1owdG1MiAOhHd9KL7zWvPr5w5svpuO7tndIAFRr2G5qG+bz2ATPEXNMikmBuJrGGJEcmMQNg7uLQ2V2IjIXJEEuedjs+vVuvTo63B+H3axJd26etIkur05v3jq6efP2g0ePb965fXFxkVKLHK4228Ax9+Xq4vxf+WP/w1dfuVdq33ZkGlV18hU18MmLrZq6O6gx7mmdAdBYBzEABghFLRfBEEK/q0e3jnM++GN/5M98+NH7t1/2XC8ATKqP46hexjqISN5ZCvHwcH/I9WK724k5pdjNAMP52VXui4jkMhChaHnhxRfN7MmTJ1NQe+Bgdl1kTb/pBE4tx3QSn0fhnyGH7l5rQUQMPMjmxVduzw7ibuhrcSMWRAEqBlm9GBTD6kAubsVNzMwURH3a2D97Zrg7MiETEonblEfCTE7fsNiLYc6ICNdXCa+/HwQAKNd2mVLUQtigDFK6URf7syrm4mYIHppZF1skcrIWHBz7lMxcVN2dkUMiT8vAQRShIh94PHHabTfB8o2AVEavNYqT42WVM+PLsr5hdgvjo83mA9FieI/jHckzYjBFp34UuhFh++hvP/naz+vu9xyGlySt6y6yyY6vIFXdjZqTDK69ym7WENWqk3PcjGI0dRkRPVD04k1oxqyjpXUJFZdqkS1Gt/0mWoaWFh3HJroaBVguaHZ0+L2///v+R/+3P/ov/5k/8Zf/+B/8U8fhnOck1YDIQhAghMCCJAZjrU5CIbtxDDAxYhxHb7YuA3gGyAq5elVQQ0P6dMS+DpRhZkRnJBGJsUFVVO9Fbr9xkmMJ3Bo6AujgLgoAAQI5OE4RLUIEzJxLNoXdLj9+dDlvlk2a1arT+ogIRESrBFpuH2//B//j70o3ynYzljKuSwVPATlxc3h4Z2//5RRuHB7PH57/7M9/6Uc/eudLmlfJOEoT8pzGLtji5q0XUqtVt1p7rxWruqqCV3QFdzR1QcRruRyhEyJFrdIddu3J7GJ1kUJyd7H6/CX5Fpz9G5Pwcy93n/RQE4tGREopjx49app07949Ncu5EgUC3O6uPvrgbRU8vnG/mR16aIR52+fNZvfC3ZsRSimlikVYECUPtS+rJtHhrc3/4U/+X//S//vH7997LUSoNRMRYmCKCIGoNY3jgG3YJ48MTBC1isnoVgEdGQ9uzL7r133+S7/4kwFl7Me8ExPPOQ/DVmTc9eOuz6JEcbHaaj+EPs8uN2HE2XufXA02X+0WY96rNZkl8GTm0wcwfZhmhk5M0cEWi70Q4tHekZRdF+RwTm+8dPOFu/sH+8fQHF0JadorehDiSdfulQxFsJojessOdRfM0QkdaHJlCsFdiWDySOekVYB4zmGx2oztbHbj1sF2u/3i5+7X3ZMXby+O9mmv01uH6TMvHe+12nX6a7/j9WHz4O4Li+Nb7UsvHM0aA8rzZQjc9Zsc2MCrVlE3JCrBK7qQOU1dql0PJXjpoQd2ZjZAAKpVMURzL8PlK6+dfOnLF//S/+zfIaLbt2frfmPSAQBSQZIsRcTKMMYAxwcH/WbcbAcBBObZ3hFhe3l+sV1vSh3FJDZcJN+7fz+l9uGDJyLyrECrqohODKVfPUfCc9LT57HEZ6134KZITTPbu9ldbPuioYIhRHACRidDNAqETCE10w15NhkQICOGwOzGhIERzU205mI24foKAByc2AkDcRKRcVAwcEUEYA6EYXoyISI2EYt6dW9SWMwbCqy2f7x0gKcXV71USmG93TlUD7LTSmyAkBqMqQVoxtEEnFKFYLumpg5qHs/HUkSSiFU5HepGdB9oZhbBZNePome7WrU7z0KYX23ltQMbfLOqtYUljSNVGrHfgVluf8EOfiTT72ra13dSpWf2FebYzboaeuakZtAAtdTOR5EQY1NrTYx7yyaQPN1uEAIAWdSqQwyI4FptcbDcbM/Guks8G+uOm4Ni4EYjIIKhjJhvfQ//xt/z/U3b2MPH68vteyChSh7BV3nwyNAEFyXzJsTBlbgO6gQxVm8wFZtyLQqiGqACGvinLN1AzgpT+UMwJMfAwUXNjVKsNXcO6LixOls23ZzWq13DSAQ1y1h2EcP1ph4NkfjasFABkCmZWow0jiWmDlGJwFyIAoqH0JxfrP97v/HVUj76x7/04A9//z7Vq7a5Q92s2JOOZu4436fG9oTg6HY7rrcPV+exrSN8uLeMp+vH6/PhnVrSMd1a3hgzLI8Oh3y5vVzDSGRc+4qEIaTdmCMxEpp6SE1VMwjBqmI9euHkk4sHUKRbLLZ5C6V82pPi86Pu8yjN8/fKzEspE/t4QjlFas704MGDe/fuvPTSSx998HEtRjRlLJe3f/EXbt25P9+7q/ykDBcusNv0ZHrnxo0H50+zjVQJHQwbD+4gAekLX2z+0l/5f33961//E/+rf9Hp6uzp6Xw+V4UQSEyQa9c0qqNW7bp2q33qWNlBA3iyYuC5IX/j5VcvH58dHu5fnT8+Obmx2m6Wy/2r9XrXu1oZSx5GCWk04oPDGwpivAfK6xHGXYZ9Xh6wanarhI2762QHSE5TYLdT01FMreSgbosZH8yhiWEchgJtE0mz7PGhyi5yzaXf5QRK88WyqBgOMSCogQ4Mhh7BxcTdJKaYRdEmcxVCTOa4Wve5J606X8bDw/To4cVrr3zhyZMn3awVERNtmjlEeP+DT27dOHrjpc+dri8Xs+Xl04t5N3v49JNbt49ESsDAgFYbI0/t6DprRnNQIHI0MxM3AAKHMG1ZnYhTQnMHUO6HvNfN946P/9x//KP/1X/+d1957a7zpi+7ZVpIyWSNg8RkJlhr2d/v9mZNv7kso6swkrXzuRqvLtebzdpVKJA7DHn30osv7S33Hjx4JNnM7ZkL6aRBrbU6OBM/X9yfofDP03OnA/ypygkRYtXt65+5PfpQjYEDsYKagyQmh6rg6NEUIETEZG5TzBeRMElgA4A06/qdD1UBLYSGfZJ2gykZEiKKleqSSDFqiMBIoCaibohh8iFQd4eqnriA+Si2qt0i9bUo++58u7e/P0ofQPfSDIYRojA3DgkAMCg3hgwKjhQhVIpFW7h7Z9lfZUloDvuOzcHBWQnva39jeTAHqpozujThYLYsPqyIvqq254g9a+yu0KzTvSFujVqLvExDsb91efEdxzff4Gq9IJDDLOZcm500EQbrmq6gg/lYcogxiO/mMVGIH6+2+/Mmtc1qlJZCsBRZVasDcYrjaEwt4No8R4pSMaYSaDaHIlJDK6ybU/vaf/bTr7z/9un
56V994eVf3AhraiBCHEdyEwYHZIMJyacRZjG6q4IXMEcgEIPqivOA5L5TLyGWOjaEamJOjTJHHFpZX5a3Xrm9XX/86IwDx5mUYtxyXYy0Xmg4Ss2VICY3BrZxpOoeCLhxzQRkgVOphYkNzEGI0d0Uygw7cVfXABwC1eh5t70Xb3z2sPzMz3340lt3Auhi7v3mqVqfTEteGbYFWyX3jG2zby3umQVcN5F5ub930KY9u3fnBcDNk0frw5vzNlHdWSJw37pAhOSApVjLcWrY1cFckZGs94RZvZ21N145unrvdK89QmqwFDfAJrhbIDBz+bSqf0tlf9a8P1NCfcpSIBPZrNaPAO/ff+HuXXvw4AEiqBko97o6O8WufSHFBYHlfpsI+60bnt+9eXJ2erEd16Hp3BnUq9XKre761149+OVf+od/9I988K/+a3/kzbdef/LJqVMFwonTACrkmLq2Sl6mUEXNGJDdhRkChFrt6EgRZk8fnxX1tz98tOtHxlUbZ4psXJvO9g+7+bxrm4Z8DJGEx8isbmUGy7lbLUDEs5YKT6p3Qlez1ASxSl3isBTAmMKMuJQxxVYRILZRgDQsmpgt501pOMy42Wacd22wVMQB28o9pGB9chwhkFljKO6OogFZtBIF9oqYx7G45RdfvvfhBw9Pz4YQwrYvT1cf9uMucgiEMaRNf7l3sF9D8/aj0yuBhw8fdeliGIbD/YN333vUhHj7zsl2uyaKEMzEpEfkIUR3Y3VyQ+LAAEWqKSoJIiM0np0i5mrF4cb9Tsbl//5P/qdf+fkvv/76jMJTkxnTUjEbRU+qplYEHW7s78UYV9thHLyaQgjt/NAknp9fbtcrMPXgoqWK3rvz0nJx+PiTx7vNJsZoAu7OTKpiZg6OMIUvfutkiUhTwqxV6bpuHIcK0rZtzQURG+o25fLG/T2etYMMITCYR01ArhAMwT0ROiJPXLkWXcwnvjwiMELDyIzAKFQ3Ow4YHUi0qA05dEtiABGFSLMYHWVMAUOaKfTG3sVArKjqDA4qDkwBI9VSaw8lmveyt1yuLjbcNrH1YWXFvFDNtAwJ6rim4IBAW8WxdZuznS3aqLl6mqc80NyYsbf9HV01xsNuiBBnxCTSuK8l564bR7rwYR71uEJX9anmk252N8BZHtyXO+hn1Amk/Vr/4UY/YPq9VudedhgIzWTsQtAMHgECqRmoZgRndIfQYTCFbEqpHUaLKQVcq19JaUJExIDAYykMymwxdNVpzCHGVG3lGmJsFosGyAI9/eTBzxwdfLwdv9LDzz1ZBYVD9xGqVaohcAQU1cGBDKKHsdkVUzNCMAIFV/LU0mIow+C6bANJRa0Nc+82IClX80ZzgU4oeMnKlJjEpnC3mNBh1IFx1nCzg2263oBDHvt47TdCBpooppTMrGhpY2tmDopITG3FLKoRGm+k1qGNy7My/MDv/MIvfviTB7MX5fJoV4atUqCzxoME5oZcatceVAgayuDrGpiAGdBEnzw6a5YvR2bQcLkaq+w223Fj9eL0EbhIHbRWpmDOiI54LfT8tB47Q3TwyGHsx/nx4W41XJ5eHC9v7kzN3MAJCZlEh4ndY6bwq174Kf/9W7ZbU63fbDbvv//+66+/LiIfPfhwuZhPHdZmt3n73Xfu3bnbtvsqmMuO2cqITx89vX37JFxtzteFAwJjjGADWKQ1jrfuh2H75H/3r/3pP/iH/vv/7P/k1+7Wp6vLbdd0yKZuzKyWCUPJFlJUFWZDRK1Wzff39j580H/8wdcRYpZ8ePPgM595YTZbNNTwjETEXVOIiIzm4zg2KYkaMiOGhdPQ94W8i61fbjHNUkqqiiF0baduy8VyNw5tgnmTAMBU42xGRIAYiakLJataRcSUkrtTiE6awUrJwBhjZOahjmQWIlUpROSu0yZHq4C5kyYOZrKcddvV+t2v/8puzC+/9OrF5RntxhSa7LVt+GBvr5QCEC9PHx3uze/cv/3hhx/OI/W7q7fefPPhw4cH+91qdXnjaJkiq1ZTj8wGnGsmYkRAAkLKdRQpMTIEjb4PXk0zM293Y5rJS7dv/8xPPv13/92/GJuL19+a9dttwsZUpVak6u4xdpZJpBwfzmL0zW5Xsleranywf1zVL6/Otrs1IKgzuBYpL7/46mw2e/zJo2EYJs/RaXv5jZN2/fabjtnzE6SqxiaNtWAITUhVxMFTTH3dzZbdjdvHu3ETulREIoXqQk5AiECAOtmMTid3cEACIkc2JgafFp8hbxFhmOFMsgjUCB0P3uCutotARuzMFRCJDsvQEpm7i4gZqWps2NRCCCqOUK1YC7TAltRVeNUX53hrzmXMw9Zv7C37MnLwib7nVsEhpbS3PwdQICwCZgviYh3iwgrXi+35ectHWoL5qg4Z9P7ewocChFeXuwqswY+JNddzgDXhgco8NNuqWXYth1qyBqmCP+H5Xtt0qldeEiQ2IKLihjF6FUcvAd0NAQHRzMJJOx+sXJRcK5gTuWHsFBwhCLqDohlTMQyGbg5qNSV0KETEaERSiweeSTxjX28vz24ePAzhgnD50i10scCt5fR0yA9LJebPHRweBNiVVW+HYrWIDOqjhCpUTADXoVuYjSWQBIasESkRKuAIbmaAXOsQmu5svZ4t4kjaWqtoqhYMMHJy0p0wsplH4hBCHmtENi2MkSA0TaOqVWvAIFoQcYrycsdRPEYig7HURZxt+/Xnv/fg4/5XHl2VN47jz/3U1//Qb/kdI9X92BTogduuu0G1knLiDruuipnxfHbQBI8xzps5Mg11e371YWSfakfN0szbQLLbVHcnYK1Kn3pLkYMTkV/T+QmpjCU0UdlPXrv/YPtu7dfNfLHd7rzWlJK6ERE4obn/Kivgb3k7KZu+IW4yCyGM4/jee++99MrLTde8//77TROnOItay6PHTw8PT27fvHe5+iSXNQoFDo8/eXx8fNykxflqm+vIGEIogGglbCrO5/NX3rC/+sP/wT/68j/6X/7Lf/i1z9/66ON3Ay0RUVQDs7kQRRFBcnARsxTbOqobr4fVqP23f/GLR0cHQ+5rrbv1qsRWrqiZNYvFbLcdZk0g4sXsYLtdtyFcrK/E4cbB4TxGAXf3NGsNqrnHtgGHXCWGpmSYpX0y79qWiKTUSRjp7tMdiGy5oskYYwMqTeqQxtTNESznwRFFhEBTig6VmWrNkcnMSh5jjKJKlMAhxigiR0cHw5DvzWbDsD05XCbOy/298TAws+m4aFOdw/GNkxm1ZvbaC3fUhNPtmtcnNxZ3bx/KMABaLZUBmcBEAaAJ0YzElcmripm1bYvoUhV0REgOmutwcvMIff4f/dkf/+s/+ndvvhiattmsC+F+v9siKSOBmzvsdjsCvH3zxHQYhlyr7voSZvPDg6PdUC8uLsZxUK2qHkLMZXzh3gtt2549PdtstviNLIFrGcHzJ22yEXp+pf+N32Wacj+mUARVCxxUtYT8wot3DIRjEBFkElNmBjdAIkJTBANH9ynjihKhuxVXR4whBDWoo2WJguTAva1ef+uzT97bHd29sQi79bAjre
M9ZCkaWZLRarOUhS4lxunZzkcYRvzTXnXGs9ODioU73v8gMAP/fC1b29PW865PLA5fu2m83m6OTw8PDu6fMAUhLV+tJLL73sS568e+fkkUdefvLCZ8+fu3j+/PnPP/vZu+uT5tbRH1fue9c0nWZZbOrKwl1Op2lT0mE6vJja6asff/RTn35m1vbYI/d/9JMfnXkY9g5vX7/24IWX6WZ645vf8PGnP12ZT60uJctQ3L2wrKetMnIplw/P26mdP3/xZS972bPPfu7mrZdmq8ebjXPe1NYpLGF1UXLve27fvesXygMPPHD16hfu3Lq9WA5ZhioEYBx0XC67OCd1Yy54sVrO200E9RsX5iml3TSI2zAMewf71lRVE+2Cp8vqvLZqNvsuzUi5KcKURNWl5MVioaoBnudZRKgFoIfDSnLSbR0zrXKBjdXWhZgAsbSgFaEtEoY0zObRIFJam9zzdi7gCDLSSta2k5DAJ5ZwH4MOFsO0dNfVKGrV3ROEAXc00WQtF25zttCcxTXnRawWFw72VuvYTLPdXB9zOqiyPtnYzU8+vTq355Feunkr5yHJUljnqm2T62lDjExk5qvVYrO2Yoef+MyNIReiS1949sQQFy9deOHFO2F21E4+/sufedujDywvjGG+HJZtbaB7K2KEeUg0s1SyK7WtoQlcCS2lBZK6+XKxxyttN2V/WQZxS9Bm1JSStEOjTct2sG3PEF7SaevzXfD+173qPS/8gt+xW09f/7XXP/HV5y+trFDbIhIMURITc18/WAKEFmDKlAdWUkcwIxTEPJY0e7NtGCwJDUnm6mwiZiw5Iti0U36w8/nsggUAoCWK6AgWJGc40m42SZmG2VohCXajF+47fPYztx/yiTntw1oKsh5ar5tvemL47Q9ON49vDvuLB0d/xf7+Zrt+6WTLy4OxzHpS8sBE+vc/oHLuQfU1USFrVPZ8q6CqQggZS5lkIiP3Tdi+4wQi1OR1D+6lkzlaimygqcylSsYYyZ3JvfEsrXEED9NEbgpVsvFMu89dA3RPYULBHh679AtCfyGDSiPKoilu3bl969atAVmryzAACAGIwkkQhwcHx+vjcbFct5MXbvijF3Bzu+ayPJDlBlYWvKUIm0vhIuzYC8zurgNRZcnVSMI2TfOwirrJiwEKzyKJhUAirUIpMbutWY9eun1kOLg07kk62hqpogzFNLCVdHD1xdvuvhoP1dd3blwX4lTywOyu4bvwpnBu0ZjZnK1pzhnBd+/eunHjpWVO0/rUHYaeyRDuLokoGID+/+G96snMjCmOOsssIngXDLBmPpt39+UBk0eQBiKYkrunVNzPQjEypZRaa4txdW17I8nyxU8fC2dDvRPHnJO5P4MXhLlwUb2R00EusDjdzMeg/LlPHi2HS8/caYgrN26kixfOTe3VaaknRy8er29Z2FZZddw/uOBeIMzY206nI68I57md5+cufe7a3QhPew9uY2q+1eNZ5KFTPWzRPvrZ7Xp74WRzgrxUFB7yNE1tM1lg3NsDcOfF07Tce+ba0Yc/9glhmMY8n2pU9417jYiUmUFRYLUheDsDsfnM858C6cip3t1ENjeycLDMp9VhvXjv9odjms1rbzaZOQg5ixMckWPImVVpVkRIZumDr70Y1uu6rdq3oAhKyIlLwIjz5nSaRgJYtRIRcyitGSJtaZq0ItyZ5pLIgolCtTZVzqVW5+DEUqllEXgNA4iTmNo8FK5weCLKEQHzIZdps2nNSyPTmhL3zxPJTkcQc2yjJVXiZrwIAnRO9e7teYqZkTLPi6FgPTffRtZEMp0em0tAN8enTAtiZxqON8egBnKOkVi32zkLP3/1lMJOwnMe2qzsdv3oTgumNADTubx46OWP3bh9utnYHhceybenIQkezDuFxS7HLII7aiIBYmEdvGQPP/qKUg8++9J7GWtYmDGYvJFCKEc6B2VlTjenm5cvXSlx7qaezr5cyP2lntveIJmctgdNXbympbiGO2AcSq2ZK5yp5OLRtLlJ9/omAYml2WeRZM3IPC+o1olSMmbbOCmYKQXSDnVHEDlDQlt0xenOsEcI7vT6cMKASB4CFXKT1Hy9P3z8gYPX3Dh5IDwcc8UeZeaSZLt587np3RfSS4sDFmuRrx4bi46LLNO8JXbQfYfln//S8z+9fQgHBhuS3Gn1Qmzmd1yO1x1MU0yt0u21vZiWVcqe+tG8vbLCnCtvr/2+15775aev//KN+/lASJIWQ2rEohoc5cK4tkm5IiLYwSwkCKSemtbfEzfAo9uAuiq5I61t9/z13UMkgN2D+fDShWAqKbtaJqhNIZxkVNW7m+O8GjfzJGv9G3/0HUuqL21O/s6/+Mj5y2XUXLklmkqgIta2GtORKUfiohbUEgYNp5RyyZu765ddomvTTLKap7lrKQBQScQsU6t3T//kt75ucSF+8see+sRNunyxvOJB+cgz65zPO7tiMy4ky7jdbHmxSVG4Sa21LxgRkXIiyNzqMKxUNSJyGUXyNE3M+eBgz6KpGRgisAhQ5B5lH18Ecu2MlTuWmaaU2o7JTmYuOZlZZ7efRYo4Ezr8sg3u7s3mklPTLQtzTu5GGjq7CEzXIh5xRxWNwA5HsHMwEctmW7dEKZX1eqZ1rrPlYWTGtL55l56pbaISZOnpL7AqEnkZJGAlj1dvfp5i+fxVBK+dAF9kShKzhwbSr1ctaQDT565qEmebJaWGaApwEhC7EZsIR4AsiIhTDvP1nQZAylBnzXlUVclkvkG0RVm49u4F8xQi+eTOTJzUQ0rOER5OITsGs6t7S0LsCQ6CMZO5Scfqowo665OYoaqhSkzCpHCd1+vt7ZKy1pZZ+sLs5OY1hwWTZCYS10bhzAjNESEiJ0e3hmHRi333UHIhI9swGReZp5pz3nYomHmnpds8GYXnNKsxM3QLJxFpdc15JreqHpohKWgrlCB6dOI55zwNnCdTm6fIIswMGFkTuKTso82aiWyUFpYiEyUvJimbYgzEHASewaPkqG7W6lAOFsPijh4RUBJNdSpJ9vfPnZzc8EaJJSBq64gkiyjBrU3L/ZGRa9XFUI43G2iD+ra1F24fj+NhbRNLy4ladLx29J0t7rHn4B5Bgwx7A+aWERPF8cnplb03prQcBiR3GkJVc2Kh2WM4PbF1femJC4//6LM//hPP/NyXlC//7HO/9PJ3Hu7x5vbdm6971cuD/OR4y6M57ee0hjkJd3+A5BECD7grCXl4ZjaEbpWcNOBJmjYm5JGIBcEs2GwmEtRQAeXMSUS6K4S4+1aih6go6a6ijWByCUgQgVqiqLNkwB1BgCU89cqLz1w/PgSvJLM1hwemaDVZyzfbyUmEuAGeCexlM6fBt6vkD+zz//mrz/1vn38Iq/OME+TS2hILhc7j4f7m7q2HLp0ruPvIQxc+cvXO5ygeW8o5Xl+S6Y5tDx84OHnh9u26ZykLZYMC+6mtmZK3cuXg2Vdeun3reDG0YN9mltrcFK2tycUdZwyZrusPN9vVm7uQX98leUXMqXtc4Sl98tnP1kRETuTGJXMy8zDKefRQNrJ
bd//3v/Yd/4//7QfGC6/83d/6Ot5+XLiEr8WFsS/SdHNrceCBxuOyTZWDlBeqmlIAfPfu0evP7f/Z3/s1f+ivfv99D43gcA6vc2HyxkJMx7e/93/59u/5Zz/+te9625vfkj723l/9X//A28UWP/vBH73v5VZtJK7J6daN6dylc6eTzevp8rmhcyWx49eGR2PBPJ0OwxARZtWhJTPg03wisghnIgojgndGlnnrWub+uO+CsHeL962FilCPpBeRM0E00AcC0QU51k2eqabWfDGs4AgkIri5Ow15VKugaNGPMCJyCm7Nh2GI8FZrSljk4o4wL3np7stVAixggDHz3rDnEWnIWjzLznpjUYhTkpREFuGEVVDYTiFYFrGqNI/7A2nPrQ1SGX1vijYSLRcMV3gQBmZupi1FChKi6gaWIpmIGsWqeJ3n5d7ebJrzAaBmTowt7UJNm1pkWYzDVGuEzhR5zNq0pCTBWlsipqAK5Czh5O6ciwERUVKetAr66wZncC/CnAoxkTgkuRDCtCGlvh1xYUfHKRpRBrmRI1utbeAyrjKgqQP/1TKHelvkhIiNbXk1pGGxtS3NzsTipYeccnIgSmaiyDwieK5bI98iiDNBpWyVE4AUCrgsS1UVhUpjpiBXMNwiLKAiVLfbhRyybywySwJvA3sSbtjKJo2LNnu4JWQPbqJwWEkDqwdiiBJMrW6TSIt6smE1N12DE8jMzblQiM+RPJkHlsmYt5NSa17y7NNprQJuqGOiiKy8CyqkXfRKJGJ3FyIPsJAScimingmZ5Nr1l1bLlxb7Q7Va25aKWcOCJJOdNLDgAx/+8O9515/8mQ8/+unD98+Hm2ev/+qXy+u/+d2/54Uf/tSXveFtn3ru5nZxtFquGq2bFCTjxNBGHiPIXOc58mEGh4YzC3rkcndszRCBjFmyVKuUyKvvjWxZam0eYeHdoQpgl+7WbS0BZOe+aewSK+8bGaYUpAxmUfcACVeLawfLX3rVw4889VwxzXmRW90wZxmKM7VxbMc3cxkop42vc1mOpBdGdkt/5ZdufN+LV9L+A67HJIXcfRygDll99AZ9Zrq08tWQlnq0vTYfXBrz57aNWjs890Cq88Eat+fFc80wJmcCPGGtgdSWDy0/8+RD/+T41vGmejNyAQcbjGlBxWkd3TZO6L+y6OOXCO5vFBEx76iqEWGgapo1jPVkWwFmEIiMZoYHUUoKn1fD4s61a7//t37d+//L8699zZVv/dbX/Zm/+hPDowfZY86r7FvnzYt3jr7t697+qvuGv/v//omD+5cHKd1pLbxxMYUsF2bP2x/7s9/yf/7rH9zbv8S8Z1ibQFKC2mq5uPrs03/rz37nP/9379/Arjx8/9/63h//H77ry2/eivf+3AdXl3kxe+WZYnn60um3fdtj//bffORrvu4NxTY/++Fry/09YT5zrLi7S05dE6U9/hjo4USlFLMqZ2ws3hXfyghiBWDhnXQbHf0HDhdzcE4RwQFi6lbScAuK6KGbBD8LmlBryJh9KyAWMtOhlGatdfkxMbEQkUcEiYeH0KRVKIYhE4m7G5w51WggFCnWlBg5Z3VTV3ExmxGuQSRFQZyLhoMxN80sffImOak6J2mTlrSnU4VMBEfjkgb17ZCXs9cajYQt2iKztsYEhKmT7RCPpl13wNR8TEMEpiEhgKm1IQ2uNoRbqzkPbpxpnE9VZABLctVNW4y5zpO7l2HVmjEyyzxtp0TMzBrKSYR5vTkZ0srdAQ7h8J5kjPDwQTsYYL01zsnZQ1xVi/UfaAhYIIAZLBDBGFPSuTJLrTWlYknMDNV5mdbTOkUahv3paP3i3RdWFy4v9wGKYAU5RTClNqlwlsVw5871kmUx7iUaWGB1ZoKNo2vzNkasSyys3UzlkouykjvlPJgZURCxOUVwU623j4Z9a7rVWK0OD+e68fXNenB4wGkz3yz54sVlu7vRGIeYtKUhUTm6czvnwRFGGMpqnqZSxjbNQy73P7Z8/gunGvOw3NvM03rdFryEIxKOTzcsA/foQTUgnr/6Ip0WsplIAtmj7Vj9BDMT2SUlSEogZodXb7OuQMEhhQPb7emvF1tbVEkkJUWhWUU3GCG6kl987pk33fyVP/lb/sd/9b6/9cmb76uK7/7Bf/z4wYPf9O63DKdf8vRnP5XG46lt8oHmOYyVgnrQFyiCS6QWyS08+sIQXArBBQ5YA7FOrmYyIHHMEREZCjIiCgrmcGOQ7IzrRARn13ALDwIJi0giFoQ42CJVZIFHasGGMGmNa41fe+LgJ66cmxFiCKRkYzFbX071/mGzt8d7q+Gy1EfO7T1wMJC2H/1s/M7/bN9399F8fqE4xpCNRMlQW0ZF+O1NfV7k6e3mY7faUyfDXStPnR587rh8Zr33oZfkF+/gJ67jQ9vz12wUa0FTCjGfUc49cOnDr370f94rH1vTWhfDhuaZMFkhHiLYPVjOMLDuXdzaXSRdQ5ZSKinhDC0NluTInCWX7XZ++YOPLanEbMw8KLOWwGDO4lQwShrLfp6nu1g99gf/2o8fiR3i5ISmu0d3rh1P6rYE/eonb/6j7//IalFS2rt6Oj22Lw+tjlebsiA/fglf++TjX3jm1s88dWdYYb05Pt7coRTVG5impvsHBzmiHcsf/32/+4/9lf/vb/3mN54/zD/x8U98/NrpKPffqtVpUW/e+F2/6VX/0+/66kcO4r5zr7x+2yUvQcU9bSddb6qHSBrnFhbohGzJiYQdYeGOgCgnBJmze0Zjq+SeOQguFEzBAUH0p0X4XiHPvaEkTeJCQVyYs3DJXDJybpxnyjMxQnxnYlSPYbW/acbjIgcNJOLIIRJMhpFyJs5cChWOQs7ewo08xEOIvEiyphx9nI+SMoMaA5kpSUo5qmen5PNI2zHqKDViYsmSS3hL3mSebNBmdxNVpjHJojCZTppkg4mToFFpadTBt86MxjVXzkgpZEQWI+YUQdw6qrxRMzG2GZkWzcg4E3LKC49ug9KUQ7I61q1ULapwziPLoKpE4T65IaWSOAOcOIeTOZIUjppIiVtwdZpmTI1nKzqJz+yNnApTWGH2ac7hlrhRGKEj+2dtFi4lmXoEiiQCFouVE6Z5dqacc7WtjKXkvdPbt97yJRf+1O9/x5OPrtd3T+/cPjnZtuMJL90+uXayXmtVai9+5vNf9dZXfc27Xvfisy+e3N0cHx/fOr67ndPN508wOzV66P7Sbl77C9/5HeX0eH18Y3Pc7t462hyfHN++tT49nqdpfTzdun78zkev/E9/7pVvefmFP/r1r/2Lf/i1t1984b7l9B/+3h98rNIf/c2P/d2/+PWru5s/8+1vedvje3ev3S5lPBQ6vnr1m7/q9U/c51/9tkde++joR9fvG7OfTGNM55J+7btf7id3H764t75xYyEnL39oL8nG0tqxKaKFwBQQjEBeT1/5rq++c7QVC2FMrhl8Zjj4Ymh79ytEiNeWQmABATKcvNX1Ru8aqiyo7KXW1LmZTEOW4A3ILj9KP/C+f3lVP/6d3/7Xv/GNv+cbXvMV3/Clb//ar/
qKw/FNv/CZp17MH2zqLeqm5uk06gZuIjlTZhcigQxpNm8W4OQQNVLnubbaDCkxk4hlgCttTly4bFutW3VHKWVc5tQtc+oQATHC0VODnCSIIoxjZ3t1B3NAgp2YZ87soeasIJZJ+QNvu//ihK+5Ve8rtK7IVMZfPx3uv15dyU787pw/v42P3q6/emt5fV7gMIjQVAGB6w5dE6ZBiKAEIiPhYDdSAIy1hxTh4FNlpWEvdCLMxEtqg/ImyeHrD3/0ZZf/fsFRq4cLAKlVsn1bEGAoISZTNE0iO9EvJQ41ItEGIWL0My4IIuQIZ4tWYq22pL1RsMjqaFESIN4zFrk5O/Gw3aynafq5X3jmf/i9b/rOP/F9+6++/x/+qa/6tU9+7u/9kw/9yd/7ZecevvmRD949Orh/vHD7a1/9sje87vyf+Qs/9Xv/wJt+y5MP37Ttn/qb//nhK19y+6lnftef+lbmLzy+KOfuW8Tx5snXve5Hfvopvn9oU2pEJ9fW3tJqb/1H/sz/ceWx+/8v73nrxz/9gU89dQ1Y3HdJh3l4Ce0tb7zyx37Pb/qdf/wHXvboq37yfT8Dgcy3h8X+6VRee7lcvrT3Ux+/Ooz5IOT85eVz19apDDY3IHFKESoVXlamyhDiMHM4iDJBgkAW4kwcpD0dKuDG5ExMQPguWbRHGgkrM7urnoXyOBNB3NuZ+9co0Vy3KbOZBkUREUHEjh3YvEUEc++uwomNgojIETA2pui3CBGxk8OcWcRhGgCZAMOuFctpKeodKwNvTBwhjkwpJ9uAi3X+AjyIIzHBi5Npy5mDLCQiSJ3EB17sVjYVlQbpwr5gsBNFCqCRRybAJIIdzmDm/vKQiFsDeUoEZ+6GUvguOQTwnLsezLnHt0C6WyqxszOIjd1cKPdMzmjmMBFhEa+6M7ISHMTesWHEHkIURZzhFmNm87DMCJ5VmWgcBg4EdDQisI7rzTp/+9e89fj5z73xO/7AX/4b3/Ptv/3NP/3+z3E+/bbf+aYP/vLzbbXaXK+XX3E/7Hhvvf5D3/zqW03r0d03fcmTP/Bjv/yn//C7PvDhT//U+5//h//rd/34T354X2/81T/zjd/3Q792+eHpyYde98EPfvQ93/zO7/93n7p4n//ub3rXX/sHv6TndWX3yXz1kQf2H7//iYvygde9+pLO/HXvfuPHn3/xtl1YT9v1ev87v+EdX/b6L/zAD//6X//Lv+3f/auf+aZ3veLZBw4fe8W5pz5x5d1PPPP2tz/5H3/o41/1ntd8/GNX/dbJ13/Zfd/yW978gZ976dEnDk42m3/6gx/X4eKQpxWX4/U2JY6JM+UjP2FZ3X/h/qs3jw0loTWPCGdCIlYwIELMEWZOFFTY55JQamu1Nq12fnzsXL7/2bsv7F10zy5R/ETzKDa0BOwHu5Z5Wf/1B//xQ5/8L+945TdfHl9l7fYnPnH81PUfmcrTGz4FSlL4ccNAMuSUhDhINKyhIGcgciJya6FBEEiq4sGRKWozAhVK0VoEiFrZz50d4hFt1jRJBz+w9TA1AE7CiaMRyBGdNuOgDgNKZAGQkAFObkHgCI6Mutn/T09y+uS1b7x2mpHWoNX3fn76Z58ZAguAwgEhiCADq4RtYxHfZZDCQ5ktBN4YoICgp774Dknp0SSnaltQAc6FNhL1KJ4U4HN5/epL3/P4/k/VynepHAiSO6wKw8jcZlUqnhNjLVu3RYDBLYgsBC6GlsrsYLMIh0h2EsEAouU8eWKjyVBTKmFskJTFkDK2izbOuU0kF+qN//6Pf83/8j0/9/5PPPHbf9srXvPwA0fHd370I899xbsu/75veOvRdn307Acuv/zlp9fuvPmNL/sX/+mj912qX/m2R//jzz7/69deXN1/8ca143e+8/G7d/m4Lt/4Za99/av21nf5kQf3f+Z9H9u0pdr6PKe3ff0T/+hfvvf//j/+jv/4i9/79e94zRL6yWfj5rPxp3//20VfeuvrX/d//e7vf/AN3/C3v+8Xn/nI1T/9v/+2H//Jj/2e3/LozRefv/+RJ//9v3/fH/hD737f+2/95/c+9bf+zje++T6879PXv/vf/vq8uC/keCU+KYwKBmJvicjh7kaACPU7PqKTndG9bCzSuU47nV/0dWlfzvfko9Yr+nsf5OFwAfUxQgf60S5bg5jJOgeCOTrxPHZAHCbuTKskMLM+NGph6qAztlenJjODoCkJAPcGIDFFRDQ1EhKmjgvnPlbyADGljm132hEICE6UIMzuQU5MCBCId2DoMxqPn/3XdrQ7x25OAg9QENKOoL37mfTWkDnFLuDxv9Ed9fW0u445u3ffVs+5DiIqmR1dVtRjM9CsQ2toR+2KgFCAK9RgnarLthu3BQcFyEMJRj1nJLALKILBA5FlMLCbLmLFfPLCiyevf/lrfur9H/y93/Zlb3/Ny1++tyr7+5/49HPf+LXfsJluP/2pZ9/4+MUXj1rdpC9968t++Md+5dt+x1umu3H3bW977aXHbu1f/69255mT4YMfe+Hiw48/9/Tx7/zyJ3UcP/7BX/u2b/3NKfQb3/G2R17hODr3Ne987ff88Ad+5hduvv2x/Zc9dmmx2nvTqx99y2tfcXy0/po3PfI7/28/f+4JO47lhSvDD/3ER971dW/55neVzdX6ptc9efWFuz//i59Heu3nnn7qt37bV3726ev3XTiPefWBH3/+O7/rGx65fFlPD3T73Oc+eeervvot++Upp8ltO7UpFwmvRbKjjnv5lz70fg+3aAEi7puABFdHHxDmiLmHcEekeZ4pq9q0yvtQQ/Dx+ubDDzy2yOem9e06aZgvhqItkGQSlJK2bfPgffft87I4PX/3FxIOF2UPo73lS+6/fjL82tVfo+UJ2FNmquoB8ZaZZw8HJGVnotZqj3DMQoQkSgmkguZjSRbuyXhINBkmXlKefLt7loS5SErEItIz3krJIsQIFrAgM2WmHFEiRo/RqZZUM1VQEArLSmQPvG/wuZHbYvixtz74g294cMJ2CGp5EF9w7AUOnA5ClqkMKbuXaUMJxtalOc4AsQW7M6UEkR3lJiLgxCB2XmQLQ0pMynZX+BTksEPY/qsPPvHOh/7mg3s/cqw+YbuHVqTYyhtqNCSTTMgiLrwlFCxTUGESLxlDQpSIAYlsxb7MtCppkThnADGFb100Z1tKpJQ+f+1mK2MeUoo2YGpWiYvkdOfo9Cu/+pv3o8QLm1z9nU+++tVP3Pdzv/CFd7/x4nf9jt/2N//Nz77nf/7316b7b79gX/KaV/3oz77voYcW/92f/NoPfegzR5ujj33qxrlzj04vvvj173j1+37x59ukb3z5g7Q9/dJXXdF2dHDhoES6+9LtP//7v+kK+Wd/9Wja8DvfeOFbv+6JZ166/c//2VN/6c+//bFXTEWWOd197etf/vC5w89d/fjf/d73/Pz7Pv/kw48/uP/4q1/5Vf/qB371N3/le37yR576+V/9xHf9obc+cvCy//BL6drxxSRj0jvFm85MdciUwiYK7jb43ksxJyGKIArrc/k4+zCHWnRru8N24
BEQg4Q4pdS7WkbHSBMRJd5tF5lZiBJzoiSQ1CMXzj5whoDpl4VZmFmnK5zFCjozSyJOxImQO4mSGES8IF4QRqZBeCyyKLJINFDv1YSCdh1G/y6JhEg6OBw7akFKlHbIAdpJJs6wKiQQgchvDAMAAtYhbpypm2mFAQE4sqSOCU4sWZIQA5xS6XfkDqpF0b+4iBCne46Te4sfD3JwT2SMswlqz7UollMjbkjGOSgHJUAASGL02wwWwe7iII5QIwupltQHpwGcEIkpqkerI/YwrX3C/Y888WM//Yk3vP5tWJukB372l7+w2R48fv+D67ubj/763a//2reFnFc6wOLCnW36rz/9iY3v39kMn/j003dreuMb34HNdOPFO+/+ytc31k984eZRG1X3PnT17vs+euPc5Sv/5Zc/9onPNh7TR56+/uCVl2O5fvO7X/1jP/Pcv/gPv/INX/fOhS3+5v/x46fp/re87vHVpGW0D3/q6jf91nd8+hMfv3Nq8/LCs9fSRz92+11f8fZffeqTD7/uN/2z7/+ldOHhDz99/dc/Wzfj9Ouff+nf/fivvLRdXD1KpwUf/ewxFgcOJs/JR1FJloqMRCKSzx9evH7tRkpDU1iI8EAknaPQfwXa032ZmVLPy07EEeFGy9UoSe+cPM35ZLWQMpa0EC0qh9IGC7PpdB6AC2V4xblXv+78Vzx55Sv38fD5xcueuPTExXbfWx742v328nY6tkm8kp2ITzQpto4YSEawN2k1RyTFMmQ/ZFBLVQf4kEIG4oJIbjl0IB+gpHPdePR6WFIk+sq/DneQwANI4ARTAGhpF00jtGPmMYG56wxwFoSALggFYMLJQ1mWcTHhyVtHX/6po9ffOVkiMcGBgCuYkThiBrvQyt3D9UwY0f8dR5PeO0QYhDvBLhBQpczCo7YZgaQXOerh+etvuPz/OZSfCmszljlTEzde77fVueP7tn7rw59Yx+1ivvEqoiuY2sy11qisUyILa03Abqnz0PuFIkzMYIaIRBpTtGWStdb8wMFNreRFtvOciTOREThUywOr8fd945cubbNe8P/ze3/+u77jd9vmqR/9wNPf9Fve8w++79+0Tfsjv+s9P/nLH/rqV7/xtY/f/0/+4w+++w1f/o1ffuW7//V/+uXPHK8OL/Lp0Xf/+T98cvTC3/8XP/3kk+968oHNl73x1X/le3/wQy9N5y9fun31zl/6g+/ZXP/0crE4v3fulz95a//w0oOHcv8D28/frEH5rQ/uXbftX/ib//mf/42/8MGP/eylB574T+/99Hf9/jd95Fc/f+7+Cz/y0x/6k7/3d/z6xz94+2T7VW9++/d8/3vff/UOL/YuLZh825I4ldQ8YCiJzPusQET4Xkgx0KO8AfRss37KqzsL6CxOCNhlhzJzEHedDN3DafXQgTMUJX4DnQ0A+k87ws7ouyw7EEKveYmIhc50+mDvM4vdk0Ph8G595Ht6cD6L+mTmcGVm76y0AJF0laFZAxBMu3M2QGEMCnDbSWW7zDmox3N1FkfsHAO76tuDhHeEZHeRXZ3e5yQ7PPKZQca8pZTM7J4vun9+94JpT2hxnNm+Ot2z/8ldmJ9w342H7VCOARC6FmCHO2YI7fj9wXAKkJD3dxEIMzgxc5CrO5jCLSIKL4himqZXPv7IzeevX3nggdtfuP3yxx98YT6+evX0PU8+/ju/7Sv/1F/626/6kjecHG/vbm7vLUZwvnXa3O4+ceVlN49u742HJydHt3Wb13b+4YPTjbkSgzhIS71+rb76wfOz+Qu3rr/iyiO36nRyemsspIEUB6frO+f2F6MvbuixNZw72Gv1xFlOT09H2SvDUKfKsuDQaPOwWJxsJxnG7XbOnIYitdZxIarqamrT3urc0dFRykBZeliq88hLpKjRdApmQtL79h/8wsdfSGaOHtmnIWMmcCIjzzkf3bp78fwlLsNL1156+JHDTT350ifvG5E+W2+Vw3r/w+Mjrzn30rUbrVlKiSla2+6fGwLa5ozQetIu5IOveMM3H73Uzh8++OJLdzZx88r9Bwvdb+7vf+YX7tRrQznN2ZiXgTlEc0EpqT9gRKTBYZoTk1NrFhGUIiX2EK0GCwgbOQQCarMv87LWas3dnf7+330VAAWBSWHNKsDB1MIt3GEGa+EW5ggnMLxHP1R4DTMYCZNAzI2wbVaXi9GJ6bzrb3rp5G2/cusJEINFEUYIJI/CXDxtYIFdAgLBqUswHe0MeNDnoE5EMOeBrBXQFmWgSJeXn3jVfe+9kH9O4vaELJaLbBzYKidK+3lcrWFmv/jhNW7uKSpZoZYQM5q0OXljrdUbuTIHh9ewJDv9k0siIRYhEUkpbdZralnd7n/llWePb6BkYfIIns1UNRUkq+6FLp9frL5w7XP75y+b3z65ofuXLqzqhlM25mrFZI7qum0y8mLYX5Xl3ZMvlLwybpLGQmU78Z2jq9/0jgf/3Ld/81/5Bz/wiy+d7t23F02TIfTw7a99gsr6/R/6aD5//tbR3bc9/M7N6UvPPvP0X/wTX/nw3vk/8vf+TTp84PErD9+4/tLl8yMz9g8vf/qpF1/3qpddm26kmR++7/IHP/yLX/V1X/7en/7hi5efsHkSC4VvSQW0bwMzr2nNSVwtgqQTCc6Yf+bSZyu7O7ijzM+I//eAiIyzqArizk0U4jNWPvmOm/vFkOhdK0BAWEqp3yURwZR6CPI9C3EnIALUd90piIR7GGRH00aEgIjSrrpnJiILjZ5NqjugWzhFhOxY4YGen0zSGbDwHTIUZ1CKIIkIBgjCIKezxCWPe/fT2UAKRIRdlrrem7rkPHSS7b0hjLvzWep3X9n1w73DdftjbxosoJ37xOAktBM77SDBPTYw7S7g3jN1ooMhJNwIjpAuJosIDiJiDyIy34GdIww7LHB4AyERBxJvqo7jnp4cj2Xv7nrKq1iO5eilu+944xueefrZu1XHsQSUjcjAS1fjcI1Y+HyjLM+HbMu8usubzOPgMyiZbCX2XQzTTLSYx6ZT7JeU1Vs7VR5TGnPBxjWa5TSszLdEa1sv40LwHc+LppuRz2c9nX3icVlnHXihbVMkB9OmIWXYjFIyqILCfM3+QHhVb0wuZuSiyarVPOzzXM3nlZy78cztZEaJmnmKMEoJwZkjoZRy98adS5ceGMblF65+9oFHzlXbfOmTl84Py0+f3sDe5tIVfuTRw5OTk7lNzcMIi9Wo2uZqFxdLRNUa0zYQcvHcxXluiYmWQdSWcbA53dKItpmXqS1WPvNivTlOGatF7tc2QzgQPAZ7JbOeohbE7jlh09QMYWALuFLitMiGaKfen8DMQv/2u39LBNw9ldxHpdwl8W1y9945WlBQFxBypeYWFp2ciV0cZZiaGfsoQ52aZ26yatuypGL0sGpqXqpTgA3hDuGytUE9gnOFNGKlVB1GPHvyIGep7s55jiAWJ67mjZcedDBefeDw5xbj+ypuViQJX1mqaT0nLy4kWmWZ7GBpk2/9gx881puoCpiomwJ0wtrYGltFqwRjUw1t4omZhVhERHaw8ogIo73l
cG5vf1yVg4cP3v+pXzdZJc4Oi4QsyapJySmlqm32eVXGVOdIyHQIMW+nXpbUtqE0DtncMZA72QSGDStsbeC25Uhpf++Fz139E7/jrZdTXj1w7i//w/deeuVlP7bMJWLd8sH25Gjr+f7VIXjjKR/funPAB3/4O1732ode9o//w/t/4Zmrly8sjtuU/cBwlPSwsS6Tt02bh3GgNp3o6vy5aXt0MOx52zhIGU0yC7KD5kQUkdaWFn3m0gMk+mHNHSUb3eywO5DRJ91O9w64nRkSgAdJcldmJtrZQPpf23GKzO+V833CwN7NIz12dPcnichtZmZ3EETVenXft5/gOIu8BdAzeUhsJ97tlOczjx7x2RwDwXoWkhfhQSnii51HRFB0FeHus8EUThzcr657h/sXub6xS7vuhTYFwnc5rj2DqJRxBx10coQkMjP23YyFQW5nsY7CCCPsvALEEYR+R8KEd1Rq33GbA13F2r9HoSSgaE79VSVVBpiyCyJaeBDEIapgCrD1ewVGAQE5QUDamIeZfWhoanmRGhlZhoG42TimO8dtudgX2qg6pMDTiKatOjONvqljLhv4ggPs5knBhYzV6pKHzXbOKSScx7KG5hi41bbICM3zEFSVK3zQ0EESzRtepHUAUzsYL81WKWL2daHCObVKMGcKUBMe60ySnAKpNG3EkonCmofNHoO7S4Kqksc4jq1WcnJKuWBJB9eeut6mLSdmySnI2KNVyiFDKeNw99rtg4MLw7i8cfPFi1f2K07f/IaLh7z/+Xo65ztXXlFe9drXfuKTH1ntc6DW6mMa6rqfj5YFTIOTGhtTJktMWycMy9GniXWRc6aoq+Uw85HkMcI4ETO3ZtxQjNmDKILEiCqZZGi41WDKZg1ZmioHRuJo7pGNqFF0iCYDiQrcwg0aJubwUK3kOCnm7l3FCwOChDjC2ZKZhnGfCVL0Rl0SrbzRCaVRtmQzn6acV4PPG3tRwjJ8EZZTIuRm4ZUuELl7F6gwJyZWV3cXNjODCpyYEyxSSsxMXOukXGze3Nocn7o/YfpKyS2nOlts+USiDMGETfOszDRjr8ThhXl1GcHEcwpt7pjM1ao5uxZTKpm0zq7Y8hxBpm4Nbl0JyAgaD0b2mRTMusW1118RsLJtS9BpSRGztFqdyQez2RbZDVPSlDjqnY3FAbRuZ5XmWTab1Mezzhix1+rJXWX2NWvRsU3X7zw4Hnzpax/59NPP/8yP/3qzxfq26ezNTygXO9kseFiI3ppO3Z0zpjub/+4P/eb1iye/Mr/0sx96evHQ5dvHSgzzrQ4rSXNYEc66ZG56EkJ749xmznysJ0AOYWIlV4oyt1Mn5DS6cjJnIsduuCuSiMLdWVofPgLoqcUR4b47gv/bMYuDkVI2w72TF904HQFi9vB+ZzATIZgA6ZGBjgD1HLc+vSGmIizEMDOSFEQsfK+l6wvSe7NpAEaNdmYtD44k0msX5uIOg/d8AgtmQbdm74D+3mteC5IgoyACgokpOXlQMCEQwruSn/pGVL/YhQTBEH1FISIWSpKE7ExWSyw71HtKKZphFwLCu/jbHhrjjCDuxlTeBW4R7bzTfaMKpi4gAij6xhtsARA1MxEJcHLp0XVOZ5OxTojKZzHJYI9wEHG0cBGBZix1tjwmIx3TOKsSiS59ZUInvI1tOxi5xUYphLjCLJTCS68uW1uNJDaoziDZ2rygRVQisQFtDl6OC80nVQtPLQ3NZcwRm2YlqYTMpmVFqboShR/L4vx6VhnycqitHVlOA+elDxMweCw53I2G0OBwFSngxsymkBycos5NUpEyzKqFkvsO/hymhSQnTFnUZxY36Gp/YaGcErRJHiMxCZCJOOVh9KBJNUkh5PA8lBUUZjDieUsXVo8m//SYeVMbJTFKwR6mBhqXSeBawc4ULUlyOxg4UGUcGIwKcKY6uA2FgYEKqrZTJ0sRUoOJSLCFIxdOQpyiabTEzDJDAUjilJhSstqsGSEPbSKhyIzgNAgHE5fS343E3AmrS+ymhwDMFF/0JVr42JnZvUnsNn0haJKkhnkccH57n8wG9sNLXkPgEU0jKDML3BDNY0fKrmcYcjMmShxndhsC0DMrlBkULJG12T6dO/RLbuBswa2ZpSpBjyFNgrk5mVmxjS/OyzydH2IB817W8NpZVN2ULKI7FPb29kw3hAmWiCjMWmvNAmBJqZSRLbW0LiXrts7bbfCD6kBsKA5k8OWAzElB1mpiurk9crXKl9RshO8tcwafJhknUbI7d49TGrXR1o4kV0lXTLK108VyaDM3j3qKxdV4uaWX8q0/9uXnTVMapzJfPMWRRZksuC23MVGMEnFncbB3fPKOL3vgu7/vB7/qdSviuzYtvOzVtg2vESGC2ZsH2+LYfZnnuqUZZXWQFjc3J25CvkGkzXaTC4VG2JwGR5uC2N3VIyJmJnc1s5IGpkTUXaeE2AGqQGc1uOxO+U5lIyd3TSkxk9nZSCTCrY/mz0bh3M90T8Ei4gS3XehPYo4IRYhkEamqvdIEoFqLlN4y9sM9zoIHQQMQqhrmKaXMO2T/mNVgLCBmM4R3fY/2+QbHDoUGIHYTwT67R8ADEWFd4lmo3Ju29282IgjSdT/hTiy7psHEEam3HURq1tcYHbWfmOMM5UQAIB3ZKZRwluIbQcyp59F3SmuYGwLMIiBnV0vGnJMjNAweIpJztnB4LgFDGBuAZEHCnjhmhUciZrCFB7MzKTTBOdSCcgnfpkShSM7glG55DFwXGK1g204HXnhIiBOsDIiWZ5obBWPPp1NPOXJRnfPqoOkmpVKxtnp+pM2xtIEWFNlLTj55mzeS9pindUx7xMrzuq7HFWvKSh4ny3R4NGskBxAm1rgypYaWbIY3XbAZXJEqiGziJEaeo6qippTMWXUyiZiqYLBgiLl5BIugbU6WQ25B8+R12oJJEiWgTmsJp0Q6KaWpzW2awylxbI+P5HSeju9MkejWrevlXLlxY9vmW/urCD2B2SgoPOWFPfjoXk6xqd7avF+Wp7ft5HjS2LY06QmMYrnPQ3Fq0BPyKRYrDk+baRKCCLtVOAYhcZCPc2wHSSVLRuFIiUkCJS1dgyk5eD1vtmHjgoXMcMiBMA51eu8//d1mtxyFkhCR6AAPSy4Yap049aRLTylFAB5gSUH9AXUh4WyqAnIW4lQ1UqeYGEAZnKXVnjvNnMx3ZYtZdPYsi+xGumcTADUHBe+WSbuelASzspBRKIcDMJAhgknCIxoh7uXf9Amp9Auj9e591zWbKfkAV/MNixMvtQlFVUSybOHOrakKj8TK5IQR0hCsbiQZ7rVWoU4cg4h4kPWcC8Fm3uScY57NGWDhJIkBZwYzt7rViUGNKADW0GFYiCxGxCZ0kIQ6nc7b4+NKSVYH+8l0OZbEpKok7ISpTq6NJKlWIlkuVtvtPE+b/f1lLhFznlvV8GaNyIR5zEUoTUAW6Y6tqdZmSizBtJCsEsbK0NQolCaLDSg0wBHkQ05uip1mwC2gzWsfp+y4mUFEVTWC1GDOGmE7UU2wdJvYLputmakbSCabYTlzCVd
idZhRasrmrZ/RPYKqYwzCHCWZBZxaM1BypGAxxJKShmp4Hoem7kTCgzt4rma2WCz6FHuzOQ0m1bq1BhYEq9Us3GrNOc/zrByZxVRBrAhidBKA9XQVAEgIDu4xlJRAZrYLK1dVM+bEOaEBRBq6k0wGRMQsnIU8hHeTdHcX4kRcrTp2s3IJiMO7ZzCimYH5i+nb7hGROHu/JZL0LiEIRijY5b8wJ0f0i4qJONCg3gdoSiKiaDkLKZkZ7/oFuEUwkXCHRJQkZq0HrvVwGIudPkr6hRBnhnU2ACKZILO5I4R3gk5JBA935T5k69FMkRkm7NCaRMJBZ4GIu4xmsKBfoA1AGDNzn9Rh16tpqLGit/Ld6SoifW/f53WqKpSEqDD3TBuFJJbdWqhv+IkAJHhKecjLmzfuUGfeC5nrnpSqoWhUOEtZFjq3yCVKKjONQKkXL+wdjMv15hhDDOfGc4cYUk59+peEJBZL2d9b7Ml9ZuAe9x5zrVNKiZGZMWSRzudI7OCcy6IsVpJSKiWPQxoWi2ViYU7TNNXZ1a3Wpu4W3lntzJRssVotFDE3vXXn+Pbt22bW2pywMLOp1Vor/Zd//Z4bN4NkKcLzpCKZUji2ROS2m+gRCcDhYE6qKtT3iRQECENBAFkKErOdqKBTdT1EKPcZLDM7cZwRAcWlb5Osh0z6ji6bUupAqy6b600DhDmI2AEH6ZmcoP9FRijDic7QbR4EhxPMd8K+wBmq0JwmhgincPMAYSC22rDITS1HmJMmyV0OKFyi9XFEnwNEwCi6TCGJSGuNu6oaGrBqNckYzhFydtNY72wqaoqF2hTQoGRmJBDJYUXVqRmgGCS8kjWjbLobKbs7sXBOZtZaZZaUyWwWEHPq4g3V2vRMgMHEDGFOlAEewty9/xh3BbeZIxJnEhiC3DKxiDSLudUSGbsouD546G+4awvt1Exh5sQ7USOo9bEgNKyZdpdASmVktr6Z4QDZLu7O3YIEXNIiMbEgwoMTgms0VXXqFH0uKUtPtrXSN+5GsEC3E2dJw7AfhG2dU8kOTjJEyP7eYWXKOYvIclyo6nq9DvLtdqvro9ZaEKZ5nWAssd2uc87sZk1dTVU1WoQnlkyomPrRE13zb9Faa6ZKiRlJZLcJJnKEqoqZendJdBXjTk7nXsMszC3czBxIzF0QqQiPjjeiHmTqLBnRE88BaDgzdzFSoEbsFA1w69YDhxGGPt7sv5cI67nqEHZYM4vgaLQLAxcIBTOYHKTu7kFuoo6ACgUzZ+EzIgUAOKm2MKV+KgpDhBKDPRGJBDe3Zu6wvrICzTlnYe4RUj3K0S2MQoSTRBIkYpG0e/eNIno/BqJIiVMWZhCcOEWnA53phYRiucpmwcxMAnBrzTRyznCSFCJE7AwCRCCI5NlVlQJZkoj0mON+PZyebNw5nDqJoefFR84nx1ObJkSLiIsX9x975L7lcmnmZZFZ4BVZFsKDhYfAu9KagxmckwepKsGZUt+ziMiQhYhyzl1PyRTWNCKCRSM4GB55pHvTEYKYKnZRE+6+Mxif5aY5ALaI/vsltoCqR5iZNqsR0aHB6amnnrt2/bGylCK+2TIviBPBGFyY2ayBqUu7AKQUQa2/1MzkCJKeW4SMbYAdfe8EBEGSmheuwd0/wT2dt+9i+6FPII+gXVw1gqJPBjsFjiDqzsxAkupBodyMPCKgjUOFoBCGUzjtwF8ebhTGkVQ9EbsGItJurRdhicMzDolckjXbqOsg462pSnLfCfV6W1yIxNMEYChjaw27pPZIKUmdmdlAKaXaGnGkxKoAbzrICehawN0ukb1DP8BCTEmwADUjx0hDYWFojJU9s2QsjBKtyNUSMTzUiFIGPLIFZQAhIZJEsru62zDiLHyVQPe2kRLORHNEZCYRcYSIzNspIjQzeXB/gIggyORl9y9077UIZzeAiAkDxQAAXeMY5v3pZF4Q4EHOsORORP1wN1NySnG28+NkRBboOdVGDJG+CgQTMw9IqMpJOCeQUC4OuPtSyBAKyqmooZ8XLAmpBGIvpeZROAPsRrPkocMR87iddL1eD8N9IhQ+T+nCspTtPO9JMGt4G+ctmCjcm3K/INscpllSuC4gItIvdVXtsVAMgk1dH+zuvbw091o1EWqtZxHTuxEKcyo+u5m79/z13sdQwKn67ucJOJl24IekjgiW7sPyewloVYsTukyfwoU7FEmzJDNzNd+5q3ZuAw/ldKYkNrS5UreAySrJjhy5025iZwnGGaSMiMxaEolwSHPj8D6rCmES2aEU+jZOu7IoAhGAZ85ZEvULHyCnPt/zMBERspxld4o5EZF2W1z0HQxESBIzw3SnZO3/fl86MHMy72ses/5mydmUzIMp5xxASjvFlJmFZ170EIKeOiu9iOTsq3LBLOZmrdlq1WtKmn1Ly3HmTYKXnA/2MxHVWhMfxAYkkZmZelNrxaNxiVCzcCeyFpQpwg1l2UqGmTEnQpjaPJ26625V2QFWSUQIIh6ttRT3DBwiqo5gETEnDQ0yYSLe3fpwwKDaDdyROcWYzJo79Cw41xHp6Or5VK/EXLZxrE3sLtSb+2G/c0zHYAJobpUgAIxW94RcqkpE3PUGyd1gCIDdnSDMqZkOQiJkEGaWLD1pm4hYWpwpdvvqrOsOm55KQs7sriTWhYmOucQMgROCg5kTk4CEwvPUCzvuMjx2hhMCwS7ePAwKs0YUFMTBPLtNYIowrZU4W5LN8QvvfMtbxv1bQuq6dEdgAtgNioU7GOIWEQSS1lowMaM1Ex4ioDb2nqa/MACHc3+8+sDaHeFDU3DKxFbNKLJjAsPuxpFtRQQxaDgZFS8hU6NAxJAEYOu2RCKGNz91Qykj4KZzyswcIPWmPbKyd6ycEnMPLzJ3gMXhZjGmUueUpIwdANrLeY8w7/jCFr1i5gCQckTrM7UeeUFEiSUiBLmfU1202qfPqeMBBEwswpHIe1iyBIC0O9lGD4swkdzL/K5JCagkJ+lfCBxs5mZh0QPckd0TwkMNBkjUgQDKWcxSSji72ybUlNJ602fubAoXbvPMUlRrIrGqgciSB2ehVKOy9KWRl2QMTyzmCqJOWGIgDcBiZw4JznCPMITf0/wsgeqTmMmu5O3yTSIi90Zmeac3i+jAtegCTnAX9bsl8y7mIQqY885yuxuPRMQCZ5rRbjIhB4eFm0bpy4xe2ZzlIKIyhCARpgyXQUUoIpACwQjJwU4AKXX1vwZBIqKbdMRdhJihZ9IJIHb7leBuvU3MYZ4QXV1mpvDgITNzZ0gIBRGlMAAJqV8kLGclahATrQoI0rcRzNw3F0AMEGa2M7HTWdREJBKnXfJEP+5Bvb8xgIME4K5q7UU/9cD33mZi94ISUQ+umlpVtdqMOU3b1lrLMzbZQ60MfPn8+cuXlvt7BZE0lSIRYTlngJvVxOQ6pzT0liViN+pgAeAtUAYxa0RCKAj2qEQRPgUJkXQtVH+hvKn07wXhoczMBDMIZ8kmtkvgClivjzUUTOOiEJE213BmUiYzGmLcnTygVPXccoS5NcYgKZQiQ6MwNCIiRR+2CCXhbG
YU2WAABcLA4UHM7tCa3U0AN8AYRA64x0ZXzKzeDE5ErWn/Lw0qvX2ICPcWQYAyN/ISPXpcnShA7uSqyliwEMzNnCkxs7O5K6eEMD673okoCYsQywYQ8iAqAIh25EKAuKyTEZgTFpSkgZd1+oav+UAqL5JUtAMAoEpE7ghwL7VSZneXlMybE7LncEppcEOgSyf7r4cJQiQAgXfjIPegYqpgLCAGX3GEou3vn/8v//npY12lNCSbcsDYlCRQyQ5Abt5XfOIR0dqOvaJNed2f1GhsSkTSon91EuJmjZn7O5GYm2kEekibDmObaxJZH0sIqyCi40/YHNU8I/UxVESUJBTmBBJQLCKCgvp72F94Zq7amBNTAri/dSklEdpwTixkzog8Fo+Y4TIMTKUjxjiloJTz2N/P4Lj3hg/D0GeLpaStNSEeU5adrN6kZBbx0OVyOU2bnFOf27XWhmGQcCHuvAQAfW42t+qtmrOkYuZhNm/nnLNZ9Ilt1RY9X1drm+aIYE7qZ5dfRBjI+jtjPXWW4eG208s6MtEAmBmdlU5EpKpcBE3hhIieqR0ccM007gJQ4E7uaDuJekoB6xtn8iAKhCOCic2sr277FeGBIKfdodDjNQJM3YFA2TyCWayjwsjNmDnBtFeIvbmLKN7cHanLSTl5d8pwoAV458wKBtxid9+AQE0jCZGzhvevzyHMZFPvoZ2oq4M8gkAu0rc1CfCUuT8nHDnmykwuRELMJNyfK5zsLrrfIEZy7YaEfn1qeFdvcYAZXosIgZQ53BsREzLR2GwDgOWLYtxuuiAKVW3NAEQkN8xzaKXNFNduns5tXoy82dyc5oMrD1wWFva55Q6FdkEw5xjK3DiNQeTMEIg7iIIsAi1jZJcIQ0Sn8zOBGaXsuXsvyxEGmLAaWxDnLCIUwRHUERTDkMLDLIgKcQCZOEVQq+ox5+6jN+8wrKCsqq6DmfU7LN2dz6/rXGuKlIkIs3PSydGTKnujdzacgWrlGoboplUChXMQ3D0oOusjArDdzzGIw7sjtkuMIeQkICKjmXpz5RCBpGJmTWtOpgHTEBYPQSQODDJMtu2Ij3B3OCi5h4W3be6BSl22EBGACQVhr4/mewfaJWLNlGNUWAHSuNS1B0Fl9eiSlod3jo6Po3miuavlBAPAwbU2M7MBg5mhRkSUUvoBut72VwVmVkohp9nXzCnO0OfEIDAFY+MiFJ40NszFjSy2Ig//l/e3l/wtvDcu25RjgXHiRIhVeO0GTN9BtBERizFR2yCYCERShgGAqi7GJaj1TNQ+MuuHOwBi984pEQZ8sFnrTIxlJGeyBAApmEicxYiLryOCOBFQPTGROZiZ07Trjm0n+u7DtJQH0AzM6KEawdQ4GheG5WwOStLCyAMuwmljtUtomjpQ1SmlBKKCVYgoEwlIxtpfxrKq263khDQyZEgDIogKO83WQIeltJTY3ZaLRb8YsutyOZrWeZ6Wy2XO+ejkxAlpHJr63JSZrenR0Z0LFy5NdWZHM53mxszhPm+nzeaUk+hmM3B/Qq3WqtUAZGJFJdOw5q4wJ3NTrbUyJQ9TrdzLaoCI5jZzMzfuaajuGhGMCGPzuxwgEieYo7lFQECBDdz6AtBD71kKZjIzI3BiFoe7E8OBZHk2d3eNjqzpe9IwtoiQzKo1XMmpXw3SJa33CDlE5q6qObo6TnbZuSLmyiDrB6qngAEOsh1qMDPvdD5nluIIAZWBoy8HAPn/lfWnQbdmWVoY9jxr7f2e8313yLEya+6q7lJ3VTfdzYxpbCaDEAgJEMY2GAUOS9gOm5AngQmwQ8ZgZAthIUcQKKxwhIxlRdhS2GGQbIQaBIZGNAESZuruqp6qqisrK7NyuDfv/c45795rPf6x9vkyLd8fGVV5b97vDPtde61nPUMZDy0pxADMvJFoE4VZAVbkIvL+9cBp5lhP7r0SuBasZs2crJzxyoFxhQDjthtIw9b6/VNPmpLeCCAySbbWIM2RE7P3XvBDJQRQFjl3jHOc97lvsZ3P8eTpPDyww/Hxjfd5GuzmuBjgSaRZc5xuaSmbzlnQgZmZ3wzA1AS5Ga0AaM4ZDRS9vC9YfnilXWtReBqIrE48p2nr/mDkIKuRQI3UuU9rL2inpEaAqakUM/DsHLNobLBmoUkJF+RGZeIyIyJyybjTIlLpZu0SGdHUsqSqNcHFiBRLhCEFjAow1gFjo6dlRgoySsyMRhNC5YsUV2WH55gaqfQuVdKxj1oDKltrcyDNWD0UbEbW+RF3pAwQbClLpFSG7urspgiauWRyKk774eaxxWWfcdiab/HBfvrsp0+HW873HO7JaVbMskH0DZgRxedbD86QJmkPxpiZq41tFs22MS7ujwq/QjmuZLmX0Du8Z84DZAYGordtXI4/9uU2Hr1yeXMcxk3DDQ42AjHZamdkmBmj6IfUcfOnox8PDzLschnuMocm3Odxu7lcRkTBdu2+3/H2oXuXBG8it+aWBZWzxPSUojXrB8vZA9MMpKiLu2eCbkMfmFkF/VzRGEnsPEkqSKG1Zt4L7zsU+QpyA51ZPipWtBF3tmLUeG/Ne8r9uNXHa+ChHQAgeTgcmotu/dDaofXeTeVFxO14czxux+OxpFKPHz16+PBhRLz77W9+6hOfvDl0ZEDvZcrbdj6f1W7r4zHDvu9QvPmNNyT13i9jP+87TZw5xyVi0i3jMufMucLtIiJirrVNphSaA0wDI2LuQ9ipqKPIqxPZnPuK5ExNzYhIBKHMKQwTDA7jEKZQa/+mXQqTgUplkeppwiCh4l6GhIxUBjT0nBQbqx3NWlpH0g5JTAx5JPbWgOLqcwIONKXp+lLbZpZNWHZl1dMbugGGaRUQAQMXdACgQYg00u7t4WptZn2JDGi1fTPBzGIdgg2mLNbLWiY7rmrn+sNEAFAWcK9MZKZkJLpRDZdR0YBOeo5QoLc22iEzIW7tYGbCjBhAMg8VMODezCwDpX+QHbAe7W0hzECmv3s+/Owb890nd73xYcfrHzt8RtvNbdNpdPO+kRTcurnBO9rOMxvMpxkbGBMJWds6m9lFkmm0Dje6e0zDPPfeZGqGhXW2hu5HP1SqIk3N/LC1OUHGzQFjDNLHyDkyCfc2RlgTya31VoksictIpT0PzIEp1n5iqj8I3mnfSc829uGGFkrSZCiCgEwzLpPZ41jSC0SaW02lRo+IAm81RcBEAZtxag5U4mzLohMYFVnSmIg070pc9n0qzfucmdIY05wj1Xuf4zL3gbBkVk+kHJklSXfbIGVkKk3mhcAAEC0y3ZoiM+daDNDoz88jPUZsvhlD57s9H76w42Rxtx8fGjMNPbW7nSx0irvb29sXXn7hvSfvjXEisjJmQiDxwssPIvT8+d0UMgiDZhV2o7N6GwUyoZ77KIMT2yMA3Xp/99tPnpw++erryMuzrR+A57LZddsfXFKmSDRrsBwLdd91uWmb5t7Awy2lMeeEN1NKJzK8FWCCzFx8uJzVIkGEfO4iOYGL71v2lq5k1EO8j/3ZHfxmzoAlhTGym8205Yx1bcSQqjgNSfKgDOopALv8T
BswcLcMFJVozuthm2SbOdm5GbN3JQLyMU0WNX7NkQBprTq27lupSXs/7DEjM8h+2MZlBT+tO8x6Zm7bdjdHb35sPs+nIsyYtZh69dWD6O+9/wHcxvlyPN5exogQqVC2zR1okGt645hzFIdN2cxbNzGr705abaDLSc1AAyLCj4R0aJ1KZcIUESCvyT4UkAjUiplJdKaQO66fpEGdRvSZQ0YgwXQwMyB5EmVwQyaSprAZSuaGNRpLWq2d0VPPWj9MZXdlOhJEj/ROrx68DklMAtbajZsAmzlI1ArZ3c2aVeOEubTo5lCTGAwF7ZqFAPrWezPX2IvsyyKQKGlyz5Ma0hBqRorsDWrmtSxZNgwr+ARJLX800si2+nqJpLeAH4C1R+u+KdIUbbuNGM40H0VjE91sC10qwoEMszLS9Mw8crK1MeVbGbi3ckl6+kF+6+fat55uQdzYJG5eefHh/gHQDrdtu5zGdmCaPpjzoTeJsXkaJXf3g22ZmZrcOG0UPEvkFmrdbuxoNPQzegEycu9mDfS4xulZM0O6582tRRI5Dgdvvbn3OXPsGZB5b8NgYeSDm80Z5mi+PT/NVOfd8zHmDJLe2JvGnUmERSTy0IkRsyCOzDTjHCPADFgRN+fo7kGdyg9z5IZMJGBMmptMoSnxNObMcDakgEFBUO0ckYN0OWbshBNyIed5wmWMlskdyhkLkQjIDAamAgZYUXOQF5BGEQSouehHXRgl4y5WgyiyZ6YR3cR29DjNnf748fn9r7/22n/1+PjzL7z+7Yj94eO+3z3XNKci71598fVLvvFH/8j/47f+M//s9//8z77/9s/dtJ4OjA8evfTKf/Af/vUXP/HqD3zxxSMQTpeef/Ds8PiQfvDc9yff3n0L56aLRGqP412e+8Euz/X09tVPfvkf/GSEj3ESjiOSqYkDbHaJuQn7nJmI+12NaJ4emmkxZxW1LWP4xhE963MBI1OyCgLHKhx5nTxEMyBvchPLmBPXpq8HG5HWNyPnnG0zc7NIs3bTbN+HtXJJFADK5qipEsZ9zt3MBNtnNG5+21S8KOCwtbKPQYNvlqHMc28ekQ+OD/Zxvn0wYHKzMTJCM2ZbcexrmjWG2eUgzywfomfZ09jLtVEKKQADrHujPPYEsM9T5GgyML/5Zu3fEEMmjPMTFxxAGxs8JxJ+kRCwUZz0c2stc46BmF3CZe6kPGiW3pDSouk2mnGeSbe7udMMzIjhbhEx6mI0U6QbzDjnbPT0PYOgitGubELSLopjMTrGGO7euu17SkoVdqGKwywWDWGAjbn33qWoZ7omxczOKweteJnClEazLqG1bc7Z+yH2S+Zwz9qnR3n/qBPdwcysMFxj85LIpoAElKxMm+FeHm3E892M7nQraSXNEHO6O1J0GExiZFo7eNBkJpBqHbQgfUxAOBy30Al1VkEXITSwWPNPJ7bJDebmabkngObkDhk9YQggK4VArgFunjQSpkhMwQBnuxMtaU7P4QaKsSOJPXKHjgdEnmnWDpZx3tVvm13G+bDZnkCmmd1p3/vc9FDKGWfTjLZ0oHEJd6cBuGzdYV3ErrM5WvrcZz90VhZBN1N0M/dGxGHD8dAV4Q0P+q1wcLZp2bbDnPPBrY+cAC57HLdWI3Dvx5GjeXvoh/MpHt8+OF/20yUys111fZSWB9OixN4LxIXWuiQtB1badkjJZFvmTKXbKVEPY63RmUowQgi53UDL/qmMOoCA0qzXU5ll/ihDxjquQET03mktM2mMSJdlpJpHTL/qsidTV7uSzGReT/aMkkjoatV0rXKy3i+XvR2dlBGn0+i9/8pf+8v/07//N37sH377C1/4/re+9ebHX/vY9/+8Lzx98vbDBy8/eKX9qX/xz/7wf/TOb/+dvwHoB37Wb/qTp3ff891f+rf+7T/1v/hf/+1/4b/5+3/rr/mv/Y2/9zf+lT/2b37HJ3/pH/vX/8Bf+2v/7p/8o//33/Jb/lv/9d/zC955+6cf2Cvn+XzbNoW140U9W85UPnj88Z/+uf/z5I+LNueeGUfvzBz7bF0Tz6AGmFHuyUbJM2zPUe+eknsfY27tkKmIKZUDUK1DsOy9tNaBCQKk4fqpTDc380BkhgLe2JorFHOH93p0gTR6jLqAW+yZUFUop0ukLCbgBDljmnnzRnLf9zVo15VyxU7P5+wtQYwkxJHPA/vc6X4IKIKRzLSRzJxJ0NOVBokGhYxOIWbSDGVEVjyTWiqKdjb3eRnunqHmXuSu3kkzITBnAubN2BSZgURIIgMIWVkmMQhqBwFTzAGglwrVDnPOnGqteS/mqOjmDBHCDlyXgmaSjmRSFLJuXqN5gxkEs3IGyKvzr9HajB2sY59CzMlaBkqljKVyLs4MSTNp0EYqSLYOM9aEUTQkKZb/jqmcgJv3MUZkCqCjHbY5J7JKHbyGjLUTNjDdCnXJa1lY64TGyiFu7n4lwJmZVc6nlDBUdKXWHq7IRkWgqI4tJEMLRXMcK1VV0DwP0o+TCQhII4CxfCZEZJAnpFcoLwFXGObdM6f14q0Y3bmHTmOqEwovc7migMF9pSTDDOFZ2BQbWutjj5i87Bgzbm6206670zxsN6dzAzK07rjWvdHmzHM864cmJIAZw2npEDJH3Vs+JgQcYIT1m55x9tbuP61a3bsT2uhJd7dGSxMk9q0j0zeBO5Cp9FatmDea92Zm7gY3iHPOzEq6KeaotYr6rsqdlboQKDx+rQTrKGX9Mc4I7y0yXECKM7NxR1qmhFK5ACDogKyVi1EyoNJbm5Jzap0SlFKmRld3WCqYUIDNqs0vd8JkmtBFpXFrAzKiCbutte8C+AjAoFwrzYKe3VAwELTvebjZYFSaGubgx1556dXXHvzYj3/w8U9+7OaxHgzevtDfu/vGi689ePrek//pP/8n33j7Wz/4g7/wOz6DD87P1HT64Nuf+MwX/+7f+o//zL/xf/3069/9hS+9vj168q/+b/63//L/7n/8p//4v/Yjf/2X/h//nX//3/iz/+0/8N//k//kb/k/9QcP4u69o6dLM8j9rEiktPPI/MbXvnK4faFEK77YXbaxE0UWhBlyVuyEiF2ZSetmc0ZrPWcYfF/9WtSWFdfqXc825fdfpSQzz5yVl1v0ARLuhFUo8FAWJBCUmpNUzEHY2GfvPRO4ii3vd8WRo7WDWSvuCIl9H/eASTWZhUJHBP0mgJzVayPSvW203C+q7fHVj4YBIikN3rvTKU0VW5SApVRKE0LlFpDCSovlYocUW3cONfc5hoDWG2RzzKHh7jlFAsZrTDwkVjzwZURbjtbl65KKTMxEsAiJiczgYlFfkCbNumJSqWBEbG62jCajFpDlJh8jzP3K8Mtln1ayTFg985IisrWumYmRma01XDHuzAnmHKkiGgAkI1UZ9+Tk2sYGBKzPVXtgzlmkhjGuXQ8xp0paWxhGhU+Wj/EVVgqAdrUKGmOYWSCKmMTlb3xdeEqA2fJnFkllgiUqrKwsS0GUBuWZ2BclnJz7pW+apedgAbGVwQtKLXI4E+ZEK46uIpxbXfYkaZG5jxI6bTMT
ZYG3ItwRUmWj1uE0mDsr6zf3eZkDbr71qc7ezFqiSZw5zCwyEwKSadY5p5iqxbgk5DKoAyw5wGxerzokzjkjmtNbq8sg67+qA5Ypb04yoc1Jpnl239rNzX6ZY0Rrts+ZI2npfqBG90aCZpSmmBmLUSKzRomte4u11yQJBIhsNMFSSVoqjU0MK32p2WryARjZ6GQzdN8iUVTeOnuGqs4Bo62aYVnUIDThQwfqXCk+kHLmBF2JOZQrmSFAdGkaLlTS+sSWCMN0GdYD7GaKjAxfGqQ1lJbO8b6JL32s5og5+sHnOb/zez6jvHv/7Sen09357pIxPvcLfoHx/M2vfvt3/c7f/32/4Esf/8Sn5+Xh7c2D5+8961tvt499vvUv/MH/1QuvfuHytH/646+/9cbPjA/44MEv+NaT/ZvvfOUhPvPKg5//c2+dseWmyHgYfY+QnEIjHd3meLbH/Prbd317LWedD85MY4AY5blpWhVQBMwJeja2mGPbtrHvzbeI4nZpRRotIBz3yyhbCRsJmq59fR3rtX03M4PRSswCofIiaObeI4Yop0lTSNp0d9poLYwEdGi5h4SZ0tBoMDc315zZ2r3DF7w1iomEXYAOi60xM8cYCBoGeQsTMJeDF5xCyg5tkXy5MqEjU24MhiFAIFNIwQSBmnH9w8VXizC3kSGEOaQYM4tKT7PICe7eGt1KF23skuWitWaJIeu0l7tM5F5wpnJeiRwcYy+CtpkRUkJR2WWa68cZkMCyGo3l1g0yibKvKRlQmrV6HNYugYZURHhbPjdRwLoBLMJJqfyqSnOVGMIdpcCvBC1cY6SmJk01Xa23vJgzTvIqfmBmarmTTVaTbwTWRUFVFrFsyfqut3jFSa/rhythMbT8jVEB3bA6kDARlGdOzaz9qtY6lzsnAKoCHrMOxVrjh2Sci2QjE0tTB2Snb90QqkoHg02oPja3ev2ETGoNEYVURXMeN98cBikuMZ4h2bopBxhU7ONZ7+VdQdJTMyPQDnvsG/uI5X1iMEA5g4jDsQGim7TcSZk552xticUyBffGZT3EnrQoqqi5N7OtNfNUVm8hmJMpJZJ0J+cK0s4sh2BpVd8CWDKzMcWEMoFicchAd074tQiskYsSLZu1nMONk0opzDTjVp68lyxHRR5QARHRlUl2UIUYcn1pNzl3xS4ZneaWidiDcIHuXSJgm3takJwHc8Ei0zgQ6KYxt7RwAjSa0aIS+5DLb+8jv67FnZv38/7cvZeL7DjdffHznzDtv+pX/BesY8Tc+uPnT975xCdf/at/9c//47/xc3/wj/2+3/Lr/ge/4pf/4PH28QfvfWPcxXd96TP/jd/zz7/06GM/8AOv/IW/9N6rn3rQbvK3/e7v+Vf/pd/33psP/unf8Eu+8RN/8Q/8gf/hpz/9hY895jvfvEt0yA0XsGX0iGCLm8N2Pp1+7o1n9O5miEjJmhuwj6A35HkOB0qCkeBIOHAol57qm+ac27btc/TeYkbxLlVIxfWRw31B55qFgWIqLTZCxIyAWZgZCSworJn5iCEtL6DWWsSsMlEPsNFJF/ZSXEro7AAgczNZ3I8LKAI4Wb2PtTOQvT8aezjPqeTsYsm2lCEzVtoHNIzbtbks0UAQVEKmWsCBlOx6UpVykhnp3mrNUB2CKpWOTRE1tSQhoZmtvagV4TyrhW88s3EJ0KrDsti2xpGkQZDCil+snDOIQ+XXZVZlCdJoylikkWqIJbkRUsnoMoNuhEnz2v/uQpgh7/2LEmCQrT4TC37YC0mtQ1dhurtbtcNS5tWw/vrJLFiy7GEiS2aKspvMnIuqeOUjVP2BHMar72d+1F5/OX2scRMAtaL7tHJd6ktBSJlyLpPjtSgQAgH55iGZoqOlsJuyty0ns+XK36wG/vpz5aLMBQHTRMgBSw2QNKbyMqzaEWrhJDlRnznAhU8oa3knA7iPkITuzXg4HLbD8/1ubmbueWjL31DpKQJoaY6NAUxaupwzZL7upASR093nbG4cl2zOqry9WQbDk4nmtrbfZpkBuENzjuxg65EwKVNjR+vn1huofe7eyqiVmcN9ibNgHPsUjEZpTwLmERGJtgjvmdD6DDOT3jOnUIt6yKeoxVfKUJRCNQNMZCn3Srx7RQ8BXrenJVNe94mupGvMvFDpK1FBxZCGydlmBgkpG9k3nzNFtJDcRpODfaKRFypMprKlpRAyYOmJKh9zPfOFO5NG2ozR+oFA8yN9y3jy3d/52jhfLnvbrJsd9t2ON8e33n731/363/i9P/A9f+Nv/blvvLd/9xe/a9vee3J+/oM/+P1/5t/8P/yVH/3Z//Av/9t/6Pf//k98/Ls+9trjv/+ffuUTr/ySz//uB0//7J9/6+vj1/+Gf/bvfeXLX/7xr+bUB3s7vBS6fHDshwiTCO6XcXn04MV333v/vVPevHjIsSsjwKnZXAJm8mA3iXslMZdbGsLQ7xcJh2Pf90vvPTWr89LqEJfoF4AQC6Sp55W6Po1JtmrZrttIVEsL+tLmlE7KRJjSYkbR5zPKSM4iMVOtuUSryNssjglbQ/HuqzCMMQC01ppo6oo5L/scYdyANPM97ghXtsxG+ETSJi1izIK6lQlFxUHOWI61RlxLbYljG6ZVixuBDE8vSn4GFYHmbN4kjZiIyjk67Hu0bgYrJ/fM2LaNmkaLhWVVq7C0uJnT4ISBdS9aay2S6xa53qwLWeYVxcJ1EV2Ph7M2VhZGUsuhiZATa+9ap1aCcZtzrwpNpy9XTc45bQm8BSAmwax0lMUzLGcuEotCDcYMKJMfDgdw0TIGoEA2K3ZsFcvVG11R8qUvB6DrPINrLNcaIMhErCRDkkRmZMobJEvl+m+0Iq6WohujwdbUQGbOFsueBKiEFxcJ0RRNamQ6RIk5Q6707ZA5R0S/uuHXmndKqIuI8IQlHJlEhjBklPU2YJfUHLNB4xQ3h9uY++Vy6RsoGzt1MOYg6elQkpRj3yfJGLPwyZpegTQk2TMHeyuxWDAV6UYJcwbQKhEyprzdx0Ye5j6Vzf2g1MwYy+kBxjIXoZIV+SJpMopTZPAIJe6jKBnF253ZJKFMKJSkyxLwhKisaAKj1zJFTEBlfZRIwi2TMjmmrFlVDiaZCgOUzFTYbvBUujyTtZgpfNdpB2+Zuc+9nCIyE4xkSmneuMSrgcwR1gSvvxVKMGGenBUer6s/hmUWc+Z6UD7SvzhpGSEw90vrx0g7HtsXvvO1MZ54O8yk8Ly3beYD58u3D47vv79/9aeeH3H85Oc+9/SD0+c+9om/9Xf+9v/yT/w7//If+Z993+ftH/1n3/rN//hvaH7z+MX9T/+p//3Tyyt/+l//Q99++o/+lf/5v/fxT3/mD//R33cZ73781Vfunmm7fXA+32U5iplHboe2/fhPvnG6bC/cbuend94o+ZyzmW1m57AMmld5QUbxBSaZMcMMkdm2LVMgY04zmPl90S9lH0CYQL/HYkrZIRLmyJBGjchmbldxNhDN+2UPMFeWbYqmVHgrzxmsLDdlJLtt5Ua2dreQ0eXKGIWU0yyFcn6XJOwxm5l
FnuVMGRVTF2MDaI01r2dMZZAeKRhSacjlqyElVPCGrdGcNYODyih9qQ1lc0vpsB3mnCPgjbGolmlmamBWVO/MrOTS+q1JIrMXhEijWWFEEWMCyET54UTM1LRs7j2KxnNlIuBqEw/F6i4qySOWXXB9OJBllnW7gVMyyM1bhW0bGWkZ5WlsZj6viYNglnFmTSpkJxAZENk7IqNGnBL35Qq/5cqt9cwMJY1zH2iQ1H3DOgGrFQMEsVa1gC3gln7fRH84Exefsc5dARGusvks96taykYVfCivwL2bRRk9OIaKsNMleYPy/iIRtZAqkErPCkIsXLf6Fvo4n5JolVGFMtFPlGksuKAkkSixsEgHg0jFkDJphO3B56dLpB+PR2QeD6AJ5oLRg0YYAtGNpGWOrXelL4Zlra+4GspC7bwZWNKHGmxyoKbJxZJNSzNEjAxX8nIevdiaBo70CaHZ1GVPeivueCYbbcfsjlIkZebMebns+74HfEbOqRyzjVBZfxQ/EUalprLXx0oSvD+JgAKyGhUlD0Zqmp2VtzmlMgkiyQQMRjrgEiqKwUzOhBeD01cGdkABunfz9EiEw2DsTmQqwgiYdTYhE8wMkiW6vcL5qn9JK6A53Hto4hqnmdeFAoFkjpmH3mPqMs6vvvrSy688ePfbb5pn5wsyN1wydzPf59Pbm5e+/JWvmd986hMvInLYu3/4f/QHf/MP/abf+3v/O//P/9efCegzX/pSXE4bbv+lP/GHb1949M7PPfnqT9tv+j2/7vOf/OTP/sSb/+DLb378Oz7+qNvlg9MXvvfTbTvNeK7dhd5ae+vtb+7TLzF1tT7t/eCcYx9QnwxLsyQ4jWlGhGKodY7YF7m4uB2pQ+sjI0tWrgWAwkhB9JVxqgLWZ9E73KM4LKrjf7W8MjJjGOFuktzQzMcYMB22m/PlTuseEbT2P8gEoxGzbq+uGSLZu5Wvk2bWng0Jx6MRZzdGbmYtNRMyHap9MxutpZnlnkhzO2CxW8TKdEJkUPJ2gELIQj96hoQEYeYkA2r0YhPOGQrBtjmmE2WMNecsG7WkXG7mOQWYuysjJ5ITSGsmIWM3Q2+icmQHdFUAeUHxMaKMtIoPv9xQ6O4ec1Q9Kbwjy+ISpEBzElBj1QpSCWoSnHOnFyAWi+RRr01aessA4XNMazUxyGlAFFdxxmAJztevNVUULF93c61n6SYiMhWXmjOslqZZLg4r2rxg4dVW1zRzv/hev1BZh+Xr1G1tYqUKFL9GMzNlpuVO7+4+8+x2NHjk9JbeMtLcj9n2Agq8PmsgV35KD+YQELNsbdCdrZNrM5WZLA9a48w8ssEwYVCAFJlEQiPnzeYH94iYEQcz5zbHEPXs+fuH7dF2sJtbkuFusHpAHWYxB+AdWTpYr1HEUsiyPZEgwW2LGFZa3NZ77ykR3ay8x6M1q+/DjJGjQa13YZ+xHw9+PBycYUzD6P1m5rDW59Rmjkggt8OhdxeSibZtGgmADocn2GYOsrE7WXtmWdvO4+zGloK5syFiREBeI5UCtBlzWjrAdMmyZT4k59hgaiWrwwqzj9wz6e4ihvIa3maePOdIs5mpzGU8NO/7QhM8UznSG0OcU+S+ClC15AZPREwHEubeJc19b61JDNEyKuNJpGpiwACaQHDm2KyNywf757/w2eMR0/Ngj9P2zEuS5hgJ+MN9Xr7809/4+KuvfOoFf9Tt//YX//pPvnvz0nduv+bX/HMR3/zSz/u+v/Dn/nbe8Z/7vV984+tf27re/PYb77/zxrY9fnTziHj22uv7G9/4ey++9Hh/Nr//wadP50jd+CGxT/XxzW+9e/TX87ynvFrwVFzMbNsY4TGP2+0+qobXObett7PO7m2ONGvubpawPMWFOHhRJFlsM/my4dglEE2IqJ2HbTlzeLm00ppnTtkcu4yH0GzNhB1yt5byyz5b65ecnKP5ZuTYA/fkVk1Zg7ZLodXM2El6zjJ2wnnsrTmYm7d9P9NFt5llHhF0Gi1zcsq9ZyDCohBABvJCsjsy5eiCG5u4S4G5yZRuU1DIwI3G5RAtkyAdW4OYkZLaskDjQLrgGz2RuUPRtyPE4OzNIi4VWYRs7j73ac3LsjgKtFnYbV4yu4NwM8YU1dwUEWYbGakLaZlH2iYMOoDGSi7gFBo5F7nQEgurJRRtO6Z2ukHb2AXuNBk7GDP2NUawTGP21kFYaIBIDjaUr07vPXMiFq5RHVpxMzK1Artt7ZCV7O0g7HMG4XOgtRY52JMMqqWgZW6KzCBAMJNAOgunQwjplLCVKX1EeSJTJFoGEO5OJzJnku4NsBiJ3iOHM1xoMgx09wLVhnCzOcaestE6Kc+RbhQaKCXN6E2hiAE2Qg0I1Rs0Zirm3lFMKuTaMGeMJDbrMfF8Tb0WEYOXJE/7wVruOt04DnzYdDiPMLPD1cOZbhEBeGs9pgVjs3Ye4d5gbezpbjlza8gkJ7z2+Znb0Xebj3XDmNZz69sMTDkLO/Uz0DffmjfkSO03h5ssZ/9kbzeSunksv2NryM4meiTMfMbw3mKcXMqpy2U/n/bmEeiYmKBiv/RQOkRkRCxjr7r56ywuIpdKpHhd7BgYlli6vcJdE0AaFFGYoBFLu52Z11BNwmRMKRfAyFBAStoKMg3MnJlorSmvNJiFCgDwRIYyl9WSMalZc+XWN0oxIo2buUmZiDklt0SYcT/HZz/7EjApeD/HpPPWEDGn9xZzXC7vvfPme5/51K/3Fx49i689f/9nf/mXcBh/69j55PA0xqtvfOuHX/3Y9zrt2B7kaMz+ye/9vsc3j+72D37yp9/9js989nf9ul/69K03uz16+uwkS7JpZmZ2b29841ukLYoFSdOcpSxtzb3smyW11lIzIhR5kawXRbqM92rahnsvnLFW2coEhIK50lrb3H2MXENPbeoDvUEZMbO1ZjLvyIzIq9GClQlEK4GMQ2OMZj4yt3aYGU5GTPM2RqQqDRERSqW70wqgVzc3rOg450YhY5rYrAHICSCb+UACcGaU+lxweDETyPSWGUOyBMzRm8YK8KpADVm1rJU+cN3tL08vkmYRoy1VvUkae/TWyG7oY08zc3rEJOUNc0x4wLtx0bZBQyAD5o02l1djNqRgIC/GjfTVnavYRg2qxUNPBDANpdWj7OQ1bQgFd4CijETEWIxiJokVzc21bpU0KmAAuF+rGBsLGEPWgSpb0I+o+ovfeZXCXlMJVzOPmLN4eNduHFewC5khaI3w9dYokLZ1HyNVE4lZjuUZH4pkbq1fJwWNsbt7q/xxLhh9ZgBBeFvpLyZpR84KtyQOI3tn7GFxs890N2EKjZEk3Rxtq3l0zBkRbDQzpsqYdt2CWNZj0gpeqXmBhBTKFTZH1ueDMUboLjOEpuaEkwIjIxKtKE/1zEoqqw+aImpHqMisYRjAaSmZrQlM5WXS0dmi7b17Svsc27aRgZkO65tlOaMB1gjGmOfD4YCEO5EohVrvV2EmSmbYM+P585Pg27b5xTlgQnfPQ2vuBq4Upshh7lEhL4ZriSgWAIqokJllvIsFfa4T09Tqk0TJF6
IUyDQSCkig09ajhULkhVA9ycu5oDY1gK2VyIL2rDXDlchZbLP74beCOZSsZ6PwvUyKMGsRUkx3wpiBTLQDx2TSGk0zv/MLj/dxyrENHM0TCWsHtnHZ53Z48M7bb7/5tWe/6Dd/4vah/8xX3v0n/onf9Dv+K7/7/WfPP/ddL/6L/5M//sP//rf/3T/3J1558YX33n6rWTu07Wtf/erNi597yv3m8fzrP/Kjr/7T3/nGG3n54LE0H73QRKe7Ypav9LfefLIdP26OiEUJ4KKvhJm17RCzltbMGSCseZEeJXmziLxn1/mie7LsT219TTKw2C+ZSTolo0kg4b4Bs8gzkM0Kj4+L9SPl5dwEIHNWhT1sPSJiqhJ/qtmRIqOBhpV1VTx6ZKB3ssRsq4NDTCBM7dovlLmQpiQnzC+0TWnMWvNmagJjYnb0Zkdj0hTjDGEMJN2u21pAxZrJjMwPM10/+stMc5YT5ATQ7iVOklBh0xZpQFQgSYBj7I1WaB8UFLZm+9wByByZ4IQEydRHTsumFDlpgjyL5nS1tFtGj6iZZ5XWLKNaK/CTEHWdW8C59tur/s7W2jICrN6mvlawKISVUEGZUCjr6p/uyztLwHDld18XUctrT9nLIHhJ5Ojrq4+slQO0EPNa3VcWGxarb3msA6h6KGLsw8wOh8OcMyK8bKGwiKVFcPKGiCgiqJVZjBkFpbo7XAGZcDxuQ8O8KYURsNUBkCvc0d1pjqXjJugRmTH6atUXvLzWcmDkZPVTZgkqSw1Qn/YsFH/tFzwkIr2IVJJ3b0DLDIfg5b6l+8SLuqVI63uGMk2jQQBTfeahcbfR2CRGBBlGmdBty4BkZr21jZxG6+2gZGqn6hJNAZWElApye/rsgxdeeGGMMcbwZnd3zw6tX/aVFNbMWnZz8ciWLqmp9l+xWCeLnA+WhQjJ7eoClUTUFZIEiFjbjbICcEBg0q9shSvmR1TCsU+U/fkSWZFZoffLJaPuQ1OFvsFmzDVtVSEjsuw0UxQNDgKKKcCMECzGZE4QXRiZGknCcjTpknYWH98eXn799Y/dnc7H24PyIo52eDhHRk4zP962n/r6V56M+PTnb2J/cug3igdvvWVDx7fePv/Nv/bjLz3+vk+9+vlvfOOnOm9vjvbtd77x+c+/9PWvvfnmux/8wl/8xS984sHDdvp//yf/2ZyH26N+3X/pe+/Oe87RzJDzdLp7/71hbVsU56q4EmGZEXNGkqjegSs1oubNcOCqx6huda34yimwCnpRNczMhOLz5Qp5IQG6yQ4+L4PevETgDt/axD7G6N0Bi4jWW0xFjtZaTWihMHCO6K0Qeas0lTIqWRSLaxAEjBEi66q2nBOAILhLGjmWuySVObd+U7uTyjUlF5Tc+gGEsKcSwyU3g3um8qqAW+zeethLGsqCjXV/8Ghk7z0itkMrH+Zta3NOMLeD15BEMuUZ2VovwX7rrljJXyPTCXAK7aoJSrQ0OFSu9o2eqgzuMtjDIA/kDiTRAdACFLBpmcGVx9zKsqtbrfJYgABGZjl+jcIWCqS7TmlW0pMZ0eDFelq8N2OKpiJBgKtFq+5h6p5cdN2ycqnSpKxmK928OLVrrFx3QPWtKCJOxaOFcmZwkUrlvq2LB3LSHF7ZjdcdrPCR25c8pqWyuDNYl0jp1u12e/DBs/fEU8zYDo8lj5zH7VB3ZeSIlMObWWttz8DKinFwIf5mLgNyhY9HyHFV86nkeCZCEOWVHNT7jZhAbj29pReYxM1sZpYTYJ0zl7EQuYi8DotJsgqht27ZLNS9dSecch9mDTYzejOWht9wsx0d3PPce488ayXfZd+WzHsNW1YZFCLczej28OHD8/nc2uHRo+2DZ6fCaWcTsFf2RKtL3s1yRlUQgKnQGumqzZLT1ATAA0sNhJVbKsCEaUwyUZ2TElbASbOORcgriwyZFVWODI2C2B1ZLvTXuQk0KGHGSAkR0fqKj7kOd4v7seTJlUvDlAI0yIyacxrcGzOVmmQTwDF6QxDn83h488Lrr79wGe+eL2huvT1+8v549OhR64fL5fL8bnzqU1/8i3/pX+v52uVZ3hxejT0e3Xr0988X/pE//t89bK/u49mjRw+avQCdX3jpY2z24isfvPveww+evvuLf+EPfOKT/Jk3v/zo+Pj1lz5TPWD1tq3b6XR6+oH8cCxLFlzZZrVSUHIfsTVLiaWdxoKtauWwIsEkwlsziaGEkrKyQoWaVPFUtdKoyrB0j5k59ztnhyqveW5bu1xOEQSYU2xXUsd1Ha1ARJSnv5nPGWYsUORK/rsqqFRSbDGxR7TGhMgszgBLEYqFG13fuzLqqatHngs6QOxj9M0K5YgpwDOSa6Vb/b8n5nqpWMYmCxvhfU3JukOOh6qbacbL/rxuA5ohpxISjUxmahqjd3bvl6lMEBuAMUrgOmQ7YJlO+dQAL8oDqFVCK+oQMHelaFFSesKu79AgWrmDYT1zuFrnXlvtuP6vBK6JVpnK+7e+bv1iltQByVK2UEAVbizGogRcmSMLOPJrgV2yGqiOTOkklolQ0eAlq6a+BE9SlCW4WDubVGZm1PVZX+62bQbu+746a6yw77prr0OqLkgYWOt2BdcN7Sk8f35i8+3Yb7bDB+88t76BMWKSpHvCM0a9p7rwPqJ1L/1n8XOZyGKqKIJm7t5gQPWmFQS2rreIcLbuF18O6AyxSYaZmcrVVJF0MlN7heRhfSOFz4BZygkRF+Seupnt1uDn2aZgjQYH3IxoqgghZvd+c3Mcl9Oc83g8AHOMcTweL+eRAQhwq+eoSu+ccXNzrICgKutKzTllrTbGU9k22bAMw5jJtJaczHBahpkJiyCDa6m/7/5orNCQMmmYMwixFAogkHHlVutDNwlU5ZISGVJ6uReX7rwgGPMs7UMWwQ1mVjq+xaW74oaCSCswVFr7+6mopo15APe1WxWJQ7n49808rfuLT07x2kvx2mvtZ37y7b/5177y+mvf7W3Ofczh33rr67/ol33xyZMPnr6Hlx4e/fD0+f7BtNPrrzyad+fnd0+fnPrHX/7S577w6l/54X/0vT/vO9555+sz7m5vHr708sf2c/vcxz9+Ovn7zy+7+Bt+za+xUM6bu9MzMUmbma23J0/ffu/dvX+iYkyRUigrYrEwDCfE8mMmlAVbmXdk8brclmeWWJFjAK3VlgxAa11Z6uAah6sWXOlDOZtbt5aJSLbuBu179HbDhn0/O9zdZyllraXWPA5cJwCzCmZebR0i40MNYcYAVj4C7v8Mr1S4onsbyeXrAqY0UAk1MiUTASYRfWvGNkcSOWO6O9iIjfkc5lCl5Ba7DQrK7oXNWIXWajgsqLdE25aa27bNOYnD2Gcd6sxwb5tzjHNEN+MsoWOZnBpIiSGsFiaLvZVBURAwlorTmnvprTxxUdYcOwFXNDFlA2rQ9Z7jEp2u0LiV6GMf/sPbhwW9res5NSOTi4R3nVGs3IMWSPfR2q1iZOb9hTqLgcNKbiDub5f7bw33Mm/cX5MQAuK8nqT6jr01FHHhGhpkZiAzqmmGrfzNMq5BK1Pwz
Cw5q7KG07aAFt7t4+a2jYn5zHbEZn1rfjfjHMPM+orlW8uD0KzdVVVYrMmG9z1QXsUHdfix/AyMXHoAkDTBydy7oXU/9M19Kw4oNKQOCmZiCjCtybJbrw/nOuIUvKQwbN6g2GQtQeckL26+69gs5oR7b0dJ532/6Ub2OdR8G/vlcp6Hrd09n0ovWieQa8olmRkRreN0OrfWz/sc5zvS9/0C2ZiXvOJy5oaDmy97yjRPUr17iTS8vPPXog5axtaROadmKENzaqbC0zytiQY00BOudC5ksI6FVRReuc0Elo1YoUCBEKvfn3MvJKF+Vn0rc1bWvCKUoYjIUJZI2u4PpRcdyOCZFRvGCJWoh0mTZW5TF7SYc/+uz712bP2dd775zts/te/f/ux3vPjya/j7//BvvPPOe5/65Otvvf2VN9748o/+w594Pp/+7b/7Iz/5Uz9z8/DBl7/yYz/9Ez/7/re+fj4dvvzj33j0+PD87oOvfu0rgH7kR37U7fBX//L/53K+/Zv/yd/5yk/97M3hlf38wt0HL0YcKLjTu0l68PDhm299a4+NvUNW5exK11wjM67jmLWVZmBsWaaXonuv1HLAIvT/B7Cux1Escd2iYNfNsT7PgX3fxzwVlX6MuEdUJGUs3YT3zazJfMyZOefcianclyth/dCraS3gZOlcK5YVjZU6v95gJNIRpnSmcxAXYQdmaRiKaMch7JmZ4RkHxVEzaNMsD9ux+Q2AMZ+tB3uF0VV9t492o/cQTZEjZ8TpfJ4jT+f9fJ4Ztl9S6RmqIGMAZk1iJUTSm2j7jAV3xEU6gRdoACAa0crV1tncD1s7NDd3rTR5RcSoxqqalvWtsGC4NchKcVWZAWqQKf16K/XalNI+/M/vH8AKeq4MK0iVMH09J+veFJZkm6wZO+8/mWv51n/uyHE153Yd3C2X5RyAUtIqBdFoqOSsj7RuiggHaxeSc4GBXNvnBXxZrWiYpNz5gO1GtoU3ucMBzIgx59ZIRTOnNfgmuWCiHw69NTPQUBa5K6kcsBmKiIXjolLAFyRVh7/33rtf37+b2aH3Q/dWIj2z3vtx43FrN+3Ye++9VWRCZY2Zmbu7szVUomW5E66701iGpWwON9+HZUia0I68KPasxPiYM+eQEnPuylGInPmELje3fvugg6N1+abUhUupE/UlkqwftNLoMnvvZj5n9gLENJ2LVt/EbOYeKWK3pDWNudFiNVuslDVdM6wrjYEkiAIAClCRW1F4ExlY+YkCmAkJSl6jGCTU0QTsGqxboS4peGs59hVyTyDH2tsQBpqvkMqUIpVl6ONwWom8hWnWOoGRE+45LZmtAxEKuB/S3JL7iOenu89//tU5zsTl1//aX3F8/EJrl3b85q/4Lz/k/OTTJ++89NLhO/+xh6fz+fT2P/gnf+j1N98bd8+++eDV06c/9+J+0Xb7zs3t9jNv/djU6w8e3L75rZ/77u/5zGV/88Fr+Ln3v/bqpx989rUvvPygv/3BWw8efDzHs9baOYekOWPbtm9+8xu0LaG86vLrsi1Qxd2NJqXMRkaMcTgctt73y0UFmX1k9SqpOUFFFFotoIxixOaIsbmxcYy18nE3wCbazEvfSOM+ZgQPh+PpcmrmFXEXEdVHn/fLRtTE7Y2ZYc6IC8p9Cj6nJJg54TEmGN4YM8o6UIKBlZzOTNcgvNSamTS4mRkt7BnZlZZlv4NkSghhgE5Y5nQPMbo3324QkXmfnlx2V0bYjMv9co8lv67IHq+AwNWoziH3RviIk5X6Kel+qLi91m92e97QYoQEanVloZkpWkBJNq5CLLecU6lhnsC2JAYAYMqOsklBhwhM2gTcDLAsnzIAkGnxCwlOkOQGJDCQAruw2vy6yKuyo7bIpfAxWFI5U3Ku3y7UQjXWSInFXteKUC/86r6r4Bp44Lp3Y6ugR5ZFV9ZwDVnWNj51aIfEzExYJechIrq3+yYjpNY6cq7ytN5ykGiO53PUKymHZLtGghzaACw0/Kh5vrPczjujyc00M3IYYOVIvVa9bd/PBratqYgrAChZXHErtM6rGY7K03JBXg6peIBwlj01DCEOoSu3VK9rjiw7fgm6ZvIkiYhZFhiZ2ZpnRrhKvd9II7zMQQRzzTmtdbO2j9PW2f22Wy/jhght/QBgDnhrvXVIUtTFg6sRwIr5nmV2tHq7MfaIrDWcInPPSq2l2M1bHzNH3thBoYulwdMMtWs2JIdstugiQNtnQDBYax4xIEMsutvMAGZlG8pYTpAQuruoOaLZFpoDMxokWkwHYdhzUuhFDEvkjG3bEgoap4emmSKitZZhCXnzZhYRXGRVmWPqRKO3RmZqbBXibiBtxnlDj3l8fHN8W+fv+NRLevb+Zz79A828t3mZ/OTjX9Kd5AUjXvvYD00lIvFdZpaf5dj3+blP/lDvPvZsbZtx+txnfvG+n7ftGLvMbJ/zV//y7yG6Pvky0Z48f7bZC2Oe3PqM9rC15+O91rct8dWvvpP2sA0MerlU5QKhEVMz0hmAeaVR99vMMeYJlpGb+RaleYRlpnun2R7QUoHCYJZMXBpH6FDo2MgyafVEyOaNtX2nxaYpJ4EwzcY8dhu7Eq172+OkGJu1eZYcpUiw6wWCYjRLvbQ2YOakW4Tm3DxbQrQYeXF3F4wd0GVad9SeHaWwS0AGe2QMWGoMWNLdXc1wGafeu2ABJkQwchgoFv/HAeNUoKh1s/eNi7pDZTZrFd4LKnWGmsIVQQvBp9xtQ2EqiDn3gg5jhNNJb/AqDMtJbw5vDrWxZpXZu+eU0nuLERDMHDFnBsmmSBBKuhsyJcJcGfQi4pV9Qm2/w+qO58ykkvAKfmpigInZsUD2KWmMYaUjwg0Y5YJq1HWbHcaWBedVzTI3URGBj6xYYZmKssNcm1WZWQGnJDNkS2sToXRayWKpkSV3agTTyqZNxqlsSuasmG8zo1rVGM+ZU3JvnplMNlhMlzOzMqCS5IxBNxjOZ3Onsx120rcxJ4PbdN7gknvrh4g2Q5bRzeee2tK4zTktkDF77yhq9ojZKLDJ45SzgalORqZvPpBUenl7zZKQhQFuGxKOm/0S6mDuh2MHbAa8uB9GYLr7nvJaEhQxlzrveTx008gdm7c5LkQPOx4dl3nX9ECI254ZO0SiDyRxNxJ+82CfdLfNQcvKz260nczM8/lMVs5wWRQdU1NTp9MJvu0j51AMXcZu24FuE7Mls9yRRojmE2zN55zaS0sbstAK6u5NB9g+ZqpARy/QVnNALVQyQVkgxerNmVRE5fbiEnv1eMnMMIq9ZlQymUo2IdyYZlNJyDEwEDBBZg4jCDdAQGzdpER2qtxFaAsLVplbcXFay9nDJbW2hQ/a9sElHr6wferzr7x390HK2nG7nG9uDnJpv2Tocet5Oj1tdgintzzvQR7a9gDA6RKSIrY5NAXh5vk56a25JeeWr5OKvMiytTnnJHoqs40nl2l+nIPTTl/9+vu+vbTr3YMdpzIjmMXoaoNDUo2o5VvLslhIQTCfUgg0BwVvjDky3VAWqEEhUimDEcat6TIGZK15azbG
rlBrh4DU/ZJRZD+YnSJDdPCc4YvQ1Ugat1RCYdYkzFlrumY0BXYsW9dkZMJQ7EemJlX09kPtCSIDqVZMnqVlL4w+I0cqhYrfA03wCUHpzW8VhX2bmaVEAe6Lp6UycCJBpGdmxlybMa8BfHHIctLbJglCa718V4weGAS9HGV5Pz9FHZhcSyYV+aQ+AaPf6wzLPseEmZEoHkG57C14KIbMmRmrC1Yz2wC6D8lUjzCWrV5mGTHZle5YI3+B1lx42RqbfTXyGIvAklgGgCDp5e9J9lrBa8Lc3VpZ10pZ9eiKxrD7JkRELaJzoeS2wJxUKFb6cSE/V7hAU9XwMgrHzkNGJfnRaSySl9TkBmOaRJNX4h8AxLmZA/BIAF4VeY69+cjpru3maO5Tu7T3djClo5ssNKAUqvvJuqJap3CRa89gc1IZc8JIK/dRkughsZHKCaJddcIZI2AHBHqX05w28rh1wabXmKkCKqhl9qTIg3WCimJzulWYPMj2qJncDdkMoC6kOXDe9wfHPmJu2y3pM6YHM9Nx0ei+HRwy0UWlkLxYyk1ZyjzGJIXe++XygZsZG8QReRrzbo65z94SiEvu+4j22kMp+ex0tpvt+d1zbocxLm6cBlpGpSFkRBY3oz6TANi4MW0ur4NuUABZaqQU6a4KRxBmwpzkyJFZdHRODbt+2gUKmpnJLmMkNC0buLmUCfeg+awNnhxQ6Vp7j33kApElXvMgiMw0FFHBiEqH7nNOA/epG99PE689fvz6Ky/GeT+/N9755uXxJx49fefZfD5ffrXDz0/e1wsvPH72TMSWeNbb7YzWumacbm5uM8Lm6eiWkW4ty4DQoUTig+P2wON4uH3w5Ml77r2ZjT0OL9wcH+H56dkYGfnsaz/3prVXDocWp4VyCRAcaWUdlUq6KVNgmYJmwqw7ojr9shz0dKTcXQzSixYKGESyI1LerAzAqokzQUywaxyshyq2oTZy8tYnZmvIuQu2HZpiRlwaW0IwQFbh9ualyMbK7q5VFbSs9AE1kcggWIJsgtO7K4TFq7tqJxfJgcpirACL/YOiUd4PngCKB5spyEuNfm8RXLwQo3trZrP+XvdGzpwBK7OXtZcUrAqu+1GKGSUFULMGOOFrV+gFRjNzxpUnBlsvxq4eW+vsXWEToydYizv3rYwpUVkVIXfLQAFcUi6vXRaWkGZbhaRei/s9ChA019VfE1zh0d4muIxvoYU2CADGYhwUown3LPvrXvQj/yQ5xvD2ISB/XXeVZEkUuN6sAMwrRn//N8yi6xGbh8e8enCSaFgO9Fl3HpXtGq8KAFZBScLVfFJkCkdr60m+BD0bDYQz0EzpQ7AuA8euS86ty7JnTqMEuKGIjkp430gkjQkzVRqnoFRThm8dxowgkx0NQsINBnYSJpmgaALVzFyma15FoV4Ii+tHQRitNbjLfcZkS2Mrm13Z8Na2fnMZd1K0tsEg5IjkJV584XH355ubQRG7OVvzkhpuas04S8VmnIpQ4sbteZhbdz/qYIE5EpOjm9hBN8vDobfHN48y8+Z4VPLh4eYyQ7jJxOVyAn0EQr32nMhJ5fDedylkQmb4dasJeWttKjGzVBilR4gYbCnKvDfDTBVK0IKy8iYrxD2FkMzZmtDGpMmbn8duEgJBlNmxpCzBEpscNidJc0qlUip+TRdnoZFWhh8ciVRmw4vUs/Pz8xc//doLj+dPv/1zP/1TXzvl4dE7/uZb7949u/1t/8yvdnv/nXff+9wnPvbv/V/+rU+9/gv3+eSb3/zmx1759Kuvfex8efLmm2+9+PgTv/E3ff/p9NzMYMg5W/MIEG072l/5K3/n4c3Hv/rVr/6mf+pXejtdxrOHDx/+5f/o78bl8N1f/NRnP/PK0/e/9t5b7fbhw8vde9cW1mGF+k1UQclgLZ/vAxJKeOshICBRwZhK6wWRJ6GMSBpoQro4Z1QsnPnMzJyb25YMYM5hcrhvc+50nxEAM+Rtm5ogvfs+Rm3YItKblqrPylsihXRiXlkKBJmKa5onszzH2Yr9ZoorfQqwZYAtJJaLlHFLTtpcnFiVInHECvlhQF4lw6hI2HQz5RJGGwFPIWIYrG51AzJqMGRr3spY0d0yE2RrW2ZKF9Su0Ktkj6qp9UPvy1mV8lB+VB5t1zFRghtoLZRzuXJqXUX2HGZXzQ28yTypyHkgjUwsRa4Brcqh1g7wPo16uS3WjQ1euaeg18GvJTtAq8hlSPJWwoLaGFjOogzCzSR5a3PO3vsYo7mTnDlLVlIKpgJpWrMxws3t2uBf16csn8kC9mPdeQZT5CkYRAP6XGoAM7PIpJErwN2Sa428Nu6oT7CG7GLaRDsYKzgFYutS7DFv2xENcb3GahAkmowAU9mcmTq2rkBFccScNAJsZOYsS/MQC7zOjKnZ3JxKJMxTsdGc1jYPzbJYSFwIp7mWX/Sq6U2t+SKUKyRNpTJyYJYTaGuWxpG820Pszhb7nFv2rtb8uB0yxn66+MbLTBwzocbG3jxlm58vyFyGQEZu5ExwDN8e7XtcBvcLR+T5FKddmXh2vovU88u8XEZ769vv6l6RTNv3vZXXuQUQxpKWkgowkNn1qImZcEu6wRliZp7noDHFScxMgIa0VNgxXTElgX4MxJxJ+DQGM4lGtsCGLPe84yTh5VDv3jCN5kbf7VTHZIxpbAFKs7sJNbLUE0oThQawIdk8M615ZdYfDhvJSz6DeUKf+eQrI+M0Dt/zfd//dJwf2fH48MErr33HzsuTt/KFlz/1zpM3fvWv+mUhb/34A7/0hf2S3f2887u+9CnqtZGPQhZ5UZzdsY+9+aH3w9NnT154NWb8zCe/w1qzud8auecJ/JnEI7QXzV/81je//Xz3B71pV/cbXIfwVSwoYzVHOjQPJUpQTm/sV9wg4TS6UmY+R5TYHfBaLa49XbrTrcygjFlhKUkzt45YpS+NaX09t33ytrXMnDEPzccYkWjdLa3YxEW7MAcUrBVn3PdxpizOtg7mMErpbZZxtJkZmjylisStrV8KkRmWERg0b0W0FVKcnORxNYiZM+VelrasmsgrXVeLj5Gouwe5XIYilNlaqycPQDITubKYDWVSTzOrgOQAALci/aaEGTuXA5u5PEJU3lMDVSWzJoKlZQQYhBlhjQqrn5B5/YlzlntwiYGuRbPwLAOKQZClN1YSCInelBnMqs4opLF49FdWDIiShgMS1CFmhVMkISIrv3TR2ZXM+P/p3z9s5OHLnicbi7uM6jwW2ATARbH0O4QolootwU71JBodiy8REmWeFWRMS3MhBNBsWzcTW73+6+wwEZlWaBRgIjJLhr/fHlpmnu4GacetZwrSHRzs1bNnzIAD6LaNvEhSzrLyLGbNHiFXzDnmSi9iUmkxYus2gTMBYpswWBqH0zOBcl41CGVMT9Pp7q5vm/s1dnwxvmDGnJaY6TuMERGGPF5u7da859inpbId+vbw4U2M/bAdWmu3D2/IaI6YrIm1rHKcZsIMmfXMfjrNlO7u5mXf7/Z9j7mPccmZ4n7CiHh+2vcRDcdeY3trDdLt4XZteHx
T9cxl+oFOHQxufUqOVLu6pYcoGWKNaTMRapI8oUCQEvd9ArAGBSJIcEYbKNpdciITExiQX+ZE7BlMQmOMMeVBMKpX8svl4p4ZGCPc/QK/buUQxkxmjExYI8mBWRQeAsCMyE1b26zHez/4fZ985dHDn/2Jn33pwcd+6mtvfOvdJz/3tXd/229/+Wvf/Pp/8Of/3g/9sv/i4fBNnexb337vOz//j33x+77z/PztZ88Pj1/8jn08++G/8KPf+6W7b7zxtd/xO/6pfX+fOvvtpmTEuLltP/jzvx+p3o7n0zOCvbe7O/2qX/urRHv2bG432zff+lbY5XCTka2m0WpYqCTRenPnIX3fdxgbW2stA4DNOZuxyHfm5btCozLlXVLw6qFRz+FGs8NU+grH5k4PS6MdkVFdYm82M4r07e4I2/d5PG4ZMxMpHg7bPscsfXi54JnLJHFm+nILKLchiiC9QcvoN2eMK7yQBm9LXWmoDMWVoga4h4LEBoiYLHFIdsNiBx36VeWANDeFY3WOSpTpbIMEhrslVIGc3jiRIwZbs7ZFuby7Zypm9q2btSuTj0pbOnj3fZ7MHADDrfwE2CLLnMBZKhWpbDTNoWlCQmVfQ0DwmiduyjV35gUUvSMbYb6duYSUdDcgIwYg4819j0y2ygWOCMhbM12B6bZtkiIm6caGxX5buieSceWzi4CiBplCbMzsGmOSbr3IGPczins3FvaZc87yspeka+HFwuibNCqDE95ILwmhs2FxK+2qfmVmKpEs68pK32NdEhmtKuOMkoBZ7b7blsBeuBiULhRT5TSETrKhuQC5iQ5tR5xGRut93y92sIkws52TsGYG2cwhormX73JDZ5rTW1sEqmbublsHW5e0OYXYth6QE1RzuPHKFZTcnBS6m3lxaEqPKSiQj5pR6GbN82CN3G5aOxouexC4ORyIQ448z8AchJ7sz6356Xle3ZEcCTPQpqTWbGaU19NUXvbZkmMiZPu+R8y6fEfMse8jpuZuGe0//ps/U4MYInvvOaNvPue0vCW5KJELOHBjO9u5Tv+h92o26/+anSl06+6OZVhoJuPxUrFWJFs6SQMBu7nRsVyslwLArHmI3J5pssnDMJQwItnk9F2F9EWSjJkAstYCUl2/eVXqxVRjA7DHrIaUVM4ZMabzTP/SZz61xfMf+Us/EnfP3r08efbkTbb3Pv7qqzi99dZbP/HK7ZPHh/e+8Y0v3xy399972r7w2S//2I+Z57Onoj7+5S//2Dif3nn6xp53z54/v1xOZFZ8O2RtbuNZmvnzGIfDTUZc9ux2e36a8o3Jm+3mZ7/yrTaOPdDPQH9elpZFIga1cWvGfd9bs207ZOj5+RQRh8PN8dBO54lAUX1PYx4OhzEuEI2ccbXtrrZ3phn7SKXPgWYwN2tepJx9XmrlaN4anehzzNb6LsJ5EhNOEs2HmGjpAY5GK88ACbGwg0W/rSQ0SVyKNxEsVqWZLchfsxltZdZkVjdUnLzyIr9qSs1o8HBQsoQ1FJxSHhWZk+ZYy8YGoRyszBQaNGsoRMPcm7HPlfjsZqRVQhJLBiCbvF4wK8ZoxWwaZTC6W2ttzpKkDmn9OJpn3Wcqy8WeCrqq6LO80AngTAImtwTTHObOvIoyC0JxSmWYXspVu6ZrXvnpTNDdWmIqU8xiKM2M5Qyxanq736hS069r50xtWyfKvcDFRXzK9U82o7EJZfGf9d22VuGw1xdZMBOXGjnHrAjvirZPSCZaErOkFbXeNS+sqVGTQGPJ8dIhb8ZimVoQSQOJZNCaMvdwp7uXxwGNKPsK2LZfRKK1BiSSinAfm5vvs4mk+9oeSBLt0mtPbjTGAQBzUDbpW3Nma5mu837p3iQ1x+ZkJBthcbxxAJZqXcB011U7Bfc0x1aFHUXfMG8sJI1tN/i2WaNX3++MMYI3bb+7cCpPSYc57vbzcTs8GxcJ4mnmUgNsvhmIgwFphowZEVTEnsgcBshpG2Zo7KTmwGXOPSOhWT7cf+i/91tJjpHGFd6WmplZLi2xHE4tEMtTcLTqlM18zqlkRgC4YNOMOlvKsvlWJkbZZFsHEBV2Cp8jLm4ONTAzh5JegraKQZO1IwDNOGxNBOGzziJVYvrM3LZjRHSL1tq9eLVUBpJolZHjADJj6z1zNsON4wM9eqTT7/ztv/wcb8RZJ3yQuoxxcd02l/a7Jgp9t+35OB2jHbYX5jx7035JcF72ZxES/OGDlze/JXrdOud93/rxPGZpylthLNYqLmde0rY+NR/2+Jmf+umvvss7nv3uuPUAbOwrkXxcjRu3rV0uF8BofjpdzMydmRl7v1wuvR3M8fz0/KWXXnr69KlowPFyPhtb3bgpP51H8vpfhTZvBYfMuZsjzTNzjDH2OB5vzdrp7rxtxxSFkGJr/XQ6HY/HOWeMffbbuQ9DyfQ8puiN3k+XO1b0lyrIIov/GyoBrWWgtVYEjPtfUlw7lNUMamLXtEbCc6KZp+XQfuCDYljiQ4cGxJith+RQ1xIulQQ0xW6CUb33OXNGBNi3Q8Z5Drp7SXcR3szBMcuSqZpW8OqLm34FFyX13meUNfwOHM0zYrbWci72nhnImxk7XN38+kYzNHPKbVuWEApA7q4Q0NwZuUuxZMlp7n3Gc4lurQ6VkGSkZuBQ4pzWDApdlUdciQ2Lx3Lf6FDm7hGDZOa8vb3NGPOyWzeJkXWDLudIM5sjwVlkuxIeujuoDNZkvzpWLt2x8eBNzSFpDmUYTN6A1HZopvTGnNPMWjdJJWjo5lIocmvWuwthi2yhti05UklUD63NuTczg7yVV1UcDh2d+yUz57alO7sdIoZ5lkGet2O91MPWkNNp2mCFAG0dUp8KCFszRDPG3IvBdLcP4zb32UeCRPfuOPZ26IbmQR5oVfSkxfhyp1G7SlOdGbgepCSZOAiTVA5DkjZbN019cL5sbbMoHe7K3SY5dsEc5hFDSuV+3DYDtRdPOkYUz1XzMgkPu7tMkl1Sjp3kTBuJuERCk5GZ7YXG1pwPlgmce2ceM9VoxX2FVewpyoRjj1YEqRXBtcgqPjyJDoUU0AYmMCW/ypFXeM/VjURDEygFshKCLCCJ2tdcWXBYLMshTk3JanYrT4s57yReIpE7UxIDGlJtTpGNU9it0nNKhZiZkM/x/ssvv/oPfhZ7HPfL0+P2YudNt2G90mkXC6Lb4dGxme0035qj+y1JzYfKits0v3E7NnZngLG0KjZd9OoGjSptliKJkhE+fuH1H/7Rn3j77cvN7Uspf77vmUn2VFlwpNFAnQZgB0kQtgc3bJSExOFGR96WnuIGHeDLD15mtrDpvGkL1AgR4qGZdYZmDHIkN+uS6OgCtKiiM2KMcTgcMh9kZnJzBjmQMm3J8vEMyqSjp6U0GS3hwoUc+YBFBswawUqOKJprhidmRlik1M01o/Ay922ZkMSCIEYOssE45gVA7z2HIgy2YwQjI0LeQnLSAujc9zPbqP3tLMk+OC8Xd455YRH5gNqo5nnP1saITT3F3OAdjuH9GKEsUUHzOaLRLDSby4qeKM+LmYXtu0ZHoL
+      <base64 PNG data elided: rendered output of the input COCO image>\n",
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 100,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "image_path = '/data/mshukor/data/coco/val2014/COCO_val2014_000000386193.jpg'\n",
+    "img = Image.open(image_path).convert('RGB')\n",
+    "image = transform(img)\n",
+    "image = image.to(device, non_blocking=True).unsqueeze(0)\n",
+    "img"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 108,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "do_sample=False\n",
+    "num_beams=3\n",
+    "max_length=30"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 110,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "A clock is displayed on a shelf in a store.\n"
+     ]
+    }
+   ],
+   "source": [
+    "text = ['']\n",
+    "text_input = tokenizer(text, padding='longest', return_tensors=\"pt\").to(device)\n",
+    "\n",
+    "# mixed-precision (float16) generation\n",
+    "with torch.autocast(device_type='cuda', dtype=torch.float16, enabled=True):\n",
+    "    out = model(image=image, text=text_input, mode='generate', return_dict=True,\n",
+    "                max_length=max_length, do_sample=do_sample, num_beams=num_beams)\n",
+    "\n",
+    "# strip special tokens from each generated sequence\n",
+    "out_decode = []\n",
+    "for i, o in enumerate(out):\n",
+    "    res = tokenizer.decode(o)\n",
+    "    response = res.split('')[1].replace(pad_token, '').replace('', '').replace(eos_token, '')  # skip_special_tokens=True\n",
+    "    out_decode.append(response)\n",
+    "\n",
+    "print(response)"
+   ]
+  },
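+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Optionally, decoding strategies can be compared on the same image. The cell below is a minimal sketch, not part of the evaluation pipeline: it assumes the `mode='generate'` path forwards standard HuggingFace generation kwargs (e.g. `top_p`) to the underlying LM."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Optional sketch: greedy decoding with beam search vs. nucleus sampling.\n",
+    "# Assumes `model`, `tokenizer`, `image`, `text_input` and `max_length` from the cells above,\n",
+    "# and that extra kwargs (e.g. top_p) are forwarded to the underlying generate() call.\n",
+    "strategies = {\n",
+    "    'greedy + beam search': dict(do_sample=False, num_beams=3),\n",
+    "    'nucleus sampling': dict(do_sample=True, num_beams=1, top_p=0.9),\n",
+    "}\n",
+    "for name, gen_kwargs in strategies.items():\n",
+    "    with torch.autocast(device_type='cuda', dtype=torch.float16, enabled=True):\n",
+    "        out = model(image=image, text=text_input, mode='generate', return_dict=True,\n",
+    "                    max_length=max_length, **gen_kwargs)\n",
+    "    print(name, '->', tokenizer.decode(out[0], skip_special_tokens=True))"
+   ]
+  },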
111, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading ePALM ...\n", + "Loading: facebook/opt-2.7b\n", + "OPTConfig {\n", + " \"_name_or_path\": \"facebook/opt-2.7b\",\n", + " \"_remove_final_layer_norm\": false,\n", + " \"activation_dropout\": 0.0,\n", + " \"activation_function\": \"relu\",\n", + " \"architectures\": [\n", + " \"OPTForCausalLM\"\n", + " ],\n", + " \"attention_dropout\": 0.0,\n", + " \"bos_token_id\": 2,\n", + " \"do_layer_norm_before\": true,\n", + " \"dropout\": 0.1,\n", + " \"end_layer_idx\": 31,\n", + " \"eos_token_id\": 2,\n", + " \"ffn_dim\": 10240,\n", + " \"hidden_size\": 2560,\n", + " \"init_std\": 0.02,\n", + " \"layerdrop\": 0.0,\n", + " \"max_position_embeddings\": 2048,\n", + " \"model_type\": \"opt\",\n", + " \"num_attention_heads\": 32,\n", + " \"num_hidden_layers\": 32,\n", + " \"pad_token_id\": 1,\n", + " \"prefix\": \"\",\n", + " \"select_higher_step\": false,\n", + " \"start_layer_idx\": 19,\n", + " \"text_step\": 1,\n", + " \"torch_dtype\": \"float16\",\n", + " \"transformers_version\": \"4.24.0\",\n", + " \"use_cache\": false,\n", + " \"use_vis_prefix\": true,\n", + " \"vocab_size\": 50272,\n", + " \"word_embed_proj_dim\": 2560\n", + "}\n", + "\n", + "Loading: vit_base_patch16_224\n", + "Build connector: linear\n", + "done\n" + ] + } + ], + "source": [ + "os.environ['TORCH_HOME'] = '/home/mshukor/.cache/torch'\n", + "\n", + "\n", + "\n", + "config = '/home/mshukor/ep-alm/configs/image/ePALM_vqa.yaml'\n", + "config = yaml.load(open(config, 'r'), Loader=yaml.Loader)\n", + "\n", + "\n", + "text_model = 'facebook/opt-2.7b' \n", + "vision_model_name = 'vit_base_patch16_224'\n", + "\n", + "# text_model = 'facebook/opt-6.7b' \n", + "# vision_model_name = 'vit_large_patch16_224'\n", + "\n", + "\n", + "start_layer_idx = 19\n", + "end_layer_idx = 31\n", + "low_cpu = True \n", + "model = ePALM(opt_model_name=text_model, \n", + " vision_model_name=vision_model_name, \n", + " use_vis_prefix=True, \n", + " start_layer_idx=start_layer_idx, \n", + " end_layer_idx=end_layer_idx, \n", + " return_hidden_state_vision=True, \n", + " config=config,\n", + " low_cpu=low_cpu\n", + ")\n", + "print(\"done\")" + ] + }, + { + "cell_type": "code", + "execution_count": 117, + "metadata": {}, + "outputs": [], + "source": [ + "checkpoint_path = '/data/mshukor/logs/eplam/models/float32/ePALM_vqa/checkpoint_best.pth'\n", + "checkpoint = torch.load(checkpoint_path, map_location='cpu') \n", + "state_dict = checkpoint['model']\n", + "msg = model.load_state_dict(state_dict,strict=False) " + ] + }, + { + "cell_type": "code", + "execution_count": 135, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "cuda\n" + ] + } + ], + "source": [ + "device = torch.device(\"cuda\")\n", + "model.to(device)\n", + "print(device)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": 116, + "metadata": {}, + "outputs": [], + "source": [ + "tokenizer = AutoTokenizer.from_pretrained(text_model, use_fast=False)\n", + "eos_token = tokenizer.eos_token\n", + "pad_token = tokenizer.pad_token\n", + "\n", + "\n", + "special_answer_token = ''\n", + "\n", + "if special_answer_token is not None:\n", + " special_tokens_dict = {'additional_special_tokens': [special_answer_token]}\n", + " tokenizer.add_special_tokens(special_tokens_dict)" + ] + }, + { + "cell_type": "code", + "execution_count": 147, + "metadata": {}, + 
"outputs": [], + "source": [ + "do_sample=False\n", + "num_beams=3\n", + "max_length=30" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Eval" + ] + }, + { + "cell_type": "code", + "execution_count": 123, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfQAAAF3CAIAAADckC6rAAAKMWlDQ1BJQ0MgUHJvZmlsZQAAeJydlndUU9kWh8+9N71QkhCKlNBraFICSA29SJEuKjEJEErAkAAiNkRUcERRkaYIMijggKNDkbEiioUBUbHrBBlE1HFwFBuWSWStGd+8ee/Nm98f935rn73P3Wfvfda6AJD8gwXCTFgJgAyhWBTh58WIjYtnYAcBDPAAA2wA4HCzs0IW+EYCmQJ82IxsmRP4F726DiD5+yrTP4zBAP+flLlZIjEAUJiM5/L42VwZF8k4PVecJbdPyZi2NE3OMErOIlmCMlaTc/IsW3z2mWUPOfMyhDwZy3PO4mXw5Nwn4405Er6MkWAZF+cI+LkyviZjg3RJhkDGb+SxGXxONgAoktwu5nNTZGwtY5IoMoIt43kA4EjJX/DSL1jMzxPLD8XOzFouEiSniBkmXFOGjZMTi+HPz03ni8XMMA43jSPiMdiZGVkc4XIAZs/8WRR5bRmyIjvYODk4MG0tbb4o1H9d/JuS93aWXoR/7hlEH/jD9ld+mQ0AsKZltdn6h21pFQBd6wFQu/2HzWAvAIqyvnUOfXEeunxeUsTiLGcrq9zcXEsBn2spL+jv+p8Of0NffM9Svt3v5WF485M4knQxQ143bmZ6pkTEyM7icPkM5p+H+B8H/nUeFhH8JL6IL5RFRMumTCBMlrVbyBOIBZlChkD4n5r4D8P+pNm5lona+BHQllgCpSEaQH4eACgqESAJe2Qr0O99C8ZHA/nNi9GZmJ37z4L+fVe4TP7IFiR/jmNHRDK4ElHO7Jr8WgI0IABFQAPqQBvoAxPABLbAEbgAD+ADAkEoiARxYDHgghSQAUQgFxSAtaAYlIKtYCeoBnWgETSDNnAYdIFj4DQ4By6By2AE3AFSMA6egCnwCsxAEISFyBAVUod0IEPIHLKFWJAb5AMFQxFQHJQIJUNCSAIVQOugUqgcqobqoWboW+godBq6AA1Dt6BRaBL6FXoHIzAJpsFasBFsBbNgTzgIjoQXwcnwMjgfLoK3wJVwA3wQ7oRPw5fgEVgKP4GnEYAQETqiizARFsJGQpF4JAkRIauQEqQCaUDakB6kH7mKSJGnyFsUBkVFMVBMlAvKHxWF4qKWoVahNqOqUQdQnag+1FXUKGoK9RFNRmuizdHO6AB0LDoZnYsuRlegm9Ad6LPoEfQ4+hUGg6FjjDGOGH9MHCYVswKzGbMb0445hRnGjGGmsVisOtYc64oNxXKwYmwxtgp7EHsSewU7jn2DI+J0cLY4X1w8TogrxFXgWnAncFdwE7gZvBLeEO+MD8Xz8MvxZfhGfA9+CD+OnyEoE4wJroRIQiphLaGS0EY4S7hLeEEkEvWITsRwooC4hlhJPEQ8TxwlviVRSGYkNimBJCFtIe0nnSLdIr0gk8lGZA9yPFlM3kJuJp8h3ye/UaAqWCoEKPAUVivUKHQqXFF4pohXNFT0VFysmK9YoXhEcUjxqRJeyUiJrcRRWqVUo3RU6YbStDJV2UY5VDlDebNyi/IF5UcULMWI4kPhUYoo+yhnKGNUhKpPZVO51HXURupZ6jgNQzOmBdBSaaW0b2iDtCkVioqdSrRKnkqNynEVKR2hG9ED6On0Mvph+nX6O1UtVU9Vvuom1TbVK6qv1eaoeajx1UrU2tVG1N6pM9R91NPUt6l3qd/TQGmYaYRr5Grs0Tir8XQObY7LHO6ckjmH59zWhDXNNCM0V2ju0xzQnNbS1vLTytKq0jqj9VSbru2hnaq9Q/uE9qQOVcdNR6CzQ+ekzmOGCsOTkc6oZPQxpnQ1df11Jbr1uoO6M3rGelF6hXrtevf0Cfos/ST9Hfq9+lMGOgYhBgUGrQa3DfGGLMMUw12G/YavjYyNYow2GHUZPTJWMw4wzjduNb5rQjZxN1lm0mByzRRjyjJNM91tetkMNrM3SzGrMRsyh80dzAXmu82HLdAWThZCiwaLG0wS05OZw2xljlrSLYMtCy27LJ9ZGVjFW22z6rf6aG1vnW7daH3HhmITaFNo02Pzq62ZLde2xvbaXPJc37mr53bPfW5nbse322N3055qH2K/wb7X/oODo4PIoc1h0tHAMdGx1vEGi8YKY21mnXdCO3k5rXY65vTW2cFZ7HzY+RcXpkuaS4vLo3nG8/jzGueNueq5clzrXaVuDLdEt71uUnddd457g/sDD30PnkeTx4SnqWeq50HPZ17WXiKvDq/XbGf2SvYpb8Tbz7vEe9CH4hPlU+1z31fPN9m31XfKz95vhd8pf7R/kP82/xsBWgHcgOaAqUDHwJWBfUGkoAVB1UEPgs2CRcE9IXBIYMj2kLvzDecL53eFgtCA0O2h98KMw5aFfR+OCQ8Lrwl/GGETURDRv4C6YMmClgWvIr0iyyLvRJlESaJ6oxWjE6Kbo1/HeMeUx0hjrWJXxl6K04gTxHXHY+Oj45vipxf6LNy5cDzBPqE44foi40V5iy4s1licvvj4EsUlnCVHEtGJMYktie85oZwGzvTSgKW1S6e4bO4u7hOeB28Hb5Lvyi/nTyS5JpUnPUp2Td6ePJninlKR8lTAFlQLnqf6p9alvk4LTduf9ik9Jr09A5eRmHFUSBGmCfsytTPzMoezzLOKs6TLnJftXDYlChI1ZUPZi7K7xTTZz9SAxESyXjKa45ZTk/MmNzr3SJ5ynjBvYLnZ8k3LJ/J9879egVrBXdFboFuwtmB0pefK+lXQqqWrelfrry5aPb7Gb82BtYS1aWt/KLQuLC98uS5mXU+RVtGaorH1futbixWKRcU3NrhsqNuI2ijYOLhp7qaqTR9LeCUXS61LK0rfb+ZuvviVzVeVX33akrRlsMyhbM9WzFbh1uvb3LcdKFcuzy8f2x6yvXMHY0fJjpc7l+y8UGFXUbeLsEuyS1oZXNldZVC1tep9dUr1SI1XTXutZu2m2te7ebuv7PHY01anVVda926vYO/Ner/6zgajhop9mH05+x42Rjf2f836urlJo6m06cN+4X7pgYgDfc2Ozc0tmi1lrXCrpHXyYMLBy994f9Pdxmyrb6e3lx4ChySHHn+b+O31w0GHe4+wjrR9Z/hdbQe1o6QT6lzeOdWV0iXtjusePhp4tLfHpafje8vv9x/TPVZzXOV42QnCiaITn07mn5w+lXXq6enk02O9S3rvnIk9c60vvG/wbNDZ8+d8z53p9+w/ed71/LELzheOXmRd7LrkcKlzwH6g4w
f7HzoGHQY7hxyHui87Xe4Znjd84or7ldNXva+euxZw7dLI/JHh61HXb95IuCG9ybv56Fb6ree3c27P3FlzF3235J7SvYr7mvcbfjT9sV3qID0+6j068GDBgztj3LEnP2X/9H686CH5YcWEzkTzI9tHxyZ9Jy8/Xvh4/EnWk5mnxT8r/1z7zOTZd794/DIwFTs1/lz0/NOvm1+ov9j/0u5l73TY9P1XGa9mXpe8UX9z4C3rbf+7mHcTM7nvse8rP5h+6PkY9PHup4xPn34D94Tz+6TMXDkAAQAASURBVHicnP15uC3ZUR2Ir4jYOzPPOXd+c82zpCpJpQGBZiQQSAgQGDBgM+PGbmNMY7DbtN3A72eD8Qg2BgzG2GBjG8RohBASQgOaS3NVqUpVqvnVm4c7nSEz946I/mPnue8JrDbq/N736ta9756TZ+feMaxYsYJ++OtuFUhOCiBWAcJmRkTBAguY2d3dDQAREZG7ugIACYPd3YkoCLG6ERNzJnd3ESFzcgSOZiZERG4EY1N3EuaciYQgROTu6gZAApEDADlfeTsqt6EchIiSZSISCYHKrSJSyLUcX6m++lu/p7rtFSkLSwM4qIFHcYI7hNVNyICAv8Dl7ssvZszjRTr9qz/xclzYgtcGIEeTuXClblk75gAX4dj1M6YKALE7Ac7uDnOAnbOZuSvA7MJE7soww7CYRKRuZT2Z2Z3K7w7fAZW7Kguk6syh/MjMBKScYc7ORuyAwYURwR2UDeU1jZHNyJ1BEcGZes3OROZRAnJyM40QCjByIwDq5mQhsmZ3dyFmZjdzo8BMRFmyq6mqiMCcmdlBROpinolcJLqRuoF6kEYfGcHdvaySUfkU2ROUmhCTqQUGjAHO2SkSRN0J4lAzC0HMMsBlexCMmYmIhpdydzeCo2whOXimZhmuMdZWLncJZE5GDCdisBs52MiNlcDixOrupgDEAYeRGzMDbGbuXvakwpGUiEIlqgqgPB0GJe3LuzMHZiaHu7ujTymEAEBVmTmEwESmcOhy7zkzL+/cPABgUsCI2FHenUXLP3M3hYj0OQGIse7NxY1Jmd0JCiKFu/ScKhFTdXd2iBAAEenVxCDEYDZ3BQmIQQlZCMwgByBlEzp5IAOP3CnFjI7G1/H3/ui7FJkhxVb8RU7Z/8vpMyfxdsFtQyvzi+/7w5/+Z09v7wcRJFNVB4xgNtgld3d3EoY5AHY2Qps11hXnnoTNlifahw1M3h88YIMSwM5uxFKOnJgZc4CaMKBGITrXH/rk2cdOzc5PL1x3uP76L72hT0RGRAEeAIAyYIATMSd0UVfbeGibbnzWtSduODGdtWtra94lTCqWUTSrR6uXfG8fZ2+/+ZmT9a3ch+nOTnRf4VUi6mUqE6PQiI6iVYhom51IlXerHnfNO5+ukJoQVJ0mTdawc+o8ckuHOaxsdk9POTjBtBKJzJ40KEZcsQJk7m6mgNNwOWBmEJGyL92JmYXYFM6iqqoqoEDMDnIwByU1qJJlhlH56Cg7khnE7mwQJ3YmZ5CXXcE+bCMfvmBmmLsaO5EBaq5GzoSqTfOY9Oxu/64/fhMuPUQh9t6LG8OBDMoQmAPOAH+++4x53GYdhWu++A1/b+aPMUbEGSEx1WrZYTGMhausXcqLGEYEAxkAcsCVHEzE5KTOcAYV+0jMJOzCAPvyIDCIQWywPrsabFj5YuIPlsKdyuEpRg2AkfFV1+AMnAEMb0dUfj2UdQcy3NwDSwRHFoI5kwtfvexEJAwGkSMIMTnIQMYMFhiZWS7HqdxGuVsjNmLixGxEbpbNDCCmhjFSuA2vXvza4LVERBjlRdgBMDkIAphDyZ3Yyqk3yw41eFk3BwPsTtlNnYgEEojkz9gXU2KOwrUpNAMuzBEeBBQU4mAtUYxnhgUQZ7Os2d2EULk7QSXoEO6oEZGIMMjd6aonBWB4d3MzixICS2Ahh2vxKX5guMvGJiK3YrN0+UyH2GK5sLxclmHdDG5wdQO5WlbNDmVBECJy1U4kgZK7GcgMpg4gBkQETR4QmzBiD2SRvcodSEHEAJbxxmA3lSpFzGBlVoISPDgFBxp14TitQjXtTr74td8Mg3n6fM/X1ddVO1yJ1aSvtCZsP/aOP3ly56Ihtl2X3DJDGUrsEoxFiZV4iIGCFAfPzFEo52zwclKGBYc61AEnNpApzIwAdrirW4YHeLBk7Kx9cmjO2chSWrgmwEEmEgF245wNgLvCO3hHUCIiNOYjqmtjdhbP3rcJ5nWsAoLUTeRYhcCx8sBNiIcma3Ws5pbDqGJms1xVwSxzJT17JU2sJOceFAhBLYVA7pS63EgF8y71HGPZNn3fj0Yj4caUutyzMjywsSFAYlC3pDlIRYSDHXb1xcwiUiwzOQAGGO7mBCo7y2DOIIa4DrEeCTOzhOE8BD8IRdVdiVwIg/sAyvMAu7ti8MxKzuWnxQKSkivIISCWhjQb2UOf/vTD7/+jgIsGdTPzPnmr6AAjAn8+hn04q4AZJESAn/H877nlhXcptpkDseVsak5EJVKr65qZTXW5MgbTwb6bkrmAIrEQlw9eLoOTcIkuXc3VxMFAAJGDHDCH6dXLX/zTsCbk7sp8JXgpx4NpsOi6fILFKBZvIcSAlbiWidicTFXVyYyNiABzMpARe/k3y3jzyoswgxmQkkyQiIiUJIzMLOfsVsEromhO6hmUQD04mV8xW8v1cXYcpCblbzKHMxGJUPEl5R1Zyl25Bzg7SSASL3kGCQm0ZAWDI+eDDKwYZYPnnImofHx371yNAR6cCjvIFKrMoVhrQnb05aag0YstYBZilGCnLAj58jwsnx3gXlwQlzzj4AMCoMjGTgwWYhke3+Aq6CBL9uILiAglyhp2Jg4c+bBiBHcz05K0EJzRE3LZYgZaemtUjEhOUIIWV13+LAO44jCNHWUbBCSiJO5iEGOxIFoHazKEpE+ZuUvjY/HZX/BXQE6ff/BUvMif/47Z3B0caPr4h+77yAcNlYMDV8tIiIWIASFiZzIikhL0XDkFzDAdbPqBUWImIiNzqZyEiAKF4MMjY4YZyo5mImEGYFCDs5AIixA53KAZ7uTGwhOmhigKpCydUmfUThdzz25dDhxHVU1EfU5qqFbGMdaRq+SAiAhMtes6aiKA/d2d8WhEdTDSelzTqCYXz0kqMXhWcnfTubuTRZCoWxyNlRCaqu97ZnYmt9AvrO0XbByIgwkZGQkMni378mEDZTuVKKPsah42YgkSi8Gl4KArcSNAzig7VTOXYEkzsvHyjzvB6OAwl1MhGLaXE4jIqRwKwhCqlP+XkvqVo2V5Aa56pzFlovqDH/zg3gNvH3N2MoMxExO7WdlCB0frL3gRkTOLmVpiVC957Y/39UMVT8wgEfAAiywOUrhYdvPOyZyGqAc4ODPuUBykqweG2Jy8mINhVdW9mJ6yKc1yiUSIaGmqrpit4SrvaAMcMdz5QQB+VdSPkk/AmNk0qSV3VWj5LSouAQfHnACwQ0BkTs5CHFiIaIlqDE+t/AEAEnN3FCOZHcnJiJ0FcHaLniIXj7J0GACMBl9VF
qDcJDngTiTL91JclbuAaQgtHcXyEw137jakLAeXuwNGnN2zIzlnsLlnR+9I5rT0B8qOpacxMycSZgaMkMurm7kpCMLMZua6hAXUynodrLO7EzE5DhYKy4hhafrt4JsMIgK5D/b7qg145SMYsIRrABt+3fngwQFQ1YKpMjMyym5wGQIyAjQpwYKQu+acmQlwdQ1VkGUWWKwnMUBuruxBVMiJnMSNvYf38JxlT1SquL536eyzX/U1VVjvkAL/f8Nkrth3HzBfC3D3Gtj+1HvefHq2NwlrZH00ieDgFNzFTAysLm7iVp5IznlweJoLSFiyOrMMzezGcGcndtVUNj4BZuV5gySUzRCEiokDQBJLKBRjLKtkZtnU3aMIcUecAMAD5cotuJGZNSTjUI249pRDCJ12o421teNbOWeJAeJgCqMqh85EobYqTbu9V7GMRiO4lUMtIlBkB4UAcWYOVLubanIKrXu9vpqytimXpCovEgcRESTv2sRRgxg1FKKLpVwTjUNAWly9GzEkhsOpLOcNS1igBG4HQcRBQEHmkeXPwgXMlYTIcmCklo+1fMHLt6ODlxrM3BJEHfYfkxOcYATyLnPo5yZMO9PF+97ydrr8IESz5+Xm0YKbYolOfH6XJxFAu8PXvfoFX/aGvdmnV1c2htuTcuYLAMoxxrIy5d6GexeGcIbnIY42wGhpFq8cfiYSJmElZFCxMoM1+HOHpZh7IjrArw5WmIgEJX5Rgx+sZMmZBlCSWQzkIGGKQlFIGEaeHGBYiUEPrJIt3UNJMgZP70vfO7gl1YMoPoSheFK+c2DUgMF6Dq9J5VG6MZEzloZ+ec5d4T6UL9iHMFiG3VL2f0ndmNlhZiVzuuKlhvDZSz5B7AcbdXm3XpNEIzI1s+xmBBe2wOaduzoCvIZXJRBkURHBVWb0AG8pecbVxp2XG/jgE9FVCUpxOYDB1N0PbvUArgGuzoc+a9MeuHUzI7AbymMFUHIqGEwjIzAHdzXKWMYH5uwQphoUiGtHNERbguklaT54WO6unDW4MRl7Fs1sxmZsxOZmldVpvPjCl387rCQuVz7s53MdGAED4FDAs4swLj/ygY996r6IKvepIjJoCSWWi1BKEH6w2cxQrDwA9UzsyxLIYJTMjB3MLKUC6CbDGWF4cBIiZTEAntWsPNYIiim7O8yMmctRNQwbQVVLaAJcwX6Z0XULV21n0xBYhFbXxtPplNWlilzTeDymCl3smo1J0zTzU5f2z148tnU4J2tzCmMh8poiEQVp+pyNWwnuFkRCl7t6vIpJlLWG6yihykxgEvXeFALkRE7DSXACiFgigpg41eUJLZ/0VYgqkZOA5cpeJHLzTFiih4CaeSmVwTr3zj3Dyh8lT+zKABGGQ3glEldoQaXJrwSQGLxIiXb8ysFgB5dMjAlZEdFplOrxkxc/9c43cb7ccFLNagoYi5XYeXly/qJXVjDXMDgL0L/ky//pyrU27bYDx1A5SN0AJxaVioSrqzf3gVkxMw5CQhAc2F8BCVHkSOYHpSEicmKDm5kRRGQo2S2RarNStMEQVpAsbR8BDB7eXYbH4UMyxFR+VBDwpePhcjwOFkRArLREEuCuRvZnPlF502Il4Twg3wXkGT5deTV2K0EVubsjOTpQ9s++yqcusMCBcyqvhwEKERAxBYKUkLl8UfGAd2dXh4GcYUTkUNCVZ31gT2FMiIQID+7EHAkRJuVOl7tcmAIVjIUDlsXDAjWZGVPFLO7wUr5jKg+uWPyDgz1kIYSCn+CKNf+s60qe6leyuquNkRUgfrD4vnwQ5dgKERF7gYlEAjOXSJ9IiIQQiSuiIZ51dxCxCBGyZRKISPas0BBYNQEwd/MhaaahlgNCEleGkhs5hAJTJFTBV1D79uXTt77s+YfX73Knitg/O2f6i12fnWZBi09JImTnPvonb97bR+NA7nqKHrUnKJEylJHJyxceuNSlASsBH2RIba/ECkyQAUnzrGXBjGDlLARxJneSgc6gFKjUVJdp0+AbytMxcKkdujZkFQDj3iVBkpAyeQfjUIE5hLBo58KEpGOR1dXVlPNkdRLr0KUFjxHqyBDdn2+trVeTSQnmLFAytYVBxIUpsHrraq7sRrEay6iWWmbdjKsYqyaltFgscjb1TEJd22ZNbJSNNXtWBsWYwb0pV5FYQRlLqG8JwbuzSSBmLtDn4GzdCcbkxQwZCOzEbpbLrhUEdjkIpsBkpM7LzNQLCGNOdhCEXp1fF+iVCra6dNSAM5O6kTOSSsWU2RYJUe754D2n73sf6X7NcMADnMxd3f48wPe/uGLJ3DQ6G5mz3PTKr/3hnfxYVUtgd0+xCiKiqjlndRMKDCFn8pIPWjEPBxFcOTlGUHhJ95ypLMJQMTYEDKf3IFkZ4v0BqvLlWukSFRF3d/ABLlOM7LK+RDQAuMPHd/eErJ5VtWD9RMQSparJmQeqkgzgGJMRQhiCYRpu1M1MLfkSQxvWa6jDZ1BHnIaw3shN4BU8DkU5GtL/A4Tal3gUPrtWPDgA/qwCox/wdsicLEMLcl9yvGJbyVWG8ICHPMdRrBiAZbSATF6SQi+ouYONJBNTBAUiIh4qdgC7hXJvHGSw2nRwVwdPqtz5MoFzPUBRSqpT/lztB/mq+GmZyQ3P6yAfKgWK8p+yLQ5wUSLwsE9KckQAOGqh+pDRwGDhgR4CGEgdmXyoTTIUyz15sEPcQGDRmiyyRXIhgEmZM1MmB3G935x56Wv+JjInmcMj0ed9vvBnPZ+7K8hqdBc+8a5HH3iolkkiZ/Gsquqm0GUZ48Ax6BXSR7HIywTCh4D9ah7aUNkeSqumbgaFKZuSZlpiAyIikYmcTBkOgTNESgoYUl7CktwTJ2IDw4nhFbwSrxkgopQ0hCqltLK2mnOuQi1VrckqYSfqPa2Mq5qjJifIZGsjpS6EqgqR68gxVFRBgnOODVg8ELOzmY0mWx7YUiLLmYzrSGYR3Kp5VGeez+cdFiyunHMFrsDojZVqrpBsSGvJllvT3NWR3NXMAGNewjLszAiE8iPmUHJJIg+BYcQO8YGQQNmQzbIqdHkIyyH3AvOVugctzfrw4HkZxzmEWIhBDleQCWpHy15T7yzKHth0L9EH/uh39PzT7NkIyXPvPQEHKcJf/GLuLEFjgldMNbvf9rzvf8azj87m2wCL1O5JrWWKMTSFLVCi8gI8C4orZIIfpNPLlyYSzj7wLga2iBmpsTkFKVH5QXg7GKpl+RTLynP57eIM+KqqsR+UxAuEVaDnZYDsgUmYQZGY1WGkqtnBLITC6ylpxGC/SsZw4M4LcB9ZiAjOhShVDBOxM7M7MQfmQBQAsIDFQAlMBXA8CA/JnG2IkIcw/0pEP5i8goCxQERKcV7VzQziHIkjQdwJZiZCzBxo2JyyrD+DFJRBWYKDsnnvSDFSpRAMBQA1y26JPAkMHTFADI+EWjgyw7zDkrdaXrZkMAWi8aucd1miAUEGrv6+D2VSv4LIlYoLAUwcSulM1YyYJQQQ6QGSTnRg0dwd5jEGBplldyUiEabyYkHds5kSWDgws5Ep+pxI
KLoFzQgyEkRLYJPBIPIQavjgJkhFTdyZFK6gzMGoyhzNU+pna7ccv+WmrwABkBKZfb7n639ykQHG6fIH3/Em7dnRLeB9yCuBSZmNxDm4BJfgxIVYkYfkfohgmAxuYDAdBO9DNmwmoEpCchgLOAxVDvJAiGRMQbPDWd2YWS0RO4uZZ3cVISeHcEpJjQ6q9ebsFtzEXQk90aKhipM3Upui7VM1HtWTFYpVa7ZST8g8VDE2MYAmcYREMqpzIFSBDCKRYjACwCllJ+3ygohE6tx3IHOPYFpc3s7TadXUi9STWmq7LCBxc5rP2l7nHDwQoiNAgtQsAWwUtCIJgeuIwOYAkwSAg1Uh14KaUJnLgDVTFeLY3SMLE0WmIG6kCNxTJlYSKDRZyuRZQByYQkWxVBELp81AoAAKAKubUwZlkMEZzqQODyEECNQtK5HWgpqUeuRMEdEyaecwUSRtXE6fu/SBt/w75KeCK5NXYJgrt0tH9Wd24eeMONxrihCA4GQFtd9+6Tf8R4wf5xC5VbesnNUYkqO7VyD23jVXCPAK8IDeFqKREonKFe6EETmMs7EN4A0YIVrgnh3I7sksFxx7iMSRxYJlYq/dIjxkdREy6z2nAQQxBcwI5CyFflvKthSIIhDEmI1rDqQkUmdjZ1HPjgS0KbQae5NspCAZYMogQKikChAoiIiEFZ4ZYiLMhCi0xpi4ibvnXmG1iysW4irOLDE7CLEywUKjEaOgTDAEoypLQgmLEcRZVQETBgGVZCiym4tb6F0p9tQGZIEQY7DYVXCJxJ6CIAKcPRv1xslJSSirMo0810xBYmBuTAXZXZBVzUDOQZgYQ2VWyXMy7d0LMCXwyF5BsvmiBMVwVZ9r7L2O6sqiIimnBbkIRrmHMPdD5UOYkS2ZUKeqFChEZ3GIGYG54I1m5kkZROa1N6JSOK3SmRlyVriDklNPnJgBJpNMwdyJfew5ujuLKvVsjXDDLMIs7pJdMlEWIYIZWQ5Cpp15BpkToCru0cGqBi0VITaDR+YKJYbgkJOqq2q/Pl47v/3gF3/p33CCYl+sJpgrMFQAzFG4mtldFQOi2kE7qC7PoMExFN6zAiAF9e4uCOc++aZHTp6xICH7yAN3VYJZyBWUrc/eKxKRh2KM4MiJE9c0YUQiOHpBauDCEZQqMVaHshMSVEG1ebDMhZmD4B6zBdWg1pVcDbCUOncfkmMmgUR2JwNp4MZIx4CD1TBUXJBAGQx3Ujar+8bcVEZ1lazjEVcjllVu1kIrKaw2zKGq11PHEqpmFAQdk5vAQlsFI2EVj0FSVJMdF8acQ0Ja2WRxv0Tbj53pLnVRU7b9ToM7fDxLjL7vKWgf+5DFeQgzjUBmyHAiSIbDQ12pat+2wlFiMFAVOfWl40lIuOu6vteqajyEEr+YOzkFBCgqDwlWUBcRGXJYAD4UOujq9pxSoXAV0BIJ8mXiW3jIDC8Jpjt6p2BGFZUHVMqPzAQzd3hgvv/h09e9/203veKb4IeSt4Eio/kcNvxzYoVEMMtOziXnVOeweuzQnS98zTe+93f+x5H1OxedxZLFJVlkr6JxzoHrzNLHlpRSD6pZc4IgIzOJk4KExEEkicrbO+CFR16IMcROgJa6p8HduLTo9CIE7RkKZwqxzSk2NbOCydyNHBhyHUWhcMPI2Y0I7p4BDEhIeWIQlNVmg1Gh6g413BJPguEL77z0XolzIHNzAZmDoLYgYc+JGCTqyiGyWecWAGFRYuQMYod3EJHA87YHe90ECq6qIGnSqASnOSciDTEuupYtxNgktZhTFSdT7DV5JGSzuGgsIicCsTAJ92pOLiKM1kpHwUAuKqhIL0GyLkIUYqScAJWKkmr0iomdeGijcwoGVS2I1nK/mpkVYmboxxbmqo07UZyyC/qGqXNeMe0lUKiSaSJGVdc550hUSaXZcp/rUUwpjaKk1LLXcCzhHAPM4HBoZFKVQDPfNWikWh31SsNZNbszC1cOM1fNTgQYE1sVxYPnrE6uRhxq1fYALHKwDTgPqmhwB2HgaQ1okrkONVTm0mHnADlcQk+ezU0kwqkOdZ+NmXf2tleuP/qCF3+dAkwjZygCAjIcZuQo590IDIi5kgtx7TJUQwQgy8jiQSyU8IpMcwxgYPHAB975vjRLXIlZZhIhItXIYQFW4kIns4JNkjqzCCvUPTucvDKNDFcHcq/ukMAxMHMeCAVkWUomLcxEcAxtWuThANsSDgATCE6UU/AQSESi+0LdetfMDDIWAAO+IxLcPasBqalrIsqpC0yBbdRUSGiUmNGsrc7nqXLKiSTUImQW4FEkGimBVbNQI3HiqcPAX4DlXurgIRD08oXza5OVOlTTvX0KAseibUmCuwPcpb5nDWaZiK1gq0RgL0lwRTTvO9UuisQo7FAzZVBKIVZkrn3iyFUVVDVZWyO6ewhSLDHYVdWYpWwu/6zwOBD3rsvMnNycgGGjCR/knYWn5ESDiecC7AOFewSDs0kpRYotwQpnZ+bg0ZO/54/fcvzm25obX5HNXBr3nlAREbzsr7/IlZlZlSAAZWICAtPaC7/kH3763rctzuwL1tiS0pwZsZpIZtVdEe07MiJ4G2Otc1ZRZs6ag5QirBUExc3MlrzPsr0McIaQgYhB7nBzgpIToXJ4VgnOLMlUPTPH+V4bK+IrJQq6ilFRjlOBGIjYvYBdwVyo18yg5E4OIncYgXggGjLYHQ4YOZE0MLiXzswhsnZQZs/BYRCwkyUHiAJZLWE2N5GwQIY4mDnpiHmak0gMIRCC9ayaSCh7T/WCqfLMkEAUNXmQRkS8gk0Fo7Eaj3on6jrKAaTE1WjkaimZmTkxMdzJvCZQKai6k5eSr3OUKntvZqkFcwUBg0iqubVCxMXcERNTZsvkAvfi6EolEWZEzNpxq5kDt00dcxfcADGikXuraozAqNXVLVFlsFQ1oV0s3CnUVdfnEEI2JY5QLuRecuRsZigWjDqriYgoh1GXWkpaUdW2uRYnBlxTMpCHEKpI7jCvzBeO5CBiK02/LMFT9oOghZy5sBWg2ZbF3oPaBjtMpRSKYcuyrLuDwIi9KUO4wDVQtxzrZvfi+ed82esjHUn9nEOEJYUKV3AGSzHglCHmEIJQdjBAZJBCLarIuNIqRTDM0bNkcNcnTKJ/5kPveOzxk7GqPXAigmmMQZOyp4jIbuxMylRsKsEJC/XAMNMQgmrPJEvygtcySurGptqLSM45mdahVtUD4gZ4WSylodUAKEBqLqTc3rknH5B1B1S59yZwN3R7CQ1cBnYDHKMxg9iy17Ga7u8SNMaQM1cMF3EWJlsZT+TI0Xa/TfN5HSdwNvMupxBLFx5bShRgCnKBEWCoOISQZomTBmI2ghICuq7ru46r4BDi4CQKDZGFmJ2QaYC6yY2NU6FbAIBVVUhJNacgNTy7azG2ZOX5UxROqu7uTAwidwIJ2M09OA05WeHyDj0awkvb6sM+Iyy7N+DmyzotETmIMSCaMGK6wmgkEy11kmKzSN0Kr5fIY86zub33zb/1mu88Wo+fmxUuXiqPtHzx/6VpdxcCMcEdB8S
HnBCrm77oK/+3N//8z12zcmix6Cqb7PdzDmkRtJIm5LTOuwBkZdz3bbMqWYkDUiKpKPUg4dKFFKjYJhq8rrm7sytTLiR3DHU5tyXIm107yyoIVZX7NKmCV4E9A3rwicpLmRmN2d3dMpnTsuBmZBJqVNz3OTAXxFiGG0g80CtzKZuru0PNFaWxCeJQqr00ffQtK0dmCQ7AxrFWjRVZl+La6hzcMomlqGqhqVy1qWsziw6zJCJmzhEpWZBVtSw1wRIsE4eASES6d8xX46XdM0cmCWHksXMLbYK1qbMcJTRNE0TMTNMiBspUAQaygkGXak6xVYGgqhXVcFjqiAgWRhTAZCyFwVWi3MZZBzYjiBCIAYIDiiq5VHVr1uYWYqEa56QiHZkKBXcQSYzc5bmZxyj7O4vReMyRc85VJPdcS5VStmiWMpXePHcJxEzq6pyyNTkZIUcJDDGzukolyGEOhVkURFGkC7DmHpYmmxjkLHAKsXF3UMlj3eE0VBQbv8r1w9nNzRAZOhQAfEnKARHBEDkQUT9PzMwUaqpthss+f9mXfI+5SRgrIBmQ4HaFf+xEywZH9+yRZYgTnJxZ2ZTNYE2qUsyMpvOUaTKJGZfv+cS77lHVGIKlxEiqCmJ2uFMHdS9dHnlolSBSdyEvJKic+hBNtRWucyKyRc0xhJC5g7omIzCxWUFPSs2vLJGrM8nStgEw06WV4IoQNAeYmzJzTpacNBCsiLOUrjdRM4BjjIDmLpFVUWhvb69qYhhJ6l0DRqNxmzKEZVTF8cZituthyvXga2MtJlk1B1QiEcyWQCowYuYMDyTdNLFbIIa5ptzBhSbBad71zNVi0VrKzhoOYr0Scg8XmRqCCBMs5xzAVQiUyZSlhEIkEok8a4KTVKGcJfNMVJ4nooiZJzgTgaSkiG5Dz+VBcckJQ/vJUEmTgxSxkJTAxARNECkpNqEQacgJRktcGMxgCAgo9jIzSRTc//Bj17znD+58zS3s4x4VuxKJQwk4CN7/Xwz9snQGMxAFswxoqNxzvPOuv/vw89925sMnm/FhhYVIFDXkKusiHjp0z0OfvtTvVlWMTl3XKWI5aczcq4UQVFWE2gQzQ3GxA3MZAMZBSsW5FA5LywwZde4SYyZXczf1rIc31nPXTvt+GYgRgMgiIuSouCuxiQw9Yj6QMrOXsiRT6HMqhVAzm9nQpoSBkDcYgiYqDGxUuPwi4q7CVAOL1E6aTes1pUUcjbvUky2aMJ5T1xGaehI6jabGQNWMASGq66jWeZ+JnXMgEunOs9SmBaRQh41Go/Pnz3/Hd7/g2S943oP3Pv2ff+mX1zdW8mLX2vr4xvGbnnn7bL4z6y6dOfuULlp2RKkEMktF6mfgBZYCo1lGqElSaMLKaHM2m7l2UULOiCXUKO7APbsN2hiy5CYutwEVTLxxlX3LXNe1sAlCb1mkRY2BdAinwJESXISaa2989pNPP1bVCp4nLfE+icSePXMPWCVBmIVYVXOfYhPb1G4eOeRdt799uRk3hFw5pVCllMyGk9L37u5gct8LcQh7CDBPROQmSRKMRCKM4M5gNQUow5YUHwcBzGBncuX6qt1+wH2i8SQuFoukfb0aq6pq25aZQwjPeOGth488B8Zm6kFQmXh2qyLbsvWqFHbNQQDYGHANpfYKuIsLmEGd+NOMmdClCEW//5EP/MajO58cra6QmbizZVVTV69YhEhTye9dTUAKd7DlNGIWrs0sjoNaB4o5uUA81Narq2ru6ybmucVYi2hSJXceOs98CAWIsgnIlsSE8j2Cs0iSoLEiwCRQmmuhnvkSjigqWMROZCLRVIugDTncqFeL1nZKOqkatqTarK3JWHpXiRWh4lq0dVINo5goQ5Gzhko8pYpCZyW0cA0c3Bd7c0upCeKO3GVnipGs7bmi8Wh1urNnvYdxDJ17dAQdCJ4GOJsRJAZLKiAiyWoSqaoq65KqM7uwDDm/RAf6hdYCFld3Jjd2M8siRiUeH+DLoRHJUaB5XLV+DmDZI0BXNlkRu/CSWbtnUMkaDxAVyyWuGiqUPgSmkMgxZ5VsTVN94J0fv/b6d6w/69UVGqfsMELEFdZ2QSE+F0qTC/cXhUPigQrDlcwxeuVXf9+vP/S3yW5VmVad9Gnd6PLksN9/5qk3f+ayH14P2y0W1MoaW/JBJgvZEEPU7CLCjmX5wRVEttSK0s4JpSnIidgDQxjQAM9aS5W6fOON11+8dPbRp/bVM6q1AUEHisJPUfLKWQbOCUC2pNMxeZ4xM4iJLA2KAgKQFFalD40YRDQIwYjAHIoQpMsmAncWeOa+Dqt9PyeCSOWuZk5Suy0ijfsOxh15HsXG3V1auEgwtR1VZaoDN227qKroqWf0vlT+UvVYpdOn9vu19//wofUTJzbe/uDFXdprxpt7eyfvvGPvhp2dnfl8t9OTZy8qhwTjOLdSsgEOttyQfKhK1iKSNW72u0XLg9yYmPUQVjiEneBqZF7FiJxEhCBD/ypK3FCa5jKyroyail1EwN6shUs76PteNR0ECm4WQvjBb3nhmfP28BMXt6f9vO/jqG7zPHO2AkxjyOdFBGqqWkXaW6TDh/fHkS5fvohQEwlppjDS1Bb6jTsBZOTMnHqLUVSVUBORoyOGZhjU3UViUUeILO4ujGzZll0O5REHYocGHiLHoca1bL41DSGEPrV1EBY7YOMcf5gff/T2URXdD+e4CND18TUr48lKzQWExiLLItXmVVWFKvaQi93eZdvzymJgdP18Pm/bDtOZznYXaT7tTT32yR47n+jQYc+XgwdSZ6hISCrm1IyjdTOFgynnHFjcAaIu9ROLsRL3PBrXXdfVsRqNagZuPXx8pWKdLkg8qVcxwlQtFwjuIKArGQA7KJAZmeUBMiYC3Fx75J6zhwG4IQNlCgg9WQjB3VUzgQpREFBVjTJyyyKxT712FmOIQSFOQS2lWAcEZety3wdx8wwLKLyK6JVUrow+p9RWE0nQismNQlNrn/J02ggPyjkxigTNWdu8sblu2ZFcMkuWIByElwx0A5Gbm7mLiAPgwGTe9+bJo4Q6eEqqCWIhBDOHKzwISeLMNLDfocYQTgYzqkKJEsxsQNULV1qvdJwOFxNAYHe3JcxeqrOggsYMz7Jg004kQKDkpYXEzTEwxojZnSKRqXNlSBbe99bffN11N/Nqo6gHwswQvHMhCH4Oy46SWGJYHWYpyZyQaM7YOvp1d77q9z/51o9PqqPJ83hi7zt1/sIji+tvesYNx6dTF8MYI0OtdVc6m7UAwcxsIbDDWcy42I4hVWaCs0HK3mNHCeZLYSIhh0ipn11z5PBqg8d2z28e2krac+9LpRpScgExk5ObWGm4WFY+StSmGE0Kb1VV4wHTDpCrdBpYrmr4EkANAYBGIfAg5dblIzwP4nv1BuVedJoZMaHak7gRdH3cq6dYbV682HIVMjrMoPDxeNOJu4VCwVjdn3abR47t706FJC0W40nVVJK9W11rPvWofPD+83feWnG9cWanrWda69H3f3L+nvlT461RmJA0q4GYERiVplzXpppARuTqBm
cHk5ASZxIzmy5SQC0c+76PdTBrAgdXY+OyITO095DQQwfGf0naiTwQY4FqxAvY6V3X7ETUqXLE1qQxq81ziYfILeX54dX1N77/kQ98+L5mbcOAQNGmicMog9QshAoAlMwsEJeAMXf7HI4/eiGHMLc8Wcwrp1CNbRwqWORSGTUQSXYzsxhqclVXeAjM7tHViaKTmJtA1BO7CZEjC0np7iFQgUqhKLQliBbKhy1BG3YUkThRGBpRtjREXUGq3Zw+9d4dda7jGdLAoxjjk4JFFSm4MFHRdSCiUHrP2LMiWyQNlsSS58JZrJqsW5lW5okmdUN5brHpO1BKTFXqekYWkZTc4FW2Gmtt7tVdNYhIaUnNOV62UZ72VT1Ku4tx00Sxu6694eL50/snzz7n6OHNhkniPC2lTr20bxZIxkEooKg7iNVdC+g8mAUnd0VCUPI+wyi7mVALdMGgB8xUiDDzACT0ncYJEUFVLXE/SzpdcNJRopT3Vg5tee4unn50/ZrjK6PN/Vkn1tc+JqmyLTSnKCNQjdzrYhGCkHeEGtSYoJvuoV80K6uz+ZxI0rzTCaM0uwVZLGaRI/VexzpUYIaYu5ExSIiCiZCbOYlkmCWtWIRJVZNYjBFIpSm6KK/WQdzgRq7KUTQngzNzDyMRURsaB6iE8U4Ogy5bNwYKakFmQgjZEmBaCh3goW2aCa6AmAu5EltpN3EXBOTSugInoaEXw5UpMsWKkvWxivtPnfJPvfsPnvP6ayCHCuVmeQ0FpM95OZfKLrOZ90zBHQXQCAGO+OIv/fsPf+R1srPS19UoYGe/3/bj0VK9vphuN5mZeMGLauGJBeYlNZFsmZn63Ks5mTPEqJCLCVa0iqSoZ8CdAWVzpgwKHeomVhW3/eKxJ7bH4xVNgFalA6UIaKmbM4ehQC3ulg2OQnYpnDsXh5oHdjXnwp7P7m45lMiXiKA+oJHMbJnNcwhBzZwpm7IHzfl5t4xe+byvPHXq1G+99Xeuu+YZX/GaV8wXT8/crt+49tGzT/3uO9++srWR55d/6kf+zjve9laN9SRurq3eePr03oXtk3c//9r57Jwnufbo7f/xt974yhc+78jW4dR3J0899amHHxitjuu6btNe6qrL2/lFz7n1peGa226fbO9cPncWh4+vP/zUox/4xEeb0VqfMpH2PoujyC7ZjCAUAruZKZGDVPo+I6ysjHLfuVoValJy6hqJISLJINVAROSZVCZVpaqG0hgFtUREgYnX+vlOuPO248+85RnzC1sSe2rmJ0/qfU/eG2uGaBCPIWhvpHbi6KGKdGNlxE0NsKtl7utRbLtO4CGYDg3OTmRZe1cjqZv6smdN042bj93wRa+8IYTqwYcvPTV9pGvVkUXInTQ7waUKgLJkjrDcu7t5hqIOVaapG4VQFcYBB1aFiFEulflSHXUyH/qKC9ferPCsiIiFiYLQkE/nnKvA7saOQIkCraw0C6yuukXd1kmTUhyFDXelGJwDmXFWdlBkBCHioIoSbVZE4MopInTWtVlXhNZgRgwhodnFLo6IQWDhWDVmmR1NFYlUmBVO5GyIkVOfBQ5CRVPiSKQrTdXEvLU6Or4aznz60k5Y2Zm3G+O673Ooxl3Xj0WyKkyXjP5l54e7q2mmogpO8CVMzcxiYsbcm6u5ZxcKpBZUeyc3JXNmFglEoppVvR41KRszQ21UTR57+IlT5544tHJYtFu76Zo4lqcfenIlLMaTOLU0bfdGm7GbdyKVWs9jz13iLubEbEhd79ynPKcgfU9tawINhH7REklSBBqnnNu2bVOvnWpfzS5PVypmDjD0EiwGAmn2rOJJKJGrD5SmwjknrmAs7k0IDFjOBcrtfdHzAmLOpKpmCEwEjQxGkmxsXlT9AGMQcyDlUDNgy/PDHITcKCURlhAkxCCx1CjcBw1IAEFIKECDZ3F18qGplbw0eDhB3RLc3Kbq8+wg15x4VI3u+eAHL9//JsdcHB06UOvIIFXjq+RT/txFg5YFwExFY4QBsCiMzU2au175hr/e5icbyDTZodVxJXubwU9MjlVqFXtGHcUmIdSIDVXikTwwoiNIGFdMIlKEmoRk6NUEE2ewIbgHZDehgITNZiWuxSS5RZqlzkIloTF3R1Ly0pOgOiCG2VLmpAZXBHATKigDxCwxRngSNsupCsSWhUwpc8UwI4Kxk7CZQQc+sqILFatmgkfy6BYor63Ed9/zqZc+W77ri8cXH0tf+6XPv/Pw5V//vd9/z3vfcWTl3mtwKs1cap3uWHvuI/c//LH7PvPA/R99z/e/4fovv+vs3/mGF5z82Hs++qcf+fDHP/zyLzgzfeLij3zb3c8fPfrdX3Ht17/g2IXtPR6JpWrhc7A/8MjDP/4Df/k5Jy7e/+Hf+vRDHz955o++6mVznj7F68fj7MTm2rPX19eDrF7cW+lmKxwoNg0wHsUT1WRztyX0KzfdePtWWL90WePoSBwd2985Ua9fP66OTmf1qfPccT1KGz3WZrPVqr62Irl4caGyuVbfeGFPp12UatXIunbU5VUfV08/flqm9//yj3/BM47Ndx595z/6W8d/7K98Td5eqKxB18+eWqFuQ3izRrjv/p29/ni7vX5+v1VywebFMzGnTdIb987z7sxJJFay8BxrroUrxO29dE284Y3/9If/yd/6grq/99yTb//JH7jhtXfffOnC6enu2rnLa5a3WDBS9Sj7e+3elHcuVvt9DmgCr7iEpy/v7exOgl57+qxyrnb38+zcCueq78PlXVZNbGpOOWcI+pgBZFBpoQKzMTk7kZNb0pxNs6lEKfoExtybAZ66ELp21rX7vtLOKfey39pe9raHznM/6/o+d2opube+P22ni9QmTj2nlrq5dq3Oc56lnpWmnc6Yeu8XxPscWaiaxBATIzILCRCbTISIGD1KoOwVheg+Fg1EAd4qLbLnnNx43ldg3Z1dvpzHxDyuK3DlKtL1KxV3ZgF1ZCGCsxFn5J5ScrXkYPOKJbK4kxIbC4gERFAOY6KJiDZxRbNr6huvmQMym3IdqypEy8nVIjcZxKZqwKjyDjsPp6c+3N3/nic+9cFz9/zJxx578uzH3vuZB979xH0fevBTTz35wCceefiJBx75+EOffvcn7n3gwc88+smLTz6x/dCF04982rDYzZcubT916anHtN0z759++pS6L3Z2u9ZTbtEn0l5zv9i/qH3KKZlZk9Fs0zDwgZYe6goUDjvoqKZhbkRW1c5JigwrUFgxKDHAwFxcap5gqJc7k7uzsRdIfBklD9rbBeh0V1V2YhJSL9M1bHgJB8PhbOxaGj5YhAAxMwzVmlLgdy7BCAmAwHGQOctKQXKfMqV3v/2db7juLj30vJFNFpZH0pqui1g2l/81cebPRPQNCMA0YO2Zd3///Xf/5pl7do8cOYTQplyBdH2TJhdlsZMpUOCQTc2RzQAuzUE+TAUAhMvClPJy4fYHW3byEykb2DSgQ4ZmKEWriIpwSCIwvCqJl9PAYceyQaA0lAwtkOVvR2nkjjFKCGamTgEC967PdazUTU1hLkONGhkmEl0Bm
NugmgBzM6SOd3f4zKUMztccWhlLe2jjJWnz+r/z0+88vEVHrjvW7/VrR+MP/du3rR47rjvSnZsuRv4ffvcDP//PX61b159fbKbzF97wfW+31fSOe977O+/62F+/6Zr33ffeldVVaWdeu/SjGv7U5amspoef9v/2B7G6xfvpyv6/ef9Ks3LDePbz//g7PvHxD/2rt3z8jmPH/sH//kO//Zvv/PUP/f7a4eqXf+Q7fv+9f/gzv/rwd37517/k+Wu//a73ftVrr73r1rv/wY//3Dd90xd/40u/5Rfe8sv3PHDy27/qxV9wy0v/0S//1zP55OtecMPf/tpv+tGf+7UPPXDxB771WyzvnT/zwA99++vuuWf+a+/8HyeOH5paTvCK08w3/vsfPv4Pvo/f8t4H3v3e7a31e37p5370v/y3X7pY6Vd90Stef/er3/ieP37o3Ce/9Lm3/+WX3Xjv4w/edCh96jN3fvLkyRfeFV5516s/8tSj+/uPv+LZr//Ifd1vvfteGp1fYZknG1fNiNFtV2/8+R9+7DPv/6s/+Aty7ZHzly9u53c945k3PPP2677tDa9ouuPv+PRH3/exJ/uw0p3ef+6dL3z+zeON4/LWd24/de5JW9n3+eb3fc2XbG1MF/NTWxtf9La3f/QLX3rn5uTYu+5//+lL55971x0PnTn7yLknDzWThaAXXcsrXe6osJ3MysQFJ2Q3MUBCjKyqOauIFJyBiDp2QmmahcEY7DAGU0KfNTMMlNwqgAy95r6QBcr+L9aGijilERxgYspqTu5mbEE8EecYmFxM1foujGpW7jO6rgPEC+2FGiNn1NEyhCVYlzXnXsK4qThYUsi83/faOFTEVZeSCOW8cBNmDiQOdyklBoibHwiOkjIwdE8DtVHIHjDYwZ5yz9RHoladvI5RRPq+TymJRIMGQzCIwxUGtS67uQVM98maqp7VF884aU5xZ3P9xk88cvbUZ84dTRtRD20/vedx/8jq7Aif6wyXz88u+4XJ0XF7sj+uG1j044TpzuW9xYJQdYtOVlfaRT+9gO3d+UT3xtWR/ct9m/rQR6YltbMwiQ7QZxFaqggWeXstTQ9WYD9Ckdcgd3YOFIoPOIBuaWArlobpgVZ2BdqWUgOVA0SzhM6gYWjHgNHjSqV0kKIcxOdQ6lHLFxzUX0sIPxi2BM/m2QpsqqqR44XTFz/6tt+UfDKbRpbWnLU3z/L5S5WaOSiRr3lW5a2Xf8W/4vXHA2w27SaTVSLqu1m32BdBJcF7QNi5QExeVHDLRxWlmL1SBDNRDWZBUSmMzAKUDZEkBCZit7To2p6yeuY+IyVFspgcPXWlF4yuov0M5b/y9ZIAQ0t6QIy1GbIaQCIhmZWDPPTyiAydw2aD/rC5qsoygS3ykKpec7j1xqPntrfDGA+e/KiMn/iVH7vtvn/2xb/+I39PpCJa1HXqORy95sim+aXpye/9ztc88vFPvf+T8wefuuevffHkbT952wd/4Tu+63VfpTz6939w+vG+/he/9q4/fag/tL6mNg7WGM3bsT9w6uz2/uXXvoo/+kc/8Krb7xiPV05epHsvXvrgg2c3V+e/9ft/+si59NGHnp607/qR777xaLN++uGL937kwY9/YPfE6vo3f+21P/Yz//FDj8/+/X/7+D0feeTQ4aPvf/9jN1x38q2//eFHzvb33veJv/qV1d/6yjv68/HjH7/04KnPvPNPH/7Lr33drdf4L/zX3/v9D+39m597y/E1WeOjmoP4jJM2HKheHD5xKLe47drD3/nXvvrv/tC3fd8P/eN7Ltvly+GJe+/9ojvpD9/0hx+6/9z9H3v49S899Av//n/8yM+99Rf/ydfdsdH9/C+97zUvPfLYxz/662++9xd+9Zd+9Htu+jt/6QUXzvZxtBXhwfP5y7tfeMe1123qD/+L365uvX1t02657cinL5x5/yc++eGPnLymxhaf/Z3f+uP1zesvnL30/X/5a/7t337DG9/2B299+4f/yffe+dzrrz336f6//8R3vOyO9F//x5t+9j99YH1j+6GT98Xu3GvuOv77v/LRV951y0uePT516lOxkl72Y9Sx8dynGGcWHEiNllIzfCC+aC7tIyUvH/LColfARFIkntXFOCJ0TD0ABKGKlbJRR7zPZi7moi5OAg5E5K6qCg8EIRLiYABzCCIxCDzGGKtRVTW1iIRKJ2OvInug2HisuKosBiXyWBkLZEyxIoFX4JVYd3M7c343UZhNu+QEF6KgJuax9A05CTmziXgwhzLAHofdreapiEcPYsfkHqCkpWuC2BkFSzCzVEUOofi/vDRZhZ0mxgIJmUKGZEjObD5rtup+sUPz3nvODst24XJ3rs+XL+3vX572O7s4ZXsP96cf2bvwlD/5kQuPP37h8SfPnnns3Pnzl8/sTp986PQTn3jy3KkLl06dP/mZ02cuXFzkdndnOuv6FtNkuru/t1ikxaK7Anz7VfIXtLwwNEAIY5AuYR8ktmloOl0a8WVZcmnWMYgLCg9y4RDBIBgLJkKEFQVa4yXc6U62FKUr1qoMUWCnQQNSijTHgBV46U9cankPwWrp2XGG81C0KS0tilE9vv/j953/yB+HcEktBR6j6g/QmP85LPM5LhZyKBmYkQ3Hrn3Ns1/+pXv96Vo2xk2dWtU0DqGKASmlUuckyIGmCg0q3lSkgJUtkxbyrzIyw1kgwSUQB3DJecSzKmUSJqpB4qQU1APA1VCMpcJnXnK7ARIuupK+HPpThIVp+VlTqbkzSQjMrJqIXIRESD2r5xLsH/gG5gMRL9Zsx5q142sr41CvHeFf+5NHvun/964v+ev/+V/9+r9+9cvxL77/DZefvsAyGqWwaGU6RjXH177qRf/tNz80uTl/3798y9f/6Fu++X//uQce/U8//j0v/euvetHpS080oTnb53696b3dC+aM/Yr/02/8wemnpp0f+bbv+5Pf+71/94t//5m/8INfcXh0ouLJ1oT3pmMfTw6tVOPm8JveNfud99zzz/7e96ZzKa3d8NATJ//Wt3/1e99+ep+qFz//2Ne+/vY3fPmrv/Y1r+vm3YXLE2GujlQ82vyhf/3mV3zx9V///GeemXeX9rYqpb/yhlf901962/E7Trz2y+546Stf+NIXv+i5d2zu7E6reCQQktt0lq5Z2zoU+a5rasmP/Off/JkvfOUdP/S1/1u6jMtx/Jkz+x1P1tcnU58D/O1f9SW/8k/+/m++8a0ffPCR5vDamYvdhR2aHNl67yPjn/rFd37rV7/gWGzn08tNrAxuLZp6XkW9+cYTab5DmRSV+ng7jXhETzyyf2p7d7LV7LWLW9a3vvc7X/M9/8c/7NaPf+Dxxb/95d/+2R/96m/+sttvOn78b/79X9lOI7pm/C9/9WMXEtqZPXHx/LOec931R6/7G//sd/L4RGhGLVWeUCWCjSTxUuhmSbhyU0c2GDylZArm4SAXps2gI1SkhgsHQ93VOYOMFKTwzj2b55zZOFtR7xwGSCX1PqNXMxv00dRNzcAUQohRZr0loszWmvbmUjlCMnjSPsYI
w7iRumFVHTeRqa/g2i/UtYfN+tR2tmjBzptbk5WVkZmRG0tiSUwHQptwJy2E0MK9Zji5FvnCQh2GFyVjD+gtQa6cAs0Ey0zaNBWAMitDRIZJYZbVrTBrncpKkiF0TPVkpZ+2nIEEHof9+ba3bcu9d9RJe36ysyDLM13sUX+5t1kz7fj8Y+dlZqradf7ph05uX9BLp6fbJ+c7p9Llvfmi37988UKbeou+6Nq93WnX9trZFXm5gxCvOGSoDdJWy9FLhfJPQJE8BEDCEgMFUVxtFq9MISi10AOjP9hQumJJS6xdDHv526BOttTyBYwKBCgSRaRIAjmVCa6l1RNX/foVu+ksEC4aTNmUg+RsbdvD6/e+/U/70/dEaS3PEpi99aFJ/fOy7+aoSaDOEZngX/jFP7F56zj3O4KuCkLK8MpcjEAx2DBtgtzJFMv/XZaLixriIBnozhSUJZMYm8EMGWSBtQoRRW/Ug1MAsZsYQuH8D/6UluqDMmgYMA2UiJL1lHknKPwlJ4YIF75X9kEP3fKgeCWxKrgZByHhrFqqEwaHcHZb3ew9doE6dHJ4pb7mmsObd93wf//7T9//ULph0q9wUGdlrIr33f7tR264647rHr90cnVj9fDWketuv/HRRfV3f/ocH6LNyr1zCZtNoDotpKlHbbsSk/d+z0MX58mPNZN4Aj/+Zv/yv/kLG+HpX/vJbztar+9fTKbp5vXq9IXdaw43191+/O/8/9/1Na898oqX3HBoc0yz9oZrt26+fjPP+8888PTv/trH77yNP/7ht13aP9tMqpW1Zv/S/q033XLm3OKf//zv/vS//EvPGB1+1vo4Or/g1hO3Htrcntnv/tEHT336qS+6s3vw4w83aytpNoMEBGl9EcK0GYWTJ/v/9oef+Sf/4fSZUw/94//jBbds6qmz5zc21rY2uO18vhcr4fsf+vBHP/zm59/5gvWVI73thYbvvuXabnd+7MTmR5/eWTnEN240bbI51Hw0Wh09eOrJhZ+/fetGSR01KznRCJxJrcPGNWsbkxNtl+ez9rpjTTdfnJzVR6u1m69r7j05XcvhVXcfme7tr990i62M1urVXef9WerQt3bx3/zjb/+lf/cHR+64ZTJibbV2ZPJLBI6ptKgsY/bPHmYESAzF/oYQnCnnDHAiz/DEyEIqlBk9rLVMpmX/WPbgJAboMN6rEL6yacqah2AhhBglUIkfQa5QwAgZ5KqpT21OvSmxB1UQmpzItEKuy3yCnMido7gq2JkQSGJVB4oVJIzWRrNZ9/TpMzEES9nMYghkLiLRYE4Z7hh6OsnRmwoJHUyVcKLlsRUDm1ZcRiK7QducNVBVD4Nw/Sr5VXclVXFjdTGIWzQrwvkg4SBKAomULdaNqYSOBJTFFnXajm1yIvc29GzTKeYMG82qXsO+e6WNZe47Tfsh7xn3TJC27/e2251Zu7u/37d935t2jt6WZn2IyMq44aIGHoYb/SzpXQpOkYkduU85ZxGRGA6mEBNdZUOu8hlFNrawrEq0vuQOD8jMYC7ZrUgOLZ3EwMrCFVShAAslb4hL1ebBZwiWIy9glou0fwH0NXuMUbMxhwsX23ve8iZuP8MhkSV1B9KB/uVf2L5bn82H9qM5NI7W73zZV37LZGWvCqgrdkwLehSrKqVU8huiYTF9OZiwNLAU2d6DqUYMdyIbqrdLvMWK8HjIlrIvVLKxZGUzA+UDz3Qg4zncpZkX+U0uU6/9aglydw1hWC7VNEiiF3jUmUiu9pkAlr1NoThi1fzEGe76Wqmbsz15mTpdnH7y3Iue/UUve/GJt//JH53dGc8XEyVwtAtnpi+463buNXRNu+/nzk41896lva95zatW2v6d975n89Bh4kueuxGt7eepxqat6rquN+u13Z1z66s4JJsrG6MHTtVvfu+lZxw/9o1fNEkLaL11cV/qjfHO+fnOpdOs+KF/8FM/95PfeLxnNzx4/0df/8pnyk6zLzGt+5lTerGtrNryTnrvROr509vjOPrvv3Puwc/86U/80FftX35slvTi/gOv/cI79h643Ew2z0737vuMX2x1JVSjYFCWTupqfdHlznVKLU5UzQ3rf/KmTjaxeajJ2xfrPksfpoS1CavhE4/O3vyRR29+1vpNx1d9Buzt3nrdSqLt7ZOnXnbbC+aX8MSli2E1RifrbX0z3H+S/+sfvf27v+uFPMdsjkYWp5449/Kbn3fLsUOX5xev39ykgM2VZrFY9PqZ1770lpNnLsxnKrrZj/hnfvFPNrZmd59Y3T7fm3Q+mx09cbhNrOf04x/7vX//M39z/WIznS3CiCkzM8bRKwTuFVxa3mUpWABmiAxTJENkIkopqaqBs5mQEPEwzdidC0GZ3AM7exkYWZNUEgAoDzN+nX0wBYNoNfWWcs5MLkIhhBBCVcW6qfIsNTJppEaXpdeNerw5HudFFwPDPEQ31aS5qgVmG6tNHIVmJEwW4BXJ/t78ibOXdmazC2cXVb3CTONRI1xry00Ya05CzMWXkJVpMWXqC9TYAS8tXjgQFnZiEo4xoEz3VUOiUQxMoW37wfO5m1kokwaECwO0SKMbmTOM0NGcm4zcq+Yc+7CxNt1N46lJtb5bTS+HBed+NVeU2FKPxIvR3kJ3pfNqUmOF5/N2Mc1VrpDF1TR3oSbTGjZepLy/v99mhVPuTFtldzVoGZg5mG/iq8woyvSDYrJFiJbznN3JrFRXPDAVgY4yki0MUxdKuzHoz1+DFQItpyHDHOQOK1RbcsjS25dY3rLCiEHCxERuSupCV7zOVTdchGeN4FGCMGBkKZMjCOfcjprq0/c/8ci73xz0QqAOoYEnd8Pno0btFupAJBnu8DWlFoSto195/NprpIqzxXQ0qjfWVwE3zbRsx2IMEy2G8Jkpe87kSpwxJLO5JIZCGVYQFTawAapkZpIRovNIOWpQDwoW9/pg1h0XT7BMjIZihln5Pi/LFaoWQhCJqqopi0iUqigMEyAUSjY0CH+ijLEtsUBRmIBrDoyved1dW3W69mh7LE/++Q/8zWvHG6+69a6f+ZEv+9lf/Mmf+E9P/+xPf+Mbf+zr0uVz8yqtIn7rNz17ZXz+7O6lFz/7xd/zV76yw/a3fs2rvudbbvyr3/+Dn9qvsF5PFxrrwwtKYw4iYrnr+260klAjIX/rN7zs5Xec+M6vf+nXvf5Fpxcfuue+Tx+9hp/7LBw5knTa3X7n8de9/MtuvPuan/rtJz704Q988Ssb28R/+PWPffDBP/y3P/XyL731rpvXb9g61PX93jNuue6O29eJfZL9ptuv/eJX3C3XhW/6oTc973l7m5vrGONH//mvfvmrx9/3Xa+5bu34kc3JtdfEqqLdbj6NSrSQMEt7s+sP3zpan3StX1fdedfKDT/8w9/4xv/0G++7/+y1N959/Iatqkm8kN5mh9bXbr7p+lOP7T99+f7n33mn74bV69fm24uwf/Qvfckr/9Zfe/XP/8pvXDYeU10pFjxPUzpx84l/8FPvkNWH/sXf/4aVtLIxuv2uO+/4wW/5ip2zF9tFuu0Zh+fn8lc
+7zUvfsHr/tK3/Ksf/+HXPevo+qqu/4sf+65/+fO/8cBp+1f/8Td+7v9+/R0htNtrdxw68oN/9VsevO+pV7/uy//Pn7z//fe+8bf+zUvvWLuju2ges3BNaTHPoa8mJQbKOR9UswYfDzNN5YgdYO4FT6QMV7NspFZsIpmzUs7Wk/WCVKb4ununBy9Y8kUjS5a6nIiFiKrAkRjmqU0ppZwzc5hNU7/Qyaie1PV8NnVLji7A6qDjMVJvOWFlNQa2pqpHSA0Zi7WcZ6kzquu4hhyMQZGzL1KeJW9j5aDslMukxgAnqLGXOZwRgkGfarAnB/Pli/JjCIGNy4hjyx6FUi6FqyHYF0JRXVdhZ8lF8hCuyOqW3MgnXK+jqy0xrVXNkY0LT57dIEfbZ8mpsbo3TqTM4zxZVH5u9YzGuSaDOXnKpkgubqYpkyVkBLEc533Xyz5xTp6kqbJ5l5R+8tueUx4YM5djHAqHh4cUA8vszEvOrgM6v2SJKjNH4Zx1qXupIkXhmt2WkyhkmM9X3oLIDUTOZQaO0VIm1DVydSU2BqzIJLKHHAvfA1JCyOEnFJiWE4h8UNBmYMAhSuNfUXtoU09ELmkcKgkrkPxVf+Xr1u76hs7GDWVDDTARw8swuf/lZWZMnCmHHFQdtUrv+z/wXc+cVcfZ+/EoPn52+/zevLUADdk7dzewEw/STcVJ5EQcyrBQmKMMRCUJHFPXV2UgiCkxZ1OSSMk4cNE5chtmX3lWjpJNi9sofAZmtjzALMs5NUPFIsaoSX1QhcDBYSYis0w+qD64cEopkrhaIfgEjp7NzCAEMhG645oV37m0y/3mZPK6V7zo13//I7fcfuLS00+8+9HF1ur4eTcc9rx971ParcTRLLzijrDO7X9//+Xv+rZXrqf52z768F3P2rrnntOPbdebR8ZoJQZb+JSlDim4q4WcOBLUp/0v/dg3P/7YyT9930duecahMxcWn/jUkxdp5UU33nzLibxzYefDpxdf+sJnbNneJy5NP/k0tp8++Y/++ov/3X//5Ln6WHvqiefftvHs26/rRC+duvDUxfSs517zghPNH334vtav+4ovusF3tz/89PSdn5i+8hZ92Rfe8rP/5eH5hDdk/1XPu+mGI4dOzXcvPjG/78mdvLZmbadcgRdHjzZ/6bl3bE22bbE1s3B4svvQ0xd++Q+e2HzWidfceuiVd17/1g8+eM+pi6+645mvuXPzE09v/8qfPPbi28ZfeNNN7/7kQ9/2+hc9cn63Tfyy5xy+dOrkP/zFj9L116HviWYxbu5au+nYn4dN2/urX37zyuYREVodNfc98Njb7n38b3ztlx8b94/t7d19/M43f/ijP//GT77hVTe/4iXHZR63t3d/5i2fOHL9bZeefvwbX3HNTTfdtLPX3XXrDb/5ux85eu3md7zhuf/6je/743c+8q+//yVf8pVf+w3f+zPbHST2hhWn1imxxTK2kAKolNMtm1kIUuw+EYsUlheJSM4mIiGwQ5mJHWoJgEtw9+A8jnXqexFOyNnSarPW932X01V4d+FQcDCsNCFWMl2Yuq6uVFWI2zvTUAmAURXzwjUtrrvpyFNP7QjJ6lpVBbl0qessHz7UdLO5WKXBV0Pc7XSv62uipg7TnNpF2tvXu6+JX3LbDb7IvXAFph45FADYynT6TK7ZxUp7duYgynD3UmRQkMGlx2SlPnlu9rYPnJlHbG9vv+jOw1/6smP7F3Ndj9zdciJGIIYhJ8uRG682z4aVuYA7YlapydF1i42vubXLK4/+wYM3Xis3fdWL/vi/vO84sHvzqXAe/Xo9mu6e2L+tlViZPHbiyae3njixF6998sbNa2+sXnp9Xqw8/gcfP9pEUWflRcobr75ucsuJj/3xgxcXH3vOS29c27g7X5hcesfDm1Wgf/qddw9QSeHhQ8pRVzaQLM1EMLNhRjBp0QcX8qKiVrgTo9C0qTc2oSE/A7MxyrSlInnIzmagYXw8UNgchUOyZJqLiCtYiYMYWUYGIMQwd1jR/BiQE4GLu3KgIBAjU8tgYhCTZOhg+NzhS+ifpWGe96ledYPfcPzWL/vW76at52SvgxUt3SrmMDR1QZQ/p5l3BxEcGXDSCOkyYr+Yft+3XZuqW1aOBOsXT53c38516x1lW5hHltxrkCqbJnMbjKkdkFELZ99JmAWLLCLM3vWLGKOqggNQDDckDIuQVYlksrI63dsmSJmfZ3CSoraPYICwWkY2Ek4EdY/Ekcg0BSqhvJUSRTawk1BwM8BEBPBsxhRA6mXl3RkIDDhnt9mCzDJXHqPM9vea8WTeJ47V+spI1ef7rRAmoyiB+6z7+zMHb25MUjfb3t5fWz/SztrRWFZX626uTuDldAUhdqiAOtdmtX7sY6f+5f/53VUt3/9//erktkhdNd6sY4x7e3td206a0Wg0WszmXddNxuPQOEK8dHF6/NgRt6mbtF2Yz1pQqhteWVnZ35kuZvPDhzZBunNhAY8rm9KsxJ19y7k9dLhGDm226d4MSZm5acZ1XTNlQtfCmjjO2XZ3d7uuq0ejrACCUX9061DFsr+7tz/b39raCpW0bdvvt03TrG2tnTt3Tjhurm1cPH+hqqPJaLa/VwU+cvxI0pw0NVXU3DtHstTEpu3owvYld63rul8Yr2JjdW26t5/bvqnqftGPm8nGxsbZnXOmKkQpd0dPHE+W6hAvnD9PbpPJ6nR3Oh6Pm6re3d3eOrS2Mh6t5ESVUC1nL+1yXMsJEgiUCwZYyFEwL9pHZkZGECYqSkQD+ODuQmbmTVVjiQeWiiKqQElHsSKh3pOZeaaaq9UJ7e4vcmaSAAFgqs7OPVEVUIuPxrHv5u4+biZ1qLb3d0lXmNKJYzWMzl7Y2zh8aHe23898fU3WJ+Mzp/Zi0zQj8pQscRhpjKPZdA6hqomX96aXdjqg7vbyFzyTXnXtNbkbg4nQscVEo+iduxdssZDBiMQUOSQsNYXKXHh2kHNC36w2p8+0b/vQ0zMJ+xf2vuDOta96yfWXdrsqBiI3g4OsGERXiI9ztXGuqhei0gcWc4F5xzuHXvs8lclnfveeW195qxw/8bFffN+t166fP/r0fug1XDq0ffhYf2NsZbqSHzrx+Kw+dcuFa489dTw+a1x9wS39/uqp33vg+MpKzDNFZ7Sy8ppDkxMn3v22T89mn3zuC6/fOHxrTcce/+PHj2ElREURHSnN+FokuWhojFyC5mW43bJjd4C9wUTDxEWnNrX1qFK31PVFssQV5OIwEXJ3zWZkIpGGEdtKRI7SGDioPhLIXAlUbJNJiSGvDAgeOp58GJltORd12+TKpUGHCQpzK8PkCSSgYS60u7uppSih72ki1WdOPnzNB9/27C/Z5ObGRCKehMSYXbOEgKIb/zkFxQaR7wAG0KcsMUZuM6ML852dvFFtrqzm/e29SRz3SuNoSTPF0Humitk8uAuQXMwMXvAptmE2BOWaB0tNwatomcpU9VCJF9F2IiIKwkm9TZ1wLMkjEUEzyMjARWBJi1ImEUiWko+9CsAOYjgCuZOqg0WqlFN29ihkSG
ZmwiALQ3rlZWJ8L0W6k8aTAshlYTQbqzHWDYvESG0Stmo1KNxcPefgOLY+dorap7oaT46OnGRSjYnIukHwhJjdzAFndvPerAnred4fOdJ88BPve+7dz9i6jg8fO9Tva5bk2q6v1LTalDGHK+vNGo9h5IQ+56NH15B74VHb9XW0laON+8jUybC1ueYbk5Q7Nzpy/RZsIPuuTUhk1VoTkxj56KEtuBMh+wKYAUI+rlVS2xPj6OGjhRFWKEZwMbP5YrGytTHZXNOUYLbWjHUyUfgsLTaPHTJDm/Xw9cfNMoQ3DjUMmi1aDpEpdupBKrI5NCTr68DXntgwZSKSLdVMOdvG2gptEQexrMg6Tzvr64eqIG4Gti71Ck+uhw4dch+JEIdJUdRZO3wsuW/P/ewcsbJQscuKkFC0pF1Tiemg0OSAwbJ5KWxhkKAYMvicc6EoE0cemp0TgDJ1CxTNe7iqGROZmwEsBPLZIpuzxAqAOUDw3CfTZhwtZVDlvWgvIWBcN/t7O6jq3OcoPk0de4jV+PyF7fUjE573kzju5l2MOh4ZsU42IzkE0ZQXxmY0azuycGi1coSz3axPMuL1ec5G7FJ5Tk3cbRFYuGjSlsxYYAjD+G0v1O3SWu8Oy0zBDIFjGeLOjr5Ppq6qymXsqoNIiBxGTgYih6hLMlbjyGBxMfaJd8JN6KOfuOGWTzz4qGkXY2xjuGiPba1uXHftF+x9ZH/ke3HBbE3oZXPuKfbtan0oiiQ0piyL7FAlqRMJpy5yn1ayxN1xtdVk6655yXWLLoVSyyYamhDK8yAQm3EYBOR8mCJRlNqX/UKl3cadQSjT4VxiCG7ivbpzifcpsOoQBhazzo4QorvoslDrBixndBFymQhahk4UZRlyZrJsAwfVASJBAX2Yyuxos+U0PgI5D41NS93qEu+bw1VpJLagfoSVqv7Eh959/U23rz9zZcGHJ9CUZwirgYObldf+nOIEBmY4xKDMiNw46L57/7SdL+pmdZ73WmtDNRmPaL9bsIQ+SepMQsxZrQhbKtiIQjecFmIv+rrmBhVid1eQJsmZzQIAN0pASZALxuXuBvWspDTM5GO4C2UCwMQIWsCZTDDTUiFNZmUQXXmyTJyzumoIgRxsTERlnCJziMQ5qbKbwQniJATOMHUFAqNMH9VszNx3GUzuFiQwsrnBjFCBSTX3rgzp3XLvoQq99xwJKhQi9T3ceBhVZwwojMg7nYGb0aEj7/jYyT+976nJka3poiVJ0SKzmFmZw25Z1a1pGuK2W3Adm+gZ1KfOR6GKsVr0M3dnDgHQXES1ggHUetZE4EViUEWCGNy5zQtWmkcWp5BySGpNPe6ThyBmqKJoSmRMbqq9sCVpYozOkmaLUEU2eHawklGAIav1WlVNzt62e+PxOGsGQ7OOqmhOfUohOJmBGgibWpeyK7FEAmXtItcRJOC0SA6XGNpkzlXDyVJXzk4gkYFCGxJS3/UixoysqQqVZgd4fZ2Fo6qLVKYQqfoM11CVUYxDPk0EcnIi0aA2lE+puLoqBAAMY0ao2YxU1ZE9sIgFF2IOHN3VElioDiFKnPWZQnTiAQwQliqSWR0Dx1isyurqxCxTEBCFFKrg7poXFkjyzNbHKzLvErUU13d3Ltd1nQ1s0vcsbJZk0aamGe9N95OnqqoWnZoZGyXyRexzXNRas4tz6C35oAfnBZCUQFbKwlZmvBCVBYE7HOLk4lmrIGIsDBFJvRs8SggShZAH+3MgkOVOyIwQOZMEYofDvVv0TK7cH7nrOq9HT3380VuObMXs6Nq11a0773il7m1N57PD1bh2ZEtGQbKU/vzonnNyJnOvFW0P2WKqo2aqe9S2ZbsxxZDmJmvEESHLgAkAFMppB1B2JQ13WWb7wNTcBbG0Jpf0rVg/YgRI7hM5qhCTkasWUShPSkGIyogQCqUUp4llCAeIhi5XAhgEpWKwaMnKKhnT8C+ZHDAndhcCkwwqNMSFEmJWYil2L/EBjIZRIUTEbs61Wl9LnbVvsrQpfeBdb3ntievCRgVaYeGMBTA2hbCjeK7/eegOwBhsTiBzEzhOPnXv5tEv3Dr08nrvvIjvzbtDR+pJakOapGbhWYUq5rBok5OoOpxH1RUIXqHZVLOro2jtAtxnLflyiMLMQbHk3hARGSznnHOfpCl94uVgFqsPYOEWkrp7Ioc7W3nextaXAgkRgYihHLmSkFIXQoCweibXIAFgMa2FMyGjECtJ3V0AcG/ZUJoVKffZTZtYpTbNhKsgueuYg0jQBAqULees4/G4nS/gwT33yS0ZIbCoqXbLmaPuXtTrGvJ+YRrp0LV3d31rqW9CvWhnWcSSZdMQwqxdjEYjJ5stzDgC1vbKRH2nMdZuZupbo2subZ8OMVWB1cWs6nqrx6PAqOSI5XjLLdeCsuPy9s4js+lOvbK6tXntzuVds351ZX1z47r96Y75Tp6e2tqcXNy+qMST1dWsXlXV/nSnJpou5se2btCUt3cu1U0TJ9V8Pvc+u3tdV+6YTvtQRQ60sz+Pw+RbWuz3QOlHIFCuqxW3xMxRanfr08LdQ1xJqrPFnImratWdsHBYfXhj88zls4D4sv04BM6LBKRAI7Wi6+lmcWE2Gk3c3fo5wVXdrEVAASSQnRxF9rYAkcuAAwwn4hhYVd2JjDxQEVwys5aJeQRAPbtrjCUMg5qAQ5cdGTmxayIOwkU9ESIMI0tuhhY6buqsPSjXzaTtdWd7XzUA1LWdxNAv8vFj67PFbH+eJaZ+UV+6tOjb0IRmbTze3b5Q101wJLF6XO3P5xQDZWo7mc009cjw85f35gsVq0xij0Tm4DpqJoO6EVEgliFWNRhKB5cbHF6ABSc3y0wcJRAAU2YYyJZtN1SEoZyLHhaBcpkiKUhlTigxHGzeRJAsek7H777+0UeewJnpiduOX97b2zxx7XOf8cxKxtt2WbudFFYBXsuHL1c7+zGu5WrUhwBSz50wa2XoOmjYaKwOMbf1NG/5EVyae7eoed2meRJScBpi09JTIwPp0HzZrUoOGpDQJfuCivY/aDkejojYxN21zxypbqou9TklEYFCrqKhWJF4JBBCmR1RvnEAdjCbgobpX1So9UxEbj6MMiQu00cJ7IPadvGzxRcs7xNc2kiHZlIq+rmUhUQpCyqzaZvGk9XPPP74de9+67Nfv5WlUa7YOnUJcQTXgqDRn0Nm/EB3oUTHyGYIwns7F47X+s1fc9vjT2nuaxbrchsJlLnHOszJLYTQ9y2EqQxW9qGxa3BmTA4GWPMcYJFoZg42yyJC7LpkI5Yf2cFIaCHVApos1RzN3akPsTIyz8mNmYMvC6MSVLXMNix6FOKDB2UeGhcKAg41Ig5E2T0vxcBRxqYQVa4JltwkBtVM5BSo7ear1aqD+5w5BIelxbyiyGBpVmNFi9m0WBAuDNyU+0KgXk6OVi8fTUZNtWj3kiOMmxAiJ2ivuQOvjoRj13XMnFIKkfu+DyGMV0aWZL7fs7ghraxP9qep6/XO25778COfnLfbR48edpO2p2ayrqobm2uWW+Z2a2v91NPnF9PDe
ztx+9Lu1jVH11ZHs9028Ora+mTjSDx35kw3u2a2ezzWowsXd0xovDKZt61ImE7n1aja3dk5srFF8L29PYkcm7rrukoxn8/X11cB3tud1qMmxjhdTJtmPFvMn/mMZz3+5Kky3yLGCHjWVgJyzoRQ1/WinbZ934xWrENVxaw9M6ogltoqyM033njPA5+WUIVQqRZFcQHYCcESMyctKu6Wc66qmHLnaNiZHEJKbDn3AIikdbVBHBRFVmio+ZsLqEDww/gXYjPbWBt1XafqFEREADPLxKiD9G1KKbuTQUOQKMGWnI2cc0qpeI8yCsMxisJBnMglSt8keOl9taR9jJNFvxgJ1xuWel/dGM0WvVq7cf3G9oU98nDixJG+b9VjoAWcJ9VWJARom/uNsYc11i6yXx7JicwX2rwfG0o92OoIcVcmKQXeooYC9wDjIlticGceZDLZoByL8ElpdMqGRpfKkTpgV0X3wwkQEVYK2avsBoSATESCvOi7eedjGY0mH/3Qh29ZO7bbbfM169ffdUemvFaNzj5ykdsFrY/Rp8OXVxeVIHBXoYmmZCqBmUdZyKU3qseRQZ3lee4Oh7rfa+uZ7o22V1YPpcsexECE0kbl7ooi2QhyIhe3q1TXixGXqzig5uxsyDbozIAgqmoKiQwOuU9VLBrfQ3+EWQZQhTLc44q1HGjsIDio0P/M6KCwbirMmczNaEBpuDTGlcdDZAwqxQyYwZ0kuLO7FlgNdAAoKVHQpMY0ClWraVSv3PfBjx27/tYjd2+5j6WIS3j2Ajl9Dtb7AV7jhWEfGI7zZy985H0fne1Vz/ui22d7iwAW0lk3DWGclVVTLoO6mDxrE2Luk3L2oZDqDrXBNnNEWurVLadUMwNW6lqqXuK+nIr0ghv1ZbxgkZM48NbJ2lDgi4KMOcychJN6cQyFSyNEwtHN+jynEAsrrlheM2cKGQsDWWEimRd2czEboYpt6gNxUdRXTQY1kCnDBWREBldXBMSWE6wXJjiV4TKWey+T55Z8O79SanYogZNhMlvsj8ar5JmRQwiAWVYiEomqSUQkcM6ZTIWrGEKfW2cpHyJU+vb3/YfRaFLF5omHVDUREYdI4FNZc07so6xarWQWqeLq4Vq6Rz95ep7GTdXm7kI3zwlVqGKMXQAyrzUTV8VuOsRgyLEwStt+rUh7/pGU0nWTxhYW5hSIW7djteheEg6HY+i7TBkbBN9rjzbVxU/cd3Q0gYGcqXcWMmiwWlXdkmQhVB5ARH1EJQHkQm6qiDDomUfue+7KuIQY5Ag8VD6NQEZO4k7CAUxlV1eBO98uLQ4iMZsVbrtDaxTjLqU7RTV5yU2rkaoKkgQiiA5mjMw6dwNM0RfSoLuTkVQOiJow85JCTa4SZMSMoePCGUAIQQIlVxiKfkl2YwbIBS48VngyCiGk3JJ5CPW8W6xWVfIWFMiv6/ueq5RN3UeVpxJBF7xBRCQQYEqMsPn4+z/SBBrx2PsQhNuclA0gElZVZ6haIAFRdMGS3UFERThbi6p9CEmKIcnF+BmcA4sMJUBfSq2UwNMJxmYEHUZUEIMkrHkX1saHzj18oX96duiaI+2JfPgLb5nKAlXe26bHP/TES05c13ofmCa9s4YmezvuqRa4MYcA0tB1ZtrEsBrY0fche9eEhFm98/DljZec0Lx48P0Plc4FCjBbzswoNLjSavRZsSrxoNrLcDIyYS+gLZtbQe2J3QBLKXKoQiR1V+MQyT2lchSZiLIr8qA0wFz0bZatzz5k+eTGAJk6i3OJn0tvq3lJOJmWk1TKvEcnwN2YxM3JmYrCYXmXJQc8ODnQMPXMlLNJHqWRIbz/T/7otUdONDc8T32s3pNrkIZhtCywHCwF0VBXxnBP5AZFCixdTzfd8rJTT61sHuPYIIZFXadUdzONY8+ACSOpEoKb9717Uos2SMa7l+3hSY08MVQz2VB+BlCORN+Xma5KZkSckxGJEKtnIiEr2UoRHCN3j2APGQC5ep9tWSwyLVNjaChZgIHOnTpVZiJDSgbLIgFgU+XMZRzdckjb0IAgWWIfs8dsg4fOXoGo11xJZJCm5KpgUjgx9z4mZ1dXVWIARuYxhJQSiFCkKIHI4lpGF/w/dP3ZrmRblh2IjTHnWnub2Wm8uX676LJPdkkWq0pFEGCpgfQgQW96ESToUwToB/QBAkrSq6BHSUUBBVCFKlJsQSaTyWQyKzMyI6O9vbfnHDPbe605hx7WtuM3oqiDgIff4+52rNl7rjnHHI3oMkzTfLMuHupdKIQLSURXRNRaR66epCyWzchKCkWSsmdFcR6OS12XXqYrQH2gT9FjKgDZq/qhv7pzr+gr7ciyd0I6kex5bZwz05kxjB24kmwtxHTPwFpz26yyOF49ROvVXZFpHtHmWntLs8Lip+VYq/fzjlN6PUR0SyEV0ekoYFuPpfpU2NuxB1EcHmVpcOtCmg8f80pT5M/9VZnqw2k5zIfz8TSXqSutuPs5RUlWa+99+ARUL7W1UtFilbF1lKnCkdlpk0SDQzSzrnVAOsXmoWPyWoDN9svMWjuVeSpl0B00/pXJHI1eI4IOGzs2M7e6xFDJwMwyNm5uZvedmZyhDJRSAkGlG3bFI9E1+t3a+5pap92Bq2kEb6I4IZ6tcFkZlq0t1bGb53Vpbvsvv3q92x2++/Hzu4efL59/9Vu/9ez+4V51Khl7akElSWhkiQ7FzMhjCW0MtiBsuCwQxesQX2WCcJLRiWTvHQp7dNwamaDQtgi02go7MQmFKoaYSwfLtH/72Ve/8eEPzjpPHz4/Tuc4tmfPPvj3//Qvvj9dTZOvZEbVzJI7qJYoFszUVGanrRW4j93TGTdX8NKWJlZV47Q7/8Xbn7++e/n1y/gsS9+2msNFYeAkI8KOj2QYPZ5jWynQICYCbvBk0CwyBoAnJM2yK02HeXc6LalQbpLIzABgzgEdcOShXoIITZSVLffIzDKllALmMRQ0DuR2/wfG8VNMSSmUZpQCpNMQEvWI74zKLonhqdZs8X7VprprZ7Ivlaevvvrjf/5f/afPP47975jDoZ7hONMO367sl3PuEuCwNfFwo6SH+/XF9773v/8//O9++rN/9md/+pM//MMfAfn06W5Z3n5dooAOYQwyxXvv27Q7/AE2Q+kcUU2GzAy7DLNjAUiyZynFWmska50xYFCRRYCgHAurVDchQsq1cfhfq7V1yBesZ8fIQQxFmpmIcUWjMHsa3MGMBhitSHTrgnVBGi9kvAU8FsU5qrkL2VWswri2MLCV89IXL0Ws6lGMytYU1a6zsxQKa+Zqpa5tKfQElJvkuEdHJAVZAqX1o2uJtTRPL7ddi9SVTKruSs8mRDK9uLU0PFCO3PeWXteSyXU+e9a62tSbmqO4O9W8aDln9WoSp1a8LK3X+dDbbbFFYVKVCEfjqe7qusawMQVSsDoV893ajuZKPyg1z3uD1t7gVW4j3cuYIud5bilJ024H5mGvsMYCMoqsWu3drJaW3B06mAqvU6mWYWhZ5qsdwEka6/HLcANY9VpZi9N28zWAWn1VoBlH3IvDtM/U
VPan48KrqWMF1qnOkzx7VAewBArpxsokgI61FHphrqPnFMzGMDdgaN8NgpeRVZnIgCUMir3MejQjSy0KpSi5eYo9R+sBSxrNI6OvKBaIkUWOdWkOT/ix9ci0EsbSTrHf7XrGclqzdqhEsMiL0oD+EDB2ZHFfT9keWi1XP/yLH/3mb/z27/61v6q7OK33N9/lfTuZPZE6TCnUgdi2VkrNzEG2BpRw5dCXQpktguTkpch7671JXSgwMfvI79FoNEevOhqfoVw3smROPagsJnezyt1Tfvi9F0edPvzw4/zKev/K5/r0avew1L7O91+sv3v7EfOm8FwQR9g1rppd7Ze9L3YuVqZKm0wl8qjb/XJzPbtpuZvWYn3X7d1T3rz8i+Xp+vz5pzcFNuq4AEYmyUEROm/R1gJy2xEYkGlluMGUBNI7MgyuhuHfJUiJ4lURS1vcd/P14XR62Pguq4zFqmX2kANJWihwSQ+A2UgilJhAmpsbkVCvqqHMLsHMTYJveESIUhqtMGFmQm/eIpepVGX2UPWyjY2kshuSKvLV5At3gCzWebr943/1+x//2ne+9x9fd92EX8MstFgkfBZGtV2SxblDMD3GLCmkzNfe51Ku9r+1m3/x5s1/t9x9bXj7wYvrV+/iZ1+8u96zr+pTrhk9CXpqodB7iy0ASERepsEh+grARgpwRIxNqoymhXSzAtHQ3Kt5gZcSSDiH2jvWzBy00h646FUpzexpQ1UAX4akDPAglAZ5YTZJbOBC0CeXA1TaCBaW0q20tXcygKTVVfTaEqfeJVDyJGEhahU4DWVVdFsSZgd3RktA67nTaperoWfsQDML9ZGAsyLS0t2nRerdgCVLWiastXvlZefFbC0HQghYNhV5ajJ045odEQXGHMEcKySX1C4hzqQlLSSpqw0/DItlAXCSnHJ2V6oriWVZBUtuZtiAgg3WKbdOGxGqJIyZmYQlhTC4WVkT57ZInOc51k6ikwpTV9KSXKIh4COf3CkCXAa2ZrKiPNVqKVe6lwitCQFuRQFmDHV3Y5I8rWtAU+8IH76Nw/lpaUc42e8QKuatLUmAXLtKKRhjjs42ouiNa0ckrgMdSkPCCojeRUhRyUxLArF4UWZPLynAVqy0VC6W1kEGlGhb/poHUkiarKEAaLbugkyuTqnNqbBYSpuQCfUYDOb17jjEj77eqZQAs2k75JA0K2e0nYVaNz35+Vev6xP7W//pi+89M5v8b+5/++//v/4/z6oZW6oGIJXwsERhoQixQdU9FYyY5rIsJ2N1lDVzmjnvcFzP6p27w1qW5N7nGnYWuhMGFnnvUXwbv71UNbWpwCIyC3y1duu2LO3qP/7kYWcl1+tnuy/L3W3a/lR/9I//4Pt/+6/f3789nLs/vaLuq+1Wqz63m/X6haZG2HE+5GymfptTO7yd+MGvf0yAZt79wCmZ7tNKf/K9a0U3ZEF0kBjZiQSBQCZpGOSdJJjDrF0APRDAcB1MkjKXBhIfj5wWXYwb17VbtsPu6nxqy/k07yYz9NbIahrwXZBb4slo2cyhHPw8PmLxBtuuOI4sUGyduzS8EiBsa9QA6MOVM8NAq04zjBWdmRE7wBOxLViJ1JpKral69S/+63/56Sc/KN/7z9e8dzy478IbwJYytMmO6l8vbb/b/ZWSjpFViR+19s2u/D0Gbuppz10umsoh1qX11+ZF0tqvdnOueXQvsIFOGo1MHwB/KoUt1ZGAJA8Pg8jxK3Pc54TXAGPggXRwRDhlWpHAEE3DphkyUabtjR070qG5Ixm9m9l7oufA/duw0xzI4oDpIhOZotlmQI9MyLfg+Az37Llt1yVlNo5J3DVcI7qcYo7o4AsteqOaJTNgnNwSsYnTaUYPFSWZfopVm/dcDvPBjVcVY4kyhM2CLDMjFHWptfbU2k4kvKL3XuZaet18OIf33QhVlzjIzsDwvL/sU9DhCWwkCF1W9ASDZqBcHByJEWuurDksnQkPcdtgySsyQqS8eISO62IOKeboAQouDng6xJCieIWJzEAqudkWpmVfB7E3oscwgwPEtJEGoEHJNjEvipGSMIIQbTvbkdLktTM6mMbBTlcL62hcaDC3oehJiYKR3SdJ4z4fVgLd0OmLr+NnWWGBDIVJgSNYykcxoQD1QTokPWUBgaIlzJWeYMTZ3AwlAfBsEIKLVg/IxiUiDYMWUCGwtbiUi2Hs4ZmrTeW4rLtpfvn6zVSnTz/+8P7tm/P103h3fvbRzb5Mjowe0zSFzM17Dj3PpVgZc7SyhoigVSXC2nSopdjSeu3DOsRnOtIZ4gpbJTNE5sU8Z/g3uBwl1VsfMUVmOy9t6f3Kn3340Rcv7+cXFtN0Mv/w8OzrP/nis1/8/Ht/5z+1z847RPe272SYm53KQy9LynW1R2S5P82Hp88/efLw3z3svvvk6pNnn3/z5Ue/9oP1fJrosMU7Vx6h4RKmUkBt6T8ayPf4DN0cSGxxd8Oyyk1oYz053AUHe4ii0QJOSTTaWLQaoZ4hpanWmtlH7tLGBmXiIhHi4yICwLZdcg1YOzUi+sb5wo3U6DZ8FWlbceLYa46HcgA+PiptAId0MZ/UOuiseTkkJAe4TpHh+erdP/kH/+B/9L/+kPvfpDt6dPbZX7m1OHe01zr/u4xr8KNWntXSgVnto+PP/+/+1Kdnf/cvf/FHf+93/yMTMpARNQO97w+786pY1qvrq4eHO69TRKe5ekgmRhqEIjEG65RJs/Ow/BlsxWGlZuoQwxOSwkwy0wZqoWRQooWlkH34WIzFvTa2VgYE2cWdOy+r2m1zPLYLOaCE7bgd7zgxTlYNyzdgc4GgSYnhMgaHG60jBXQwenN3mCOZ5BhyI7P41HvfWHfIxCDhqXVty5dEZ2wL5jSVDb/MHKOCBuKpXmO8MjWYzMbHHmB+/uXd7//BZ7vrJ2bm3nYFBRoU0LFvHF/jqtvVofPYvFGHASoGoPtohHeB5UhGAUkfB+5FRWEyLSlp87sfa6PHdoS0Uh5Oq5ddqfP5tJay62jiUDV2aVt8mHnGInOoChMAH1FBpWVUXhxV4/JzIwT1kIjtVYwySrKPJExsYr7xZAAtbHT4Fg4dUsIVpI0rDIzcQhForLDsIUKOTBgZ0V20lCeT1DAZSYocojZgLHicm8RJASVUoqxQGBz0AIXOPFO75MrBtcmxN06iCgqOu3ewzSmMNqWYxbgv5NIIKLdUj3OU/eGhNZv8ZlcZZ0MHcHO4jbVJXNe+383r0mhJRoGRfFycmjvJyHQik2Y1s7uzVkZgOae5Ki2dYWjUItWMBbnH1NDHHTX+R1FSeGc3s8nME5rSzuc2/dUP++Hqh//wX/3t/+lf3V05Lc7n9dZ2f/M3/+YO80++fknkFNl7b4W77oud7uvd9/K6YHc8n63ves/r7z9/7f793/rkq4eHfn8qFefjeWfFi6yr4NoMiE6ymNnW5A1LsE1hLkF5wdoBhDB2ruAwYBFTXtgvyPMYkTLfEwcHoMws5+PDfJgPV/uH46k3lcnAbjA80q2wNVTDPPkCxEuSc9QQPUadGkZKFIZN7pi
waRj7jIuDjwnrgApH4LeZw0pIqZOjYlR35fhcDWWFsNxdXT//sz/9yYf//L/5a//DD3tcmWliaDkxvvb1C5y/Kv0d+Cbe/j/58f8GfSqOVm8OfjOtf5796Z+/+YP/5ZP/ybLeW+3rupp2O+e5d9blfOpczefDcu5TrT0C5q2lEJBjmPHJpLA0QDHS6EGEDSgvZUFYDId6I70LyiAKfaTrbT5qBgCeyAiATEHaNLtSDkKutCmiBqNZl+rQ8JgbCiDHykm0khlKkdgSrwY/YDtrlehDv0HncCUxUMpExIaBjP18qgYsNU4MQYweSIkwUZkZY81eSPaQ0JSbxcXQr6aopPiO9BQj8rzm6dyPD8v5vL5btJ7Pv/e3/8ZuqsfzqXW8e+ivH5Z3L1+N9kL4JabA8Gz4Vm/x+H17fFtGffftAgxCZuaFZlZKcfeCcjOLhlJKKVadVkqpZmY7TqkWOt7cXJ3Pxzjd73ZTSMc204vQLj4rNLisRBuLLgFtIDekSDeUJm255OAjmavbhSJysTLchOTBYWXkZkPZPczXe0bd4nne30SQDFOmIgmg0Dg0dcluCaAKNlb0GM6xF+oxMPZTLgvJYMKW0XY5DuEChXQyfcpMWMKGb0sRzoAHALaC7REzz1AZj789UbMRBCghI5XDxgrkuKoTmnf7teW69sN+nmaCnYDJTmu7vrn2edKqcz/X3RS59hTXXmu14srMDEQ4OMxzaMxsck21ZuJ8WshCBnww2QWk+Sh6zoBvTnobaz6HAQFgMGMlvKOhR7A9+Z2PfvqTL04/fnOt3ZrLPHmc1rn0r/Puw4KHL08faIaXJRYzZ8dD5dxygWU+LMevZN+9z+P0Yvr4gw/Wef/m978kzlZbP709WDGQFhUEMgAhinzwBUdRGH4xG4l7hB6NtSoixPH5jek2ZYLcQSEN3OT4Smd9f4eQEEjry4oZu92utdazmyO3sffyk7eJeaA9AEDFuPAGOfNSUQwDLNp4iJdqPq5UYliKbe2/MOIAzbYuRhIxbStkJcYcmYKZrXXPq3Y61sn+7T/8o+9//N3rH/wt8cCpt/U1l6/K+mXvR/G2+HU7/ax//V/w+f/K2fzV/xdRYhd3v/g/+een6fp24d2i09qXUiasoewAyjS9eXv35MlTell6N2eEUlZkIWaE6D6s6jIzww1d2cyC6SzI9CzepcGxIobXWAaU3eBL7grYBismDCiZ2aLNIwlVRAbGcbYFGNqlNHPwFlJhYEBmw0Rye7chTxgzBpoiomuYCz2STyG7FEeJAgPKCA4W1Xt/cC9c1/VxUItIB4tVK7YGUzJLtyH97b233rvTU8wcb0u2jN4yE+elL8vycGrnpa+LljVbDE3zrtj8o5989e71GxlbZiDBnLnnlgX9CM4AgMUGPuJbJf5bXH5/vEBHiQ9OQw3dVglp6EQk16/lTgELh73d5VR4flO//4PvTvPVm1O0xsP+yd1DtLawLKS31k1wGg1zgVtM+0vh5BZ3BZnEtN4hCKYcjKXh3bHZ+WGwTTeydkqzbKg8uPlW9OFGUfsMcoUSDofLPKDMdCVHN2HbajACyKwTR85Gl6gACRPZe6cpIaWlsmz79ffxHeN927jyBDKpEQEpQKkk0mzIeBOw5hBUkxDDYAP53XrHwWgGlEIO2gDp1XwQZ+FYcu1Nu1ILsuVSobbKwSaUak9url5/eZwcvbdS6nk5TaU0k6KPoDEqUwFg0w9alFJIX5cOoBQItOGeSyFjPLVHlfg4/zPfix7NJh/ohGTm0c72dLr66OnLf/EHH9Xb07H368Kb/fnlec5l/3zKUuKbu8NUe+Tci1RXZElc3T+d+pM3H9zdHb948+bnh+/99bJoWU/l6oOXf/Sj7/4nL5rEcxiUyW5Rs5EEA1R5vPEeL3dinPWXcsuBi5BKH1IgxYUGmBDLACG3l/Q+qG/jKaOXUjPQWkw7TDOxMvq3wF7xIgKOR70SKLMy3kEqaMONjMOEdKskuZ1Cugyew5dlsHuMcw67ZoMNZIYyJxOXKr/h9UpE0AIxzxGvvR/u+v2/+Ef/1f/4f17w/LvlzGJf5fkefXZn+IHcT3Zt797km//jq7vz3g6H5/8xn/zm3R//4198U6b9tfBwPi8BoUbiVN3UalhxL6/evHvy5FnLtYAtkiyLZYdyrA0G7meCOUZeRuagnVIMZNKKEsJguW1dhjbQaZyI43LfXOCpNkYUECy28TcBoHNbRpHDBxjU4BdRfcssHO8jIiE1H6MxIzMBKQuNUiUvdsljlyBLUfYYo2JWAAzFCmTitpuxbTa0LkmxeIslBgze1lzXPkiHrT1IjGSEesu191Hcjw+t994UGPAbIJi711jWWL98fWdTzaRjmpSVapuQYtQW6JF8aiUubeZW2SGK5VLTL9/EBskqxtW2uWKMOwUIS5gpt9c8dgCSvni7Lp+9/ujjZ9HX6+vDb/6N33m4u3v7+uVxOR4Oh6v9fHN1XdyOd/d3d28f7u6X1QvN3YtXM9t2X4PntSEd4YlhPp6EqShpZoM8JhKJgXflyCalAZbINNIsh4xkU6owBRQiTQqawQlu09040NQGmR3pW6Gw7J4oaaEBzKSSDSFCF8c32fuhHzBJNQZ1EA56gkiZVtJhQcVlax00GR0czz63NkDA0JxLCdISYSNpQhBQWFp0t9LX81TKbtoZFaFQd1osD+h9Rp04aMR5qLt0pBTqTNZSmBfPVJCKOrlbiTXUVWiKHhCVdDmox4S91g51h0wgzYoiNr8TM0WKhUyaCXaO8/ydjzLK/ecvf+3w5HR/xovD9PQ2saKrzGV5m+vpXJ6VfDhWzevKdX8/9Xtr137uX5Z3zdd4dfKP2rLc7z588eqPf3b/0x9/+L/43bbs8o5uzYUElZVmRG43fGzhb+/rO0i0pBHMzBSGesgGA13S2NoMGNwEMMfIbuYbqdG2eI3xLgzYoS1rnXwq3oHHP+V4KCSGPEoxBssxZGbKYaQRGZLoFGS0wayn2fCFH/lXlwAno9noFdSVGOT6IWeHRrAdxeToicKISq4PvYE+ZZmv4y9+/PWzf/2v/8Z/hjJ3RlP39Gvb7QufKieWJznvaizP69FrAW7Aj5D7Dw+c9uX+3eppuaJn2EwsUbFT6mq3z4fl/nja76ceJ3fPsNMjCsIYxXGAuU4brEkHEAljJ4KyVEo9RDOJm39wYsI5JW1EPUIBhKP1i3nEuEWIQSAEvEBKwDbboAHK0cgkQxysNULkI+Fr7K0pQyhp27gNuWiUjVlvO9RbGanKgGTM2MisayxAR5K5uVD13iX0LO51nudSSrA3W6MrM7P06Fx7rGus67osbTm33nMQk42jFcGoMYrohGGaoFhjNvZcBTtJZWOtbocBgM2hWp2Pzc3GmB0NAiSNuRPf+rLRegz+66OODfBsBgPMwcJCY9IknVXfvLr75ptXgyT+r3//h4Z0einTzfVhf/Cbm8Nc/Xxel3M7Hk91N5txKlYrp7nOdaq1uru0yYlBDBEzK+luOWzWF3IsoNLH+eIpIyWTfBtpA5krVWC+mTxnU9
IKjVxl2iZ3w4VmQ1+5FNAyC72rp3GNAPAol2OK0mARkVp9pEHY4zA3jvmFGAwHEMtgckq1S5npJN0HeEgbd8GwIP1WxNvWI7fMUaukRIaZgeipMVju97N7esLI87LY3tc3Pa1/+OEHP/7mrrivrYmWPZks8+TDCDN6RqI6i2ENWile1SMits1jqsJcOZVp7/XU5Sg15VkGqscRsKOEFZhosCaYWLvM1GtHf/q9Zy+/uLcGr8nTMpN2u3ulPIWuP3j25puHXWICzo6Tp7XldP363fzm14774/Swn/D87kX8+RKfvH7ya88fXt3/8X/5Tz799ONpV5czl+OpFiKzytdhBCSRVgj4QLnNLwMQBDiLOyUTM0Azc5iTXQgNBA7SxVkrkRcX/y26pZTthlHNbKllNHHR6Y656tQ4bq/H0wTbdibcjWPkS/PNmVJSG8QwGAkRdCDB4tq26nlR2ZJGhBZ323w7aYSlMsM3hJUjPaBLohU3mdoUbtML2n28y1Kf//Ef/eUHu+vv//XfSJpdfRDlRberiS5n1OdTW+7nD67De5ZeV8ftX75+6YfDVFiN90s3TdFybWupti4PUjWz66urr1+/WeP8/OnV6WGJJifVIwlTOk1CKKSEajAjcyWgMFj2TvIs02imiQiVKACsZAvlQBGsjFxKRihXuj1G+kkSYtzAudI4NHwjF2W7fbpt53VgwC1DjCGzilShZWYYmrKaAzgzq0+ehgDp7qVnttaIOcGU091KRUEPDZJltr6em6Wic5p2u9vrw/VV5VmKzFzaap7z7KE8n9fljsfj6d3b43JcUmlbIpulLoakgyKNzfAmpeJzRFbWzOZIt6qMgfJfVjJboYZA65fx8dtbfQJxKevv+/qxdAobHhyjeceWmYWDRKknIOblRtAkp5lKEQL07FFK6b331t69vX/1ep1q2V/trq72zz949sF3PpHQG9qKbDge4yFXWjNbpsJAeC1T3Qz4GJz20zSX0+nUeieiQFQYWItHO9OLwYd/wJA3N7VSygJEpteiAkmeQsqHAsJh8GJuXsEpRDck8ryusH7ua3HX2ittYcq6mVK9oiC1RWyfRXLkNwCIbYlnXtgzxYSMGw0jV9PESmXHoHUxGWOqw2a+9V72qUHtl3EYg3OTdJBc13W2KRKroioZnNy98BwnlRLMeph6Ro+AMUnO1VuwZ4+VQqmVXhpyac0rixdlOZ/OtKxeWsQ8HVZFAlG9FV/X1qU0LtLVZk+SEYqIyWsQmTm7C0wtmUZeofj04ubzH7691rwj8v6uL+/scI0Dz6fzdz+6+YtfvN7BVximqccxD/mLZ29a3Ot8kF1ffXk6nD8p6wef/9M///wvrj//18dP+/WTTz4M5fpwRxyrX7deiy5RKgSAIhSDgK5I0cHiSmXP2Xt2d2Zvvt0V1jPpueGsw9qfHEwwWsRQMDszc+k51jJCS4HYGQuUvYVE1rKbdDqtkXSvo/csLAjIdwIUGhHXMiYFoyUdDKUNkeWgMdCbE6mtE6+D22DoyOFcA4JsucII+Mj0BTDWwpk+mDihHuOk6++G+7HWY7L8sz/4M7vZf/+3/gqmpzZPGMPCtFcA8+21iP2Z0K4/kfo3L79c759POzUtLc7SYlmm3D/0c86Wi1r2Inx4/eTrt2++ztP11d7b8RyUW88s8JZKsxRMfMDKIIfTPp1yJiXZZQF4XgV6C5LMc6sogKFhoFKZCgGYbNE2fysCmSmokp5aL8Z4GOOvkiaTcfDtt7eZvHAtxv3VNXab9NPI/UDdejSZGb044OQurIm2dhgKw5gymnqYbmu1pzfTPPs8010Rsa7rugacMkbX/fH4+vXbt6/ujg+njGm0yUZz882CdfSvA2S5bFNG4TZorHBjQxasZ4wF4qVMj2KRG5d3K+u/BE5KOQRI4z9z62fNBDGY9NGODOhDNFJYx1UFjB3yJgDsCorD0WNAOBFp5sVqRBhqb7p7c3735uGrL16XUr7zm/tf+/Vf/973v1vLrjf25qcHHR/aucfx/r6dH0zKbF4oWpkOHStUJiQ54CtLZmRMthfYBncU8BH/tjmliF6iEx1mrpD7gM6IhLzI3SlyNSDWLkUqe9veRqEsKZrQUYor4O6td8AhC/RHi21uKkgHkWuYmXPgwDF80kQ+ZDMNFRDHx5pSIIHhQTy6ixxHI4EY0EqGe3FDC8vWnVW27GE1zcyOdqyzZZ+4sph77I93b85xvp72PINYkp1l1zPNd5m5BNy9ZzrKdS0ZGeuplkK4FDSsWFsEGJN1Z08a6VOuxdxRWpyMk1SMHtGEZmar6iSUXmzat/u3eeuH/dP7r/74YE8Wpq/TnLXO+/OTPe64f/Lkq3/8J5/ukQXnnpOe/vTJz7+cXv3Gq9s5pu4nolda7M/744ev/vGb71yX5985HLHwdhefrThOy1RKtjAWXDCORBGWYeY1zMDGwUaKXc4txzNHLXeDmbbLdEC6m+mViMzRSY87zQDEsOwaduWZir5hdoHWfTdP03ToLTLSxykXWUpp2S9e5eS2/Bt1ZUMPRqkiIZiMtEC6hJHnKE0QhebuALltcXwQAii4LJBEDISKLINjA6blFjLlbonMHqfT6Q/+xT9JfH7zMQJ3ylWSubyopRXOzFlUj/sPv//Xfvzjf//hp7+Jnozoa4vMHOYSUESvnDDVu9Opcn727NnXr1+96fHi6VM83I/MkzFjiEhAqWLzIAkaw8Z6wwXFRIzRVRsKNUSQSOvckDJAQsTmhalhLLFRRJkDaYE6k+9JDQDG5qQhynYPj4p5YZnTv13+LitGwOwiSXdjGbOTGw/z7TzVge27W61TLXMIt7NKseJc1+X+ePdwPgWEigI+PJy//OL157949fbNmTDHbLgh18HHxYWSeLk0HmnpWybAY3/9uLC9dOi/yoR5/M/xl/+Df3MsNTZmwaVPuFzzv/Ro3/63Fw6kPf79/39fjxY6g3mZSYgZ+PM/WX74J3948+QPf/23P/6t3/7ux5+8+M73n1xff2CnDH16f+rvHvrDcVmW5Xw6redzrrfm+2Ve17bkugw0KDJXpDsHmcDQmyIVQNbwHMg8NTCNDK3ZUdyNZgbLrdsZHCHlADjTMrOPSB8ax9qx9e7FOtKnGhHJqKwAhLFikQ91v6wVkeyD0Wxl49ZKRYHNQXw7R4eawUld7AdIwMrAZKrV1gNQa+fqNtdJ3puiyNKQiIqx2d5WxhHRWvvggw+urq6W87mijPfcslNiZjEA6G11436/N8tlOaVg8szFinuxtZ1n26exetlWpqZIJNStBUuMHaYBnKUij9IkY3eUSLXY3V6zeLs/P51lsbZXD1qv6+3V9Y7tyfVi0/3b5Xb3vJ3OE0oWPUzorhqB6A+lv/zoGJ+vT47X+5391m9/74g17t9N1wcrfvfqjStr9R4rUm51VBNZFrEaNDYroI/MJKMJMvNBeW25GcNutHJuC30M5AtpBmW93IGBQZ8Y1gXsnswExrw1kH060N1dFoiwQbIwkenbsUEDk7Btb4qp7uKS2sEMbE495lvUEOktx5J0w
9N9POdxm21PnTCKGnwZy42WYBoyDd8OD9mgWEV2fflm+vv/7z+8P385VZ/5BKtl5m7vS19S55IHreje9s//u3/6b07f+/XvFMjB1lrP6NlDGYhESP3h4eTzbm1REFe3T97eP/zpn//k5nbqhsx0M0lBREQJrZ7DyCIzH7l6ioRqbKTo0Tpubxf6BVAerVFsC8x+qYxpm/sKYIRbbKw1AGBSCZjBm9rF4m1bijxezfjW1n07XMzGoWCGUr1MPi4Mq2Z3ALJnG3NARCo9WkLxsMZ9yzWhZEnjOfrDsixN4jA1rDYhBTQxi70nXz3+3F8p5dS36vL7tdEvlfhLEefl93is0d8Can7pS9tg8P5Pf+WQuPxzXBgB+tY3h/7psr1/Dz9uv9lYd5cnOUguvffdTGhe7/lHv//Nv/39L+Y5P/nk6sOPnl5/oBcvXtxeP9nV+eltrVbd98tibn3NsqYv57o8FHTLxHltpxBSvXdEDjN+MxvdPVno5tjsAsNAsnQMORBJOEgvNKc/xP3lPZeRkR2R5t5WFYN7zd4AtOzumxnJ9l5RABJJGRBj2B7rOTMOkYxS8jpo/pEjjYDpBcapZ3Jc+sNnfQPd0QuTNBtWaMu5EyhlSnYZyazKWQagIWPYf5t52aJ8oDRwmIV58XVdpzIBgOtw2JtxOadxoinAbaAJOGb1AMOUhkHUyFD2vpqKbxLLYRIFSyNQkCfLbuZK9F6fPD0r+3GdbJ7K7rg05XQiNLN8ePPy3XG+69jn/J3n65tzPfnVMu/3V/KCyXOKYzw8LR/OvDrprvX7ah5dfkuv+/uXD5VD3mVGwtugKglZpnm04WYbhDmoqtYHr4ICzBMELQkwHrt24j1PgTIXhxfaoDDZWLWAcFBl2IOBootQcu1ixDRNVrCuKxJuJXS5OWNIVsZVAgCtDQIAwbEgygAxAiSyW4oJEKF2uaQamRBNNnB3bFfbKpGoW1aTB7mmuqmKmwXj4xonqd1U5/nDQ3kRa/Se8zy8dGIfJUuf5oIzV80vass8a71DrkCe16Vl9AExKl14aKvTY+1ry/vzglKePXt2mA7/7R/9WTdIqlZHcR/J8enjMBwc/y3WlSnUGBtLwgaWPjr0trXb8Vg+TAXAMCQYOBpQhMihBtB6wVvG5zjIj9kveYqSpHykdl3Em0Z/L2gYcgkzU25+n4CNsyemDrj7XOqetEFkMkPUXd1hmsT7u3b3dn04W3jRPNlN76vQzEgbkLa7T9+uqr/y6+PX+1r8y9Ucv1TT+e0yPVoTYOxGxzX8SyV+eN/iIuC4vEvAY4exPf4vlexfenLY4CP8h74ez05sw6IPW/ZYA+hmdr2v7rvW4hc/6T/7yVeBBD7zve+f7erOe66H/X6/30e0SrveTdNUllxhUUqB0jEPVkyxksLa175GV3psNKYhCCY5yJSqlSM7k0NsWXzoTD0jgjY8PDby3yCRjy4+oo3sPYNHhG0uuCMN4v2QVy9vw6DTDhc8KWyTNZhziNDEVCoZFaIRYg53eR/zNY9WXQnWKdY1ou9KKRj0CKZzUIwk9t4ZaVZaa73bsrR5X9WWMVwuIVPC/Li2zNztdlnm4/mc5+7uaciMHNu9gA0/EOvmnKq7xUiEK8WgSluNgEpmAisBRE3rpF8aXJTDbl1XdsRUmluecn0nexrzh7vD7oPPP/viudWj3n7yt37vL/75n370EB+/3R3n582XWFbt6vPji117orRZhza5IBbHVVHa+eXxxVx7X4cFZF7MdslS9JhxKmAza1dPWi0StAEtHEEXgIB52ANodNVIwJFMdA5VikbVH9zowXMe5IPxM/s4gVNBUtWHDKRlM5jRkQNjEy7FfXt6MIq2yagEkLAklYXWaTRVJMmWBLHDqD4wQUA1aBu06YSRbrIYsC0ghtFEBznwDwCD4RPnu9186/JVAW9pva3hdVddPfNheXeou/V8ZL9W+m98+nun9TwmQRgRwGi7U7XO69Jp5s6pIMGHh7taZ+13LGakm2fmkPlWcekqW1xJbmyijT6YvvH0UB9rWWpfpsiWF7f3rWwlkR0YcYHDzKcmoaRQIduiZZmP3pBV/BUMZNQzMgLvYQTJBkF4VXWnjUO3lKGtl9SEqcxzqQohI5liFzLbsb0591dHvWtzpnsJ58rQeqRheNPboBIGo4eXi/fvtwr6Y6esiyYDG4vkVzvrDZ/H2Jn+B+vshsUPMDAf2/lNtj02Ou9/EB/r9bd6f2wEc34bi/kP/rhHIOixZ38EZ0gS7tOWsRVN0XpmzBOnaXbb9ZadWl4Hb3f19knO9k4J36v1d/eZ6D2VKADUOU9d0aP1TR6noBuANYZ4UNpkyVAkmTn0lcDIAYUaEpmYRgBvqhQb5j+SZIPjZ7l9QCcvZbPz3CCyzWfw8VOw95OfjdI/EPxqQ6G3qUXNDG5mFriDTNvxkGMed9A8rE4REUsU4qPnzzxSkXsK1Zr3M6nEFFKHevP9fl3Xp0+elVrpKG5Kh8/WIzN2u/3p/FCt3txeretaze0w9d5H+HtElFqq1b5GKTWiw1HdGY1OJw7TnE7ABTMzZoKyIU0Izl7MJKYKym7KJWAli52RpSdfvrHv1Pq0XN0+ffWP//ijMtUPgNvrz1++e2bzfuGu3UQcGzF3Pnv3nbfHe/+dq/NR9vIc7Y47Ts+u2l1fXt1f3z7JWGgX+u7gHUmlcMrhPMCt285EGC032kyCoqfZePsdEVvXk+R2thoQcIxl5eb2QkUYBlN38Iu3W8gJkBh+YOsC5G43lWLruibD4I9ou39rsubI/cIQUOWF3G4ZhiohQ5NrSCyQW01nIAzD4PaymeHE4aXEGPc+YYwZztHIjFpmIFIhNHMqihsqopvRrZTomW6l7nqUte293p+oV8tbXS/jbVn60FU+DuMe68pSz2vIC6N7yur05buXxQKOEafWqXSoB4VSrZiA90KJUdWGNyQubelj/TCvFp7ZA4OKu5WR9OFm5FuV29jsko9PZNA9METeyu0uHKenXeAOMY2zAclH/dhoSTjrCBhER0EglmEHxOI6P7y9Vyc5+eTB89uH9WGZ72Awc8K9Odehn1HZzT0TkCmJZCCEoGdmfSzuv1Ilt1b9l5ruX0FYtjfo0p6DfA/sPJJhvvUdGqCNJnA5+gY15VLQzfj+CXzrST3+/ttw0K9gNY//8P0ze9zZbo18mIq56dJWD43I6QFRXlbY9XTYR/Rv3rSXanOdrg/1+pjFopqASisOyAZ8QLqZ3ieawSRNJaUyrhAbztqZJAN0mhs2Li1JOlIJuSZFDuR2c6Mwy97hZrNJFrGCkMPMWrfHueXbx9uwfYZwkUyKGg5xZrq0+AlFDCOH7thIlLpM3TliI5Jc3e347u7J9e7J85qt7RxeTcjaRjFKWgloYZ9GFkdG7xlOWh+ZExUdhnZ+tyvl6uoQfenns5mlyoCkAiKHr7WGBHDJHhHFDJEaPrgpVyOmpBHNmIAVFrCfMycaGT0zXHWeIlKyQ5YTllpN90dkb8nl7bL85IzD7oO/8v2vv3oTL4/2yS7VZVT0rL5f
6GfsPvAnf+fDh593fvXZHn6/by+eP3/72QMfTv70iRqHg5pyHbnRDhSibR3JaMjYQZK+KZRISHQCSQOEjMWtAAyMe1GESxzdW15gXClsu/f68KwQR9m4FFBOFJbWMtLN3HbujGhxOfP5rTsByBCT2/ocFCWDwS4WFhTZN7b1MJS0SsZg5W9ypY1cZeOqGTsfklAZC4XYNHqP2BCldNS2dCuaa20tsstphgBXHVXKQV1p3Z7s3nx92vF2LHNi3aZeweQFPcvee7gll4hdKZ44ZtyfTyrG4jJ2KAaE7gBkbJtbznZaGuCUiX0DER7RW5Jk1ypKZdiGbOmPm0UTCjBiCEd9Hu9CFYZn/pDnDGmyHuNZxge5jQL0C+qC/GXco6VfKmOSw7dT7p5tIbEvtS399Ppd3Acbr3zXD+sSzKDSyFJodRtERtocjFUyZUYMjvl/r7LbY8/7S8X0l3GY7Y+25bwNY3E+/ul2jQ13nMt/Dm77hhJcXvu3i7W2qKv378xjvR5ixLyM4ReTNdivHDf4pX/+K0gRyciWgjm8jM1HmFuZHe0ms98tC0w+ebWderaXy/L6ND85+O2eEwGtbUnI3LFuq0iqEA4RolLwNNIGRS0yQRg5uk7beIdJmG8eOsh0AYbswgjdAZVwL5lj0I/Z58wOuXp42T1iZZejddToMijLABIx/t/AYhcTtxw8DKI6gHlbINHE8QhKAyx8GhfYnIfbp9dmIpoX7+Z1Q98MQJGZ8pzrLVm89L6WUsZiQ4bew5QwZY/d4crdj/cPFNQxcjkg2ACaeizopZTom3FF9TKeQHT03mVdspTRhmyVYZbqwyV/y6dmpoukySzE6II9vDk9WYt7PZ7Pzz7+cPeR1+9//Nk//tMPbUdz6yYFUeZEJh7sNH3/5sxVgbU1etQPD7Y/fP2TH117zewjwg0AMUNbsGghzWVhGC5gY5liLJ1dkhkVA4WXwxQ96KPvGkvS4dII0QZ9emiRFMigkqYuT4zAlzIUgQMWIrrRd1PtyvNy2u2m3a6srccyBN/bvTSUjgByc7jLpBSpSNIctCFNJoiQw9I0ThQVIQYHDSy4PM52eoGQK5NO2jBJGT83eeGiGOnukatRaztx2vkuelMPc1bKimG1E+YVPY/r3fPrw3de7PvaMqIta4YIgxPJIE44s5eiGuYdXHIt8245nWZeyzw2FSa9uDTyUweJYjPvIcdp1F3zMFgYNJkh2XW3gTENFVdACI2buQFlIzKO7fcWjDQP28LoGLriC6v4MTVXF8O8DQDZPDTxiCps1ZFbWlY2Sel0GI227jzvlnzX9K5NHe5TVy7ZsMiJajSHsGI7bRitjtu86wTAWGCM2Ex3Lp/dBpV8G9l4rIyXyv4ojrMx+1+Yv/yV4m5mpEWuGGfKt4q7WzW2x2b/ESDMzFLq9obke66kpAoP5Wa9ictCG++F+L/y9StN/XifL00Ns1vi4trJiOwmZzHLau699zWOtdZp59BNe9eOb16WQ9l9cOv7omxgDu6yEKE+Dk5zM4OhZmZq62w2I4OEcoUKrAzDHBYHEBHN02k23tSLedEwuqSbArXW1pobM3upRXHW47LdvnWwjRo9sCtpeJYAiawYmOHYpF1m1IAMfhEnpggUJ419pVG9TV5u9tfRtPcJa+xmekef7ThzTt8bPcTTGTut63L14ub29unp3Td1EqRSSjT0db15+rzM8/3x2FQA1qmiL9GXsXWICFoWt0SfuCt4fPVBKTLW3g77HdSzG1BBwVaaos9ma1fSqxeS7NAgk93XEy1KzHevFr1b/Nbypn3/f/ZrKO088f4nv/jd6cmiMsMtg22XXFT2/fTm5ubTGbelvXvd2221648/aB1f/uzzH+x3QppNQK69uz96yXgBAIX1IT0grQRCuVYUwBUsSKmDw9tppnkopDYsZjDo1WaZJ9hQNguw4cYOgHBnOLvUMHKtWQCTtwhSMiSEOMmqHeabU56iA3AgUyucZqU3mYYG1gFz5AjiGL0Ic3B13d0DkanJd613uAMmpNTHUWYYhCADnSpkQjkKGuWFFUDGytJYkBmxiNOUGQyLk6bdvpQc45jO2UuteWPoK/N4Xt+e6m62FWtrjahWLNZEZAGbqaxzckRgj0lTS8S5WUwypBPBtEJSHcnJc5FX632I3C1jODKYsNLMaZK8lpDRS8KthXEcBqQguEGBXs3HjD+KoZGGVPQmlFJKRe89BUMdtlw2TH7fz04gWWtdllOZdr13Y3FyUA7IS8IY4ZVW3J2RbVkW/vCcrSVsKlWm07pKqlZRfTxySJKDOfLmidiWtCxbK4cRDvdt7vmwlBt/GoQn5D62eZRkHBlMBEBTZnr1zG7uk9fcogqRmcP9sbV2mNytrj2U3O130deMVmu0jlLL5XgbuXTeex+zoIbVriSpZ/NCdBnhZCjnYpwqgDbcaWEDUI6ugIKEcc6hLhn+7LGRZ+BSSElqRNUoZWYGDwiR7o7MqZQglTqfVnhUnw52Fad8+Mk739nhdre/PtyXjkgL0BFmCWWn0/qAVLWOY9JkRo/oXmZF9t7dfavDQhEOI4hOSrFLAArktBiG8mCsDdkjQLcIlSRpww4oWngtg+NlcHLcw8TIh5FSRteQnI/R0MEe6QLpQ0Ec6BsLRQFJ1dGUQDU8vT6sdy81F4mLm5V+7VM5qU7ohjPYsid7cbPw8/kenlo5eelYVpznq73Xcj6fl2X4saj1daLZVLoylLUaZZkotLRjabVAxTIKdu5LtzVsz8N5pO6M7FfuTEakxXyuS0fpZ06auqNaaXtTp7X2dhe6nx7evrv9qMa5LFd3V7W++lltX77tv3Z1c1ruq0pMOB8lvb1+WPPl96dfTzwETjqt7XtPpx988NNfHPnZ3dWv/cDvz+e6kHSV0ZwlQPayic7HGm3bhLiZpWjDJWeDbIaqWMDpwogcTU0DxuVSBhdvW1GCG43MBn2lgKY0QYZOKhMDdid92Hz3SC1l3u2btWU5SSqlSIxQnWzolbVhMJdmjcohnhmxHTE6TIgBN1J4xBmGKlomdORI+ki7wBsEeybUaTnAmOxGkiUze4XRayjReyklC2JtPpWEllidKu7HFpnY1am3WNe1txMwkU6MNWZzQ3Y56kTvvdtkra1mmg/71tqQ9WIb8JXMzX/3MVqcY7W9efOOe2P8SjOT+c6BwQ8mk8NW1zRFtmFz+/5x0hJWt7FaZaqQKZNALWVL4r4U9/fWuNNVSqUUmElaI6dahnO0uacLbkWIt8v6zX27P9NKKSUTwyms1kryV9BzbvJ4Pb6Wb2Md+Na+8XGDOl58MUux+DS6S/OtiS40MDPD3ZMc/JNSfZomtJh2u967EHSbJs/Mfa1WTMJsJQNSlFqmOq/rOu0PksjtiY0nIsmo3pJka83MW2tzcXfPoghlpo1eWDB4db+qXFtLJ9wEi9azdYrNh8TeLq3/6NxT79+AjY1zOWA285NtWttmEUuhrdGpyctutw+1u9cPb1+/O3x0Ww67XrL17qkJzIyRSAs3mAuiWUQgwslgGDla8zYuHtAwons
oGSVs3h0UwjbeBB5Nkof5s5gRSMLNedmum1v2NLt8ptvumbZRvR6BeEgyg4MxaMsAB2q34WKBoNMz+hJt3u/OD1yi11Jd8ASjm7klLEUkWvpUl/uTme33V8f78zzX5XhGjat5d3V1fTqd1qXvp6n3Po7Y3ju9bG5adCgSQmYWj56dqrUqoogTzEMrkDQnLWUZwz8ErjXqnhMju1MJrjEZsTN7fVKtz87lbp83z54fl8OUrsXrk+/88J/+q+uy34U3ZeTprIcrf8jp5hfTL+an6+udPdWz+29e+67Xv/3BerN//V//u9u0jGO3FZjcKa7oe0kgE1k2ptdQMaVG+06zFplDIjkgmU2s3S84jHPT/2tL1fbBKdDlonz8dRreXOPvMMBHDZQ1AkoXDWxC9g42L8UE6y0x0gaYmd1sSkgXn9nLLWHGsv0Nyd3NLTPXnlY2Jpreo/yjUhZgvNLNA4+kYGYOLmCDmL1mzrS4eEZCQHQlwop7tQicY60odAa0Nz/dr6Xunt3enE7f9HVJdbNprIQzuiRiIsMoQxo7EyGg7MYmQlJCdlFvPaYYPj5zDt4hEtr4JJKc5k5YEQxuxSDFYHOiGZCZXUsAGMT+7fB1uZdE2aQCICw33zHKy7QVm0wCfoFFVvTKAlm05FTM6W6KFsWK2WzT8u709otX69tlj/nKDiesBHwEilw4NhusHCOLeQNMxm279j6YMrz05uP7I50R2CKPR4ebmYhSqz9W9kIbPoWkzDBNVdI0TeMnSvJpAjBNKsVSw3Igxts+Ck4t3nuneQ95mVItlW6eF8PRMfcoebjet3XdH663SxBc1zWKEJmZkEWEMhWNZO/jo7Tx4ZrTbSplOmbLzNEUu3w7d6HCoZp+f6oDyBQv/HG7ZOxt/8laJpO09o7otZbDtBfQPnsb870/u55u991zVZS0EqKVTbYiZsJY6jwcr3LrLdyKObwUmpmdCkpiEtEzwicON0RXVUQYNnbEeB9oJnSYDGbuw0Z6fO59eLDw8bQG6WOfazboNKPnzGFCPsq5Lk0ABq0HOTSv5hjWQ3Qb9De5hbMbYCnKaaSdMyPCy9QjrZYc2ZnVG+Nq/xSpaH242Y4rbRz8JDdn0kE5hmKASQTJ3X5SEsxQntel1zWRhZMlDC4NYzSUDHHkEWSXUr17u31xdbrnreXrNy+f/L0f2M3NdD6ddvdX1y9e/fzdy3/5J3/vb/8VRDtbMSsoRZkP3Odc/G0vpZ6O30y/d3X9O79dnl/dveov/82f/42PPxVC5kilgjEMei0pwUtSwjhAL3zhRwQDG1yOC7cXY8F+0bVoM9IfC7+ELpSYb4GhljUUg49BaizwkRKTuIgeDWQBKeG8nGbU3XzonufzStCtDLvR8cjmNuwQ+uCj2FgVdEtxcAOQyq4s5EiPu1hpjc7rW1VmvFxc9JkaV2e6tqxTAarO6DEE95loLWr1uiuxNPYueI+wyc5LK/MVlVL03gerWwiCirys1jKzp6wUW9pq5RBWuzKJUaEQ2zmUmRtV5bGLu6iwFPoWYzVINwlEaw3VNbY4l/unZ1yK4+iCbZNaQDkiCUeUArDhK5m2VRVdnsB2zxdmZ4NV86l0pNaYGUXXPq3vzl9+9mV7s+5sd1OvG+LBmiUkuZm7jyo2XsWWtuyuoWxSDpBXEdx8GrdN4wUrt1qLu+fmfKBCDmBNGaTmUkiWUohiZuP1Fp++PQ2kskw+GNmkmNb6Mlz9JFGotR6PZ5Ln8zt6Ab1uaEmu6zoec6s+xPF4JJnnUyml0MawUa16GQPEqFCbgdoSOWk4j2Shqfja2sPyME0lXLXUcUEO+mxrLTfWsIZGlhxQpHjJSt8+pkuJr2aDZjRNNTP7srazivtuvkZq+fyuvbzfffzUb/eLtYVRELX60GiPxIWuZtWHzGK8THNnqeP93xFu48BJ+fDRBjNNeSHFKrsEmJmbtd5oDiCUgoaL2QDZBNCYkU7LHDlWGqq3xw+rX3Y849obNI+x6dk4lmboaWWbXJ3mFIWSKJABTrpQEgahSUDvyeKZvbfkjmn95vpKyYeHB7NCU1+blY0k7m7AiAra5rVxhG6TYFoxA9ChlTxnVk5NfZwBIOkKdQnFvfWcpgobvQjOcXr+vWff/KLfvXu7++3nt7/3az/74u6zf/iP/s7/9j+/P8Y/+7/8/b/+4YuMfuqnqTyJMi969zz2u6YpcHUufMh8ap+Uw5kP14ebf/7/+Jf7nn47nc6dmifGij7lIdEwtPmZBehbTMfF5U7KiHQzsLjK8EragJBHcz0k2C5iHxuNyaW2g+9ZLgBWg43rgnRTks2sBxyqxGWZjvGDOsm2ZnGrZcoJfTkji9sUGRxuptx8+ww+bPsBVBY4lIoU6W6F6o/bP1zoLxeGNC5+JMIlN5sWSiIrAHMpG6gR6AR0SfQCRV8bqTqXqVTGEoMw6Die+9XtB1YzHs5L6xkljZltS3ZLS51pFC3TmJbIYzs1rMV3CsJoxGbwuM0lY588GpgkNrk2favng57tgEas3ZbWdrFoGFFbw/VwA1gojai6lFQeI1AMAGIIiIu1Fn4Z+bcZYuwYa2ULN5WCs5ap1KnUdjq//ukX6ykm313fXLcWy3pmRTEOuAjACB7Tf0/ob+YbOgcMj63L96063X3jOMtGXjmlVDJFdzOWWqUyzeVyTqf7FK179d575ELydF5Htvh+v691mWdb13NrnW60jL6WarnWUrjf7RtKa+3q6gnJ1nOqGquH4vXx3chMwBKRmbV4RF9i/H6+e7sMCWgphWSdp20GLus8TUVsS++9G7krU5mqI3rPUFcb7qa+2+2muuuxjncpY1gUQMzM3NXpVxCt8WtrK4vT1GM1s/1hpiwiHpbV3ev+mtGXn73W/t30/Prq5pA12rI4i08Oyoxea8+E4M5R4yCL7Cl3dxNAJNktRkarbUythGNokYDghsWmtljuER/ABG3smEghNuM2o8HMcDH2+TY/lUACW7j9NuKDUBoFWpCB5PA18lJp3ttUbcSpX94ZDTRije6F50wpd4dZEovPLLvd4f7VORMwCjk6m4iAW0Rw6FovEXB81MyPG3Dz/JCZwaxqpJAyjemDxwdE5OTeaMpWsQA3x1Bb7PnN/q/sHc+nHxQruy//7Z9OP/M//T//m5988fVv1g9ffPeDY4v91aHeLS8nTYb6YPTpo4cXuj/bq/P60fn+ePrkxff+9J//2Rf/9t//Z7/zu6flwaOIKvRFIY1yOiI6VB6P37EFHfXd3YnhtOIXLlpuRZxFaAClLdsdoAZEMzSt7ztiYPw9jr+ZgEYyguB0bt/GY3ty+Wxk53OfppyLU76cu5sZFRI1CJqDPQIRtkWq06xAcilHjiPs8tLy8sAcz3c7jAXpUSeVAxdhOk2ZjUzRlabh1Gwk6GImsyssi1VZ8xw4Ex9Ox2e3T/ZX5fXb7L2LvOhagrbJP2NENUrKTserb96uaVXKHpn52ONs48U4gzaf/G9JQphD4yWjmcwxhFFMUAGhuknqvUc0Kaw4Nx+wy+MMGbFieISMc+KSQaipDE
GmSNjjTUZmx26a13ZKxdX1FMf85kdfra9PEw6H+UroD8t9KWV3PTOYmWExbpjHyo4LJ+TyG1xO2hywUall9MKjj3hEVHrvXVFr3e+mx6JGmbvTjdSyLNM0DdZjjyUytGF02tVqZofDXoFo2ZbuVnJN86nQ5mnutgIIdRZWr3RrrVXTPM0RsfY27eYN4xbMrIAholrPmOaamcOqfr5hDFiGyNTxuGoDm1v3s9dSSqn7aizK9AgGqrvInsrM1lOpVM5zHe25xAiLroHH9/4YaIXHDcqAlRjhY3ZJretqYK119jkzl2Uppeyvrtf1vPzijaZj/c7NzW7XnR192ETl0ot5IkUoxpZFTqYpOYAJkkobknNZgpEqzG0INN8KSGbmXH0cgWND9LgmgY3B8bLF2QQH71cpGzd08OM3LAGyIUdX5rY7C4Z7Scjdv/jys9uJHNIWyzDImWY0pTGoVS2zT6Uo2u3N9WfSrk5TLceHpUUvtfS+jmlvVHbSjZtxgghlcFtwZTMYTbAh2SuQBdSSaHzUx8sjs8jFIjWzujLmVGFZ7o5zXnesH/7uCzS+w9vzUZ/94Z//3R/8Rn+7/NWPnrx4fvW2feO5P6751BKMw11O9sL69Mmr/Wut737x7vavXPPD2z/853/4r/5v/+hv/bWP9vu53/X9lEctvU2kmh0LdwAhI7OMQX1YgA343LfEjC6K7DngF26WeNDWGvKCCI+L7NH1esCFjz1GogCd1qkAjKjA8Fvu3FDxCwdRSSA08qzb2lBsqrVmZlvPtRTbiOuPMLSMNJRBEUFuD+VCZpq/Jz4D0BYPCxtnjN6XeFIxorMuTcTwhTR5DOWEk9z2wwYi0Dvq7KgFJ6l13U5taYfD1M4PpPe+juGGZCYTHWSRBzqGy3cmZMe7xabbWM7IPuwaSbqhZ9KIBJnG4OgX6Gaj7w7RIuVwKMFMNQp2QV8u72o3Jkfe8YYLXRYrKADQ04ziJlf0DZbRxl/OEd72XoeyA5t6Oeys4+4vX62vT8X35fqppS/ryZhXu1lSW5su8sMcssdLjX68hwevKTMzRVOtPrpdd/cRINBDTC+lFF9bmycrZXJ3YHBp3d1rnYE8LefM7L1BmZlu5fr2OravNId7zcDxmArLBO0qMRIe6dVhZdpNvYVPdTyBiCi73O/3aK0ra2v0QrIt51prtt77/bK0w3w43T+4z3ViXxfb4lujXO6IgT6JYMyZGa231kZpm0qtdabPGhEr0TLHYWaZzOybBwvpPjaOBuDhtOBbX7rw7oe3tlK9dZpKKWbWhoMUuRuBXEuw1t00q7f7v/xm/+y6Pt3XfTnH4rWUWimMBIwWURRO0+ClghN90I+TCA1/M6TLHufLLXeQo5EbQiVucQAJjGQjbcJ1hWFk8Y62HcO6wN0zMYwQMgd5DOOKzQ2nH//IpKQxE2WqX3711fX3P4ZZQ99rqqKlFWdJK4lqYnRlH+R8s3T3/X5/vH84L+kGKcxhhohGevWyrqu7BSijFEMNb4ahZh8Q5X43jUEkE+dlSY6sxIJMQzeK1kcQDVA1MCLOp9fHnaS+vs66s/sXzz/9V//lv3luxcpy+HjKh3zVX976TJYE385v768fbu8m5I5+PgHX1deftZ/+X//9co4fffXZd/4H3/no9np5+TD5RIYDS8kqqmR0QePTUWlrlFJYCoeaKvPSGrtZJBI0pFEuCzKUQ5FYHuczKcDYzCKBsdAb4xpJZW5nOAtEDMHq8AQbOyJVSYYGJlTcZ6l5SWSuK3a73W43CffZk14c9bJelxDagC4NM9gco6Abh2/CeEK+wXaD9kg+XlvbCQHQBPLi+J0iDBTphew9uJH95FvKFpk8nU6HmzoFeqzmLulQJjdJXHuLXOCFtC2BiM2NrlBSWYrbw2kxpBdF68VdtNbaqC8Xm7C40JgAiYwBStZSRoqOj3k3Qx0oiNCQ/udGhuIWd5xFm+vehQAuSLLCiA6NjaVH6wCqeyqG5pjEAL1H931/pVvs1q/Pr37x2qK6HUzM07l7zNNEljUFCT6G17gQ5PEIXI5TdtzMw4l7nuc6DXCmA8oWCVDmhaXM7g7k1WFXSgHUWssYhnG5rGdfloio8yTlp59++u7t2zGWrIsi5D4TqNPsNvuumnnUuL6+nuf57u7u6uY6IqZpysxipFvv+ez58z/5kz959uGT9Xx+9vzm3ev72czrlJlXV1f39/e311fH4zH1nQ8+ePGXP/7pf/I3f+3nP/vZejqW2gzZuhVPIiMWQs51LLQxUZEz4DQRa+9rb/frmawUanWnplJKcaMiwri7sKdMUly2ije3V73lgOYfwXFcCEU+AjvJniACZF0X1pJzkViS0Xog3PnB1fOHh4d393eHj24PN3ur83ldBJuqjY3xgLClEcBRmqd61JBprAs3Edm5NXc388tJIw7hSfZihktWjxnNrfe+/QXYKN/Y6BDbAsy9AiHRoMs/3CZIcFOK0g2wCqrHoPvaXLuS6IWSK4mA+uj0DR1qyoioPp/PR5pub67WdV3X7ipmPTNKsYiuiFI9oxEZIV38DeWXhJrU5IZME2qtESGb0stKtrpEDwddZsOuCkVSevdzTFbEcPd8e+8tVerVCfW7Tz7/6f2X/80f/d3/6K9qwdv1fGBecY9zWbzXZbr7EF89eXNz9+xKrbdynsFounny7O3+9embv/G3/mY+e1jf3D8tT44ggkTpleW8mEyOoVwws8LCrs7RT01msmyW6cW6kr4tjQevvEZYrTtkS3VlDjOfxLDwlphAykSZByn3tO59yB8TI2M4DXKhjXM/VdiHcYeZ59BfENEJm0J4OK37/f76+vrd28/piGgpuNeh3hw6WjePEdFoQNIy3WroaByXuw09hfrqJZlGjIDdASVd+qCcSAQky4GwBUKSzTUiEALZs9ugdiukWE/YT0+9tR3wWcu/8+Q60r1rbQ/uV60HU4ASxeirJaKbOZOOhTh3Z5oT2ZTI3FY6kLt3ddq0gdQKL3SQiWKT0AIjBS4Hv3yca4PBKfllQypmGtSz8ZLy7hehbi0F6qVaBDI4TdU8MgV5Ilyaas3WE0mWUmzasby1rz/7+uH1ctjdlKmcTg8IzXWHeQay9w7B4T3RQ+SY4HEZ4S7UDrMM1skdmOaBDEbGtgGu0xTZ5nkq5pKmUluLZDst7dxWCYVmLEYa55v9ZGb3x2Mt8+tX92L1Otdpsumw2+3cvUy+rucXLz5IrS+/+WpXLPLdl1/fObicvzmeHqbi9/f3QZPUWs9Ms/LySyjwU7KUIoUQ7p4ZtU6/iNzNB818+erPAZ7Wqe7P109u1t6LT4g+TdO7u7epQ2ZG5P39MVqfc0ktNllXUDbXOskp0K336JmReWrrMKKaplngPJe2nEpxCWvPFFuk1KZCVota+gCqIADREkwbOc/mg2RAqFlhoGQfKrPpUCPzuJxftn7Y75/y+vTl+e1Xb64+ujq82J3Zelvqbr+mYG4SoYJscVf6jpvjnrbobSEzDzMpSJ30DoCMTNJ6iqRisz3AZf2bTA7bv4sCTkQgyU56y
yUpg4UMNoy+R1tZGIMK0WEkMyKjsK5sQZjPgFK0Inr2lrtMLYKFuC9TXxA5HE/LPN8+tJ8Qi8ZRYYUbUDrB0JrMzaay8biSSjnKII045PKlK9gsS2ej73c988FKWge7yogyUHaTjM5ezFtG9KmWk6xPX/+bn334935TH8f9X9z9s//iH/ztH/y1HQ/v8HYyhEoNO9Ve8vDm+Rc//PTPDnfl+vzJvdV3L77xd8eDfacRu5v19qPvlEX8nKj74/xg7UZx1af7qVvBoZW3kc20I7gmi6mOeXxQaMxoHj0XqV5w3uG0wsFYApUEt0JBcnxfrfVSjCT6UP/Pks7qRjGMKO4pLkKTDNxRq6lcOJFdimBJFleRNDKXBlzT+hGs10+eHo9HSaVUYOMDZ2ZhGfk42zbYRgsesikuJpwMY7KoeAzDdF2qjV+ImwbEqOkXqer2xxlpMeBzgC6kYCRb0iInX8pcjaUtZb+/gqK1iNYBd7MuDafj7d4QSqAJSTv33kOWkqfD4rJNGtilCY40Go30MprhHgnkZpQNkLANlB8718HYBB+RziEIBi+vcJApN6WiKPPc+a43Rt8sG1NxZUnftVjr9YQwR9uzfvXTt19/9vJ6vrq9fpItj8d7Mxyu986ytJGHOwx8LmjBRej77QWMu5dSbIdaq/qGUazrCuDq6grp7pzmayAiW2vt7niU5FHBLGKZyjRN4+HcysvTcjhczzefTPP1zc2Tedp39NPpYX+Dd2/eri2Wt0u05cuf/+h8vC/FH04ZEZbvu90BKWzkyyQAtwhtVr0n64N7rr6ag4iIIE8A4Ubixz/8yeGwo2O/34OqXqd5LmUqZZqnfZn3Hz//5MmTZ7/4+Rfn8/F0vi8CsvflNJUE1hR8+AqYImJt6i3W3gYKfHN7lRm9LaXK3fcZS/eIiBTcapkr0Hvv65l1vmggQjk6pLDhAOsYeXwZCIZ5vTncnJd7NZ3zXHecyvz2m7u7u/tPfvBR7Kkevp7rbu7IhAylxAzLbe7deGUbTaH1PoihTgJpsLGeGSMEsFmwDQYUyUjfnAYf50emAy200T5lhF+sPTmTKTZmp5EwuSItGYPXZiwsMJhXM5WRWLvZq7jTSX/UX7u7efRY7+8ebl8cIk9eMBYiRmUI4Gb2oAECDZbOOCXHy8iTGhxSTtUrbU0tBa9iydYpN0BKmUjBpVQB6MjkCcuult3i64+Xn/zi36nUH/3hD3/v09++fb5/G+8mZ5z7tNs1Ng9rJT9/urzbtSdvZ8rO8+nlzTcf3l+1qFp7k6utb69f7RhT+/j26A9zz+ltDV9L9wguzyat58rw89StDJys0AYxrZRiVspkbYCG+pYqmuJjLqJtpp3cnHSV1ZRyaagtMnsnUUgErBHdWMQpYlB6WTfvdacpOJRQFIzoyhxUuXHGtyYp9173u6t1XVsLM6vmEoTIjbq38YgFhSmZe5SWSYbMtpxsWDeUTLOLp6A0vG64Ef7GC/ULqgRQntKlB+nKsYglYDUSfc0spbDauT08eTYJ56HckYbuNTW4ByEBhawsDQmzY+8JlU1ilRwJpNhssEimw4ip+FydVO99ATIyRlB7kht/f8NeOZjYwzpnK2BEDpAsfTPAHm6uJLmrT6SHXQkY1zE5ij0tIsocc+zS8uam94f257//ed7PH33wyen+dHo4z3O93h8CEYyMVG4kxRxQkoaTwcZTykxzlOKllEKQCDb1oZiVu0/TVKqZo17HelrPZ1/PF0cE1FpsmgmgZYBeykzujPNuOnzyG5/QosdDWx8++NS//PKnn3/+RWbGn5+XtYvWWwYQGtdS7KvMWOdiVjCWsWStNf29kmBccL7px8KskBwirLHXzczZ6vl8BnS129+fjiTbeem9O9iVu/0kxTzP7ry9un14+vQU7ebZzdPyIoN9yb5GX853d3fKNZHsIbTidthbrz3autsfjsfj67d3mbnf7/e1QFm8NK3TtCN5Pp/XdS119nk2K8Pxv7U2aGOTlwCG0yO6OvtgLanR1EspT66erf3UIpdGBJ5fXyPy83/7xdV3bj/4zmH/bHf3cIJsN+3XdXWvQxuxXUwD6hwGRcOzYqy+x1I1AYS2UNuLq+iItocYthXw7XAdxT9NRmyBCoKgLTkvQ6mkK9UuMZlhVkeaRyAycV76qa1XMyjQLwqA1DA5BNilnpgnnNaH5x98vN9frafmMq8aMpqxIFbSWEgZUiYkE7k9040hQm9jKZJ1nlA8QVben1vvaXBuZkFJH/4fvZYSSqq6ApGkHeymvTvdvXn9e7/+N26fXKu1ibZmzNezrT3I7jnHmVwzuyVoWvex2DLrBdMnW5CeWL949vkz7D5583Tx6jGbmiumU1lKfvH0pcfrJ+fbuU1QKcO49TI7KzOLFy8G06CRjfS0x4UYHleUG4Ny0CKzWsnsotwsIY0kVZ8AI9KQZpnatDNJFTNqOzyMRQjaxsgEMbKWsNECTYnj8Xxzc2VmvR8ze/ECirAO8GKUc/lKUIOJOJK5LLZdKzcTIoGyYbGLzedvw4WHw+AwB1QYmTbSBjQUVdIWrWlQpvVm2plgp6Vd314tZ0jR+yrNG8VzbNuR1kfvjtFK9zYSBxgKG3vay5eBNKhYMa+V+wICKwRhTQ1nAHxLSbD5yJiPberWfvDCdAGApJEmCuZyB4nJcqp2dTCEn9Hodcm1ZC4zsfKjZ1PJ81/+2esf//D+2dWL6Wm+e3m320372+uBvw148byuxt1Q4siGzR8IF/po1cE0w1xKKRvl3CSa3DiVycxqraJaa+/uGlLVrFYvZRpSe7OyGItPhWWadtdPbqfJerRs6+n407dv3z48PLQWf/Av/5V7fRTTuru73VxfSTF5cacQXTPcMnO/G9JTbqtIGslpmtw9oo0FQ0QUotYqoe6veu9mgyQnNM1X10MEcBPhVkf1j74uy5IZx9P98XiOaG/f3v3iF78wK3XnVzeH/fVht9tf3T7d2+2Hv/bx+dTv7u6ODw/tvGSsa19NbSqGXJ9d71oI8rXHm7cPpF9dXZX9Ps5nQ1zPUxRfU11Bx86Yssnr0tXWaBnI9GIm2lhH0gajfKxVju1cpvmw209Tf3h4OL5t+/n6ux9+/Ob1V5/f3x8+vT68mHrv6lFoNkX0wcLlY9tgYG4klkG8Gzcqh6OE0S+X5rY5lywzSiWAFCWFqBi6X5ZNRGnJsTTdBMkrkqZiZn3M6ZlQFkNL24S9XNZ2Xtt1nQAMkb0NpsQItR100mwSvVSSEa1MRe2CC2FzKQQ5VlwgTDaioJIcndHwMqrcXmRxuODJSs+lbWQEcXBIxzYZ4CpGYC9elavMWE11xv6wu/n4+0Sezm+rVyT3cKUeuOxVm1VkeFptOcmTcxRN5yfgPoebFSDTWh+gyWCtVHCd+pqa39wcf/bBX379/PNytt/5+qNy/6H6k+I2SYMm7GADeuQpErXOZsgtm2NEV4/WOC+dvI3jcZzMioGsoVEGWikFYGZ4NVbKlAFrVh4jHQqZYjjcVAJOwbQC8zAhIkXPzO7FqRDr6bRMU7m62p/P
69p7dXIk+hK8BBJxiBu+FYXGy2IVGOqfzT+LNKIOQANIsEsJbWoRMIkkUqDUB9Rh0qB1jY2lww2MUAebytXt9drasp5arO67oKn3yD46GB82ZsNSVuprwEpaWm43Cc3GMzUD4AbN1N6sOhJJMIU1R7I3MWSt3zoQksPVF3mx3Blkfje8z9mjSLgXIOWBGk1nJGUj46DSmXl6+qTm3cOf/fGbrz9vHzx5nnZ+95DX1zuaxC5m9ojY1AMZTRpBXXYROYDwwVV3L2YQYsBoNBjLNE0jJ9rM1nVZlqXMXjWTaQQL6JBPsF3adHv7Yn+o7og8t/XuzTfv7u7ulmVZ7zMizOju+93NNE273a73Pu23e9KtKpPwt+/eHQ6HnivhmSn0YTsTaBExS5m5LuRAXnCxuShIVSUxCDzjoqD5tLkSrr1b9VRYNTMrPs/7HYCneDa4OhGxnM7sdl5Or45v86uXYtbdfDhcf/ji47qfd4f55vZFW/L8cFxOx2gLIPIEqTKMbbf3m+vrtcfLty953O2neTfthidfrWNT29CtZaDQiLlMa2TvGVJEupchv0qNtReZKdKl1lot/OiD58u5392fv3l7f3V1fajlmx+/zoebj379+TI/nM4rY3b7FR9NAmlk9tjmxtHbPo7x8cjBt8d/QY7s94HSkIHE1i/GqLCAD4MDCMoMlOIAqlnfMEuMDiwDw3HefQpycLOUiugjXGQQYMY5AmSPUykfDpqm0Mlaas2MbeoYT3V48A2RuACZabSBGlkIABeXpWZwpmqPklioVRaWxY3Bi7VyWDhRk0W+JNVKNzeRbT0aImKWRZkZakxm1qpaMWVp+3V/dr+rs2suYR3lcNKTdy9qn7wlaKnyMDURBTNgjiXSpnX31ZO3P/z05+f5y9VxOqw//+Dtm6v1ozf3RZ4aSgkQMG2UELZspUxu1lobLfswtxhFUxKZSdpgsVODZTIkxSQViYyLrABEaltjVpoG6Q5YoQHOwjRO3UyMKJFhpuEEC0tkFLNorSPrPO/3+9Pp1KOVatzEn9v1JwwXePMczDkICFIDAjYS67Yz2U7i0Ti8p3hqU73mdn0QHATC4Wr1KJFIL0XCkuLpBHJ/uK6w+zHHJJHqeZF3AnAijCOgJnr0TJRdbKQlfuvO2SaJAZ6YD2POtM0MmBezwW3shREj4ejSY4wXPdqHbXTwC7OJZkanzHxGtehOTGUKqrVFSNr00ZPdj/7kmx/+27fPrz96/qEfjw/I/dVcS+0wy4yIlOh0ASPP3gZzLgcaIzM3x1yGS/BoHCmpWKm1bhvjlDJab5FZSileJ8sUrewjjT5fP729ur69eXJ7evfN/cM3X71+ef/wti0tOjPMUaadl2kyw/X1IS6BwPv9FZcUYoC4JGEx3xhL34cyVxhyeRMRUjQgED02Leu3zW2GPiCwQJaZpZS1dR9cqJIRUUrZ76/WZS0+Oc1U0gugtY3wXrjbPM+73c7s7HbzcL+sg6ixno/nh7/85s/rrk7TdLi+mvf7w+H66c3TtvZ1XU/3+7WdPTvzOFdGPxv16fNDbzqe79/ey8putzsUM882yXoFY2iEkubeufqQAbdkaoSqa+xcMiLo89LOUynRy1lrne3JvD+e4u4u5mn59INn714f/+KrH33/r3769IMn9+3ovehxrgUuCQW4TI80s1Biow0i471AQVJmkKRbat3akPFctseDWyp9MwFRDoForEGU3ltA6QQIsRisd5kz0TLNAkDbTDfTttwnpobZyWAhovfVWJb1dHVdD4fD0parOkfvoNx9NG1g2tg4hUaHOu7EQt/yxMlQdmqujtnCEGJmV9bLlmmYBA2x/7Do8cLwwkWnne2KdkUzENM038dK1QJ68WSuGZW5YoatRnzw8Ow8P13LYrk+fbeX3ZwK6IEE0dap78/T7fG6s1vmpMPbw/HnH/749e2X3qaMfcHbu7m9m1WiF9iJcssCmJLGKtLovZ9GCq772LiO7mXbICeHrbdtjDuaO3uGCcPopyuCJjcjoUUWRJFmwqBwNiBAOgrpWzYECRVn0xBbwgAro3YNKxX3iM51nXa73W53PudFlX1hxRJMA2CyRqqUodUi6aIiiqk9BnGNFloJdjDYJ24QYUpb3xFQxYhzH8zfoW3NkExzi+YS3e/enatd7eaZnS36oKxFbm+LaJa2MDhySoAl+kli9Qtvx8YOWRhWCQCgYoFIujsptqYaFsngex70400msgxMZOQvadA5Nfr7ceMQcDcHhvtk5Wm24pw9C2MlsN/VdWm//99+dvcqv/vJp/cP7473mqbpZl8UBlhktIyUsmMQNTtZUMBMCVCp5u7V6E6zMTAl6cVGKrG7e29rX9cxyZtZsVKnQwg9e6n73f76cPuszIXWX7/57PMv//3dqzfrGhnFWAv381TmndeJKLW1ME7nU0zTTMrYo78rB4/1jFQpta+tn9rOPJaTyi4j3D3WBFHLLKmSql26uGmOC2Uw80bcmDoQbhbsRk9Bqx32+9aWd29e1+q9HQGIcNqW8GV7ZwGKWnfaMRZT78ndYf/s+ZPe13Y+reu6nHF8d3p4eCiT1+plnm5unjx79uyD5z+otX7z1Vd3b9/cr8fqVX1BQnZ68vRwI3s49uV4tySrT4fdPtHrXAtRbFlbwOlGzHXtpbWIUF7uDUqZijhOvot0sRWRbS7Fn1zjat7d3Z9fvXn94tnN9VK/+dO7/cft6ntM5ShauoCXl9S2i83TmGMD0mXZCgwfbgBCCjREx6Vl2r63NeLkBn0PUWV1Et4MOYK5EzTPTCbcXT1KNa7IizVK630cHoUw80F2DggXVXdfovdursglE5Pt19bSuiulTdA4jsZHlsV7jJQaxrYSZlhQIakoi4kjlF7ssDIAqbH8G8xvURqB1HWaAKA1svRaXvt577tY0cscikpmZJvp3deyHkI/eDO/fHLT/QRfFnNw9bx1yi0X02m/BEhN3XgTh5dPvvrhRz9+t7tnz27n63V3ff4grXx9fXp520pBBYo4VBSgQ1LESvqAUmutZrau51SnmaNI/bLUzq2+J9OimJssW4h0rzIms8oQM5TDpjGzIYNG30oZsRHeFfCEVValZB4RDpkztbqRqKE+1gOttVKmaff/o+vPYmzbsvQ8bDRzzrXWbqKP090u782bN5vKm9UXixRZlCiSJYpF0jIlGqIkk4AkWH7xgy3IgGDYAAXZgB5sGLAFG9KTDVoWTTVVJEXSrGRTLavPyqzs8/bnni762M1aa87R+GGuHXmTkvdbxImzY8fea8055hj///3NMAwBIkzGqQorr8swkQsimmkxRQ5OaFaMgLyruHlA3tUiXLUwO2pVPYfueGOeg5MDA5JC9SsjAkIQVwPElMJqc9122LDDaCWrIRBHAQMFs+KCBpZNyQiMwLyoZABHRis+TTPuAAlVyGEN1MAmICJjUvCMLkRoU2dsUvgD2E7QhDBxkAGqpN8BEKyeV4AAQs0prEZfpdhwSKPKLQQnTZfn+JXfejIPx4cH4fryhkI8WHYxgZQeY/Z+Jq6makBubHU
38sq0Qkdj5rZta6oqoKppjHWMWvESbObDMKgrE6QQiWhvvne7Ht2Zifcfvj5fNMOwKbp69uTF9majg5QxU5gzaTdr2i44Vs57HMVCNnTkCEQGumbEMfchkK6J0YlI+x7cZ20opXBi91yPQRCMqJqNUEQcgYgDcYWh6mQzrQV7Y14wBAHA0AAxkgdysaJubTtTg4rkdaBoomZINuRbqyIcgpRSwrljSPNYSsmlZFXFGJeL+YGVvMw595th2A606fuLzdXjZ/OD9w5P7gu0n/3hL52fXdxcXg63F2Bu0Nq2MIyHbeC92aofr9bb1VD24zwCAAoDzpsU26YvMpZsOA31kUCLVWcQEREEqCirjETQhhGcyQJR3l9SyXuXl2Nq+eh+c311I1fd/tud3xnjaqsPp+SPejwkojondYAIXIICmLmhAwdErq8DAYO5IxpV57u7uUANQfYaAYCwc56YmcZghmzITkXMHCCiMJAquyMDupNPcAtkdK0vzKewzF0bJ48SCEaVlEKTWu+FORpsCdkEgJAZkdxV3IEx1PhPnDBCiIjm6K6RE5pykaQwc2KgQlTcakrtlFhYW7tIAEqoAJwthnZWYLNgSuYqsiAWGVNMiGO2ojGZh5QTY++5HZMobhGGtsxLTGdHV22+2b9ZAgRXLQQWRK0POLp1OcmTw+Fq1pPldmxfvnz08vWbV7NxM7t5sf/hplsFRyasHTkHRAWorzcBAYAC5ixN0zShaTiYSZECAEiVCgl1L2ZA8eAEClCB1OQECpHCZPTCqtMzRsMQ0MGRDE1MEEsVlxGQAQmMSEgukWvPpAqkAFHAHIAMAqgxaAocUjuMhYgJSHddCDQ0cKUMQOQUKU6/mhHAig8Elc8CRLUJGdzYAjiA1WkMGeBOUujoWHUBOmFoAAAgYiwsBjozfF400vFeSmMf3HsbI6UCwBNrAE0AYwiK6hkHLhlko3QAPOI2prbkTIgAXAiIAgMEc7HSBCAs9VonCoyOnsnYiGttwlU3CagIUuncfufNclADZk9NlrHh6jyjSQnCZh6zZhuNkBbd3h/89uN3v715dP/VnLfX19fLxaxtWybMeWAK42iCBsZurmKMQAgm2sZUPHMITWgpkmomcOYasc1ECAbEUVVFi6qFkJYBxKNjAkyrzGlxsnd00s320C4eP373+vp67IdhGAJS03TtYtYsGsJgCG6OBRDdqDAh8ahlNAU3cXc1TLEBAPNkIACqoBx4VKHQ1O4EAIhYnaOKSEAKSJ5qRIgSYv1smdHdOLhDqSUdIhK4SSYHS15HB+oGCBzcLCOTjRGJ1Qs3XDPDXKGMJH5LROoQYwRHBogpGPaSiRC7ruu6zhVyzsMwjOPYX/SXV++nFNbXTw6ODt966/Xr2/2z8yvZiJa1ufkgreEiNsvTLufh7OZ6fWtHh/cscM43h43ud7whXkLocZPFBgCjmFXQFFwRwkTtU0VAcx6zaoDUgBabtYu92fzF5cXl2fb49KTvt09+48n9tx+Fwziuh2DIgZUAtYhxE3iQ0cxD25VSIpJq4dh6VQCTmYPrdO+HydSG0yEAkSApOLiAoYEGJjDUyhAKAKMRsrGJFUMlINXd8DZ4cM5gQA2M5kG2HgKRMsYaGuQQsZ7axVyM0SSIyHJ5eDV+1LAkaRyNIgKYO7kSYoOAyhgMDXSnlq5QPgtopj1h6+IQmdB7Ekuted9LXjKyuZsDkyMhBTNrlEfyxGxxCOzUN30C8kE9ILtgqZpJH0ckFBJwxiCt+1COl3pueO3Em1nBElony0DW4nJz1n388sVSbCZgQ8qr9qYRXmxeee1yb399REhHN3uN0Olyc96tApgBEdcAaYAqJCIiBiwigdmIVQsihRigGJIC0aQDrd3knVG1iqunCtgckavR0wHuWDaIpO4AtT9QnWlWl1p3JAKmWHE0tdyr4xpDIFAKwcyKZkQUB3IKiRO4mRWtFIjgE2eODRicCHlK2qtPhESTdKcKbO7gi04eAACxbhJTd4SI6ggWdtoAnuSv6A5TRCXDZrMJ8YgZi42SFQHQaxYBuKGaOQXT6ZQamStosJ45B1d0JwMEZ3MMqPUwYwCMNW8NxbUaZznmUhQBpmhaRGSd3nogRJ36NEg+hZ26j4wKwEUwcNMkyvm2aVO0MeCsa8t2rb/8y++truWVl+/dXF1rsYP9/a6NqmNRc4QiVsQZMOdsgHWvJWYgFyttm2KMpgVUGqYYU8X6lEwxgIMU2YJxm1pHVxuNFzmPbRdjmH/m0198evZE8sV7L75y/SKP44joMca9vYMqX6laFEFDdEQnMgBjQmBgK25GEIhngKgg6lqsNFHMTBVCSGaGzloEESmRqir5NG4jqu8PlVx7ZQSOpmhW21gTFxegfs7gVo93eRibpglEFcGWUsKKUggZgRGSlWmOjcHAS+JU8QOSCwCogQd1h65p1at2npG4aWNqGwAY133f91782eOLm4v1s49fHB8fnxwdDbOO6Pjs6fkw9uJjEgkgs657+6XZ2cqeXN2Mlvf359didrs5ni1Dm1Ozt90KjNkRYSQrItl0wslNKIhxrEUPqEJtNznnB/eOb7f95flF287uv/ro4huXi/vN4s1Fr04FyMw51RlLlxoFkJIjEbgpGEOBT4i5COpIDB21YmOmoam7VugQMKDXBl61R6FNzGAHhYlLg2g+4YURdtIBMICiourBrRrR3d3MzWAiaCC615J3ipcppUCKTFxBObWlXqeoWrVsgRixAvNqeef1sF4JyRyYYo28Fs3oaIROWHOBYUqZRgR0qoFqVWugisUMOaIjsEMVhiqSEwRAAhdkRzWQTErAyRsjdNB2FACBGFTLqh3IdFF4BqiGHx7d3LTnr1yml1/cX/RHOToCdI5p29y/Ps7ogbGWcxW2VTcsZAxuEpjN3W0kZqsRYyFBVBExrSJCBjVAQqRIPhXNVQpeh9FQUWpY09usrjlAjkAWFCwSmym4EZFaIaoRmr6bGk4rOwKYAjOCU90hzKwUB4CUQlYhUDdAdMZq3HCYkL+TPA4AqycJgRGQDHc6px3NapqiWADcLZeIWClv00hwF+eEjq5mFZrBMa6HsZu3ITAQDMOwI1OjmRmCOKC7er3QjRGGIkShWseCVpg8e5UeEDIYAxZHMVcDN9A6HKhpSnHK7AEAQEKHKj4LpO6IphOMiwgJCLBISXGmam2bxNejhrZpJOs8eUybqzP6nV993qbZg9Oji7PrpmlO9g4cpMgQIknWoq6FirJprsIHASemysILVC3MJTBUW7m7EyVHbBoehi2Td20rmotskRrmWa96+uB15gbAvvXub65XF9shbzdjE7r5fF57gByCu485A+QuhprkgOiI6ipujoRCQYObWYWSIHpATBQzDMQJMSBEcmBGoxHQSNAMEzFNDQWqMafEy2mi5NVED0TgBI3NgCp80KZEVHdEb9o9d3e1wAjEKAbqgUjQiQxR3YwwOBiaq3olJhJgBaLxZA+BcXujSMSRYkRwVTdwA5otcL7YL0ppM9v26+ur7fX5+vnybHY0f+XVN770E1
948vj51flVHtaOedObZ02tvfnGfLOaPXm2LmB7e7ONO6w8NeN83hLiNo+zxjJB4GWRfpTCHFUVkQJCKUJEgWK/HSLDbD4X7QP70eFBv803z9cPTw4ur26ffvXi3mdPCmWBwATIoG4mXsmRphpjIEZT22nDdw6mqVCa1vvJseRQMahYW4a7eD1Qr/evoblNncc6L5hcZzVq0wgJKATRAk4NsqpC3LlTcIL+E2BRZ2bN6o7L2fwmhApqMnWfIhBtV0qCgYCxc43CQndDpwCEDpnMwZmQmSOnrEQGomaOjjVyiMyAQBFrPEVhiuBigIbaMhiZgSaJgipEBMhGdalURnQmVyQHLsmgK21ybFQjJHdXchLL0dpCrczQ8GYh5wdni4EfXd1bjHtsFFCbHAiyK967PiTrAlOEaToKtXu4azoDM4NLNb4AgIpjwBAJkZW8clyII0xq7rrwUW1Wu+2QzZX6aeaEMOWtGDoBFkaaJA3AAE4YgLymrPnEoawfVU1LiG402e0RHUzMNFvbUU35KaU4KAADWoXbwVSBO02z0EkNhkQAE8YCEQndEdkC4JTvjV5XSkSafoTqsaa6Zm1Ss9R2QEgswMvlEsGMcSwZECe2vTsDkitCQGD1TFRDZNQhGBlO1hA3d6vT1Yng6E3bRlLCCtWb7pY69qj1Rj0HACKB2m5gVjUHVXRWBTIpNAgBsWRdtU1AJHJadnRy2H37qxdf/72zh6cvq/Rnz18cH91PDYqOAGBmKigCZVBAAjUHKKIcG45QSkkptCmZSwoE7tV9CtOkzVWk+O1sNgdDVWfqIDjibL48Ou4W3R5fXn387MnjYSM6YgixbZrKmXF3kexeOGBKwMyom528EgnYKQAAqqtgoICoHFRVAbkYAATXJQZiRikjgIkSEZmSB0bAEKNKVtVIjGhuJj5OBRczIbtXfqEBrAnIHdV3bVgORDQOpWkaTkEkT/+LauJgrP6IGADBtY7ImV2ljpKructc1BQR27YtqgKmOStkAASmyDwquAmFpt2fdQezod/0m1XRcvP88vzZ09N79/aPjmd7DR+crm9vrjc3I2nXR9oOi/30xc8cn531L242q+iH8xPtRYdxPvcYcDsyU9zCJlGkSMOQmQMiapEYG3fMol0KInmz2RCRI3cpRuKI/Yvb6+X+4WzQ57/z9PQLD+Jx2G4HJw6EaJCLhBDRvZSiLiEk2D281kQ+dTcBAMzNv089g2n6WqNzABFr995r9WSGuyqmylEmEzlxJS8AwTCaqhOp38nrpxEcuQEyVL0sIrpo2yYiqioad/Xduqc1WwIBHEZyBg9u1cwMYEoIBGwT1zhESk24yYUCl9ErtNpcphMh7PwmgGbG7pySBnE0RFBwJAOHoAAESlARQqhaEw+TRY5qoCQRJkZvGywIsqIB+XwIlJfinImc+sPVvOvv50jRhR0k9OYkgK3E05tZAMKaRgaTWAR2EqVoJsRUQTmIiMAiBiAppRgpjyKTbb0eiBS+DwitY01AIKVK2Z+MUrQLmiBSAFMTolR7W8xkZl7B4jQJEyc/AlrgpKpEMGlLgEOg6jepXhgAEJEpdR4h1JEA3eHRsSa6TRlyrnfqSQBACrSbRMLdIo4G4BEJKtCzFq6IijUjRJnqYdFH9fneXinqgGMZnF3r0aZuFFjPfZPv0d23xYDQXWs0NtVUWfNp1wIXghkaV8ULKNZxQM2Dl9qBmdy0FW8QAEywzgbhrhCdFKukPsSEMc5MCcy4KctZ97u/+vjx+8MrD1/dbq9zby8/eolCGcYsNjapNcFhWwBIBRGVAAQdAxsoOHSzGJkIrOEYwWOTBKjUcDsRkJIiQ+xUCnMkTGapnS3nyxmSr24fv/fBzWq1QXRCWu7NQupCbDbbWy+WIseE5I5o7uolE1XXGLqBgRGFui6kFB1MRFR3ZjFkIHJXMwUg5ojoqgXAiEA1AwAShcRkSBRcDRlaLjjpv2uBzvVSdYvEDAy0iygSK+a6aBKAuBsHAEQxFSdjSFaFVoBYMT1s4MCYdiEY06VZVz1wGUcgRA4Vqg3gJlmKQ2iRAGHohxEAutTMm+Ohl2G9Cd6cf3y9uV53y3a+v3dwenRw7+jFh8+ILBBd3AjixcPTg1deOnr/g/Pn5xd7yyakcH0r3SzOF95vhtn88DYPJBJn3VCk3jullJQSgKy3w6xthgxE1jTRXABlftDZJt5eXXdtOt07vfjm+eLVxfzl2XozCgFj7W86EeVclsu9Ycw7lW597AKebGdR3B3HfVrlzQnRrHZg6vcQHKlGqE05HkqT2kMAzRGq48m8iI1FsG3qkMxc7s5ZiETEVkQ0IwYFNxcR4a61nKmiaae2TKjrsqF3yICmaL7zzZLpFDRFikiIiqhYTSGIaMhI9Qjg5Oj1bncGVtXI1MzaIRX1KtJGR8BqdK+/vMYdASqqMrU5Cg8bzgsMI5YBnAuCIzCo4yb0vHWWtkQ1uj25nZ2ujsg6B3Nj0+BghonAGBxdQjUEI0W4WwXrrQIIhmZSW+R1vWNmMZViIVCMDZHuqsZa7JPj5B6ufysiVjEpVpQzTBsJenUjkpnUHzcXYgYU06mRsgN7MWLlzAugBiLHWijh1LY3VHHmqX7XXKZu+M4FD2A+ddgRAAhjXXInGItX6zACVfdKRVEjOync9QSrd7Y2oGrrFet2xIruuunL/ZfnVrL4FO6zayzh5Gd2uxPDqPq2OHRRXdy4riSKdZ5dH5UZbYGAwyRPVi1VoElEtb2DsJPN7AQ0U+3zzzS1HJEMgKVQG33Zwl5c/NMvf7u/2Xvp4fLFs8fz9vDh/fmQr02IMLZtm4uNWVSJqL75Zi4YoqqmmFJgQEtMgQK4dymOaiEEMBzHMYWwPFiWcVCMbqyQYur2DvabWVqvVx89+YCK9rkwxyY2s9kMwMe8KbZpu1ZLBnQCdRMEiMxAYJ5q39aw9l8MHdAluxERccNUY9lMXcHEo7qjChAFVwyhrd7FJFxUpC/FvJiNpS9FkQnHPQ4VMFG3Sg+BkAElqRYRMRdmxFDzyKBrJv8bIqRIATEyujsgI4FIdhB15RgYCKhycg0RDZyYiMP06YgBoU2YCEC3QLU6HGXcOMdFiqWoDL3DLGFKx0FV6ZaHvlcdr6+ebw5zt5i/9vIrV6v17eqqCxQwPnmxXbb59QezwxP66KPV+SocnOyvZYM3ut8usWgMmGIa+twmkgLjWJgx51xlB7k4EcXE/ZAtURNps9WutZTmN5uh6Ppo/+j2ye2w2px++tGgYz/0s1k7jrmJTdu26/UmxlgDWHAXSVCbH0hOlS+C4Lbzy0z1h9d/g2l0Z/VKR5IAXHyCJ1utVsy1mv3cwByRxdQDoJOZVaXM3S3grqVMQz8Vj21CdEQEU6pyxglFRQhM7oYAgcEguJNVPQ4YoThFx+mEh0ZoIdJghQFKKUSTob0KaetVSsBNjCmwEyqCAQYIpiWgFgZFY/AwRTlXhY2aIigJF3RJFiWauqFhCcqqil5C32BJQheH2xcHjx9dHx6vDx2cBA3ZpBAysyCyoLt7qJUs7CDpA
FBd5I4UAuaMjl7TFM0sRmZlcVPFGLFpGlV18d1aDHX+UD80msKFdWe5nxa32psiaMkhJakAA5qaqgzENDXKETEAOAMYGprXhZDQOYSKQkXAwFNEfaCQYiiIVmpfiOv1UjkGVqfAGIwEa0B2hUe7IzK4Ice6eoITeYBaHOBIHKe5yrQNeqjLPZAQOTkzDln39hfg6qiWDbVmMKKCq7khmTqoAYGR5VF6QMLgLmYQzIzR0RU9oJMbmzEHRGeuMvnqy6i0ZKJdYlNdFZyAnWFS/uz+oXpKmMDc0JgSOCLmFJmdfvnLX1/g6b2T2ZOPntw/vT/rGDyn0NSq82a1VZnYyH2u6UJsRQmxa1KIFBBi7cC4ptQOagCw3W7cZLlcIuI4KFGTxUKc7x0cxybdbC4fv/us7/uxl4ab2WzWtq27l3GL6CkABiDLQBIxIKI6MdIEt4Sq0fcKDq0ECWRiwar3tyIe2MA8IFFkmyNDYDGTUUreljyCChDP2nYZY1jsd/v7+7NFt1gsZrPZ8UHTtDE1nBIxc/3kDE16NTMZrRSV4uOo282Qs1xty2a13q5uh80298N2u21CnM0W59unxMCMMcWUpjRwE61EVaitAKhPbUgeoDJDHXfqcasmUmV2DkBlyDFSMeUwFNlaQWZeHjVzna9vR1C/fH7ZrdZlvd47Oj49vVe2eRw2BcrVMPajLObw+c++9PTF6oOPn86Xi/2D+XroZ6lLng2wSSEXpYaYuZRiJu4UQioiAQOIqxXiFEJYtKHPBYMv58361m/0Zm+/21zJ8289e/iphxwpS4mRDQwNOAQA3x2C64McHQHMFQEcq8UOJsWxe7XAOaCbIKPpdPSvhTdCtQ/Cbn135rDrlwAVI8BBtLhFuAsGqToCNQB10GIAtSyApksUmKp83QNWHQ9N0BE3IgSUenBgoynpkCEwmVswyGCWmrC3v3h2ljmhKo6lVAdm7TCTu7sieCCIzIA6ymgRqnwNHTIhq0V0I1RCBQxOwa3OXxVtCLnNvhibq8YQlDEoQhQvES1IIHYM62bcdEN6QUgiEMDj9d61ypWkEDAvNvtR96JiaEKEGi4DVQZbg360pj+0bRtCKKUgQXBQ0MBN4oomNHdsmsaiaZ6IK7v2BZoZ8ARvhvpRAijUzc0qIn9qkpi7B3DeDaPHXUdkwk46KANwiO4umusBIrgXVSQMVN3hUp+tSZ1AFhF0App8mlVGXhWQSDwd+hwqLQvqCI3IvTLlCTyi15NYndwwVSdbnezVSJAQay5fSgEAFrMZgpoJiqNVHb8DYA2kEJiO3hx4LAIYHMEJVQx3OXHBIdSrmNAC4W4qZQZiJgbmWByq0RUBgXiqIaeqpnpXd4SE6fp3iIxOSLacB9nar//mew/2HraEL84/Pj097mbBtIC5iFPg7XaLxmBSckYyMwOMKta28yaRuzJ4YIxMlZvqCIMBSJ41PGsacB9HMQwO8ejkZL6cr7Y37z5+v+/Hsh0I8WA+b+dRnfu+Z4pd04AXs5IC61gCIlW43kTrrCZdcbMpTcJdzEMd7UCspwqM2kbOgjnbWHIpvQoTtzHs7e0tDk+WL79yenrv8PgknZwcdW1sojeNMwqhAqqIgKmqQtXxGgCxmXsgBkaEGsA0fR6IGMANS4bVWorwzWq8uFidX17dXLx1fnZ9fn51fXVb8mA6tDOftWyMBIjMYOZiMA0QGUHqgRGZpuoB3AlF100zy1k4xlGEIvdSQkyNQsm2cQWy2UHSMsYQypivrvJ6dbU82t8/eTBbnGyvZmN/vU0lZnz+/Pn9k/0Hp5/6g289fvLRcHC6zLDuFFMT3D0AmQIEI2oAoBhKbYCAqkJMYegzARe85ThzTeR4sIfbHm+vx9l+E4fw4Xc+eOlzL3vwLCMFIGZQIzQH2sWoQo1cRYRKPp0iFWDyqKKDeGXIuyGjI3k1k7CSgU5leO0tUO3UmFd4OAEQYKRQ543J74JhJmJd5ZCjeSmFOGJg2ZZSclgGFAaMQFiF33Wq51jxIzV2BOqpCmDKjSZsALQUTW17enT89cfvz9pUdBBT4ErCRGRQqaZObJrkomMePLShCYaGak0MrmCMRuCE7MBTwB2LOrsOSfumxAKhhMLFWFlCC1HY+wZHRAV0CIF5PqpCFgphmF8vr7/z6je5vwW+z/js9cevJGmAugDYQbX5T8q5XUm866SLAlJbu0WMhKrgGCsnBx3VA3psMJcBERmroAUoTHNRnCAOAO4Bkt1t6Wo0NbABsfa/XEHYdnLjySdVZTBcB33TqUAVAGKVskmjegNcyugyxlm7iAwcTLcDE2/yah4b4naDPYKyL0URkMQUFEQNEQXVQBvZRUmBuJeJkzJtxaWgTTRFRDVx8T4NMbdqV7eytCHfx72MFApkzwDJxclFnZlZVJBJnUYLQXMPkTUkLaQoUQMEMA3E2cTImTkQs3rXpMBG5DlnBCZXBItVi41UkSeIGhAIGZEGUyQFyIxd9oikAKMDo3kgmXG7Pd++/7Unnzp8JQ/9Ve8vnz4gdJMRwBwQY9j00o8EZmJuFNWNArrmg9kSMTr1s4CgmkLjxpEbdb+5WS8P+XB+IMOoan32Yk1M85PjB2JXH7z/nevrWxEDp1k7b7rEzP0mu5e2CcQKruDahuCjGQamiqUnxojobgPAGL3VmgqOECihaiQ2sxhqPB6VQi9uBrU46w6WB0ePHi5feeX01VdP7t9b7u2n1CDAQFbyMJpdq2rZ6rCZSKIOwMDmSFT1P9s6WwKreegFQWCKmQRHMgUM6u6RAwHMGj6Yp9cf7cd4UoKTpzzw+fnw5Mn6o8fXH3x89vzs8vbyQmWcNRSDpagUwzCaAhkwMoAJOJqjmXEMRV0b9jJE7qB4wMgGrM4FhpCRscNQrOioQDzfPxmlDOtVzuXyxWp9ud0/PDh9cN/opednl+s8mDd4WxoYvvjmw7PL1YdnN9DtpRloL7Oui5i30ncpDrk0iYPFcRzBCR3FLY8GasxK1CRCInMTV2y7wNz2m57mzcFwdPH1p3tv7cVlcAVQIfICGB1I1dCYUIlMIGH0qslTBwOD6vI3Aw/m5lLp1aLKTCAS0dUQEQWQAotoDFyGgojA3phqAfXkAQYpm9ywIaK5uIEpYIoYmciJDYsCUUokOghTCiGqiYeW2DFDpKRhNLfoUZ0HzR3NANaCpTBF44juqFuNna9jQ3nMATByT94yJcBLGSnpEMpM4mAiwSIyhTmrgsgYZIHI0mUCC0SlGCaaxDx6lxhqAIDMWHjL5XLv2atPOrKQEGkEwYXhiHlGcQTLabNsDTMP7TbNxvuhHKyWl0/uv3cWV/u5Yd7eJuqOLg6eveTi4TM/+Rd2rd5dJvJU9Cl84lHTfgHAOUw3RpWJmdSZCcE0XK3wgLvicZqLAlQM+u7ZcGLtOvgkoYHqQYYQ76JY6mgQgBDZfHvXOIId7MIdUUdzURlt2K5vn9xcv9Pf3tiKbHEslmfN/Mayie75Icz2KC2PDmdNOw/tPKZZSoligMpDqVz77x8np0YThc6qw5+nxR2qIyY6FIzs
Ybb9z//W78b5laN7lOIu6IVRHVS8uBcwcWdHLwJsfR4HNIzUIWAWC1PzgZBr9DMRMhMRcTAHFfUyCWsAIZgpQLXXVopOnck4VxAkNOqIPKASc6dxRBiZ5pvr7ZM/OHv1/qs325vSh0f3IlYlpqOYA0A/DqOYqo8ACMYGDTIYNW0EcuNhjgnR5/OZ9KqUroZetH/53t7h/vzmetWPZZRIYX764OFsnp4+ffz82WN3FJEQ0nw+N7NxGGLkpq34NQtEgEYQ1J2Qm4C5bEPAmFKRwZQ4BEQuGr2W0VrM+xgZCUqR1S2qAFM8PTn58R/99Bc+/+rLLx/M5jHNewTRMozj1ZBts/WpoWcTup2ZeZrKoplFDvVYCQjuPPUDq0SNCCHUxb02Kj04UajpE6oqoiK9qyFyZUZyoOOj+OjR7Kd++lD0jaHIx+8O3/vOk2996/mzZ5c3N7eOfYrWzQBQTCPizKEgSggBFFk14cxqIyoWN8vGFiykMWRCZDNlQDVx9+JbVz08Olqv10Pfj0XOzs5W283h0cm94+Nhu9ne0PX10AUHGBbL5rN7D9/78OPvPhtfO70XhxLdDmazTRlDCEroBi2GcSzi7gCBGZHGcUwplKzEVVJh7M5Mbdv2Kw17/aydPf/G7cO3TvzIiwMKMU/IPTBHBQYj4gzCNXgQCKgm3DsREtT+aVVgk1P1xmE2nfBAoBNWdfcwBwQycKhYKURRNXBS51CDkd0ABLA2ujsfrFwzfapYH2jW0nGi3rUotm03AyCDhCYMDRp0oWHYuHQKIJl9FE0lAySkkSlBbkLawsYbD+4ia8FogUvj2ZVj5zl0gCl4kTTkASIaldAqjmYGAu6NB690kyruqSO82qBiQF/v3dyEq23LY2ezVXwJX5qhu3ZBVGjLMM5Kd92NuTtvekRcCSyuZv50sWqc9ofjuS62956PIRfIyU7C2//yX4b/3sN3dN//ocdE1t8tgbswo2kz4E/8ZBVx3s3I//883z/zq2EiVtz9/klnbj/43amxD4AmQOgQAKbBxPYrf/CV/9NHv3T143/ozzeny+xPob/1zZbocP/+j8Q3foTjEsJs97cYOO1cQ9MLuNuQAKCeMndQjLs/gkAzUDJUdHjzs//nveVcRDaD1YA5AlIANyODUPWOKvO2yWLrnCk1YloMIgdgNq+QQiczI+aAMXJAI9RSsmjKCo6gNQAxIADxTo/gjmaqjpGMIBRjZHDNzJ2ZicvhYu/m47PL745vvPza2dkZKL/6cpLenNTdHCGLF9V+KDXEB5ARDQMEDE2cGWg3A+C8j11R6QtYm25ub2eBX394qmVz/uzFWELG+eLwuJk1huPX/uAPpJgUBYDFYi/G2PcbVW2ayIwhGKKbGVNFh3DNWTbNMc1MIRcNgWsiNFPkpCZKZBx8yHh7nccxtt3hZz792mfeeuWHvvjSg0dNDKPLpsjTksv2RSJ0RCXGyCGwOzkhGMU7RendZ8tIdwQHALhTILkLTMAzhDvn85RhW996mjhiiM5OgA3P6yWkWVf9rYEhORG99db8s59/9c/9jz//9Pn2o/c2H324/tYffPj0ybnYKjZju+iRHBRECiku5ntDn4nV0cA4AaGLesyb1HRCRKVUdzUgkLoS+3pzm1LbNI2K9P3m5uZmux22m83Jg4fHD+7n1Xa7ud2MAsN2b968/caj+7fw7ocfreez/eW8UWg4BMIiliMIQBOiDWLFHUjKEIiLmBoExhCYw5SQxwGOD+Ozqzxr4mun9z7+9tn803uze42CK6CBR2Yp0oVERFvNFJiqVqj2uabodQMAJlQzJBZTwGloVwCIdvg39+pFAQYCKmoE9RkI3Zko5ywGhYxCIEAGQycyZ/VIuAUcjVD7WduR9evhveXipS7tU9vqCBRa9JISQzZX6SIOwpno4GCPSt6fP9hkREiceQOPt9u1qjTxlTaNMQ7OWrAfn838JvTXjl0JiWPq1puN0dgYubWI7MUoETYaLPawRoCJh7VbQmudWtCBbbMYRhyd1VljHzs/4Tioq9Pc8bb1NuLsfLm+aPKnfR9wSYAQVyDw5sUr9y4fBmi3XTB8nGET3Xez+2ml+/7Ddr/1Bx/TgBR3S2ElnABATXTarbg7YwCgotz95N3jbvPwnUbw7q4jl2mAgrjbSBCA9e4AUN+U3bOxUeBBBCFE41Kgaeef+eEv/gV/9p9t6DsP3/7LQD8hV9/F/B0fb3D+DnHneKL+QH0BTuiKaARMOGlv/QdeqxFEn5zTvjtkEIAyTuLLRA2rLnnelE3IN3ULLl4Rlw6GPNXI0I9l4+OokmgWzJjC1gq7oZMzMjAzBnIiINDK5BCtKWOozmLGULNJq+Z+l4tSs2msmEcAAx3d2RPodjieHT5/92n/1D/18OT8xVlgvn+/HW5L6pJoccR+LGO2YRSnYGAOmDAUEWoZmTIMe7PObTza3xtXxSj0IufPn77x6N6D2Xy77gel0fa0bfcOjmfz9PzJezcXZypQsqWu3dvbK2VcrW66FGdtUxuRUja1dAMzclI3A0MEYhYrgMBIIkYQZzG6odlaraxWqjLbXz780S99+oe++NLnPvfg6DSXUoZh02+utu5Egah1bNo2EDh6jUYE5rqko/ruOpw+QgSkaku9E27Q7nBpExm0Lu50Z3MjQqrBAPV6qBNCcEAwGOrWCJiYGjYHdAK/Wm3IEexmr4s/8ZOzn/xDJ7d/5vXz8+ErX33+ra9+7+n7H7tsUoPtYs9bX8MKSIk6BUYSdUBgCoEpl+JELiIVu2ogdy6NkreOFEOzXO6HEMbteHt1vR764/29+yfHGJZX11s0CoPqeLs/O3zz9UePzy+fvLi4t3+0bDv1LQQl0SbwMGoXwoBassSQXA0Ba9VJVQpHU5MqQzle7vdbvVrdvvzyvccfnmHG5aN049aGYENum6jqfR5CighOSIYOdbo6IZS8nt1qCkF9V82msxGQI9jk3NitRVYzjKfPa6KJSSlSgIKTOtU1E9GIBFEZ0XpXasLoGjay+dSP/PS/+Gf/vTIGdw/cDcOAwYkgZwnEw7iWAkf3+W/+3/+rZ89//k/9O7dtPH5p/6WrKz+/fSA3z5ac3v78D/3Kt97vw5MOD211GD+1/Ev/s39r8+QVs9lGb7ej5UFvrz/Ula6uP1a7HPRsSNbsJbn0mqQ7kWgreRGMEN29sWLoBsFxrkhoLsEAtgKhQdR2261nB/AyOyNFKcTCbR8v9s4v5x89vF7cv3qUrGULJ9fHHxw/Xc3Pkx4Hxx9Y0+/Y5xOf+Z9d370OWmEHVLtbCf1O3Fq7K5M76ZOItUkfWX/mB575k/UUJpwo0dOxAA0BkHfONner33cAQDNHBURy8B7wHOHKxw3m8MUv/tSv/sqXF3O+9/bP8fwByrXgxvstrH4bm/uhexTaU2iWjq1D59BY3ZzuzHXTmKDSjOqb4Z84lxAAqJYYGrHrp5fPw3yWTQFRAIRZzMW84JQ3UynDAtR0c8BVdASHrCPHCACO5rthEJGnSE1ACKNbVDNHAyLQgAiIypWwCDa5pAwV1MA
JWbGAGXlgpGxysFxefe8cn9nD+3sXV0NAv3fSjVkwBVAw91K0H6SIKyBjFcirjkO3mLUtqQ0HezMwbdNyfTsOZtuyAik/80OfT5ZfXF6tNA3SdIuTxX4stvngg3c2F1stiOjLg3mMTc6DalnOuhDJTMFFik/w0dplZoRpmzeXEDi5o5syawygOtyu12U8WCxOvvi5N37qD3/+9U/v7+8r2nbsn108LUSETLEm2yMAOWNQH+5EEOpm1demBruy/ZOXJdXz+07KBTtWoaPzFKwCUKOMsLI17M4pVi+ROvNFRAc0U8MCUIgIq3NWIcW565jaKFmvz1YUthbyg0f8r73ysP/ZV977cPObv/3RV3//W09ffNQlmKeWuwwyokYngoiDjJEKIZgGQgpMDkrMJlK7TC0ZMpviWLITzefzlNr1apu3/fmwXd9e3n/0qdOXXnrx/PkqD94sNPeJ4PXjg/NmfHZ5c1vmJ3tNgzkwqfq8SUNW5gRdGIbBCu4KZ8hFgxnV1Bd00xSpdHNaDXh+cfHy0dGz5y9c5/PXDjQXDm4m5sDMVczruMO1T0wqr36TAmYITBQI3BDMEGrUWO2G3QWxKgDUAx+Zq9eSACMEUcwFIIC71wnGTokDCh6drGTA5ZCHrOXP/o/+WvPwIYExAAK5CZIWKSHMDABBAYiUfvFvfrlvXw9/7OtPbj86XrbrnMvg7Zw862P4/3z1+db0lGwWwpNtyB/43+he2zs9efOoeyTyoA0PAh6tLt4D95sb8XK83Pvi3/mP/p6eXTWhN5v/95oihlQBK3B4Pb9qTthHkHJzSOrP7129Kbh2aGabTMOysa5dPj/siWUxMj47vrmN5a3bV9PQdayYcXHdlaNuldbHkgNVB9NdF/v7q/hu3f2Bl1IhqHf7wPebMHi3E2BtAvsOW7hrcUxF+p0Euxh8v/lzd0qow1yfSHQOUO0lSDtjWwXJwoSBdOagtiVQ8B7GC5LR+ivaPoFu/8e/9Id+9zf+ATLf/6GfhbRn4ywVBSfQEeQWhCEUp7m6GORINXS7kmppItG7Owbwu7PE98fBWo9XIKvhOxZvvMV+O24zWBGqzgyHCFTQzREANFFZyenJyXK+PbvYpjlzYHCAgADsZu7qHpigiZgYHb2ImwbFbKZugTACq4ojWl1raluYAAyNoRnlNlKM2EoZZjx78e45nPevnD58+uJFM58t5ge5FER0t+w+ZM1ZKt6DiEspiMiM872l5rEhCiEEV07N1c1tSot+u0pYfujN13zYPL5cjdRsjPdPj2eJr66fP336MUHMo87ny3bZDWWj29uUUkzB3cAgxZrIY0TBpAYuIBGjORO4azJDKo6GIW629uxqe7B/+sYbb7/9Y5/5iR97/fhYhs1ZGZ9vr4N5IJw3ba7FdWUWcRU3WIEYRJwBqRKTQBkNHar69RNXsu/kvwxgOHlq6pffnzbhJKmuG7wBYDV23g2KbBKDgReiEBiiu1oxBwsIwGAAFJOoA1d/EhEtxuv8nFZNE199xT/32bfKv/ajX//q1a/9+re+/o138vMwa8f5zBTQLEX31pgdNihVPw4+qWsmDjZ5GQak1ETOosOwjU17eHwwbtbb7bDeWP/+h8cPtvcfnYzr4fryui3eRkMo90/2OcVn59eX1+PhctEQJIZxHCOiko+lD4EHkbtdpNbv7M5AyEiOwEFsu7dsNrdwfnl2erR3fpYRr49fO7ktG0AHh4aDEY5mCeL0/tuUfjTJSiCQK2DN8HOYYOgu9XhU1WwwDUtUq6nzrsyaCtOxqCwiEUWfRlBgTgCs7p6yZeVATUPD1e3104cPT7D0yh3WxMBsMc0ESAUapm/+8pd/7ed/8/V7eN49fPbNJ5d4exEQIC2i9+ejY2u5Ob8GgIEA4pLe/e7wv/tP/gEfgCi0jIu2MRgYARpAIAx80MxmqZvl8pBfBzvcsTR2DcAKN3a0kNKYj6+b1f4e6lMBv9kbYLh92TdFZ4DWdyVmVZJVzOT5ZOhyCM6wt2r3xllGXJQwYp5LWPqRpRIYg/p6epsQfmAdt7tl9/vfAgDfOe4AAOD7R129IyNXPBDqlFjt/+zyXR8MHdR+Bd31QuqChbATtsJE+3AAKndnCjQAQZe6vlvexFRcJA+bCNnyOvfXkbc45kV78IUv/OQ3f+NXOMDxm59LsYHyXLhBT1569BlrS3GkiBDGAkcVqwNAAOxI6AQIjFMrxncvczp80PRHybhiiUs+hHi29c5EI5O5gzogm7mDB6RSNCCdXV7d9JvQthRcxRlArQRiYiSKbeIYLIC7ZGMsYgpoCmZuLkzRDANSFWJXJZk7uiu4OnJDEYA2edjvFk+/+8Rv9PTg5P3LJ6fLk9kMtmXdpD0vg7puxcpQvZ0EUOGVHkKIkcu4und8WMZN17aEfH3Th9hc3FzcX84///qnz148ux6lx9a8uX96ggmffvzu1dm1CQPj/tEBsI4yyCj782Zi7DgguonWOSSAORo4EXNV0xOiozNHQFxvx6GXxeL+v/Sn3/4jf+xz9x8GCnl7+/Hl8yFQw946QEzutEVrKuwTq4ir6srdIhcxJ2BEM/DqljTHCdW8exBNrBM2QCJERyP3ym1EpsqUrbgkcK8XM9f5fn2CO4dzqIqqxgDAjRBjCAhgDuKugZqiA7IgkGtEZFNLiRKnUkoRvlpvzG++8Pbyh3/8D52dv/0rv/7h1373/Q8/eAK8Oj4eCUVUkTqCAuRgToTgWu0pqjWJiCcNQwAxldL3eb1YLGLbXV2vTPT8yePt9eWjl147enBvc3NzM25mFHjTHwdanO49udk8vr5+Zf8gEDWzTkSqddURuq7J4Lu/eWLhgTsoFlDRwpy84HIRh3F5fl7u39t//vz8mq/3Xj08X10sZvNhO3CKkXl3GK9r/tRtUVDG5IBmmRDNlWu5wmZAbuiutSeKDsjkBl7ZkJVqBaCgSjZqCZBYffIu1uw0woze5HUAcbhGjRH9+fUv34cfCyGGelwmwhhcwR0a1K/8oy//X/7af/Li4ys9uT5+fam4vL3utN0kzNeunsU4L91vc4dcVDFQjOU4ly0QSrrpx3TVQ2gxF2v7UFQsOJfB9fZHyv5DJhmVkyIygE+AOkOAAACsSAwkzJ4Ek3JUul0OgaGod5sUnp5evnTuwR5etz0ZbzFL7Ndxc7DFZbaB80gdg/Rwu+jDTShXzUWAyXxUhY+w6yVCVarCtIzVj3enUMT/gXY83/3ExH0Dd6un1eknPtl6AQS4BpgC5GpLZ+Iz0t2GYQC6m2RiAztbsxeAAlbci4MR9bZWl02CLfTbOK6jrC33Osh2i12YHZzc//V/+Es/tb6+/2hv7JV0Q/GamqeYElhnZeFpjqGN3DoycguUACJArL0Zh7C7meEHxwPonpGOX5z9BqinVi83l2s/M0ZiUlVVV7eqljHiOFqIs4+untzkMc73pGwAwCeMJRJRIAzETaAYiTRvFIsXgORO7gZgxKCF69hj4kQwodX8GzfLwdveN9yFsyfn4cpP948v1reh24e5itiCuOT14JQLgGoRB0
AtSoQG3qUGA4YQjpbt2N8u9uaOfLsW4vbq4uLV1156dJA+ePZRLjz2Ybl3Mj/cv15dPn//w/V6iJjm8za0jQfb9j2ZHi+XBsWsEqlIRKrcEBHNc1UbMgXJJTAzGlG6uOnBu/v33vgX/uKP/eRPv9I122F7NdyoKzZNwHY2jmNsUiAqMjIgYEHkqqQCMAPFYAEdCycnomju7lKBMEVlMhfTtCJP2qyahDWlNtSTkGNljk4nWucqFZ082EDMiHi34PGExnOtmE9Sd6mgEkRCZLFMhGCBgQzJHDxyMWsgRmZHIXLEOKz6ja732vQX//wbf/bPvPXOO6u//7d+57vffMwwLg7VQs+G4KBgRKzqlYGoABSi+RSkAwiEGomaWbPp16FpT073V5fXmmFc9e9873v3X3np4Oi4X89WV5cZynJmFP3h0X666J9dXh/v780pVlQqoTtRHsb5bDaMY87VbIiTT469aUlGTx2im1uTItly8+Ly8njvwfPnTwfIJ6/fXw99WHSWC5kLOk/gbyd0ZNTa11J0cHIOAcSBkcBJCEEna4F/37ldHeQ4VY0Ilf/oBNlLwakzYAhmFtSZKRiUtLgZ7V4ztzFTbvL2HAFcVwgJQpPNGR3IIurXfvMf/+o/+ht5lMt2dfhDV/Taen746Xkg8yiD7FPQzjeZ9uaha7C0rUH2AW9F9me90dhCSZ0BWDFtY4sRgrlRgZA8hZz7ra1mtAAyAAcPu7pnUiGqCwbLgGDMpQ3eoOY4NoVaclrPLp7uX5y+2Gs9BGhYvCldaVHCpi0HqnNiLQ6C7Uzj6RqvD1+sFxY4fxlcwWuXQwDEoaCbW+12MezKaq80JGfw7FZq4bzjVBHBWLmFBOauquImZoakZgZWYWlgppPS0RwA9Puqj2r1dUOpGsScs1eeJJCZKWavNiJzUJMaBuQoKK5qCu6k4iIgRVVdM5tSLuDGDLNf++WvHRzOUpuA+sghxsgBmTFEDAGIfcYMDBzqGgR3m42x1trNwAHClMsFkExGgMXJ4Yf/5Lfn5dR8IN+DHIISgqAPiLPiWww1GdQsMvs2l4TkKlunjqCMNLbA6moITQqz1lpyKyYWzNWci7hjiCykKjIgRfdAaIFMxRzV2cooTZxjGEzHBR1cPr3dfrx6/fjhixdncd6dLOdmoiAbFwMX05yzOo1mkQPHoKoUiRO6lkXboo7z2WLbizP0Q7EyfuHTL8+68dmz7Whp8Ba7dnlveXbx4eOPnhZpmahbzlIThtJbn+dNargRyRgZa9ChOzGoigEThokk5yS5tB0h+PVlNpu99sYP/+yf/qkvfv408Gq7eXK9KojcNC2CGagDx9AggKkGYjMjAketKfcENKlCwcYmxgzkhFETAwoCanc0et5b5T5i0+RAFMYgBmMLYBgdIY8SmgRkBsZQy3aBab7KU5VvzqFyRmynrEEgMt/1GAANmBCwpos5uBgFBFRzAWzZGzcnGxMbixCxKoEJM1gAYxhA89lGYXzz9fkX/4M/++73Vl/+xd/96te+Pq5kf07ASpTckKgYuBvH2JhlQCcmc0NwRGfCUvIizfp+GAX2jo6HYVivVlDy+QcfldOb09OXqTldXV3fjtuWNbIeHzfNEF6cX/V7e13LXmzWzDfDtm2geHF2iiRi5ETEWiRQLIMgopgSgdkmhNA2wSRcbS/u7x1ePr5Z6Xn7ajsiOkJCMgZWNAANmZlJHIpzJPMR3InIzJkZyNzHtqFh46pQw5vRS2QQV6wYEGOsyXchoCCb9QMUiI2PQBEwFR9S4GjooCAg20seTXBRovG4Zig9hcYBvWdsSAQjnj9+9nf/+v/t/t7J08VzfpR7yV48n33ndDa/vDZgRUUUbsnJJMMom1lYzhWuOm9x0HCgxebASNiDJR2R0liAgwc03ais2tkQfQYqGqfqcAJZK0N0gwYHGE5LGtksFcXIs7IEEAN2GDuNSUjQsVj0nrRhg4LD3PebbaeMSRYF3UGjGljPTeRbCb/0//g/+HS5+l3zHc1KxSbB1IVw9x3jmM1rQb2LKnIEILUpktSUwIAwuKOqDhM9po7O/C6pkijYzhtSD2u1E1TJqJV2UA2zAcnMwLs6XQVEQzKo7TgMdtfD4hrEsvtyzaEpxREIMZraxdkAuHVNRB5QoIpt2evxAhFtonE57kxu7t54Y2h3cnKook9T5yT9uDwdf/9rN3HxJns00UFz/b/iVuUZ7lPTcCzjKCWraJHEgd2ZMTkbEQI0Do0rqStCASvgqGbAu19a/7zKeL9FCkWQKYECKDKpgKpQ4vbm+dXq8dXLR4+eXV40XXswm8s4EBESanExVSMzF7V69FXXmDi1USWfHh0N61Wpp2KGAAEAAElEQVTcm21zwdDeXvcM+Nk37qsN55fjJmMxP7p3iIzf+e63tuteBQINB8s9Bc9Dz+Rd06CDFiEiF0ByAwHomOYYinuP4ICBAUJ057C+hu0W33zrCz/75/7oF76wRMvb1WMyYO7atNi1QDIRIVAdwFVfOiISgd9xw73OQhkJZsLYNEpWynm+fNx5e3G5fXF7cXi0/9nP/eH1QEPMKQw8jhzakaLmYTabMRhgIQpW6oko2g8KDbzinbxS9A2rN8/BkHavAe8MKfXUSQAY6sEgMLJ5MS0pJCIuZcKWMxJ4AwqgFjEQ4RghwHK4zavLdx/cn/27/94f+947X/yVX/vaV3/j3WGzWR4o8JBHbeJMdFBRhCYyVX4vOKgUxRIjl9I3bSwlb9al67qDg73N7Wocx2fPbjbb4ZWXH907Pbi9ScNmO+Zt18GiZb5/9OTZhcnicH8+jn2XYt97iOSgqYlC0Pd9IAyBSimOliKXrFW5C+Zm2rVpGMvl9er44f6Hz14cp2bvJSoxFM0gpF5SCqLBARGVIykEnuQvqDtLN6mjQhdpBHczFVFAckCEENhE640PdzZD5pqn4pXySZGZxEwJE3EuOY/Yds26vyZKIFuA884fKoPALZXOYmCw/+I//Y+fvPfB7WfO7h023xjOWhERDh2//+LjkOaBmoI2WgGgotC0cUyg46qLYa3DvRgAxoBGquoKSB5UkAA5WM3/xpEUIUcVqaOYqQGAAOiugDSE1JUVu7VCM+lioaPb1jIGKBlZKDSlaWReKCgWMgVgN6CtdUbGNuRMwMiw5WiaORMVCi+eLHESXVI9I1TgtRNULUB13iPipFGhaY2jensB3KnC67I4Lf0ESI4gC2ihzp3MAIyZgQkYxAoiAoXdzTNpHRHY3c0r2AzNTRzQYWLETZse8O6Y7LtQhTtB2+4EN5MCYFzP4W0IZqJaKlYfsYL6AW2arwOEqfeECDXx3dzdBywA6MjgoS7UVT/gAigt5e72BvcOj50V0PK2dwQFryNfJHCpjnaKHJRAIFPbQOBBdJ5CqcRb9BipSRBDbfcgG/eazd3qEBIda96sYwwkxhSC2MhIpowxbfJqrzvoz29vP7p89fClq6sbimG5XEApWK174qquikWlKBIFMlLVpmFgQLLlvN2sb4/39y5ubrvF3vnlOgJ8+tV7Mq63o4w2H
9E+89nPPn76wfn5eb8ZQHDWtYs9lmwuEhkDBwSoAk1VjTGMRWLTmVlfbhI3hBG9EDAFXq11tcHPvPnWn/u5P/z22/dyvtqsPg4QCJg5qhEjhoimI1e8DBoRglktDrBqVOr6CtMxy3fx88Egr598+1v/cHXVv/bqT+3P79/H+O57H6yv//6P/vQf7W1/3EDHDTNk96adjyMwzc3ETWOkSqf65Pzp+01IqlIPrhchTNZrBADCyt6gO8nsdLmiuqFjJIgcQUTKmFOcZR2wNpetOAphZGxEmWiFljikFGMZx8vh/dMHs3/j3/pDf/qf/4l/8o+//tu/9bV+tG4WzIq7RWLZ7SZMWC3R7qwCKXkpuWujOmzXNzG0+/v7fT9e932/2X74/jtHxw/3Dh9AaDdXpTHUMswafuXByYdPXqDp0dFC+nGeZhvfMpKrMVHbpu12aGKqQolcNDASRVWvY1Iiartk4E+enj08vX/1eNUg4rEW4i426DiOQwwtArhbtfsrIRkEgGATNshRjSh4DgRKDiFUt6QpTFgUrbcJuBo6IVFxV9XakFEdQyRAcycHEpHAs82wMVp36dHTx9/Nt//vmAzHlPDIaCD41H/6H/0fv/Kb39vsb+6VxcFsFkaLGEeVMJ/dH+cF2q2OLhZj0w8SaJ6iuw1tm/qrMR40pqbiGM0IyKNbQJWo6EyGrqitk0YYmzK6udXRMddDSPXA1CphJAGAbVo3SX0QRwh4Df6IlQQzeiE3J3H0YDMwHBfFqI++BAVjj4guSsgH2264Pdpfc+hSsyvAHcAnwp6Zg0z9IJ/0ajidfKcFvX5Z12XckdgQHRlrAwcRKRCzqCqgVX6GevYK63BC4B3hHaakTYfaTaxteEebDg8VyQjoTug1YLcC9ZypmVRP09IM05RiGhKSTX05BnRkLjqaK2GAuzqw+t/E6O5vrFMHRgBU/OQ0wQCM0AANzItBambBETsc9FYwx8orIiCiuiGIkzuauLtvc1FAZ1QEbri4YAzoQEwhAiYwdCvmcjcprW8pOjpOZFWUzMTR1AORglAER2sT9Rf9xTvPXtq7v7pdU+DlcuEyGDpxKMWymIipuUqVEsOo46JrmxQQjQKC+d7e3mY7NN3e0ydni3nzxqsnUlZD9iE3HprT05OL6xcff/yBFkjUcUv7+10/bHLO864LzGYSkKwalGIYC3RdM449B5p1JMU4tMS0XePNhb766ut/9d/953/sJx4M648vXnyzxVkIEZ2YIxI7WT3KIQVGUHBzq+ePQCATOjcAVOGQkXpFuTl6XAYul8/e/acncfHSZ3+ivf8qYE6X8PaXXn3/vW/9zq/99uff/tLh8entTc/WhkLKhZncBQkip1IysYUIqjvfxt2Wv5O3775nAMQVegeVPbq7IaqiCwGxNh53hZErBwghqg5dmBl4cWEIiHMwUciJ3WRGbGKbYkTcoEft82rzeLk4/Nf/zR/+mT/+1i/+4nd++7d+Tyzv7+3nPNT7tLptEYujR4710BxCMFFwn3VNydL3m9TNjiLd3t6OmT5+8mKb9dVXPzVr+eLZC7bUEXdJ3nj10fOLm48+Onvp0b2SCxE5a4rtpu/bLjVNc7tZu3vAYIZqoGruSOgByV0NYTGbg/Pt1cXB7OD5u5cP2qPFUbMd15ETh0QsZoCQiEghM6Aj2rRnOwOyW0RrY+pRFcyl4t6ZGQBNtFoTEBHdAMmBSbSMRRcNGla4fx2rQFGLkVVH184hpzg7e1/++v/1v56n25mUEpvF3uF333v37/7tD07efP18u7pP7YPDw3D+IQq5SpH+x9566w++914/rhtBtBYEbJCGI1OSghxnOoIMELiBQCUPDMzAMUAWYbDipYDNiiu50jAFTtTKvYpz3QzB0ciDIkuUVbM5wHYMcjXrO7iO29M2R0UwJPfkaEY5WDL3vt2OaeV4LyqWSAYVVBTIlbHkZh1UqwMTvC6vtvPy8CRHxUnzXVHAAIATv/aTV7lDwho4EcjAjM0RjIkoe3avNx44uJqaOAB0kdztDnsOu8IfSj0TeA08qNCPWiwhYtUNAYCCVSYx2PjP3IHuk2KZGd3VVGIMAFpyTikhpHqKwKkFDKCIyAgbqDOpquHZVV5857D1aew82RCYixsB327k6KWGUJFpHEdxCMjIBDrtiQwo4mIugJsxN90CXVS1DYTZMGEiaBkZvYAVN6mRwTW/cRdqpXWEh67oLSkblxy5bcW3WEpSfPrO+aP907wd1G0xX4KKuwqB51ytp0VUtM6cQERm+x26SRkWXVvGsjg4XK3XzOH8xeXh3vzR6byM60FgNVDbzo+ODl9cXT598oE7MHDbxaaJ6/WaiGbLzs0QKYSguTg5BVY3avtcFil0oEaaZok2eXv1vCyOX/6f/ts/88/91KfIry8/eoewmc/vq+YmBC3iru7CAevojnC66BiAERymngxOW7JULXql2COiITz/+Gy7OqfZS4t20Szur6W0y5ju3x8u+0cvvcFQPvjed/29r7/1+bf2ls3Qm8qslELsooNaCZHUklhEyJ8oeuDuesA7QPnOToNYv/zkZYj1LAGOAVNImGV0d8aIzu4IImvfpjYiuNrAEACJrCNvCbPoGGIg8JJHgtiGrpSAJmdPnxwcL//qv/Ojf/RPvPp3/pvf++pX3pnvpRDEvTCjqhO3blZMEY0wSRFCZA7unlJS92Fct9wcHZ3crrb9sL26eO46np48PH700tWL8+tNn1Bmczw6XIj6s/Ob0+Mlindte7O6jTEOZYwxpSZs+x4NY4xmNmbp2oRI4tpwRAxjX2az0Fu7HjeHR8uPv3v56ptH7XEzjCNFMlPaSajr8rZr8DqAIRGp1e8H8kgITAp1gxcAJCJnd3Iwosl9AtmhiHlDCFoTDaCGDpml0IltOJhsjbicHDy8eHY97964hu2yk/MV/9I/+Pbxo7cutsNe16zX1umaMPaULZfo7Te++42+5DfuL956+fXLC7ja9NzpxTs3qpi6VvPQm4cQyApbaCM5BnMXy6HjBj1nD5GpmLSw7rLfFsQEaLXWrPX0tNYItkbP24xii+1Mift21DHuIzOBk1QuWqacsXSldQ4wcIIZUEvg7FAMG0JFFILVTJhWQVXJd8pTAESqjZGKvL9bMXeXLe2g5/CDcmAfNSAygrvLVMIjFjX2iurdJZmSC6iqZv3+rTCNLg3cpZLGCMzBzU0B3NTdCRPs8juwihkQGNi97PowOOEr6tMS1SwRd8+56rvBzJHAtJhZhXbXOA92RgIFm/6yCe7r4EBQdufrMKWaGAC4gkUkMn12vXm43A+ZGmiHafUkcFaZTgB1Z04plfVAqUFmNKMQcilNJGogBAyEbHX9JiA0c9CdXxtrVhMiIRIhRzdxGdt2b122AXwG8w+/9fg07ZnBCLq/tzQrCk5Mlos61ya7G1dJOBHEGFVzG1Izi+iwXC43m01KzZOnz4+PDh6ezmXc5hw21rTLeQj05OmHLy5egBJh2ttvjWy1XbUxdbNkLkRsou7OMRi4qlBg1UUILiME1Nj2Z8+B48nP
/dxP/Omf+3zLOtw816zdbE+5jNYHTjlvIyeiWEqpbD6atvM6cCDAeqCZCG6MZrvwqTuVLIAd7h2cnnzq6vbC6cYidjTT7cRoWe4nL7C0B9/7zjvf+covfupzr9x7/aW9Ni3mx11zMA5xLEXFgB1JQO/4Rd/XwkKVQO6MV4ToCGTovnNGTBcfu2MlnQTIWpwE6yzU3UNQ7nyJM2AoYqM5AjmTYS6YicBEUClRYvIsolRCAy6pa+bjerta3Ty8f/y/+Pf/5d/7rff+5t/4pRfPhr2DBkNhUqZQisWQREd3YGaeRlwFGSITQBi3Q5jRYn8eGh43tzfnlznL4cmDg3sHqwvWklabGyA5Pp6/uFi/uFgd7XdiNpvNxnFsY9qOQ4xpf39/e7OB6ShDY5YYQiAu5uhM3Ev2ppvpeDuU/ni+9/ibzx/9yEt7B4vLzWUbW4LkqGYSMAkJArBVUyQhgoEXJMyiBFTvTJhIumKFsAM2rYWPa/Ur1MAsBUJHpohTH8fQUcQIo0Mm1qxjsdLNtrOZeINk3Zd/6fdtsb/qB5wLMQ+Z08kcnlgxKm69jrP9BShv8s3Dg3TQLt5/8nGOuZtr0RKpMSw6ytHRCRxtFDxSHEpmagiiqjaAhGSJA2FpeHOgw5XNSjVA1xL+zufv0Uw43rbjNq6G2CaZKffJcpCQSULBg02IjjmCErcaHHFvOGjzSKVTNAIPHgwFXAx0ICTGYOx3JlW3qaMBQKk2D3fL9O41gCP/QNDKDswLE9a99sGtUmnRK2XL7uLnqz7YDUbpJ20ukiO7q5u5u+z4z0TkbrD7j4YC7uhGNTPKvIpqDO0TL2+yHgFAKajqMTLi5NBBpGEcmWsAKKqLuUYMRORgJqg7Jcy0W9QdDJvqkZ5ANAAO5mjBk5lkVXHb21tsV2vVKYbKtM6ncWdtnfKqRVXdIiIyZ3RsSZFjcCQXM3EzRQMEczSTmu+7y+bexV2BCiAGDz6UW3ZfpL0Pf//xTA+pC1e3N3v7CzBhN0ASUTRWh1JE1M3BiQEsMLZdQIcUGRy4Sat+CCE8e/bswenp8UHX9z1Ct8mwODoMiZ4++XDYbFV11nTL5TLn7XbYzufzhlHyGinZTqxmZszMsRGRFHOAfW/yMJSLj8KXvvSlv/SXf+bBa+3N+XubHGNMcZ9GW7tjw8HKOragpUejGLBJjRNYUdMpaqyu3kQVKDuxhgjMEOvkEgDqPLyddwD9yclsNaJiZhugOCpyR6bNoL48xsOT+az73NXF8PTsm/M5xoSA+sUfevvll968uuhzQY5JIVcRwW7fMJxApXXnr0RZ/ESfEqZITpjk3BNJhVFH65rWzEbftvMkkG+3q/FSxDah9YPlQRP3DBoBGC0TdLElKbnIlik1bVQtamJuMbSJIGHX327Wq5svfPH4f/uFf+Pn/5tv/MN/+A8dbLFsiowx4ZgH3qkVRB0RiWMNISLEdtHmPGgZZ20bce/29nazWoM9xZPDxf7h+gY0z5j6iHbvcHl7O1xeXB8fH4P7rO36vm9CVFEkb5om54zIwEGKuCtEBpGmCVCBbGhd065ut96Vw8X9F98+e/ntl+ZpUW8Qqt1XsyBTLEHNG4sAATEqIDOiIVb1HTIj1HO2ABKKuQMS1ahMJ6JiajXJBUhVeddG4wBSKEATwM2b0BzcXJzbgpPC3/+d755db5ddd6ljCjYOQ2FJe6/umfdGSKnt0ma7GbIgwLvPPsxjd5tXWWTII3JwKADg4v2wYh1G9zYFDAo6uJgwGIhFAjbGgDpKFxxyxVXUrgMioilVthXopkmbtr9pb54e6KuXB8FwXiDp0JMsV+18e4/LPpoigHKvztr0QYQUHZMDJsIRNCgaA4Ao98HGAruWiO9YLohYNJhNkphd5ga4u4dPGlbRHbGKJglNtMKS0FzMEJmRxHMth4kCIoqYuzpCyRmRiUiREAV3s28GcDcTBQZTrfNAoppiqrXVAAAC4KiAHCDgLr+shr9MocZg7jgMU91d12oz0wLMCGBSiqoW5hjC9Ht3q0g119RGkCJ65Q4DIITdUsJqNpJvNKPTPLUUjCKVvKl5BDYpWGsvyxERmLJa4IgKTb3y1NtIewgIruCDqjg5MoKbC/IUkgVqO10SuFogV4uOjFzmMX74jQ+phEWXnqwuTxbLeWhy2SIRikUMWyulFDEzJXFghhACsaFZS8kNY9NkFUzh7Oz83unJ/mwu4y3B8nbr+yeHTvmjDz/OveShdPODxTxtt7eqfnBwgjCA5YbRkO/efyJWdxcjQMozTNsXZ/1i/tK//T//l376jz3c3D69fOqpu4c0omUQStw4KJo31I6lNDHtzdo83mz7pwJ5Pp/v7e9v+7ZoqVcb81TsiBkjINUsm51BghDIJW8TcrGuS8fn59812cxnR9S1EWNGatp2e/s8e3n0xTewlO2z8N6z1e/91vNhjb/8i//gc1/41Z/7C3/s9OTB+fmKQ4v4yWnT5MfW2pv7hEMS7+JpKpjTteq+qlNkNA5NM7pKGZYHe9e3+ju//Xjom/MoKIcNxJYvD/Yev/n6wb2TV3ycaRnL6BwDplRKASsRE2sjTWO2dRQCbFsw6fqb0fj2L/3VH/qxn375v/x//qMP3n88W7RKQ5NmUjCEImJISISmkwKCkUbIMYZoUIYxNOnw+ODm6nZcDy/yx/cewf7p6fWFSa+IA3o5WHbMfHl+dXrv2ETqnTJqcbUQWYSyKiNRDK6Scw4hCNyA7FF00YxO+/P5ps8UpdP40beevfz2S4PeAkDRhtEJRiCslqSd+gUUFAhcsMYqIRoxmKGDqGrAOOlMaxyNKhMbQ5biznVSJSJNjExMiFqS49bhViWI5GbuSrrYX/ydL7/z0eXQLZrt4KNu99LyckBfn0ffHLaz57CmXrhoRt2KHIX0/tkFh73r1UVsZxDSkHNsPWDjIV2vb+8xx4zWa+xIEDA26DmoM0FWFYZC1s/MSRjwE7jdemMjOgoz0WYMGwLeRjXCbpiRHkZIo/thHwt0hRJL4RG2qHuIV/NzGp/cp4fBul7WkTKgEWrQ2OZlO2LYbm6JqAbITdM7AACXmt85tZhhQgU4hVF1Evj94MMJ0AwUdjkpiIxGbH0t253QKzPAHc0V4qCluKUQyKraAYFDqFhFIERUdUaajI6wU8UgI6LCbmZrVV5pEyTaTMWIiCi7RZhcsgIADMGUlMRBEN2QhiEThxCYGFgZAJyBiBCddkC0Wr4BAFXbN9XkLWcDVe1xsQY/nMeiq8JSwEYOAZQzGA6DegEACBHBeSziSs4B1+CBqY1dYFUC90r870wF3MTcLBGRCgAIoDMG9ygwYhjBG6dsZjMOl+9t6WJ2fDg7u7k+mS8jx82YA8eso4GrmbqjR0IsKIjQBEyMgJRCBPOu4aLFsXl2dn50uN90Cd0djzdjPz/dU/ZnH70YNtnM2mU3b2bj2DPRYsYu67ZtS/HiRsROaKYMzgZknBFD52XMH31Y/vgf/xP/+l/+mW52cfPifaZmNm8wXwEYpVgMyLetNwK8oXLv4GCz/uiXfvnX33v
nxWrFUng2k5dfDn/kZ/65e8efvbq8odCCY70MWBvg4gZI1W9TaukMBspQgBDKjEJDBxslTp2hGJDjxhOUgPNOxxfvvPTqD//6r//+i2fDCCkcNOgHv/M7Nx+8/+Wf/dnX/+hP//HzbRhWYwAKiQrkohgcEiHVESkvRt0C9JFbNAbTmjQEQES7hHc3Igpu6FvycnDYrjYvfut3vr44fvW73/zdo9nBy4/2Bri3KUc3K/7m16+enfzq5z712XZxf+iDibamEUV4bygSE7oO4M5ISK7qyDly4zY7f//9Vx8c/of/m7/49/67r/3Cf/urHDG1uRZbGBQBEZOb1BgUI6CCRE4hqKqMOYS0t3ew2WyGnJ8+fTqO5WDvfg9JxttAWSUfLoNaenp2ef/+KcgmoFrAIpTzeLS/v15v1zkzM7tBdeJIQh/YIhgDQHbFlvq8WjRLHPXie+cP3lyMoApc3IwbBMAqlt9pEVxRoILTzMxIgEAxUCJi4pILKEQIUCMOILh7SLDKRNi0KGLaxhiUkJHVKIxF29GCN4IkaYTT5Wv/6Lfe+d7NRSRPwDnI/tFeNBQpYtSPG/MQxzJKGNX3w6z3G4nWUVuYxJlDVk4AFNEteJExprnjCoOwkRUFBqXRNHKwAhoAFMkJtbPb/ZtWD9nboKjWGBWEjBqq6CqNdLx6eDsbEHvX23vPj4v3Pa5bX5wdn3seFtukvGYMbU45jAPdzn3uZkorisnFGJgCewkPzw7S9iB8fH5DGHASXdZDZT18lmlx5x0Ot2amkH4SVjldxwATCRF2MNraDkMzbNHQdGo9TyW2U/BSwBSBSIP6FAwNHsnrHLXC3BGd61nB0137Bb8vzkEBrQ0BBGBAdDCzSpszGyeRft0YnOrO4q7m6oAixuQYhBjCXW+HEGvkKxgABCBzwUnpFXZ4D3MOVuR2GNVj4sAYJG9syK0mdy9q0+6ApOYZNAq6uxEAOQMmwoBTnWIW3MG8mKlDhBpqrRC5HVWQDAKBEWoMjvXJF11z83hz+fH5a/dfurnoF00bGGs/umgh4pJHB8yjoGFWg8CBMURCgBiDm8U2jQJIzfnzs5O92cneot/clpS2Q5kd7AWKH37w4bDemFkza5r5bLu6ionnXaeSmXkYhqZpTLR4HxGjzczHElRxbJrm4gnN5qf/wf/6T33pxx6urs9uL8qsXaqNmp1DI6ZsEaEnWmTe5BEePjj42j/9xV/4hd9WO3E8BKKDowSJv/rNzVe++Qv/6v/kT779xZ8+O78lHhiTytDNyabPpB75qjORq3qKwJ18zOv7p3urPhWVEOYiYzBbX390c/X89mbbj/bqm1/80tuf/tvf+Q2Se2gu2i+XS9PDv/5ffONXf/edf/Nf+YlHL/3k0xdjP5YmxrYahRQ1shciu02EAMlBjZKghTobmKJpJvuruQfwSItvfO1bzz4+v12vM1xdXVz6Lf3u7/z+6Z/7PKYnc6L2wXGfD5/fdme/9969e9/+0htfivFko7gti1kjjRUCU1KCuTu6j4iIPjMl8yGl2Wbd4/Dsz/6Ft37ohx/8l3/9n373O+8vD7LnQMgOo/kQsHFDpAJuoYmqbiYpRUcc+tGQ9veXm9vVattf2pW7nh4/6FeLzXrFhDmPy8XcfX119uL09NRVQjCATJRuNrfzxUJWVooCB9USMRpODaoqokPExBwaWt1uj0+WT58/b7tw/Km963EduMFdmSgizEwIqopA6JBNojMACQE6RKlpnBQCqBvqNPlAMtzd4wZaT9g1XRvIgYFx1vc9OLuwRF0eH//9L//jLd62bTteP3342mdWG7ilcbM6dyNx6yU71cLIVLVroxuoIscwqKqbKqUUiZSZy5CpCCm5e3Zn8xiIHKlY8QII5uDAZmxQCtoQR4BJ2H3HbsFaS/q2U/7Ui2Nwenb8nU072DxnusTrk+T+ZL9v+nU75G7bPbh6OOshhlmyppENGwA2WNhBgXhrEHmz3nuKTR9GQQKrbYmaYVujlgPEOqxC2U08a2+I7/aAKTQVJn4bAxAa7cJU1cnAjTiCA0MFL00kYCQOCoFQybDCUQkdCMGIy2Q4Bt7hbqqOeKdSqP9q0+IOkWuOtUP1BtZcdQjODpM7yT1VDxCCC017FRGZAiKrFgCoUApDQPA75RAAOAQzMtfvh+0BILApmdgIYBZi22S5NAQBIYuKYIxg4FaxvxO0VL3KWzEARARCFxcVc0OvSkcnUwdmdwxckAwUnLD46KpgyMBqniLJdbl+//alg3sXF8/bdDiLrF4cQVWBsKias4mDoROqehtCjMiIbRNcNMagMAI1j58+uXe0f9BGGHLkbtPr/PCAmB8/fjz0PYHP57N22d1s14t5CCGY5xAmkIuqgukyxb4USyv0ljEFaM4+Kj/64z/9V/7Kn2rnw9XZUwaadclsYGamaOgcjAFZ5gbDaHz/paPf+Mf/5Dd+2b70w395cbA8Ojwc1qsPP/z2d9/7RjPfj/i5n/+b/xR1WHaLg+UrIcbIh9vhtg3xbiAxnauwhi8rEDEyIop4itHMCI1Dm9eD3JTxunzw7jotjlZrOdrb+/yPvPTOO8+GbZSBiG5iA4eHr7z/7f5//9d+7U//hQ/+5J/5I8NwtF3HbGNDEplUlCmhNarrFJciZLRqwkxMdo323VAe0R0zc3Z/4+3PfuHHfvzJR+v/75d/cbV5/uqbDxyPn3y8OT5w5v5yM9zI2cHBy6L77374/scf//0vfOa11+6/zWnZ58JMYG1EIhZVdZ+Zg+MAnFNIovWW5efPnp3ea/79//Dn/u7P//4v/PyvNjObdd0w3rad9/2WsHWL7kMRIwrVFyYiTRtVdexXs9kCgIYy3N5capGj40cN7G03q4bdZTxadreAZ88uT+6dug+ITmYhhHW/XXSzla0Hk9A0ZSzuEIgR1d1CVRObB6TFYra+ubq/PL74aNPtdd1xHMuAEM08xmjiVQ1BiEw8jiMiC6I7kAFADcRCF6j3MRE6Vnoemqg7KnoxbYNzqD5oN3ABDUTM3LSwHUwVCunVmMOCy7p3uf4X/sWfuD2f/b/+u79lqRHVzI5NVMoOPGQnjgcHBx+fXXQpNk13dbNWdQBoWlYt7i0iI1F1OyLCLk0FCGKwPDFCgBjYQZRxu1R4DqQIbA6OFQuNbihBGkWfj3h0e3y5PwsFbmZ5HW8fXuwh5U27RtlGk6gzGSSnNWJRREUXdAQFyIYBsItWPPj5Mqe0DcuuRayRKLvM0rv5auUKUBUIuEHFMu8sS76bLSEiojqjK4FPcFSjCgdGN9IJGOloNv08u6iTA1X9iVd8q7oxpru5qLvDnfIMGoBJvV6ni7Vya1DcADkgsFVXEpiZBQgOAqRVu4JOgApgTV0KauEfK+eWEZGQ70a+u02ringaD2qo3++9ugE6GnPTeEDG1FSrXuTiIOBG6MBiiuZB0RED4EQLDwz1wjNXVSF1jeaZKZoFRDTMBOAWNBS3kSkgWSkaI4OhiSNiC+G9bz85bJdlzIFnzMWdHdxNVI2QpZgpihoiF5e2TUSArjG2fd
8fLPcAYHQ8e3FxcrA4OoiepZTYCy0PToj46bMnt7e3TNDNZ13XXFydL5bLEJwZy6ipaUSEOaqWEIJsqJ3FXseYvF9Jf5X+yl/5V//En3nr+uKj2xuZdZ2DqOQmzksZjNbuiKYIRhhd9fTg+N1vfePLf/fr/8v/1X+cDja349q1ZeOjBwde2tuLoSzascx/+9dv/uSf+Ewu2zTbGy1DCqpSD1kEaIiEjIhmHogAoFqgs4wUQtM0uQyXN18Jdtnt4dl3++2QXvvcYSlnvuU33ny4f9j++q+8H+O+FpK+qF2f7B9ZWP7t/+r57/3af/vn/uLbb//4j643Tb8ScWOIxEE5p/YojxAadWlAPFBFRu/OpkQViVdrBkDc9E8/87kHMf2J//rn/15e2/GjQ5VNr14kjAKzJL56h5r5/vFnNpv01W/fnj97/7Nv7u8fvrruVUlApBi6xRgRuXfsvagqAxBRkmINN+PKh+3jP/+v/NhnP/fGf/af/cLl9UfzRRpG4Ni6ClFwj2S7Uy8ZoAUKjGimUmzvcGE3ebvJpr3B0+Oj+xSPt9fPEqcybJaLTmB4fvni3vFRCnPD0bREwFLG+Xxu/eDuiuSiHlHcyN2q+KC2+wNojuM4Hi723v36k7d+8kHbWRFljiJi5tVaqKqOysxoWMAdnKDK2tDA3AyrYI6qLA0JEIjMKlXVEAkoEDm4iVkAEMwptGNeR26Y5464OJy/uHp2ME/7h/s/80d/9Ff+7gctz877MwpkoKPrYm/RP3vWLhJxKyLuipwHKUPJhuCGHGBS4yAKBTEAQ3ZkBwMsaFQ7nwCIAmAIwqCG1HdgXKg0BgAo5KGiohxt5P2ApeFVtNm91XHqPex7VDIEA1xs92dDEI4a5Xz/Smz18s3C0PtmzGFopWMURkBTF1gnu+nsMHtoUuUqAznVuheAHIEIPmHln64HcKyL7Peb7ndTaXZ0rDafyYs0gfkJudZYtdaeuBwWgWrUPNrdwQAdg7RTmTvdGJP2sI6nzBUAd68JACCJAxFSBQ1D1c76NFklYAcn9ypUQwdD4FpS1U0CzYFrV8p8N9S1O31G9S1RwPrBOSIBOvv/j63/jLIsu84DwW3OOffe58JlpKnM8g5AFQpAFQASIAwBESJBECIJCRCtRFGGkmakkWkjiVotqldrZnrUvTTTmmGvaWl1j7y4mqToQZAERRYgCiAKtgqFAlBVmZU2IjLci2euOWfvPT/OfZEJtd6KlSby5Y337rt3n32+/RkTQyqI5tp4ch7NiDW20CZmVtCoknmN+TRliwQDQiZUhN6KvZ80GziF7I2c57gAAJ05B+gJtDMHBaNL1gnAuCqvv3CzgiEiLJbN5uZWTPNowoQiQkRdm0yp62KO8kI05yiwY4IuNlVVJFMQOJw2k2ptczQwaaLhIkk1HoOLe7duT0+OAcCVhR+Ew8P99fE4OJ8sSlTPIUah1aIupuiUDYfe7+4utzYf+Jv/4Ifuvb/bufFCCOtl6VWXAOJ86FJDTOx8086qMDZQ4xqbamNo//gXPv74E99zJIfdjYPKFWDWSn14dHLpkXPu0W55As998cp3f+hHKdTT+V6cz4flZmHrwKaryRQDr3Z4BuYBE7GmtCirIkUB4EG19rUvT2fz4+XS3z6gcjSoisntmwcXtkfLnW9iKu69d/3q9UUUMvHOQV3XoZQz58KyHv9vP/vVBx9+/nu//80PP/q6LlZ1Y0kWwO3xcndQbizrwaBYF9wjHQIY9RkMkDlnpkapZk+aCobxwe7egw9ufeQH3/drv/5JvCd2CUEL1SVqB9qcObt5PDt/cHLf+lql5Hfqo70vvfL6h68//uAT89kZ9JmOJgrLGBPhwJEDNTFFC94lAAWuUtKbN67c/9DGf/ff/fj/9r988rnPfXlzm0yTGJBERC8EOSMwX/8GoqaDqli2ulgsRsOJpiZ2sV4s923nzNb5wWStnc3QV6IynoRu2h0cH21vniMvpVlMksBS6sZVeTydUXCcPAGCIFDv2QAGgJjarqgG88UCpJuUk6tfvfW6t99fF03qIGkyIO3LDYsqs0sglG3dycyA1cw0sq28Zk1NbaWvdM6llMeTjGaWQBGYiQjNKAo2TQw4llYH64O47Fjk/Mbw0vbFwuneretoIMZtbLiEnb3l0a22KCux+mQxje1x4bnr0pHOOxVmUAWD1nnK9JCYQBXAnEonJi4gAqkmIC+ZfmhGkJxao1oXbR3qQTPsLxBDQ2CFZOitUXMnpTjASV1WSWJHQhtFLFCL+3fOhe4kSNGVbednoV6qJkQ8GS2bYu7lDEBInFycKw8YaSg0jMH5ImBGfcwAwCHmIsdEK5YLrXAFA0NcDTZz49z3AajeG/RMSkbLmIYAoVkFamiABIZJ76R63NGRnxIQAQGKOhff/KNNKSdbsua2iDNHsHcIAILAiNyPAZQAgNEQOQoCEKIDYlBbEZRdtAhAPjs/Y05Jp9xbnL4MXlXYvOnL7BozspzbAPmi04Jh1kZmDgXUSzEzZaRWyIAVk6kAdaAGpIxZntSPq5E1q8OyF5IEFTNLSAbmyKg3zc+bEmBGiE1tIMNyeHz9oDu2tZFfNPOiqlJsEBy4pIkUUEVEVJKZoZmSo6LwhWM0c8xFqAyB2e/e3iOC7c11SLFL2IoPwxEg7R/sncyWROQLNxwP5ifTycZa4VhT8hxijOhYVTC7/RASUVGGmNrrV6ff9vb3/IW/+MFk1/dvyXC0GWNUAeag1ih25JnAqbhqsGaJjOtW3YVzxW98/Jd3bqYn3rYRKmraIN6izAajtdetPX395S+e7Nx0EAs788lPfOLcRXjg3vtStNlyanD7zPnHpNcS5UKaP24UaxEw73hUQBWixO31dYSt6cn05s5sPNkej2Frg+sZvXZ1F9OFg/2b2+fXbu2fNEvsuoWDygHUqQGtiiGPR5tff235H3/6U69/w5c/+L1vefCRRyfjwWwaP/1b/+b93/Xt95x9+vb+zBdDUVnZpNDpKB4AyI8EozrwWKU23Lh9eP7+ydu/403Pf+MbMdpxO9saozWlFWS+nQxk77A4lmh26MsFUPvsc9/YvX377U8/Le29poIESBZCaVKikWHnycfYILIYEM8RcVSuHe/NQ6V/6a991+/+5gP//J99YjjRYtghVtKiZKiX+qlA3o+2besCaYNtY2vr4+n0qOsSQLvXXT5zz/2j9a2TwwPADrSdjMJ8Fvd298+cGTry5jBJRKS2Xg4H5Vw6MtCEgMrYO55mhITJCbTVuGoXsSx8WvjrLx1cesPWEpNnp0gxipkyezPrkrL1YaMECAbJFMAcoWbLX+uTIQyzio00Z2MjglF2awEgM0VC8h0CM3AnM5Whs/jMk/e9/oGzD9+3vX/r+PqtK4JyMlvASEe+qJdGwJAigBp1585feO3aflFWXddl83E1YKfYt6oYgCGT+jypAKCRqjeK1pmSEogRADtQMpAQF9ViY7kB5jNSktMDWUkpuogCJSO9NpqlVtdml8apcErLIIvyWKCliEohsa+H3cZsWrV0UjhBZQMjr
ls7l4cnjj/KO1c45xU60Xc2SIBs4AABRRUY0QxAZ1LWamFJM577p+MRjXW2c2f+qf/Jv/73/49MOPvAEAnn3+6W/78OM/8Ke+f577G7u3x81G6rNnV2Bq3ldqceUafc86QWQBA2PnbTQYaDbiNGrGJiFJrAMhVOQnsjgaj/ncxbcJNYS1nhz3Xaem2/ddHV444zxbluACIhB6AOjjUgUC1YskaImZzRSRi0xzSsnMiLF036WYL6Nr27YZ1gdL0WXbp+OTOJ+3Jr27s3d0+7CN2VQBNNbsc0qVmmhyzklHiFjX9XzWFfzsPb/gqRzbqtgFBFXNtEqqiCjn7LzvU4LccvAqZFirqBGiJXJr8hyQaraVlJOYmfcODMHBaliECKrMnHNZtx5RkC2bqIKY68xMkQyNKCEkQvJACjH1SD5Ga8YbX9/9vWtb7uzFi597+hfPX7h6cHBn+0L95Ev/OeIzbN9yNP26GxyfH59/+Ny7fv/lL3bd9fsufUszrG8df/7ixSc+/QevmR6+cOPj73nXe28f7R0dPPPh9/3xc6N3vbD7Fap6gb6Yj2Y1BkZH5BIHcAPol6lfsIl3xs6hykoHjcRclh5QPA4ARqCkSFR8bTAUN0kqczxCQCBMwBigIRiMag4VK/bdchH7WRQv/TBx3eNykdCRIrZtlx0CM7MvsyQPiGI5S+vZmd31mikoeDNDUzMiZGAgU5Nolsko8NYyTXenMtq84ELVOGIcuKoNDl0AZp+ya6p6XNXDwIGJIvZOd4/3bry6Oz/xW+HccW9GubK5bwZcBUBK2mUBZ+aYUQ1klXAhYra8TJaUHBFiNhMgiElOTrpMfU44HA6LkFiSrKpi2YqjwNrAZa3SDl2fuihcfN6YQbQsoJWMxuvlBu/2Z9TI+ZSyKnjPxCgixB4w5xzNpKDjyw43U9M+VKOYsmOnEbe3zzx4/8Xl7KXKWdJErIDzk9mzm8G7FNo8FGbf5nFTb2yeQUbYOOPzDGRGPELWajBLR2TqtsZnHnoobZ2PfnPx0T/yx7/+hV+48bXfvPzod/mNK31/lOMSIMCpsUWZI8LKtarY1+k6b0crFQlk6dlpzh1zyCkbQF3XmuH2jdfe+OYLf++//5F/9bO/9Jnf/fKFC1cm21vNBv9P/+zXPveZr/3oj/+ZK9eu3rxxmz2pUOWDplzQ0QYmRSeuuEOukdGnYs5mRsV/bZX+mhAomJqxllFq4btyMemzFQ1YYrK6YsL6+Ojoh3/4j/xf/8ef8+Nrs+729HjZ0PGf/4EfPNw7iTnXzIpc0hNYU61KnUAMImrgEKEa0mIxv3jl8uHR8Y/9xH9383r35jd90/Hh9GR+/Yf/8h9+//vff/PWHTHf+IaAK+9V1VUhSZ81+SK7cI96ckFemgPp87DRYbWdun4wBu+Glj3YEtCRG+Vu2fc3zZYni1qDDUmXixmbtn0nkmrPJCYSu6zeU98vmbxBRGSBhJqAyAD7nMwSEYFQzoJMks0Uk5oUa0Sytp2PJxutNjdv3PJuES1PZ+7oqD2cxuPePDuCqIDB+UXsUu5rqQO7Hijn7BwnyboSW3OrwC0r58JSARutvtJVqW93Qz+qQXQQGBFTXg5qL5KD8ybcQyLPHpHWmK0ouU9pJX0E0ovmlXYQKDGRSjamANCmpE3DPnC/TExerQcgwtIKZGY1pAHWC7UuzdCdOcp066XfVk856ksv3xwEQnHzWX7LI/d//sXPHPDzVg329m/fvP2reSxqdPP6r9QDgJ5vvvrrO5PxLI1eOL71zO4vGtr2xsaTt5784lO/7yqragAFNiMQhEjIPjRZo2NwDZF3RmIElsRUHaAikAISOAdVjIssXXDmeMOEBYFQCS1KFkmInMUFDsygrOiQnIL1OSdLngirMB7X1TIeL/vnPD3swk6SqEqMY4Ol6UYWQXK+qlCdgRhj5SELrBQAqTSpC/4JGCGlxC6YmUg2E1Mh50SPnjvWPRuF4GqNGBW9sy5ahZaSaTbDPvVJ+lZ41NTD2hiqB6+e39p2t24c7rbHZ0YD6fdEecDWeCIkyS7nAjXJYBUAmhWjbYuGbVIwt/YaUWSeLxfH83a0ySla06Boiw6TioHLGQycFr36gvBXNYAyQlDjwD7niGpEjhlFJCdVhZJallzPraKSFQUCZu5ir2YhBJEEzEwOwEQTrp2JVpNYQ0YkSd6w8X7/YPmmN79xc+Dmeyk1ASF6B7O9+defuT0eN9ub4+0Nc8dJd7g6eybSftjIXR/rauwxKr2EuKUdjQY7lx9YHux7GMvNO589uv0bo+adXA1St3jh2d+4+thH0I3YgxgqABkBARqsKWmmqGvd3fLf0h4pElveVIKvVAQQmVQseVcZ8snBwtfVX/8bP/Sud33Tv/qff+X23o37L+888pb7X7nZ/shf/cm/+MMf/mPf/cH9vdm8nw2ohlMIIwJ7BwCScknV0MBWfngrdkVasVpOY+OaN1uyeUREJAE1zAaqYOgJkZn6OK/9xvH+4r3v/abv+PBzH/vVL7ztiQeGDX7vH/3TW6Nzx23fDDUtK+/MEJSLegwyIBujaRclVI0YivbzxcmDDz/yiU988R//5M+G5sy1+y+8+sqts+fp7/6tHzl/4dqrr+4675qKJYmJBQ7RcpLWBTY11AAAa8Dx6jcAAJPEAMPaMXAS3RiNmQY5RcJE1cSM2+WRC10SHLiJH4WXn3m2bdtu2cb4/6PrP8Nty676TniEOVfY4eRzbq5b99atKFVQQBKyEUFksE0wtl8w2BjTksHAg4HGbeyG1+7c7Xa7u+m2u20M2AZDg8FgLIIJAiQKoVKoUpUq18333JN3XGvNOccY/WHuc+rKz/vuD/XsqjrPCWuvNeYI//H7ByAc9IcxxtB1xKVzi9bgZLrX7/dXV9ejzNEGJtqGZKLOObOUVMg7EJCkIhkSImY2mc2KXq/q925sH5zedE0noxEcTbpZNlETYTxREXEGoHtmZhaJ7Dn31r0vxFLqEpMnA0PULHACOTmnITOZ79lsIiJLDdmwi5pSIUIlGqCqpZxSIKIhKxiyJ4pE5NgkqrqUVETREQMoEDrnTEOIgqqIZEoEFkLqcXFolhBIDQWcgmMIoGPUQBFo2KgwpEFdCpJAMu+dVp0dLK2U166/jnatHlRtSOa4V61O05F37Lx1SoVT0uEcjmJCx1D2hgrStNOXr326rLpYkCA45yQBqDDkLX2HjkXZDLGA/ipCbdJgN9c0NyMiRF+wU4kFVSKxS3uRm6rYcjBQxajjlMyUybGYJfRAJUFBOAUxwoLRTAFIAa0LWlLR4s3QdSinERJZj2CdfAllK8mMkLlmhyoAQIWrp213XIJlqtMCuZSNc44JuICI6FxQvLY7uTHfpP6QNJiRMDjXAnFnGPOuEyKYM1FpSdhZwYU538K54SZs2PbOaBbAo0cHkjXIgKXzlUMxF1TaGJUQFE1E2QJRFPBcqCUzMeB5G0ezto1QmaaUhr1+SskTp6S5u5qxwCJmyseWyCRis+mcqSQiVMzowZyzixw3Lt58aV71ZCJGx+za8Q
zJnCMLgujIFyl1MQgCl2WpKsSgqgSoVdUFIfIdyVETH33ygVbHWJY96LUxqXfcO/fIW89dPH9h99Yrf/IHv/7Ag+cne3vTvfLMxplqqV06NaJBdBbbCK6EolyfTTqzfl3octnEVd/eid52Vs6dblcujHYPr736dKuDzbXTa1sPmmU2Fp4YRuf8eRHZcUH70hPwE7ICaTJEFgveeTKXOvRuBXCmMdy5sfO2px555LFLP/1Tv/zxj3x24/Sp5dVy0Cv/xT/9vU/88We+5wf+4rnzp7dv7TBSwQURiSSQEy1p5pK+ae9liAQqmOsGAhMSw+PVGOU8FmBTw8y6ADM0Qp9SR+TY+aZtDXE06v7ojz926YHTf+9HPzg6HJvI3aMDcgVENk6iCoiSyQtkZCgApOaL2kzabr68vLy6eu6f/J8/86u/+rvnLzxYoH/1xc9+4fuf+La/8ldigNvbN+q6D1qbzJiAAUUjMhiYSjzeeIVjfyvLqt9MmusVRb8apDivelz3VxVYcFwUdbSqcMQy0iR1f607Cju3b0wPx8P+oEvt5spm1asPjsaM1XJ/kJi2794aDlb29/dW13ptkFkTupCQYpBkCASgSWKMSU3bQIBmmNd0k0ZVTQJHo8m5c+vJ/CTweJKOJt1MQ1JBAxUmJlAxYCBm77oYBn7JuaKNjSfy3nddV5YDMFIzl00T1XKetGiHHwtcASBhYmTLg1ii0KlXDiEokhoQFyoG6POMwLLBiyjnZr3DXuGkDepibvk455RjnoEhU9RUMBL40IEjlAimiRbRSbIIsijJzKqUj4lEMVRV1Rw2RpUi67jBcl5VPVUySlRW427W56KrYN6MvDOciRlzpRpAbR6KyidvTUjaKJh5r0AIRY2gsVNCRTYDIvYOzAtGQSAjBkIRgFpd6bFPvsX5WCFoUnHz+e3aXyiLNfVjkTa0o8K5oihUFLVg6hOTIShHc6WAx64iaCqHybDtBIi8c0pC5ld6xai5Pp9fdW4glGZRLS6hnC7ckMv1WYoKaGSEDjqoyCERoCWRzDEDFNUkSr4okiACm4WowsyH4/H1bojlgEKqqIhkSmzBA0djcWAICRSUlIgSQqepH0z7aRK6pbh0/9opns9uH2xjue41zDUedYCgDtSjcvY9YgEuENFSFMBWOagysWjmPtHBeDJtAlA1b6aicdBbNUGzFOYJRAkFLeT9OQMwWzyEMca2DYZVDOJd6TyFEGKMuZ2wuGuP12fwWDmDiJKMyDVNU1WVgTjnVIzIte08RiFg70qBjghMBNiHTmtgR4Vq61356IOXmtG+Iw4w5rKezyZVkVa2yt/96M+//MJn3/nEE+yKvZFQ0fX74dTZB2tfhWZS9Jd6/U2h5eDLgtZqOnVn9Gw3bWejy3s7BLK3e7AzXKqg9QTdxlZfeR/xipkpGiLzohdhBBretNRABEPKVi4mKfii6roOCQDNEZmJKqHvEJ1nHyOUhe3v7pV17/u+/y///ts//bP/6hdxWm5snqou+s++vP+Bb//vv/u7vumLvvJP7+/uNbO2dD6z7y0JY37yaFG5Q+6OKCJ6ZULSvDVhamCWPbUzzRQX4zpE5YWfOxRFEUJCqAzblfX+S6/eeumV3b/wX375eLyzszfCYlAM+iwS2+Qqr6ho5jLdLPMOECIpW9HEozMXNiej+AN/6+9fv3r40MNv293fPjwY/82/+Rfe876337xzF7Ao+0uiyXM0Je+8pWwdLI79ggaNCRGP16E0a0YADJmcc47rGA/7y8tEvRAiuo55icul8Z0bND3sbW1iMYzFDMvB1sXh4eHh6UvnAa0LYbgxiCHEtlNz/WGvi/Hyg490YRpjbAMy9RR9iJ0iMJomERFynBRQIiAn0QQZ0A3ki9Gk2VpbrXur27t7mnjeSaOR0Hv1RqK08EISMF+Vk2nTX1lG4szo9943XVwCyjoCMGOkpAkRUxKHlL2XaQEVWLgtGyGgMhemrYgAOhEk9CmlonALcyxVUBOV3AUFUUtSeNbQSUoAYKKEZogiQRS9x3xVu3lCskHVb9oQkjkCAjCESJBQfFk5sNYFiQTiIUmYzd73+Jfu3dz4zLVf+pL3fB2FC59+/dci3Xzk0lc/8/LH1rfKU3jp09vP9ns22pH3PfGenTvh+v6Lw+XAWONcupEIlYJQEjjCRmaMiWPfEbepFe53gj1XkTdBKxiSJDUBAgWHVKgz88lVRUUIkRyYK90wpDtBeFBdLPywld0oSHETqXBFMDCEgg05EmvtnG9JzQStJY6+1JBQnAPExAJoVVWBWlUOvGeDlFLqwkspLafQobsPqN/FNuGEnElXkWNE5LIwRwAGypxKtmnTga9QumkS8FAezMPzo77SOqaGvZ+qoLElAkZiB50ky6vRLKZRIvHyfFY74tUykUg3n/mhP3XfmYlgVQ+6Zto1qcCaCCJIpA66YAEKD4OlUiSAh0A466J5Ru0INYBNmm48txQ5RRQPGKWuSKyJoAHmCj6hC+jQavBNJX4iIuAL7drUHnbcRw4aBUyigB5LSlQWy7y5q6gKTACayWHkUgKMwv2aMUkyKou6JJqEyMxRxVBNQYUYHQI5MKFEZtoNV7do7VRf5oeIPbChxrYurPTl1Ve209z3qo3rN3cHvfLMqfXx9Kh1m9VWlQTqpfNtmsbR3nAlcloTv0Wn1s4sl3btldXWn7v0hXf3D6Ztk1SqpRK0BQjdfIaKqApmzCaSAIjICZApsimRRgxIhIIIlIAKqqUVRo+GSeD4Wc0hVkHNcwGGVQ9SSjeu7bz7vQ8+9ugP/8Q/+/mXnn9tfX19a2NF0vI/+l9+6SMf+9T3ft+3LQ2rvbsHhGVmW6UggEaeohqTR0uqHVNpUgAHUTQFJEDHOXYCqmLefM3+3BncqKrqGSQJM4O5tonnz69+9Gd+dW3o3v7kE5Nx6Pd6pqadITtXkKl4qBJ2QmoCBTvTBADoQML8/iv3PfP0Z/7Rf/vTZb31wOVH3nj5+fMXNv/6D37fhfvOX796vSxLJjYxNUKHDsG0USLCioAAApOAMYIBOkFSTYTABKqkil7bpZUNMXRFOSiW2AAhAtVardi4HR9dX1sdpKjN7O7kcNpMD+vKLZflbHS4c3i4tr6xv79/69adL/iCL/jIR5+uqh4z3927Mx6PVldXESm2cjC5vTIcOGYNKQtGJZmhEhVJGiRlgmYei2ogIlGnVHE97B1cb80EXUPRK1h0wiILT2YnDJKmzXve+Y5rN65iy9aJinnPbRvMIpgwsEJ+MpQBgSBZMkMyVFuUaKqY7YaTKpFCiz6ZFgDGKCSMhk6UChACRFRiS9YlTOooMFQOg/mSEADGbWuGloiNK5ZOY98ghMjosWMA0gkBDTXebVmIkZFVIxROhXrsZ9gEnJQ2MHE7R3d6vrq4drkZqcyfffDUE5985draW4ZDXpXuRle7B1fe8bb7H3utbmQ8efLyo739y9befu/Fr/qNZz50+tHNF1+/etTuSu9QUMFKsKLhqILMS0jJFyAQjchDK94ZORMDE0cKok6wdEUTQD1CYcRKR
VkTDYhoHq91YV7ROTWZxlsMnqFEU9NgyRt4AEkaibxBT6Gn5IEJEUII6IgICUuEwhcc4kxEEMl7HvbPDIfc7x8y7kAaF6h9Lvs4MC5EW0uhB7xW0LL3fe96/Q4LIEyaEhAWHjqgV7fnMfRMQASTOPDlTLvrd6+/cWv7cAQtuk4LTT41BmnQtWuzbll6my2v7HabI1o/Mrc3VtD61NZ6145XNpeLFZnhqLN5wWmAvtKiYvNUWIdr/fVTq1ssmDpRpA7EEKLoZDwNKQGymM3n86Kq+j0CSwgxtTO0pNKRJZMWlVNKIslSVNX5LEpUPG6sZ2nHmw5BuFCH3zNWJZVFHdp1XZJUOp9zGWYWWeiCC3a5r8+UmeXGjNnWezKZ3HfhwrBfhi4BABYzADDphY7uv3LxcHQgafjUk9/YzCnEodlwbe3+0aHE2MVuzDqsfL8ZJ6CkcQ9CIlhe6S9vrHB/0J4+XWysl+fPbS4tDVZWNtbXTp8+eymmDsnYoWhkh4TGpAgJszgCKeujBDDT1TXvGBEQATtEMgMBE0Re9FJUDBKRec9VVW3fnBu23/+Df/Uv/KWvGY8n49G0HtQXL194/tnZX//2H/vYnzx74dIlQ1CcJ0u+rIjzYgSKdoTO0TCpQwqm3iAuwLzmABXJCMssnrGFiuJYWEmULCF4M4mx6/f7TdP9we9/7D3vfXJlZaXrupOZrYIkjYYqMGEqwQrvvVo0ikE6ot6Zi+d/+v/+lX/w9358/dRGtVxeffXFL/2Sz/vbf/+DSytLN27erHu9vLlKRJUvNCZgAnJEzhAyQ1uyf2/mYppgZuqpkQECuNIVJSad9Hu+HvSadiIWynpIWk4md73DXr08Gbe7Owfj8RGhu35zMwnBNAABAABJREFUPG/4lde3VepnnnkpxhJh+Jnnrk6m6cqDj+/tNzdv7IHVL3722tU3tl9+8dqrr1ybz6IpqSAYZ7F5Stp1EaFQ8TEgchU6abtkVqaQzp3bHB9OnXMpQsGFI0QRJp/vbVSo63oymTz73KfUhJkROJtaA0BKiYhO5qW4AA6+ySE4lpMdD67zLoURIoYQUuhEImCGrCZEEGM1EnMIBULB4AkYE7lBHR0GUQWLMTIgmjnAlLwH1wfnE2tKiTuto1bmGNkRAeixKRuiAUDojKCPNExWCNrO4Y4rvYJr1a5tf6ZYX51xRURxTJjw4vDc46ff/eE/+b3Ns+qkt333uXUfhnr+2bsve4frS5sci7qIHs0aKKlljmzEpqiNw7YoAnEyzFlgIowADtkpJXBN1FlQAWdCKmBNiM4IwRDiqf6Ak83m83k1XCJou263KFbKslAVi41BJ1QY9Fl6AC4BAURCZbYQoqo6AjFiKsFpSm0Sj1wRM5uAU4NpjNdR7zCVZFUKpRa9GjcIoZWD2RSJeuiIxFxRlIhBzGEZYnNr0s54DbBqJHrHB0fjGMBV9XD5Qhfitf07vWlvfWUVQYFs3s6MVsW5cTcCWEYuGwOXmiGkqsF+1SuLw3mjy/2Vw73xzd1DY0bwfe8HJS0lVbPDeRchHnQBqPLgQWMDMp2Hpk2ysKHBNnZJrXBljMKeJCBYicqYCNTUHFpyjAnMgGdzWTjDkWUYbDYaBKPsXAimOcR74txIUFUwV/hqPhMDW+x9GDBzkpDRd2XhCLP5kBEYGCiCKvjKNd3ksYevGCQ1YlCJPaZg0CCV+7t6/sLb/tSffte8Obxw/58hqFdXeoizWTsBrBxFrqbc63s6HVIi3yVRx8N65XQ7ujsouWmOSDE0SQwVUcQQlbDQhfuWMyUDIyzUEoFI9kM1QkCBhTIRyTBz9E2P3emMCBAqgAQYkQUW+AqPyr2+gPLu7t4Xfdm73vLkI//in/0/L7/06umzp9dPu3Z25n/4r37lox95/ru+9y8S9Xfu7IKvCy9qCRGIOKU2Jx2yULEUAAAUDRJCmbdZCRY6bjUEQEMDhOxJYULIqCbLyyufff71G9en3/U9nxdTYuc0W+cBgYFjVk1My12cMqOoEkDb8dlz9x9Obv3oj/wPn/ns9qOPvfdof3t38upf+8A3fvH73n377gG4kgsPJ5JtJI2pYCeWFtvtlgAAkDHjtfOqVUZbYh4qGAOWdUUMonFpad0SCEhZ9YkG0Bm7Wb9fN3MJndZ1P4VmMp9vnD796tWrS8urZV2dPX9+7/Aogq5ubbRJfud3f//8+fuYeXv79gNXHrxx49bF+6+kay8XRQ/MAaGYxQTk2SGpgSQUIYXMkE0KFhLM5+3FC1uSQEQIvak5gpRMxI6lAQqgmSdT11XBfSJnlnINF6OUpUcJC39PzDr6N4XFOaZnVfG9Kklmjl2wqhSRbLYCqICqLIKkeTQrQiAAHcjcO0ZTM3NcNF2MBq2JY9CCHIgkddEcOHKRATEQSDJUUyRa2E4woaI614m4ro2zua6e89Fat5zuvnzrgQcvn37rN1zfvZPilFQePv/2Oa5zZyO7dt+Ft7hZWXt3Y3JrjQrlo9WNJ159rdk/uvXkk09++OO32VHpy66bqEOPRoRM6jyw5yYmM0PiUr0goI9dSA4Kpko5qUYAIAPvsfLOOV/OeV9D0zVbvXqTylHbHRblOXaQIxIYGrAmSBaRZizOXE+pQCjAUsFkCCFELgpQRSYAco6TtqwOkaMFJHMsddkwEUAFTM61nAqDIqUNwfOCdYQuF10Jp4O6l6IAu1Z4t0Ethtq1/WLQpSYazaKNjw569WBzbWljeXhw2NxNu5tbK0BoKm27X3Ko6iLOsayLlECt6JztjqcrUqysrIx2jgq/vrm10lkitxxVJvPDu0ezJQcrSxt9NNFOFBEoBgGUkGQ67zoxAxdSUIOQopoVTmIMjM5Ao0YzZ6pIJu2CeaSaRGg2FwCXiUhwz3z/ZIRKlHmZRkQZAwnAzEzEMbZ+QROTrCZO+Z8pEPcQkXMQ0ESEqgLmc6l65cp9s+lBUVSGyhCJWJK6AvySbp2unnnmjx986DHy1h8m45agSBJFkahyhF3bVKsVpwjaspeuC2XZ57JPk73lflWVw9eu364GffZuPJ5VJdXVQI4JiGbmkaezw8FgEENuv+MxEWTxJ4vh8QOZIepmpkRkMGdlsEoATBFIAFokdVDEEDz53e2dql//vf/vd//iL/zGr/67363r/vLy4MLlzaf/4Mazz/533/cDf+497/q861evzduiqvoiYkqmhFghCmAE4/y0gzkAMJDcZDDLvWxYWCUhAaiYOkRkCAlSCqurqx/6Dz+1tdV729sePjo8rKsipYTEusDtAQIGiYUfEKcYu2R4+cHTT//xn/yv//jfVLZy38Urr752/cxm/Q/+6+85df7sjVtT570mqYoiI02IKHTBew8mgIpYoEGG2hn4PD5U5HvG76iW1SPmuOraOKidx6prlcj5YqCp6ppD0G7QXw0tc9l1885X5aC/POvCuQtnuhiixMFyNVg+Y2ZtO11bX9rYfKsZxhgfXXsoduHBK5dijI8++ihomrdzyts/wJK0i4GZU8xOv1EkKAgwAth83q6fGg5XltswrcoiSvQL
VywjA9EEQM45IgeikqAsHKNLkpxDImhjKHulgpi9SXMDo3zaArxZZp08UG9OXEXAKKmJGHNmgQkYw2IwrTGqCpM6MHbElP1wiWIUJVNDRWNMSTSqlsiolBowA1IHJlnngZYnH+gcAUBMhogOybMznM1T++nX/nBeHD177TdXl69cu/va2hZ84o1fW6kfn3U7nzh6FiTdv/z46uDyR278SVO9PmtsNp+fsteG51ZG7e03br9A5TyJgY2LXpHYgc49ekbHDEQMAoqGRpqiCBo7Rx7EIEUy65X9w6414jZo4cyhQ8Ze0IPRtDE501taQeqFNFYYKHagdcGVYUhJTQix1eSJ2JCYKtAEAIWjeduYIyJTyGaW3rSV1DkagBMQB+AKBoCkkpgrcnUpFnXSsiTrCZSkFbCA67ytzxqpGZncbjNrqZqMj8bTkeF8ZTD0Vq/1eVjT4Wi0s90MBoOlpaXp7OjOje2NjS1PVNZomripgOcWG8Ta0E1CYE3EsNqr+4P55Gh/dXNtc7lu23awtsTcA3DXb90UTUEF2SVNKSUDNLAmaBtMlNQA0Ilo1zX9/lJV+tAQAMSQ1DiZgjGQgZGRmqGpNFHHs5RTjJP78qTAPLk5F+j743/P+YgKhBC89whaeK+CACAaBUwRsgUwEVnudpgAAiN1XegN6nNnN7vmNeZesoCAJpp7IK+99jozX7jv/Pb29fH4aGN9OTbW71UXLyzv7t6pXf/MmTPDHnXt8/3hkqbIRXDA8/GYQZAghWZ/7+jOjb1JuLmydubG1dtm3ZNPPXI4OprP5+fPn5/P5865Fz7z3Lve9a6y8i7JIogisyYEUABVYCaAzEfLfGAAILbCQAwimhllSboDIDXyBajKYNBvY7xx48bXfcNXPPm2x37qX/ybm1fvbm6euu/KcDrWH/3hn/maP/v893zPd0yb8e7dcV0ODGNRQooNCANk20I8FltQrqnv7YYhLoQ9mtHvRkk7hLLu297e4Uf+4ON/5s99SVUVlo6A0CEhkqCZWUrJewaMotq1fjBc2Thd/ZN/8tO/9u8+dvmBx9s2XH/t2ue/9/J3/o1vmba4c32/X7vAgwq6FBPyghbH3plli4lscgnHrkegRmag2TQcNDOKDQhAPREQpxSqymVHEgVBLi1hiLuOXYx+NDkaz/bUHCIeTWddl5KIqhqhSBAxdoWIIXUhGAIwc+jyymVQgQSYJGTTMolRERGByLVJAAqwYJJMHTGn0BFXoY2b6/XG1tr1mxPvjT0liUSOEUOISGhpsZPA7FOnUGTHq4WiMaYFjwRQ8/7RvZnQf7IUYgvVX2a7O1RDBRXQvLqMYA6yu30+t2NSATNCRaCCoimaSjJL6h2DUSFSignUMUmAhGopglQoLhLVzAgJDZJaEsWCCQiRa5WkEjExJlCBcZhCvzyaN4fd80WfUqzuTsa3D/+EC4scT9XL6vuf2P6jG/gcN5XTNpV2Y3Qr2q0wYgKHRSDvAIpGAmFyJQCBiYiJqBMRVBQyI1OmKIjCGmLBFmOcGmrsSAoUBAWKXTlYWibC1Nq83T8avSGt9miToBPtUgIB5EJ8Gb0TRjWba5qwdAwerIxqQOi5SCnD8c37EpEZOUmIqUMkIDZ1YgWAqc1TCijLjU9Ur9TD5Ivnknw8Nq/H5m7XHFGaOBAzPpqnseje+PDmzZuzaeom84O9/dFkPG7nQry1dXpzdTVNR0fjcb+3XJfDo7sjL1QzeGgcNMqpiyoSgVB42GJxMJ/vjyLWW67sdVPt+2VibZpZbJVaefjs2Y1eBdI1UdvMfmZUwVnThIiInHtzphpj9FwUXJuSGYag5CsDBiuiUIZ/qDA7H1XbLjlkoyxwsHuTjvxeF+xfeJOLC5pX+WOMztExTcxnIXB2M2BmkWTH6hoBA6DSu66Vra2NpaHXJKIxL4uLCJj31JdABdem6fr119dW1l964c5wsMbOrl3fVq1cXewd7Bwc7Xft0d7O3aLoGWqIo6pXBXFFQb6QXt87b5cunz8a7S4tLT3++ONXr27v7Ux3tsevvHz7ox/59P7u7Pr1vb3dGVIGkORExwgWLqKEDowQGIzMsq0LA5CAZMB6HpcdP8PKZGQOjRC0dL7k+s7tm5unir/7o9/3FV/97u27t+7ebnv98srDl3/rQ6//tW/7kddevfvAg+cFD0OcAagvA/sZsRp0ZgjmARUwZTU8gJysBZ0Q2BGRAFNKzlFKaWVl7RPPPDebyxd84dvn8zZ3yfKHkk/ioigyBVeSnTrTM2p/8Pv/5w/9+88+9sg75uPR3u4bf+WD7/+u7/uWw73xbHTkSxZDsBbNnCM0yDWZcy5T5hmQQAE1LzAjIiM6LOyYfpN3syz3cxg0dYjRMRryZDYiF0HSfHa3aw+QetN5bKVTEGRKgtlrPm9H59E3GGkyNEpRiZwoaW7ZKqYIImYgiKgIISVFMLOm7fLvTKYEws6IHJhDJMyrnqznzq13TUDMaQcG1ZA6sZSvXlTxVemcExEDYe+yhpWZs5WaESx2uY/THvvc17150kk+BGoSNfMFFuczOrVIIIBCLGqBODtEJG+44MkBipgcnyIODRGDYhO0i6oCpCwpSVQByziL3AIlBgFL0hIhkUvRgzEjFV4JuqJXUSpDhxECIpaVEVtf3MHR5Nnrv/364TPmGuUZ1kCI5ouK6/6QCF0JvscE0WrPPXaYVYUs5pPzWlaucI7Mqa+FEX0CF3t9Z5jUUSPKvmo7Jawc166qymbW1nUf0jR1jQiF8NrQTg0HZ4OkEFoRKUtfuopc6lJAFNApiklgoyJao5DYuZSSCAADMpASchW0jd2Uedk5BB/MhKE0dSJRbZ9kkFLroRy6suwfHOlVsxqJ5i0qDBq8sNcub0/C7sHMtIKE6jWlOZuGTpoZzMuiX9dLp5enYxkdHi4Nl7GP+4d79923VXBKYR9sSbRUS+jElUWyfpfavXmnyYbL/Wk7rVR6VT2bh8gqJZBif1CEiYSOPBZGoUlxHGTeJoUS0FSDxMSeUoyeC7IGKIomlRYJAROgV4jJRA3VHBG3IYSkvsCUIhtni8FFfH+TY2KEJ4JIzY0BRAMgEanrEsBCTFVZGUJUQTWHREQqwEhggkQOKar5gtpGLl2+CNaBmrAQkJiSB9XJrG0ffPjKC8+9HIN74i3vuXPzxue98y1RJqsr64xu0Eektqh709RpE1fX1iKQ45q5A0zVYJ18OZ3H27feuPLAQy21ly7dd7A7HfT4woXNw8PD8xc267p/+fJZRPvmb/6GnZ1tUhRAQHVmZKqogOwA8bi+NgARQDpuNeQsiwTRCECVzBgMJSVfICYE9WDqy+jBNVObzw6+4c9/4zve+d5//s9+8uadg62NlYsPLs2O7Ef+9v/9NX/2ib/+nV/fzW1/ty3KwqBDTAgFUjRDSwUiIgugWla3LIiVBLagtyAgFb7rOjPs1cu/9Rt/+PDDZ648dGF0eITERKBRIQvoibKFL1rv3H3LTz/9qR//336R3dKVBy/dvPnqyqD/D/6rD26cv3Dj+j6n1BuWSR0hMwc
VBjNXFDFGRIxd6PXq0M6BXPYIBgQwh6hkmVQBYELH9Z6YMSAjhhCQE+HKdNocjPfuWz89H8/ubl/rDcS0OJzshzgD8JBsOk+qSVXRYR4JihggtSE4V4hgiqJq5igZHs8MSYOaQaZwQcZ1OXKOJrvbRC7huCxLR6dDNF+QIULU0HYPXjn7279lTBhiBHZigAbOFTFGAymrsmnTZDpbX1pJlrz30TlRyY2OhQ0ZCiAd64M/pwmT35y03TOEAADym5Sn4sAAaoRgXozUkA1NGaFENY2u5gUoAhGDChgFkR7SnDqmysxMOEVhBUJzxilmHB0TZTQdLFyohQE5JQ6KVCC3qq2u1EvTDst6NKwems4n4vY7jUzoXKGlEXXU+jU6u9/uBWx7rggRJrFLxOWyC6mJMGjLQD6UaQksCSKBIwZAj8et3W42r2qnSWOToho66vXKVtCwqYwwCRbkJPmiKDSsFkVRUIpB1eaT0fXYSt3fKqtaUUPqgJQZ2RViCtCZcpSKywIIkwSHTERdFI9g0BE5FVcyzuP+bJ76vXVmp5iYyblSJam1DhxSYeiimdFqr1965wqoArQS+I1d3R0V+/tBdQCMSI1K6VwBWMxjo9ZhorZbrop6vY/BVwejw8HKgLm/u9ddPrOutN+mLqhE7KWuRUiePdZbo8ktmllR9vvOtfPxoH+KyVmMnuvkFAqMrFM0QCBTNNNGkpjjommnKSV2yEix7daHawzssJTUgViMAkAgeXM6EhWaSEW6GFTVOScp5NYhgJCBHgvaIVtILBrxix08ZnLEKaWkyTkixykkrBZUPAQlxNyodcQhRAYjcmTAYE3TnTlzWlMAAO9djNE5NqDCL6mlmJq3v+uxGKNqevjR8yJWwaYqVAMhXxW8AZDqqvLE09GdquccbkKHs9mer4a+GhSlP3t2y/d6h13LvlhbvZC6o1P95a1Ta5IWzlughibnz27FLkVTYmADAhRCQHPGUQIAMDMRgizsrVWVKFkmxECxEElQQkQmVoFMcieAmJwhVJ7mOt85vLl2avBj/80P/vzP/8pv/+Yzq/3N/jBdGG7++n/49DMfe+UHfvCb3/LWK9eu7jBVxGoWEUozAMzRnE3NoDNgWkSNN62aEDHEjqnoDfzd7f1nPv7c/+ebv6aq/F6wokJDY+9MERC9L0aTo6Wlwdb6xk//5C/94i/85sXLDzvff+3VV9/9zgc++IFvmkvcv3NYFxWWEJI6RLSYmD2TmcWUsgMGM8am9c6psWAyBARvefwOREpICw+/RXjPtwqiIktKhe/fvT3j0hO6o1EzmxwOhhuj8WwyOVQSMC8SVZwhmSJynknmLXFA8KGTPDhRU5Wk2abO0FQYC5FgaAikpuwIQEejw8Pda4h4MLm6tLJ2+fKmKqpFVQKAGOO5s5slVWRKRNlsB7LEXLSoypDMTJyj7E/LzM45jSEHazNzSPF4JZUIF4r+z51XLdaz3xQpqENSlZRiVEFvACASnQfLFBwjg0CQ0JJJLIcDKn3QJppl61tSKwwV+hATziM7j6Ujl6zXJdcSDRj9SUm5qBWIwUQIhdtOE3CaHcLX/qlv2b9dffSVf/6Wt7xz0z3o11Y+efVTU3zRKc/iKLLWtubY3nHpyz/yJ78r1d04i2CygtW487GJS75qmrkSu7JvzTzWCMaqigTEKbSpcFh4G9DpMDo0c+9++P1xOryzc+PmznNuZSZH8tYH35Eal6I6wCiGyiLMKr7oF6K1pKZtb2mcD1YvaNWPCR3VDgSwRaxVlGhMljQMXQmK1iVBFUZi8QCUULFQDVgVa/P2YDabriyvqyIwoIukYmaYAHpJJTGSc2DJhzlYr3PiFZe3Z+X1gylAo1TvT9NKJYOCABGp50khUQo256MmdLMWN5YGm8u9Zt5BUXcUtyf7504NtGldhNl8RiYCJVbISAO3MZI7s4P4xOlNonDYHQ18MQkdQH+IFFqQ1jmLSRNC0SWYaauOoWkBMIBjVbORmZROEqagICCdFAwuQJCylUZJqUuMNqs8TycJqUghmVlZ+ZxVKJCpZZYeIiIwGhIYIHkuYoiuLtgX0zgj8IyFaSyrnoEDUNJg6p2jvDCfkjpXAVpS9UhzU3bF+fMb8zQGdqwIykBGSiZG6AB4PJpD5pZDnr4mSAozT8Do1DO1ZjPRGCOVR+srCBhmh7NeLWXf171etdKMxiMPPrbQSkBXNrOWqGCgLjbE3owQDSEEIMJYuEpCFFMEL+qE0BHlsYGKMXtbrHsBgkcEy0AoQyISoCQJ7aQwB0QsPImpaupBDY6n43lo59/6LV/39icf++mf/pX9kfX7w3P3nz86HP/AD/3413/9e7/92//8dNqMDqZlkYwcMisEx5zx4GXZs6DHzyoI5A17EJESqlmU1cHyh/7Db2ikL3rfk81U0BUoBsaKEQkIYTZpLp67OO+2//O/+9989tnbb33kXaOj3f3JtW/7tq/+sq/54hu3bqKB8wQcELhgNkMDRyKGCRELRMtsFmRzlAAMDMwtWnMAAEqgSMaQoZcGQIzIxExqiKBW9gc3d49CbCtX3rp2NJk3gf3+PiTZC6KoZKohmpgSeQ/Qth2SC7LAH6UUmZGQM5BAJddVhmaEGCkSExl1Ye5LSjLfvXMnNnNCE5gi+F59BhAVgxoRIgGnaL2VsloaNNGQFVEgKWdRF7OZaYy9wk+blKEUQa0TUDEDIW8K4s0lVBHx3s/azshM3mQx5ciegcBghABM2VRLkyAkj0mlYjIqEyJEtqKqyBQpOVAzdAmMMIk2KAW5SRvK0iWPvgPpGalpUSKxsUO1og1dVVVH88RDFAmATKIURYuOLKIfEtUUp0Ne7mbtlQuXYS6zjgJV0KTOmlD2qgqubH11aZt3Dp59aOtKR/76zZeaWVv71afu/+Krb9xqqxtP3PcVH//47529uFm7szevv/LAlfuSekScxvCp559+7C0PSOzv7N64/7HL81GaNftf8ORX/PKHfyvwq0XDdQe9U2cA3SS9sbJSLeH6LMLFUxdJtUKrmGoiB8Zq3tFyWW4U1Vp0o93Dl+N03KcyBR9SzVR7VkZSJVU1i5JAlJN2ApayRW0yE82rOs4551wI7byZOq4lOTJf+goAmjRu28jMAALIXKD3XewCl/DKfrp6EI2tQ7o7mh6NmmvbKSY16LyPm1trvaW66jtfoMk8dLPt3b2DWVMMhrl8nMzC/pEt9Zc807DnCTvC1LXzruvqfo/QN1376t1DZm9NM2utdGVs5vOQZjE2SZIYkEN2MUjbhNhEXxSpCw4sSgLEZjQjxwu6S0xqklulYJwJWQ7JDLsudu3ChzPvKCEiGB0XvHg8CxK1tOhU4CIZUU0ickJVO1b1ogoZdEjJICAJkRp0agEpGQgjlKVfW1tJKSJZkkgOURwaIghiBAxIwugYe/dMpTCpxiAxaVJLKZPzbHd379bt2wdH47nI7mi8vXN3Ng2xNRJ0IMOKSjQIKVVVhylQOHN289Tp/uYWrazb+qn6wpnBxvJ6N21RtKTKgTlsWUSBFm13RFUFVDMhgh
DCPXZUKpIAzDl30oI/kUnkSwdoSKnuE5PdvH7j8v0Xf+zv/9A7Pu/i7t718VG3vLx8/8UHf/kXnv2uD/793d3t8/edE61imirMvHOgVhau8HWIkCwZGTpSNBFJGpMKMpgzJMee/+NvfeTylVNXHnpkPB77gjokduZVKUHXyeWHLnz6+U9+7wf/0c03Zk89/rbX3ngJXPNDf+evve9L3nn1jZsOS+cKIgJdFF65aU4EmD/9vE5LpqgGYiAGHQAheCQBTGCVQYnZPx3IiBVBMmeVPTkuHKcIbTtTsOmkmzXzrutSsiaO5900NygWU2sNJvMutYYpSauaUkqqhkgpYtdCVQ4ZPCICGjMaJsPkLEaIY4nV8poJ33z5VZ3sV9yo7fR57b61t+ps/OKzvzk5emPQq8SUPXWhWVnqLy/3Q9cwAVr+lP/TmVP+QLuuy89RiuqcQ0SJKb/JX+aQ8Hiyeu9wFY41kZrJlLro1SRTBYumkUQIkAsgUCBFTNAZCqI31AwjQgBW9ECl81ESOOooQonAkFLqumjBvHppySM5Xjg4EgIROcfMXBBHm7sibvSqXnAPX3jCtdWTZx4chotN02xuPYhBz7rlC+XGzu1PLK9s3TzY+72P/Zsrly9Q7DtXdaE5t3bhia2v+dUP/Vu/vLe6/tikDWcuXdmb6J3RtTuja9KMvujtX7XuLwyxfuziQ/GwKaBbropr195ouzsK4/6KaTkfTfcff+gJO+yfP/fYrEmuoC6NnagiCrEymzlEREAH7ApfgwwEb3fT171UNBgmaCgNPM6IKMVcxHWaHGJhOEtZXYqJDLJXNBgCELlS2ulsNvauJPKAxk6JXJKY2pjIMXM0RERHCOqmE3xtpxxJbZT257I/ShSxE9wfxa3KhzAtfVWWvqh8HX0IoZl2QWX3YL8JcXN1XZN2CXeO2mGJhXMqswKlTWBQJrN5oF69MpvNboznq3VZCu93Ya3oUwq7oxGCN+dUzMRC6to2MVCPy1nbOqYudglMAHtVf319XWMomGYS0QAMVEEFzBhRM4BelafzYILoUUSSMZ6YMp+sYEBe0TQBWRh/oiEROU4zOZ4pLVQBIkHV1DxxQVgAJQBUMYfkC06WFKw/qJdXem2z74lUzDGpmEFWWGhGlCAwZQLzse5AxLoYc+RkJpHI5CSmo/FoNEEAUIQpjmAGSRw47xxM58FV/dgxztKZcyvjye5v/Nav3by2s3Nnp52HfrVy9v7lJ558+Kl3vLNtYhfmqKQBmduo/kShnFJiRENgsKIocrUNmVWwcGYARgIAyJ16AwPKntaGCZEYPQK7yo2P9o3tr/21b3jq7Y/+1E/80sFO3Dq1funK5vgoff/f/D+/6Zs//1u/7euPDqrDw8NBHzRhSIGcQyh8WaSUTCMRFZU3sxgjketiu7S8cevW7ZdeuPPBD36daEwSCleWTCIupK5f47lTq//yp37+//m5p0+dP90ves89+9nP+/xLf/0D34jgtm8fVZUnQMo0x3xsLwQ7aiAITs0AgOnNbgMiihBgAkQwB3a81wZsIIZCREkWIkFAFYGCGYnUEmHtXBXjpCidWhFlWpaFGbdNQPAEnOk3rXQIHKMyOwAjQiBQiN75JI1oQEQwU2AVBqAUW18Wp4a9yeHO7eufrcoWQIXK4eASM8+aUQs0XHtosHrfpJPE0UOZulSWfnNtuL19fTDoqwGi5E09A0Vyi9M6QyIkeXbMDpQ9FwCzGCUVoqoqC5UBHr9OLBJPgvu9XRrvs31CEkMxRSBFEMwlrhGQWmsgaoWqUMm9Xm/WSMm+cEWWm4lBiUwWEMTIkElMsazMs0V1RI4woWkW2qMwYmiSK6rY1levHz1wiX774798YeW+M2630VvF8oMff+PXStdfHazdbUfK/Vu3X3j7W76iqNd29kfoJvdd2qqKVYnzvYMX3v++r3rp1WfE7oyn+weT/VOnzx7sNd71Gx4dtFerYv2NW7fWNgsRkthWNUIly6vLO+Or0zDbPbRT5zaORlffcumxwpZKREmdR+eUEmhSyxMJVkNVQASjynm35Px8vH00e6HHZ1nXjSM4JgZAU1DDqMk7VxL6TkJGNpUum52AaFaGFt5zSO14Olpe2sx+KUxV4at5M5mPdLC8JmjemLD2nl7ZXtqb9j32xqOdyb6l1hN2AezuLm1u9olAgoL3BubYFWXpcTDrxlHa8fhgPp2fO32/977r2lt34+mNPhN6B10XPDoxbtrZcm+lrAbT8fzm/vziUgUxjSbdUu0gUZKUNC+JOFHpJAmgKSKQWMiW1ElwHkLJrBbUYtc1KlFlwa8HoxijEINpTGbAzrGl5JwDJFHN6B8AFFmMgIDwpJ8IQJDvRkQROzZGwNyklmimDBCcLzPOLA/3EIWIGbjt4srKelnQbCrsHBHkPaOMmSRzlr2qUbNT1EIYAmYGISmYAiVvnNn/lqdqYEDEyILBEZt07FjNqrpo284XfGrD/dZv/fvf/tAfdvNaYm+wvMz98uru7hs3ut/4j8/ff+UPHn/8HbdvzDXZu97z4ONPXoKu+ZzcTRHJjJiAcgqGxxMFANHjVP34eV6cCnlugeo1AbMDCv1e0cW0fXvnLY8++N/+d3/rJ3/i5z/+xy+vLG8sLfcGw/v+7S98/OMfe+Fv/cB3XH7g/utv3PHEdd1rY0A3Tcnn1V9VzT+K2ZsYou8P3K996E9A6L2f//bZeFKWbGYOikl7ePr06STNj/7IP37203cuP3Bp3MD1G6//1f/sS77ky//U7t0mxVTVhXPOEqNpdo8xREJQU0Alw3xe4cJzw2BBr1QFh5jtIhkRzBIAmvqMh8pDTlArHHnvmHRYVEJqVFbFegpRrOxVq03TpLSsqmLqnAudiBKAFwFgNTX2jpFUJcbIzgMWYq0ZF0WdIhgKmKITkXa4siazdvvVF2bN9RD20S1X9TKhxcnk7uhub3nzvkvvIBpE6yC2fSqjCTOn0J06vfbcc0BGiIwmgBkKrwSLmjXvOBBijJGIjCiEQMDM7LgAC2YLVgfpwtoBkQDk3nI23xgLeoc5M5OkC6IHcM7ldTHUydYRgqAmhpI4fzfipCJK+bgBIGYgVlc6qslA/MC41jCLlSRCZFQgAlDnyBEjo0dBSlL3pjSnJX+tuf7StevlFr2486xjL3N4bRIL7q3WGyJj0Z2r28/shs/0ej1r65Wl4Xw+n6SdU/r4FKd/+Owbpzbv29vbv3vtZXKVNK13vXB0uNI/Pzh75ubh65q8chOn7elwSAODxraPbiyfvv+ZVz7isHng9KMv3HiuLKiZHR2EgWPyop2pVyM1b1ogZd/H6MFAl+qlYh7fGE1uD2LNPQhWOTahqKqgrVkwYUQSONGHOTIA1CRiqA5LdhWptF1ThZn33pI5VyCqI4zzcds5ggEgOq7GXXptt4oBWo7bczmKqIaFYIU6jzqZho1hBWoITi0SMagUVQ9QCAuP03GTrt58/fyZsxX7SXB42Jxeqw0ah5ZiUFJXlG2Thv3l8Xh8d5RqdEsV7E5mvljqFeW07VQFFJAhSEqiY
BRBQFHA1IHMoviyVR3UFREaSNs29zLZDdSXZTvvCLGJOpsHor5DbKL4io+zUTu5L4koiRKA85wBtNlHTZLFKI48Ut5lJWKwYHnvhohFJCu1mRnNUlJybj6bX7605jwQIhkDqJh5XlRScMxaAgAzQXR0z+MBgFEFA7qeM+AU1TEVjr33huS9LzwhWq7liYqu65yNy8r/i3/+f/3uh187c/7x3qCezSb1Rn82m124cGEy3j7fe/uprQebaXXlocFjj53d2PRdNwNMBpzVPkRkhghkCghqi2Bnls2cCInIsgDtWNhwXHwAgzfAquaumbODFCMa94re0cGB4/7f/N7v+OhHnv65f/3bh0e6vFxevH/r6Gj8fX/zf/zmb/3qv/SXvvxw92h0dFT1K5PSzADITImyOEe8c4rqHCLoh3/z4089dfnc+c27tw6RwFDmafzglSsf//jH/vd//AtN6D/y1OOvv/ziylL9Y//gO86eP3f75iFY4bw65yQqE+HCPgTIFmpLymu6C1he9o2T7OCuAkgGVgIALpJWb2ZAHRgwYDJFVLDU7w0GfTfoV6dXT03DUVUPq2pj+87rK8sXCIYHR3csArA7PBqHLjKzmEQxRLJYikaEZBSQEiMasBky1knSvOmKwmlKiGzilwarhwd333jtk4xzhMR+wIgYpvPJLBS8deHR/tJF4N48TgvPDqvUoTpF4JT07LkNW4h+yMxOhtVEhCSZtCymKSWMkTyrkCb1xAudZW7QIQCA4LFc+Fg2c29/hhbcAjkuQyUL29kA1RCVzAEkRwACaMCYTLHnSwItq2qm09CFXl1oMhCNlZR98ENCp86bQ6udlmiQKucJOyQiZTJTYlOwVjrnNWlIEiAliN6bF3VGbChBG0dUsRed3jqa9Jbgj1/+lZkmHPDMBHkyn+4b97V2b+w8xwzK5QtvPFP32DNaSmrtdD7zhd09vH5r51bd8ylCglnh61vj1xLO0blXbrwi+mLdXwpqz7zxEev5OBaH7ububbfoAxIhKjIZABLk+R7p0Ei18BU/YuFm170OdLrsnyLNgtkEZoym1qFVDJhMohhE9UxIJiIKCugAHULpGJumQUQztKSGrXOlFJPZfL+HFTi2mu/swu1pEnKH03a/5c6SSTC1SiHI5GjMa0PfhtGg3mBGAkOkYLEua+3cyrCmYjyez16/+up9py6Uw81xo8U49avCMIvATAGi0dLyYFAWd2d4dxIqTyawO5qs9WoRy7dUEJ13bTABdMDUzjpwFGICY+frpm1Pb6wm08JXluGixmYipgIm5suyNNVmPA2SHFmIERgXW9SwELbnbDTz6gyzkIZM3/SaEBFPBQCoWsZuqCYAVSHHFULKbkJmwICqVlQuBltfX2cEs7xKBp4ZjAEESRQTggE5NEL0cI+s+6ShnUxDl6rSA4H3vlcWS0tL3vtevw+0AqrAEcCSaNccVeXqz//MT+8dxEcefeLwQM0mK32oMA03N+7s7JK4lRX/xrWn77907pv+0l++dfNa2zCdNFiOH0vVN3cOc+ENACAKQGCoujA11cVi0/HhhGCJgKNZ64qE5sjVZpY01H6QJNy8duudn/e2Rx555J//X7/44gs3NreWloarS8P1n/2Z33n6o5/+2//Ft933wOaNawdl1VeNJoZAtFj0NzCKIQ6Wq9dfvfv6Kzf/1g//BQNFrkKUqo/nzp35l//qZ37u5z5y5szl/pr/5Cee+/z3PPSBD3xjSPHO7b2qqlzpU5cAhDiYEEL23stLW5RjHBllDD4CG2QgV4bVgJmzxQw51ysGIHlUw8xusdYApeeCcW2p57xngaq3riF5r1XRn08CWudKP2tmIuJcZWJE4iCk1CFTxSWYj5FSJkFQJCRQj2hl5fIihQks9VdffenVW7ef3txaSanU5NeG/b39O4di5y4+Vq5e8K6XOlGLDF5joWhWJjJKSZPGjc3lNsaU22xEprlVAmaCBmqKiCKKps6TGYfjQdSsbWQ4OM45Tsbd95oDwL3/cZEt0WINSpNZMjICURImbw5dXrdMKYd7A8XKsfd+MksJIGlqu+hqFJGy6IrKfCHonJEJQrTIxVChIAymSUEBSAyUJYEROcHCF+vSpD5bo4Edu1kbiTwKWl+wSxjEirpGSH5PDosCFJwhR5mzA4DGa90b8myqSHF5adVsbskUxLnS1SkGJCxdoUoz9v3QOEWv3DnuiZKvoFfAvGsAwGoA5hAFuCSqHAgAEpIBK4Cg4mItW6rE6Hxh0RDL4fD0ZHp9Pr+NTITrxKyqBE6BRAUp0iI6REgMQKgmeUXBgLkw6xWlC6FpO2HmJK2xOC2Je7GZNk3jyqql8tq+dK7XiB6MWp5hL2knqVUTDGTdfDp0ZSUhgiixqQpxgSLgHAiYSL9aclx4KO7s766gG/bqg4kS1QgEDKYaQyLu2qbo9Zf9dH8S4+HcSl+NmrbvfAAVJIe5P2AKZGIxdVyWXTfTThBLMO4VtQOdN23Zq5tWY0BGLynlxDxCkmRF6RQhmnkEYlamTCDIko97sw+iLLKW7D5AgIzEBDmNxWM7ITNRS6rKnJgEUJgx/1DL3RsmM9za3EgyJ0uIyoSIi/1rQAZIhoQLbIsy5I1RNMs79/m0sRhjv1e5wovFedfW1vNIMTZ+sAnmATFqy577fT+7/fzm8uoXPPHUH3/qpTQsdg9iN6H59KjycwxBXHr15ZshhO60jPa0m6WiAMJ+hA4XQJ0cwAxAEcH02IUBgMkholg2xrOsn4GT1fMMiCcjgiiRyGmC7Gtclj6EWHrnPe/t7PrS/dDf/s6P/uFz//pf/pLjuj/gi5dPHe7HD37n//SB7/pzX/O177tze5wDhi99jDHFxOwMVSwNhis/+69/01f27ve+/eBgGg3Wt7aihR/5kf/++Rd2Lj344HQ6vXv19W/9li/72j/35Xd27kpIKysrbRtSmHkPmmqCCv3ItADQhVFlxiBr/pPNIEr2MFnouXERzTLd17wZAEZAUF1cECInqkTkGAm08Nykrih7AL3x9MZweRgTzsMUmAy5aaddSEmdKty9s33j5huegYqDzc3L/fpMv7+ZtDCQ3CFEmBdcKjABSWwdxU8887vSzc6cqj0uUeWS7N+4+fpw9cLFB54QX0LjJLYqnSPPyEkaMmPvsp1Sl2K9XBJzUiEwJABkzLLF46lqnjYhgXPUtCGEwMeTcy68NS1Qnvfk7NPuNVs/6dSdPE0EkPetxDSqKHqBY1uIBS2OVVN2JRTomtCY6Ww2oz548rPZbGW9JMeFZ8CoBpnvayidJPSunWtKScQ0P48I7Bm9QyQCDXHeRXbcC+1copTeoaG0UlQz4PrgKD1wdr0bQdTbzKUnL+im0+mqW1peutiEbjR6rfXGrt8FCXI47G9Muz3viqjIQaqy9r6OCcA1puKpBosDHAL0UjkVDTHYcl3GGE2WweJwqYhdqHulIxuIqkGXJIAAGBMWqGQEgp1qWbI3E+SN4VI1PXh5Pt9mHJT9/qIBCC5pQGoLc4uyCAx1YUYuBmgJrESqQNk77Lq2KLxB
NC1FjHBAyF3XOXBt5+8eUr+p95tpp0hFkVIExRr03FpvmqrJqGu7VBCaQMFFIk0RvK/atnGeCdHElW6Iyx7L2f7+XQkrvLJ6OGnqEkxjiIq+lyzNZrN6qao8jZo07ugUS7bNImAFFMWQYhcFkQnJWZimSABsEIBilK/8ovebpiY4X/P+4Xg0ni0NlhSYiMw6Ihe1izGGGJmZiFLogKvc2M0eWDkxR8STJb28MElAiDkCLDQhi+zeyPIuK4h3JQCmpEQOgRGBsnewCSKurKzEGIiBkJitC4EKQHQIDMBgoCaECdFA+d4kiIhQDRAIsWkaACs9KsG8nQGkmksJd6rytEr2g8JwsNseHV45daad3r7/vvVPvnhruNSPSSWFg6YbLp/2MOGSuC1j0rIuDT1QmbRDYgQ4njkn4gysMGZnxyWLZo0HmqmhI0RddKABEoiZKUBBLgkxDsCAWfM+QdcFRyzWIbuyGKjZrTuvPvXO0w89+sGf+Ke/9sKLr6+vr6+s95ZWzv4f/+vPf/Qjz3zv93/L2vqZ7e1t6QQdu7JQ1SSp6tVtgD/4/aff897H1la3rl2/c/9D5z/xzKf+53/44wQPXLny1p071+q+/J0f/faHH3ngxvVbBQ183bVt69kBFqbIJIiq0meX8rZkzlXBMEcG44VXV47qanrcsYloDhENExiZeYMc7r2IAFFKqSrYOcesrnBtq1VvWQIYmXfFbI5JoxrMO+s6RmaJCRHXVzanR5PQNhurpx1Wd27f7Q3G4ATR5vM2hBQmEwGZzEb9ge/aWZjPeq5YX15y5Smw2f7+btPCxQffu3Xmvvks0NxDEWJCdMuCJBrIESmmQAqNK3xKoah8fzgARDWlnNQcZyoAkGGcee0o9yrJMYHlUX9e7LonfCsAGeTU/83XySazHesjEUlVRC0vRGdbDcHkwBFWABMAUHRm88xr8Y48ezOLqYuSsPSBtGCI2lEydlAysBqnpOKRHZEAkQqKZFMXSGAQsYAaWWbtdL23yrB+eGgF76wsXZ5308ns4EK19e5TX/Xs6PXXw533XH7HsHrq6Rd+1cv8bVe+OMztWnPVu0HJTdMAh7jSv3hwtNMbLIdAKU4ubb1tZXX1mWc/2l86JcFSnDHocm/5yUe+6MNP/55bli6FEt14Z/aOt77/aH9wY/+jRQmAqRlPndo+Y52iBxOmoFipMDIbClrNVigBQQFKgNRff+vR4afb6V3ELa6zeW9Cp6VUEZE8yVzFHPdQVUQEsOuwjHrIMiS/ga42a7rUIMyDzGs3bEW4GlCSefDXj2iM1ZR0OoOg5gDMUdtNn7p4kd3+1ZuzO4fFlTGeWTPF4Gw5Seghd2plWZlZJhWqWkHVWr9XyZ2dowPxVR/rZcCKeoZJpQMT5UFo/bA3HI+nh01k5hXPil4lIqkRhS55oyQpISZwBUgX1MA7XyWY/ftf//BX/OBfXR5GqoWh6w0GCWM7TsH55PoOrORCYN7MWgvEA27MCjEjBCNTM3uzR59SWuwtsUdTUfEFA2KIEkXy6geAgseAJgAmig4BIG/25zUoE0E0py4aFGtFADOoDTWKePag0SApLigcaEDKGVxKRHyyIKq59QwiYkBliRKEC9e2wMglYQnjaFtusIV8d779vJ+267112ID1w+l9IY1n7evX9kpYMVwufJe6wy5abzjYHx+sPb6i0Ch2URY2gYaAnPv+qAYAbICUF2dQkdGARAWMkQgtawQhASIyAGebK4WM0jop20kAfFGCJbZSREpnaoYwHB0queL7/843fvi3n//lX/w9S25puXfhgcsvvzH+ju/8nz7wga//uq9737XtO+EABv2ywRlDMVxd+eynPnn92uEHvuMvIcUzl9d+8qd/4d/9/G9ePvMkVnTt2kuPPHbmu7/7r6jqzTf2y2KgmkDROwQQw5xxsoo6RBHHx8wvRQU0XlDR4digBTO/N08aAXwCSyAExKBkIjlRBSHHhkAOy4LB0nA4jEHRCN06zGZoSa2WcBhg5rWfQoiC0TRRkgjTphms9pmoE5m3s9Hs7v7BYVm0moJpoamct3t13XMJm0OPVAz655ynO6PJlty8e3DgqtW3vfsLCXrT0RTIqwGJR1PQFgAcISIKgXEAUgtGSTcGW7Wvk04cEakXBjUk8ADGjFEjO+q6GIs6pIaZLY/to6AoMydCTGBmzjAhqy78QvQetsyJpNg5F0JQ5zUBAibEBAiq0VTIoYAnCTpJ4NQltIARPReDyidTBGRGAfDqIAXfzbiHBWDSiK5IBp2gc21eBQCnXVAHRAaexTtwSa1g5QAgStTNwhe/5UuutvOLZ7jFYtpo091e6a33sC51GXnplaufeOzhTaVh4lsBJqjn+7x+4cKDd29d658abKw/evWVO4PeC49eedLihY984t84F2Def+/DX7q5duHmre396dW3vPUdr752c3zQPHrp8+/s3bqwWTTjvUc+7+398vxo59MPnX1qZfnMSy+9dO7+TToOiwjIioDHPV8iR+hyWxCRERxhgVwtLT3c6DRISJERkdA0ZJGVefLMHgBU2LAQhSSgKUhMKWkMAOYJSwDXRdVk3TzFKF0bTTBBb3cauwSTrklU1lUl3MyCnFrf8FV7Z6zzWIcETQsVV21jDYg6jQ5KIKdAYmQA7IDJGAVhuHJqbXPl8Givm4ckGFLs2mgCKpRSClEMvS+KEHUezJU9BcvWZVk4kQloogBgqhpCEMM8dgeApDI+nHfzzpE5TI5Tv3bOzGJA5JSSCoaoxxk6aCb8Hb/wHoJKHgedsEpOEhYzk7SYUTvniqI42d87qUaP/1UBQGOqvVuuehglh8wTxeFx73JB6lAEWCimSY7HEQKimBFVoEDjycwAoqQ2hi62IXVNS76c6eyF9uaLMGvM+4ag6vfPbm5uDOq1Yb06HBi0iY5WTjP6lgsyS0DxS97/BaPRPqIVHs06FqO8JsNovLCu5qSWPatz60qNIFuyZxgAI3A+xxDUTMwEMC14a8ZgDJbJJgkAmDGvfRqIc+w9I9nOjfS+9z31d37s27bOLV+7viPGW6dWT58+/eP/+Ff+7g/9Qwdw/v71kaTkB+O2rcvBf/zIczzA93/du1NK//V/8b/86s/+3sMPvnMG4zeufvrr//wXf/8P/I35rN3fO+r1PcC88IvPMeek+YIfJ575Y7XjN9nZVU9ugDc/UEIz60wAoADKisgOM7spF2eL3lVROtXkvVc1LksQTDrq9YvY2ryZMLgQ2rYJpKZB2nmHHgdrKwej2R89/akXn/7Y6OanVvyd5aKhRirxGBq20dqSrzD2inap34DtxnhXZS4pvHbz4OzFt77nC78qGE3CjKrCWLnUY83+YnSUnxrIRzc5RESHzlOMMfed1NJCyP+5sgICZHSOsxfAggR5/K3evNvvuXT/PwTvi6/53J4nIhIAxUimHomBNZkkVUEFl6UBzjk06LrOEXddx+iUa7JCiQsDn0QRHCB4n2YJIzolD8CAZIBGqJblbi7FXkd1q4/e99b97d2NTZu4pY8+92Gs75678LaPPvf
xq5PnqU8MSQxrRMZGQUsqoWwPw/VBr5x0o/XVi9dfe261P3vHw1822fGj9o4fnolqguO11eFnPv306a367Y9/wR9/7DOvb3+415ONwZaNeYX6T97/xTdevvvyq79x4ez6qaVL3fTgrQ9dKXWJRDRHEDvZCAA1Q7QCjA2dAgMSsiMsCeq6d6Fa2Zw2BykIAIsED5xM1VLelVDVlBxTz5CSgkqUGCWkrgkqSFgh9URKEUtBQqex6zBVszTY61i00NYhdEPzrOQwXhj2IaTdEWDLJcCkk+QgAnYMhOjElNiYgAmYHJIn9sgOMBEsLa9vrg52tm+NxqlTVNbQcRJKIiFENSrrKqhNG4WyWNxPCMkgLUByZIb54mTehap2sTUEIvKMCOCRCQ1NCo+ls15JqprUAH0XF8pIRGT3Obfp5964x72RkzdmgIpvxnpGAEuWx4xEhAhEQAzAkHnpiiBgtS+W69pCZCJFOC5hKSOoFsDbk/iObAAGZEiG2WtUDUHze8JJ0yp658suddPZke8V+6+9cPDSCzaPVb2qQo4L8EsbGxunN1fuP7N6en047Puq9qBUu41er2eo/UH1K//+362vb+YzjyAJoyI4hTJBEc0pKIC6haKfLM+WF0V8rsczMIuBOTfbM8NVs5uSIEaiRKgETFAg5h1QRDQC9IzeEYNVFeztjkpXfO/3/+Vv/ItfdHi4f3gwdd4uPbT58quT7/rO//F3fvvDl86urhTlk297Z7F07vd/8zPf9he/YrJ/8K3f/MO3truH3vrk3TvbGA7/7t/9rq/+mvdvb28HScPhUBIQEft7PfDyEAWIIFuCHG8Y3OttDZRVfsf3A+Qz3haLl/bmxhNnaJchKOTPR+vCE0FZlgCAvtd1SWnimabjeVmxp6rrYrJ5p60gLK2sarLf+A//4Y/+8I9SxMSphaZNMUS1BI7Qq1ZUhdhX9L4sjN20gXko5xHnUS499MTlh9+2uzc39Mg+KYBRkrAQ7L+ZrLj87Bs6RBY156g/rFUV2BQMcaFWICKihRiGmSEKiDIgAYNaPhrl2Db2ZCXV/v/E9Ddb+McBfRHCVMnAzLyhBwI1QkRBBg/EYoRMRLkHmEJonXP5p7MkXAygMYKPHYc5WUsRyMhHU2FKBAHBPAfEQA1yQ4VOjecMbSe9vj9stg+mLw76tcrkhRd/9Qu+4KnV/ttiE8ugMWmSCWMLCgezw9uHL+5Pr027u29761c2LR3NRqfPP3L19vX+ph5O9+fp9fnkyHt/OE3BFROdvrH9ylNPvn1r8K5ZJ3f2Pv2Otz28Obi0d3D9zPn1c6ffcXi0Y2bTEbzw/MuDZXV6jOYDIFOlbFBNTgFp4YHCOTooEKAz4OHSI7H9dOj2i/KsWWTEoImSIRaIqJAkEVFNjlXMzJIoaEg6L1LtfIHcQ78yn94qJcxUimTe415LLdQqnJDMhRikC3Gpz0VlNw+6aUKyriihaaPTbsCORRlcgjyFYnQMooCGAoxqiEFSgW5jZbmddzsHB1ys9UpCJEwGhURJFMmVDohjMjUDUiBCMJGUDMRQgcySgYgoECOxpsUj55xjU7MoIkxVFFQxkUiU8Y3QxtR2yZelI0pd/E8y7hP15Em6l/+X5pZNVvLmZ4G9aJRkiVMuQrP43bI98iIQkKoqIXtf+sJUCVmQRUNmVCKePBKaSQcAuSGJCIpZWLggDRqAphSQrCA3m817wJXHIOl3fuWXz25tbq6uawJCH7kjBBcVektrW1vrR4dnJiNzxbTjg73WK0/FfFFsbi2/9NJrn/rkZy9ePBe6joDIUFENoDMFBEZ0iGAqeBIW8QTJC6hqkhG4OTiimh23lwAkLz3lZjUsDH2ciCCyI04AoMaARGyYEH2MenQ0/tKv/NNPPvWWn/yJn3v95eunN7ZOn12Zp/V/+L/94ic/9fKf/zNfEfbufvIznzC5M99tvue7/tHq6cu9evDSq8+/860XPvDXv7uzcOv67arXE40KhFA6BolzYswzqJyMU/aTFVvgE96sn3L5BQuK/T3xPfMonIISZj4yGjCiGEV8c0KCaExWFY6ZQ9LK9dSSGGpCgxRjFCu6CCl6LntNSC9+5vmrn/1Un+JjV4ZNalLAslrb3d/3xMOB7ziU/bWyXO9Ha5uRSjsdz7w6acPSyponWVlZmc9C4QoTZYOCe13sHKHhPSIWPDmP0cwBgSREZ4NBL93M3tZiZqCasWiLP+R4FAGZ3Q4gx2pREfG0mOHpm7A9O/mJxz9r8TwSERmoKpMLuQjWfLp+TuZkZojG+ZoSlmUZY6QyE/nNM2sUY2kM+w2KERGYUttXTwriGNiBiYFZAoGiwAqRdVnJJ1Voy7r2z9z6uKdPqjiC5Cq78zq6Qu6+9mI3qbkX4zA44Y+89tuO66rnP3n7aeeKYlmevfmHy/TSwXTb9dd+59lfPjy4vT5eLeuhcRil0dH1u8RDAdh94/mQZqujs96feeHWy9IdjLuboIPD6fXByvJKfWln59rVo09srl48Orz6iVdfzZO6BIsnSY9XwhwCKaohGJAiKQIQGXpVRVteX38g2jTEKcEgWZcw5gwrG6AESWIE5IJ0ppxUknadTNs4jhLFHPsVRZxLG1LXxjRp8WjmhMsYxcxKN4zQxtBsDfpiYWeeIHogEbB5J4IEGCEJiEMu/XHejshGLIyRMTksfQ91TlpcOHvRV3HvaNSmfoCg+WYRya7peeZJfKzP08zGU80qDUUAiJLA0BZBBx0jEyZLoinvXyhSiGKGqmQanIckIpInoqQKRM7ueeE9LzsWieA9bgxmC/ljrnYdkssx/ThbyYZwi5ciAidQLpkcnnx/0MV65AKQndFNoMdsYT1pWC+yY3Om7JmQJJPLVGE+b3u9pY89/cd7t3ZT1Gmcdjieh0MGM01FGZOFtbWN86fP3Xduoy5VuobQoUpVOoNompaXlweDASJbMgBKx0BFPO5WKEDSfKcBIB+XEZD/CxEg2bF/fWYwgIgRCzEgeIQarABzZmIQgDCLM3LJApAd/dBRwZTqigsudrZ3i9J++If/xjf9xa89mI33dqYVxbc8+tgv/+LHPvPxT1596dfeeOZDH/7d/+ONa39Qrmy5avX2Gy99w1e954d++K8exK6Za79fx25ORmjmvYAFR/18PRehB968wvkoujfZzIkUIso9qeji7GeSHK3Q5V/e8oHuCsiqf7PMCa6qUlURPZkPzUGvf6qZJLQ2AU9nrYJbHqzdvn79Q//+F5979o+WhrC+iksDWR7i6pKPbVMXvvAmsWMgZk5xOp7vicJgsLW+vnnffStlOblz9/WLF+5f39gKISCiqTri0LS5VLwXoZFJ1JBX7WRRASNKv1elILlbggsiqpyk54vjgQkWPAnMfSdFENOTQvbk8EODe5Ohe6/n4muO6ap5Hy3/32iqjEooCElCkgAoOb8vy3Lx+CBmgUNK2s2Lds5dQxJNkhEasbha5qGLKsB5tdDMwDE4hsYOMLW104QByW8OyqEnJIeIDgpXUVn6GIhraZOgspA1CYiAANmBL9CERMNOut
WVsDubH8U7y6fqDnR7dNMNWGCJ/IAYHEtdukE9nDZ7E/sslJNq1U/oYMxXcbVo3GQnPk2DefLw6s4ntTdvTF0OdERgkstCBTAwIkZFyOQLNWRCIkLwaCSWSne+P7w1newt16tCAAgqgJwvnIkkEXR+YJjZraggaF2KDVPFUIGrXLk8md0BcDFhwqq1nqlL0rEBAx7GWDve6Lvd+XTW+ApNqIqhc4wBfQJdIkZ0imCUN2INiRiA0BSSiTAYUAlACHru9ObVGweT2XxpmSWZJSA0kUQCBFhX7AhFiLP9myrYcXdPk4jk6BwUMrabGUvHSZiQMy9esxTZlZo0xSb7ponmDF1NkZlNMQsD7u3M5FvKzCRZdslcWErmtjJkrwsgzmL2vCT7ZmWqupBgMHNIqSj7VLLFfDa5fBjYCaEGFq0PxAxoXKyWnMSe/IulFJi9maSUwFxRFbPZDBFnEO7cPSJXr2+CxG5YFFBVQSY9gtTR8vL66uhoebC/vXuIDOVSub7WMxxMZyGJnTmzeTQ6LIoCVIWRFw5f+aeCEgohAy+WrRZhEY9/51x6UA7+i1/XFsDknPIi5kw/1zFIudA8yfIAzTRJqorBvOmKMs9zeW9v50u/7PMefOzKz/7UL033p+C6P/We4V/+5ocPDldOnV9a1p1vet+jv/jhyb5sf+/3fdOTT73j+rXbkcPAL6XUVXXhGWPsAEEVyclJpMul7yJPzFf7+K/iHH7UEFD5+CZb9BYQCJkwcSITFhYARUgICOYEElr+ToVnAuv1egrkixJiMplCXCJ0SRqDnmcchdFHP/w7ezu31gsIQ6AiHMbZkutTKpIfSSIuawupcF5aubt7t6zqwda5YX8wPjgMqU0hgPP3nbuycfrSvAN0hSIDQRRxNacUFjXeQqR7fD/nv3GRI6OB9HoVmiOilAJidnLPHEfIyncjBNAECbFUhIxFA8xFs+U0DIHN0qLCBDjp9cOJygsREZU0f7Vp5rPnXJ+BWA0Fjts7IkCiFrznoigA3sQFW1aBR3PBpLLCq3NqzKUBoROWAGpsKRk4EABzqhhrqdDVjTRdHIP5SQhFtF6wNPQpkHeUooqJ8Iy8c12VijAoqpCkKFyZzCJi4dlMlMnPqZpqN9wfTQua1nVPU1v6GqGMMq56TiQYWGq73qASNeAGyHeBQWds5N1ArUUqB/21ZtZFhTdHOpSdGxcFIhnxwrPYHNhCl2BIQKUj7pIbDB8gcCmOCD0biYhqzJMiVY2RPS8R+hyAzFBEY9ulECVGS+KLZVFKHcRYjkMRtE7RghIVFCw2UVdW+1rZ3XkSLcBZR1a57rFTXAZlY08kBAkMkJEcsCNyBOyQCqACCFCZqqQRIK3017fWliaTbZNKEboEUTSk2AZwrqh7Pjs25AkJyOfYaKSUwAjJQd6PT6H0zjuneSKT0mJnEtFMRaLDArJUSBTAGICZo4R7s/V7Huk3ezWyKHMXjCSyk5Qk9xMF1Ba5yT3B+CT15mSesrOzIgCKuoV1JACdmBsQGhzn7LkXzyed4jz0I/OIbIa+LBSkjS0RbG/fLopiZ3TUtDFNYq2U0JJqwbUBGepwbXDqzKn7L2xcOF+cOmsXHxlampTOEOyd73wHkhp0KUUVKJHZQAESQSIQMzSogQny0QxosPA1zRxzc6ZsuUuGnK2LiMBwMS0AMiABQiAGdABKnK+aZkQXIRCCd0VKyTFakgLZrKsrv71998xa/T3f+82PPnF692h05vyDr7+2q/TO93/1//4HH7lWp+GFzebLv+iRU6cu3jzcJaIlXjUWx5UlVIkIyRKVdQ9IAJQolw1vugURUX6UjicHRHmImD9AOkEMvUlGy6Ng1dy7yhlG5qgbAGgS7733PgcmV/ZjmhUld6mZN2PvPZh/8TPP/cav//J0Pl5eqkvSXlGaYjXog5OylB7zsK5T0zEXQa3RZvnU2vD01lJVjHdupmbfurYZdw6W7z//oIiC+cUmrSIiRglAhs7RYrFO8wgqdwtV1RHmGlFVe1Xl0DOSmWV0Wr4qCqZgJ9PR3KXk4ySEgGkx9v+cQZSqHvvZwPF1g3sfqM9JesxyYUTwpi0fAudQl1fnyrK+t5JWJAWi2rgE77RA8CIuo5gqqhB9skKRIrISKThARpLkfLJCCo2ghg5psHIh9jdS06DNgOYptjXh+aUnsVmWbhq6iAXOQpg0LXgUil2KMwtewwqdr23F2/StDz5+6dQ7OPZrdF2aoOuSb0dxClWvlbCxeYHiOqSIWpAlp6kG7gFi9FFMtO3mYJEcB7J7htf5/UIukrkigArHFeKiBwCmJbISb60OzooeEFYWSMEMIpIZoapKAsYBgFsMXZJJsNgljSmFLsauoD6zl2CiRStFEAfmgEvPysljwpWBhqhdwz0WE8NuvlnHJ+6vSTr0jGYoqTZiMacLdQEAiIIRAxOCRzZiZqolhTNbK/26HB1NBSEpRtA2htAJk1tUuLaAXOerkW0NQOQ4cC88eQGgcB5NmVQlaTRTZxlMAcm7hMimuFANaVJL7LKwW++9FxeBnDmHgJywCyw+i9xwPwkTjMR4PIriRVJzz3cjMyuB+JhS4tDlxqa8OcQjMMqfRTaLFuPFXIHQCDOYMFlAKkUJycUYk6n3/OIrL7ahTdoj6fr1VHhnd3KnbeexjWIsXGKvgBI3ttYvnD/70KULD1w4N6yWTm2tn9pc75X1+973hdPp1HtPxOwrwRzQLZ/BaCYaoyUCzn10RiNQXmwpIQACoNqCgWsmBhFMiBOiITiw0qBUIwVRbPPTnTUrx+S1HHYr1eS8Zbsgj4UZlGUxEdnZObhvo/rPvvXP3r2z/a//7R/cmS/j1v0fer77o9fn1anBmQsrN9642vMlUS9SNGUkKYrCpGLree9jjCr+3g83p5MMSHSMU3hz9Jc7lyafW4Qt4osqqRmCOjJHzpAFmAi9O/kCT+zeXON1TZg75+fdONq831965k8+9eorL1+6eHp53amfD9aXTp++v/JrA79cuxIhAlWqoayhYCK15V7lYGbt0d7RdYE4Gs+mc+Ni+dEn3gXl8jQyGzCAA/BIoOSICVgDZYvwE8HPyeRTNaEtbDQY3cK+CIAsj8T5c7IcwlyVLlIZI1BDtbxOgZ/7Onlq8HNtVOFNtvu96Q5AtkxKYqK8MN6i3BsEc23bZvak94URZutBEfUihuDBmUfx5NGEg8dZDCJi7B0QR4NWIJBBSepC50fRu8lMJREEeefFP70xuf/yylNnh1/ZtUvDavOtZ77w0dX3l93Kw+efOk33x1F4YOP+i9UVmvM6r5zm89D59ZX1S6cf63aPvuwtX3ueHnrs3CNPPPBVKudXyw2d2sPrT75t80vrcblJG+9+8KtX/AXvBqXb6uZ1xfVm776aNlRn/cEqU9+XTT2IzOYMEiFKdAZeaQwJHXkFEcIKHACriRkJoyE6ATJzziU1EcbBeuj2KLYEBpzQBkkbg8CuCmGK2C/L4bS5A+AFu2CJ0lKdTHGssKKuxmolTXei9sZu0LnYNkYeOfqJjrwP/fL0/iwkK
BURuevMrmxSaUdNGpgfmqJnTtgROWLK4hIiJgORCGZAbKaO2czAvCjdd+HSS6++XKbTQgTSaUoRAUX7HgwCESiCMQlBMijQx9iYKpsDbTosyQt05FLqcZ8ZQUi7cfIu+k4SWqGxVdVKKBmZaKcqCJVRSikWWMkCzbZA/h6HnsWNmDQSg6fsBumT5YY5ioJzlYKJAaAnCicJjqmSkQOKoEowg3a9LJCE0AkC+E6JnTKCkiKSLoZOeQBmyIAGgJDthAkxY15KFHFMEpUUxWSwvPTR3/+9yW6T0rX3PPHE7q395miyvLHGMCiH6uq+LxxgIbxUlnjhsvNL56o9KSfBaTWd7i2vzVBGhUBUh4QmEYgQycAWcx4+fuCtYwRFOnFVRUIGShABgOmkCcOmZGa6YLOAmSBA5tSaEWIGhwNxIYuAk2uj6JwjUY8kaM5xMnNcqnabK715kR5/av2Xf5MvWP8Tv/ZPn/3Nf/X4Gv2rT9z67q/8swc33ti6dJaCQIGCWKBZQhFxrKoBDJgLVXWAeZkSCQAUURVQBB2RqCAYk899AyUVSgj+JG/I4z8QAC0MjbLQMxdaZGBqZowFpVCUBEwe0QGac8mi01GKwziVqhy+8eru0c7exSsrB4d3qmqlLM9vbFz+9Q/9xwv3D2tCigigFEsmFh35qi0q1zSpaWbrGx6m7uDoqB4MW/AXHnuqWN5q57EkFhMTAEJAZQIVNDTiqLiYYZoBGohCzn4zAJItLUzj2RIkRDRHZtal4JwjdP8vWX8WZNuVngdi/7DW2nufITNv5r24A3ABFIYq1EDWwGKxWBKLpCiKaolSdKu7JcvNkBQtq8Nvdoef/ORwhB3h8LPDD+3uCI8SW+pWa6AkqkuUKJFVJbIG1jygABTme3GHHM85e++1/sEPa5+TCegEIiNvZuIMe/jXv77/GzyXQBgrt5hxijYFRWZxC4xoDoYE0yDZEaDaI23ds+suZwf9k6A6cCdKNigKTuRih0BOAh4Cq/WKm4CZAVNo2pCzGSOqatelKEpEA+ucyIIrIzNmyJ0hMMXEg26wsHNhDwvMMyUnidC47Pf50YvX77xwcPubq2883ry7PFzcPfwswvGef/Fg/0mTs/Xqp59+7rM65P3byzwMqZ2F/bJnz3/4w7/41R/9vusbt+d33rj33nw2mzUHv//lP4wHj37pk3/+F5/6/Pfv//AjTy334829br+51h5cv6G9Nbz4ubu/HsLs1eHHt27fXLa33nj7+y8980kdn/6jt/8nx5la7yAkQEBoUJAkhEBOJuKuUYLBrGggIgUF4egRuCC6ujgwAKd0NJvd6Ps1JVcx0WxWF0nLOReB0Mwm8AvArCIhfS6D5LGMOfA8C2SJSNEgEIVArAA26iGnTlH64myA2axEH+/ewMEloycaEbEAB18yCIOwC8gIXhinfo0cQC0QBkZwYcbZrDs8OthszhF9yFmdNkNNLIq78cuuAakyYyIa88oUO172fQYi9TF1kblRBAWHolGo9RCcmRkD0JbxdcVQrLrI7nJBL/eV7g5QL9M4DYvMh2HI/TB1+g4mdVpl1bcLwZg5ELu7q9QwB1dxq8FjYcfnwVo0ITmwezRngwQeAKND8G3SDfBujTFiGIM4a4ptOz88WCz17PGj+/d+8NbjWTp8/UdvbS7s4Zk8Pt/0q4cdjKgqDpD2eHld036aP7138PRydrBsrnE3t9jeff6F0b03ocRKqqQAumW5GMDE1XJHdVbnncQcwNxVvMB/8MAtun21YdyhVdMxRayDVGaeEAMAIPRAzgGZvK4W5DEuB9MPf/y5d1//ym/91V/5nX/98v/89dXvvfL2//X/9m9+5sUXo59dDMezw7lhgKF0uobJPAArm89gG1yNE2qwzWSt/AIECgCw45vW7VtV3lcaANWwWZoIWRUeqF2tV9yMGJFFiphG4hSg65pBJKS29JvYzFVolJWDHJ/c6/bX6/7erTvPHNx+Fpr41tuv/dynPhGdS0bqnhB6inHYDCcQ4vnIr79T3r4HbXf35EQfP368d/2JleBLn/j5m9fv9uebJkUDC5wqHaDOexARHOvIwxAqrktETESITIQcCQMA1HwlgCrOCldTZ6dvECqpYbcp2f1qt+zVUa2Z7U791r9zh2hNPzea/p4Mk2Hr1DimaiZOV3dIVbVDZhYCMVLiwIA553o7Rg7o4Cr1RE+TXjRqmFOKjKSOaqog6BYhS8GghGWR0o1FQys7pNmNODs+efgnr375mQ895RK+9v0/3ts/vLF84ds/etn93oef/NnXX3vlnUc/eObpJ9958PrJ+ErTQUh3Rt30tlGH6/t3tI+5AHbXSXE+n5+thlfv/XDv9uxr3/nGvQc/fvHF248e21f+5B988rMvodz6d1/5R08/e4T69Ov3vsG54Kg4AhUO4AFQFEZzIUBTR5PAgFA8FEYmcwiJLIQxWQvuhkDMUVW1pOX8zrA+HUtGTFYlgU6AKjKUYqmZW6WKg6mjS86MjAje2xhS2FOMReeGjWsgsuBewEeTw3mrTGvDlAKM4yi5jesn9xerMQgtIoI6Ao7u6EDVsJw4EqKDEVFskg9EMRBBMa9Rc+Dxiet3z85/KCIiPIL3eZztMQcytdoHVqUJM8pEX3SOM7O02WTCBEBFV+28CVSg0vKqgR0QghI6mIuYmxOGyMGNzIQBQ0qSy1Xtxu7BgOZeQaHJOc8m9uLuT93VJ3eHqflDMHLwiaHhgbG4q9ZF1IEBnNSUKAAZoJk7gdtkRoU+jcodYGIoV4hStQB4cnh0/MraL24f3fj6l75+8uajD92+OZyNj8/7QdKTzx14Q2tqVz5beIdpD9LNLM4RUUts03wm1wTePHkUQ9N0S9UemcaSa6k1s+oFtn0Dl0cCL2FWw4oIAiDEesywitdpO0mrpgt4OV5DRAw0BQkhuF0CtGZGgdHQHRxcJ09YYkCXnGLbpGd/8KUv/YXf+syf/UufP191JeF/9jef+6VPPPWv/tnv/eznvkixzXLWYERrHQyqe++EWDoAMIJPRikTBGE4+ea4e+V/Vs4mIlZDXXAFI4MA0zSrfmpy2L1v2iqiHMxDCGbQNCkxpxSUmFKLqxU3fHExMlM/5KzmsPnQsx/+/d977e//D//d3/mv/tNbt49KBpH9Mp434Frk1IvTYgZ7mwcn+6mjOfnGhwtcHt249+j8uZc+s394t98M+7PlOvccJtN5RL4EwN0AnKUCKVYXLUUDYjMHA6qiPNBpW+LkutuqXjECez+64u47wtgHkHScDNTqdHra8tb1YLdaXK7xAMU8OzB6ipgQ1KfD6nU/hQygjkZEagJGzAxuRMHdFAECUGBEQQdSD2TBsZRcMmAUVZ1GV1D/LIDpvJ3/8NWfPvPyg/YavHn6RpY2tLycL3/8xh8tF9d/+Ze+eHp27/Hj7/7aL3/x7Hz1pW/9P3/2U5+DYfm9175996mjdsjLW3eMNptxwADf/MHvfPLjfwnoM+8++Enh3/30pz9tw/6pffuZ5158fHL8xV/5TH/evvr692/d3X/6mf/y1Ve+
e+P67Dd+/a+88/b92D3c21ucM4CjKkWKgVWB3AAUydwwUNCEMutpgUUD0ThYS4QshY2oQ03gKpJDdFM271I7y/0QmbEOpQEIxMFK9qZJQOww6TPFylCwYQbI5gPhAYe9onPHoEbECCqqKqox8TiCeULMQ/ATyp+ZY4LwsCQMbSQS8IAqviZfgAGiEyKYuhZ3RSZHtaojQwsxFXURW8z3jw4Xp+cDUdr0mlUjBwCpe2kiIhQiCITCQKhq6m4Co0UEddLgA8RFWxyDQTEzRwWouklyIFEzF7EK4dWb2+yDirurD9wy39GQzaE23Q6IKC4ETEToYCYOWqHMyYuD2WCiaDJRjayb2PGBxSd3DgcFtx2FA9DqSyKy13JUedjE7oKIYNwtZl//h/+2GU8e7NHy+t6nfuFmPgfEVbcIyudE1wmWgkk4cFxwPNAMxI2JEsNisVjurQ0KPVjfPNwjyeYWiQ2QGd3ddjQZNASqDojugBRrGbt6ZBDRd7aRONUAmHRe9W9qC2+wI5gy1M3Q1cNLhJV245X5BgiVZkgoDhDx1Bezpz7/O//qT+49Ov1f/+2//rk/9ev//p//t1/90v/3mU98eP/OEyAteIAIPfYtEGBNZ0FHQneocjcPU+2ukzwAIAOcTITqz6de3sncQl2PqnIMAbdlfKp0CODbDHUAwGp770SEXhwUQlQ1VCVuc15lGRMv1uv1Yv/gzbdOOMx+62//rbvP37w4f8yYiA+W16714+PSlG4TAGcP31sZhbVtusTmsNhvNxo//fO/fHTjmYvVwETiRsjo/D5+V+03CAkwmyLWBFxEdwJAN3bPjsxhV38DMTNrLkyEbgTVMGjyc2fmCkphuOzWAUD9slVHIjSvv7iKue++Xr1a6kOqPyuh4rQrdTD06I5u00+QPEQin+zGtmN6CMWiQgIgJCdX8IIwknfobBGDeqgB9koOwawYAoAU5OZwjMPYwMvnr2j28azMGpCC+RTad8lQHfi1738zMmwS/P6PXmkprB3uv2nNzB5lePDmu9KkmNqHF6f/5gd/P3QYO4kY77/2al7NUzw7eet75vGNV8b1iosN777Ngb7er/s3VtrgfH2BzRyGsqlwtCuAWsDgWixACERjvlARhAC2CHlOWjJnLNyfXLRdxMNZNgloTRfM3KwQdS5t0y5zHqpOwafbztT6cRybvUShcZ8MEVV1hOyYQAYOwb3E9tB17kAAFCKIlY2NmIhiGEeMoR3XPSvMpNxexKEsPbYBB4gduLFFRYg6EgVAV1VCREZzFjeMLCKBAxMBZg4BSRDy7SduXqzeVrVqQB/YsU4Yt4wuQmCEyB4i1QwTQ8hWSh73m/ZDd44ojxTQwEW1Jlx4NagGAELmEELwyRjL0aneubQ1D7jaaNQbpvqqJAi7ZoScd8dyYvJsrcwBubqAOU18wlruyaHGawDUYj6p9dARMSDRJFWzSek+dfE4wUcESJiYEbBdX/if+rP/eXOtAc7/+O/9vaMWTo4vmGaz1iKv7jfvyLB+9A6Xi359Y3V4cNbtXU+LW0oBSMq4vr63yOu3bx82qaOcR3AzICAai1H1wJxozoZold5X9xCXZX3asNC2FZtYjlfWxO0xnLwCsf7EzCbB0/vXzirQA8fq32I4bZdIvWXfGJ3m8y/82V/9+h99/Qd/9P/597d/+5/9/f/m+t7sIx///MHzLyElG0ZucLR1aBJqhRS2g1OvT+sKXLM4aKp1Vs0gwRBA0QhrZTcEwAARUQEJgQl8MhObyhtORbP2q2gIAmCVSA6TBiICBREhsJyLeW6a7vjx6dnFieF5v7ZnXrgx2zt8+623jw5vrPphI7DYv+EG64vhdHUuoqs+ZmG1xjw/8cTerSeOnnv2I22a50FTjFl6FWhia6Mj67TOmPmVMX4sgAha5zZTXiA7ORavI3rkaTWqD3G5eorr2ULEyhTA7SB0S/d1u7Lx2v0vuqWV7gCc3cKDTKaq4uQeDFqnVjFmg4bqDqiu/GZQQ/vMpI2hbWIk7HUQAhVHd4lU2ApYAA9AjBhcQ7EmJbQwbdkRyQHJGXF0dPeApAIxUDJAyynCAS4tArh48IKOAMFcEgXAjmNWLmVsYzJFjy0jGGoCErW4iBwIG+eArFyssWYDzWzAgYP1eeC9graHIH253x5cI4/nF8dx2QyqEBKru4OaFZUwWI4Y0Fg2mQ1TTMePVz9+9cen4UPXZ80C5amI9sZ7cO0g/MzeuutnDSNpTEk0mEMIoeW9C3pgIubo4ICVDVnGvEY4jKHr/RzcAFhcNLsDIxu5mWVsrlFqoeIIZGjKoxxC2vfmMQTscLwQU9onmTd5xCWFgGaKATxAZmgU0NSKV5YLBSdQw+LYqEVKqKiqk0sAuJa8XCyuLecPHp8zJ8ljolkAGN0B2d2ZuUuNgwBIE8nEIzCWQmLR4BMffuLBwx9uHo2ivTCKT7G/5saAI5oyWjF3LZV05DV8j65wut5H3kLEamDroFbj5beNku8saKoGl6i2OeIGROjsNqlYDQTUANBVtmYJtZySS22QoUa9AUClkFcV6G6N2Q4JpGRrfNWz3v3ok2/88OW/9//7x/celk9/+mPUnMH84PF6g8Pq7JVTeGY2X6Qf//jHty7u+c07R/AsxRmEfQNJTSjjah7l9pN756shizIHczRwigGRQGvwgiHw+6Sz2y6MYMfMr2ki1UBmogvubnWCSQSEVvMfgpmZWiUbImyfxbbMot0ZsIkwXg9J9hk6u+Zmb/7tH7761//aX/nNv/CFt+4/3FvOAzUb6wFWTReyDyGEmBOEuiVSmGB7RlcAQJ8o+eBkqLv3yYyq4KAVWlY1QA8hiCv5dr3fXhoAboiTdq7i85PozNwpBRIXRQYOpE6sBtBvNjn3qZm98fq7ucBicXsc+2y+evMdysv7765HONnfXwz5wTicXj9Yvndx9t7D44BzHeHo6Ch14cUPv/jk03ezuhVFVyRo26aYDf2QsNlV1ulypTqDV+W64wSsK69iJZqXqrxTQGARUbeJyuJTqjURuaPCxNrajU/gCruxXuq78wU6gW9EpNP1Ax/46ju6DoCaC6AiKNXZ1ISV1d9BxbnUYoyEgQgV3IHNjAzZIToGx9oOGAIS5IgK3g+WFhKUwF0MiMhTHQIT+mCemDAIxJAYE7qCm6PFJmoemTlxsGGktikKRQxj24+ZEAOpQ24IovPoOBSfz1PBdcMMfWm8hFgMhtguZCx78wDGHINYCbqHboF4MWuhqGMHYBIQERzRBEOgqMUAhJtYRi9jmaX4wtN3Q3tt/2DhuOkCrPcM05z2Z/Mm4Dj0m6Jmqe1UtZTMPGvbxbg6RWzdytQDkxXZiBwSJ/etOs1FiyMkZwMVDUVxBlxleGgoYH7quaOIxgjUeA6ig8k8DSHNSoyiIcbGFNHVSMl18IBmRBhYAyq4qZWIENwZGzECRyY0JzSiEEzx8GD/7Owsi7iWNjXoYPVOdYxEXUeORICEHsljHG9en43Z9xZPfPiFJ47PDbBLOGd0UCPAbQ4DVn55jKHv85izqnIT3BwEEcl
NPwDL4BYpNvCKP6pbtXygrfYa0KDSKiYTCEJXMCMwAvJKHDQkAEwsIu7OMexwSWYGLzAZy2yr24Rdt5XuSVT7aXNHIIqzRROGf/Tf/7333nxwuHfwxV/7wv7tp2OzR7DmBZ4dP7h47e2yPgtHex/60DP3Xr33/bPXfuH6M3D+OMTBQUwGJjg4Ojx79F7FkYaikxtP4DyWUB29YILdGWtDwLJbfq4w3KYyvvvXdi0EcERgQnXfUuC2qMxUFy4nrrgVLsKWLe5bYMeQHIpZv98a5PHl77z81/73f2Esem3/qPDxULDhYNIZEqGBU2Ib617KDWpKANiuCAEa1LAVZ0dGcHBxCDXgTQEJK0fTYAroMETyy2XnqiOuXjazgDgxBouoFQuri77twpAz6Hj86Lybd9/73ssU0kc/9plhkMPrRjGWO/mpJ176P/0f/o/uZ8++8NS1o+X8Wvf9l3/sj+PHn/t0aNq9o/2bT91YLBZl8M261CoI5KpGTmjQtslNvUZ+1Q0Fbg+deXAEmDp3dXSsNgJOFalCJApFqmTXkBSRwASprqlODCoO5sBbE4IrqAszu1xOUz5wv1z9yQ6fqbaawGRIGSyjRWQhSHUEtW32zQwrOxYxUCw5J08VJkJEUMtgQmiMSATuoI4EbIQBzZljoKDqog6OyFXIBksAGKSsswPT2goaEghkxy6AF0bOiierzYJTf2YFhggAQm0zt7LmAga4QYvqveCYzU827SKeZYnoiXk9aGhiGUpAgiEhukh2dkDPspHsXeqIKGJcrc4FAhG4uDuFaMGiDz6OYIrYUOhCaFJs9kPbrrw/H0407c8BsH/4KLVNaSh1bS5nQBAimCh4F2g+wjERo4uquBOiFxuHYaiM7/pQ1ZzdTZHEuEDK2dE7dGdCdCyIkwXTiJjbaDJyABMMIcf4rFIjovNlcFsTRI/M6oPIct7MW2rR5qRdsIBglleupeRiyXC2GvushdPSTazw3mI2n6WLswHBZimRT9Oe6hgQmJvoANA0cTZrzfX6PKCS2iywKlmILVhhcZfiaEZmFDBwKEZOK5EQQksEhKUUtxIxXcXKr16dPm2hptz3CkCoOrPXw2WEQIBbHwJEJEIEQweuij6DunoD82ozFBkR0c2rG9wshro21EpWL2AHcFeq1A4GAJFKmY7NfL58681XvvPVf3PraPnSr33O0xOE18ayXucHCdKD18+vHx49+9LHE47Hj+4/eHC+uHH0rW+9fufeepb6sT9/9tlnIZimUKA1cR0thMSqzOigpd80ITqY77bnlTtYo0ImHVnFiAygRlg4eLjSoFW5KSBSdQwOyAY+rc44CfR9UmhM/ieAgFPShZHXJwCdtC0gATYl3t6/881vfG/v4Pozz9/+yRuv77UdyYGQoZw3aaYK0Uk5rcoqUDdRfdR8glSAmciszkIRGZDREUDBg5qHkBBEVdEdA5pppRe70wRmogEQOKIBhorR+FTpkRjY0YnIVYEdiFcXQ5e6i3UfW3LH4+OTlNJif2+97tUKRDNPAejN115+6flnbhw+8ZWvfPcVefe94zd/6Ytf/JXf+kJgDIFcabMeTx9vgJwTsARjkGpuZMzmRQdK4MKERFC9jdwRCRmJJouY+vErygXgRkoGMInpqmueIyh4bYJ2aMzlnq1WafOr4Mzu664o1znKDquhLSEStr0/gqNZRecDYHJqFNJo2CKA86TSu1w/CEMIQUQAkruLKUGAugOzLTefCAgUrbgFIndSpBE1m5oBMbYhAmtRIEr1xjYVD9I0QNpIzIgEBhYKjLjHLUWxyEuIEksB5SzapDVbKg26uuJ8L9j5GE1mxJLmm3HNZgcHMykEKk0DIJhNIW58iNxqotSk4lnIlqabxSzltROgBQfkELHdlGMvFrlhU3Tq+7WtToO3PZZhM7ogwIoIPHBPzjFLbLilMW3oaM5Nyvki8BJoOVoPENmD2SigoBlH8QYB3Sw4GdAIFvv1yezgCfXVRjplCh6ICyOTeI8ZITddp22cZx573zhB0fn8KM/3KFyLwAslh7QhEYtlmH/2s3/6mbv9H/3bvx8kNt3B6fH45r2HgxTp9p8Abd/+yY2nbl175sl1Wp1rHAr1MJ/P9eiI3zvRZddxkA0EichDjzEAEzgk4jZFJxcXwWxAHDr0zaLZ0zNdNJj9IjM5GikGTNkLmbp7AfUUx5KzCoMjsYA6oqvt8JarYCIimgMiT3kzxEBuoCkyg6E5IqqZuLUx1Ag6yw4YFL1xQAcnhBhLkYiAamjR3Yp5CE3yZHZCGNwdKFBgMwWY2DjFPDFDKTXZI6W2C/jl3/ufLs7eeOL6hxaHz4jPx/U4jPeHzabpbF3KraOjp+7smVmgeCPeIbRn7j5JKXiyR8NZN+vWaJp9c37atq1qTB0PeRVCVkvuHQYTzu6dg1PdSYPWRtvB0TeIDYbGzQ3VXRmDKVYlF+6oNVTxdavJsNtmH+EKGltJ74YOAHW+amasiMxOpjAxstmdiKTXqP3BUfjDf/PlL/7qJ9bSp5SMDJkSouC8GuubA6o0aYFQcbbGWQ3EwQMm8GDYI0SYRsQFABAYPAYYXEZ0jtCYg/noCIpMrogMHhCnkL06QXCtQXDBaWtQU3WZNsYY0N2zrJntvM9ZdKXZFBQP9vc3o4u1Hlu34uViMb92lt945sXFzRuz3vZPz9NvfuSvfuazn3vvwcnQFzOlOtCMgDXsnsENmaJHU83IAIJBU311Q/NKNjIHZCA0rd+TATiZo6MaIpApMg1OADAMhViR2AECsBErah05161bXcTUkRhKKeDBzMwlhFAGVTdHRnfCKCUbAl5B8upCXneoRDSW0cAQp1muGpYgGptg6BzEzN2LY5EVu4BHHWQ2I2AG4OiOjsLOig6klKvNKrqjABMyiHH2U6In21B0UDbQ0ZU7DRoCSA9uozGkMcgsXCO/UCwpBABD5DKCuqYuiIDDIKlzoS7RACN7isUoSJstdUxOsYMQUmwjaJkxE0NAQioUKMSYvUQiyrMmoYMrA+JsFIltSZhUck3jncVWcgkbRnEE4LpeKRJ3HfMeLEJsU9BGBFSc2Aw8xC7SrGSnJlKwLA6uIaCjQwQTAyuOqsEMRSFn2yAyElupLMkE2FCAcRwLUwAECI7kOk2gtJh7E5QhM7VtvsgtyeAbTssmzUaX2WJ/U3omi3CNPXzsZ2/e8G++9s3XTh+s3n0XRE+RrWsPht6PQr59YzE8BslxwQM3luxixIuL4L10XfdExxuOGhGimptTiHVk7gYYeOpuMCC0ltGARjsd5Ww0nLVLj5FjcAQFVIcKPPqkQfXKsdt16HXPaO9rQ67sLus0l97XyQCAVx8CoGrAWwFeIqqeSI6gXkNhEczVoGkwq/RDXswac3A3dyUKbiE1XGRdsjZNqwWYImOlDbgGV08Hs73Tx2//3lf/1f5scfPJz8UWR1m//dNHR4d32jY99cLd2WzWNA2Aj2Ovqpi46+aMdPx4ffv2HTNNKbn78claihLR2A+hRdlIDDMn9qJuYk6EqUZ7uznWrTw4M5kZWe
vAmgUpuCNzo6LMQb2QXzLhcOrdK6ROu+YRrmzStyvnlc7QAWLdnEPwajHlhmDgoSGkdHrc//jl7/+t/+o/PTs7a2NyK+aKhMToun1ahKqT9epzBQQY0HwyY4KuOlIBgDtPJnqU3Rxq0QdFgGpIgegKhlAAtXa+DuA1qRLj9vMwgKHzjh/LnEJAcej7zVAyqDEzOBd1sVIUwBydHQFjUPDHx+cxzI7PNmL8a3/uN+Z717/5J9959Oj06OjaU0/dGcaenEIIDoQGIhmJ3nrzrWefezbEqEVSaMYxT9qrHZWFJjcMrN4OW2eZLQXejdjqPANYsjISGEQMttM7bW+EyiLbCg8n67HdwGkq30jqWodLgA4OZpN2ZNfp76ho9X9U011TLyJtSogOWAgZjMEjECMacPXUIhFhZgCToo1REAylwmbowR1QCHKl7SCJ6TAMmDgErO4jyDGEEJKJbMbRFWHQDbk0IWmpg2UsFocyrvLIzE0bN6NoVspotdfgEFATY28BzZ1YkcZBxpLdkWODvbUUGybJLgZF85DzPBFoSE07jjk25JQHQwE3JKtbSrYATTBHL2Ylo1oWFNFxHEGk6QDASjaiwA2XUqJqiBhDihSHzQoHW+41Mc3LuGFOsfQuWIDQPRh5dskaGyIK7qNp3R61bdue9cdNmoMzUSQid1AgJSpEjCGl5NAhs8AmhtALHl67Tc0Si0MdmSHDkD/6kTv7afX2vW+cPPIFHn38bpfasrc/JxDi0X2x3tj1T70gMFurhAEpljina+OjUW5Fn4U2JMLGvYc8E9JEIoZubWrmXdem6F5Eh7wuJg4xiSekDtX3Y0xr8dqN19JrvJWS465e70q5uRkabKHhXSW6ekVe4u9b0gQgitsHOJSI6KjFhMCNEIB2Rd/JDHTodX/JChkJ3VwdmIMUMO1SiLkfmxAYQyklMg6lD01adOF73/n9N1598+mnPvr626+vw2ln4fq1Jz5284W7d++qngL4epXHflTwpompiaWUzWYVEwdiWY+pCeYSY2MKs+WeA4zjOK7PA7WEQU2kWAyhFAkU1AXBiambtWa23mwqWtWmfdWiIGqladoi5khATl7jqC6PWC30Xskn9Y7fHvgJu/kAMuvgAIoA5uzANcPEoVbYnPOdm3f+4N9+8+7Tt+7evfvqq680gZgq7BN2HKdacwwUt6olRAZAR6+uCIBc6b7by8C8EowBKrIEKAAAHhEAXJEqZxMBKooEgOSOl+5pDoiE5pUEiaE1YgdUAweyOsoEBSUxy+bFBIEJTN0hpH7sn/7QCwhwenryhV99abMe9GJz/97xL/3KF175yet/8ic/vnHj9unx46z9k0/e2vTnT9+9s5hfe/nlt9+71//0p6/effqW27BcdIv5Nbhkr9Yd0sRJnwwVp2kIgjsAKZKDGgoajJs+MpPTxNndLox1NyaqVTGEiAjMgDKZGNJucOqoPuk/pm1ZlUrAhM5tT3FN7LPibvU+MzMwYAwGaCATP9VABQ1IUQGgbVPkQETibmYiDhZt6/WNleMKyO5kHprQXxSE2DRNZitFTL2iQ2Lq7lmKKoaAEUOXmEOz2WwYkYjVpeWIiE7YQcEQjMJmGNGQACIyqgGTmTEiRWQCAGgwBuRiMpu3JtYXTSkhGKofzK9JLk3EcTMkTj6moWRK4KRhIHcv6IgUgrNoNVWBGKMBmAUpwOCgLFpy1hAIGKSom/pozSJ1M2hTcBfXzThkCo7KbCT1XCCSIqqJZiqT3yxaAA/mMbb7OBwPCKCYiCKDFQNVBo0GaL3yTAyiFXIZRz48evLGtWvrrPNuHsmzcsn6xJ7g8Orp41UrNF+tGo/JJD9e5ZMH3XI+2IbWVqw5Y3F/L/BR0+qghSOXZXoSr71LHDgt4lCCu7lFJAIzd/Wcs4n2hACiVgowMaplNW66pRRrmogIKllECN2RAZQcEIAIuY6TKdCkOmRXICLzy8bkaqe5q0QTIO4oboa1Sd9Svswr9Qq3duFITsSAFWcBZkYGNRsHJQwmaw7RXQEYsS8Fu7RE9LaNRXqHCDEMtl4uDrWcfO+b/56kPVje/fLXvnv76Rc+9emfuX54NI79rIt/+JV//slPfvrBg/M7t58BWM3bxXvvPc6jNk1zdHRIrKKDOctoiLjq1wDhT77zY2R64YUXmGdmZjbGOCN0wNQ0msu6CY2ZqZavff2bh4fXX3zxxX4YY4wP33s0m8cYITmUMphBTLFoH6nbsQR36xxux9iwKzzTkdzu9yvGgbvaD2BOO8FhZSIhmpuBzBZ7X/qX/+43/uIX+9WqCSkQg8u0368QxGTUDuA6JZ9snxWdEAgJ3Qs4uQdEAMzVmg0s7bwfr87SwZCwqTNVd6/Qh0+22rBNkquNPNYFXNT6zTiQMyMzBiKf5hOoZlM7wTVLDMgAqTJiw+GNmxfr3lRSQ/P54gff/8mbb7x969atw8ODl1/+0fPPf+j119+6c+dOSu0ff+0bbu3jk3MVJOKfvPLan/uNX704HWHLcJ80wDjldNAVlou7V6mJuyNhIHD3fj3EUK/YCuVPzIoKePiWA4OIqNPzmNmUWHCF2Dqp3C9vk92eGHYt/PbdQahN/tbhwxDMoDoyubuA1EH9KON8PjfTgKG4A0BgRmRHNZwECehGCOzAaimlR/35ONZsKRQBVQ0hqI9mZmgxpsCt4WgmQGHQkSKZO3iJTSDVUlTVBzf3gsiUWimi6uDWdR1kcTMKIURyFXRvOBBy23HkeDGubXslLxezzWodGkWe6QYRUDUTAEpQCUOOrk4eNENoPYizIhQQcxUDM0O12GFgcMD5rCUMRXMk7tpGm3ZAd7W2TW1orGzMBpXR2Y1NgxRVRxAcC0GBkbQ6Sqo7u7sBclrGphmHAkgMofY7RuxghY2Sg5NCRgORBGbPv3gL2dAQmQzQy7igcnuPQz7XDDo2gbAA85xh0bqxt8s86MGTt60/JhDyFrqZhLNyQiYp92PXncxpmQKRawAAisaGXmASboCY69SrMCKThHkXoRmHs3uPHjzuPnowBmN3N0V0Qg+EIjA5Qhlddtm2Gw35++7tHU9795f1+8tbBSMnUKkWifUrY80XgkoMcDWabjQEE8MgIuv1JsbDyckALISghbomlLLi4KbKIRZVBDo8PHznjR9+52vfvXX4witvvbvK43/yn/+1u8/fPn3w3sXZGlTbprn/7moxP3fy85/+OHgYh3fvP3h87eDGer1+/gV85503jq4fjKtRpIx5/elPf3qzGR4drxeLxde//r2PvvTSm2++LiL7e0cnJ6cxhibRjScOT8/OX3jhha985Zs//wuf//a3vvu9773SdbPFYvHy66997nOfknFTZXAxxjHn1EaXy0N3ZRXE9x/OXel/f88Ol+AM1w6RSSstt5pVCczny7fffvfBgwef/8WfPz4+jrGpve/OyIEoTFQmcEQmR/BpAQHYlgGoFJr6E6tNPXgAJEBGFHdHj+4OKDg5cbk7UBUO12dwAmAEDRN4gZXW7USO1QO9oslgjlL/aeIkaA5A6OgMVreD5oYZI
JkWn+Q4lMf+4x/7yLsP3v3TX/x5FVctn/m5j+/v75+dvXf3qRsqcHT92vXr18/Ozp5//sXj48fPPf+hcTAHBUQEnoYCAARWaUBwZbndHXMCIwAjdYUhj8jglTdaZU3uhKQEYCgmrkYUAChrEb8ckFbl13QSycmpmmQS8ftZCQ5b1zDf2v9Wx8O6BKgqBwIn8AgeAAbzkXjit4VIIqIatncrWrGpkXIANEc3QEVTBofStm2TupWvHZwcRKwCD4hT6p4KSTaDcRwMjCtJzIiGPIo5EK/7MRSICUJEMQXA2ES1fH6+ImuHIcfosy4WKaAa2dCKc9CxX6aurNfBiBq82JwYm3k8v7iIabnOBuaeEUXJQ196zBwG1DUFUwBAJZCaBkhYD8aKIM46KzT2Bu5NtwgxFOe8GkKK6Dz2w4VZm3CxN8u2Rg+AAcFpMlNGMwNRUDQFM3A1B1Ukh6bplqf9SQwJQzIsbhIhqimOUgyCJ4PhYtAypufvdNeW41Bgr9lz8KLC3n/41uxa48OmmV1vHl/0J8oKLYNcv304XEgGHF1/7/d/9zd/839xsbkXC9EM1iXxAlWXXk7fGt8+gifnwS1gp7gJ5CqVr4FgSAFqn+ZoCk3C2aJdl/Xqom8ay6KRncUCx9pcqIu7AlaXUQN3nAo1mpmjq6qH97tqvF+IsSv3AdBt6tNDpNxPo9dJZ1RzK7TUTbFKBgQMXLEJUGCis7OLEJ4w8AABQF2NPKkWYHUk8vmwKQfX9kK0P/rDf/3uT0+ffvKlH/zk1Y9+6lO/+MXPnJ89ePjOu4kjuYaGVmenP/vJjw65fOu7P/riF3/lwbvvtG37+c999lvf+c7Pf+7nvvnNbwLQo4dnt5+4iWRme6vVikMahuGXf/mXHzy8/9VvfOPFF18M3Lz8ymsvfeRDq/Vjdf/J62/PUnznnXtFDZyWy/2Li4uf/OSVw2vXu/15yYbehMh5LOiUYgMKgFapjx9YGi+Z7FdAGHw/C+Py5wCA4DDd+QYOTqhmojePbv+D3/6nH/vE89cPjx49PCZsJjNaJKzzkpr+rAoAhAG8chmriwZtKwwYEKAhyuSSDbECwm4I025hK5IFA2cGqRPxGic3vaDbZBIB5OBok7gft+qzqoWtVosIAMBQ3aEdp04fDCtJtr5BdHRzR2Y20WFc3X3q5no1AMaU2qbp8th//OMfFR2l4J1bt4c8Hh4euNv1oyNELKVMO0qoH6ASkC7PwpWDTfVAMAI4EOGYZRwEGM2FCLxmKQIDkZtearaubGEBwFwCJwLc2qNOPb9Oh2H3l5eNUX0EJPHJG4Coktp9q3SleqYc1L1AVccRqCpvGcOiqqroZFvL1WoKUrXMBKaQ3QMBEwYzYaZAMSCZWV10pZgWSCGkGBNG7fvYJDNDcOSYNQfkxWwRcurzGtE8S4wRLDdIRHFwghDJ0Ys1SKlrUkpjP3CkNF8EZw5IkVabc47cpWgjszuZRmTLiGOgjC7eXOzljZ6fw7CGAExOaHlq2H30slmXs/Prj1RkY+u+wwZqkFtM8+XBtettaJO3QViqKM/GYlSCB4VIhGzqVdajBMU9mThUS3czUbdicT6/xg9PAFDE0NDdFVxcLBCYytALweiw1/EztzuVdYr7VPsXufjIk4vow4NHxs18vVodP9YQ2v60xNj4gtHWD09Pr998YvXw7f/Xf/e7/8Xf+Ssn734fBi9AbLmwLEIHcmawfiLiyjGTBoUBgRBCzVV0U1UK1KSQAhvCenN8uj47OfPrN5Yc03zRlDEThdrVqFe5ggugXF5sQESojORYVV1u/oFuczvx3zWku1IOAMRcrNT1o6ioGxJ51bTs+NpUjQSwulRGpkePHgE9ZwaIkdHUJAYzF2JScYDy5J0b995+/V/+i38S6KBN1+4/Xv/lv/of7x/hvXuvNmGZODpuOAQzjmFx7QC6RXruQ0+99+Dk7p0nulnjPn74hduB+l/4+U+cnZ533bx6YImOgVhVfvmXPo9Yzk4fPn3r1u2jo3/5u//q85///LX9xbK1rpt/++EPn3n++etP3Fgu59/59jdu3rz5oWc/cvvW9RAChWrsi1I8hCBS6sFhckSe/MRqY+7gbsC03adOBWJXcj5wkGsRmv7AHMEDslcVfEwxdl/74z/5G7/1lx8+fNg0jakHZvNa1sls0ikgbbcF0zeVdTPxXLaUfLhk5Vcu/8RxpC2rHcAJ0ADVsTof1xViIv8DQnXL4Yn2U0E5BERGt2pzj4RUj8NEpzEEQjJwMKvvzhAI0ziOKTVFxB0ciAMh4ubCEAIHVO1VNYSk6q6RWEopaEFz9W4Pbsp01UF9e6wd3CZ7nWo3UjdSVfcLCFWwdrEZ133pFsFNCbyYgQO6gbOaArF73ZayE5p7DYk1s4DbBtysYmK+7dbNrEocYDuvmmwDKpdWp/6dCM3V6g3i7lCc0KvllyIAglkppfIYIgfwqGZEXO9bBzSoWQHbLk2JA/Tr1ekp5H6UVC0/yEFdzd0jB4Gs2WecyNXVETlnYUYCRs3RuVECteNViR03s84oE1FiMlUpEElTjDlnLNrMUhMJSWLyWYfjZp0xQuSh5BDbNiYbhXVzEPZWZxIkSJ+Dsm7a80dy/CBJsdyXQDFU5XoARGMydafghByCgeTCoKil5h+4ZQYFsxiDYGGXGDo3pH7sUrsJJ5mKkSqIgQlLIRUYWNutWQ+5i6qqYLe/DyEWw6LeEMcYUGPAZn/WhWv9eHFhtJhzd/smiq0DXOs4FgTM5UM3uiWu7z9cPx4ic37qaG9V7ifXdt7EUMwweLffJLvAv/iXf+H//H/5Z6+/9nMv3L3x6OwE0XhcIGUEu3lwIyhoq2WjIxgEAmVAUjdUR4TIFAMGFDLr3zvZHI83FnFP7ECVHg06BJuUqAhMCARqFadzZChTytcHOostbLmty//B3K/+ZpJfq2GoKJYjgjgUMGc0BK6eqI6BCRHUiiEGbhgDABwfHzOzAVXpCyMDINFMizctHex3X/mD3/uDL331xQ99bIT5zaef+MVf+tR6dbp6DB01iKYQEZfFhNBE8nK2l4ccAz57+wl3FxFE3p/fGsfRUe/cPBSRosRM7pWyFlRtuNgc7R0u717vh/Nf+TOf2987HMdx3i3c/cMfeW6xnJ0cP2qb9Jmf+6RqKbm/desAwDQXYrQKtrrEhGJqWk0WFYFrr0dYvUzQQbfBJh8s5h84sPV7hWqxgASIQMUdA3LTvvHaW+vzzWd+7uNvv/1uCCnGRkofU8hFkJEQXbW2jHVpmWr2ZDOwZWcigis4IUZ3nWanyAgRcARIAIBU3B0wIqBDQUR38slnHByswjNTzQRz8GoLCVMKUfW+d2Rwd6qWB2ikbk7AwV3RnAkdSQBEKaZOJBNRjadQdzcJ6MAkJSNTTEEkB2Ynt2rQxFJ9SRCQiOt+xbfzTNjW8QqsXz3000I4qTopRT47GTd9Xh5EV3XzgESIZqYmqkZITsa1uUQQEa11F6dx6LRVJdqCY9P8vE5cYTtERcTa7UjRamVQ884Mp3ErTdoxAwwACB7r
DAMR521nJjFxkWn3sNkMB5aSMRu4m4BXHA/I52Dz+Xw2W7j4CHnd583FRiQys6sDUB6l78c0juTKbolnRcp61NHWakbA6I4GyoOhAYwaPKs5sRQF4JIDmOReuq5Bble5Vy2pjafnxYsHd5ci6mneXVxkdYuyZwPaptNNHE7bB28Pw0oIFmf9+YJxX+3Zg1kQQsPGMWtQdzDh0l1b+cHYXQ/N88OwiakFwq7rVDIcLPc6NR8JLauNmhmJQrceT7RYw23WTSZVVVIEZTUarUTCQczA0DlZ42oj7s3aG4/ON0t1MF3EGJs0W3R9efhenJ3Qam/RkIxMNOqsnUeBDcD87uGmi8Mb51HStZtLhjJerG22twRjhrCYxfle+/D0+Fxz112/Mf/wp//UN/6H//53/nf/9X+JeEq0/yhfzKxb6ZrHOfN6PsMmJuY4yhiodSGOVCc9yGpNC4Rysd5b3FjeJEGRUY5uzeloHVsX8RgSKkZxMSdFUwwegrgBuxpgMMqGnLAtIAWMeNIcXe0v61cRiTEiOTCbiyEUsDbwKXh1DWzIbdS2mQU8HUOXNxfXmn2qK6bENrQiK2vC3jKePzo2mYekDKMIcJq5qqFBysu9xf/42//we9/64d1nPrzx7lf+o888efvW2emJiLZdKqWAaiBXj8QT1Sy7YAzFPQ+jE9Wwu5KFgA1g3RcgDDBKmQVAoKxVaOq+3D9UG1I3j7zMZYiBEJqsuthvRCWkIKal19ohDaXSSALpZOqFwGbA6BTBdAIGDJEAFKZxF1Q0d4vnqhkjuatXFAR3+nVzAEePPhPsBYUsugoHGHO+defm/+Mf/KNP/9yLKS4reVFtg4xSfR0q3EEM5ggYicAIUK1OYgAJzd0U6htJPpm7OcD06kjKhu55W6ABQc3ALVrMQE6ml9MDqhz5bRITgaEhsDsxcQXSq4N9pb8CAlFwBWYz7wHZA6kbIpIAJh/HdYydG5mPMTb9xrq2FRwAHENwdzUn5C12oXWDAMoOwcABzGnSvkGNI3c1NTckiIhsbgamVZoNTgABSby3Piz25i8/PlYbGuCirkyGWRXIsUGyAAOBus8RCCEhmziYY+BByl7b5OICqOBcHYDUmAmRRGQ3V4fqPemeOBGTcB9CdK3PE1xDbbnBCKwDFxONqBEIfHBAIsiaKTSiaqBmRKCRcRVGIE2B0T0JkHlgSOqhDf0o1vcasqmIIhVOFNaWDwIJDoY4b9q9uXsG7E1jf33v8PHJBXMqMKAKmTuEtp1DEkqgaI4aELgJRtF7oNzv7ZGyFRgp+HK+KFkFx739fVVd9zmUKCJaFMb5cKG2Ij0Lx+/Ret3kMUUrB7F/ac6dQyyczzchiaC71JE0kANKAR1MbOCczYQccs6y2SAolh5mQWFUgBTbNjWgSg4z5wsUd0NkhGCmZgCm7sUsTDCnu6qLgCiAczcHW62GvG7K8taNbp50tTlfr52wI1QHwCaZxjZZxFE3zbNhdZhnzRg3b90v/bGGtLw262V9QO0oao7Ktn6AS7duyP0b9wvHj4XmXbn/4KdvL47fYgu3b1wrthFgpxXyiILBYzJUiCjSJ+2QyMGtLJu4H5wD+PUmOIsLu1vi63NqUCMVs4LYqUsVdxgaEdWITLfJYKRt27EQABQprIFjNfySKx1Qxdwv23Z3p7pHFQ0xMHFlvO3CV4nIZUXBKLDk0jiFGMZxJA5UtEE+fvhoHLIDCTpGFjSjQuiHs/b//X//b47vr5+8/uRskf7SX/tNGy9OTo+1SNs2ogKIMUQTiQQwacGVCBHQXMyNkSsGgQBISK5ACOAFIkQdXCLEUnLbsIuqbJKX9ZDny30tdnZ2srh+Exk8bxgTAG0j1ujyaKBhlcRsH3W+FqECz85bN12k6pdoNd6HwcGsTjARDaCyWWxHja+Hl0DRPHIwhxST5AEd2cK3v/a1/+1//b85eXxcSSg4YS6mSAAEaA5OE7TugLYFsStCQeiT8Rza1NozbOFdc3Czij9MLSk70JS1DgQVLacd+btah9fLggAQQSuWbQa0TV4knKq/V+kNZUBmTqUUd2dK7uZhsGHWpkORAcAZO8klJQPqoVRT7kvvnekQAU+0HZ9se6v7kFbzdrTavVchMAAVH+qEFKlKDxzQjdxhoeDO6dHxMTJAMikyijHN3L0Gx0guEAmIC0JHlLNUhappIcQmpWEY3N0NnbYZeFZHxRNh7Orm7AOe71uq6nRdkTmZkjomBIcilsGUOWdpmoamTNCC3KgbQqAs0IjVPRC5oRmgOeioyWKi0DLmwWJH7QFgpw0HMDCFRKnlaOXCjNq2EZeT4wuAWPoSu47RU0in5xcNOocQmERVFEAUkI1oiKWJ1KSkhmKeEGGUCAFo/6JYUvSR+8I0QlrPyrrrH8Lp47w+A1FbhOEm5hsBD0Pb2wapxYQIEpQMI7qAqLJhpEgQVAA6DQFRDLDs7bVtDAg2m3UbuQgh4OgAHtoYYgzJN9lJKveMaMrKkckZ1Xxr8wYqloukDKYxNO1Ff5w0xhGaWff4+J0+e+bZOPbzWYohZPOQvA3RV5trXbtYwXK+5+cPu7fe0vvvwfy63747nzHTQ3ONXVsUVg9Xy9m8cz15722X2YeOnnj6+vrv/rd/93/16z8f9MyCMXeyaImdUigqguImyBGDdSYMjiCHy+7J/eU+upVxNM+8Jgud06aUmfu4cfQ2OilDtWtVcEFXV1VXhaoUB/FqDw0AAZiITWHn5451RHjlGjUzrPI7RDMQkbaZ0nsjcfFSTWCYOSK7Q7EIgOaDW7FA0HApm8Dx9OL05GITm8azBCZXTwRmuulzmHUQ9SOf+Njnv/iZ1fo9BHaRFIK7l6LMLCIBUFyxygjBHQpOW1pXLNNwqmIUu0GfM5QcEw+aU4xoqBAkuNB8uWxOHz38d1/+d1/81T8DlfFA0aHA1AlOw8RpWEruW37Irry7OxKD22WWJrlXABtCTUNHpMkVGIAgbPnUW9ro5ECAxUdAUHNALJox0l639/3v/QjJPvrxl9566w0KnFVDCHUMThM5HRAJ0AGtzuF4kip53XdViJwMFAsSg4PARNBzr7EkVXdF7pMPWsXWQaF6PwKAw6TIr0v8dFVY5ZJvR/G0LehWczx2B89N1B1rMoaKqyFxx9H6YQhJiUVyZAohwjAMTMHpCphFCFPakQNidfNC8q3bPjHqtMX0q+cBKve3EkXMQQGwGj04O5eW47033iIoqBzIHJigUCQA0FIigo3CAiE2BJyrtYa5uSE6M176DDvtECHfOgfAZOm8++elONnAibb8d3d30ACIlVQ2hb6yUzAAg8jsKqLm7m42jiXktLRIVQbBAVDrOKVC6Ck1MViXcMhpY1mtpHbmch6QmFlKkb6UVRZCTCxiuRiIE4dhPcy7ZrW+GHI+LYUYUMElQLFASOS5lBgCIORiRdHMmqZbbYZixmBoeSyBZc7roj0/Pg7vvrbK/djE/SZ1B8NwexxvIDCF04thPk9WgN1jimFEAXRHC8xEKOa95w3KIiARZTfP2jRdMTdTGrM1s1FUi2n
WMp47QrtH3M3cj7fiHausBIQAPvFV63TEIJiGXDAXTrTHfHbRnzbLo4ePz8fRMLWPLs7EbZ5AR+VmxkxexhuL5uYR7919rsz55HS05c8c8ie9aS5ymc0WbaKEWqwEJ77QMyFHaz7yQt7ojVvd7bT67uO37n/8k8/eXUB/rllIUAmQZBxW5syBAV2KdIiZYUZ+mGg8P31rXI+oPXIz6Ohs5BstT7XrwaCbJfO1exKZZqQMqFDN+mDQAgAiknMWgYZiCOwVlN8+rnBmdk5Y7/ttnVJUTlhDsTq+ujszxxj7Yv2QD/ea524vPV+EFDW4lBSofbTMPpzfuLEcV2cNhSl0u5kVKn/17/yt9Zkczvf6/iQFQABuOjMr6imlGKPlEjkgaqREVO9uR0TkiIhh2xjVnD/bvm1GAE1K2ib2Qo5xb9ZokdXwaPNo/OPf//Lnf/FX7tx+5vzxezGaum6Dn3RXTmoQxDRUnNiEU+NVx5G7uxe21QTQqC4PiECIZldG05X6v9sT1FIKpjnEzhQRUXQE18P95d//8v/467/2RdOBA4TAokAISIju4ZK96o51VTY3NARwr2ZkCFZ584Qgk3VKpawAT/FwuFVg8dVZCwBMgsuJmuOwa+NpujwMlaoEFFjVOJIZivvkgA9eSfHFOBBLccKAnAk2HKNqFNWQEBE1z5gYaNiMq8R76gK+tWSc1tD6hthBq9++uddlyeoIFMANHKemuJLZQdmngSO518WeDJygqA3gfPro4triUGV0g8Rz13MCETc1bNJsvFijg4epVTAztcKIAQnUdjaQcIVUdqWFv5yR485QqE4ldAo6rmUHnIQQEYK7uTOHaejiQBiIIARChxhjFgdHKQ5K4K5qqCDkBAiGkQO2PNhGbM7MpgUKBEtpS9lCxDLmhveuLZcrFQcMLaMixyiA82ZGqABw0LaLFDBaKUWVAqcUCZlWmx4QYgpV2UambUL3SM6QxXNXbF5ygFM+f5fOHodZQWiXS9C7snmGkQBOoeSALeCgSKUIIg8xREct6kUCEgKpKyJ0i4a4MYTYzNx9nQ0AUmry6E1ZS9G2bVMCQgWPwQAzmjIiOqiDOGQzMYgAwV2rBkGdENgAi4IotrzfxjfELs4fp4u43N+f3X/82KCJAUouqUm5ZMh48yjeutYy7GVaeDkvMpwO4zhbei9ZRFzW5+ujg64JWoriKATdYLjKQ8ry+Hj+7NMf2W9/8vu/8+/+9t/4jdXZSR+XIBcOXdOZWAFo0BHJCFicNjZcO2q9sfsnp6RIEADbja/ZiAftIJ0/OGFvZ7GNhaQLNeMxYChg6EqAjEykZlbEmCPAWOu2qsZ4JTDerxQOh6vX8SWMQBRjLHkaH1WtMxEbinhRy/PUfPqlZ3k4MZOKWQTu3jsYqD+7ffTkOY+Rm+IWrQNszDMJHO3N8tA3qfG0HG0wM3XixGKGiE3TMFIHYRtN54CTdbABTtmtV6iGCAQAbIJtl4N61r3ZAtr18YMfh3f67tHbb63x5z/zhY+/9LHV5tFTT+w1XVpfrByiWd1EX6ZATKvGleVtV5p31HXfMiC3h8undv1KIg8hOjT1abd/u107vUFoAINZQQrjOO43kfP6L//GX0omt/f3zDGEkMvAzGBaW/56smpweWX0qhuAAQdmJIJAxEgMiF4J7DV8o5YYd9cCNXgSwLdqhmq/ZepI4GgTqd3Iya/Y0ZvZ5JsCqKpcs1m3zG4zq032xjJCdneuE1ML7mRMDhJCGkfgmQH27tjxEsnc2HECxAwnJNDdxRAACdBMAJCgzhSxEJjZdCYmPZGZTalJvvsPadIvslkTLgZ/9/5J4JCtB8JS+sCt6uCIGJrRIINZ9JFkhpXxDbVwV6+h+gGvDlfrGfetbzAiEm0HqhPshljtD6i+sek5WT0gsE1QXUG3QE41+TKGSO6m6gBIgcGDkjqaWHELIODogdiF+j4CYcmUrTBjasCMKAoxjyqGYI4xxiamjSoCacX3zBR0OZuXnGfz6MQOyCE4mDpY1UKwSYCsHsXZPcaYmK0faXQ2xvW8mM/7xVs/2rz5xvra/LYWOM+nd8RfAn4K3FhOWTuILVLGDWlD7GME34wBJ09wElMWS0KQIWRY4mmo9qwIdW7f0QQ4ZKLYJW0HJ43UsquUgWhrf1RNQtHc0IHd5fL2QKotjIFLg5loM5T9jh6cPzxdC4cOVNhQsTGPwfun9pon5o2VLtOypDwHw6Iz7mzDjLjXxFIG85l4dB3U3ZlkFM3ada00vrlYPfnUnZ/91FPf/uPHZ+cATTOuT9iBeRYpCLNpUAdwQQ4B017Jz6Z2/fj+uz+91/F+7ss5phEtGaexGNIzHw6F4yYVHTfXwl7tGkwIwcBqVAAQESCP4yalhJinrBm6TJDY9uy7vukK1d2QAu5qSkphHIqZEYO4FVOKgWtPyHy2yS+/9QDzuYhB9IaBqXl8OtKP3/jIZz5xct4HRic3GNrIWgpCMTOKnkdRS7EWIHOeQsUdmA2smEIhBiQGosnQzCY7lffz4ryqB7Ex0ZZimh0/evPVb//ufhr0QdkPe8cXs7t/+oXHvgq0fuON+y//5N4zzz938+i6mZkLTG3vjope/MoiB1uUeqsS2vVruyjaWtDRCavD2m5pxLoP3/XIFdlHMy2ICuzjOFy7dvSt730fyK/feuLevfcoBAAyBTGrChrB4oa1LzETn4LygrszVmkw2dZs3mvYYzW2BfAp2sLcnYlh26dX4mqFYjDEqRgSVL1nlThvlzetB6cuBLzbCNTdElPY8nSauBTJgVyLIKRIs1xWwJtILWHUeQtoogQw00zIAxFXcKUi+9NRMq/2vDWIFAC2TCQUUKthCY5VxG9Wq6HV6r+txS7uap5BZotZ3vTR1kcHHTWWGtZc1Bhd226xys2b7xy7SYghhgCE4zgaAgbOfW7btAtc3WrxrlJoJmNU3z5g29q7T1mVdaXdLf/ZLFafHwAw0yLgSlu1lNUPIQphUvwiMIX6eqCO1VPLDNggsRIF5qhlzAbiScAKSH2rDj6UvM5Dzjmg9RlZSMZMXVytVjE4xwAAOeca02aO4IpCrlakmHZ5KMwOLSHR5ny0MYq4DKLn/Pi1R/feKjeeuK2bdbMablHzM7bGNp4jshGNDAAh+j4vh5BdaR44pjFkU0auPF53cKGxHy9WG9QZkYpkAAghOfrYj+oSwQWaBN5gopbGERmUGAEqja0CoOygDlpr/fY+BFMwcDc1k8JDMwunj/teT/rBZs0yKXPI/dg2XZDN5rkn2jsH6fS4D+1ht09pRBtXeXPOM+43GQuCNjHGWYuBCgcUl1x689g05LpRCA3raj1/4aVPfesr/+LN11+5+zO38KwY+Zh73wRVhIAphIKKpqMMi8hdQ2+dnr53Pi5YW9HEbm5j65tl02/sbpw3peksRd7LUupJcpPaySADmIupmUmxKg4QEAeF6VbxXdmq39cd5e5XisDO9V4RySkl942q4jZXI3AKloKPbjwof+PH7wRU4mhsKrmhZsz2sLz1qys86SVRdtY2hPOzx00TiT
2PpZulIhIpFPOEaCoAEJggkBTNqhwDAIpX8ZBiTckE7Pyy8k7+HUDuWAJFGW2MJxfvvvXv/8UN9ncfFIrt6Wpz4dB0/qPvfe/s4YOIsxtP3m2Wi+PNGgC2tua2K+40tee7XkyxOgTY5d+AUx1FXi4JeJnDNz0Dyq4IbpdMAAAHIWzcPSQ6X4+Lo9k//J3/+e7dm483+eFq0zQNAVVBuWiBLZiOXqFzm9YVBwxsagQOIIbgAOhEAIbb9/kBeqsZVfipnmiAOk+eRshTOIlVUanhtBTVjw+XOxgs2+S5iuNN2wVEtROmFiAQOOHK/Z7jw5Lf25xrkdXR9Wfm6YWSZ2ZDCEFKpJDdatorKWyNJ2EytwBE8lBfov6uBa6QB2whsmk1U3NCrTZiCmZm7gqeyya2i0frzS98/O7NG2mQ85hCClAwYlkv5kff+cnZ/XsnRtEVosdiOkqNF0YAiLERN9mtmVeWtCvr/SXIvt3VKflu5WPES4/8OkEyIHAEVDAh0+iBiFIMzCgGMTbVlwYAVBUM3dCRAczQnUTMkH2+1xTtaQGsXT6/2KyHwPuIG2J0cQBzJmWMbWODuFDAlGYtz9m4Z7K2bRyI22A6knvrBE5Y9xCRnTQ6NW0U4FzEMQXoSBf7A91/K54+7G8+fYib830KB4nvWD9rZ4OKilPA2AR3LVJGMHBvB58X3oCFhoOMGxsLQ0AOJcZhNsvN3kW7ZyaIHmPszZijtnG9Xi/mSTOvB2tOx6ODZpYaQhstmxdEc3dTNGczEDMF3V4kYGZ1ro0q4jIb2ijYOOSVtctlEUbAQI23UcrmpVvLm7Nw8nDVQzOjjUlaLkgi7Nky9zCbt8gZSZCwYzUwKeoFY5qBRUegwI21pO9drOTw4JknX7j2xz/4YXNncc1jJkcQAHIjtAwqATkSiMJsngICjdK2BwO37AxqixB66UOBObZ5fQFRuc1wsTHfqw4w4g5gNSove92NMcUA7LXhYuZiyildRV2uth6XNwxAzU52Q1BLKe22pdt2Hg0tRJRh1e09oTIwRS+oG8HYETeJ/d477w2bPgQODBmK6NT3afYYFjX/z1EIWwOsFMxcshu7YQhBVKmOEb2CrwYUiHh0qxl5sFUVIpI7pixKsNy/9r1/8o+fwlUvt/7RV+89/eEnf3Hv2q0je/UP/+7xa4/nh3c/9uufG4jDWsbKf+A698Sp2lGAKb16eurJFbhiZrs6TljNfAEAeNqt+9aP0KHKOgGumBNUQN8VgJg4lDH3OgTu1mv93nd/8jf/xt94fLJCbsGDubpXG4lkvpWOgoFDVS7VkpqduQ7aJkEPTEwZw4pZ7c5onQoYBK2fbbtCONQ5oU7stC1z3BEcqJYlB5osy7cW5DtmkddV1w0rrBwSgKn2R4eLhw/f+ObX//X64sKVjx/MQqOLo7c//JE3P/LhL6xWLRG7G0E2JPBw6apfzRUsV0Ok6fBWE7OJpDS9NPl2JUBGASd0rg75wIg1KD5AO2vmY3++XKS9RZrZvjuijhaZjCLyo0fHo7gTmSlwmPZJFalDDCHkUab3pApbTR9sscoKywBA5RcgeYWpdvcR1bz6erjdO4+sjmiMwYkEXEALIRRrm9C27cnFeUxsZsx1UzLZ1BAwoFAAiI7owmjcrMbT/RCJQgyQuDRkqnWBA2ZW9EHHwImZ0R3JmT1GVAbTPIzKIQpRsTEREjVorqomHigaWXRI2IybkgdvbZFPuX8sP/qJDyLdLMg7J08112+SzaBf7s03FtJgcxezvEbvzSyLt+1eiNdab5G/nz0UNxUhZYrYjxfI18ZiKg54QiERt16ECUQ3rc9njOhxzOvAad3zUMbF3Badp2bp/hCNzDKhQSkupeE5+QDoBqpSbzIYlFpCKg12G7VgNDiPeUiLWSIKgmOw8rG9a08mX/ePOm/304IIm6BxKKQUY9vDOQQFwiamYbUeOTgwuDUNwYWWVSYCZHA4RvRsF+1p+/nnXvjSl7997zu3bn30TosFE/VZCMXAA6CqATceyhG5F7kwWeI6IhpHZZ4xczNnwHly0r5gDGG2ce3sQmUjxhaCFQNr2CSBr8waBHVRMsYAAhDYhwI7+tqkKqUdGANwiSQC1lhv01wWy2Xt2cETqJkXokVIAUvQYsy8GVBZIzGFxhRW5Sw26WJ1fu/d4c4z3Xi2bmg50ojoYMpEiKMbAiR0cBAnysxgbs7VK8TdFRAJCKfEAnMDK+iCYJGaMQs6pBDMLWufUhKxOI+vffurfvoAP7SH0n/mrj/RCpaHyxLKOTz/gp6ufnJ87048+tnRCnncCoJ4qo9uZkq1wZ2w8sqIUHerA9SpgtSMNEYHAHFAVHCjSn6vkkIUF5wADgQAA0N0jCjauGakgL65cXTtD/7t127fvnnzzv6rr70bqtQQKqtRJh3sbuSI1f2FgKCYkrluZTV1IrzFCOrsDQCcHQjZEQSdraaJVPFTHe86gE6dKu6WLkYkA89sbMiKKBYQrcpZ3ZGp+v67k6maay3AyF5O8J//8395/+Jkf39/8xCRbzTX+am7N84v6O23333t1Z+eXcw++bO/kqtLXkQvyQWRNwqF6EAyhiBkjZqLTxIlAEAnRBxtrH2wex201gujOCd3dxUiqkGDrsDMo/fXm/T7f/yd196+/+YJJRbUbpQLQxoF1/n8dJ1TwNVmbOcLDyBrNEVXJ3ID6bp4drYOSFVpWknAO1Rq1w5dDldryCIwM4AaOmAMxTQopUgONFoGsMixCDUpeIZInWtpQzfkDMUR0XxMIYp6UMMebQAhpKa0S8AWvECCGUoeN8azzpUQsZ0DhKiUKYKZcJplDxcXPjc7PV3Pulk7Ey05UJQSxkE8sLq0wINftIYgDMnPZaOArDAHvjjV8zR2Aco5uTVnp3L6Gj5+ywzb1PKylKcbvhkuipiEdpPZNusl2LX92Wg59XLIXTmgdel7W4XF4gfn+XWKwaQPHXlvRSHgTHIbeaHGFj0xqg69acDQxWQyClvoNy0oWU6LbrZMpQwxiUF2R6jGPY4IXCXXiICSg1Ii6F3MgJkNfCjjHjqSa2lSR4jqCg7ZB3z+Jh9if/rmw6aJoUi/0cPbT+N6/ZgfmuaYsCgIA7L3lBfzBeXeIwnwkIfzPsvgiRkRyaJDVpZx2HTzg9Tge++989PZ4vbzB6Z9m5ph6FNcmLOxKQwOlNpGfbh15+bhYYy8cGJEzD56jOQUszy+WEPRxhg8hVSLMmjxyq12EnMNmKRI17Qu5xVw5xDF+t2w6CqeqLpVwOwq/RXlKiIy887n2opUeV7t5lS1TY15cXdVAecYmxiDGbzzzjsfevHu4CsD48pD3mLW7gYTm9nADFWr5BN3jEwmB6vkHvfKJa/2HihljE3rauNQYoxMsYhl0EVcPnj7R9cPgxWnbJ/98JPD4w0dzefX8WJcZ4emS5uLx4fXckNJ0AG31O6pkFTlGGzdbnW6gXe1dYo8BUPYmjoB1Pddu3WfiBu1QQeYElN9+kUlGlaFEWvGbrb/pS996Vf+zBfOzzYf2Et9YCO1hWrrM6DDZ
BmAO5Kl7lbricBBjuBuE5oF5oJThuMlqHHlJaZdGVD1TjRGI6vrRZWewCSJUqu0kh0iMX2+bHFJf/Pv/Cc/+dZrX/nq18sirs7z/GLxtZ9+6xe+8GdOTh7v7z3xh//mx88//zNtO+tXUF0uEYWpcVvk3DctjIPxlqVTb1ufthoAhJWaY37FFdIITKeD4V5tlAxIzceMY05f+/pro/SnF2SqDiMwuZwkTiqkBQQQAplZK57dVZWZ1JS2etRyJZNydy/URwi8PbGXP3R3I3ICZXQmJDf24o7mB94kLwGcScyLcysGyElNQgjuHkIapa+0HQ4BnNVVytjMAhiVNcy7RD7UqRMzM4HkAR0QjJlFLIRU+mymjDBPraRShrHbbw4O5pvNJjQxUBpl3GtaVW3gUEVCEyB6NyoCQYz9xaAc42Y2OqHQ7Hz28I18fmKzvaWZfk7wAGyl5uJFeo2USuMt50FsyKMO2XHuNhtgblTC4rUz+GoenmuPQqJw3p8MPna8kNHMSLOKClu1DbA2zMBD0IQqbYubIsghhDSI98crLUPXdL716qxO0/XEIKIDeKBcVN1NHc1RHYsGdYGiRgYIHtAdXFDDYtYePXFwdnLs+/s9BXaQ5cHQhP2jvYjd6uJkvTkvmoESAFtWyWlxgFB5xxTiPEKThGwEAU6sSdyyxEU754M4jKc/vvdOc/taYqnnSRSDo6MhScImsIvIe4/O+j6prYexAEDjIYOp6sGyG2zlwSj5qBvvOykaCC1YsWJObogQ62WXAjHjWIYUmuAQ6DJrkIjctYYiUd2cXrmCfWtul0tRt7Zt1+t1JZaWUgAgpUREAKXv+66LXoAImVgdiMgUYoyvvPLKr//5j5/AI3NBIySftDBe7d8dai9cecmTM7YzIjNbtRh3J0Aimsbp7kRBzUoWpkpHw1LM3NN8sXn8YHzvUXet2zy4CM3iIgssMEEeRwgRMUZGsxTbRVqf9SHEas2EW3gHKhyDl0SR+ooViDHQSoS/sv5NxHG4tBPzy2/tEvvaVecarmmAYiWm7tGDi/feu/9Lv/SFs7Mz/g+MCqbTBDuO+WXa9XTUtg+AuqwjOmglmWxNEiZhLCJapWxewjX1XU0/8d25mN5wqKvcNIyZlEaIMAVAT2EldVqDAFCyJOJHj46f+ujNXzt48uFPv/vaa/D9t+BP/ek/pwYXm3NwmHX7v/3bv/1bf+u/iM1SbUAg8LbvN6EdQyQpxBzdZDpouwM5vWWqatVqaeY2rZtk1SHfqrGBqiOjijZNvP/u2Xv3T2/cjgagHg16L5GsGTBkVQOQUqIjA4YQXAYXRUZ355qJ5F5vil0ZgSldFncr/u5Xu3NNRG6OVKGuyhZVgFCXK/e6CYSA1XIyEgIwppRWfY6xycOYGlIrDlLEQxOkUBut5QAFlSkRmBY29pJBhQlCRCclTIiUUgCAAKSS511nyY1Kgdwuu6KyGXp0xRTJ3VxMoS9qAxgwkhMYos/mOXG3Pi+xNKdv6ZtvKqe0z+NTQ4ZAa80zhyQxQzNYLpQPofXEIcY9kQ0BMaSsB8Ae8LVBmsSfbSiUjW4XauFgRXNoQjIOSIMN7tIRuoFgRLIgYohqGR1KViLfW3QhkAMOo08XuYthdd0EAAgGpBOtVh0VwQmETMW0BGIFczOnlIOmJ4+auBo3A1Lsqhc3BVQvo4yDmxFnAE4NYijZU2pNkLMBGQUWhsFkHLOTU2D2HAxAOCtz5y3K2cXD3LePz9ZPPtGuVudAlPYndyAZpQsYLAYEyYoYmtjEJjESIraMUmzWzdbH98E4KAfpmjRHZMBqLKXMmI1VAFxr+GeKbFtJzm5wevXh7jsTQ9wReK0yDKGoqHrbtmfnZ2iuoCLioqFLHAOVMgxD06WsMuMUAquUivVcv3bw+muvbtYGxByCWoFJRVj7ze0tIVab4gl3ADecZnkT6o24dUY0ADeFEFPlPVEM4kZE4NglPH77niL0wIvl/uZsM5/v4YJWx/dD2kMGKUozb2d7QzYIl+5p23glwhqkirytxFYZzFuV1E6TUqkmuxt7VygRp04drrT/V2dxCA6iwjEVzbeu3/mn/+RLL330ub397uTkJMRLItP2pNTnunyGnZnJ1CSCb8ur18ru7pOj7URp120pMqo+jhV8qxM/xwpQVweU+iKwpfBPS5dXjMisWhN7xR1q3K/hdvPiCG1rDFGGpr8gALpxm9JcmsPV17/8r5z3D5+Yg5UQ6N4D+MpXvvoX/9yff+ct6mYDMcYQxcTdCQNA9TmoMwPf4tX1n7T9lqqFNZgD7ZxN0XeWxegiee/o2te/9vow9AjNmDM5Io6g6hBUUXHiXDFSjNEcc87uBgLuHmMCAJEPjMQvt1YwAZiXNX13ysjRVLkWbQOuZvaAwuIoDKweEiJ5BBlJraAFdxHxKbU4qmYQXy72Blu5BXAkD+Mmi+XrN5aq61LGBS4YYdamtQiRKSgalqwo2lB00Bh5s8lN01n0i/WqmXV5FFWdd52ISCkQnCmVTcmGEkFdkkEoWMZWNzoe86uvnK7O92eLQzx5+9nDa4sZpYJNagPRoFqyHYTuCQTNmdhbphjaaFqINtgLIaPew/zZ2eLQ16GbNRenfQAUMQJVzUMZCjbzhHvz1r0EwIABI8bUetkshTmwORaj2aLp2jiO95xOkRSJrFzK6xHB0MWUAgIBEFc3vMAsVsgDMCB6jCYZS4H9zo6WuDk/NZ6pl0QEzmxjFxKOm5SCKIwjqKlAcdeDo4MUDRFF3NzEBYCYo6Ehgih7JZxaNuqbSMf3T+e37jx6952Xnv3ZkXi0TdFxRoBGLS2RhCIM69WibUqOaAVJXaXEJZATgzMJSoiGQZXEoWgRN3Zj8ErxHACNOY7jyEQBae9geXG2mlG7VhXBmkWXc96Vhp0L0gcAATOLFEYpKaVd2oSqDn1Os5YCQ6CsAgDEVUuiXRfNoJRx1i0fPX509njVzbt+vWpDfF8GlG/jRgnNvfa59bfuXkQAteqnrt5R7k6IKjm1aewLMwM4M439wMQP793fW0ZupZTEjL08wjKjDoGBkyMF9xysQNZIcUJcLhc7myp8rSoTf6ay9MBdCZ0MnSYQxqZ5aZ2bIu4+znaOse1UdpDHdFQDRXdCxK6bf/UPv/zX/5d/8WJ1YmhX5E6X7Emscvv3ddv11CBte3eorrBT8QFmtkqN3M5h0R0r1A5wNTWksk6qaUsti27bGr+jHyBOmLc7IvDWEQDf954MHXmMGxBoGU0RYoawWOJHn00L5lff9cJo2PayufXks9/51stP37n1sZ/57PH9U2QEMKY2y4gkaAkmL7P3EVS82t7W9QYhVLNGRHByjFA5RWpUUVhTMG27/Vd/cj9EI44yZERzJSJ3Ex01hCCmCm4Ru9SUUUQKANSgvbadTa5f5Lz1099VeUSEynvd7m53y+2EbZlRTMZejXwdpqAPcEcKBg7ElVPvhAxIBE3TYMnuzkRqCOD73Q0EK3LuOfRiozi1IJhD5QEAR+aCCgAYtJsn5kIOMaVsZxwCJkoYnTwiz9ouiwPA
bLZIsUbRGwIxR250mVgZwTvoAQ1M2tYX793rNxfdIjR3PB80833J15AGyDNrN5ssjd+W4bbP72tZMqXBs/XHbKOVubcCDsSPN/ww+zXh06Gnt967QA8syBgUQ5P20CIxD4KgwUfejKYYpC/jOvdCfe8iNA5Wsg29SvEUWpym/7a1kcPpvnW1QIIuLu5u6js7fBUEGk257ZjYJce9PYouQ9VSExvYaCAWTFyB1a1obru4WDb7e93B/ixFiGwDoYXAzGzkRc0km46uBV1CEoxIQcUpdEzdxXq1On3809fe2oy1yxtl6JvYXKz7TF7cLMRBqWAzUrc2XlvKg+pG8kaGXgVCaGZKsXArxYrKJPeBrQhX0RTqwA3Qmy7Vlo1jsC1aenUveQVq8MuvE+TqpZQQQgpJpCoTMau4IQZGRFU18BgbDggo6NJEbgM2CVT7V195cznfqxf/7hVxW0rqTYtXHlf80i/L+vSeHRm5ZutoEURE88RBRLpZyzTTfr3fRigOANakbn//YNnuLWYplkDaNhGRDNZd16pd4qc1p4qIiHHidCPWnCNEJEBCQAJGRKrp9bA7coQwxZjsHlMB8KlgTi9BRFTnwxRYxBaLxdtvvLlaPf65z/zMxdmK+H1Djvd9c6V8XOkiAQCYKDDvFDSIyESXfI0t4EZEBGw+7UcIgSd9rxHYdin64PM7cnUkZyImCoyMMM3V7YOLDQBwkllIaJyZmkVq2YON3uT9ZX90VAhUfEgpZTlbLm/9s3/6R6/89I+vHV0znbs3ouumCejkoJWn49vxzO407VRru7dKAIyVs4pYgyF9kmylJsjZ5vUfvnzjWhMAIyfmiNQgcBOZGJCnxjxAAA59ETMDrCaO2DTNOJQtGuZbgPdyt1fnrLvPPr2B+oisCN6EjK5oApgZhJAtsBOhI5p4GS1bSB7aenl0XVP1Fu4eY4yhUQ9WADWOG99scrdoZvNoNqBTm9I4StWNE5GCNl0zyogRiwohVQTV0Zgx5xxCaFNi5jKM41BUXA06agC512KELQUeJV+M/abgurz7xsWj9+SGhZ/x/iN59VSgBaGO6wPFiLBIcLfvn4oxznDOgCEqhwvwTaQNwTl6xgDKYwkIvMKxINHv/uu3I95KGEoZRX1YG+SgRY2bkwt5dJ7P13K67i/GfLrJp+syKp5vymoom17OTtdDr0yNjELobqbFwNmsoqns7i0QloJidZMpBhncI5uOpQzgzaxpEd2A9/aXZVQtqcHI2YMTMBYx95jB1qUfIGNSTg4sSL7ZbMoonTBn9SJmRgFTiDPArlgsA7uZDQHcIA7gdMDvrB6Llj/48h/PF/tmCppRbcyCLVU3YlV/5af3f/L68Y/eOP7eT8++/9b6R6+PP/rp6tXXzt5652JYlQY9Wo4+dvO9wAnA1ArU/DYPZlOIjKsEQkFFhiJjjHHXBV9tN7bKrx3dbWtL4FQNDIio67ra7MN2aNiGNhCD+bDpGSlyaJuUmsAIMSB5OdhrX3359RBCmOqfIzoQX9aRGqUyBUWZuZiZgU42Kn653myVNygiIQQATKkBqBlj1HXNT1/5AYRNnxwO9oZoOGtsZD/JOc8CHJQNWrGAwa13V0jVrdsJkJEYiQkZobK2A1JAYmAGRnQiCITAQERIHpCIkRADQkQItNUDVYMvRsKJiI0VpyHAmnPIzMxFRUEPDg6++pU//OQnXmwiSsEQwtZ4xOorTv+Bc0BkQgZkqi6QyMSMXgcmEwI+/VfBCfCpXtfCzsgBGciRgNAYgQkIjdAQoZJ5duvTpLpEZKw5qvXsTM4ABkSARExEAYkrt4aImQcI5tqUcQkg6mtzaJhBxVs15zgsl5gCxxidw+LaU7/zj7/53R//+/0bxVwIGxUBMJzC5+wqTghXZhp11drBNe6Vqar1nRuCAZnBbL58643jx4/eu7bXlnFAL+gQ2BndVJBRCYpDIG5CRPNecg2VNJO2bcFriB1sRbi7su7ul0vOf7jNraspAFBAhUk9ixy2DjkAakwEaq5AgGg+jiMA5JwR3bQ0KQROjOF8cyI6ohgpM1KIBigNNXlUpqjVLzSwI2r1sGNQn/bfTdM0MTCYaY5NMLDQhCZGQmxCxBoyhcWhAAAoWQmyIlsn6Ger1+avvwpwgk+dbZ5TnaHnPDIzte2A8WIY20A35otZs3duuE720O2toKvA13B2SPMicIzwTjGZQbNIm71A+/Pw9tuHP/jOw5//5HLTPzTiSLHBzpn3ZGMIB9f3ZikV6bkNHBsbnYgMx5TmxUEsEw9ZT0Y7jlu5NiKTefVSYEQDr8RzHxSdGorRMRgg5JxLjDGGTvUiBEhxrv2YKbINycyM0oxnTdPGOLbSAhrHvBE3cwicWGFAhg0qIDcUGNy1jGWgRN1ssY8oGMgHybp3bX8ePOhIY3u+Wm/6cnx8eng9Das1NKAEEkxHTJxOzx9uTi7EOo7RgzERQCFSQDAzBgIOjoSxHfOqyAgQEBBc3BDQODh4FFQUR8QQOMaYR3FC1ZoGMB2lnRUBbPf4H3hU5oCqxhhXJkSk4KUUEUsxpJRKyXko3mpINGs69WwOXdc4+XLRvfn665orp2VC1mGy+5q4w5W643bZAMOUPjPxWPDKrsJFiYgcxK3qunPOy4P/P13/HSxplt0HYsfcez+T7tny1dXVfnpmerzDEIMhPEASBIklCYqSqI0QCcVGrLTSxkqhWIUUWmm1oT8Yq5VW2qDE4FJcegAECAIcYAaD8X7QM9PdM+1t+Xr1XJrP3HvPOfrjy/eqMBvK6KjIys7OzvzMuef+zs/Mbty4fufad6alUTAqRTu10uLIH9yT/gACaDWtLpypu35Fal4hQ8c8O51hKtigDDITJMYThMSG1CIaaGoDveZk4IqIYKwglBGGVNk1KI8GNKQswdqw+36RAgnBWxJ2+L3vPfs/+pu/upjPy2KSrB/K2Y+353q6r4E12I8wtNrEa/eYYUcxvENEiBzRMEDQdYIFAKqhp2H2aKYISoTDBGYwbx+OM9HaxB0A2Bkokg6kQwTiNbYNBoAMaLSOBhjk48gVQmI2yxyknFZbq9W8S3bcMvjxxhhWRzF2mfyoqJuc+orPffGL3ybo3/3UB+aHYArsEDWIxdO6+eCpJ2aQgQux/o4GQIgEaiaIQEgCQwghlfX02Zdel6pskTI5JQPzBC0iJgRHXhVQFNgNAl2fpR3oMZInZRkHiMaMBszwQcrjevTyZzokOCEyMTOpkdowxPPEzpCVzEyKRJBjxhEWaOaMnUjK0bvCIQGqaCqKAgAcc9d3e0erK4+Vfb/EXJi6ZqlljayEit65PuaYJakkkcJBKEsgEzOPaKZ919W+6FYrx7hKHbmQO4WkAQkkS0rOUZdicH5jUprKarHIXeFlc3lH3n69b1t9ZjT+QNCV9nfbVHJxd6VTX4mugJI0VJQb11f7+94mUDZG+/0qAJAHB9j3It4fm1jbHWVdMh92ndMSv/pSfvzdqa4l9UbdQZ7bdDVzDj1bvHa4n/N4ysBsrkiScl64SHfuNltnr4YLszQ5TsV
BYTWE1McmqjmHuW9FenYjoyrnpEKgrNaJxUyOuKewWkqn0a98CuRqlswSkonTAFQGV7NTVfZIIfuiH/tgAJ32YQpmkFMkxxg4S+uwYs6SKSYyRshK6izqsWIbj6vaN1H6eeeClx7HZRXqYtfJs1/+zsd+8eOlWy1js4UuZ1a2w65xpX/40R3yW8jkmRC5BJCgGcLWZPLiD98peZJdK4YqrQ8klpOxEBiCiBByT5CBiNhBtABSGrSpKke560Syc2vnlpTScHU+KFLFkzg3RBQgBuraWFXVIVJSQVQRXvWRuSBCI20lR3GEjjFmxbIQkxIgTSb+9tv3br292rxcpqZlKgEAMakqkjMBoGhrlVImAwKGU/qwGdPAYlrPNfHEKEpcRk5JKSecbE7v3nn1lWe/dKYaqaZQTCRDK33pZ2Bc7Uh/3GV65K39W37id86VC+nYGcSRYUJgPakdiApEDlk1A2QiUgUiJHI5R0Qguq9IYkBFBYLEsh67rmF4RTIgVEAUGWKtB0DMOScpg7HFrqwmb791A6R5+umnb+8fGvYOBqoQAvB6jGmGaMTMZmkA1YmMAETAMhEPSthhpoq4ZtMTkwkMX2dYBAhoWDo9JgFACDQYb2gGgiFnl3Cd7AxIaAAMoFmN1/xQRDqpdEAk0RBRAWzYnqggGaKJOiiyZnSIFqqmE0cJel8WRlU+XFk1Hpmm+dFeUdbeOefbgFc/9/nXMtoH3v2RozuEoMotKhP6LC2SEVZgApggAYKwAwCSYThy4v1iYBSKmDxwywoMLUPhoXj9+dd2a8eSAmdIqpSTeU+QtANF7RXU2CEjgWLOKoPVl2JVliklEUFg0zUe9OBicwJj2ikgNtw+fR/LsgSBFMAXkCQKsCGZCTowJMbgUAF7wSwWBcS4VOsx9xuz8Z2jo5LdMqfc94BQYtWL1HWZFirQlxUmsLRkX+Smj6k1xyMKqTQPMUFeATgvnpwYKOpsmY5y6VLUgmskNBPwqGrqAMg5ZuhzJvDB660lrarchf6OvfrD5Mv6z0H3Xq8g7ajyIVrjoM1xSXrGwWVf18Zv5cU9ZyRpVPi87KfOU1nutVERuPIQ48j04mh65/Dejc49rqVbNot5615+lXfK6WFTn999+KCIVb1ZFFEFoHSM7tBJUTI5bmNT8fnZFPzVhQ9jCRn8UmXuOUlWBxLIQLMTY1PLkR2yE7WoUpo6EUSDgkrIVW7vWOp9GA9JMdJ5hLg1yRJr9k5RZ5Np4UtNEIoauIy0KqvN5fE8YDByqRNSCFCJFWq9wnFRV4eJGpWCCCSr5apE1cyMwG6yuRnxrQhanb2weOvFvNp79YXX3v+hh7p0NF+5stpaxLjo8Vw1ffhi9sXYESMAsUc0IIuWNrfytTf7midBZmh3JGJsM5oGx8kkmTKwZOcIMmAeZj5m3rkEEWVtWzhcrKcBNyfX6P3AsOFFIhJRAMs5V/WImde9dpbURzedMDsiAmIRaTqZjbxRIkPgLIhm4r28/NIrP/Pkh47a1ZBLCXjyIZYHGvGJdddJez58HwI1g6G/hDWjAxGRLSipVCnq7s7m3rUXv//1z082eDoBdBj7du9OuzndAoCsK4TxtJhdu320tXXunbduBK547CV2jrytp6Z8etPSA6jUA1uXU1sxBjS0U6SVwAANiWHw7LO1HcLaooWZFXDd5CKYrD8nK57d3f3D3/9H73//+0NZrD8LjbwfkBY9mQcMYHuSYTMBhsSgwA6HeL9hmUGCBxp9U2V2603N8MpwFNfK1oGXb2BGhoPHJN4nfgLc32c4s/vDkPsHRI1cWke2IoEhkgMABCytSTk7QED0YYvdYwxz84lCKxEFOsFcjn1sxyAOjdQ5oGZjOvrmV1+p/ejJR5+ZH7pQppyACUVKQjDtAVUzeVdk6U+O+2B0rAR0arNcOzDlLL1p4Wt3e//44N7haMRADaJjqBTUs5Ca5UKF29QPDnTIbtX2UWXYBQ0Q02KxOL1HTn/7j/319MXTpn7Yx+QcOfC6iycCUDA1QwWnosqgcDpoFQBNWYloGGv1KfexDwQ+UFZBBkOhyveNUCrDzCQvc+/MhCgDOtWcTRiAixYHLa2tJSmImHNyyOKIiQDIshICKalkS8mXU+K8PFxGqGJP6Ra99cJhbKfvreTDO9v59p2G1BgnYezBTUbFpsm4YJ/lbrto0c6U08ponrp9yOQCZR27qhEBs7HD3aIYY3/Fjb5h8lrlnMv7DVx44ZU8Kv2fvnr8l392tDOpjwzrIkhGRvKBRTCVwRAEJ4mrchODI0mM0Dsy6TVmIOezrZL2YB65RnOWoQy+x4SOvfdFYYTizSbOjxh6iUlAMjvH46n1xymLx1R01NVuLFl/74/+9K3rRXSb5WZl7GzZHc8P5/N5zJiEowIQueBWx0fve/Lif/C3Prp6+61yCcjJsAuY0U0D07A/N23PFlWJ1vf9lVC8aqs2VO+8ffPJJy6Foh+cKono8LA7X86uXKgNhmsO1AzYoZV9itvbXn3jfaXSmgPQJDmCFmAMqiDKCIiCxghqA1NQEjuXYK0yH6xLTrH1YbOp6+v7/gxzXegNVDWlxMxlWXZdVzg/ADU5Z+eCd0WMKcYonrMWTNmZR1QwdARbO9Urr7zyi+6nEI1pCPMERDcIeohB5QRg/TPk7nWyMJ80lUNdR0TwftmvPFYPXz77/POf/9ELX9/eOrM12mmaO02rAPrwww9XXN/au3HUduceemw5n0eWcnt2cHB0az+dHxfgNDNwHAqUITA9cOsa4UkwA9w3OEAEZDohXyMiAwgQIvBpHQVwSCdjPSRSEUOEU+BrGGwYQuz6V1986e/+nf/J/uEhMpqqQ06yBo5PmkQY/itgN1DxTbOZMfvBVcvW1r6nc8YBNkJAQ8PBhWBdjO5DPQ/6Op/oVH+MjH/yA+EB+vaD8Ajimhs86AJO6egO0KBkU0Uxc4y7hi7bXe+TtVAVIUHsm7i1s7mcd6Jdb8boi8J5Dp/73LfqcvPszpV+ieTZNDkKquA8qQJBoZaR10HhgOvLGBDMgI01Z8nt0JSIluNp+eyX3jRtNraKNjagnpGSRaVM4AZzEkmZHLH3wNR0XVZhppT7ui5V85C5MbQy94eoD1jC/dhy+ABig72ksqwMFCSL0yGQ0kwQeViSVRVBU0pqRo6cOSS3toHCofeSnMU05CSRUzFyGCW2ibxjD5QtS2JwfZ+cc9oJMjHXAMts6tDpkIuwbhhC13WK6H0xxDM4By44zZJsOYqu7qvmiEI73rvZJSl3i/ywK/YPbmfL4Eoz7EFLthlogEQx3+06mI1KhdXhomV/aALBTQGStvvaNMpjqA5bMMOVNBs4rqX74WLh/uJPbP2DP+7fPCifvlI1eviV5279pY8+8r5HiogtqgdMgBEsMEPO0fmaUTRFzbnwAa1pVgeWV2VRxXjcNweevJgdLo9XUYpqGsmkWeVVWsyXx0u5u18IZL+VAVe3924v23GUVbs6hzi6e3Rwff/C5mRmaT+CuNIdNsuDFW+cmVbe1cEr2u5kBy5sDcTD0agySI
hQ0mPFJKTVO/uvfX9Lz29tTjoFs9THa1Gs4JE6TgSctUY3C5Pt7K+jO4gGq8MfvfTaRz569bg9Pl+OfYGrFq8vdFyraEJXOOfUMuVglnK2uHB3+lLLAK4z6nMfh/5Lsw2im6FGQFYb/K8CQQvoGByjwdAmlGU5NCmq6gYG9FoTdJoSua7yTAQqIpJSquu6WS6Hazrn3LWxrBytQ2R6s1EfZVwRmOKamS5FEW7fuXH35jyUAQxAxQwZXRZBUkJ3IlEBM9BTmgkiAYMmssEwxQbEBpEkI5V6dmv8D/6b/xfkw0sP7TBrltsJ2t1z4+l4smzs7dtHrty4cPVhZo4Rzu7ONLcPP/zwnduvhmKq2cgAHQ5G5KdMckQkQIFBDGmIMty3RIQ4JIkCrG25DGwAoXHNucb1EkXrBMNhnvbgLl6JXEp5Mpu+/toroyI88sgj79y+CS7UIeR25XwwA33AUXI4F8mAEAnN8XoYbgqARGs8Xk/miqew/unygwCga5MYQF3LcQdLRUSkgc7+4zsVABiMpNbL6unW6qS8e4DTqbsOBwxAMzFQmVPLZBJ75zT1DcpxhU0MDCkzQTUb3b113CvUdaW9BixIRaE5d+bKV77xrV/8edoYX2haE43MWBRl1yNhATB43A/HVYEG27aTJV8zE1MB0aQqp3nZeRq9+sIb47ExU+CRIjKbI4sZ2RXzrotJgRDYBV/mpF3XqWbHvunSdr05KLdPljEdOF0P1vfTs3P651Dc6YR2RM6JagjBF8EQUM0VzkzWttUCiCZrp/j1Wj6qJznfYbc+L8PEWCJC5aiw0YbzPfbL6CauLjBn7uZK6pOu2FPbJuid956IyDtRA1FGR5ZSHwsfRCTHZAghOFcEERFSp3J0BHFZcarau3R0wOe4/AioZJxnyZ49sjfJZKt+vsHcg4CrXF13q0wC4P2coc0ysUntihvL/VWw4J12OYzr290SwJ+t9ZPgvtP27ul378Ln3jhuKunKizt4a2//1tH5T57dbVJEIaJCJDiuBgqMY2Lpk3LC2nvPyHVdAkrKvQO3OesNHQBcPtPmrFmYHPca4/l5aq1PedU4yVvlyLsqnZULT7rxKunl8b6rqEbeGR2Mi0oDaGKn8td++ukqbI5m05t7+9euL87uzFJKIrZaNuPJ5tvXbo0m46Io39o78LCdsTjzwY/cuBvfDHnipqPe5tATB4IqoxKadPOVz8dxdfkjT95ov/fOiwfoJtf3Dh45fAhH/u69I7c9BSivz2WCDrk6uNcsF23hmbD0HttV2m7dKzfandkZiwqJu2iAnhxBTuhEUcRIzZlFQFAyJDJU9QCBci/e+wFnN7O1LQHTqf3A/cv65Fa2E8+Ztm3rugaAAY5MKaWU6lFJxM45EYlJ2qQb41J1BVYFRzKExFt8+Uev/cSnHt/f33NMhB4RTwwmT3RDp7YEJwFB7BwqP4gJqCqiSW4f3j333/zf/v4br1776Z/66EiW7fIWn61qDft7zZ19d2v/5oc/9D5TDH50eHzgZyH2bRGmxWR81Ovtg/zQ47urbqW45pDf779O/kans8v723Aa3CkBEEDXf12TNU/bN7jPOD9ZFgBIHzAEF8kbs63f/Mq/+uhHPyoizEzEfd8H74A452wqgPQgux+JzURPtK8ntcVUFJlOYZMTTZUBEtKABK2/2DCEBwM8EWM9UKHuozp0IkM9PSAnT+/73duJXwUZAMIAyetAD4cEwEAGyMyo2JI1BRNqNS3QeWiytmm1uzW6dWt+uBdH05lKY4bEnKUvitE3vvXdn/6pn3B+h63KOcXUIngzdR5F1sQpOBEWwMlkggL0PWRNxbh85bXXH3rozNFevHP99mTik2RkxyhoiZgDhSSWDPoswASI3vtmETWLqQokQiqKouu6ASQBUTrhCZ3eDg8+P2VDDhvZtYE7KhWcLaeUYxA1JmIDyWpAPCiziRyimqEOcbAnaqkQfEo9EZgBY0ZAE+h6KcAzeaSM0PhQGhg7MBBAQQRDICdZUkyd84MLBnrvEHtT7SGHsrCkiMqDEj3maai7FcQeY+vyTb11LXktLlJhTXcACc0bYCdCwUitdAV4hGODiXNtN06WCyfIM49e0kMuRWDnwq6vvFnPq6LyJXrLfortBSVidfOFXj1Ld+bxtWuLnQuzm4dHz7700my2OnvGiZj3rKrIARFwSL5mIfQOHaFoUkNPZRDmikp0DIYm4CCAkGRkNFQrXDWaOHCFDxaIgUpA957LjyVKag76BXt96vJZVQumqXe+FsFVspzjvG3brXGeXTWDPe+LlJJuCtL+7gQVV4jtQzPD0E0qEc2bZ33PfZ/33EbYlciujLlppAEL2+c2nt2dvvJmf3N1/WOfeubLf/qvw+bmvYP5nTeuffqn33NwtDclMwop92N1RAjYzwqcVLVZY6FuBM7UUtF8OvKSIEDdyQHAEFjhAMxQsqqsheNguCZ7AALXRbfsRmGEiKfqO1UFIjkpDaePB4v7YGLb9/1kMimqKqUUPBtI0/dT3CiLOqWYckpqbZfUSmQaxozM3oObTMJz33/+Z37uIwhHRIZgIskxJ5FheAcAhoxkZGDrhDXIKODo/hwTUUzIu6tndv7b//c/feWla5/8+Ie61eFxc7C9tbV/G+J81abSVe7JRx5KzTvO7VSzSx0FWe1X1ZaDUZv8zqV3WaiEiwQLb/6++aPBqXEA3W/QCHFNu4I/YyFPAMCIp1RNHCas6+gJOy3z9ECJHG6rEMJysXjrjTf/6q/8ytHqmByjEgEDWRZBROZBpw5yfxdluBZnDq8wAKgpMjzIF8STr60nNX3YEuHpInG6hCEDGALboKFFAzA6qfI/1sUjIpF/wMUe6IGEqSHFlMwAmJGiGINDxeCxXeSkPQXsYxIFj+Uw5Sgmtrtx/rVXr93ZO5yM66JkpFGKfRPz8li/9vVvf/rP/ULXIKBDUkI0EBEDqxC6B8w6T9p24NYaZF/6GlPemsy2Zhtf/+JbfVztbFet9mCJSXMCFedDueiOFCipDMIGM+y7ZIpkkFKqqsp7f3Q0Nx20AKRyf9fyY5j7g837sOVd+y95Rs+qGc0EzGiA2vLaylRUWREdMxOwnti+97EdVZVz0PSdghICRGemSAqmXUypk2LktYcGfYzRU5U65Sp0vYaaHIojds4hsCPfLJrYFn3fhxAKpr7vANATp9wDBwRu+hxX2s8Vjke3X+/6pj4rqYb8RqAJUt/E6Wgmfc5tayahHOekyi4eLcoy9M7art0q63Gn0yZLbfuLVXZlzN0qd94B7M83w7hOq2x4T9N5Cq4qtp586PC1P00I7kqxM9vpF732DUHHVWmIKmwArSmjMpkIhcGsSUBohFzhol30fbyzutHrEp0PYepCtbl5YWv3UjlizFF7SS0tl02Kt6HfXx63q9hQGpvSzuxMCCqSgXuBbNJzucJjF6wUUyyykJKKMw8lLJplYAcEBNFXTsTMJIxNaTFfrhjJhxDI2AvxSiM5iiXAKLQi4dL27NK5cRXy7/3mn3zkmQuIxWg6nTn70fPf/fMfn
z60sWRZQhHMCfVZW52WNY+c5rtg2qFsTWjTzbZ5pW0mZHA9sdjgD2gskm1IJRvMx0/gVoeoKK70ja4GnCHnPHDef+x6vY/Irp+ggDEORj2Uc55Op3t7ewUiIHRdF2PywQ9vTjl3mbqodVmc0m2MY1W4a++8fv3a/ng67fsjXDeZgsimZJrWBl5GJ0InQIBsBiAAZIrEoKpGUNfVC8/94E+++u1Pfuzj8+O7JcNounm4WJzbvnBtb3X14ohzoy3Q+KFi86yGoItjjOgQbt3aozKNtuvL5y9ZikPbjcADkDHAJid+t/zA0SBd284x2J+JzbNBjw8nfML1vxKj+y2zqDpcx2QPlXQ6nfzwhz/c3tk6e/HC2zffcr7yHAREIDKvA5VPPTuJCNf/X4KBRwMwzFxPd133a/HQr9uJuxrAWrgwzBFOuvIBs0bkU47piWBVT+r1KdxxYpePaGs3eWBEOP1sOx1VDJxRR4PyyMhxBeZBlakbT2l+qJqscoWq9k0bXXziXee2zuCdO4tFe+iw8m7sIJf19K3XbnzJvvyzP/PLi6NVTuI8EJAZquQHiumanThMhx2WiGSaDWBjcwQy/tHzr0ynnlQIxVQJcK1kRstCkg0UgMH7EGOOMZuZkUnWejRKkvu+H3gEg5/Xj21k76/wJ+IyOEHemLnv+2JSZlUAqOu6noxNcs45FOtMMZFspwIOSEP34Jzz7Iio79sYY3AWKi+ZwMB7LmsfXWolSdRAvjEVQVBCC32zAuDcZzIFNQQe1vCcs1kYsjBJjUVCUYplSZZTT1j2XZLDVOrm3UO3v6QzyI/VxaxviHIrFkruNYMImoZRmWIOzi8qGxf18XIVIZ+p6rJvQWFzvHEMuQgWxQAlVH7T9Ay7jdLd6gGtOGIt1Nw9kYtbBv2qG8/efvPgyXdXL794vFzZ/tGd7bMTTaxI7AagSssRJD3shI4pHEtaNa0sks/VI2eeevIjH7n68GPj0YZZa3D7YO/W4X5HWJW1t97TtjO/qusnpv5M2/hFPG73Xj7eb29cf+7O0byg6aT0zrkkHS6QPLYWFcmiAoBoIpdtH7330bKZhOD6fsXMipCEQgAEpwpmmZUQDDAriylD9sY5CwjcOljdLcbFWzeXh0cvX9jaXh7c2X3swneu5S9+8/Wf/fB2I8faeO+LII6wkKgRV4SCqcgjw8Vqo5rN9+e7MwaCjIktaDYw8YxJejMldZBZKYMiEToyAkAz78MaFyYSkbIsT6dGiPigA8HJHAlxndMGImLGfd+XZSkqRjhkwjddO+bgnBOWruuruuj7WAVn1Jk6zGCYgh/7EJ/7wYs/83MfadsDZFxHD9OggHnAYICBbF1tUQ3UgAR0MFXPjnFSlf/lf/dbV3eeghXTKI1HHOR4d6d0IW14Pm7yzsOXclW3sDGyLcnZj6Zs3qTfujA66uTS1fObW6MmNVZ6zPcjjx9oVA0feO00x/4Ex+D7tBG0dZyzyYkQ+vRT1ACQCVRtPVBda2HKuvrud7/7gQ98IEokx4S4d/vOztkznalXwJPKPswGY4xt25/d3mm6LsY8mIMP0UinC/OD/eM6wecUyBoWi2FfhEgEMgTQnbS/ayDupGwhKq1D74bPXB+f083csN6YrvNe1p9OYCYAlgzBk0pmJTJfualB4WnRrKKzsZqPsWekDL6P2B605cgeeix0/YXDvUVzeMgQ2mUzG++++srb4/GXP/HxnzzYmw9zaARG7sAGCoAMy6SaITIiuOiIVVCTQlHzzVurt95488qlKvYLZGEip56CIPSr1aLtIPVxWJtCCIs2JckKNlh4lmW5XDSAQ1zrUCL//3brDz7WexoiUSmZybuUUtv2TdnVzjGzagYLg3POOsTDsmoGJ2iKYMzctivJ0TEDZDPzRQZCSaFvlQtXbeLqsB+VGz0t1ICAENF7ADNj1H6guAEAqIL3PoTgc46SU0pMHlRy7MfjUWxSv8raArUzOxrdfW3J2V0ORqvuwOQ80SESZTyar5Zoo6lv++W4GidOtaSUaVbWJVGJeTQpk8G1rnmO3C2UlS+Wyzzi0Sj35yzuFulS7bdBtxmOk7hmOdq4WGyVMebudg+fvviutP/c9fm+L7tpg1ZOxhu7wa2WtLgrGWK6E2Ol0iOz2zh/7son3vWpD7z752DCzeELr7zy7a+/860btw8ODptOGqFgmhXERBkdmFMLjouSi0tnL053Hrt89ZGnn3ovxIM777y+d+2GGQJoMmIUssDKYsk8Eno2sVCZpgIxCWkmcmWGSGBkBOJi1uC9qJiJJzIlgDrhwnvTjCW5Pt9+/ZW+KGbWzetqw8Xi6Ohw5+L07OvdD16+/cH37bhuDvW0lyKzKa0gkjdIpEaRjqwHbuNiYXDR16pL1EJo4UgjKWQgI0RjSMiGjCqoyMgYsyUjH9gqizExUEaNOQVCBRHCQhgcPOihMRR9gCGZx5idZEtZR8x1VbfdKhQVYm6a1Ww6qosp5MViNU9RF8tuc7KLUR1kY5fEI+pmiT989gc/+UsfVAyg6s0cjBfWs/UAzkzR0MhMUYAMAZFJoADTrFLaKsetSbk54v/q7/0/33idH3uCNnfj2VGxUdSm+fbe4WGfy8s7u7tnt7a2c04iTQqHOXJVTqmiO3dNfLEzGQc3EysBmkIxSWR0DkkMop14rwgIKwrCwK83BHBrpAN4SF9gWHN+xEhVPfPJWNbMiAwQmIhiymXhJQpgoWDkFNCtFnLt7Tf/1q//zcPD4ypMfvijV8p6cvT2jdlsE7BdrVaz2aYhPPeDH/zqr/7qZz//J+fOnfvu91/+4Mc+8MXP/slTjz+9uzm+dfPm4eHhRz/54dXicFB9wUm4CAAikdpgnnwCrLMBGIBlQWIyMzRUVWQe+ntSBgTgYaB9f7YsqCZraRsZKDIiislJUuMwyR0s1AlRYcguN85qmbTaOr9Y3ZL2MEDV5C5j1wKqjbKmApG1WvQ9QXJOzp4rl7W7e3ulKQDw7uTiD3/w4pOPPj4azWLbMZISZg2ak+dA6hCxh4hsNDgPOxHwhhh1tRUe//KzXwtVNu4N1Rkik6KhgWARzfquNxAk82XlfNHcmWtsTXOKUpXBMccumSCsA1nX0qSTpQ5McSCwDew3AEATJJ/MnAJlQTIYY8odeSTVoBLY9ZhL50hV2BKACUCWITbboQMOyCV5FfMZOvZAhuOixqyI0hwvxhuFD7miohc6zt1ogjwBXmlMmEuw7E2SQVI4iYiGnFLZA/SN+RLRO8xCYlVwCJCFUuvzCjTDtReae0t8T00XjXqzhSMUKUXAhzNhfD4lcO4AtZNc9AVonmmaBZPknMTbyF9bwtslifox01lJjzFU2oTS3VzK1w7SgfF7AD9VFzNvbnHUnr88nla3b4LrlA9uLj/07ke+8PxrfPFD04fONtBZkEbaN5v5wnxYtKO6YtJL58PToyuf+MT/HM6PXvn+f31444tfffYHby4v6GRWO6tGO+rOotDYbFpXGayTKKKVHznyB0eHL929Hm6/9Z3vRTY4e2ZnYzRpvLLRZPPMTtsu
5vPOuqIeWWTuoxEK+UAaqVuQKTAbMqKCKJs3EY3sKVkCAkNcWUaHTC6oT0bJcKcavXmzv35Xp2dLtb3jLOWYjufRjaebZzZeeTO/ea197CLrqvSbodNYARaEq1Rg2fquENf3WdRo1UX2ZU4+YF6CZQUxVWMFAGIgsrU7Lp14YBkYOOdcFWQppQ9dE1VVB8rBAISrPdj3nW75BfUkRy3l1IvU43F97949glpFNctisZiNR+Q4OJ9T7Irx4arfHLNYJIeSrFcs6/Gt2zfeevH2w4/urOZ7AK63VQiUhzZs+JJrHbyCEqJGn1KCovYiMgrl4qj9e//FPzw87H/y008+vK0p7R/349sLmatwvX3uwvbWbAeAV8cy3dgQL/v7q93dixhGTunsmdm167eQ0nhSpNx6R6YAwalRMkhZejnp0VVxTQm9b+MFAGiAPrAZog1eBbRmt8LpqBPXlHMyAAFzzqcYCQDJQDXneObMua988Vvnzp2bbm7cuHVz5EPbtj/9cz//pS9+VSRNx+Pcy6ievvPOOwR8dHDs0S8OF8UIJjU9duXMud16tlHdvN2+/0NPr+Z7TGGo7IOQaejEbSDJwzAJGJ7gEPnNjJLyQLX0zg2+W1nlRCEMAIBE9zF6QuJ1k36Cz5gN5KX1tsbWrTsoACEksELNyJtgC+guXX76Rz9YVcXtUI4hmeSmycmBP14ui4CIBYS86lsVKcvphYvj/Zt3U3Ova8O42P3D3//M/+Bv//W2c4II0CFWofCaBZGiCPHgVo2MDAgxig/BURSx559/cTwuU27QAwOymukQ/+v7vhdLjE5MnfNdF2NMqsDsJKWqGqeUHuxv1vaoDxhyDAz7B6EYWG9qbQ24M1Hw2URNLWPOKlkBENgY1RE6QtCMxmjgDFUk54wGdV0jYlEU4sSptn1HllFHSM4zEAFx8oWmPkFZlyNMKiqiWYZR+ZDFNrQaBpCz9n0kIkQI6A0GfYk2q15al1amTVi9Tsc9X2J5FOLY8shToXE7ucvl6La0R5DYoIw6Ye5zCpRm5EdWtj1MvF1T/znDRS3vKdxZwvEqB1StuCSoUN4zCkDlDxfpu3T8+wGficG1Kx9q2tjQO7dGkJtobuPi5s+cvTg+R4fNcZuUPdzr5jc7HVP7S+89+8jV4+tv3irszCMPxdWb//mbn/+qFIuyeqycnH1yu5yMJtfvhhtL/9TV7S3F2IY7smhSe9QusgjAUkT6nJz3Iy5dKLwPdxbp+r3bpQdH/l4DaWNr++KoaI9Whz1JqDyQivaT3veBNyALWYeS2FedFhkiqwMzIs5rySKakHOh73tmgxydurJc3r1+YBlGZQFpYmbZoqf0J5/51qavQp2++9LND1/YvgnXpD2zu7nRRV12KWB2zdjoWFhd9k6Nch6PUA3EFd4VyGTKRqiA+cTwGoyQULIgETKaZkUrRsXqaDUKJa+ZZMREavcjxB7caa4vX0JVJQYysCxNs6zrGpk0ZWAys7Ztx+N6MDGxLMlSJyQYNCOZgJlYNgDH/Xe/9oOnnv6Lc+mdC6aJBcCcIRgo2pqsBydQMebErso5uUSTsfuHv/V7r72y94lPflzx8F4vu9OQUzo6ase7O9Np7YrJ4WK+sbE529xYrZYZYgih65fQxTNbuzfeegvAnnzqkZRa50myCAzqJdCTOeqJH6LzdAKzGOmJ9QogiKkMvgIMg/sWwuCstfbRXVPTbT3JZPREOrS5iKiGRSi/9a1v/Pmf/om+782MHFfj6gtf+MLR4fGlSxfu3b27WCwmk0nXLC+eP3vj2tvTSXX+/Pnnf/Q8Aa0OFvUj9csvv7xsOkIHxgo2nMT7OMtJ0OvJuXuQywia1+ScGKP3fijnjHRq6P5gwQIAUIH7aM9Q2lEByU7ePbyR1sF7bNjGSFw4AAKYz+dVMbr4xIffeuVLTLEoJCcshFqIo1EhStB21nNOjopCfcrYjM44lBGrrI701s3+y59/9lM/+xOH+13JGybHbTZHXkAGCBERvS9yFEDwrkgpbu+e+eFzry2OF+fPeVVNRGhkBGoJiHqhRZ8zGidh79lXy0UXs+oJ7WY0GjVN92CCzYBM/hh2t54JDaAWMxhpzgDgvUux87WT4QZUM4XBL5UIHZGgEKAxRITCU8KcmQkJAFLuB6iNnS1TgwCWNbYZUMta2ItkVHDjMUCyrlmG0Si2alI6RymZ6uDLqTFJOYQkC2lUIkIDTbkMlSqkmCWrB0/RpWN3cJTVuaehqCXdM9oJfBlgRgSUsOkmZUmVg6wbLtSucEnNp9hBUbpXNX1LZLOafix0oKi5U9boQ2faiahZ7Xyp8NERfDhufc/RN33n7vIqvndr9Y3y5vfu/eqvfOyxj1xczlpQBV/vy7LaGkm7974L/ce53RqFx3bnb//oxY3puQbG89XB3vyNW7yzWbx/0R998r1PpD597ns/XLjdzdHWc6/vtxCszbMaxuMxUu5j3ysgO3SubXsL2C3auiwL8sQKntRSr8c/ut0UXDy8M710ZbyY79/sj7MTo1UtiZXQyLmghkk6yBac72nFzEPclxlqFiIH7ArIPWTnq+Uq3h4dvbmKGcNxXHgv7aLJ3m2G2fy4pZGjMV+/aa/YrJocTN3RKy/faWF09QObx81d6GvjlB1k4H3frQI4lwG6BCiyToUX5Ww69Fm23qGrqhoQMkBSkcR1meHYQJgpqyKEYRw43DMP8hDgAYTRVEkZUEWk6+JoMq6qKi5T5SrRpEoxZh9C3xGoxHYJI5f7XPpgJuw4W06ats5uvfbyKweHbVGOJWUMXjozc4BpoMicQJvruWKRS1FT0d2dnc9+9g/feOed/9X/+j/87rf+aFrOHz7z0IjaVdO/2Uud6mm1efPWO2402Sx2b+/fdoyb04l3bu/u3boe31U4PD587zPvZgRBTCk59gRsMSIqrW29hgWFhixGGJDqoeifyH7o/sDZhokZGqCRIaoO9d3wAVdLtYxEOWVDVSByxY2bd2/euvPe973/8PgIiefL5uGHHz44ODp79mwovCv47OSML92jTzxS1/VqtZCURrV78tH33t2bX77yWNem8WjLoLyzN9/Z2UhxOdjg4pruPihsAZB1AN8B4aTyD205IIkqMYuqAep6RGxIA1/STleCkz3f6d5lvcyTrpf9BwodDBm3TDVgf29/fzqe3bt770cvPj/bGn3sz334sXd9/M1XXhA5HE/NhUw9JeR50yQMltKoKFKKZI7ME8dQt9vTHTsXZ5Pz3/7q1yYb9u73fqw5iqV3ZeFTzGsnUVDmEGNk8lnUMUnqAl34ztefG48ZICNikTkrGZkau1C1i2i98zlm1dJ7AOpizCpEFmNfVoGZu66zP/s4GU48eCjglDR8+vowXBXTUHkjRQUiMmI9sVMyU2NTg2zmyQlgMhAF5x0BqqppLhz3IjlpklSFAglA8hCMTqzszTIyOqp7cCaZTUcMiwySM3giDp56B0wMHsGpRkk5ixQBk8TFYoWIJtDOO22mecmrBsm6qXINFM3d7aJIMudQgYqSwKyPm9W4TgoxQfC
phXvc3F75z0dHpbxrvh+5AGiD0IqlFhwlBDJ1lvv+sGmjtbUrr2KdtXd//PKb3TfHs90z/4v/+INXH6vvLI4OlsUTF84dHV83wRqLrVG8Wu1v1C75rb1+49xjf4PGo+7eq59/u906/6Fnv/NHEm9/6L185wbM+713X5r2lt5ZLO9pMQt1vSttil3uU0rsXEEoGR05LFl5XnEAyAkSIS4igigzVZokdK/sHd+9t3FmWsMo3sUfHRc32wRkFVgw6DmImbEEEhJOzKiSnGMRQV27OBVaNNqFqjjs4/ka3kLoIRQQnIZiY1zu4NF1AB9bpCIXmm999vn5038+x4r3XtTjb9I1rsPH9iBGB5UhRInzza1rRVLiIDYC6E9dcwFIB+aGOXQDKcB7H6NlU0cMolh4qnyfk3eu67vs1REpyqnfwANX8PohomtsF1hFUSAnGI+m+4t7SKZZ1GvX96OtDd81/WohfRF7i4U6T6IgAqIhpeRK25/f++KXv/erf/kTt26/VrgZowgYAa8DkEwVEQYTLiAFY5eRiuO+WZH87/6L/9P+9Vd2Z+1uGVx7TJu7igh449btg8fe9Uy96Ip6dHiwHFfl5sbG0cHBZDK5fOnhvk+95aff/cRsNum6qGrMwUyRDWVtVEaIbGJDNg0aDia7g4niA7cuDzl2AznPLObMwABCQGYga0b5mqFkhDZE0akBoYidO3vuM3/wh+fOX3I+tF3PvgBTQ9rd3c1Zm6bZ3N4a6gsRLBbHzOyLYv/wMPhJlFRt1DHG0bgYj2vjYhEbjw50wLuRDVXUTBHWTgNmiggnCue1p1UXewDw3icZiLADprwuW/cHp6dkytP9HJqqrYO2Bw3tSWkbYj1ArdfkC3fu4oXVUfvay2/M6tmtG2+99vrkmac++OiTH14c3Dncf320udLV0klFULYisQuSYEh86VMaz0ZFAQe39qej/P73bG1Mrnzrq9+++vgTfrNcHAO1GkKZLauod04kIYKaIUHKq+lGuHHt4NrbN2ebYiq4zoU1VQUmNV62OWUdqClFUXV933WdWiakbN3GxoWcNae1DcbpttVOcsMfuCnu3yZmNrCRmEhEMkpZOlFBQARSBFEVBCHNCBPzjOCTeGZIRgVissG6zjM5R2p5jQsZiioqG+QhqZO8AECfZOTVj7DPrXIJAMBEYpqACRWzIuScDVQygqJoRqCsKacUSs/k+3lMrWnPe7cWILMrnFWXC+akkgkiV4uRK5Y2DcFJ9o7GgduuX2gSxN46hfHXtZ2V0/eMnOOFh6LmtFhZYbghMGJufV5BzOS5CH2glgx9unysbqcqHvdPn3tqV0nvLZtJVZErmnS4d+/gwjnedqurm48YvAsvnSkCQNsvdJWPbly/d6uA+fztF971CDcxfPftw/dfKSYjv4z47FvLezJxo2C6v+xKRAAmC0iWWcnAyJl4afPa6I5QSRQSM7pAqME0iyEf6eHq1mq3fnhrchn4JfTfAdKkvWQKEojFSFvtZ2EcY1+4EZoJZle4xbKJqi7k7VDPuy6w2w6VtjGaD8hRDw38aHvav3GzcpVhOxoTFPW9t5QaKMeydZWWL2y/9fX46LmN+uG+6JCBl6rGSGk6oqLXHF0wyY5AlOKDJhgIakDeOeAmxSzi2REiOS6mVX93NQkTGPxy0ROqoJyqW+HP4oxDA5PNPPHA3OqadjKbHvqjIXA997FzUZTKatR3bUp6uOjYj+8eriSrAiqAgjmXg/ef/6Ov/eIvfNr5IvWxYibtEQpYMzR4XS4MEFC9sgPrre37n/35Xy4s/+Cr/+7p8xuHyxzOXF1Vs7v7i4/83PtvvfPON775JcJyZ9apiW7MPHlAZ+Tvzed1XV+6cKEIoe8iIgdXiIjznHNyQ2MFpmZiOni6KBjrMNG9Pw2AQbOkRgPt3AhVDQeJFaoNnMqTDCYzMUBFJSNy5JyI9CmPJptf+fLX/vpf+5W27V0oyXlVVbGYhZEK57NAisn5kFJidjFL4X1Zjk0xWupiDsQSIwIbm5EOazoAMLEayNA+nyhmDYYpytrQHwFyEmYPAF3MzKx2EjO03jKtTXPWXBRVAn4geVGJCAgQAZhhvRqvf+/Jjktzlmk9vru6u2qOynrysY/9xObO7sFxNxuPCf3m5rlX3/xWNQ5N00OXKa1GbvbqtbubZ0e752ePPvr488+9tDW5+MT7Hrr+1ttdPj5/aTx+c/FP/uE/+9t/529tbO4sjpd934ayGhphVS2Kout751wXFxc3r372975IKI6SCYBhT4mRJEfvymXbt7nrSIyt5oKYm8Uq5zysf45cWdbz+fKkVT/lHa3VbMOvXO9SHujiT4u7Y04SOTAVLml3kqdrYqoghmSEEW3kHACe6tqGq2lYQYNjk+yZUQgJEFGyU43sAB0hmJpXiK4GohKpLets2KmqCKZIiRMRMHkiIiBVc84XaM77pA2YeVc0yy6vkG3aNb5rcQuPfzZUU+S5CzeWrWF5HO1u6h8KhaU4JZhMRvOmaTHL2PUe2JfXpIDK3r/tNpcxQFWPsx6q+XCGq8pxp7nI4Dkk73tDz6kpi77TZW/uf/arP81jf7w8pHExK2eyOrqx2G8hzUJOGurJ7hy8r6bjrQtxde32/o8cdu/c68Ed3jhM1945eOrh7ZfeORrPUkzEHqqo77u48dwcDhooedRBBnI62OGrOVJ1qqRJshmLCaLlnE20YEdAyz6tPNdYuQzEkkbLG+klvz8d1Q+PdGtRPNf4a1ZRwjL1DRoQjJaU+yaWJeWcgYkdtsIxxnnGM+j6FibjsmAH1gNLZ32W8Jd/7eLbL7/JCV2dJmfC4V7vS+ruts8979/7M2G2A6/vv32puPzyZ+Xyr7nW75c5RFFZxtRiXUBCE8Oub0VSlJyyyRoLZWBy5BKZia6tvQFRKRlUs/HR7SNEZO+zanG6Hvw4LfCENrNOIQBlMkJT7bpuOp3W41G7mAfHIhZjWi5Xk3ERisKg73OKQMsOVIkROKBjI8c+8NGt/a9/7Tuf/Mknju7ejcB5sFHENcoxVBkEYsJEljuritD1iZO+8sNvbji1vi/PPeS3LmvGh6+cKzeunHeTxereRz78saN7h33floU/OLx38eJFcnjpysPOOYkppWTrEfOaAsGMOsTUAQgMrS4ashq6NROT7SSgERQMMSGQwdC/IzLRWpw5qBIHzdNQ6IaoZra1jYJqmtST1195vWuW73/f04eLpRhpVgVg5whAUnbOo0rwDgA8eVMLHGKfvC8MEY0ZWbIisSMvSbxHUEM4BbEAEQkZ72trUdVO3cWHUjVA5568al5HM64FagDrukWAoCKGDGvzmVP78sEg6WTwflrY8YRZAlBX1Ve+9NX58bKahun2eLa1KVou5nfffuP4oQtXnv3ud97zzKOXLl26cfPavYO3NjY+/vu//3vPfOh9H/v4T/RtJMCf/clL86OjCHbhkfdqRomLX/gLj337G9/5b//rf/7X/vpf3L54+fBoqZoBAIxKH4bKHrOE4O7eaL/3veemU04pMZGKkILDTCYYYXXcSZ
fZDInYFX22pmlFxBGn3M9ms5S1bVuA+1sZVSViPBHZ3T9CJw9HkGxtTU1EOWkYl+qIpDi5m2R946haphSsAx05EFRykEnVq3rADg0U0ZCscB5TjwoIQGQqHKNgAPXqA3gjNcfqHJIxxi7WwGDgfKgnJdFybZwA1vep7yBjzCJdBlXL0KcVaFOmfdq72bZL9lgcCjvpzhVQOlpwhpIRy3uqS5DbXTs9FpfNGzpRdqhtePFwry/L8qjrVwtVzCpVD5dGNoGc+i4BOgNG3O/TvbZfVR6OUwUhz0Zuel5ev74Yc1lkun7jzaV6perxjTPntrC2uDGaJCoiUFos2+7oe9dezNG/+NbdjcqeeGh750J5ZlMff6jgPkifF+A+f5NuJJ5MCrWoFACrBJkzluRVMRMnVAJisg1N5jWq9RnBe0FtY+/8SLTvYleBjT2QcSIznsOyd0WY5fdQ3FrmNxq3QKoYg1i7zLlTzanXlD2H2OfgfdLEkveOD2suk+A7h6uMrlB1JAVPIJEqLSIVW6Of/Pc3//AfvNUebldUzl+N957MxSWxyzZ/bV5Ed/iKle8u+9z3AIs+ghQcvGBka2zYna1lNIiMls0QnPOgMfYZEYmcCYKqZB2PR3Mi1YyIOSV1nhytIYUHNCz3S4YBECFZljjc4mq5j+14Nl0dH6kiUlCBtu+nkzqEInZRJOXY1iM2AU1KZp4d+KQ5n9kqP/uZz3760x92tp/Es8dog+ewMiIMwcI08E6o8LOUW3SViLz+xveunp0B+/rc4xKr3XNny0KgO9yahYPNnaOD6zTafOzRK47xUcCqqBExa9KUmQqVyI4IUEVOdibGZusc6SE6ztgUGekkmW4YkJKZrMmGntXWwPwwVMtmYsonAdbrw0VrKZYjyr2KimbbOb/1m//id5968tFR7fcOtSiLLiYaJMQGzrmUEnnIOSIisxcRG7o5TYmATBkY2GXSDOI8o0QbiK8AoKJmAOsQooHvAia4zg1kAFAF5+ju3btlWZ47d24+n9dVEUJo21UX88msZUgdOiXaICIB4NqPc6j9BnJ/O3eyz1v78zsEd/bM+RRvHDf969ffHm9tgTaA9rGPf+QLn/3q3TuLH/7oneNl2j1z4UMff+Yrf/Ls+z/woU9/+qPX3j4Ifpa1FUg8GmPklR4Ze/b1uCp++hd+evbtzX/xr/71L/3qLz3+xNN7d4+CKwgpxsSOs0rs9crVC3/w28/FGNmzxmDWMyMKKyQsikWDi95WWYmIhdAVfcxDIJF3qDlPpxt9zCklBBYR59Ya4JNo8h9vdx58DE3DAGF5z4KqcJrSSDjQ2pkYAFBVDJEZ1Rlq7C1FpOScc86toT9EzxxTFlHLDqEg7oGlbYEUALWNfc3gAE1LTYVDE2mzqFgc2DJD547AAIKIfYyqI89gWTBZf2TNofUrjr2OR/FGD7ex38hW+GLS55p1jmlpnhNVYbY0ajGNGc+THy9jKvvDTOeIz7Rto1BTLquqRoeVtjmD4LjaWJAdtotj0Tye1lnL0hASK7g33jx29bgvu+t3e+C6mFiAstVubyFPn6m3R7jXRZPt2UZ9+429LutD08nlj0vu9upwJOMU+7SYs5iqhhmUP/VE+KO3tOsrsrkYsrYBCACygiDqMDAbYvaUmUNApTAwlyw4ztqa9b72ksqFYu3Fg5pyLBdZfO4Lgsd2cGtlP7qDdxvXklTmDbxan4wpQgyZemYpTDvNPiSmpluOfcCqIFjmhIbt3t4qRfDodKk37uXFKk6Vlz5NIizmFbX7Tzw1vnevXfUz/Xa8fBlz5XInktF6DlRoEh8CaUrJgyAhuGjaZ64mGZ0wBxlbsVy0czPJ4M1D8OQqrnYmaa8bldWyt6Si7CsMPSeV+8FMp7yLNFijGDIzAqoYAi4XzdlzBTpUZsecYhsbjHFEYcRhKa1bHKXtnXGTl0aUDR0yZEyQy2m48drhD55987FntpYHt0uZ0GB9yCySmRUdppzRV6aCjDl1s53w8ovfnJbA5UW++szRnEeQp177PpkLvnQf+fiH9w6uUx8Ob9+6+sijKi6LAUYiJIQhR1GzoCMgU7DBCQfJDY5oSGjBUlYyR1mYKGIkHygronQKTKFwmGJmJOYCBvI/Aw1jYLLYW1kUoh0AEHpTBMwxZio4JhlVs5Krr3/7i7/xG39nvhREMYluWFKIRAURnCNjwsHRHu4PMHNKzhjMGRiAoKKiimVkNOUh0JuIBncgQUUikHWOBREJiIABoZAZ9tsXJm+9dfSVb33pU5/+2O/968+d2d557zOXNjbPpb7JORNWxAJkmkvjlGN2RKBDyKoZiQIBExms6e+EpqC6zjN3aqntnnzikSefejRJfvXVl9985c2Hrzx0/uLOH/3h565du/mX/+ovvPziG89/77mt7Ve//uVwY6/92Z9/5vqNO9PZ2b4zBPDG2lvGvvCVCmjq26jM+OFPfmxjd+Nzn/kMmz388EPLecpWK3QMGaUa1bk9rr79nWenM4Pcs5aRXHbmEnswA9/FuEwNO0PFYjRVs/l8gaAI2sdY1jW7sDqcg7GaMa95MoR0QnI/RWPwATgFE3IwajWidxjFmHBWS2zZHJZoSdWcAZpoUhn5ggwcYYaYyAdyrM5TrVxI1wBo5UqkIJTAMCOzaQYzsR4Jcp46pNTXo6CS1AA6YcakEYBqX3WruaSQlQyiR0MQzzguR7dXbSgcmrFRbKzrqoTBGnVc1ON+RgVMM1u10rASakqXYuehdimFEqacskooazG6A9IHJFdWG/joCNw8T8befD121CJ0qsgeSgPo5znnmjHLTlohF2YqmJHNZU5H7arv0qgczUYzHyB2XSjy1kgStu/s2aMPX33hhy8vF3/uyasfn/ry2rXPLGMRhJqVAPpR6FsVhtnvv9At+fK0dDmKo+yw6AQiJ84BQImQmG3YMRkRkXEGUjBhRFRlXZP/OI88qnEWUnUaASiTM4eSzJILweHWrv7cjn9zYS8fwSIdyxE5KALknrhYgTkxH1FBU44KmlKmCZqTRY+bnh1x0wpwaa7tmi7QjBk7zZZCdyzats0SZ7v46mp1bnq+u71avl2N3j1XMIQg2IVQguwTzwCzqgB4IgAiMe+4Ho/qLnU25GQSeO+DBWZG5ygUG2fP3L79uqMaiFQVkmRVcnRKdX+wSWHm0yZl+HNwiOy6uLW1dffuPQvG7HPOy0WzsbFRV2OJq5T6vvdVWc/ncwDKGsaOO+nbRmZn+Ld/9w/+jx/7D/fSdcc5OXBEqmoI3hcxRu/KGCMG6rVxvrAm3X37+sVHPzSaPtkvktx7aefJ9y5i8sU4x8YwFxZqPFOMMwR37cb1i2cva87mB1GmKQxeaZJSKoInA4lpkGjRqRp1EB8hIpMwgE7YBFliY3VVmjQQwbmQUkIXQa1g33d9CAEAcsLal5qUiIrCx5jRlEyxrGLXzcb18X77G//Z//7eYRiNLi6bFsAZOoU8wBlIPNg1SlaiIdADmVlFDSSUhYopKqzBkLVwYQCET84UGKjBmpqhg0H9APqus6/RAUIs1
bUffObpz/7+169feurX/72/9Vu/+c//5T997eMfe9dT770aimnMmi2iCnHLUgkREeUc1dR7zpIhSVmWcMqYtPUihGhgxB77vm+apk/REJ544qnLFy/duXs7d7icN+/70NPC/PQH3/vRP/fRZ7/9/O0XX55O3T/9h3+4uTGebuov/fLP1tV232XnssjgsGbOOUJu+wYJHn308VC4L335T6Ybf6UIY7bM6MUoaXtm9/K/+71vzJvD8+dLSdE4w7BvIc0KXZ8Xqx7UQI2ZHbpeNOc8OOUBwGg06rruNGJp7bdzAm3Bmu1+/6Y4YUwBmmURIPTMqY9hXCKRIycZVUSzqhEjmAUzE80B2TnvmBFYBVxJfVxMJlNgEtMuxZQSWFITJiICj4SiaObZOccS+yi58IQggACWASGrtdILAwQlhlBVOYsBp2hHxwsK1Pcp11Gj72MHre7faO+9QZ26EUPrQJZGKhshVTn1LGFcYmxLcoWRa2Nh4LsckFfWmuXrh92Npb0HawjFYinouybH4y4he/Yug3U5Y1HASmvRKfBuSQI0j7DIiTphSTrxI+/waHlw897e4WI+mZy9dv344LhL1B0cv1Xkm2+9+Zlvv/D/eOHF3+r6NOZgYZp8q4X7wQ1/s3NT7y9MtpWrQyuEqg61JUWjoCMlhsG2DWStsaPhLPZkHVomzc64MFdqWVklJSWvxpaRVhnnWVaaOomLQqPrV6vVcXP7uLnLq0fe7f72h+yvf5B++eP5/Y/brDJoc9dbl6Rbcu1CHYBJrE+wBMIdUAfcx+CwWUnKATinJFXB0+l4FTumiBLSXbdamW2GflKnvGKc3HsenYyFLApTwUzgEQE1aT65s7MyoKt8OatGm84F55whKFhSkUHuDy4hjs9u96gi4oJXVY/Enk4L+oNkgAf5XqcUguFftV0cj6frax1VVbuu1yTOl6H0aNqtoucgpkAoCsmEfeWMdibhzRdf/NZXfrize7aF1kyyJFUNvmi6nsipqvfeiZo1ZRneeuX726WbjaZt/9r1l37v4vZOXVYKTsQ4eCOLWabV7ODgwJelq4o7t66NxgU5zmKEblgwmDmEkLOYQWAHojD4kyEjeDQHMOTOkQeoUDCZZXAhITSEhcIYonp2HBi8NX3jnCNXJC0cBjLwbGg5xx4NPBUMhZig0uZ44/f/6DO39xdN43/rt//N7rntPkpOpoDZNEqOkrNaGg6uArFj74AQmQApJ7l/CgiNkGhQznFWzarD1E4kiSTVbCZ5MObHU+92AkUTABTQOqv9wq986h//5r/43T/64m/8J//JEx/+wBe+8s6/+8zzq2buuXFmhS8RxtnC4HTIzI6DKBAG74ucVQ1PL4b71waIgKHjrFIUhfd+cTwHgIevXG1uH7sYu/nizVfeuHn91te+/M3vffeHm7Ot8Qj/xt/4S0899QiYhoJSXhLL6YeyQ7GcNYUQUsxd1z905dFf+/d+PSU0oGxdn3pA5wKuFvTtrz67OXGYk0VgMxT1iQ1V2C+iLNsIimwQXFCg5XKZ+jjYPHjvJ5NJs1rknE4Rlwd+2n0BwYNlfXiDB0oqwxwnWXbjoh9gQB5+xnq6M9hFnHZFp8KoYWodu0TkTu2MVLOZEQ2Rj2qiliz1OScd1gAj7LgQx5kYrZToU48sUKgnyF3TOOeY/aJpY1ZyzphwxbnlMtSoLq4K1NolrRUsdTpMmwhDxTPPbr4sshbOGC0bRce9dsHymbp8/NL5cxN3oczjEkYSnXYTNEI/DcWmKwtEFB2H4FOehBDYWXDXWd9J7RGIuUCNeBe4a9vlqhOwnTM7Z87t3rm7eOaR905mF28sd8vtj2xfwrsHL86bUVWd53G9Pz+6dbQ6P9o9vE2fv3H2D248+g9fvHRDdkHAoctcM/vEkrxk65LpEJ0gIiKJT9RlBW0wTgkqwGAIGSVDk2zlE7pMBMjoTUJOmCR3YkX2WdiKQi3lfHy42nvz8NWF6LXbu+nwI7vzv/6e9m9ebZ7Z5pFHiLHbl9iTIWJFBbS5qNvsrOuoGld7t+bd0rxHQEbkvu+RSDih49XboD0saf7YJyYxLkdb1LydV2/VPkArqXQjRzKIj7IKIqtqBlEwX9bEwQCCr6tqXFQTFwogRMe+LEJZMHIYj0c7m0P+hohCEoGBR3efJ/Ngqz5ciD/m9t51MYtsbW3lHIe35T42TcdcEJFzru9j1+WqKM0EmDAJCEboVjFOz4z/5b/6d1WxY6JGaLiOj3FcADoFFFOHJYrL0i3iK1V13Nx54eidz5/Z8sWZK10vFWVKC1BJ2aKZqzw4d3Q439zYXqVu72DfuYDoBTCLAXogVgU1VMNeVGnw8yJEBiBTWDsvImTpTHtHaMjk6xyLPovfSJOx996vVgJQz6ZbRXCxWwVvilkgCaihIy4BXco6xAMxusWq+8lP/1TbHQR//MlPvHu+OAAjMHJrU53TEoCmMKS75ZxTEgAiHwRwEJXrA4XGraOykYjWgR2Gp5LaIbQaTxK1aHgzkHp3eERf+OL3H3niyUcfe/z733vnX/6Lf/dLf+kXk7Pbd+Tb336JsGAIObGYj7AcbLOQib1ThRizDtAcDuAwn14hJ4VJ2DkzSSmhgXMOABaLxZXHHnrsPVcPDufk3cbGme8//+onfvpDf+HXfvHyxQt37734V/7an/sbv/5rEokZiUUHdZmts8AG87nBPG656pCK0WQ6rHzksO/brc2zX/3CD1LXbI9H0sW1lZsNbBWNiqvOsqCZATnyIYm0q1ZkiOZI0+l0CKLRLA82Lg/8rj+TpXd6CgDAVBQMHYsm8lhMKmRi9maGyOT41NmNiDwxIqolYuEg4FTE6moLbZwkD4rZ4diCPuAqSsTsVSGJkGMFQEaUHFla32U2Z1ihlQVwCSF4QkfosuS6Gm1szRRzKByrG4ELQpoL4tHGiC+dLzcn1UZdb03o4tClu/QAAQAASURBVKwcO4dET21s/MRo8yoUW2AhiZmZ93kU2krnuT1ezJ/cmOwUdJhgtlFf2JiW4zAm3t3cqLxjwLIM6MmPSiycq7wjAAbwxoWVtbrG2kUUMLcxmgTgc5Oto+O9e8dHexcu5GNZ3V5uvLs8mm398ZdeHW+c//mn714/WP3RD3eXtHWkdKOZ++nGRKus5bFbTZgYctSOzBFVxoWBU+2NGZhY/UniiYFYCwkQADMRoTESMjgyS7kQFHRZLTlDNIGMosXSUkpSucXUebaY8XjZmLeqvoh33trPq83N0cWRf6hd3BiN3qzKFxo96IGWiB4S9rRR16pLoZgUOBZd7hzTspem15Qsp3S8sOlOtThc1cdjnR2OLsBrvW4kH8xd/3b/6OXQuiTReXZ9kho4J0NkVIQhuogxSpfbToWNA7uAXBg7dsEFZwCYtW1Xm2c39/beCr4cMlcVjOB+4NzpfXt6KZ9W+dOK3/dxtWq3trb29u6EUCJijLFpmsnGhNiTTzn3i8Vic3sac0I1MctZ0VvMVFb0zjvXv/qVFz7+iYdu3LgxmUxyFgBwJ/5lZrIwqcrqYP8VxJYqMt2X
hNtXPxm9LzSBJO99BiqwBNSldJsXzt+7dWdra+vsQ5dfffX1x1xZ1+NGxOv6Fo2SVdWhMzMVKxhssOVdTyNRwRTBwsxyJlSmOkUtiliNpm+8s3rtnbhYkUhder14Nj90sdjdnomkMHZd16UYTYMmBsih4phaac2hP25XZRXObIe/95//H9jl4+OmKuuhQeN1Ro9lNbPsiekkAZGIBAxODsXpeVFVADI1gsEsfiCtDOpSBGAw8gimpjiQgYwZEJGZk6XzD2184zvXKP/EJMxssvrmn3zh0s7or/6lT//bf/PF118vRhvPf/KTH2hXEfpVRSMzI88qambk/DCtHZzmTi4JBjjptQclV85E7rQ+DraJc+muPvbko0+8q+2aF37w6ofe957NrfL2/q2Llx55/vm9N15/O/ja8Sj1gGDMmDUB4kD88N6LCDJ5DikrGOTcApAZsdMQfLtyX/7KNyebeNws2XHGPoMHAjExwaaPyyYO7EPnSyXfNP3JggGINplMjg7nKvcv+Af//O818uvbYThNKSZjLH3oV0s3DUIqIJ5oYKwQ4iB9PW3VYfDpX2uaZRh++qI8nstwzQ8eaUN8IIEmQyNG59B5Mc2AJsP8FpwBqOXeslLvIBk1sRVJRPUwDG+6dn68ylULmSEUq8PIS398Ww7vNK6lZr6aUJE1nhlz5ahPQihv312+ezybjVyXesdYQuBOAkLFBOTLDvUoTufFN/ZXT20XUzXHhH3M0STbiMEyNGhtBa3kVd9n4rOtJQ77Xb4RW2pi66CqQt3FJnG6sbcHOCqqsjm+16zc5uXtH9x4461b23X9xO75Z26lZz73/Vkz2R5vn/nOdbjRbI8gAB04f3un76YuOKJRSYUPo5JqaEpiHxgG6x8iQ5ZsloGJNBms03c0sNYORt4mTmpOnlQk9ZIzax9kVcgi0PXjfSrDvO/mFlZhwzzPXBXme3rv6Mz5nYj5eH47NXsTmDW3nvT7f2Xr4NPbulk5S2KtaXUWsFSn5pzrm5yWnWdPRDkW86NUOO+gin171NC9W5YboJ1y+iG+tVhWk2LxZmzeqUZFmWJGcOiB0GlGUENTGiIXte+7edfO+65NXT8Q2pzzIXgzSbnrlquDvT2uQ2YBzSEEfWDL+aCR6WlZP3mCJ1LttW1k17SqOpttNk3DDgW079uj48VkssnOuxCatu26OBlNc+oFCVlBPSpgxq2t8p//899PVtX1pO/jAMV0KeKJbZnzaHpvuffsDDj46tUb14vNp8vtS5Z7AOrNrwwF0GFC7bN1zteO6Pb1G+PJ9kNXHrv21tux7QC9GBpSE2PKwq5IZm1KhpQEo6ioid2/bxHZ8tBTh6RGpVodfv+Pf/iFLxy/cyseLo7bfPuwOfjOD278zh+8+pv/9vtf+taP/s3vfvmFF94o6hF7yNqS11W7AuRAVdOvNrc3/uSPv/Dh9777/O7u4f5yiAhXVRMFNRMFAAZ0SCct/JBOnnLfWRYQPYlaspMjn9fPsliW050+Mg3ZHQB4wmT5M31oqYXT/KlP/sw//cd/cHzcZ+B6cubLX/3e408+9KlPvb8I9PYrh9//5usgWtd14d1QBJkRmE4oIUhECvJAeNaw+1lfJ0RueHF4ImLMDIhtA8fHe6XDKxfOaDzMTVcAA/mLFx8HqIswVhgmmSqShv10llMqDiNi1zUPLnLB103TnTlz/nN/+A3pllUhoEbkTBJmQPTEoMZtk1PMaEDMFKo+y2q1yDmbaR/bzc1NM8s5n2bXnB6rB1v1B1958D1DZw1q2XKYVhkSquW1+fN9U5rhk7MpGCE4FZZMA0V1vriX5HigyjjHOefTlcPMollSSSnlnPNgBUGQkxgHJUdEuQfJLitpRu1yVYaUMhE5dn0bzawqKhOyZWMSlguPqxIjAjNhURvPSje2YAkr5q1qugzTH/ZxL0UREcs+wNjxFoeRkjfWLL4Oz2w7CPKtlCeefNuGot4N6AvwJYeCdgt/JupDFmYGgWwUJhUVTpVSorIce6gg9gYyT8u91cGb129vb1/curjVNPeevHIpxlxU57am8PIbz/7W167F2aVJRdgtrDQqWnTY8JhsSxy03K005ehjdLFXTbE1cTi4yKlkywLDLcKA4yCjQis2D2BKKbuUfNf7jB0iknpOAbVw4orM4zT69Q/9wmrvJR/g5p0fzo9XQtZykwsdeRjBwZVz/SHevRWXi8ODacgyX66OHiv3/+KF7sMX2HwDylZspabzIOrIZZWcwDvqj81blUxj21WhkIzacJfAmv6ZD2wkvmMEpYX959XF2rwBmgMzNlUwzYSAioxONYNlkz42x6vlYWyWse9zH3NKXdO2q2Wfuyxd5FzNRjkmci7nTPfdB/AkSe7PXND//Y6eAEVkfrzc3d0FgJwzO0wWD46PFDgUIxccACzmK8/BgbCIaYqKnWZ1UE/dnZtvf+53vrZ75kLO6r3POYfg0LERikhNk+W9W0m6qMXBkTZpcu7S+3LOLptkdUWJwESWMaPD2ldO/dUrD9+6dWuxv7p05sqTTzzxoxd+OCmn7J0Os9Lgk4qqhhDMTA0ygD4YD21kQKNiCIz1oE1dT7/xrYPX37El3MnzV/Phm6vrr/QHr5vcLEfN8fHhC99/a3f70tWrV1PfxdRVZdCsBRWUAyI652ej3R/+6Y8+9ZOfuHd05IupZlEQIGQfkNgAB6B/MGEeYO6U4+CdgmjwZyv7sBLQMC0a3A/AFM0IgQkdk2MlVMIBTmHvkE9/YLdq5lvb4+2zxcVHx5tn4LEnzj39noe/+NlvdMvlU4/vPnLpwmL/+Ct//OVnv/nawf6qHk9EpOk7VT1ZNuCk7R38i9YbBgVUsAEbHAraemHwLknOaMJG1XiRdLK78f6Pfmj/eHG8WJKHcxfPhbJYNitEQY5IGVTJ8fBLyfmYdSCohBAMJOXeuaACTTvfmp25fX359a989+x2mZOWZalRnTin5MWp6irqsusNlMC8D0rctb223XCcEW1jY7parWTgkZ4oclVPzOLhxyeo8GchSkUgg9xHF3wxqZnZO0fsT2Cc++GrJ+uiQ2Q5dXcAGL4JgRFaCEFVjRARxUxhsBpDSfm062JmRGbLLeduLBFTmWyaZMToS1DNRDT4eDt0Hst+kUuczopp4Xzh69hbUdLmZrk9c1e2/aSkQEbMwK5ZthDckcFeJ+PEm4JbhHVB6rWFlAvEzWrlu4u7/Jcvzl4N7kZg2PZ+VKYgG4RksRg7KKwsfeFsVpee4YDzyufzl2ef+PDDrksdsgeXYy4fPX8Zm7ttxsl0pO3BZDa9eXtvawZtbDZ2N9PB3dnOWelTbL2Qme6IuYjHHrxCFhy53HNAMSWfVdFxLagOlJgBaGDxMnoP6nFgjmFGiAZRQEFNUbJgyhUXCM4YBSgaQiqKfuPXf+avvnPnTyr5hf/xbzz2v/nP/qO5e3S2/bgd7+0t+nPTSxfqcR9tvy1TXfZe61RRJm3Nbn14NNpt4btuc297u27vpgn6PiUKnKOEgMv9NvXCHp0D8FCzpD0KflTdgHf+sNn
Km23s61Asb7T52AEL+dzP1U0yqpllh4NPmbOMRBbFmFKKqmyWk0pGUYQ8uNMWPgjbeHu62N/zNMomFXq4n5dteBIv9mBZf/ASHxIqiFzbthsbG7PZ7PDocDoZiwiY3ds/3Noad/0iVF6Ttcu2Kot2uTKCsqhInPTHwPVDD2//9u98/lM///6yGqmmnBOzQ7Sco/desG17ufT4r7bCFzyNt29X9VbOgagDE4i5RMpC2QcS3+x1y+XBpcvj1Wr14g9evDm9tXO27PrmD373D37iZz66sTHtuq5t28I7IkYFRDIiMsUBnTv5sQDQ9o7L0OtyOh0f7LVvvPG6KzHHhOMqrXoEKUeluLB3fO/Rqzt/8ed/yZstV/sIUMBEOvYU2KnEFHO/u3P+83/4Nevtmfe969qdO+brgH0EPgmVJkRAJBFJmgvH09kk5xxjrOtaxRbHc5FEjtfyLgZEGsT+AMR435PyREpKigiEYoag6wxAUQIkosxORLrm+Kd+6mOSe6dtczQ/uH18yMfTzfFLL+6dPX+5SUtHxeNXz3/761967IMfOHd2p6omTdNJzs45Q3SIxkSDdQ2A0alOlYjc4EE/oEAxKQwYjihgH/syBJfiyjv3yKWHsqD5nBMExjCpYhdTBIJAlFPORVHEmAczAO997BpG8swppdwbE6XUTScP/7N//Juqml3bNEp5AaKFedWkhE1u5ituujiknTjnElCOEVWccyl3o9FIwZq2bdvOOXcKngwxiXRiawwwRHGdvrK+EcxscOq3nF3tjCmn6MAZAaA6IhOBtYv/OhfXRBUVfUZCM21jX1cbZdg6jm/nnAdYZhAT2xB0rqhZCLAqQpbYdR2xlUXI5CjDCDn10GZdsWLmSpzzNBgnAABTYAx9p13q7wEfHxz3t9KNa0hQ63IhgPcK200peWqbfuSqmYNRc1iJbRW7s0mRFkfVskvOMITKhZRMu2XbUdbwgUK+elt+eyK/MQW1g2RlyrHwvmwhJ2w058DtclUzL22VUjq6a0TkLsy2D/oFRdpi6o+Ot3brtmn6tmPAyXi2Odm4c++142b80OXz31n8YF+5BFPHRoyQHPQFshmqDaSH2mdBRFVgx8ksGAKyt84Nxl4FNn2MxiK5BwuiPYLVQdree5/INHCUSWcONRbM2ooDTxX0/p3/4P/+f7340FPff/Gb7/7TyS9/4if/uz99zRr4T//9v/OjN//R2+/c2l/ODg6P33791tEhZozj8c54oz67s1PWsV8+VKbZdv3KxdnLz2uDuCEQs6AvSVJcvFYyM0hMpcecFGFl/c7+2Ve+vBeXHMqQclJrbdo3B+2MNgoIC+8wIVoDLFG9ASE7AOxTArSEPoaESVM/FyoBYlBGIsOOjgOU4LbrnqxKEMqi7fqCynXDDmxqRDjsEx8o7muDwMFcUCQPd/JqtdrZPnN8tIBMjCC5Xa38xuZ0Y2Pz8N7tjLBsm2K6nSU6wJw6RAIsEZwqJrj3T/7xN/+X//GvvPX6C6NqLJBEu8K73JtYvvTQh9o+jks2lDOXH84xEUVVcj5kBEEmdNTJ1vbo97/0uf07sL1VHBzcG9er/XvF2+/U+wfHzuNXvvTts2fPv//9z0zGZZZORAzQMaMKM0vOhA7QmSgzEVBkZIhs6Ll++52Xm/nx7vnLc12AL5VlMqsj92S1LvQj73lKpbl3eFSGCtSIBTAJiKHLrLNx/c4bb/5f/s//xZVLV47myuwVtFN2BoSkAkQMRAqQLTnn/uk/+eOn3vV0KNWHVXdUOB49/t5tLouuS47RM4lmMNHBlApNvAM1UmMbghQHKREkNUI2FQVVVWJSMEZTNGWyhuToxsHtN27dWN6+fnj14qNntupx7M/thBt3Xvvk+9/3u5/5ht/Ye997dr/7nZdfq158+iMf2Nq5EJdLROtUoBgVIin3RILsVAAUU+4nk9Hdg+Nbt+984AMfuHfvHhKMRrVIyqLBj2/f3d/YqPq+ZUerdrk521ot2xx7j8Xx0TLFjhk3tzeSGHIFbXf73v758+cXq+WoHt29vRfYj0ZV7EUxAeU+d5ONne899+p3vv3ymbPl4jhihgQQBeaSPfDR6siN6yZlIGSzcjRlVx8dzpumUUSFHCU9tHu1b/t21a9NjmF9qZ8kI/4Y8n4/d54dmlmK0ZuRjRrrtndnycRDQZY6M0LISohkJoUj8EzAGSB7cpyNOUJgMO9Wq+4gxK2iHA++TzZIwNUYIRMVppocWFCIvuA+pzq4nLDwyVMZSTQU1SR0CgjSaU6mANpEIYNMXRFKUiFl7iAtysUhNH32IY+B93NzpWt3RqOjpJl5i3AT9fIj9c6ZEG806WjlOSWHhszoIPicY4zqxr5aySrHv3DO/39X927whcdgmcFqKsQylGho1KpvcWTes5v0MSuXVdXG3j127sxzL13HagcxUd9UcnZzV199860rFy5X3B3nFW3gnTvX7Vb9vg8+8+VXbkE1zX0iBQfoAIlITJWHfBtcW2WgIhEDEDFi06Ux18FBQ21/cXbGB8qaKqTc2/z4AJLFYiKZVrk5JikTxByRtUuJqMhZHVgE8qPl9etH5SX6+1/+rbOT8ZMXtt956+2v/eny7/4P/6v/6d/91Gf/YNV1JbFompq/p1ZlGJUjPnv+4rvf+57Z5jQvH59ttlFfVeeV+j4mcuxCuHnnHrqi6WJdOM2JDHarM4dvd6ujXBYjcYY5m8ZZmFa6wUqineMEtuoTCRTkRo4KDiNClKaTHAPlNgEaefHWZFeUZoKGYow+ZEmj2XTnyvnDN/eq0ainvJa3nXQoQ/8FJ67WDzxMVcz0VOg0n88nk8lkMmmW86qqEDhLPDw8PLO7wa5UjX3fdk07q2fz48O6rM2si72hsXfnzu788ec/88u//MGzZzaWy8ZxIEw5KzOmFM2SmhqUmlGACEoEKxzFlF3wzJAkAkCKi6ffc/lfPftvAj92ZmdbTcpyI2PauTjpoqSD46+/8IN71177wEc+sLG1RcTZFECSgUMkg8CIFoVM0ITJQYFiKTG6osnNaDoCZNHkuuTZxT7PNqaaC/KEjNJn7z0AOOfWPDamnLMp9p2OxuNz5zcvXtnq48LMUhL2pUqPiEqmoEPLxsxVVf3cL/7kP/knvzOdbPzSrzzVy5vNkl78EV698tT22YvtcpWHvQXhiaOMQn6gkTxp9xCR19SnIWwEQA0RDAwTlMT98vjo5ss725Ppzpnp1ealV+7c/BGOHQTPFU5uXW9m4+3nvvej8v0P353fGsXqtW9996mnr8wefqLrsAShNM8QiBkGWzE0dDiwf27dvLtYNHdvHb786puEvDGbLReN9z6le1Xtj+fXkWw23rp+fW9SN8fHx2fOTd/11MOf/+Pf3tra+PCHP/z1r3/10qWH7t47Oru7eevW/vHhMGY8XK3mjz5yUSQjIxoKtCZWhzO/8y/+7WgckSB1iZlTFsecBA6OjzgUZhb7lpHYB+dC16Uu9pKyqmTrZ5MZIq5Wy+Ho5ZzxxMn4x2D3U+/fU/QcjFVEFcizSPJVKKfjVc7snW
MfUyIVSzkUpQA2y2XbuVxQVkMb1AjmkLwnBGeIYpoJgKksCjZ1sBaER80BFcgpmCpUwfU5mWLf99OyjH1fjEN0ZCZEZGTZGTnO0NZj732ZusgoG7Mwv5sm9TjNaNNhP0IkOO/98WFzbrq7ycfc485ksiHLzREjFK++uBgtx2NK5EdoGmPqXA5GaEYOS085iM/8WOf+ZhzX7aLdDDtdOKAeGc2ZiYXaOy5C4qSpgCIEi7EdlZ6+9OIr4M+QIKrfmO0eH7Qb4/GocLFvxuwmoZ4W0w8+8/hoNBlPNsYOA8DY+ZFzdfDsnDrWMuTgxbKonv6TVLJpNlm2CpRiPG6jdVa1GVIfg4Rtmly6eO6RRx5+9MrVSxc2MrVlWc7cuA6hKjwQGnMyQHKpVxJqJI9nHDrLNWIH+8t45qnqH/3bv3f27Cf+2b+M2U/KLVdujqotqOoL9WgyrbAEeuflV/7gd/715/7wczff4ItnngyldL0weeecZum7dLxY9REMuED0vlBVF3n+TgYpjKnTLClsjs6ubvKzn7sVZCsUs64xh0XOGcypeeaK/Lga7Y63L26duRLqisXYuDVUD5ayR0qm49H2xtmdyWyzHE22rpzNrB5I2U5jCh6cJj2IxvwY+HiKz7Rtu1gszp7bXU+TxFLfr1bLvpPJdJudI4L50bFm8xAkiuYcnHcEKinFZnM7/H/+/j+bTC+YCTGYOkLO0juuAByzT0mYvQpkTW1cpdQF50BNVRmhKIr5cfvUEx968l0XilEsR7RoFvPuXp+6HP2o2uLR+JM//fHRdjg4mjNPTLEugkb2xpAMfdmbRWBBF/wIEjtDhGI02Tiar/YPe1eM9o8OY9atyTgUDojYQrNc1tNi98wmJPO+WKeQkwHoIIYiomwA7DZ3qv/tf/ofiRgYOk8mgI5lbURl3q8Jcymlnd3u1/7a5aeeqv/VP/7Bqz/aMb955bHLL7/63EsvvTSZjlUy2tCrOAATEBQltZOMbtN1LOF6JSYD1DWrdeBLAqgaZ6e8Xe43R5IOL03dhy6erUO63fTXjuJrt5uvvHjNT6t3P7Q75eqxJx+6uHv+4I5/7rs33372exPogithkPEaxAymTOiixFCV+/MjkXT27PaLL/3g4SvnL186T4TL1dwRVRPb2K4PDw93Ns9fv3ZH/n90/XewZlt2H4atsPc+53zx5s798nsz82YwwCAMAglQBAhRDIIgQyyTRdO2JFOWrbJll8oS/YdZZSuUlUBSBqvAAAEUk0WQSBwiDQBiMMiTE95782Ln7pu+eM7ZYa3lP87tnsZIvnWrq+93v+p7+/v2Xnvt3/oF0Qcn91589eZ2u+n69eHh7tWrl7/82peHIlt5Pj1ZTsaz8bhuu2Ufz1NZq2Z2phaLQteWq1eu/9LP/+57t96a79Y5RTdEzZiUlE0VPY9n49JHEzVDxx7Qb7pusI4ZlvHh4WG7XXddR0NWAdEw7XgsQGWiQSf8tbL+9OIfJgpGLJrrWV3o4nyNJTskAmyquu97BWXn0DE6BgIsSsA8TP7MStbAoR6NNPaxb5PmTksmyCqgWgEBEGABAFM2EAbQArX3iYTQgiLEAsTCCKpjoiBEPVJ2qK6P+ugsJgydxQerbSp86+H6zHKI66tZNaWHpS9Rq4KbdfsI+LVze/0rsVtU9219ty/3ktzepONO1j1sFrmshFuyrdrC1idny2vH7/uL1c2/1Ox8pD6hPvWS29yve+ml9CWut3nbl7bHPmLsPail5GaViSXPkEjfa89nMDvs9eru5eevHfWr24H6R49ssh8eLU6WqdsZ7x73xXkXSyGEYoUKePQSe2VDGJiBOGREGoCZInkH0eVCnhPFVXaTarciuXr9+dHeGK0cjMu9e184XrQPWgtWCSsRO2EUEUDvXDFl8p6gjaUG9y271z7/5meeefbZz/zS7dc/ed9X+zu7QUxVlygE2RU7cTYH3lgejacUU3l098HP3/nYH/ueV28c7R4/2BJjcOScL1mRKGXTJPUcBbCAbdYdJgghCFjuZeoZLTql9qG4GbNTI4w5eU3OkmhW0z5FMBZyFOq6mezs0vHpWfEUbY04cfW4YSJ0SaJaOTk/c1C4htTHqqlym54Q3Z6aBf0h8sCTx4cB4DDZJ6Lz8/Od3Wf293cXi8UozNFKSXmx3Fy5fOirIOr7Ni+Xy0sHh6vluWMqKsBkomY2n/nXX3vjn//s7/7ZH/zwe7feaNwBUQbGUtQ5llKG8qdWxqOmafbidhtLBiRHIZceHbIPDx/0f+LP/sVcWu+a7bYfjb1lQHMAxY9qIkcEBqn0G8Q6J2OfnQZBUDL2NSpKlpyIcSIlA7t60nz+M1959EhmOweCZ92Gc5Kq8hA5ChWQl1+6UVduowLgBt0KwDAPQybXdfGZ527+7R/9sVc//GqofS5kJhQKu8HPa/BqGcqHDM372btnY+i+5zuam1evfewX79y+P//Sl/pXP3jtM597fdTsPHPjUte3hJYlM/NgK3PRTj5O0bo4ni9swtDMGEnNBjsDIyuQqfLcjuoK43K5aB/lXL8yxYNxfXC0T5bU+92GXds9vPPoq49O/sh3fCRrvP3WsWxsefpb7//oq3VzlEpf+5GkAfbMFTdSACy8+v73OeeODnaHU2VnPjs8GJmZG6EKfcM3vO/00aPnnttNkur6KrO/dLTXtcuX3/cMEI3nYbs3mjSTuobJeL5pN9NpvX/07KNHD186uJH7KJLEHKLOpjtnJ/Ff/NwvXLk6soJsAUyKCjKBQSmxmY7FdLvdhlAjex9GbRc367aUwoi5xP39Xe/cw9XqyTzpYtR0kTdFT632r2cTDHZAqhZ8JUUMymx/HlGrqipFuyQVgIGpSlWFvu998AOPvmHnnEPSgfPjXMiFS7KSUnCeiJAHc6MLATIhMjnVjEP6ZclV8N0m783GBYuxE9OiBkagSAoVMgSOlqnyYsWRgxz6xXbip+tE/cMuJ2cxvlq7myiHNRAX8qFLvYJUVgcm30BwhtlCLQ1h9gqVq8CZGSANTLvNuHv//+aG3pRbv7KBXxM5f+SaKXJlxlyMmQ3RTDx7JJ8jGYgnNVACHSn5VgIwWuqv717+ge//nkcP7774yodf+aYPfO6tLz1s2z+4fXupZdmXs9Vq0/fbGHMpAgZMVDlEIFTDQfcBavbY7wjMrGJmBgohcL3fjOahxHjeinzqK5/5tV/89Ouff/jgThyPXqx1H4rQOBOoA22CRxXHmCUlKFvpl6mKOSxS7OPxN3/wuc/+yqfv/c6dvWYyrrmPD6x0mGb9WnPpStSUt7FwVs4FELnx01FN//JXPnN+3DbjCRimlEzKQDBwjI5xVFcni5V5l6xkVaMsolz8eOQKdkxp7Ku6EuENe9o/2hctiMAoZrF0y+3q4fr83urkXrfc9oQW6H1Xrn/zczeixrWJZWg3i7PlwxxbKKVonh3Oe+kcuqF5+bpW5UnD8uS6+nSVH0r8QDM4Pz+/dOmKmZmJ85RS2mza5Wrrq5FzwXvf9tsuddWkjlqMKZaMzjtf5dTdeObo7/34/
/fh/TSd7CLllAqQ97VkjcwegHKOs9ns1nuPfvM3vnB61hO7etzcu3cONvGhpmCjmTdwBrM+22qTjh+g4a5r/GQ+mTj2GV1hEnYMxMXXvmqaxMq1q3wobYsq3qGrWYICgKv0dLl+7fX7h1euotO9vb2qqcNszIPJ+mRi3g7255qiYgZFBEa8UKJdqKKAmOrPf+HL3/cn/tXjs3Nk5+sGAFTL4yPgD9+BFBHXGHX78PRD7+MXXlrtHqUw9W/d6tjv3757wqEqJSFa5TwroCISA134DA/kJTQgeSpBYngEEA3QgIGkpKYeOZxgBI7qadyDzGfN3tg/uvOgW9r2uLzz3vLNByve3zs42v/l3/itUYPv/9B+88zk9ml558sPat9X3uXYmmYEQVBTRbWmqku2GFMINZNnRHY4HjfVqDIZRj1w7crVuqr25nsAFLtUV9P1qicMIuhcvbNzUFXV/v4+ubS/N2P2oNXlo+dB66aeqSLzuOu2h4eH/+gnfolJPIeUSlV7AxpEnkQkgEZ0vlx7XxWDqhmp8XKziTF6doPgdmdn7/z8fGjSB3v3i4b9cXCVXqTDPz47/3BxH0akg8Kr2anqeeNcAABgqpvRhQUYmWrxjiRlAPLee+8FTJDUEE0UAdVBNlBVR74KVQhOgcVq55E4A9LAhwIYohMdsSqk1DvlYtA60sqxYMjokFoUMcUqF1sVyMzoXUEomt28Go1prOo+tH/08qhBl6/tTXea8Qs7k2cdve9gb3cU1tIua4hV1YwnzrjBsBtGO1xVhg4QPEeT0XX3/h88fPDVu3d/+Cz/JrotN82hea4aDjWDA0FRTOoKelVM1QSqsWbfzq4FShgrxdqobGON1b3t4r/6yZ9bjac/+clf/Xsf+6Tqs9/64ed26jNfMPixVM47xwYeEbPkmIroKvdSOSBUMFEVGSzuDAHQQIySkjmLXbq2/9x3f8tHd+qcVqcfePHlP/Onvuvmc/Tu6s1f/8ofcKUHTd5u8zp1XU6pRGJAUybwDEymuOr6zXkOaXr1/u/mL/9GcvVhrEZtvyWbglGXF1hBNi5YC1FJHBwQNiKSJEXpXV2dr2DRtuqJK6/OKaMydnE7ndURdNtFBddJzGQFpPRaEY6rkYEz5KaeAgCCk8SrZdcbKKISqvVoa9ZlKCtsT/sYu/UKurSD9SuXLulmrVZyKcEZAWjJVEqJqZ425C+8CeFxIvDT3ICnK/sAypvZUNMHyHj47mKxUNVLl692qS+m7LDkbrNdsaub0ZSdM4Tj0xPv/WOlqwO1nMWDrxusJ+Vv/sg/ONh/Yd0tgq9VMJbsXa3iwJyZxdTu7e0sl0vAoEj37j/65Cc+9fnP3n50T9786oP1Zvkvf/nXV+cn+zvT27feNYR7D5ef+fytN947/+wXbr93/+HZpv/qm2ebbRVFN2335htnALP799enp9tmMh8U/2YCVsxhNR79/qfeJneIXo2jggOGxXo1Go2GJ43G48061dVUwDxhKYXYOxeYvCma2aUrl3/h5391NBo9//zzm3btKhBTIk8YLqbWyDBUpUFdXEq0Ldcu9ePNcnP14HIoB9NqbzI6Ah7df3QeU6nreig9pahzQcDEQAxMgYwQ6OmAkadRtcGKgDIFdr2WsLeXKxeL5LWOdXS+oQcnbeTR7fPtV+88fO9h28HuG7eO3Wj2/lc/cHyev/LW8sGy3b129fOvv/sLP/eb/aZjArU+l61INhOVjJKQilmRCyeJGsxt+6iCjkNJvUjuYgSou1ZRgSnFXryvci5StGTJfdpuu7ZtVWy73W63277vc84x5nabTULbra5cvfTLP/+FT/3el3d2J6vNUiS3fcpqplhybmOqJ+M+5lIUyTkfnK/Wbdu1EVABLEva3dszxfVqOxyxMFB4h/jTi5frD+VyPHkNn7yeIoUAASxqmV3azaAGLEYI5L0nx4OlR/DeswvkG1drVsllcE8CHoRMzvvgyNV1LSnnmEAuSK+KF3KHnITMOSSRYopZSt1wLooFCBClgJoBCFI0U6JJXVmC4GoGUy1h1CSBbczn6/V7p8tFtuXJ4t7Z5s6ihWX54un6k7fPFxtYnizLdjMnB2t/7156+3x7P7t3o570uu1tnUtMYpucerl3u3/tRx+s/6WnPLVg5FIPMS/8ZrVdLzfduus3fd+l2OeUCipLZ6k17Hn9sCP11oFETm4n5Kne1zdPtqvYPXr99hubMtK4/+Lu7jc+e3RYaciIjvu+M1O1YgTMHpQxAXVfA46HIoVGiOiQChTEsSSsJ/rOnbc++RtfPJg+869993e8cKVenH3mvXc/vVpGtbEKOnTOvPeBnI+i5nwBBVBUoVJe2r80qmlnd/UbP/WJH/8nX5qMD4SKSAI1JgIpVGoQzrknCwxGbIDZTIic2CampJg4+MVik5MpUCmaciGiUVNdPtg926y9r6BIyVkNxDj22ky49G1RAJK+bxGNED2RSa8IiiAgRZNCEUlmYlKKdrWp9qnN0qc4HgZApe/7vhTLOVsu1qmgup2w3qyqqoLHab9fh7x/Heb4pKOxx/6RIiLFTs8W+/v7vqacc2AHKH3fbTadr2cuBM8B1DbLzWw8yV0fmFBKxQSKm/Vy/3D0qc9++p//zCeef/ZDbWoVlWmUytAugfe+67rROFy9dpjy+u23bt2+ff87/8i3zufV5z77pfUi/cEX3nzfyx8psTShuXR4sLtHm/WDnZ2dzXYlo/rhJv3e579ycPXyb3/6Myk2x4/6bde++cadr7557907D4DrzSYF35gACU6mO7/1228vl83OznWEGqFerzaHh5cu7ey1m9V4PLac0fDdWydvvns8nx8NvupEzBSAgyoActM0H/sXP0MsRM65cSqZGQE9MpkZwoV7VMmqAh597QNTQHLGTSrNlasHzz9fX7vWVGHz6ofeR2wpJQVCcIhMDo3EEMwM1S48whAKmhAokhEDELIbNEYX71oAUA5Avhr52UGpXZg2zle7e6O9vd3d2XTa0NF88syVSyBlOp3cvvfo1nvt3XNZQ1iehjdeP967dnOxgd/8xOeZRmIAxGqcCzoXSkmAhViJIYTQddHMvPdqUjIjV1yhUh9l5QOaosORDyyWAQRNGVFEvPehqnJCpso5QtKuXxEbeRdjBMztOvwPP/6xnT3ftm3OXdvl5arbtl273cY+K6HxIAXimMtkupsKrLebooIIMXWhcvV0vFqtzZ4Q9mFoU54Eszy5qT5BvZ5GKQeCewghpVSP63p/Kmx1NWmmM9dUqlDQ5CLKy85PTr/xg9/4yvMv5VjqUHtyRM6h8977umpGs1K0zdGTBzVkL2bGVEpRETYwsyxghAAqBmrmasLgEoAD9CKUY0ERZ1JshK6wJKWcvaErknKbPPrUtm2XS3Rxm8Bk3caxVZchnPQd1uqraqvVtqthmwnWxJuKtMO8yttFXq9k07nUu9RqK1RqLeFwZGOM0oqmTd8TETRK5Bx6z1VwIyIPwKKowjHGksRXk7YFGhnTCMc7R5Rls1726y5YyOjnPEXK4xmKjX11+fLlan3y
cEKjVy9ff+HyJTaNOfWqUS1U48GT+kl5ImAcHMnNRi5o6prRNIzG1RQNcXf23Hd+9E99+H3T5erNhyW+cXb21qO33zhbPmzrKcEMKpcMRYuKOIomEJyxfeX+O0fXdk5/7ezW75xdm026rpNEljZUMXtCz33etH3v60axRwjJYgEWLDGBmXlqLKnlUqFP26ixOOSKgvTp0u6utN1y23piS8UhlQIpYhaspw6klGxikFKZTGaakQmqCkkLaCYwYgCmKNILdso1ikgWT8XVibEDKSVWZEQE5gAI1NBICUY7I6aLyd6w6J8+HZ/IJofFLaIAyOxEZPDOBUDnvKput9u+T/tH+0WklMJgXbc9OVuYYT0eBfQOeH22qJ2fTaaWcyAilRCCSClFn33+8O/+2N97cCdOp2ODlCWxE6VeNKvCKOyUiKNmvLPXdOv+xrWbzSRO95cvvW929crOCy8+++D8rd2jg9V2VXmEjDcvH5X+7MalS5d3Js9d3X/x2aNb773+LR/58GtffaNY5kqb/dHu0ezocI9LmlQ+9p0hhMn41u3TR/fqw4OXVeFo/+VxfW0y2cnFStvu7e1Np+MAMK0nfYR3b50EPxaRqqoGMgwYMntVPT47/Y/+b//+//bf/gsPH52YhqaeGhAzq0XnBoE+AlDgEDioatd1To1wPN8/AD6Y7OzuXxqvzsLuzjPb7enVa0ebzYbIi7ECYXBJMwE6JH7cqiuSAclj6c3jekSPLwrYu56dM9GSoZoduskslhXkNqZO/WS5hYenG22mD87a8/PuwfGWbH6yWXVtYvFb27gG77z3yI333/nqyc9/7BOzyWHKaFA7mvStea7BHFgA8yLgnNeBPsjEDtRApAIYs6/LMMAUKpqJCICY2RQd1ypYinkfcjGwyjRUYaaCarlAvHHjmR/5az9JuK0aKSWajAAdOVQYpJwymc9Pzk5zFiIaTcaKtNqsY5+LJEZTLTs7O0TQti0AqJqqDbromCMTD8qHJ/dUu7B2YXysycILgpD6wEn6a8/e5HFwdVXXo6oZhaoJIXDlFKwe1yml2WS6Ol+cnZw3oRYRFEO7UICrqoAVMQMoBOR8xY6yeDEGRO+E0XkqRVWVHKoheE6aByPTKAU8csUoqQH0BizqJAfUxjODN2RqSjVXP+F6VqmUmaP9CV06mE4Pqr19vrFTPayq+W6oQ8aJ03FDlGejMGkm82Z0ZTLbnc1c4xsfmmrkwngnzEf1gatCMxqFyWgyrV0VLAFBLlwsKFRYWCQQT4IE2lqfKtJx6FCtrpyvq4nH5dmDNgoE8b4WKwSlp1jblXU6/cI7dbte7M1e/O5vn3/6ndurXo4fPOx9Ley9Jm/bUjBAjVrUCMETcModN1XK6swpMqujyElUcvvC7nx/pg8fvLY5PutherI5y1iDo7EwAG1pbRIEjLDKfe/QPPjNZiuOnjsY//6/eOtznzw+mh7E1AVPXYp1HbZZmW29Of9X/9x3XLp8+ON/42dmk92tFEQug5keomlllhFBQfyoatsNi2uCy9KNJiM/Hr/x5T8Y7czXpXMBKDtmOIv97mi8D+G0bD3yNskaIQOBpggxlkpECMaleICCRWvkbNGMRdEcZYMa4zJJAp1RgyUhUYUKDMXEUFWhGY1tfp6Wm9FkfL7e+iyeoEAxJDR6Qoh8TPUdqrwQUc55WKyl5AH2PT89u3T98qJepZRGrq4Jct8eH58cXrokdZSIBeju8eLoYEa5c1XoYzHtqzArKY7rsjMb/Vc//Lf+q//2/5TaFWKFmhEFiQuQQGug168fJEsf+egHQFV1MkCcpSTn3LcffXMsue/TlSvXHDFP/f7+s6oK4BFtb3o0yCz3v/k5RQI7CKHWG9M+QStbZHY8Lro10898+T418+XqUYHcpbvb/g4ZeTfJzd5J7EajONkdc9HmsIpdFiRs6oJgAK6pSp+IvWcfYx5Pr7hR2Ur2DZVSGJ2oKk1YvfOl5MwMAmZGIYTT8zcRYz3Z74CTpJm3GzvCafGlN0I1HV+5trtqu6PDa5LOADuwiVIgsFKkcv6JapQYHTuORQGMTEABB7CdQdDpzLSwgeciCEfXXr2f387LJRi7TesMRqNJyd1oFMw7bLemOvJBCISgDrtSEMg9eHh689pLX/z8p1KWH/o3//Ty/KE58CH0QhWhqoENqwYVdMiEUtFqMFjWQSEEiiqYnnjomhmiErIqipBSRg8i6LgryfswWncPn7353M/85Guf+ezvv/Ts1Ry3TdOst1vvvBbMmI1xNJpslhsQn8G8d5PpfLXJq8WqSGKmNvWj8XQ+2z05ORliuQaGERGVUpjQQEzRyIgYLzJUtZQCAOCcAwzECTRqmYZRv21d8H4seZWKn7ScWcR5nxxip84kxkwYcAy3H713YzLZmXuxbORcidBMo7lpM69pHzAH4POUSilSWXGhB80QHGqxjKROFAQzslOVWNgTmnjxLmExSiAqVSmbDuuiWTwgcKGeG5OO1utRSUtJY1nnZbLWxK/dcbd5/mD+sF+juU93ebe29yeNeVsT1E0Ykfexky6Wsse+n5bE1MRQehWLLYsNsyVmBtSSrGiWYlLiuGk42DZ1CEwFnQ/nAmMPWjL0seeKcs6LxaqUUgXn2ZkqqIGYGJXU93F95+Gd8zYfn/fzyQH2cX83PHtt18Oay8abAYXCrnPQWmiZ21DWsDXPoJjUejLjTONgVXn26Mqf++if/r/+uT878g9+6ld++bffunfn2Bw3gRWMe+s62G5zFUV7l1ZxLYa90HFfwmR3Woff+qm3P/0rt/enR32KCZME5royqqdNvdqsP/rHv0Xmp0t4b7Jf5ViCakWB0EwzsAGbkPUg0TjHTMAOSTQj4qZrb9+5x1UjxQVqgjYGfRfjHjYfvH5wHNMmpU2fi3LJxoRV8KAYqDGzYpotG4ihKgIgkmcCBTMHyKaWCyuhqA0ekFKyFlUhMxPNVsb7s04jAHgEEaHgOFT4VBeDjzMK4LGB1BMs8snVdTAOW50vbty4ISJZZeAfbLeb2HWj8Q57R4Rt2/Z9appm27YKAMYikYBT76dzevvdL/7E3/mFa9deybBB8qr0eDZIwY9KUSnUb3K3LVo4R+i2JXaQom26TUpJVBWhiOVsKVsRLEVzgSyaUkmppAJSUBVSl8o2BSBH5Iwtdjuj0dtfvbPqNIxdtFOj8/PFo1E12W6UsAIrJgQp9B0WJaw8+dAmlSwOvAlILhd9mUhwFGMHWtwQZoUEhIjoAIBFrSAaMxMBoLAz0TjamUHxmPO4xqhdsxM++M2TVz6w5Go7no4+99n3ihiAgky1ICsNgVDFAJEFkMiBEZgZO2QC4sEXcgjYUgTSKFY01Il8KgHDzvWXPjh95rq37dHE9kL/jc9fGam2p4vNcuVDSLJatWc5x5SSWlq1S3DVsk0Plg8nO/Nv/MgrfXlUKFHtom2Icxmi2XEgqg3mCBcDGwFTNAUT02EQBoSDYOiJYZFaMRDAYsONxDTL2HwuqT+Y77/19vof//2feP7mjdS3qBb7vnZBYq6YA3l0Xr0
7X68QoEY3rsbbUhaLhVzIRAsAHB0dLRarkuRpAtjTHIGhSX/CiRwe8d4Pr2MREZFBLFbAZns72XHLTTNtJmNPoYYMFNfmBRFCCMSgCMBODUWM2SsFqoNVpdmp5wdX94/ev2qBawzsCDDnDGZSCgCUnEXEythyI8lpdiAeCpqAw8AO0FWENnFUFTdlx9pzhaYE5jxXUsAKWQE0J0mS+TqOfMeTyrmJezt3p8aHYTIFfCeV+waxxjCZzNFliiUbuHHxZwK6LhwDSyp+1c/Bs0hAJRWTnFLPtTOH1DiqG0m27c3RuAa30W5j7bwA9NkxWXC/n7eUc1EZyEaCZgSIZkQBoTLLYv3J6qzePXr3wSm75pkrN+7dPY49sFWsjjAwOG9Mgp4cK1jMHsmBg6gjpIqE+jpEqMq4An+4k37vc5/44lsPCKuGd301Ymc1oMvKwXqKiqOtoZfJjCZisLFuvoduFX/lb3721mfbS+OrJSIGCqMmS+lTybkAIjhs5n62W987vrV7MI05moAVASnMKLkMFuKenBYAo8AOQCUlAPVVSCLOVyVtJMPh0c7B/uyDL1/98AsHm8XZnfOHrUErmoSK0mhUl9g7F/quKBgRENEFL2jImAErZAqiqAWsywmDU0Jhc4+HbIjIRIRQVMJ8QiOfUj/xtZj0qZgZoWN2TxNmnhAJni70T0s8Simr1QYRr1671nXdwK3stu3ZySm52lcNB6oqf3J8RujNIOfcpwxApSRQSFGfuXn0cz/3s7/3O3f39i/3uQN0jEHLhaEYkguOBkvh4UrBzFyxDfmlqDgwRIAuSBSuGr4Ec2BOwSF6REZwggDKDGg6UGaL5/DGW/fV0bJdhYZm4xEJmtLlq1eXm8X+7v7udISWxnWlFKJSQkGXmarBgtbhBQeReUhHYgfojEHRkBSQmQO7i5pCOLjmOudi34Hm4heKC8dror4azbYJRN20nm+61nk/3d197Y2353v7AFp5ZNNhHgtGQA6BHYfhSyVW+hpvZxgLDikeoakLqBITQy6tIEz2r12++YF1tsXq/Pz89OrlKwdHl6OVrbTj6tLB7pGvKw4WdRVqirlUVbPM0hV879aDZjwRcr3WClPi2hAHS20EgsfoqIk+ofTooJJFM4LHnjj2pIkedCkARmpRJKN5hACj7DquD370r/+P81kNWDgwBy+qMUd21MZegMejyWa5ceQVQZ2b7uwuTxebdTucMTHHw8NDJurbru+jPUXzvVjPRgj8hDWAX/dhZoRGpGo1exFRNj+risre/uHlvYPv+YZv+tf/tR84eOVD85sfmDeHFAIgEjMzi0IxQK4AkHwxdMTjUE1cVYdqIhq2rRA5VKucZ0AmDyrMxOSjbMRawEiQCIUBUVHEUFJKCbKNVLmzuuhYgQGG6Jiu65AAKe/uVtMJjMfA5BlpPgkHAXcrdN58sLqCj8wn7+VuFaqd4B7B+lHJ7EZ7E6qpH7mQQKuJp9K6IGHsHmpMY59qb+MmMmnwAihqZsieYKd2NZkmqP1055Knuq+BJ7gzcm/G5ivcUEpJABTVUM3ME6uqCoBoLLEradV2t+7cD1X9W5/6zKboqKm2XedCpWyFE7EElGAxW1szjDCwVVmhOFTKIH3nFknLgXPf98oLu97efvTua+tbv3n/jZ/7zO9+5u37jzb2MC66OnYJXakYTicApfRrUK3zzf1m+ZmTX/3RT+Fp3UzGBbO63Pf9ZtliwRrRo7VlhVB/5dPvhjjfd9fuvbNkN1VfvA+lFDIiIwcBilDJDaGYAhMzD9iCFpFiORuhA+SojnR8Y3f3zvGdW2vptrSJGBX7Uuq6RlADQZVctgOda1BHmBkjDSihMhqqQ1DVLibHwQiVUdnYIYENjHZkGnZiczDt+k1dBc++lywKDGhF8H9ChcTH3nVPNsnTrX3s+sXZcm9vbzQex5RE87huNsvlydnZaDyvQkMOmfnB/bOd+QE6I+eQpKk8sZB6yeXqzdFf++G/u16M6rFXgRS1Dp7BwKLAQCRTAxHNakVJzUy1MHsCVoXBGRWAhph5gWFzotFj03NDMxMm9L6YFsBsyjWdLlbb5EeOIcvJyer2wxMNbpva0+P7e/Nx129OHp064/VquW3PsrWIWLnpkIuNiAKIiKKKQ83Fi3TbylXB1+wDAFoRMURyhF4U1VwVRovVkohimqtVTbNPsMt+4utKBEtiKvWdW6v5bPd8FRernitQIWYEI0LnXGD27Gpy3rnAXCG5wRJ9uETTE4P3YEWzCgWYsPjKSd8+enD7nf3nrn/wOz7yvm/7xmXp7y9O1916vrvruOliHE1mk2rireEScmuIDKzkm/H08ue//O46FvIuSwKyPvdPNwE2xIQDqn2thX/SL1/4rcPQRD+9igAAgSgQM6lQWbabF2++/KN/45++d/fWZN60ud3mLpNmUHOEjoupEotB7JKIcAjj6ez45KzfxAt2AEjt/d7ufLPZxJiG0IInP/Hr2AFfxx0Yqv/QNBUwMPIcYsl+XGUHi3axPr93cn58v3340jPzP/r+D1yaXakmO81414XG+8oFb4SAyIxmSigMPPL7s/GVg/3L7WYzbSY7oyPJRTR7JkRyRGY2tCdASI6rKtQVedZQS9UoUE+Vt4Cq2IxmmyzFGIRNjKosEKsmIAeEarWQbsu5rbZnadm1y+0mt1Y2ENb5qoZ8ei7LNC3u18r2rtS8yveKvraSLy+kRMC+wRwkMXdeeyTw3owj5k3ql1237iVh16U+aSqQZRn73PZqCO3iPJ6fVcBQysj8799e/5P29EV2NJ5MihggOs9gmHNGtKyqllxwXdZSJHUJqVhwD9tub77DiJ6J0FKRJE7AcxVqEMvZg7MiimI+FyjOhQb5cFT/sQ+9bLT4R5/++Gfun0If9hxV89loxhm64iexr0cWKqBeIEJp9qvnbrjDTff7//0Xf/un73B1gDtV1F4xeVaHVFODBUopqUTtqXbYL7pf+Aef+5Wf/ApjEEvFfErJiLOYsVuVrXrcWuywzZbQKTgopYAAGXryaJAEmhmt19srR/tffefWG/fa0z510a+2JZaMDnOJzMhIqoV9lgKI/PQAjQERzRt6gaCoKea2d4Bc1IklhgKGZoxYwGSwJcw6OZhiRW2/dc4NVuBPVvmTgv51hf7JE4YO6MnHarVq2/by5culDIp8cM6dnBynJM14h4jQcRFartudnVkqGa2XXIqYYNJiZER+9df/2/9+f/6KkpKXnGUAQtkJgkfwYA7BM1VgzpQJAxQYrE7ogtytInmInH7qP/K133P43cyM2alq04zfee/YVzNDOj09HdfVJIQANB3P+iJuPK7r+tKl/arG7/rOb/ymb35lb7e5evmyJitWgIEIRMTVjbFTcORq7wIBq1yY/2URImIfhmsWOSaigRwvIOTJuxGP/DZh1nHJOHIztNFsf+falVm/LusFn5/H+8envpoBNooAxMSV8xVxYO/ViDgAMtPg4eUYkI0RkZGISLVmqCs0kJUP6v00uN1rl25uVwuE/PIrz/7pH/i+b/zm5196ZedoH6
ksSliutw9A1vszmNUyaTTHtt1K7lrEMfrdX/v13xs1Mwe5lJULpApPMA0zG26QRogKw+eg4SWDx66KF36WRhc3GwVSIESfJBqkGPMzzx393E996nd/6wvXbk5L3ni2OoTU9WSAKjFGAKg4HJ+eWwgQgkdvvSzOltuSDIQIY4rXr9/s+7RabgCAnyrcTy9g+JrTgH59H4OoYKUUQss5FpTRfCwoQceywPXx9jOf/ezt21+dkAWqemjEAnGTBXISBVPVopkICKaucpevH+4dXkaY3b1zAgAptU9kU4hYsjhPIpJTQahSRlEv6vtkOZMCS4EIxDXHAtjMVgRbz+vEqmQoAsBVbUC9lMVyue1izJnActuT0Rp0SYSuGovcRCqlvFIHZfuph4/i+PA5U4urOxA+1eO7vpsRh6ZtDvLEVX2XB/0EERVJTeW9I8/kwMiUZU5Rpyk1MXNDo2moreyG0acX6SeMP+j0e7F3MCSYkDMTzyHGyBU6dgjZzNBCqAajjKiaTtfdUkc9kCOacFj3CubYi3E3MX+eY3LGjhxqSlJRlTp4//jo3/ye79rKnZ/45Mc3XT2tdrrc9eA9bAVp4khSCzxddstq5A7Hc4jl9K2T3/+1+/de76b15PDAtnHrXO20oqEvUxQtIVQFLVQj6deMTovkrGCVCjryRTZmAYy7EhW6a89em8zGXHGW/Ojtezn1zA2FKucsRUXMBMbzmqtqGuRs+eDtR+tMI5+LOagoNA070u12M51Oh8Y/SqeD1FwvlCxZh4LF5SLEBrtctqUge4AhzgFBLxa3GJipR+8YobLZ5d3le2ezeu6RSinkMISQtTzRNz3d9TxNLRjq+xNrmlLK+eni6PKlw4ODxWJhReq6Niv379578cUXbLq3PD/3dVgul/Vovn8wO3tw4lxxVSglgnrt/f4le+ONL/3Yj/7C/+7/+H23br02CnOwxOxLiWSOycPA22EiJDMxYwSiAe0YtivqwANkxgF5NqPHJxYAgAeyIkzI7AlVhW+/d5ZSU2DlvcR22fiRebdZb+c7YxeyN3v5lcvzWQO6vnE4HdWUux5wy/4CuRpgVkXyzGCoCoA4tG+mSoUE0EzZEUhBBPaISln7yWyc8qpy85TOM6TAXey7GEeTnSZM+w9/ePKV107v34UQ5sdn6+eec+qWjmpTZnbEoZiCoZRChqbDkWEKeuGuNLxZAOhM+lJKnu9Mv/rOo1//9c/+kT/60ctXwi6PN+161S98XV26Or92c7cUjT2cPzo5XS3UF6xSTFr7mUvzh3f6N95549HpH8x2Ln31jbNX33dy5eBgsaKEVuHFGObJOiGDpxrli2r5ta4Z/9CTh5cQAEpqsaq6Pl07mt564/xv/91/cu2ZHSorUkN2KWeHxIyqCmbznb1H5yvnQlYdVxNfN8ePztu2IzMzzbnM5zt109y/+yDG6Jw3MyR80osM/crFTeOp5Q1gw8QCH09XRUvl6z5HNwnU+Ky5BMlM8xoc9NP55APPvPz8c6/+2D/9+bu5L5ksdRnYUgTCEELwWTmEsRMnBXG+f6mezgs8yNYPQ2VXOTAajhYkIyNVzTH2ndMG3KB+YgLivElOxRTXcUs+ZCkxKWVomonjyFSFEMThdC+EHe4I84rm051tyTJB6UEyhZp2GslKBex7ePTZQ/6ZnP/UaPYNLqPKGkNGXJwef+ef/8DDWw+635fp3HEOK4iOw3jsB2Vc7ssoOEQEctlFyVy7nVplrpIq96kUP+71j4fpd4dQibhuvXZYaRF26MmDV0BDQAMy1caHSePQ4Mqlcd+3e3XVluyNK1cFxxnWZhsykRI7meaKzRmblWzKzoyvTHZ/6E98F8lbP/2rv/RIrjSh2tiyeHEm5za2sq1EWVzk7fxqnR/EW7/++tufW54vcjQ3PtpJKcs2TsmzNp1LpSQBKGLMrJYROMboLGz7tatBckB2HGKJfe0aROxz3t+dftNH3/fw+AFxNZvNRdPV+e4bX/mD5XI5baaEIVpEJu+oqipIPmq8v1jGEjxJg0Fd3B03PthitXZcI5OYAXEuAOiBA2CP5AANzAQMACKogCljb6UHscBSCB07Q0TD4AWIGXDQeKErpR/tjtYnGxAYVdW27wqJQ35sRvr19r9P78yv7d1Bauvcer1umubw8HC1Whlin1PtQ5F4//7969evt9tkJbKDk+PlM89edtwwFAUhV7FhYGq35dlnj37+5//5i+/f/yN/9IP3b9+q60DgkI0IAJQYELGYIKEjQi1G3sAEBETVCBnQMRLxY68tfUoUioiMDpyYQYlpPKvvPTw/WSRs/EtXr7x7+1ZrEMZhC+3+pdHe2D9z/TBMKJtut+tJE+L2GCHt7MwAlMmLiENyzJLy8NqUnH1whmZEMlxfgIlZEdgGFQqJIToUVfINWtVJrpopSNqmtvagVGJuHVe+6l54aYquPj6WlEzVaIB8mAG5GAB6AGNCYAADtHwR9gyPk/AQECD3WtWNpNLm9saNw2/+xld/8Rc//p3f853PHkx8ZaEOptS1xSyPx2NCffWlG7dOxp/56mcNUJD7vDma2h/93pe+t/nwl7705u/89ptnx/KpT3/lT/7JP+5dHahkKDysC72YkCoAqYGRmSle8GfUzC76EDITe3ztu3BDA7DKXKZ5NVts7T/9L3782lETbJMQkamPfQiBCEuOMcZLV689OltkUDSqXWiq0clys4mdqjrDTApkly5dOT0933at9yGm6J1/eg08tWy/9sjgw/ykwRc1BQ3sGJUYm51xhmImXsBTAWBoZquW7t9dPDxPuLvrV5WUlpkNXS8wnCMpFe+2MY66tly/MiEHJUctpXJeAJMUIBpm46VkJiR0HoJDNfUA6r1nX5BS5SCog2jU+H65YtgNaOSLYZESVVLgIIVMQ98iNH6zsm6LZ9tSSYitrJdtTZNN6Z8ZwXazHo92RtvNzcK/l+HHffnBvflHKR1pvzrX/et1v033Pr0p7ZhpE3jk+qJglfd93zNjyRnZA0KS5LoiqADrufFXc/ylZf6CNv9mqP6Y7xYJe6nc/u5eH3XTrmtX5y6HyhlZVmP2ElPpu3pvb7HcKo4vzWfPXN9/996b7z6KgcbZJ6w37Xo7cdMq7FrunCYUDoXYqqquadX+hT/5/Q7+4K/91K/lyaXgc6s68nMfuwTdyKKadlpZxVOf3/74O5//+KNuy2HkJrtz7gqsOkb143E2ifnMB99JFCWqPBgQgWeUrNlp7WdIRYs4YgNPDpSSCJpZkY5gQ7J660tvjuu9KrCNjQlQTYulIl0Xx9NRaCqEnOPx6VmpJo1tTx368bweh5FztFgvCXioJqqKFDxTUkVCcN4gGwE5IgURZSVQcwZZVVVJLxxiQUwJwXkR8cSeXM4iCOQQyVfjUTzup6MxPI70xcdM6qfanItu3Z4artrj8RQiDiKm9WI5GdVXrly5d++ec2SixLrarB8+Wl46uHp68pZXK8k/uL+8eWXv9u17yI0hMGnMK6a9HLeXr9Nf/29/4urVv3Lt+qxdChiquMC+5IJMIYRSs
hT0xIgGrgxouxoSKIKzQcJtMvhxM7KADR6XBiAioUI17Dd5p5refvioM3cwH7/78KzQaKcKNy8fHB42+weT1WpRuMAauXFV1aRYAtYBRk09E9uguYq9SmG0gUmHoJXnXAwu7GTBEyOzOMqlBKvJVSomJoBMBCkD8Rh9zklMJ1ZGWXNTgdjG4TSnzDydzk1h3m623abam17P6cFgCikizjszIMcE6BxAKQgX6diKMPiJmVnlWDBzVWsCp+mPftvzlw6aX/vEl08Oxt/wDc/tj0O76h240Wj09puvXbt27fax/O7vf2V0OFqvutW6DoEW5yeP1v36XuyX8/lod0Hrz31hwf73vv/7PiBrTK445vDkRFEjMAAog0cLgAIaqMLjJtmGuWu5YF49TpHGooKwu9/83//DH3eGkz1dLbTxLlLhpipmpqVIPLx25WyzWfR9g2jkIIQ25+1iJV00MGVKqb9549mU0nbbmoGaMvFwv3z6uvAEg3n68T/UrKgUK1XwOWeuXTOuN9qF4EADaNxuciM7P/3TP/3cyx8+Xctqse5TJynFGM2p956cG8BSsoil7jdbhxoISRgLldgzM6AN1ghGFkIoQ+Rg7kGl5Gx9qpx36kAQSI0wCRgKBmnbdmcSQLoycYwZAYr0TJa1B/Ql96rBtnbeLy/LqKLxQ1KqLDBWhaXoMm4+4KtJKJfMfTHjP1/Ht3aab1/qN122j/zF2euffjA+Hf/BdDPTOiRpjKxoAkVyCBC869soUnYbN6rHRfPdmP9pXz6p9kxd/YeOn1fbkIBg4ERXdw+vTiqVxE1Te1WwXh3EDeS+UL/QzRt3e/M7pw9aB/NNKjevXDezc1mkzSJEIK68A0/nQFs2ZqgEOHlL5/F//cf+5OHs3l//+Z/tm5mCoSkrWS+eTUF68MUcVDBx5Xd+7DOv/VIfRgeTnRFDiP2WOFczX40qMqicV0JBAG1IbFqJBymKxYdEVvlac5vb5BkYFY1ANWU0qWuH3cL+5S+9ffqomo4vsafkHCQyqRWCsiFK5TwJQkkaZbltQ0NAZk0zm0zHpMB4suk7k5hEt2k+nngGT0Kpn0BwfgrOEQasnDlBIwTnCIQAwRAxGzIqYRQEpjBIvZjZgMWQApszziHl7fzKyHzapL6a1JRzsSeybMTHUTX42J/ga92NyBPcZqCyCFif04NHJ/OdvelsJxfokqmC9u3J8d3ebLx7TbFyrnRFj8+X165dLmVLaKVoES+YulwywO6l6X/2n/2dPu3wKAm1iCa6NQeC2OXMzpGzqH0hUHFojpU8sSII6UCLNKwUnQBmHZIqi2pxjAGcaUGS0Gjq4+m9M1fi5nxdYtLUvfr+mwcHIVS2PF9D4ZHz0VIqMZdeEDoxZoK8cWBAoAxKaOwuUq2RIiK5IRgEgV0myqZWpCYW0layVYxMHjwoFguLOOmzx3qvuMo3dRjNMtQxzw0hU2MCuTXyeHjl6tmy72SdHRZXChZPiCUGzAyRMNFgC0nE7AAQFRgZkQhRSUgdZSbi7N17J8sbz7188/qh0/FmI8HhZFy5UW2O5pP9N9989PD03vRqkysWqmqvZbU6GF/u+3LtBll1x5rFi984+uj3HH3xC6//4i/83uSAg/eoLYqA1UrcYVLH+ljlP5jJIxipoikjKHQGYsjknSIQMio6GWXkS9cOf/g//5mHj27vHtF6i9TwUvsKwIMGQEmlGe1t29xu+9pxD4S+8m589ui8S8kIkK2Nm4PdS3VoFmfnJSUCKKUMNjKlZDV5sp4R0TlGhIvy+pTdKRGLaAatuBlYrPXhJFFRBzwERrlQzXbGR3uTq1cv3bgxbuqcs+VzLC2RAoAgOkxBesNQzEeBvF29/cYfPLh7ezatI5TiIloP4LioWTaUkiNAQLVIRRQrrGoNcZn6Phnitvcxk59OUwuWaOZ1jLYj6KJAVRWE6XTsQ+XJO6TxTnBgVaD98aRh34lwHWTK1Ditq9290cZhmYyfn42vNOVPX977of0JSv4nIfyE8U/9Cn/1DX20j3M6bMLUeWi8uzkaT2sY7/JBY/uT8up3P3v4vnrl9LNIfzPCDwPfnfn/1V7zfx6PXw3cBnGGY4bKmZt4PbrmOpmfb2B/Or+3PO3MJo62pp5HQNSnxXIV3nfp0qsvNr/zxc996fbWN3653bKrG1d3Il0bRwyKI3bZ9cUCpU7/yAsfOFm//sv/7LcjXGmp8zB1FoxagdSC6wJQ7lF4n90///FPLd92l/bqFEs1cyWFTSfNxFkpcRvRtEchGqU2cUYOk22KyLgb6s16Eyrfdd1AjC0x5ZyrqjJAQDLKQOYcm4Pbd+4HCKMRKxUpvQiqQEoFAAldCEElR1XimtT6dd/4GgA3SRTjzuX5vfeWOzv7W+2ZfCnFDFOMzOPReC+KGsbSbYkIUGkYk5oZak6iAoZGLgBZBHPkK3YASM4ZgqI6grztzbwhNpd21/eXMwzZeyyCIQykw8EGEp4SNMFThtdPup7haQM403Xd6enp0dFRSkmLlGyO2Uxvvfv2Sy+9ON/Z365PfEnLhQZXHcx3jxdn1WgsYiISKsqCwIus1f/z//Gj/+V//X9ZltdVs6Ii92jknEt57WhaV/NUejNAMEPi4fdRBUQqIIjMLHJhFDWY25gh+hKFKHHNpFYWudQ7u7ldYwUvvnBld9cT+1LKark8PDxcrnu1aDwpgtNRjZpGFZpZlkotD5gtiAzXlwE8NjUAUMALuxICuugQjQBEFRHByPs6+HEnwuBUjIlEcbXKTNNm5PqSEX09que7KiuwIhm0qec5gQESkigSQC4GPDTKRIRP37dEipk5ZgEDI+dQciqW64rbbkFeq1F17+GD61dvvP7aF7nuioivxl3dbqU3Tqg6HVeFYNmWl67enM9fPls+GH8wLjcPlWw2pueufuvf+1sfc8Lf/6e/Nds8JWHXmcYKKwKfqAXiJ8RIRTDHqCYAoKwWkUZ9j1WDqY8EVZ+Pr9x49h/82G9/6vOffua53dV26d2IcxhBn6V471Msjng+njx4+NARF5Gqqryv1uv1tu9U1UyzpCpUu7u76/W67/uhanvvBhsZVQH7n0lJfdK5P5myDiN3RjRSSYXGPlS+g1yxK4pN7dCHph5rhrjpP//p39einpNAEUtFMhoNGeiDbQGaUc5lsz6Td83MpbqZWe3rLWS6EHM9xjlRzdA7x2SmJad2ZxqC59LHigJoLGJIhYNXDTHFYhrLkFruK18DuKK8XaPbOs2VJFv3kLA8LFplV29jm+V0XA63QcnubLYVIwgs+yVl/c7x7pGXN07yDz9cgBt5yV7PDhyPil7h7RU39hG7jYrCWmX5mdtbT+9sPfLqKsoPuukHCo9QleOpJJBah3urqhOdx/PNtb3pg5N7tnPZaw4eMzcEUnJBM99USWSbKmRenZw6GoPkyrmkBlmDo945gX7ECWzS1tJuy0cvPfdHPrTz93/1n6z5OhQmJDFQ1II6dZ6j7Ig/7vlgRz//sc/r2/bi9Wvt+qxO1elq0VQWNMPG+ZmrXyIe11WYSkmxT2cP
t7AMdQKecA99mIy60jsOqhpLR0zOuz4lVAPATEJIRVVirHzNErbtShEdeZGh1kDOmdBlsZgiOXOeD/ZG58ebtutwWnWx3X92N8w1vaVI0lMSERElrgSMQuObqetaJO8ziHYIxaxczIgML2ghyECodrGOFAwcC5NDmjU1xPRQtiYoBm6/gbNl6TJXVcnbnLP3/slA/wleOVQxe+wu+6S+Dxj9cB6UUs5Pz5qmOTo6unXrXQ9eFBxrjP3tW/euXTuMcY2as6vOz7vrV/Z2p+NHm01dTQKDKih0lZuMD+j4/rv/6V/9u//Ff/Pv3br/B5aJpHJBcok1T0UwxhbJD0cZghohGeFAuwbQC0sKYAe5qPfeLvzRyHkyKwgVwkTBZU3k6OrlwxdfuGay2Sy6LDDfbZIuhIqHEVFlxH2O+yMfnAC5ZEpK+NinYXA9HSwDSxYA4CevGw4njno3ZhG1wo6tiIEgm0FWqiVlZldEYzJy1eY8zXamDfvz5Xqye0jecvSj0ej4bHk4nap1gGpU5CJqbyhJWeXCQ1hVzYDIiWaxUgCxlIDsmQgoqxSJ3/LRD//WL39+PJp/8YtfvPpcByXu73/ovZN7sFUpxRF5JCXLDmiT2rQ8sLDbHMwbN5v4xfLk8v7Nw70r/8ofO/3YT39yMqq+4499WDClrozDtOQ2ydZcIMk4NASqqMOAF1UVCVFHpu2o8SU5NVAs1569/o//x1//yX/2Gy+9sG9xOwsTg7LtEjknXpOVvsTDvf2H58cRtGRj78bNrO/TarV+bFBqiHj16tVSynq9HhqRx2tVB80UIj412n0yy7Wva1mGfxAAnMMM6vYmGUUJScCHCshXYSwpb1ebjZ6gAbuL6RSIOhAgsKFsIyOY5KKmZoicl+dvOxvPx14zSrn4uahGQExeAIihLcmjK2yTWagnAV3pE3g27xiMjcxXvl0K+0ZsSURqESyrRR/AIDfVLOdFH3WpnswXS+ciu54qkwqx6mXE4hlPrN8RnhQMLMfQHae411V/nD3VfJy2vefzns4y3SL3VvLWFy3iiRBKVbvRIt3E5k+P8HluXmAIJbaSO3AAQdQHjcakHsjUPVqcnTzS0az5we//vvXpW18x9+5Z9joRAe8NTT15ge7Tr3+l6+c3rr3U2ub+a8dMZM5QoSHfa27FlLTycbmSDx49/33f9eI//+TPtnY1FSSIRbxJqWuaUoMJihYhrCaYYvfeZ5fTcnh2/1a9Px9dcdXuJdvr5qOj3d15DhBrStBhn0Y2Ubx809rlWX/8xc32K2fe6hjUAxhQzDFULuXsiZCsiFZV7bVIRqagRUpOzGQo7DyqYzJEVLFcclMFERmCcB36nZ2dR3fXZnW7Tate6r40N8BXIZVcTaumaYgQEIE0aq95YAuFup61HXgnBlFNGMiwGLHhQB4zY3QKxqTMXPku9pOqOdo/ePeNN0UElMXMnI72J93tzXTUmPeD9SMiPq4X9jQm8/RfntT3J73PYFFwfHx85cqVw8PD89OFiLBDU10ul1Xt9vcO+yVoJ6B698HDmzevtlm23baejNkFKpxzFzu9dHny5luf/q//83/8H/6VH7p/566voWTvaSrSExlyFTV6qu3xNhlA9mF0B/Z4lxoraJby2B6KSEtBrXem77x7cnL7/s6lG+LrnZ2dbtuixpw1ZW9L8KOaWGMxdtG7BlSCn8XYh9rhBRMFh2xeBcQn/OjH3CGFixA8HRimlpEMh7moc0ZY25i7Dj0DctcXE5jtzmJOlZu1qdTjurfeA23as2uXX+o6NExZEoIoKrACAMOQAqqAAsRAJAYlFzPz3hE6NWVGR2iqORdFIHQiWtX+aIfE8HNv39q7eXQwvfTo4TLKxrvi/Ci1HSO64Pu42b00n+1PN330UJHrZvX1SXWjyObe7ZM/+wP/inP5X37887OD8sEPf8D6cSkFCAmDoV5kFhsikKGZooAhohZm1lxAsCOa9dv2hVeu/OLPfemf/f2PP//iQZQuF/EuggyzWO+Iu217+fBos15rETJipNlkp0vl5PhsMBVAhCT9tSvXqqq6f/dBzvnJuTus2yIFH/vtfB22/gRvtMfLyEwRmZljjGEawqQR0nFds1I9HRuwmcZui5ZBJFR1ShGMVCIAoSmYgeWizsw9lmy5GLNhG1wy3VbhyhBUiYiOvqaiGtDEgB6yQiw1c2q36JQILCmHOolWvsqtOnTMLiAwxBBqYkAoWbcCfagtjDjPKS95zG2KdLRfP881Ljd+BnPv25j3aRRNUnBdsp3gn8XRFsraaUBW1Ss1z5uws19JF0FgrHkE1VqFJ2Ndbj1QqUJVlAlXJG/mSJ3sjaa+clKik+zBCaKBGIP76un9ZjyNG33nK+vv/bYPbRfn52dnSDmCU4cGsF1FdK2rRw+X6f3Xj87uvRtVKdTeFUrWAPaSxHnHVat2VO/9me969ef+5T8+0V2zGqkHIjLlUEvJYpZKdJU30H6tJcxe/l++XwTHezfdyNiZovpyFJNuS2qjypYgprnnEcFJt92K0Wh07btH+ZXxa794V87JTZgNRsGnnN1FuIQN6e9StohM4Dw5QEnakWMwr1YQQRVVFYGRzCA750j08pWjvmujWAFsu2IWuo3UOzNXJ8mlmU6Cp2JFFNRKESNVQyT2zgiRDc3wwtdO4YLv5ZxD4LpyiGyEs51533YH4+CI37n1XofimE0xolEpzc6oO+u07V1T6bYrpXjvnXPDFhqgyaenT09K+ZONMfT7Q8VPfTw+Pr527Xrs+7bdbLvYNCO1fHZ2Utd1PTmi/KjvOmZ/+86jZ65dfnT2sOv7SR0YqiRtHZrYpWdf2PnEb/6L6m/Qv/cf/OsPH91hToDGMFaKSVbMu2YXqIg8Zv3jEDX32HReRIBQRJidmSEn0MYkAsobb/5BM5qBhdlsJia5uN35gZQzpHB8uh5bqCdU1xS8cxbHo3HRLARUhNEJKOLX8jHQ6MJZk9iG4LvhwCFkhsFwHUERsZTiXCDHFTQOXSowm+xl3ZgU8KgFSu7rehIhC1Rdr7t7s5Ozh10/unTkqBKJbhhjO3ZQDMFMBdEBXaiImfyT+xSzs5LL8N55X3J2yLULm5O1r5rR2KW3J7/z6XLtYBmc4Hg7rqd93IYQBm+4HT+5enBtyjt6MaKuRAUweZxTyHcfvfv9/9r3rNf6qx//VGnpI9/yTWfLNbEnlpKi48rsgvlYQI1EwQCAlQHUuyA6SmX5zHOHv/Erb/+tv/lTN2/sp3SexYeK+7jGMjMoPpS47nd3d7uu27YdIhdA1zQF3eL8ZNCdImKf+t357s7OzvGj0xjTwFQhGgK7EwHhBUp20YoMq1dVntw+B+Nre+ygx0xApqVMDo/8ZGw1VwLEPuaskiT3SIKkgJByh8jEWDtIQirDWWsCpgDeDECE2BwDA5uV7VLTnGunCxhycb+2mQAQ0QNLX1BdSjEweFe7irFoToCcYypVGWkRSIGE3XAxEJjP9uvRMVO9XsTJRPukaLUU2BquxFIx11sO2HfdFBXUFZW20KqUs67fIZy4MNE
E2mczrcODNt492zY+pHa7X7sXfJ0X7arPWUTBzCFIyYb7VM90dK79O7kjgR3FfeIeiMRQ0Vjp/la62M7n4c7Z6tFZ+I4Pfce/9X3fCW7hRts+rzZpkaGgQZF4ujgHHF+e77B0bAhqYqrgxlVQ9Kl3edV82ze88Mkv/Pqq7OaC2a9NQ4nA5HrpU9a0liuzyY3DMXUm3m3LdvZMNX1mblTnyOt1tTqnfiHSddCXsW8qJ57L/t7IN5tOcpSO1S+Xqkf1N/zQNb+/LEsEKCKZETyxqdZ1HUuOJb3yoZcOb457XZlZlsTkTT3IkDINxKCqwQeRPHwSSsrtG2+/u439NrXOOe8wtxGAChdNoEnYtKg4wr7vCcfsR814Nmom5HyoaxdQWZUNyAQMiQgdIjMgkQuuGo2nvmqmuzt10+TYS04m6gdjPFA2UIJ6f9LmDaoMIPXQ1+BjKdNQRp/+eLoPeuzpoY83j66Xq9OT80uXLw/1IucMlvu+v3/v0TbCeL5TNTUw9SmfHB9f2jtomianHqGt3K5qAsLlef/Sywcf/6Vf+okf++WbN14QI0MSTIhcublJ/4cuEIPlABAZDRjFRa7I41hRLZJ6NqTZbLI6Wy/XuMXqvUd3JgRhuunK6WL9CDy8/d672674sANWG4yWp/12IdPRPpFjz67y3tOgixkkGkSO2TM5ulD/AwAMBr8IPKiuHDcIFbNnH4AwpgSok+lYcn18sjQlUd5ucTI6rMKEANVwXI/HzdQIp7uTg/0dRMygQGzAKSMimwmTqhYjFL2YajN7ZmbyAFBUmIiIxLSY+qp2rtJiD+89bDd2vti88sozJ48e3Llz8q3f+u0vXX8hL9P2zvrVGx949vCFF6+89IHr75/xpNv2GbSoKQJgRvS5tGoCZefh6YMf+gvff/XKsx//pd/50hf+YL47E8zFlKFSASmmAoPaXIewBVXRDFgVtW17vn90+KXPnv7Yj/7C8y8duNARsUOBlGtomlAh8Tq24/EEkdbbXtAlcn48CZPZ6Xrdtt3w1sccqypcvnx5uVhvt+2TA35Yn0w8nPFPPuBrbBl7AiQOjqeI9OTu1cb+6Prl8cHOkI+XY+q6bb9Z5rhgLGTGxgMDOVSAGNkBMRIBoSFiAbxA51Bj7pNmBZJcyMw0HR4eqoGIXCSvqOFFvhIUUzHiMOn6LMix4LbL674D1LrxKVrlqr7N7SZpdoTOh4v177zPCu1GJKEodpaKa6xyt7v0uc3qbVfu9Hi66Nk09m3OCXOsi1bsJPDt0m6M2eiqC5e79JzSsxz2iJntvM+3LK+ZYp8LEfpAAoIcPGxlHWl9NPb76FMvj7LcknJicaNZVb2Qi6m7fxZA2m948fpn77xtdzbk03xyEF0RaTfZwElK6NWTD+8+2H77hz5y/Y17xzklYfZ1p8BsGruTNfz57/2m9fqrn3v7kVWHEdri3dyyVpNNuxbK1+YHH7nyUtXEX/r8b6x1VGxCVLq1IECNxTJOqwoIUw3ShWnQviSzyiped72lpq6kydbZxrnAq4LT8MIPvv+dX7vVvxuZPKNDQBJD0VEV+r5f9vHD3/nK7Tu/VxF7CUnF+zAZ1X3pbMDgkIhYrQxNXjZ+dLoupUYwxqAFCIQ09OsWg5i4SsPOfAwcA/nUFSRvykzOAJh91UwQuTdRyqkUZLJigykSGKhql7r5aIzIm3ajqQcrqkpZhBBRHaghRSl+5vqZS12sQkOPQ/WGXfGkPf+6i+2Tbw3e3E8gkqGLXy7OmtHlS5ev3r17V0oSAWafYndyfHzzmSvNbG+zOJuMqs12Kwh7R4dnJ7dSbLJtyfMwk1yt5JUPHP7iz39ci/z7/4c/f+/e64bCgEZah9pMnzp7AJ7IlRSYSFUJ8SI5TcUxIzX1aHJ6fv67v/2lsxVcf/7Z2cyV8/N7d2Ec6jurU0S+evXlB8cnXRuT9P2ye+b6Dcl6693j2Xw6Glex73vLgQMBAqEqILGCETvEQuiHOSsa0RCUdAHvKpMiMjlWISVHSPuzKWH78ORUxXnXjJv6+Ph4PGmCqwRFcwEo3rlQ+b7vTHOX0BmmolKQSCsCg4Rkhl87UDUXAB0chlVViXSoYoiS84P7x7lNOfXNGDtlKmff+g3Xj4/73/rE5//U9397XMuO26tp1vcLJFQmYOdqKZYcaEneOTOLRAjas2fi8ODOo3/nL//gj/zwP/yZn/rNqP1HvvUbHjzc+pCceUJEHdp3JgAFNhNDyGWbsn/m+Wd/+zfe+Bv/9c9evVkVOWsyRAGhmI3YqhFH6Lb7o/mkqh8+OCYfnLEoTarpyclic74oJasqEDpH165f2Ww2Z2fnYPwEZlG1i0hIMLCv8b7+5wCZizvoAOMMtxZf0ezy7ka7GCO0xkjorK5IrTgskov3FTAhEmkKTKpycR0nMxu80sxM0IjZPBloUsC69iml80dn3ldIGIJzjg2Gcw8NtGL0jiAZF3biVKxqmMghohYYVaAxO2i2m4SBCgM6A4CUEgcGtGZU13X2VRpNdTQm2MoOoSdQh0Kume5NvPYmIwdTZ5J1WvG+qx7EvOz1bFqnhg6FdtEmo9Hdh6spzU9d2+Q8rcMGYAm5RvAAylxklIOeaF6rkpb94EfeuSJIxmqE5Ikc8yjK4tFq++ufPndNxa5gamejZj6e7Abu2+0W4sgzwzibfvnuvaN9fvn5o+2tuFny2HlF9GR16V545pBh9YnPvEHzw9VmhfWIsq4FULr3Xz0c+fGmj5+6+9n3Tk6g2g1awC0ww0E1Z1TRTgLE0ho4l4UDpqyB3MG0Xq0368Q9OmNTqxqJ2bY2m7sq4PbRS9//zJ2fPj07OQMjBqp8HVNCMu/ovXfuf+A795//0ME7n12MQ0DTnBPWRoQGkKIghJJ1mHOZkaCqRVUQyJ6wcj6mlnjmjJt5091pp1zt7+5kuRusRvB1ZUmzorRdykpJEKBWC4aDtekFxyuwq52n4HE6HflqWo98kVVOCUEB0DstRVEZLLNHLUA4uroXXzuF8Dhd7DFH+GsQxFNK7qf/HDYMMw/8s8e27+nk+PzK1Uv7B/P79+8GN2Zzoqnfbu7ePb167WhnJuvzB+Zo3fV0ujg8PLz74BzZi1FOhZ1YsfPz+MyzO7/8iz9fB/eX//c/dOf251U8wxhULkxAnkL/zQwBpRTiJ4FqZXCdrarKePubn3j7t3/3taPrlzHYyNt6edqMxqfHZ7jrp7P9kvHdW3dnu6OskYhfffXKpcsz0Vw6XJ2tSMejHa9cIA3MOV8kO0QVG4zDhhEq6CCIIQRUNDAC80xBNauggoUQREuRPB2h7uws1in2Zbk68ZVTocUi7x5MSVtSAaNusx43s9in1PtCtG3NuUC9NNO6xIzsizp2RUQI0DlCZEBVUWYqQEzgiHJKZHi4t+8PHTMDa3G8XW23225+kD73qdc+9vOfW3anr7xy0FHOKJWrcwErxsMioZFzJqkiSIxZzAFaSZ4wnpwu/tK/9+d/5K/997/8sd8f19MX3v/M8WKlVBDRAAwQDIhI0YpYRdp18eqNg9/9zU
f/zf/7Jy9dxZyXIqLFKymYY7OaLW7bcVM3k+bs7JwqH7O6UB1duvrw0cmmbVPJJuKrsGk3r7z0UjOqXrv1uuMq5zIs2qGxEBFR4QtcGy5MbAAGTtHQOujjAGERKaUMQ6a23xxe2V+Xrosb11SjQtkDkwGUgLVj49qLxlB755z0VLs6saQNlKJkSo+doxBVBQMJmxI6AYhq7IMjB0BmimjskJGYSMGhUcaI5M6368tHtYDEmCbjWqXkjiVBPakgqphDJ+pSBjJtCF0IDVFQMJFMLElzsHHlYNsTVQjO9UogsZMminRJSVFMC8Gj2FobUSg4QE3bVU4CCwi7vTh0k5BrrXLOxraDPEkQnTu1qMlmvjfhxbqDKtyoq4nA/V7fsvwajrc5tpaMHRXekAGylzGVkI2lmo63aI/6ftPSN7908yoUi5K8Ju26dnX7vC9SPzg+sekoSQyEfaq+7dVvunGEP/lbn5Ddg01bnJtJTCOWZw52Xrq01/XpnQe33z2+e9abr+dTT/s7s0t+Oh/vNE3jyEFGFI/FsQZB1JQZ4qgxtJxNM5mqZirC1rpKwrR00RET17FNV35wb/xsve2zEBTTqqqAOoC6yfbm5/rD63vAhYpjAiLY9uh95bgupZDL5AaXUadaoAhLQHCOHAmKEbjKiqRKQgjebNt1D09v2zp3bek7u/Pg/rrrK6LSdhRGXPlm5DlMCB2ZdxY3Ih2ooyppRZPL49HNvZ33f/u3/fFrVy/vzXbIACkbJNUcOKALbOqYRbSqfLlWpW3kpjYHlpOBAGFgx8zDNfZp5P1J/06Po/iePGc4DNrt6vT0dD472tu9JgplaNGsbxdn7aJ34wM33+UaPWq77M9X7cHRnuY+AIyIvBUfoC8YY/fS8zs/+5P/9G/8d//w8jMfEvZFIiIzhwEcISDPbhg8CLMxKmTkAe5FZJ3Nd269u/qbf+Pjv/X7r48Pr696UIHt6cN8vvAp1RWwU+LYx4cvPjM7GOGHnr36J779w9f2x5hSWnUOaT6fPjg5vfPeKi5nSiFZUlUHCEW8cyLJpAYgZv84mkNtMIRCBEtEoMOrZWiqKODAj6qd+XgWU2e1F25qt1dj2Gl0jCYK0TdasKkmWk1pfqU6vHneTcLkMrrd87UWqwQqU8eomonRMXhVMouMCBJQvQcCRURy7InINw4q7C22nRwfny/WK5VcVqu6prvHD+6/t37v7VUINRqzoqMM2oElR95Qigq6bA4FWICLOkFwwfd9artb//Zf/rPmm3/0Tz5xct5OZyMTwgKivaAYOcBCmtnctuMrzzzzW79797/4L//W4TVGblW8qTdARGM0730v2QJOd6bbdZtFk2Cox1RV664/Pz+PXWcpG5WU+qODg6pqbr1zVwVzzgZipkRIhDknAKCLOo4XcxkEIlQVMx36DyDMkgeDHAJrXBVjDOORP6zR0rQZVQrKGqh4LqPGxhMZj21c6eHEH1Zul+yg8TsNZsGSRFQLgaoScyHIBuqKoTd2itFrTZYStOPZ7sCWKcKAXIQAvcA2qREaooWqycYmVHtfSunViU+iXntiX+LybHfnKCBXBUasaEU5jP0UQNSow4jsWrYoaUhoS1FO2s0qxUW7XpYi2jUEmz6XTqrOWuk5xP1R/SGgbwM4Il4AvLHe3mrj3V6g00adlThBm1W2x3aAbm7dlOqmtN9yUH/oyuytnH+kbf9fcfuTVj8oS/R2paquAzow39RuWnn20OeegXKRAlbAL/vs/XQ62u03OaUiCuLC8aPF1fdfAn0zdZsxj6z4SeNVy6e/8tqoOVpvVB0V1BvTq88fTu8tH73+6NgHB6AKVgMdTnfrykvK5z5rX5JIVONmrCkSQ583IXiPbnCP1VxURNAJEgIgoRkYsKHEVIpCUdWFPfc9V3N3p7wX61m16TZOKvViYPfu3L35/he9Z00iIuxCKSVnqiofQkipOGYiLlmd96KdaI6lr0Ng41wKMqsAJOMKEogTnLkZhK0GMLxTQ95pqu35gjSTZYXKs5+M0nmp6tr3JwKF0AFoqh1Ct4glHm+Wv/vp4+X63nq97pPkbF7RBZ9S8eSKXRj7GsJsNlsdn1HqA/repCrADNnk/5+cDx/H1jyZSj0Bc9SKKW5Wa0S8dOmSSFmvFkRYSgFK9+6/J3a4f3C4LiihF0snx+1VDNcODh+cHONo3EaryDUsJrrays2Xdz/+ix9fL9N//Ff/4v1772mJNdVEXhWQC6KBkBmYRceVWlJNhAwATT39h//wZz/xq1/YvXp1Z3dX0sPnn7n86ivXr12Zh5o23eZ0eQgAo7qaTJ87PDx8681bn/3cVwBmsSPRbrLTFCtZu2eeP0ypnJy8N8o3ZrMpEq02D8fj5oLMzo8v+GCAg4iXzExNkLlocQ6J2AxUAJlF5N7Jyfq0O5jsh7FbwLZdbl0TIEgXU1EEcuYmzfSwL1WtZXO62Wm6OmiyuHtp0nePRo1PqSAA+2Aq4ARNzKpciEPJWtQMCQYvBAMDBVUlT0yyH2qAuqjMd8cf/OAHPv+5177Yv/HW63c/+P7nL11xq8Um4BQYzQhBK8UBbRjA6iFUiok0z7Ssum2cTau/9Jf+jb/9d/7+X/tv/s5//J/8Rx7PM/cOGjaIaZlx7FxV2u0zz+79wse++rf/zv/w7DPTEhNblWHlwwiyAaCIqvTO8d7u0eJ8EWNUQET03gu6k4fHm802OO/ruu03o1F95cq1e/fubbdbRGCmUspgE/919C0AGBr5r43cH4MwzBycF82iufEjQYoQn725B5odg/dIBOwGK190zjkTxIsrQDbKYmpEyF23UWUmApNcSskZFAnNQQUAORUKjgOjYc7x7Py+qxxSHISAjgBAEZGRBDD0mZJklZZ0BOAVQMyJJ4IkeTJpxiOwFJswyqUlrwUhJkkZwTwz+8o5r5drt53b5jyBwSHCxDPWza6SC+hq9ugpUQdahbpfr2oME69bzytpFGycNRJoKRFEXJhPRpgbTWg9sspu38rhvO/okvN3J9XHFut3e73i6n975J9FnaYmhJBzpkCuorryUHsViRWqC1VRlZIbB6bw+t1bIrFgcYxGhCj3tov2jaWbXcoAyZs3KmC3Trbu8KXTh6sFrEM1ffXKpe783U+9+14Rt+PnXJGhNt575LZt7y/PIiiwr4kVMwRYxZRiqtE3zdihVMhmJCICSuwJ0Cmpqg2bVtEU+5QNiZwn9r2tX/quG7ft9vLWkt3EKuQCBm57tkR1k53x9kF03qkZAw1250SOCQk9GKiVlMrh0W7f5bbrzCznDBc3O2tXUs0nmWTkrOZRgYVr6qaemb178vC1cVWjVaXvNiV3UBXPVDfaZcZRTBb2Z7GUej5/5uVnz+6fz+o978rGdHF6Mizx4hiliKkjRCMCNUBF9ZWbXJ9u3zvdGe0nV4ZxGNc16Nc8I59U+ac3zP8UqCGiIppSXK1WdV3fuHHj3XdS27bMjtlS3p6eUu3H0+nV7eZEbDFWOjte7MzHV69dvnt6WoWRSlHQ1Itwt13BtZv4iV//5e1faf6Tv
/pv9e2trl0FPymSPDrCupSuqilFFEMfRooKwIaxL4s/+r2vfsNHXjrYPVq2q5s3rzdVHbetd9hteyB69vJIhJmrmPPDe3dTaseT5vWvvrG3sxvTpppURYXZAUAzrg/5KGcAcn0vu7tXum5bMjWjRoo5p0gDuWSA+oe2mbRIUQlc9TEhMpBL2U5Pl1n9aD4djeuT1XLVbibTWd9rWY53poc+KFdTh3mxjVUTtuf3D3eqnJJZW1fecXGGWrL3oWg27JFGJSfvKylkULIWopokmhlR0CJDApEWI+CCEZMN6k0ESNvlh9737LXrB5/51Osf+9lP/Lt/+X/hw4OYusB1zplYOkAkFBF7LHQwNQBUOw9VkuIX5/2163v/xg/8iX/6z37hl3/xF/7MD3y3rSvRPpH6MEl9m1K+/vylf/YPPvETP/5Tz718GWxRuSonI2AAUZKKa+mSd3Z4uHd8vFIjMVCmph4Vg7OTs826Y3A55yIlhPD88y8eHx9vt1vvfYxfc2x/orZ7ej705LtPkHcivJi3oxvwdw7+fLvauTKudsyyec+hghA8oIqICuSBU8kwOOcIaAYoSigARqagqIjmiB2hI2ItZiV4cDTwURXUHI9CqEU3ZuYDE4GRDdEiRMQUhlBFJwJ9tEmlrNaKNIECrmKZEFBDgrpuy2yvoVADIgdHnoC1aCIMjrGYcAURrC1cVU6KrrPGrAu14DHHbpX6WHMQOKhnMfdfWa8tO8+OMFaIHDHQGFDP23YWoClwvG2f+4Em3dzGtw5Xn9xOQ/qHJ/HX1vhixf/G2F4MXLWSa81CJpDJAIRMFE2YLZe+mPYpFVUizH3uCh1vsrhJLmZFETgWiiZ3T/uUfePC8IaVYn30aGmyU7307MtTdg8ePDjZYMaxOd8TQAix5JPF+el61atkYmBPVkXTVdz0sRUzqeqIDAWCMQomtR4gDR6tYCwCMFjOoQEYccyG5Ih9cq21VR7D1e++7GbokFWVFZgcAK4X2/F0lECQaKiMRNT3fd8nsyFqQwwEoFy6dGiPueREREax70FVzflZNnRZXK/Fs1mPMc1me0eKsOkjuQod1yPfWyzWb9cbK52H1tvGcSGsq3BV8+Xp9FJVz/Z2Dw4OdufTOgQAtsGb1LMzu1C3MoJDLJrH+7Wf1eebxahuRORxy/aHQJinC/rXERKe3lHs0MxKSiePjtvN9vq1m009FoWigqBayt2797fbON+/hGHknAPnF9u0WbfXD/aobFGKcSCHzoPk7ercnnl++tnPfew/+g9+pOLr051JG/vgpwbcp9Z7r+KJPKGUUkoxNUaoc6G9vYNnn3/2YIdffOZSyt3J8rwzW/WlADlu+h5ilC6mruuZ/dHB/pWre9euzVyIL7x4HdRSJwxVKXZ6fFb5IOVR6k+8z21/SiyjcQVgLshgg3XRGw4ZBWiICGTeBaJQNfM2yTbrOqMbX3LBty0/eiSLU5A0Xp3D2UlXjWeL/piCbLsztbizs7PtNpNpANkMY06VXEpBDoZOlJkaEGJkMC4ZUkreVcvz/itffkuMTRkpFMGuL4B+PNkB9MGPXN2IUahGjoP33Mf1aAR/6s98dDp1P/Lf/SORqqq4qBCbGQ820QYAhENyIRmgmqvASA0coT8+uf/SK1f/nX/3B773T3xwvd0gbQZ6X9dvx83ohRee+//88C/+w7/38Vc/eJ1xxRgA1HlgbgCVmWPqnOPDw8PNaltKSVnQ1aGaivHifLPd9GamVoiQGF588eXT09PVamWiKaUBN38y23/6Yjn0GE8//mQBD3yqUjIKBqpW21UztZsvHDHbeNxUtReRvu/7LsUkQITktsXaQl2BrkAcuECgqsVsoLgPmL7lnFULMw/SQjQjcgBgCmisaqUkNWP+mkgQbcghACvStx0zO4WQFBRT49SMnDmHBVxSySBZBRGLdGCGSMGPQXzqc0xdLpsVJuMsYosIGymmOfVxON2r/x9dfxpsWZadh2Fr2Hufc+7wpnwv55qru7q7Co2BIAEOIDgApkhCHE1Jlk2FHA5btMQwTQVthS3RtoK2qAg7bAUpS5ZsUaZIkZREWRYBcDDBQSAmkcTU3Wg0uuaqzMrhjXc65+y91+Af51V2doO6PzLyZma8fHlz73XW+tY37LTPunMCCDKOS4Bx7NuRI4SEcV45ZatsKy7nZRyQN9m3pp3i6Ue7u7//X0i//od0bP/8ufz8rP09i/Q/b+izlHS3q2HEnUsso21Fdycv7IcmuEId3S1E8VDFiTwxZnR2BSPajkcHt0oZr4ahEEWIFFmrYAWxMKrMZw0EjuG+52F1drnLugMKNANVst068HZz5bWkyKOJONjkKpFFRDjOh1KVkoARqLOLkYsVBCFgpOtQCDCn6M7ARhMbr4oSuhlDsBhH1dTRq7/11V/60XeXerjTbeAGHLarkRkBTEQQSNxMPaVWVd1BrWbp9xazm7duPXny5OrqikPr7kzsjpPz1dCv95ZzbGIZRmA1pUghtV5ts1ze3FyOVbX0OcxvvPnZly42Dzjx6vyxEg1u5+urFJbanz796GkMh1e2Ol9Lrhd1yvdQJSenCR9Hd+dr61gnQtW6uH/0ePuwjuNsudgNW6wyta7Pd+7/Xc0RPFtvXqvjAyKK1IcPH77wwgt3795974MPVDQGFi2E/ujJAw/3bxy/fHXxgerA4OvzdQS/eTA/XW1zNm9C63HeLarlcaSX7hxcXvzcv/I//bf+1J/+43df5E8efRzDIobg6FaR2IjA0dRqlUpEGGK/dWJ0ZilmHkNMiRFcJev55SW1NxFK02VgcQ+lWJeaEMI7bz863N9jNycDy4zcdQjQz9NeLeaNEDiCqIymTIQh8qf/cHr2/DOzxDMxRWYRmO/dutyOhqAEXdcVGbFaN49NhNSl0022Wb+3H1NTt1c7y80ItGg7r9ushoSRY6nZjRAAMJoQooPNBTJhAad5F/ttDx6/8zve+vDx091m23VdHsecc9M0TYjn5+cN03x/7+rqan++2G3W4vLGG6+N/Wa9yv/8v/A7/8//9v/97bff/8LnX62yiUyIMVUkQjMyUAIwJzFzwtorh+BQQ7Jaebety8UNMAk4Zu0ogIybvb3YdPt/4l/99z985+PXPr9cby6YI0IAVGRzVKJYhhwC7O8v1pthu8ujVuA2tXsOdHZ2PvaDu6tVRFCrn/nsa2M/bFbrmquoNClNnnrE4J+yc7+F1vWMFADPBYqpqiM4aODoGIF3r3/2XtMalGYnA2FAY+bohKq1VjMTC+Hab/M5B2yYtN+AAG4ISCimCm4OjMlUi2kIYXJidVSHkQgJEcBVHY2IEZXAvJomDgyAhkRk7pFoFHcre81McikXOeFMcoloPo4hzAGAQBfdjDDEGALXmIomP9iPy6ijB0p2U7gNtGzxFrWLrO9pjdS1FGg/XeZRm3SOYQ+tg3E+azRjh7hz28aAgQF0FFssGvnp8aM/8Pc0zv9mz78S0z87w88LDWIYx/3ISvPtrKjEGQVWe/LOFTE7Bc7VkRs1wBgQ0cWboOI1Q91K3ZS6
HkcMHCNm160Iplm1WM1G0gFx0Mbxdnv08sXVd5/Mf9fdNmH9GKJTWC6cF9h0mKzAKJ4BK8JgPiCqsSkJkoAmINCwE9oBDkiGzBAmN6ZKoCEAAKABwOSsi4gM6KBRYmEwhFoo3OoO35iPsouhRUQw19EDEwDxp2YsRCwiogLgRJhCYuZxHD/55NT9m9aVTdO4IysWbe68zp//4rKO25Dm3SKCrr3OIySk9Wb3ds4Pnz762tknj+dwY0kHs+ZgXYX3Dm6/+tLB8eG9F+8glY8ef3Xdn56dPV1fbkCYITFFByW4VvFNSCKBu05EOsYZ33r51rZukCGEZFXcbWp2vql8f/p6vi2CZ5mrTuDXSn1VrbU+fPiJI77yyishJFVHdLVxzLuzJ0+3m+HkhTcWBwfsQoir1TBUu33zeBYwYDBxxhBSA1ZlHI72lkCP/vj/4t/8xz/z8WuvfoE4T2wHoEFEqk7pRxwCxciqOTWOuAXnCGHOqZXiY48qq03/7gePYkqxSxzCbLbIowVsAnNAu3F0pLWIDsNwGRNkHR0wVxjrNs14LH1VE0UDadpALKZwrV6iqbgzABCQSyRAYs2aU1owdG1MiAOhHd9KL7zWvPr5w5svpuO7tndIAFRr2G5qG+bz2ATPEXNMikmBuJrGGJEcmMQNg7uLQ2V2IjIXJEEuedjs+vVuvTo63B+H3axJd26etIkur05v3jq6efP2g0ePb965fXFxkVKLHK4228Ax9+Xq4vxf+WP/w1dfuVdq33ZkGlV18hU18MmLrZq6O6gx7mmdAdBYBzEABghFLRfBEEK/q0e3jnM++GN/5M98+NH7t1/2XC8ATKqP46hexjqISN5ZCvHwcH/I9WK724k5pdjNAMP52VXui4jkMhChaHnhxRfN7MmTJ1NQe+Bgdl1kTb/pBE4tx3QSn0fhnyGH7l5rQUQMPMjmxVduzw7ibuhrcSMWRAEqBlm9GBTD6kAubsVNzMwURH3a2D97Zrg7MiETEonblEfCTE7fsNiLYc6ICNdXCa+/HwQAKNd2mVLUQtigDFK6URf7syrm4mYIHppZF1skcrIWHBz7lMxcVN2dkUMiT8vAQRShIh94PHHabTfB8o2AVEavNYqT42WVM+PLsr5hdgvjo83mA9FieI/jHckzYjBFp34UuhFh++hvP/naz+vu9xyGlySt6y6yyY6vIFXdjZqTDK69ym7WENWqk3PcjGI0dRkRPVD04k1oxqyjpXUJFZdqkS1Gt/0mWoaWFh3HJroaBVguaHZ0+L2///v+R/+3P/ov/5k/8Zf/+B/8U8fhnOck1YDIQhAghMCCJAZjrU5CIbtxDDAxYhxHb7YuA3gGyAq5elVQQ0P6dMS+DpRhZkRnJBGJsUFVVO9Fbr9xkmMJ3Bo6AujgLgoAAQI5OE4RLUIEzJxLNoXdLj9+dDlvlk2a1arT+ogIRESrBFpuH2//B//j70o3ynYzljKuSwVPATlxc3h4Z2//5RRuHB7PH57/7M9/6Uc/eudLmlfJOEoT8pzGLtji5q0XUqtVt1p7rxWruqqCV3QFdzR1QcRruRyhEyJFrdIddu3J7GJ1kUJyd7H6/CX5Fpz9G5Pwcy93n/RQE4tGREopjx49app07949Ncu5EgUC3O6uPvrgbRU8vnG/mR16aIR52+fNZvfC3ZsRSimlikVYECUPtS+rJtHhrc3/4U/+X//S//vH7997LUSoNRMRYmCKCIGoNY3jgG3YJ48MTBC1isnoVgEdGQ9uzL7r133+S7/4kwFl7Me8ExPPOQ/DVmTc9eOuz6JEcbHaaj+EPs8uN2HE2XufXA02X+0WY96rNZkl8GTm0wcwfZhmhk5M0cEWi70Q4tHekZRdF+RwTm+8dPOFu/sH+8fQHF0JadorehDiSdfulQxFsJojessOdRfM0QkdaHJlCsFdiWDySOekVYB4zmGx2oztbHbj1sF2u/3i5+7X3ZMXby+O9mmv01uH6TMvHe+12nX6a7/j9WHz4O4Li+Nb7UsvHM0aA8rzZQjc9Zsc2MCrVlE3JCrBK7qQOU1dql0PJXjpoQd2ZjZAAKpVMURzL8PlK6+dfOnLF//S/+zfIaLbt2frfmPSAQBSQZIsRcTKMMYAxwcH/WbcbAcBBObZ3hFhe3l+sV1vSh3FJDZcJN+7fz+l9uGDJyLyrECrqohODKVfPUfCc9LT57HEZ6134KZITTPbu9ldbPuioYIhRHACRidDNAqETCE10w15NhkQICOGwOzGhIERzU205mI24foKAByc2AkDcRKRcVAwcEUEYA6EYXoyISI2EYt6dW9SWMwbCqy2f7x0gKcXV71USmG93TlUD7LTSmyAkBqMqQVoxtEEnFKFYLumpg5qHs/HUkSSiFU5HepGdB9oZhbBZNePome7WrU7z0KYX23ltQMbfLOqtYUljSNVGrHfgVluf8EOfiTT72ra13dSpWf2FebYzboaeuakZtAAtdTOR5EQY1NrTYx7yyaQPN1uEAIAWdSqQwyI4FptcbDcbM/Guks8G+uOm4Ni4EYjIIKhjJhvfQ//xt/z/U3b2MPH68vteyChSh7BV3nwyNAEFyXzJsTBlbgO6gQxVm8wFZtyLQqiGqACGvinLN1AzgpT+UMwJMfAwUXNjVKsNXcO6LixOls23ZzWq13DSAQ1y1h2EcP1ph4NkfjasFABkCmZWow0jiWmDlGJwFyIAoqH0JxfrP97v/HVUj76x7/04A9//z7Vq7a5Q92s2JOOZu4436fG9oTg6HY7rrcPV+exrSN8uLeMp+vH6/PhnVrSMd1a3hgzLI8Oh3y5vVzDSGRc+4qEIaTdmCMxEpp6SE1VMwjBqmI9euHkk4sHUKRbLLZ5C6V82pPi86Pu8yjN8/fKzEspE/t4QjlFas704MGDe/fuvPTSSx998HEtRjRlLJe3f/EXbt25P9+7q/ykDBcusNv0ZHrnxo0H50+zjVQJHQwbD+4gAekLX2z+0l/5f33961//E/+rf9Hp6uzp6Xw+V4UQSEyQa9c0qqNW7bp2q33qWNlBA3iyYuC5IX/j5VcvH58dHu5fnT8+Obmx2m6Wy/2r9XrXu1oZSx5GCWk04oPDGwpivAfK6xHGXYZ9Xh6wanarhI2762QHSE5TYLdT01FMreSgbosZH8yhiWEchgJtE0mz7PGhyi5yzaXf5QRK88WyqBgOMSCogQ4Mhh7BxcTdJKaYRdEmcxVCTOa4Wve5J606X8bDw/To4cVrr3zhyZMn3awVERNtmjlEeP+DT27dOHrjpc+dri8Xs+Xl04t5N3v49JNbt49ESsDAgFYbI0/t6DprRnNQIHI0MxM3AAKHMG1ZnYhTQnMHUO6HvNfN946P/9x//KP/1X/+d1957a7zpi+7ZVpIyWSNg8RkJlhr2d/v9mZNv7kso6swkrXzuRqvLtebzdpVKJA7DHn30osv7S33Hjx4JNnM7ZkL6aRBrbU6OBM/X9yfofDP03OnA/ypygkRYtXt65+5PfpQjYEDsYKagyQmh6rg6NEUIETEZG5TzBeRMElgA4A06/qdD1UBLYS
GfZJ2gykZEiKKleqSSDFqiMBIoCaibohh8iFQd4eqnriA+Si2qt0i9bUo++58u7e/P0ofQPfSDIYRojA3DgkAMCg3hgwKjhQhVIpFW7h7Z9lfZUloDvuOzcHBWQnva39jeTAHqpozujThYLYsPqyIvqq254g9a+yu0KzTvSFujVqLvExDsb91efEdxzff4Gq9IJDDLOZcm500EQbrmq6gg/lYcogxiO/mMVGIH6+2+/Mmtc1qlJZCsBRZVasDcYrjaEwt4No8R4pSMaYSaDaHIlJDK6ybU/vaf/bTr7z/9un56V994eVf3AhraiBCHEdyEwYHZIMJyacRZjG6q4IXMEcgEIPqivOA5L5TLyGWOjaEamJOjTJHHFpZX5a3Xrm9XX/86IwDx5mUYtxyXYy0Xmg4Ss2VICY3BrZxpOoeCLhxzQRkgVOphYkNzEGI0d0Uygw7cVfXABwC1eh5t70Xb3z2sPzMz3340lt3Auhi7v3mqVqfTEteGbYFWyX3jG2zby3umQVcN5F5ub930KY9u3fnBcDNk0frw5vzNlHdWSJw37pAhOSApVjLcWrY1cFckZGs94RZvZ21N145unrvdK89QmqwFDfAJrhbIDBz+bSqf0tlf9a8P1NCfcpSIBPZrNaPAO/ff+HuXXvw4AEiqBko97o6O8WufSHFBYHlfpsI+60bnt+9eXJ2erEd16Hp3BnUq9XKre761149+OVf+od/9I988K/+a3/kzbdef/LJqVMFwonTACrkmLq2Sl6mUEXNGJDdhRkChFrt6EgRZk8fnxX1tz98tOtHxlUbZ4psXJvO9g+7+bxrm4Z8DJGEx8isbmUGy7lbLUDEs5YKT6p3Qlez1ASxSl3isBTAmMKMuJQxxVYRILZRgDQsmpgt501pOMy42Wacd22wVMQB28o9pGB9chwhkFljKO6OogFZtBIF9oqYx7G45RdfvvfhBw9Pz4YQwrYvT1cf9uMucgiEMaRNf7l3sF9D8/aj0yuBhw8fdeliGIbD/YN333vUhHj7zsl2uyaKEMzEpEfkIUR3Y3VyQ+LAAEWqKSoJIiM0np0i5mrF4cb9Tsbl//5P/qdf+fkvv/76jMJTkxnTUjEbRU+qplYEHW7s78UYV9thHLyaQgjt/NAknp9fbtcrMPXgoqWK3rvz0nJx+PiTx7vNJsZoAu7OTKpiZg6OMIUvfutkiUhTwqxV6bpuHIcK0rZtzQURG+o25fLG/T2etYMMITCYR01ArhAMwT0ROiJPXLkWXcwnvjwiMELDyIzAKFQ3Ow4YHUi0qA05dEtiABGFSLMYHWVMAUOaKfTG3sVArKjqDA4qDkwBI9VSaw8lmveyt1yuLjbcNrH1YWXFvFDNtAwJ6rim4IBAW8WxdZuznS3aqLl6mqc80NyYsbf9HV01xsNuiBBnxCTSuK8l564bR7rwYR71uEJX9anmk252N8BZHtyXO+hn1Amk/Vr/4UY/YPq9VudedhgIzWTsQtAMHgECqRmoZgRndIfQYTCFbEqpHUaLKQVcq19JaUJExIDAYykMymwxdNVpzCHGVG3lGmJsFosGyAI9/eTBzxwdfLwdv9LDzz1ZBYVD9xGqVaohcAQU1cGBDKKHsdkVUzNCMAIFV/LU0mIow+C6bANJRa0Nc+82IClX80ZzgU4oeMnKlJjEpnC3mNBh1IFx1nCzg2263oBDHvt47TdCBpooppTMrGhpY2tmDopITG3FLKoRGm+k1qGNy7My/MDv/MIvfviTB7MX5fJoV4atUqCzxoME5oZcatceVAgayuDrGpiAGdBEnzw6a5YvR2bQcLkaq+w223Fj9eL0EbhIHbRWpmDOiI54LfT8tB47Q3TwyGHsx/nx4W41XJ5eHC9v7kzN3MAJCZlEh4ndY6bwq174Kf/9W7ZbU63fbDbvv//+66+/LiIfPfhwuZhPHdZmt3n73Xfu3bnbtvsqmMuO2cqITx89vX37JFxtzteFAwJjjGADWKQ1jrfuh2H75H/3r/3pP/iH/vv/7P/k1+7Wp6vLbdd0yKZuzKyWCUPJFlJUFWZDRK1Wzff39j580H/8wdcRYpZ8ePPgM595YTZbNNTwjETEXVOIiIzm4zg2KYkaMiOGhdPQ94W8i61fbjHNUkqqiiF0baduy8VyNw5tgnmTAMBU42xGRIAYiakLJataRcSUkrtTiE6awUrJwBhjZOahjmQWIlUpROSu0yZHq4C5kyYOZrKcddvV+t2v/8puzC+/9OrF5RntxhSa7LVt+GBvr5QCEC9PHx3uze/cv/3hhx/OI/W7q7fefPPhw4cH+91qdXnjaJkiq1ZTj8wGnGsmYkRAAkLKdRQpMTIEjb4PXk0zM293Y5rJS7dv/8xPPv13/92/GJuL19+a9dttwsZUpVak6u4xdpZJpBwfzmL0zW5Xsleranywf1zVL6/Otrs1IKgzuBYpL7/46mw2e/zJo2EYJs/RaXv5jZN2/fabjtnzE6SqxiaNtWAITUhVxMFTTH3dzZbdjdvHu3ETulREIoXqQk5AiECAOtmMTid3cEACIkc2JgafFp8hbxFhmOFMsgjUCB0P3uCutotARuzMFRCJDsvQEpm7i4gZqWps2NRCCCqOUK1YC7TAltRVeNUX53hrzmXMw9Zv7C37MnLwib7nVsEhpbS3PwdQICwCZgviYh3iwgrXi+35ectHWoL5qg4Z9P7ewocChFeXuwqswY+JNddzgDXhgco8NNuqWXYth1qyBqmCP+H5Xtt0qldeEiQ2IKLihjF6FUcvAd0NAQHRzMJJOx+sXJRcK5gTuWHsFBwhCLqDohlTMQyGbg5qNSV0KETEaERSiweeSTxjX28vz24ePAzhgnD50i10scCt5fR0yA9LJebPHRweBNiVVW+HYrWIDOqjhCpUTADXoVuYjSWQBIasESkRKuAIbmaAXOsQmu5svZ4t4kjaWqtoqhYMMHJy0p0wsplH4hBCHmtENi2MkSA0TaOqVWvAIFoQcYrycsdRPEYig7HURZxt+/Xnv/fg4/5XHl2VN47jz/3U1//Qb/kdI9X92BTogduuu0G1knLiDruuipnxfHbQBI8xzps5Mg11e371YWSfakfN0szbQLLbVHcnYK1Kn3pLkYMTkV/T+QmpjCU0UdlPXrv/YPtu7dfNfLHd7rzWlJK6ERE4obn/Kivgb3k7KZu+IW4yCyGM4/jee++99MrLTde8//77TROnOItay6PHTw8PT27fvHe5+iSXNQoFDo8/eXx8fNykxflqm+vIGEIogGglbCrO5/NX3rC/+sP/wT/68j/6X/7Lf/i1z9/66ON3Ay0RUVQDs7kQRRFBcnARsxTbOqobr4fVqP23f/GLR0cHQ+5rrbv1qsRWrqiZNYvFbLcdZk0g4sXsYLtdtyFcrK/E4cbB4TxGAXf3NGsNqrnHtgGHXCWGpmSYpX0y79qWiKTUSRjp7tMdiGy5oskYYwMqTeqQxtTNESznwRFFhEBTig6VmWrNkcnMSh5jjKJKlMAhxigiR0cHw5DvzWbDsD05XCbOy/298TAws+m4aFOdw/GNkx
m1ZvbaC3fUhNPtmtcnNxZ3bx/KMABaLZUBmcBEAaAJ0YzElcmripm1bYvoUhV0REgOmutwcvMIff4f/dkf/+s/+ndvvhiattmsC+F+v9siKSOBmzvsdjsCvH3zxHQYhlyr7voSZvPDg6PdUC8uLsZxUK2qHkLMZXzh3gtt2549PdtstviNLIFrGcHzJ22yEXp+pf+N32Wacj+mUARVCxxUtYT8wot3DIRjEBFkElNmBjdAIkJTBANH9ynjihKhuxVXR4whBDWoo2WJguTAva1ef+uzT97bHd29sQi79bAjregR1JUqAeYK7gxExOwIIQTEyW5vgjqjWBUPWZRrbRqKCg2ziqtJG9syjJHCbt3Pb5i5hyCI0DS+XB4ANgSdYw4JE8k4D/GGHR3gZt2vcD9EbRR3lIppNUIHg9Au98HkpPQ3Iw+K2xTMpAktu+01zVq0BOZSOdKj0R8A/9OzNhQZndFk1nTiImZVakoRVKtKDOn680cLp0PtFoG0R+wjtKVYSo0YiGSOxMxIHrAyKjq5YAwRoGrBiB1ijB5nXUhJDhoayhPWDY6ro242AlCokZpM0dmOGE+kBQDqNMPoXo+cxdmb1JcsJoo8FjbDjZday9VqcOooNOhCCloGioEZTa3lRioAY5HiSIZm6AambhgRRxg2PcVOxZo2AoAbEKGBmwIhbXe7rm1TSObiDkRw+/btBw8+csQuBi1araY0Gwa5/WL70pvx8dn5d7/81k/9rV8K7c15KAudhzaxRSy7SHvNHDbrK/CWOJKTlSE288CNA213+YBmB/uzyVN6MW/aLg2GB/ucgjOG3Xbo+9HJnucPTPwsRgJEI0xtA0wKZglPXr51/vUHbY2zth2G0USJkDAgoUhB/ieIV+GbKWjPYJnp7uWcmXks+eOPP3r55ZeJ6L333mnaKKopNG5yfvrUTPaOFtSmfHmBRu7h4mJ948b+yVF3tQVRMMDEyFirllyW4s1rL79yef7uv/6/+bf+uX/un/mh3/8bT1fv51Gb0AISIU8e9SLFkV3ByTnF3bB97dW7X/y2N3ab7XZ3KWKPn5zlAZaLIxU+v3jvzTc/bwiPHz16/ZVXH37y6N69OxfDdnWxi6HdRMlg5NBwGqASattyqVUdl8uZKqTAeShpL4VAhBTaNBEqrou7Q6DAEbFgbyMQYnAi2u0GtBoIxlGpcUJ09yzVHWOMuYxtTMyzcRzni71SKjLtxh2AMXpKodS+SVxKf7B3UGpt0yyPA3OKFN2IU0Cp7pIiEacqQgEBULWEEKQaAIib6zN3LY6hlbqtUpCoaRoV0GqJ5woCnN3iy6++8Is/9+F/8Gf/ynp7+sXvPBnGi1xzE3TMa7cQItQ8RlqK9CH44f681rGOshtEANvlvJnfGsbx8uqs7wcVIExOdcybl198qevmZ6fn6/UGgVUrf/NJex6HgW+lynyjvWAkw+u8X6kWmGIMu3F7eHe/WcRd2QZmQnZzIEQGN5xk6e7g4ARIDIBEjugWEBJhYHKFWjGPYsEB2xrkC9/11htv/uDbX/3Ptcv3Xv2e9S/+/GyxINmCWYqx9FsO+1P/xMyIxima5GksAAQaESOCoTggo7gxQXXPazpYtJiAVEZBtAgAk9GFG3Rd8/LLnyG7kfsKNopsCVsDpJgOjrtHD8NDD6cJDxhFqxYfqoYu7bJsTQntlaatdShAtViLSKSrOiIRCwyWCfEA+EEe92jvHhJWXaYQQLOVrMbM4F5M1TQiTux7BgQ0uuqtVF5wux+jYbGEo2ulGpnJCRRATaRULQZKBGyhZdhf2KKhm4fdjcOyt1ztRepS1yySzTaZBjF245LbvtIooNnQHeZRW6pjlp0YpZ3VXnXQqq6E3gXab+JhE49TvNW1RzF1hIg+ombyMJs5smEBArIQFffahsQ6C9cC4oAGYCS6zl6uBU9NE8d+AAAkZ4jmpiAAUKtM6aNEdPPmzfPzcxEL3CVyU2ZmNOSG77yxd/p4c7R//Mtfeni+7ioDk23Hy7A88LSX0BGh6UJqGViK9u6ZpCJwEcAQ1X3M9fJqfbm6AOKx5N02m5FU36y3w1hUnYjpuQg9/dTtmog8gBFaJHdrUxCp7fF8/tLRZtiESG1MVgUA1A0Igf8JSQjPbuCzrznB7s+SV6e6L1I3m82HH364WCxee+0zZcyMgYykDOr92fnDs9NzsK7dO/CYenEHurq6qkP/ws2bi7b5tPpAjAyY83hR8mbepJPb8S/+xb/0b/4bf07Gw7v3jquNeQRHMBfVmlJnioQRkWvNwEAy25wPINEK3rpxe7ce5/Plk7NT9f7W7aOn508//ughQvyFL31ttS4fP7iQ3gxbbxfnm4w1bs761eW4q376ZLy6squL7BbWF5urq6vV6mpvOZvP59cOhYSTs/kU60BISNCkMJu187ZLqZ3iBhtOWuo8dG6cYksOYx2RqenacSyEPC1IgPj+i68t9o7GMTNFMwMi0eLuw9AjeVWZuCIxpIihFgGAUko28xAqmMB1aNc0PqqbI8TQBE4hhJAihWgGY94RkYpfx8K5xUSl5gy7/ePF8uDwP/z3/sGf/j/+eWwevPRKGLanQQKJzRpj2iFWF2qaeZGxi3R8sJeHfrfbrTZjtRibRbs87Hfl9Px8tx3cnRiyjCLy+uuvLxaL09PT9XoNAA76PFf4WdPwLQSY6WXXbhrX9Z0cCNBEpyD7pml243rvxvzGncNsY4zsagzIzDiZRBLCBPRM4yYFBEZEAgSVQBxjU8S3g/ZVJbAIFNt81/d8zxde+8Ef+Ss/3qg9/vC9p4/e+cKb31VKUkAMZNI0cR/R54s0LaWffZ845YMAllCnNQMbRKBxLMXDYKxVpXopEmOsVYkCgKl4VQJoHjx8ul71AA1ap8UZSLLGWne7zWxWUtWrvn80lDJkrHUAHRC2pmOATb9r1BqVyywbsTKMB9VoHAaEq7E6pCQqRKrwidh+wj0VMG8ChxizmiJUsYltGmNUA3n2yZuHvJBL2d1sW9cqWWNKAdxNK7G6BeYYI6Gbu4ID+WHjkbt2ZsRm2Zt4p6PV1cXpkA6hw9GsiwfaXzD1hEndk4+R4DrTDqiNrYU4elVo3SbOkYtzLS4miK7OXeB2Rpd9WWtPiBtxjF2LIDA0IdRsBKAuMkVWoqPolErQxdCf7aw6RVKX2Mbdrg8QQAAAFKbTAkQkWgOnlMLZ2UUpBZEAa80hRBHAofYvf/FQmnzzRnMky5/7ygOOCyqVxtniRr149KQOO+a5UdmpYJjV7BDdwc1gGHfoI5BX3eS6JlQD6fOF6NjvNnvzPUREglnXXOXBTAivh1u8FtfhFEqmaEikak0MJj5LTT/m/Xu3bC3nZxfHixtSqotS4CnTY2IsfEtB/5aW6vnbiJ9ShlxNwfu+/+CDD1577bXXX//shx98nCW3XVAsbri6vHDho5O9xUEnXgeryVnFTk9P9w+WKenlugg3iBqDL1JrpsZqondf5I8f/8z/9k98/ff/wT/0Q3/gO/rxydmTOusckYdcU2qJKOccQgAwNGQMMQVmHsdy6
+TGbLGgCCkwETapm826ccyLvUYtH53cqTXH1hvSjnG5l4YCx0dz3Y3NDd7bizlnlW3bzJrUHRwcmMlsfuTX7ZkTswMQkk2cCjckSBw0OQGSQyklcNfEdrvepQayQsdsCBRQSo0xBgITQ6IQ2tOzi74XZsplAHAp5RkhJHCqsiPGWmuKUaqEGAzIVRUCIDrQkMcYIiCyc62VU7IqVQ0AVMVMAMgdIRQzns32cs6qAmDD2O/t7e3dePUf/J2v/JW/8PeAtyd3pGTZbQeAVmUMCKXneTjKPlQba/GDw9R4urpaV/Ghr7HtZvOlUbNal8vzp30/MEWVol7N6xc+/xZRfPDRR+M4AsAUDTZpLCef3ufL+q8mQfpzqml3B0d3mYxnYsDdsOGOT166qTgJV4CZ1SxyVAY3MyQEIDBAJ5xADAdHR6cYPMBo0BcfqgMxMs4X88++9b0PH9pP/9hfR13P4fIgLp9+9Csg9f6Lr4z9Ig+PABxqBA21SggBTRBRRNKnDB8z8xQwcLVc0btELYUuUSkjR3GNVbgwxdTsxl2y6dtUACOipkWHldsqMo29YRcbtvVQ4s2QjnXsaZXAnY602QXpncDM2rhUOcBwLoNgNOTFvDkwbW2yiPPQBoK4EFb3DeGdaEuF0rLZ2GJMHEaHAmKuy9A40Q4LERFyIALXwLruK/YQIqBSMxncV7bgRmZoFSsQcNfwvAsxmJS1+yHYzIAJbr1265//7GfgL/8Xf/SgpX7Tt/NF7jWFJrJWZ2Mmr8DmUNEFkUdRQEIMxBoggEZmcaqqGi2ocNFxRE1I85Yab5BC2PSbPDoCMJsCBcw5D7XZiSk5eIzGYgbAyenxJ2uxFBWJsG2bi6tL5uSq0zbfyEgJABycmfNYU0o87TI9C83dtdZ6+EJq7+e0nB8e8t/5L75Ks+WQQaD2qKM0y0OGUMTICXOuoQbNGZjFMNeCIDlvYuRx3MUi4NS0s1qglBExLBaLFJZ5DMOw2m7X7iylEj7jujgSgbuYOUFE4hQBsAwjVnDxGuDopTur7XrUHCKLKugEFeKv9hR7VsefITP+nOOHmZVSpt1jIDbRodT33nn35Zdfvnfv3pMnT8aamYkoENJ69RTR9XBxeHxvdfG0H8ZEiOyXq83hXkcHsNqg6UxxGK24J6MxUgfV9/fmw7j9L//qf/LTP/WP/vC/+Pve+jW3nz66AgAQUa1myDw9cshxdMZBirsnau7cuaXgi2WaIN6qtnl6ee/+C2fnp3dPDks9k9LMl8sXX355WG/rmF+4fVNVb71we/R+HMaD/S6lNnLc3z8Ep5TaSdrkAPiN7Rww0rSaBjAHJUB3Tyk0KeyGPFuE7eUme2yLL7roWsqo6N7ENAx927ZiZgbL5V7JPq6HWZtqGREJOeSc29TmMgImVevaWS2FMExkaubgVVUKM0cOiFhKIQrMXMchxgYAXMU0AxiFaAqA6AB1zIw45nG5XN66ef/hw/M/83/5C1/75Q9efHlGQTZrC4GzZBVlb1PXu2aEjlGQhuX+wWY9DrJVw+1Qm3bRdHPCdrXZnJ5fgo5gRhFEHRzefPNNFfjow49qmfJYPMZgZpPm6DooGL7RQHwzBvONN8/eiykwqNYYGwWrXl989QXrQEYJkdzVwTnFItI0qdaK4Ru9iF//ZYhAZhKaYA61uGoE0MCxW7a/6bf+9icfLb/+1b/RhkuRrJyrEeS0unj/eO8oNHtFd1ouIihYk8caAiF5CDESAxhOiSZmTY7sRIkpErTQzlu3OlFEJ5toYDJAdSOedroKoMPQbzdrwOqsiEgBXMnJUwA2PzhqtmtYga4x3iVrqgt6oQhiZJpz3QZmrxFgEYiqKHA/WENJLEPkRSmZsUB4hbGtOgSDbDW6GYxFMAZAy6rDmCszmTGZqQMYRU9Ns3fVFwnR0RDUdSRWodE4IxcOuljEg0WXsA6bJynOjVdjVpdFWvS/8Et/n/nwzS/8wKVcQWvFd9pua6hjSWAItBN1U3Jt1ZeOS8AAIIq9gVI0CL1yNjQgosjO9TCGWYiOyM0sUmSVRcNtkhJkNC0O1XLXxVqloRTd3DE4NTEFbvNuXJ9umzhTwaZpUhNKFpjyb4Gfcb1FJPJ1FpqITkNMgE7TxnTehfbuZ+e0r0j603/viuOirp3FPc4hjeQ9VQqA5n0i6IjmrNF3dberfXHfqYwmBVzamffDecmXq82j7fZJrVUN8iiEbYzdFHVQiz4ruIaARMx8venGGJzzbhD1KYFwnmYyVOv41ksvbPNuMq3ONUcO5Tny+7c0U9PI/6yHeoYCAUCMsdYaQhCRyYB7GMZ33v2g6dqXXnmRQ6qCxMlczMfN6urRo9Pz87zcv9PN96rLUEUqXl2MbM3JviOcrTe6HXnUrCWWPNuuZb3agOn+YXp8+va/8Sf/1L/zb/8NNz46OmbGXHrzCgBmAB6qBaNGISF3CBGA0BXdXM2qRWrf/tr7F09354932zU8+aT/8tuffPzh5S/843d2pXl0Plye5/Onu/c+OD07tdMn42ZlaI1puLxYNW3s5inn/OxjMTNwcDVwoAnVNUNARCw5Nynt7y8vN6vTi0tHfHpxmVXdUUQicyLKQ58CmRRXk1yW8z1EJBfXYnqte2ckLbUJMQaKgUxrCIRujCBSQCUAJGRSnWKAIrG7G8IUpljG7KAh0GQlxQTggbkBgMD6ysv3CZu/+Of/+r/+v/73nj5+73NfCDHudutNDAUAHCIn17QZS3JaDvkqRFwu9zdXu3ELOWM/ltmyWx4ecFicnm/Pn16AZVQkhmG84uDf/u3f2e/qo0ePSx0QeILyJneBCZYxs+dXqc+K+fMHD5+zP5raiiI1NQ0x5Dq88PILad702lNMQMGJp/8AjiHXSoEnM1F3dVfwSVyKhBwDoUMZa83qiii0N5t/7pVXPn538wv/+Cc7usKSHbgiA88wGRRebR4bFU7LFGeEEMjUt6Y6wRdTLCUimkEIoTaaoQBIALdhwCzDroTYmSYRwyhOuUhBCIv5vqqzJwCu2futgy+0zMeRnFChVi+zlqL5/o2g6hemn5isZJz1yk6bXG0QQt+hXu7EEGaAvu0NfO0q4PMmQR1D7zsumepoeDyLAHYkeGiN56qIENirBOLB6uBSn0suJIfg7qZVU7gqeR4Deg6ByigAKczQJRy0d5guyi5+5vYf/Nx3dz/8Y/9+c/eW6I7Hm3df+v6T1/RHfvhPS5cZA3ggQ6xKkSyqUiUPIeg08AR3FQMARyAMZqzgQCmYT8k5RdUIkQw8IqPIKFaqoinNCUKFlccxIWgTKXAdi7hyar3UVkLWps1Pvt7LLjQd7mp/4+R4uxuDt4RcbEuEgedYiVA9xKraYGkCLJbNk4vzJhzWWMMYetvc+7XHfvOs0b3zD3c6uhhhQCDXQW1o6Vh3uUoN1O3HuB+DRNJxfYYgddy6j3m0UXvGRcKLsb9CbiEyaERAwtHGobcaw2g2ugpRdAMRm/wtHdEVEGDyGMhajQ10IAwYsIJwwH4c25vt/rjYfHi1nJ10TGPuQwiTb8mEgE/X
L8YoovRc0MfzbRUiTkHGU36Tqk77Vanjxx99cuf+vc9+/nPvv/9u7jOhMzrQULf5dBAZjw9vHuwtl1dPP9F+bLtO1+vFLJ4sj5rg611RBU/IckGxrU5F0WQV23hrPvuJn/wb/+1P/fzv+KHf8Hv/wPcf3eLzp6fDkOdpRuICOQIumlCqWuicgriF6Gxs1Xa5fv7Ntx4/fpxz3u2Gs8vLG8vlvOU2wEe/8tW+1tliFhFudJFUD/b2t3mY1b15lwiwZGjS5EZriDAZjgOAovMUmQJw7YyIYhADyP19iXcoNbGU/sa2n7lseo+RRXJQBhAlQm68cNfOhu1Qh54YRQQQzd20UCBnqyZgIXA0MxdFDg6AITpiNXWzECKQVilmCgoEIWAttY8cQtMMpQIhIFcxEQdYndy6UXX2H/2ln/nRv/aTAcudW9HMzi+LSAVIIkZEJpWIgjeivXtZLBZSfehtGGWsa2xiWiz3926vN/2TJx8P4w4dVA2ijWM5OTp56aVXVqv108dPai3MU1TZ9SNn8kCHKW/0UxTxuTHRiYgUKxgEno5cAAKEalpJAMy0jqJHL96cv3gwlM0MZuog7hAbdxcEMCOm6k6mhBSmYF4kUaXACmiGyStKUgWFcb7XfOd3/5qf/9Lpww9/PLBHr0oh9/0hMXpBNYxaTs+j97fufk7tztkHv2Ce09FNfLKT+lScx7GkxiGToTohqoeUqng/6PwkmTlb9JzFmaBy5RT3HWXUMmQ3FOEKoBWocHIozDuPIuYKNENIqT3bG/ad9pfjdt2sliTctIlENc/iUCQpjG4n81kLI3g+SKGgj67LptvZ7pjbG91slW2lIqAfDfZawHU/pNkyaDRyAge1JrU1FyJ0Dm6CCUlBkEMET2hVi1qQmMzRJLcp3WgWWTLGgcJDUOvSa8xH/bhtlx1LdiMJh3n84j/1m18+aYe//A/+w25+IOZoCGiEalJAmMLcvDzDBMCnCGlwAEeZjoWCT0JxQkE0IUYxUkMnRVZWQKBK3IZ2rCZVDSqRTqj0FKKITYZ1K93Z+6vIjaoQwMHB4uHHTxBBbFwslk0Try6njTyocgxVFWNaKgBABLDgYZT+zssHJ6/qWoJtZX0KJlprJYzuyiGK9shoBsQFq+V+N1s2wTk45VoiUi3FHc3FQTebjXAktOAcIlVxIxT0o739lObz/bS6WruRAhmAm6uqATgCgk3rtWfQLTgoOCIAA7OWnR7cvq/l0fbx6fHB8ZWQKTDTOI4ThX8q8eM4PvPq+6b+6puDnKam7BnmiEi5bD98/53bd194/bXPf/zhO+vNlXtEdQ6GNF6cPxrqeO/+ndv3Xl+fP96uzrvZrO6sDf2Nw1kTpa+46iW1nYoRMppzCi7Wj5sbx/sG/Q//tb/+t//mT//Q7/udv+t3f++9u+Hjj95zcZotiXSshhZYg4InZxkFE7Vta7hV11cPjk3p7GJ18+QIEZuG5l3ICqGN+/szEzeRmnez+XFjWsu6UEXE3eDNDHLGpvEYERHNCBwCkmlGJpoitC2MWavDLo9u9cbNWIuHmNru5p27N1Oir371q0SUIqGxSCUzqc6NPX363pA3cQr0ut4lgoojMlMUdANwQuXJPQgMwN0IjdjExsidQZPrEIOZD4WdcB5CqmVAZYZY8kBUbtw6ADv523/9q//ZX/l76+3l8U0IOG42IDxtyAX52mQRAMfttkOcz+dgPPRlLLlWLWJNt9ftLWKYP3z4Sb8bi2YzZQJm6nO+ffv2S/dfevz4yenpqavFGHfjrgnN8+vTb9mg+jfnPro7xADVXLWJya+7boNAnQOk5a7uugXffeF4KzWmudXMDICACDbBYiBETICGDG4G5uDMxEROYCZovLaRwJtuf5DN7/jdf/Dq/Ojs9JfaLtQy1DpMizgkJ5jOORYbG4ml9jdv3tk9WRrUkk3BUoo+IhHVmqcACZzguSroishEnMeeMbgjVQUOGfqTw8P4YGiD1KrUIFcHoPV6u1070CjVYox5bExqAXQSjAlSOLq77AcoGDeQZ8XRNILOYtvX/lYLnw3j28W1uiNd5l1KEcDGQDE0Wx9q1QGaFOWJjCBc2jbUIg7VnImMuNRaXAEQXA0DVhP3ZB4yA6GagxqIWRtSAD+IkWATUwAkL4asmd5/70m5zHjz+NbVau3WQiiP+3f/9k8sZnrclYYXauAODB5UUdURxGBAJHdzt2tS0zOTfmSHqdZPBD0jt+CmHhwyOgEwIDuJAxgFNUsBrZq4V8GCwIGCKRnUbE3Xrj/Y9k+041S9b7qum6XtdtuEWS2CTgAkVtowNzQwQ7Gmm5Wax3VlYvecB0iL2au/Pqxpk8Ls6cOLujlCVvDGjACQqWVuEByRY2yUN7GLMaVx3EHgoS+MVN3cTEpmcHIwBWPLeahWKXVAIETbMdPQd60Q0Zh7DmEiP7iTugM5IhLihCDBZLwE+imcTlo9pdT7ePDiwdPd5uzyarl3WPK21uvczmkWnux3Pk3a+yfcw2ebrmdcGkREclN1UES7OD2rY3711dc/fPDh6dOLJmGtJaEb2GZ99d679ebN45Pj+zEuVqunSSqDf/Jku1wujw8WCfuzPMbQMcex5KAITFlH0D6in9xpa9795f/0z/+NH/mbv+/3/87f/ju+a+/Qnp4+qIUZQiBDYhCoqt2M3TSPWyRcLg7JgTnu7x8QETKbSCCez286oag3nFzdnKTs5l0HYH0/EoXlIj15sgmcrlbSNGkx72jC2MkJ2B3NzYEur+pqK7t+U8Zht10dzBZjuUwNP/zkfG/5UuaW8Ijj9vxyM5vNquisS7ERxjL02xiCKk7xcegEaNfPS+foMnW1DOjgiEiGIupu6hioGYdMgdo55EEYW3dF8iwDmIkNGPqTW7fB5j/29776n/2V/+TJ49Obd9Ot5dYVVAOgeRECSKmptQbCUqqWmmJzsNeOY+l3xUwUtKqkdrE4OJFqnzx5Mg5FtIx9n9rooEPevfziqzdunDx48GC93ohIIB7HcdbMnk9df76y/6pV6vXbYhpDYECvSilUFXU3syY2pYzY+Yvf9bkrGbo4M3ebNVQkEImLG4aUXIOBIqKaTu5fAKpm4obOgKTK0ATwvd3Qv/bZVznd+vGf+CkrGXDQOhCYqxOwq4IrIRax+Yw4hKHfqFWzZE6z2bLKKkQ1cFV1cDdnQKtORGioqpEjmNQs1CQ0dmqkIogESv2gnulzN+5/8PHXRYCQSrbLyyuwAsDbTY+MAcARpDVoWEJe3uzwneF0W2bWUGMd6NbCer15o+Fj5CcbYYdA6clmpCadYFOvNnGvKS7jmNeuXdfc3OnDHrZdaDSNdWXciCozIMAgpbpTTOyA6pU1gDlxACBAooBuJHWMLS0SExSCxEDqjtggu+gOHU8Ov391+WPY+DickeTV6sGP/dTNLn4wO5zVkTi4TkMAkoERBRGL7A7uU3gKfVpcpsDkqVucaOjTAXFyVZm24mgB3J2zu6DWoi1RSz4oVDVGYjc0BfNqfUd7H3/9QeMzY69DvXP/ZLfrtQIkYMahL8M
4MoA7EEfAGqkzKxxAM6INTE0mePM3L8bZlfXUf7LLlw1R7zpjLqpChLWMiF5rJmZVVXERe3p+1gSlCGnG/WY0F9cqmt29aZqtiJAgA6mhGDLUWjfet8EODmaqmlKoYuBOiHbNl/nGkupZ5QUAR3QwAGdqstbAIGCHr959+kufeC4cg1QxM8llUuhMZJhrg7vnWDTPfv7NAzVMycLkxAHcIgPlvKllUNW7L7wwX+x/9PHbiOTO6M6cS6lPHpd+O96+e+cw0bB+OtbRbUa7CFqXbXN4sPfo9KIfhtDNJk7OPCTt84CgQoHiy6/cWK/Wf+4//n/+yA+/9AM/+E//7h/69qPbYbs56/tcsquVmKCUQtgRJaRYiiROtQpgjjG4KwMxcz9kJ2zbViXXUpjZDMahzGbRwFKI2+2uioU2h9pU4cAWUi11aNKiZN9tx7YLWez8clit+832PLGtV2Pp+27WjKONg371l39RRGbzOI7blJKPeH66PTlanhwvpO4IgxqLyvRpTh8w+hScJIFdXH0yEwTnKV/Wham1ukUeZ4s2Z5RCTdPWuml50fc9Um1aOt6/sxvC3/m7X/0bf/Mn3n/7g9mR334l5M3GLMw6zjYofkrzEE0h5py11sPDw7Ztr1ZPhz6nNHOMIrB3eLzcO7pcbS9PzzabzTTOzebtZrdtUvPWt30HA3/wwQfjmHPOkx9f0zTP3MGer+zTyXleP/G8WpWImNhyDSmWT+1XmxCraabhre/+YulmoSZSdyyAjCGaWdN2AFBzQQzMrFZTJDMzN6RIBPG6PXRqEHnOOMu5xnT3b//Nn9xsH4KtvWZVR3JEdpMw7WwwMpBD3W5WRWTYXO4vD1VPx6EQg7ujg5mHyKLGTCYFnYhRzQNSzjLhQ04IlM1C4MlVPgL6drsFVbdgUGaz2XxJAAGcArYCF4FY2ZQ1NBA7WRyl42N6dOpXbYE46wnn/fD6/iLUbSl6nkJXHGOzDXAjJFI7WHSkuJGCqatl2A96L4afNugbj6Np6IDoeq+BaOBGjgzBmI2JHIGKY0AdkecCNJm6JqyBg1QNLAwLpmSIVSyGqPDwqx/9NZ7BrLz1u77r1//DL/2503LKe0U0mY1glaEhbMB782woiAkgVFd3R/iGgHiCZRSqG08HRh0J0JXQ0L2ioTkgKoKxR3YhKIlcrYoLQIiOCbmUYqZoGDranvWrB7s27feuAHbj/uFHX/sgcDJxRHZHU02pDRAqeWB2JYjAAXgAjrNdHe988Wj+6nixIb3AzUeGQNlKgmRmE9dWxQGIKKU4C9xCHo7mB/P9Wx9/9Mvko+TtPDaXq2JeGV3QAMxdVQoBQiCvxYl5wfPZso223a66ruvHK0L268EU4Xki4zd32e6GyO6ODORIiurgCe+9cefJLz9kXXRdt9vtnJnMCIiZa62Tr9Oznv1bfnwei3d3V0B0VYsh1bFwAEBYrVZ9Lq++/tJnP/PWRx98VPq+a7iUERDMYXVe87C++9K9+clL43qjw7DdrqrQJuNh3t67eXS1K+dXGwdmZmRiDsCVoYJZv62R+cUXj4dx9Vf/v/+PH/tvXv4t3/cbf+C3/doXXjxS3axXUkbpUqyADl7rwIHNdiEigYFX9xmnZqzQzvaJqMqIaCGROnII4JTz2HVdqdusmUJrFdt2VgXPrrbzRYhxtu3h8aOLJs7m++37774zjDKfzWYNuMm9Oye5nJ2dXpbsROi42TvoSi4l82wRVSEXEwUDrgJAQY1imF0HBok4GKET4eSNpaaTzzioCcBkbFrtlDi4BazEHMyq1OrVt+V073BxsH/3yaPdf/1ffuXv/p1/+PTqwf7N9Jk3Dq9225JLajAhiaAIeWsIzES1lu12M+/a45OTfrs7Pb2c/IFzHWPqjg9fNKdPHp+fnp5OMQVm6u5lLIcHNz772c+utpuPP/6wlKKqjGRmMYbnfR+/pbJ/Y016/Vvf+EV2cHeOoag4QowR3QPSrm6+8H3fxTeOqK/z0NU4utJe6EoUBjBVdcWmAwBRcYwpBZwWFWQT2A+IgRtji7i32Z3dOL7zycfry7NHEXPxLUEyBBUFAldrYuQpRFfRzCIgSd1ePlnGQ7CQMDZMDM6I4KQqYmrgjoBAABhCEjEcKoUIhqDgVigwJDPsZ4ll6AOzx9BwAoDVarXZXgCqaGmaRhUIgqCaM0VvO7SM9+91q/XobVibd0Jts+xVO5Wjg8OLyzWEZqOFOATwwYaj5aIdXKttOS+aDsxeafFHt9uvFvxO5mwQ3K4xWwBAEq8qwACGvJRQg7pRiImcrY6lCWlv1sVAAADUeBhrkRhjM+NhUFPnUMyvZvl2X3CLr332zd/60X/zX6U9Kz6PphBAQQIFDJEAycaASKzZkXyKpPeJrDplChECohOQGyoYwHU0mhGCQjR3UAEXdARqABBxNBVAIERHUDNRZFbXLoYP3730MdDMasn7x4sQ8fJ804aFibpDiOSe3DykkHUXuCuWu3ZvGHbVc6lpfjK/8+28LoPXdPnhGDRmF7WZ8A59ZgpmEFPnkBAqQpp3R4gX1XQ39MysWgFsN2yZWWshIteac8ZZM3VVBXIkpGT9btU1XYPUNqmUEhJb+dTumq65ZVNRn3Dzb0bMDRHUoEWsrmIYwK2h5Ysnq49WWKXpkqlmqW1M9E+KanrWfz27iteH41l3ZujOxccQiDAWkZi8Zv3aV99+5eXXvu3bvv2dr//S6uqiCalWNSttZ2OWd77+9r27rxwc7ntK1OjQXwXDs772AItucf/m8S7Xq+1uKGNoG66dI4bIajVE2O02zPH2zROB0x/7sb/693/sb33XF7/n13/fd7/57Xf3b9jF6Vk/DE3XmiFhCJA0C8WI6CpasSKkWpUZYmj6IYeIYOpI7tg2bc7StAvkpXOjBlkIQBlgs4WcdwCAgdXykycll+1iMUshBzDmOAy7Js1uHLt5YWYXHvp6cmP/5NjGskUM4f7J/t7coCq4i3BoRK73tJNKy+E6fYjV0A0ArgdHBwAgIAx7hMgYcs4GIzqQ09HRLWjg3Xef/Cd//m/8wj96Ow9Xewf+yst7m23ebreokGBmYEWyeeUWAdHU8jgGxhfu3Mk5Xzw9nVz1nRJxmM/nIc2HvpxdrIZh566mAmDmpqp3795/8cUXnzx58sknn7gqM1eRGGMIcfJqf352fL7VeP4gfTpYXjO1AmJVreDMXEuJMYTE63771m95685nvu3B+S4uHXSLQLM4C05IhQDzsAMRigQAknMgEi9MARMhTK0zxtCmOCvCl08/mc2TlLzZXrnswGutNUEEAzOYGpWJYEoOIbFpRXIyzf1lN4vzro2JTZTYEDGEkMtoBlUrIgKqq8fE41h4jmTkdh3IVkdsJ2WoUN/bzeOXPnl8hikTQs316ZMzgK2qFtmoTlgImBH4tEYszX5omnpxGn5ltnvTeK26Mfkix6dXV5ipRjjT8diSqQNo7KsaEmBj2FiV0T7D/G3A/7/K39bEmC+VIoWg7mJm5KKm7kwUoAdkQ1qZhY5mY9UOkRmQqQIqODEECs
SopVaoZpaIQE2xUbuAZvzl938kxafdMV/mMg9t2FHpkusIuAE0NFSDKRbRZaJiXxcXAp7GOMMRnMjJriNtpo4eAdhJzdUmOTAYgpEDoBAxqKl59erqRsAQgMDXuvlwiJTAFR1uvnJy8ckpWECwgOQOgIpAIlMJpZBYARRBnZjiGHav/MZ7Or/EIZ6/vYG6UB+1dIzFjSNrdTNHDqbeV6nmXLUHLrthzUK7nMUImaoVR0FgkQEIiUFK7lp2IsagLmh1osTaKOvV1t1rVZqcYQC+KULPQf0awgWgiYc71WkCNwZ1Tw1b1krcHs/dyvrBbh6ju5vnLBjAmSdEzJ5p8J4frs0FEQHc7RsPADNDCMwoqgTKTLXWKZr4g3fevbpYvvqZz15dXb339juBIzFIrcghmj598N76au/eyy/eevlzH33wy2QFtamDb3OfeFzMFvsnNy5326vNWkMTA4sBhTSUklJb6rAbhkU64oYi+y9+5R/83Z/6+0c3bv+G7/t1v/0Hv/fFl7pxHHPO29Wlp9DGZF4IiSyAibgisgMTpBg6NycfwIEpqEHTLYuG0HTiqWmBW3KVWivUytP2yTO5jYMd7O0xucloojEmJ1UvqQGAyMgeOTKYbhzKsp05UAwRScdxcHfRqm7u06PR3BURAWwiEAgBALkaACCRTT6oiFrUnUC17VI7D103X53JT/3U2z/ywz/54cMHzvnoKN3Ya7TA2dPM0ZAiY1bvEQgoJO6k1N1qN4t+Y7kkosvz8zJWDgE8mHloZ8vlEpEvzldXl+ucs4NOCuyxZAz8+hufW8yXH3/08OnjJ8Q+2YFNzkuTIK5IoU+d03/1Wv7Z2Pdco4BEhA6OLmru3hCHlK7G9Re//3ve/K3f9/F75zeP7o+6U2v3iIpCdk3Qgdrs6BDArtZXRDTryKoM0hOwqhJq285TSlahFux3g2IeMsrmqUmtxVK0YK346EDM1wivuwKwE4rVyTqCI5rutuPTto19v5niGGNopvg9RqqgkfFTbgGouBvbtCs0xxhLkYVhRHQ0TOHJ6WNCdxUzunHjxsnxAbiBE1B1cYGKyBFdI6ADdYL7fnw/XW2UsHlC5bOJXm8On55fWGpgZusKQ6TYNrnf7AUypDMdaqSkDCZNS4dN9wc8/p92m78D8x/cW8JQFdCnLpADgiGRELVUheKF+lcshyejg9SDva4JUUTcbR64YxwKsBhzRFQiAiDVikm2GFuns6ufNclEaUFlrL12bQdFDa0EdwCqAiqOoBAnVgxeUwWmNKWJOjPx0RAmiaa4IyCRcoXiE6YKzi4VVAARnTl4hmom4MqGznWsMWE+H/N5XbaNl6GN7fJk+bWvftimuVkmD0SsPgJGwlC0NtSp7Xa57O3NFGqx/Ob3Hs9euBpq7Z+qXCqZqLZgGTTyLJgYUUI0VTcF5khEkgtDHtaX+wdH8zhfycYzJEB1H82KyGKxd3Jy4/TJpaqiIQBbGypIDNh0ab+ZW/HLK4wx+qj2aUP07P7YRJL7RtbwlL1H7h6gHzyBp2hKjY1FwCmeLA/6cHF2frA8CMzbXR9CcJ/a/29quJ518dNM5+4KOkU1TfM1oktBJEZWVWVqUN1wiKG5vLzcfaW88tqr3/5rfs3Xf+Vrw3bHGBvNEOLoVYb+3a9/cP50dXLrRYda1pe7cQ1e2ghDXxtt2qY95r1NxixTQo46WLVs4KC4lV0TokiPyW/eDWN+9CM//P/5sb/142985rO/6fu/+61ve+mVV06G7fk4jq5YFRNTLSW1UbWiR3dmCIaBAFVdwEJSxdETN3t7kWDY7spWwLUNTG4GAFIRvUpBb8h56HeRuUl7Q987BhOMMQaK4ziiKyARChKX3WCMYloBRGRiI6kUcBKfBKXAOJU9BndGQ0QHB3NEULVJecDB5zNYLNM4Nu++vfqJf/AzP/uzP3t1+XR/Fm6e4OhD0bFmAkAlGU1JGUwYAUIcRfqrbYN6fGMWuFmtVuM4AjGllNWa+aKdzbt2fnW1uTg/7/veNAMqAASCXe5Pjo/vvfhCLvLu+x/0q01KbLU6wQTlTafCzBjZ3AjpWzao8M1uo89jNYiYa5nS7FKIXUynu8vf8Lt/+2/4XT/40Qf6ws3Z/sFSqpVSzXPx6kC7LLMUXQ3Rb92y6pL7gUwrASLmnLUO4MLMm/W4G/txvBALQJndQRlYh4JsHhKKKgC6Gn9q4+EOIRCHIGbubj6KAsB+P2wpsLkDUylF1cE1EDGSowKTulxnrjqoKrN5HSnMuGvEMHkqqmPelTh0tkSwYRgWe8ThAPGKOcRYCmuiJkAdDNCAO1HV4/vx9GPrd0PkZl3iu2X0WUKhLcUB6kHT+bY/SOmgbc/X64EgeEDTs8ShjtHSC/vlhyz8hWG7R3u/JabBRAFpCh5yM4dRZBE6Kfi2jv+4WjAOxOYACBIR3Vww9MgUUKOKuVdORILFGUzAoB95aEKIicDYrAluNpYhNGQaoAJYVUUmBmQAJCYiBxIRQyCaGlUnCGCKXAhUvIIhO6gUmMitQi7u6sXAAEsQHLoiuwZdKpoHrsBIV8aB6exrZ516i81D2bz0xXuy3m12tmy9qiO3QIKOJoxkHBxgty3ZcBnDrJSze6/v3/i1uu6znS6uHqyCzC0DWlFmQWNn+pTHZl5abslqbPahS4PTUUrNYtZfrUOqPFrBZOgYZQHLKsP54zPGPUUKgAWks0gCvhtrKCUGRQJOPlRFACabmI7g6IBE1CapGYGnSf965zlpLMMsqmIwU7eKAcjMAhK+NptxXT/eLNt517XjWNrUoBkGbttWRIrUQOzuAQkBRczpGooB+Abyfs2LMPCKzAkRp1olWpjjuFt9/Ze/ev/+/Tc++7mLy/OPP3g/E6Iru5NUBlpfPF1vzm/dvn94cnsPTs4/+WioeRBrVZvdZp745qJV7DajnV5tQ2rBMRGZZSZWVGcnDzjogtPxvT3x/O6Dn/2VP/fVk6MXft33fNe3f8eLL764OLk911ovrq7ArPQGjpaw6o5CVFUiZw7gQQ2XzVJV+9NHMcbku0CMyCgpVyNu3MDd0aKAKoinIGZadwgWDBRMpTgV86KWI6MDSFFkcFVQJQIm1zIyYOtYMCMQMl+jjyqk0kbOYv6pHpiIUgo39vZijJeq7/zKk5//mS9/9cvvPP7kY47l9p3D/eWCq63HETQqABGYax5GACB1jjSYlN2K0e8cHEbmYRiuhp07Bu7UJBDv7x+k5mAY68cPLq9W52RVxjGlRNT0w06hvvHmm01sr85XZ6cXUmoMVEomnqQnNvGsnh05QnpeJwHPec+FEKtUnIzu1GJMhlBEwFncOqbIfj6uX/62l77vd/82Kceff2NxsVnNZrPTxxf37t6PHC6vTinA2cXp3t7Ber0JFJl5tblaHh1lK2AOQNpU03r29JPtarVandbSg0lwoYoAplBoSqkGAWUCNnHEoKFwIkQKAAEpVacIBgQVuhiZRcc1YDKnoEOIvs46AzCgquYOFI0Ax43s3U6Gm5hinC9zrxJL2seqKRfJRZeHRxfbqxrVg
p9Auj9e591zWbKfkAV/MNixMvtQlFVUSybOHOrakKj8TK5IQR0hCsbiQZ7rVWoU4cg4h4kPWcC8Fm3uScY57NGWDhJIkBZwYzt7rViUGNKADW0GFYiCxGxCZ0kIQ6nc7b4+NKSVYH+8l0OZbEpKok7ISpTq6NJKlWIlkuVtvtPE+b/f1lLhFznlvV8GaNyIR5zEUoTUAW6Y6tqdZmSizBtJCsEsbK0NQolCaLDSg0wBHkQ05uip1mwC2gzWsfp+y4mUFEVTWC1GDOGmE7UU2wdJvYLputmakbSCabYTlzCVdidZhRasrmrZ/RPYKqYwzCHCWZBZxaM1BypGAxxJKShmp4Hoem7kTCgzt4rma2WCz6FHuzOQ0m1bq1BhYEq9Us3GrNOc/zrByZxVRBrAhidBKA9XQVAEgIDu4xlJRAZrYLK1dVM+bEOaEBRBq6k0wGRMQsnIU8hHeTdHcX4kRcrTp2s3IJiMO7ZzCimYH5i+nb7hGROHu/JZL0LiEIRijY5b8wJ0f0i4qJONCg3gdoSiKiaDkLKZkZ7/oFuEUwkXCHRJQkZq0HrvVwGIudPkr6hRBnhnU2ACKZILO5I4R3gk5JBA935T5k69FMkRkm7NCaRMJBZ4GIu4xmsKBfoA1AGDNzn9Rh16tpqLGit/Ld6SoifW/f53WqKpSEqDD3TBuFJJbdWqhv+IkAJHhKecjLmzfuUGfeC5nrnpSqoWhUOEtZFjq3yCVKKjONQKkXL+wdjMv15hhDDOfGc4cYUk59+peEJBZL2d9b7Ml9ZuAe9x5zrVNKiZGZMWSRzudI7OCcy6IsVpJSKiWPQxoWi2ViYU7TNNXZ1a3Wpu4W3lntzJRssVotFDE3vXXn+Pbt22bW2pywMLOp1Vor/Zd//Z4bN4NkKcLzpCKZUji2ROS2m+gRCcDhYE6qKtT3iRQECENBAFkKErOdqKBTdT1EKPcZLDM7cZwRAcWlb5Osh0z6ji6bUupAqy6b600DhDmI2AEH6ZmcoP9FRijDic7QbR4EhxPMd8K+wBmq0JwmhgincPMAYSC22rDITS1HmJMmyV0OKFyi9XFEnwNEwCi6TCGJSGuNu6oaGrBqNckYzhFydtNY72wqaoqF2hTQoGRmJBDJYUXVqRmgGCS8kjWjbLobKbs7sXBOZtZaZZaUyWwWEHPq4g3V2vRMgMHEDGFOlAEewty9/xh3BbeZIxJnEhiC3DKxiDSLudUSGbsouD546G+4awvt1Exh5sQ7USOo9bEgNKyZdpdASmVktr6Z4QDZLu7O3YIEXNIiMbEgwoMTgms0VXXqFH0uKUtPtrXSN+5GsEC3E2dJw7AfhG2dU8kOTjJEyP7eYWXKOYvIclyo6nq9DvLtdqvro9ZaEKZ5nWAssd2uc87sZk1dTVU1WoQnlkyomPrRE13zb9Faa6ZKiRlJZLcJJnKEqoqZendJdBXjTk7nXsMszC3czBxIzF0QqQiPjjeiHmTqLBnRE88BaDgzdzFSoEbsFA1w69YDhxGGPt7sv5cI67nqEHZYM4vgaLQLAxcIBTOYHKTu7kFuoo6ACgUzZ+EzIgUAOKm2MKV+KgpDhBKDPRGJBDe3Zu6wvrICzTlnYe4RUj3K0S2MQoSTRBIkYpG0e/eNIno/BqJIiVMWZhCcOEWnA53phYRiucpmwcxMAnBrzTRyznCSFCJE7AwCRCCI5NlVlQJZkoj0mON+PZyebNw5nDqJoefFR84nx1ObJkSLiIsX9x975L7lcmnmZZFZ4BVZFsKDhYfAu9KagxmckwepKsGZUt+ziMiQhYhyzl1PyRTWNCKCRSM4GB55pHvTEYKYKnZRE+6+Mxif5aY5ALaI/vsltoCqR5iZNqsR0aHB6amnnrt2/bGylCK+2TIviBPBGFyY2ayBqUu7AKQUQa2/1MzkCJKeW4SMbYAdfe8EBEGSmheuwd0/wT2dt+9i+6FPII+gXVw1gqJPBjsFjiDqzsxAkupBodyMPCKgjUOFoBCGUzjtwF8ebhTGkVQ9EbsGItJurRdhicMzDolckjXbqOsg462pSnLfCfV6W1yIxNMEYChjaw27pPZIKUmdmdlAKaXaGnGkxKoAbzrICehawN0ukb1DP8BCTEmwADUjx0hDYWFojJU9s2QsjBKtyNUSMTzUiFIGPLIFZQAhIZJEsru62zDiLHyVQPe2kRLORHNEZCYRcYSIzNspIjQzeXB/gIggyORl9y9077UIZzeAiAkDxQAAXeMY5v3pZF4Q4EHOsORORP1wN1NySnG28+NkRBboOdVGDJG+CgQTMw9IqMpJOCeQUC4OuPtSyBAKyqmooZ8XLAmpBGIvpeZROAPsRrPkocMR87iddL1eD8N9IhQ+T+nCspTtPO9JMGt4G+ctmCjcm3K/INscpllSuC4gItIvdVXtsVAMgk1dH+zuvbw091o1EWqtZxHTuxEKcyo+u5m79/z13sdQwKn67ucJOJl24IekjgiW7sPyewloVYsTukyfwoU7FEmzJDNzNd+5q3ZuAw/ldKYkNrS5UreAySrJjhy5025iZwnGGaSMiMxaEolwSHPj8D6rCmES2aEU+jZOu7IoAhGAZ85ZEvULHyCnPt/zMBERspxld4o5EZF2W1z0HQxESBIzw3SnZO3/fl86MHMy72ses/5mydmUzIMp5xxASjvFlJmFZ170EIKeOiu9iOTsq3LBLOZmrdlq1WtKmn1Ly3HmTYKXnA/2MxHVWhMfxAYkkZmZelNrxaNxiVCzcCeyFpQpwg1l2UqGmTEnQpjaPJ26625V2QFWSUQIIh6ttRT3DBwiqo5gETEnDQ0yYSLe3fpwwKDaDdyROcWYzJo79Cw41xHp6Or5VK/EXLZxrE3sLtSb+2G/c0zHYAJobpUgAIxW94RcqkpE3PUGyd1gCIDdnSDMqZkOQiJkEGaWLD1pm4hYWpwpdvvqrOsOm55KQs7sriTWhYmOucQMgROCg5kTk4CEwvPUCzvuMjx2hhMCwS7ePAwKs0YUFMTBPLtNYIowrZU4W5LN8QvvfMtbxv1bQuq6dEdgAtgNioU7GOIWEQSS1lowMaM1Ex4ioDb2nqa/MACHc3+8+sDaHeFDU3DKxFbNKLJjAsPuxpFtRQQxaDgZFS8hU6NAxJAEYOu2RCKGNz91Qykj4KZzyswcIPWmPbKyd6ycEnMPLzJ3gMXhZjGmUueUpIwdANrLeY8w7/jCFr1i5gCQckTrM7UeeUFEiSUiBLmfU1202qfPqeMBBEwswpHIe1iyBIC0O9lGD4swkdzL/K5JCagkJ+lfCBxs5mZh0QPckd0TwkMNBkjUgQDKWcxSSji72ybUlNJ602fubAoXbvPMUlRrIrGqgciSB2ehVKOy9KWRl2QMTyzmCqJOWGIgDcBiZw4JznCPMITf0/wsgeqTmMmu5O3yTSIi90Zmeac3i+jAtegCTnAX9bsl8y7mIQqY885yuxuPRMQCZ5rRbjIhB4eFm0bpy4xe2ZzlIKIyhCARpgyXQUUoIpACwQjJwU4AKXX1vwZBIqKbdMRdhJihZ9IJIHb7leBuvU3MYZ
4QXV1mpvDgITNzZ0gIBRGlMAAJqV8kLGclahATrQoI0rcRzNw3F0AMEGa2M7HTWdREJBKnXfJEP+5Bvb8xgIME4K5q7UU/9cD33mZi94ISUQ+umlpVtdqMOU3b1lrLMzbZQ60MfPn8+cuXlvt7BZE0lSIRYTlngJvVxOQ6pzT0liViN+pgAeAtUAYxa0RCKAj2qEQRPgUJkXQtVH+hvKn07wXhoczMBDMIZ8kmtkvgClivjzUUTOOiEJE213BmUiYzGmLcnTygVPXccoS5NcYgKZQiQ6MwNCIiRR+2CCXhbGYU2WAABcLA4UHM7tCa3U0AN8AYRA64x0ZXzKzeDE5ErWn/Lw0qvX2ICPcWQYAyN/ISPXpcnShA7uSqyliwEMzNnCkxs7O5K6eEMD673okoCYsQywYQ8iAqAIh25EKAuKyTEZgTFpSkgZd1+oav+UAqL5JUtAMAoEpE7ghwL7VSZneXlMybE7LncEppcEOgSyf7r4cJQiQAgXfjIPegYqpgLCAGX3GEou3vn/8v//npY12lNCSbcsDYlCRQyQ5Abt5XfOIR0dqOvaJNed2f1GhsSkTSon91EuJmjZn7O5GYm2kEekibDmObaxJZH0sIqyCi40/YHNU8I/UxVESUJBTmBBJQLCKCgvp72F94Zq7amBNTAri/dSklEdpwTixkzog8Fo+Y4TIMTKUjxjiloJTz2N/P4Lj3hg/D0GeLpaStNSEeU5adrN6kZBbx0OVyOU2bnFOf27XWhmGQcCHuvAQAfW42t+qtmrOkYuZhNm/nnLNZ9Ilt1RY9X1drm+aIYE7qZ5dfRBjI+jtjPXWW4eG208s6MtEAmBmdlU5EpKpcBE3hhIieqR0ccM007gJQ4E7uaDuJekoB6xtn8iAKhCOCic2sr277FeGBIKfdodDjNQJM3YFA2TyCWayjwsjNmDnBtFeIvbmLKN7cHanLSTl5d8pwoAV458wKBtxid9+AQE0jCZGzhvevzyHMZFPvoZ2oq4M8gkAu0rc1CfCUuT8nHDnmykwuRELMJNyfK5zsLrrfIEZy7YaEfn1qeFdvcYAZXosIgZQ53BsREzLR2GwDgOWLYtxuuiAKVW3NAEQkN8xzaKXNFNduns5tXoy82dyc5oMrD1wWFva55Q6FdkEw5xjK3DiNQeTMEIg7iIIsAi1jZJcIQ0Sn8zOBGaXsuXsvyxEGmLAaWxDnLCIUwRHUERTDkMLDLIgKcQCZOEVQq+ox5+6jN+8wrKCsqq6DmfU7LN2dz6/rXGuKlIkIs3PSydGTKnujdzacgWrlGoboplUChXMQ3D0oOusjArDdzzGIw7sjtkuMIeQkICKjmXpz5RCBpGJmTWtOpgHTEBYPQSQODDJMtu2Ij3B3OCi5h4W3be6BSl22EBGACQVhr4/mewfaJWLNlGNUWAHSuNS1B0Fl9eiSlod3jo6Po3miuavlBAPAwbU2M7MBg5mhRkSUUvoBut72VwVmVkohp9nXzCnO0OfEIDAFY+MiFJ40NszFjSy2Ig//l/e3l/wtvDcu25RjgXHiRIhVeO0GTN9BtBERizFR2yCYCERShgGAqi7GJaj1TNQ+MuuHOwBi984pEQZ8sFnrTIxlJGeyBAApmEicxYiLryOCOBFQPTGROZiZ07Trjm0n+u7DtJQH0AzM6KEawdQ4GheG5WwOStLCyAMuwmljtUtomjpQ1SmlBKKCVYgoEwlIxtpfxrKq263khDQyZEgDIogKO83WQIeltJTY3ZaLRb8YsutyOZrWeZ6Wy2XO+ejkxAlpHJr63JSZrenR0Z0LFy5NdWZHM53mxszhPm+nzeaUk+hmM3B/Qq3WqtUAZGJFJdOw5q4wJ3NTrbUyJQ9TrdzLaoCI5jZzMzfuaajuGhGMCGPzuxwgEieYo7lFQECBDdz6AtBD71kKZjIzI3BiFoe7E8OBZHk2d3eNjqzpe9IwtoiQzKo1XMmpXw3SJa33CDlE5q6qObo6TnbZuSLmyiDrB6qngAEOsh1qMDPvdD5nluIIAZWBoy8HAPn/lfWnQbdmWVoY9jxr7f2e8313yLEya+6q7lJ3VTfdzYxpbCaDEAgJEMY2GAUOS9gOm5AngQmwQ8ZgZAthIUcQKKxwhIxlRdhS2GGQbIQaBIZGNAESZuruqp6qqisrK7NyuDfv/c45795rPf6x9vkyLd8fGVV5b97vDPtde61nPUMZDy0pxADMvJFoE4VZAVbkIvL+9cBp5lhP7r0SuBasZs2crJzxyoFxhQDjthtIw9b6/VNPmpLeCCAySbbWIM2RE7P3XvBDJQRQFjl3jHOc97lvsZ3P8eTpPDyww/Hxjfd5GuzmuBjgSaRZc5xuaSmbzlnQgZmZ3wzA1AS5Ga0AaM4ZDRS9vC9YfnilXWtReBqIrE48p2nr/mDkIKuRQI3UuU9rL2inpEaAqakUM/DsHLNobLBmoUkJF+RGZeIyIyJyybjTIlLpZu0SGdHUsqSqNcHFiBRLhCEFjAow1gFjo6dlRgoySsyMRhNC5YsUV2WH55gaqfQuVdKxj1oDKltrcyDNWD0UbEbW+RF3pAwQbClLpFSG7urspgiauWRyKk774eaxxWWfcdiab/HBfvrsp0+HW873HO7JaVbMskH0DZgRxedbD86QJmkPxpiZq41tFs22MS7ujwq/QjmuZLmX0Du8Z84DZAYGordtXI4/9uU2Hr1yeXMcxk3DDQ42AjHZamdkmBmj6IfUcfOnox8PDzLschnuMocm3Odxu7lcRkTBdu2+3/H2oXuXBG8it+aWBZWzxPSUojXrB8vZA9MMpKiLu2eCbkMfmFkF/VzRGEnsPEkqSKG1Zt4L7zsU+QpyA51ZPipWtBF3tmLUeG/Ne8r9uNXHa+ChHQAgeTgcmotu/dDaofXeTeVFxO14czxux+OxpFKPHz16+PBhRLz77W9+6hOfvDl0ZEDvZcrbdj6f1W7r4zHDvu9QvPmNNyT13i9jP+87TZw5xyVi0i3jMufMucLtIiJirrVNphSaA0wDI2LuQ9ipqKPIqxPZnPuK5ExNzYhIBKHMKQwTDA7jEKZQa/+mXQqTgUplkeppwiCh4l6GhIxUBjT0nBQbqx3NWlpH0g5JTAx5JPbWgOLqcwIONKXp+lLbZpZNWHZl1dMbugGGaRUQAQMXdACgQYg00u7t4WptZn2JDGi1fTPBzGIdgg2mLNbLWiY7rmrn+sNEAFAWcK9MZKZkJLpRDZdR0YBOeo5QoLc22iEzIW7tYGbCjBhAMg8VMODezCwDpX+QHbAe7W0hzECmv3s+/Owb890nd73xYcfrHzt8RtvNbdNpdPO+kRTcurnBO9rOMxvMpxkbGBMJWds6m9lFkmm0Dje6e0zDPPfeZGqGhXW2hu5HP1SqIk3N/LC1OUHGzQFjDNLHyDkyCfc2RlgTya31VoksictIpT0PzIEp1n5iqj8I3mnfSc829uGGFkrSZCiCgEwzLpPZ41jSC0SaW02lRo+IAm81RcBEAZtxag5U4mzLohMYFVnSmIg070pc9n0qzfucmdIY05wj1Xuf4zL3gbBkV
k+kHJklSXfbIGVkKk3mhcAAEC0y3ZoiM+daDNDoz88jPUZsvhlD57s9H76w42Rxtx8fGjMNPbW7nSx0irvb29sXXn7hvSfvjXEisjJmQiDxwssPIvT8+d0UMgiDZhV2o7N6GwUyoZ77KIMT2yMA3Xp/99tPnpw++erryMuzrR+A57LZddsfXFKmSDRrsBwLdd91uWmb5t7Awy2lMeeEN1NKJzK8FWCCzFx8uJzVIkGEfO4iOYGL71v2lq5k1EO8j/3ZHfxmzoAlhTGym8205Yx1bcSQqjgNSfKgDOopALv8TBswcLcMFJVozuthm2SbOdm5GbN3JQLyMU0WNX7NkQBprTq27lupSXs/7DEjM8h+2MZlBT+tO8x6Zm7bdjdHb35sPs+nIsyYtZh69dWD6O+9/wHcxvlyPN5exogQqVC2zR1okGt645hzFIdN2cxbNzGr705abaDLSc1AAyLCj4R0aJ1KZcIUESCvyT4UkAjUiplJdKaQO66fpEGdRvSZQ0YgwXQwMyB5EmVwQyaSprAZSuaGNRpLWq2d0VPPWj9MZXdlOhJEj/ROrx68DklMAtbajZsAmzlI1ArZ3c2aVeOEubTo5lCTGAwF7ZqFAPrWezPX2IvsyyKQKGlyz5Ma0hBqRorsDWrmtSxZNgwr+ARJLX800si2+nqJpLeAH4C1R+u+KdIUbbuNGM40H0VjE91sC10qwoEMszLS9Mw8crK1MeVbGbi3ckl6+kF+6+fat55uQdzYJG5eefHh/gHQDrdtu5zGdmCaPpjzoTeJsXkaJXf3g22ZmZrcOG0UPEvkFmrdbuxoNPQzegEycu9mDfS4xulZM0O6582tRRI5Dgdvvbn3OXPsGZB5b8NgYeSDm80Z5mi+PT/NVOfd8zHmDJLe2JvGnUmERSTy0IkRsyCOzDTjHCPADFgRN+fo7kGdyg9z5IZMJGBMmptMoSnxNObMcDakgEFBUO0ckYN0OWbshBNyIed5wmWMlskdyhkLkQjIDAamAgZYUXOQF5BGEQSouehHXRgl4y5WgyiyZ6YR3cR29DjNnf748fn9r7/22n/1+PjzL7z+7Yj94eO+3z3XNKci71598fVLvvFH/8j/47f+M//s9//8z77/9s/dtJ4OjA8evfTKf/Af/vUXP/HqD3zxxSMQTpeef/Ds8PiQfvDc9yff3n0L56aLRGqP412e+8Euz/X09tVPfvkf/GSEj3ESjiOSqYkDbHaJuQn7nJmI+12NaJ4emmkxZxW1LWP4xhE963MBI1OyCgLHKhx5nTxEMyBvchPLmBPXpq8HG5HWNyPnnG0zc7NIs3bTbN+HtXJJFADK5qipEsZ9zt3MBNtnNG5+21S8KOCwtbKPQYNvlqHMc28ekQ+OD/Zxvn0wYHKzMTJCM2ZbcexrmjWG2eUgzywfomfZ09jLtVEKKQADrHujPPYEsM9T5GgyML/5Zu3fEEMmjPMTFxxAGxs8JxJ+kRCwUZz0c2stc46BmF3CZe6kPGiW3pDSouk2mnGeSbe7udMMzIjhbhEx6mI0U6QbzDjnbPT0PYOgitGubELSLopjMTrGGO7euu17SkoVdqGKwywWDWGAjbn33qWoZ7omxczOKweteJnClEazLqG1bc7Z+yH2S+Zwz9qnR3n/qBPdwcysMFxj85LIpoAElKxMm+FeHm3E892M7nQraSXNEHO6O1J0GExiZFo7eNBkJpBqHbQgfUxAOBy30Al1VkEXITSwWPNPJ7bJDebmabkngObkDhk9YQggK4VArgFunjQSpkhMwQBnuxMtaU7P4QaKsSOJPXKHjgdEnmnWDpZx3tVvm13G+bDZnkCmmd1p3/vc9FDKGWfTjLZ0oHEJd6cBuGzdYV3ErrM5WvrcZz90VhZBN1N0M/dGxGHD8dAV4Q0P+q1wcLZp2bbDnPPBrY+cAC57HLdWI3Dvx5GjeXvoh/MpHt8+OF/20yUys111fZSWB9OixN4LxIXWuiQtB1badkjJZFvmTKXbKVEPY63RmUowQgi53UDL/qmMOoCA0qzXU5ll/ihDxjquQET03mktM2mMSJdlpJpHTL/qsidTV7uSzGReT/aMkkjoatV0rXKy3i+XvR2dlBGn0+i9/8pf+8v/07//N37sH377C1/4/re+9ebHX/vY9/+8Lzx98vbDBy8/eKX9qX/xz/7wf/TOb/+dvwHoB37Wb/qTp3ff891f+rf+7T/1v/hf/+1/4b/5+3/rr/mv/Y2/9zf+lT/2b37HJ3/pH/vX/8Bf+2v/7p/8o//33/Jb/lv/9d/zC955+6cf2Cvn+XzbNoW140U9W85UPnj88Z/+uf/z5I+LNueeGUfvzBz7bF0Tz6AGmFHuyUbJM2zPUe+eknsfY27tkKmIKZUDUK1DsOy9tNaBCQKk4fqpTDc380BkhgLe2JorFHOH93p0gTR6jLqAW+yZUFUop0ukLCbgBDljmnnzRnLf9zVo15VyxU7P5+wtQYwkxJHPA/vc6X4IKIKRzLSRzJxJ0NOVBokGhYxOIWbSDGVEVjyTWiqKdjb3eRnunqHmXuSu3kkzITBnAubN2BSZgURIIgMIWVkmMQhqBwFTzAGglwrVDnPOnGqteS/mqOjmDBHCDlyXgmaSjmRSFLJuXqN5gxkEs3IGyKvzr9HajB2sY59CzMlaBkqljKVyLs4MSTNp0EYqSLYOM9aEUTQkKZb/jqmcgJv3MUZkCqCjHbY5J7JKHbyGjLUTNjDdCnXJa1lY64TGyiFu7n4lwJmZVc6nlDBUdKXWHq7IRkWgqI4tJEMLRXMcK1VV0DwP0o+TCQhII4CxfCZEZJAnpFcoLwFXGObdM6f14q0Y3bmHTmOqEwovc7migMF9pSTDDOFZ2BQbWutjj5i87Bgzbm6206670zxsN6dzAzK07rjWvdHmzHM864cmJIAZw2npEDJH3Vs+JgQcYIT1m55x9tbuP61a3bsT2uhJd7dGSxMk9q0j0zeBO5Cp9FatmDea92Zm7gY3iHPOzEq6KeaotYr6rsqdlboQKDx+rQTrKGX9Mc4I7y0yXECKM7NxR1qmhFK5ACDogKyVi1EyoNJbm5Jzap0SlFKmRld3WCqYUIDNqs0vd8JkmtBFpXFrAzKiCbutte8C+AjAoFwrzYKe3VAwELTvebjZYFSaGubgx1556dXXHvzYj3/w8U9+7OaxHgzevtDfu/vGi689ePrek//pP/8n33j7Wz/4g7/wOz6DD87P1HT64Nuf+MwX/+7f+o//zL/xf/3069/9hS+9vj168q/+b/63//L/7n/8p//4v/Yjf/2X/h//nX//3/iz/+0/8N//k//kb/k/9QcP4u69o6dLM8j9rEiktPPI/MbXvnK4faFEK77YXbaxE0UWhBlyVuyEiF2ZSetmc0ZrPWcYfF/9WtSWFdfqXc825fdfpSQzz5yVl1v0ARLuhFUo8FAWJBCUmpNUzEHY2GfvPRO4ii3vd8WRo7WDWSvuCIl9H/eASTWZhUJHBP0mgJzVayPSvW203C+q7fHVj4YBIikN3rvTKU0VW5SApVRKE0LlFpDCSovl
YocUW3cONfc5hoDWG2RzzKHh7jlFAsZrTDwkVjzwZURbjtbl65KKTMxEsAiJiczgYlFfkCbNumJSqWBEbG62jCajFpDlJh8jzP3K8Mtln1ayTFg985IisrWumYmRma01XDHuzAnmHKkiGgAkI1UZ9+Tk2sYGBKzPVXtgzlmkhjGuXQ8xp0paWxhGhU+Wj/EVVgqAdrUKGmOYWSCKmMTlb3xdeEqA2fJnFkllgiUqrKwsS0GUBuWZ2BclnJz7pW+apedgAbGVwQtKLXI4E+ZEK46uIpxbXfYkaZG5jxI6bTMTZYG3ItwRUmWj1uE0mDsr6zf3eZkDbr71qc7ezFqiSZw5zCwyEwKSadY5p5iqxbgk5DKoAyw5wGxerzokzjkjmtNbq8sg67+qA5Ypb04yoc1Jpnl239rNzX6ZY0Rrts+ZI2npfqBG90aCZpSmmBmLUSKzRomte4u11yQJBIhsNMFSSVoqjU0MK32p2WryARjZ6GQzdN8iUVTeOnuGqs4Bo62aYVnUIDThQwfqXCk+kHLmBF2JOZQrmSFAdGkaLlTS+sSWCMN0GdYD7GaKjAxfGqQ1lJbO8b6JL32s5og5+sHnOb/zez6jvHv/7Sen09357pIxPvcLfoHx/M2vfvt3/c7f/32/4Esf/8Sn5+Xh7c2D5+8961tvt499vvUv/MH/1QuvfuHytH/646+/9cbPjA/44MEv+NaT/ZvvfOUhPvPKg5//c2+dseWmyHgYfY+QnEIjHd3meLbH/Prbd317LWedD85MY4AY5blpWhVQBMwJeja2mGPbtrHvzbeI4nZpRRotIBz3yyhbCRsJmq59fR3rtX03M4PRSswCofIiaObeI4Yop0lTSNp0d9poLYwEdGi5h4SZ0tBoMDc315zZ2r3DF7w1iomEXYAOi60xM8cYCBoGeQsTMJeDF5xCyg5tkXy5MqEjU24MhiFAIFNIwQSBmnH9w8VXizC3kSGEOaQYM4tKT7PICe7eGt1KF23skuWitWaJIeu0l7tM5F5wpnJeiRwcYy+CtpkRUkJR2WWa68cZkMCyGo3l1g0yibKvKRlQmrV6HNYugYZURHhbPjdRwLoBLMJJqfyqSnOVGMIdpcCvBC1cY6SmJk01Xa23vJgzTvIqfmBmarmTTVaTbwTWRUFVFrFsyfqut3jFSa/rhythMbT8jVEB3bA6kDARlGdOzaz9qtY6lzsnAKoCHrMOxVrjh2Sci2QjE0tTB2Snb90QqkoHg02oPja3ev2ETGoNEYVURXMeN98cBikuMZ4h2bopBxhU7ONZ7+VdQdJTMyPQDnvsG/uI5X1iMEA5g4jDsQGim7TcSZk552xticUyBffGZT3EnrQoqqi5N7OtNfNUVm8hmJMpJZJ0J+cK0s4sh2BpVd8CWDKzMcWEMoFicchAd074tQiskYsSLZu1nMONk0opzDTjVp68lyxHRR5QARHRlUl2UIUYcn1pNzl3xS4ZneaWidiDcIHuXSJgm3takJwHc8Ei0zgQ6KYxt7RwAjSa0aIS+5DLb+8jv67FnZv38/7cvZeL7DjdffHznzDtv+pX/BesY8Tc+uPnT975xCdf/at/9c//47/xc3/wj/2+3/Lr/ge/4pf/4PH28QfvfWPcxXd96TP/jd/zz7/06GM/8AOv/IW/9N6rn3rQbvK3/e7v+Vf/pd/33psP/unf8Eu+8RN/8Q/8gf/hpz/9hY895jvfvEt0yA0XsGX0iGCLm8N2Pp1+7o1n9O5miEjJmhuwj6A35HkOB0qCkeBIOHAol57qm+ac27btc/TeYkbxLlVIxfWRw31B55qFgWIqLTZCxIyAWZgZCSworJn5iCEtL6DWWsSsMlEPsNFJF/ZSXEro7AAgczNZ3I8LKAI4Wb2PtTOQvT8aezjPqeTsYsm2lCEzVtoHNIzbtbks0UAQVEKmWsCBlOx6UpVykhnp3mrNUB2CKpWOTRE1tSQhoZmtvagV4TyrhW88s3EJ0KrDsti2xpGkQZDCil+snDOIQ+XXZVZlCdJoylikkWqIJbkRUsnoMoNuhEnz2v/uQpgh7/2LEmCQrT4TC37YC0mtQ1dhurtbtcNS5tWw/vrJLFiy7GEiS2aKspvMnIuqeOUjVP2BHMar72d+1F5/OX2scRMAtaL7tHJd6ktBSJlyLpPjtSgQAgH55iGZoqOlsJuyty0ns+XK36wG/vpz5aLMBQHTRMgBSw2QNKbyMqzaEWrhJDlRnznAhU8oa3knA7iPkITuzXg4HLbD8/1ubmbueWjL31DpKQJoaY6NAUxaupwzZL7upASR093nbG4cl2zOqry9WQbDk4nmtrbfZpkBuENzjuxg65EwKVNjR+vn1huofe7eyqiVmcN9ibNgHPsUjEZpTwLmERGJtgjvmdD6DDOT3jOnUIt6yKeoxVfKUJRCNQNMZCn3Srx7RQ8BXrenJVNe94mupGvMvFDpK1FBxZCGydlmBgkpG9k3nzNFtJDcRpODfaKRFypMprKlpRAyYOmJKh9zPfOFO5NG2ozR+oFA8yN9y3jy3d/52jhfLnvbrJsd9t2ON8e33n731/363/i9P/A9f+Nv/blvvLd/9xe/a9vee3J+/oM/+P1/5t/8P/yVH/3Z//Av/9t/6Pf//k98/Ls+9trjv/+ffuUTr/ySz//uB0//7J9/6+vj1/+Gf/bvfeXLX/7xr+bUB3s7vBS6fHDshwiTCO6XcXn04MV333v/vVPevHjIsSsjwKnZXAJm8mA3iXslMZdbGsLQ7xcJh2Pf90vvPTWr89LqEJfoF4AQC6Sp55W6Po1JtmrZrttIVEsL+tLmlE7KRJjSYkbR5zPKSM4iMVOtuUSryNssjglbQ/HuqzCMMQC01ppo6oo5L/scYdyANPM97ghXtsxG+ETSJi1izIK6lQlFxUHOWI61RlxLbYljG6ZVixuBDE8vSn4GFYHmbN4kjZiIyjk67Hu0bgYrJ/fM2LaNmkaLhWVVq7C0uJnT4ISBdS9aay2S6xa53qwLWeYVxcJ1EV2Ph7M2VhZGUsuhiZATa+9ap1aCcZtzrwpNpy9XTc45bQm8BSAmwax0lMUzLGcuEotCDcYMKJMfDgdw0TIGoEA2K3ZsFcvVG11R8qUvB6DrPINrLNcaIMhErCRDkkRmZMobJEvl+m+0Iq6WohujwdbUQGbOFsueBKiEFxcJ0RRNamQ6RIk5Q6707ZA5R0S/uuHXmndKqIuI8IQlHJlEhjBklPU2YJfUHLNB4xQ3h9uY++Vy6RsoGzt1MOYg6elQkpRj3yfJGLPwyZpegTQk2TMHeyuxWDAV6UYJcwbQKhEyprzdx0Ye5j6Vzf2g1MwYy+kBxjIXoZIV+SJpMopTZPAIJe6jKBnF253ZJKFMKJSkyxLwhKisaAKj1zJFTEBlfZRIwi2TMjmmrFlVDiaZCgOUzFTYbvBUujyTtZgpfNdpB2+Zuc+9nCIyE4xkSmneuMSrgcwR1gSvvxVKMGGenBUer6s/hmUWc+Z6UD7SvzhpGSEw90vrx0g7HtsXvvO1MZ54O8yk8Ly3beYD58u3D47vv79
/9aeeH3H85Oc+9/SD0+c+9om/9Xf+9v/yT/w7//If+Z993+ftH/1n3/rN//hvaH7z+MX9T/+p//3Tyyt/+l//Q99++o/+lf/5v/fxT3/mD//R33cZ73781Vfunmm7fXA+32U5iplHboe2/fhPvnG6bC/cbuend94o+ZyzmW1m57AMmld5QUbxBSaZMcMMkdm2LVMgY04zmPl90S9lH0CYQL/HYkrZIRLmyJBGjchmbldxNhDN+2UPMFeWbYqmVHgrzxmsLDdlJLtt5Ua2dreQ0eXKGIWU0yyFcn6XJOwxm5lFnuVMGRVTF2MDaI01r2dMZZAeKRhSacjlqyElVPCGrdGcNYODyih9qQ1lc0vpsB3mnCPgjbGolmlmamBWVO/MrOTS+q1JIrMXhEijWWFEEWMCyET54UTM1LRs7j2KxnNlIuBqEw/F6i4qySOWXXB9OJBllnW7gVMyyM1bhW0bGWkZ5WlsZj6viYNglnFmTSpkJxAZENk7IqNGnBL35Qq/5cqt9cwMJY1zH2iQ1H3DOgGrFQMEsVa1gC3gln7fRH84Exefsc5dARGusvks96taykYVfCivwL2bRRk9OIaKsNMleYPy/iIRtZAqkErPCkIsXLf6Fvo4n5JolVGFMtFPlGksuKAkkSixsEgHg0jFkDJphO3B56dLpB+PR2QeD6AJ5oLRg0YYAtGNpGWOrXelL4Zlra+4GspC7bwZWNKHGmxyoKbJxZJNSzNEjAxX8nIevdiaBo70CaHZ1GVPeivueCYbbcfsjlIkZebMebns+74HfEbOqRyzjVBZfxQ/EUalprLXx0oSvD+JgAKyGhUlD0Zqmp2VtzmlMgkiyQQMRjrgEiqKwUzOhBeD01cGdkABunfz9EiEw2DsTmQqwgiYdTYhE8wMkiW6vcL5qn9JK6A53Hto4hqnmdeFAoFkjpmH3mPqMs6vvvrSy688ePfbb5pn5wsyN1wydzPf59Pbm5e+/JWvmd986hMvInLYu3/4f/QHf/MP/abf+3v/O//P/9efCegzX/pSXE4bbv+lP/GHb1949M7PPfnqT9tv+j2/7vOf/OTP/sSb/+DLb378Oz7+qNvlg9MXvvfTbTvNeK7dhd5ae+vtb+7TLzF1tT7t/eCcYx9QnwxLsyQ4jWlGhGKodY7YF7m4uB2pQ+sjI0tWrgWAwkhB9JVxqgLWZ9E73KM4LKrjf7W8MjJjGOFuktzQzMcYMB22m/PlTuseEbT2P8gEoxGzbq+uGSLZu5Wvk2bWng0Jx6MRZzdGbmYtNRMyHap9MxutpZnlnkhzO2CxW8TKdEJkUPJ2gELIQj96hoQEYeYkA2r0YhPOGQrBtjmmE2WMNecsG7WkXG7mOQWYuysjJ5ITSGsmIWM3Q2+icmQHdFUAeUHxMaKMtIoPv9xQ6O4ec1Q9Kbwjy+ISpEBzElBj1QpSCWoSnHOnFyAWi+RRr01aessA4XNMazUxyGlAFFdxxmAJztevNVUULF93c61n6SYiMhWXmjOslqZZLg4r2rxg4dVW1zRzv/hev1BZh+Xr1G1tYqUKFL9GMzNlpuVO7+4+8+x2NHjk9JbeMtLcj9n2Agq8PmsgV35KD+YQELNsbdCdrZNrM5WZLA9a48w8ssEwYVCAFJlEQiPnzeYH94iYEQcz5zbHEPXs+fuH7dF2sJtbkuFusHpAHWYxB+AdWTpYr1HEUsiyPZEgwW2LGFZa3NZ77ykR3ay8x6M1q+/DjJGjQa13YZ+xHw9+PBycYUzD6P1m5rDW59Rmjkggt8OhdxeSibZtGgmADocn2GYOsrE7WXtmWdvO4+zGloK5syFiREBeI5UCtBlzWjrAdMmyZT4k59hgaiWrwwqzj9wz6e4ihvIa3maePOdIs5mpzGU8NO/7QhM8UznSG0OcU+S+ClC15AZPREwHEubeJc19b61JDNEyKuNJpGpiwACaQHDm2KyNywf757/w2eMR0/Ngj9P2zEuS5hgJ+MN9Xr7809/4+KuvfOoFf9Tt//YX//pPvnvz0nduv+bX/HMR3/zSz/u+v/Dn/nbe8Z/7vV984+tf27re/PYb77/zxrY9fnTziHj22uv7G9/4ey++9Hh/Nr//wadP50jd+CGxT/XxzW+9e/TX87ynvFrwVFzMbNsY4TGP2+0+qobXObett7PO7m2ONGvubpawPMWFOHhRJFlsM/my4dglEE2IqJ2HbTlzeLm00ppnTtkcu4yH0GzNhB1yt5byyz5b65ecnKP5ZuTYA/fkVk1Zg7ZLodXM2El6zjJ2wnnsrTmYm7d9P9NFt5llHhF0Gi1zcsq9ZyDCohBABvJCsjsy5eiCG5u4S4G5yZRuU1DIwI3G5RAtkyAdW4OYkZLaskDjQLrgGz2RuUPRtyPE4OzNIi4VWYRs7j73ac3LsjgKtFnYbV4yu4NwM8YU1dwUEWYbGakLaZlH2iYMOoDGSi7gFBo5F7nQEgurJRRtO6Z2ukHb2AXuNBk7GDP2NUawTGP21kFYaIBIDjaUr07vPXMiFq5RHVpxMzK1Artt7ZCV7O0g7HMG4XOgtRY52JMMqqWgZW6KzCBAMJNAOgunQwjplLCVKX1EeSJTJFoGEO5OJzJnku4NsBiJ3iOHM1xoMgx09wLVhnCzOcaestE6Kc+RbhQaKCXN6E2hiAE2Qg0I1Rs0Zirm3lFMKuTaMGeMJDbrMfF8Tb0WEYOXJE/7wVruOt04DnzYdDiPMLPD1cOZbhEBeGs9pgVjs3Ye4d5gbezpbjlza8gkJ7z2+Znb0Xebj3XDmNZz69sMTDkLO/Uz0DffmjfkSO03h5ssZ/9kbzeSunksv2NryM4meiTMfMbw3mKcXMqpy2U/n/bmEeiYmKBiv/RQOkRkRCxjr7r56ywuIpdKpHhd7BgYlli6vcJdE0AaFFGYoBFLu52Z11BNwmRMKRfAyFBAStoKMg3MnJlorSmvNJiFCgDwRIYyl9WSMalZc+XWN0oxIo2buUmZiDklt0SYcT/HZz/7EjApeD/HpPPWEDGn9xZzXC7vvfPme5/51K/3Fx49i689f/9nf/mXcBh/69j55PA0xqtvfOuHX/3Y9zrt2B7kaMz+ye/9vsc3j+72D37yp9/9js989nf9ul/69K03uz16+uwkS7JpZmZ2b29841ukLYoFSdOcpSxtzb3smyW11lIzIhR5kawXRbqM92rahnsvnLFW2coEhIK50lrb3H2MXENPbeoDvUEZMbO1ZjLvyIzIq9GClQlEK4GMQ2OMZj4yt3aYGU5GTPM2RqQqDRERSqW70wqgVzc3rOg450YhY5rYrAHICSCb+UACcGaU+lxweDETyPSWGUOyBMzRm8YK8KpADVm1rJU+cN3tL08vkmYRoy1VvUkae/TWyG7oY08zc3rEJOUNc0x4wLtx0bZBQyAD5o02l1djNqRgIC/GjfTVnavYRg2qxUNPBDANpdWj7OQ1bQgFd4CijETEWIxiJokVzc21bpU0KmAAuF+rGBsLGEPWgSpb0I+o+ovfeZXCXlMJVzOPmLN4eNduHFewC5khaI3w9dYokLZ1HyNVE4
lZjuUZH4pkbq1fJwWNsbt7q/xxLhh9ZgBBeFvpLyZpR84KtyQOI3tn7GFxs890N2EKjZEk3Rxtq3l0zBkRbDQzpsqYdt2CWNZj0gpeqXmBhBTKFTZH1ueDMUboLjOEpuaEkwIjIxKtKE/1zEoqqw+aImpHqMisYRjAaSmZrQlM5WXS0dmi7b17Svsc27aRgZkO65tlOaMB1gjGmOfD4YCEO5EohVrvV2EmSmbYM+P585Pg27b5xTlgQnfPQ2vuBq4Upshh7lEhL4ZriSgWAIqokJllvIsFfa4T09Tqk0TJF6IUyDQSCkig09ajhULkhVA9ycu5oDY1gK2VyIL2rDXDlchZbLP74beCOZSsZ6PwvUyKMGsRUkx3wpiBTLQDx2TSGk0zv/MLj/dxyrENHM0TCWsHtnHZ53Z48M7bb7/5tWe/6Dd/4vah/8xX3v0n/onf9Dv+K7/7/WfPP/ddL/6L/5M//sP//rf/3T/3J1558YX33n6rWTu07Wtf/erNi597yv3m8fzrP/Kjr/7T3/nGG3n54LE0H73QRKe7Ypav9LfefLIdP26OiEUJ4KKvhJm17RCzltbMGSCseZEeJXmziLxn1/mie7LsT219TTKw2C+ZSTolo0kg4b4Bs8gzkM0Kj4+L9SPl5dwEIHNWhT1sPSJiqhJ/qtmRIqOBhpV1VTx6ZKB3ssRsq4NDTCBM7dovlLmQpiQnzC+0TWnMWvNmagJjYnb0Zkdj0hTjDGEMJN2u21pAxZrJjMwPM10/+stMc5YT5ATQ7iVOklBh0xZpQFQgSYBj7I1WaB8UFLZm+9wByByZ4IQEydRHTsumFDlpgjyL5nS1tFtGj6iZZ5XWLKNaK/CTEHWdW8C59tur/s7W2jICrN6mvlawKISVUEGZUCjr6p/uyztLwHDld18XUctrT9nLIHhJ5Ojrq4+slQO0EPNa3VcWGxarb3msA6h6KGLsw8wOh8OcMyK8bKGwiKVFcPKGiCgiqJVZjBkFpbo7XAGZcDxuQ8O8KYURsNUBkCvc0d1pjqXjJugRmTH6atUXvLzWcmDkZPVTZgkqSw1Qn/YsFH/tFzwkIr2IVJJ3b0DLDIfg5b6l+8SLuqVI63uGMk2jQQBTfeahcbfR2CRGBBlGmdBty4BkZr21jZxG6+2gZGqn6hJNAZWElApye/rsgxdeeGGMMcbwZnd3zw6tX/aVFNbMWnZz8ciWLqmp9l+xWCeLnA+WhQjJ7eoClUTUFZIEiFjbjbICcEBg0q9shSvmR1TCsU+U/fkSWZFZoffLJaPuQ1OFvsFmzDVtVSEjsuw0UxQNDgKKKcCMECzGZE4QXRiZGknCcjTpknYWH98eXn799Y/dnc7H24PyIo52eDhHRk4zP962n/r6V56M+PTnb2J/cug3igdvvWVDx7fePv/Nv/bjLz3+vk+9+vlvfOOnOm9vjvbtd77x+c+/9PWvvfnmux/8wl/8xS984sHDdvp//yf/2ZyH26N+3X/pe+/Oe87RzJDzdLp7/71hbVsU56q4EmGZEXNGkqjegSs1oubNcOCqx6huda34yimwCnpRNczMhOLz5Qp5IQG6yQ4+L4PevETgDt/axD7G6N0Bi4jWW0xFjtZaTWihMHCO6K0Qeas0lTIqWRSLaxAEjBEi66q2nBOAILhLGjmWuySVObd+U7uTyjUlF5Tc+gGEsKcSwyU3g3um8qqAW+zeethLGsqCjXV/8Ghk7z0itkMrH+Zta3NOMLeD15BEMuUZ2VovwX7rrljJXyPTCXAK7aoJSrQ0OFSu9o2eqgzuMtjDIA/kDiTRAdACFLBpmcGVx9zKsqtbrfJYgABGZjl+jcIWCqS7TmlW0pMZ0eDFelq8N2OKpiJBgKtFq+5h6p5cdN2ycqnSpKxmK928OLVrrFx3QPWtKCJOxaOFcmZwkUrlvq2LB3LSHF7ZjdcdrPCR25c8pqWyuDNYl0jp1u12e/DBs/fEU8zYDo8lj5zH7VB3ZeSIlMObWWttz8DKinFwIf5mLgNyhY9HyHFV86nkeCZCEOWVHNT7jZhAbj29pReYxM1sZpYTYJ0zl7EQuYi8DotJsgqht27ZLNS9dSecch9mDTYzejOWht9wsx0d3PPce488ayXfZd+WzHsNW1YZFCLczej28OHD8/nc2uHRo+2DZ6fCaWcTsFf2RKtL3s1yRlUQgKnQGumqzZLT1ATAA0sNhJVbKsCEaUwyUZ2TElbASbOORcgriwyZFVWODI2C2B1ZLvTXuQk0KGHGSAkR0fqKj7kOd4v7seTJlUvDlAI0yIyacxrcGzOVmmQTwDF6QxDn83h488Lrr79wGe+eL2huvT1+8v549OhR64fL5fL8bnzqU1/8i3/pX+v52uVZ3hxejT0e3Xr0988X/pE//t89bK/u49mjRw+avQCdX3jpY2z24isfvPveww+evvuLf+EPfOKT/Jk3v/zo+Pj1lz5TPWD1tq3b6XR6+oH8cCxLFlzZZrVSUHIfsTVLiaWdxoKtauWwIsEkwlsziaGEkrKyQoWaVPFUtdKoyrB0j5k59ztnhyqveW5bu1xOEQSYU2xXUsd1Ha1ARJSnv5nPGWYsUORK/rsqqFRSbDGxR7TGhMgszgBLEYqFG13fuzLqqatHngs6QOxj9M0K5YgpwDOSa6Vb/b8n5nqpWMYmCxvhfU3JukOOh6qbacbL/rxuA5ohpxISjUxmahqjd3bvl6lMEBuAMUrgOmQ7YJlO+dQAL8oDqFVCK+oQMHelaFFSesKu79AgWrmDYT1zuFrnXlvtuP6vBK6JVpnK+7e+bv1iltQByVK2UEAVbizGogRcmSMLOPJrgV2yGqiOTOkklolQ0eAlq6a+BE9SlCW4WDubVGZm1PVZX+62bQbu+746a6yw77prr0OqLkgYWOt2BdcN7Sk8f35i8+3Yb7bDB+88t76BMWKSpHvCM0a9p7rwPqJ1L/1n8XOZyGKqKIJm7t5gQPWmFQS2rreIcLbuF18O6AyxSYaZmcrVVJF0MlN7heRhfSOFz4BZygkRF+Seupnt1uDn2aZgjQYH3IxoqgghZvd+c3Mcl9Oc83g8AHOMcTweL+eRAQhwq+eoSu+ccXNzrICgKutKzTllrTbGU9k22bAMw5jJtJaczHBahpkJiyCDa6m/7/5orNCQMmmYMwixFAogkHHlVutDNwlU5ZISGVJ6uReX7rwgGPMs7UMWwQ1mVjq+xaW74oaCSCswVFr7+6mopo15APe1WxWJQ7n49808rfuLT07x2kvx2mvtZ37y7b/5177y+mvf7W3Ofczh33rr67/ol33xyZMPnr6Hlx4e/fD0+f7BtNPrrzyad+fnd0+fnPrHX/7S577w6l/54X/0vT/vO9555+sz7m5vHr708sf2c/vcxz9+Ovn7zy+7+Bt+za+xUM6bu9MzMUmbma23J0/ffu/dvX+iYkyRUigrYrEwDCfE8mMmlAVbmXdk8brclmeWWJFjAK3VlgxAa11Z6uAah6sWXOlDOZtbt5aJSLbuBu179HbDhn0/O9zdZyllraXWPA5cJwCzCmZebR0i4
0MNYcYAVj4C7v8Mr1S4onsbyeXrAqY0UAk1MiUTASYRfWvGNkcSOWO6O9iIjfkc5lCl5Ba7DQrK7oXNWIXWajgsqLdE25aa27bNOYnD2Gcd6sxwb5tzjHNEN+MsoWOZnBpIiSGsFiaLvZVBURAwlorTmnvprTxxUdYcOwFXNDFlA2rQ9Z7jEp2u0LiV6GMf/sPbhwW9res5NSOTi4R3nVGs3IMWSPfR2q1iZOb9hTqLgcNKbiDub5f7bw33Mm/cX5MQAuK8nqT6jr01FHHhGhpkZiAzqmmGrfzNMq5BK1PwzCw5q7KG07aAFt7t4+a2jYn5zHbEZn1rfjfjHMPM+orlW8uD0KzdVVVYrMmG9z1QXsUHdfix/AyMXHoAkDTBydy7oXU/9M19Kw4oNKQOCmZiCjCtybJbrw/nOuIUvKQwbN6g2GQtQeckL26+69gs5oR7b0dJ532/6Ub2OdR8G/vlcp6Hrd09n0ovWieQa8olmRkRreN0OrfWz/sc5zvS9/0C2ZiXvOJy5oaDmy97yjRPUr17iTS8vPPXog5axtaROadmKENzaqbC0zytiQY00BOudC5ksI6FVRReuc0Elo1YoUCBEKvfn3MvJKF+Vn0rc1bWvCKUoYjIUJZI2u4PpRcdyOCZFRvGCJWoh0mTZW5TF7SYc/+uz712bP2dd775zts/te/f/ux3vPjya/j7//BvvPPOe5/65Otvvf2VN9748o/+w594Pp/+7b/7Iz/5Uz9z8/DBl7/yYz/9Ez/7/re+fj4dvvzj33j0+PD87oOvfu0rgH7kR37U7fBX//L/53K+/Zv/yd/5yk/97M3hlf38wt0HL0YcKLjTu0l68PDhm299a4+NvUNW5exK11wjM67jmLWVZmBsWaaXonuv1HLAIvT/B7Cux1Escd2iYNfNsT7PgX3fxzwVlX6MuEdUJGUs3YT3zazJfMyZOefcianclyth/dCraS3gZOlcK5YVjZU6v95gJNIRpnSmcxAXYQdmaRiKaMch7JmZ4RkHxVEzaNMsD9ux+Q2AMZ+tB3uF0VV9t492o/cQTZEjZ8TpfJ4jT+f9fJ4Ztl9S6RmqIGMAZk1iJUTSm2j7jAV3xEU6gRdoACAa0crV1tncD1s7NDd3rTR5RcSoxqqalvWtsGC4NchKcVWZAWqQKf16K/XalNI+/M/vH8AKeq4MK0iVMH09J+veFJZkm6wZO+8/mWv51n/uyHE153Yd3C2X5RyAUtIqBdFoqOSsj7RuiggHaxeSc4GBXNvnBXxZrWiYpNz5gO1GtoU3ucMBzIgx59ZIRTOnNfgmuWCiHw69NTPQUBa5K6kcsBmKiIXjolLAFyRVh7/33rtf37+b2aH3Q/dWIj2z3vtx43FrN+3Ye++9VWRCZY2Zmbu7szVUomW5E66701iGpWwON9+HZUia0I68KPasxPiYM+eQEnPuylGInPmELje3fvugg6N1+abUhUupE/UlkqwftNLoMnvvZj5n9gLENJ2LVt/EbOYeKWK3pDWNudFiNVuslDVdM6wrjYEkiAIAClCRW1F4ExlY+YkCmAkJSl6jGCTU0QTsGqxboS4peGs59hVyTyDH2tsQBpqvkMqUIpVl6ONwWom8hWnWOoGRE+45LZmtAxEKuB/S3JL7iOenu89//tU5zsTl1//aX3F8/EJrl3b85q/4Lz/k/OTTJ++89NLhO/+xh6fz+fT2P/gnf+j1N98bd8+++eDV06c/9+J+0Xb7zs3t9jNv/djU6w8e3L75rZ/77u/5zGV/88Fr+Ln3v/bqpx989rUvvPygv/3BWw8efDzHs9baOYekOWPbtm9+8xu0LaG86vLrsi1Qxd2NJqXMRkaMcTgctt73y0UFmX1k9SqpOUFFFFotoIxixOaIsbmxcYy18nE3wCbazEvfSOM+ZgQPh+PpcmrmFXEXEdVHn/fLRtTE7Y2ZYc6IC8p9Cj6nJJg54TEmGN4YM8o6UIKBlZzOTNcgvNSamTS4mRkt7BnZlZZlv4NkSghhgE5Y5nQPMbo3324QkXmfnlx2V0bYjMv9co8lv67IHq+AwNWoziH3RviIk5X6Kel+qLi91m92e97QYoQEanVloZkpWkBJNq5CLLecU6lhnsC2JAYAYMqOsklBhwhM2gTcDLAsnzIAkGnxCwlOkOQGJDCQAruw2vy6yKuyo7bIpfAxWFI5U3Ku3y7UQjXWSInFXteKUC/86r6r4Bp44Lp3Y6ugR5ZFV9ZwDVnWNj51aIfEzExYJechIrq3+yYjpNY6cq7ytN5ykGiO53PUKymHZLtGghzaACw0/Kh5vrPczjujyc00M3IYYOVIvVa9bd/PBratqYgrAChZXHErtM6rGY7K03JBXg6peIBwlj01DCEOoSu3VK9rjiw7fgm6ZvIkiYhZFhiZ2ZpnRrhKvd9II7zMQQRzzTmtdbO2j9PW2f22Wy/jhght/QBgDnhrvXVIUtTFg6sRwIr5nmV2tHq7MfaIrDWcInPPSq2l2M1bHzNH3thBoYulwdMMtWs2JIdstugiQNtnQDBYax4xIEMsutvMAGZlG8pYTpAQuruoOaLZFpoDMxokWkwHYdhzUuhFDEvkjG3bEgoap4emmSKitZZhCXnzZhYRXGRVmWPqRKO3RmZqbBXibiBtxnlDj3l8fHN8W+fv+NRLevb+Zz79A828t3mZ/OTjX9Kd5AUjXvvYD00lIvFdZpaf5dj3+blP/lDvPvZsbZtx+txnfvG+n7ftGLvMbJ/zV//y7yG6Pvky0Z48f7bZC2Oe3PqM9rC15+O91rct8dWvvpP2sA0MerlU5QKhEVMz0hmAeaVR99vMMeYJlpGb+RaleYRlpnun2R7QUoHCYJZMXBpH6FDo2MgyafVEyOaNtX2nxaYpJ4EwzcY8dhu7Eq172+OkGJu1eZYcpUiw6wWCYjRLvbQ2YOakW4Tm3DxbQrQYeXF3F4wd0GVad9SeHaWwS0AGe2QMWGoMWNLdXc1wGafeu2ABJkQwchgoFv/HAeNUoKh1s/eNi7pDZTZrFd4LKnWGmsIVQQvBp9xtQ2EqiDn3gg5jhNNJb/AqDMtJbw5vDrWxZpXZu+eU0nuLERDMHDFnBsmmSBBKuhsyJcJcGfQi4pV9Qm2/w+qO58ykkvAKfmpigInZsUD2KWmMYaUjwg0Y5YJq1HWbHcaWBedVzTI3URGBj6xYYZmKssNcm1WZWQGnJDNkS2sToXRayWKpkSV3agTTyqZNxqlsSuasmG8zo1rVGM+ZU3JvnplMNlhMlzOzMqCS5IxBNxjOZ3Onsx120rcxJ4PbdN7gknvrh4g2Q5bRzeee2tK4zTktkDF77yhq9ojZKLDJ45SzgalORqZvPpBUenl7zZKQhQFuGxKOm/0S6mDuh2MHbAa8uB9GYLr7nvJaEhQxlzrveTx008gdm7c5LkQPOx4dl3nX9ECI254ZO0SiDyRxNxJ+82CfdLfNQcvKz260nczM8/lMVs5wWRQdU1NTp9MJvu0j51AMXcZu24FuE7Mls9yRRojmE2zN55zaS0sbstAK6u5NB9g+ZqpARy/Q
VnNALVQyQVkgxerNmVRE5fbiEnv1eMnMMIq9ZlQymUo2IdyYZlNJyDEwEDBBZg4jCDdAQGzdpER2qtxFaAsLVplbcXFay9nDJbW2hQ/a9sElHr6wferzr7x390HK2nG7nG9uDnJpv2Tocet5Oj1tdgintzzvQR7a9gDA6RKSIrY5NAXh5vk56a25JeeWr5OKvMiytTnnJHoqs40nl2l+nIPTTl/9+vu+vbTr3YMdpzIjmMXoaoNDUo2o5VvLslhIQTCfUgg0BwVvjDky3VAWqEEhUimDEcat6TIGZK15azbGrlBrh4DU/ZJRZD+YnSJDdPCc4YvQ1Ugat1RCYdYkzFlrumY0BXYsW9dkZMJQ7EemJlX09kPtCSIDqVZMnqVlL4w+I0cqhYrfA03wCUHpzW8VhX2bmaVEAe6Lp6UycCJBpGdmxlybMa8BfHHIctLbJglCa718V4weGAS9HGV5Pz9FHZhcSyYV+aQ+AaPf6wzLPseEmZEoHkG57C14KIbMmRmrC1Yz2wC6D8lUjzCWrV5mGTHZle5YI3+B1lx42RqbfTXyGIvAklgGgCDp5e9J9lrBa8Lc3VpZ10pZ9eiKxrD7JkRELaJzoeS2wJxUKFb6cSE/V7hAU9XwMgrHzkNGJfnRaSySl9TkBmOaRJNX4h8AxLmZA/BIAF4VeY69+cjpru3maO5Tu7T3djClo5ssNKAUqvvJuqJap3CRa89gc1IZc8JIK/dRkughsZHKCaJddcIZI2AHBHqX05w28rh1wabXmKkCKqhl9qTIg3WCimJzulWYPMj2qJncDdkMoC6kOXDe9wfHPmJu2y3pM6YHM9Nx0ei+HRwy0UWlkLxYyk1ZyjzGJIXe++XygZsZG8QReRrzbo65z94SiEvu+4j22kMp+ex0tpvt+d1zbocxLm6cBlpGpSFkRBY3oz6TANi4MW0ur4NuUABZaqQU6a4KRxBmwpzkyJFZdHRODbt+2gUKmpnJLmMkNC0buLmUCfeg+awNnhxQ6Vp7j33kApElXvMgiMw0FFHBiEqH7nNOA/epG99PE689fvz6Ky/GeT+/N9755uXxJx49fefZfD5ffrXDz0/e1wsvPH72TMSWeNbb7YzWumacbm5uM8Lm6eiWkW4ty4DQoUTig+P2wON4uH3w5Ml77r2ZjT0OL9wcH+H56dkYGfnsaz/3prVXDocWp4VyCRAcaWUdlUq6KVNgmYJmwqw7ojr9shz0dKTcXQzSixYKGESyI1LerAzAqokzQUywaxyshyq2oTZy8tYnZmvIuQu2HZpiRlwaW0IwQFbh9ualyMbK7q5VFbSs9AE1kcggWIJsgtO7K4TFq7tqJxfJgcpirACL/YOiUd4PngCKB5spyEuNfm8RXLwQo3trZrP+XvdGzpwBK7OXtZcUrAqu+1GKGSUFULMGOOFrV+gFRjNzxpUnBlsvxq4eW+vsXWEToydYizv3rYwpUVkVIXfLQAFcUi6vXRaWkGZbhaRei/s9ChA019VfE1zh0d4muIxvoYU2CADGYhwUown3LPvrXvQj/yQ5xvD2ISB/XXeVZEkUuN6sAMwrRn//N8yi6xGbh8e8enCSaFgO9Fl3HpXtGq8KAFZBScLVfFJkCkdr60m+BD0bDYQz0EzpQ7AuA8euS86ty7JnTqMEuKGIjkp430gkjQkzVRqnoFRThm8dxowgkx0NQsINBnYSJpmgaALVzFyma15FoV4Ii+tHQRitNbjLfcZkS2Mrm13Z8Na2fnMZd1K0tsEg5IjkJV584XH355ubQRG7OVvzkhpuas04S8VmnIpQ4sbteZhbdz/qYIE5EpOjm9hBN8vDobfHN48y8+Z4VPLh4eYyQ7jJxOVyAn0EQr32nMhJ5fDedylkQmb4dasJeWttKjGzVBilR4gYbCnKvDfDTBVK0IKy8iYrxD2FkMzZmtDGpMmbn8duEgJBlNmxpCzBEpscNidJc0qlUip+TRdnoZFWhh8ciVRmw4vUs/Pz8xc//doLj+dPv/1zP/1TXzvl4dE7/uZb7949u/1t/8yvdnv/nXff+9wnPvbv/V/+rU+9/gv3+eSb3/zmx1759Kuvfex8efLmm2+9+PgTv/E3ff/p9NzMYMg5W/MIEG072l/5K3/n4c3Hv/rVr/6mf+pXejtdxrOHDx/+5f/o78bl8N1f/NRnP/PK0/e/9t5b7fbhw8vde9cW1mGF+k1UQclgLZ/vAxJKeOshICBRwZhK6wWRJ6GMSBpoQro4Z1QsnPnMzJyb25YMYM5hcrhvc+50nxEAM+Rtm5ogvfs+Rm3YItKblqrPylsihXRiXlkKBJmKa5onszzH2Yr9ZoorfQqwZYAtJJaLlHFLTtpcnFiVInHECvlhQF4lw6hI2HQz5RJGGwFPIWIYrG51AzJqMGRr3spY0d0yE2RrW2ZKF9Su0Ktkj6qp9UPvy1mV8lB+VB5t1zFRghtoLZRzuXJqXUX2HGZXzQ28yTypyHkgjUwsRa4Brcqh1g7wPo16uS3WjQ1euaeg18GvJTtAq8hlSPJWwoLaGFjOogzCzSR5a3PO3vsYo7mTnDlLVlIKpgJpWrMxws3t2uBf16csn8kC9mPdeQZT5CkYRAP6XGoAM7PIpJErwN2Sa428Nu6oT7CG7GLaRDsYKzgFYutS7DFv2xENcb3GahAkmowAU9mcmTq2rkBFccScNAJsZOYsS/MQC7zOjKnZ3JxKJMxTsdGc1jYPzbJYSFwIp7mWX/Sq6U2t+SKUKyRNpTJyYJYTaGuWxpG820Pszhb7nFv2rtb8uB0yxn66+MbLTBwzocbG3jxlm58vyFyGQEZu5ExwDN8e7XtcBvcLR+T5FKddmXh2vovU88u8XEZ769vv6l6RTNv3vZXXuQUQxpKWkgowkNn1qImZcEu6wRliZp7noDHFScxMgIa0VNgxXTElgX4MxJxJ+DQGM4lGtsCGLPe84yTh5VDv3jCN5kbf7VTHZIxpbAFKs7sJNbLUE0oThQawIdk8M615ZdYfDhvJSz6DeUKf+eQrI+M0Dt/zfd//dJwf2fH48MErr33HzsuTt/KFlz/1zpM3fvWv+mUhb/34A7/0hf2S3f2887u+9CnqtZGPQhZ5UZzdsY+9+aH3w9NnT154NWb8zCe/w1qzud8auecJ/JnEI7QXzV/81je//Xz3B71pV/cbXIfwVSwoYzVHOjQPJUpQTm/sV9wg4TS6UmY+R5TYHfBaLa49XbrTrcygjFlhKUkzt45YpS+NaX09t33ytrXMnDEPzccYkWjdLa3YxEW7MAcUrBVn3PdxpizOtg7mMErpbZZxtJkZmjylisStrV8KkRmWERg0b0W0FVKcnORxNYiZM+VelrasmsgrXVeLj5Gouwe5XIYilNlaqycPQDITubKYDWVSTzOrgOQAALci/aaEGTuXA5u5PEJU3lMDVSWzJoKlZQQYhBlhjQqrn5B5/YlzlntwiYGuRbPwLAOKQZClN1YSCInelBnMqs4opLF49FdWDIiShgMS1CFmhVMkISIrv3TR2ZXM+P/p3z9s5OHLnicbi7uM6jwW2AT
ARbH0O4QolootwU71JBodiy8REmWeFWRMS3MhBNBsWzcTW73+6+wwEZlWaBRgIjJLhr/fHlpmnu4GacetZwrSHRzs1bNnzIAD6LaNvEhSzrLyLGbNHiFXzDnmSi9iUmkxYus2gTMBYpswWBqH0zOBcl41CGVMT9Pp7q5vm/s1dnwxvmDGnJaY6TuMERGGPF5u7da859inpbId+vbw4U2M/bAdWmu3D2/IaI6YrIm1rHKcZsIMmfXMfjrNlO7u5mXf7/Z9j7mPccmZ4n7CiHh+2vcRDcdeY3trDdLt4XZteHxT9cxl+oFOHQxufUqOVLu6pYcoGWKNaTMRapI8oUCQEvd9ArAGBSJIcEYbKNpdciITExiQX+ZE7BlMQmOMMeVBMKpX8svl4p4ZGCPc/QK/buUQxkxmjExYI8mBWRQeAsCMyE1b26zHez/4fZ985dHDn/2Jn33pwcd+6mtvfOvdJz/3tXd/229/+Wvf/Pp/8Of/3g/9sv/i4fBNnexb337vOz//j33x+77z/PztZ88Pj1/8jn08++G/8KPf+6W7b7zxtd/xO/6pfX+fOvvtpmTEuLltP/jzvx+p3o7n0zOCvbe7O/2qX/urRHv2bG432zff+lbY5XCTka2m0WpYqCTRenPnIX3fdxgbW2stA4DNOZuxyHfm5btCozLlXVLw6qFRz+FGs8NU+grH5k4PS6MdkVFdYm82M4r07e4I2/d5PG4ZMxMpHg7bPscsfXi54JnLJHFm+nILKLchiiC9QcvoN2eMK7yQBm9LXWmoDMWVoga4h4LEBoiYLHFIdsNiBx36VeWANDeFY3WOSpTpbIMEhrslVIGc3jiRIwZbs7ZFuby7Zypm9q2btSuTj0pbOnj3fZ7MHADDrfwE2CLLnMBZKhWpbDTNoWlCQmVfQ0DwmiduyjV35gUUvSMbYb6duYSUdDcgIwYg4819j0y2ygWOCMhbM12B6bZtkiIm6caGxX5buieSceWzi4CiBplCbMzsGmOSbr3IGPczins3FvaZc87yspeka+HFwuibNCqDE95ILwmhs2FxK+2qfmVmKpEs68pK32NdEhmtKuOMkoBZ7b7blsBeuBiULhRT5TSETrKhuQC5iQ5tR5xGRut93y92sIkws52TsGYG2cwhormX73JDZ5rTW1sEqmbublsHW5e0OYXYth6QE1RzuPHKFZTcnBS6m3lxaEqPKSiQj5pR6GbN82CN3G5aOxouexC4ORyIQ448z8AchJ7sz6356Xle3ZEcCTPQpqTWbGaU19NUXvbZkmMiZPu+R8y6fEfMse8jpuZuGe0//ps/U4MYInvvOaNvPue0vCW5KJELOHBjO9u5Tv+h92o26/+anSl06+6OZVhoJuPxUrFWJFs6SQMBu7nRsVyslwLArHmI3J5pssnDMJQwItnk9F2F9EWSjJkAstYCUl2/eVXqxVRjA7DHrIaUVM4ZMabzTP/SZz61xfMf+Us/EnfP3r08efbkTbb3Pv7qqzi99dZbP/HK7ZPHh/e+8Y0v3xy399972r7w2S//2I+Z57Onoj7+5S//2Dif3nn6xp53z54/v1xOZFZ8O2RtbuNZmvnzGIfDTUZc9ux2e36a8o3Jm+3mZ7/yrTaOPdDPQH9elpZFIga1cWvGfd9bs207ZOj5+RQRh8PN8dBO54lAUX1PYx4OhzEuEI2ccbXtrrZ3phn7SKXPgWYwN2tepJx9XmrlaN4anehzzNb6LsJ5EhNOEs2HmGjpAY5GK88ACbGwg0W/rSQ0SVyKNxEsVqWZLchfsxltZdZkVjdUnLzyIr9qSs1o8HBQsoQ1FJxSHhWZk+ZYy8YGoRyszBQaNGsoRMPcm7HPlfjsZqRVQhJLBiCbvF4wK8ZoxWwaZTC6W2ttzpKkDmn9OJpn3Wcqy8WeCrqq6LO80AngTAImtwTTHObOvIoyC0JxSmWYXspVu6ZrXvnpTNDdWmIqU8xiKM2M5Qyxanq736hS069r50xtWyfKvcDFRXzK9U82o7EJZfGf9d22VuGw1xdZMBOXGjnHrAjvirZPSCZaErOkFbXeNS+sqVGTQGPJ8dIhb8ZimVoQSQOJZNCaMvdwp7uXxwGNKPsK2LZfRKK1BiSSinAfm5vvs4mk+9oeSBLt0mtPbjTGAQBzUDbpW3Nma5mu837p3iQ1x+ZkJBthcbxxAJZqXcB011U7Bfc0x1aFHUXfMG8sJI1tN/i2WaNX3++MMYI3bb+7cCpPSYc57vbzcTs8GxcJ4mnmUgNsvhmIgwFphowZEVTEnsgcBshpG2Zo7KTmwGXOPSOhWT7cf+i/91tJjpHGFd6WmplZLi2xHE4tEMtTcLTqlM18zqlkRgC4YNOMOlvKsvlWJkbZZFsHEBV2Cp8jLm4ONTAzh5JegraKQZO1IwDNOGxNBOGzziJVYvrM3LZjRHSL1tq9eLVUBpJolZHjADJj6z1zNsON4wM9eqTT7/ztv/wcb8RZJ3yQuoxxcd02l/a7Jgp9t+35OB2jHbYX5jx7035JcF72ZxES/OGDlze/JXrdOud93/rxPGZpylthLNYqLmde0rY+NR/2+Jmf+umvvss7nv3uuPUAbOwrkXxcjRu3rV0uF8BofjpdzMydmRl7v1wuvR3M8fz0/KWXXnr69KlowPFyPhtb3bgpP51H8vpfhTZvBYfMuZsjzTNzjDH2OB5vzdrp7rxtxxSFkGJr/XQ6HY/HOWeMffbbuQ9DyfQ8puiN3k+XO1b0lyrIIov/GyoBrWWgtVYEjPtfUlw7lNUMamLXtEbCc6KZp+XQfuCDYljiQ4cGxJith+RQ1xIulQQ0xW6CUb33OXNGBNi3Q8Z5Drp7SXcR3szBMcuSqZpW8OqLm34FFyX13meUNfwOHM0zYrbWci72nhnImxk7XN38+kYzNHPKbVuWEApA7q4Q0NwZuUuxZMlp7n3Gc4lurQ6VkGSkZuBQ4pzWDApdlUdciQ2Lx3Lf6FDm7hGDZOa8vb3NGPOyWzeJkXWDLudIM5sjwVlkuxIeujuoDNZkvzpWLt2x8eBNzSFpDmUYTN6A1HZopvTGnNPMWjdJJWjo5lIocmvWuwthi2yhti05UklUD63NuTczg7yVV1UcDh2d+yUz57alO7sdIoZ5lkGet2O91MPWkNNp2mCFAG0dUp8KCFszRDPG3IvBdLcP4zb32UeCRPfuOPZ26IbmQR5oVfSkxfhyp1G7SlOdGbgepCSZOAiTVA5DkjZbN019cL5sbbMoHe7K3SY5dsEc5hFDSuV+3DYDtRdPOkYUz1XzMgkPu7tMkl1Sjp3kTBuJuERCk5GZ7YXG1pwPlgmce2ceM9VoxX2FVewpyoRjj1YEqRXBtcgqPjyJDoUU0AYmMCW/ypFXeM/VjURDEygFshKCLCCJ2tdcWXBYLMshTk3JanYrT4s57yReIpE7UxIDGlJtTpGNU9it0nNKhZiZkM/x/ssvv/oPfhZ7HPfL0+P2YudNt2G90mkXC6Lb4dGxme0035qj+y1JzYfKits0v3E7NnZngLG0KjZd9OoGjSptliKJkhE+fuH1H/7Rn3
j77cvN7Uspf77vmUn2VFlwpNFAnQZgB0kQtgc3bJSExOFGR96WnuIGHeDLD15mtrDpvGkL1AgR4qGZdYZmDHIkN+uS6OgCtKiiM2KMcTgcMh9kZnJzBjmQMm3J8vEMyqSjp6U0GS3hwoUc+YBFBswawUqOKJprhidmRlik1M01o/Ay922ZkMSCIEYOssE45gVA7z2HIgy2YwQjI0LeQnLSAujc9zPbqP3tLMk+OC8Xd455YRH5gNqo5nnP1saITT3F3OAdjuH9GKEsUUHzOaLRLDSby4qeKM+LmYXtu0ZHoLmNcMxzzga8/OiF2Pydd9692Q4G3/fLnNOay5iZvdKehTECxhDVJOA2FdrNZVSH5sxEBFP9cDXnKGUWQc2Eu8DRjUhlmlsDI/J8KLOgSglaYTUpyT1J1Q7B3aVzZro30JWzHMFgUhqRTlwiWluWk8sPSnL3KBIkSmeXMAc9iYZoTpccJrYZkjENDaBlMxKuUiEwbb0TJymVsYrJVeHdZkZ4oTSs9WnJdfMg1YioRlO0mGg9jgfbtof7HBGBxtvDg4g4XxBSJArGf3oqimbq1KtVleY93pV5YjChmRqxTo7m3ZxzDjcjMFprwCzCqCQZQwErs1yYWcV/p3ZONLZRuIEbMigMI1MNS8ISUjPrvd9pR6QJDppZ+aqUxNrAgrvL8xKRW/fMCfoEQwmveNPhZmOKlBexeBHYQCGW66cotP2C/RIlXqApY9SV7jzXeh5lm4mKLQZiMzMvb67ioqEgz0kkkLCVNFPR4jd+uDZW1RthwXa+cQnkBADXmEq35YdSiGSlz6CA//I4c8c1lQYoM86rOxIRddlI954EeS0fWp792vrN3Sm+/saPSpx79HabO86O0shpBWnCzCHzYJncVGKGKYvzOUGyX7OqisjRAELmbCUSqQS0ehEG9yb4bP3r335yOWuP+R6AjLb2WgkAoeUHO2a/dmGlLFfU5ZSHQm+ALF+eEhaalY8fSE4s4aYVCywUhqR5ddOuljncZ0ahZ5m5JngzZpAwhqGSKEBDkhOp3j3NBTUHwJny1nxx6WrgK4KHmXk3lvKYlGjNKchk1pZuvi3KjBkpNfOC+Lqj0H3AMhTeSRUGYxQp7+buVAPZugGwRrMS04XbNEdr/ujBw8tln3O25i+//PJnHz/44Pz0bn8u4INzPrmbT05hOGz7uYjtIwPIyOEQVyCYAtJcSRUipiZiTspmNmgakHLhonj06Y9DsjRI9carPlZwvAlzTliLQhqBHEaXOWLsDa5kMvfcHxweSNJUrWEAZM5Qbn474pIeMFfA6S45Y+wBX5k+FYfMhbmv+SHvnXbqYLHXIjWLdl/RDcDWe9GBFvYvRQkucRQmihCnZeWdhNXZuRpPzGQN+Q031Xte8f6i+kQIZAAwIJYQ9mowx1I7IRNSVNUbec31LSsLUmkI33ESUWBUhQ723pVpV5qWFteJJfYBdxYudr+rpQAML1y3Wkln0ZAarJ0LdnGfuuc4AGYNQDNZ5ZpVHIxzs42pZn0DEmJjZhoSzQg00VkKzpUufWsHLH7r0pFpjUS7kRZwvyKiKXfvUfV0iRJIKeHgBSDVDFNpbKEkkpnGB5KW2cPv+pU/H0AZolbFrIYO5WRUgsQynm00g4Hu3d27eV3IQK5ahmZe8oduJvOE2lxuHzAzXf0rSDKWOcZVbbh+hYxkLYird7h6YtyjsbYsU6RyncbCCqw+v1Vr9qvditVd0pZ8O5RFDoOOx5sxxhqxc17JKgUsGGmQsVKDxCtLGNfFlCeRiLVtSjC70tQHBWb5delKpADUJYVCnA8e3xwOh/LJ8V4X3Bp+K+BRhOWOle4FlCO+SixpmZmx5oDq1DLA5oiagxjCrEW7mGGuHJgJcY9QDqRFGA5jDFOatdpMFGQfa6iv4AGtg5IxZz8RHa2Lu8baJE5hevU1RZaoz7+bzyyTrTSznNUDzgoaLQCKrB05VQSeJK5ulwW7mTUlwBGGUV/LpGULWpDQyftBUqbRPBP0Pufs/XA+3wF22I7QRvoYo7XW+q1itoohAJ3t7u58npd+vHRzFq+GkGRgjpmV+AspSptkKEuEljPTBYMK9Isxg3rkLSIyqiu1hGampO264ZaEqxUJgMlZJuQ55lpSGPeYnYvN4h/aF9ct75GX7pW1ZJCFZlJtJZt+6PFSV2c5ZVZJKlG3O+ecIoyy2oDUnwWIbKuOViOG+0cJbVOkFKvQAgWRCdnd1r9XK1hIkrcUgsvKu5arKQm+wkBs+eKWxam5e0E91TnVysbMaDtQ+/nrM13v2md1eFY4rVkVK/+o7EByr9sibYXh+L2wokpWQtfV9/pVvzV5KQ6X2aoJ9fL8YuWSLq6khKXpVTKXX14g6AYUKbk7ao8AXd+gIuHz/ifeHwYDB8yqkNrVEaKMExl1JutRLGIxgcFAzQmZvCcF5LzPUATAH/rO1xwstnG97SjxlQ523X3dfyY0qdJg4WbmrHBukXB3qNdzZ+zWyvG4P1xj4/WjZ1lHWN9WB1d9hy1OG+zA+mSZ6r3XpYIrB/j6Za8XRkK9VVtBs3o468PazFavbSYFl+aS5jkzzAyyCPXjIQMRsVlBnFdCcsWTSmrGOpioKqy68+YgKVmRtgmYpzNb9B1lI1Hr32uAvGW6bbC2zwtNJBTq7WbHudGLpFHIaVHEW/mdyUimLWir8Awkrm3IdToRrbcaV9ZRXpmolry4MjDp5pNqVoRxWatLcZ1saxVhI2YWeQ4GwEEpwGzI2BqGLITOzGQkvRc+u27Q64bQwEa7rEEEnDCzyZjQ0TYAoVkTA8mZAVlzZuaYSVMV92ad9Igh5tRsRsWs7lJWAWYeVxr86XSBG8nNmBnHQ7+7uwNQ7oyXcbkb5iFF3M1B+o33OfdT7GbeaWVorKSIZl5CofvjJF35VitFquylV8tcTDPFsh2+P4GSEDl7vy8etWmsgzGnCenO61d/vRcXp/B6fmo/kUk/AgFXaCpZ0yEA1Fb32tNwxUHlgkOt1V9lBjZkpmMjZUgAoWvrg3StSKQ6zLnCT9Kb4RrqzZVhwYBM1pplVkRUrz1wqoo2zddaPa9hPvVMGFQdoaSqIZFlph9e1Ng1B1KYVwkIVQzGanWv1LgKmDWzKk+X2Em2K+1nzbuRbrj/Hu/PpxRgl2QUBUTOq3DBtUIsCEekOaQwZwxLAkaUE/8aPljBBibUuFVm9SSV08EiRwJAGQ/Xf1gEpHpVuU4L6FB5wbIKFMUIZYMibMFxWQwuJcGAwsGpJDxR901Mfvja2jlGo1WzWVs7gBMROiOBK0usYvKAkqtfiQqAVyynkDmhjUVuyE4rs4nuMyQl83qvltm1oaqjFa6xKpEEa1b5zUxWqvK6ma9ZE4Ug4Xrx9OY1N7i7tQXhOW3HrG+6bojurX5LNiBvbQOmucDZfHM/KLG2e6yt7BpilLuZFWhQd7gZ6ebXROlyJTSzRjZwjJLzf2R7aEziQFzyrHTvBltJgDETnGkNKYARsVjpZsOiPvb1AEuEk1ZeAr6m3SJUeAJ2qX+RH+kLWAu4lgpNG
X0S7pNyKbQ+0g93ZcYMOCLBq2ySWVAacCFmxMYerj2jExv7RXtRCc1MdBJWDEeSDhfolNRktaPakS2NZJO11hLh3oXqTBw0Oyyf2/vJbG7b9Rj0Cbo3l3ff5tVabjvc9N5Pl33bNmv+sRc+NcblcNz2fd+2rbUmjX2c7+7efvHB45S/f7pE6njYRpzfevJWnC7dWy/LAMWIfcY+xu7KKxZYrRCgMCjO04ARQVOjTa2xbte0q+WsrnB5ZtY5DyzNC+sRBUMzInz1KJpzroQjXB/Oa+9c/7fNZTc5YuWRFT5QfBh85FdVEFpZbqw+oKyV3U1pULlzVxRaFbWSl6mm6iJcooq7NeSHfz+vOibCvdVTL2h59d9XLlwbdpVdxPrduj/WCF54QGZGtRWGesIkGVATJJbR9HKIA2CxBlZyLJA2SXLwWOVoHelrN7Y62f+cV376NBTNyUHK7/+AFAbPTAMWn1MGZFi9tdVT1xckY84dgMlJy7SqlClZrkt9PZIyLS2rxGs45PXzMTAkZN21waqKZqZosEA2mBa8QwB5NYYg4aWZW52G23UwCIi/8PMv2XWV4Vd/ZJIp5YdWrqutFsLZQOJDo+cr3o0SJAlGZZcCvJb7wjruT6qM5NSsN3k9OMYsw8jdzBQgvHIhMvNe71GngVz4DAAvPSSgej+kCUaG9Xo4qh2oDUP9nGYHJTKzdaWm2wHyQ+/ktc231UyZ2eY3ay5zmtlymy4UayHuLqe7d0N3jXT6ioyoARNuSbSepBPe6+JAeqX/faTt0pVaalaE2jLL1PVSKSTACSvnGVRInrGi56+N1XJ+BhlQIhqKp+ieBjcRrsxMNkfxthwVU0ASFat8hTu1lhcWNHe34G7BxhbBidGbaQBo9qG5POomnntCcItEM09Nmk1l0zVouKzbaSU4VpwA675FDDDdfcxi6FqrM2YcdfyTXV5EZspIp/V9n+VXOnATEe7N0YkG24ps47g4q0F2Uoq9bXPqEpoEEFmpASSrVoYt4TSvWcEFP6Yum3nxu5yWkIzM6PI12VXS9RXiMC1cZV1XV1tHuSHS4GKaY0Q4HJJf24L7/FOtndMkWW5P9buZQTcXqkP6sJ2sRzhl1kIlFVrZWLWWQE6D6AbYcgX09dLrGamGwkBSe9pH/2aSzIp9QXdmlo5kuVJLmh9mH+Z17WWUSQMfaVfv/0JDT6iW3lVb18tYRi4pGFZUYdTBtWVtpvWBVC2LsHI5yMyM5ZsY4Tgsf/P7DKZa7mrow8UccI3/jKY6+Gu8KJI+wT3SmZDqYAhlMGklohQK/0hQK1Y87+0qoOA1CaNYZEUVq6+5Xka0qB+amvVSTZ6Zfu+Cp4qHLZwCZRa94BrYzGWwZYs7Ikn8RZ97GUsqLSeRhbtYkIBSs6I0FuKcci4g5T4Js7ZiKmTfypW8SSFMok0s+pStI7K2aE2shdXkNQitnM2xk+We7xFBX0h0dfckUd+u1qe/W6tvKCKSqHJZ0/J9H+FXoEpSZ6vQ9967cGXJq0b9yuQJAHRbFi7qZVsPIFGX36L8NKix9kYtiQYQU+jF3ueVkSIgnT2O3WnQ2hIjvRuQjps6WlUTa0oguUyZa5g1tFYYltX6sJmz7vJCqLzRE2slWVnBvjoIp9PQPaWWEAHHpjAezWxgws2dMYaXnAoy1cswWX0UzdgYOSGnmWMwGrCxDQAcBcJcy8OazJRrXgYpKwtBILJx02pjRVZLlUkcWr9WgcXsrqd628otIIFcfvRJpAYaDd38vibVLgvsBDKnXdmEoQmH3x3YprcpzPJ+gW2IFC4GRqi1rfJQ6jkc2Il0CZWVY2v5L7QGsbeBtBDcwtklyRHTUk6DYlkeuQXm/aGtAzyv7uflRJY54VxjdVpdLbrvf7n2SR4pcJfOMxrR3FvmRj/nrK/8fgKrR6Ceo4+U+/W/xUYlMgyEea4J/sPFwColi87rcxlErs6sHq7MnKrI61FzcyhFVidBOsRkkQCEMIlscV9DVT7ruViD1xd2D4IVV6T4epCgpBZrYsJ6AvlRu6HUPYpli5S3Or85J2tZgmVQL8lVCoMzlvC1LI10vwqiUronolhmwjyZJiCyiZRm5jQbyHaNPzR4LapobVI2Z0oLJEnZlTlSS7UKOcBq3osDsp6gOctadbDmPFvCxLW5U2am03YIuRy/RYsIIZny2tkgMvP/CwqxFCYmU36OAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "execution_count": 123, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "image_path = '/data/mshukor/data/coco/val2014/COCO_val2014_000000386193.jpg'\n", + "img = Image.open(image_path).convert('RGB') \n", + "image = transform(img)\n", + "image = image.to(device,non_blocking=True).unsqueeze(0)\n", + "img" + ] + }, + { + "cell_type": "code", + "execution_count": 151, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "A decoder-only architecture is being used, but right-padding was detected! 
For correct generation results, please set `padding_side='left'` when initializing the tokenizer.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[' clock', ' green']\n" + ] + } + ], + "source": [ + "# question = ['What is this object?']\n", + "question = ['What is this object?', 'What is the color of the object next to the clock?']\n", + "\n", + "if special_answer_token is not None:\n", + " question = [q+'?'+special_answer_token for q in question]\n", + "else:\n", + " question = [q+eos_token for q in question]\n", + "\n", + "question_input = tokenizer(question, padding='longest', return_tensors=\"pt\").to(device) \n", + "\n", + "with torch.autocast(device_type='cuda', dtype=torch.float16, enabled=True):\n", + "\n", + " out = model(image=image, text=question_input, mode='generate', return_dict=True, max_length=max_length, \n", + " do_sample=do_sample, num_beams=num_beams) \n", + "\n", + "\n", + "responses = []\n", + "for o in out:\n", + " o_list = o.tolist()\n", + " if special_answer_token is not None:\n", + " response = tokenizer.decode(o_list).split(special_answer_token)[1].replace(pad_token, '').replace('', '').replace(eos_token, '') # skip_special_tokens=True\n", + " else:\n", + " response = tokenizer.decode(o_list).split('</s>')[2].replace(pad_token, '').replace('', '').replace(eos_token, '') # skip_special_tokens=True; '</s>' is the OPT bos/eos token, so field [2] is the generated answer\n", + " responses.append(response)\n", + "print(responses)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "opt_kernel", + "language": "python", + "name": "opt_kernel" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.13" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/env/env.yaml b/env/env.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7f70362bba6f5f54e0d3a05085bf6f00c13863f4 --- /dev/null +++ b/env/env.yaml @@ -0,0 +1,123 @@ +name: opt +channels: + - conda-forge + - defaults +dependencies: + - _libgcc_mutex=0.1=main + - _openmp_mutex=5.1=1_gnu + - asttokens=2.0.5=pyhd3eb1b0_0 + - backcall=0.2.0=pyhd3eb1b0_0 + - ca-certificates=2022.6.15=ha878542_0 + - certifi=2022.6.15=py38h578d9bd_0 + - debugpy=1.5.1=py38h295c915_0 + - decorator=5.1.1=pyhd3eb1b0_0 + - entrypoints=0.4=py38h06a4308_0 + - executing=0.8.3=pyhd3eb1b0_0 + - ipykernel=6.9.1=py38h06a4308_0 + - ipython=8.4.0=py38h06a4308_0 + - jedi=0.18.1=py38h06a4308_1 + - jupyter_client=7.2.2=py38h06a4308_0 + - jupyter_core=4.10.0=py38h06a4308_0 + - ld_impl_linux-64=2.38=h1181459_1 + - libffi=3.3=he6710b0_2 + - libgcc-ng=11.2.0=h1234567_1 + - libgomp=11.2.0=h1234567_1 + - libsodium=1.0.18=h7b6447c_0 + - libstdcxx-ng=11.2.0=h1234567_1 + - matplotlib-inline=0.1.2=pyhd3eb1b0_2 + - ncurses=6.3=h5eee18b_3 + - nest-asyncio=1.5.5=py38h06a4308_0 + - openssl=1.1.1q=h7f8727e_0 + - parso=0.8.3=pyhd3eb1b0_0 + - pexpect=4.8.0=pyhd3eb1b0_3 + - pickleshare=0.7.5=pyhd3eb1b0_1003 + - pip=22.1.2=py38h06a4308_0 + - prompt-toolkit=3.0.20=pyhd3eb1b0_0 + - ptyprocess=0.7.0=pyhd3eb1b0_2 + - pure_eval=0.2.2=pyhd3eb1b0_0 + - pygments=2.11.2=pyhd3eb1b0_0 + - python=3.8.13=h12debd9_0 + - python-dateutil=2.8.2=pyhd3eb1b0_0 + - python_abi=3.8=2_cp38 + - pyzmq=23.2.0=py38h6a678d5_0 + - readline=8.1.2=h7f8727e_1 + - ruamel_yaml=0.15.80=py38h497a2fe_1006 + - 
setuptools=61.2.0=py38h06a4308_0 + - six=1.16.0=pyhd3eb1b0_1 + - sqlite=3.38.5=hc218d9a_0 + - stack_data=0.2.0=pyhd3eb1b0_0 + - tk=8.6.12=h1ccaba5_0 + - tornado=6.1=py38h27cfd23_0 + - wcwidth=0.2.5=pyhd3eb1b0_0 + - wheel=0.37.1=pyhd3eb1b0_0 + - xz=5.2.5=h7f8727e_1 + - yaml=0.2.5=h7f98852_2 + - zeromq=4.3.4=h2531618_0 + - zlib=1.2.12=h7f8727e_2 + - pip: + - accelerate==0.11.0 + - anyio==3.6.1 + - argon2-cffi==21.3.0 + - argon2-cffi-bindings==21.2.0 + - attrs==21.4.0 + - babel==2.10.3 + - beautifulsoup4==4.11.1 + - bleach==5.0.1 + - cffi==1.15.1 + - charset-normalizer==2.1.0 + - defusedxml==0.7.1 + - fastjsonschema==2.16.1 + - filelock==3.7.1 + - huggingface-hub==0.8.1 + - idna==3.3 + - importlib-metadata==4.12.0 + - importlib-resources==5.8.0 + - ipython-genutils==0.2.0 + - jinja2==3.1.2 + - json5==0.9.8 + - jsonschema==4.7.2 + - jupyter-server==1.18.1 + - jupyterlab==3.4.3 + - jupyterlab-pygments==0.2.2 + - jupyterlab-server==2.15.0 + - markupsafe==2.1.1 + - mistune==0.8.4 + - nbclassic==0.4.3 + - nbclient==0.6.6 + - nbconvert==6.5.0 + - nbformat==5.4.0 + - notebook-shim==0.1.0 + - numpy==1.23.1 + - opencv-python==4.6.0.66 + - packaging==21.3 + - pandocfilters==1.5.0 + - pillow==9.2.0 + - prometheus-client==0.14.1 + - psutil==5.9.1 + - pycparser==2.21 + - pyparsing==3.0.9 + - pyrsistent==0.18.1 + - pytz==2022.1 + - pyyaml==6.0 + - regex==2022.7.9 + - requests==2.28.1 + - ruamel-yaml==0.17.21 + - ruamel-yaml-clib==0.2.6 + - send2trash==1.8.0 + - sniffio==1.2.0 + - soupsieve==2.3.2.post1 + - terminado==0.15.0 + # - timm==0.6.6 + - tinycss2==1.1.1 + - tokenizers==0.12.1 + - torch==1.12.0 + - torchvision==0.13.0 + - tqdm==4.64.0 + - traitlets==5.3.0 + - transformers==4.20.1 + - typing-extensions==4.3.0 + - urllib3==1.26.10 + - webencodings==0.5.1 + - websocket-client==1.3.3 + - zipp==3.8.1 +prefix: /data/mshukor/envs/opt diff --git a/examples/audios/6cS0FsUM-cQ.wav b/examples/audios/6cS0FsUM-cQ.wav new file mode 100644 index 0000000000000000000000000000000000000000..ea6b71456f4a92fccf7018bf7522b04526ee442d Binary files /dev/null and b/examples/audios/6cS0FsUM-cQ.wav differ diff --git a/examples/audios/AJtNitYMa1I.wav b/examples/audios/AJtNitYMa1I.wav new file mode 100644 index 0000000000000000000000000000000000000000..bd0426abfffa802c151de1d91cddc6d7dffe39dc Binary files /dev/null and b/examples/audios/AJtNitYMa1I.wav differ diff --git a/examples/images/banana.jpg b/examples/images/banana.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2c229b0e7b1dd3869889320a7056f6559912fde2 Binary files /dev/null and b/examples/images/banana.jpg differ diff --git a/examples/images/baseball.jpg b/examples/images/baseball.jpg new file mode 100644 index 0000000000000000000000000000000000000000..67ab75eed2fc63014b5ea15b983321f207c94c18 Binary files /dev/null and b/examples/images/baseball.jpg differ diff --git a/examples/images/laptops.jpg b/examples/images/laptops.jpg new file mode 100644 index 0000000000000000000000000000000000000000..caccbb6b296be7fb3e408537a7a3a88f2243c879 Binary files /dev/null and b/examples/images/laptops.jpg differ diff --git a/examples/images/pigeons.jpg b/examples/images/pigeons.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0c48a759ce5b3a1194485a0f59a7fe66323f940b Binary files /dev/null and b/examples/images/pigeons.jpg differ diff --git a/examples/images/sheep.jpg b/examples/images/sheep.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d834daaa12342f5a1b660afd2a882373d8449795 Binary files /dev/null and 
b/examples/images/sheep.jpg differ diff --git a/examples/images/skateboard.jpg b/examples/images/skateboard.jpg new file mode 100644 index 0000000000000000000000000000000000000000..84be19459179ea85460c77af594c68fbb9ee7978 Binary files /dev/null and b/examples/images/skateboard.jpg differ diff --git a/examples/images/ski.jpg b/examples/images/ski.jpg new file mode 100644 index 0000000000000000000000000000000000000000..713fd0b72532abcf506b58bba19d103569fe32a2 Binary files /dev/null and b/examples/images/ski.jpg differ diff --git a/examples/images/soccer.jpg b/examples/images/soccer.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3a66acd6ca8aa407bec8d245f9e7076a5c0e5bee Binary files /dev/null and b/examples/images/soccer.jpg differ diff --git a/examples/images/wooden_table.jpg b/examples/images/wooden_table.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b0c55a3e75a4a570846c74f98aa0219c1781f5bc Binary files /dev/null and b/examples/images/wooden_table.jpg differ diff --git a/examples/videos/video7014.mp4 b/examples/videos/video7014.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..dccfc0b77b7a5543d26b74ab274085ffa8387b17 Binary files /dev/null and b/examples/videos/video7014.mp4 differ diff --git a/examples/videos/video7017.mp4 b/examples/videos/video7017.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..010672b910b41c90975b6773924cc5d8e05cca7e Binary files /dev/null and b/examples/videos/video7017.mp4 differ diff --git a/examples/videos/video7019.mp4 b/examples/videos/video7019.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..1e40f9ec4c794366d98734819fca588b5f132f23 Binary files /dev/null and b/examples/videos/video7019.mp4 differ diff --git a/examples/videos/video7021.mp4 b/examples/videos/video7021.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..68a98f5f72c776d842f926b9c86c96e3b25cbd9e Binary files /dev/null and b/examples/videos/video7021.mp4 differ diff --git a/examples/videos/video7068.mp4 b/examples/videos/video7068.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..8433c4d4c99523aa834cb159075b5681c5898735 Binary files /dev/null and b/examples/videos/video7068.mp4 differ diff --git a/float32/audio_caption.py b/float32/audio_caption.py new file mode 100644 index 0000000000000000000000000000000000000000..f12f85dca79ec56708766e75d7de46369a43e6c7 --- /dev/null +++ b/float32/audio_caption.py @@ -0,0 +1,455 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.backends.cudnn as cudnn +import torch.distributed as dist + + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage + + + +from transformers import AutoTokenizer + + +import utils + + + +from dataset.audio_caption import get_loader + +from scheduler import create_scheduler +from optim import create_optimizer + + + + +from models.utils import filter_state, filter_msg, exclude_list + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config): + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', 
utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + append_eos_token = config.get('append_eos_token', False) + eos_token = tokenizer.eos_token + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + + task_prompt = config.get('task_prompt', None) + + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + + text = batch["sent"] + + if append_eos_token: + text = [t.replace(eos_token, '') + eos_token for t in text] + + if task_prompt is not None: + text = [task_prompt + ' ' + t for t in text] + + + + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + + targets = text_input.input_ids.masked_fill(text_input.input_ids == tokenizer.pad_token_id, -100) # mask pad positions so they are ignored by the LM loss + + + answer_output = model(image=image, + text=text_input, + labels = targets, + return_dict = True, + mode='train', + reduction='none', + ) + + loss = answer_output.loss + loss = loss.sum()/image.size(0) + loss = loss*lm_loss_weight + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + if prompt_lr is not None: + metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"]) + if epoch==0 and i%step_size==0 and i<=warmup_iterations: + scheduler.step(i//step_size) + + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + + + +@torch.no_grad() +def evaluation(model, data_loader, tokenizer, device, config): + model.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + header = 'Generate Caption test result:' + print_freq = 50 + + predictions = [] + targets = [] + + + task_prompt = config.get('task_prompt', None) + + pad_token = tokenizer.pad_token + eos_token = tokenizer.eos_token + + for n, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + text = ['' for q in image] # one empty prompt per image: generation is conditioned on the image alone + if task_prompt is not None: + text = [task_prompt + ' ' + t for t in text] + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + out = model(image=image, text=text_input, mode='generate', return_dict=True, max_length=30, do_sample=True) + out_decode = [] + for i, o in enumerate(out): + try: + + res = tokenizer.decode(o) + if task_prompt is not None: + res = res.replace(task_prompt, '') + response = res.split('</s>')[1].replace(pad_token, '').replace('', '').replace(eos_token, '') # skip_special_tokens=True; '</s>' is the OPT bos/eos marker, so [1] is the generated caption + except TypeError: + print(o) + response = ' ' + out_decode.append(response) + + + predictions.extend(out_decode) + + if 'targets' in batch: + targets.extend(batch['targets']) + + + + + evaluator = data_loader.evaluator + eval_results = evaluator.evaluate(predictions, targets) + + + wandb_log_dict = {} + + for score_name, score in eval_results.items(): + wandb_log_dict[f'Valid/{score_name}'] = score + + + print(wandb_log_dict) + + return wandb_log_dict + + 
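+# A minimal decoding sketch, assuming the default facebook/opt-* tokenizer used in this
+# repo, whose bos and eos tokens are both '</s>': a generated sequence decodes to
+# "</s>{caption}</s><pad>...", so the caption is the second '</s>'-separated field,
+# which is what evaluation() above extracts. `output_ids` is a hypothetical name for
+# one generated sequence:
+#
+#   res = tokenizer.decode(output_ids)
+#   caption = res.split('</s>')[1].replace(tokenizer.pad_token, '').replace(tokenizer.eos_token, '')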
+def main(args, config): + + utils.init_distributed_mode(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + start_epoch = 0 + max_epoch = config['schedular']['epochs'] + warmup_steps = config['schedular']['warmup_epochs'] + + print(args, config) + + + tokenizer = AutoTokenizer.from_pretrained(args.text_model, use_fast=False, local_files_only=True) + + + + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + else: + num_tasks = None + global_rank = None + + + ######### + num_workers = config.get('num_workers', 4) + train_topk = config.get('train_topk', -1) + valid_topk = config.get('valid_topk', -1) + data_dir = args.data_dir + + args.image_size = config.get('image_res', 224) + args.use_data_augmentation = True + + black_image = config.get('black_image', False) + print("black image:", black_image) + + + + # audio + args.melbins = config.get('melbins', 128) + args.target_length = config.get('target_length', 1024) + args.num_tries = config.get('num_tries', 1) + + args.skip_norm = config.get('skip_norm', True) + args.norm_mean = config.get('norm_mean', None) + args.norm_std = config.get('norm_std', None) + args.noise = config.get('noise', False) + + args.freqm_p = config.get('freqm_p', 48) + args.timem_p = config.get('timem_p', 192) + + + + train_split = config.get('train_split', 'train') + val_split = config.get('val_split', 'val') + test_split = config.get('test_split', 'test') + + + + + train_loader = get_loader( + args, + split=train_split, mode='train', batch_size=config['batch_size_train'], + distributed=args.distributed, + workers=num_workers, + topk=train_topk, + data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True, black_image=black_image + ) + + print('# len train loader:', len(train_loader)) + print(f'Building val loader') + val_loader = get_loader( + args, + split=val_split, mode='val', batch_size=config['batch_size_test'], + distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True, black_image=black_image + ) + print('# len val loader:', len(val_loader)) + + print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + + print('# len test loader:', len(test_loader)) + + #### Model #### + print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + vision_model_name = config.get('vision_model_name', args.vision_model) + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + ) + + + model = model.to(device) + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + print('\tInitial other params params lr: %f' % optimizer.param_groups[0]['lr']) + print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = 
create_scheduler(arg_sche, optimizer) + + best_epoch = 0 + best_valid = 0 + + if args.checkpoint: + + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + msg = model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + print('load checkpoint from %s'%args.checkpoint) + print(msg) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + print(checkpoint.keys()) + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + + freeze_whole_model(model) + unfreeze_parameters(model, config) + print_trainable_params_percentage(model) + + model_without_ddp = model + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + + print("Start training") + start_time = time.time() + + + for epoch in range(start_epoch, max_epoch): + if epoch>0: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, lr_scheduler, config) + + if args.evaluate: + break + + + valid_results = evaluation(model, val_loader, tokenizer, device, config) + + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + save_obj = { + 'model': filter_state(model_without_ddp.state_dict(), exclude_list), + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler.state_dict(), + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + + if args.save_best: + valid_score = valid_results['Valid/CIDEr'] + + if valid_score > best_valid or epoch == 0: + best_valid = valid_score + best_epoch = epoch + print("Save best epoch:", best_epoch) + + save_obj['best_valid'] = best_valid + save_obj['best_epoch'] = best_epoch + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + # else: + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + + dist.barrier() + + + if not args.evaluate: + checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + print('load checkpoint for test from %s'%os.path.join(args.output_dir, 'checkpoint_best.pth')) + print(msg) + vqa_result = evaluation(model, test_loader, tokenizer, device, config) + + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + 
parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', action='store_true') + + parser.add_argument('--save_best', action='store_true') + + parser.add_argument('--image_dir', default='/data/mshukor/data') + + + + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/float32/caption.py b/float32/caption.py new file mode 100644 index 0000000000000000000000000000000000000000..b4bf9020d2218ff5e74fb01dc11494351673df32 --- /dev/null +++ b/float32/caption.py @@ -0,0 +1,415 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage + + +from models.utils import filter_state, filter_msg, exclude_list + +from transformers import AutoTokenizer + + +import utils + + + +from dataset.caption import get_loader + +from scheduler import create_scheduler +from optim import create_optimizer + + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config): + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + append_eos_token = config.get('append_eos_token', False) + eos_token = tokenizer.eos_token + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + + task_prompt = config.get('task_prompt', None) + + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + + text = batch["sent"] + + if append_eos_token: + text = [t.replace(eos_token, '') + eos_token for t in text] + + if task_prompt is not None: + text = [task_prompt + ' ' + t for t in text] + + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + + targets = text_input.input_ids.masked_fill(text_input.input_ids == tokenizer.pad_token_id, -100) + + + answer_output = model(image=image, + text=text_input, + labels = targets, + return_dict = True, + mode='train', + reduction='none', + ) + + loss = answer_output.loss + loss = 
loss.sum()/image.size(0)
+        loss = loss*lm_loss_weight
+
+        optimizer.zero_grad()
+        loss.backward()
+        optimizer.step()
+
+        metric_logger.update(loss=loss.item())
+        metric_logger.update(lr=optimizer.param_groups[0]["lr"])
+        if prompt_lr is not None:
+            metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"])
+        if epoch == 0 and i % step_size == 0 and i <= warmup_iterations:
+            scheduler.step(i//step_size)
+
+    # gather the stats from all processes
+    metric_logger.synchronize_between_processes()
+    print("Averaged stats:", metric_logger.global_avg())
+    return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()}
+
+
+@torch.no_grad()
+def evaluation(model, data_loader, tokenizer, device, config):
+    # test
+    model.eval()
+
+    metric_logger = utils.MetricLogger(delimiter="  ")
+    header = 'Generate Caption test result:'
+    print_freq = 50
+
+    predictions = []
+    targets = []
+
+    task_prompt = config.get('task_prompt', None)
+
+    pad_token = tokenizer.pad_token
+    eos_token = tokenizer.eos_token
+
+    for n, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)):
+
+        image = batch["images"].to(device, non_blocking=True)
+        text = ['' for q in image]
+        if task_prompt is not None:
+            text = [task_prompt + ' ' + t for t in text]
+        text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device)
+
+        out = model(image=image, text=text_input, mode='generate', return_dict=True, max_length=30, do_sample=True)
+        out_decode = []
+        for i, o in enumerate(out):
+            try:
+                res = tokenizer.decode(o)
+                # keep only the text after the bos '</s>' and strip special tokens
+                response = res.split('</s>')[1].replace(pad_token, '').replace('</s>', '').replace(eos_token, '')  # skip_special_tokens=True
+            except TypeError:
+                print(o)
+                response = ' '
+            out_decode.append(response)
+
+        predictions.extend(out_decode)
+
+        if 'targets' in batch:
+            targets.extend(batch['targets'])
+
+    evaluator = data_loader.evaluator
+    eval_results = evaluator.evaluate(predictions, targets)
+
+    wandb_log_dict = {}
+
+    for score_name, score in eval_results.items():
+        wandb_log_dict[f'Valid/{score_name}'] = score
+
+    print(wandb_log_dict)
+
+    return wandb_log_dict
+
+
+def main(args, config):
+    os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch'
+    utils.init_distributed_mode(args)
+
+    device = torch.device(args.device)
+
+    seed = args.seed + utils.get_rank()
+    torch.manual_seed(seed)
+    np.random.seed(seed)
+    random.seed(seed)
+    cudnn.benchmark = True
+
+    start_epoch = 0
+    max_epoch = config['schedular']['epochs']
+    warmup_steps = config['schedular']['warmup_epochs']
+
+    print(args, config)
+
+    tokenizer = AutoTokenizer.from_pretrained(args.text_model, use_fast=False, local_files_only=True)
+
+    if args.distributed:
+        num_tasks = utils.get_world_size()
+        global_rank = utils.get_rank()
+    else:
+        num_tasks = None
+        global_rank = None
+
+    #########
+    num_workers = config.get('num_workers', 4)
+    train_topk = config.get('train_topk', -1)
+    valid_topk = config.get('valid_topk', -1)
+    data_dir = args.data_dir
+
+    args.image_size = config.get('image_res', 224)
+    args.use_data_augmentation = True
+
+    train_loader = get_loader(
+        args,
+        split='train', mode='train', batch_size=config['batch_size_train'],
+        distributed=args.distributed,
+        workers=num_workers,
+        topk=train_topk,
+        data_dir=data_dir,
+        local_rank=global_rank, world_size=num_tasks, verbose=True
+    )
+
+    print('# len train loader:', len(train_loader))
+    print('Building val loader')
+    val_loader = get_loader(
+        args,
+        split='val', mode='val', batch_size=config['batch_size_test'],
distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + print('# len val loader:', len(val_loader)) + + print(f'Building test loader') + test_loader = get_loader( + args, + split='test', mode='val', batch_size=config['batch_size_test'], + distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + + print('# len test loader:', len(test_loader)) + + #### Model #### + print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + vision_model_name = config.get('vision_model_name', args.vision_model) + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + ) + + + model = model.to(device) + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + print('\tInitial other params params lr: %f' % optimizer.param_groups[0]['lr']) + print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = create_scheduler(arg_sche, optimizer) + + best_epoch = 0 + best_valid = 0 + + if args.checkpoint: + + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + msg = model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + print('load checkpoint from %s'%args.checkpoint) + print(msg) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + print(checkpoint.keys()) + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + + freeze_whole_model(model) + unfreeze_parameters(model, config) + print_trainable_params_percentage(model) + + model_without_ddp = model + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + + print("Start training") + start_time = time.time() + + + for epoch in range(start_epoch, max_epoch): + if epoch>0: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, lr_scheduler, config) + + if args.evaluate: + break + + + valid_results = evaluation(model, val_loader, tokenizer, device, config) + + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + save_obj = { + 'model': filter_state(model_without_ddp.state_dict(), exclude_list), + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler.state_dict(), + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + + if args.save_best: + valid_score = valid_results['Valid/CIDEr'] + + if valid_score > best_valid or epoch == 0: 
+ best_valid = valid_score + best_epoch = epoch + print("Save best epoch:", best_epoch) + + save_obj['best_valid'] = best_valid + save_obj['best_epoch'] = best_epoch + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + + dist.barrier() + + + + + + ### test best model + if not args.evaluate: + checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + print('load checkpoint for test from %s'%os.path.join(args.output_dir, 'checkpoint_best.pth')) + print(msg) + vqa_result = evaluation(model, test_loader, tokenizer, device, config) + + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', action='store_true') + + parser.add_argument('--save_best', action='store_true') + + parser.add_argument('--image_dir', default='/data/mshukor/data') + + + + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/float32/gqa.py b/float32/gqa.py new file mode 100644 index 0000000000000000000000000000000000000000..3f3028b4b0e6e661ea5562ced1be2290a01b1d86 --- /dev/null +++ b/float32/gqa.py @@ -0,0 +1,565 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage + + + +from transformers import AutoTokenizer + + +import utils + + +from dataset.gqa import get_loader + +from scheduler import create_scheduler +from optim import create_optimizer + + + +from tqdm import tqdm + + +from models.utils import filter_state, filter_msg, exclude_list + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config): + model.train() + + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', 
utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + connector_lr = config_optim.connector_lr if hasattr(config_optim, 'connector_lr') else None + vis_lr = config_optim.vis_lr if hasattr(config_optim, 'vis_lr') else None + text_lr = config_optim.text_lr if hasattr(config_optim, 'text_lr') else None + + print(vis_lr, text_lr, connector_lr, len(optimizer.param_groups)) + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + special_answer_token = config.get('special_answer_token', None) + + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + shift_labels = config.get('shift_labels', False) + + loss_only_on_answers = config.get('loss_only_on_answers', False) + + eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + + image = batch['images'].to(device,non_blocking=True) + + question = batch['sent'] + + answer = batch['answers'] + + + questions_answers = [] + + if loss_only_on_answers: + if special_answer_token is not None: + questions_ = [question[i] + "?" for i in range(len(question))] + answers_ = [special_answer_token + answer[i].replace('[SEP]','') + eos_token for i in range(len(question))] + questions_input = tokenizer(questions_, padding='longest', return_tensors="pt").to(device) + answers_input = tokenizer(answers_, padding='longest', return_tensors="pt").to(device) + + questions_targets = torch.ones_like(questions_input.input_ids)*(-100) + answer_targets_ = answers_input.input_ids.masked_fill(answers_input.input_ids == tokenizer.pad_token_id, -100) + + questions_answers_input = questions_input + + # remove bos token from answer + questions_answers_input.input_ids = torch.cat((questions_input.input_ids, answers_input.input_ids[:, 1:]), dim=-1) + questions_answers_input.attention_mask = torch.cat((questions_input.attention_mask, answers_input.attention_mask[:, 1:]), dim=-1) + + answer_targets = torch.cat((questions_targets, answer_targets_[:, 1:]), dim=-1) + else: + raise NotImplementedError + else: + if special_answer_token is not None: + questions_answers += [question[i] + "?" 
+ special_answer_token + answer[i].replace('[SEP]', '') + eos_token for i in range(len(question))]
+            else:
+                questions_answers += [question[i] + "</s>" + answer[i].replace('[SEP]', '') + eos_token for i in range(len(question))]
+
+            questions_answers_input = tokenizer(questions_answers, padding='longest', return_tensors="pt").to(device)
+            answer_targets = questions_answers_input.input_ids.masked_fill(questions_answers_input.input_ids == tokenizer.pad_token_id, -100)
+
+        images = image
+
+        if shift_labels:
+            new_target = torch.ones_like(answer_targets)*(-100)
+            new_target[:, :-1] = answer_targets[:, 1:]  # remove sos token from target, equivalent to shifting inputs
+            answer_targets = new_target
+
+        answer_output = model(image=images,
+                              text=questions_answers_input,
+                              labels=answer_targets,
+                              return_dict=True,
+                              mode='train',
+                              reduction='none',
+                              )
+
+        loss = answer_output.loss
+        loss = loss.sum()/image.size(0)
+        loss = loss*lm_loss_weight
+
+        optimizer.zero_grad()
+        loss.backward()
+        optimizer.step()
+
+        metric_logger.update(loss=loss.item())
+        metric_logger.update(lr=optimizer.param_groups[0]["lr"])
+        if prompt_lr is not None:
+            metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"])
+
+        if i % print_freq == 0:
+            lrs = [g["lr"] for g in optimizer.param_groups]
+            print(lrs)
+
+        if epoch == 0 and i % step_size == 0 and i <= warmup_iterations:
+            if scheduler is not None:
+                scheduler.step(i//step_size)
+
+    metric_logger.synchronize_between_processes()
+    print("Averaged stats:", metric_logger.global_avg())
+    return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()}
+
+
+@torch.no_grad()
+def predict(model, loader, tokenizer, device, dump_path=None, verbose=False, distributed=False, special_answer_token=None, special_eo_answer_token=None):
+    model.eval()
+    eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token
+    pad_token = tokenizer.pad_token
+
+    with torch.no_grad():
+        quesid2ans = {}
+        if verbose:
+            pbar = tqdm(total=len(loader), ncols=120, desc="Prediction")
+        for i, batch in enumerate(loader):
+
+            image = batch['images'].to(device, non_blocking=True)
+
+            question = batch['sent']
+
+            question_id = batch['question_ids']
+
+            if special_answer_token is not None:
+                question = [q+'?'+special_answer_token for q in question]
+            else:
+                question = [q+eos_token for q in question]
+
+            question_input = tokenizer(question, padding='longest', return_tensors="pt").to(device)
+
+            out = model(image=image, text=question_input, mode='generate', return_dict=True, max_length=30, do_sample=True)
+
+            for ques_id, o in zip(question_id, out):
+                o_list = o.tolist()
+                try:
+                    # the answer is everything after the special answer token
+                    # (or after the second '</s>' when no special token is used)
+                    if special_answer_token is not None:
+                        response = tokenizer.decode(o_list).split(special_answer_token)[1].replace(pad_token, '').replace('</s>', '').replace(eos_token, '')  # skip_special_tokens=True
+                    else:
+                        response = tokenizer.decode(o_list).split('</s>')[2].replace(pad_token, '').replace('</s>', '').replace(eos_token, '')  # skip_special_tokens=True
+                except TypeError:
+                    print(o_list)
+                    response = ' '
+
+                ques_id = int(ques_id)
+                quesid2ans[ques_id] = response
+
+            if verbose:
+                pbar.update(1)
+        if verbose:
+            pbar.close()
+
+    if distributed:
+        dist.barrier()
+
+        qid2ans_list = utils.all_gather(quesid2ans)
+        if verbose:
+            quesid2ans = {}
+            for qid2ans in qid2ans_list:
+                for k, v in qid2ans.items():
+                    quesid2ans[k] = v
+
+    if dump_path is not None:
+        evaluator = loader.evaluator
+        evaluator.dump_result(quesid2ans, dump_path)
+
+    return quesid2ans
+
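+
+# train() above keeps the LM loss on question tokens unless loss_only_on_answers
+# is set, in which case question positions get label -100 and only answer tokens
+# contribute to the loss. A self-contained sketch of that masking for one
+# already-tokenized (question, answer) pair; the helper is hypothetical and not
+# part of the original script:
+def _mask_question_tokens(question_ids, answer_ids, ignore_index=-100):
+    """Build labels where only answer positions contribute to the loss."""
+    labels = torch.full((question_ids.numel() + answer_ids.numel(),), ignore_index, dtype=torch.long)
+    labels[question_ids.numel():] = answer_ids  # loss is computed only on the answer span
+    return labels
+
+
+def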
evaluate(model, data_loader, tokenizer, device, + distributed=False, special_answer_token=None, special_eo_answer_token=None): + verbose = utils.is_main_process() + + quesid2ans = predict(model, data_loader, tokenizer, device, verbose=verbose, + distributed=distributed, special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token) + + evaluator = data_loader.evaluator + + acc_dict = {} + topk_score = evaluator.evaluate(quesid2ans, normalize_answer=True) + acc_dict['topk_score'] = topk_score + return acc_dict + + + + + + + + +def main(args, config): + os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch' + utils.init_distributed_mode(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + start_epoch = 0 + max_epoch = config['schedular']['epochs'] + warmup_steps = config['schedular']['warmup_epochs'] + + + #### Dataset #### + + print("Creating dataset") + + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + else: + num_tasks = None + global_rank = None + + num_workers = config.get('num_workers', 4) + train_topk = config.get('train_topk', -1) + valid_topk = config.get('valid_topk', -1) + data_dir = args.data_dir + + args.image_size = config.get('image_res', 224) + args.use_data_augmentation = True + + train_split = config.get('train_split', 'train') + val_split = config.get('val_split', 'valid') + test_split = config.get('test_split', 'testdev') + + train_loader = get_loader( + args, + split=train_split, mode='train', batch_size=config['batch_size_train'], + distributed=args.distributed, + workers=num_workers, + topk=train_topk, + data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + args.raw_label = False + print('# len train loader:', len(train_loader)) + print(f'Building val loader') + val_loader = get_loader( + args, + split=val_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + print('# len val loader:', len(val_loader)) + + print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + + print('# len test loader:', len(test_loader)) + + + if args.submit: + print(f'Building test submit loader ...') + submit_test_loader = get_loader( + args, + split='test', mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, gpu=args.gpu, + workers=4, + topk=valid_topk, data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + #### Model #### + print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = args.vision_model, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + ) + + + model = model.to(device) + + + tokenizer_name = config.get('tokenizer_name', args.text_model) + + tokenizer = AutoTokenizer.from_pretrained(tokenizer_name, use_fast=False, 
local_files_only=True) + + + + special_answer_token = config.get('special_answer_token', None) + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + if special_answer_token is not None: + special_tokens_dict = {'additional_special_tokens': [special_answer_token]} + if special_eo_answer_token is not None: + special_tokens_dict['additional_special_tokens'] += [special_eo_answer_token] + + tokenizer.add_special_tokens(special_tokens_dict) + print("Adding special token:", special_tokens_dict) + print(tokenizer) + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config['optimizer']) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + print('\tInitial other params params lr: %f' % optimizer.param_groups[0]['lr']) + print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = create_scheduler(arg_sche, optimizer) + + + if args.checkpoint: + + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + msg = model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + print('load checkpoint from %s'%args.checkpoint) + print(msg) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + print(checkpoint.keys()) + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + freeze_whole_model(model) + unfreeze_parameters(model, config) + print_trainable_params_percentage(model) + + model_without_ddp = model + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + + print("Start training") + start_time = time.time() + + best_valid = 0. + best_epoch = 0 + + for epoch in range(start_epoch, max_epoch): + if epoch>0: + if lr_scheduler is not None: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, lr_scheduler, config) + + if args.evaluate: + break + + score_dict = evaluate(model, val_loader, tokenizer, device, distributed=args.distributed, + special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token) + print(score_dict) + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + if lr_scheduler is None: + lr_scheduler_state_dict = {} + else: + lr_scheduler_state_dict = lr_scheduler.state_dict() + save_obj = { + 'model': filter_state(model_without_ddp.state_dict(), exclude_list), + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler_state_dict, + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + + if args.save_best: + valid_score = score_dict['topk_score'] * 100. 
+            if valid_score > best_valid or epoch == 0:
+                best_valid = valid_score
+                best_epoch = epoch
+
+                save_obj['best_valid'] = best_valid
+                save_obj['best_epoch'] = best_epoch
+
+                print("save best epoch:", best_epoch)
+                torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth'))
+
+            torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth'))
+
+        dist.barrier()
+
+    if lr_scheduler is None:
+        lr_scheduler_state_dict = {}
+    else:
+        lr_scheduler_state_dict = lr_scheduler.state_dict()
+    save_obj = {
+        'model': filter_state(model_without_ddp.state_dict(), exclude_list),
+        'optimizer': optimizer.state_dict(),
+        'lr_scheduler': lr_scheduler_state_dict,
+        'config': config,
+        'epoch': epoch,
+        'best_valid': best_valid,
+        'best_epoch': best_epoch,
+    }
+    torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth'))
+
+    verbose = utils.is_main_process()
+
+    if not args.evaluate:
+        checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu')
+        state_dict = checkpoint['model']
+        msg = model.module.load_state_dict(state_dict, strict=False)
+        msg = filter_msg(msg, exclude_list)
+        print('load checkpoint for test from %s' % os.path.join(args.output_dir, 'checkpoint_best.pth'))
+        print(msg)
+
+        quesid2ans = predict(model, test_loader, tokenizer, device, verbose=verbose,
+                             distributed=args.distributed, special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token)
+
+        evaluator = test_loader.evaluator
+        score_dict = evaluator.evaluate(quesid2ans, normalize_answer=True)
+
+        print("Test accuracy:", score_dict)
+
+    if args.submit:
+        dump_path = os.path.join(args.output_dir, 'submit.json')
+        predict(model, submit_test_loader, tokenizer, device, dump_path=dump_path, verbose=verbose,
+                distributed=args.distributed, special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token)
+
+    if args.distributed:
+        dist.barrier()
+        exit()
+
+    total_time = time.time() - start_time
+    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
+    print('Training time {}'.format(total_time_str))
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--config', default='./configs/VQA.yaml')
+    parser.add_argument('--checkpoint', default='')
+    parser.add_argument('--output_dir', default='output/vqa')
+    parser.add_argument('--evaluate', action='store_true')
+    parser.add_argument('--text_model', default='facebook/opt-350m')
+    parser.add_argument('--vision_model', default='vit_base_patch16_224')
+    parser.add_argument('--device', default='cuda')
+    parser.add_argument('--seed', default=42, type=int)
+    parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes')
+    parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
+    parser.add_argument('--distributed', default=True, type=bool)
+
+    parser.add_argument('--data_dir', default='/data/mshukor/data')
+    parser.add_argument('--resume', action='store_true')
+
+    parser.add_argument('--submit', action='store_true')
+    parser.add_argument('--save_best', action='store_true')
+
+    args = parser.parse_args()
+
+    config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader)
+
+    args.result_dir = os.path.join(args.output_dir, 'result')
+
+    Path(args.output_dir).mkdir(parents=True, exist_ok=True)
+    Path(args.result_dir).mkdir(parents=True, exist_ok=True)
+
+    yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w'))
+
+    main(args, config)
\ No newline at end of file
diff --git a/float32/video_caption.py b/float32/video_caption.py
new file mode 100644
index
0000000000000000000000000000000000000000..ed2fc63468bc0e25e1a21efd8731701e45f43dbf --- /dev/null +++ b/float32/video_caption.py @@ -0,0 +1,471 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage + + + +from transformers import AutoTokenizer + + +import utils + + + +from dataset.video_caption import get_loader + +from scheduler import create_scheduler +from optim import create_optimizer + + + + + +from models.utils import filter_state, filter_msg, exclude_list + + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config): + # train + model.train() + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + append_eos_token = config.get('append_eos_token', False) + eos_token = tokenizer.eos_token + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + + task_prompt = config.get('task_prompt', None) + + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch["images"].to(device,non_blocking=True) + + text = batch["sent"] + + if append_eos_token: + text = [t.replace(eos_token, '') + eos_token for t in text] + + if task_prompt is not None: + text = [task_prompt + ' ' + t for t in text] + + + + text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device) + + + targets = text_input.input_ids.masked_fill(text_input.input_ids == tokenizer.pad_token_id, -100) + + + answer_output = model(image=image, + text=text_input, + labels = targets, + return_dict = True, + mode='train', + reduction='none', + ) + + loss = answer_output.loss + loss = loss.sum()/image.size(0) + loss = loss*lm_loss_weight + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + if prompt_lr is not None: + metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"]) + if epoch==0 and i%step_size==0 and i<=warmup_iterations: + scheduler.step(i//step_size) + + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + print("Averaged stats:", metric_logger.global_avg()) + return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()} + + + + +@torch.no_grad() +def evaluation(model, data_loader, tokenizer, device, config, max_length=30, nlgeval=None): + model.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + header = 'Generate Caption test result:' + print_freq = 50 + + + predictions = [] + targets = [] + + + task_prompt = config.get('task_prompt', None) + + pad_token = tokenizer.pad_token + eos_token = tokenizer.eos_token + + for n, batch in 
enumerate(metric_logger.log_every(data_loader, print_freq, header)):
+
+        image = batch["images"].to(device, non_blocking=True)
+        text = ['' for q in image]
+        if task_prompt is not None:
+            text = [task_prompt + ' ' + t for t in text]
+        text_input = tokenizer(text, padding='longest', return_tensors="pt").to(device)
+
+        out = model(image=image, text=text_input, mode='generate', return_dict=True, max_length=max_length, do_sample=True)
+        out_decode = []
+        for i, o in enumerate(out):
+            try:
+                res = tokenizer.decode(o)
+                # keep only the text after the bos '</s>' and strip special tokens
+                response = res.split('</s>')[1].replace(pad_token, '').replace('</s>', '').replace(eos_token, '')  # skip_special_tokens=True
+            except TypeError:
+                print(o)
+                response = ' '
+
+            if task_prompt is not None:
+                response = response.replace(task_prompt, '')
+            out_decode.append(response)
+
+        predictions.extend(out_decode)
+
+        if 'targets' in batch:
+            targets.extend(batch['targets'])
+
+    evaluator = data_loader.evaluator
+    eval_results = evaluator.evaluate(predictions, targets)
+
+    wandb_log_dict = {}
+
+    for score_name, score in eval_results.items():
+        wandb_log_dict[f'Valid/{score_name}'] = score
+
+    print(wandb_log_dict)
+
+    return wandb_log_dict
+
+
+def main(args, config):
+
+    utils.init_distributed_mode(args)
+
+    device = torch.device(args.device)
+
+    # fix the seed for reproducibility
+    seed = args.seed + utils.get_rank()
+    torch.manual_seed(seed)
+    np.random.seed(seed)
+    random.seed(seed)
+    cudnn.benchmark = True
+
+    start_epoch = 0
+    max_epoch = config['schedular']['epochs']
+    warmup_steps = config['schedular']['warmup_epochs']
+
+    print(args, config)
+
+    tokenizer = AutoTokenizer.from_pretrained(args.text_model, use_fast=False, local_files_only=True)
+
+    special_answer_token = config.get('special_answer_token', None)
+    special_eo_answer_token = config.get('special_eo_answer_token', None)
+
+    if special_answer_token is not None:
+        special_tokens_dict = {'additional_special_tokens': [special_answer_token]}
+        if special_eo_answer_token is not None:
+            special_tokens_dict['additional_special_tokens'] += [special_eo_answer_token]
+
+        tokenizer.add_special_tokens(special_tokens_dict)
+        print("Adding special token:", special_tokens_dict)
+        print(tokenizer)
+
+    if args.distributed:
+        num_tasks = utils.get_world_size()
+        global_rank = utils.get_rank()
+    else:
+        num_tasks = None
+        global_rank = None
+
+    #########
+    max_length = args.max_gen_length
+
+    num_workers = config.get('num_workers', 4)
+    train_topk = config.get('train_topk', -1)
+    valid_topk = config.get('valid_topk', -1)
+    data_dir = args.data_dir
+
+    args.image_size = config.get('image_res', 224)
+    args.use_data_augmentation = True
+
+    black_image = config.get('black_image', False)
+    print("black image:", black_image)
+
+    # video
+    args.num_frames = config.get('num_frames', 4)
+    args.as_images = config.get('as_images', True)
+    args.num_tries = config.get('num_tries', 1)
+    args.sample_type = config.get('sample_type', 'rand')
+
+    train_split = config.get('train_split', 'train')
+    val_split = config.get('val_split', 'val')
+    test_split = config.get('test_split', 'test')
+
+    train_loader = get_loader(
+        args,
+        split=train_split, mode='train', batch_size=config['batch_size_train'],
+        distributed=args.distributed,
+        workers=num_workers,
+        topk=train_topk,
+        data_dir=data_dir,
+        local_rank=global_rank, world_size=num_tasks, verbose=True, black_image=black_image
+    )
+
+    print('# len train loader:', len(train_loader))
+    print('Building val loader')
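+    # The video loaders sample args.num_frames frames per clip according to
+    # args.sample_type ('rand' during training). A minimal sketch of such an
+    # index sampler (hypothetical helper; the repo's dataset code may differ):
+    def _sample_frame_indices(num_total, num_frames, sample_type='rand'):
+        bounds = np.linspace(0, num_total, num_frames + 1, dtype=int)
+        if sample_type == 'rand':
+            # one random frame per equal-length temporal segment
+            return [int(np.random.randint(lo, max(lo + 1, hi))) for lo, hi in zip(bounds[:-1], bounds[1:])]
+        # deterministic fallback: segment midpoints
+        return [int((lo + hi) // 2) for lo, hi in zip(bounds[:-1], bounds[1:])]
+
+    val_loader = get_loader(
+        args,
+        split=val_split,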
mode='val', batch_size=config['batch_size_test'], + distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True, black_image=black_image + ) + print('# len val loader:', len(val_loader)) + + print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=False, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + + print('# len test loader:', len(test_loader)) + + #### Model #### + print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + vision_model_name = config.get('vision_model_name', args.vision_model) + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + ) + + + model = model.to(device) + + arg_opt = utils.AttrDict(config['optimizer']) + optimizer = create_optimizer(arg_opt, model, config=config) + + if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None: + print('\tInitial other params params lr: %f' % optimizer.param_groups[0]['lr']) + print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr']) + + arg_sche = utils.AttrDict(config['schedular']) + lr_scheduler, _ = create_scheduler(arg_sche, optimizer) + + best_epoch = 0 + best_valid = 0 + + + nlgeval = None + + if args.checkpoint: + + checkpoint = torch.load(args.checkpoint, map_location='cpu') + state_dict = checkpoint['model'] + msg = model.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + print('load checkpoint from %s'%args.checkpoint) + print(msg) + if 'best_valid' in checkpoint: + print("load best valid {} at epoch {}".format(checkpoint['best_valid'] , checkpoint['best_epoch'] )) + + if args.resume: + model = model.to(device) + optimizer.load_state_dict(checkpoint['optimizer']) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + start_epoch = checkpoint['epoch']+1 + print(checkpoint.keys()) + if 'best_valid' in checkpoint: + best_valid = checkpoint['best_valid'] + best_epoch = checkpoint['best_epoch'] + print("load best valid {} at epoch {}".format(best_valid, best_epoch)) + + + freeze_whole_model(model) + unfreeze_parameters(model, config) + print_trainable_params_percentage(model) + + model_without_ddp = model + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + + + print("Start training") + start_time = time.time() + + + for epoch in range(start_epoch, max_epoch): + if epoch>0: + lr_scheduler.step(epoch+warmup_steps) + + if not args.evaluate: + if args.distributed: + train_loader.sampler.set_epoch(epoch) + + train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, lr_scheduler, config) + + if args.evaluate: + break + + + valid_results = evaluation(model, val_loader, tokenizer, device, config, max_length=max_length, nlgeval=nlgeval) + + if utils.is_main_process(): + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + 'epoch': epoch, + } + with open(os.path.join(args.output_dir, "log.txt"),"a") as f: + f.write(json.dumps(log_stats) + "\n") + + save_obj = { + 'model': filter_state(model_without_ddp.state_dict(), exclude_list), + 'optimizer': 
optimizer.state_dict(), + 'lr_scheduler': lr_scheduler.state_dict(), + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + + if args.save_best: + valid_score = valid_results['Valid/CIDEr'] + + if valid_score > best_valid or epoch == 0: + best_valid = valid_score + best_epoch = epoch + print("Save best epoch:", best_epoch) + + save_obj['best_valid'] = best_valid + save_obj['best_epoch'] = best_epoch + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + + dist.barrier() + + + + + + if not args.evaluate: + checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + print('load checkpoint for test from %s'%args.checkpoint) + print(msg) + vqa_result = evaluation(model, test_loader, tokenizer, device, config, max_length=max_length, nlgeval=nlgeval) + + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', action='store_true') + + parser.add_argument('--save_best', action='store_true') + + parser.add_argument('--image_dir', default='/data/mshukor/data') + parser.add_argument('--max_gen_length', default=30, type=int, help='max_gen_length') + + + + args = parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/float32/video_vqa.py b/float32/video_vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..ddff62db513f5c4427c2637aa0e0b4b5a8b9e930 --- /dev/null +++ b/float32/video_vqa.py @@ -0,0 +1,555 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage + + + +from transformers import AutoTokenizer + + +import utils + +from dataset.video_vqa import get_loader + +from scheduler import 
create_scheduler +from optim import create_optimizer + + + +from tqdm import tqdm + + +from models.utils import filter_state, filter_msg, exclude_list + + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config): + model.train() + + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + connector_lr = config_optim.connector_lr if hasattr(config_optim, 'connector_lr') else None + vis_lr = config_optim.vis_lr if hasattr(config_optim, 'vis_lr') else None + text_lr = config_optim.text_lr if hasattr(config_optim, 'text_lr') else None + + print(vis_lr, text_lr, connector_lr, len(optimizer.param_groups)) + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + special_answer_token = config.get('special_answer_token', None) + + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + + + eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + image = batch['images'].to(device,non_blocking=True) + + question = batch['sent'] + + answer = batch['answers'] + + + questions_answers = [] + + + if special_answer_token is not None: + questions_answers += [question[i] + "?" 
+ special_answer_token + answer[i].replace('[SEP]', '') + eos_token for i in range(len(question))]
+        else:
+            questions_answers += [question[i] + "</s>" + answer[i].replace('[SEP]', '') + eos_token for i in range(len(question))]
+
+        questions_answers_input = tokenizer(questions_answers, padding='longest', return_tensors="pt").to(device)
+        answer_targets = questions_answers_input.input_ids.masked_fill(questions_answers_input.input_ids == tokenizer.pad_token_id, -100)
+
+        images = image
+
+        answer_output = model(image=images,
+                              text=questions_answers_input,
+                              labels=answer_targets,
+                              return_dict=True,
+                              mode='train',
+                              reduction='none',
+                              )
+
+        loss = answer_output.loss
+        loss = loss.sum()/image.size(0)
+        loss = loss*lm_loss_weight
+
+        optimizer.zero_grad()
+        loss.backward()
+        optimizer.step()
+
+        metric_logger.update(loss=loss.item())
+        metric_logger.update(lr=optimizer.param_groups[0]["lr"])
+        if prompt_lr is not None:
+            metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"])
+
+        if i % print_freq == 0:
+            lrs = [g["lr"] for g in optimizer.param_groups]
+            print(lrs)
+
+        if epoch == 0 and i % step_size == 0 and i <= warmup_iterations:
+            if scheduler is not None:
+                scheduler.step(i//step_size)
+
+    metric_logger.synchronize_between_processes()
+    print("Averaged stats:", metric_logger.global_avg())
+    return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()}
+
+
+@torch.no_grad()
+def predict(model, loader, tokenizer, device, dump_path=None, verbose=False, distributed=False, special_answer_token=None, special_eo_answer_token=None):
+    model.eval()
+    eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token
+    pad_token = tokenizer.pad_token
+
+    with torch.no_grad():
+        quesid2ans = {}
+        if verbose:
+            pbar = tqdm(total=len(loader), ncols=120, desc="Prediction")
+        for i, batch in enumerate(loader):
+
+            image = batch['images'].to(device, non_blocking=True)
+
+            question = batch['sent']
+
+            question_id = batch['question_ids']
+
+            if special_answer_token is not None:
+                question = [q+'?'+special_answer_token for q in question]
+            else:
+                question = [q+eos_token for q in question]
+
+            question_input = tokenizer(question, padding='longest', return_tensors="pt").to(device)
+
+            out = model(image=image, text=question_input, mode='generate', return_dict=True, max_length=30, do_sample=True)
+
+            for ques_id, o in zip(question_id, out):
+                o_list = o.tolist()
+                try:
+                    if special_answer_token is not None:
+                        response = tokenizer.decode(o_list).split(special_answer_token)[1].replace(pad_token, '').replace('</s>', '').replace(eos_token, '')  # skip_special_tokens=True
+                    else:
+                        response = tokenizer.decode(o_list).split('</s>')[2].replace(pad_token, '').replace('</s>', '').replace(eos_token, '')  # skip_special_tokens=True
+                except TypeError:
+                    print(o_list)
+                    response = ' '
+
+                quesid2ans[ques_id] = response
+
+            if verbose:
+                pbar.update(1)
+        if verbose:
+            pbar.close()
+
+    if distributed:
+        dist.barrier()
+
+        qid2ans_list = utils.all_gather(quesid2ans)
+        if verbose:
+            quesid2ans = {}
+            for qid2ans in qid2ans_list:
+                for k, v in qid2ans.items():
+                    quesid2ans[k] = v
+
+    if dump_path is not None:
+        evaluator = loader.evaluator
+        evaluator.dump_result(quesid2ans, dump_path)
+
+    return quesid2ans
+
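+
+# predict() above merges the per-rank prediction dicts through utils.all_gather.
+# A minimal sketch of that collective using torch.distributed primitives
+# (hypothetical helper; the repo's utils.all_gather may be implemented differently):
+def _gather_predictions(quesid2ans):
+    """Return the union of every rank's {question_id: answer} dict."""
+    if not (dist.is_available() and dist.is_initialized()):
+        return quesid2ans
+    gathered = [None] * dist.get_world_size()
+    dist.all_gather_object(gathered, quesid2ans)  # one dict per rank
+    merged = {}
+    for rank_dict in gathered:
+        merged.update(rank_dict)
+    return merged
+
+
+def evaluate(model, data_loader, tokenizer, device,
+             distributed=False, special_answer_token=None, special_eo_answer_token=None):
+    verbose = utils.is_main_process()
+
+    quesid2ans = predict(model, data_loader, tokenizer, device,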
verbose=verbose, + distributed=distributed, special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token) + + evaluator = data_loader.evaluator + + acc_dict = {} + topk_score = evaluator.evaluate(quesid2ans, normalize_answer=True) + acc_dict['topk_score'] = topk_score + return acc_dict + + + + + + + + +def main(args, config): + + os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch' + + utils.init_distributed_mode(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + start_epoch = 0 + max_epoch = config['schedular']['epochs'] + warmup_steps = config['schedular']['warmup_epochs'] + + + #### Dataset #### + + print("Creating dataset") + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + else: + num_tasks = None + global_rank = None + + num_workers = config.get('num_workers', 4) + train_topk = config.get('train_topk', -1) + valid_topk = config.get('valid_topk', -1) + data_dir = args.data_dir + + args.image_size = config.get('image_res', 224) + args.use_data_augmentation = True + + + # video + args.num_frames = config.get('num_frames', 4) + args.as_images = config.get('as_images', True) + args.num_tries = config.get('num_tries', 1) + args.sample_type = config.get('sample_type', 'rand') + + train_split = config.get('train_split', 'train') + val_split = config.get('val_split', 'val') + test_split = config.get('test_split', 'test') + + + train_loader = get_loader( + args, + split=train_split, mode='train', batch_size=config['batch_size_train'], + distributed=args.distributed, + workers=num_workers, + topk=train_topk, + data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + args.raw_label = False + print('# len train loader:', len(train_loader)) + print(f'Building val loader') + val_loader = get_loader( + args, + split=val_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + print('# len val loader:', len(val_loader)) + + print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + + print('# len test loader:', len(test_loader)) + + + if args.submit: + print(f'Building test submit loader ...') + submit_test_loader = get_loader( + args, + split='test', mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, gpu=args.gpu, + workers=4, + topk=valid_topk, data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, verbose=True + ) + + #### Model #### + print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + + vision_model_name = config.get('vision_model_name', args.vision_model) + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + ) + + + model = model.to(device) + + + tokenizer_name = config.get('tokenizer_name', args.text_model) + + tokenizer = 
AutoTokenizer.from_pretrained(tokenizer_name, use_fast=False, local_files_only=True)
+
+    special_answer_token = config.get('special_answer_token', None)
+    special_eo_answer_token = config.get('special_eo_answer_token', None)
+
+    if special_answer_token is not None:
+        special_tokens_dict = {'additional_special_tokens': [special_answer_token]}
+        if special_eo_answer_token is not None:
+            special_tokens_dict['additional_special_tokens'] += [special_eo_answer_token]
+
+        tokenizer.add_special_tokens(special_tokens_dict)
+        print("Adding special token:", special_tokens_dict)
+        print(tokenizer)
+
+    arg_opt = utils.AttrDict(config['optimizer'])
+    optimizer = create_optimizer(arg_opt, model, config=config['optimizer'])
+
+    if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None:
+        print('\tInitial other params lr: %f' % optimizer.param_groups[0]['lr'])
+        print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr'])
+
+    arg_sche = utils.AttrDict(config['schedular'])
+    lr_scheduler, _ = create_scheduler(arg_sche, optimizer)
+
+    best_valid = 0.
+    best_epoch = 0
+
+    if args.checkpoint:
+        checkpoint = torch.load(args.checkpoint, map_location='cpu')
+        state_dict = checkpoint['model']
+
+        msg = model.load_state_dict(state_dict, strict=False)
+        msg = filter_msg(msg, exclude_list)
+        print('load checkpoint from %s' % args.checkpoint)
+        print(msg)
+
+        if 'best_valid' in checkpoint:
+            print("load best valid {} at epoch {}".format(checkpoint['best_valid'], checkpoint['best_epoch']))
+
+        if args.resume:
+            model = model.to(device)
+            optimizer.load_state_dict(checkpoint['optimizer'])
+            lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
+            start_epoch = checkpoint['epoch']+1
+            print(checkpoint.keys())
+            if 'best_valid' in checkpoint:
+                best_valid = checkpoint['best_valid']
+                best_epoch = checkpoint['best_epoch']
+                print("load best valid {} at epoch {}".format(best_valid, best_epoch))
+
+    freeze_whole_model(model)
+    unfreeze_parameters(model, config)
+    print_trainable_params_percentage(model)
+
+    model_without_ddp = model
+    if args.distributed:
+        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
+        model_without_ddp = model.module
+
+    print("Start training")
+    start_time = time.time()
+
+    for epoch in range(start_epoch, max_epoch):
+        if epoch > 0:
+            if lr_scheduler is not None:
+                lr_scheduler.step(epoch+warmup_steps)
+
+        if not args.evaluate:
+            if args.distributed:
+                train_loader.sampler.set_epoch(epoch)
+
+            train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, lr_scheduler, config)
+
+        if args.evaluate:
+            break
+
+        score_dict = evaluate(model, val_loader, tokenizer, device, distributed=args.distributed,
+                              special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token)
+        print(score_dict)
+        if utils.is_main_process():
+            log_stats = {**{f'train_{k}': v for k, v in train_stats.items()},
+                         'epoch': epoch,
+                         }
+            with open(os.path.join(args.output_dir, "log.txt"), "a") as f:
+                f.write(json.dumps(log_stats) + "\n")
+
+            if lr_scheduler is None:
+                lr_scheduler_state_dict = {}
+            else:
+                lr_scheduler_state_dict = lr_scheduler.state_dict()
+            save_obj = {
+                'model': filter_state(model_without_ddp.state_dict(), exclude_list),
+                'optimizer': optimizer.state_dict(),
+                'lr_scheduler': lr_scheduler_state_dict,
+                'config': config,
+                'epoch': epoch,
+                'best_valid': best_valid,
+                'best_epoch': best_epoch,
+            }
+
+            if args.save_best:
+                valid_score = score_dict['topk_score'] * 100.
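+                # Best-checkpoint rule: topk_score is assumed to be a fraction in
+                # [0, 1], so e.g. 0.4321 is logged as 43.21; the best checkpoint is
+                # refreshed whenever this percentage improves on best_valid.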
+                if valid_score > best_valid or epoch == 0:
+                    best_valid = valid_score
+                    best_epoch = epoch
+
+                    save_obj['best_valid'] = best_valid
+                    save_obj['best_epoch'] = best_epoch
+
+                    print("save best epoch:", best_epoch)
+                    torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth'))
+
+            torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth'))
+
+        dist.barrier()
+
+    if lr_scheduler is None:
+        lr_scheduler_state_dict = {}
+    else:
+        lr_scheduler_state_dict = lr_scheduler.state_dict()
+    save_obj = {
+        'model': filter_state(model_without_ddp.state_dict(), exclude_list),
+        'optimizer': optimizer.state_dict(),
+        'lr_scheduler': lr_scheduler_state_dict,
+        'config': config,
+        'epoch': epoch,
+        'best_valid': best_valid,
+        'best_epoch': best_epoch,
+    }
+    torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth'))
+
+    verbose = utils.is_main_process()
+
+    if not args.evaluate:
+        checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu')
+        state_dict = checkpoint['model']
+        msg = model.module.load_state_dict(state_dict, strict=False)
+        msg = filter_msg(msg, exclude_list)
+        print('load checkpoint for test from %s' % os.path.join(args.output_dir, 'checkpoint_best.pth'))
+        print(msg)
+
+    quesid2ans = predict(model, test_loader, tokenizer, device, verbose=verbose,
+                         distributed=args.distributed, special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token)
+
+    evaluator = test_loader.evaluator
+    score_dict = evaluator.evaluate(quesid2ans, normalize_answer=True)
+
+    print("Test accuracy:", score_dict)
+
+    if args.submit:
+        dump_path = os.path.join(args.output_dir, 'submit.json')
+        predict(model, submit_test_loader, tokenizer, device, dump_path=dump_path, verbose=verbose,
+                distributed=args.distributed, special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token)
+
+    if args.distributed:
+        dist.barrier()
+        exit()
+
+    total_time = time.time() - start_time
+    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
+    print('Training time {}'.format(total_time_str))
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--config', default='./configs/VQA.yaml')
+    parser.add_argument('--checkpoint', default='')
+    parser.add_argument('--output_dir', default='output/vqa')
+    parser.add_argument('--evaluate', action='store_true')
+    parser.add_argument('--text_model', default='facebook/opt-350m')
+    parser.add_argument('--vision_model', default='vit_base_patch16_224')
+    parser.add_argument('--device', default='cuda')
+    parser.add_argument('--seed', default=42, type=int)
+    parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes')
+    parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
+    parser.add_argument('--distributed', default=True, type=bool)
+
+    parser.add_argument('--data_dir', default='/data/mshukor/data')
+    parser.add_argument('--resume', action='store_true')
+
+    parser.add_argument('--submit', action='store_true')
+    parser.add_argument('--save_best', action='store_true')
+
+    args = parser.parse_args()
+
+    config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader)
+
+    args.result_dir = os.path.join(args.output_dir, 'result')
+
+    Path(args.output_dir).mkdir(parents=True, exist_ok=True)
+    Path(args.result_dir).mkdir(parents=True, exist_ok=True)
+
+    yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w'))
+
+    main(args, config)
\ No newline at end of file
diff --git a/float32/vqa.py b/float32/vqa.py
new file mode 100644
index 
0000000000000000000000000000000000000000..e930ca49d242232e1ef0c9d7b2800213c9abe7c4 --- /dev/null +++ b/float32/vqa.py @@ -0,0 +1,577 @@ +import argparse +import os +import ruamel_yaml as yaml +import numpy as np +import random +import time +import datetime +import json +from pathlib import Path + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist + + +from models.epalm import ePALM +from models.utils import freeze_whole_model, unfreeze_parameters, print_trainable_params_percentage + +from models.utils import filter_state, filter_msg, exclude_list + + + +from transformers import AutoTokenizer + + +import utils + + +from dataset.vqa import get_loader + +from scheduler import create_scheduler +from optim import create_optimizer + + +from models.gptj_neo import get_tokenizer + +from tqdm import tqdm + + + +import re + +def train(model, data_loader, optimizer, tokenizer, epoch, warmup_steps, device, scheduler, config): + model.train() + + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('loss', utils.SmoothedValue(window_size=1, fmt='{value:.4f}')) + + config_optim = utils.AttrDict(config['optimizer']) + prompt_lr = config_optim.prompt_lr if hasattr(config_optim, 'prompt_lr') else None + connector_lr = config_optim.connector_lr if hasattr(config_optim, 'connector_lr') else None + vis_lr = config_optim.vis_lr if hasattr(config_optim, 'vis_lr') else None + text_lr = config_optim.text_lr if hasattr(config_optim, 'text_lr') else None + + print(vis_lr, text_lr, connector_lr, len(optimizer.param_groups)) + if prompt_lr is not None: + metric_logger.add_meter('prompt_lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + + + + header = 'Train Epoch: [{}]'.format(epoch) + print_freq = 50 + step_size = 100 + warmup_iterations = warmup_steps*step_size + lm_loss_weight = config.get('lm_loss_weight', 1) + special_answer_token = config.get('special_answer_token', None) + + special_eo_answer_token = config.get('special_eo_answer_token', None) + + + + eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token + + for i, batch in enumerate(metric_logger.log_every(data_loader, print_freq, header)): + + + image = batch['images'].to(device,non_blocking=True) + + question = batch['sent'] + + answer = batch['answers'] + + + questions_answers = [] + + + if special_answer_token is not None: + questions_answers += [question[i] + "?" 
+ special_answer_token + answer[i].replace('[SEP]','') + eos_token for i in range(len(question))]
+        else:
+            questions_answers += [question[i] + "</s>" + answer[i].replace('[SEP]','') + eos_token for i in range(len(question))]
+
+        questions_answers_input = tokenizer(questions_answers, padding='longest', return_tensors="pt").to(device)
+        answer_targets = questions_answers_input.input_ids.masked_fill(questions_answers_input.input_ids == tokenizer.pad_token_id, -100)
+
+        answer_output = model(image=image,
+                              text=questions_answers_input,
+                              labels=answer_targets,
+                              return_dict=True,
+                              mode='train',
+                              reduction='none',
+                              )
+
+        loss = answer_output.loss
+        loss = loss.sum()/image.size(0)
+        loss = loss*lm_loss_weight
+        optimizer.zero_grad()
+        loss.backward()
+        optimizer.step()
+
+        metric_logger.update(loss=loss.item())
+        metric_logger.update(lr=optimizer.param_groups[0]["lr"])
+        if prompt_lr is not None:
+            metric_logger.update(prompt_lr=optimizer.param_groups[1]["lr"])
+
+        if i % print_freq == 0:
+            lrs = [g["lr"] for g in optimizer.param_groups]
+            print(lrs)
+
+        if epoch==0 and i%step_size==0 and i<=warmup_iterations:
+            if scheduler is not None:
+                scheduler.step(i//step_size)
+    # gather the stats from all processes
+    metric_logger.synchronize_between_processes()
+    print("Averaged stats:", metric_logger.global_avg())
+    return {k: "{:.3f}".format(meter.global_avg) for k, meter in metric_logger.meters.items()}
+
+
+@torch.no_grad()
+def predict(model, loader, tokenizer, device, dump_path=None, verbose=False, distributed=False, special_answer_token=None, special_eo_answer_token=None):
+    model.eval()
+    eos_token = tokenizer.eos_token if special_eo_answer_token is None else special_eo_answer_token
+    pad_token = tokenizer.pad_token
+    print('pad_token', pad_token)
+
+    with torch.no_grad():
+        quesid2ans = {}
+        if verbose:
+            pbar = tqdm(total=len(loader), ncols=120, desc="Prediction")
+        for i, batch in enumerate(loader):
+
+            image = batch['images'].to(device, non_blocking=True)
+            question = batch['sent']
+            question_id = batch['question_ids']
+
+            if special_answer_token is not None:
+                question = [q+'?'+special_answer_token for q in question]
+            else:
+                question = [q+eos_token for q in question]
+
+            question_input = tokenizer(question, padding='longest', return_tensors="pt").to(device)
+
+            out = model(image=image, text=question_input, mode='generate', return_dict=True, max_length=30, do_sample=True)
+
+            for ques_id, o in zip(question_id, out):
+                o_list = o.tolist()
+                try:
+                    if special_answer_token is not None:
+                        response = tokenizer.decode(o_list).split(special_answer_token)[1].replace(pad_token, '').replace('</s>', '').replace(eos_token, '')  # skip_special_tokens=True
+                    else:
+                        response = tokenizer.decode(o_list).split('</s>')[2].replace(pad_token, '').replace('</s>', '').replace(eos_token, '')  # skip_special_tokens=True
+                except TypeError:
+                    print(o_list)
+                    response = ' '
+
+                ques_id = int(ques_id)
+                quesid2ans[ques_id] = response
+
+            if verbose:
+                pbar.update(1)
+        if verbose:
+            pbar.close()
+
+        if distributed:
+            dist.barrier()
+
+        qid2ans_list = utils.all_gather(quesid2ans)
+        if verbose:
+            quesid2ans = {}
+            for qid2ans in qid2ans_list:
+                for k, v in qid2ans.items():
+                    quesid2ans[k] = v
+
+            if dump_path is not None:
+                evaluator = loader.evaluator
+                evaluator.dump_result(quesid2ans, dump_path)
+
+    return quesid2ans
+
+
+def evaluate(model, data_loader, tokenizer, device,
+             distributed=False, special_answer_token=None, special_eo_answer_token=None):
+    verbose = utils.is_main_process()
+
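+    # predict() runs on every rank; per-rank results are synchronized with
+    # dist.barrier() and merged via utils.all_gather, so only the main process
+    # needs to report progress and score the merged predictions.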
+ + quesid2ans = predict(model, data_loader, tokenizer, device, verbose=verbose, + distributed=distributed, special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token) + + evaluator = data_loader.evaluator + score_dict = evaluator.evaluate(quesid2ans) + + + acc_dict = evaluator.evaluate_raw(quesid2ans) + topk_score = evaluator.evaluate(quesid2ans) + acc_dict['topk_score'] = topk_score + return acc_dict + + + + + + + + +def main(args, config): + os.environ['TORCH_HOME'] = os.environ['XDG_CACHE_HOME']+'/torch' + utils.init_distributed_mode(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + cudnn.benchmark = True + + start_epoch = 0 + max_epoch = config['schedular']['epochs'] + warmup_steps = config['schedular']['warmup_epochs'] + + print(args) + #### Dataset #### + + print("Creating dataset") + + + if args.distributed: + num_tasks = utils.get_world_size() + global_rank = utils.get_rank() + else: + num_tasks = None + global_rank = None + + num_workers = config.get('num_workers', 4) + train_topk = config.get('train_topk', -1) + valid_topk = config.get('valid_topk', -1) + data_dir = args.data_dir + + args.image_size = config.get('image_res', 224) + args.use_data_augmentation = True + + black_image = config.get('black_image', False) + + print("black image:", black_image) + + train_split = config.get('train_split', 'karpathy_train') + val_split = config.get('val_split', 'karpathy_val') + test_split = config.get('test_split', 'karpathy_test') + + balanced_data = config.get('balanced_data', False) + + seed = config.get('seed', 42) + + train_loader = get_loader( + args, + split=train_split, mode='train', batch_size=config['batch_size_train'], + distributed=args.distributed, + workers=num_workers, + topk=train_topk, + data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, + verbose=True, black_image=black_image,balanced_data=balanced_data,seed=seed, + ) + + args.raw_label = False + print('# len train loader:', len(train_loader)) + print(f'Building val loader') + val_loader = get_loader( + args, + split=val_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=valid_topk,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, + verbose=True, black_image=black_image, seed=seed + ) + print('# len val loader:', len(val_loader)) + + print(f'Building test loader') + test_loader = get_loader( + args, + split=test_split, mode='val', batch_size=config['batch_size_test'], + distributed=args.distributed, + workers=4, + topk=-1,data_dir=data_dir, + local_rank=global_rank, world_size=num_tasks, + verbose=True, black_image=black_image, seed=seed + ) + + + print('# len test loader:', len(test_loader)) + + + + #### Model #### + print("Creating model") + + start_layer_idx = config.get('start_layer_idx', 0) + end_layer_idx = config.get('end_layer_idx', 0) + + + vision_model_name = config.get('vision_model_name', args.vision_model) + + model = ePALM(opt_model_name = args.text_model, + vision_model_name = vision_model_name, + use_vis_prefix = True, + start_layer_idx = start_layer_idx, + end_layer_idx = end_layer_idx, + return_hidden_state_vision = True, + config=config, + ) + + + + + # tokenizer + + tokenizer_name = config.get('tokenizer_name', args.text_model) + + tokenizer = AutoTokenizer.from_pretrained(tokenizer_name, use_fast=False, local_files_only=True) + + + + 
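+    # The special answer token (if configured) is appended to each question and
+    # used to split the generated sequence at decode time. A minimal sketch of
+    # the same mechanism with a plain HF model (names here are illustrative,
+    # e.g. '<answer>' stands for the configured special_answer_token):
+    #     tokenizer.add_special_tokens({'additional_special_tokens': ['<answer>']})
+    #     model.resize_token_embeddings(len(tokenizer))
+    # eP-ALM is assumed to handle the embedding resize inside the model / the
+    # loaded checkpoint.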
special_answer_token = config.get('special_answer_token', None)
+    special_eo_answer_token = config.get('special_eo_answer_token', None)
+
+    if special_answer_token is not None:
+        special_tokens_dict = {'additional_special_tokens': [special_answer_token]}
+        if special_eo_answer_token is not None:
+            special_tokens_dict['additional_special_tokens'] += [special_eo_answer_token]
+
+        tokenizer.add_special_tokens(special_tokens_dict)
+        print("Adding special token:", special_tokens_dict)
+        print(tokenizer)
+
+    arg_opt = utils.AttrDict(config['optimizer'])
+    optimizer = create_optimizer(arg_opt, model, config=config['optimizer'])
+
+    if hasattr(arg_opt, 'prompt_lr') and arg_opt.prompt_lr is not None:
+        print('\tInitial other params lr: %f' % optimizer.param_groups[0]['lr'])
+        print('\tInitial prompt params lr: %f' % optimizer.param_groups[1]['lr'])
+
+    arg_sche = utils.AttrDict(config['schedular'])
+    lr_scheduler, _ = create_scheduler(arg_sche, optimizer)
+
+    best_valid = 0.
+    best_epoch = 0
+
+    if args.checkpoint:
+        checkpoint = torch.load(args.checkpoint, map_location='cpu')
+        state_dict = checkpoint['model']
+
+        msg = model.load_state_dict(state_dict, strict=False)
+        msg = filter_msg(msg, exclude_list)
+        print('load checkpoint from %s' % args.checkpoint)
+        print(msg)
+
+        if args.resume:
+            model = model.to(device)
+            optimizer.load_state_dict(checkpoint['optimizer'])
+            lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
+            start_epoch = checkpoint['epoch']+1
+            print(checkpoint.keys())
+            if 'best_valid' in checkpoint:
+                best_valid = checkpoint['best_valid']
+                best_epoch = checkpoint['best_epoch']
+                print("load best valid {} at epoch {}".format(best_valid, best_epoch))
+
+    freeze_whole_model(model)
+    unfreeze_parameters(model, config)
+
+    model = model.to(device)
+
+    print_trainable_params_percentage(model)
+
+    model_without_ddp = model
+    if args.distributed:
+        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
+        model_without_ddp = model.module
+
+    print("Start training")
+    start_time = time.time()
+
+    for epoch in range(start_epoch, max_epoch):
+        if epoch > 0:
+            if lr_scheduler is not None:
+                lr_scheduler.step(epoch+warmup_steps)
+
+        if not args.evaluate:
+            if args.distributed:
+                train_loader.sampler.set_epoch(epoch)
+
+            train_stats = train(model, train_loader, optimizer, tokenizer, epoch, warmup_steps, device, lr_scheduler, config)
+
+        if args.evaluate:
+            break
+
+        score_dict = evaluate(model, val_loader, tokenizer, device, distributed=args.distributed,
+                              special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token)
+        print(score_dict)
+        if utils.is_main_process():
+            log_stats = {**{f'train_{k}': v for k, v in train_stats.items()},
+                         'epoch': epoch,
+                         }
+            with open(os.path.join(args.output_dir, "log.txt"), "a") as f:
+                f.write(json.dumps(log_stats) + "\n")
+
+            if lr_scheduler is None:
+                lr_scheduler_state_dict = {}
+            else:
+                lr_scheduler_state_dict = lr_scheduler.state_dict()
+
+            save_obj = {
+                'model': filter_state(model_without_ddp.state_dict(), exclude_list),
+                'optimizer': optimizer.state_dict(),
+                'lr_scheduler': lr_scheduler_state_dict,
+                'config': config,
+                'epoch': epoch,
+                'best_valid': score_dict['overall'],
+                'best_epoch': epoch,
+            }
+
+            if args.save_best:
+                valid_score = score_dict['topk_score'] * 100.
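+                # Two validation scores are available here: the scaled top-k score
+                # above and the raw VQA accuracy ('overall') below; the raw accuracy
+                # is what decides the best checkpoint.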
+ valid_score_raw = score_dict['overall'] + if valid_score_raw > best_valid or epoch == 0: + best_valid = valid_score_raw + best_epoch = epoch + + print("save best epoch:", best_epoch) + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_best.pth')) + + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + dist.barrier() + + if lr_scheduler is None: + lr_scheduler_state_dict = {} + else: + lr_scheduler_state_dict = lr_scheduler.state_dict() + save_obj = { + 'model': filter_state(model_without_ddp.state_dict(), exclude_list), + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler_state_dict, + 'config': config, + 'epoch': epoch, + 'best_valid': best_valid, + 'best_epoch': best_epoch, + } + torch.save(save_obj, os.path.join(args.output_dir, 'checkpoint_last.pth')) + + verbose = utils.is_main_process() + + + ### test best model + if not args.evaluate: + checkpoint = torch.load(os.path.join(args.output_dir, 'checkpoint_best.pth'), map_location='cpu') + state_dict = checkpoint['model'] + msg = model.module.load_state_dict(state_dict,strict=False) + msg = filter_msg(msg, exclude_list) + print('load checkpoint for test from', args.output_dir, 'checkpoint_best.pth') + print(msg) + + quesid2ans = predict(model, test_loader, tokenizer, device, verbose=verbose, + distributed=args.distributed, special_answer_token=special_answer_token, special_eo_answer_token=special_eo_answer_token) + + evaluator = test_loader.evaluator + score_dict = evaluator.evaluate(quesid2ans) + + + acc_dict_all = evaluator.evaluate_raw(quesid2ans) + acc_dict_answerable = evaluator.evaluate_raw(quesid2ans, is_topk_optimal=True) + acc_dict_unanswerable = evaluator.evaluate_raw(quesid2ans, is_topk_optimal=False) + + wandb_log_dict = {} + wandb_log_dict['Test/overall'] = acc_dict_all['overall'] + wandb_log_dict['Test/topk_optimal'] = acc_dict_answerable['overall'] + wandb_log_dict['Test/topk_not_optimal'] = acc_dict_unanswerable['overall'] + + for qtype, score in acc_dict_all['perQuestionType'].items(): + wandb_log_dict[f'Test_Qtypes/{qtype}'] = score + for atype, score in acc_dict_all['perAnswerType'].items(): + if atype == 'yes/no': + atype = 'yes_no' + wandb_log_dict[f'Test_Atypes/{atype}'] = score + + print(wandb_log_dict) + print('best epoch:', best_epoch) + + + if args.distributed: + dist.barrier() + exit() + + + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Training time {}'.format(total_time_str)) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='./configs/VQA.yaml') + parser.add_argument('--checkpoint', default='') + parser.add_argument('--output_dir', default='output/vqa') + parser.add_argument('--evaluate', action='store_true') + parser.add_argument('--text_model', default='facebook/opt-350m') + parser.add_argument('--vision_model', default='vit_base_patch16_224') + parser.add_argument('--device', default='cuda') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') + parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') + parser.add_argument('--distributed', default=True, type=bool) + + parser.add_argument('--data_dir', default='/data/mshukor/data') + parser.add_argument('--resume', action='store_true') + + parser.add_argument('--save_best', action='store_true') + + + + + args = 
parser.parse_args() + + config = yaml.load(open(args.config, 'r'), Loader=yaml.Loader) + + args.result_dir = os.path.join(args.output_dir, 'result') + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + Path(args.result_dir).mkdir(parents=True, exist_ok=True) + + yaml.dump(config, open(os.path.join(args.output_dir, 'config.yaml'), 'w')) + + main(args, config) \ No newline at end of file diff --git a/images/epalm_sota.png b/images/epalm_sota.png new file mode 100644 index 0000000000000000000000000000000000000000..bf1d330b3debb781628f86ed38a8f9ddb7dc7577 Binary files /dev/null and b/images/epalm_sota.png differ diff --git a/images/qual.jpg b/images/qual.jpg new file mode 100644 index 0000000000000000000000000000000000000000..17e2de6df8a76a5c3abb31136337a828081ad11f Binary files /dev/null and b/images/qual.jpg differ diff --git a/images/teaser.jpg b/images/teaser.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3c5f61bdf6bdbad17eb16b3be698c20d81f00ff8 Binary files /dev/null and b/images/teaser.jpg differ diff --git a/images/variants.jpg b/images/variants.jpg new file mode 100644 index 0000000000000000000000000000000000000000..11273d8f8bd9d687df67c3bce9627ee768cd4509 Binary files /dev/null and b/images/variants.jpg differ diff --git a/models/.ipynb_checkpoints/opt-checkpoint.py b/models/.ipynb_checkpoints/opt-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..4ecea6841ddff3fe12baf6ed470f00db1fb7789f --- /dev/null +++ b/models/.ipynb_checkpoints/opt-checkpoint.py @@ -0,0 +1,1124 @@ +# coding=utf-8 +# Copyright 2022 The Fairseq Authors and The HuggingFace Inc. team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
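+#
+# Note on this copy: it is a lightly modified version of the Hugging Face OPT
+# implementation. The eP-ALM changes thread three extra arguments through the
+# decoder stack -- vis_prefix (visual tokens injected into selected layers),
+# prompt_embeds (a trainable soft prompt prepended to the input embeddings),
+# and connector (projection modules applied to the visual tokens).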
+""" PyTorch OPT model.""" +import random +from typing import List, Optional, Tuple, Union + +import torch +import torch.utils.checkpoint +from torch import nn +from torch.nn import CrossEntropyLoss + +from transformers.activations import ACT2FN +from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast +from transformers.modeling_utils import PreTrainedModel +from transformers.utils import ( + add_code_sample_docstrings, + add_start_docstrings, + add_start_docstrings_to_model_forward, + logging, + replace_return_docstrings, +) +from transformers.models.opt.configuration_opt import OPTConfig +import torch.nn.functional as F + +import numpy as np +logger = logging.get_logger(__name__) + +_CHECKPOINT_FOR_DOC = "facebook/opt-350m" +_CONFIG_FOR_DOC = "OPTConfig" +_TOKENIZER_FOR_DOC = "GPT2Tokenizer" + +# Base model docstring +_EXPECTED_OUTPUT_SHAPE = [1, 8, 1024] + + +OPT_PRETRAINED_MODEL_ARCHIVE_LIST = [ + "facebook/opt-125m", + "facebook/opt-350m", + "facebook/opt-1.3b", + "facebook/opt-2.7b", + "facebook/opt-6.7b", + "facebook/opt-13b", + "facebook/opt-30b", + # See all OPT models at https://huggingface.co/models?filter=opt +] + +def tile(x, dim, n_tile): + init_dim = x.size(dim) + repeat_idx = [1] * x.dim() + repeat_idx[dim] = n_tile + x = x.repeat(*(repeat_idx)) + order_index = torch.LongTensor(np.concatenate([init_dim * np.arange(n_tile) + i for i in range(init_dim)])) + return torch.index_select(x, dim, order_index.to(x.device)) + + +def _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0): + """ + Make causal mask used for bi-directional self-attention. + """ + bsz, tgt_len = input_ids_shape + mask = torch.full((tgt_len, tgt_len), torch.tensor(float("-inf"))) + mask_cond = torch.arange(mask.size(-1)) + mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0) + mask = mask.to(dtype) + + if past_key_values_length > 0: + mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1) + return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length) + + +def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None): + """ + Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. + """ + bsz, src_len = mask.size() + tgt_len = tgt_len if tgt_len is not None else src_len + + expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype) + + inverted_mask = 1.0 - expanded_mask + + return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min) + + +class OPTLearnedPositionalEmbedding(nn.Embedding): + """ + This module learns positional embeddings up to a fixed maximum size. + """ + + def __init__(self, num_embeddings: int, embedding_dim: int): + # OPT is set up so that if padding_idx is specified then offset the embedding ids by 2 + # and adjust num_embeddings appropriately. 
Other models don't have this hack + self.offset = 2 + super().__init__(num_embeddings + self.offset, embedding_dim) + + def forward(self, attention_mask: torch.LongTensor, past_key_values_length: int = 0): + """`input_ids_shape` is expected to be [bsz x seqlen].""" + attention_mask = attention_mask.long() + + # create positions depending on attention_mask + positions = (torch.cumsum(attention_mask, dim=1).type_as(attention_mask) * attention_mask).long() - 1 + + # cut positions if `past_key_values_length` is > 0 + positions = positions[:, past_key_values_length:] + + return super().forward(positions + self.offset) + + +# Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->OPT +class OPTAttention(nn.Module): + """Multi-headed attention from 'Attention Is All You Need' paper""" + + def __init__( + self, + embed_dim: int, + num_heads: int, + dropout: float = 0.0, + is_decoder: bool = False, + bias: bool = True, + ): + super().__init__() + self.embed_dim = embed_dim + self.num_heads = num_heads + self.dropout = dropout + self.head_dim = embed_dim // num_heads + + if (self.head_dim * num_heads) != self.embed_dim: + raise ValueError( + f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}" + f" and `num_heads`: {num_heads})." + ) + self.scaling = self.head_dim**-0.5 + self.is_decoder = is_decoder + + self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + + def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): + return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous() + + def forward( + self, + hidden_states: torch.Tensor, + key_value_states: Optional[torch.Tensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + attention_mask: Optional[torch.Tensor] = None, + layer_head_mask: Optional[torch.Tensor] = None, + output_attentions: bool = False, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + """Input shape: Batch x Time x Channel""" + + # if key_value_states are provided this layer is used as a cross-attention layer + # for the decoder + is_cross_attention = key_value_states is not None + + bsz, tgt_len, _ = hidden_states.size() + + # get query proj + query_states = self.q_proj(hidden_states) * self.scaling + # get key, value proj + if is_cross_attention and past_key_value is not None: + # reuse k,v, cross_attentions + key_states = past_key_value[0] + value_states = past_key_value[1] + elif is_cross_attention: + # cross_attentions + key_states = self._shape(self.k_proj(key_value_states), -1, bsz) + value_states = self._shape(self.v_proj(key_value_states), -1, bsz) + elif past_key_value is not None: + # reuse k, v, self_attention + key_states = self._shape(self.k_proj(hidden_states), -1, bsz) + value_states = self._shape(self.v_proj(hidden_states), -1, bsz) + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + else: + # self_attention + key_states = self._shape(self.k_proj(hidden_states), -1, bsz) + value_states = self._shape(self.v_proj(hidden_states), -1, bsz) + + if self.is_decoder: + # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states. 
+ # Further calls to cross_attention layer can then reuse all cross-attention + # key/value_states (first "if" case) + # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of + # all previous decoder key/value_states. Further calls to uni-directional self-attention + # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) + # if encoder bi-directional self-attention `past_key_value` is always `None` + past_key_value = (key_states, value_states) + + proj_shape = (bsz * self.num_heads, -1, self.head_dim) + query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape) + key_states = key_states.view(*proj_shape) + value_states = value_states.view(*proj_shape) + + src_len = key_states.size(1) + attn_weights = torch.bmm(query_states, key_states.transpose(1, 2)) + + if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len): + raise ValueError( + f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is" + f" {attn_weights.size()}" + ) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, tgt_len, src_len): + raise ValueError( + f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}" + ) + attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask + attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) + + attn_weights = nn.functional.softmax(attn_weights, dim=-1) + + if layer_head_mask is not None: + if layer_head_mask.size() != (self.num_heads,): + raise ValueError( + f"Head mask for a single layer should be of size {(self.num_heads,)}, but is" + f" {layer_head_mask.size()}" + ) + attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) + + if output_attentions: + # this operation is a bit awkward, but it's required to + # make sure that attn_weights keeps its gradient. + # In order to do so, attn_weights have to be reshaped + # twice and have to be reused in the following + attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len) + else: + attn_weights_reshaped = None + + attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training) + + attn_output = torch.bmm(attn_probs, value_states) + + if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim): + raise ValueError( + f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is" + f" {attn_output.size()}" + ) + + attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim) + attn_output = attn_output.transpose(1, 2) + + # Use the `embed_dim` from the config (stored in the class) rather than `hidden_state` because `attn_output` can be + # partitioned aross GPUs when using tensor-parallelism. 
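+        # Shape note: after the transpose above, attn_output is
+        # (bsz, tgt_len, num_heads, head_dim); the reshape below merges the last
+        # two dims back into embed_dim = num_heads * head_dim before out_proj.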
+ attn_output = attn_output.reshape(bsz, tgt_len, self.embed_dim) + + attn_output = self.out_proj(attn_output) + + return attn_output, attn_weights_reshaped, past_key_value + + +class OPTDecoderLayer(nn.Module): + def __init__(self, config: OPTConfig): + super().__init__() + self.embed_dim = config.hidden_size + self.self_attn = OPTAttention( + embed_dim=self.embed_dim, + num_heads=config.num_attention_heads, + dropout=config.attention_dropout, + is_decoder=True, + ) + self.do_layer_norm_before = config.do_layer_norm_before + self.dropout = config.dropout + self.activation_fn = ACT2FN[config.activation_function] + + self.activation_dropout = config.activation_dropout + + self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim) + self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim) + self.fc2 = nn.Linear(config.ffn_dim, self.embed_dim) + self.final_layer_norm = nn.LayerNorm(self.embed_dim) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + layer_head_mask: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = False, + use_cache: Optional[bool] = False, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]: + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): attention mask of size + `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. + layer_head_mask (`torch.FloatTensor`, *optional*): mask for attention heads in a given layer of size + `(encoder_attention_heads,)`. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). 
+ past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states + """ + + residual = hidden_states + + # 125m, 1.7B, ..., 175B applies layer norm BEFORE attention + if self.do_layer_norm_before: + hidden_states = self.self_attn_layer_norm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.self_attn( + hidden_states=hidden_states, + past_key_value=past_key_value, + attention_mask=attention_mask, + layer_head_mask=layer_head_mask, + output_attentions=output_attentions, + ) + hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) + hidden_states = residual + hidden_states + + # 350m applies layer norm AFTER attention + if not self.do_layer_norm_before: + hidden_states = self.self_attn_layer_norm(hidden_states) + + # Fully Connected + hidden_states_shape = hidden_states.shape + hidden_states = hidden_states.reshape(-1, hidden_states.size(-1)) + residual = hidden_states + + # 125m, 1.7B, ..., 175B applies layer norm BEFORE attention + if self.do_layer_norm_before: + hidden_states = self.final_layer_norm(hidden_states) + + hidden_states = self.fc1(hidden_states) + hidden_states = self.activation_fn(hidden_states) + + hidden_states = self.fc2(hidden_states) + hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) + + hidden_states = (residual + hidden_states).view(hidden_states_shape) + + # 350m applies layer norm AFTER attention + if not self.do_layer_norm_before: + hidden_states = self.final_layer_norm(hidden_states) + + outputs = (hidden_states,) + + if output_attentions: + outputs += (self_attn_weights,) + + if use_cache: + outputs += (present_key_value,) + + return outputs + + +OPT_START_DOCSTRING = r""" + This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the + library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads + etc.) + + This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. + Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage + and behavior. + + Parameters: + config ([`OPTConfig`]): + Model configuration class with all the parameters of the model. Initializing with a config file does not + load the weights associated with the model, only the configuration. Check out the + [`~PreTrainedModel.from_pretrained`] method to load the model weights. 
+""" + + +@add_start_docstrings( + "The bare OPT Model outputting raw hidden-states without any specific head on top.", + OPT_START_DOCSTRING, +) +class OPTPreTrainedModel(PreTrainedModel): + config_class = OPTConfig + base_model_prefix = "model" + supports_gradient_checkpointing = True + _no_split_modules = ["OPTDecoderLayer"] + _keys_to_ignore_on_load_unexpected = [r"decoder.version"] + + def _init_weights(self, module): + std = self.config.init_std + if isinstance(module, nn.Linear): + module.weight.data.normal_(mean=0.0, std=std) + if module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.Embedding): + module.weight.data.normal_(mean=0.0, std=std) + if module.padding_idx is not None: + module.weight.data[module.padding_idx].zero_() + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance(module, (OPTDecoder)): + module.gradient_checkpointing = value + + +OPT_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`GPT2Tokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + + Indices can be obtained using [`OPTTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + If `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see + `past_key_values`). + + If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`] + and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more + information on the default strategy. + head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*): + Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in `[0, 1]`: + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape + `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape + `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. + + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention + blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that + don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all + `decoder_input_ids` of shape `(batch_size, sequence_length)`. 
+ inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This + is useful if you want more control over how to convert `input_ids` indices into associated vectors than the + model's internal embedding lookup matrix. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see + `past_key_values`). + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + + +class OPTDecoder(OPTPreTrainedModel): + """ + Transformer decoder consisting of *config.num_hidden_layers* layers. Each layer is a [`OPTDecoderLayer`] + + Args: + config: OPTConfig + embed_tokens (nn.Embedding): output embedding + """ + + def __init__(self, config: OPTConfig): + super().__init__(config) + self.dropout = config.dropout + self.layerdrop = config.layerdrop + self.padding_idx = config.pad_token_id + self.max_target_positions = config.max_position_embeddings + self.vocab_size = config.vocab_size + + self.embed_tokens = nn.Embedding(config.vocab_size, config.word_embed_proj_dim, self.padding_idx) + self.embed_positions = OPTLearnedPositionalEmbedding(config.max_position_embeddings, config.hidden_size) + + if config.word_embed_proj_dim != config.hidden_size: + self.project_out = nn.Linear(config.hidden_size, config.word_embed_proj_dim, bias=False) + else: + self.project_out = None + + if config.word_embed_proj_dim != config.hidden_size: + self.project_in = nn.Linear(config.word_embed_proj_dim, config.hidden_size, bias=False) + else: + self.project_in = None + + # Note that the only purpose of `config._remove_final_layer_norm` is to keep backward compatibility + # with checkpoints that have been fine-tuned before transformers v4.20.1 + # see https://github.com/facebookresearch/metaseq/pull/164 + if config.do_layer_norm_before and not config._remove_final_layer_norm: + self.final_layer_norm = nn.LayerNorm(config.hidden_size) + else: + self.final_layer_norm = None + + self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) + + self.gradient_checkpointing = False + # Initialize weights and apply final processing + self.post_init() + + if hasattr(config, 'use_vis_prefix'): + self.use_vis_prefix = config.use_vis_prefix + self.start_layer_idx = config.start_layer_idx + self.end_layer_idx = config.end_layer_idx + else: + self.use_vis_prefix = False + self.replace_added_tokens = config.replace_added_tokens if hasattr(config, 'replace_added_tokens') else False + + self.vis_pref_mode = config.vis_pref_mode if hasattr(config, 'vis_pref_mode') else 'cat' + + self.connector_per_text_layer = config.connector_per_text_layer if hasattr(config, 'connector_per_text_layer') else False + + self.text_step = config.text_step if hasattr(config, 'text_step') else 1 + self.select_higher_step = config.select_higher_step if hasattr(config, 'select_higher_step') else False + + print('use_vis_prefix: ', self.use_vis_prefix) + print('replace_added_tokens', self.replace_added_tokens) + 
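+        # vis_pref_mode selects how visual tokens enter a layer: 'cat' prepends
+        # them to the hidden-state sequence (padding the attention mask to match),
+        # while 'addition' adds them element-wise; text_step controls at which
+        # layer interval the prefix is injected.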
print('vis_pref_mode', self.vis_pref_mode) + print('connector_per_text_layer', self.connector_per_text_layer) + + def get_input_embeddings(self): + return self.embed_tokens + + def set_input_embeddings(self, value): + self.embed_tokens = value + + # Copied from transformers.models.bart.modeling_bart.BartDecoder._prepare_decoder_attention_mask + def _prepare_decoder_attention_mask(self, attention_mask, input_shape, inputs_embeds, past_key_values_length): + # create causal mask + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + combined_attention_mask = None + if input_shape[-1] > 1: + combined_attention_mask = _make_causal_mask( + input_shape, inputs_embeds.dtype, past_key_values_length=past_key_values_length + ).to(inputs_embeds.device) + + if attention_mask is not None: + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + expanded_attn_mask = _expand_mask(attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1]) + combined_attention_mask = ( + expanded_attn_mask if combined_attention_mask is None else expanded_attn_mask + combined_attention_mask + ) + + return combined_attention_mask + + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + head_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + vis_prefix = None, + prompt_embeds = None, + connector = None + ) -> Union[Tuple, BaseModelOutputWithPast]: + r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you + provide it. + + Indices can be obtained using [`OPTTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + head_mask (`torch.Tensor` of shape `(num_hidden_layers, num_attention_heads)`, *optional*): + Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of + shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of + + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the + cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those + that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of + all `decoder_input_ids` of shape `(batch_size, sequence_length)`. 
+ + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. + This is useful if you want more control over how to convert `input_ids` indices into associated vectors + than the model's internal embedding lookup matrix. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors + for more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. + """ + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # retrieve input_ids and inputs_embeds + if input_ids is not None and inputs_embeds is not None: + raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time") + elif input_ids is not None: + input_shape = input_ids.size() + input_ids = input_ids.view(-1, input_shape[-1]) + elif inputs_embeds is not None: + input_shape = inputs_embeds.size()[:-1] + else: + raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds") + + past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 + + if inputs_embeds is None: + inputs_embeds = self.embed_tokens(input_ids) + # embed positions + if attention_mask is None: + attention_mask = torch.ones(inputs_embeds.shape[:2], dtype=torch.bool, device=inputs_embeds.device) + + # # is still necessary + # if self.use_vis_prefix and vis_prefix is not None: # in case we add prompt to the first layer, to avoid [bs, 0, dim] in pos_embeds + # # another solution is to add the prompt to the second layer + # len_att = attention_mask.shape[-1] + # if (len_att <= past_key_values_length): #and self.start_layer_idx == 0: + # # print(attention_mask) + # l = inputs_embeds.shape[1] + # attention_mask = F.pad(attention_mask, (past_key_values_length - len_att + l, 0, 0, 0), "constant", 1) + + + + pos_embeds = self.embed_positions(attention_mask, past_key_values_length) + ## Prompt tuning + if prompt_embeds is not None and past_key_values_length == 0: # in case of generation don't re add the prompt + # prompt_embeds = prompt_embeds.unsqueeze(0).expand(inputs_embeds.shape[0], -1, -1).to(inputs_embeds.device) + prompt_len = prompt_embeds.shape[1] + inputs_embeds = torch.cat((prompt_embeds, inputs_embeds), dim=1) + attention_mask = F.pad(attention_mask, (prompt_len, 0, 0, 0), "constant", 1) + input_shape = attention_mask.size() + + attention_mask = self._prepare_decoder_attention_mask( + attention_mask, input_shape, inputs_embeds, past_key_values_length + ) + + if self.project_in is not None: + inputs_embeds = self.project_in(inputs_embeds) + + if prompt_embeds is not None and past_key_values_length == 0:# no positional embedding for prompts + hidden_states = inputs_embeds + hidden_states[:, prompt_len:, :] = inputs_embeds[:, prompt_len:, :] + pos_embeds + 
else: + hidden_states = inputs_embeds + pos_embeds + hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) + + + + + # print(attention_mask.shape, inputs_embeds.shape, past_key_values_length, hidden_states.shape, pos_embeds.shape) + # decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + next_decoder_cache = () if use_cache else None + + # check if head_mask has a correct number of layers specified if desired + for attn_mask, mask_name in zip([head_mask], ["head_mask"]): + if attn_mask is not None: + if attn_mask.size()[0] != (len(self.layers)): + raise ValueError( + f"The `{mask_name}` should be specified for {len(self.layers)} layers, but it is for" + f" {head_mask.size()[0]}." + ) + + if self.use_vis_prefix and vis_prefix is not None: + num_layers = self.end_layer_idx - self.start_layer_idx + if isinstance(vis_prefix, list) or isinstance(vis_prefix, tuple): + num_vis_prefix = len(vis_prefix) + else: + num_vis_prefix = 1 + + if num_vis_prefix == 1: + step = 1 + else: + step = num_layers//num_vis_prefix if num_layers>num_vis_prefix else 1 + + if self.select_higher_step: + self.text_step = max([step, self.text_step]) + token_added = False + for idx, decoder_layer in enumerate(self.layers): + # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) + if output_hidden_states: + all_hidden_states += (hidden_states,) + + dropout_probability = random.uniform(0, 1) + if self.training and (dropout_probability < self.layerdrop): + continue + + past_key_value = past_key_values[idx] if past_key_values is not None else None + + + ################################################################ + ### Add prefix from visual encoder and attention mask + ## same vis embedd or embed a different layers + tgt_len, src_len = attention_mask.shape[2:] + # don't use vis pref during generation of new tokens + # print(idx, attention_mask.shape, hidden_states.shape) + if self.use_vis_prefix and vis_prefix is not None: + if idx >= self.start_layer_idx and idx < self.end_layer_idx: + + if (idx)%self.text_step==0: + + vis_prefix_idx = ((idx - self.start_layer_idx)//step) + text_idx = ((idx - self.start_layer_idx)//self.text_step) + + if num_vis_prefix == 1: + vis_pref = vis_prefix[0] + else: + vis_pref = vis_prefix[vis_prefix_idx] + + if self.connector_per_text_layer: + vis_pref = connector[text_idx](vis_pref[:, 0, :]).unsqueeze(1) + # print(vis_pref.shape, idx, vis_prefix_idx, text_idx) + bs, l, dim = vis_pref.size() + + bs_v, bs_t = vis_pref.shape[0], hidden_states.shape[0] + if bs_v != bs_t: + vis_pref = tile(vis_pref, 0, bs_t//bs_v) + if self.vis_pref_mode == 'addition': + vis_pref_len = vis_pref.shape[1] + hidden_states_len = hidden_states.shape[1] + if hidden_states_len > vis_pref_len: + num_repeat = hidden_states_len - vis_pref_len + 1 + hidden_states = hidden_states + vis_pref.repeat(1, num_repeat, 1)[:, :hidden_states_len, :] + else: + hidden_states = hidden_states + vis_pref[:, :hidden_states_len, :] + else: + if self.replace_added_tokens and token_added: + token_added = True + hidden_states[:, 0, :] = vis_pref[:, 0, :] + else: + if (tgt_len == src_len): + hidden_states = torch.cat((vis_pref, hidden_states), dim=1) # (bs, l, dim) + attention_mask = F.pad(attention_mask, (l, 0, l, 0, 0, 0, 0, 0), "constant", 0) + elif idx > 0: # during generation + attention_mask = F.pad(attention_mask, (l, 0, 0, 0, 0, 0, 0, 0), "constant", 0) + token_added = True + + # print('after', idx, 
attention_mask.shape, hidden_states.shape) + ################################################################ + + if self.gradient_checkpointing and self.training: + + if use_cache: + logger.warning( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." + ) + use_cache = False + + def create_custom_forward(module): + def custom_forward(*inputs): + # None for past_key_value + return module(*inputs, output_attentions, None) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(decoder_layer), + hidden_states, + attention_mask, + head_mask[idx] if head_mask is not None else None, + None, + ) + else: + + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + layer_head_mask=(head_mask[idx] if head_mask is not None else None), + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + hidden_states = layer_outputs[0] + + if use_cache: + next_decoder_cache += (layer_outputs[2 if output_attentions else 1],) + + if output_attentions: + all_self_attns += (layer_outputs[1],) + + if self.final_layer_norm is not None: + hidden_states = self.final_layer_norm(hidden_states) + + if self.project_out is not None: + hidden_states = self.project_out(hidden_states) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states,) + + next_cache = next_decoder_cache if use_cache else None + if not return_dict: + return tuple(v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] if v is not None) + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + ) + + +@add_start_docstrings( + "The bare OPT Model outputting raw hidden-states without any specific head on top.", + OPT_START_DOCSTRING, +) +class OPTModel(OPTPreTrainedModel): + def __init__(self, config: OPTConfig): + super().__init__(config) + self.decoder = OPTDecoder(config) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.decoder.embed_tokens + + def set_input_embeddings(self, value): + self.decoder.embed_tokens = value + + def get_decoder(self): + return self.decoder + + @add_start_docstrings_to_model_forward(OPT_INPUTS_DOCSTRING) + @add_code_sample_docstrings( + processor_class=_TOKENIZER_FOR_DOC, + checkpoint=_CHECKPOINT_FOR_DOC, + output_type=BaseModelOutputWithPast, + config_class=_CONFIG_FOR_DOC, + expected_output=_EXPECTED_OUTPUT_SHAPE, + ) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + head_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + vis_prefix = None, + prompt_embeds = None, + connector = None, + ) -> Union[Tuple, BaseModelOutputWithPast]: + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + return_dict = return_dict if return_dict is not None 
else self.config.use_return_dict + + # decoder outputs consists of (dec_features, past_key_value, dec_hidden, dec_attn) + decoder_outputs = self.decoder( + input_ids=input_ids, + attention_mask=attention_mask, + head_mask=head_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + vis_prefix=vis_prefix, + prompt_embeds=prompt_embeds, + connector=connector + ) + + if not return_dict: + return decoder_outputs + + return BaseModelOutputWithPast( + last_hidden_state=decoder_outputs.last_hidden_state, + past_key_values=decoder_outputs.past_key_values, + hidden_states=decoder_outputs.hidden_states, + attentions=decoder_outputs.attentions, + ) + + +class OPTForCausalLM(OPTPreTrainedModel): + _keys_to_ignore_on_load_missing = [r"lm_head.weight"] + + def __init__(self, config): + super().__init__(config) + self.model = OPTModel(config) + + # the lm_head weight is automatically tied to the embed tokens weight + self.lm_head = nn.Linear(config.word_embed_proj_dim, config.vocab_size, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.decoder.embed_tokens + + def set_input_embeddings(self, value): + self.model.decoder.embed_tokens = value + + def get_output_embeddings(self): + return self.lm_head + + def set_output_embeddings(self, new_embeddings): + self.lm_head = new_embeddings + + def set_decoder(self, decoder): + self.model.decoder = decoder + + def get_decoder(self): + return self.model.decoder + + @replace_return_docstrings(output_type=CausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + head_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + vis_prefix = None, + prompt_embeds = None, + reduction='mean', + connector = None + ) -> Union[Tuple, CausalLMOutputWithPast]: + r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you + provide it. + + Indices can be obtained using [`OPTTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + head_mask (`torch.Tensor` of shape `(num_hidden_layers, num_attention_heads)`, *optional*): + Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. 
+
+ past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
+ Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of
+ shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`. OPT is a decoder-only model, so
+ no cross-attention caches are involved.
+
+ Contains pre-computed hidden-states (keys and values in the self-attention blocks) that can be used
+ (see the `past_key_values` input) to speed up sequential decoding.
+
+ If `past_key_values` are used, the user can optionally input only the last `input_ids` (those that
+ don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all
+ `input_ids` of shape `(batch_size, sequence_length)`.
+ inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
+ Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation.
+ This is useful if you want more control over how to convert `input_ids` indices into associated vectors
+ than the model's internal embedding lookup matrix.
+ labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
+ Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
+ config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
+ (masked); the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
+ use_cache (`bool`, *optional*):
+ If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
+ (see `past_key_values`).
+ output_attentions (`bool`, *optional*):
+ Whether or not to return the attentions tensors of all attention layers. See `attentions` under
+ returned tensors for more detail.
+ output_hidden_states (`bool`, *optional*):
+ Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors
+ for more detail.
+ return_dict (`bool`, *optional*):
+ Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
+
+ Returns:
+
+ Example:
+
+ ```python
+ >>> from transformers import GPT2Tokenizer, OPTForCausalLM
+
+ >>> model = OPTForCausalLM.from_pretrained("facebook/opt-350m")
+ >>> tokenizer = GPT2Tokenizer.from_pretrained("facebook/opt-350m")
+
+ >>> prompt = "Hey, are you conscious? Can you talk to me?"
+ >>> inputs = tokenizer(prompt, return_tensors="pt")
+
+ >>> # Generate
+ >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
+ >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
+ "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
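+ >>> # eP-ALM note: this fork's forward additionally accepts `vis_prefix`,
+ >>> # `prompt_embeds`, `reduction` and `connector` (see the signature above).
+ >>> # Sketch (assuming `cls_feat` is a (bs, 1, dim) connector output):
+ >>> # out = model(**inputs, vis_prefix=[cls_feat])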
+ ```""" + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model.decoder( + input_ids=input_ids, + attention_mask=attention_mask, + head_mask=head_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + vis_prefix=vis_prefix, + prompt_embeds=prompt_embeds, + connector=connector + ) + + logits = self.lm_head(outputs[0]).contiguous() + + + loss = None + if labels is not None: + # ignore prompt tokens + src_len, tgt_len = logits.shape[1], labels.shape[-1] + if (tgt_len != src_len): + labels = F.pad(labels, (src_len-tgt_len, 0, 0, 0), "constant", -100) + + + + + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + + loss_fct = CrossEntropyLoss(reduction=reduction) + loss = loss_fct(shift_logits.view(-1, self.config.vocab_size), shift_labels.view(-1)) + + loss = loss.view(logits.size(0),-1).sum(1) + + if not return_dict: + output = (logits,) + outputs[1:] + return (loss,) + output if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=logits, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + ) + + def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, use_cache=None, **kwargs): + # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly + if attention_mask is None: + attention_mask = input_ids.new_ones(input_ids.shape) + + if past: + input_ids = input_ids[:, -1:] + # first step, decoder_cached_states are empty + return { + "input_ids": input_ids, # encoder_outputs is defined. 
input_ids not needed + "attention_mask": attention_mask, + "past_key_values": past, + "use_cache": use_cache, + **kwargs, + } + + @staticmethod + def _reorder_cache(past, beam_idx): + reordered_past = () + for layer_past in past: + reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),) + return reordered_past diff --git a/models/.ipynb_checkpoints/utils-checkpoint.py b/models/.ipynb_checkpoints/utils-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..7bd2d0e5f722ffce7062ac5c9a8e7091c2beb842 --- /dev/null +++ b/models/.ipynb_checkpoints/utils-checkpoint.py @@ -0,0 +1,46 @@ +import re + +import torch +from torch import nn + + + + + +def freeze_whole_model(model): + for n, p in model.named_parameters(): + p.requires_grad = False + +def unfreeze_parameters(model, config): + # targets = '*.proj_*|*_proj*|*itm_head*|*queue*|*adapter*|*temp*|*.cls.*' + + targets = ['connector'] # lm_head + if config.get('unfreeze_text_layer_norm', False): + targets = targets + ['self_attn_layer_norm', 'final_layer_norm'] + + if config.get('unfreeze_vision_layer_norm', False): + targets = targets + ['norm', 'norm1', 'norm2'] + + print('unfreeze targets:', targets) + for n, p in model.named_parameters(): + if any(t in n for t in targets): + # if re.fullmatch(targets, n): + p.requires_grad = True + print(f"{n} is trainable...") + + +def print_trainable_params_percentage(model): + + + orig_param_size = sum(p.numel() for p in model.parameters()) + + def count_parameters(model): + return sum(p.numel() for p in model.parameters() if p.requires_grad) + + trainable_size = count_parameters(model) + + percentage = trainable_size / orig_param_size * 100 + + print(f"Trainable param percentage: {percentage:.2f}% ({trainable_size}/{orig_param_size})") + + return percentage \ No newline at end of file diff --git a/models/.ipynb_checkpoints/visopt-checkpoint.py b/models/.ipynb_checkpoints/visopt-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..c5af0297b3d17bbb19a72c790a21a67f8d3e42e0 --- /dev/null +++ b/models/.ipynb_checkpoints/visopt-checkpoint.py @@ -0,0 +1,166 @@ +import re + +import torch +from torch import nn +from torchvision import transforms + + +from transformers import AutoModelForCausalLM, AutoTokenizer, AutoConfig +from accelerate import Accelerator +from models.opt import OPTModel, OPTConfig, OPTForCausalLM +import models.vit + +from PIL import Image +import json +import numpy as np + + + +import torch.nn.functional as F +from transformers.tokenization_utils_base import BatchEncoding + +def rank_answer(model, image, question_input, answer_ids, answer_atts, k, tokenizer): + + num_ques = question_input.input_ids.size(0) + start_ids = answer_ids[0,0].repeat(num_ques,1) # bos token + + start_ids = torch.cat((question_input.input_ids, start_ids), dim=1) + attention_mask = torch.cat((question_input.attention_mask, torch.ones((num_ques, 1)).to(question_input.attention_mask.device)), dim=1) + + start_input = {'input_ids': start_ids, 'attention_mask': attention_mask} + start_input = BatchEncoding(start_input) + + + + start_output = model(image, start_input, return_dict = True, mode='evaluate') + + logits = start_output.logits[:,-1,:] # first token's logit + + # topk_probs: top-k probability + # topk_ids: [num_question, k] + answer_first_token = answer_ids[:,1] + prob_first_token = F.softmax(logits,dim=1).index_select(dim=1, index=answer_first_token) + topk_probs, topk_ids = prob_first_token.topk(k,dim=1) + + # answer 
input: [num_question*k, answer_len]
+ input_ids = []
+ input_atts = []
+ for b, topk_id in enumerate(topk_ids):
+ input_ids.append(answer_ids.index_select(dim=0, index=topk_id))
+ input_atts.append(answer_atts.index_select(dim=0, index=topk_id))
+ input_ids = torch.cat(input_ids, dim=0)
+ input_atts = torch.cat(input_atts, dim=0)
+
+ start_ids = tile(start_ids, 0, k)
+ attention_mask = tile(attention_mask, 0, k)
+ image = tile(image, 0, k)
+
+ input_ids = torch.cat((start_ids, input_ids), dim=1) # include the question
+ input_atts = torch.cat((attention_mask, input_atts), dim=1)
+
+ targets_ids = input_ids.masked_fill(input_ids == tokenizer.pad_token_id, -100)
+
+ # repeat encoder's output for top-k answers
+ inputs = {'input_ids': input_ids, 'attention_mask': input_atts}
+ inputs = BatchEncoding(inputs)
+
+ output = model(image, inputs, labels=targets_ids, return_dict=True, mode='train', reduction='none')
+
+ answer_loss = output.loss
+ answer_loss = answer_loss.view(input_ids.size(0), -1)
+
+ # topk_probs: first-token probability
+ topk_probs = topk_probs.view(-1, 1)
+ log_probs = torch.cat([topk_probs.log(), -answer_loss], dim=1)
+
+ # re-calculate log probabilities for the answer sequences using the chain rule
+ log_probs_sum = log_probs.sum(1)
+ log_probs_sum = log_probs_sum.view(num_ques, k)
+
+ topk_probs = F.softmax(log_probs_sum, dim=-1)
+ # get top-k after re-ranking
+ topk_probs, rerank_id = topk_probs.topk(k, dim=1)
+ topk_ids = torch.gather(topk_ids, 1, rerank_id)
+
+ return topk_ids, topk_probs
+
+ def tile(x, dim, n_tile):
+ init_dim = x.size(dim)
+ repeat_idx = [1] * x.dim()
+ repeat_idx[dim] = n_tile
+ x = x.repeat(*repeat_idx)
+ order_index = torch.LongTensor(np.concatenate([init_dim * np.arange(n_tile) + i for i in range(init_dim)]))
+ return torch.index_select(x, dim, order_index.to(x.device))
+
+
+ class VisOPT(nn.Module):
+ def __init__(self,
+ opt_model_name = 'facebook/opt-350m',
+ vision_model_name = 'vit_base_patch16_224',
+ use_vis_prefix = True,
+ start_layer_idx = 11,
+ end_layer_idx = 23,
+ return_hidden_state_vision = True,
+ injected_hidden_states = 1,
+ ):
+ super().__init__()
+ print("Loading VisOPT ...")
+ # text
+ config_opt = AutoConfig.from_pretrained(opt_model_name)
+
+ config_opt.use_vis_prefix = use_vis_prefix
+ config_opt.start_layer_idx = start_layer_idx
+ config_opt.end_layer_idx = end_layer_idx
+
+ print(config_opt)
+ print("Loading: ", opt_model_name)
+ self.model_text = OPTForCausalLM.from_pretrained(opt_model_name, config=config_opt)
+
+ # vision
+ print("Loading: ", vision_model_name)
+ vision_func = getattr(models.vit, vision_model_name)
+ self.model_vision = vision_func(pretrained=True, return_hidden_state=return_hidden_state_vision)
+
+ # connector
+ self.injected_hidden_states = injected_hidden_states
+ vis_dim = self.model_vision.embed_dim
+ text_dim = config_opt.hidden_size
+ self.connector = nn.ModuleList([nn.Linear(vis_dim, text_dim) for i in range(injected_hidden_states)])
+
+ def forward(self, image=None, text=None, mode='generate', return_dict=True, labels=None, reduction='mean', **generation_kwargs):
+
+ if image is not None:
+ image_embed, image_feat = self.model_vision(image, external_features=None)
+
+ image_feat = list(image_feat)
+ image_feat = image_feat[-self.injected_hidden_states:]
+
+ ## only the cls token; we could consider something else
+ for i in range(1, self.injected_hidden_states + 1):
+ image_feat[-i] = self.connector[-i](image_feat[-i][:, 0, :].unsqueeze(1))
+ else:
+ image_feat = None
+
+ # image_feat = 
None + if mode == 'train' or mode == 'evaluate': + text_output = self.model_text(input_ids=text.input_ids, attention_mask=text.attention_mask, return_dict=return_dict, vis_prefix=image_feat, labels = labels, reduction=reduction) + return text_output + elif mode == 'generate': + print('generation') + gen = self.model_text.generate(input_ids=text.input_ids, vis_prefix=image_feat, **generation_kwargs) + return gen + \ No newline at end of file diff --git a/models/.ipynb_checkpoints/vit-checkpoint.py b/models/.ipynb_checkpoints/vit-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..2921cfe275f2527a594330527d3c8b94e9440b1a --- /dev/null +++ b/models/.ipynb_checkpoints/vit-checkpoint.py @@ -0,0 +1,1244 @@ +""" Vision Transformer (ViT) in PyTorch + +A PyTorch implement of Vision Transformers as described in: + +'An Image Is Worth 16 x 16 Words: Transformers for Image Recognition at Scale' + - https://arxiv.org/abs/2010.11929 + +`How to train your ViT? Data, Augmentation, and Regularization in Vision Transformers` + - https://arxiv.org/abs/2106.10270 + +The official jax code is released and available at https://github.com/google-research/vision_transformer + +Acknowledgments: +* The paper authors for releasing code and weights, thanks! +* I fixed my class token impl based on Phil Wang's https://github.com/lucidrains/vit-pytorch ... check it out +for some einops/einsum fun +* Simple transformer style inspired by Andrej Karpathy's https://github.com/karpathy/minGPT +* Bert reference code checks against Huggingface Transformers and Tensorflow Bert + +Hacked together by / Copyright 2020, Ross Wightman +""" +import math +import logging +from functools import partial +from collections import OrderedDict +from typing import Optional + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.utils.checkpoint + +from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD +from timm.models.helpers import build_model_with_cfg, resolve_pretrained_cfg, named_apply, adapt_input_conv, checkpoint_seq +from timm.models.layers import PatchEmbed, Mlp, DropPath, trunc_normal_, lecun_normal_ +from timm.models.registry import register_model + +_logger = logging.getLogger(__name__) + + +def _cfg(url='', **kwargs): + return { + 'url': url, + 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, + 'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True, + 'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD, + 'first_conv': 'patch_embed.proj', 'classifier': 'head', + **kwargs + } + + +default_cfgs = { + # patch models (weights from official Google JAX impl) + 'vit_tiny_patch16_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz'), + 'vit_tiny_patch16_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_small_patch32_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz'), + 'vit_small_patch32_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 
'S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_small_patch16_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz'), + 'vit_small_patch16_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_base_patch32_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz'), + 'vit_base_patch32_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_32-i21k-300ep-lr_0.001-aug_light1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_base_patch16_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz'), + 'vit_base_patch16_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_base_patch8_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_8-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz'), + 'vit_large_patch32_224': _cfg( + url='', # no official model weights for this combo, only for in21k + ), + 'vit_large_patch32_384': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_p32_384-9b920ba8.pth', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_large_patch16_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz'), + 'vit_large_patch16_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + + 'vit_large_patch14_224': _cfg(url=''), + 'vit_huge_patch14_224': _cfg(url=''), + 'vit_giant_patch14_224': _cfg(url=''), + 'vit_gigantic_patch14_224': _cfg(url=''), + + + # patch models, imagenet21k (weights from official Google JAX impl) + 'vit_tiny_patch16_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_small_patch32_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_small_patch16_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_base_patch32_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/B_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.03-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_base_patch16_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0.npz', + num_classes=21843), + 
'vit_base_patch8_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/B_8-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_large_patch32_224_in21k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch32_224_in21k-9046d2e7.pth', + num_classes=21843), + 'vit_large_patch16_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1.npz', + num_classes=21843), + 'vit_huge_patch14_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/imagenet21k/ViT-H_14.npz', + hf_hub_id='timm/vit_huge_patch14_224_in21k', + num_classes=21843), + + # SAM trained models (https://arxiv.org/abs/2106.01548) + 'vit_base_patch32_224_sam': _cfg( + url='https://storage.googleapis.com/vit_models/sam/ViT-B_32.npz'), + 'vit_base_patch16_224_sam': _cfg( + url='https://storage.googleapis.com/vit_models/sam/ViT-B_16.npz'), + + # DINO pretrained - https://arxiv.org/abs/2104.14294 (no classifier head, for fine-tune only) + 'vit_small_patch16_224_dino': _cfg( + url='https://dl.fbaipublicfiles.com/dino/dino_deitsmall16_pretrain/dino_deitsmall16_pretrain.pth', + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), + 'vit_small_patch8_224_dino': _cfg( + url='https://dl.fbaipublicfiles.com/dino/dino_deitsmall8_pretrain/dino_deitsmall8_pretrain.pth', + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), + 'vit_base_patch16_224_dino': _cfg( + url='https://dl.fbaipublicfiles.com/dino/dino_vitbase16_pretrain/dino_vitbase16_pretrain.pth', + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), + 'vit_base_patch8_224_dino': _cfg( + url='https://dl.fbaipublicfiles.com/dino/dino_vitbase8_pretrain/dino_vitbase8_pretrain.pth', + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), + + + # ViT ImageNet-21K-P pretraining by MILL + 'vit_base_patch16_224_miil_in21k': _cfg( + url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/vit_base_patch16_224_in21k_miil.pth', + mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', num_classes=11221, + ), + 'vit_base_patch16_224_miil': _cfg( + url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm' + '/vit_base_patch16_224_1k_miil_84_4.pth', + mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', + ), + + 'vit_base_patch16_rpn_224': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_base_patch16_rpn_224-sw-3b07e89d.pth'), + + # experimental (may be removed) + 'vit_base_patch32_plus_256': _cfg(url='', input_size=(3, 256, 256), crop_pct=0.95), + 'vit_base_patch16_plus_240': _cfg(url='', input_size=(3, 240, 240), crop_pct=0.95), + 'vit_small_patch16_36x1_224': _cfg(url=''), + 'vit_small_patch16_18x2_224': _cfg(url=''), + 'vit_base_patch16_18x2_224': _cfg(url=''), +} + + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, attn_drop=0., proj_drop=0.): + super().__init__() + assert dim % num_heads == 0, 'dim should be divisible by num_heads' + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + qkv 
= self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv.unbind(0) # make torchscript happy (cannot use tensor as tuple) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class LayerScale(nn.Module): + def __init__(self, dim, init_values=1e-5, inplace=False): + super().__init__() + self.inplace = inplace + self.gamma = nn.Parameter(init_values * torch.ones(dim)) + + def forward(self, x): + return x.mul_(self.gamma) if self.inplace else x * self.gamma + + +class Block(nn.Module): + + def __init__( + self, dim, num_heads, mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., init_values=None, + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention(dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + self.ls1 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + self.norm2 = norm_layer(dim) + self.mlp = Mlp(in_features=dim, hidden_features=int(dim * mlp_ratio), act_layer=act_layer, drop=drop) + self.ls2 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + def forward(self, x): + x = x + self.drop_path1(self.ls1(self.attn(self.norm1(x)))) + x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x)))) + return x + + +class ResPostBlock(nn.Module): + + def __init__( + self, dim, num_heads, mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., init_values=None, + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.init_values = init_values + + self.attn = Attention(dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + self.norm1 = norm_layer(dim) + self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + self.mlp = Mlp(in_features=dim, hidden_features=int(dim * mlp_ratio), act_layer=act_layer, drop=drop) + self.norm2 = norm_layer(dim) + self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + + self.init_weights() + + def init_weights(self): + # NOTE this init overrides that base model init with specific changes for the block type + if self.init_values is not None: + nn.init.constant_(self.norm1.weight, self.init_values) + nn.init.constant_(self.norm2.weight, self.init_values) + + def forward(self, x): + x = x + self.drop_path1(self.norm1(self.attn(x))) + x = x + self.drop_path2(self.norm2(self.mlp(x))) + return x + + +class ParallelBlock(nn.Module): + + def __init__( + self, dim, num_heads, num_parallel=2, mlp_ratio=4., qkv_bias=False, init_values=None, + drop=0., attn_drop=0., drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.num_parallel = num_parallel + self.attns = nn.ModuleList() + self.ffns = nn.ModuleList() + for _ in range(num_parallel): + self.attns.append(nn.Sequential(OrderedDict([ + ('norm', norm_layer(dim)), + ('attn', Attention(dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop)), + ('ls', LayerScale(dim, init_values=init_values) if init_values else nn.Identity()), + ('drop_path', DropPath(drop_path) if drop_path > 0. else nn.Identity()) + ]))) + self.ffns.append(nn.Sequential(OrderedDict([ + ('norm', norm_layer(dim)), + ('mlp', Mlp(dim, hidden_features=int(dim * mlp_ratio), act_layer=act_layer, drop=drop)), + ('ls', LayerScale(dim, init_values=init_values) if init_values else nn.Identity()), + ('drop_path', DropPath(drop_path) if drop_path > 0. else nn.Identity()) + ]))) + + def _forward_jit(self, x): + x = x + torch.stack([attn(x) for attn in self.attns]).sum(dim=0) + x = x + torch.stack([ffn(x) for ffn in self.ffns]).sum(dim=0) + return x + + @torch.jit.ignore + def _forward(self, x): + x = x + sum(attn(x) for attn in self.attns) + x = x + sum(ffn(x) for ffn in self.ffns) + return x + + def forward(self, x): + if torch.jit.is_scripting() or torch.jit.is_tracing(): + return self._forward_jit(x) + else: + return self._forward(x) + + +class VisionTransformer(nn.Module): + """ Vision Transformer + + A PyTorch impl of : `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale` + - https://arxiv.org/abs/2010.11929 + """ + + def __init__( + self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, global_pool='token', + embed_dim=768, depth=12, num_heads=12, mlp_ratio=4., qkv_bias=True, init_values=None, + class_token=True, no_embed_class=False, fc_norm=None, drop_rate=0., attn_drop_rate=0., drop_path_rate=0., + weight_init='', embed_layer=PatchEmbed, norm_layer=None, act_layer=None, block_fn=Block, return_hidden_state=False): + """ + Args: + img_size (int, tuple): input image size + patch_size (int, tuple): patch size + in_chans (int): number of input channels + num_classes (int): number of classes for classification head + global_pool (str): type of global pooling for final sequence (default: 'token') + embed_dim (int): embedding dimension + depth (int): depth of transformer + num_heads (int): number of attention heads + mlp_ratio (int): ratio of mlp hidden dim to embedding dim + qkv_bias (bool): enable bias for qkv if True + init_values: (float): layer-scale init values + class_token (bool): use class token + fc_norm (Optional[bool]): pre-fc norm after pool, set if global_pool == 'avg' if None (default: None) + drop_rate (float): dropout rate + attn_drop_rate (float): attention dropout rate + drop_path_rate (float): stochastic depth rate + weight_init (str): weight init scheme + embed_layer (nn.Module): patch embedding layer + norm_layer: (nn.Module): 
normalization layer + act_layer: (nn.Module): MLP activation layer + """ + super().__init__() + assert global_pool in ('', 'avg', 'token') + assert class_token or global_pool != 'token' + use_fc_norm = global_pool == 'avg' if fc_norm is None else fc_norm + norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6) + act_layer = act_layer or nn.GELU + + self.num_classes = num_classes + self.global_pool = global_pool + self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models + self.num_prefix_tokens = 1 if class_token else 0 + self.no_embed_class = no_embed_class + self.grad_checkpointing = False + + self.patch_embed = embed_layer( + img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) + num_patches = self.patch_embed.num_patches + + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) if class_token else None + embed_len = num_patches if no_embed_class else num_patches + self.num_prefix_tokens + self.pos_embed = nn.Parameter(torch.randn(1, embed_len, embed_dim) * .02) + self.pos_drop = nn.Dropout(p=drop_rate) + + self.depth = depth + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + block_fn( + dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, init_values=init_values, + drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[i], norm_layer=norm_layer, act_layer=act_layer) + for i in range(depth)]) + self.norm = norm_layer(embed_dim) if not use_fc_norm else nn.Identity() + + # Classifier Head + self.fc_norm = norm_layer(embed_dim) if use_fc_norm else nn.Identity() + self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity() + + if weight_init != 'skip': + self.init_weights(weight_init) + self.return_hidden_state = return_hidden_state + + def init_weights(self, mode=''): + assert mode in ('jax', 'jax_nlhb', 'moco', '') + head_bias = -math.log(self.num_classes) if 'nlhb' in mode else 0. 
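+ # 'nlhb' (negative log head bias) initializes the classifier-head bias to
+ # -log(num_classes), matching the JAX/Flax scheme so the head starts out
+ # predicting roughly the uniform prior over classes.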
+ trunc_normal_(self.pos_embed, std=.02) + if self.cls_token is not None: + nn.init.normal_(self.cls_token, std=1e-6) + named_apply(get_init_weights_vit(mode, head_bias), self) + + def _init_weights(self, m): + # this fn left here for compat with downstream users + init_weights_vit_timm(m) + + @torch.jit.ignore() + def load_pretrained(self, checkpoint_path, prefix=''): + _load_weights(self, checkpoint_path, prefix) + + @torch.jit.ignore + def no_weight_decay(self): + return {'pos_embed', 'cls_token', 'dist_token'} + + @torch.jit.ignore + def group_matcher(self, coarse=False): + return dict( + stem=r'^cls_token|pos_embed|patch_embed', # stem and embed + blocks=[(r'^blocks\.(\d+)', None), (r'^norm', (99999,))] + ) + + @torch.jit.ignore + def set_grad_checkpointing(self, enable=True): + self.grad_checkpointing = enable + + @torch.jit.ignore + def get_classifier(self): + return self.head + + def reset_classifier(self, num_classes: int, global_pool=None): + self.num_classes = num_classes + if global_pool is not None: + assert global_pool in ('', 'avg', 'token') + self.global_pool = global_pool + self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity() + + def _pos_embed(self, x): + if self.no_embed_class: + # deit-3, updated JAX (big vision) + # position embedding does not overlap with class token, add then concat + x = x + self.pos_embed + if self.cls_token is not None: + x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1) + else: + # original timm, JAX, and deit vit impl + # pos_embed has entry for class token, concat then add + if self.cls_token is not None: + x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1) + x = x + self.pos_embed + return self.pos_drop(x) + + def forward_features(self, x): + x = self.patch_embed(x) + x = self._pos_embed(x) + if self.grad_checkpointing and not torch.jit.is_scripting(): + x = checkpoint_seq(self.blocks, x) + else: + x = self.blocks(x) + x = self.norm(x) + return x + + def forward_head(self, x, pre_logits: bool = False): + if self.global_pool: + x = x[:, self.num_prefix_tokens:].mean(dim=1) if self.global_pool == 'avg' else x[:, 0] + x = self.fc_norm(x) + return x if pre_logits else self.head(x) + +# def forward(self, x): +# x = self.forward_features(x) +# x = self.forward_head(x) +# return x + + def forward(self, x, external_features=None): + all_hidden_states = () if self.return_hidden_state else None + B = x.shape[0] + x = self.patch_embed(x) + + cls_tokens = self.cls_token.expand(B, -1, -1) # stole cls_tokens impl from Phil Wang, thanks + x = torch.cat((cls_tokens, x), dim=1) + + x = x + self.pos_embed[:,:x.size(1),:] + x = self.pos_drop(x) + + if external_features is not None: + x = torch.cat((x, external_features), dim=1) + + for i,blk in enumerate(self.blocks): + x = blk(x) + if self.return_hidden_state: + all_hidden_states = all_hidden_states + (self.norm(x),) + x = self.norm(x) + + if self.return_hidden_state: + return x, all_hidden_states + else: + return x + + +def init_weights_vit_timm(module: nn.Module, name: str = ''): + """ ViT weight initialization, original timm impl (for reproducibility) """ + if isinstance(module, nn.Linear): + trunc_normal_(module.weight, std=.02) + if module.bias is not None: + nn.init.zeros_(module.bias) + elif hasattr(module, 'init_weights'): + module.init_weights() + + +def init_weights_vit_jax(module: nn.Module, name: str = '', head_bias: float = 0.): + """ ViT weight initialization, matching JAX (Flax) impl """ + if isinstance(module, nn.Linear): + 
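# Flax-style scheme: the classifier head is zero-initialized (bias set to
+ # head_bias); other linear layers use xavier-uniform, and MLP biases get
+ # small normal noise, matching the Flax defaults (see below).
+ 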
if name.startswith('head'): + nn.init.zeros_(module.weight) + nn.init.constant_(module.bias, head_bias) + else: + nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.normal_(module.bias, std=1e-6) if 'mlp' in name else nn.init.zeros_(module.bias) + elif isinstance(module, nn.Conv2d): + lecun_normal_(module.weight) + if module.bias is not None: + nn.init.zeros_(module.bias) + elif hasattr(module, 'init_weights'): + module.init_weights() + + +def init_weights_vit_moco(module: nn.Module, name: str = ''): + """ ViT weight initialization, matching moco-v3 impl minus fixed PatchEmbed """ + if isinstance(module, nn.Linear): + if 'qkv' in name: + # treat the weights of Q, K, V separately + val = math.sqrt(6. / float(module.weight.shape[0] // 3 + module.weight.shape[1])) + nn.init.uniform_(module.weight, -val, val) + else: + nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.zeros_(module.bias) + elif hasattr(module, 'init_weights'): + module.init_weights() + + +def get_init_weights_vit(mode='jax', head_bias: float = 0.): + if 'jax' in mode: + return partial(init_weights_vit_jax, head_bias=head_bias) + elif 'moco' in mode: + return init_weights_vit_moco + else: + return init_weights_vit_timm + + +@torch.no_grad() +def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = ''): + """ Load weights from .npz checkpoints for official Google Brain Flax implementation + """ + import numpy as np + + def _n2p(w, t=True): + if w.ndim == 4 and w.shape[0] == w.shape[1] == w.shape[2] == 1: + w = w.flatten() + if t: + if w.ndim == 4: + w = w.transpose([3, 2, 0, 1]) + elif w.ndim == 3: + w = w.transpose([2, 0, 1]) + elif w.ndim == 2: + w = w.transpose([1, 0]) + return torch.from_numpy(w) + + w = np.load(checkpoint_path) + if not prefix and 'opt/target/embedding/kernel' in w: + prefix = 'opt/target/' + + if hasattr(model.patch_embed, 'backbone'): + # hybrid + backbone = model.patch_embed.backbone + stem_only = not hasattr(backbone, 'stem') + stem = backbone if stem_only else backbone.stem + stem.conv.weight.copy_(adapt_input_conv(stem.conv.weight.shape[1], _n2p(w[f'{prefix}conv_root/kernel']))) + stem.norm.weight.copy_(_n2p(w[f'{prefix}gn_root/scale'])) + stem.norm.bias.copy_(_n2p(w[f'{prefix}gn_root/bias'])) + if not stem_only: + for i, stage in enumerate(backbone.stages): + for j, block in enumerate(stage.blocks): + bp = f'{prefix}block{i + 1}/unit{j + 1}/' + for r in range(3): + getattr(block, f'conv{r + 1}').weight.copy_(_n2p(w[f'{bp}conv{r + 1}/kernel'])) + getattr(block, f'norm{r + 1}').weight.copy_(_n2p(w[f'{bp}gn{r + 1}/scale'])) + getattr(block, f'norm{r + 1}').bias.copy_(_n2p(w[f'{bp}gn{r + 1}/bias'])) + if block.downsample is not None: + block.downsample.conv.weight.copy_(_n2p(w[f'{bp}conv_proj/kernel'])) + block.downsample.norm.weight.copy_(_n2p(w[f'{bp}gn_proj/scale'])) + block.downsample.norm.bias.copy_(_n2p(w[f'{bp}gn_proj/bias'])) + embed_conv_w = _n2p(w[f'{prefix}embedding/kernel']) + else: + embed_conv_w = adapt_input_conv( + model.patch_embed.proj.weight.shape[1], _n2p(w[f'{prefix}embedding/kernel'])) + model.patch_embed.proj.weight.copy_(embed_conv_w) + model.patch_embed.proj.bias.copy_(_n2p(w[f'{prefix}embedding/bias'])) + model.cls_token.copy_(_n2p(w[f'{prefix}cls'], t=False)) + pos_embed_w = _n2p(w[f'{prefix}Transformer/posembed_input/pos_embedding'], t=False) + if pos_embed_w.shape != model.pos_embed.shape: + pos_embed_w = resize_pos_embed( # resize pos embedding when different size from pretrained weights + 
pos_embed_w, + model.pos_embed, + getattr(model, 'num_prefix_tokens', 1), + model.patch_embed.grid_size + ) + model.pos_embed.copy_(pos_embed_w) + model.norm.weight.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/scale'])) + model.norm.bias.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/bias'])) + if isinstance(model.head, nn.Linear) and model.head.bias.shape[0] == w[f'{prefix}head/bias'].shape[-1]: + model.head.weight.copy_(_n2p(w[f'{prefix}head/kernel'])) + model.head.bias.copy_(_n2p(w[f'{prefix}head/bias'])) + # NOTE representation layer has been removed, not used in latest 21k/1k pretrained weights + # if isinstance(getattr(model.pre_logits, 'fc', None), nn.Linear) and f'{prefix}pre_logits/bias' in w: + # model.pre_logits.fc.weight.copy_(_n2p(w[f'{prefix}pre_logits/kernel'])) + # model.pre_logits.fc.bias.copy_(_n2p(w[f'{prefix}pre_logits/bias'])) + for i, block in enumerate(model.blocks.children()): + block_prefix = f'{prefix}Transformer/encoderblock_{i}/' + mha_prefix = block_prefix + 'MultiHeadDotProductAttention_1/' + block.norm1.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/scale'])) + block.norm1.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/bias'])) + block.attn.qkv.weight.copy_(torch.cat([ + _n2p(w[f'{mha_prefix}{n}/kernel'], t=False).flatten(1).T for n in ('query', 'key', 'value')])) + block.attn.qkv.bias.copy_(torch.cat([ + _n2p(w[f'{mha_prefix}{n}/bias'], t=False).reshape(-1) for n in ('query', 'key', 'value')])) + block.attn.proj.weight.copy_(_n2p(w[f'{mha_prefix}out/kernel']).flatten(1)) + block.attn.proj.bias.copy_(_n2p(w[f'{mha_prefix}out/bias'])) + for r in range(2): + getattr(block.mlp, f'fc{r + 1}').weight.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/kernel'])) + getattr(block.mlp, f'fc{r + 1}').bias.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/bias'])) + block.norm2.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/scale'])) + block.norm2.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/bias'])) + + +def resize_pos_embed(posemb, posemb_new, num_prefix_tokens=1, gs_new=()): + # Rescale the grid of position embeddings when loading from state_dict. 
Adapted from + # https://github.com/google-research/vision_transformer/blob/00883dd691c63a6830751563748663526e811cee/vit_jax/checkpoint.py#L224 + _logger.info('Resized position embedding: %s to %s', posemb.shape, posemb_new.shape) + ntok_new = posemb_new.shape[1] + if num_prefix_tokens: + posemb_prefix, posemb_grid = posemb[:, :num_prefix_tokens], posemb[0, num_prefix_tokens:] + ntok_new -= num_prefix_tokens + else: + posemb_prefix, posemb_grid = posemb[:, :0], posemb[0] + gs_old = int(math.sqrt(len(posemb_grid))) + if not len(gs_new): # backwards compatibility + gs_new = [int(math.sqrt(ntok_new))] * 2 + assert len(gs_new) >= 2 + _logger.info('Position embedding grid-size from %s to %s', [gs_old, gs_old], gs_new) + posemb_grid = posemb_grid.reshape(1, gs_old, gs_old, -1).permute(0, 3, 1, 2) + posemb_grid = F.interpolate(posemb_grid, size=gs_new, mode='bicubic', align_corners=False) + posemb_grid = posemb_grid.permute(0, 2, 3, 1).reshape(1, gs_new[0] * gs_new[1], -1) + posemb = torch.cat([posemb_prefix, posemb_grid], dim=1) + return posemb + + +def checkpoint_filter_fn(state_dict, model, adapt_layer_scale=False): + """ convert patch embedding weight from manual patchify + linear proj to conv""" + import re + out_dict = {} + if 'model' in state_dict: + # For deit models + state_dict = state_dict['model'] + + for k, v in state_dict.items(): + if 'patch_embed.proj.weight' in k and len(v.shape) < 4: + # For old models that I trained prior to conv based patchification + O, I, H, W = model.patch_embed.proj.weight.shape + v = v.reshape(O, -1, H, W) + elif k == 'pos_embed' and v.shape[1] != model.pos_embed.shape[1]: + # To resize pos embedding when using model at different size from pretrained weights + v = resize_pos_embed( + v, + model.pos_embed, + getattr(model, 'num_prefix_tokens', 1), + model.patch_embed.grid_size + ) + elif adapt_layer_scale and 'gamma_' in k: + # remap layer-scale gamma into sub-module (deit3 models) + k = re.sub(r'gamma_([0-9])', r'ls\1.gamma', k) + elif 'pre_logits' in k: + # NOTE representation layer removed as not used in latest 21k/1k pretrained weights + continue + out_dict[k] = v + return out_dict + + +def _create_vision_transformer(variant, pretrained=False, **kwargs): + if kwargs.get('features_only', None): + raise RuntimeError('features_only not implemented for Vision Transformer models.') + + pretrained_cfg = resolve_pretrained_cfg(variant, pretrained_cfg=kwargs.pop('pretrained_cfg', None)) + model = build_model_with_cfg( + VisionTransformer, variant, pretrained, + pretrained_cfg=pretrained_cfg, + pretrained_filter_fn=checkpoint_filter_fn, + pretrained_custom_load='npz' in pretrained_cfg['url'], + **kwargs) + return model + + +@register_model +def vit_tiny_patch16_224(pretrained=False, **kwargs): + """ ViT-Tiny (Vit-Ti/16) + """ + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) + model = _create_vision_transformer('vit_tiny_patch16_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_tiny_patch16_384(pretrained=False, **kwargs): + """ ViT-Tiny (Vit-Ti/16) @ 384x384. 
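+ ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer.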
+ """ + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) + model = _create_vision_transformer('vit_tiny_patch16_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch32_224(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/32) + """ + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch32_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch32_384(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/32) at 384x384. + """ + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch32_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch16_224(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/16) + NOTE I've replaced my previous 'small' model definition and weights with the small variant from the DeiT paper + """ + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch16_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch16_384(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/16) + NOTE I've replaced my previous 'small' model definition and weights with the small variant from the DeiT paper + """ + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch16_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch32_224(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch32_384(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch32_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch16_224(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch16_384(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. 
+ """ + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch8_224(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/8) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch8_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch32_224(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). No pretrained weights. + """ + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch32_384(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch32_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch16_224(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch16_384(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch14_224(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/14) + """ + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch14_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_huge_patch14_224(pretrained=False, **kwargs): + """ ViT-Huge model (ViT-H/14) from original paper (https://arxiv.org/abs/2010.11929). 
+ """ + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_huge_patch14_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_giant_patch14_224(pretrained=False, **kwargs): + """ ViT-Giant model (ViT-g/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560 + """ + model_kwargs = dict(patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_giant_patch14_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_gigantic_patch14_224(pretrained=False, **kwargs): + """ ViT-Gigantic model (ViT-G/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560 + """ + model_kwargs = dict(patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_gigantic_patch14_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_tiny_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Tiny (Vit-Ti/16). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) + model = _create_vision_transformer('vit_tiny_patch16_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch32_224_in21k(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/16) + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch32_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/16) + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch16_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch32_224_in21k(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch32_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. 
+ NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch16_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch8_224_in21k(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/8) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch8_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch32_224_in21k(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has a representation layer but the 21k classifier head is zero'd out in original weights + """ + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch32_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch16_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): + """ ViT-Huge model (ViT-H/14) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has a representation layer but the 21k classifier head is zero'd out in original weights + """ + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_huge_patch14_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch16_224_sam(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/16) w/ SAM pretrained weights. Paper: https://arxiv.org/abs/2106.01548 + """ + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch16_224_sam', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch32_224_sam(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/32) w/ SAM pretrained weights. 
Paper: https://arxiv.org/abs/2106.01548
+ """
+ model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs)
+ model = _create_vision_transformer('vit_base_patch32_224_sam', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_small_patch16_224_dino(pretrained=False, **kwargs):
+ """ ViT-Small (ViT-S/16) w/ DINO pretrained weights (no head) - https://arxiv.org/abs/2104.14294
+ """
+ model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs)
+ model = _create_vision_transformer('vit_small_patch16_224_dino', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_small_patch8_224_dino(pretrained=False, **kwargs):
+ """ ViT-Small (ViT-S/8) w/ DINO pretrained weights (no head) - https://arxiv.org/abs/2104.14294
+ """
+ model_kwargs = dict(patch_size=8, embed_dim=384, depth=12, num_heads=6, **kwargs)
+ model = _create_vision_transformer('vit_small_patch8_224_dino', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_base_patch16_224_dino(pretrained=False, **kwargs):
+ """ ViT-Base (ViT-B/16) w/ DINO pretrained weights (no head) - https://arxiv.org/abs/2104.14294
+ """
+ model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs)
+ model = _create_vision_transformer('vit_base_patch16_224_dino', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_base_patch8_224_dino(pretrained=False, **kwargs):
+ """ ViT-Base (ViT-B/8) w/ DINO pretrained weights (no head) - https://arxiv.org/abs/2104.14294
+ """
+ model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12, **kwargs)
+ model = _create_vision_transformer('vit_base_patch8_224_dino', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_base_patch16_224_miil_in21k(pretrained=False, **kwargs):
+ """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).
+ Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
+ """
+ model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, **kwargs)
+ model = _create_vision_transformer('vit_base_patch16_224_miil_in21k', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_base_patch16_224_miil(pretrained=False, **kwargs):
+ """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).
+ Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
+ """
+ model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, **kwargs)
+ model = _create_vision_transformer('vit_base_patch16_224_miil', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+# Experimental models below
+
+@register_model
+def vit_base_patch32_plus_256(pretrained=False, **kwargs):
+ """ ViT-Base (ViT-B/32+)
+ """
+ model_kwargs = dict(patch_size=32, embed_dim=896, depth=12, num_heads=14, init_values=1e-5, **kwargs)
+ model = _create_vision_transformer('vit_base_patch32_plus_256', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_base_patch16_plus_240(pretrained=False, **kwargs):
+ """ ViT-Base (ViT-B/16+)
+ """
+ model_kwargs = dict(patch_size=16, embed_dim=896, depth=12, num_heads=14, init_values=1e-5, **kwargs)
+ model = _create_vision_transformer('vit_base_patch16_plus_240', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_base_patch16_rpn_224(pretrained=False, **kwargs):
+ """ ViT-Base (ViT-B/16) w/ residual post-norm
+ """
+ model_kwargs = dict(
+ patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, init_values=1e-5, class_token=False,
+ block_fn=ResPostBlock, global_pool=kwargs.pop('global_pool', 'avg'), **kwargs)
+ model = _create_vision_transformer('vit_base_patch16_rpn_224', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_small_patch16_36x1_224(pretrained=False, **kwargs):
+ """ ViT-Small w/ LayerScale + 36 x 1 (36 block serial) config. Experimental, may remove.
+ Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795
+ Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow.
+ """
+ model_kwargs = dict(patch_size=16, embed_dim=384, depth=36, num_heads=6, init_values=1e-5, **kwargs)
+ model = _create_vision_transformer('vit_small_patch16_36x1_224', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_small_patch16_18x2_224(pretrained=False, **kwargs):
+ """ ViT-Small w/ LayerScale + 18 x 2 (36 block parallel) config. Experimental, may remove.
+ Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795
+ Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow.
+ """
+ model_kwargs = dict(
+ patch_size=16, embed_dim=384, depth=18, num_heads=6, init_values=1e-5, block_fn=ParallelBlock, **kwargs)
+ model = _create_vision_transformer('vit_small_patch16_18x2_224', pretrained=pretrained, **model_kwargs)
+ return model
+
+
+@register_model
+def vit_base_patch16_18x2_224(pretrained=False, **kwargs):
+ """ ViT-Base w/ LayerScale + 18 x 2 (36 block parallel) config. Experimental, may remove.
+ Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795 + """ + model_kwargs = dict( + patch_size=16, embed_dim=768, depth=18, num_heads=12, init_values=1e-5, block_fn=ParallelBlock, **kwargs) + model = _create_vision_transformer('vit_base_patch16_18x2_224', pretrained=pretrained, **model_kwargs) + return model + + +# class VisionTransformer(nn.Module): +# """ Vision Transformer +# A PyTorch impl of : `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale` - +# https://arxiv.org/abs/2010.11929 +# """ +# def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12, +# num_heads=12, mlp_ratio=4., qkv_bias=True, qk_scale=None, representation_size=None, +# drop_rate=0., attn_drop_rate=0., drop_path_rate=0., norm_layer=None, +# return_hidden_state=False, tokens_pool=None, top_p=None, return_attention=False): +# """ +# Args: +# img_size (int, tuple): input image size +# patch_size (int, tuple): patch size +# in_chans (int): number of input channels +# num_classes (int): number of classes for classification head +# embed_dim (int): embedding dimension +# depth (int): depth of transformer +# num_heads (int): number of attention heads +# mlp_ratio (int): ratio of mlp hidden dim to embedding dim +# qkv_bias (bool): enable bias for qkv if True +# qk_scale (float): override default qk scale of head_dim ** -0.5 if set +# representation_size (Optional[int]): enable and set representation layer (pre-logits) to this value if set +# drop_rate (float): dropout rate +# attn_drop_rate (float): attention dropout rate +# drop_path_rate (float): stochastic depth rate +# norm_layer: (nn.Module): normalization layer +# """ +# super().__init__() +# self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models +# norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6) + +# self.patch_size = patch_size +# self.patch_embed = PatchEmbed( +# img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) +# num_patches = self.patch_embed.num_patches + +# self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) +# self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + 1, embed_dim)) +# self.pos_drop = nn.Dropout(p=drop_rate) + +# self.tokens_pool = [False for i in range(depth)] if tokens_pool is None else tokens_pool +# self.top_p = [1 for i in range(depth)] if tokens_pool is None else top_p + +# dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule +# self.blocks = nn.ModuleList([ +# Block( +# dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale, +# drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[i], norm_layer=norm_layer, tokens_pool=self.tokens_pool[i], top_p=self.top_p[i]) +# for i in range(depth)]) +# self.norm = norm_layer(embed_dim) + +# trunc_normal_(self.pos_embed, std=.02) +# trunc_normal_(self.cls_token, std=.02) +# self.apply(self._init_weights) +# self.return_hidden_state = return_hidden_state +# self.return_attention = return_attention + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, nn.LayerNorm): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'pos_embed', 'cls_token'} + +# def forward(self, x, 
register_blk=-1, external_features=None): +# all_hidden_states = () if self.return_hidden_state else None +# B = x.shape[0] +# x = self.patch_embed(x) + +# cls_tokens = self.cls_token.expand(B, -1, -1) # stole cls_tokens impl from Phil Wang, thanks +# x = torch.cat((cls_tokens, x), dim=1) + +# x = x + self.pos_embed[:,:x.size(1),:] +# x = self.pos_drop(x) + +# if external_features is not None: +# x = torch.cat((x, external_features), dim=1) + +# for i,blk in enumerate(self.blocks): +# if self.return_attention: +# x = blk(x, True) +# else: +# x = blk(x, register_blk==i) +# if self.return_hidden_state: +# all_hidden_states = all_hidden_states + (self.norm(x),) +# x = self.norm(x) + +# if self.return_hidden_state: +# return x, all_hidden_states +# else: +# return x + + + +# def interpolate_pos_embed(pos_embed_checkpoint, visual_encoder): +# # interpolate position embedding +# embedding_size = pos_embed_checkpoint.shape[-1] +# num_patches = visual_encoder.patch_embed.num_patches +# num_extra_tokens = visual_encoder.pos_embed.shape[-2] - num_patches +# # height (== width) for the checkpoint position embedding +# orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5) +# # height (== width) for the new position embedding +# new_size = int(num_patches ** 0.5) + +# if orig_size!=new_size: +# # class_token and dist_token are kept unchanged +# extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens] +# # only the position tokens are interpolated +# pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:] +# pos_tokens = pos_tokens.reshape(-1, orig_size, orig_size, embedding_size).permute(0, 3, 1, 2) +# pos_tokens = torch.nn.functional.interpolate( +# pos_tokens, size=(new_size, new_size), mode='bicubic', align_corners=False) +# pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2) +# new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1) +# print('reshape position embedding from %d to %d'%(orig_size ** 2,new_size ** 2)) + +# return new_pos_embed +# else: +# return pos_embed_checkpoint diff --git a/models/__init__.py b/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/models/adapters.py b/models/adapters.py new file mode 100644 index 0000000000000000000000000000000000000000..123c2fc1cf7391626471466fa25a257ce03a39d8 --- /dev/null +++ b/models/adapters.py @@ -0,0 +1,120 @@ +import torch +import torch.nn as nn +from torchtyping import TensorType + +# same as https://github.com/Aleph-Alpha/magma/blob/master/magma/adapters.py + +class Adapter(nn.Module): + def __init__( + self, + dim: int, + downsample_factor: int = 4, + activation: nn.Module = nn.ReLU, + add_layernorm: bool = False, + ): + super().__init__() + layers = [] + if add_layernorm: + layers.append(nn.LayerNorm(dim)) + layers.extend( + [ + nn.Linear(dim, dim // downsample_factor), + activation(), + nn.Linear(dim // downsample_factor, dim), + ] + ) + self.adapter = nn.Sequential(*layers) + self.adapter.apply(self.init_weights) + + def init_weights(self, m: nn.Module, std=1e-3): + if isinstance(m, nn.Linear): + torch.nn.init.normal_(m.weight, std=std) + torch.nn.init.normal_(m.bias, std=std) + m.weight.data = torch.clamp(m.weight.data, min=-2 * std, max=2 * std) + m.bias.data = torch.clamp(m.bias.data, min=-2 * std, max=2 * std) + elif isinstance(m, nn.LayerNorm): + m.bias.data.zero_() + m.weight.data.fill_(1.0) + + def forward(self, x: TensorType["b", "s", "d"]) -> TensorType["b", "s", "d"]: + return self.adapter(x) + x + + 
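+# Minimal usage sketch of the bottleneck Adapter above (illustrative only,
+# not called anywhere in this repo; the 768-dim shapes are assumptions for
+# the example):
+#
+#   adapter = Adapter(dim=768, downsample_factor=4, add_layernorm=True)
+#   x = torch.randn(2, 16, 768)   # (batch, seq_len, hidden_dim)
+#   y = adapter(x)                # LN -> down-proj -> ReLU -> up-proj, plus residual x
+#   assert y.shape == x.shape
+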
+class ParallelAdapter(Adapter): + def __init__( + self, + module: nn.Module, + dim: int, + downsample_factor: int = 4, + scaled: bool = False, + add_layernorm: bool = False, + activation: nn.Module = nn.ReLU, + ): + super().__init__( + dim, downsample_factor, add_layernorm=add_layernorm, activation=activation + ) + self.module = module + + if scaled: + # init scaling param + self.adapter_scale = nn.Parameter(torch.ones(1)) + else: + self.adapter_scale = 1 + + def forward(self, x: TensorType["b", "s", "d"], **module_kwargs): + y = self.module(x, **module_kwargs) + z = self.adapter(x) + return y + (z * self.adapter_scale) + + +class ParallelAdapterWrapper(ParallelAdapter): + # used to add an adapter to the attention block + + def __init__( + self, + module: nn.Module, + dim: int, + downsample_factor: int = 4, + scaled: bool = False, + add_layernorm: bool = False, + activation: nn.Module = nn.ReLU, + ): + super().__init__( + module, dim, downsample_factor, scaled, add_layernorm, activation + ) + + def forward(self, x: TensorType["b", "s", "d"], *attn_args, **attn_kwargs): + attn_outputs = self.module(x, *attn_args, **attn_kwargs) + attn_output, outputs = ( + attn_outputs[0], + attn_outputs[1:], + ) # output_attn: a, present, (attentions) + hidden_states = attn_output + (self.adapter(x) * self.adapter_scale) + return (hidden_states,) + outputs + + +class AdapterWrapper(Adapter): + # used to add an adapter to the attention block + + def __init__( + self, + attn_block: nn.Module, + dim: int, + downsample_factor: int = 4, + activation: nn.Module = nn.ReLU, + add_layernorm: bool = False, + ): + super().__init__(dim, downsample_factor, activation, add_layernorm) + self.attn_block = attn_block + + def forward(self, x: TensorType["b", "s", "d"] = None, *attn_args, **attn_kwargs): + if x is None: + attn_outputs = self.attn_block(*attn_args, **attn_kwargs) + else: + attn_outputs = self.attn_block(x, *attn_args, **attn_kwargs) + attn_output, outputs = ( + attn_outputs[0], + attn_outputs[1:], + ) # output_attn: a, present, (attentions) + hidden_states = self.adapter(attn_output) + attn_output + return (hidden_states,) + outputs diff --git a/models/ast.py b/models/ast.py new file mode 100644 index 0000000000000000000000000000000000000000..c6f8ceecf3a6db3e2086c851f1be254e30a52bab --- /dev/null +++ b/models/ast.py @@ -0,0 +1,235 @@ +# -*- coding: utf-8 -*- +# @Time : 6/10/21 5:04 PM +# @Author : Yuan Gong +# @Affiliation : Massachusetts Institute of Technology +# @Email : yuangong@mit.edu +# @File : ast_models.py + +import torch +import torch.nn as nn +from torch.cuda.amp import autocast +import os +# import wget +os.environ['TORCH_HOME'] = '../../pretrained_models' +import timm +from timm.models.layers import to_2tuple,trunc_normal_ + +# override the timm package to relax the input shape constraint. +class PatchEmbed(nn.Module): + def __init__(self, img_size=224, patch_size=16, in_chans=3, embed_dim=768): + super().__init__() + + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + num_patches = (img_size[1] // patch_size[1]) * (img_size[0] // patch_size[0]) + self.img_size = img_size + self.patch_size = patch_size + self.num_patches = num_patches + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size) + + def forward(self, x): + x = self.proj(x).flatten(2).transpose(1, 2) + return x + +class ASTModel(nn.Module): + """ + The AST model. 
+ :param label_dim: the label dimension, i.e., the total number of classes; 527 for AudioSet, 50 for ESC-50, and 35 for speechcommands v2-35
+ :param fstride: the stride of patch splitting on the frequency dimension; for 16*16 patches, fstride=16 means no overlap, fstride=10 means overlap of 6
+ :param tstride: the stride of patch splitting on the time dimension; for 16*16 patches, tstride=16 means no overlap, tstride=10 means overlap of 6
+ :param input_fdim: the number of frequency bins of the input spectrogram
+ :param input_tdim: the number of time frames of the input spectrogram
+ :param imagenet_pretrain: whether to use an ImageNet-pretrained model
+ :param audioset_pretrain: whether to use a model pretrained on both AudioSet and ImageNet
+ :param model_size: the model size of AST, should be in [tiny224, small224, base224, base384]; base224 and base384 are the same model, but are trained differently during ImageNet pretraining.
+ """
+ def __init__(self, label_dim=527, fstride=10, tstride=10, input_fdim=128, input_tdim=1024,
+ imagenet_pretrain=True, audioset_pretrain=False, model_size='base384', verbose=True,
+ return_hidden_state=None, pretrained_model=None):
+
+ super(ASTModel, self).__init__()
+ # assert timm.__version__ == '0.4.5', 'Please use timm == 0.4.5, the code might not be compatible with newer versions.'
+
+ if verbose == True:
+ print('---------------AST Model Summary---------------')
+ print('ImageNet pretraining: {:s}, AudioSet pretraining: {:s}'.format(str(imagenet_pretrain),str(audioset_pretrain)))
+ # override timm input shape restriction
+ timm.models.vision_transformer.PatchEmbed = PatchEmbed
+ timm.models.layers.patch_embed.PatchEmbed = PatchEmbed
+
+
+ # if AudioSet pretraining is not used (but ImageNet pretraining may still apply)
+ if audioset_pretrain == False:
+ if model_size == 'tiny224':
+ self.v = timm.create_model('vit_deit_tiny_distilled_patch16_224', pretrained=imagenet_pretrain)
+ elif model_size == 'small224':
+ self.v = timm.create_model('vit_deit_small_distilled_patch16_224', pretrained=imagenet_pretrain)
+ elif model_size == 'base224':
+ self.v = timm.create_model('vit_deit_base_distilled_patch16_224', pretrained=imagenet_pretrain)
+ elif model_size == 'base384':
+ self.v = timm.create_model('deit_base_distilled_patch16_384', pretrained=imagenet_pretrain)
+ else:
+ raise Exception('Model size must be one of tiny224, small224, base224, base384.')
+
+
+ tmp = PatchEmbed(img_size=self.v.patch_embed.img_size, patch_size=self.v.patch_embed.patch_size,
+ in_chans=3, embed_dim=768)
+ tmp.load_state_dict(self.v.patch_embed.state_dict())
+ self.v.patch_embed = tmp
+
+ # self.v.patch_embed = PatchEmbed(img_size=self.v.patch_embed.img_size, patch_size=self.v.patch_embed.patch_size,
+ # in_chans=3, embed_dim=768)
+
+ self.original_num_patches = self.v.patch_embed.num_patches
+ self.oringal_hw = int(self.original_num_patches ** 0.5)
+ self.original_embedding_dim = self.v.pos_embed.shape[2]
+ self.mlp_head = nn.Sequential(nn.LayerNorm(self.original_embedding_dim), nn.Linear(self.original_embedding_dim, label_dim))
+
+ # automatically get the intermediate shape
+ f_dim, t_dim = self.get_shape(fstride, tstride, input_fdim, input_tdim)
+ num_patches = f_dim * t_dim
+ self.v.patch_embed.num_patches = num_patches
+ if verbose == True:
+ print('frequency stride={:d}, time stride={:d}'.format(fstride, tstride))
+ print('number of patches={:d}'.format(num_patches))
+
+ # the linear projection layer
+ new_proj = torch.nn.Conv2d(1, self.original_embedding_dim, kernel_size=(16, 16),
stride=(fstride, tstride)) + if imagenet_pretrain == True: + new_proj.weight = torch.nn.Parameter(torch.sum(self.v.patch_embed.proj.weight, dim=1).unsqueeze(1)) + new_proj.bias = self.v.patch_embed.proj.bias + self.v.patch_embed.proj = new_proj + + # the positional embedding + if imagenet_pretrain == True: + # get the positional embedding from deit model, skip the first two tokens (cls token and distillation token), reshape it to original 2D shape (24*24). + new_pos_embed = self.v.pos_embed[:, 2:, :].detach().reshape(1, self.original_num_patches, self.original_embedding_dim).transpose(1, 2).reshape(1, self.original_embedding_dim, self.oringal_hw, self.oringal_hw) + # cut (from middle) or interpolate the second dimension of the positional embedding + if t_dim <= self.oringal_hw: + new_pos_embed = new_pos_embed[:, :, :, int(self.oringal_hw / 2) - int(t_dim / 2): int(self.oringal_hw / 2) - int(t_dim / 2) + t_dim] + else: + new_pos_embed = torch.nn.functional.interpolate(new_pos_embed, size=(self.oringal_hw, t_dim), mode='bilinear') + # cut (from middle) or interpolate the first dimension of the positional embedding + if f_dim <= self.oringal_hw: + new_pos_embed = new_pos_embed[:, :, int(self.oringal_hw / 2) - int(f_dim / 2): int(self.oringal_hw / 2) - int(f_dim / 2) + f_dim, :] + else: + new_pos_embed = torch.nn.functional.interpolate(new_pos_embed, size=(f_dim, t_dim), mode='bilinear') + # flatten the positional embedding + new_pos_embed = new_pos_embed.reshape(1, self.original_embedding_dim, num_patches).transpose(1,2) + # concatenate the above positional embedding with the cls token and distillation token of the deit model. + self.v.pos_embed = nn.Parameter(torch.cat([self.v.pos_embed[:, :2, :].detach(), new_pos_embed], dim=1)) + else: + # if not use imagenet pretrained model, just randomly initialize a learnable positional embedding + # TODO can use sinusoidal positional embedding instead + new_pos_embed = nn.Parameter(torch.zeros(1, self.v.patch_embed.num_patches + 2, self.original_embedding_dim)) + self.v.pos_embed = new_pos_embed + trunc_normal_(self.v.pos_embed, std=.02) + + # now load a model that is pretrained on both ImageNet and AudioSet + elif audioset_pretrain == True: + if audioset_pretrain == True and imagenet_pretrain == False: + raise ValueError('currently model pretrained on only audioset is not supported, please set imagenet_pretrain = True to use audioset pretrained model.') + if model_size != 'base384': + raise ValueError('currently only has base384 AudioSet pretrained model.') + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + # if os.path.exists('../../pretrained_models/audioset_10_10_0.4593.pth') == False: + # # this model performs 0.4593 mAP on the audioset eval set + # audioset_mdl_url = 'https://www.dropbox.com/s/cv4knew8mvbrnvq/audioset_0.4593.pth?dl=1' + # wget.download(audioset_mdl_url, out='../../pretrained_models/audioset_10_10_0.4593.pth') + + sd = torch.load(pretrained_model, map_location=device) + audio_model = ASTModel(label_dim=527, fstride=10, tstride=10, input_fdim=128, input_tdim=1024, imagenet_pretrain=False, audioset_pretrain=False, model_size='base384', verbose=False) + audio_model = torch.nn.DataParallel(audio_model) + audio_model.load_state_dict(sd, strict=False) + self.v = audio_model.module.v + self.original_embedding_dim = self.v.pos_embed.shape[2] + self.mlp_head = nn.Sequential(nn.LayerNorm(self.original_embedding_dim), nn.Linear(self.original_embedding_dim, label_dim)) + + f_dim, t_dim = self.get_shape(fstride, 
tstride, input_fdim, input_tdim)
+ num_patches = f_dim * t_dim
+ self.v.patch_embed.num_patches = num_patches
+
+ # self.v.patch_embed.img_size = self.v.patch_embed.img_size
+
+ if verbose == True:
+ print('frequency stride={:d}, time stride={:d}'.format(fstride, tstride))
+ print('number of patches={:d}'.format(num_patches))
+
+ new_pos_embed = self.v.pos_embed[:, 2:, :].detach().reshape(1, 1212, 768).transpose(1, 2).reshape(1, 768, 12, 101)
+ # if the input sequence is shorter than the original AudioSet input (10s), cut the positional embedding
+ if t_dim < 101:
+ new_pos_embed = new_pos_embed[:, :, :, 50 - int(t_dim/2): 50 - int(t_dim/2) + t_dim]
+ # otherwise interpolate
+ else:
+ new_pos_embed = torch.nn.functional.interpolate(new_pos_embed, size=(12, t_dim), mode='bilinear')
+ if f_dim < 12:
+ new_pos_embed = new_pos_embed[:, :, 6 - int(f_dim/2): 6 - int(f_dim/2) + f_dim, :]
+ # otherwise interpolate
+ elif f_dim > 12:
+ new_pos_embed = torch.nn.functional.interpolate(new_pos_embed, size=(f_dim, t_dim), mode='bilinear')
+ new_pos_embed = new_pos_embed.reshape(1, 768, num_patches).transpose(1, 2)
+ self.v.pos_embed = nn.Parameter(torch.cat([self.v.pos_embed[:, :2, :].detach(), new_pos_embed], dim=1))
+
+ self.return_hidden_state = return_hidden_state
+
+ def get_shape(self, fstride, tstride, input_fdim=128, input_tdim=1024):
+ test_input = torch.randn(1, 1, input_fdim, input_tdim)
+ test_proj = nn.Conv2d(1, self.original_embedding_dim, kernel_size=(16, 16), stride=(fstride, tstride))
+ test_out = test_proj(test_input)
+ f_dim = test_out.shape[2]
+ t_dim = test_out.shape[3]
+ return f_dim, t_dim
+
+ @autocast()
+ def forward(self, x, external_features=None):
+ """
+ :param x: the input spectrogram, expected shape: (batch_size, time_frame_num, frequency_bins), e.g., (12, 1024, 128)
+ :return: the encoded token sequence (and all hidden states if return_hidden_state is set)
+ """
+ # expect input x = (batch_size, time_frame_num, frequency_bins), e.g., (12, 1024, 128)
+ all_hidden_states = () if self.return_hidden_state else None
+
+ x = x.unsqueeze(1)
+ x = x.transpose(2, 3)
+
+ B = x.shape[0]
+ x = self.v.patch_embed(x)
+ cls_tokens = self.v.cls_token.expand(B, -1, -1)
+ dist_token = self.v.dist_token.expand(B, -1, -1)
+ x = torch.cat((cls_tokens, dist_token, x), dim=1)
+ x = x + self.v.pos_embed
+ x = self.v.pos_drop(x)
+
+ for blk in self.v.blocks:
+ x = blk(x)
+ if self.return_hidden_state:
+ all_hidden_states = all_hidden_states + (self.v.norm(x),)
+ x = self.v.norm(x)
+
+ # x[:, 0] = (x[:, 0] + x[:, 1]) / 2
+
+ # x = (x[:, 0] + x[:, 1]) / 2
+ # x = self.mlp_head(x)
+
+ if self.return_hidden_state:
+ return x, all_hidden_states
+ else:
+ return x
+
+if __name__ == '__main__':
+ input_tdim = 100
+ ast_mdl = ASTModel(input_tdim=input_tdim)
+ # input a batch of 10 spectrograms, each with 100 time frames and 128 frequency bins
+ test_input = torch.rand([10, input_tdim, 128])
+ test_output = ast_mdl(test_input)
+ # output should be in shape [10, 527], i.e., 10 samples, each with prediction of 527 classes.
+ print(test_output.shape)
+
+ input_tdim = 256
+ ast_mdl = ASTModel(input_tdim=input_tdim,label_dim=50, audioset_pretrain=True)
+ # input a batch of 10 spectrograms, each with 256 time frames and 128 frequency bins
+ test_input = torch.rand([10, input_tdim, 128])
+ test_output = ast_mdl(test_input)
+ # output should be in shape [10, 50], i.e., 10 samples, each with prediction of 50 classes.
+ print(test_output.shape) \ No newline at end of file diff --git a/models/clip.py b/models/clip.py new file mode 100644 index 0000000000000000000000000000000000000000..696d19f7ee307542d882addcfbf2274e460c3c90 --- /dev/null +++ b/models/clip.py @@ -0,0 +1,839 @@ +# from openai clip +from collections import OrderedDict +from typing import Tuple, Union + +import numpy as np +import torch +import torch.nn.functional as F +from torch import nn + +####### +import hashlib +import os +import urllib +import warnings +from typing import Any, Union, List +from pkg_resources import packaging + +import torch +from PIL import Image +from torchvision.transforms import Compose, Resize, CenterCrop, ToTensor, Normalize +from tqdm import tqdm + +# from .simple_tokenizer import SimpleTokenizer as _Tokenizer + +try: + from torchvision.transforms import InterpolationMode + BICUBIC = InterpolationMode.BICUBIC +except ImportError: + BICUBIC = Image.BICUBIC + + +if packaging.version.parse(torch.__version__) < packaging.version.parse("1.7.1"): + warnings.warn("PyTorch version 1.7.1 or higher is recommended") + + +__all__ = ["available_models", "load", "tokenize"] + +_MODELS = { + "RN50": "https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt", + "RN101": "https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt", + "RN50x4": "https://openaipublic.azureedge.net/clip/models/7e526bd135e493cef0776de27d5f42653e6b4c8bf9e0f653bb11773263205fdd/RN50x4.pt", + "RN50x16": "https://openaipublic.azureedge.net/clip/models/52378b407f34354e150460fe41077663dd5b39c54cd0bfd2b27167a4a06ec9aa/RN50x16.pt", + "RN50x64": "https://openaipublic.azureedge.net/clip/models/be1cfb55d75a9666199fb2206c106743da0f6468c9d327f3e0d0a543a9919d9c/RN50x64.pt", + "ViT-B/32": "https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt", + "ViT-B/16": "https://openaipublic.azureedge.net/clip/models/5806e77cd80f8b59890b7e101eabd078d9fb84e6937f9e85e4ecb61988df416f/ViT-B-16.pt", + "ViT-L/14": "https://openaipublic.azureedge.net/clip/models/b8cca3fd41ae0c99ba7e8951adf17d267cdb84cd88be6f7c2e0eca1737a03836/ViT-L-14.pt", + "ViT-L/14@336px": "https://openaipublic.azureedge.net/clip/models/3035c92b350959924f9f00213499208652fc7ea050643e8b385c2dac08641f02/ViT-L-14-336px.pt", +} + + + +############# + +import gzip +import html +import os +from functools import lru_cache + +import ftfy +import regex as re + + +@lru_cache() +def default_bpe(): + return os.path.join(os.path.dirname(os.path.abspath(__file__)), "bpe_simple_vocab_16e6.txt.gz") + + +@lru_cache() +def bytes_to_unicode(): + """ + Returns list of utf-8 byte and a corresponding list of unicode strings. + The reversible bpe codes work on unicode strings. + This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. + When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. + This is a signficant percentage of your normal, say, 32K bpe vocab. + To avoid that, we want lookup tables between utf-8 bytes and unicode strings. + And avoids mapping to whitespace/control characters the bpe code barfs on. 
+ """ + bs = list(range(ord("!"), ord("~")+1))+list(range(ord("¡"), ord("¬")+1))+list(range(ord("®"), ord("ÿ")+1)) + cs = bs[:] + n = 0 + for b in range(2**8): + if b not in bs: + bs.append(b) + cs.append(2**8+n) + n += 1 + cs = [chr(n) for n in cs] + return dict(zip(bs, cs)) + + +def get_pairs(word): + """Return set of symbol pairs in a word. + Word is represented as tuple of symbols (symbols being variable-length strings). + """ + pairs = set() + prev_char = word[0] + for char in word[1:]: + pairs.add((prev_char, char)) + prev_char = char + return pairs + + +def basic_clean(text): + text = ftfy.fix_text(text) + text = html.unescape(html.unescape(text)) + return text.strip() + + +def whitespace_clean(text): + text = re.sub(r'\s+', ' ', text) + text = text.strip() + return text + + +class _Tokenizer(object): + def __init__(self, bpe_path: str = default_bpe()): + self.byte_encoder = bytes_to_unicode() + self.byte_decoder = {v: k for k, v in self.byte_encoder.items()} + merges = gzip.open(bpe_path).read().decode("utf-8").split('\n') + merges = merges[1:49152-256-2+1] + merges = [tuple(merge.split()) for merge in merges] + vocab = list(bytes_to_unicode().values()) + vocab = vocab + [v+'' for v in vocab] + for merge in merges: + vocab.append(''.join(merge)) + vocab.extend(['<|startoftext|>', '<|endoftext|>']) + self.encoder = dict(zip(vocab, range(len(vocab)))) + self.decoder = {v: k for k, v in self.encoder.items()} + self.bpe_ranks = dict(zip(merges, range(len(merges)))) + self.cache = {'<|startoftext|>': '<|startoftext|>', '<|endoftext|>': '<|endoftext|>'} + self.pat = re.compile(r"""<\|startoftext\|>|<\|endoftext\|>|'s|'t|'re|'ve|'m|'ll|'d|[\p{L}]+|[\p{N}]|[^\s\p{L}\p{N}]+""", re.IGNORECASE) + + def bpe(self, token): + if token in self.cache: + return self.cache[token] + word = tuple(token[:-1]) + ( token[-1] + '',) + pairs = get_pairs(word) + + if not pairs: + return token+'' + + while True: + bigram = min(pairs, key = lambda pair: self.bpe_ranks.get(pair, float('inf'))) + if bigram not in self.bpe_ranks: + break + first, second = bigram + new_word = [] + i = 0 + while i < len(word): + try: + j = word.index(first, i) + new_word.extend(word[i:j]) + i = j + except: + new_word.extend(word[i:]) + break + + if word[i] == first and i < len(word)-1 and word[i+1] == second: + new_word.append(first+second) + i += 2 + else: + new_word.append(word[i]) + i += 1 + new_word = tuple(new_word) + word = new_word + if len(word) == 1: + break + else: + pairs = get_pairs(word) + word = ' '.join(word) + self.cache[token] = word + return word + + def encode(self, text): + bpe_tokens = [] + text = whitespace_clean(basic_clean(text)).lower() + for token in re.findall(self.pat, text): + token = ''.join(self.byte_encoder[b] for b in token.encode('utf-8')) + bpe_tokens.extend(self.encoder[bpe_token] for bpe_token in self.bpe(token).split(' ')) + return bpe_tokens + + def decode(self, tokens): + text = ''.join([self.decoder[token] for token in tokens]) + text = bytearray([self.byte_decoder[c] for c in text]).decode('utf-8', errors="replace").replace('', ' ') + return text + +_tokenizer = _Tokenizer(os.path.expanduser(os.environ['XDG_CACHE_HOME']+"/clip/bpe_simple_vocab_16e6.txt.gz")) + +######### +def _download(url: str, root: str): + os.makedirs(root, exist_ok=True) + filename = os.path.basename(url) + + expected_sha256 = url.split("/")[-2] + download_target = os.path.join(root, filename) + + if os.path.exists(download_target) and not os.path.isfile(download_target): + raise RuntimeError(f"{download_target} exists 
and is not a regular file") + + if os.path.isfile(download_target): + if hashlib.sha256(open(download_target, "rb").read()).hexdigest() == expected_sha256: + return download_target + else: + warnings.warn(f"{download_target} exists, but the SHA256 checksum does not match; re-downloading the file") + + with urllib.request.urlopen(url) as source, open(download_target, "wb") as output: + with tqdm(total=int(source.info().get("Content-Length")), ncols=80, unit='iB', unit_scale=True, unit_divisor=1024) as loop: + while True: + buffer = source.read(8192) + if not buffer: + break + + output.write(buffer) + loop.update(len(buffer)) + + if hashlib.sha256(open(download_target, "rb").read()).hexdigest() != expected_sha256: + raise RuntimeError("Model has been downloaded but the SHA256 checksum does not not match") + + return download_target + + +def _convert_image_to_rgb(image): + return image.convert("RGB") + + +def _transform(n_px): + return Compose([ + Resize(n_px, interpolation=BICUBIC), + CenterCrop(n_px), + _convert_image_to_rgb, + ToTensor(), + Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)), + ]) + + +def available_models() -> List[str]: + """Returns the names of available CLIP models""" + return list(_MODELS.keys()) + + +def load(name: str, device: Union[str, torch.device] = "cuda" if torch.cuda.is_available() else "cpu", + jit: bool = False, download_root: str = None, **kwargs): + """Load a CLIP model + + Parameters + ---------- + name : str + A model name listed by `clip.available_models()`, or the path to a model checkpoint containing the state_dict + + device : Union[str, torch.device] + The device to put the loaded model + + jit : bool + Whether to load the optimized JIT model or more hackable non-JIT model (default). + + download_root: str + path to download the model files; by default, it uses "~/.cache/clip" + + Returns + ------- + model : torch.nn.Module + The CLIP model + + preprocess : Callable[[PIL.Image], torch.Tensor] + A torchvision transform that converts a PIL image into a tensor that the returned model can take as its input + """ + if name in _MODELS: + model_path = _download(_MODELS[name], download_root or os.path.expanduser("~/.cache/clip")) + elif os.path.isfile(name): + model_path = name + else: + raise RuntimeError(f"Model {name} not found; available models = {available_models()}") + + with open(model_path, 'rb') as opened_file: + try: + # loading JIT archive + model = torch.jit.load(opened_file, map_location=device if jit else "cpu").eval() + state_dict = None + except RuntimeError: + # loading saved state dict + if jit: + warnings.warn(f"File {model_path} is not a JIT archive. 
Loading as a state dict instead") + jit = False + state_dict = torch.load(opened_file, map_location="cpu") + + if not jit: + model = build_model(state_dict or model.state_dict(), **kwargs).to(device) + if str(device) == "cpu": + model.float() + return model, _transform(model.visual.input_resolution) + + # patch the device names + device_holder = torch.jit.trace(lambda: torch.ones([]).to(torch.device(device)), example_inputs=[]) + device_node = [n for n in device_holder.graph.findAllNodes("prim::Constant") if "Device" in repr(n)][-1] + + def patch_device(module): + try: + graphs = [module.graph] if hasattr(module, "graph") else [] + except RuntimeError: + graphs = [] + + if hasattr(module, "forward1"): + graphs.append(module.forward1.graph) + + for graph in graphs: + for node in graph.findAllNodes("prim::Constant"): + if "value" in node.attributeNames() and str(node["value"]).startswith("cuda"): + node.copyAttributes(device_node) + + model.apply(patch_device) + patch_device(model.encode_image) + patch_device(model.encode_text) + + # patch dtype to float32 on CPU + if str(device) == "cpu": + float_holder = torch.jit.trace(lambda: torch.ones([]).float(), example_inputs=[]) + float_input = list(float_holder.graph.findNode("aten::to").inputs())[1] + float_node = float_input.node() + + def patch_float(module): + try: + graphs = [module.graph] if hasattr(module, "graph") else [] + except RuntimeError: + graphs = [] + + if hasattr(module, "forward1"): + graphs.append(module.forward1.graph) + + for graph in graphs: + for node in graph.findAllNodes("aten::to"): + inputs = list(node.inputs()) + for i in [1, 2]: # dtype can be the second or third argument to aten::to() + if inputs[i].node()["value"] == 5: + inputs[i].node().copyAttributes(float_node) + + model.apply(patch_float) + patch_float(model.encode_image) + patch_float(model.encode_text) + + model.float() + + return model, _transform(model.input_resolution.item()) + + +def tokenize(texts: Union[str, List[str]], context_length: int = 77, truncate: bool = False) -> Union[torch.IntTensor, torch.LongTensor]: + """ + Returns the tokenized representation of given input string(s) + + Parameters + ---------- + texts : Union[str, List[str]] + An input string or a list of input strings to tokenize + + context_length : int + The context length to use; all CLIP models use 77 as the context length + + truncate: bool + Whether to truncate the text in case its encoding is longer than the context length + + Returns + ------- + A two-dimensional tensor containing the resulting tokens, shape = [number of input strings, context_length]. + We return LongTensor when torch version is <1.8.0, since older index_select requires indices to be long. 
+ """ + if isinstance(texts, str): + texts = [texts] + + sot_token = _tokenizer.encoder["<|startoftext|>"] + eot_token = _tokenizer.encoder["<|endoftext|>"] + all_tokens = [[sot_token] + _tokenizer.encode(text) + [eot_token] for text in texts] + if packaging.version.parse(torch.__version__) < packaging.version.parse("1.8.0"): + result = torch.zeros(len(all_tokens), context_length, dtype=torch.long) + else: + result = torch.zeros(len(all_tokens), context_length, dtype=torch.int) + + for i, tokens in enumerate(all_tokens): + if len(tokens) > context_length: + if truncate: + tokens = tokens[:context_length] + tokens[-1] = eot_token + else: + raise RuntimeError(f"Input {texts[i]} is too long for context length {context_length}") + result[i, :len(tokens)] = torch.tensor(tokens) + + return result + + + + +########## +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1): + super().__init__() + + # all conv layers have stride 1. an avgpool is performed after the second convolution when stride > 1 + self.conv1 = nn.Conv2d(inplanes, planes, 1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + self.relu1 = nn.ReLU(inplace=True) + + self.conv2 = nn.Conv2d(planes, planes, 3, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes) + self.relu2 = nn.ReLU(inplace=True) + + self.avgpool = nn.AvgPool2d(stride) if stride > 1 else nn.Identity() + + self.conv3 = nn.Conv2d(planes, planes * self.expansion, 1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * self.expansion) + self.relu3 = nn.ReLU(inplace=True) + + self.downsample = None + self.stride = stride + + if stride > 1 or inplanes != planes * Bottleneck.expansion: + # downsampling layer is prepended with an avgpool, and the subsequent convolution has stride 1 + self.downsample = nn.Sequential(OrderedDict([ + ("-1", nn.AvgPool2d(stride)), + ("0", nn.Conv2d(inplanes, planes * self.expansion, 1, stride=1, bias=False)), + ("1", nn.BatchNorm2d(planes * self.expansion)) + ])) + + def forward(self, x: torch.Tensor): + identity = x + + out = self.relu1(self.bn1(self.conv1(x))) + out = self.relu2(self.bn2(self.conv2(out))) + out = self.avgpool(out) + out = self.bn3(self.conv3(out)) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu3(out) + return out + + +class AttentionPool2d(nn.Module): + def __init__(self, spacial_dim: int, embed_dim: int, num_heads: int, output_dim: int = None): + super().__init__() + self.positional_embedding = nn.Parameter(torch.randn(spacial_dim ** 2 + 1, embed_dim) / embed_dim ** 0.5) + self.k_proj = nn.Linear(embed_dim, embed_dim) + self.q_proj = nn.Linear(embed_dim, embed_dim) + self.v_proj = nn.Linear(embed_dim, embed_dim) + self.c_proj = nn.Linear(embed_dim, output_dim or embed_dim) + self.num_heads = num_heads + + def forward(self, x): + x = x.flatten(start_dim=2).permute(2, 0, 1) # NCHW -> (HW)NC + x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (HW+1)NC + x = x + self.positional_embedding[:, None, :].to(x.dtype) # (HW+1)NC + x, _ = F.multi_head_attention_forward( + query=x[:1], key=x, value=x, + embed_dim_to_check=x.shape[-1], + num_heads=self.num_heads, + q_proj_weight=self.q_proj.weight, + k_proj_weight=self.k_proj.weight, + v_proj_weight=self.v_proj.weight, + in_proj_weight=None, + in_proj_bias=torch.cat([self.q_proj.bias, self.k_proj.bias, self.v_proj.bias]), + bias_k=None, + bias_v=None, + add_zero_attn=False, + dropout_p=0, + out_proj_weight=self.c_proj.weight, + out_proj_bias=self.c_proj.bias, + 
use_separate_proj_weight=True, + training=self.training, + need_weights=False + ) + return x.squeeze(0) + + +class ModifiedResNet(nn.Module): + """ + A ResNet class that is similar to torchvision's but contains the following changes: + - There are now 3 "stem" convolutions as opposed to 1, with an average pool instead of a max pool. + - Performs anti-aliasing strided convolutions, where an avgpool is prepended to convolutions with stride > 1 + - The final pooling layer is a QKV attention instead of an average pool + """ + + def __init__(self, layers, output_dim, heads, input_resolution=224, width=64): + super().__init__() + self.output_dim = output_dim + self.input_resolution = input_resolution + + # the 3-layer stem + self.conv1 = nn.Conv2d(3, width // 2, kernel_size=3, stride=2, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(width // 2) + self.relu1 = nn.ReLU(inplace=True) + self.conv2 = nn.Conv2d(width // 2, width // 2, kernel_size=3, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(width // 2) + self.relu2 = nn.ReLU(inplace=True) + self.conv3 = nn.Conv2d(width // 2, width, kernel_size=3, padding=1, bias=False) + self.bn3 = nn.BatchNorm2d(width) + self.relu3 = nn.ReLU(inplace=True) + self.avgpool = nn.AvgPool2d(2) + + # residual layers + self._inplanes = width # this is a *mutable* variable used during construction + self.layer1 = self._make_layer(width, layers[0]) + self.layer2 = self._make_layer(width * 2, layers[1], stride=2) + self.layer3 = self._make_layer(width * 4, layers[2], stride=2) + self.layer4 = self._make_layer(width * 8, layers[3], stride=2) + + embed_dim = width * 32 # the ResNet feature dimension + + self.embed_dim = embed_dim + + self.attnpool = AttentionPool2d(input_resolution // 32, embed_dim, heads, output_dim) + + def _make_layer(self, planes, blocks, stride=1): + layers = [Bottleneck(self._inplanes, planes, stride)] + + self._inplanes = planes * Bottleneck.expansion + for _ in range(1, blocks): + layers.append(Bottleneck(self._inplanes, planes)) + + return nn.Sequential(*layers) + + def forward(self, x): + def stem(x): + x = self.relu1(self.bn1(self.conv1(x))) + x = self.relu2(self.bn2(self.conv2(x))) + x = self.relu3(self.bn3(self.conv3(x))) + x = self.avgpool(x) + return x + + x = x.type(self.conv1.weight.dtype) + x = stem(x) + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + x = self.attnpool(x) + + return x + + +class LayerNorm(nn.LayerNorm): + """Subclass torch's LayerNorm to handle fp16.""" + + def forward(self, x: torch.Tensor): + orig_type = x.dtype + ret = super().forward(x.type(torch.float32)) + return ret.type(orig_type) + + +class QuickGELU(nn.Module): + def forward(self, x: torch.Tensor): + return x * torch.sigmoid(1.702 * x) + + +class ResidualAttentionBlock(nn.Module): + def __init__(self, d_model: int, n_head: int, attn_mask: torch.Tensor = None): + super().__init__() + + self.attn = nn.MultiheadAttention(d_model, n_head) + self.ln_1 = LayerNorm(d_model) + self.mlp = nn.Sequential(OrderedDict([ + ("c_fc", nn.Linear(d_model, d_model * 4)), + ("gelu", QuickGELU()), + ("c_proj", nn.Linear(d_model * 4, d_model)) + ])) + self.ln_2 = LayerNorm(d_model) + self.attn_mask = attn_mask + + def attention(self, x: torch.Tensor): + self.attn_mask = self.attn_mask.to(dtype=x.dtype, device=x.device) if self.attn_mask is not None else None + return self.attn(x, x, x, need_weights=False, attn_mask=self.attn_mask)[0] + + def forward(self, x: torch.Tensor): + x = x + self.attention(self.ln_1(x)) + x = x + self.mlp(self.ln_2(x)) 
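+ # pre-norm residual block: each sublayer above reads a LayerNorm-ed copy
+ # of x and adds its output back, so x carries the running representation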
+ return x + + +class Transformer(nn.Module): + def __init__(self, width: int, layers: int, heads: int, attn_mask: torch.Tensor = None): + super().__init__() + self.width = width + self.layers = layers + self.resblocks = nn.Sequential(*[ResidualAttentionBlock(width, heads, attn_mask) for _ in range(layers)]) + + def forward(self, x: torch.Tensor): + return self.resblocks(x) + + +class VisionTransformer(nn.Module): + def __init__(self, input_resolution: int, patch_size: int, width: int, layers: int, heads: int, + output_dim: int, return_hidden_state=False): + super().__init__() + self.input_resolution = input_resolution + self.output_dim = output_dim + self.conv1 = nn.Conv2d(in_channels=3, out_channels=width, kernel_size=patch_size, stride=patch_size, bias=False) + + scale = width ** -0.5 + self.class_embedding = nn.Parameter(scale * torch.randn(width)) + self.positional_embedding = nn.Parameter(scale * torch.randn((input_resolution // patch_size) ** 2 + 1, width)) + self.ln_pre = LayerNorm(width) + + self.transformer = Transformer(width, layers, heads) + + self.transformer.resblocks = nn.ModuleList([*self.transformer.resblocks]) + + self.ln_post = LayerNorm(width) + self.proj = nn.Parameter(scale * torch.randn(width, output_dim)) + + self.return_hidden_state = return_hidden_state + self.embed_dim = width + + def forward(self, x: torch.Tensor, external_features=None): + all_hidden_states = () if self.return_hidden_state else None + x = self.conv1(x) # shape = [*, width, grid, grid] + x = x.reshape(x.shape[0], x.shape[1], -1) # shape = [*, width, grid ** 2] + x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width] + x = torch.cat([self.class_embedding.to(x.dtype) + torch.zeros(x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device), x], dim=1) # shape = [*, grid ** 2 + 1, width] + x = x + self.positional_embedding.to(x.dtype) + x = self.ln_pre(x) + + x = x.permute(1, 0, 2) # NLD -> LND + + # x = self.transformer(x) + for i,blk in enumerate(self.transformer.resblocks): + x = blk(x) + if self.return_hidden_state: + all_hidden_states = all_hidden_states + (self.ln_post(x.permute(1, 0, 2)),) + + x = x.permute(1, 0, 2) # LND -> NLD + + x = self.ln_post(x) + # x = self.ln_post(x[:, 0, :]) + + # if self.proj is not None: + # x = x @ self.proj + + if self.return_hidden_state: + return x, all_hidden_states + else: + return x + + +class CLIP(nn.Module): + def __init__(self, + embed_dim: int, + # vision + image_resolution: int, + vision_layers: Union[Tuple[int, int, int, int], int], + vision_width: int, + vision_patch_size: int, + # text + context_length: int, + vocab_size: int, + transformer_width: int, + transformer_heads: int, + transformer_layers: int, + return_hidden_state=False, + ): + super().__init__() + + self.context_length = context_length + + if isinstance(vision_layers, (tuple, list)): + vision_heads = vision_width * 32 // 64 + self.visual = ModifiedResNet( + layers=vision_layers, + output_dim=embed_dim, + heads=vision_heads, + input_resolution=image_resolution, + width=vision_width + ) + else: + vision_heads = vision_width // 64 + self.visual = VisionTransformer( + input_resolution=image_resolution, + patch_size=vision_patch_size, + width=vision_width, + layers=vision_layers, + heads=vision_heads, + output_dim=embed_dim, + return_hidden_state=return_hidden_state + ) + + self.transformer = Transformer( + width=transformer_width, + layers=transformer_layers, + heads=transformer_heads, + attn_mask=self.build_attention_mask() + ) + + self.vocab_size = vocab_size + self.token_embedding = 
nn.Embedding(vocab_size, transformer_width) + self.positional_embedding = nn.Parameter(torch.empty(self.context_length, transformer_width)) + self.ln_final = LayerNorm(transformer_width) + + self.text_projection = nn.Parameter(torch.empty(transformer_width, embed_dim)) + self.logit_scale = nn.Parameter(torch.ones([]) * np.log(1 / 0.07)) + + self.initialize_parameters() + + def initialize_parameters(self): + nn.init.normal_(self.token_embedding.weight, std=0.02) + nn.init.normal_(self.positional_embedding, std=0.01) + + if isinstance(self.visual, ModifiedResNet): + if self.visual.attnpool is not None: + std = self.visual.attnpool.c_proj.in_features ** -0.5 + nn.init.normal_(self.visual.attnpool.q_proj.weight, std=std) + nn.init.normal_(self.visual.attnpool.k_proj.weight, std=std) + nn.init.normal_(self.visual.attnpool.v_proj.weight, std=std) + nn.init.normal_(self.visual.attnpool.c_proj.weight, std=std) + + for resnet_block in [self.visual.layer1, self.visual.layer2, self.visual.layer3, self.visual.layer4]: + for name, param in resnet_block.named_parameters(): + if name.endswith("bn3.weight"): + nn.init.zeros_(param) + + proj_std = (self.transformer.width ** -0.5) * ((2 * self.transformer.layers) ** -0.5) + attn_std = self.transformer.width ** -0.5 + fc_std = (2 * self.transformer.width) ** -0.5 + for block in self.transformer.resblocks: + nn.init.normal_(block.attn.in_proj_weight, std=attn_std) + nn.init.normal_(block.attn.out_proj.weight, std=proj_std) + nn.init.normal_(block.mlp.c_fc.weight, std=fc_std) + nn.init.normal_(block.mlp.c_proj.weight, std=proj_std) + + if self.text_projection is not None: + nn.init.normal_(self.text_projection, std=self.transformer.width ** -0.5) + + def build_attention_mask(self): + # lazily create causal attention mask, with full attention between the vision tokens + # pytorch uses additive attention mask; fill with -inf + mask = torch.empty(self.context_length, self.context_length) + mask.fill_(float("-inf")) + mask.triu_(1) # zero out the lower diagonal + return mask + + @property + def dtype(self): + return self.visual.conv1.weight.dtype + + def encode_image(self, image): + return self.visual(image.type(self.dtype)) + + def encode_text(self, text): + x = self.token_embedding(text).type(self.dtype) # [batch_size, n_ctx, d_model] + + x = x + self.positional_embedding.type(self.dtype) + x = x.permute(1, 0, 2) # NLD -> LND + x = self.transformer(x) + x = x.permute(1, 0, 2) # LND -> NLD + x = self.ln_final(x).type(self.dtype) + + # x.shape = [batch_size, n_ctx, transformer.width] + # take features from the eot embedding (eot_token is the highest number in each sequence) + x = x[torch.arange(x.shape[0]), text.argmax(dim=-1)] @ self.text_projection + + return x + + def forward(self, image, text): + image_features = self.encode_image(image) + text_features = self.encode_text(text) + + # normalized features + image_features = image_features / image_features.norm(dim=1, keepdim=True) + text_features = text_features / text_features.norm(dim=1, keepdim=True) + + # cosine similarity as logits + logit_scale = self.logit_scale.exp() + logits_per_image = logit_scale * image_features @ text_features.t() + logits_per_text = logits_per_image.t() + + # shape = [global_batch_size, global_batch_size] + return logits_per_image, logits_per_text + + +def convert_weights(model: nn.Module): + """Convert applicable model parameters to fp16""" + + def _convert_weights_to_fp16(l): + if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)): + l.weight.data = l.weight.data.half() + if l.bias 
is not None:
+ l.bias.data = l.bias.data.half()
+
+ if isinstance(l, nn.MultiheadAttention):
+ for attr in [*[f"{s}_proj_weight" for s in ["in", "q", "k", "v"]], "in_proj_bias", "bias_k", "bias_v"]:
+ tensor = getattr(l, attr)
+ if tensor is not None:
+ tensor.data = tensor.data.half()
+
+ for name in ["text_projection", "proj"]:
+ if hasattr(l, name):
+ attr = getattr(l, name)
+ if attr is not None:
+ attr.data = attr.data.half()
+
+ model.apply(_convert_weights_to_fp16)
+
+
+def build_model(state_dict: dict, **kwargs):
+ vit = "visual.proj" in state_dict
+
+ if vit:
+ vision_width = state_dict["visual.conv1.weight"].shape[0]
+ vision_layers = len([k for k in state_dict.keys() if k.startswith("visual.") and k.endswith(".attn.in_proj_weight")])
+ vision_patch_size = state_dict["visual.conv1.weight"].shape[-1]
+ grid_size = round((state_dict["visual.positional_embedding"].shape[0] - 1) ** 0.5)
+ image_resolution = vision_patch_size * grid_size
+ else:
+ counts: list = [len(set(k.split(".")[2] for k in state_dict if k.startswith(f"visual.layer{b}"))) for b in [1, 2, 3, 4]]
+ vision_layers = tuple(counts)
+ vision_width = state_dict["visual.layer1.0.conv1.weight"].shape[0]
+ output_width = round((state_dict["visual.attnpool.positional_embedding"].shape[0] - 1) ** 0.5)
+ vision_patch_size = None
+ assert output_width ** 2 + 1 == state_dict["visual.attnpool.positional_embedding"].shape[0]
+ image_resolution = output_width * 32
+
+ embed_dim = state_dict["text_projection"].shape[1]
+ context_length = state_dict["positional_embedding"].shape[0]
+ vocab_size = state_dict["token_embedding.weight"].shape[0]
+ transformer_width = state_dict["ln_final.weight"].shape[0]
+ transformer_heads = transformer_width // 64
+ transformer_layers = len(set(k.split(".")[2] for k in state_dict if k.startswith("transformer.resblocks")))
+
+ model = CLIP(
+ embed_dim,
+ image_resolution, vision_layers, vision_width, vision_patch_size,
+ context_length, vocab_size, transformer_width, transformer_heads, transformer_layers, **kwargs
+ )
+
+ for key in ["input_resolution", "context_length", "vocab_size"]:
+ if key in state_dict:
+ del state_dict[key]
+
+ convert_weights(model)
+ model.load_state_dict(state_dict)
+ return model.eval()
diff --git a/models/connector.py b/models/connector.py
new file mode 100644
index 0000000000000000000000000000000000000000..95e9988676c1638d4718df2776277c008ec42d79
--- /dev/null
+++ b/models/connector.py
@@ -0,0 +1,18 @@
+
+from torch import nn
+
+
+
+
+def connector(connector_type='linear', **kwargs):
+ print("Build connector:", connector_type)
+ if connector_type == 'linear':
+ return nn.ModuleList([nn.Linear(kwargs['input_dim'], kwargs['output_dim']) for i in range(kwargs['num_layers'])])
+ else:
+ raise NotImplementedError(connector_type)
+
+
+
+
+
+
diff --git a/models/epalm.py b/models/epalm.py
new file mode 100644
index 0000000000000000000000000000000000000000..27af48f32523fe3b900bcaf62d3e3b8625359815
--- /dev/null
+++ b/models/epalm.py
@@ -0,0 +1,451 @@
+
+import torch
+from torch import nn
+
+from transformers import AutoConfig
+from models.opt import OPTForCausalLM
+import models.vit
+
+import numpy as np
+
+from copy import deepcopy
+
+
+import torch.nn.functional as F
+from transformers.tokenization_utils_base import BatchEncoding
+
+from models.connector import connector
+
+from models.adapters import (
+ Adapter,
+ ParallelAdapter,
+ AdapterWrapper,
+ ParallelAdapterWrapper,
+)
+from typing import Literal
+
+
+
+from models.timesformer import TimeSformer
+
+
+from models.ast import ASTModel
+
+
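+# Connector usage sketch (illustrative only; the 768 -> 1024 dims are
+# assumptions, e.g. projecting ViT-B/16 tokens into an OPT-350m-sized
+# hidden space):
+#
+#   proj = connector(connector_type='linear', input_dim=768, output_dim=1024, num_layers=6)
+#   # proj[i] maps (batch, n_tokens, 768) -> (batch, n_tokens, 1024) at the
+#   # i-th injection point in the language model
+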
+def rank_answer(model, image, question_input, answer_ids, answer_atts, k, tokenizer, special_answer_token=None): + + num_ques = question_input.input_ids.size(0) + if special_answer_token is not None: + start_input = question_input + start_ids = question_input.input_ids + attention_mask = question_input.attention_mask + else: + start_ids = answer_ids[0,0].repeat(num_ques,1) # bos token + + start_ids = torch.cat((question_input.input_ids, start_ids), dim=1) + attention_mask = torch.cat((question_input.attention_mask, torch.ones((num_ques, 1)).to(question_input.attention_mask.device)), dim=1) + + start_input = {'input_ids': start_ids, 'attention_mask': attention_mask} + start_input = BatchEncoding(start_input) + + + + start_output = model(image, start_input, return_dict = True, mode='evaluate') + + logits = start_output.logits[:,-1,:] # first token's logit + + # topk_probs: top-k probability + # topk_ids: [num_question, k] + answer_first_token = answer_ids[:,1] + prob_first_token = F.softmax(logits,dim=1).index_select(dim=1, index=answer_first_token) + topk_probs, topk_ids = prob_first_token.topk(k,dim=1) + + # answer input: [num_question*k, answer_len] + input_ids = [] + input_atts = [] + for b, topk_id in enumerate(topk_ids): + input_ids.append(answer_ids.index_select(dim=0, index=topk_id)) + input_atts.append(answer_atts.index_select(dim=0, index=topk_id)) + input_ids = torch.cat(input_ids,dim=0) + input_atts = torch.cat(input_atts,dim=0) + + attention_mask = tile(attention_mask, 0, k) + image = tile(image, 0, k) + + + + start_ids = tile(start_ids, 0, k) + input_ids = torch.cat((start_ids, input_ids), dim=1) # include the ? + input_atts = torch.cat((attention_mask, input_atts), dim=1) + + targets_ids = input_ids.masked_fill(input_ids == tokenizer.pad_token_id, -100) + + + + # repeat encoder's output for top-k answers + + + inputs = {'input_ids': input_ids, 'attention_mask': input_atts} + inputs = BatchEncoding(inputs) + + output = model(image, inputs, labels = targets_ids, return_dict = True, mode='train', reduction='none') + + answer_loss = output.loss + answer_loss = answer_loss.view(input_ids.size(0),-1) + + # topk_prob: first token probability + + topk_probs = topk_probs.view(-1,1) + log_probs = torch.cat([topk_probs.log(), -answer_loss],dim=1) + + # re-calculate log probabilities for the answer sequences using chain rule + log_probs_sum = log_probs.sum(1) + log_probs_sum = log_probs_sum.view(num_ques,k) + + topk_probs = F.softmax(log_probs_sum, dim=-1) + # get top-k after re-ranking + topk_probs, rerank_id = topk_probs.topk(k,dim=1) + topk_ids = torch.gather(topk_ids, 1, rerank_id) + + return topk_ids, topk_probs + +def tile(x, dim, n_tile): + init_dim = x.size(dim) + repeat_idx = [1] * x.dim() + repeat_idx[dim] = n_tile + x = x.repeat(*(repeat_idx)) + order_index = torch.LongTensor(np.concatenate([init_dim * np.arange(n_tile) + i for i in range(init_dim)])) + return torch.index_select(x, dim, order_index.to(x.device)) + + + + + + + +## modified from https://github.com/ylsung/VL_adapter/blob/main/VL-T5/src/prompt/prompt_modeling.py + +class InputPrompts(nn.Module): + def __init__(self, prompt_len = 10, + prompt_dim = 1024, + mid_dim=512, mlp=True, deep=False, nb_prompts=12): + super().__init__() + + self.prompt_len = prompt_len + self.prompt_dim = prompt_dim + self.mid_dim = mid_dim + + + + self.deep = deep + self.nb_prompts = nb_prompts + if self.deep: + print("Init deep prompts", nb_prompts) + p_len = prompt_len*nb_prompts + else: + p_len = prompt_len + + self.prefix_tokens = 
torch.arange(p_len).long() + if mlp: + self.prefix_embedding = nn.Sequential( + nn.Embedding(p_len, self.prompt_dim), + nn.Linear(self.prompt_dim, self.mid_dim), + nn.Tanh(), + nn.Linear(self.mid_dim, self.prompt_dim), + ) + else: + self.prefix_embedding = nn.Sequential( + nn.Embedding(p_len, self.prompt_dim), + ) + + def get_prompt(self, bsz, device): + input_tokens = self.prefix_tokens.unsqueeze(0).expand(bsz, -1).to(device) # (B, L) + prefix_prompt = self.prefix_embedding(input_tokens) # (B, L, pdim) + + if self.deep: + + prefix_prompt = prefix_prompt.view(bsz, self.nb_prompts, self.prompt_len, self.prompt_dim) + prompts = [prefix_prompt[:, i, :, :] for i in range(self.nb_prompts)] + return prompts + + return prefix_prompt + + +class ePALM(nn.Module): + def __init__(self, + opt_model_name = 'facebook/opt-350m', + vision_model_name = 'vit_base_patch16_224', + use_vis_prefix = True, + start_layer_idx = 11, + end_layer_idx = 23, + return_hidden_state_vision = True, + config = None, low_cpu=False, + ): + super().__init__() + print("Loading ePALM ...") + # text + + config_opt = AutoConfig.from_pretrained(opt_model_name) + + config_opt.use_vis_prefix = use_vis_prefix + config_opt.start_layer_idx = start_layer_idx + config_opt.end_layer_idx = end_layer_idx + + use_cache = config.get('use_cache', True) + config_opt.use_cache = use_cache + + + + + text_step = config.get('text_step', 1) + config_opt.text_step = text_step + + self.select_higher_step = config.get('select_higher_step', False) + config_opt.select_higher_step = self.select_higher_step + + + if not hasattr(config_opt, 'activation_dropout'): + config_opt.activation_dropout = 0.0 + + print("Loading: ", opt_model_name) + self.no_attention_mask = False + + if low_cpu: + self.model_text = OPTForCausalLM.from_pretrained(opt_model_name, config=config_opt, revision="float16", torch_dtype=torch.float16, low_cpu_mem_usage=False) + else: + self.model_text = OPTForCausalLM.from_pretrained(opt_model_name, config=config_opt) + + self.transformer = self.model_text.model.decoder.layers + + print(self.model_text.config) + # vision + print("Loading: ", vision_model_name) + + image_size = config.get('image_res', 224) + num_frames = config.get('num_frames', 4) + pretrained_model = config.get('pretrained_model', None) + + + mask_p = config.get('mask_p', 0) + + space_only_for_images = config.get('space_only_for_images', None) + if 'timesformer' in vision_model_name: + print("Load:", pretrained_model) + self.model_vision = TimeSformer(img_size=image_size, num_frames=num_frames, + attention_type='divided_space_time', pretrained_model=pretrained_model, + return_hidden_state=return_hidden_state_vision, space_only_for_images=space_only_for_images) + vis_dim = self.model_vision.embed_dim + + + elif 'ast' in vision_model_name: + print("Load:", pretrained_model) + self.model_vision = ASTModel(audioset_pretrain=True, verbose=True, + pretrained_model=pretrained_model, return_hidden_state=return_hidden_state_vision) + vis_dim = self.model_vision.original_embedding_dim + + else: + vision_func = getattr(models.vit, vision_model_name) + if pretrained_model is not None: + pretrained=False + else: + pretrained = True + self.model_vision = vision_func(pretrained=pretrained, return_hidden_state=return_hidden_state_vision, + mask_p=mask_p) + if pretrained_model: + self.model_vision.load_pretrained(pretrained_model) + + vis_dim = self.model_vision.embed_dim + + # connector + connector_type = config.get('connector_type', 'linear') + self.connector_type = connector_type + + + + 
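+        # `injected_hidden_states` selects how many of the vision encoder's last
+        # hidden states are used: for each of them, only the CLS token is kept,
+        # projected by the connector to the LM width, and injected as a visual
+        # prefix into the OPT decoder (see forward below). With `shared_connector`,
+        # a single projection layer is reused for all injected states.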
+ + injected_hidden_states = config.get('injected_hidden_states', 1) + self.injected_hidden_states = injected_hidden_states + + + text_dim = self.model_text.config.hidden_size + + connector_config = config.get('connector_config', None) + self.shared_connector = config.get('shared_connector', None) + + + + if self.shared_connector is not None: + num_connectors = 1 + else: + num_connectors = self.injected_hidden_states + + + self.connector = connector(connector_type=connector_type, input_dim=vis_dim, output_dim=text_dim, num_layers=num_connectors, connector_config=connector_config) #nn.ModuleList([nn.Linear(vis_dim, text_dim) for i in range(injected_hidden_states)]) + + # Prompt + self.prompt_tuning = config.get('prompt_tuning', False) + if self.prompt_tuning: + prompt_len = config.get("prompt_len", 10) + + prompt_dim = config_opt.word_embed_proj_dim + + mlp = config.get('mlp', True) + deep = config.get('deep', False) + nb_prompts = config.get('nb_prompts', 12) + self.prompt_module = InputPrompts(prompt_len=prompt_len, prompt_dim=prompt_dim, mid_dim=prompt_dim, + mlp=mlp, deep=deep, nb_prompts=nb_prompts) + + # Adapters + self.use_adapters = config.get('use_adapters', False) + self.mlp_adapter_added, self.attn_adapter_added = False, False + if self.use_adapters: + mlpconfig = config['adapter_config'].get("mlp", None) + if mlpconfig is not None: + mlp_config = deepcopy(mlpconfig) + else: + mlp_config = mlpconfig + + ff_attr = "fc2" + attn_attr = "self_attn" + + if mlp_config: + assert mlp_config.get("adapter_type") is not None + self.add_adapters( + location="mlp", + adapter_type=mlp_config.pop("adapter_type"), + downsample_factor=mlp_config.pop("downsample_factor", 4), + ff_attr = ff_attr, + attn_attr = attn_attr, + **mlp_config, + ) + attn_config = deepcopy(config['adapter_config'].get("attention", None)) + if attn_config: + assert attn_config.get("adapter_type") is not None + self.add_adapters( + location="attention", + adapter_type=attn_config.pop("adapter_type"), + ff_attr = ff_attr, + attn_attr = attn_attr, + **attn_config, + ) + + + def forward(self, image=None, text=None, mode='generate', return_dict=True, labels=None, reduction='mean', modality=None, **generation_kwargs): + + if image is not None: + image_embed, image_feat = self.model_vision(image, external_features=None) + + image_feat = list(image_feat) + + image_feat = image_feat[-self.injected_hidden_states:] + + for i in range(1, self.injected_hidden_states + 1): + + if self.shared_connector: + image_feat[-i] = self.connector[0](image_feat[-i][:, 0, :].unsqueeze(1)) + else: + if modality is not None: + image_feat[-i] = self.connector[-i](image_feat[-i][:, 0, :].unsqueeze(1), modality=modality) + else: + image_feat[-i] = self.connector[-i](image_feat[-i][:, 0, :].unsqueeze(1)) + + else: + image_feat = None + + if self.prompt_tuning: + prompts = self.prompt_module.get_prompt(text.input_ids.shape[0], text.attention_mask.device) + else: + prompts = None + + if self.no_attention_mask: + attention_mask = None + else: + attention_mask = text.attention_mask + if mode == 'train' or mode == 'evaluate': + text_output = self.model_text(input_ids=text.input_ids, attention_mask=attention_mask, + return_dict=return_dict, vis_prefix=image_feat, labels = labels, reduction=reduction, + prompt_embeds=prompts, connector=self.connector) + return text_output + elif mode == 'generate': + gen = self.model_text.generate(input_ids=text.input_ids, vis_prefix=image_feat, prompt_embeds=prompts, + connector=self.connector, attention_mask=attention_mask, + 
**generation_kwargs) + return gen + + + def add_adapters( + self, + downsample_factor: int = 4, + adapter_type: Literal["normal", "parallel", "scaled_parallel"] = "normal", + location: Literal["mlp", "attention"] = "mlp", + ff_attr: str = "fc2", + attn_attr: str = "self_attn", + **adapter_kwargs, + ): + """ + Adds an adapter layer to `self` at the specified location + """ + assert adapter_type in [ + "normal", + "parallel", + "scaled_parallel", + ], "adapter_type must be one of 'normal', 'parallel', or 'scaled_parallel'" + assert location in [ + "mlp", + "attention", + ], "location must be one of 'mlp' or 'attention'" + + for l in range(len(self.transformer)): + if location == "mlp": + if self.mlp_adapter_added: + raise ValueError("Adapter layer already added") + mlp = getattr(self.transformer[l], ff_attr) + if adapter_type in ["parallel", "scaled_parallel"]: + adapter_layer = ParallelAdapter( + module=mlp, + dim=self.model_text.config.hidden_size, + downsample_factor=downsample_factor, + scaled=adapter_type == "scaled_parallel", + **adapter_kwargs, + ) + else: + adpt = Adapter( + dim=self.model_text.config.hidden_size, + downsample_factor=downsample_factor, + **adapter_kwargs, + ) + adapter_layer = nn.Sequential( + *[ + mlp, + adpt, + ] + ) + setattr(self.transformer[l], ff_attr, adapter_layer) + else: + if self.attn_adapter_added: + raise ValueError("Adapter layer already added") + attn = getattr(self.transformer[l], attn_attr) + if adapter_type in ["parallel", "scaled_parallel"]: + adapter_layer = ParallelAdapterWrapper( + module=attn, + dim=self.model_text.config.hidden_size, + downsample_factor=downsample_factor, + scaled="scaled" in adapter_type, + **adapter_kwargs, + ) + else: + adapter_layer = AdapterWrapper( + attn_block=attn, + dim=self.model_text.config.hidden_size, + downsample_factor=downsample_factor, + **adapter_kwargs, + ) + setattr(self.transformer[l], attn_attr, adapter_layer) + + if location == "mlp": + self.mlp_adapter_added = True + else: + self.attn_adapter_added = True + + diff --git a/models/opt.py b/models/opt.py new file mode 100644 index 0000000000000000000000000000000000000000..6991507303b906ee39508dcbf736b7f548303064 --- /dev/null +++ b/models/opt.py @@ -0,0 +1,1100 @@ +# coding=utf-8 +# Copyright 2022 The Fairseq Authors and The HuggingFace Inc. team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
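The remainder of this file is the HuggingFace OPT implementation, modified so that connector-projected visual tokens (`vis_prefix`) can be prepended to the hidden states of selected decoder layers. As a rough, self-contained sketch of that mechanism (the shapes and the single-token prefix are assumptions for illustration, not this file's API):

```python
import torch
import torch.nn.functional as F

# Toy sketch: prepend one visual prefix token to the decoder hidden states and
# grow the additive attention mask (0 = attend, -inf = blocked) so that every
# text position can also attend to the prefix.
bs, seq_len, dim = 2, 5, 16
hidden_states = torch.randn(bs, seq_len, dim)
vis_pref = torch.randn(bs, 1, dim)  # one connector-projected CLS token

hidden_states = torch.cat((vis_pref, hidden_states), dim=1)  # (bs, 1 + seq_len, dim)

causal = torch.full((seq_len, seq_len), float("-inf")).triu(1)  # causal part
mask = causal[None, None].expand(bs, 1, seq_len, seq_len)
mask = F.pad(mask, (1, 0, 1, 0), value=0.0)  # prefix row/column are attendable
assert mask.shape[-1] == hidden_states.shape[1]  # (bs, 1, 1+seq, 1+seq)
```

In the actual decoder below, the same idea is applied per layer between `start_layer_idx` and `end_layer_idx`, with the mask padded by the prefix length `l`.
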
+""" PyTorch OPT model.""" +import random +from typing import List, Optional, Tuple, Union + +import torch +import torch.utils.checkpoint +from torch import nn +from torch.nn import CrossEntropyLoss + +from transformers.activations import ACT2FN +from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast +from transformers.modeling_utils import PreTrainedModel +from transformers.utils import ( + add_code_sample_docstrings, + add_start_docstrings, + add_start_docstrings_to_model_forward, + logging, + replace_return_docstrings, +) +from transformers.models.opt.configuration_opt import OPTConfig +import torch.nn.functional as F + +import numpy as np +logger = logging.get_logger(__name__) + +_CHECKPOINT_FOR_DOC = "facebook/opt-350m" +_CONFIG_FOR_DOC = "OPTConfig" +_TOKENIZER_FOR_DOC = "GPT2Tokenizer" + +# Base model docstring +_EXPECTED_OUTPUT_SHAPE = [1, 8, 1024] + + +OPT_PRETRAINED_MODEL_ARCHIVE_LIST = [ + "facebook/opt-125m", + "facebook/opt-350m", + "facebook/opt-1.3b", + "facebook/opt-2.7b", + "facebook/opt-6.7b", + "facebook/opt-13b", + "facebook/opt-30b", + # See all OPT models at https://huggingface.co/models?filter=opt +] + +def tile(x, dim, n_tile): + init_dim = x.size(dim) + repeat_idx = [1] * x.dim() + repeat_idx[dim] = n_tile + x = x.repeat(*(repeat_idx)) + order_index = torch.LongTensor(np.concatenate([init_dim * np.arange(n_tile) + i for i in range(init_dim)])) + return torch.index_select(x, dim, order_index.to(x.device)) + + +def _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0): + """ + Make causal mask used for bi-directional self-attention. + """ + bsz, tgt_len = input_ids_shape + mask = torch.full((tgt_len, tgt_len), torch.tensor(float("-inf"))) + mask_cond = torch.arange(mask.size(-1)) + mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0) + mask = mask.to(dtype) + + if past_key_values_length > 0: + mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1) + return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length) + + +def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None): + """ + Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. + """ + bsz, src_len = mask.size() + tgt_len = tgt_len if tgt_len is not None else src_len + + expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype) + + inverted_mask = 1.0 - expanded_mask + + return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min) + + +class OPTLearnedPositionalEmbedding(nn.Embedding): + """ + This module learns positional embeddings up to a fixed maximum size. + """ + + def __init__(self, num_embeddings: int, embedding_dim: int): + # OPT is set up so that if padding_idx is specified then offset the embedding ids by 2 + # and adjust num_embeddings appropriately. 
Other models don't have this hack + self.offset = 2 + super().__init__(num_embeddings + self.offset, embedding_dim) + + def forward(self, attention_mask: torch.LongTensor, past_key_values_length: int = 0): + """`input_ids_shape` is expected to be [bsz x seqlen].""" + attention_mask = attention_mask.long() + + # create positions depending on attention_mask + positions = (torch.cumsum(attention_mask, dim=1).type_as(attention_mask) * attention_mask).long() - 1 + + # cut positions if `past_key_values_length` is > 0 + positions = positions[:, past_key_values_length:] + + return super().forward(positions + self.offset) + + +# Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->OPT +class OPTAttention(nn.Module): + """Multi-headed attention from 'Attention Is All You Need' paper""" + + def __init__( + self, + embed_dim: int, + num_heads: int, + dropout: float = 0.0, + is_decoder: bool = False, + bias: bool = True, + ): + super().__init__() + self.embed_dim = embed_dim + self.num_heads = num_heads + self.dropout = dropout + self.head_dim = embed_dim // num_heads + + if (self.head_dim * num_heads) != self.embed_dim: + raise ValueError( + f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}" + f" and `num_heads`: {num_heads})." + ) + self.scaling = self.head_dim**-0.5 + self.is_decoder = is_decoder + + self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + + def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): + return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous() + + def forward( + self, + hidden_states: torch.Tensor, + key_value_states: Optional[torch.Tensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + attention_mask: Optional[torch.Tensor] = None, + layer_head_mask: Optional[torch.Tensor] = None, + output_attentions: bool = False, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + """Input shape: Batch x Time x Channel""" + + # if key_value_states are provided this layer is used as a cross-attention layer + # for the decoder + is_cross_attention = key_value_states is not None + + bsz, tgt_len, _ = hidden_states.size() + + # get query proj + query_states = self.q_proj(hidden_states) * self.scaling + # get key, value proj + if is_cross_attention and past_key_value is not None: + # reuse k,v, cross_attentions + key_states = past_key_value[0] + value_states = past_key_value[1] + elif is_cross_attention: + # cross_attentions + key_states = self._shape(self.k_proj(key_value_states), -1, bsz) + value_states = self._shape(self.v_proj(key_value_states), -1, bsz) + elif past_key_value is not None: + # reuse k, v, self_attention + key_states = self._shape(self.k_proj(hidden_states), -1, bsz) + value_states = self._shape(self.v_proj(hidden_states), -1, bsz) + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + else: + # self_attention + key_states = self._shape(self.k_proj(hidden_states), -1, bsz) + value_states = self._shape(self.v_proj(hidden_states), -1, bsz) + + if self.is_decoder: + # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states. 
+ # Further calls to cross_attention layer can then reuse all cross-attention + # key/value_states (first "if" case) + # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of + # all previous decoder key/value_states. Further calls to uni-directional self-attention + # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) + # if encoder bi-directional self-attention `past_key_value` is always `None` + past_key_value = (key_states, value_states) + + proj_shape = (bsz * self.num_heads, -1, self.head_dim) + query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape) + key_states = key_states.view(*proj_shape) + value_states = value_states.view(*proj_shape) + + src_len = key_states.size(1) + attn_weights = torch.bmm(query_states, key_states.transpose(1, 2)) + + if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len): + raise ValueError( + f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is" + f" {attn_weights.size()}" + ) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, tgt_len, src_len): + raise ValueError( + f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}" + ) + attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask + attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) + + attn_weights = nn.functional.softmax(attn_weights, dim=-1) + + if layer_head_mask is not None: + if layer_head_mask.size() != (self.num_heads,): + raise ValueError( + f"Head mask for a single layer should be of size {(self.num_heads,)}, but is" + f" {layer_head_mask.size()}" + ) + attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) + + if output_attentions: + # this operation is a bit awkward, but it's required to + # make sure that attn_weights keeps its gradient. + # In order to do so, attn_weights have to be reshaped + # twice and have to be reused in the following + attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len) + else: + attn_weights_reshaped = None + + attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training) + + attn_output = torch.bmm(attn_probs, value_states) + + if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim): + raise ValueError( + f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is" + f" {attn_output.size()}" + ) + + attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim) + attn_output = attn_output.transpose(1, 2) + + # Use the `embed_dim` from the config (stored in the class) rather than `hidden_state` because `attn_output` can be + # partitioned aross GPUs when using tensor-parallelism. 
+ attn_output = attn_output.reshape(bsz, tgt_len, self.embed_dim) + + attn_output = self.out_proj(attn_output) + + return attn_output, attn_weights_reshaped, past_key_value + + +class OPTDecoderLayer(nn.Module): + def __init__(self, config: OPTConfig): + super().__init__() + self.embed_dim = config.hidden_size + self.self_attn = OPTAttention( + embed_dim=self.embed_dim, + num_heads=config.num_attention_heads, + dropout=config.attention_dropout, + is_decoder=True, + ) + self.do_layer_norm_before = config.do_layer_norm_before + self.dropout = config.dropout + self.activation_fn = ACT2FN[config.activation_function] + + self.activation_dropout = config.activation_dropout + + self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim) + self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim) + self.fc2 = nn.Linear(config.ffn_dim, self.embed_dim) + self.final_layer_norm = nn.LayerNorm(self.embed_dim) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + layer_head_mask: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = False, + use_cache: Optional[bool] = False, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]: + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): attention mask of size + `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. + layer_head_mask (`torch.FloatTensor`, *optional*): mask for attention heads in a given layer of size + `(encoder_attention_heads,)`. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). 
+ past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states + """ + + residual = hidden_states + + # 125m, 1.7B, ..., 175B applies layer norm BEFORE attention + if self.do_layer_norm_before: + hidden_states = self.self_attn_layer_norm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.self_attn( + hidden_states=hidden_states, + past_key_value=past_key_value, + attention_mask=attention_mask, + layer_head_mask=layer_head_mask, + output_attentions=output_attentions, + ) + hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) + hidden_states = residual + hidden_states + + # 350m applies layer norm AFTER attention + if not self.do_layer_norm_before: + hidden_states = self.self_attn_layer_norm(hidden_states) + + # Fully Connected + hidden_states_shape = hidden_states.shape + hidden_states = hidden_states.reshape(-1, hidden_states.size(-1)) + residual = hidden_states + + # 125m, 1.7B, ..., 175B applies layer norm BEFORE attention + if self.do_layer_norm_before: + hidden_states = self.final_layer_norm(hidden_states) + + hidden_states = self.fc1(hidden_states) + hidden_states = self.activation_fn(hidden_states) + + hidden_states = self.fc2(hidden_states) + hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) + + hidden_states = (residual + hidden_states).view(hidden_states_shape) + + # 350m applies layer norm AFTER attention + if not self.do_layer_norm_before: + hidden_states = self.final_layer_norm(hidden_states) + + outputs = (hidden_states,) + + if output_attentions: + outputs += (self_attn_weights,) + + if use_cache: + outputs += (present_key_value,) + + return outputs + + +OPT_START_DOCSTRING = r""" + This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the + library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads + etc.) + + This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. + Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage + and behavior. + + Parameters: + config ([`OPTConfig`]): + Model configuration class with all the parameters of the model. Initializing with a config file does not + load the weights associated with the model, only the configuration. Check out the + [`~PreTrainedModel.from_pretrained`] method to load the model weights. 
+""" + + +@add_start_docstrings( + "The bare OPT Model outputting raw hidden-states without any specific head on top.", + OPT_START_DOCSTRING, +) +class OPTPreTrainedModel(PreTrainedModel): + config_class = OPTConfig + base_model_prefix = "model" + supports_gradient_checkpointing = True + _no_split_modules = ["OPTDecoderLayer"] + _keys_to_ignore_on_load_unexpected = [r"decoder.version"] + + def _init_weights(self, module): + std = self.config.init_std + if isinstance(module, nn.Linear): + module.weight.data.normal_(mean=0.0, std=std) + if module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.Embedding): + module.weight.data.normal_(mean=0.0, std=std) + if module.padding_idx is not None: + module.weight.data[module.padding_idx].zero_() + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance(module, (OPTDecoder)): + module.gradient_checkpointing = value + + +OPT_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`GPT2Tokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + + Indices can be obtained using [`OPTTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + If `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see + `past_key_values`). + + If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`] + and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more + information on the default strategy. + head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*): + Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in `[0, 1]`: + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape + `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape + `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. + + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention + blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that + don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all + `decoder_input_ids` of shape `(batch_size, sequence_length)`. 
+ inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This + is useful if you want more control over how to convert `input_ids` indices into associated vectors than the + model's internal embedding lookup matrix. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see + `past_key_values`). + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + + +class OPTDecoder(OPTPreTrainedModel): + """ + Transformer decoder consisting of *config.num_hidden_layers* layers. Each layer is a [`OPTDecoderLayer`] + + Args: + config: OPTConfig + embed_tokens (nn.Embedding): output embedding + """ + + def __init__(self, config: OPTConfig): + super().__init__(config) + self.dropout = config.dropout + self.layerdrop = config.layerdrop + self.padding_idx = config.pad_token_id + self.max_target_positions = config.max_position_embeddings + self.vocab_size = config.vocab_size + + self.embed_tokens = nn.Embedding(config.vocab_size, config.word_embed_proj_dim, self.padding_idx) + self.embed_positions = OPTLearnedPositionalEmbedding(config.max_position_embeddings, config.hidden_size) + + if config.word_embed_proj_dim != config.hidden_size: + self.project_out = nn.Linear(config.hidden_size, config.word_embed_proj_dim, bias=False) + else: + self.project_out = None + + if config.word_embed_proj_dim != config.hidden_size: + self.project_in = nn.Linear(config.word_embed_proj_dim, config.hidden_size, bias=False) + else: + self.project_in = None + + # Note that the only purpose of `config._remove_final_layer_norm` is to keep backward compatibility + # with checkpoints that have been fine-tuned before transformers v4.20.1 + # see https://github.com/facebookresearch/metaseq/pull/164 + if config.do_layer_norm_before and not config._remove_final_layer_norm: + self.final_layer_norm = nn.LayerNorm(config.hidden_size) + else: + self.final_layer_norm = None + + self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) + + self.gradient_checkpointing = False + # Initialize weights and apply final processing + self.post_init() + + if hasattr(config, 'use_vis_prefix'): + self.use_vis_prefix = config.use_vis_prefix + self.start_layer_idx = config.start_layer_idx + self.end_layer_idx = config.end_layer_idx + else: + self.use_vis_prefix = False + + + self.text_step = config.text_step if hasattr(config, 'text_step') else 1 + self.select_higher_step = config.select_higher_step if hasattr(config, 'select_higher_step') else False + + + def get_input_embeddings(self): + return self.embed_tokens + + def set_input_embeddings(self, value): + self.embed_tokens = value + + # Copied from transformers.models.bart.modeling_bart.BartDecoder._prepare_decoder_attention_mask + def _prepare_decoder_attention_mask(self, attention_mask, input_shape, inputs_embeds, past_key_values_length): + # create causal mask + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + 
combined_attention_mask = None + if input_shape[-1] > 1: + combined_attention_mask = _make_causal_mask( + input_shape, inputs_embeds.dtype, past_key_values_length=past_key_values_length + ).to(inputs_embeds.device) + + if attention_mask is not None: + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + expanded_attn_mask = _expand_mask(attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1]) + combined_attention_mask = ( + expanded_attn_mask if combined_attention_mask is None else expanded_attn_mask + combined_attention_mask + ) + + return combined_attention_mask + + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + head_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + vis_prefix = None, + prompt_embeds = None, + connector = None + ) -> Union[Tuple, BaseModelOutputWithPast]: + r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you + provide it. + + Indices can be obtained using [`OPTTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + head_mask (`torch.Tensor` of shape `(num_hidden_layers, num_attention_heads)`, *optional*): + Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of + shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of + + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the + cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those + that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of + all `decoder_input_ids` of shape `(batch_size, sequence_length)`. + + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. + This is useful if you want more control over how to convert `input_ids` indices into associated vectors + than the model's internal embedding lookup matrix. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. 
+ output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors + for more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. + """ + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # retrieve input_ids and inputs_embeds + if input_ids is not None and inputs_embeds is not None: + raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time") + elif input_ids is not None: + input_shape = input_ids.size() + input_ids = input_ids.view(-1, input_shape[-1]) + elif inputs_embeds is not None: + input_shape = inputs_embeds.size()[:-1] + else: + raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds") + + past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 + + if inputs_embeds is None: + inputs_embeds = self.embed_tokens(input_ids) + # embed positions + if attention_mask is None: + attention_mask = torch.ones(inputs_embeds.shape[:2], dtype=torch.bool, device=inputs_embeds.device) + + + + pos_embeds = self.embed_positions(attention_mask, past_key_values_length) + ## Prompt tuning + if prompt_embeds is not None and past_key_values_length == 0: # in case of generation don't re add the prompt + p_embeds = prompt_embeds + + prompt_len = p_embeds.shape[1] + + ## support number of beams > 1, inputs_embeds (nxbs, L, dim) + if p_embeds.shape[0] < inputs_embeds.shape[0]: + p_embeds = p_embeds.repeat(inputs_embeds.shape[0]//p_embeds.shape[0], 1, 1) + + inputs_embeds = torch.cat((p_embeds, inputs_embeds), dim=1) + attention_mask = F.pad(attention_mask, (prompt_len, 0, 0, 0), "constant", 1) + input_shape = attention_mask.size() + + attention_mask = self._prepare_decoder_attention_mask( + attention_mask, input_shape, inputs_embeds, past_key_values_length + ) + + if self.project_in is not None: + inputs_embeds = self.project_in(inputs_embeds) + + if prompt_embeds is not None and past_key_values_length == 0:# no positional embedding for prompts + hidden_states = inputs_embeds + hidden_states[:, prompt_len:, :] = inputs_embeds[:, prompt_len:, :] + pos_embeds + else: + hidden_states = inputs_embeds + pos_embeds + hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) + + + + + # decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + next_decoder_cache = () if use_cache else None + + # check if head_mask has a correct number of layers specified if desired + for attn_mask, mask_name in zip([head_mask], ["head_mask"]): + if attn_mask is not None: + if attn_mask.size()[0] != (len(self.layers)): + raise ValueError( + f"The `{mask_name}` should be specified for {len(self.layers)} layers, but it is for" + f" {head_mask.size()[0]}." 
+ ) + + if self.use_vis_prefix and vis_prefix is not None: + num_layers = self.end_layer_idx - self.start_layer_idx + if isinstance(vis_prefix, list) or isinstance(vis_prefix, tuple): + num_vis_prefix = len(vis_prefix) + else: + num_vis_prefix = 1 + + if num_vis_prefix == 1: + step = 1 + else: + step = num_layers//num_vis_prefix if num_layers>num_vis_prefix else 1 + + if self.select_higher_step: + self.text_step = max([step, self.text_step]) + token_added = False + for idx, decoder_layer in enumerate(self.layers): + # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) + if output_hidden_states: + all_hidden_states += (hidden_states,) + + dropout_probability = random.uniform(0, 1) + if self.training and (dropout_probability < self.layerdrop): + continue + + past_key_value = past_key_values[idx] if past_key_values is not None else None + + + ################################################################ + ### Add prefix from visual encoder + tgt_len, src_len = attention_mask.shape[2:] + # don't use vis pref during generation of new tokens + if self.use_vis_prefix and vis_prefix is not None: + if idx >= self.start_layer_idx and idx < self.end_layer_idx: + + if (idx)%self.text_step==0: + + vis_prefix_idx = ((idx - self.start_layer_idx)//step) + + if num_vis_prefix == 1: + vis_pref = vis_prefix[0] + else: + vis_prefix_idx = min(vis_prefix_idx, len(vis_prefix)-1) + vis_pref = vis_prefix[vis_prefix_idx] + + + bs, l, dim = vis_pref.size() + + bs_v, bs_t = vis_pref.shape[0], hidden_states.shape[0] + if bs_v != bs_t: + vis_pref = tile(vis_pref, 0, bs_t//bs_v) + + if token_added and (tgt_len == src_len): + token_added = True + hidden_states[:, 0, :] = vis_pref[:, 0, :] + else: + if (tgt_len == src_len): + hidden_states = torch.cat((vis_pref, hidden_states), dim=1) # (bs, l, dim) + attention_mask = F.pad(attention_mask, (l, 0, l, 0, 0, 0, 0, 0), "constant", 0) + elif idx > 0: # during generation + attention_mask = F.pad(attention_mask, (l, 0, 0, 0, 0, 0, 0, 0), "constant", 0) + token_added = True + + ################################################################ + + + if self.gradient_checkpointing and self.training: + + if use_cache: + logger.warning( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
+ ) + use_cache = False + + def create_custom_forward(module): + def custom_forward(*inputs): + # None for past_key_value + return module(*inputs, output_attentions, None) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(decoder_layer), + hidden_states, + attention_mask, + head_mask[idx] if head_mask is not None else None, + None, + ) + else: + + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + layer_head_mask=(head_mask[idx] if head_mask is not None else None), + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + hidden_states = layer_outputs[0] + + if use_cache: + next_decoder_cache += (layer_outputs[2 if output_attentions else 1],) + + if output_attentions: + all_self_attns += (layer_outputs[1],) + + if self.final_layer_norm is not None: + hidden_states = self.final_layer_norm(hidden_states) + + if self.project_out is not None: + hidden_states = self.project_out(hidden_states) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states,) + + next_cache = next_decoder_cache if use_cache else None + if not return_dict: + return tuple(v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] if v is not None) + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + ) + + +@add_start_docstrings( + "The bare OPT Model outputting raw hidden-states without any specific head on top.", + OPT_START_DOCSTRING, +) +class OPTModel(OPTPreTrainedModel): + def __init__(self, config: OPTConfig): + super().__init__(config) + self.decoder = OPTDecoder(config) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.decoder.embed_tokens + + def set_input_embeddings(self, value): + self.decoder.embed_tokens = value + + def get_decoder(self): + return self.decoder + + @add_start_docstrings_to_model_forward(OPT_INPUTS_DOCSTRING) + @add_code_sample_docstrings( + processor_class=_TOKENIZER_FOR_DOC, + checkpoint=_CHECKPOINT_FOR_DOC, + output_type=BaseModelOutputWithPast, + config_class=_CONFIG_FOR_DOC, + expected_output=_EXPECTED_OUTPUT_SHAPE, + ) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + head_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + vis_prefix = None, + prompt_embeds = None, + connector = None, + ) -> Union[Tuple, BaseModelOutputWithPast]: + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # decoder outputs consists of (dec_features, past_key_value, dec_hidden, dec_attn) + decoder_outputs = self.decoder( + input_ids=input_ids, + attention_mask=attention_mask, + head_mask=head_mask, + past_key_values=past_key_values, + 
inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + vis_prefix=vis_prefix, + prompt_embeds=prompt_embeds, + connector=connector + ) + + if not return_dict: + return decoder_outputs + + return BaseModelOutputWithPast( + last_hidden_state=decoder_outputs.last_hidden_state, + past_key_values=decoder_outputs.past_key_values, + hidden_states=decoder_outputs.hidden_states, + attentions=decoder_outputs.attentions, + ) + + +class OPTForCausalLM(OPTPreTrainedModel): + _keys_to_ignore_on_load_missing = [r"lm_head.weight"] + + def __init__(self, config): + super().__init__(config) + self.model = OPTModel(config) + + # the lm_head weight is automatically tied to the embed tokens weight + self.lm_head = nn.Linear(config.word_embed_proj_dim, config.vocab_size, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.decoder.embed_tokens + + def set_input_embeddings(self, value): + self.model.decoder.embed_tokens = value + + def get_output_embeddings(self): + return self.lm_head + + def set_output_embeddings(self, new_embeddings): + self.lm_head = new_embeddings + + def set_decoder(self, decoder): + self.model.decoder = decoder + + def get_decoder(self): + return self.model.decoder + + @replace_return_docstrings(output_type=CausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + head_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + vis_prefix = None, + prompt_embeds = None, + reduction='mean', + connector = None + ) -> Union[Tuple, CausalLMOutputWithPast]: + r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you + provide it. + + Indices can be obtained using [`OPTTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + head_mask (`torch.Tensor` of shape `(num_hidden_layers, num_attention_heads)`, *optional*): + Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of + shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of + shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. 
The two additional + tensors are only required when the model is used as a decoder in a Sequence to Sequence model. + + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the + cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those + that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of + all `decoder_input_ids` of shape `(batch_size, sequence_length)`. + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. + This is useful if you want more control over how to convert `input_ids` indices into associated vectors + than the model's internal embedding lookup matrix. + labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., + config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored + (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors + for more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. + + Returns: + + Example: + + ```python + >>> from transformers import GPT2Tokenizer, OPTForCausalLM + + >>> model = OPTForCausalLM.from_pretrained("facebook/opt-350m") + >>> tokenizer = GPT2Tokenizer.from_pretrained("facebook/opt-350m") + + >>> prompt = "Hey, are you consciours? Can you talk to me?" + >>> inputs = tokenizer(prompt, return_tensors="pt") + + >>> # Generate + >>> generate_ids = model.generate(inputs.input_ids, max_length=30) + >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] + "Hey, are you consciours? Can you talk to me?\nI'm not consciours, but I can talk to you." 
+ ```""" + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model.decoder( + input_ids=input_ids, + attention_mask=attention_mask, + head_mask=head_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + vis_prefix=vis_prefix, + prompt_embeds=prompt_embeds, + connector=connector + ) + + logits = self.lm_head(outputs[0]).contiguous() + + + loss = None + if labels is not None: + + + + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + + # ignore prompt tokens + src_len, tgt_len = shift_logits.shape[1], shift_labels.shape[-1] + if (tgt_len != src_len): + shift_labels = F.pad(shift_labels, (src_len-tgt_len, 0, 0, 0), "constant", -100) + # Flatten the tokens + loss_fct = CrossEntropyLoss(reduction=reduction) + loss = loss_fct(shift_logits.view(-1, self.config.vocab_size), shift_labels.view(-1)) + + loss = loss.view(logits.size(0),-1).sum(1) + + if not return_dict: + output = (logits,) + outputs[1:] + return (loss,) + output if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=logits, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + ) + + def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, use_cache=None, **kwargs): + # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly + if attention_mask is None: + attention_mask = input_ids.new_ones(input_ids.shape) + + if past: + input_ids = input_ids[:, -1:] + # first step, decoder_cached_states are empty + return { + "input_ids": input_ids, # encoder_outputs is defined. input_ids not needed + "attention_mask": attention_mask, + "past_key_values": past, + "use_cache": use_cache, + **kwargs, + } + + @staticmethod + def _reorder_cache(past, beam_idx): + reordered_past = () + for layer_past in past: + reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),) + return reordered_past diff --git a/models/timesformer.py b/models/timesformer.py new file mode 100644 index 0000000000000000000000000000000000000000..da408738ab8ec0b36e620388a754d70137ea122b --- /dev/null +++ b/models/timesformer.py @@ -0,0 +1,412 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
+# Copyright 2020 Ross Wightman +# Modified Model definition + +import torch +import torch.nn as nn +from functools import partial +import math +import warnings +import torch.nn.functional as F +import numpy as np + +from timesformer.models.vit_utils import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD +from timesformer.models.helpers import load_pretrained +from timesformer.models.vit_utils import DropPath, to_2tuple, trunc_normal_ + +# from timesformer.models.build import MODEL_REGISTRY +from torch import einsum +from einops import rearrange, reduce, repeat +import torchvision + +def _cfg(url='', **kwargs): + return { + 'url': url, + 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, + 'crop_pct': .9, 'interpolation': 'bicubic', + 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, + 'first_conv': 'patch_embed.proj', 'classifier': 'head', + **kwargs + } + + +default_cfgs = { + 'timesformer_vit_base_patch16_224': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_224-80ecf9dd.pth', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + ), +} + +class Mlp(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0., with_qkv=True): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim ** -0.5 + self.with_qkv = with_qkv + if self.with_qkv: + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + self.attn_drop = nn.Dropout(attn_drop) + + def forward(self, x): + B, N, C = x.shape + if self.with_qkv: + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] + else: + qkv = x.reshape(B, N, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3) + q, k, v = qkv, qkv, qkv + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + if self.with_qkv: + x = self.proj(x) + x = self.proj_drop(x) + return x + +class Block(nn.Module): + + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., + drop_path=0.1, act_layer=nn.GELU, norm_layer=nn.LayerNorm, attention_type='divided_space_time'): + super().__init__() + self.attention_type = attention_type + assert(attention_type in ['divided_space_time', 'space_only','joint_space_time']) + + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop) + + ## Temporal Attention Parameters + if self.attention_type == 'divided_space_time': + self.temporal_norm1 = norm_layer(dim) + self.temporal_attn = Attention( + dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop) + self.temporal_fc = nn.Linear(dim, dim) + + ## 
drop path
+ self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
+ self.norm2 = norm_layer(dim)
+ mlp_hidden_dim = int(dim * mlp_ratio)
+ self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop)
+
+
+ def forward(self, x, B, T, W):
+ num_spatial_tokens = (x.size(1) - 1) // T
+ H = num_spatial_tokens // W
+
+ if self.attention_type in ['space_only', 'joint_space_time']:
+ x = x + self.drop_path(self.attn(self.norm1(x)))
+ x = x + self.drop_path(self.mlp(self.norm2(x)))
+ return x
+ elif self.attention_type == 'divided_space_time':
+ ## Temporal
+ xt = x[:,1:,:]
+ xt = rearrange(xt, 'b (h w t) m -> (b h w) t m',b=B,h=H,w=W,t=T)
+ res_temporal = self.drop_path(self.temporal_attn(self.temporal_norm1(xt)))
+ res_temporal = rearrange(res_temporal, '(b h w) t m -> b (h w t) m',b=B,h=H,w=W,t=T)
+ res_temporal = self.temporal_fc(res_temporal)
+ xt = x[:,1:,:] + res_temporal
+
+ ## Spatial
+ init_cls_token = x[:,0,:].unsqueeze(1)
+ cls_token = init_cls_token.repeat(1, T, 1)
+ cls_token = rearrange(cls_token, 'b t m -> (b t) m',b=B,t=T).unsqueeze(1)
+ xs = xt
+ xs = rearrange(xs, 'b (h w t) m -> (b t) (h w) m',b=B,h=H,w=W,t=T)
+ xs = torch.cat((cls_token, xs), 1)
+ res_spatial = self.drop_path(self.attn(self.norm1(xs)))
+
+ ### Taking care of CLS token
+ cls_token = res_spatial[:,0,:]
+ cls_token = rearrange(cls_token, '(b t) m -> b t m',b=B,t=T)
+ cls_token = torch.mean(cls_token,1,True) ## average the CLS token over frames
+ res_spatial = res_spatial[:,1:,:]
+ res_spatial = rearrange(res_spatial, '(b t) (h w) m -> b (h w t) m',b=B,h=H,w=W,t=T)
+ res = res_spatial
+ x = xt
+
+ ## Mlp
+ x = torch.cat((init_cls_token, x), 1) + torch.cat((cls_token, res), 1)
+ x = x + self.drop_path(self.mlp(self.norm2(x)))
+ return x
+
+class PatchEmbed(nn.Module):
+ """ Image to Patch Embedding
+ """
+ def __init__(self, img_size=224, patch_size=16, in_chans=3, embed_dim=768):
+ super().__init__()
+ img_size = to_2tuple(img_size)
+ patch_size = to_2tuple(patch_size)
+ num_patches = (img_size[1] // patch_size[1]) * (img_size[0] // patch_size[0])
+ self.img_size = img_size
+ self.patch_size = patch_size
+ self.num_patches = num_patches
+
+ self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size)
+
+ def forward(self, x):
+ B, C, T, H, W = x.shape
+ x = rearrange(x, 'b c t h w -> (b t) c h w')
+ x = self.proj(x)
+ W = x.size(-1)
+ x = x.flatten(2).transpose(1, 2)
+ return x, T, W
+
+
+class VisionTransformer(nn.Module):
+ """ Vision Transformer
+ """
+ def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12,
+ num_heads=12, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop_rate=0., attn_drop_rate=0.,
+ drop_path_rate=0.1, hybrid_backbone=None, norm_layer=nn.LayerNorm,
+ num_frames=8, attention_type='divided_space_time', dropout=0.,
+ return_hidden_state=False):
+ super().__init__()
+ self.attention_type = attention_type
+ self.depth = depth
+ self.dropout = nn.Dropout(dropout)
+ self.num_classes = num_classes
+ self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models
+ self.patch_embed = PatchEmbed(
+ img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim)
+ num_patches = self.patch_embed.num_patches
+
+ ## Positional Embeddings
+ self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
+ self.pos_embed = nn.Parameter(torch.zeros(1, num_patches+1, embed_dim))
+ self.pos_drop = nn.Dropout(p=drop_rate)
+ if self.attention_type != 
'space_only': + self.time_embed = nn.Parameter(torch.zeros(1, num_frames, embed_dim)) + self.time_drop = nn.Dropout(p=drop_rate) + + ## Attention Blocks + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, self.depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + Block( + dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale, + drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[i], norm_layer=norm_layer, attention_type=self.attention_type) + for i in range(self.depth)]) + self.norm = norm_layer(embed_dim) + + # Classifier head + self.head = nn.Linear(embed_dim, num_classes) if num_classes > 0 else nn.Identity() + + trunc_normal_(self.pos_embed, std=.02) + trunc_normal_(self.cls_token, std=.02) + self.apply(self._init_weights) + + ## initialization of temporal attention weights + if self.attention_type == 'divided_space_time': + i = 0 + for m in self.blocks.modules(): + m_str = str(m) + if 'Block' in m_str: + if i > 0: + nn.init.constant_(m.temporal_fc.weight, 0) + nn.init.constant_(m.temporal_fc.bias, 0) + i += 1 + print("Load custom timesformer") + + self.return_hidden_state = return_hidden_state + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'pos_embed', 'cls_token', 'time_embed'} + + def get_classifier(self): + return self.head + + def reset_classifier(self, num_classes, global_pool=''): + self.num_classes = num_classes + self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity() + + def forward_features(self, x, attention_type=None): + all_hidden_states = () if self.return_hidden_state else None + B = x.shape[0] + x, T, W = self.patch_embed(x) + cls_tokens = self.cls_token.expand(x.size(0), -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + ## resizing the positional embeddings in case they don't match the input at inference + if x.size(1) != self.pos_embed.size(1): + pos_embed = self.pos_embed + cls_pos_embed = pos_embed[0,0,:].unsqueeze(0).unsqueeze(1) + other_pos_embed = pos_embed[0,1:,:].unsqueeze(0).transpose(1, 2) + P = int(other_pos_embed.size(2) ** 0.5) + H = x.size(1) // W + other_pos_embed = other_pos_embed.reshape(1, x.size(2), P, P) + new_pos_embed = F.interpolate(other_pos_embed, size=(H, W), mode='nearest') + new_pos_embed = new_pos_embed.flatten(2) + new_pos_embed = new_pos_embed.transpose(1, 2) + new_pos_embed = torch.cat((cls_pos_embed, new_pos_embed), 1) + x = x + new_pos_embed + else: + x = x + self.pos_embed + x = self.pos_drop(x) + + if attention_type is None: + attention_type = self.attention_type + ## Time Embeddings + if attention_type != 'space_only': + cls_tokens = x[:B, 0, :].unsqueeze(1) + x = x[:,1:] + x = rearrange(x, '(b t) n m -> (b n) t m',b=B,t=T) + ## Resizing time embeddings in case they don't match + if T != self.time_embed.size(1): + time_embed = self.time_embed.transpose(1, 2) + new_time_embed = F.interpolate(time_embed, size=(T), mode='nearest') + new_time_embed = new_time_embed.transpose(1, 2) + x = x + new_time_embed + else: + x = x + self.time_embed + x = self.time_drop(x) + x = rearrange(x, '(b n) t m -> b (n t) m',b=B,t=T) + x = torch.cat((cls_tokens, x), dim=1) + + ## Attention blocks + for blk in self.blocks: + x = blk(x, B, T, W) + + if 
self.return_hidden_state: + all_hidden_states = all_hidden_states + (self.norm(x),) + + ### Predictions for space-only baseline + if attention_type == 'space_only': + x = rearrange(x, '(b t) n m -> b t n m',b=B,t=T) + x = torch.mean(x, 1) # averaging predictions for every frame + + x = self.norm(x) + + if self.return_hidden_state: + return x, all_hidden_states + else: + return x + + def forward(self, x, attention_type=None): + x = self.forward_features(x, attention_type=attention_type) + return x + +def _conv_filter(state_dict, patch_size=16): + """ convert patch embedding weight from manual patchify + linear proj to conv""" + out_dict = {} + for k, v in state_dict.items(): + if 'patch_embed.proj.weight' in k: + if v.shape[-1] != patch_size: + patch_size = v.shape[-1] + v = v.reshape((v.shape[0], 3, patch_size, patch_size)) + out_dict[k] = v + return out_dict + +# @MODEL_REGISTRY.register() +class timesformer_vit_base_patch16_224(nn.Module): + def __init__(self, cfg, **kwargs): + super(timesformer_vit_base_patch16_224, self).__init__() + self.pretrained=True + patch_size = 16 + self.model = VisionTransformer(img_size=cfg.DATA.TRAIN_CROP_SIZE, + num_classes=cfg.MODEL.NUM_CLASSES, patch_size=patch_size, + embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), drop_rate=0., attn_drop_rate=0., drop_path_rate=0.1, num_frames=cfg.DATA.NUM_FRAMES, attention_type=cfg.TIMESFORMER.ATTENTION_TYPE, **kwargs) + + self.attention_type = cfg.TIMESFORMER.ATTENTION_TYPE + self.model.default_cfg = default_cfgs['timesformer_vit_base_patch16_224'] + self.num_patches = (cfg.DATA.TRAIN_CROP_SIZE // patch_size) * (cfg.DATA.TRAIN_CROP_SIZE // patch_size) + pretrained_model=cfg.TIMESFORMER.PRETRAINED_MODEL + if self.pretrained: + load_pretrained(self.model, num_classes=self.model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=_conv_filter, img_size=cfg.DATA.TRAIN_CROP_SIZE, num_patches=self.num_patches, attention_type=self.attention_type, pretrained_model=pretrained_model) + + def forward(self, x): + x = self.model(x) + return x + +# @MODEL_REGISTRY.register() +class TimeSformer(nn.Module): + def __init__(self, img_size=224, patch_size=16, num_classes=400, num_frames=8, + attention_type='divided_space_time', embed_dim=768, pretrained_model='', + audio_as_image=True, space_only_for_images=False, **kwargs): + super(TimeSformer, self).__init__() + self.pretrained=True + self.model = VisionTransformer(img_size=img_size, num_classes=num_classes, + patch_size=patch_size, embed_dim=embed_dim, depth=12, num_heads=12, mlp_ratio=4, + qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), + drop_rate=0., attn_drop_rate=0., drop_path_rate=0.1, num_frames=num_frames, + attention_type=attention_type, **kwargs) + self.num_frames = num_frames + self.embed_dim = embed_dim + self.attention_type = attention_type + self.model.default_cfg = default_cfgs['timesformer_vit_base_patch'+str(patch_size)+'_224'] + self.num_patches = (img_size // patch_size) * (img_size // patch_size) + self.img_size = img_size + self.audio_as_image = audio_as_image + self.space_only_for_images = space_only_for_images + if self.pretrained: + load_pretrained(self.model, num_classes=self.model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=_conv_filter, img_size=img_size, num_frames=num_frames, num_patches=self.num_patches, attention_type=self.attention_type, pretrained_model=pretrained_model) + def forward(self, x, external_features=None): + + if x.ndim == 4: # image as input + 
if not self.space_only_for_images:
+ x = x.unsqueeze(2).expand(-1, -1, self.num_frames, -1, -1) # B, C, f, H, W
+ else:
+ x = x.unsqueeze(2) # B, C, f, H, W
+ if x.ndim == 3: # audio spectrogram as input (B, H, W)
+ B, H, W = x.shape
+ if self.audio_as_image:
+ x = x.unsqueeze(1).expand(-1, 3, -1, -1) # add C channel
+ x = torchvision.transforms.functional.resize(x, (self.img_size, self.img_size))
+ if not self.space_only_for_images:
+ x = x.unsqueeze(2).expand(-1, -1, self.num_frames, -1, -1) # B, C, f, H, W
+ else:
+ x = x.unsqueeze(2) # B, C, f, H, W
+ else:
+ if H != W: # non-square audio spectrogram (e.g., 1024x128); another option is to make a square image
+ if H > W:
+ w = H/self.num_frames
+ # if w > W:
+ # w = w/2
+ a = w - W # window stride W+a; a < 0 yields overlapping frames
+ x = x.unfold(1, W, int(W+a)) # from mel spectrogram to square frames
+ x = x.unsqueeze(1).expand(-1, 3, -1, -1, -1) # add C channel
+ x = [torchvision.transforms.functional.resize(x[:, :, i, :, :], (self.img_size, self.img_size)).unsqueeze(2) for i in range(self.num_frames)]
+ x = torch.cat(x, dim=2)
+
+ x = (x - x.min()) / (x.max() - x.min()) # min-max normalize to [0, 1] (does not help)
+ if x.shape[-3] == 1 and self.space_only_for_images: # space only for images
+ attention_type = 'space_only'
+ else:
+ attention_type = None
+ x = self.model(x, attention_type=attention_type)
+
+ return x
diff --git a/models/utils.py b/models/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..58efe4f9104c524632571490bbefe893f33bcb3d
--- /dev/null
+++ b/models/utils.py
@@ -0,0 +1,96 @@
+import re
+
+import torch
+from torch import nn
+
+
+exclude_list = ['model_text', 'transformer', 'model_vision']
+
+def filter_msg(msg, exclude_list):
+ new_msg = []
+ if len(msg) > 1:
+ for k in msg[0]: # missing keys
+ if not any([e in k for e in exclude_list]) or 'adapter' in k:
+ new_msg.append(k)
+ return new_msg
+
+def filter_state(state, exclude_list):
+ import collections
+ new_tmp = collections.OrderedDict()
+ for k, v in state.items():
+ if not any([e in k for e in exclude_list]) or 'adapter' in k:
+ new_tmp[k] = state[k]
+
+ return new_tmp
+
+
+def freeze_whole_model(model):
+ for n, p in model.named_parameters():
+ p.requires_grad = False
+
+def unfreeze_parameters(model, config):
+ # targets = '*.proj_*|*_proj*|*itm_head*|*queue*|*adapter*|*temp*|*.cls.*'
+
+ targets = ['prompt'] # lm_head
+
+ if not config.get('freeze_connector', False):
+ targets = targets + ['connector']
+
+ if config.get('unfreeze_text_layer_norm', False):
+ targets = targets + ['self_attn_layer_norm', 'final_layer_norm']
+
+ if config.get('unfreeze_vision_layer_norm', False):
+ targets = targets + ['norm', 'norm1', 'norm2']
+
+ if config.get('unfreeze_text_model', False):
+ targets = targets + ['model_text']
+
+ if config.get('unfreeze_vision_model', False):
+ targets = targets + ['model_vision']
+
+ if config.get('use_adapters', False):
+ targets = targets + ['adapter']
+
+ print('unfreeze targets:', targets)
+ for n, p in model.named_parameters():
+ if any(t in n for t in targets):
+ # if re.fullmatch(targets, n):
+ p.requires_grad = True
+ print(f"{n} is trainable...")
+
+
+def print_trainable_params_percentage(model):
+
+ orig_param_size = sum(p.numel() for p in model.parameters())
+
+ def count_parameters(model):
+ return sum(p.numel() for p in model.parameters() if p.requires_grad)
+
+ trainable_size = count_parameters(model)
+
+ percentage = trainable_size / orig_param_size * 100
+
+ print(f"Trainable param percentage: {percentage:.2f}% ({trainable_size}/{orig_param_size})")
+
+ return percentage
+
+
+def 
shift_right(input_ids, decoder_start_token_id=2, pad_token_id=None):
+
+ # shift inputs one position to the right, inserting the decoder start token
+ shifted_input_ids = input_ids.new_zeros(input_ids.shape)
+ shifted_input_ids[..., 1:] = input_ids[..., :-1].clone()
+ shifted_input_ids[..., 0] = decoder_start_token_id
+
+ # replace possible -100 values in labels by `pad_token_id`
+ assert pad_token_id is not None, "pad_token_id has to be set to replace -100 label values"
+ shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id)
+
+ assert torch.all(shifted_input_ids >= 0).item(), "Verify that `shifted_input_ids` has no negative values"
+
+ return shifted_input_ids
\ No newline at end of file
diff --git a/models/vit.py b/models/vit.py
new file mode 100644
index 0000000000000000000000000000000000000000..8600a1798da0821563b3f75a2764d234250df3f5
--- /dev/null
+++ b/models/vit.py
@@ -0,0 +1,1140 @@
+""" Vision Transformer (ViT) in PyTorch
+
+A PyTorch implementation of Vision Transformers as described in:
+
+'An Image Is Worth 16 x 16 Words: Transformers for Image Recognition at Scale'
+ - https://arxiv.org/abs/2010.11929
+
+`How to train your ViT? Data, Augmentation, and Regularization in Vision Transformers`
+ - https://arxiv.org/abs/2106.10270
+
+The official jax code is released and available at https://github.com/google-research/vision_transformer
+
+Acknowledgments:
+* The paper authors for releasing code and weights, thanks!
+* I fixed my class token impl based on Phil Wang's https://github.com/lucidrains/vit-pytorch ... check it out
+for some einops/einsum fun
+* Simple transformer style inspired by Andrej Karpathy's https://github.com/karpathy/minGPT
+* Bert reference code checks against Huggingface Transformers and Tensorflow Bert
+
+Hacked together by / Copyright 2020, Ross Wightman
+"""
+import math
+import logging
+from functools import partial
+from collections import OrderedDict
+from typing import Optional
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import torch.utils.checkpoint
+
+from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD
+from timm.models.helpers import build_model_with_cfg, resolve_pretrained_cfg, named_apply, adapt_input_conv, checkpoint_seq
+from timm.models.layers import PatchEmbed, Mlp, DropPath, trunc_normal_, lecun_normal_
+from timm.models.registry import register_model
+
+_logger = logging.getLogger(__name__)
+
+
+def _cfg(url='', **kwargs):
+ return {
+ 'url': url,
+ 'num_classes': 0, 'input_size': (3, 224, 224), 'pool_size': None,
+ 'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,
+ 'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,
+ 'first_conv': 'patch_embed.proj', 'classifier': 'head',
+ **kwargs
+ }
+
+
+default_cfgs = {
+ # patch models (weights from official Google JAX impl)
+ 'vit_tiny_patch16_224': _cfg(
+ url='https://storage.googleapis.com/vit_models/augreg/'
+ 'Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz'),
+ 'vit_tiny_patch16_384': _cfg(
+ url='https://storage.googleapis.com/vit_models/augreg/'
+ 'Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz',
+ input_size=(3, 384, 384), crop_pct=1.0),
+ 'vit_small_patch32_224': _cfg(
+ url='https://storage.googleapis.com/vit_models/augreg/'
+ 'S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz'),
+ 'vit_small_patch32_384': _cfg(
+ url='https://storage.googleapis.com/vit_models/augreg/'
+ 
'S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_small_patch16_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz'), + 'vit_small_patch16_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_base_patch32_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz'), + 'vit_base_patch32_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_32-i21k-300ep-lr_0.001-aug_light1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_base_patch16_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz'), + 'vit_base_patch16_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_base_patch8_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'B_8-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz'), + 'vit_large_patch32_224': _cfg( + url='', # no official model weights for this combo, only for in21k + ), + 'vit_large_patch32_384': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_p32_384-9b920ba8.pth', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_large_patch16_224': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz'), + 'vit_large_patch16_384': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/' + 'L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_384.npz', + input_size=(3, 384, 384), crop_pct=1.0), + + 'vit_large_patch14_224': _cfg(url=''), + 'vit_huge_patch14_224': _cfg(url=''), + 'vit_giant_patch14_224': _cfg(url=''), + 'vit_gigantic_patch14_224': _cfg(url=''), + + + # patch models, imagenet21k (weights from official Google JAX impl) + 'vit_tiny_patch16_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_small_patch32_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_small_patch16_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_base_patch32_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/B_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.03-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_base_patch16_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0.npz', + num_classes=21843), + 
'vit_base_patch8_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/B_8-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0.npz', + num_classes=21843), + 'vit_large_patch32_224_in21k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch32_224_in21k-9046d2e7.pth', + num_classes=21843), + 'vit_large_patch16_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1.npz', + num_classes=21843), + 'vit_huge_patch14_224_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/imagenet21k/ViT-H_14.npz', + hf_hub_id='timm/vit_huge_patch14_224_in21k', + num_classes=21843), + + # SAM trained models (https://arxiv.org/abs/2106.01548) + 'vit_base_patch32_224_sam': _cfg( + url='https://storage.googleapis.com/vit_models/sam/ViT-B_32.npz'), + 'vit_base_patch16_224_sam': _cfg( + url='https://storage.googleapis.com/vit_models/sam/ViT-B_16.npz'), + + # DINO pretrained - https://arxiv.org/abs/2104.14294 (no classifier head, for fine-tune only) + 'vit_small_patch16_224_dino': _cfg( + url='https://dl.fbaipublicfiles.com/dino/dino_deitsmall16_pretrain/dino_deitsmall16_pretrain.pth', + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), + 'vit_small_patch8_224_dino': _cfg( + url='https://dl.fbaipublicfiles.com/dino/dino_deitsmall8_pretrain/dino_deitsmall8_pretrain.pth', + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), + 'vit_base_patch16_224_dino': _cfg( + url='https://dl.fbaipublicfiles.com/dino/dino_vitbase16_pretrain/dino_vitbase16_pretrain.pth', + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), + 'vit_base_patch8_224_dino': _cfg( + url='https://dl.fbaipublicfiles.com/dino/dino_vitbase8_pretrain/dino_vitbase8_pretrain.pth', + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), + + + # ViT ImageNet-21K-P pretraining by MILL + 'vit_base_patch16_224_miil_in21k': _cfg( + url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/vit_base_patch16_224_in21k_miil.pth', + mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', num_classes=11221, + ), + 'vit_base_patch16_224_miil': _cfg( + url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm' + '/vit_base_patch16_224_1k_miil_84_4.pth', + mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', + ), + + 'vit_base_patch16_rpn_224': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_base_patch16_rpn_224-sw-3b07e89d.pth'), + + # experimental (may be removed) + 'vit_base_patch32_plus_256': _cfg(url='', input_size=(3, 256, 256), crop_pct=0.95), + 'vit_base_patch16_plus_240': _cfg(url='', input_size=(3, 240, 240), crop_pct=0.95), + 'vit_small_patch16_36x1_224': _cfg(url=''), + 'vit_small_patch16_18x2_224': _cfg(url=''), + 'vit_base_patch16_18x2_224': _cfg(url=''), +} + + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, attn_drop=0., proj_drop=0.): + super().__init__() + assert dim % num_heads == 0, 'dim should be divisible by num_heads' + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + qkv 
= self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv.unbind(0) # make torchscript happy (cannot use tensor as tuple) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class LayerScale(nn.Module): + def __init__(self, dim, init_values=1e-5, inplace=False): + super().__init__() + self.inplace = inplace + self.gamma = nn.Parameter(init_values * torch.ones(dim)) + + def forward(self, x): + return x.mul_(self.gamma) if self.inplace else x * self.gamma + + +class Block(nn.Module): + + def __init__( + self, dim, num_heads, mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., init_values=None, + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention(dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + self.ls1 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + self.norm2 = norm_layer(dim) + self.mlp = Mlp(in_features=dim, hidden_features=int(dim * mlp_ratio), act_layer=act_layer, drop=drop) + self.ls2 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + def forward(self, x): + x = x + self.drop_path1(self.ls1(self.attn(self.norm1(x)))) + x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x)))) + return x + + +class ResPostBlock(nn.Module): + + def __init__( + self, dim, num_heads, mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., init_values=None, + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.init_values = init_values + + self.attn = Attention(dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + self.norm1 = norm_layer(dim) + self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + self.mlp = Mlp(in_features=dim, hidden_features=int(dim * mlp_ratio), act_layer=act_layer, drop=drop) + self.norm2 = norm_layer(dim) + self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + + self.init_weights() + + def init_weights(self): + # NOTE this init overrides that base model init with specific changes for the block type + if self.init_values is not None: + nn.init.constant_(self.norm1.weight, self.init_values) + nn.init.constant_(self.norm2.weight, self.init_values) + + def forward(self, x): + x = x + self.drop_path1(self.norm1(self.attn(x))) + x = x + self.drop_path2(self.norm2(self.mlp(x))) + return x + + +class ParallelBlock(nn.Module): + + def __init__( + self, dim, num_heads, num_parallel=2, mlp_ratio=4., qkv_bias=False, init_values=None, + drop=0., attn_drop=0., drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.num_parallel = num_parallel + self.attns = nn.ModuleList() + self.ffns = nn.ModuleList() + for _ in range(num_parallel): + self.attns.append(nn.Sequential(OrderedDict([ + ('norm', norm_layer(dim)), + ('attn', Attention(dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop)), + ('ls', LayerScale(dim, init_values=init_values) if init_values else nn.Identity()), + ('drop_path', DropPath(drop_path) if drop_path > 0. else nn.Identity()) + ]))) + self.ffns.append(nn.Sequential(OrderedDict([ + ('norm', norm_layer(dim)), + ('mlp', Mlp(dim, hidden_features=int(dim * mlp_ratio), act_layer=act_layer, drop=drop)), + ('ls', LayerScale(dim, init_values=init_values) if init_values else nn.Identity()), + ('drop_path', DropPath(drop_path) if drop_path > 0. else nn.Identity()) + ]))) + + def _forward_jit(self, x): + x = x + torch.stack([attn(x) for attn in self.attns]).sum(dim=0) + x = x + torch.stack([ffn(x) for ffn in self.ffns]).sum(dim=0) + return x + + @torch.jit.ignore + def _forward(self, x): + x = x + sum(attn(x) for attn in self.attns) + x = x + sum(ffn(x) for ffn in self.ffns) + return x + + def forward(self, x): + if torch.jit.is_scripting() or torch.jit.is_tracing(): + return self._forward_jit(x) + else: + return self._forward(x) + + +class VisionTransformer(nn.Module): + """ Vision Transformer + + A PyTorch impl of : `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale` + - https://arxiv.org/abs/2010.11929 + """ + + def __init__( + self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, global_pool='token', + embed_dim=768, depth=12, num_heads=12, mlp_ratio=4., qkv_bias=True, init_values=None, + class_token=True, no_embed_class=False, fc_norm=None, drop_rate=0., attn_drop_rate=0., drop_path_rate=0., + weight_init='', embed_layer=PatchEmbed, norm_layer=None, act_layer=None, block_fn=Block, + return_hidden_state=False, mask_p=0): + """ + Args: + img_size (int, tuple): input image size + patch_size (int, tuple): patch size + in_chans (int): number of input channels + num_classes (int): number of classes for classification head + global_pool (str): type of global pooling for final sequence (default: 'token') + embed_dim (int): embedding dimension + depth (int): depth of transformer + num_heads (int): number of attention heads + mlp_ratio (int): ratio of mlp hidden dim to embedding dim + qkv_bias (bool): enable bias for qkv if True + init_values: (float): layer-scale init values + class_token (bool): use class token + fc_norm (Optional[bool]): pre-fc norm after pool, set if global_pool == 'avg' if None (default: None) + drop_rate (float): dropout rate + attn_drop_rate (float): attention dropout rate + drop_path_rate (float): stochastic depth rate + weight_init (str): weight init scheme + embed_layer (nn.Module): patch embedding layer + norm_layer: 
(nn.Module): normalization layer + act_layer: (nn.Module): MLP activation layer + """ + super().__init__() + assert global_pool in ('', 'avg', 'token') + assert class_token or global_pool != 'token' + use_fc_norm = global_pool == 'avg' if fc_norm is None else fc_norm + norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6) + act_layer = act_layer or nn.GELU + + self.num_classes = num_classes + self.global_pool = global_pool + self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models + self.num_prefix_tokens = 1 if class_token else 0 + self.no_embed_class = no_embed_class + self.grad_checkpointing = False + + self.patch_embed = embed_layer( + img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) + num_patches = self.patch_embed.num_patches + + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) if class_token else None + embed_len = num_patches if no_embed_class else num_patches + self.num_prefix_tokens + self.pos_embed = nn.Parameter(torch.randn(1, embed_len, embed_dim) * .02) + self.pos_drop = nn.Dropout(p=drop_rate) + + self.depth = depth + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + block_fn( + dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, init_values=init_values, + drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[i], norm_layer=norm_layer, act_layer=act_layer) + for i in range(depth)]) + self.norm = norm_layer(embed_dim) if not use_fc_norm else nn.Identity() + + # Classifier Head + self.fc_norm = norm_layer(embed_dim) if use_fc_norm else nn.Identity() + self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity() + + if weight_init != 'skip': + self.init_weights(weight_init) + self.return_hidden_state = return_hidden_state + self.mask_p = mask_p + + def init_weights(self, mode=''): + assert mode in ('jax', 'jax_nlhb', 'moco', '') + head_bias = -math.log(self.num_classes) if 'nlhb' in mode else 0. 
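+ # 'nlhb' (negative log head bias) modes start the classifier bias at
+ # -log(num_classes), i.e. a uniform prior over the classes; the remaining
+ # parameters follow the selected scheme ('jax', 'moco', or the default timm
+ # truncated-normal init) applied via get_init_weights_vit below.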
+ trunc_normal_(self.pos_embed, std=.02) + if self.cls_token is not None: + nn.init.normal_(self.cls_token, std=1e-6) + named_apply(get_init_weights_vit(mode, head_bias), self) + + def _init_weights(self, m): + # this fn left here for compat with downstream users + init_weights_vit_timm(m) + + @torch.jit.ignore() + def load_pretrained(self, checkpoint_path, prefix=''): + _load_weights(self, checkpoint_path, prefix) + + @torch.jit.ignore + def no_weight_decay(self): + return {'pos_embed', 'cls_token', 'dist_token'} + + @torch.jit.ignore + def group_matcher(self, coarse=False): + return dict( + stem=r'^cls_token|pos_embed|patch_embed', # stem and embed + blocks=[(r'^blocks\.(\d+)', None), (r'^norm', (99999,))] + ) + + @torch.jit.ignore + def set_grad_checkpointing(self, enable=True): + self.grad_checkpointing = enable + + @torch.jit.ignore + def get_classifier(self): + return self.head + + def reset_classifier(self, num_classes: int, global_pool=None): + self.num_classes = num_classes + if global_pool is not None: + assert global_pool in ('', 'avg', 'token') + self.global_pool = global_pool + self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity() + + def _pos_embed(self, x): + if self.no_embed_class: + # deit-3, updated JAX (big vision) + # position embedding does not overlap with class token, add then concat + x = x + self.pos_embed + if self.cls_token is not None: + x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1) + else: + # original timm, JAX, and deit vit impl + # pos_embed has entry for class token, concat then add + if self.cls_token is not None: + x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1) + x = x + self.pos_embed + return self.pos_drop(x) + + def forward_features(self, x): + x = self.patch_embed(x) + x = self._pos_embed(x) + if self.grad_checkpointing and not torch.jit.is_scripting(): + x = checkpoint_seq(self.blocks, x) + else: + x = self.blocks(x) + x = self.norm(x) + return x + + def forward_head(self, x, pre_logits: bool = False): + if self.global_pool: + x = x[:, self.num_prefix_tokens:].mean(dim=1) if self.global_pool == 'avg' else x[:, 0] + x = self.fc_norm(x) + return x if pre_logits else self.head(x) + +# def forward(self, x): +# x = self.forward_features(x) +# x = self.forward_head(x) +# return x + + def forward(self, x, external_features=None): + all_hidden_states = () if self.return_hidden_state else None + B = x.shape[0] + x = self.patch_embed(x) + + cls_tokens = self.cls_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + x = x + self.pos_embed[:,:x.size(1),:] + x = self.pos_drop(x) + + if self.mask_p and self.training: + num_samples = int((1-self.mask_p)*(x.shape[1]-1)) + # idx = torch.tensor(np.random.choice(range(x.shape[1]-1), replace=False)) + L = x.shape[1]-1 + noise = torch.rand(x.shape[0], L, device=x.device) + idx = torch.argsort(noise, dim=1) + idx = idx[:, :num_samples] + # idx = x[:, :, 0].multinomial(num_samples=num_samples, replacement=False) + clst = x[:, :1, :] + sampled_x = torch.gather(x[:, 1:, :], dim=1, index=idx.unsqueeze(-1).repeat(1, 1, x.shape[-1])) + x = torch.cat((clst, sampled_x), dim=1) + + + if external_features is not None: + x = torch.cat((x, external_features), dim=1) + + for i,blk in enumerate(self.blocks): + x = blk(x) + if self.return_hidden_state: + all_hidden_states = all_hidden_states + (self.norm(x),) + x = self.norm(x) + + if self.return_hidden_state: + return x, all_hidden_states + else: + return x + + + + + + +def 
init_weights_vit_timm(module: nn.Module, name: str = ''): + """ ViT weight initialization, original timm impl (for reproducibility) """ + if isinstance(module, nn.Linear): + trunc_normal_(module.weight, std=.02) + if module.bias is not None: + nn.init.zeros_(module.bias) + elif hasattr(module, 'init_weights'): + module.init_weights() + + +def init_weights_vit_jax(module: nn.Module, name: str = '', head_bias: float = 0.): + """ ViT weight initialization, matching JAX (Flax) impl """ + if isinstance(module, nn.Linear): + if name.startswith('head'): + nn.init.zeros_(module.weight) + nn.init.constant_(module.bias, head_bias) + else: + nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.normal_(module.bias, std=1e-6) if 'mlp' in name else nn.init.zeros_(module.bias) + elif isinstance(module, nn.Conv2d): + lecun_normal_(module.weight) + if module.bias is not None: + nn.init.zeros_(module.bias) + elif hasattr(module, 'init_weights'): + module.init_weights() + + +def init_weights_vit_moco(module: nn.Module, name: str = ''): + """ ViT weight initialization, matching moco-v3 impl minus fixed PatchEmbed """ + if isinstance(module, nn.Linear): + if 'qkv' in name: + # treat the weights of Q, K, V separately + val = math.sqrt(6. / float(module.weight.shape[0] // 3 + module.weight.shape[1])) + nn.init.uniform_(module.weight, -val, val) + else: + nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.zeros_(module.bias) + elif hasattr(module, 'init_weights'): + module.init_weights() + + +def get_init_weights_vit(mode='jax', head_bias: float = 0.): + if 'jax' in mode: + return partial(init_weights_vit_jax, head_bias=head_bias) + elif 'moco' in mode: + return init_weights_vit_moco + else: + return init_weights_vit_timm + + +@torch.no_grad() +def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = ''): + """ Load weights from .npz checkpoints for official Google Brain Flax implementation + """ + import numpy as np + + def _n2p(w, t=True): + if w.ndim == 4 and w.shape[0] == w.shape[1] == w.shape[2] == 1: + w = w.flatten() + if t: + if w.ndim == 4: + w = w.transpose([3, 2, 0, 1]) + elif w.ndim == 3: + w = w.transpose([2, 0, 1]) + elif w.ndim == 2: + w = w.transpose([1, 0]) + return torch.from_numpy(w) + + w = np.load(checkpoint_path) + if not prefix and 'opt/target/embedding/kernel' in w: + prefix = 'opt/target/' + + if hasattr(model.patch_embed, 'backbone'): + # hybrid + backbone = model.patch_embed.backbone + stem_only = not hasattr(backbone, 'stem') + stem = backbone if stem_only else backbone.stem + stem.conv.weight.copy_(adapt_input_conv(stem.conv.weight.shape[1], _n2p(w[f'{prefix}conv_root/kernel']))) + stem.norm.weight.copy_(_n2p(w[f'{prefix}gn_root/scale'])) + stem.norm.bias.copy_(_n2p(w[f'{prefix}gn_root/bias'])) + if not stem_only: + for i, stage in enumerate(backbone.stages): + for j, block in enumerate(stage.blocks): + bp = f'{prefix}block{i + 1}/unit{j + 1}/' + for r in range(3): + getattr(block, f'conv{r + 1}').weight.copy_(_n2p(w[f'{bp}conv{r + 1}/kernel'])) + getattr(block, f'norm{r + 1}').weight.copy_(_n2p(w[f'{bp}gn{r + 1}/scale'])) + getattr(block, f'norm{r + 1}').bias.copy_(_n2p(w[f'{bp}gn{r + 1}/bias'])) + if block.downsample is not None: + block.downsample.conv.weight.copy_(_n2p(w[f'{bp}conv_proj/kernel'])) + block.downsample.norm.weight.copy_(_n2p(w[f'{bp}gn_proj/scale'])) + block.downsample.norm.bias.copy_(_n2p(w[f'{bp}gn_proj/bias'])) + embed_conv_w = _n2p(w[f'{prefix}embedding/kernel']) + else: + embed_conv_w = 
adapt_input_conv( + model.patch_embed.proj.weight.shape[1], _n2p(w[f'{prefix}embedding/kernel'])) + model.patch_embed.proj.weight.copy_(embed_conv_w) + model.patch_embed.proj.bias.copy_(_n2p(w[f'{prefix}embedding/bias'])) + model.cls_token.copy_(_n2p(w[f'{prefix}cls'], t=False)) + pos_embed_w = _n2p(w[f'{prefix}Transformer/posembed_input/pos_embedding'], t=False) + if pos_embed_w.shape != model.pos_embed.shape: + pos_embed_w = resize_pos_embed( # resize pos embedding when different size from pretrained weights + pos_embed_w, + model.pos_embed, + getattr(model, 'num_prefix_tokens', 1), + model.patch_embed.grid_size + ) + model.pos_embed.copy_(pos_embed_w) + model.norm.weight.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/scale'])) + model.norm.bias.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/bias'])) + if isinstance(model.head, nn.Linear) and model.head.bias.shape[0] == w[f'{prefix}head/bias'].shape[-1]: + model.head.weight.copy_(_n2p(w[f'{prefix}head/kernel'])) + model.head.bias.copy_(_n2p(w[f'{prefix}head/bias'])) + # NOTE representation layer has been removed, not used in latest 21k/1k pretrained weights + # if isinstance(getattr(model.pre_logits, 'fc', None), nn.Linear) and f'{prefix}pre_logits/bias' in w: + # model.pre_logits.fc.weight.copy_(_n2p(w[f'{prefix}pre_logits/kernel'])) + # model.pre_logits.fc.bias.copy_(_n2p(w[f'{prefix}pre_logits/bias'])) + for i, block in enumerate(model.blocks.children()): + block_prefix = f'{prefix}Transformer/encoderblock_{i}/' + mha_prefix = block_prefix + 'MultiHeadDotProductAttention_1/' + block.norm1.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/scale'])) + block.norm1.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/bias'])) + block.attn.qkv.weight.copy_(torch.cat([ + _n2p(w[f'{mha_prefix}{n}/kernel'], t=False).flatten(1).T for n in ('query', 'key', 'value')])) + block.attn.qkv.bias.copy_(torch.cat([ + _n2p(w[f'{mha_prefix}{n}/bias'], t=False).reshape(-1) for n in ('query', 'key', 'value')])) + block.attn.proj.weight.copy_(_n2p(w[f'{mha_prefix}out/kernel']).flatten(1)) + block.attn.proj.bias.copy_(_n2p(w[f'{mha_prefix}out/bias'])) + for r in range(2): + getattr(block.mlp, f'fc{r + 1}').weight.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/kernel'])) + getattr(block.mlp, f'fc{r + 1}').bias.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/bias'])) + block.norm2.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/scale'])) + block.norm2.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/bias'])) + + +def resize_pos_embed(posemb, posemb_new, num_prefix_tokens=1, gs_new=()): + # Rescale the grid of position embeddings when loading from state_dict. 
Adapted from + # https://github.com/google-research/vision_transformer/blob/00883dd691c63a6830751563748663526e811cee/vit_jax/checkpoint.py#L224 + _logger.info('Resized position embedding: %s to %s', posemb.shape, posemb_new.shape) + ntok_new = posemb_new.shape[1] + if num_prefix_tokens: + posemb_prefix, posemb_grid = posemb[:, :num_prefix_tokens], posemb[0, num_prefix_tokens:] + ntok_new -= num_prefix_tokens + else: + posemb_prefix, posemb_grid = posemb[:, :0], posemb[0] + gs_old = int(math.sqrt(len(posemb_grid))) + if not len(gs_new): # backwards compatibility + gs_new = [int(math.sqrt(ntok_new))] * 2 + assert len(gs_new) >= 2 + _logger.info('Position embedding grid-size from %s to %s', [gs_old, gs_old], gs_new) + posemb_grid = posemb_grid.reshape(1, gs_old, gs_old, -1).permute(0, 3, 1, 2) + posemb_grid = F.interpolate(posemb_grid, size=gs_new, mode='bicubic', align_corners=False) + posemb_grid = posemb_grid.permute(0, 2, 3, 1).reshape(1, gs_new[0] * gs_new[1], -1) + posemb = torch.cat([posemb_prefix, posemb_grid], dim=1) + return posemb + + +def checkpoint_filter_fn(state_dict, model, adapt_layer_scale=False): + """ convert patch embedding weight from manual patchify + linear proj to conv""" + import re + out_dict = {} + if 'model' in state_dict: + # For deit models + state_dict = state_dict['model'] + + for k, v in state_dict.items(): + if 'patch_embed.proj.weight' in k and len(v.shape) < 4: + # For old models that I trained prior to conv based patchification + O, I, H, W = model.patch_embed.proj.weight.shape + v = v.reshape(O, -1, H, W) + elif k == 'pos_embed' and v.shape[1] != model.pos_embed.shape[1]: + # To resize pos embedding when using model at different size from pretrained weights + v = resize_pos_embed( + v, + model.pos_embed, + getattr(model, 'num_prefix_tokens', 1), + model.patch_embed.grid_size + ) + elif adapt_layer_scale and 'gamma_' in k: + # remap layer-scale gamma into sub-module (deit3 models) + k = re.sub(r'gamma_([0-9])', r'ls\1.gamma', k) + elif 'pre_logits' in k: + # NOTE representation layer removed as not used in latest 21k/1k pretrained weights + continue + out_dict[k] = v + return out_dict + + +def _create_vision_transformer(variant, pretrained=False, **kwargs): + if kwargs.get('features_only', None): + raise RuntimeError('features_only not implemented for Vision Transformer models.') + + pretrained_cfg = resolve_pretrained_cfg(variant, pretrained_cfg=kwargs.pop('pretrained_cfg', None)) + + + model = build_model_with_cfg( + VisionTransformer, variant, pretrained, + pretrained_cfg=pretrained_cfg, + pretrained_filter_fn=checkpoint_filter_fn, + pretrained_custom_load='npz' in pretrained_cfg['url'], + **kwargs) + return model + + +@register_model +def vit_tiny_patch16_224(pretrained=False, **kwargs): + """ ViT-Tiny (Vit-Ti/16) + """ + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) + model = _create_vision_transformer('vit_tiny_patch16_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_tiny_patch16_384(pretrained=False, **kwargs): + """ ViT-Tiny (Vit-Ti/16) @ 384x384. 
+ """ + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) + model = _create_vision_transformer('vit_tiny_patch16_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch32_224(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/32) + """ + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch32_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch32_384(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/32) at 384x384. + """ + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch32_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch16_224(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/16) + NOTE I've replaced my previous 'small' model definition and weights with the small variant from the DeiT paper + """ + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch16_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch16_384(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/16) + NOTE I've replaced my previous 'small' model definition and weights with the small variant from the DeiT paper + """ + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch16_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch32_224(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch32_384(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch32_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch16_224(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch16_384(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. 
+ """ + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch8_224(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/8) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch8_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch32_224(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). No pretrained weights. + """ + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch32_384(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch32_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch16_224(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch16_384(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch14_224(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/14) + """ + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch14_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_huge_patch14_224(pretrained=False, **kwargs): + """ ViT-Huge model (ViT-H/14) from original paper (https://arxiv.org/abs/2010.11929). 
+ """ + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_huge_patch14_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_giant_patch14_224(pretrained=False, **kwargs): + """ ViT-Giant model (ViT-g/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560 + """ + model_kwargs = dict(patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_giant_patch14_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_gigantic_patch14_224(pretrained=False, **kwargs): + """ ViT-Gigantic model (ViT-G/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560 + """ + model_kwargs = dict(patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_gigantic_patch14_224', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_tiny_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Tiny (Vit-Ti/16). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) + model = _create_vision_transformer('vit_tiny_patch16_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch32_224_in21k(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/16) + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch32_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_small_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Small (ViT-S/16) + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) + model = _create_vision_transformer('vit_small_patch16_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch32_224_in21k(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch32_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. 
+ NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch16_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch8_224_in21k(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/8) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch8_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch32_224_in21k(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has a representation layer but the 21k classifier head is zero'd out in original weights + """ + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch32_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_large_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has valid 21k classifier head and no representation (pre-logits) layer + """ + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_large_patch16_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): + """ ViT-Huge model (ViT-H/14) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: this model has a representation layer but the 21k classifier head is zero'd out in original weights + """ + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, **kwargs) + model = _create_vision_transformer('vit_huge_patch14_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch16_224_sam(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/16) w/ SAM pretrained weights. Paper: https://arxiv.org/abs/2106.01548 + """ + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) + model = _create_vision_transformer('vit_base_patch16_224_sam', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch32_224_sam(pretrained=False, **kwargs): + """ ViT-Base (ViT-B/32) w/ SAM pretrained weights. 
Paper: https://arxiv.org/abs/2106.01548
+    """
+    model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs)
+    model = _create_vision_transformer('vit_base_patch32_224_sam', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_small_patch16_224_dino(pretrained=False, **kwargs):
+    """ ViT-Small (ViT-S/16) w/ DINO pretrained weights (no head) - https://arxiv.org/abs/2104.14294
+    """
+    model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs)
+    model = _create_vision_transformer('vit_small_patch16_224_dino', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_small_patch8_224_dino(pretrained=False, **kwargs):
+    """ ViT-Small (ViT-S/8) w/ DINO pretrained weights (no head) - https://arxiv.org/abs/2104.14294
+    """
+    model_kwargs = dict(patch_size=8, embed_dim=384, depth=12, num_heads=6, **kwargs)
+    model = _create_vision_transformer('vit_small_patch8_224_dino', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_base_patch16_224_dino(pretrained=False, **kwargs):
+    """ ViT-Base (ViT-B/16) w/ DINO pretrained weights (no head) - https://arxiv.org/abs/2104.14294
+    """
+    model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs)
+    model = _create_vision_transformer('vit_base_patch16_224_dino', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_base_patch8_224_dino(pretrained=False, **kwargs):
+    """ ViT-Base (ViT-B/8) w/ DINO pretrained weights (no head) - https://arxiv.org/abs/2104.14294
+    """
+    model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12, **kwargs)
+    model = _create_vision_transformer('vit_base_patch8_224_dino', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_base_patch16_224_miil_in21k(pretrained=False, **kwargs):
+    """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).
+    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
+    """
+    model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, **kwargs)
+    model = _create_vision_transformer('vit_base_patch16_224_miil_in21k', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_base_patch16_224_miil(pretrained=False, **kwargs):
+    """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).
+    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
+    """
+    model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, **kwargs)
+    model = _create_vision_transformer('vit_base_patch16_224_miil', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+# Experimental models below
+
+@register_model
+def vit_base_patch32_plus_256(pretrained=False, **kwargs):
+    """ ViT-Base (ViT-B/32+)
+    """
+    model_kwargs = dict(patch_size=32, embed_dim=896, depth=12, num_heads=14, init_values=1e-5, **kwargs)
+    model = _create_vision_transformer('vit_base_patch32_plus_256', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_base_patch16_plus_240(pretrained=False, **kwargs):
+    """ ViT-Base (ViT-B/16+)
+    """
+    model_kwargs = dict(patch_size=16, embed_dim=896, depth=12, num_heads=14, init_values=1e-5, **kwargs)
+    model = _create_vision_transformer('vit_base_patch16_plus_240', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_base_patch16_rpn_224(pretrained=False, **kwargs):
+    """ ViT-Base (ViT-B/16) w/ residual post-norm
+    """
+    model_kwargs = dict(
+        patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, init_values=1e-5, class_token=False,
+        block_fn=ResPostBlock, global_pool=kwargs.pop('global_pool', 'avg'), **kwargs)
+    model = _create_vision_transformer('vit_base_patch16_rpn_224', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_small_patch16_36x1_224(pretrained=False, **kwargs):
+    """ ViT-Small w/ LayerScale + 36 x 1 (36 block serial) config. Experimental, may remove.
+    Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795
+    Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow.
+    """
+    model_kwargs = dict(patch_size=16, embed_dim=384, depth=36, num_heads=6, init_values=1e-5, **kwargs)
+    model = _create_vision_transformer('vit_small_patch16_36x1_224', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_small_patch16_18x2_224(pretrained=False, **kwargs):
+    """ ViT-Small w/ LayerScale + 18 x 2 (36 block parallel) config. Experimental, may remove.
+    Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795
+    Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow.
+    """
+    model_kwargs = dict(
+        patch_size=16, embed_dim=384, depth=18, num_heads=6, init_values=1e-5, block_fn=ParallelBlock, **kwargs)
+    model = _create_vision_transformer('vit_small_patch16_18x2_224', pretrained=pretrained, **model_kwargs)
+    return model
+
+
+@register_model
+def vit_base_patch16_18x2_224(pretrained=False, **kwargs):
+    """ ViT-Base w/ LayerScale + 18 x 2 (36 block parallel) config. Experimental, may remove.
+    Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795
+    """
+    model_kwargs = dict(
+        patch_size=16, embed_dim=768, depth=18, num_heads=12, init_values=1e-5, block_fn=ParallelBlock, **kwargs)
+    model = _create_vision_transformer('vit_base_patch16_18x2_224', pretrained=pretrained, **model_kwargs)
+    return model
+
+
diff --git a/optim/__init__.py b/optim/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..33e4907f99a74bebdaadb0cd5435d734e2fed684
--- /dev/null
+++ b/optim/__init__.py
@@ -0,0 +1,13 @@
+from .adamp import AdamP
+from .adamw import AdamW
+from .adafactor import Adafactor
+from .adahessian import Adahessian
+from .lookahead import Lookahead
+from .nadam import Nadam
+from .novograd import NovoGrad
+from .nvnovograd import NvNovoGrad
+from .radam import RAdam
+from .rmsprop_tf import RMSpropTF
+from .sgdp import SGDP
+
+from .optim_factory import create_optimizer
\ No newline at end of file
diff --git a/optim/adafactor.py b/optim/adafactor.py
new file mode 100644
index 0000000000000000000000000000000000000000..088ce3acd82e2be1b393afafa05f48435e538a1a
--- /dev/null
+++ b/optim/adafactor.py
@@ -0,0 +1,174 @@
+""" Adafactor Optimizer
+
+Lifted from https://github.com/pytorch/fairseq/blob/master/fairseq/optim/adafactor.py
+
+Original header/copyright below.
+
+"""
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+import torch
+import math
+
+
+class Adafactor(torch.optim.Optimizer):
+    """Implements Adafactor algorithm.
+    This implementation is based on: `Adafactor: Adaptive Learning Rates with Sublinear Memory Cost`
+    (see https://arxiv.org/abs/1804.04235)
+
+    Note that this optimizer internally adjusts the learning rate depending on the
+    *scale_parameter*, *relative_step* and *warmup_init* options.
+
+    To use a manual (external) learning rate schedule, pass an explicit `lr` value and set
+    `scale_parameter=False`; in this implementation relative-step mode is enabled
+    automatically when (and only when) `lr` is None.
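+
+    With relative steps, the learning rate at update t is
+    lr_t = min(step_min(t), 1/sqrt(t)), where step_min(t) is 1e-2 (or 1e-6 * t
+    when warmup_init is enabled), optionally multiplied by
+    max(eps_scale, RMS(param)) when scale_parameter is True. (Note added for
+    clarity; this mirrors _get_lr below.)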
+
+    Arguments:
+        params (iterable): iterable of parameters to optimize or dicts defining parameter groups
+        lr (float, optional): external learning rate; None enables the internal
+            relative-step schedule (default: None)
+        eps (float): regularization constant for the square gradient (default: 1e-30)
+        eps_scale (float): regularization constant for the parameter scale (default: 1e-3)
+        clip_threshold (float): threshold of root mean square of final gradient update (default: 1.0)
+        decay_rate (float): coefficient used to compute running averages of square gradient (default: -0.8)
+        betas (tuple[float, float], optional): only betas[0] is used, as the coefficient
+            for computing running averages of the gradient (default: None)
+        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)
+        scale_parameter (bool): if True, learning rate is scaled by root mean square of parameter (default: True)
+        warmup_init (bool): time-dependent learning rate computation depends on
+            whether warm-up initialization is being used; requires lr=None (default: False)
+    """
+
+    def __init__(self, params, lr=None, eps=1e-30, eps_scale=1e-3, clip_threshold=1.0,
+                 decay_rate=-0.8, betas=None, weight_decay=0.0, scale_parameter=True, warmup_init=False):
+        relative_step = lr is None
+        if warmup_init and not relative_step:
+            raise ValueError('warmup_init requires relative_step=True')
+
+        beta1 = None if betas is None else betas[0]  # make it compat with standard betas arg
+        defaults = dict(lr=lr, eps=eps, eps_scale=eps_scale, clip_threshold=clip_threshold, decay_rate=decay_rate,
+                        beta1=beta1, weight_decay=weight_decay, scale_parameter=scale_parameter,
+                        relative_step=relative_step, warmup_init=warmup_init)
+        super(Adafactor, self).__init__(params, defaults)
+
+    @staticmethod
+    def _get_lr(param_group, param_state):
+        if param_group['relative_step']:
+            min_step = 1e-6 * param_state['step'] if param_group['warmup_init'] else 1e-2
+            lr_t = min(min_step, 1.0 / math.sqrt(param_state['step']))
+            param_scale = 1.0
+            if param_group['scale_parameter']:
+                param_scale = max(param_group['eps_scale'], param_state['RMS'])
+            param_group['lr'] = lr_t * param_scale
+        return param_group['lr']
+
+    @staticmethod
+    def _get_options(param_group, param_shape):
+        factored = len(param_shape) >= 2
+        use_first_moment = param_group['beta1'] is not None
+        return factored, use_first_moment
+
+    @staticmethod
+    def _rms(tensor):
+        return tensor.norm(2) / (tensor.numel() ** 0.5)
+
+    def _approx_sq_grad(self, exp_avg_sq_row, exp_avg_sq_col):
+        r_factor = (exp_avg_sq_row / exp_avg_sq_row.mean(dim=-1, keepdim=True)).rsqrt_().unsqueeze(-1)
+        c_factor = exp_avg_sq_col.unsqueeze(-2).rsqrt()
+        return torch.mul(r_factor, c_factor)
+
+    def step(self, closure=None):
+        """Performs a single optimization step.
+        Arguments:
+            closure (callable, optional): A closure that reevaluates the model and returns the loss.
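+
+        Example (illustrative sketch added for documentation; assumes `model`
+        and a computed `loss` exist):
+            >>> optimizer = Adafactor(model.parameters(), lr=None)  # relative-step mode
+            >>> loss.backward()
+            >>> optimizer.step()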
+        """
+        loss = None
+        if closure is not None:
+            loss = closure()
+
+        for group in self.param_groups:
+            for p in group['params']:
+                if p.grad is None:
+                    continue
+                grad = p.grad.data
+                if grad.dtype in {torch.float16, torch.bfloat16}:
+                    grad = grad.float()
+                if grad.is_sparse:
+                    raise RuntimeError('Adafactor does not support sparse gradients.')
+
+                state = self.state[p]
+                grad_shape = grad.shape
+
+                factored, use_first_moment = self._get_options(group, grad_shape)
+                # State Initialization
+                if len(state) == 0:
+                    state['step'] = 0
+
+                    if use_first_moment:
+                        # Exponential moving average of gradient values
+                        state['exp_avg'] = torch.zeros_like(grad)
+                    if factored:
+                        state['exp_avg_sq_row'] = torch.zeros(grad_shape[:-1]).to(grad)
+                        state['exp_avg_sq_col'] = torch.zeros(grad_shape[:-2] + grad_shape[-1:]).to(grad)
+                    else:
+                        state['exp_avg_sq'] = torch.zeros_like(grad)
+
+                    state['RMS'] = 0
+                else:
+                    if use_first_moment:
+                        state['exp_avg'] = state['exp_avg'].to(grad)
+                    if factored:
+                        state['exp_avg_sq_row'] = state['exp_avg_sq_row'].to(grad)
+                        state['exp_avg_sq_col'] = state['exp_avg_sq_col'].to(grad)
+                    else:
+                        state['exp_avg_sq'] = state['exp_avg_sq'].to(grad)
+
+                p_data_fp32 = p.data
+                if p.data.dtype in {torch.float16, torch.bfloat16}:
+                    p_data_fp32 = p_data_fp32.float()
+
+                state['step'] += 1
+                state['RMS'] = self._rms(p_data_fp32)
+                lr_t = self._get_lr(group, state)
+
+                beta2t = 1.0 - math.pow(state['step'], group['decay_rate'])
+                update = grad ** 2 + group['eps']
+                if factored:
+                    exp_avg_sq_row = state['exp_avg_sq_row']
+                    exp_avg_sq_col = state['exp_avg_sq_col']
+
+                    exp_avg_sq_row.mul_(beta2t).add_(1.0 - beta2t, update.mean(dim=-1))
+                    exp_avg_sq_col.mul_(beta2t).add_(1.0 - beta2t, update.mean(dim=-2))
+                    #exp_avg_sq_row.mul_(beta2t).add_(update.mean(dim=-1), alpha=1.0 - beta2t)  # pytorch 1.6+
+                    #exp_avg_sq_col.mul_(beta2t).add_(update.mean(dim=-2), alpha=1.0 - beta2t)
+
+                    # Approximation of exponential moving average of square of gradient
+                    update = self._approx_sq_grad(exp_avg_sq_row, exp_avg_sq_col)
+                    update.mul_(grad)
+                else:
+                    exp_avg_sq = state['exp_avg_sq']
+
+                    exp_avg_sq.mul_(beta2t).add_(1.0 - beta2t, update)
+                    #exp_avg_sq.mul_(beta2t).add_(update, alpha=1.0 - beta2t)  # pytorch 1.6+
+                    update = exp_avg_sq.rsqrt().mul_(grad)
+
+                update.div_((self._rms(update) / group['clip_threshold']).clamp_(min=1.0))
+                update.mul_(lr_t)
+
+                if use_first_moment:
+                    exp_avg = state['exp_avg']
+                    exp_avg.mul_(group["beta1"]).add_(1 - group["beta1"], update)
+                    #exp_avg.mul_(group['beta1']).add_(update, alpha=1 - group['beta1'])  # pytorch 1.6+
+                    update = exp_avg
+
+                if group['weight_decay'] != 0:
+                    p_data_fp32.add_(-group["weight_decay"] * lr_t, p_data_fp32)
+                    #p_data_fp32.add_(p_data_fp32, alpha=-group['weight_decay'] * lr_t)  # pytorch 1.6+
+
+                p_data_fp32.add_(-update)
+
+                if p.data.dtype in {torch.float16, torch.bfloat16}:
+                    p.data.copy_(p_data_fp32)
+
+        return loss
\ No newline at end of file
diff --git a/optim/adahessian.py b/optim/adahessian.py
new file mode 100644
index 0000000000000000000000000000000000000000..985c67ca686a65f61f5c5b1a7db3e5bba815a19b
--- /dev/null
+++ b/optim/adahessian.py
@@ -0,0 +1,156 @@
+""" AdaHessian Optimizer
+
+Lifted from https://github.com/davda54/ada-hessian/blob/master/ada_hessian.py
+Originally licensed MIT, Copyright 2020, David Samuel
+"""
+import torch
+
+
+class Adahessian(torch.optim.Optimizer):
+    """
+    Implements the AdaHessian algorithm from "ADAHESSIAN: An Adaptive Second Order Optimizer for Machine Learning"
+
+    Arguments:
+        params (iterable): iterable of parameters to optimize or dicts defining parameter groups
+        lr (float, optional): learning rate (default: 0.1)
+        betas ((float, float), optional): coefficients used for computing running averages of gradient and the
+            squared hessian trace (default: (0.9, 0.999))
+        eps (float, optional): term added to the denominator to improve numerical stability (default: 1e-8)
+        weight_decay (float, optional): weight decay (L2 penalty) (default: 0.0)
+        hessian_power (float, optional): exponent of the hessian trace (default: 1.0)
+        update_each (int, optional): compute the hessian trace approximation only after *this* number of steps
+            (to save time) (default: 1)
+        n_samples (int, optional): how many times to sample `z` for the approximation of the hessian trace (default: 1)
+    """
+
+    def __init__(self, params, lr=0.1, betas=(0.9, 0.999), eps=1e-8, weight_decay=0.0,
+                 hessian_power=1.0, update_each=1, n_samples=1, avg_conv_kernel=False):
+        if not 0.0 <= lr:
+            raise ValueError(f"Invalid learning rate: {lr}")
+        if not 0.0 <= eps:
+            raise ValueError(f"Invalid epsilon value: {eps}")
+        if not 0.0 <= betas[0] < 1.0:
+            raise ValueError(f"Invalid beta parameter at index 0: {betas[0]}")
+        if not 0.0 <= betas[1] < 1.0:
+            raise ValueError(f"Invalid beta parameter at index 1: {betas[1]}")
+        if not 0.0 <= hessian_power <= 1.0:
+            raise ValueError(f"Invalid Hessian power value: {hessian_power}")
+
+        self.n_samples = n_samples
+        self.update_each = update_each
+        self.avg_conv_kernel = avg_conv_kernel
+
+        # use a separate generator that deterministically generates the same `z`s across all GPUs in case of distributed training
+        self.seed = 2147483647
+        self.generator = torch.Generator().manual_seed(self.seed)
+
+        defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, hessian_power=hessian_power)
+        super(Adahessian, self).__init__(params, defaults)
+
+        for p in self.get_params():
+            p.hess = 0.0
+            self.state[p]["hessian step"] = 0
+
+    @property
+    def is_second_order(self):
+        return True
+
+    def get_params(self):
+        """
+        Gets all parameters in all param_groups with gradients
+        """
+
+        return (p for group in self.param_groups for p in group['params'] if p.requires_grad)
+
+    def zero_hessian(self):
+        """
+        Zeros out the accumulated hessian traces.
+        """
+
+        for p in self.get_params():
+            if not isinstance(p.hess, float) and self.state[p]["hessian step"] % self.update_each == 0:
+                p.hess.zero_()
+
+    @torch.no_grad()
+    def set_hessian(self):
+        """
+        Computes the Hutchinson approximation of the hessian trace and accumulates it for each trainable parameter.
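+
+        For Rademacher vectors z (entries +/-1 with equal probability),
+        E[z * (H @ z)] equals the diagonal of the Hessian H, so averaging
+        z * (H @ z) over `n_samples` draws gives an unbiased estimate of
+        diag(H) (note added for clarity).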
+ """ + + params = [] + for p in filter(lambda p: p.grad is not None, self.get_params()): + if self.state[p]["hessian step"] % self.update_each == 0: # compute the trace only each `update_each` step + params.append(p) + self.state[p]["hessian step"] += 1 + + if len(params) == 0: + return + + if self.generator.device != params[0].device: # hackish way of casting the generator to the right device + self.generator = torch.Generator(params[0].device).manual_seed(self.seed) + + grads = [p.grad for p in params] + + for i in range(self.n_samples): + # Rademacher distribution {-1.0, 1.0} + zs = [torch.randint(0, 2, p.size(), generator=self.generator, device=p.device) * 2.0 - 1.0 for p in params] + h_zs = torch.autograd.grad( + grads, params, grad_outputs=zs, only_inputs=True, retain_graph=i < self.n_samples - 1) + for h_z, z, p in zip(h_zs, zs, params): + p.hess += h_z * z / self.n_samples # approximate the expected values of z*(H@z) + + @torch.no_grad() + def step(self, closure=None): + """ + Performs a single optimization step. + Arguments: + closure (callable, optional) -- a closure that reevaluates the model and returns the loss (default: None) + """ + + loss = None + if closure is not None: + loss = closure() + + self.zero_hessian() + self.set_hessian() + + for group in self.param_groups: + for p in group['params']: + if p.grad is None or p.hess is None: + continue + + if self.avg_conv_kernel and p.dim() == 4: + p.hess = torch.abs(p.hess).mean(dim=[2, 3], keepdim=True).expand_as(p.hess).clone() + + # Perform correct stepweight decay as in AdamW + p.mul_(1 - group['lr'] * group['weight_decay']) + + state = self.state[p] + + # State initialization + if len(state) == 1: + state['step'] = 0 + # Exponential moving average of gradient values + state['exp_avg'] = torch.zeros_like(p) + # Exponential moving average of Hessian diagonal square values + state['exp_hessian_diag_sq'] = torch.zeros_like(p) + + exp_avg, exp_hessian_diag_sq = state['exp_avg'], state['exp_hessian_diag_sq'] + beta1, beta2 = group['betas'] + state['step'] += 1 + + # Decay the first and second moment running average coefficient + exp_avg.mul_(beta1).add_(p.grad, alpha=1 - beta1) + exp_hessian_diag_sq.mul_(beta2).addcmul_(p.hess, p.hess, value=1 - beta2) + + bias_correction1 = 1 - beta1 ** state['step'] + bias_correction2 = 1 - beta2 ** state['step'] + + k = group['hessian_power'] + denom = (exp_hessian_diag_sq / bias_correction2).pow_(k / 2).add_(group['eps']) + + # make update + step_size = group['lr'] / bias_correction1 + p.addcdiv_(exp_avg, denom, value=-step_size) + + return loss diff --git a/optim/adamp.py b/optim/adamp.py new file mode 100644 index 0000000000000000000000000000000000000000..468c3e865e0ceb6fb2bf22f9388237a783314f07 --- /dev/null +++ b/optim/adamp.py @@ -0,0 +1,107 @@ +""" +AdamP Optimizer Implementation copied from https://github.com/clovaai/AdamP/blob/master/adamp/adamp.py + +Paper: `Slowing Down the Weight Norm Increase in Momentum-based Optimizers` - https://arxiv.org/abs/2006.08217 +Code: https://github.com/clovaai/AdamP + +Copyright (c) 2020-present NAVER Corp. 
+MIT license +""" + +import torch +import torch.nn as nn +from torch.optim.optimizer import Optimizer, required +import math + +class AdamP(Optimizer): + def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, + weight_decay=0, delta=0.1, wd_ratio=0.1, nesterov=False): + defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, + delta=delta, wd_ratio=wd_ratio, nesterov=nesterov) + super(AdamP, self).__init__(params, defaults) + + def _channel_view(self, x): + return x.view(x.size(0), -1) + + def _layer_view(self, x): + return x.view(1, -1) + + def _cosine_similarity(self, x, y, eps, view_func): + x = view_func(x) + y = view_func(y) + + x_norm = x.norm(dim=1).add_(eps) + y_norm = y.norm(dim=1).add_(eps) + dot = (x * y).sum(dim=1) + + return dot.abs() / x_norm / y_norm + + def _projection(self, p, grad, perturb, delta, wd_ratio, eps): + wd = 1 + expand_size = [-1] + [1] * (len(p.shape) - 1) + for view_func in [self._channel_view, self._layer_view]: + + cosine_sim = self._cosine_similarity(grad, p.data, eps, view_func) + + if cosine_sim.max() < delta / math.sqrt(view_func(p.data).size(1)): + p_n = p.data / view_func(p.data).norm(dim=1).view(expand_size).add_(eps) + perturb -= p_n * view_func(p_n * perturb).sum(dim=1).view(expand_size) + wd = wd_ratio + + return perturb, wd + + return perturb, wd + + def step(self, closure=None): + loss = None + if closure is not None: + loss = closure() + + for group in self.param_groups: + for p in group['params']: + if p.grad is None: + continue + + grad = p.grad.data + beta1, beta2 = group['betas'] + nesterov = group['nesterov'] + + state = self.state[p] + + # State initialization + if len(state) == 0: + state['step'] = 0 + state['exp_avg'] = torch.zeros_like(p.data) + state['exp_avg_sq'] = torch.zeros_like(p.data) + + # Adam + exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] + + state['step'] += 1 + bias_correction1 = 1 - beta1 ** state['step'] + bias_correction2 = 1 - beta2 ** state['step'] + + exp_avg.mul_(beta1).add_(1 - beta1, grad) + exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad) + + denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(group['eps']) + step_size = group['lr'] / bias_correction1 + + if nesterov: + perturb = (beta1 * exp_avg + (1 - beta1) * grad) / denom + else: + perturb = exp_avg / denom + + # Projection + wd_ratio = 1 + if len(p.shape) > 1: + perturb, wd_ratio = self._projection(p, grad, perturb, group['delta'], group['wd_ratio'], group['eps']) + + # Weight decay + if group['weight_decay'] > 0: + p.data.mul_(1 - group['lr'] * group['weight_decay'] * wd_ratio) + + # Step + p.data.add_(-step_size, perturb) + + return loss diff --git a/optim/adamw.py b/optim/adamw.py new file mode 100644 index 0000000000000000000000000000000000000000..66f9a959de586356a29ace2f9c57d3fee8d1057a --- /dev/null +++ b/optim/adamw.py @@ -0,0 +1,117 @@ +""" AdamW Optimizer +Impl copied from PyTorch master +""" +import math +import torch +from torch.optim.optimizer import Optimizer + + +class AdamW(Optimizer): + r"""Implements AdamW algorithm. + + The original Adam algorithm was proposed in `Adam: A Method for Stochastic Optimization`_. + The AdamW variant was proposed in `Decoupled Weight Decay Regularization`_. 
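+    Unlike L2 regularization folded into the gradient, the decoupled form below
+    scales the parameters directly by (1 - lr * weight_decay) before the Adam
+    update (note added for clarity).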
+ + Arguments: + params (iterable): iterable of parameters to optimize or dicts defining + parameter groups + lr (float, optional): learning rate (default: 1e-3) + betas (Tuple[float, float], optional): coefficients used for computing + running averages of gradient and its square (default: (0.9, 0.999)) + eps (float, optional): term added to the denominator to improve + numerical stability (default: 1e-8) + weight_decay (float, optional): weight decay coefficient (default: 1e-2) + amsgrad (boolean, optional): whether to use the AMSGrad variant of this + algorithm from the paper `On the Convergence of Adam and Beyond`_ + (default: False) + + .. _Adam\: A Method for Stochastic Optimization: + https://arxiv.org/abs/1412.6980 + .. _Decoupled Weight Decay Regularization: + https://arxiv.org/abs/1711.05101 + .. _On the Convergence of Adam and Beyond: + https://openreview.net/forum?id=ryQu7f-RZ + """ + + def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, + weight_decay=1e-2, amsgrad=False): + if not 0.0 <= lr: + raise ValueError("Invalid learning rate: {}".format(lr)) + if not 0.0 <= eps: + raise ValueError("Invalid epsilon value: {}".format(eps)) + if not 0.0 <= betas[0] < 1.0: + raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0])) + if not 0.0 <= betas[1] < 1.0: + raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1])) + defaults = dict(lr=lr, betas=betas, eps=eps, + weight_decay=weight_decay, amsgrad=amsgrad) + super(AdamW, self).__init__(params, defaults) + + def __setstate__(self, state): + super(AdamW, self).__setstate__(state) + for group in self.param_groups: + group.setdefault('amsgrad', False) + + def step(self, closure=None): + """Performs a single optimization step. + + Arguments: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + loss = closure() + + for group in self.param_groups: + for p in group['params']: + if p.grad is None: + continue + + # Perform stepweight decay + p.data.mul_(1 - group['lr'] * group['weight_decay']) + + # Perform optimization step + grad = p.grad.data + if grad.is_sparse: + raise RuntimeError('Adam does not support sparse gradients, please consider SparseAdam instead') + amsgrad = group['amsgrad'] + + state = self.state[p] + + # State initialization + if len(state) == 0: + state['step'] = 0 + # Exponential moving average of gradient values + state['exp_avg'] = torch.zeros_like(p.data) + # Exponential moving average of squared gradient values + state['exp_avg_sq'] = torch.zeros_like(p.data) + if amsgrad: + # Maintains max of all exp. moving avg. of sq. grad. values + state['max_exp_avg_sq'] = torch.zeros_like(p.data) + + exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] + if amsgrad: + max_exp_avg_sq = state['max_exp_avg_sq'] + beta1, beta2 = group['betas'] + + state['step'] += 1 + bias_correction1 = 1 - beta1 ** state['step'] + bias_correction2 = 1 - beta2 ** state['step'] + + # Decay the first and second moment running average coefficient + exp_avg.mul_(beta1).add_(1 - beta1, grad) + exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad) + if amsgrad: + # Maintains the maximum of all 2nd moment running avg. till now + torch.max(max_exp_avg_sq, exp_avg_sq, out=max_exp_avg_sq) + # Use the max. for normalizing running avg. 
of gradient + denom = (max_exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(group['eps']) + else: + denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(group['eps']) + + step_size = group['lr'] / bias_correction1 + + p.data.addcdiv_(-step_size, exp_avg, denom) + + return loss diff --git a/optim/lookahead.py b/optim/lookahead.py new file mode 100644 index 0000000000000000000000000000000000000000..6b5b7f38ec8cb6594e3986b66223fa2881daeca3 --- /dev/null +++ b/optim/lookahead.py @@ -0,0 +1,92 @@ +""" Lookahead Optimizer Wrapper. +Implementation modified from: https://github.com/alphadl/lookahead.pytorch +Paper: `Lookahead Optimizer: k steps forward, 1 step back` - https://arxiv.org/abs/1907.08610 + +Hacked together by / Copyright 2020 Ross Wightman +""" +import torch +from torch.optim.optimizer import Optimizer +from collections import defaultdict + + +class Lookahead(Optimizer): + def __init__(self, base_optimizer, alpha=0.5, k=6): + if not 0.0 <= alpha <= 1.0: + raise ValueError(f'Invalid slow update rate: {alpha}') + if not 1 <= k: + raise ValueError(f'Invalid lookahead steps: {k}') + defaults = dict(lookahead_alpha=alpha, lookahead_k=k, lookahead_step=0) + self.base_optimizer = base_optimizer + self.param_groups = self.base_optimizer.param_groups + self.defaults = base_optimizer.defaults + self.defaults.update(defaults) + self.state = defaultdict(dict) + # manually add our defaults to the param groups + for name, default in defaults.items(): + for group in self.param_groups: + group.setdefault(name, default) + + def update_slow(self, group): + for fast_p in group["params"]: + if fast_p.grad is None: + continue + param_state = self.state[fast_p] + if 'slow_buffer' not in param_state: + param_state['slow_buffer'] = torch.empty_like(fast_p.data) + param_state['slow_buffer'].copy_(fast_p.data) + slow = param_state['slow_buffer'] + slow.add_(group['lookahead_alpha'], fast_p.data - slow) + fast_p.data.copy_(slow) + + def sync_lookahead(self): + for group in self.param_groups: + self.update_slow(group) + + def step(self, closure=None): + #assert id(self.param_groups) == id(self.base_optimizer.param_groups) + loss = self.base_optimizer.step(closure) + for group in self.param_groups: + group['lookahead_step'] += 1 + if group['lookahead_step'] % group['lookahead_k'] == 0: + self.update_slow(group) + return loss + + def state_dict(self): + fast_state_dict = self.base_optimizer.state_dict() + slow_state = { + (id(k) if isinstance(k, torch.Tensor) else k): v + for k, v in self.state.items() + } + fast_state = fast_state_dict['state'] + param_groups = fast_state_dict['param_groups'] + return { + 'state': fast_state, + 'slow_state': slow_state, + 'param_groups': param_groups, + } + + def load_state_dict(self, state_dict): + fast_state_dict = { + 'state': state_dict['state'], + 'param_groups': state_dict['param_groups'], + } + self.base_optimizer.load_state_dict(fast_state_dict) + + # We want to restore the slow state, but share param_groups reference + # with base_optimizer. 
This is a bit redundant but least code + slow_state_new = False + if 'slow_state' not in state_dict: + print('Loading state_dict from optimizer without Lookahead applied.') + state_dict['slow_state'] = defaultdict(dict) + slow_state_new = True + slow_state_dict = { + 'state': state_dict['slow_state'], + 'param_groups': state_dict['param_groups'], # this is pointless but saves code + } + super(Lookahead, self).load_state_dict(slow_state_dict) + self.param_groups = self.base_optimizer.param_groups # make both ref same container + if slow_state_new: + # reapply defaults to catch missing lookahead specific ones + for name, default in self.defaults.items(): + for group in self.param_groups: + group.setdefault(name, default) diff --git a/optim/nadam.py b/optim/nadam.py new file mode 100644 index 0000000000000000000000000000000000000000..d994d1b83485c9b068de73f5f3cf2efb1e5bec39 --- /dev/null +++ b/optim/nadam.py @@ -0,0 +1,88 @@ +import torch +from torch.optim import Optimizer + + +class Nadam(Optimizer): + """Implements Nadam algorithm (a variant of Adam based on Nesterov momentum). + + It has been proposed in `Incorporating Nesterov Momentum into Adam`__. + + Arguments: + params (iterable): iterable of parameters to optimize or dicts defining + parameter groups + lr (float, optional): learning rate (default: 2e-3) + betas (Tuple[float, float], optional): coefficients used for computing + running averages of gradient and its square + eps (float, optional): term added to the denominator to improve + numerical stability (default: 1e-8) + weight_decay (float, optional): weight decay (L2 penalty) (default: 0) + schedule_decay (float, optional): momentum schedule decay (default: 4e-3) + + __ http://cs229.stanford.edu/proj2015/054_report.pdf + __ http://www.cs.toronto.edu/~fritz/absps/momentum.pdf + + Originally taken from: https://github.com/pytorch/pytorch/pull/1408 + NOTE: Has potential issues but does work well on some problems. + """ + + def __init__(self, params, lr=2e-3, betas=(0.9, 0.999), eps=1e-8, + weight_decay=0, schedule_decay=4e-3): + defaults = dict(lr=lr, betas=betas, eps=eps, + weight_decay=weight_decay, schedule_decay=schedule_decay) + super(Nadam, self).__init__(params, defaults) + + def step(self, closure=None): + """Performs a single optimization step. + + Arguments: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + loss = closure() + + for group in self.param_groups: + for p in group['params']: + if p.grad is None: + continue + grad = p.grad.data + state = self.state[p] + + # State initialization + if len(state) == 0: + state['step'] = 0 + state['m_schedule'] = 1. + state['exp_avg'] = grad.new().resize_as_(grad).zero_() + state['exp_avg_sq'] = grad.new().resize_as_(grad).zero_() + + # Warming momentum schedule + m_schedule = state['m_schedule'] + schedule_decay = group['schedule_decay'] + exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] + beta1, beta2 = group['betas'] + eps = group['eps'] + state['step'] += 1 + t = state['step'] + + if group['weight_decay'] != 0: + grad = grad.add(group['weight_decay'], p.data) + + momentum_cache_t = beta1 * \ + (1. - 0.5 * (0.96 ** (t * schedule_decay))) + momentum_cache_t_1 = beta1 * \ + (1. 
- 0.5 * (0.96 ** ((t + 1) * schedule_decay))) + m_schedule_new = m_schedule * momentum_cache_t + m_schedule_next = m_schedule * momentum_cache_t * momentum_cache_t_1 + state['m_schedule'] = m_schedule_new + + # Decay the first and second moment running average coefficient + exp_avg.mul_(beta1).add_(1. - beta1, grad) + exp_avg_sq.mul_(beta2).addcmul_(1. - beta2, grad, grad) + exp_avg_sq_prime = exp_avg_sq / (1. - beta2 ** t) + denom = exp_avg_sq_prime.sqrt_().add_(eps) + + p.data.addcdiv_(-group['lr'] * (1. - momentum_cache_t) / (1. - m_schedule_new), grad, denom) + p.data.addcdiv_(-group['lr'] * momentum_cache_t_1 / (1. - m_schedule_next), exp_avg, denom) + + return loss diff --git a/optim/novograd.py b/optim/novograd.py new file mode 100644 index 0000000000000000000000000000000000000000..4137c6aa9406360d29f5f7234ebbdef294404d0e --- /dev/null +++ b/optim/novograd.py @@ -0,0 +1,77 @@ +"""NovoGrad Optimizer. +Original impl by Masashi Kimura (Convergence Lab): https://github.com/convergence-lab/novograd +Paper: `Stochastic Gradient Methods with Layer-wise Adaptive Moments for Training of Deep Networks` + - https://arxiv.org/abs/1905.11286 +""" + +import torch +from torch.optim.optimizer import Optimizer +import math + + +class NovoGrad(Optimizer): + def __init__(self, params, grad_averaging=False, lr=0.1, betas=(0.95, 0.98), eps=1e-8, weight_decay=0): + defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay) + super(NovoGrad, self).__init__(params, defaults) + self._lr = lr + self._beta1 = betas[0] + self._beta2 = betas[1] + self._eps = eps + self._wd = weight_decay + self._grad_averaging = grad_averaging + + self._momentum_initialized = False + + def step(self, closure=None): + loss = None + if closure is not None: + loss = closure() + + if not self._momentum_initialized: + for group in self.param_groups: + for p in group['params']: + if p.grad is None: + continue + state = self.state[p] + grad = p.grad.data + if grad.is_sparse: + raise RuntimeError('NovoGrad does not support sparse gradients') + + v = torch.norm(grad)**2 + m = grad/(torch.sqrt(v) + self._eps) + self._wd * p.data + state['step'] = 0 + state['v'] = v + state['m'] = m + state['grad_ema'] = None + self._momentum_initialized = True + + for group in self.param_groups: + for p in group['params']: + if p.grad is None: + continue + state = self.state[p] + state['step'] += 1 + + step, v, m = state['step'], state['v'], state['m'] + grad_ema = state['grad_ema'] + + grad = p.grad.data + g2 = torch.norm(grad)**2 + grad_ema = g2 if grad_ema is None else grad_ema * \ + self._beta2 + g2 * (1. - self._beta2) + grad *= 1.0 / (torch.sqrt(grad_ema) + self._eps) + + if self._grad_averaging: + grad *= (1. - self._beta1) + + g2 = torch.norm(grad)**2 + v = self._beta2*v + (1. - self._beta2)*g2 + m = self._beta1*m + (grad / (torch.sqrt(v) + self._eps) + self._wd * p.data) + bias_correction1 = 1 - self._beta1 ** step + bias_correction2 = 1 - self._beta2 ** step + step_size = group['lr'] * math.sqrt(bias_correction2) / bias_correction1 + + state['v'], state['m'] = v, m + state['grad_ema'] = grad_ema + p.data.add_(-step_size, m) + return loss diff --git a/optim/nvnovograd.py b/optim/nvnovograd.py new file mode 100644 index 0000000000000000000000000000000000000000..323312d2fc36d028124f7a7ec604d248e71503cd --- /dev/null +++ b/optim/nvnovograd.py @@ -0,0 +1,118 @@ +""" Nvidia NovoGrad Optimizer. 
+Original impl by Nvidia from Jasper example: + - https://github.com/NVIDIA/DeepLearningExamples/blob/master/PyTorch/SpeechRecognition/Jasper +Paper: `Stochastic Gradient Methods with Layer-wise Adaptive Moments for Training of Deep Networks` + - https://arxiv.org/abs/1905.11286 +""" + +import torch +from torch.optim.optimizer import Optimizer +import math + + +class NvNovoGrad(Optimizer): + """ + Implements Novograd algorithm. + + Args: + params (iterable): iterable of parameters to optimize or dicts defining + parameter groups + lr (float, optional): learning rate (default: 1e-3) + betas (Tuple[float, float], optional): coefficients used for computing + running averages of gradient and its square (default: (0.95, 0.98)) + eps (float, optional): term added to the denominator to improve + numerical stability (default: 1e-8) + weight_decay (float, optional): weight decay (L2 penalty) (default: 0) + grad_averaging: gradient averaging + amsgrad (boolean, optional): whether to use the AMSGrad variant of this + algorithm from the paper `On the Convergence of Adam and Beyond`_ + (default: False) + """ + + def __init__(self, params, lr=1e-3, betas=(0.95, 0.98), eps=1e-8, + weight_decay=0, grad_averaging=False, amsgrad=False): + if not 0.0 <= lr: + raise ValueError("Invalid learning rate: {}".format(lr)) + if not 0.0 <= eps: + raise ValueError("Invalid epsilon value: {}".format(eps)) + if not 0.0 <= betas[0] < 1.0: + raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0])) + if not 0.0 <= betas[1] < 1.0: + raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1])) + defaults = dict(lr=lr, betas=betas, eps=eps, + weight_decay=weight_decay, + grad_averaging=grad_averaging, + amsgrad=amsgrad) + + super(NvNovoGrad, self).__init__(params, defaults) + + def __setstate__(self, state): + super(NvNovoGrad, self).__setstate__(state) + for group in self.param_groups: + group.setdefault('amsgrad', False) + + def step(self, closure=None): + """Performs a single optimization step. + + Arguments: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + loss = closure() + + for group in self.param_groups: + for p in group['params']: + if p.grad is None: + continue + grad = p.grad.data + if grad.is_sparse: + raise RuntimeError('Sparse gradients are not supported.') + amsgrad = group['amsgrad'] + + state = self.state[p] + + # State initialization + if len(state) == 0: + state['step'] = 0 + # Exponential moving average of gradient values + state['exp_avg'] = torch.zeros_like(p.data) + # Exponential moving average of squared gradient values + state['exp_avg_sq'] = torch.zeros([]).to(state['exp_avg'].device) + if amsgrad: + # Maintains max of all exp. moving avg. of sq. grad. values + state['max_exp_avg_sq'] = torch.zeros([]).to(state['exp_avg'].device) + + exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] + if amsgrad: + max_exp_avg_sq = state['max_exp_avg_sq'] + beta1, beta2 = group['betas'] + + state['step'] += 1 + + norm = torch.sum(torch.pow(grad, 2)) + + if exp_avg_sq == 0: + exp_avg_sq.copy_(norm) + else: + exp_avg_sq.mul_(beta2).add_(1 - beta2, norm) + + if amsgrad: + # Maintains the maximum of all 2nd moment running avg. till now + torch.max(max_exp_avg_sq, exp_avg_sq, out=max_exp_avg_sq) + # Use the max. for normalizing running avg. 
of gradient + denom = max_exp_avg_sq.sqrt().add_(group['eps']) + else: + denom = exp_avg_sq.sqrt().add_(group['eps']) + + grad.div_(denom) + if group['weight_decay'] != 0: + grad.add_(group['weight_decay'], p.data) + if group['grad_averaging']: + grad.mul_(1 - beta1) + exp_avg.mul_(beta1).add_(grad) + + p.data.add_(-group['lr'], exp_avg) + + return loss diff --git a/optim/optim_factory.py b/optim/optim_factory.py new file mode 100644 index 0000000000000000000000000000000000000000..f2ace85b12a4ed972036416475f9a99922653078 --- /dev/null +++ b/optim/optim_factory.py @@ -0,0 +1,197 @@ +""" Optimizer Factory w/ Custom Weight Decay +Hacked together by / Copyright 2020 Ross Wightman +""" +import torch +from torch import optim as optim + +from .adafactor import Adafactor +from .adahessian import Adahessian +from .adamp import AdamP +from .lookahead import Lookahead +from .nadam import Nadam +from .novograd import NovoGrad +from .nvnovograd import NvNovoGrad +from .radam import RAdam +from .rmsprop_tf import RMSpropTF +from .sgdp import SGDP + +try: + from apex.optimizers import FusedNovoGrad, FusedAdam, FusedLAMB, FusedSGD + has_apex = True +except ImportError: + has_apex = False + + +def add_weight_decay(model, weight_decay=1e-5, skip_list=()): + decay = [] + no_decay = [] + for name, param in model.named_parameters(): + if not param.requires_grad: + continue # frozen weights + if len(param.shape) == 1 or name.endswith(".bias") or name in skip_list: + no_decay.append(param) + else: + decay.append(param) + return [ + {'params': no_decay, 'weight_decay': 0.}, + {'params': decay, 'weight_decay': weight_decay}] + + +def create_optimizer(args, model, filter_bias_and_bn=True, config=None): + opt_lower = args.opt.lower() + weight_decay = args.weight_decay + + customized_lr = config.get('customized_lr', False) + prompt_lr = config.get('prompt_lr', args.lr) + vis_lr = config.get('vis_lr', args.lr) + text_lr = config.get('text_lr', args.lr) + connector_lr = config.get('connector_lr', args.lr) + adapter_lr = config.get('adapter_lr', args.lr) + + if customized_lr: + parameters = [] + targets = ['connector', 'model_vision', 'model_text', 'prompt'] + # if prompt_lr is not None: + # targets.append('prompt') + # promt_params = [kv[1] for kv in model.named_parameters() if 'prompt' in kv[0].split('.')[0]] + # parameters.append({'params': promt_params, 'lr': prompt_lr, 'weight_decay': weight_decay}) + + params = {'connector': [], 'model_vision': [], 'model_text': [], 'prompt': [], 'other': []} + for kv in model.named_parameters(): + if 'connector' in kv[0].split('.')[0]: + params['connector'].append(kv[1]) + elif 'model_vision' in kv[0].split('.')[0]: + params['model_vision'].append(kv[1]) + elif 'model_text' in kv[0].split('.')[0]: + params['model_text'].append(kv[1]) + elif 'prompt' in kv[0].split('.')[0]: + params['prompt'].append(kv[1]) + else: + params['other'].append(kv[1]) + + # connector_params = [kv[1] for kv in model.named_parameters() if 'connector' in kv[0].split('.')[0]] + parameters.append({'params': params['connector'], 'lr': connector_lr, 'weight_decay': weight_decay}) + print('connector', len(params['connector'])) + + # model_vision_params = [kv[1] for kv in model.named_parameters() if 'model_vision' in kv[0].split('.')[0]] + parameters.append({'params': params['model_vision'], 'lr': vis_lr, 'weight_decay': weight_decay}) + print('model_vision', len(params['model_vision'])) + + # model_text_params = [kv[1] for kv in model.named_parameters() if 'model_text' in kv[0].split('.')[0]] + 
parameters.append({'params': params['model_text'], 'lr': text_lr, 'weight_decay': weight_decay})
+        print('model_text', len(params['model_text']))
+
+        parameters.append({'params': params['prompt'], 'lr': prompt_lr, 'weight_decay': weight_decay})
+        print('prompt', len(params['prompt']))
+
+        # other_params = [kv[1] for kv in model.named_parameters() if any(t in kv[0].split('.')[0] for t in targets)]
+        parameters.append({'params': params['other']})
+        print('other', len(params['other']))
+        print(connector_lr, vis_lr, text_lr, prompt_lr, "optim")
+
+    elif config.get('adapter_lr', False) and config.get('prompt_lr', False):
+        adapter_params = [kv[1] for kv in model.named_parameters() if 'adapter' in kv[0]]
+        prompt_params = [kv[1] for kv in model.named_parameters() if 'prompt' in kv[0]]
+
+        other_params = [kv[1] for kv in model.named_parameters() if 'adapter' not in kv[0] and 'prompt' not in kv[0]]
+        parameters = [
+            {'params': other_params},
+            {'params': adapter_params, 'lr': adapter_lr, 'weight_decay': weight_decay},
+            {'params': prompt_params, 'lr': prompt_lr, 'weight_decay': weight_decay}
+        ]
+
+    elif config.get('adapter_lr', False):
+        adapter_params = [kv[1] for kv in model.named_parameters() if 'adapter' in kv[0]]
+        other_params = [kv[1] for kv in model.named_parameters() if 'adapter' not in kv[0]]
+        parameters = [
+            {'params': other_params},
+            {'params': adapter_params, 'lr': adapter_lr, 'weight_decay': weight_decay}
+        ]
+
+    elif config.get('prompt_lr', False):
+        prompt_params = [kv[1] for kv in model.named_parameters() if 'prompt' in kv[0]]
+        other_params = [kv[1] for kv in model.named_parameters() if 'prompt' not in kv[0]]
+        parameters = [
+            {'params': other_params},
+            {'params': prompt_params, 'lr': prompt_lr, 'weight_decay': weight_decay}
+        ]
+    elif weight_decay and filter_bias_and_bn:
+        skip = {}
+        if hasattr(model, 'no_weight_decay'):
+            skip = model.no_weight_decay()
+        parameters = add_weight_decay(model, weight_decay, skip)
+        weight_decay = 0.
+    else:
+        parameters = model.parameters()
+
+    if 'fused' in opt_lower:
+        assert has_apex and torch.cuda.is_available(), 'APEX and CUDA required for fused optimizers'
+
+    opt_args = dict(lr=args.lr, weight_decay=weight_decay)
+    if hasattr(args, 'opt_eps') and args.opt_eps is not None:
+        opt_args['eps'] = args.opt_eps
+    if hasattr(args, 'opt_betas') and args.opt_betas is not None:
+        opt_args['betas'] = args.opt_betas
+    if hasattr(args, 'opt_args') and args.opt_args is not None:
+        opt_args.update(args.opt_args)
+
+    opt_split = opt_lower.split('_')
+    opt_lower = opt_split[-1]
+    if opt_lower == 'sgd' or opt_lower == 'nesterov':
+        opt_args.pop('eps', None)
+        optimizer = optim.SGD(parameters, momentum=args.momentum, nesterov=True, **opt_args)
+    elif opt_lower == 'momentum':
+        opt_args.pop('eps', None)
+        optimizer = optim.SGD(parameters, momentum=args.momentum, nesterov=False, **opt_args)
+    elif opt_lower == 'adam':
+        optimizer = optim.Adam(parameters, **opt_args)
+    elif opt_lower == 'adamw':
+        optimizer = optim.AdamW(parameters, **opt_args)
+    elif opt_lower == 'nadam':
+        optimizer = Nadam(parameters, **opt_args)
+    elif opt_lower == 'radam':
+        optimizer = RAdam(parameters, **opt_args)
+    elif opt_lower == 'adamp':
+        optimizer = AdamP(parameters, wd_ratio=0.01, nesterov=True, **opt_args)
+    elif opt_lower == 'sgdp':
+        optimizer = SGDP(parameters, momentum=args.momentum, nesterov=True, **opt_args)
+    elif opt_lower == 'adadelta':
+        optimizer = optim.Adadelta(parameters, **opt_args)
+    elif opt_lower == 'adafactor':
+        if not args.lr:
+            opt_args['lr'] = None
+        optimizer = Adafactor(parameters, **opt_args)
+    elif opt_lower == 'adahessian':
+        optimizer = Adahessian(parameters, **opt_args)
+    elif opt_lower == 'rmsprop':
+        optimizer = optim.RMSprop(parameters, alpha=0.9, momentum=args.momentum, **opt_args)
+    elif opt_lower == 'rmsproptf':
+        optimizer = RMSpropTF(parameters, alpha=0.9, momentum=args.momentum, **opt_args)
+    elif opt_lower == 'novograd':
+        optimizer = NovoGrad(parameters, **opt_args)
+    elif opt_lower == 'nvnovograd':
+        optimizer = NvNovoGrad(parameters, **opt_args)
+    elif opt_lower == 'fusedsgd':
+        opt_args.pop('eps', None)
+        optimizer = FusedSGD(parameters, momentum=args.momentum, nesterov=True, **opt_args)
+    elif opt_lower == 'fusedmomentum':
+        opt_args.pop('eps', None)
+        optimizer = FusedSGD(parameters, momentum=args.momentum, nesterov=False, **opt_args)
+    elif opt_lower == 'fusedadam':
+        optimizer = FusedAdam(parameters, adam_w_mode=False, **opt_args)
+    elif opt_lower == 'fusedadamw':
+        optimizer = FusedAdam(parameters, adam_w_mode=True, **opt_args)
+    elif opt_lower == 'fusedlamb':
+        optimizer = FusedLAMB(parameters, **opt_args)
+    elif opt_lower == 'fusednovograd':
+        opt_args.setdefault('betas', (0.95, 0.98))
+        optimizer = FusedNovoGrad(parameters, **opt_args)
+    else:
+        raise ValueError(f'Invalid optimizer: {opt_lower}')
+
+    if len(opt_split) > 1:
+        if opt_split[0] == 'lookahead':
+            optimizer = Lookahead(optimizer)
+
+    return optimizer
diff --git a/optim/radam.py b/optim/radam.py
new file mode 100644
index 0000000000000000000000000000000000000000..9987a334460286b1a6c8ec6d57ee023596a74219
--- /dev/null
+++ b/optim/radam.py
@@ -0,0 +1,152 @@
+"""RAdam Optimizer.
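+
+RAdam rectifies the variance of the adaptive step size early in training; in
+the code below, when the SMA term N_sma is < 5 the update falls back to a
+plain (unadapted) momentum step (note added for clarity).
+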
+Implementation lifted from: https://github.com/LiyuanLucasLiu/RAdam +Paper: `On the Variance of the Adaptive Learning Rate and Beyond` - https://arxiv.org/abs/1908.03265 +""" +import math +import torch +from torch.optim.optimizer import Optimizer, required + + +class RAdam(Optimizer): + + def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0): + defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay) + self.buffer = [[None, None, None] for ind in range(10)] + super(RAdam, self).__init__(params, defaults) + + def __setstate__(self, state): + super(RAdam, self).__setstate__(state) + + def step(self, closure=None): + + loss = None + if closure is not None: + loss = closure() + + for group in self.param_groups: + + for p in group['params']: + if p.grad is None: + continue + grad = p.grad.data.float() + if grad.is_sparse: + raise RuntimeError('RAdam does not support sparse gradients') + + p_data_fp32 = p.data.float() + + state = self.state[p] + + if len(state) == 0: + state['step'] = 0 + state['exp_avg'] = torch.zeros_like(p_data_fp32) + state['exp_avg_sq'] = torch.zeros_like(p_data_fp32) + else: + state['exp_avg'] = state['exp_avg'].type_as(p_data_fp32) + state['exp_avg_sq'] = state['exp_avg_sq'].type_as(p_data_fp32) + + exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] + beta1, beta2 = group['betas'] + + exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad) + exp_avg.mul_(beta1).add_(1 - beta1, grad) + + state['step'] += 1 + buffered = self.buffer[int(state['step'] % 10)] + if state['step'] == buffered[0]: + N_sma, step_size = buffered[1], buffered[2] + else: + buffered[0] = state['step'] + beta2_t = beta2 ** state['step'] + N_sma_max = 2 / (1 - beta2) - 1 + N_sma = N_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t) + buffered[1] = N_sma + + # more conservative since it's an approximated value + if N_sma >= 5: + step_size = group['lr'] * math.sqrt( + (1 - beta2_t) * (N_sma - 4) / (N_sma_max - 4) * (N_sma - 2) / N_sma * N_sma_max / ( + N_sma_max - 2)) / (1 - beta1 ** state['step']) + else: + step_size = group['lr'] / (1 - beta1 ** state['step']) + buffered[2] = step_size + + if group['weight_decay'] != 0: + p_data_fp32.add_(-group['weight_decay'] * group['lr'], p_data_fp32) + + # more conservative since it's an approximated value + if N_sma >= 5: + denom = exp_avg_sq.sqrt().add_(group['eps']) + p_data_fp32.addcdiv_(-step_size, exp_avg, denom) + else: + p_data_fp32.add_(-step_size, exp_avg) + + p.data.copy_(p_data_fp32) + + return loss + + +class PlainRAdam(Optimizer): + + def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0): + defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay) + + super(PlainRAdam, self).__init__(params, defaults) + + def __setstate__(self, state): + super(PlainRAdam, self).__setstate__(state) + + def step(self, closure=None): + + loss = None + if closure is not None: + loss = closure() + + for group in self.param_groups: + + for p in group['params']: + if p.grad is None: + continue + grad = p.grad.data.float() + if grad.is_sparse: + raise RuntimeError('RAdam does not support sparse gradients') + + p_data_fp32 = p.data.float() + + state = self.state[p] + + if len(state) == 0: + state['step'] = 0 + state['exp_avg'] = torch.zeros_like(p_data_fp32) + state['exp_avg_sq'] = torch.zeros_like(p_data_fp32) + else: + state['exp_avg'] = state['exp_avg'].type_as(p_data_fp32) + state['exp_avg_sq'] = state['exp_avg_sq'].type_as(p_data_fp32) + + exp_avg, exp_avg_sq = 
state['exp_avg'], state['exp_avg_sq']
+                beta1, beta2 = group['betas']
+
+                exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)
+                exp_avg.mul_(beta1).add_(1 - beta1, grad)
+
+                state['step'] += 1
+                beta2_t = beta2 ** state['step']
+                N_sma_max = 2 / (1 - beta2) - 1
+                N_sma = N_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t)
+
+                if group['weight_decay'] != 0:
+                    p_data_fp32.add_(-group['weight_decay'] * group['lr'], p_data_fp32)
+
+                # more conservative since it's an approximated value
+                if N_sma >= 5:
+                    step_size = group['lr'] * math.sqrt(
+                        (1 - beta2_t) * (N_sma - 4) / (N_sma_max - 4) * (N_sma - 2) / N_sma * N_sma_max / (
+                                N_sma_max - 2)) / (1 - beta1 ** state['step'])
+                    denom = exp_avg_sq.sqrt().add_(group['eps'])
+                    p_data_fp32.addcdiv_(-step_size, exp_avg, denom)
+                else:
+                    step_size = group['lr'] / (1 - beta1 ** state['step'])
+                    p_data_fp32.add_(-step_size, exp_avg)
+
+                p.data.copy_(p_data_fp32)
+
+        return loss
diff --git a/optim/rmsprop_tf.py b/optim/rmsprop_tf.py
new file mode 100644
index 0000000000000000000000000000000000000000..5115555cd26040e3af297a6e79e7bd5e4d202623
--- /dev/null
+++ b/optim/rmsprop_tf.py
@@ -0,0 +1,136 @@
+""" RMSProp modified to behave like Tensorflow impl
+
+Originally cut & paste from PyTorch RMSProp
+https://github.com/pytorch/pytorch/blob/063946d2b3f3f1e953a2a3b54e0b34f1393de295/torch/optim/rmsprop.py
+Licensed under BSD-Clause 3 (ish), https://github.com/pytorch/pytorch/blob/master/LICENSE
+
+Modifications Copyright 2020 Ross Wightman
+"""
+
+import torch
+from torch.optim import Optimizer
+
+
+class RMSpropTF(Optimizer):
+    """Implements RMSprop algorithm (TensorFlow style epsilon)
+
+    NOTE: This is a direct cut-and-paste of PyTorch RMSprop with eps applied before sqrt
+    and a few other modifications to closer match Tensorflow for matching hyper-params.
+
+    Noteworthy changes include:
+    1. Epsilon applied inside square-root
+    2. square_avg initialized to ones
+    3. LR scaling of update accumulated in momentum buffer
+
+    Proposed by G. Hinton in his
+    `course <http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf>`_.
+
+    The centered version first appears in `Generating Sequences
+    With Recurrent Neural Networks <https://arxiv.org/abs/1308.0850>`_.
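+
+    In other words, the denominator here is sqrt(square_avg + eps) rather than
+    PyTorch's sqrt(square_avg) + eps (note added for clarity; see the use of
+    `square_avg.add(group['eps']).sqrt_()` in step below).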
+ + Arguments: + params (iterable): iterable of parameters to optimize or dicts defining + parameter groups + lr (float, optional): learning rate (default: 1e-2) + momentum (float, optional): momentum factor (default: 0) + alpha (float, optional): smoothing (decay) constant (default: 0.9) + eps (float, optional): term added to the denominator to improve + numerical stability (default: 1e-10) + centered (bool, optional) : if ``True``, compute the centered RMSProp, + the gradient is normalized by an estimation of its variance + weight_decay (float, optional): weight decay (L2 penalty) (default: 0) + decoupled_decay (bool, optional): decoupled weight decay as per https://arxiv.org/abs/1711.05101 + lr_in_momentum (bool, optional): learning rate scaling is included in the momentum buffer + update as per defaults in Tensorflow + + """ + + def __init__(self, params, lr=1e-2, alpha=0.9, eps=1e-10, weight_decay=0, momentum=0., centered=False, + decoupled_decay=False, lr_in_momentum=True): + if not 0.0 <= lr: + raise ValueError("Invalid learning rate: {}".format(lr)) + if not 0.0 <= eps: + raise ValueError("Invalid epsilon value: {}".format(eps)) + if not 0.0 <= momentum: + raise ValueError("Invalid momentum value: {}".format(momentum)) + if not 0.0 <= weight_decay: + raise ValueError("Invalid weight_decay value: {}".format(weight_decay)) + if not 0.0 <= alpha: + raise ValueError("Invalid alpha value: {}".format(alpha)) + + defaults = dict(lr=lr, momentum=momentum, alpha=alpha, eps=eps, centered=centered, weight_decay=weight_decay, + decoupled_decay=decoupled_decay, lr_in_momentum=lr_in_momentum) + super(RMSpropTF, self).__init__(params, defaults) + + def __setstate__(self, state): + super(RMSpropTF, self).__setstate__(state) + for group in self.param_groups: + group.setdefault('momentum', 0) + group.setdefault('centered', False) + + def step(self, closure=None): + """Performs a single optimization step. + + Arguments: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + loss = closure() + + for group in self.param_groups: + for p in group['params']: + if p.grad is None: + continue + grad = p.grad.data + if grad.is_sparse: + raise RuntimeError('RMSprop does not support sparse gradients') + state = self.state[p] + + # State initialization + if len(state) == 0: + state['step'] = 0 + state['square_avg'] = torch.ones_like(p.data) # PyTorch inits to zero + if group['momentum'] > 0: + state['momentum_buffer'] = torch.zeros_like(p.data) + if group['centered']: + state['grad_avg'] = torch.zeros_like(p.data) + + square_avg = state['square_avg'] + one_minus_alpha = 1. 
- group['alpha'] + + state['step'] += 1 + + if group['weight_decay'] != 0: + if 'decoupled_decay' in group and group['decoupled_decay']: + p.data.add_(-group['weight_decay'], p.data) + else: + grad = grad.add(group['weight_decay'], p.data) + + # Tensorflow order of ops for updating squared avg + square_avg.add_(one_minus_alpha, grad.pow(2) - square_avg) + # square_avg.mul_(alpha).addcmul_(1 - alpha, grad, grad) # PyTorch original + + if group['centered']: + grad_avg = state['grad_avg'] + grad_avg.add_(one_minus_alpha, grad - grad_avg) + # grad_avg.mul_(alpha).add_(1 - alpha, grad) # PyTorch original + avg = square_avg.addcmul(-1, grad_avg, grad_avg).add(group['eps']).sqrt_() # eps moved in sqrt + else: + avg = square_avg.add(group['eps']).sqrt_() # eps moved in sqrt + + if group['momentum'] > 0: + buf = state['momentum_buffer'] + # Tensorflow accumulates the LR scaling in the momentum buffer + if 'lr_in_momentum' in group and group['lr_in_momentum']: + buf.mul_(group['momentum']).addcdiv_(group['lr'], grad, avg) + p.data.add_(-buf) + else: + # PyTorch scales the param update by LR + buf.mul_(group['momentum']).addcdiv_(grad, avg) + p.data.add_(-group['lr'], buf) + else: + p.data.addcdiv_(-group['lr'], grad, avg) + + return loss diff --git a/optim/sgdp.py b/optim/sgdp.py new file mode 100644 index 0000000000000000000000000000000000000000..f4a94aa332d7030a70e888342eb6cc4623d69836 --- /dev/null +++ b/optim/sgdp.py @@ -0,0 +1,96 @@ +""" +SGDP Optimizer Implementation copied from https://github.com/clovaai/AdamP/blob/master/adamp/sgdp.py + +Paper: `Slowing Down the Weight Norm Increase in Momentum-based Optimizers` - https://arxiv.org/abs/2006.08217 +Code: https://github.com/clovaai/AdamP + +Copyright (c) 2020-present NAVER Corp. +MIT license +""" + +import torch +import torch.nn as nn +from torch.optim.optimizer import Optimizer, required +import math + +class SGDP(Optimizer): + def __init__(self, params, lr=required, momentum=0, dampening=0, + weight_decay=0, nesterov=False, eps=1e-8, delta=0.1, wd_ratio=0.1): + defaults = dict(lr=lr, momentum=momentum, dampening=dampening, weight_decay=weight_decay, + nesterov=nesterov, eps=eps, delta=delta, wd_ratio=wd_ratio) + super(SGDP, self).__init__(params, defaults) + + def _channel_view(self, x): + return x.view(x.size(0), -1) + + def _layer_view(self, x): + return x.view(1, -1) + + def _cosine_similarity(self, x, y, eps, view_func): + x = view_func(x) + y = view_func(y) + + x_norm = x.norm(dim=1).add_(eps) + y_norm = y.norm(dim=1).add_(eps) + dot = (x * y).sum(dim=1) + + return dot.abs() / x_norm / y_norm + + def _projection(self, p, grad, perturb, delta, wd_ratio, eps): + wd = 1 + expand_size = [-1] + [1] * (len(p.shape) - 1) + for view_func in [self._channel_view, self._layer_view]: + + cosine_sim = self._cosine_similarity(grad, p.data, eps, view_func) + + if cosine_sim.max() < delta / math.sqrt(view_func(p.data).size(1)): + p_n = p.data / view_func(p.data).norm(dim=1).view(expand_size).add_(eps) + perturb -= p_n * view_func(p_n * perturb).sum(dim=1).view(expand_size) + wd = wd_ratio + + return perturb, wd + + return perturb, wd + + def step(self, closure=None): + loss = None + if closure is not None: + loss = closure() + + for group in self.param_groups: + weight_decay = group['weight_decay'] + momentum = group['momentum'] + dampening = group['dampening'] + nesterov = group['nesterov'] + + for p in group['params']: + if p.grad is None: + continue + grad = p.grad.data + state = self.state[p] + + # State initialization + if len(state) == 0: + 
state['momentum'] = torch.zeros_like(p.data) + + # SGD + buf = state['momentum'] + buf.mul_(momentum).add_(1 - dampening, grad) + if nesterov: + d_p = grad + momentum * buf + else: + d_p = buf + + # Projection + wd_ratio = 1 + if len(p.shape) > 1: + d_p, wd_ratio = self._projection(p, grad, d_p, group['delta'], group['wd_ratio'], group['eps']) + + # Weight decay + if weight_decay != 0: + p.data.mul_(1 - group['lr'] * group['weight_decay'] * wd_ratio / (1-momentum)) + + # Step + p.data.add_(-group['lr'], d_p) + + return loss diff --git a/refTools/evaluation/__init__.py b/refTools/evaluation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..66e55e9332e33fa7bceb187bb7aa2aed53428c65 --- /dev/null +++ b/refTools/evaluation/__init__.py @@ -0,0 +1,3 @@ +__author__ = 'licheng' + + diff --git a/refTools/evaluation/bleu/LICENSE b/refTools/evaluation/bleu/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..9ccf677900b5238062979c7bc1e7102e501e0be4 --- /dev/null +++ b/refTools/evaluation/bleu/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2015 Xinlei Chen, Hao Fang, Tsung-Yi Lin, and Ramakrishna Vedantam + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
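Stepping back to the two optimizer classes added above (RMSpropTF and SGDP): both subclass `torch.optim.Optimizer`, so they drop into a standard training loop. Below is a minimal usage sketch, not part of the diff: the toy model and data are illustrative, the `optim.rmsprop_tf` import path is an assumption (only `optim/sgdp.py` is visible in this patch), and the deprecated two-argument `add_`/`addcdiv_` calls in the code above imply an older PyTorch release.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

from optim.sgdp import SGDP
from optim.rmsprop_tf import RMSpropTF  # assumed module path (not shown in this patch)

model = nn.Linear(16, 4)                      # illustrative toy model
x, y = torch.randn(8, 16), torch.randn(8, 4)  # illustrative data

# TF-style RMSprop: eps is added inside the sqrt, square_avg starts at ones,
# and with lr_in_momentum=True (the default) the LR is folded into the
# momentum buffer, matching TensorFlow's update order.
optimizer = RMSpropTF(model.parameters(), lr=1e-2, alpha=0.9, eps=1e-10,
                      momentum=0.9)
optimizer.zero_grad()
F.mse_loss(model(x), y).backward()
optimizer.step()

# SGDP: SGD with momentum plus a projection that suppresses the component of
# the update that only inflates the norm of scale-invariant weights.
optimizer = SGDP(model.parameters(), lr=1e-2, momentum=0.9, nesterov=True,
                 weight_decay=1e-4)
optimizer.zero_grad()
F.mse_loss(model(x), y).backward()
optimizer.step()
```

Note that `lr_in_momentum=True` changes how momentum interacts with learning-rate schedules, since the buffer itself accumulates LR-scaled steps rather than raw gradient statistics.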
diff --git a/refTools/evaluation/bleu/__init__.py b/refTools/evaluation/bleu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f7d85bba884ea8f83fc6ab2a1e6ade80d98d4d9
--- /dev/null
+++ b/refTools/evaluation/bleu/__init__.py
@@ -0,0 +1 @@
+__author__ = 'tylin'
diff --git a/refTools/evaluation/bleu/__init__.pyc b/refTools/evaluation/bleu/__init__.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..511902ec6502be8c1a68e8cce6e417c22ed12323
Binary files /dev/null and b/refTools/evaluation/bleu/__init__.pyc differ
diff --git a/refTools/evaluation/bleu/__pycache__/__init__.cpython-36.pyc b/refTools/evaluation/bleu/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f9e73460ff945bac1e0550a4cbc1802fe4739428
Binary files /dev/null and b/refTools/evaluation/bleu/__pycache__/__init__.cpython-36.pyc differ
diff --git a/refTools/evaluation/bleu/__pycache__/__init__.cpython-38.pyc b/refTools/evaluation/bleu/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0397cb0971ef4584da214330174ca05d98ebc86e
Binary files /dev/null and b/refTools/evaluation/bleu/__pycache__/__init__.cpython-38.pyc differ
diff --git a/refTools/evaluation/bleu/__pycache__/bleu.cpython-36.pyc b/refTools/evaluation/bleu/__pycache__/bleu.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6d754ecbe2e663c19a1611a0688560a886a4dcdd
Binary files /dev/null and b/refTools/evaluation/bleu/__pycache__/bleu.cpython-36.pyc differ
diff --git a/refTools/evaluation/bleu/__pycache__/bleu.cpython-38.pyc b/refTools/evaluation/bleu/__pycache__/bleu.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fbc89e2bb61d87e0aa0559482dfafe72f29d6bbf
Binary files /dev/null and b/refTools/evaluation/bleu/__pycache__/bleu.cpython-38.pyc differ
diff --git a/refTools/evaluation/bleu/__pycache__/bleu_scorer.cpython-36.pyc b/refTools/evaluation/bleu/__pycache__/bleu_scorer.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b863f716a4bd9a05e3e2a38d02b805e4bcb7727a
Binary files /dev/null and b/refTools/evaluation/bleu/__pycache__/bleu_scorer.cpython-36.pyc differ
diff --git a/refTools/evaluation/bleu/__pycache__/bleu_scorer.cpython-38.pyc b/refTools/evaluation/bleu/__pycache__/bleu_scorer.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d34edec2de3bff39078a4727cfa484e8fd1fa60e
Binary files /dev/null and b/refTools/evaluation/bleu/__pycache__/bleu_scorer.cpython-38.pyc differ
diff --git a/refTools/evaluation/bleu/bleu.py b/refTools/evaluation/bleu/bleu.py
new file mode 100644
index 0000000000000000000000000000000000000000..1dab5adb97e4cbf3528bd3669c64f2521bb62571
--- /dev/null
+++ b/refTools/evaluation/bleu/bleu.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+#
+# File Name : bleu.py
+#
+# Description : Wrapper for BLEU scorer.
+#
+# Creation Date : 06-01-2015
+# Last Modified : Thu 19 Mar 2015 09:13:28 PM PDT
+# Authors : Hao Fang and Tsung-Yi Lin
+
+from refTools.evaluation.bleu.bleu_scorer import BleuScorer
+
+
+class Bleu:
+    def __init__(self, n=4):
+        # default: compute BLEU score up to 4-grams
+        self._n = n
+        self._hypo_for_image = {}
+        self.ref_for_image = {}
+
+    def compute_score(self, gts, res):
+
+        assert(gts.keys() == res.keys())
+        imgIds = gts.keys()
+
+        bleu_scorer = BleuScorer(n=self._n)
+        for id in imgIds:
+            hypo = res[id]
+            ref = gts[id]
+
+            # Sanity check.
+            assert(type(hypo) is list)
+            assert(len(hypo) == 1)
+            assert(type(ref) is list)
+            assert(len(ref) >= 1)
+
+            bleu_scorer += (hypo[0], ref)
+
+        #score, scores = bleu_scorer.compute_score(option='shortest')
+        score, scores = bleu_scorer.compute_score(option='closest', verbose=1)
+        #score, scores = bleu_scorer.compute_score(option='average', verbose=1)
+
+        # return (bleu, bleu_info)
+        return score, scores
+
+    def method(self):
+        return "Bleu"
diff --git a/refTools/evaluation/bleu/bleu.pyc b/refTools/evaluation/bleu/bleu.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8f15689090b2255fffa2b1212c66f7b6447e5a6b
Binary files /dev/null and b/refTools/evaluation/bleu/bleu.pyc differ
diff --git a/refTools/evaluation/bleu/bleu_scorer.py b/refTools/evaluation/bleu/bleu_scorer.py
new file mode 100644
index 0000000000000000000000000000000000000000..5419434dbbd4f83c9a3acc378cd97aa89a4e10a4
--- /dev/null
+++ b/refTools/evaluation/bleu/bleu_scorer.py
@@ -0,0 +1,264 @@
+#!/usr/bin/env python
+
+# bleu_scorer.py
+# David Chiang
+
+# Copyright (c) 2004-2006 University of Maryland. All rights
+# reserved. Do not redistribute without permission from the
+# author. Not for commercial use.
+
+# Modified by:
+# Hao Fang
+# Tsung-Yi Lin
+
+'''Provides:
+cook_refs(refs, n=4): Transform a list of reference sentences as strings into a form usable by cook_test().
+cook_test(test, refs, n=4): Transform a test sentence as a string (together with the cooked reference sentences) into a form usable by score_cooked().
+'''
+
+import copy
+import sys, math, re
+from collections import defaultdict
+
+def precook(s, n=4, out=False):
+    """Takes a string as input and returns an object that can be given to
+    either cook_refs or cook_test. This is optional: cook_refs and cook_test
+    can take string arguments as well."""
+    words = s.split()
+    counts = defaultdict(int)
+    for k in range(1,n+1):
+        for i in range(len(words)-k+1):
+            ngram = tuple(words[i:i+k])
+            counts[ngram] += 1
+    return (len(words), counts)
+
+def cook_refs(refs, eff=None, n=4): ## lhuang: oracle will call with "average"
+    '''Takes a list of reference sentences for a single segment
+    and returns an object that encapsulates everything that BLEU
+    needs to know about them.'''
+
+    reflen = []
+    maxcounts = {}
+    for ref in refs:
+        rl, counts = precook(ref, n)
+        reflen.append(rl)
+        for (ngram,count) in counts.items():
+            maxcounts[ngram] = max(maxcounts.get(ngram,0), count)
+
+    # Calculate effective reference sentence length.
+    if eff == "shortest":
+        reflen = min(reflen)
+    elif eff == "average":
+        reflen = float(sum(reflen))/len(reflen)
+
+    ## lhuang: N.B.: leave reflen computation to the very end!!
+
+    ## lhuang: N.B.: in case of "closest", keep a list of reflens!! (bad design)
+
+    return (reflen, maxcounts)
+
+def cook_test(test, refparam, eff=None, n=4):
+    '''Takes a test sentence and returns an object that
+    encapsulates everything that BLEU needs to know about it.'''
+
+    reflen, refmaxcounts = refparam[0], refparam[1]
+    testlen, counts = precook(test, n, True)
+
+    result = {}
+
+    # Calculate effective reference sentence length.
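+    # e.g. with reflen=[9, 11] and testlen=10, "closest" picks 9: the min is
+    # taken over (|l - testlen|, l) tuples, so ties break toward the shorter
+    # reference; "shortest" would give 9 and "average" 10.0 (illustrative values).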
+ + if eff == "closest": + result["reflen"] = min((abs(l-testlen), l) for l in reflen)[1] + else: ## i.e., "average" or "shortest" or None + result["reflen"] = reflen + + result["testlen"] = testlen + + result["guess"] = [max(0,testlen-k+1) for k in range(1,n+1)] + + result['correct'] = [0]*n + for (ngram, count) in counts.items(): + result["correct"][len(ngram)-1] += min(refmaxcounts.get(ngram,0), count) + + return result + +class BleuScorer(object): + """Bleu scorer. + """ + + __slots__ = "n", "crefs", "ctest", "_score", "_ratio", "_testlen", "_reflen", "special_reflen" + # special_reflen is used in oracle (proportional effective ref len for a node). + + def copy(self): + ''' copy the refs.''' + new = BleuScorer(n=self.n) + new.ctest = copy.copy(self.ctest) + new.crefs = copy.copy(self.crefs) + new._score = None + return new + + def __init__(self, test=None, refs=None, n=4, special_reflen=None): + ''' singular instance ''' + + self.n = n + self.crefs = [] + self.ctest = [] + self.cook_append(test, refs) + self.special_reflen = special_reflen + + def cook_append(self, test, refs): + '''called by constructor and __iadd__ to avoid creating new instances.''' + + if refs is not None: + self.crefs.append(cook_refs(refs)) + if test is not None: + cooked_test = cook_test(test, self.crefs[-1]) + self.ctest.append(cooked_test) ## N.B.: -1 + else: + self.ctest.append(None) # lens of crefs and ctest have to match + + self._score = None ## need to recompute + + def ratio(self, option=None): + self.compute_score(option=option) + return self._ratio + + def score_ratio(self, option=None): + '''return (bleu, len_ratio) pair''' + return (self.fscore(option=option), self.ratio(option=option)) + + def score_ratio_str(self, option=None): + return "%.4f (%.2f)" % self.score_ratio(option) + + def reflen(self, option=None): + self.compute_score(option=option) + return self._reflen + + def testlen(self, option=None): + self.compute_score(option=option) + return self._testlen + + def retest(self, new_test): + if type(new_test) is str: + new_test = [new_test] + assert len(new_test) == len(self.crefs), new_test + self.ctest = [] + for t, rs in zip(new_test, self.crefs): + self.ctest.append(cook_test(t, rs)) + self._score = None + + return self + + def rescore(self, new_test): + ''' replace test(s) with new test(s), and returns the new score.''' + + return self.retest(new_test).compute_score() + + def size(self): + assert len(self.crefs) == len(self.ctest), "refs/test mismatch! %d<>%d" % (len(self.crefs), len(self.ctest)) + return len(self.crefs) + + def __iadd__(self, other): + '''add an instance (e.g., from another sentence).''' + + if type(other) is tuple: + ## avoid creating new BleuScorer instances + self.cook_append(other[0], other[1]) + else: + assert self.compatible(other), "incompatible BLEUs." 
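+            ## note: the tuple branch above is what lets callers write
+            ## `bleu_scorer += (hypo[0], ref)` (as bleu.py does) without
+            ## constructing a new BleuScorer per sentence.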
+ self.ctest.extend(other.ctest) + self.crefs.extend(other.crefs) + self._score = None ## need to recompute + + return self + + def compatible(self, other): + return isinstance(other, BleuScorer) and self.n == other.n + + def single_reflen(self, option="average"): + return self._single_reflen(self.crefs[0][0], option) + + def _single_reflen(self, reflens, option=None, testlen=None): + + if option == "shortest": + reflen = min(reflens) + elif option == "average": + reflen = float(sum(reflens))/len(reflens) + elif option == "closest": + reflen = min((abs(l-testlen), l) for l in reflens)[1] + else: + assert False, "unsupported reflen option %s" % option + + return reflen + + def recompute_score(self, option=None, verbose=0): + self._score = None + return self.compute_score(option, verbose) + + def compute_score(self, option=None, verbose=0): + n = self.n + small = 1e-9 + tiny = 1e-15 ## so that if guess is 0 still return 0 + bleu_list = [[] for _ in range(n)] + + if self._score is not None: + return self._score + + if option is None: + option = "average" if len(self.crefs) == 1 else "closest" + + self._testlen = 0 + self._reflen = 0 + totalcomps = {'testlen':0, 'reflen':0, 'guess':[0]*n, 'correct':[0]*n} + + # for each sentence + for comps in self.ctest: + testlen = comps['testlen'] + self._testlen += testlen + + if self.special_reflen is None: ## need computation + reflen = self._single_reflen(comps['reflen'], option, testlen) + else: + reflen = self.special_reflen + + self._reflen += reflen + + for key in ['guess','correct']: + for k in range(n): + totalcomps[key][k] += comps[key][k] + + # append per image bleu score + bleu = 1. + for k in range(n): + bleu *= (float(comps['correct'][k]) + tiny) \ + /(float(comps['guess'][k]) + small) + bleu_list[k].append(bleu ** (1./(k+1))) + ratio = (testlen + tiny) / (reflen + small) ## N.B.: avoid zero division + if ratio < 1: + for k in range(n): + bleu_list[k][-1] *= math.exp(1 - 1/ratio) + + if verbose > 1: + print(comps, reflen) + + totalcomps['reflen'] = self._reflen + totalcomps['testlen'] = self._testlen + + bleus = [] + bleu = 1. 
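+        # worked micro-example (illustrative): with n=2 and corpus totals
+        # correct=[3, 1], guess=[4, 3], the loop below gives
+        # Bleu_1 = 3/4 = 0.75 and Bleu_2 = ((3/4)*(1/3))**0.5 = 0.5,
+        # before the brevity penalty exp(1 - 1/ratio) is applied when the
+        # candidate corpus is shorter than the effective reference length.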
+ for k in range(n): + bleu *= float(totalcomps['correct'][k] + tiny) \ + / (totalcomps['guess'][k] + small) + bleus.append(bleu ** (1./(k+1))) + ratio = (self._testlen + tiny) / (self._reflen + small) ## N.B.: avoid zero division + if ratio < 1: + for k in range(n): + bleus[k] *= math.exp(1 - 1/ratio) + + if verbose > 0: + print(totalcomps) + print("ratio:", ratio) + + self._score = bleus + return self._score, bleu_list diff --git a/refTools/evaluation/bleu/bleu_scorer.pyc b/refTools/evaluation/bleu/bleu_scorer.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8606fc78aaba796720e4eec06e9c52969cf4ce09 Binary files /dev/null and b/refTools/evaluation/bleu/bleu_scorer.pyc differ diff --git a/refTools/evaluation/cider/__init__.py b/refTools/evaluation/cider/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3f7d85bba884ea8f83fc6ab2a1e6ade80d98d4d9 --- /dev/null +++ b/refTools/evaluation/cider/__init__.py @@ -0,0 +1 @@ +__author__ = 'tylin' diff --git a/refTools/evaluation/cider/__init__.pyc b/refTools/evaluation/cider/__init__.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e37f4c11318807aacf0766b0b9e5789d8171476 Binary files /dev/null and b/refTools/evaluation/cider/__init__.pyc differ diff --git a/refTools/evaluation/cider/__pycache__/__init__.cpython-36.pyc b/refTools/evaluation/cider/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d71d8f8c02fc15a7125df977665055f98368cf3d Binary files /dev/null and b/refTools/evaluation/cider/__pycache__/__init__.cpython-36.pyc differ diff --git a/refTools/evaluation/cider/__pycache__/__init__.cpython-38.pyc b/refTools/evaluation/cider/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6fa56bcd67bc7ae497416343ac89645140dbc86b Binary files /dev/null and b/refTools/evaluation/cider/__pycache__/__init__.cpython-38.pyc differ diff --git a/refTools/evaluation/cider/__pycache__/cider.cpython-36.pyc b/refTools/evaluation/cider/__pycache__/cider.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d2354bc99c2cda7574ba7ef1f0f4ae8e2acb0c31 Binary files /dev/null and b/refTools/evaluation/cider/__pycache__/cider.cpython-36.pyc differ diff --git a/refTools/evaluation/cider/__pycache__/cider.cpython-38.pyc b/refTools/evaluation/cider/__pycache__/cider.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c8ec8a4294b9d0829af0b3f67ab7bb8c2aad026 Binary files /dev/null and b/refTools/evaluation/cider/__pycache__/cider.cpython-38.pyc differ diff --git a/refTools/evaluation/cider/__pycache__/cider_scorer.cpython-36.pyc b/refTools/evaluation/cider/__pycache__/cider_scorer.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..33a7b1f729992d65ca1adbe4ecda2fc596f4dcab Binary files /dev/null and b/refTools/evaluation/cider/__pycache__/cider_scorer.cpython-36.pyc differ diff --git a/refTools/evaluation/cider/__pycache__/cider_scorer.cpython-38.pyc b/refTools/evaluation/cider/__pycache__/cider_scorer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3f76dbbc89c30e5d198a41323a8e7610d9aa7653 Binary files /dev/null and b/refTools/evaluation/cider/__pycache__/cider_scorer.cpython-38.pyc differ diff --git a/refTools/evaluation/cider/cider.py b/refTools/evaluation/cider/cider.py new file mode 100644 index 
0000000000000000000000000000000000000000..c96f4f454d7baa383f8bb1073279efbb9f187c0e
--- /dev/null
+++ b/refTools/evaluation/cider/cider.py
@@ -0,0 +1,54 @@
+# Filename: cider.py
+#
+# Description: Describes the class to compute the CIDEr (Consensus-Based Image Description Evaluation) Metric
+#              by Vedantam, Zitnick, and Parikh (http://arxiv.org/abs/1411.5726)
+#
+# Creation Date: Sun Feb 8 14:16:54 2015
+#
+# Authors: Ramakrishna Vedantam and Tsung-Yi Lin
+
+from refTools.evaluation.cider.cider_scorer import CiderScorer
+import pdb
+
+class Cider:
+    """
+    Main Class to compute the CIDEr metric
+
+    """
+    def __init__(self, test=None, refs=None, n=4, sigma=6.0):
+        # set cider to sum over 1 to 4-grams
+        self._n = n
+        # set the standard deviation parameter for gaussian penalty
+        self._sigma = sigma
+
+    def compute_score(self, gts, res):
+        """
+        Main function to compute CIDEr score
+        :param hypo_for_image (dict) : dictionary with key <image> and value <tokenized hypothesis / candidate sentences>
+                ref_for_image (dict) : dictionary with key <image> and value <tokenized reference sentences>
+        :return: cider (float) : computed CIDEr score for the corpus
+        """
+
+        assert(gts.keys() == res.keys())
+        imgIds = gts.keys()
+
+        cider_scorer = CiderScorer(n=self._n, sigma=self._sigma)
+
+        for id in imgIds:
+            hypo = res[id]
+            ref = gts[id]
+
+            # Sanity check.
+            assert(type(hypo) is list)
+            assert(len(hypo) == 1)
+            assert(type(ref) is list)
+            assert(len(ref) > 0)
+
+            cider_scorer += (hypo[0], ref)
+
+        (score, scores) = cider_scorer.compute_score()
+
+        return score, scores
+
+    def method(self):
+        return "CIDEr"
\ No newline at end of file
diff --git a/refTools/evaluation/cider/cider.pyc b/refTools/evaluation/cider/cider.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1b3d6455abc76d09b7c531ee09fe4960e99d5577
Binary files /dev/null and b/refTools/evaluation/cider/cider.pyc differ
diff --git a/refTools/evaluation/cider/cider_scorer.py b/refTools/evaluation/cider/cider_scorer.py
new file mode 100644
index 0000000000000000000000000000000000000000..a73405e594b29adf4a3df69ecddd81763818f6df
--- /dev/null
+++ b/refTools/evaluation/cider/cider_scorer.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python
+# Tsung-Yi Lin
+# Ramakrishna Vedantam
+
+import copy
+from collections import defaultdict
+import numpy as np
+import pdb
+import math
+
+def precook(s, n=4, out=False):
+    """
+    Takes a string as input and returns an object that can be given to
+    either cook_refs or cook_test. This is optional: cook_refs and cook_test
+    can take string arguments as well.
+    :param s: string : sentence to be converted into ngrams
+    :param n: int : number of ngrams for which representation is calculated
+    :return: term frequency vector for occurring ngrams
+    """
+    words = s.split()
+    counts = defaultdict(int)
+    for k in range(1,n+1):
+        for i in range(len(words)-k+1):
+            ngram = tuple(words[i:i+k])
+            counts[ngram] += 1
+    return counts
+
+def cook_refs(refs, n=4): ## lhuang: oracle will call with "average"
+    '''Takes a list of reference sentences for a single segment
+    and returns an object that encapsulates everything that CIDEr
+    needs to know about them.
+    :param refs: list of string : reference sentences for some image
+    :param n: int : number of ngrams for which (ngram) representation is calculated
+    :return: result (list of dict)
+    '''
+    return [precook(ref, n) for ref in refs]
+
+def cook_test(test, n=4):
+    '''Takes a test sentence and returns an object that
+    encapsulates everything that CIDEr needs to know about it.
+    :param test: string : hypothesis sentence for some image
+    :param n: int : number of ngrams for which (ngram) representation is calculated
+    :return: result (dict)
+    '''
+    return precook(test, n, True)
+
+class CiderScorer(object):
+    """CIDEr scorer.
+    """
+
+    def copy(self):
+        ''' copy the refs.'''
+        new = CiderScorer(n=self.n)
+        new.ctest = copy.copy(self.ctest)
+        new.crefs = copy.copy(self.crefs)
+        return new
+
+    def __init__(self, test=None, refs=None, n=4, sigma=6.0):
+        ''' singular instance '''
+        self.n = n
+        self.sigma = sigma
+        self.crefs = []
+        self.ctest = []
+        self.document_frequency = defaultdict(float)
+        self.cook_append(test, refs)
+        self.ref_len = None
+
+    def cook_append(self, test, refs):
+        '''called by constructor and __iadd__ to avoid creating new instances.'''
+
+        if refs is not None:
+            self.crefs.append(cook_refs(refs))
+            if test is not None:
+                self.ctest.append(cook_test(test)) ## N.B.: -1
+            else:
+                self.ctest.append(None) # lens of crefs and ctest have to match
+
+    def size(self):
+        assert len(self.crefs) == len(self.ctest), "refs/test mismatch! %d<>%d" % (len(self.crefs), len(self.ctest))
+        return len(self.crefs)
+
+    def __iadd__(self, other):
+        '''add an instance (e.g., from another sentence).'''
+
+        if type(other) is tuple:
+            ## avoid creating new CiderScorer instances
+            self.cook_append(other[0], other[1])
+        else:
+            self.ctest.extend(other.ctest)
+            self.crefs.extend(other.crefs)
+
+        return self
+    def compute_doc_freq(self):
+        '''
+        Compute document frequency for the reference data.
+        This will be used to compute idf (inverse document frequency) later.
+        The document frequency is stored in the object.
+        :return: None
+        '''
+        for refs in self.crefs:
+            # refs, k ref captions of one image
+            for ngram in set([ngram for ref in refs for (ngram,count) in ref.items()]):
+                self.document_frequency[ngram] += 1
+            # maxcounts[ngram] = max(maxcounts.get(ngram,0), count)
+
+    def compute_cider(self):
+        def counts2vec(cnts):
+            """
+            Function maps counts of ngram to vector of tfidf weights.
+            The function returns vec, an array of dictionaries that store the mapping of n-gram and tf-idf weights.
+            The n-th entry of the array holds the weights for n-grams of length n+1.
+            :param cnts:
+            :return: vec (array of dict), norm (array of float), length (int)
+            """
+            vec = [defaultdict(float) for _ in range(self.n)]
+            length = 0
+            norm = [0.0 for _ in range(self.n)]
+            for (ngram,term_freq) in cnts.items():
+                # give word count 1 if it doesn't appear in reference corpus
+                df = np.log(max(1.0, self.document_frequency[ngram]))
+                # ngram index
+                n = len(ngram)-1
+                # tf (term_freq) * idf (precomputed idf) for n-grams
+                vec[n][ngram] = float(term_freq)*(self.ref_len - df)
+                # compute norm for the vector. the norm will be used for computing similarity
+                norm[n] += pow(vec[n][ngram], 2)
+
+                if n == 1:
+                    length += term_freq
+            norm = [np.sqrt(n) for n in norm]
+            return vec, norm, length
+
+        def sim(vec_hyp, vec_ref, norm_hyp, norm_ref, length_hyp, length_ref):
+            '''
+            Compute the cosine similarity of two vectors.
+            :param vec_hyp: array of dictionary for vector corresponding to hypothesis
+            :param vec_ref: array of dictionary for vector corresponding to reference
+            :param norm_hyp: array of float for vector corresponding to hypothesis
+            :param norm_ref: array of float for vector corresponding to reference
+            :param length_hyp: int containing length of hypothesis
+            :param length_ref: int containing length of reference
+            :return: array of score for each n-grams cosine similarity
+            '''
+            delta = float(length_hyp - length_ref)
+            # measure cosine similarity
+            val = np.array([0.0 for _ in range(self.n)])
+            for n in range(self.n):
+                # ngram
+                for (ngram,count) in vec_hyp[n].items():
+                    # vrama91 : added clipping
+                    val[n] += min(vec_hyp[n][ngram], vec_ref[n][ngram]) * vec_ref[n][ngram]
+
+                if (norm_hyp[n] != 0) and (norm_ref[n] != 0):
+                    val[n] /= (norm_hyp[n]*norm_ref[n])
+
+                assert(not math.isnan(val[n]))
+                # vrama91: added a length based gaussian penalty
+                val[n] *= np.e**(-(delta**2)/(2*self.sigma**2))
+            return val
+
+        # compute log reference length
+        self.ref_len = np.log(float(len(self.crefs)))
+
+        scores = []
+        for test, refs in zip(self.ctest, self.crefs):
+            # compute vector for test captions
+            vec, norm, length = counts2vec(test)
+            # compute vector for ref captions
+            score = np.array([0.0 for _ in range(self.n)])
+            for ref in refs:
+                vec_ref, norm_ref, length_ref = counts2vec(ref)
+                score += sim(vec, vec_ref, norm, norm_ref, length, length_ref)
+            # change by vrama91 - mean of ngram scores, instead of sum
+            score_avg = np.mean(score)
+            # divide by number of references
+            score_avg /= len(refs)
+            # multiply score by 10
+            score_avg *= 10.0
+            # append score of an image to the score list
+            scores.append(score_avg)
+        return scores
+
+    def compute_score(self, option=None, verbose=0):
+        # compute idf
+        self.compute_doc_freq()
+        # assert to check document frequency
+        assert(len(self.ctest) >= max(self.document_frequency.values()))
+        # compute cider score
+        score = self.compute_cider()
+        # debug
+        # print score
+        return np.mean(np.array(score)), np.array(score)
\ No newline at end of file
diff --git a/refTools/evaluation/cider/cider_scorer.pyc b/refTools/evaluation/cider/cider_scorer.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4c77a56adb5304e2f2364ac63f01aca8b65495c6
Binary files /dev/null and b/refTools/evaluation/cider/cider_scorer.pyc differ
diff --git a/refTools/evaluation/meteor/__init__.py b/refTools/evaluation/meteor/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f7d85bba884ea8f83fc6ab2a1e6ade80d98d4d9
--- /dev/null
+++ b/refTools/evaluation/meteor/__init__.py
@@ -0,0 +1 @@
+__author__ = 'tylin'
diff --git a/refTools/evaluation/meteor/__init__.pyc b/refTools/evaluation/meteor/__init__.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..01703230a3a8235468fb61cd9eeb40fa3590e1f2
Binary files /dev/null and b/refTools/evaluation/meteor/__init__.pyc differ
diff --git a/refTools/evaluation/meteor/__pycache__/__init__.cpython-36.pyc b/refTools/evaluation/meteor/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..168c166e94f510f9c13456d862b73806e0a25542
Binary files /dev/null and b/refTools/evaluation/meteor/__pycache__/__init__.cpython-36.pyc differ
diff --git a/refTools/evaluation/meteor/__pycache__/__init__.cpython-38.pyc b/refTools/evaluation/meteor/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ae3430691bd987c82b25c9bebba453600fa30381
Binary files /dev/null and b/refTools/evaluation/meteor/__pycache__/__init__.cpython-38.pyc differ
diff --git a/refTools/evaluation/meteor/__pycache__/meteor.cpython-36.pyc b/refTools/evaluation/meteor/__pycache__/meteor.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..389ee8827cace4e0cef2e05de4976d9574ae7585
Binary files /dev/null and b/refTools/evaluation/meteor/__pycache__/meteor.cpython-36.pyc differ
diff --git a/refTools/evaluation/meteor/__pycache__/meteor.cpython-38.pyc b/refTools/evaluation/meteor/__pycache__/meteor.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ce08685b1fc1956a5009c7f515851a502a5b73e1
Binary files /dev/null and b/refTools/evaluation/meteor/__pycache__/meteor.cpython-38.pyc differ
diff --git a/refTools/evaluation/meteor/meteor.py b/refTools/evaluation/meteor/meteor.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4915c2d043a3ea3cf7844b3009b689aa8f41778
--- /dev/null
+++ b/refTools/evaluation/meteor/meteor.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+
+# Python wrapper for METEOR implementation, by Xinlei Chen
+# Acknowledge Michael Denkowski for the generous discussion and help
+
+import os
+import sys
+import subprocess
+import threading
+
+# Assumes meteor-1.5.jar is in the same directory as meteor.py. Change as needed.
+METEOR_JAR = 'meteor-1.5.jar'
+# print METEOR_JAR
+
+class Meteor:
+
+    def __init__(self):
+        self.meteor_cmd = ['java', '-jar', '-Xmx2G', METEOR_JAR, \
+                '-', '-', '-stdio', '-l', 'en', '-norm']
+        self.meteor_p = subprocess.Popen(self.meteor_cmd, \
+                cwd=os.path.dirname(os.path.abspath(__file__)), \
+                stdin=subprocess.PIPE, \
+                stdout=subprocess.PIPE, \
+                stderr=subprocess.PIPE)
+        # Used to guarantee thread safety
+        self.lock = threading.Lock()
+
+    def compute_score(self, gts, res):
+        assert(gts.keys() == res.keys())
+        imgIds = gts.keys()
+        scores = []
+
+        eval_line = 'EVAL'
+        self.lock.acquire()
+        for i in imgIds:
+            assert(len(res[i]) == 1)
+            stat = self._stat(res[i][0], gts[i])
+            eval_line += ' ||| {}'.format(stat)
+
+        self.meteor_p.stdin.write('{}\n'.format(eval_line).encode())
+        self.meteor_p.stdin.flush()
+        for i in range(0,len(imgIds)):
+            scores.append(float(self.meteor_p.stdout.readline().decode().strip()))
+        score = float(self.meteor_p.stdout.readline().decode().strip())
+        self.lock.release()
+
+        return score, scores
+
+    def method(self):
+        return "METEOR"
+
+    def _stat(self, hypothesis_str, reference_list):
+        # SCORE ||| reference 1 words ||| reference n words ||| hypothesis words
+        hypothesis_str = hypothesis_str.replace('|||','').replace('  ',' ')
+        score_line = ' ||| '.join(('SCORE', ' ||| '.join(reference_list), hypothesis_str))
+        self.meteor_p.stdin.write('{}\n'.format(score_line).encode())
+        self.meteor_p.stdin.flush()
+        return self.meteor_p.stdout.readline().decode().strip()
+
+    def _score(self, hypothesis_str, reference_list):
+        self.lock.acquire()
+        # SCORE ||| reference 1 words ||| reference n words ||| hypothesis words
+        hypothesis_str = hypothesis_str.replace('|||','').replace('  ',' ')
+        score_line = ' ||| '.join(('SCORE', ' ||| '.join(reference_list), hypothesis_str))
+        self.meteor_p.stdin.write('{}\n'.format(score_line).encode())
+        self.meteor_p.stdin.flush()
+        stats = self.meteor_p.stdout.readline().decode().strip()
+        eval_line = 'EVAL ||| {}'.format(stats)
+        # EVAL ||| stats
+        self.meteor_p.stdin.write('{}\n'.format(eval_line).encode())
+        self.meteor_p.stdin.flush()
+        score = float(self.meteor_p.stdout.readline().decode().strip())
+        self.lock.release()
+        return score
+
+    def __exit__(self):
+        self.lock.acquire()
+
self.meteor_p.stdin.close() + self.meteor_p.wait() + self.lock.release() diff --git a/refTools/evaluation/meteor/meteor.pyc b/refTools/evaluation/meteor/meteor.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fd280eb46c28d3b52520645135cdf5d4c4dd79a5 Binary files /dev/null and b/refTools/evaluation/meteor/meteor.pyc differ diff --git a/refTools/evaluation/readme.txt b/refTools/evaluation/readme.txt new file mode 100644 index 0000000000000000000000000000000000000000..4491b5a806468b6ca9b974fa04d3cd9fb3b4150c --- /dev/null +++ b/refTools/evaluation/readme.txt @@ -0,0 +1,11 @@ +This folder contains modified coco-caption evaluation, which is downloaded from https://github.com/tylin/coco-caption.git +and refEvaluation which is to be called by the refer algorithm. + +More specifically, this folder contains: +1. bleu/ +2. cider/ +3. meteor/ +4. rouge/ +5. tokenizer/ +6. __init__.py +7. refEvaluation.py diff --git a/refTools/evaluation/refEvaluation.py b/refTools/evaluation/refEvaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..8045cb566b1522844ca89f695a0ace2be755a43c --- /dev/null +++ b/refTools/evaluation/refEvaluation.py @@ -0,0 +1,136 @@ +from refTools.evaluation.tokenizer.ptbtokenizer import PTBTokenizer +from refTools.evaluation.bleu.bleu import Bleu +from refTools.evaluation.meteor.meteor import Meteor +from refTools.evaluation.rouge.rouge import Rouge +from refTools.evaluation.cider.cider import Cider + +""" +Input: refer and Res = [{ref_id, sent}] + +Things of interest +evalRefs - list of ['ref_id', 'CIDEr', 'Bleu_1', 'Bleu_2', 'Bleu_3', 'Bleu_4', 'ROUGE_L', 'METEOR'] +eval - dict of {metric: score} +refToEval - dict of {ref_id: ['ref_id', 'CIDEr', 'Bleu_1', 'Bleu_2', 'Bleu_3', 'Bleu_4', 'ROUGE_L', 'METEOR']} +""" + +class RefEvaluation: + def __init__ (self, refer, Res): + """ + :param refer: refer class of current dataset + :param Res: [{'ref_id', 'sent'}] + """ + self.evalRefs = [] + self.eval = {} + self.refToEval = {} + self.refer = refer + self.Res = Res + + def evaluate(self): + + evalRefIds = [ann['ref_id'] for ann in self.Res] + + refToGts = {} + for ref_id in evalRefIds: + ref = self.refer.Refs[ref_id] + gt_sents = [sent['sent'].encode('ascii', 'ignore').decode('ascii') for sent in ref['sentences']] # up to 3 expressions + refToGts[ref_id] = gt_sents + refToRes = {ann['ref_id']: [ann['sent']] for ann in self.Res} + + print('tokenization...') + tokenizer = PTBTokenizer() + self.refToRes = tokenizer.tokenize(refToRes) + self.refToGts = tokenizer.tokenize(refToGts) + + # ================================================= + # Set up scorers + # ================================================= + print('setting up scorers...') + scorers = [ + (Bleu(4), ["Bleu_1", "Bleu_2", "Bleu_3", "Bleu_4"]), + (Meteor(),"METEOR"), + (Rouge(), "ROUGE_L"), + (Cider(), "CIDEr") + ] + + # ================================================= + # Compute scores + # ================================================= + for scorer, method in scorers: + print('computing %s score...'%(scorer.method())) + score, scores = scorer.compute_score(self.refToGts, self.refToRes) + if type(method) == list: + for sc, scs, m in zip(score, scores, method): + self.setEval(sc, m) + self.setRefToEvalRefs(scs, self.refToGts.keys(), m) + print("%s: %0.3f"%(m, sc)) + else: + self.setEval(score, method) + self.setRefToEvalRefs(scores, self.refToGts.keys(), method) + print("%s: %0.3f"%(method, score)) + self.setEvalRefs() + + def setEval(self, score, method): + self.eval[method] = 
score + + def setRefToEvalRefs(self, scores, refIds, method): + for refId, score in zip(refIds, scores): + if not refId in self.refToEval: + self.refToEval[refId] = {} + self.refToEval[refId]["ref_id"] = refId + self.refToEval[refId][method] = score + + def setEvalRefs(self): + self.evalRefs = [eval for refId, eval in self.refToEval.items()] + + +if __name__ == '__main__': + + import os.path as osp + import sys + ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..')) + sys.path.insert(0, osp.join(ROOT_DIR, 'lib', 'datasets')) + from refer import REFER + + # load refer of dataset + dataset = 'refcoco' + refer = REFER(dataset, splitBy = 'google') + + # mimic some Res + val_refIds = refer.getRefIds(split='test') + ref_id = 49767 + print("GD: %s" % refer.Refs[ref_id]['sentences']) + Res = [{'ref_id': ref_id, 'sent': 'left bottle'}] + + # evaluate some refer expressions + refEval = RefEvaluation(refer, Res) + refEval.evaluate() + + # print output evaluation scores + for metric, score in refEval.eval.items(): + print('%s: %.3f'%(metric, score)) + + # demo how to use evalImgs to retrieve low score result + # evals = [eva for eva in refEval.evalRefs if eva['CIDEr']<30] + # print 'ground truth sents' + # refId = evals[0]['ref_id'] + # print 'refId: %s' % refId + # print [sent['sent'] for sent in refer.Refs[refId]['sentences']] + # + # print 'generated sent (CIDEr score %0.1f)' % (evals[0]['CIDEr']) + + # print refEval.refToEval[8] + + + + + + + + + + + + + + + diff --git a/refTools/evaluation/refEvaluation.pyc b/refTools/evaluation/refEvaluation.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a0c3b601ee65da9f5a1b6ceca5332d62e99cdf23 Binary files /dev/null and b/refTools/evaluation/refEvaluation.pyc differ diff --git a/refTools/evaluation/rouge/__init__.py b/refTools/evaluation/rouge/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..43a773e12ea2e960f9a62fa1c2179a73d8c0dd35 --- /dev/null +++ b/refTools/evaluation/rouge/__init__.py @@ -0,0 +1 @@ +__author__ = 'vrama91' diff --git a/refTools/evaluation/rouge/__init__.pyc b/refTools/evaluation/rouge/__init__.pyc new file mode 100644 index 0000000000000000000000000000000000000000..92f2e6ba4c22f9ec9721deb20b44789e9bdc53a0 Binary files /dev/null and b/refTools/evaluation/rouge/__init__.pyc differ diff --git a/refTools/evaluation/rouge/__pycache__/__init__.cpython-36.pyc b/refTools/evaluation/rouge/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f300eabf9e26bd7b5911494045f88e29f2a679c3 Binary files /dev/null and b/refTools/evaluation/rouge/__pycache__/__init__.cpython-36.pyc differ diff --git a/refTools/evaluation/rouge/__pycache__/__init__.cpython-38.pyc b/refTools/evaluation/rouge/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1e12062bff83aad2eb0ff04a0065a98c6feaa6a1 Binary files /dev/null and b/refTools/evaluation/rouge/__pycache__/__init__.cpython-38.pyc differ diff --git a/refTools/evaluation/rouge/__pycache__/rouge.cpython-36.pyc b/refTools/evaluation/rouge/__pycache__/rouge.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0219b6990eea009157a4a1b053f5adab3766f2c5 Binary files /dev/null and b/refTools/evaluation/rouge/__pycache__/rouge.cpython-36.pyc differ diff --git a/refTools/evaluation/rouge/__pycache__/rouge.cpython-38.pyc b/refTools/evaluation/rouge/__pycache__/rouge.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..2085f581f95d4cf9e2412a67a222adb6a1aa9597
Binary files /dev/null and b/refTools/evaluation/rouge/__pycache__/rouge.cpython-38.pyc differ
diff --git a/refTools/evaluation/rouge/rouge.py b/refTools/evaluation/rouge/rouge.py
new file mode 100644
index 0000000000000000000000000000000000000000..3a10f5a50371328d397dcb53c7c9d81eac9472fa
--- /dev/null
+++ b/refTools/evaluation/rouge/rouge.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+#
+# File Name : rouge.py
+#
+# Description : Computes ROUGE-L metric as described by Lin and Hovy (2004)
+#
+# Creation Date : 2015-01-07 06:03
+# Author : Ramakrishna Vedantam
+
+import numpy as np
+import pdb
+
+def my_lcs(string, sub):
+    """
+    Calculates longest common subsequence for a pair of tokenized strings
+    :param string : list of str : tokens from a string split using whitespace
+    :param sub : list of str : shorter string, also split using whitespace
+    :returns: length (int): length of the longest common subsequence between the two strings
+
+    Note: my_lcs only gives length of the longest common subsequence, not the actual LCS
+    """
+    if(len(string)< len(sub)):
+        sub, string = string, sub
+
+    lengths = [[0 for i in range(0,len(sub)+1)] for j in range(0,len(string)+1)]
+
+    for j in range(1,len(sub)+1):
+        for i in range(1,len(string)+1):
+            if(string[i-1] == sub[j-1]):
+                lengths[i][j] = lengths[i-1][j-1] + 1
+            else:
+                lengths[i][j] = max(lengths[i-1][j] , lengths[i][j-1])
+
+    return lengths[len(string)][len(sub)]
+
+class Rouge():
+    '''
+    Class for computing ROUGE-L score for a set of candidate sentences for the MS COCO test set
+
+    '''
+    def __init__(self):
+        # vrama91: updated the value below based on discussion with Hovy
+        self.beta = 1.2
+
+    def calc_score(self, candidate, refs):
+        """
+        Compute ROUGE-L score given one candidate and references for an image
+        :param candidate: str : candidate sentence to be evaluated
+        :param refs: list of str : COCO reference sentences for the particular image to be evaluated
+        :returns score: float (ROUGE-L score for the candidate evaluated against references)
+        """
+        assert(len(candidate)==1)
+        assert(len(refs)>0)
+        prec = []
+        rec = []
+
+        # split into tokens
+        token_c = candidate[0].split(" ")
+
+        for reference in refs:
+            # split into tokens
+            token_r = reference.split(" ")
+            # compute the longest common subsequence
+            lcs = my_lcs(token_r, token_c)
+            prec.append(lcs/float(len(token_c)))
+            rec.append(lcs/float(len(token_r)))
+
+        prec_max = max(prec)
+        rec_max = max(rec)
+
+        if(prec_max!=0 and rec_max !=0):
+            score = ((1 + self.beta**2)*prec_max*rec_max)/float(rec_max + self.beta**2*prec_max)
+        else:
+            score = 0.0
+        return score
+
+    def compute_score(self, gts, res):
+        """
+        Computes Rouge-L score given a set of reference and candidate sentences for the dataset
+        Invoked by evaluate_captions.py
+        :param hypo_for_image: dict : candidate / test sentences with "image name" key and "tokenized sentences" as values
+        :param ref_for_image: dict : reference MS-COCO sentences with "image name" key and "tokenized sentences" as values
+        :returns: average_score: float (mean ROUGE-L score computed by averaging scores for all the images)
+        """
+        assert(gts.keys() == res.keys())
+        imgIds = gts.keys()
+
+        score = []
+        for id in imgIds:
+            hypo = res[id]
+            ref = gts[id]
+
+            score.append(self.calc_score(hypo, ref))
+
+            # Sanity check.
+ assert(type(hypo) is list) + assert(len(hypo) == 1) + assert(type(ref) is list) + assert(len(ref) > 0) + + average_score = np.mean(np.array(score)) + return average_score, np.array(score) + + def method(self): + return "Rouge" diff --git a/refTools/evaluation/rouge/rouge.pyc b/refTools/evaluation/rouge/rouge.pyc new file mode 100644 index 0000000000000000000000000000000000000000..94d0c1c3c3c08dc88603875b741a60914436a460 Binary files /dev/null and b/refTools/evaluation/rouge/rouge.pyc differ diff --git a/refTools/evaluation/tokenizer/__init__.py b/refTools/evaluation/tokenizer/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..71357a4bff7219ddcf7f7321cfeb4484bd8bee08 --- /dev/null +++ b/refTools/evaluation/tokenizer/__init__.py @@ -0,0 +1 @@ +__author__ = 'hfang' diff --git a/refTools/evaluation/tokenizer/__init__.pyc b/refTools/evaluation/tokenizer/__init__.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eddf6e6dfc8644210aa3bae9f13a1f5bce247287 Binary files /dev/null and b/refTools/evaluation/tokenizer/__init__.pyc differ diff --git a/refTools/evaluation/tokenizer/__pycache__/__init__.cpython-36.pyc b/refTools/evaluation/tokenizer/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9866bef9f12c8013a56d92c0787ff173e4e8fba5 Binary files /dev/null and b/refTools/evaluation/tokenizer/__pycache__/__init__.cpython-36.pyc differ diff --git a/refTools/evaluation/tokenizer/__pycache__/__init__.cpython-38.pyc b/refTools/evaluation/tokenizer/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..feba34425e921a2f6e908348a0c6e6f431716908 Binary files /dev/null and b/refTools/evaluation/tokenizer/__pycache__/__init__.cpython-38.pyc differ diff --git a/refTools/evaluation/tokenizer/__pycache__/ptbtokenizer.cpython-36.pyc b/refTools/evaluation/tokenizer/__pycache__/ptbtokenizer.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6cd5ef725bfeb81dad2316da39330466f54ac3ad Binary files /dev/null and b/refTools/evaluation/tokenizer/__pycache__/ptbtokenizer.cpython-36.pyc differ diff --git a/refTools/evaluation/tokenizer/__pycache__/ptbtokenizer.cpython-38.pyc b/refTools/evaluation/tokenizer/__pycache__/ptbtokenizer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f8179527da824baad134e88bdcd7932c12c89cfe Binary files /dev/null and b/refTools/evaluation/tokenizer/__pycache__/ptbtokenizer.cpython-38.pyc differ diff --git a/refTools/evaluation/tokenizer/ptbtokenizer.py b/refTools/evaluation/tokenizer/ptbtokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..52176fc18f3c13c10d390eb7c9550801247cbb20 --- /dev/null +++ b/refTools/evaluation/tokenizer/ptbtokenizer.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# +# File Name : ptbtokenizer.py +# +# Description : Do the PTB Tokenization and remove punctuations. 
+#
+# Creation Date : 29-12-2014
+# Last Modified : Thu Mar 19 09:53:35 2015
+# Authors : Hao Fang and Tsung-Yi Lin
+
+import os
+import sys
+import subprocess
+import tempfile
+import itertools
+
+# path to the stanford corenlp jar
+STANFORD_CORENLP_3_4_1_JAR = 'stanford-corenlp-3.4.1.jar'
+
+# punctuations to be removed from the sentences
+PUNCTUATIONS = ["''", "'", "``", "`", "-LRB-", "-RRB-", "-LCB-", "-RCB-", \
+        ".", "?", "!", ",", ":", "-", "--", "...", ";"]
+
+class PTBTokenizer:
+    """Python wrapper of Stanford PTBTokenizer"""
+
+    def tokenize(self, captions_for_image):
+        cmd = ['java', '-cp', STANFORD_CORENLP_3_4_1_JAR, \
+                'edu.stanford.nlp.process.PTBTokenizer', \
+                '-preserveLines', '-lowerCase']
+
+        # ======================================================
+        # prepare data for PTB Tokenizer
+        # ======================================================
+        final_tokenized_captions_for_image = {}
+        image_id = [k for k, v in captions_for_image.items() for _ in range(len(v))]
+        sentences = '\n'.join([c.replace('\n', ' ') for k, v in captions_for_image.items() for c in v])
+
+        # ======================================================
+        # save sentences to temporary file
+        # ======================================================
+        path_to_jar_dirname=os.path.dirname(os.path.abspath(__file__))
+        tmp_file = tempfile.NamedTemporaryFile(delete=False, dir=path_to_jar_dirname)
+        tmp_file.write(sentences.encode())
+        tmp_file.close()
+
+        # ======================================================
+        # tokenize sentence
+        # ======================================================
+        cmd.append(os.path.basename(tmp_file.name))
+        p_tokenizer = subprocess.Popen(cmd, cwd=path_to_jar_dirname, \
+                stdout=subprocess.PIPE)
+        token_lines = p_tokenizer.communicate(input=sentences.rstrip())[0]
+        token_lines = token_lines.decode()
+        lines = token_lines.split('\n')
+        # remove temp file
+        os.remove(tmp_file.name)
+
+        # ======================================================
+        # create dictionary for tokenized captions
+        # ======================================================
+        for k, line in zip(image_id, lines):
+            if not k in final_tokenized_captions_for_image:
+                final_tokenized_captions_for_image[k] = []
+            tokenized_caption = ' '.join([w for w in line.rstrip().split(' ') \
+                    if w not in PUNCTUATIONS])
+            final_tokenized_captions_for_image[k].append(tokenized_caption)
+
+        return final_tokenized_captions_for_image
diff --git a/refTools/evaluation/tokenizer/ptbtokenizer.pyc b/refTools/evaluation/tokenizer/ptbtokenizer.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..62077b9604d79a38ea495d2f94c5b414adce1dbc
Binary files /dev/null and b/refTools/evaluation/tokenizer/ptbtokenizer.pyc differ
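PTBTokenizer above shells out to the bundled Stanford CoreNLP jar, lowercasing each caption and stripping the PUNCTUATIONS list. A minimal usage sketch, not part of the diff (it assumes `java` is on the PATH; the caption dict and the expected output are illustrative):

```python
from refTools.evaluation.tokenizer.ptbtokenizer import PTBTokenizer

# illustrative captions keyed by image id
captions = {
    'img_1': ["A man riding a horse."],
    'img_2': ["Two dogs play in the snow!"],
}

tokenizer = PTBTokenizer()
print(tokenizer.tokenize(captions))
# expected shape: {'img_1': ['a man riding a horse'],
#                  'img_2': ['two dogs play in the snow']}
```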
and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +a a bench bench bench bench bench bench bench bench +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the cattle cattle cattle cattle cattle cattle cattle cattle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the garden garden garden garden garden garden garden garden +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +the the the event event event event event event event +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the truck truck truck truck truck truck truck +the the the farm farm farm farm farm farm farm +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +of of of people people people people people people people +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the dog dog dog dog dog dog dog dog +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the flag flag flag flag flag flag flag +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the boat boat boat boat boat boat boat +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the grill grill grill grill grill grill grill grill +and and and and and and and and and and +and and and and and and and and and and 
+and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +of of of of of of of of of of +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +the the the birds birds birds birds birds birds birds +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +the the the garden garden garden garden garden garden garden +and and and and and and and and and and +and and and and and and and and and and +the the the tomatoes tomatoes tomatoes tomatoes tomatoes tomatoes tomatoes +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the game game game game game game game +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the grill grill grill grill grill grill grill +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and 
and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +the the clock clock clock clock clock clock clock clock +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pasture pasture pasture pasture pasture pasture pasture pasture +the the the temple temple temple temple temple temple temple +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the bed bed bed bed bed bed bed +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the summit summit summit summit summit summit summit summit +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the farm farm farm farm farm farm farm +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the clock 
clock clock clock clock clock clock clock +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the city city city city city city city city +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of bananas bananas bananas bananas bananas bananas bananas bananas +and and and and and and and and and and +and and and and and and and and and and +the the clock clock clock clock clock clock clock clock +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the office office office office office office office office +of of course course course course course course course course +and and and and and and and and and and +the the the field field field field field field field +and and and and and and and and and and +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and white white white white white white white white +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the world world world world world world world +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and 
and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the city city city city city city city +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the truck truck truck truck truck truck truck +and and and and and and and and and and +the the the shop shop shop shop shop shop shop +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bathroom bathroom bathroom bathroom bathroom bathroom bathroom bathroom +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and 
and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +a a pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +of of sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of buses buses buses buses buses buses buses buses +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +the the grill grill grill grill grill grill grill grill +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the table table table table table table table +the the glass glass glass glass glass glass glass glass +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +the the clock clock clock clock clock clock clock clock +the the pizza pizza 
pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +the the the sky sky sky sky sky sky sky +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +the the the zoo zoo zoo zoo zoo zoo zoo +the the the laptop laptop laptop laptop laptop laptop laptop +the the the truck truck truck truck truck truck truck +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +the the the street street street street street street street +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of course course course course course course course +and and and and and and and and and and +the the dog dog dog dog dog dog dog dog +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +the the sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the crowd crowd crowd crowd crowd crowd crowd crowd +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the crowd crowd crowd crowd crowd crowd crowd crowd +the the sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +the the the cake cake cake cake cake cake cake +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle 
motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +the the the sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +of of of of of of of of of of +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +the the the movie movie movie movie movie movie movie +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +the the the pizza pizza pizza pizza pizza pizza pizza +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +the the the trailer trailer trailer trailer trailer trailer trailer +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +the the street street street street street street street street +the the the farm farm farm farm farm farm farm +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the park park park park park park park +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the field field field field field field field +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and 
and and and and and +the the bathroom bathroom bathroom bathroom bathroom bathroom bathroom bathroom +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the animals animals animals animals animals animals animals +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and person person person person person person person person +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the subway subway subway subway subway subway subway subway +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +and and and and and and and and and and +the the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +the the table table table table table table table table +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and 
and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +of of of of of of of of of of +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and groom groom groom groomedededed +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the cattle cattle cattle cattle cattle cattle cattle cattle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the tree tree tree tree tree tree tree +in in in in in in in in in in +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +the the wedding wedding wedding wedding wedding wedding wedding wedding +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the team team team team team team team +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the garden garden garden garden garden garden garden +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the street street street street street street street street +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and 
and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the lake lake lake lake lake lake lake lake +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +the the the team team team team team team team +and and and and and and and and and and +and and and and and and and and and and +the the the meeting meeting meeting meeting meeting meeting meeting +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the calculator calculator calculator calculator calculator calculator calculator +and and and and and and and and and and +the the fountain fountain fountain fountain fountain fountain fountain fountain +the the the party party party party party party party +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the sunset sunset sunset sunset sunset sunset sunset sunset +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +the the fire fire fire fire fire fire fire fire +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and 
and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the street street street street street street street street +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the festival festival festival festival festival festival festival festival +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +a a bench bench bench bench bench bench bench bench +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +the the cattle cattle cattle cattle cattle cattle cattle cattle +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +the the garden garden garden garden garden garden garden garden +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bathroom bathroom bathroom bathroom bathroom bathroom bathroom bathroom +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and 
and and and and and and and and +of of sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the truck truck truck truck truck truck truck +the the the farm farm farm farm farm farm farm +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of people people people people people people people +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the flag flag flag flag flag flag flag +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of course course course course course course course +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the table table table table table table table +and and and and and and and and and and +and and chairs chairs chairs chairs chairs chairs chairs chairs +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +the the tree tree tree tree tree tree tree tree +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +the the the birds birds birds birds birds birds birds +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and 
and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the tomatoes tomatoes tomatoes tomatoes tomatoes tomatoes tomatoes +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the game game game game game game game +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the grill grill grill grill grill grill grill +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the clock clock clock clock clock clock clock clock +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bathroom bathroom bathroom bathroom bathroom bathroom bathroom bathroom +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and 
[The remainder of this hunk adds a plain-text file of model-generated captions consisting almost entirely of degenerate repeated tokens (e.g. "and and and ...", "the the zoo zoo ...", "of of course course ..."); no further content is recoverable from this span.]
and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the city city city city city city city city +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of flowers flowers flowers flowers flowers flowers flowers flowers +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +of of sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the truck truck truck truck truck truck truck +the the the farm farm farm farm farm farm farm +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the flag flag flag flag flag flag flag +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the table table table table table table table +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the bedroom bedroom bedroom bedroom bedroom bedroom bedroom +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +in in in in in ski ski ski ski ski +and and and and and and and and 
and and +and and and and and and and and and and +and and and and and and and and and and +the the the bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the tomatoes tomatoes tomatoes tomatoes tomatoes tomatoes tomatoes +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the game game game game game game game +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the gym gym gym gym gym gym gym +and and and and and and and and and and +and and and and and and and and and and +the the the cake cake cake cake cake cake cake +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of birds birds birds birds birds birds birds birds +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +the the the bedroom bedroom bedroom bedroom bedroom bedroom bedroom +and and and and and and and and and and +and and and and and and and and and and +the the the sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and 
+and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the forest forest forest forest forest forest forest +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the boat boat boat boat boat boat boat boat +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the wedding wedding wedding wedding wedding wedding wedding wedding +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the ship ship ship ship ship ship ship +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and carriage carriage carriage carriage carriage carriage carriage carriage +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of cattle cattle cattle cattle cattle cattle cattle cattle +and and and and and and and and and and +and and and and and and and and and and +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +of of glasses glasses glasses glasses glasses glasses glasses glasses +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and 
and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the grill grill grill grill grill grill grill grill +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bus bus bus bus bus bus bus bus +the the the team team team team team team team +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the garden garden garden garden garden garden garden +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and person person person person person person person person +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the protest protest protest protest protest protest protest protest +and and and and and and and and and and +and and and and and and and and and and +the the the cake cake cake cake cake cake cake +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the car car car car car car car +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +of of course course course course course course course course +of of sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +the the the beach beach beach beach beach beach beach +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and 
and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +the the field field field field field field field field +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +the the the wild wild wild wild wild wild wild +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of people people people people people people people +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +a a aaaaaaaa +the the dog dog dog dog dog dog dog dog +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the truck truck truck truck truck truck truck +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the sign sign sign sign sign sign sign sign +and and and and 
and and and and and and +the the snow snow snow snow snow snow snow snow +and and and and and and and and and and +the the the event event event event event event event +the the sheep sheep sheep sheep sheep sheep sheep sheep +the the the river river river river river river river +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the wedding wedding wedding wedding wedding wedding wedding +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +the the grill grill grill grill grill grill grill grill +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the city city city city city city city +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +of of wine wine wine wine wine wine wine wine +in in in in in in in in in in +the the the pub pub pub pub pub pub pub +a a bench bench bench bench bench bench bench bench +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the oven oven oven oven oven oven oven oven +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the bedroom bedroom bedroom bedroom bedroom bedroom bedroom +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and 
and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the dog dog dog dog dog dog dog +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +the the market market market market market market market market +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the crowd crowd crowd crowd crowd crowd crowd crowd +and and and and and and and and and and +the the the trailer trailer trailer trailer trailer trailer trailer +the the the zoo zoo zoo zoo zoo zoo zoo +the the crowd crowd crowd crowd crowd crowd crowd crowd +and and and and and and and and and and +and and and and and and and and and and +the the table table table table table table table table +and and and and and and and and and and +the the the park park park park park park park +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +of of of of of of of of of of +and and and and and and and and and and +and and and and and and and and and and +the the summit summit summit summit summit summit summit summit +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and 
and and and and and and and +and and and and and and and and and and +the the train train train train train train train train +the the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in love love love love love love love love +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bears bears bears bears bears bears bears bears +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and 
and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +a a carriage carriage carriage carriage carriage carriage carriage carriage +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the street street street street street street street street +and and and and and and and and and and +and and person person person person person person person person +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and actor actor actor actor actor actor actor actor +and and and and and and and and and and +the the the car car car car car car car +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the grill grill grill grill grill grill grill grill +and and and and and and and and and and +in in in in in in in in in in +the the sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the music music music music music music music +the the street street street street street street street street +and and and and and and and and and and +and and and and and and and and and and +the the show show show show show show show show +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +the the market market market market market market market market +and and and and and and and and and and +the the the truck truck 
truck truck truck truck truck +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the ranch ranch ranch ranch ranch ranch ranch +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +the the the cake cake cake cake cake cake cake +and and and and and and and and and and +the the table table table table table table table table +the the the summit summit summit summit summit summit summit +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +the the sign sign sign sign sign sign sign sign +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +the the clock clock clock clock clock clock clock clock +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the canal canal canal canal canal canal canal +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and 
and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +the the the team team team team team team team +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +the the the party party party party party party party +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the garden garden garden garden garden garden garden +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and 
and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the cake cake cake cake cake cake cake +and and and and and and and and and and +of of course course course course course course course course +and and chairs chairs chairs chairs chairs chairs chairs chairs +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the bus bus bus bus bus bus bus bus +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +of of sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and person person person person person person person person +and and and and and and and and and and +the the the baby baby baby baby baby baby baby +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the city city city city city city city city +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the event event event event event event event +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and person person person person person person person +and and and and and and and and and and +and and and and and and and and and 
and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +a a boat boat boat boat boat boat boat boat +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the flowers flowers flowers flowers flowers flowers flowers flowers +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the bedroom bedroom bedroom bedroom bedroom bedroom bedroom +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in ski ski ski ski ski +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +the the the bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the tomatoes tomatoes tomatoes tomatoes tomatoes tomatoes tomatoes +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and 
+and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +the the oven oven oven oven oven oven oven oven +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +the the bus bus bus bus bus bus bus bus +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the deck deck deck deck deck deck deck deck +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and person person person person person person person person +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +the the the event event event event event event event +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and person person person person person person person person +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the street street street street street street street street +of of of people people people people people people people +and and and person person 
person person person person person +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the dog dog dog dog dog dog dog dog +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the flag flag flag flag flag flag flag +the the flowers flowers flowers flowers flowers flowers flowers flowers +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +of of of of of of of of of of +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the table table table table table table table table +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the show show show show show show show +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and 
and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the oven oven oven oven oven oven oven oven +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the grill grill grill grill grill grill grill +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the laptop laptop laptop laptop laptop laptop laptop laptop +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the bed bed bed bed bed bed bed +and and and and and and and and and and +and and and and and and and and and and +the the the game game game game game game game +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the 
the the ship ship ship ship ship ship ship +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the forest forest forest forest forest forest forest +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the park park park park park park park +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +the the sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and helmet helmet helmet helmet helmet helmet helmet helmet +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the team team team team team team team +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and 
and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +the the the event event event event event event event +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the horse horse horse horse horse horse and and +and and and and and and and and and and +and and and and and and and and and and +the the table table table table table table table table +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the cake cake cake cake cake cake cake cake +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +of of sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the road road road road road road road +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +of of cattle cattle cattle cattle cattle cattle cattle cattle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the meeting meeting meeting meeting meeting meeting meeting meeting +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and 
and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the shop shop shop shop shop shop shop +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the wild wild wild wild wild wild wild wild +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and 
and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the table table table table table table table +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of flowers flowers flowers flowers flowers flowers flowers flowers +of of course course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of course course course course course course course +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of course course course course course course course +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +the the the wedding wedding wedding wedding wedding wedding wedding +and and and and 
and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the crowd crowd crowd crowd crowd crowd crowd crowd +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the table table table table table table table table +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +the the the oven oven oven oven oven oven oven +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the tree tree tree tree tree tree tree tree +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the pizza pizza pizza pizza pizza pizza pizza pizza +the the beach beach beach beach beach beach beach beach +the the the game game game game game game game +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +the the the trailer trailer trailer trailer trailer trailer trailer +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the fruit fruit fruit fruit fruit fruit fruit +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +the the the farm farm farm farm farm farm farm +and and and and and and and and and and +of of course course course course course course course course +the the the snow snow snow snow snow snow snow +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and 
and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the shop shop shop shop shop shop shop +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of buses buses buses buses buses buses buses buses +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the farm farm farm farm farm farm farm +the the classroom classroom classroom classroom classroom classroom classroom classroom +and and and and and and and and and and +and and and and and and and and and and +the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the cake cake cake cake cake cake cake +in in in in in in in in in in +the the sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +the the the clock clock clock clock clock clock clock +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and 
and and and +and and and and and and and and and and +and and and and and and and and and and +and and and person person person person person person person +the the zoo zoo zoo zoo zoo zoo zoo zoo +of of sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the grill grill grill grill grill grill grill grill +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of of of of of of of of of +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the event event event event event event event +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and chairs chairs chairs chairs chairs chairs chairs chairs +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the rain rain rain rain rain rain rain rain +and and and and and and and and and and +the the cattle cattle cattle cattle cattle cattle cattle cattle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and person person person person person person person person +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +the the the game game game game game game game +the the pizza pizza pizza pizza pizza pizza pizza pizza +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +the the the garden garden garden garden garden garden garden +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the wedding wedding wedding wedding wedding wedding wedding wedding +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and 
and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the window window window window window window window window +the the meeting meeting meeting meeting meeting meeting meeting meeting +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the snow snow snow snow snow snow snow snow +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and person person person person person person person +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the beach beach beach beach beach beach beach beach +and and and and and and and and and and +and and and and and and and and and and +the the the calculator calculator calculator calculator calculator calculator calculator +and and and and and and and and and and +the the 
the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the clock clock clock clock clock clock clock clock +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +of of cattle cattle cattle cattle cattle cattle cattle cattle +the the crowd crowd crowd crowd crowd crowd crowd crowd +the the kitchen kitchen kitchen kitchen kitchen kitchen kitchen kitchen +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the sheep sheep sheep sheep sheep sheep sheep sheep +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +the the oven oven oven oven oven oven oven oven +the the the motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle motorcycle +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the deck deck deck deck deck deck deck 
deck +the the zoo zoo zoo zoo zoo zoo zoo zoo +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +of of course course course course course course course course +the the the event event event event event event event +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and person person person person person person person person +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +in in in in in in in in in in +and and and and and and and and and and +the the street street street street street street street street +of of of people people people people people people people +and and and person person person person person person person +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the dog dog dog dog dog dog dog dog +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the flag flag flag flag flag flag flag +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the the boat boat boat boat boat boat boat +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +the the grill grill grill grill grill grill grill grill +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and and and and and and and and +and and and 
[degenerate repeated-token caption output omitted] \ No newline at end of file diff --git a/refTools/evaluation/tokenizer/tmp82iqkuu0 b/refTools/evaluation/tokenizer/tmp82iqkuu0 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/refTools/evaluation/tokenizer/tmpn19wmqte b/refTools/evaluation/tokenizer/tmpn19wmqte new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/refTools/refer_python3.py b/refTools/refer_python3.py new file mode 100644 index 0000000000000000000000000000000000000000..0f4f816ee55c14a66352d56d563e08b9270ab383 --- /dev/null +++ b/refTools/refer_python3.py @@ -0,0 +1,252 @@ +__author__ = 'licheng' + +""" +This interface provides access to four datasets: +1) refclef +2) refcoco +3) refcoco+ +4) refcocog +split by unc and google + +The following API functions are defined: +REFER - REFER api class +getRefIds - get ref ids that satisfy given filter conditions. +getAnnIds - get ann ids that satisfy given filter conditions. +getImgIds - get image ids that satisfy given filter conditions. +getCatIds - get category ids that satisfy given filter conditions. +loadRefs - load refs with the specified ref ids. +loadAnns - load anns with the specified ann ids. +loadImgs - load images with the specified image ids. +loadCats - load category names with the specified category ids. +getRefBox - get ref's bounding box [x, y, w, h] given the ref_id +""" + +import sys +import os.path as osp +import json +import _pickle as pickle +import time +import itertools +import skimage.io as io +import matplotlib.pyplot as plt +from matplotlib.collections import PatchCollection +from matplotlib.patches import Polygon, Rectangle +from pprint import pprint +import numpy as np +# import cv2 +# from skimage.measure import label, regionprops + +class REFER: + + def __init__(self, data_root, dataset='refcoco', splitBy='unc'): + # provide data_root folder which contains refclef, refcoco, refcoco+ and refcocog + # also provide dataset name and splitBy information + # e.g., dataset = 'refcoco', splitBy = 'unc' + print('loading dataset %s into memory...'
% dataset) + self.ROOT_DIR = osp.abspath(osp.dirname(__file__)) + self.DATA_DIR = osp.join(data_root, dataset) + if dataset in ['refcoco', 'refcoco+', 'refcocog']: + self.IMAGE_DIR = osp.join(data_root, 'images/mscoco/images/train2014') + elif dataset == 'refclef': + self.IMAGE_DIR = osp.join(data_root, 'images/saiapr_tc-12') + else: + print('No refer dataset is called [%s]' % dataset) + sys.exit() + + # load refs from data/dataset/refs(dataset).json + tic = time.time() + ref_file = osp.join(self.DATA_DIR, 'refs('+splitBy+').p') + self.data = {} + self.data['dataset'] = dataset + self.data['refs'] = pickle.load(open(ref_file, 'rb')) + + # load annotations from data/dataset/instances.json + instances_file = osp.join(self.DATA_DIR, 'instances.json') + instances = json.load(open(instances_file, 'r')) + self.data['images'] = instances['images'] + self.data['annotations'] = instances['annotations'] + self.data['categories'] = instances['categories'] + + # create index + self.createIndex() + print('DONE (t=%.2fs)' % (time.time()-tic)) + + def createIndex(self): + # create sets of mapping + # 1) Refs: {ref_id: ref} + # 2) Anns: {ann_id: ann} + # 3) Imgs: {image_id: image} + # 4) Cats: {category_id: category_name} + # 5) Sents: {sent_id: sent} + # 6) imgToRefs: {image_id: refs} + # 7) imgToAnns: {image_id: anns} + # 8) refToAnn: {ref_id: ann} + # 9) annToRef: {ann_id: ref} + # 10) catToRefs: {category_id: refs} + # 11) sentToRef: {sent_id: ref} + # 12) sentToTokens: {sent_id: tokens} + print('creating index...') + # fetch info from instances + Anns, Imgs, Cats, imgToAnns = {}, {}, {}, {} + for ann in self.data['annotations']: + Anns[ann['id']] = ann + imgToAnns[ann['image_id']] = imgToAnns.get(ann['image_id'], []) + [ann] + for img in self.data['images']: + Imgs[img['id']] = img + for cat in self.data['categories']: + Cats[cat['id']] = cat['name'] + + # fetch info from refs + Refs, imgToRefs, refToAnn, annToRef, catToRefs = {}, {}, {}, {}, {} + Sents, sentToRef, sentToTokens = {}, {}, {} + for ref in self.data['refs']: + # ids + ref_id = ref['ref_id'] + ann_id = ref['ann_id'] + category_id = ref['category_id'] + image_id = ref['image_id'] + + # add mapping related to ref + Refs[ref_id] = ref + imgToRefs[image_id] = imgToRefs.get(image_id, []) + [ref] + catToRefs[category_id] = catToRefs.get(category_id, []) + [ref] + refToAnn[ref_id] = Anns[ann_id] + annToRef[ann_id] = ref + + # add mapping of sent + for sent in ref['sentences']: + Sents[sent['sent_id']] = sent + sentToRef[sent['sent_id']] = ref + sentToTokens[sent['sent_id']] = sent['tokens'] + + # create class members + self.Refs = Refs + self.Anns = Anns + self.Imgs = Imgs + self.Cats = Cats + self.Sents = Sents + self.imgToRefs = imgToRefs + self.imgToAnns = imgToAnns + self.refToAnn = refToAnn + self.annToRef = annToRef + self.catToRefs = catToRefs + self.sentToRef = sentToRef + self.sentToTokens = sentToTokens + print('index created.') + + def getRefIds(self, image_ids=[], cat_ids=[], ref_ids=[], split=''): + image_ids = image_ids if type(image_ids) == list else [image_ids] + cat_ids = cat_ids if type(cat_ids) == list else [cat_ids] + ref_ids = ref_ids if type(ref_ids) == list else [ref_ids] + + if len(image_ids)==len(cat_ids)==len(ref_ids)==len(split)==0: + refs = self.data['refs'] + else: + if not len(image_ids) == 0: + refs = [self.imgToRefs[image_id] for image_id in image_ids] + else: + refs = self.data['refs'] + if not len(cat_ids) == 0: + refs = [ref for ref in refs if ref['category_id'] in cat_ids] + if not len(ref_ids) == 0: + refs = 
[ref for ref in refs if ref['ref_id'] in ref_ids] + if not len(split) == 0: + if split in ['testA', 'testB', 'testC']: + refs = [ref for ref in refs if split[-1] in ref['split']] # we also consider testAB, testBC, ... + elif split in ['testAB', 'testBC', 'testAC']: + refs = [ref for ref in refs if ref['split'] == split] # rarely used I guess... + elif split == 'test': + refs = [ref for ref in refs if 'test' in ref['split']] + elif split == 'train' or split == 'val': + refs = [ref for ref in refs if ref['split'] == split] + else: + print('No such split [%s]' % split) + sys.exit() + ref_ids = [ref['ref_id'] for ref in refs] + return ref_ids + + def getAnnIds(self, image_ids=[], cat_ids=[], ref_ids=[]): + image_ids = image_ids if type(image_ids) == list else [image_ids] + cat_ids = cat_ids if type(cat_ids) == list else [cat_ids] + ref_ids = ref_ids if type(ref_ids) == list else [ref_ids] + + if len(image_ids) == len(cat_ids) == len(ref_ids) == 0: + ann_ids = [ann['id'] for ann in self.data['annotations']] + else: + if not len(image_ids) == 0: + lists = [self.imgToAnns[image_id] for image_id in image_ids if image_id in self.imgToAnns] # list of [anns] + anns = list(itertools.chain.from_iterable(lists)) + else: + anns = self.data['annotations'] + if not len(cat_ids) == 0: + anns = [ann for ann in anns if ann['category_id'] in cat_ids] + ann_ids = [ann['id'] for ann in anns] + if not len(ref_ids) == 0: + # keep only anns referred by the given refs (the original computed this intersection but discarded it) + ann_ids = list(set(ann_ids).intersection(set([self.Refs[ref_id]['ann_id'] for ref_id in ref_ids]))) + return ann_ids + + def getImgIds(self, ref_ids=[]): + ref_ids = ref_ids if type(ref_ids) == list else [ref_ids] + + if not len(ref_ids) == 0: + image_ids = list(set([self.Refs[ref_id]['image_id'] for ref_id in ref_ids])) + else: + image_ids = list(self.Imgs.keys()) # list() so Python 3 callers get a real list, not a view + return image_ids + + def getCatIds(self): + return list(self.Cats.keys()) + + def loadRefs(self, ref_ids=[]): + if type(ref_ids) == list: + return [self.Refs[ref_id] for ref_id in ref_ids] + elif type(ref_ids) == int: + return [self.Refs[ref_ids]] + + def loadAnns(self, ann_ids=[]): + if type(ann_ids) == list: + return [self.Anns[ann_id] for ann_id in ann_ids] + elif type(ann_ids) == int or type(ann_ids) == str: # Python 3: str replaces the old unicode type + return [self.Anns[ann_ids]] + + def loadImgs(self, image_ids=[]): + if type(image_ids) == list: + return [self.Imgs[image_id] for image_id in image_ids] + elif type(image_ids) == int: + return [self.Imgs[image_ids]] + + def loadCats(self, cat_ids=[]): + if type(cat_ids) == list: + return [self.Cats[cat_id] for cat_id in cat_ids] + elif type(cat_ids) == int: + return [self.Cats[cat_ids]] + + def getRefBox(self, ref_id): + ref = self.Refs[ref_id] + ann = self.refToAnn[ref_id] + return ann['bbox'] # [x, y, w, h] + + + +if __name__ == '__main__': + refer = REFER('data', dataset='refcocog', splitBy='google') # NOTE: 'data' is a placeholder data_root; the original call omitted this required argument + ref_ids = refer.getRefIds() + print(len(ref_ids)) + + print(len(refer.Imgs)) + print(len(refer.imgToRefs)) + + ref_ids = refer.getRefIds(split='train') + print('There are %s training referred objects.' % len(ref_ids)) + + for ref_id in ref_ids: + ref = refer.loadRefs(ref_id)[0] + if len(ref['sentences']) < 2: + continue + + pprint(ref) + print('The label is %s.'
% refer.Cats[ref['category_id']]) + # plt.figure() + # refer.showRef(ref, seg_box='box') # NOTE: showRef is not defined in this port, so the drawing demo is disabled + # plt.show() + diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..99718e104d9d882d6cea8b9ef052882d2cca65a1 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,38 @@ +# accelerate==0.11.0 +apex==0.9.10.dev0 +av==10.0.0 +decord==0.6.0 +deepspeed==0.8.2 +dotmap==1.3.30 +einops==0.6.0 +fairscale==0.4.13 +ftfy==6.1.1 +h5py==3.7.0 +matplotlib==3.5.3 +numpy==1.23.1 +opencv_python==4.6.0.66 +Pillow==9.4.0 +python_hostlist==1.23.0 +regex==2022.7.9 +scikit_image==0.19.3 +scikit_learn==1.2.2 +scipy==1.9.1 +sentencepiece==0.1.97 +setuptools==59.6.0 +# skimage==0.0 # broken pin: the "skimage" PyPI package is a stub; scikit_image above provides it +slowfast==1.0 +submitit==1.4.5 +tensorflow==2.12.0 +timm==0.6.12 +torch +torchaudio +torchvision +transformers +# torch==1.13.1+cu116 +# torchaudio==0.13.1+cu116 +torchlibrosa==0.1.0 +torchtyping==0.1.4 +# torchvision==0.14.1+cu116 +tqdm==4.64.0 +# transformers==4.24.0 +ruamel.yaml \ No newline at end of file diff --git a/run_scripts/accelerate_training/audio/ePALM_audio_caption.sh b/run_scripts/accelerate_training/audio/ePALM_audio_caption.sh new file mode 100644 index 0000000000000000000000000000000000000000..cf673c8bc841148a4cedd373661c2483f4a84bfe --- /dev/null +++ b/run_scripts/accelerate_training/audio/ePALM_audio_caption.sh @@ -0,0 +1,17 @@ + +# visopt_audio_audiocaps_caption_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_ast_bs8_longt_lessmask_opt_2_7b +config=./configs/audio/ePALM_audio_caption.yaml +data_dir=data/audiocaps +output_dir=logs/epalm/ePALM_audio_caption + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=4 --num_machines=1 accelerate_training/audio_caption.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ +--low_cpu + +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_audio_caption/checkpoint_best.pth + + diff --git a/run_scripts/accelerate_training/image/ePALM_caption.sh b/run_scripts/accelerate_training/image/ePALM_caption.sh new file mode 100644 index 0000000000000000000000000000000000000000..e22e7331be9c786ebb52f00876841df356faed20 --- /dev/null +++ b/run_scripts/accelerate_training/image/ePALM_caption.sh @@ -0,0 +1,17 @@ +# visopt_caption_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_opt_2_7b + +config=./configs/image/ePALM_caption.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_caption + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=4 --num_machines=1 accelerate_training/caption.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ +--low_cpu +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_caption/checkpoint_best.pth +# --resume --checkpoint $WORK/logs/epalm/ePALM_caption/checkpoint_last.pth + + diff --git a/run_scripts/accelerate_training/image/ePALM_gqa.sh b/run_scripts/accelerate_training/image/ePALM_gqa.sh new file mode 100644 index 0000000000000000000000000000000000000000..c7b3fbe518e313371474b80aa97b77780cb0cca4 --- /dev/null +++ b/run_scripts/accelerate_training/image/ePALM_gqa.sh @@ -0,0 +1,17 @@ +# visopt_gqa_vlt5_stok_6clst_l19_31_reptok_nocache_ptl10_ptlr1e5fixed_opt_2_7b +config=./configs/image/ePALM_gqa.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_gqa + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=4 --num_machines=1 accelerate_training/gqa.py \ +--config $config \
+--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ +--low_cpu +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_gqa/checkpoint_best.pth + + +# --resume --checkpoint $WORK/logs/epalm/ePALM_gqa/checkpoint_last.pth + diff --git a/run_scripts/accelerate_training/image/ePALM_pt_L_caption.sh b/run_scripts/accelerate_training/image/ePALM_pt_L_caption.sh new file mode 100644 index 0000000000000000000000000000000000000000..b98fff25654ef932e58fab4d13e8d0590ba48885 --- /dev/null +++ b/run_scripts/accelerate_training/image/ePALM_pt_L_caption.sh @@ -0,0 +1,19 @@ +# visopt_caption_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e3fixed_sharedcon_nomlp_opt_6_7b_64ep +config=./configs/image/ePALM_pt_L_caption.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_pt_L_caption + +export LC_ALL=C + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=8 --num_machines=1 accelerate_training/caption.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-6.7b \ +--vision_model vit_large_patch16_224 \ +--low_cpu +# --resume --checkpoint $WORK/logs/visopt/ePALM_pt_L_caption/checkpoint_last.pth + +# --evaluate --checkpoint $WORK/logs/visopt/ePALM_pt_L_caption/checkpoint_best.pth + diff --git a/run_scripts/accelerate_training/image/ePALM_pt_L_gqa.sh b/run_scripts/accelerate_training/image/ePALM_pt_L_gqa.sh new file mode 100644 index 0000000000000000000000000000000000000000..7072d685db352b20eaf6291489b0781c98ad51ae --- /dev/null +++ b/run_scripts/accelerate_training/image/ePALM_pt_L_gqa.sh @@ -0,0 +1,17 @@ + +config=./configs/image/ePALM_pt_L_gqa.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_pt_L_gqa + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=8 --num_machines=1 accelerate_training/gqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-6.7b \ +--vision_model vit_large_patch16_224 \ +--low_cpu +# --resume --checkpoint $WORK/logs/epalm/ePALM_pt_L_gqa/checkpoint_last.pth + + +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_pt_L_gqa/checkpoint_best.pth diff --git a/run_scripts/accelerate_training/image/ePALM_pt_L_vqa.sh b/run_scripts/accelerate_training/image/ePALM_pt_L_vqa.sh new file mode 100644 index 0000000000000000000000000000000000000000..5d2f9cd99b74d1e2300769d9fd6d34e8a1978247 --- /dev/null +++ b/run_scripts/accelerate_training/image/ePALM_pt_L_vqa.sh @@ -0,0 +1,21 @@ +# visopt_vqa_vlt5_stok_6clst_l19_31_reptok_nocache_ptl10_ptlr1e3fixed_nomlp_sharedcon_opt_6_7b_vitl_stdsplit_ep64 + +config=./configs/image/ePALM_pt_L_vqa.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_pt_L_vqa + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=8 --num_machines=1 accelerate_training/vqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-6.7b --vision_model vit_large_patch16_224 \ +--low_cpu +# --resume --checkpoint $WORK/logs/epalm/ePALM_pt_L_vqa/checkpoint_last.pth + +# --checkpoint /data/mshukor/logs/epalm/ePALM_pt_L_vqa/checkpoint_last.pth --evaluate --open_ended_eval \ + + + + + diff --git a/run_scripts/accelerate_training/image/ePALM_vqa.sh b/run_scripts/accelerate_training/image/ePALM_vqa.sh new file mode 100644 index 0000000000000000000000000000000000000000..a171cd39a45f4fee9f10528bc2519a4f3a89f4e1 --- /dev/null +++
b/run_scripts/accelerate_training/image/ePALM_vqa.sh @@ -0,0 +1,16 @@ + +# visopt_vqa_vlt5_stok_6clst_l19_31_reptok_nocache_ptl10_ptlr1e5fixed_opt_2_7b + +config=./configs/image/ePALM_vqa.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_vqa + + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=4 --num_machines=1 accelerate_training/vqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ +--low_cpu +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_vqa/checkpoint_last.pth \ diff --git a/run_scripts/accelerate_training/video/ePALM_video_caption_msrvtt.sh b/run_scripts/accelerate_training/video/ePALM_video_caption_msrvtt.sh new file mode 100644 index 0000000000000000000000000000000000000000..7b4ea05f9f56199e5ea8502eaac450a26c43445d --- /dev/null +++ b/run_scripts/accelerate_training/video/ePALM_video_caption_msrvtt.sh @@ -0,0 +1,16 @@ +# visopt_video_msrvtt_caption_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_tformer_16f_longt_opt_2_7b +config=./configs/video/ePALM_video_caption_msrvtt.yaml +data_dir=data/MSRVTT +output_dir=logs/epalm/ePALM_video_caption_msrvtt + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=4 --num_machines=1 accelerate_training/video_caption.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ +--low_cpu +# --resume --checkpoint $WORK/logs/epalm/ePALM_video_caption_msrvtt/checkpoint_last.pth + +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_video_caption_msrvtt/checkpoint_best.pth + diff --git a/run_scripts/accelerate_training/video/ePALM_video_qa_msrvtt.sh b/run_scripts/accelerate_training/video/ePALM_video_qa_msrvtt.sh new file mode 100644 index 0000000000000000000000000000000000000000..3e70695b1f4dddda7a8628e2ed32ca11c72e2b27 --- /dev/null +++ b/run_scripts/accelerate_training/video/ePALM_video_qa_msrvtt.sh @@ -0,0 +1,15 @@ +# visopt_video_msrvtt_vqa_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_tformer_8f_bs8_longt_opt_2_7b +config=./configs/video/ePALM_video_qa_msrvtt.yaml +data_dir=data/MSRVTT +output_dir=logs/epalm/ePALM_video_qa_msrvtt + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=4 --num_machines=1 accelerate_training/video_vqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ +--low_cpu +# --resume --checkpoint $WORK/logs/epalm/ePALM_video_qa_msrvtt/checkpoint_last.pth + + diff --git a/run_scripts/accelerate_training/video/ePALM_video_qa_msvd.sh b/run_scripts/accelerate_training/video/ePALM_video_qa_msvd.sh new file mode 100644 index 0000000000000000000000000000000000000000..d86286f8abe812353a2261a330d177ab269a75ff --- /dev/null +++ b/run_scripts/accelerate_training/video/ePALM_video_qa_msvd.sh @@ -0,0 +1,16 @@ + +# visopt_video_msvd_vqa_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_tformer_8f_bs8_longt_opt_2_7b +config=./configs/video/ePALM_video_qa_msvd.yaml +data_dir=data/MSVD +output_dir=logs/epalm/ePALM_video_qa_msvd + +accelerate launch --multi_gpu --mixed_precision=fp16 --num_processes=4 --num_machines=1 accelerate_training/video_vqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ +--low_cpu +# --resume --checkpoint $WORK/logs/epalm/ePALM_video_qa_msvd/checkpoint_last.pth + +# --checkpoint 
$WORK/logs/epalm/ePALM_video_qa_msvd/checkpoint_best.pth --evaluate diff --git a/run_scripts/float32/audio/ePALM_audio_caption.sh b/run_scripts/float32/audio/ePALM_audio_caption.sh new file mode 100644 index 0000000000000000000000000000000000000000..8759bcdf6afb71dc9d19355b4a0b0a795cba2f91 --- /dev/null +++ b/run_scripts/float32/audio/ePALM_audio_caption.sh @@ -0,0 +1,16 @@ + +# visopt_audio_audiocaps_caption_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_ast_bs8_longt_lessmask_opt_2_7b +config=./configs/audio/ePALM_audio_caption.yaml +data_dir=data/audiocaps +output_dir=logs/epalm/ePALM_audio_caption + +torchrun --nproc_per_node=4 --master_addr="localhost" --master_port=12325 float32/audio_caption.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ + +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_audio_caption/checkpoint_best.pth + + diff --git a/run_scripts/float32/image/ePALM_caption.sh b/run_scripts/float32/image/ePALM_caption.sh new file mode 100644 index 0000000000000000000000000000000000000000..bddae2b3820a6353011375cf9ec1e3f1bef55408 --- /dev/null +++ b/run_scripts/float32/image/ePALM_caption.sh @@ -0,0 +1,18 @@ +# visopt_caption_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_opt_2_7b + +config=./configs/image/ePALM_caption.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_caption + + +torchrun --nproc_per_node=1 --master_addr="localhost" --master_port=12326 float32/caption.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ + +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_caption/checkpoint_best.pth +# --resume --checkpoint $WORK/logs/epalm/ePALM_caption/checkpoint_last.pth + + diff --git a/run_scripts/float32/image/ePALM_gqa.sh b/run_scripts/float32/image/ePALM_gqa.sh new file mode 100644 index 0000000000000000000000000000000000000000..db8a1ca9a06a85f28a086e1376a5ab8a56ea780c --- /dev/null +++ b/run_scripts/float32/image/ePALM_gqa.sh @@ -0,0 +1,17 @@ +# visopt_gqa_vlt5_stok_6clst_l19_31_reptok_nocache_ptl10_ptlr1e5fixed_opt_2_7b +config=./configs/image/ePALM_gqa.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_gqa + +torchrun --nproc_per_node=4 --master_addr="localhost" --master_port=12303 float32/gqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ + +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_gqa/checkpoint_best.pth + + +# --resume --checkpoint $WORK/logs/epalm/ePALM_gqa/checkpoint_last.pth + diff --git a/run_scripts/float32/image/ePALM_pt_L_caption.sh b/run_scripts/float32/image/ePALM_pt_L_caption.sh new file mode 100644 index 0000000000000000000000000000000000000000..fa71d6e4938030881d9b8a559eb44d488f109768 --- /dev/null +++ b/run_scripts/float32/image/ePALM_pt_L_caption.sh @@ -0,0 +1,18 @@ +# visopt_caption_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e3fixed_sharedcon_nomlp_opt_6_7b_64ep +config=./configs/image/ePALM_pt_L_caption.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_pt_L_caption + + +torchrun --nproc_per_node=8 --master_addr="localhost" --master_port=12326 float32/caption.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-6.7b \ +--vision_model vit_large_patch16_224 + +# --resume --checkpoint $WORK/logs/visopt/ePALM_pt_L_caption/checkpoint_last.pth + +# --evaluate 
--checkpoint $WORK/logs/visopt/ePALM_pt_L_caption/checkpoint_best.pth + diff --git a/run_scripts/float32/image/ePALM_pt_L_gqa.sh b/run_scripts/float32/image/ePALM_pt_L_gqa.sh new file mode 100644 index 0000000000000000000000000000000000000000..7e0a4df853a523640a2c337cce64cedd8643d692 --- /dev/null +++ b/run_scripts/float32/image/ePALM_pt_L_gqa.sh @@ -0,0 +1,19 @@ + +# visopt_gqa_vlt5_stok_6clst_l19_31_reptok_nocache_ptl10_ptlr1e3fixed_nomlp_sharedcon_opt_6_7b_vitl_ep64 + +config=./configs/image/ePALM_pt_L_gqa.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_pt_L_gqa + +torchrun --nproc_per_node=8 --master_addr="localhost" --master_port=12303 float32/gqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-6.7b \ +--vision_model vit_large_patch16_224 + +# --resume --checkpoint $WORK/logs/epalm/ePALM_pt_L_gqa/checkpoint_last.pth + + +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_pt_L_gqa/checkpoint_best.pth diff --git a/run_scripts/float32/image/ePALM_pt_L_vqa.sh b/run_scripts/float32/image/ePALM_pt_L_vqa.sh new file mode 100644 index 0000000000000000000000000000000000000000..6229afa3b40972a22b1425dc55fced2e841d0ce9 --- /dev/null +++ b/run_scripts/float32/image/ePALM_pt_L_vqa.sh @@ -0,0 +1,22 @@ +# visopt_vqa_vlt5_stok_6clst_l19_31_reptok_nocache_ptl10_ptlr1e3fixed_nomlp_sharedcon_opt_6_7b_vitl_stdsplit_ep64 + +config=./configs/image/ePALM_pt_L_vqa.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_pt_L_vqa + + +torchrun --nproc_per_node=8 --master_addr="localhost" --master_port=12302 float32/vqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-6.7b --vision_model vit_large_patch16_224 \ + +# --resume --checkpoint $WORK/logs/epalm/ePALM_pt_L_vqa/checkpoint_last.pth + +# --checkpoint /data/mshukor/logs/epalm/ePALM_pt_L_vqa/checkpoint_last.pth --evaluate --open_ended_eval \ + + + + + diff --git a/run_scripts/float32/image/ePALM_vqa.sh b/run_scripts/float32/image/ePALM_vqa.sh new file mode 100644 index 0000000000000000000000000000000000000000..79196199384a9e5266382dd34373ab6e34304ecd --- /dev/null +++ b/run_scripts/float32/image/ePALM_vqa.sh @@ -0,0 +1,16 @@ + +# visopt_vqa_vlt5_stok_6clst_l19_31_reptok_nocache_ptl10_ptlr1e5fixed_opt_2_7b + +config=./configs/image/ePALM_vqa.yaml +data_dir=data/vl_adapter/vlt5_dataset +output_dir=logs/epalm/ePALM_vqa + + +torchrun --nproc_per_node=4 --master_addr="localhost" --master_port=12309 float32/vqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ + +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_vqa/checkpoint_last.pth \ diff --git a/run_scripts/float32/video/ePALM_video_caption_msrvtt.sh b/run_scripts/float32/video/ePALM_video_caption_msrvtt.sh new file mode 100644 index 0000000000000000000000000000000000000000..6e2e9eea66a25ab04b1be9ba375686543c121a4b --- /dev/null +++ b/run_scripts/float32/video/ePALM_video_caption_msrvtt.sh @@ -0,0 +1,17 @@ +# visopt_video_msrvtt_caption_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_tformer_16f_longt_opt_2_7b +config=./configs/video/ePALM_video_caption_msrvtt.yaml +data_dir=data/MSRVTT +output_dir=logs/epalm/ePALM_video_caption_msrvtt + + +torchrun --nproc_per_node=4 --master_addr="localhost" --master_port=12325 float32/video_caption.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model
facebook/opt-2.7b \ + +# --resume --checkpoint $WORK/logs/epalm/ePALM_video_caption_msrvtt/checkpoint_last.pth + +# --evaluate --checkpoint $WORK/logs/epalm/ePALM_video_caption_msrvtt/checkpoint_best.pth + diff --git a/run_scripts/float32/video/ePALM_video_qa_msrvtt.sh b/run_scripts/float32/video/ePALM_video_qa_msrvtt.sh new file mode 100644 index 0000000000000000000000000000000000000000..47e68d537372aa35e1bcd77621b6673df40acd6e --- /dev/null +++ b/run_scripts/float32/video/ePALM_video_qa_msrvtt.sh @@ -0,0 +1,15 @@ +# visopt_video_msrvtt_vqa_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_tformer_8f_bs8_longt_opt_2_7b +config=./configs/video/ePALM_video_qa_msrvtt.yaml +data_dir=data/MSRVTT +output_dir=logs/epalm/ePALM_video_qa_msrvtt + +torchrun --nproc_per_node=4 --master_addr="localhost" --master_port=12325 float32/video_vqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ + +# --resume --checkpoint $WORK/logs/epalm/ePALM_video_qa_msrvtt/checkpoint_last.pth + + diff --git a/run_scripts/float32/video/ePALM_video_qa_msvd.sh b/run_scripts/float32/video/ePALM_video_qa_msvd.sh new file mode 100644 index 0000000000000000000000000000000000000000..5e1eeabebc7e88e6d83b2ab58b369e42fb8b560c --- /dev/null +++ b/run_scripts/float32/video/ePALM_video_qa_msvd.sh @@ -0,0 +1,17 @@ + +# visopt_video_msvd_vqa_reptok_6clst_l19_31_nocache_noeostk_ptl10_ptlr1e5fixed_tformer_8f_bs8_longt_opt_2_7b +config=./configs/video/ePALM_video_qa_msvd.yaml +data_dir=data/MSVD +output_dir=logs/epalm/ePALM_video_qa_msvd + + +torchrun --nproc_per_node=4 --master_addr="localhost" --master_port=12325 float32/video_vqa.py \ +--config $config \ +--output_dir $output_dir \ +--data_dir $data_dir \ +--save_best \ +--text_model facebook/opt-2.7b \ + +# --resume --checkpoint $WORK/logs/epalm/ePALM_video_qa_msvd/checkpoint_last.pth + +# --checkpoint $WORK/logs/epalm/ePALM_video_qa_msvd/checkpoint_best.pth --evaluate diff --git a/scheduler/__init__.py b/scheduler/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6a7789826229f66e1220cb6149902ba9c411b537 --- /dev/null +++ b/scheduler/__init__.py @@ -0,0 +1,5 @@ +from .cosine_lr import CosineLRScheduler +from .plateau_lr import PlateauLRScheduler +from .step_lr import StepLRScheduler +from .tanh_lr import TanhLRScheduler +from .scheduler_factory import create_scheduler diff --git a/scheduler/cosine_lr.py b/scheduler/cosine_lr.py new file mode 100644 index 0000000000000000000000000000000000000000..40de4a13558c042a4c00a3907eb68dbcb766e876 --- /dev/null +++ b/scheduler/cosine_lr.py @@ -0,0 +1,118 @@ +""" Cosine Scheduler + +Cosine LR schedule with warmup, cycle/restarts, noise. + +Hacked together by / Copyright 2020 Ross Wightman +""" +import logging +import math +import numpy as np +import torch + +from .scheduler import Scheduler + +from pdb import set_trace as breakpoint + +_logger = logging.getLogger(__name__) + + +class CosineLRScheduler(Scheduler): + """ + Cosine decay with restarts. + This is described in the paper https://arxiv.org/abs/1608.03983. 
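+ Within cycle i of length t_i, with decay gamma = decay_rate ** i, the LR at cycle offset t_curr is + lr = lr_min * gamma + 0.5 * (lr_max - lr_min) * gamma * (1 + cos(pi * t_curr / t_i)), + a compact summary of the update rule implemented in _get_lr below.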
+ + Inspiration from + https://github.com/allenai/allennlp/blob/master/allennlp/training/learning_rate_schedulers/cosine.py + """ + + def __init__(self, + optimizer: torch.optim.Optimizer, + t_initial: int, + t_mul: float = 1., + lr_min: float = 0., + decay_rate: float = 1., + warmup_t=0, + warmup_lr_init=0, + warmup_prefix=True, + cycle_limit=0, + t_in_epochs=True, + noise_range_t=None, + noise_pct=0.67, + noise_std=1.0, + noise_seed=42, + initialize=True, + scheduler_groups=None) -> None: + super().__init__( + optimizer, param_group_field="lr", + noise_range_t=noise_range_t, noise_pct=noise_pct, noise_std=noise_std, noise_seed=noise_seed, + initialize=initialize, scheduler_groups=scheduler_groups) + + assert t_initial > 0 + assert lr_min >= 0 + if t_initial == 1 and t_mul == 1 and decay_rate == 1: + _logger.warning("Cosine annealing scheduler will have no effect on the learning " + "rate since t_initial = t_mul = eta_mul = 1.") + self.t_initial = t_initial + self.t_mul = t_mul + self.lr_min = lr_min + self.decay_rate = decay_rate + self.cycle_limit = cycle_limit + self.warmup_t = warmup_t + self.warmup_lr_init = warmup_lr_init + self.warmup_prefix = warmup_prefix + self.t_in_epochs = t_in_epochs + if self.warmup_t: + self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in self.base_values] + super().update_groups(self.warmup_lr_init) + else: + self.warmup_steps = [1 for _ in self.base_values] + + def _get_lr(self, t): + if t < self.warmup_t: + lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps] + else: + if self.warmup_prefix: + t = t - self.warmup_t + + if self.t_mul != 1: + i = math.floor(math.log(1 - t / self.t_initial * (1 - self.t_mul), self.t_mul)) + t_i = self.t_mul ** i * self.t_initial + t_curr = t - (1 - self.t_mul ** i) / (1 - self.t_mul) * self.t_initial + else: + i = t // self.t_initial + t_i = self.t_initial + t_curr = t - (self.t_initial * i) + + gamma = self.decay_rate ** i + lr_min = self.lr_min * gamma + lr_max_values = [v * gamma for v in self.base_values] + + if self.cycle_limit == 0 or (self.cycle_limit > 0 and i < self.cycle_limit): + lrs = [ + lr_min + 0.5 * (lr_max - lr_min) * (1 + math.cos(math.pi * t_curr / t_i)) for lr_max in lr_max_values + ] + else: + lrs = [self.lr_min for _ in self.base_values] + + return lrs + + def get_epoch_values(self, epoch: int): + if self.t_in_epochs: + return self._get_lr(epoch) + else: + return None + + def get_update_values(self, num_updates: int): + if not self.t_in_epochs: + return self._get_lr(num_updates) + else: + return None + + def get_cycle_length(self, cycles=0): + if not cycles: + cycles = self.cycle_limit + cycles = max(1, cycles) + if self.t_mul == 1.0: + return self.t_initial * cycles + else: + return int(math.floor(-self.t_initial * (self.t_mul ** cycles - 1) / (1 - self.t_mul))) diff --git a/scheduler/plateau_lr.py b/scheduler/plateau_lr.py new file mode 100644 index 0000000000000000000000000000000000000000..4f2cacb65a1bf23d10aa6fd296f74579571043cf --- /dev/null +++ b/scheduler/plateau_lr.py @@ -0,0 +1,113 @@ +""" Plateau Scheduler + +Adapts PyTorch plateau scheduler and allows application of noise, warmup. 
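+Concretely, after the warmup epochs handled here, step(epoch, metric) defers to +torch.optim.lr_scheduler.ReduceLROnPlateau, which scales the LR by decay_rate once the +tracked metric stops improving for patience_t epochs (see __init__ below).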
+ +Hacked together by / Copyright 2020 Ross Wightman +""" +import torch + +from .scheduler import Scheduler + + +class PlateauLRScheduler(Scheduler): + """Decay the LR by a factor every time the validation loss plateaus.""" + + def __init__(self, + optimizer, + decay_rate=0.1, + patience_t=10, + verbose=True, + threshold=1e-4, + cooldown_t=0, + warmup_t=0, + warmup_lr_init=0, + lr_min=0, + mode='max', + noise_range_t=None, + noise_type='normal', + noise_pct=0.67, + noise_std=1.0, + noise_seed=None, + initialize=True, + ): + super().__init__(optimizer, 'lr', initialize=initialize) + + self.lr_scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau( + self.optimizer, + patience=patience_t, + factor=decay_rate, + verbose=verbose, + threshold=threshold, + cooldown=cooldown_t, + mode=mode, + min_lr=lr_min + ) + + self.noise_range = noise_range_t + self.noise_pct = noise_pct + self.noise_type = noise_type + self.noise_std = noise_std + self.noise_seed = noise_seed if noise_seed is not None else 42 + self.warmup_t = warmup_t + self.warmup_lr_init = warmup_lr_init + if self.warmup_t: + self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in self.base_values] + super().update_groups(self.warmup_lr_init) + else: + self.warmup_steps = [1 for _ in self.base_values] + self.restore_lr = None + + def state_dict(self): + return { + 'best': self.lr_scheduler.best, + 'last_epoch': self.lr_scheduler.last_epoch, + } + + def load_state_dict(self, state_dict): + self.lr_scheduler.best = state_dict['best'] + if 'last_epoch' in state_dict: + self.lr_scheduler.last_epoch = state_dict['last_epoch'] + + # override the base class step fn completely + def step(self, epoch, metric=None): + if epoch <= self.warmup_t: + lrs = [self.warmup_lr_init + epoch * s for s in self.warmup_steps] + super().update_groups(lrs) + else: + if self.restore_lr is not None: + # restore actual LR from before our last noise perturbation before stepping base + for i, param_group in enumerate(self.optimizer.param_groups): + param_group['lr'] = self.restore_lr[i] + self.restore_lr = None + + self.lr_scheduler.step(metric, epoch) # step the base scheduler + + if self.noise_range is not None: + if isinstance(self.noise_range, (list, tuple)): + apply_noise = self.noise_range[0] <= epoch < self.noise_range[1] + else: + apply_noise = epoch >= self.noise_range + if apply_noise: + self._apply_noise(epoch) + + def _apply_noise(self, epoch): + g = torch.Generator() + g.manual_seed(self.noise_seed + epoch) + if self.noise_type == 'normal': + while True: + # resample if noise out of percent limit, brute force but shouldn't spin much + noise = torch.randn(1, generator=g).item() + if abs(noise) < self.noise_pct: + break + else: + noise = 2 * (torch.rand(1, generator=g).item() - 0.5) * self.noise_pct + + # apply the noise on top of previous LR, cache the old value so we can restore for normal + # stepping of base scheduler + restore_lr = [] + for i, param_group in enumerate(self.optimizer.param_groups): + old_lr = float(param_group['lr']) + restore_lr.append(old_lr) + new_lr = old_lr + old_lr * noise + param_group['lr'] = new_lr + self.restore_lr = restore_lr diff --git a/scheduler/scheduler.py b/scheduler/scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..248c8c1259263a6508eb22f6302a133cc1544866 --- /dev/null +++ b/scheduler/scheduler.py @@ -0,0 +1,124 @@ +from typing import Dict, Any + +import torch + + +class Scheduler: + """ Parameter Scheduler Base Class + A scheduler base class that can be used to schedule any 
optimizer parameter groups. + + Unlike the builtin PyTorch schedulers, this is intended to be consistently called + * At the END of each epoch, before incrementing the epoch count, to calculate next epoch's value + * At the END of each optimizer update, after incrementing the update count, to calculate next update's value + + The schedulers built on this should try to remain as stateless as possible (for simplicity). + + This family of schedulers is attempting to avoid the confusion of the meaning of 'last_epoch' + and -1 values for special behaviour. All epoch and update counts must be tracked in the training + code and explicitly passed in to the schedulers on the corresponding step or step_update call. + + Based on ideas from: + * https://github.com/pytorch/fairseq/tree/master/fairseq/optim/lr_scheduler + * https://github.com/allenai/allennlp/tree/master/allennlp/training/learning_rate_schedulers + """ + + def __init__(self, + optimizer: torch.optim.Optimizer, + param_group_field: str, + noise_range_t=None, + noise_type='normal', + noise_pct=0.67, + noise_std=1.0, + noise_seed=None, + initialize: bool = True, + scheduler_groups=None) -> None: + self.optimizer = optimizer + # scheduler_groups selects which param-group indices this scheduler updates; + # default to all groups, since update_groups() skips any index not listed here + if scheduler_groups is None: + scheduler_groups = list(range(len(optimizer.param_groups))) + elif not isinstance(scheduler_groups, list): + scheduler_groups = [scheduler_groups] + self.scheduler_groups = scheduler_groups + + self.param_group_field = param_group_field + self._initial_param_group_field = f"initial_{param_group_field}" + if initialize: + for i, group in enumerate(self.optimizer.param_groups): + if param_group_field not in group: + raise KeyError(f"{param_group_field} missing from param_groups[{i}]") + group.setdefault(self._initial_param_group_field, group[param_group_field]) + else: + for i, group in enumerate(self.optimizer.param_groups): + if self._initial_param_group_field not in group: + raise KeyError(f"{self._initial_param_group_field} missing from param_groups[{i}]") + self.base_values = [group[self._initial_param_group_field] for group in self.optimizer.param_groups] + self.metric = None # any point to having this for all?
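+ # Noise settings: when t falls within noise_range_t, _add_noise() perturbs the scheduled + # values multiplicatively by a random factor drawn from a generator seeded with + # noise_seed + t, so the perturbation is reproducible across runs.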
+ self.noise_range_t = noise_range_t + self.noise_pct = noise_pct + self.noise_type = noise_type + self.noise_std = noise_std + self.noise_seed = noise_seed if noise_seed is not None else 42 + self.update_groups(self.base_values) + + def state_dict(self) -> Dict[str, Any]: + return {key: value for key, value in self.__dict__.items() if key != 'optimizer'} + + def load_state_dict(self, state_dict: Dict[str, Any]) -> None: + self.__dict__.update(state_dict) + + def get_epoch_values(self, epoch: int): + return None + + def get_update_values(self, num_updates: int): + return None + + def step(self, epoch: int, metric: float = None) -> None: + self.metric = metric + values = self.get_epoch_values(epoch) + if values is not None: + values = self._add_noise(values, epoch) + self.update_groups(values) + + def step_update(self, num_updates: int, metric: float = None): + self.metric = metric + values = self.get_update_values(num_updates) + if values is not None: + values = self._add_noise(values, num_updates) + self.update_groups(values) + + def update_groups(self, values): + if not isinstance(values, (list, tuple)): + values = [values] * len(self.optimizer.param_groups) + for i, (param_group, value) in enumerate(zip(self.optimizer.param_groups, values)): + if i in self.scheduler_groups: + param_group[self.param_group_field] = value + + def _add_noise(self, lrs, t): + if self.noise_range_t is not None: + if isinstance(self.noise_range_t, (list, tuple)): + apply_noise = self.noise_range_t[0] <= t < self.noise_range_t[1] + else: + apply_noise = t >= self.noise_range_t + if apply_noise: + g = torch.Generator() + g.manual_seed(self.noise_seed + t) + if self.noise_type == 'normal': + while True: + # resample if noise out of percent limit, brute force but shouldn't spin much + noise = torch.randn(1, generator=g).item() + if abs(noise) < self.noise_pct: + break + else: + noise = 2 * (torch.rand(1, generator=g).item() - 0.5) * self.noise_pct + lrs = [v + v * noise for v in lrs] + return lrs diff --git a/scheduler/scheduler_factory.py b/scheduler/scheduler_factory.py new file mode 100644 index 0000000000000000000000000000000000000000..eb2232341de932b8db2b1f94241099c42cfa4041 --- /dev/null +++ b/scheduler/scheduler_factory.py @@ -0,0 +1,91 @@ +""" Scheduler Factory +Hacked together by / Copyright 2020 Ross Wightman +""" +from .cosine_lr import CosineLRScheduler +from .tanh_lr import TanhLRScheduler +from .step_lr import StepLRScheduler +from .plateau_lr import PlateauLRScheduler + + +def create_scheduler(args, optimizer): + num_epochs = args.epochs + + if getattr(args, 'lr_noise', None) is not None: + lr_noise = getattr(args, 'lr_noise') + if isinstance(lr_noise, (list, tuple)): + noise_range = [n * num_epochs for n in lr_noise] + if len(noise_range) == 1: + noise_range = noise_range[0] + else: + noise_range = lr_noise * num_epochs + else: + noise_range = None + + lr_scheduler = None + + scheduler_groups = getattr(args, 'scheduler_groups', None) + + if args.sched == 'cosine': + lr_scheduler = CosineLRScheduler( + optimizer, + t_initial=num_epochs, + t_mul=getattr(args, 'lr_cycle_mul', 1.), + lr_min=args.min_lr, + decay_rate=args.decay_rate, + warmup_lr_init=args.warmup_lr, + warmup_t=args.warmup_epochs, + cycle_limit=getattr(args, 'lr_cycle_limit', 1), + t_in_epochs=True, + noise_range_t=noise_range, + noise_pct=getattr(args, 'lr_noise_pct', 0.67), + noise_std=getattr(args, 'lr_noise_std', 1.), + noise_seed=getattr(args, 'seed', 42), + scheduler_groups=scheduler_groups, + ) + num_epochs = 
lr_scheduler.get_cycle_length() + args.cooldown_epochs + elif args.sched == 'tanh': + lr_scheduler = TanhLRScheduler( + optimizer, + t_initial=num_epochs, + t_mul=getattr(args, 'lr_cycle_mul', 1.), + lr_min=args.min_lr, + warmup_lr_init=args.warmup_lr, + warmup_t=args.warmup_epochs, + cycle_limit=getattr(args, 'lr_cycle_limit', 1), + t_in_epochs=True, + noise_range_t=noise_range, + noise_pct=getattr(args, 'lr_noise_pct', 0.67), + noise_std=getattr(args, 'lr_noise_std', 1.), + noise_seed=getattr(args, 'seed', 42), + ) + num_epochs = lr_scheduler.get_cycle_length() + args.cooldown_epochs + elif args.sched == 'step': + lr_scheduler = StepLRScheduler( + optimizer, + decay_t=args.decay_epochs, + decay_rate=args.decay_rate, + warmup_lr_init=args.warmup_lr, + warmup_t=args.warmup_epochs, + noise_range_t=noise_range, + noise_pct=getattr(args, 'lr_noise_pct', 0.67), + noise_std=getattr(args, 'lr_noise_std', 1.), + noise_seed=getattr(args, 'seed', 42), + ) + elif args.sched == 'plateau': + mode = 'min' if 'loss' in getattr(args, 'eval_metric', '') else 'max' + lr_scheduler = PlateauLRScheduler( + optimizer, + decay_rate=args.decay_rate, + patience_t=args.patience_epochs, + lr_min=args.min_lr, + mode=mode, + warmup_lr_init=args.warmup_lr, + warmup_t=args.warmup_epochs, + cooldown_t=0, + noise_range_t=noise_range, + noise_pct=getattr(args, 'lr_noise_pct', 0.67), + noise_std=getattr(args, 'lr_noise_std', 1.), + noise_seed=getattr(args, 'seed', 42), + ) + + return lr_scheduler, num_epochs diff --git a/scheduler/step_lr.py b/scheduler/step_lr.py new file mode 100644 index 0000000000000000000000000000000000000000..f797e1a8cf35999531dd5f1ccbbe09a9d0cf30a9 --- /dev/null +++ b/scheduler/step_lr.py @@ -0,0 +1,63 @@ +""" Step Scheduler + +Basic step LR schedule with warmup, noise. + +Hacked together by / Copyright 2020 Ross Wightman +""" +import math +import torch + +from .scheduler import Scheduler + + +class StepLRScheduler(Scheduler): + """ + """ + + def __init__(self, + optimizer: torch.optim.Optimizer, + decay_t: float, + decay_rate: float = 1., + warmup_t=0, + warmup_lr_init=0, + t_in_epochs=True, + noise_range_t=None, + noise_pct=0.67, + noise_std=1.0, + noise_seed=42, + initialize=True, + ) -> None: + super().__init__( + optimizer, param_group_field="lr", + noise_range_t=noise_range_t, noise_pct=noise_pct, noise_std=noise_std, noise_seed=noise_seed, + initialize=initialize) + + self.decay_t = decay_t + self.decay_rate = decay_rate + self.warmup_t = warmup_t + self.warmup_lr_init = warmup_lr_init + self.t_in_epochs = t_in_epochs + if self.warmup_t: + self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in self.base_values] + super().update_groups(self.warmup_lr_init) + else: + self.warmup_steps = [1 for _ in self.base_values] + + def _get_lr(self, t): + if t < self.warmup_t: + lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps] + else: + lrs = [v * (self.decay_rate ** (t // self.decay_t)) for v in self.base_values] + return lrs + + def get_epoch_values(self, epoch: int): + if self.t_in_epochs: + return self._get_lr(epoch) + else: + return None + + def get_update_values(self, num_updates: int): + if not self.t_in_epochs: + return self._get_lr(num_updates) + else: + return None diff --git a/scheduler/tanh_lr.py b/scheduler/tanh_lr.py new file mode 100644 index 0000000000000000000000000000000000000000..8cc338bb1df7a564d9207b32ab0f59cdf1ef4c59 --- /dev/null +++ b/scheduler/tanh_lr.py @@ -0,0 +1,120 @@ +""" TanH Scheduler + +TanH schedule with warmup, cycle/restarts, noise. 
+
+Hacked together by / Copyright 2020 Ross Wightman
+"""
+import logging
+import math
+import numpy as np
+import torch
+
+from .scheduler import Scheduler
+
+
+_logger = logging.getLogger(__name__)
+
+
+class TanhLRScheduler(Scheduler):
+    """
+    Hyperbolic-Tangent decay with restarts.
+    This is described in the paper https://arxiv.org/abs/1806.01593
+    """
+
+    def __init__(self,
+                 optimizer: torch.optim.Optimizer,
+                 t_initial: int,
+                 lb: float = -6.,
+                 ub: float = 4.,
+                 t_mul: float = 1.,
+                 lr_min: float = 0.,
+                 decay_rate: float = 1.,
+                 warmup_t=0,
+                 warmup_lr_init=0,
+                 warmup_prefix=False,
+                 cycle_limit=0,
+                 t_in_epochs=True,
+                 noise_range_t=None,
+                 noise_pct=0.67,
+                 noise_std=1.0,
+                 noise_seed=42,
+                 initialize=True) -> None:
+        super().__init__(
+            optimizer, param_group_field="lr",
+            noise_range_t=noise_range_t, noise_pct=noise_pct, noise_std=noise_std, noise_seed=noise_seed,
+            initialize=initialize)
+
+        assert t_initial > 0
+        assert lr_min >= 0
+        assert lb < ub
+        assert cycle_limit >= 0
+        assert warmup_t >= 0
+        assert warmup_lr_init >= 0
+        self.lb = lb
+        self.ub = ub
+        self.t_initial = t_initial
+        self.t_mul = t_mul
+        self.lr_min = lr_min
+        self.decay_rate = decay_rate
+        self.cycle_limit = cycle_limit
+        self.warmup_t = warmup_t
+        self.warmup_lr_init = warmup_lr_init
+        self.warmup_prefix = warmup_prefix
+        self.t_in_epochs = t_in_epochs
+        if self.warmup_t:
+            t_v = self.base_values if self.warmup_prefix else self._get_lr(self.warmup_t)
+            self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in t_v]
+            super().update_groups(self.warmup_lr_init)
+        else:
+            self.warmup_steps = [1 for _ in self.base_values]
+
+    def _get_lr(self, t):
+        if t < self.warmup_t:
+            lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps]
+        else:
+            if self.warmup_prefix:
+                t = t - self.warmup_t
+
+            if self.t_mul != 1:
+                i = math.floor(math.log(1 - t / self.t_initial * (1 - self.t_mul), self.t_mul))
+                t_i = self.t_mul ** i * self.t_initial
+                t_curr = t - (1 - self.t_mul ** i) / (1 - self.t_mul) * self.t_initial
+            else:
+                i = t // self.t_initial
+                t_i = self.t_initial
+                t_curr = t - (self.t_initial * i)
+
+            if self.cycle_limit == 0 or (self.cycle_limit > 0 and i < self.cycle_limit):
+                gamma = self.decay_rate ** i
+                lr_min = self.lr_min * gamma
+                lr_max_values = [v * gamma for v in self.base_values]
+
+                tr = t_curr / t_i
+                lrs = [
+                    lr_min + 0.5 * (lr_max - lr_min) * (1 - math.tanh(self.lb * (1.
- tr) + self.ub * tr)) + for lr_max in lr_max_values + ] + else: + lrs = [self.lr_min * (self.decay_rate ** self.cycle_limit) for _ in self.base_values] + return lrs + + def get_epoch_values(self, epoch: int): + if self.t_in_epochs: + return self._get_lr(epoch) + else: + return None + + def get_update_values(self, num_updates: int): + if not self.t_in_epochs: + return self._get_lr(num_updates) + else: + return None + + def get_cycle_length(self, cycles=0): + if not cycles: + cycles = self.cycle_limit + cycles = max(1, cycles) + if self.t_mul == 1.0: + return self.t_initial * cycles + else: + return int(math.floor(-self.t_initial * (self.t_mul ** cycles - 1) / (1 - self.t_mul))) diff --git a/utils.py b/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4bc5298c2864e73593999e54eae61bafb1fe8585 --- /dev/null +++ b/utils.py @@ -0,0 +1,649 @@ +import numpy as np +import io +import os +import time +from collections import defaultdict, deque +import datetime + +import torch +import torch.distributed as dist + +def optimizer_to(optim, device): + for param in optim.state.values(): + # Not sure there are any global tensors in the state dict + if isinstance(param, torch.Tensor): + param.data = param.data.to(device) + if param._grad is not None: + param._grad.data = param._grad.data.to(device) + elif isinstance(param, dict): + for subparam in param.values(): + if isinstance(subparam, torch.Tensor): + subparam.data = subparam.data.to(device) + if subparam._grad is not None: + subparam._grad.data = subparam._grad.data.to(device) + + +class SmoothedValue(object): + """Track a series of values and provide access to smoothed values over a + window or the global series average. + """ + + def __init__(self, window_size=20, fmt=None): + if fmt is None: + fmt = "{median:.4f} ({global_avg:.4f})" + self.deque = deque(maxlen=window_size) + self.total = 0.0 + self.count = 0 + self.fmt = fmt + + def update(self, value, n=1): + self.deque.append(value) + self.count += n + self.total += value * n + + def synchronize_between_processes(self): + """ + Warning: does not synchronize the deque! 
+ """ + if not is_dist_avail_and_initialized(): + return + t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda') + dist.barrier() + dist.all_reduce(t) + t = t.tolist() + self.count = int(t[0]) + self.total = t[1] + + @property + def median(self): + d = torch.tensor(list(self.deque)) + return d.median().item() + + @property + def avg(self): + d = torch.tensor(list(self.deque), dtype=torch.float32) + return d.mean().item() + + @property + def global_avg(self): + return self.total / self.count + + @property + def max(self): + return max(self.deque) + + @property + def value(self): + return self.deque[-1] + + def __str__(self): + return self.fmt.format( + median=self.median, + avg=self.avg, + global_avg=self.global_avg, + max=self.max, + value=self.value) + + +class MetricLogger(object): + def __init__(self, delimiter="\t", accelerator=None): + self.meters = defaultdict(SmoothedValue) + self.delimiter = delimiter + self.accelerator = accelerator + + def update(self, **kwargs): + for k, v in kwargs.items(): + if isinstance(v, torch.Tensor): + v = v.item() + assert isinstance(v, (float, int)) + self.meters[k].update(v) + + def __getattr__(self, attr): + if attr in self.meters: + return self.meters[attr] + if attr in self.__dict__: + return self.__dict__[attr] + raise AttributeError("'{}' object has no attribute '{}'".format( + type(self).__name__, attr)) + + def __str__(self): + loss_str = [] + for name, meter in self.meters.items(): + loss_str.append( + "{}: {}".format(name, str(meter)) + ) + return self.delimiter.join(loss_str) + + def global_avg(self): + loss_str = [] + for name, meter in self.meters.items(): + loss_str.append( + "{}: {:.4f}".format(name, meter.global_avg) + ) + return self.delimiter.join(loss_str) + + def synchronize_between_processes(self): + for meter in self.meters.values(): + meter.synchronize_between_processes() + + def add_meter(self, name, meter): + self.meters[name] = meter + + def log_every(self, iterable, print_freq, header=None): + i = 0 + if not header: + header = '' + start_time = time.time() + end = time.time() + iter_time = SmoothedValue(fmt='{avg:.4f}') + data_time = SmoothedValue(fmt='{avg:.4f}') + space_fmt = ':' + str(len(str(len(iterable)))) + 'd' + log_msg = [ + header, + '[{0' + space_fmt + '}/{1}]', + 'eta: {eta}', + '{meters}', + 'time: {time}', + 'data: {data}' + ] + if torch.cuda.is_available(): + log_msg.append('max mem: {memory:.0f}') + log_msg = self.delimiter.join(log_msg) + MB = 1024.0 * 1024.0 + + if self.accelerator is not None: + print_func = self.accelerator.print + else: + print_func = print + for obj in iterable: + data_time.update(time.time() - end) + yield obj + iter_time.update(time.time() - end) + if i % print_freq == 0 or i == len(iterable) - 1: + eta_seconds = iter_time.global_avg * (len(iterable) - i) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + if torch.cuda.is_available(): + print_func(log_msg.format( + i, len(iterable), eta=eta_string, + meters=str(self), + time=str(iter_time), data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB)) + else: + print_func(log_msg.format( + i, len(iterable), eta=eta_string, + meters=str(self), + time=str(iter_time), data=str(data_time))) + i += 1 + end = time.time() + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print_func('{} Total time: {} ({:.4f} s / it)'.format( + header, total_time_str, total_time / len(iterable))) + + + +class AttrDict(dict): + def __init__(self, *args, 
**kwargs): + super(AttrDict, self).__init__(*args, **kwargs) + self.__dict__ = self + + +def compute_acc(logits, label, reduction='mean'): + ret = (torch.argmax(logits, dim=1) == label).float() + if reduction == 'none': + return ret.detach() + elif reduction == 'mean': + return ret.mean().item() + +def compute_n_params(model, return_str=True): + tot = 0 + for p in model.parameters(): + w = 1 + for x in p.shape: + w *= x + tot += w + if return_str: + if tot >= 1e6: + return '{:.1f}M'.format(tot / 1e6) + else: + return '{:.1f}K'.format(tot / 1e3) + else: + return tot + +def setup_for_distributed(is_master): + """ + This function disables printing when not in master process + """ + import builtins as __builtin__ + builtin_print = __builtin__.print + + def print(*args, **kwargs): + force = kwargs.pop('force', False) + if is_master or force: + builtin_print(*args, **kwargs) + + __builtin__.print = print + + +def is_dist_avail_and_initialized(): + if not dist.is_available(): + return False + if not dist.is_initialized(): + return False + return True + + +def get_world_size(): + if not is_dist_avail_and_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank(): + if not is_dist_avail_and_initialized(): + return 0 + return dist.get_rank() + + +def is_main_process(): + return get_rank() == 0 + + +def save_on_master(*args, **kwargs): + if is_main_process(): + torch.save(*args, **kwargs) + + +def init_distributed_mode(args): + if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ: + args.rank = int(os.environ["RANK"]) + args.world_size = int(os.environ['WORLD_SIZE']) + args.gpu = int(os.environ['LOCAL_RANK']) + elif 'SLURM_PROCID' in os.environ: + args.rank = int(os.environ['SLURM_PROCID']) + args.gpu = args.rank % torch.cuda.device_count() + print(args.gpu, os.environ['SLURM_LOCALID'], os.environ['SLURM_JOB_NODELIST'], os.environ['SLURM_STEP_GPUS']) + else: + print('Not using distributed mode') + args.distributed = False + return + + args.distributed = True + + torch.cuda.set_device(args.gpu) + args.dist_backend = 'nccl' + print('world_size', args.world_size, 'gpu', args.gpu, 'dist_url:', args.dist_url) + print('| distributed init (rank {}): {}'.format( + args.rank, args.dist_url), flush=True) + torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url, + world_size=args.world_size, rank=args.rank) + print("init") + torch.distributed.barrier() + setup_for_distributed(args.rank == 0) + + +def init_distributed_mode_multinodes(args): + import hostlist + if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ: + args.rank = int(os.environ["RANK"]) + args.world_size = int(os.environ['WORLD_SIZE']) + args.gpu = int(os.environ['LOCAL_RANK']) + elif 'SLURM_PROCID' in os.environ: + args.rank = int(os.environ['SLURM_PROCID']) + args.gpu = args.rank % torch.cuda.device_count() + print('slurm') + else: + print('Not using distributed mode') + args.distributed = False + return + + args.distributed = True + # print(args.gpu, os.environ['SLURM_PROCID'], os.environ['SLURM_LOCALID'], os.environ['SLURM_JOB_NODELIST'], os.environ['SLURM_STEP_GPUS']) + hostnames = hostlist.expand_hostlist(os.environ['SLURM_JOB_NODELIST']) + os.environ['MASTER_ADDR'] = hostnames[0] + gpu_ids = os.environ['SLURM_STEP_GPUS'].split(",") + # os.environ['MASTER_PORT'] = str(12345 + int(min(gpu_ids))) # to avoid port conflict on the same node + + print(os.environ['MASTER_ADDR'], os.environ['MASTER_PORT']) + + torch.cuda.set_device(args.gpu) + args.dist_backend = 'nccl' + args.dist_url = 
'tcp://'+os.environ['MASTER_ADDR']+':'+os.environ['MASTER_PORT']
+
+    print('world_size', args.world_size, 'gpu', args.gpu)
+    print('| distributed init (rank {}): {}'.format(
+        args.rank, args.dist_url), flush=True)
+
+    torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url,
+                                         world_size=args.world_size, rank=args.rank)
+    # torch.distributed.barrier()
+    # setup_for_distributed(args.rank == 0)
+
+
+def init_distributed_mode_multinodes_jz(args):
+    import hostlist
+    if args.jean_zay:
+        hostnames = hostlist.expand_hostlist(os.environ['SLURM_JOB_NODELIST'])
+        os.environ['MASTER_ADDR'] = hostnames[0]
+
+        print(os.environ['MASTER_ADDR'], os.environ['MASTER_PORT'], os.environ['SLURM_PROCID'], os.environ['SLURM_NTASKS'], os.environ['SLURM_LOCALID'])
+        args.gpu = int(os.environ['SLURM_LOCALID'])
+        args.rank = int(os.environ['SLURM_PROCID'])
+        args.world_size = int(os.environ['SLURM_NTASKS'])
+        args.dist_url = 'env://'+os.environ['MASTER_ADDR']+':'+os.environ['MASTER_PORT']
+        # args.dist_url = 'env://'
+        print('jean zay')
+    elif 'RANK' in os.environ and 'WORLD_SIZE' in os.environ:
+        args.rank = int(os.environ["RANK"])
+        args.world_size = int(os.environ['WORLD_SIZE'])
+        args.gpu = int(os.environ['LOCAL_RANK'])
+    elif 'SLURM_PROCID' in os.environ:
+        args.rank = int(os.environ['SLURM_PROCID'])
+        args.gpu = args.rank % torch.cuda.device_count()
+        print('slurm')
+    else:
+        print('Not using distributed mode')
+        args.distributed = False
+        return
+
+    args.distributed = True
+
+    torch.cuda.set_device(args.gpu)
+    args.dist_backend = 'nccl'
+    print('world_size', args.world_size, 'gpu', args.gpu, 'rank', args.rank)
+    print('| distributed init (rank {}): {}'.format(
+        args.rank, args.dist_url), flush=True)
+    torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url,
+                                         world_size=args.world_size, rank=args.rank)
+
+    torch.distributed.barrier()
+    setup_for_distributed(args.rank == 0)
+
+
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+"""
+This file contains primitives for multi-gpu communication.
+This is useful when doing distributed training.
+"""
+
+import functools
+import logging
+import numpy as np
+import pickle
+import torch
+import torch.distributed as dist
+
+_LOCAL_PROCESS_GROUP = None
+"""
+A torch process group which only includes processes that are on the same machine as the current process.
+This variable is set when processes are spawned by `launch()` in "engine/launch.py".
+"""
+
+
+def get_world_size() -> int:
+    if not dist.is_available():
+        return 1
+    if not dist.is_initialized():
+        return 1
+    return dist.get_world_size()
+
+
+def get_rank() -> int:
+    if not dist.is_available():
+        return 0
+    if not dist.is_initialized():
+        return 0
+    return dist.get_rank()
+
+
+def get_local_rank() -> int:
+    """
+    Returns:
+        The rank of the current process within the local (per-machine) process group.
+    """
+    if not dist.is_available():
+        return 0
+    if not dist.is_initialized():
+        return 0
+    assert _LOCAL_PROCESS_GROUP is not None
+    return dist.get_rank(group=_LOCAL_PROCESS_GROUP)
+
+
+def get_local_size() -> int:
+    """
+    Returns:
+        The size of the per-machine process group,
+        i.e. the number of processes per machine.
+ """ + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size(group=_LOCAL_PROCESS_GROUP) + + +def is_main_process() -> bool: + return get_rank() == 0 + + +def synchronize(): + """ + Helper function to synchronize (barrier) among all processes when + using distributed training + """ + if not dist.is_available(): + return + if not dist.is_initialized(): + return + world_size = dist.get_world_size() + if world_size == 1: + return + dist.barrier() + + +@functools.lru_cache() +def _get_global_gloo_group(): + """ + Return a process group based on gloo backend, containing all the ranks + The result is cached. + """ + if dist.get_backend() == "nccl": + return dist.new_group(backend="gloo") + else: + return dist.group.WORLD + + +def _serialize_to_tensor(data, group): + backend = dist.get_backend(group) + assert backend in ["gloo", "nccl"] + device = torch.device("cpu" if backend == "gloo" else "cuda") + + buffer = pickle.dumps(data) + if len(buffer) > 1024 ** 3: + logger = logging.getLogger(__name__) + logger.warning( + "Rank {} trying to all-gather {:.2f} GB of data on device {}".format( + get_rank(), len(buffer) / (1024 ** 3), device + ) + ) + storage = torch.ByteStorage.from_buffer(buffer) + tensor = torch.ByteTensor(storage).to(device=device) + return tensor + + +def _pad_to_largest_tensor(tensor, group): + """ + Returns: + list[int]: size of the tensor, on each rank + Tensor: padded tensor that has the max size + """ + world_size = dist.get_world_size(group=group) + assert ( + world_size >= 1 + ), "comm.gather/all_gather must be called from ranks within the given group!" + local_size = torch.tensor( + [tensor.numel()], dtype=torch.int64, device=tensor.device) + size_list = [ + torch.zeros([1], dtype=torch.int64, device=tensor.device) + for _ in range(world_size) + ] + dist.all_gather(size_list, local_size, group=group) + size_list = [int(size.item()) for size in size_list] + + max_size = max(size_list) + + # we pad the tensor because torch all_gather does not support + # gathering tensors of different shapes + if local_size != max_size: + padding = torch.zeros( + (max_size - local_size,), dtype=torch.uint8, device=tensor.device + ) + tensor = torch.cat((tensor, padding), dim=0) + return size_list, tensor + + +def all_gather(data, group=None): + """ + Run all_gather on arbitrary picklable data (not necessarily tensors). + Args: + data: any picklable object + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. + Returns: + list[data]: list of data gathered from each rank + """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group) == 1: + return [data] + + tensor = _serialize_to_tensor(data, group) + + size_list, tensor = _pad_to_largest_tensor(tensor, group) + max_size = max(size_list) + + # receiving Tensor from all ranks + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) + for _ in size_list + ] + dist.all_gather(tensor_list, tensor, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + + return data_list + + +def gather(data, dst=0, group=None): + """ + Run gather on arbitrary picklable data (not necessarily tensors). + Args: + data: any picklable object + dst (int): destination rank + group: a torch process group. 
By default, will use a group which + contains all ranks on gloo backend. + Returns: + list[data]: on dst, a list of data gathered from each rank. Otherwise, + an empty list. + """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group=group) == 1: + return [data] + rank = dist.get_rank(group=group) + + tensor = _serialize_to_tensor(data, group) + size_list, tensor = _pad_to_largest_tensor(tensor, group) + + # receiving Tensor from all ranks + if rank == dst: + max_size = max(size_list) + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) + for _ in size_list + ] + dist.gather(tensor, tensor_list, dst=dst, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + return data_list + else: + dist.gather(tensor, [], dst=dst, group=group) + return [] + + +def shared_random_seed(): + """ + Returns: + int: a random number that is the same across all workers. + If workers need a shared RNG, they can use this shared seed to + create one. + All workers must call this function, otherwise it will deadlock. + """ + ints = np.random.randint(2 ** 31) + all_ints = all_gather(ints) + return all_ints[0] + + + +def reduce_dict(input_dict, average=True): + """ + Reduce the values in the dictionary from all processes so that process with rank + 0 has the reduced results. + Args: + input_dict (dict): inputs to be reduced. (values not necessarily tensors). + average (bool): whether to do average or sum + Returns: + a dict with the same keys as input_dict, after reduction. + """ + + world_size = get_world_size() + if world_size < 2: + return input_dict + + with torch.no_grad(): + + # Convert to CUDA Tensor for dist.reduce() + input_dict_cuda_vals = {} + for k, v in input_dict.items(): + if type(v) == torch.Tensor: + input_dict_cuda_vals[k] = v.to('cuda') + else: + input_dict_cuda_vals[k] = torch.tensor(v, device='cuda') + + names = [] + values = [] + for k, v in sorted(input_dict_cuda_vals.items()): + names.append(k) + values.append(v) + values = torch.stack(values, dim=0) + dist.reduce(values, dst=0) # reduce to gpu 0 + + if dist.get_rank() == 0 and average: + # only main process gets accumulated, so only divide by + # world_size in this case + values /= world_size + reduced_dict = {k: v for k, v in zip(names, values)} + return reduced_dict diff --git a/vqaTools/__init__.py b/vqaTools/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..072d8d90cd261c19c62fa4624ca22471fe72abfd --- /dev/null +++ b/vqaTools/__init__.py @@ -0,0 +1 @@ +__author__ = 'aagrawal' diff --git a/vqaTools/vqa.py b/vqaTools/vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..c0a22da212de2610be91bc566212fc67704efbca --- /dev/null +++ b/vqaTools/vqa.py @@ -0,0 +1,177 @@ +__author__ = 'aagrawal' +__version__ = '0.9' + +# Interface for accessing the VQA dataset. + +# This code is based on the code written by Tsung-Yi Lin for MSCOCO Python API available at the following link: +# (https://github.com/pdollar/coco/blob/master/PythonAPI/pycocotools/coco.py). + +# The following functions are defined: +# VQA - VQA class that loads VQA annotation file and prepares data structures. +# getQuesIds - Get question ids that satisfy given filter conditions. +# getImgIds - Get image ids that satisfy given filter conditions. 
+# loadQA - Load questions and answers with the specified question ids.
+# showQA - Display the specified questions and answers.
+# loadRes - Load result file and create result object.
+
+# Help on each function can be accessed by: "help(COCO.function)"
+
+import json
+import datetime
+import copy
+
+class VQA:
+    def __init__(self, annotation_file=None, question_file=None):
+        """
+        Constructor of VQA helper class for reading and visualizing questions and answers.
+        :param annotation_file (str): location of VQA annotation file
+        :return:
+        """
+        # load dataset
+        self.dataset = {}
+        self.questions = {}
+        self.qa = {}
+        self.qqa = {}
+        self.imgToQA = {}
+        if annotation_file is not None and question_file is not None:
+            print('loading VQA annotations and questions into memory...')
+            time_t = datetime.datetime.utcnow()
+            dataset = json.load(open(annotation_file, 'r'))
+            questions = json.load(open(question_file, 'r'))
+            self.dataset = dataset
+            self.questions = questions
+            self.createIndex()
+
+    def createIndex(self):
+        # create index
+        print('creating index...')
+        imgToQA = {ann['image_id']: [] for ann in self.dataset['annotations']}
+        qa = {ann['question_id']: [] for ann in self.dataset['annotations']}
+        qqa = {ann['question_id']: [] for ann in self.dataset['annotations']}
+        for ann in self.dataset['annotations']:
+            imgToQA[ann['image_id']] += [ann]
+            qa[ann['question_id']] = ann
+        for ques in self.questions['questions']:
+            qqa[ques['question_id']] = ques
+        print('index created!')
+
+        # create class members
+        self.qa = qa
+        self.qqa = qqa
+        self.imgToQA = imgToQA
+
+    def info(self):
+        """
+        Print information about the VQA annotation file.
+        :return:
+        """
+        for key, value in self.dataset['info'].items():
+            print('%s: %s'%(key, value))
+
+    def getQuesIds(self, imgIds=[], quesTypes=[], ansTypes=[]):
+        """
+        Get question ids that satisfy given filter conditions. default skips that filter
+        :param imgIds (int array) : get question ids for given imgs
+               quesTypes (str array) : get question ids for given question types
+               ansTypes (str array) : get question ids for given answer types
+        :return: ids (int array) : integer array of question ids
+        """
+        imgIds = imgIds if type(imgIds) == list else [imgIds]
+        quesTypes = quesTypes if type(quesTypes) == list else [quesTypes]
+        ansTypes = ansTypes if type(ansTypes) == list else [ansTypes]
+
+        if len(imgIds) == len(quesTypes) == len(ansTypes) == 0:
+            anns = self.dataset['annotations']
+        else:
+            if not len(imgIds) == 0:
+                anns = sum([self.imgToQA[imgId] for imgId in imgIds if imgId in self.imgToQA],[])
+            else:
+                anns = self.dataset['annotations']
+            anns = anns if len(quesTypes) == 0 else [ann for ann in anns if ann['question_type'] in quesTypes]
+            anns = anns if len(ansTypes) == 0 else [ann for ann in anns if ann['answer_type'] in ansTypes]
+        ids = [ann['question_id'] for ann in anns]
+        return ids
+
+    def getImgIds(self, quesIds=[], quesTypes=[], ansTypes=[]):
+        """
+        Get image ids that satisfy given filter conditions. default skips that filter
+        :param quesIds (int array) : get image ids for given question ids
+               quesTypes (str array) : get image ids for given question types
+               ansTypes (str array) : get image ids for given answer types
+        :return: ids (int array) : integer array of image ids
+        """
+        quesIds = quesIds if type(quesIds) == list else [quesIds]
+        quesTypes = quesTypes if type(quesTypes) == list else [quesTypes]
+        ansTypes = ansTypes if type(ansTypes) == list else [ansTypes]
+
+        if len(quesIds) == len(quesTypes) == len(ansTypes) == 0:
+            anns = self.dataset['annotations']
+        else:
+            if not len(quesIds) == 0:
+                # self.qa maps each question id to a single annotation dict, so collect
+                # matches as a list (sum(..., []) would fail on non-list elements)
+                anns = [self.qa[quesId] for quesId in quesIds if quesId in self.qa]
+            else:
+                anns = self.dataset['annotations']
+            anns = anns if len(quesTypes) == 0 else [ann for ann in anns if ann['question_type'] in quesTypes]
+            anns = anns if len(ansTypes) == 0 else [ann for ann in anns if ann['answer_type'] in ansTypes]
+        ids = [ann['image_id'] for ann in anns]
+        return ids
+
+    def loadQA(self, ids=[]):
+        """
+        Load questions and answers with the specified question ids.
+        :param ids (int array) : integer ids specifying question ids
+        :return: qa (object array) : loaded qa objects
+        """
+        if type(ids) == list:
+            return [self.qa[id] for id in ids]
+        elif type(ids) == int:
+            return [self.qa[ids]]
+
+    def showQA(self, anns):
+        """
+        Display the specified annotations.
+        :param anns (array of object): annotations to display
+        :return: None
+        """
+        if len(anns) == 0:
+            return 0
+        for ann in anns:
+            quesId = ann['question_id']
+            print("Question: %s" %(self.qqa[quesId]['question']))
+            for ans in ann['answers']:
+                print("Answer %d: %s" %(ans['answer_id'], ans['answer']))
+
+    def loadRes(self, resFile, quesFile):
+        """
+        Load result file and return a result object.
+        :param resFile (str) : file name of result file
+        :return: res (obj) : result api object
+        """
+        res = VQA()
+        res.questions = json.load(open(quesFile))
+        res.dataset['info'] = copy.deepcopy(self.questions['info'])
+        res.dataset['task_type'] = copy.deepcopy(self.questions['task_type'])
+        res.dataset['data_type'] = copy.deepcopy(self.questions['data_type'])
+        res.dataset['data_subtype'] = copy.deepcopy(self.questions['data_subtype'])
+        res.dataset['license'] = copy.deepcopy(self.questions['license'])
+
+        print('Loading and preparing results... ')
+        time_t = datetime.datetime.utcnow()
+        anns = json.load(open(resFile))
+        assert type(anns) == list, 'results is not an array of objects'
+        annsQuesIds = [ann['question_id'] for ann in anns]
+        assert set(annsQuesIds) == set(self.getQuesIds()), \
+        'Results do not correspond to current VQA set. Either the results do not have predictions for all question ids in annotation file or there is at least one question id that does not belong to the question ids in the annotation file.'
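+        # copy ground-truth metadata (image_id, question_type, answer_type) onto each result entry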
+ for ann in anns: + quesId = ann['question_id'] + if res.dataset['task_type'] == 'Multiple Choice': + assert ann['answer'] in self.qqa[quesId]['multiple_choices'], 'predicted answer is not one of the multiple choices' + qaAnn = self.qa[quesId] + ann['image_id'] = qaAnn['image_id'] + ann['question_type'] = qaAnn['question_type'] + ann['answer_type'] = qaAnn['answer_type'] + print('DONE (t=%0.2fs)'%((datetime.datetime.utcnow() - time_t).total_seconds())) + + res.dataset['annotations'] = anns + res.createIndex() + return res diff --git a/vqaTools/vqaEval.py b/vqaTools/vqaEval.py new file mode 100644 index 0000000000000000000000000000000000000000..6b66f981c1edf6646e2adbafa5adc86ddc80cba2 --- /dev/null +++ b/vqaTools/vqaEval.py @@ -0,0 +1,184 @@ +# coding=utf-8 + +__author__='aagrawal' + +# This code is based on the code written by Tsung-Yi Lin for MSCOCO Python API available at the following link: +# (https://github.com/tylin/coco-caption/blob/master/pycocoevalcap/eval.py). +import sys +import re + +class VQAEval: + def __init__(self, vqa, vqaRes, n=2): + self.n = n + self.accuracy = {} + self.evalQA = {} + self.evalQuesType = {} + self.evalAnsType = {} + self.vqa = vqa + self.vqaRes = vqaRes + self.params = {'question_id': vqa.getQuesIds()} + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", "couldnt": "couldn't", + "couldn'tve": "couldn't've", "couldnt've": "couldn't've", "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", "hed": "he'd", "hed've": "he'd've", + "he'dve": "he'd've", "hes": "he's", "howd": "how'd", "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", + "Im": "I'm", "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", "itll": "it'll", "let's": "let's", + "maam": "ma'am", "mightnt": "mightn't", "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", "oclock": "o'clock", "oughtnt": "oughtn't", + "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", + "somebody'd": "somebodyd", "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", "somebodyll": "somebody'll", + "somebodys": "somebody's", "someoned": "someone'd", "someoned've": "someone'd've", "someone'dve": "someone'd've", + "someonell": "someone'll", "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", "thered": "there'd", "thered've": "there'd've", + "there'dve": "there'd've", "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", "twas": "'twas", "wasnt": "wasn't", + "wed've": "we'd've", "we'dve": "we'd've", "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", "wheres": "where's", "whereve": "where've", + "whod": "who'd", "whod've": "who'd've", "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", "whyll": 
"why'll", + "whyre": "why're", "whys": "why's", "wont": "won't", "wouldve": "would've", "wouldnt": "wouldn't", "wouldnt've": "wouldn't've", + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", "yall'd've": "y'all'd've", + "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", + "youll": "you'll", "youre": "you're", "youve": "you've"} + self.manualMap = { 'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + self.articles = ['a', + 'an', + 'the' + ] + + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + + def evaluate(self, quesIds=None): + if quesIds == None: + quesIds = [quesId for quesId in self.params['question_id']] + gts = {} + res = {} + for quesId in quesIds: + gts[quesId] = self.vqa.qa[quesId] + res[quesId] = self.vqaRes.qa[quesId] + + # ================================================= + # Compute accuracy + # ================================================= + accQA = [] + accQuesType = {} + accAnsType = {} + print ("computing accuracy") + step = 0 + for quesId in quesIds: + resAns = res[quesId]['answer'] + resAns = resAns.replace('\n', ' ') + resAns = resAns.replace('\t', ' ') + resAns = resAns.strip() + resAns = self.processPunctuation(resAns) + resAns = self.processDigitArticle(resAns) + gtAcc = [] + gtAnswers = [ans['answer'] for ans in gts[quesId]['answers']] + if len(set(gtAnswers)) > 1: + for ansDic in gts[quesId]['answers']: + ansDic['answer'] = self.processPunctuation(ansDic['answer']) + for gtAnsDatum in gts[quesId]['answers']: + otherGTAns = [item for item in gts[quesId]['answers'] if item!=gtAnsDatum] + matchingAns = [item for item in otherGTAns if item['answer']==resAns] + acc = min(1, float(len(matchingAns))/3) + gtAcc.append(acc) + quesType = gts[quesId]['question_type'] + ansType = gts[quesId]['answer_type'] + avgGTAcc = float(sum(gtAcc))/len(gtAcc) + accQA.append(avgGTAcc) + if quesType not in accQuesType: + accQuesType[quesType] = [] + accQuesType[quesType].append(avgGTAcc) + if ansType not in accAnsType: + accAnsType[ansType] = [] + accAnsType[ansType].append(avgGTAcc) + self.setEvalQA(quesId, avgGTAcc) + self.setEvalQuesType(quesId, quesType, avgGTAcc) + self.setEvalAnsType(quesId, ansType, avgGTAcc) + if step%100 == 0: + self.updateProgress(step/float(len(quesIds))) + step = step + 1 + + self.setAccuracy(accQA, accQuesType, accAnsType) + print ("Done computing accuracy") + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", + outText, + re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText + + def setAccuracy(self, accQA, accQuesType, accAnsType): + 
# aggregate per-question accuracies into overall / per-type percentages (0-100, rounded to self.n decimals)
+        self.accuracy['overall'] = round(100*float(sum(accQA))/len(accQA), self.n)
+        self.accuracy['perQuestionType'] = {quesType: round(100*float(sum(accQuesType[quesType]))/len(accQuesType[quesType]), self.n) for quesType in accQuesType}
+        self.accuracy['perAnswerType'] = {ansType: round(100*float(sum(accAnsType[ansType]))/len(accAnsType[ansType]), self.n) for ansType in accAnsType}
+
+    def setEvalQA(self, quesId, acc):
+        self.evalQA[quesId] = round(100*acc, self.n)
+
+    def setEvalQuesType(self, quesId, quesType, acc):
+        if quesType not in self.evalQuesType:
+            self.evalQuesType[quesType] = {}
+        self.evalQuesType[quesType][quesId] = round(100*acc, self.n)
+
+    def setEvalAnsType(self, quesId, ansType, acc):
+        if ansType not in self.evalAnsType:
+            self.evalAnsType[ansType] = {}
+        self.evalAnsType[ansType][quesId] = round(100*acc, self.n)
+
+    def updateProgress(self, progress):
+        barLength = 20
+        status = ""
+        if isinstance(progress, int):
+            progress = float(progress)
+        if not isinstance(progress, float):
+            progress = 0
+            status = "error: progress var must be float\r\n"
+        if progress < 0:
+            progress = 0
+            status = "Halt...\r\n"
+        if progress >= 1:
+            progress = 1
+            status = "Done...\r\n"
+        block = int(round(barLength*progress))
+        text = "\rFinished Percent: [{0}] {1}% {2}".format("#"*block + "-"*(barLength-block), int(progress*100), status)
+        sys.stdout.write(text)
+        sys.stdout.flush()
\ No newline at end of file
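
The VQA accuracy pipeline above is driven in the standard way: build the ground-truth index, wrap the model predictions with `loadRes`, then evaluate. A minimal usage sketch (the file paths are placeholders, not files shipped in this diff):

```python
from vqaTools.vqa import VQA
from vqaTools.vqaEval import VQAEval

vqa = VQA('annotations.json', 'questions.json')          # ground truth
vqaRes = vqa.loadRes('results.json', 'questions.json')   # model predictions
vqaEval = VQAEval(vqa, vqaRes, n=2)                      # round accuracies to 2 decimals
vqaEval.evaluate()
print(vqaEval.accuracy['overall'], vqaEval.accuracy['perAnswerType'])
```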