diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..58e81ee3a7ca8d82d12c89dbca59114c02615460 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +TensorFlowTTS/examples/tacotron2/fig/alignment.gif filter=lfs diff=lfs merge=lfs -text diff --git a/TensorFlowTTS/.eggs/README.txt b/TensorFlowTTS/.eggs/README.txt new file mode 100644 index 0000000000000000000000000000000000000000..5d01668824f45c3a6683e12d1b9048bb1d273041 --- /dev/null +++ b/TensorFlowTTS/.eggs/README.txt @@ -0,0 +1,6 @@ +This directory contains eggs that were downloaded by setuptools to build, test, and run plug-ins. + +This directory caches those eggs to prevent repeated downloads. + +However, it is safe to delete this directory. + diff --git a/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/LICENSE b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..353924be0e59b9ad7e6c22848c2189398481821d --- /dev/null +++ b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/PKG-INFO b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/PKG-INFO new file mode 100644 index 0000000000000000000000000000000000000000..101d73f015decf4ec6a611fe1dfc015c9b154aef --- /dev/null +++ b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/PKG-INFO @@ -0,0 +1,189 @@ +Metadata-Version: 2.1 +Name: pytest-runner +Version: 6.0.0 +Summary: Invoke py.test as distutils command with dependency resolution +Home-page: https://github.com/pytest-dev/pytest-runner/ +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Framework :: Pytest +Requires-Python: >=3.7 +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-virtualenv ; extra == 'testing' +Requires-Dist: types-setuptools ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/pytest-runner.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/pytest-runner.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/pytest-runner + +.. image:: https://github.com/pytest-dev/pytest-runner/workflows/tests/badge.svg + :target: https://github.com/pytest-dev/pytest-runner/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest +.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest + +.. 
image:: https://img.shields.io/badge/skeleton-2022-informational + :target: https://blog.jaraco.com/skeleton + +.. image:: https://tidelift.com/badges/package/pypi/pytest-runner + :target: https://tidelift.com/subscription/pkg/pypi-pytest-runner?utm_source=pypi-pytest-runner&utm_medium=readme + +Setup scripts can use pytest-runner to add setup.py test support for pytest +runner. + +Deprecation Notice +================== + +pytest-runner depends on deprecated features of setuptools and relies on features that break security +mechanisms in pip. For example 'setup_requires' and 'tests_require' bypass ``pip --require-hashes``. +See also `pypa/setuptools#1684 `_. + +It is recommended that you: + +- Remove ``'pytest-runner'`` from your ``setup_requires``, preferably removing the ``setup_requires`` option. +- Remove ``'pytest'`` and any other testing requirements from ``tests_require``, preferably removing the ``tests_requires`` option. +- Select a tool to bootstrap and then run tests such as tox. + +Usage +===== + +- Add 'pytest-runner' to your 'setup_requires'. Pin to '>=2.0,<3dev' (or + similar) to avoid pulling in incompatible versions. +- Include 'pytest' and any other testing requirements to 'tests_require'. +- Invoke tests with ``setup.py pytest``. +- Pass ``--index-url`` to have test requirements downloaded from an alternate + index URL (unnecessary if specified for easy_install in setup.cfg). +- Pass additional py.test command-line options using ``--addopts``. +- Set permanent options for the ``python setup.py pytest`` command (like ``index-url``) + in the ``[pytest]`` section of ``setup.cfg``. +- Set permanent options for the ``py.test`` run (like ``addopts`` or ``pep8ignore``) in the ``[pytest]`` + section of ``pytest.ini`` or ``tox.ini`` or put them in the ``[tool:pytest]`` + section of ``setup.cfg``. See `pytest issue 567 + `_. 
+- Optionally, set ``test=pytest`` in the ``[aliases]`` section of ``setup.cfg`` + to cause ``python setup.py test`` to invoke pytest. + +Example +======= + +The most simple usage looks like this in setup.py:: + + setup( + setup_requires=[ + 'pytest-runner', + ], + tests_require=[ + 'pytest', + ], + ) + +Additional dependencies require to run the tests (e.g. mock or pytest +plugins) may be added to tests_require and will be downloaded and +required by the session before invoking pytest. + +Follow `this search on github +`_ +for examples of real-world usage. + +Standalone Example +================== + +This technique is deprecated - if you have standalone scripts +you wish to invoke with dependencies, `use pip-run +`_. + +Although ``pytest-runner`` is typically used to add pytest test +runner support to maintained packages, ``pytest-runner`` may +also be used to create standalone tests. Consider `this example +failure `_, +reported in `jsonpickle #117 +`_ +or `this MongoDB test +`_ +demonstrating a technique that works even when dependencies +are required in the test. + +Either example file may be cloned or downloaded and simply run on +any system with Python and Setuptools. It will download the +specified dependencies and run the tests. Afterward, the the +cloned directory can be removed and with it all trace of +invoking the test. No other dependencies are needed and no +system configuration is altered. + +Then, anyone trying to replicate the failure can do so easily +and with all the power of pytest (rewritten assertions, +rich comparisons, interactive debugging, extensibility through +plugins, etc). + +As a result, the communication barrier for describing and +replicating failures is made almost trivially low. + +Considerations +============== + +Conditional Requirement +----------------------- + +Because it uses Setuptools setup_requires, pytest-runner will install itself +on every invocation of setup.py. 
In some cases, this causes delays for +invocations of setup.py that will never invoke pytest-runner. To help avoid +this contingency, consider requiring pytest-runner only when pytest +is invoked:: + + needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv) + pytest_runner = ['pytest-runner'] if needs_pytest else [] + + # ... + + setup( + #... + setup_requires=[ + #... (other setup requirements) + ] + pytest_runner, + ) + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ + +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. + + diff --git a/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/RECORD b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..676c877774e4767d18df3130551ab3b4158bb602 --- /dev/null +++ b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/RECORD @@ -0,0 +1,7 @@ +ptr/__init__.py,sha256=0UfzhCooVgCNTBwVEOPOVGEPck4pnl_6PTfsC-QzNGM,6730 +pytest_runner-6.0.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 +pytest_runner-6.0.0.dist-info/METADATA,sha256=xa7jfGba2yXK6_27FdHmVJzb9SifCjm_EBVxNXC8R6w,7381 +pytest_runner-6.0.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +pytest_runner-6.0.0.dist-info/entry_points.txt,sha256=BqezBqeO63XyzSYmHYE58gKEFIjJUd-XdsRQkXHy2ig,58 +pytest_runner-6.0.0.dist-info/top_level.txt,sha256=DPzHbWlKG8yq8EOD5UgEvVNDWeJRPyimrwfShwV6Iuw,4 +pytest_runner-6.0.0.dist-info/RECORD,, diff --git a/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/WHEEL b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/WHEEL new 
file mode 100644 index 0000000000000000000000000000000000000000..becc9a66ea739ba941d48a749e248761cc6e658a --- /dev/null +++ b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/entry_points.txt b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..08606705d825d4ad98f0a7acfdc654d1e6c1b8fa --- /dev/null +++ b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/entry_points.txt @@ -0,0 +1,3 @@ +[distutils.commands] +ptr = ptr:PyTest +pytest = ptr:PyTest diff --git a/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/requires.txt b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/requires.txt new file mode 100644 index 0000000000000000000000000000000000000000..153518886b4e238ea0fecf91de9d43e7f4ff31f9 --- /dev/null +++ b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/requires.txt @@ -0,0 +1,17 @@ + +[docs] +sphinx +jaraco.packaging>=9 +rst.linker>=1.9 +jaraco.tidelift>=1.4 + +[testing] +pytest>=6 +pytest-checkdocs>=2.4 +pytest-flake8 +pytest-cov +pytest-enabler>=1.0.1 +pytest-virtualenv +types-setuptools +pytest-black>=0.3.7 +pytest-mypy>=0.9.1 diff --git a/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/top_level.txt b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..e9148ae387cc078aec4d97be87acde54320781de --- /dev/null +++ b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/EGG-INFO/top_level.txt @@ -0,0 +1 @@ +ptr diff --git a/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/ptr/__init__.py b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/ptr/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..41192fa83ea9f3194d5a31f429e7fd5515c7529d --- /dev/null +++ b/TensorFlowTTS/.eggs/pytest_runner-6.0.0-py3.11.egg/ptr/__init__.py @@ -0,0 +1,216 @@ +""" +Implementation +""" + +import os as _os +import shlex as _shlex +import contextlib as _contextlib +import sys as _sys +import operator as _operator +import itertools as _itertools +import warnings as _warnings + +import pkg_resources +import setuptools.command.test as orig +from setuptools import Distribution + + +@_contextlib.contextmanager +def _save_argv(repl=None): + saved = _sys.argv[:] + if repl is not None: + _sys.argv[:] = repl + try: + yield saved + finally: + _sys.argv[:] = saved + + +class CustomizedDist(Distribution): + + allow_hosts = None + index_url = None + + def fetch_build_egg(self, req): + """Specialized version of Distribution.fetch_build_egg + that respects respects allow_hosts and index_url.""" + from setuptools.command.easy_install import easy_install + + dist = Distribution({'script_args': ['easy_install']}) + dist.parse_config_files() + opts = dist.get_option_dict('easy_install') + keep = ( + 'find_links', + 'site_dirs', + 'index_url', + 'optimize', + 'site_dirs', + 'allow_hosts', + ) + for key in list(opts): + if key not in keep: + del opts[key] # don't use any other settings + if self.dependency_links: + links = self.dependency_links[:] + if 'find_links' in opts: + links = opts['find_links'][1].split() + links + opts['find_links'] = ('setup', links) + if self.allow_hosts: + opts['allow_hosts'] = ('test', self.allow_hosts) + if self.index_url: + opts['index_url'] = ('test', self.index_url) + install_dir_func = getattr(self, 'get_egg_cache_dir', _os.getcwd) + install_dir = install_dir_func() + cmd = easy_install( + dist, + args=["x"], + install_dir=install_dir, + exclude_scripts=True, + always_copy=False, + build_directory=None, + editable=False, + upgrade=False, + multi_version=True, + no_report=True, + user=False, + ) + cmd.ensure_finalized() + 
return cmd.easy_install(req) + + +class PyTest(orig.test): + """ + >>> import setuptools + >>> dist = setuptools.Distribution() + >>> cmd = PyTest(dist) + """ + + user_options = [ + ('extras', None, "Install (all) setuptools extras when running tests"), + ( + 'index-url=', + None, + "Specify an index url from which to retrieve dependencies", + ), + ( + 'allow-hosts=', + None, + "Whitelist of comma-separated hosts to allow " + "when retrieving dependencies", + ), + ( + 'addopts=', + None, + "Additional options to be passed verbatim to the pytest runner", + ), + ] + + def initialize_options(self): + self.extras = False + self.index_url = None + self.allow_hosts = None + self.addopts = [] + self.ensure_setuptools_version() + + @staticmethod + def ensure_setuptools_version(): + """ + Due to the fact that pytest-runner is often required (via + setup-requires directive) by toolchains that never invoke + it (i.e. they're only installing the package, not testing it), + instead of declaring the dependency in the package + metadata, assert the requirement at run time. + """ + pkg_resources.require('setuptools>=27.3') + + def finalize_options(self): + if self.addopts: + self.addopts = _shlex.split(self.addopts) + + @staticmethod + def marker_passes(marker): + """ + Given an environment marker, return True if the marker is valid + and matches this environment. + """ + return ( + not marker + or not pkg_resources.invalid_marker(marker) + and pkg_resources.evaluate_marker(marker) + ) + + def install_dists(self, dist): + """ + Extend install_dists to include extras support + """ + return _itertools.chain( + orig.test.install_dists(dist), self.install_extra_dists(dist) + ) + + def install_extra_dists(self, dist): + """ + Install extras that are indicated by markers or + install all extras if '--extras' is indicated. 
+ """ + extras_require = dist.extras_require or {} + + spec_extras = ( + (spec.partition(':'), reqs) for spec, reqs in extras_require.items() + ) + matching_extras = ( + reqs + for (name, sep, marker), reqs in spec_extras + # include unnamed extras or all if self.extras indicated + if (not name or self.extras) + # never include extras that fail to pass marker eval + and self.marker_passes(marker) + ) + results = list(map(dist.fetch_build_eggs, matching_extras)) + return _itertools.chain.from_iterable(results) + + @staticmethod + def _warn_old_setuptools(): + msg = ( + "pytest-runner will stop working on this version of setuptools; " + "please upgrade to setuptools 30.4 or later or pin to " + "pytest-runner < 5." + ) + ver_str = pkg_resources.get_distribution('setuptools').version + ver = pkg_resources.parse_version(ver_str) + if ver < pkg_resources.parse_version('30.4'): + _warnings.warn(msg) + + def run(self): + """ + Override run to ensure requirements are available in this session (but + don't install them anywhere). + """ + self._warn_old_setuptools() + dist = CustomizedDist() + for attr in 'allow_hosts index_url'.split(): + setattr(dist, attr, getattr(self, attr)) + for attr in ( + 'dependency_links install_requires tests_require extras_require ' + ).split(): + setattr(dist, attr, getattr(self.distribution, attr)) + installed_dists = self.install_dists(dist) + if self.dry_run: + self.announce('skipping tests (dry run)') + return + paths = map(_operator.attrgetter('location'), installed_dists) + with self.paths_on_pythonpath(paths): + with self.project_on_sys_path(): + return self.run_tests() + + @property + def _argv(self): + return ['pytest'] + self.addopts + + def run_tests(self): + """ + Invoke pytest, replacing argv. Return result code. 
+ """ + with _save_argv(_sys.argv[:1] + self.addopts): + result_code = __import__('pytest').main() + if result_code: + raise SystemExit(result_code) diff --git a/TensorFlowTTS/.gitattributes b/TensorFlowTTS/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..327ba52a0eefacdcd8ce408556858f640b11b328 --- /dev/null +++ b/TensorFlowTTS/.gitattributes @@ -0,0 +1 @@ +*.ipynb linguist-language=Python \ No newline at end of file diff --git a/TensorFlowTTS/.github/stale.yml b/TensorFlowTTS/.github/stale.yml new file mode 100644 index 0000000000000000000000000000000000000000..23dd2b7a2f6343adfa0cb70bc686e134c2e2ea99 --- /dev/null +++ b/TensorFlowTTS/.github/stale.yml @@ -0,0 +1,16 @@ +# Number of days of inactivity before an issue becomes stale +daysUntilStale: 60 +# Number of days of inactivity before a stale issue is closed +daysUntilClose: 7 +# Issues with these labels will never be considered stale +exemptLabels: + - pinned + - security +# Label to use when marking an issue as stale +staleLabel: wontfix +# Comment to post when marking an issue as stale. Set to `false` to disable +markComment: > + This issue has been automatically marked as stale because it has not had + recent activity. It will be closed if no further activity occurs. +# Comment to post when closing a stale issue. 
Set to `false` to disable +closeComment: false diff --git a/TensorFlowTTS/.github/workflows/ci.yaml b/TensorFlowTTS/.github/workflows/ci.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e38bd2e7f0365b2477084aada1d45b0292e7b7be --- /dev/null +++ b/TensorFlowTTS/.github/workflows/ci.yaml @@ -0,0 +1,50 @@ +name: CI + +on: + push: + branches: + - master + pull_request: + branches: + - master + schedule: + - cron: 0 0 * * 1 + +jobs: + linter_and_test: + runs-on: ubuntu-18.04 + strategy: + max-parallel: 10 + matrix: + python-version: [3.7] + tensorflow-version: [2.7.0] + steps: + - uses: actions/checkout@master + - uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x64' + - uses: actions/cache@v1 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.pytorch-version }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.tensorflow-version }}-pip- + - name: Install dependencies + run: | + # install python modules + python -m pip install --upgrade pip + pip install -q -U numpy + pip install git+https://github.com/repodiac/german_transliterate.git#egg=german_transliterate + pip install -q tensorflow-gpu==${{ matrix.tensorflow-version }} + pip install -q -e . + pip install -q -e .[test] + pip install typing_extensions + sudo apt-get install libsndfile1-dev + python -m pip install black + - name: black + run: | + python -m black . 
+ - name: Pytest + run: | + pytest test \ No newline at end of file diff --git a/TensorFlowTTS/.gitignore b/TensorFlowTTS/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..9a94579136dba856ecb88b71273aba8658b0b2f9 --- /dev/null +++ b/TensorFlowTTS/.gitignore @@ -0,0 +1,46 @@ + +# general +*~ +*.pyc +\#*\# +.\#* +*DS_Store +out.txt +TensorFlowTTS.egg-info/ +doc/_build +slurm-*.out +tmp* +.eggs/ +.hypothesis/ +.idea +.backup/ +.pytest_cache/ +__pycache__/ +.coverage* +coverage.xml* +.vscode* +.nfs* +.ipynb_checkpoints +ljspeech +*.h5 +*.npy +./*.wav +!docker-compose.yml +/Pipfile +/Pipfile.lock +/datasets +/examples/tacotron2/exp/ +/temp/ +LibriTTS/ +dataset/ +mfa/ +kss/ +baker/ +libritts/ +dump_baker/ +dump_ljspeech/ +dump_kss/ +dump_libritts/ +/notebooks/test_saved/ +build/ +dist/ \ No newline at end of file diff --git a/TensorFlowTTS/LICENSE b/TensorFlowTTS/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..4c9ad980682246bd6ab0d2bae82232be6dbdcbd4 --- /dev/null +++ b/TensorFlowTTS/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/TensorFlowTTS/README.md b/TensorFlowTTS/README.md new file mode 100644 index 0000000000000000000000000000000000000000..93e6a30297ad9cad1821f726931a8588ce89b849 --- /dev/null +++ b/TensorFlowTTS/README.md @@ -0,0 +1,319 @@ +

+

:yum: TensorFlowTTS +

+ + Build + + + GitHub + + + Colab + +

+

+

+

Real-Time State-of-the-art Speech Synthesis for TensorFlow 2 +

+ +:zany_face: TensorFlowTTS provides real-time state-of-the-art speech synthesis architectures such as Tacotron-2, Melgan, Multiband-Melgan, FastSpeech, FastSpeech2 based-on TensorFlow 2. With Tensorflow 2, we can speed-up training/inference progress, optimizer further by using [fake-quantize aware](https://www.tensorflow.org/model_optimization/guide/quantization/training_comprehensive_guide) and [pruning](https://www.tensorflow.org/model_optimization/guide/pruning/pruning_with_keras), make TTS models can be run faster than real-time and be able to deploy on mobile devices or embedded systems. + +## What's new +- 2021/08/18 (**NEW!**) Integrated to [Huggingface Spaces](https://huggingface.co/spaces) with [Gradio](https://github.com/gradio-app/gradio). See [Gradio Web Demo](https://huggingface.co/spaces/akhaliq/TensorFlowTTS). +- 2021/08/12 (**NEW!**) Support French TTS (Tacotron2, Multiband MelGAN). Pls see the [colab](https://colab.research.google.com/drive/1jd3u46g-fGQw0rre8fIwWM9heJvrV1c0?usp=sharing). Many Thanks [Samuel Delalez](https://github.com/samuel-lunii) +- 2021/06/01 Integrated with [Huggingface Hub](https://huggingface.co/tensorspeech). See the [PR](https://github.com/TensorSpeech/TensorFlowTTS/pull/555). Thanks [patrickvonplaten](https://github.com/patrickvonplaten) and [osanseviero](https://github.com/osanseviero) +- 2021/03/18 Support IOS for FastSpeech2 and MB MelGAN. Thanks [kewlbear](https://github.com/kewlbear). See [here](https://github.com/TensorSpeech/TensorFlowTTS/tree/master/examples/ios) +- 2021/01/18 Support TFLite C++ inference. Thanks [luan78zaoha](https://github.com/luan78zaoha). See [here](https://github.com/TensorSpeech/TensorFlowTTS/tree/master/examples/cpptflite) +- 2020/12/02 Support German TTS with [Thorsten dataset](https://github.com/thorstenMueller/deep-learning-german-tts). See the [Colab](https://colab.research.google.com/drive/1W0nSFpsz32M0OcIkY9uMOiGrLTPKVhTy?usp=sharing). 
Thanks [thorstenMueller](https://github.com/thorstenMueller) and [monatis](https://github.com/monatis) +- 2020/11/24 Add HiFi-GAN vocoder. See [here](https://github.com/TensorSpeech/TensorFlowTTS/tree/master/examples/hifigan) +- 2020/11/19 Add Multi-GPU gradient accumulator. See [here](https://github.com/TensorSpeech/TensorFlowTTS/pull/377) +- 2020/08/23 Add Parallel WaveGAN tensorflow implementation. See [here](https://github.com/TensorSpeech/TensorFlowTTS/tree/master/examples/parallel_wavegan) +- 2020/08/20 Add C++ inference code. Thank [@ZDisket](https://github.com/ZDisket). See [here](https://github.com/TensorSpeech/TensorFlowTTS/tree/master/examples/cppwin) +- 2020/08/18 Update [new base processor](https://github.com/TensorSpeech/TensorFlowTTS/blob/master/tensorflow_tts/processor/base_processor.py). Add [AutoProcessor](https://github.com/TensorSpeech/TensorFlowTTS/blob/master/tensorflow_tts/inference/auto_processor.py) and [pretrained processor](https://github.com/TensorSpeech/TensorFlowTTS/blob/master/tensorflow_tts/processor/pretrained/) json file +- 2020/08/14 Support Chinese TTS. Pls see the [colab](https://colab.research.google.com/drive/1YpSHRBRPBI7cnTkQn1UcVTWEQVbsUm1S?usp=sharing). Thank [@azraelkuan](https://github.com/azraelkuan) +- 2020/08/05 Support Korean TTS. Pls see the [colab](https://colab.research.google.com/drive/1ybWwOS5tipgPFttNulp77P6DAB5MtiuN?usp=sharing). Thank [@crux153](https://github.com/crux153) +- 2020/07/17 Support MultiGPU for all Trainer +- 2020/07/05 Support Convert Tacotron-2, FastSpeech to Tflite. Pls see the [colab](https://colab.research.google.com/drive/1HudLLpT9CQdh2k04c06bHUwLubhGTWxA?usp=sharing). Thank @jaeyoo from the TFlite team for his support +- 2020/06/20 [FastSpeech2](https://arxiv.org/abs/2006.04558) implementation with Tensorflow is supported. 
+- 2020/06/07 [Multi-band MelGAN (MB MelGAN)](https://github.com/tensorspeech/TensorFlowTTS/blob/master/examples/multiband_melgan/) implementation with Tensorflow is supported + + +## Features +- High performance on Speech Synthesis. +- Be able to fine-tune on other languages. +- Fast, Scalable, and Reliable. +- Suitable for deployment. +- Easy to implement a new model, based-on abstract class. +- Mixed precision to speed-up training if possible. +- Support Single/Multi GPU gradient Accumulate. +- Support both Single/Multi GPU in base trainer class. +- TFlite conversion for all supported models. +- Android example. +- Support many languages (currently, we support Chinese, Korean, English, French and German) +- Support C++ inference. +- Support Convert weight for some models from PyTorch to TensorFlow to accelerate speed. + +## Requirements +This repository is tested on Ubuntu 18.04 with: + +- Python 3.7+ +- Cuda 10.1 +- CuDNN 7.6.5 +- Tensorflow 2.2/2.3/2.4/2.5/2.6 +- [Tensorflow Addons](https://github.com/tensorflow/addons) >= 0.10.0 + +Different Tensorflow version should be working but not tested yet. This repo will try to work with the latest stable TensorFlow version. **We recommend you install TensorFlow 2.6.0 to training in case you want to use MultiGPU.** + +## Installation +### With pip +```bash +$ pip install TensorFlowTTS +``` +### From source +Examples are included in the repository but are not shipped with the framework. Therefore, to run the latest version of examples, you need to install the source below. +```bash +$ git clone https://github.com/TensorSpeech/TensorFlowTTS.git +$ cd TensorFlowTTS +$ pip install . +``` +If you want to upgrade the repository and its dependencies: +```bash +$ git pull +$ pip install --upgrade . +``` + +# Supported Model architectures +TensorFlowTTS currently provides the following architectures: + +1. 
**MelGAN** released with the paper [MelGAN: Generative Adversarial Networks for Conditional Waveform Synthesis](https://arxiv.org/abs/1910.06711) by Kundan Kumar, Rithesh Kumar, Thibault de Boissiere, Lucas Gestin, Wei Zhen Teoh, Jose Sotelo, Alexandre de Brebisson, Yoshua Bengio, Aaron Courville. +2. **Tacotron-2** released with the paper [Natural TTS Synthesis by Conditioning WaveNet on Mel Spectrogram Predictions](https://arxiv.org/abs/1712.05884) by Jonathan Shen, Ruoming Pang, Ron J. Weiss, Mike Schuster, Navdeep Jaitly, Zongheng Yang, Zhifeng Chen, Yu Zhang, Yuxuan Wang, RJ Skerry-Ryan, Rif A. Saurous, Yannis Agiomyrgiannakis, Yonghui Wu. +3. **FastSpeech** released with the paper [FastSpeech: Fast, Robust, and Controllable Text to Speech](https://arxiv.org/abs/1905.09263) by Yi Ren, Yangjun Ruan, Xu Tan, Tao Qin, Sheng Zhao, Zhou Zhao, Tie-Yan Liu. +4. **Multi-band MelGAN** released with the paper [Multi-band MelGAN: Faster Waveform Generation for High-Quality Text-to-Speech](https://arxiv.org/abs/2005.05106) by Geng Yang, Shan Yang, Kai Liu, Peng Fang, Wei Chen, Lei Xie. +5. **FastSpeech2** released with the paper [FastSpeech 2: Fast and High-Quality End-to-End Text to Speech](https://arxiv.org/abs/2006.04558) by Yi Ren, Chenxu Hu, Xu Tan, Tao Qin, Sheng Zhao, Zhou Zhao, Tie-Yan Liu. +6. **Parallel WaveGAN** released with the paper [Parallel WaveGAN: A fast waveform generation model based on generative adversarial networks with multi-resolution spectrogram](https://arxiv.org/abs/1910.11480) by Ryuichi Yamamoto, Eunwoo Song, Jae-Min Kim. +7. **HiFi-GAN** released with the paper [HiFi-GAN: Generative Adversarial Networks for Efficient and High Fidelity Speech Synthesis](https://arxiv.org/abs/2010.05646) by Jungil Kong, Jaehyeon Kim, Jaekyoung Bae. + +We are also implementing some techniques to improve quality and convergence speed from the following papers: + +2. 
**Guided Attention Loss** released with the paper [Efficiently Trainable Text-to-Speech System Based on Deep Convolutional Networks with Guided Attention +](https://arxiv.org/abs/1710.08969) by Hideyuki Tachibana, Katsuya Uenoyama, Shunsuke Aihara. + + +# Audio Samples +Here are audio samples on the valid set. [tacotron-2](https://drive.google.com/open?id=1kaPXRdLg9gZrll9KtvH3-feOBMM8sn3_), [fastspeech](https://drive.google.com/open?id=1f69ujszFeGnIy7PMwc8AkUckhIaT2OD0), [melgan](https://drive.google.com/open?id=1mBwGVchwtNkgFsURl7g4nMiqx4gquAC2), [melgan.stft](https://drive.google.com/open?id=1xUkDjbciupEkM3N4obiJAYySTo6J9z6b), [fastspeech2](https://drive.google.com/drive/u/1/folders/1NG7oOfNuXSh7WyAoM1hI8P5BxDALY_mU), [multiband_melgan](https://drive.google.com/drive/folders/1DCV3sa6VTyoJzZmKATYvYVDUAFXlQ_Zp) + +# Tutorial End-to-End + +## Prepare Dataset + +Prepare a dataset in the following format: +``` +|- [NAME_DATASET]/ +| |- metadata.csv +| |- wavs/ +| |- file1.wav +| |- ... +``` + +Where `metadata.csv` has the following format: `id|transcription`. This is a ljspeech-like format; you can ignore preprocessing steps if you have other format datasets. + +Note that `NAME_DATASET` should be `[ljspeech/kss/baker/libritts/synpaflex]` for example. + +## Preprocessing + +The preprocessing has two steps: + +1. Preprocess audio features + - Convert characters to IDs + - Compute mel spectrograms + - Normalize mel spectrograms to [-1, 1] range + - Split the dataset into train and validation + - Compute the mean and standard deviation of multiple features from the **training** split +2. 
Standardize mel spectrogram based on computed statistics + +To reproduce the steps above: +``` +tensorflow-tts-preprocess --rootdir ./[ljspeech/kss/baker/libritts/thorsten/synpaflex] --outdir ./dump_[ljspeech/kss/baker/libritts/thorsten/synpaflex] --config preprocess/[ljspeech/kss/baker/thorsten/synpaflex]_preprocess.yaml --dataset [ljspeech/kss/baker/libritts/thorsten/synpaflex] +tensorflow-tts-normalize --rootdir ./dump_[ljspeech/kss/baker/libritts/thorsten/synpaflex] --outdir ./dump_[ljspeech/kss/baker/libritts/thorsten/synpaflex] --config preprocess/[ljspeech/kss/baker/libritts/thorsten/synpaflex]_preprocess.yaml --dataset [ljspeech/kss/baker/libritts/thorsten/synpaflex] +``` + +Right now we only support [`ljspeech`](https://keithito.com/LJ-Speech-Dataset/), [`kss`](https://www.kaggle.com/bryanpark/korean-single-speaker-speech-dataset), [`baker`](https://weixinxcxdb.oss-cn-beijing.aliyuncs.com/gwYinPinKu/BZNSYP.rar), [`libritts`](http://www.openslr.org/60/), [`thorsten`](https://github.com/thorstenMueller/deep-learning-german-tts) and +[`synpaflex`](https://www.ortolang.fr/market/corpora/synpaflex-corpus/) for dataset argument. In the future, we intend to support more datasets. + +**Note**: To run `libritts` preprocessing, please first read the instruction in [examples/fastspeech2_libritts](https://github.com/TensorSpeech/TensorFlowTTS/tree/master/examples/fastspeech2_libritts). We need to reformat it first before run preprocessing. + +**Note**: To run `synpaflex` preprocessing, please first run the notebook [notebooks/prepare_synpaflex.ipynb](https://github.com/TensorSpeech/TensorFlowTTS/tree/master/notebooks/prepare_synpaflex.ipynb). We need to reformat it first before run preprocessing. + +After preprocessing, the structure of the project folder should be: +``` +|- [NAME_DATASET]/ +| |- metadata.csv +| |- wav/ +| |- file1.wav +| |- ... +|- dump_[ljspeech/kss/baker/libritts/thorsten]/ +| |- train/ +| |- ids/ +| |- LJ001-0001-ids.npy +| |- ... 
+| |- raw-feats/ +| |- LJ001-0001-raw-feats.npy +| |- ... +| |- raw-f0/ +| |- LJ001-0001-raw-f0.npy +| |- ... +| |- raw-energies/ +| |- LJ001-0001-raw-energy.npy +| |- ... +| |- norm-feats/ +| |- LJ001-0001-norm-feats.npy +| |- ... +| |- wavs/ +| |- LJ001-0001-wave.npy +| |- ... +| |- valid/ +| |- ids/ +| |- LJ001-0009-ids.npy +| |- ... +| |- raw-feats/ +| |- LJ001-0009-raw-feats.npy +| |- ... +| |- raw-f0/ +| |- LJ001-0001-raw-f0.npy +| |- ... +| |- raw-energies/ +| |- LJ001-0001-raw-energy.npy +| |- ... +| |- norm-feats/ +| |- LJ001-0009-norm-feats.npy +| |- ... +| |- wavs/ +| |- LJ001-0009-wave.npy +| |- ... +| |- stats.npy +| |- stats_f0.npy +| |- stats_energy.npy +| |- train_utt_ids.npy +| |- valid_utt_ids.npy +|- examples/ +| |- melgan/ +| |- fastspeech/ +| |- tacotron2/ +| ... +``` + +- `stats.npy` contains the mean and std from the training split mel spectrograms +- `stats_energy.npy` contains the mean and std of energy values from the training split +- `stats_f0.npy` contains the mean and std of F0 values in the training split +- `train_utt_ids.npy` / `valid_utt_ids.npy` contains training and validation utterances IDs respectively + +We use suffix (`ids`, `raw-feats`, `raw-energy`, `raw-f0`, `norm-feats`, and `wave`) for each input type. + + +**IMPORTANT NOTES**: +- This preprocessing step is based on [ESPnet](https://github.com/espnet/espnet) so you can combine all models here with other models from ESPnet repository. +- Regardless of how your dataset is formatted, the final structure of the `dump` folder **SHOULD** follow the above structure to be able to use the training script, or you can modify it by yourself 😄. + +## Training models + +To know how to train model from scratch or fine-tune with other datasets/languages, please see detail at example directory. 
+ +- For Tacotron-2 tutorial, pls see [examples/tacotron2](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/tacotron2) +- For FastSpeech tutorial, pls see [examples/fastspeech](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/fastspeech) +- For FastSpeech2 tutorial, pls see [examples/fastspeech2](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/fastspeech2) +- For FastSpeech2 + MFA tutorial, pls see [examples/fastspeech2_libritts](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/fastspeech2_libritts) +- For MelGAN tutorial, pls see [examples/melgan](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/melgan) +- For MelGAN + STFT Loss tutorial, pls see [examples/melgan.stft](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/melgan.stft) +- For Multiband-MelGAN tutorial, pls see [examples/multiband_melgan](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/multiband_melgan) +- For Parallel WaveGAN tutorial, pls see [examples/parallel_wavegan](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/parallel_wavegan) +- For Multiband-MelGAN Generator + HiFi-GAN tutorial, pls see [examples/multiband_melgan_hf](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/multiband_melgan_hf) +- For HiFi-GAN tutorial, pls see [examples/hifigan](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/hifigan) +# Abstract Class Explanation + +## Abstract DataLoader Tensorflow-based dataset + +A detailed implementation of the abstract dataset class from [tensorflow_tts/dataset/abstract_dataset](https://github.com/tensorspeech/TensorFlowTTS/blob/master/tensorflow_tts/datasets/abstract_dataset.py). There are some functions you need to override and understand: + +1. **get_args**: This function returns the arguments for the **generator** class, normally utt_ids. +2. 
**generator**: This function takes inputs from the **get_args** function and returns inputs for models. **Note that we return a dictionary for all generator functions with the keys that exactly match with the model's parameters because base_trainer will use model(\*\*batch) to do forward step.** +3. **get_output_dtypes**: This function needs to return dtypes for each element from the **generator** function. +4. **get_len_dataset**: Return the length of the dataset, normally len(utt_ids). + +**IMPORTANT NOTES**: + +- A pipeline of creating a dataset should be: cache -> shuffle -> map_fn -> get_batch -> prefetch. +- If you do shuffle before cache, the dataset won't shuffle when it is re-iterated over. +- You should apply map_fn to make each element returned from the **generator** function have the same length before getting a batch and feeding it into a model. + +Some examples of using this **abstract_dataset** are [tacotron_dataset.py](https://github.com/tensorspeech/TensorFlowTTS/blob/master/examples/tacotron2/tacotron_dataset.py), [fastspeech_dataset.py](https://github.com/tensorspeech/TensorFlowTTS/blob/master/examples/fastspeech/fastspeech_dataset.py), [melgan_dataset.py](https://github.com/tensorspeech/TensorFlowTTS/blob/master/examples/melgan/audio_mel_dataset.py), [fastspeech2_dataset.py](https://github.com/TensorSpeech/TensorFlowTTS/blob/master/examples/fastspeech2/fastspeech2_dataset.py) + + +## Abstract Trainer Class + +A detailed implementation of base_trainer from [tensorflow_tts/trainer/base_trainer.py](https://github.com/tensorspeech/TensorFlowTTS/blob/master/tensorflow_tts/trainers/base_trainer.py). 
It includes [Seq2SeqBasedTrainer](https://github.com/tensorspeech/TensorFlowTTS/blob/master/tensorflow_tts/trainers/base_trainer.py#L265) and [GanBasedTrainer](https://github.com/tensorspeech/TensorFlowTTS/blob/master/tensorflow_tts/trainers/base_trainer.py#L149), which inherit from [BasedTrainer](https://github.com/tensorspeech/TensorFlowTTS/blob/master/tensorflow_tts/trainers/base_trainer.py#L16). All trainers support both single/multi GPU. There are some functions you **MUST** override when implementing a new_trainer: + +- **compile**: This function aims to define the models and losses. +- **generate_and_save_intermediate_result**: This function will save intermediate results such as: plot alignment, save generated audio, plot mel-spectrogram ... +- **compute_per_example_losses**: This function will compute per_example_loss for the model; note that all elements of the loss **MUST** have shape [batch_size]. + +All models in this repo are trained based-on **GanBasedTrainer** (see [train_melgan.py](https://github.com/tensorspeech/TensorFlowTTS/blob/master/examples/melgan/train_melgan.py), [train_melgan_stft.py](https://github.com/tensorspeech/TensorFlowTTS/blob/master/examples/melgan.stft/train_melgan_stft.py), [train_multiband_melgan.py](https://github.com/tensorspeech/TensorFlowTTS/blob/master/examples/multiband_melgan/train_multiband_melgan.py)) and **Seq2SeqBasedTrainer** (see [train_tacotron2.py](https://github.com/tensorspeech/TensorFlowTTS/blob/master/examples/tacotron2/train_tacotron2.py), [train_fastspeech.py](https://github.com/tensorspeech/TensorFlowTTS/blob/master/examples/fastspeech/train_fastspeech.py)). 
+ +# End-to-End Examples +You can know how to inference each model at [notebooks](https://github.com/tensorspeech/TensorFlowTTS/tree/master/notebooks) or see a [colab](https://colab.research.google.com/drive/1akxtrLZHKuMiQup00tzO2olCaN-y3KiD?usp=sharing) (for English), [colab](https://colab.research.google.com/drive/1ybWwOS5tipgPFttNulp77P6DAB5MtiuN?usp=sharing) (for Korean), [colab](https://colab.research.google.com/drive/1YpSHRBRPBI7cnTkQn1UcVTWEQVbsUm1S?usp=sharing) (for Chinese), [colab](https://colab.research.google.com/drive/1jd3u46g-fGQw0rre8fIwWM9heJvrV1c0?usp=sharing) (for French), [colab](https://colab.research.google.com/drive/1W0nSFpsz32M0OcIkY9uMOiGrLTPKVhTy?usp=sharing) (for German). Here is an example code for end2end inference with fastspeech2 and multi-band melgan. We uploaded all our pretrained in [HuggingFace Hub](https://huggingface.co/tensorspeech). + +```python +import numpy as np +import soundfile as sf +import yaml + +import tensorflow as tf + +from tensorflow_tts.inference import TFAutoModel +from tensorflow_tts.inference import AutoProcessor + +# initialize fastspeech2 model. 
+fastspeech2 = TFAutoModel.from_pretrained("tensorspeech/tts-fastspeech2-ljspeech-en") + + +# initialize mb_melgan model +mb_melgan = TFAutoModel.from_pretrained("tensorspeech/tts-mb_melgan-ljspeech-en") + + +# inference +processor = AutoProcessor.from_pretrained("tensorspeech/tts-fastspeech2-ljspeech-en") + +input_ids = processor.text_to_sequence("Recent research at Harvard has shown meditating for as little as 8 weeks, can actually increase the grey matter in the parts of the brain responsible for emotional regulation, and learning.") +# fastspeech inference + +mel_before, mel_after, duration_outputs, _, _ = fastspeech2.inference( + input_ids=tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0), + speaker_ids=tf.convert_to_tensor([0], dtype=tf.int32), + speed_ratios=tf.convert_to_tensor([1.0], dtype=tf.float32), + f0_ratios =tf.convert_to_tensor([1.0], dtype=tf.float32), + energy_ratios =tf.convert_to_tensor([1.0], dtype=tf.float32), +) + +# melgan inference +audio_before = mb_melgan.inference(mel_before)[0, :, 0] +audio_after = mb_melgan.inference(mel_after)[0, :, 0] + +# save to file +sf.write('./audio_before.wav', audio_before, 22050, "PCM_16") +sf.write('./audio_after.wav', audio_after, 22050, "PCM_16") +``` + +# Contact +- [Minh Nguyen Quan Anh](https://github.com/tensorspeech): nguyenquananhminh@gmail.com +- [erogol](https://github.com/erogol): erengolge@gmail.com +- [Kuan Chen](https://github.com/azraelkuan): azraelkuan@gmail.com +- [Dawid Kobus](https://github.com/machineko): machineko@protonmail.com +- [Takuya Ebata](https://github.com/MokkeMeguru): meguru.mokke@gmail.com +- [Trinh Le Quang](https://github.com/l4zyf9x): trinhle.cse@gmail.com +- [Yunchao He](https://github.com/candlewill): yunchaohe@gmail.com +- [Alejandro Miguel Velasquez](https://github.com/ZDisket): xml506ok@gmail.com + +# License +All models here are licensed under the [Apache 2.0](http://www.apache.org/licenses/LICENSE-2.0) + +# Acknowledgement +We want to thank [Tomoki 
Hayashi](https://github.com/kan-bayashi), who discussed with us much about Melgan, Multi-band melgan, Fastspeech, and Tacotron. This framework based-on his great open-source [ParallelWaveGan](https://github.com/kan-bayashi/ParallelWaveGAN) project. diff --git a/TensorFlowTTS/docker-compose.yml b/TensorFlowTTS/docker-compose.yml new file mode 100644 index 0000000000000000000000000000000000000000..5e2005ac1e51c15698f60247f035e12ef9c9512f --- /dev/null +++ b/TensorFlowTTS/docker-compose.yml @@ -0,0 +1,11 @@ +version: '2.6' +services: + tensorflowtts: + build: . + volumes: + - .:/workspace + runtime: nvidia + tty: true + command: /bin/bash + environment: + - CUDA_VISIBLE_DEVICES diff --git a/TensorFlowTTS/dockerfile b/TensorFlowTTS/dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..83ed34fa59c7b3c6df6715bd6fdb5dd88635ed46 --- /dev/null +++ b/TensorFlowTTS/dockerfile @@ -0,0 +1,8 @@ +FROM tensorflow/tensorflow:2.6.0-gpu +RUN apt-get update +RUN apt-get install -y zsh tmux wget git libsndfile1 +RUN pip install ipython && \ + pip install git+https://github.com/TensorSpeech/TensorflowTTS.git && \ + pip install git+https://github.com/repodiac/german_transliterate.git#egg=german_transliterate +RUN mkdir /workspace +WORKDIR /workspace diff --git a/TensorFlowTTS/examples/android/.gitignore b/TensorFlowTTS/examples/android/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..835c94ddad8067afacdd9db106ba2800e4184dc5 --- /dev/null +++ b/TensorFlowTTS/examples/android/.gitignore @@ -0,0 +1,59 @@ +# Android Studio +*.iml +.gradle +/local.properties +/.idea +.DS_Store +/build +/captures + +# Built application files +*.apk +!prebuiltapps/*.apk +*.ap_ + +# Files for the Dalvik VM +*.dex + +# Java class files +*.class + +# Generated files +bin/ +gen/ + +# Gradle files +.gradle/ +build/ +*/build/ + +# Local configuration file (sdk path, etc) +local.properties + +# Proguard folder generated by Eclipse +proguard/ + +# Log Files 
+*.log + +# project +project.properties +.classpath +.project +.settings/ + +# Intellij project files +*.ipr +*.iws +.idea/ +app/.gradle/ +.idea/libraries +.idea/workspace.xml +.idea/vcs.xml +.idea/scopes/scope_setting.xml +.idea/moudles.xml +.idea/misc.xml +.idea/inspectionProfiles/Project_Default.xml +.idea/inspectionProfiles/profiles_setting.xml +.idea/encodings.xml +.idea/.name diff --git a/TensorFlowTTS/examples/android/README.md b/TensorFlowTTS/examples/android/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e6fdbd0bb844757f04de41c51e0300bfdef201f --- /dev/null +++ b/TensorFlowTTS/examples/android/README.md @@ -0,0 +1,15 @@ +### Android Demo + +This is a simple Android demo which will load converted FastSpeech2 and Multi-Band MelGAN modules to synthesize audio. +In order to optimize the synthesize speed, two LinkedBlockingQueues have been implemented. + + +### HOW-TO +1. Import this project into Android Studio. +2. Run the app! + +### LICENSE + The license use for this code is [CC BY-NC 3.0](https://creativecommons.org/licenses/by-nc/3.0/). Please read the license carefully before you use it. 
+ +### Contributors +[Xuefeng Ding](https://github.com/mapledxf) diff --git a/TensorFlowTTS/examples/android/app/.gitignore b/TensorFlowTTS/examples/android/app/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..796b96d1c402326528b4ba3c12ee9d92d0e212e9 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/.gitignore @@ -0,0 +1 @@ +/build diff --git a/TensorFlowTTS/examples/android/app/build.gradle b/TensorFlowTTS/examples/android/app/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..0090779901626d37a0e88e2471ddf3a467b23e75 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/build.gradle @@ -0,0 +1,39 @@ +apply plugin: 'com.android.application' + +android { + compileSdkVersion 29 + buildToolsVersion "29.0.2" + defaultConfig { + applicationId "com.tensorspeech.tensorflowtts" + minSdkVersion 21 + targetSdkVersion 29 + versionCode 1 + versionName "1.0" + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } + aaptOptions { + noCompress "tflite" + } + compileOptions { + sourceCompatibility = '1.8' + targetCompatibility = '1.8' + } + lintOptions { + abortOnError false + } +} + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + implementation 'androidx.appcompat:appcompat:1.1.0' + implementation 'androidx.constraintlayout:constraintlayout:1.1.3' + + implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly' + implementation 'org.tensorflow:tensorflow-lite-select-tf-ops:0.0.0-nightly' + implementation 'org.tensorflow:tensorflow-lite-support:0.0.0-nightly' +} diff --git a/TensorFlowTTS/examples/android/app/proguard-rules.pro b/TensorFlowTTS/examples/android/app/proguard-rules.pro new file mode 100644 index 0000000000000000000000000000000000000000..f1b424510da51fd82143bc74a0a801ae5a1e2fcd --- /dev/null +++ b/TensorFlowTTS/examples/android/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add 
project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. +#-renamesourcefileattribute SourceFile diff --git a/TensorFlowTTS/examples/android/app/src/androidTest/java/com/tensorspeech/tensorflowtts/ExampleInstrumentedTest.java b/TensorFlowTTS/examples/android/app/src/androidTest/java/com/tensorspeech/tensorflowtts/ExampleInstrumentedTest.java new file mode 100644 index 0000000000000000000000000000000000000000..f8219d5efadfde0900e61a8b0c10fb41771ac875 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/androidTest/java/com/tensorspeech/tensorflowtts/ExampleInstrumentedTest.java @@ -0,0 +1,27 @@ +package com.tensorspeech.tensorflowtts; + +import android.content.Context; + +import androidx.test.platform.app.InstrumentationRegistry; +import androidx.test.ext.junit.runners.AndroidJUnit4; + +import org.junit.Test; +import org.junit.runner.RunWith; + +import static org.junit.Assert.*; + +/** + * Instrumented test, which will execute on an Android device. + * + * @see Testing documentation + */ +@RunWith(AndroidJUnit4.class) +public class ExampleInstrumentedTest { + @Test + public void useAppContext() { + // Context of the app under test. 
+ Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext(); + + assertEquals("com.tensorspeech.tensorflowtts", appContext.getPackageName()); + } +} diff --git a/TensorFlowTTS/examples/android/app/src/main/AndroidManifest.xml b/TensorFlowTTS/examples/android/app/src/main/AndroidManifest.xml new file mode 100644 index 0000000000000000000000000000000000000000..abd1a559995c35afa8751621d8098329296418f5 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/AndroidManifest.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/TensorFlowTTS/examples/android/app/src/main/assets/fastspeech2_quant.tflite b/TensorFlowTTS/examples/android/app/src/main/assets/fastspeech2_quant.tflite new file mode 100644 index 0000000000000000000000000000000000000000..07460ef1acbbe541abe5c7b875f02567cd5b2eef --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/assets/fastspeech2_quant.tflite @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:68730c535f05b171195d173033d40d5c22c1b08bee7daf35df1b89788db52172 +size 31015600 diff --git a/TensorFlowTTS/examples/android/app/src/main/assets/mbmelgan.tflite b/TensorFlowTTS/examples/android/app/src/main/assets/mbmelgan.tflite new file mode 100644 index 0000000000000000000000000000000000000000..bec7c477218bdbc1ce72831dc3b503863668b7f5 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/assets/mbmelgan.tflite @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ae1f7c0a3f97debe7ab438a36c230116e52fc161bb435a8890b70a598ae94070 +size 10254100 diff --git a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/MainActivity.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/MainActivity.java new file mode 100644 index 0000000000000000000000000000000000000000..10918990765d06736b39cb8acff0e0b304c902c0 --- /dev/null +++ 
b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/MainActivity.java @@ -0,0 +1,82 @@ +package com.tensorspeech.tensorflowtts; + +import android.os.Bundle; +import android.text.TextUtils; +import android.view.View; +import android.widget.EditText; +import android.widget.RadioGroup; + +import androidx.appcompat.app.AppCompatActivity; + +import com.tensorspeech.tensorflowtts.dispatcher.OnTtsStateListener; +import com.tensorspeech.tensorflowtts.dispatcher.TtsStateDispatcher; +import com.tensorspeech.tensorflowtts.tts.TtsManager; +import com.tensorspeech.tensorflowtts.utils.ThreadPoolManager; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-20 17:25 + */ +public class MainActivity extends AppCompatActivity { + private static final String DEFAULT_INPUT_TEXT = "Unless you work on a ship, it's unlikely that you use the word boatswain in everyday conversation, so it's understandably a tricky one. The word - which refers to a petty officer in charge of hull maintenance is not pronounced boats-wain Rather, it's bo-sun to reflect the salty pronunciation of sailors, as The Free Dictionary explains./Blue opinion poll conducted for the National Post."; + + private View speakBtn; + private RadioGroup speedGroup; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_main); + + TtsManager.getInstance().init(this); + + TtsStateDispatcher.getInstance().addListener(new OnTtsStateListener() { + @Override + public void onTtsReady() { + speakBtn.setEnabled(true); + } + + @Override + public void onTtsStart(String text) { + } + + @Override + public void onTtsStop() { + } + }); + + EditText input = findViewById(R.id.input); + input.setHint(DEFAULT_INPUT_TEXT); + + speedGroup = findViewById(R.id.speed_chooser); + speedGroup.check(R.id.normal); + + speakBtn = findViewById(R.id.start); + speakBtn.setEnabled(false); + 
speakBtn.setOnClickListener(v -> + ThreadPoolManager.getInstance().execute(() -> { + float speed ; + switch (speedGroup.getCheckedRadioButtonId()) { + case R.id.fast: + speed = 0.8F; + break; + case R.id.slow: + speed = 1.2F; + break; + case R.id.normal: + default: + speed = 1.0F; + break; + } + + String inputText = input.getText().toString(); + if (TextUtils.isEmpty(inputText)) { + inputText = DEFAULT_INPUT_TEXT; + } + TtsManager.getInstance().speak(inputText, speed, true); + })); + + findViewById(R.id.stop).setOnClickListener(v -> + TtsManager.getInstance().stopTts()); + } +} diff --git a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/dispatcher/OnTtsStateListener.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/dispatcher/OnTtsStateListener.java new file mode 100644 index 0000000000000000000000000000000000000000..07c0c9c4ff205f3be90753c2ad2a474571ce1547 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/dispatcher/OnTtsStateListener.java @@ -0,0 +1,13 @@ +package com.tensorspeech.tensorflowtts.dispatcher; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-28 14:25 + */ +public interface OnTtsStateListener { + public void onTtsReady(); + + public void onTtsStart(String text); + + public void onTtsStop(); +} \ No newline at end of file diff --git a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/dispatcher/TtsStateDispatcher.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/dispatcher/TtsStateDispatcher.java new file mode 100644 index 0000000000000000000000000000000000000000..082b2e2bb372aaa37533e02f18e2ba1c98170fb1 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/dispatcher/TtsStateDispatcher.java @@ -0,0 +1,79 @@ +package com.tensorspeech.tensorflowtts.dispatcher; + +import 
android.os.Handler; +import android.os.Looper; +import android.util.Log; + +import java.util.concurrent.CopyOnWriteArrayList; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-28 14:25 + */ +public class TtsStateDispatcher { + private static final String TAG = "TtsStateDispatcher"; + private static volatile TtsStateDispatcher instance; + private static final Object INSTANCE_WRITE_LOCK = new Object(); + + public static TtsStateDispatcher getInstance() { + if (instance == null) { + synchronized (INSTANCE_WRITE_LOCK) { + if (instance == null) { + instance = new TtsStateDispatcher(); + } + } + } + return instance; + } + + private final Handler handler = new Handler(Looper.getMainLooper()); + + private CopyOnWriteArrayList mListeners = new CopyOnWriteArrayList<>(); + + public void release() { + Log.d(TAG, "release: "); + mListeners.clear(); + } + + public void addListener(OnTtsStateListener listener) { + if (mListeners.contains(listener)) { + return; + } + Log.d(TAG, "addListener: " + listener.getClass()); + mListeners.add(listener); + } + + public void removeListener(OnTtsStateListener listener) { + if (mListeners.contains(listener)) { + Log.d(TAG, "removeListener: " + listener.getClass()); + mListeners.remove(listener); + } + } + + public void onTtsStart(String text){ + Log.d(TAG, "onTtsStart: "); + if (!mListeners.isEmpty()) { + for (OnTtsStateListener listener : mListeners) { + handler.post(() -> listener.onTtsStart(text)); + } + } + } + + public void onTtsStop(){ + Log.d(TAG, "onTtsStop: "); + if (!mListeners.isEmpty()) { + for (OnTtsStateListener listener : mListeners) { + handler.post(listener::onTtsStop); + } + } + } + + public void onTtsReady(){ + Log.d(TAG, "onTtsReady: "); + if (!mListeners.isEmpty()) { + for (OnTtsStateListener listener : mListeners) { + handler.post(listener::onTtsReady); + } + } + } +} diff --git 
a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/module/AbstractModule.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/module/AbstractModule.java new file mode 100644 index 0000000000000000000000000000000000000000..628ab14050993d8c2acf7192e30151cf140e44b5 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/module/AbstractModule.java @@ -0,0 +1,17 @@ +package com.tensorspeech.tensorflowtts.module; + +import org.tensorflow.lite.Interpreter; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-20 17:25 + * + */ +abstract class AbstractModule { + + Interpreter.Options getOption() { + Interpreter.Options options = new Interpreter.Options(); + options.setNumThreads(5); + return options; + } +} diff --git a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/module/FastSpeech2.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/module/FastSpeech2.java new file mode 100644 index 0000000000000000000000000000000000000000..f3324276141daecd55235a087747e7fcc7c77a3c --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/module/FastSpeech2.java @@ -0,0 +1,82 @@ +package com.tensorspeech.tensorflowtts.module; + +import android.annotation.SuppressLint; +import android.util.Log; + +import org.tensorflow.lite.DataType; +import org.tensorflow.lite.Interpreter; +import org.tensorflow.lite.Tensor; +import org.tensorflow.lite.support.tensorbuffer.TensorBuffer; + +import java.io.File; +import java.nio.FloatBuffer; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-20 17:26 + * + */ +public class FastSpeech2 extends AbstractModule { + private static final String TAG = "FastSpeech2"; + private Interpreter 
mModule; + + public FastSpeech2(String modulePath) { + try { + mModule = new Interpreter(new File(modulePath), getOption()); + int input = mModule.getInputTensorCount(); + for (int i = 0; i < input; i++) { + Tensor inputTensor = mModule.getInputTensor(i); + Log.d(TAG, "input:" + i + + " name:" + inputTensor.name() + + " shape:" + Arrays.toString(inputTensor.shape()) + + " dtype:" + inputTensor.dataType()); + } + + int output = mModule.getOutputTensorCount(); + for (int i = 0; i < output; i++) { + Tensor outputTensor = mModule.getOutputTensor(i); + Log.d(TAG, "output:" + i + + " name:" + outputTensor.name() + + " shape:" + Arrays.toString(outputTensor.shape()) + + " dtype:" + outputTensor.dataType()); + } + Log.d(TAG, "successfully init"); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public TensorBuffer getMelSpectrogram(int[] inputIds, float speed) { + Log.d(TAG, "input id length: " + inputIds.length); + mModule.resizeInput(0, new int[]{1, inputIds.length}); + mModule.allocateTensors(); + + @SuppressLint("UseSparseArrays") + Map outputMap = new HashMap<>(); + + FloatBuffer outputBuffer = FloatBuffer.allocate(350000); + outputMap.put(0, outputBuffer); + + int[][] inputs = new int[1][inputIds.length]; + inputs[0] = inputIds; + + long time = System.currentTimeMillis(); + mModule.runForMultipleInputsOutputs( + new Object[]{inputs, new int[1][1], new int[]{0}, new float[]{speed}, new float[]{1F}, new float[]{1F}}, + outputMap); + Log.d(TAG, "time cost: " + (System.currentTimeMillis() - time)); + + int size = mModule.getOutputTensor(0).shape()[2]; + int[] shape = {1, outputBuffer.position() / size, size}; + TensorBuffer spectrogram = TensorBuffer.createFixedSize(shape, DataType.FLOAT32); + float[] outputArray = new float[outputBuffer.position()]; + outputBuffer.rewind(); + outputBuffer.get(outputArray); + spectrogram.loadArray(outputArray); + + return spectrogram; + } +} diff --git 
a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/module/MBMelGan.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/module/MBMelGan.java new file mode 100644 index 0000000000000000000000000000000000000000..274605be8407e5505bbb13f6ac6ae8219d146495 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/module/MBMelGan.java @@ -0,0 +1,64 @@ +package com.tensorspeech.tensorflowtts.module; + +import android.util.Log; + +import org.tensorflow.lite.Interpreter; +import org.tensorflow.lite.Tensor; +import org.tensorflow.lite.support.tensorbuffer.TensorBuffer; + +import java.io.File; +import java.nio.FloatBuffer; +import java.util.Arrays; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-20 17:26 + * + */ +public class MBMelGan extends AbstractModule { + private static final String TAG = "MBMelGan"; + private Interpreter mModule; + + public MBMelGan(String modulePath) { + try { + mModule = new Interpreter(new File(modulePath), getOption()); + int input = mModule.getInputTensorCount(); + for (int i = 0; i < input; i++) { + Tensor inputTensor = mModule.getInputTensor(i); + Log.d(TAG, "input:" + i + + " name:" + inputTensor.name() + + " shape:" + Arrays.toString(inputTensor.shape()) + + " dtype:" + inputTensor.dataType()); + } + + int output = mModule.getOutputTensorCount(); + for (int i = 0; i < output; i++) { + Tensor outputTensor = mModule.getOutputTensor(i); + Log.d(TAG, "output:" + i + + " name:" + outputTensor.name() + + " shape:" + Arrays.toString(outputTensor.shape()) + + " dtype:" + outputTensor.dataType()); + } + Log.d(TAG, "successfully init"); + } catch (Exception e) { + e.printStackTrace(); + } + } + + + public float[] getAudio(TensorBuffer input) { + mModule.resizeInput(0, input.getShape()); + mModule.allocateTensors(); + + FloatBuffer outputBuffer = FloatBuffer.allocate(350000); + + long time = 
System.currentTimeMillis(); + mModule.run(input.getBuffer(), outputBuffer); + Log.d(TAG, "time cost: " + (System.currentTimeMillis() - time)); + + float[] audioArray = new float[outputBuffer.position()]; + outputBuffer.rewind(); + outputBuffer.get(audioArray); + return audioArray; + } +} diff --git a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/tts/InputWorker.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/tts/InputWorker.java new file mode 100644 index 0000000000000000000000000000000000000000..f426622376568554e77e28f332a33564fb8726b5 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/tts/InputWorker.java @@ -0,0 +1,114 @@ +package com.tensorspeech.tensorflowtts.tts; + +import android.util.Log; + +import com.tensorspeech.tensorflowtts.dispatcher.TtsStateDispatcher; +import com.tensorspeech.tensorflowtts.module.FastSpeech2; +import com.tensorspeech.tensorflowtts.module.MBMelGan; +import com.tensorspeech.tensorflowtts.utils.Processor; +import com.tensorspeech.tensorflowtts.utils.ThreadPoolManager; + +import org.tensorflow.lite.support.tensorbuffer.TensorBuffer; + +import java.util.Arrays; +import java.util.concurrent.LinkedBlockingQueue; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-28 14:25 + */ +class InputWorker { + private static final String TAG = "InputWorker"; + + private LinkedBlockingQueue mInputQueue = new LinkedBlockingQueue<>(); + private InputText mCurrentInputText; + private FastSpeech2 mFastSpeech2; + private MBMelGan mMBMelGan; + private Processor mProcessor; + private TtsPlayer mTtsPlayer; + + InputWorker(String fastspeech, String vocoder) { + mFastSpeech2 = new FastSpeech2(fastspeech); + mMBMelGan = new MBMelGan(vocoder); + mProcessor = new Processor(); + mTtsPlayer = new TtsPlayer(); + + ThreadPoolManager.getInstance().getSingleExecutor("worker").execute(() -> { + 
//noinspection InfiniteLoopStatement + while (true) { + try { + mCurrentInputText = mInputQueue.take(); + Log.d(TAG, "processing: " + mCurrentInputText.INPUT_TEXT); + TtsStateDispatcher.getInstance().onTtsStart(mCurrentInputText.INPUT_TEXT); + mCurrentInputText.proceed(); + TtsStateDispatcher.getInstance().onTtsStop(); + } catch (Exception e) { + Log.e(TAG, "Exception: ", e); + } + } + }); + } + + void processInput(String inputText, float speed) { + Log.d(TAG, "add to queue: " + inputText); + mInputQueue.offer(new InputText(inputText, speed)); + } + + void interrupt() { + mInputQueue.clear(); + if (mCurrentInputText != null) { + mCurrentInputText.interrupt(); + } + mTtsPlayer.interrupt(); + } + + + private class InputText { + private final String INPUT_TEXT; + private final float SPEED; + private boolean isInterrupt; + + private InputText(String inputText, float speed) { + this.INPUT_TEXT = inputText; + this.SPEED = speed; + } + + private void proceed() { + String[] sentences = INPUT_TEXT.split("[.,]"); + Log.d(TAG, "speak: " + Arrays.toString(sentences)); + + for (String sentence : sentences) { + + long time = System.currentTimeMillis(); + + int[] inputIds = mProcessor.textToIds(sentence); + + TensorBuffer output = mFastSpeech2.getMelSpectrogram(inputIds, SPEED); + + if (isInterrupt) { + Log.d(TAG, "proceed: interrupt"); + return; + } + + long encoderTime = System.currentTimeMillis(); + + float[] audioData = mMBMelGan.getAudio(output); + + if (isInterrupt) { + Log.d(TAG, "proceed: interrupt"); + return; + } + + long vocoderTime = System.currentTimeMillis(); + + Log.d(TAG, "Time cost: " + (encoderTime - time) + "+" + (vocoderTime - encoderTime) + "=" + (vocoderTime - time)); + + mTtsPlayer.play(new TtsPlayer.AudioData(sentence, audioData)); + } + } + + private void interrupt() { + this.isInterrupt = true; + } + } +} \ No newline at end of file diff --git a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/tts/TtsManager.java 
b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/tts/TtsManager.java new file mode 100644 index 0000000000000000000000000000000000000000..37598e6ddd05ecb85ef6e54ca18bb8d91c8f5ce3 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/tts/TtsManager.java @@ -0,0 +1,97 @@ +package com.tensorspeech.tensorflowtts.tts; + +import android.content.Context; +import android.util.Log; + +import com.tensorspeech.tensorflowtts.dispatcher.TtsStateDispatcher; +import com.tensorspeech.tensorflowtts.utils.ThreadPoolManager; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.InputStream; +import java.io.OutputStream; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-28 14:25 + */ +public class TtsManager { + private static final String TAG = "TtsManager"; + + private static final Object INSTANCE_WRITE_LOCK = new Object(); + + private static volatile TtsManager instance; + + public static TtsManager getInstance() { + if (instance == null) { + synchronized (INSTANCE_WRITE_LOCK) { + if (instance == null) { + instance = new TtsManager(); + } + } + } + return instance; + } + + private InputWorker mWorker; + + private final static String FASTSPEECH2_MODULE = "fastspeech2_quant.tflite"; + private final static String MELGAN_MODULE = "mbmelgan.tflite"; + + public void init(Context context) { + ThreadPoolManager.getInstance().getSingleExecutor("init").execute(() -> { + try { + String fastspeech = copyFile(context, FASTSPEECH2_MODULE); + String vocoder = copyFile(context, MELGAN_MODULE); + mWorker = new InputWorker(fastspeech, vocoder); + } catch (Exception e) { + Log.e(TAG, "mWorker init failed", e); + } + + TtsStateDispatcher.getInstance().onTtsReady(); + }); + } + + private String copyFile(Context context, String strOutFileName) { + Log.d(TAG, "start copy file " + strOutFileName); + File file = context.getFilesDir(); + + String tmpFile = 
file.getAbsolutePath() + "/" + strOutFileName; + File f = new File(tmpFile); + if (f.exists()) { + Log.d(TAG, "file exists " + strOutFileName); + return f.getAbsolutePath(); + } + + try (OutputStream myOutput = new FileOutputStream(f); + InputStream myInput = context.getAssets().open(strOutFileName)) { + byte[] buffer = new byte[1024]; + int length = myInput.read(buffer); + while (length > 0) { + myOutput.write(buffer, 0, length); + length = myInput.read(buffer); + } + myOutput.flush(); + Log.d(TAG, "Copy task successful"); + } catch (Exception e) { + Log.e(TAG, "copyFile: Failed to copy", e); + } finally { + Log.d(TAG, "end copy file " + strOutFileName); + } + return f.getAbsolutePath(); + } + + public void stopTts() { + mWorker.interrupt(); + } + + public void speak(String inputText, float speed, boolean interrupt) { + if (interrupt) { + stopTts(); + } + + ThreadPoolManager.getInstance().execute(() -> + mWorker.processInput(inputText, speed)); + } + +} diff --git a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/tts/TtsPlayer.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/tts/TtsPlayer.java new file mode 100644 index 0000000000000000000000000000000000000000..dc1b6387376ca43c9d8e326ae4050d4c51f92c26 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/tts/TtsPlayer.java @@ -0,0 +1,91 @@ +package com.tensorspeech.tensorflowtts.tts; + +import android.media.AudioAttributes; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioTrack; +import android.util.Log; + +import com.tensorspeech.tensorflowtts.utils.ThreadPoolManager; + +import java.util.concurrent.LinkedBlockingQueue; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-20 18:22 + */ +class TtsPlayer { + private static final String TAG = "TtsPlayer"; + + private final AudioTrack mAudioTrack; + + private 
final static int FORMAT = AudioFormat.ENCODING_PCM_FLOAT; + private final static int SAMPLERATE = 22050; + private final static int CHANNEL = AudioFormat.CHANNEL_OUT_MONO; + private final static int BUFFER_SIZE = AudioTrack.getMinBufferSize(SAMPLERATE, CHANNEL, FORMAT); + private LinkedBlockingQueue mAudioQueue = new LinkedBlockingQueue<>(); + private AudioData mCurrentAudioData; + + TtsPlayer() { + mAudioTrack = new AudioTrack( + new AudioAttributes.Builder() + .setUsage(AudioAttributes.USAGE_MEDIA) + .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC) + .build(), + new AudioFormat.Builder() + .setSampleRate(22050) + .setEncoding(FORMAT) + .setChannelMask(CHANNEL) + .build(), + BUFFER_SIZE, + AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE + ); + mAudioTrack.play(); + + ThreadPoolManager.getInstance().getSingleExecutor("audio").execute(() -> { + //noinspection InfiniteLoopStatement + while (true) { + try { + mCurrentAudioData = mAudioQueue.take(); + Log.d(TAG, "playing: " + mCurrentAudioData.text); + int index = 0; + while (index < mCurrentAudioData.audio.length && !mCurrentAudioData.isInterrupt) { + int buffer = Math.min(BUFFER_SIZE, mCurrentAudioData.audio.length - index); + mAudioTrack.write(mCurrentAudioData.audio, index, buffer, AudioTrack.WRITE_BLOCKING); + index += BUFFER_SIZE; + } + } catch (Exception e) { + Log.e(TAG, "Exception: ", e); + } + } + }); + } + + void play(AudioData audioData) { + Log.d(TAG, "add audio data to queue: " + audioData.text); + mAudioQueue.offer(audioData); + } + + void interrupt() { + mAudioQueue.clear(); + if (mCurrentAudioData != null) { + mCurrentAudioData.interrupt(); + } + } + + static class AudioData { + private String text; + private float[] audio; + private boolean isInterrupt; + + AudioData(String text, float[] audio) { + this.text = text; + this.audio = audio; + } + + private void interrupt() { + isInterrupt = true; + } + } + +} diff --git 
a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/utils/NumberNorm.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/utils/NumberNorm.java new file mode 100644 index 0000000000000000000000000000000000000000..df789b3d30729780ecab0cad452f8788ca2fea29 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/utils/NumberNorm.java @@ -0,0 +1,109 @@ +package com.tensorspeech.tensorflowtts.utils; + +import java.util.HashMap; +import java.util.Map; + +// Borrowed from https://rosettacode.org/wiki/Spelling_of_ordinal_numbers +public class NumberNorm { + + private static Map ordinalMap = new HashMap<>(); + static { + ordinalMap.put("one", "first"); + ordinalMap.put("two", "second"); + ordinalMap.put("three", "third"); + ordinalMap.put("five", "fifth"); + ordinalMap.put("eight", "eighth"); + ordinalMap.put("nine", "ninth"); + ordinalMap.put("twelve", "twelfth"); + } + + public static String toOrdinal(long n) { + String spelling = numToString(n); + String[] split = spelling.split(" "); + String last = split[split.length - 1]; + String replace; + if ( last.contains("-") ) { + String[] lastSplit = last.split("-"); + String lastWithDash = lastSplit[1]; + String lastReplace; + if ( ordinalMap.containsKey(lastWithDash) ) { + lastReplace = ordinalMap.get(lastWithDash); + } + else if ( lastWithDash.endsWith("y") ) { + lastReplace = lastWithDash.substring(0, lastWithDash.length() - 1) + "ieth"; + } + else { + lastReplace = lastWithDash + "th"; + } + replace = lastSplit[0] + "-" + lastReplace; + } + else { + if ( ordinalMap.containsKey(last) ) { + replace = ordinalMap.get(last); + } + else if ( last.endsWith("y") ) { + replace = last.substring(0, last.length() - 1) + "ieth"; + } + else { + replace = last + "th"; + } + } + split[split.length - 1] = replace; + return String.join(" ", split); + } + + private static final String[] nums = new String[] { + "zero", "one", "two", 
"three", "four", "five", "six", "seven", "eight", "nine", + "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen", "seventeen", "eighteen", "nineteen" + }; + + private static final String[] tens = new String[] {"zero", "ten", "twenty", "thirty", "forty", "fifty", "sixty", "seventy", "eighty", "ninety"}; + + public static final String numToString(long n) { + return numToStringHelper(n); + } + + private static final String numToStringHelper(long n) { + if ( n < 0 ) { + return "negative " + numToStringHelper(-n); + } + int index = (int) n; + if ( n <= 19 ) { + return nums[index]; + } + if ( n <= 99 ) { + return tens[index/10] + (n % 10 > 0 ? "-" + numToStringHelper(n % 10) : ""); + } + String label = null; + long factor = 0; + if ( n <= 999 ) { + label = "hundred"; + factor = 100; + } + else if ( n <= 999999) { + label = "thousand"; + factor = 1000; + } + else if ( n <= 999999999) { + label = "million"; + factor = 1000000; + } + else if ( n <= 999999999999L) { + label = "billion"; + factor = 1000000000; + } + else if ( n <= 999999999999999L) { + label = "trillion"; + factor = 1000000000000L; + } + else if ( n <= 999999999999999999L) { + label = "quadrillion"; + factor = 1000000000000000L; + } + else { + label = "quintillion"; + factor = 1000000000000000000L; + } + return numToStringHelper(n / factor) + " " + label + (n % factor > 0 ? 
" " + numToStringHelper(n % factor ) : ""); + } +} diff --git a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/utils/Processor.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/utils/Processor.java new file mode 100644 index 0000000000000000000000000000000000000000..2b4096e620ed678994a36fdab080195075468561 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/utils/Processor.java @@ -0,0 +1,336 @@ +package com.tensorspeech.tensorflowtts.utils; + + +import android.util.Log; + +import androidx.annotation.Nullable; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * @author {@link "mailto:yusufsarigoz@gmail.com" "M. Yusuf Sarıgöz"} + * Created 2020-07-25 17:25 + */ +public class Processor { + private static final String TAG = "processor"; + + private static final String[] VALID_SYMBOLS = new String[]{ + "AA", + "AA0", + "AA1", + "AA2", + "AE", + "AE0", + "AE1", + "AE2", + "AH", + "AH0", + "AH1", + "AH2", + "AO", + "AO0", + "AO1", + "AO2", + "AW", + "AW0", + "AW1", + "AW2", + "AY", + "AY0", + "AY1", + "AY2", + "B", + "CH", + "D", + "DH", + "EH", + "EH0", + "EH1", + "EH2", + "ER", + "ER0", + "ER1", + "ER2", + "EY", + "EY0", + "EY1", + "EY2", + "F", + "G", + "HH", + "IH", + "IH0", + "IH1", + "IH2", + "IY", + "IY0", + "IY1", + "IY2", + "JH", + "K", + "L", + "M", + "N", + "NG", + "OW", + "OW0", + "OW1", + "OW2", + "OY", + "OY0", + "OY1", + "OY2", + "P", + "R", + "S", + "SH", + "T", + "TH", + "UH", + "UH0", + "UH1", + "UH2", + "UW", + "UW0", + "UW1", + "UW2", + "V", + "W", + "Y", + "Z", + "ZH" + }; + + private static final Pattern CURLY_RE = Pattern.compile("(.*?)\\{(.+?)\\}(.*)"); + private static final Pattern COMMA_NUMBER_RE = Pattern.compile("([0-9][0-9\\,]+[0-9])"); + private static 
final Pattern DECIMAL_RE = Pattern.compile("([0-9]+\\.[0-9]+)"); + private static final Pattern POUNDS_RE = Pattern.compile("£([0-9\\,]*[0-9]+)"); + private static final Pattern DOLLARS_RE = Pattern.compile("\\$([0-9.\\,]*[0-9]+)"); + private static final Pattern ORDINAL_RE = Pattern.compile("[0-9]+(st|nd|rd|th)"); + private static final Pattern NUMBER_RE = Pattern.compile("[0-9]+"); + + private static final String PAD = "_"; + private static final String EOS = "~"; + private static final String SPECIAL = "-"; + + private static final String[] PUNCTUATION = "!'(),.:;? ".split(""); + private static final String[] LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz".split(""); + + private static final List SYMBOLS = new ArrayList<>(); + private static final Map ABBREVIATIONS = new HashMap<>(); + private static final Map SYMBOL_TO_ID = new HashMap<>(); + + public Processor() { + SYMBOLS.add(PAD); + SYMBOLS.add(SPECIAL); + + for (String p : PUNCTUATION) { + if (!"".equals(p)) { + SYMBOLS.add(p); + } + } + + for (String l : LETTERS) { + if (!"".equals(l)) { + SYMBOLS.add(l); + } + } + + for (String validSymbol : VALID_SYMBOLS) { + SYMBOLS.add("@" + validSymbol); + } + + SYMBOLS.add(EOS); + + for (int i = 0; i < SYMBOLS.size(); ++i) { + SYMBOL_TO_ID.put(SYMBOLS.get(i), i); + } + + ABBREVIATIONS.put("mrs", "misess"); + ABBREVIATIONS.put("mr", "mister"); + ABBREVIATIONS.put("dr", "doctor"); + ABBREVIATIONS.put("st", "saint"); + ABBREVIATIONS.put("co", "company"); + ABBREVIATIONS.put("jr", "junior"); + ABBREVIATIONS.put("maj", "major"); + ABBREVIATIONS.put("gen", "general"); + ABBREVIATIONS.put("drs", "doctors"); + ABBREVIATIONS.put("rev", "reverend"); + ABBREVIATIONS.put("lt", "lieutenant"); + ABBREVIATIONS.put("hon", "honorable"); + ABBREVIATIONS.put("sgt", "sergeant"); + ABBREVIATIONS.put("capt", "captain"); + ABBREVIATIONS.put("esq", "esquire"); + ABBREVIATIONS.put("ltd", "limited"); + ABBREVIATIONS.put("col", "colonel"); + ABBREVIATIONS.put("ft", "fort"); + 
} + + + private List symbolsToSequence(String symbols) { + List sequence = new ArrayList<>(); + + for (int i = 0; i < symbols.length(); ++i) { + Integer id = SYMBOL_TO_ID.get(String.valueOf(symbols.charAt(i))); + if (id == null) { + Log.e(TAG, "symbolsToSequence: id is not found for " + symbols.charAt(i)); + } else { + sequence.add(id); + } + } + + return sequence; + } + + private List arpabetToSequence(@Nullable String symbols) { + List sequence = new ArrayList<>(); + if (symbols != null) { + String[] as = symbols.split(" "); + for (String s : as) { + sequence.add(SYMBOL_TO_ID.get("@" + s)); + } + } + return sequence; + } + + private String convertToAscii(String text) { + byte[] bytes = text.getBytes(StandardCharsets.US_ASCII); + return new String(bytes); + } + + private String collapseWhitespace(String text) { + return text.replaceAll("\\s+", " "); + } + + private String expandAbbreviations(String text) { + for (Map.Entry entry : ABBREVIATIONS.entrySet()) { + text = text.replaceAll("\\b" + entry.getKey() + "\\.", entry.getValue()); + } + return text; + } + + private String removeCommasFromNumbers(String text) { + Matcher m = COMMA_NUMBER_RE.matcher(text); + while (m.find()) { + String s = m.group().replaceAll(",", ""); + text = text.replaceFirst(m.group(), s); + } + return text; + } + + private String expandPounds(String text) { + Matcher m = POUNDS_RE.matcher(text); + while (m.find()) { + text = text.replaceFirst(m.group(), m.group() + " pounds"); + } + return text; + } + + private String expandDollars(String text) { + Matcher m = DOLLARS_RE.matcher(text); + while (m.find()) { + String dollars = "0"; + String cents = "0"; + String spelling = ""; + String s = m.group().substring(1); + String[] parts = s.split("\\."); + if (!s.startsWith(".")) { + dollars = parts[0]; + } + if (!s.endsWith(".") && parts.length > 1) { + cents = parts[1]; + } + if (!"0".equals(dollars)) { + spelling += parts[0] + " dollars "; + } + if (!"0".equals(cents) && !"00".equals(cents)) { + 
spelling += parts[1] + " cents "; + } + text = text.replaceFirst("\\" + m.group(), spelling); + } + return text; + } + + private String expandDecimals(String text) { + Matcher m = DECIMAL_RE.matcher(text); + while (m.find()) { + String s = m.group().replaceAll("\\.", " point "); + text = text.replaceFirst(m.group(), s); + } + return text; + } + + private String expandOrdinals(String text) { + Matcher m = ORDINAL_RE.matcher(text); + while (m.find()) { + String s = m.group().substring(0, m.group().length() - 2); + long l = Long.valueOf(s); + String spelling = NumberNorm.toOrdinal(l); + text = text.replaceFirst(m.group(), spelling); + } + return text; + } + + private String expandCardinals(String text) { + Matcher m = NUMBER_RE.matcher(text); + while (m.find()) { + long l = Long.valueOf(m.group()); + String spelling = NumberNorm.numToString(l); + text = text.replaceFirst(m.group(), spelling); + } + return text; + } + + private String expandNumbers(String text) { + text = removeCommasFromNumbers(text); + text = expandPounds(text); + text = expandDollars(text); + text = expandDecimals(text); + text = expandOrdinals(text); + text = expandCardinals(text); + return text; + } + + private String cleanTextForEnglish(String text) { + text = convertToAscii(text); + text = text.toLowerCase(); + text = expandAbbreviations(text); + try { + text = expandNumbers(text); + } catch (Exception e) { + Log.d(TAG, "Failed to convert numbers", e); + } + text = collapseWhitespace(text); + Log.d(TAG, "text preprocessed: " + text); + return text; + } + + public int[] textToIds(String text) { + List sequence = new ArrayList<>(); + while (text!= null && text.length() > 0) { + Matcher m = CURLY_RE.matcher(text); + if (!m.find()) { + sequence.addAll(symbolsToSequence(cleanTextForEnglish(text))); + break; + } + sequence.addAll(symbolsToSequence(cleanTextForEnglish(m.group(1)))); + sequence.addAll(arpabetToSequence(m.group(2))); + text = m.group(3); + } + + int size = sequence.size(); + Integer[] 
tmp = new Integer[size]; + tmp = sequence.toArray(tmp); + int[] ids = new int[size]; + for (int i = 0; i < size; ++i) { + ids[i] = tmp[i]; + } + return ids; + } +} diff --git a/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/utils/ThreadPoolManager.java b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/utils/ThreadPoolManager.java new file mode 100644 index 0000000000000000000000000000000000000000..16446a4ffb73509e245b9a3a3628b4d369912be4 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/java/com/tensorspeech/tensorflowtts/utils/ThreadPoolManager.java @@ -0,0 +1,157 @@ +package com.tensorspeech.tensorflowtts.utils; + +import android.os.Looper; +import android.os.Process; + +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * @author {@link "mailto:xuefeng.ding@outlook.com" "Xuefeng Ding"} + * Created 2020-07-20 17:25 + */ +@SuppressWarnings("unused") +public class ThreadPoolManager { + + public static ThreadPoolManager getInstance() { + return ThreadPoolManager.Holder.INSTANCE; + } + + private static final class Holder { + private static final ThreadPoolManager INSTANCE = new ThreadPoolManager(); + } + + private ThreadPoolExecutor mExecutor; + + /** + * Constructor + */ + private ThreadPoolManager() { + int corePoolSize = Runtime.getRuntime().availableProcessors() * 2 + 1; + ThreadFactory namedThreadFactory = new NamedThreadFactory("thread pool"); + + mExecutor = new ThreadPoolExecutor( + corePoolSize, + corePoolSize * 10, + 1, + TimeUnit.HOURS, + new LinkedBlockingQueue<>(), + namedThreadFactory, + new ThreadPoolExecutor.DiscardPolicy() + ); + } + + /** + * 执行任务 + * @param runnable 需要执行的异步任务 + */ + public void execute(Runnable runnable) { + if 
(runnable == null) { + return; + } + mExecutor.execute(runnable); + } + + /** + * single thread with name + * @param name 线程名 + * @return 线程执行器 + */ + public ScheduledThreadPoolExecutor getSingleExecutor(String name) { + return getSingleExecutor(name, Thread.NORM_PRIORITY); + } + + /** + * single thread with name and priority + * @param name thread name + * @param priority thread priority + * @return Thread Executor + */ + @SuppressWarnings("WeakerAccess") + public ScheduledThreadPoolExecutor getSingleExecutor(String name, int priority) { + return new ScheduledThreadPoolExecutor( + 1, + new NamedThreadFactory(name, priority)); + } + + /** + * 从线程池中移除任务 + * @param runnable 需要移除的异步任务 + */ + public void remove(Runnable runnable) { + if (runnable == null) { + return; + } + mExecutor.remove(runnable); + } + + /** + * 为线程池内的每个线程命名的工厂类 + */ + private static class NamedThreadFactory implements ThreadFactory { + private static final AtomicInteger POOL_NUMBER = new AtomicInteger(1); + private final ThreadGroup group; + private final AtomicInteger threadNumber = new AtomicInteger(1); + private final String namePrefix; + private final int priority; + + /** + * Constructor + * @param namePrefix 线程名前缀 + */ + private NamedThreadFactory(String namePrefix) { + this(namePrefix, Thread.NORM_PRIORITY); + } + + /** + * Constructor + * @param threadName 线程名前缀 + * @param priority 线程优先级 + */ + private NamedThreadFactory(String threadName, int priority) { + SecurityManager s = System.getSecurityManager(); + group = (s != null) ? 
s.getThreadGroup() : + Thread.currentThread().getThreadGroup(); + namePrefix = threadName + "-" + POOL_NUMBER.getAndIncrement(); + this.priority = priority; + } + + @Override + public Thread newThread(Runnable r) { + Thread t = new Thread(group, r, + namePrefix + threadNumber.getAndIncrement(), + 0); + if (t.isDaemon()) { + t.setDaemon(false); + } + + t.setPriority(priority); + + switch (priority) { + case Thread.MIN_PRIORITY: + Process.setThreadPriority(Process.THREAD_PRIORITY_LOWEST); + break; + case Thread.MAX_PRIORITY: + Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); + break; + default: + Process.setThreadPriority(Process.THREAD_PRIORITY_FOREGROUND); + break; + } + + return t; + } + } + + /** + * 判断当前线程是否为主线程 + * @return {@code true} if the current thread is main thread. + */ + public static boolean isMainThread() { + return Looper.myLooper() == Looper.getMainLooper(); + } +} \ No newline at end of file diff --git a/TensorFlowTTS/examples/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/TensorFlowTTS/examples/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml new file mode 100644 index 0000000000000000000000000000000000000000..1f6bb290603d7caa16c5fb6f61bbfdc750622f5c --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + diff --git a/TensorFlowTTS/examples/android/app/src/main/res/drawable/ic_launcher_background.xml b/TensorFlowTTS/examples/android/app/src/main/res/drawable/ic_launcher_background.xml new file mode 100644 index 0000000000000000000000000000000000000000..0d025f9bf6b67c63044a36a9ff44fbc69e5c5822 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/res/drawable/ic_launcher_background.xml @@ -0,0 +1,170 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/TensorFlowTTS/examples/android/app/src/main/res/layout/activity_main.xml 
b/TensorFlowTTS/examples/android/app/src/main/res/layout/activity_main.xml new file mode 100644 index 0000000000000000000000000000000000000000..e5b8c6c2313ce1aed66fc23fee2dbf7299ac7748 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/res/layout/activity_main.xml @@ -0,0 +1,77 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml new file mode 100644 index 0000000000000000000000000000000000000000..eca70cfe52eac1ba66ba280a68ca7be8fcf88a16 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml new file mode 100644 index 0000000000000000000000000000000000000000..eca70cfe52eac1ba66ba280a68ca7be8fcf88a16 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-hdpi/ic_launcher.png new file mode 100644 index 0000000000000000000000000000000000000000..898f3ed59ac9f3248734a00e5902736c9367d455 Binary files /dev/null and b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-hdpi/ic_launcher.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png new file mode 100644 index 0000000000000000000000000000000000000000..dffca3601eba7bf5f409bdd520820e2eb5122c75 Binary files /dev/null and 
b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 0000000000000000000000000000000000000000..64ba76f75e9ce021aa3d95c213491f73bcacb597 Binary files /dev/null and b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-mdpi/ic_launcher.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png new file mode 100644 index 0000000000000000000000000000000000000000..dae5e082342fcdeee5db8a6e0b27028e2d2808f5 Binary files /dev/null and b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png new file mode 100644 index 0000000000000000000000000000000000000000..e5ed46597ea8447d91ab1786a34e30f1c26b18bd Binary files /dev/null and b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png new file mode 100644 index 0000000000000000000000000000000000000000..14ed0af35023e4f1901cf03487b6c524257b8483 Binary files /dev/null and b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png new file mode 100644 index 0000000000000000000000000000000000000000..b0907cac3bfd8fbfdc46e1108247f0a1055387ec Binary files /dev/null and 
b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png new file mode 100644 index 0000000000000000000000000000000000000000..d8ae03154975f397f8ed1b84f2d4bf9783ecfa26 Binary files /dev/null and b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png new file mode 100644 index 0000000000000000000000000000000000000000..2c18de9e66108411737e910f5c1972476f03ddbf Binary files /dev/null and b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png new file mode 100644 index 0000000000000000000000000000000000000000..beed3cdd2c32af5114a7dc70b9ef5b698eb8797e Binary files /dev/null and b/TensorFlowTTS/examples/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png differ diff --git a/TensorFlowTTS/examples/android/app/src/main/res/values/colors.xml b/TensorFlowTTS/examples/android/app/src/main/res/values/colors.xml new file mode 100644 index 0000000000000000000000000000000000000000..69b22338c6510250df3b43672635120dbce2fa49 --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/res/values/colors.xml @@ -0,0 +1,6 @@ + + + #008577 + #00574B + #D81B60 + diff --git a/TensorFlowTTS/examples/android/app/src/main/res/values/strings.xml b/TensorFlowTTS/examples/android/app/src/main/res/values/strings.xml new file mode 100644 index 0000000000000000000000000000000000000000..1a7a850472a7c9203a224d0cdd3f7ad9c1c344e8 --- /dev/null +++ 
b/TensorFlowTTS/examples/android/app/src/main/res/values/strings.xml @@ -0,0 +1,3 @@ + + TensorflowTTS + diff --git a/TensorFlowTTS/examples/android/app/src/main/res/values/styles.xml b/TensorFlowTTS/examples/android/app/src/main/res/values/styles.xml new file mode 100644 index 0000000000000000000000000000000000000000..5885930df6d10edf3d6df40d6556297d11f953da --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/main/res/values/styles.xml @@ -0,0 +1,11 @@ + + + + + + diff --git a/TensorFlowTTS/examples/android/app/src/test/java/com/tensorspeech/tensorflowtts/ExampleUnitTest.java b/TensorFlowTTS/examples/android/app/src/test/java/com/tensorspeech/tensorflowtts/ExampleUnitTest.java new file mode 100644 index 0000000000000000000000000000000000000000..e667101f91a1e20bf1196e98e0825eb0accc99aa --- /dev/null +++ b/TensorFlowTTS/examples/android/app/src/test/java/com/tensorspeech/tensorflowtts/ExampleUnitTest.java @@ -0,0 +1,17 @@ +package com.tensorspeech.tensorflowtts; + +import org.junit.Test; + +import static org.junit.Assert.*; + +/** + * Example local unit test, which will execute on the development machine (host). + * + * @see Testing documentation + */ +public class ExampleUnitTest { + @Test + public void addition_isCorrect() { + assertEquals(4, 2 + 2); + } +} \ No newline at end of file diff --git a/TensorFlowTTS/examples/android/build.gradle b/TensorFlowTTS/examples/android/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..fb516be20b52a824110a39d1478442083f4d417b --- /dev/null +++ b/TensorFlowTTS/examples/android/build.gradle @@ -0,0 +1,27 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. 
+ +buildscript { + repositories { + google() + jcenter() + + } + dependencies { + classpath 'com.android.tools.build:gradle:3.5.2' + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + google() + jcenter() + + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/TensorFlowTTS/examples/android/gradle.properties b/TensorFlowTTS/examples/android/gradle.properties new file mode 100644 index 0000000000000000000000000000000000000000..199d16ede38cfde6d8ff5c75d5d04785ae8c504d --- /dev/null +++ b/TensorFlowTTS/examples/android/gradle.properties @@ -0,0 +1,20 @@ +# Project-wide Gradle settings. +# IDE (e.g. Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +org.gradle.jvmargs=-Xmx1536m +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. 
More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +# org.gradle.parallel=true +# AndroidX package structure to make it clearer which packages are bundled with the +# Android operating system, and which are packaged with your app's APK +# https://developer.android.com/topic/libraries/support-library/androidx-rn +android.useAndroidX=true +# Automatically convert third-party libraries to use AndroidX +android.enableJetifier=true + diff --git a/TensorFlowTTS/examples/android/gradle/wrapper/gradle-wrapper.jar b/TensorFlowTTS/examples/android/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6b961fd5a86aa5fbfe90f707c3138408be7c718 Binary files /dev/null and b/TensorFlowTTS/examples/android/gradle/wrapper/gradle-wrapper.jar differ diff --git a/TensorFlowTTS/examples/android/gradle/wrapper/gradle-wrapper.properties b/TensorFlowTTS/examples/android/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000000000000000000000000000000000000..1da08fd6f5c5f3f80dfe185922ce806092ee7e57 --- /dev/null +++ b/TensorFlowTTS/examples/android/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Mon Jul 20 11:21:10 CST 2020 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip diff --git a/TensorFlowTTS/examples/android/gradlew b/TensorFlowTTS/examples/android/gradlew new file mode 100644 index 0000000000000000000000000000000000000000..cccdd3d517fc5249beaefa600691cf150f2fa3e6 --- /dev/null +++ b/TensorFlowTTS/examples/android/gradlew @@ -0,0 +1,172 @@ +#!/usr/bin/env sh + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set 
APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? 
-eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" 
"$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/TensorFlowTTS/examples/android/gradlew.bat b/TensorFlowTTS/examples/android/gradlew.bat new file mode 100644 index 0000000000000000000000000000000000000000..f9553162f122c71b34635112e717c3e733b5b212 --- /dev/null +++ b/TensorFlowTTS/examples/android/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. 
+echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/TensorFlowTTS/examples/android/settings.gradle b/TensorFlowTTS/examples/android/settings.gradle new file mode 100644 index 0000000000000000000000000000000000000000..c5ecd4ee1d899b492589ea1b581f4c0a222f3464 --- /dev/null +++ b/TensorFlowTTS/examples/android/settings.gradle @@ -0,0 +1,2 @@ +include ':app' +rootProject.name='TensorflowTTS' diff --git a/TensorFlowTTS/examples/cpptflite/.gitignore b/TensorFlowTTS/examples/cpptflite/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..abfe4b7ccecbe04b48da86e2c66c5dc5874674dc --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/.gitignore @@ -0,0 +1,7 @@ +.vscode +/build +/models +/lib +lib.zip +models.zip +models_ljspeech.zip \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/CMakeLists.txt b/TensorFlowTTS/examples/cpptflite/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..6d35ac9c181c4b31244700e037a2dab5f3fa8303 --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/CMakeLists.txt @@ -0,0 +1,29 @@ +cmake_minimum_required(VERSION 2.6) +PROJECT(TfliteTTS) + +option(MAPPER "Processor select (supported BAKER or LJSPEECH)") +if (${MAPPER} STREQUAL "LJSPEECH") + add_definitions(-DLJSPEECH) +elseif (${MAPPER} STREQUAL "BAKER") + add_definitions(-DBAKER) +else () + message(FATAL_ERROR "MAPPER is only supported BAKER or LJSPEECH") +endif() + +message(STATUS "MAPPER is selected: "${MAPPER}) + +include_directories(lib) +include_directories(lib/flatbuffers/include) +include_directories(src) + +aux_source_directory(src DIR_SRCS) + +SET(CMAKE_CXX_COMPILER "g++") + +SET(CMAKE_CXX_FLAGS "-O3 -DNDEBUG -Wl,--no-as-needed -ldl -pthread -fpermissive") + +add_executable(demo demo/main.cpp ${DIR_SRCS}) + +find_library(tflite_LIB tensorflow-lite lib) + +target_link_libraries(demo ${tflite_LIB}) \ No newline at end of file diff --git 
a/TensorFlowTTS/examples/cpptflite/README.md b/TensorFlowTTS/examples/cpptflite/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1acbc0ec4b98e9cf7cd3c39581faebf55f7afa9f --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/README.md @@ -0,0 +1,112 @@ +# C++ Inference using TFlite +TensorFlow Lite is an open source deep learning framework for on-device inference. On Android and Linux (including Raspberry Pi) platforms, we can run inferences using TensorFlow Lite APIs available in C++. The repository TensorFlowTTS and TensorFlow Lite help developers run popular text-to-speech (TTS) models on mobile, embedded, and IoT devices. + +## TFlite model convert method +Method see [colab notebook](https://colab.research.google.com/drive/1Ma3MIcSdLsOxqOKcN1MlElncYMhrOg3J?usp=sharing#scrollTo=KCm6Oj7iLlu5). + +**Notes:** +- Quantization will deteriorate vocoder and bring noise, so the vocoder doesn't do optimization. +- TensorFlow Lite in C++ doesn't support the TensorFlow operation of Dropout. So the inference function need delete Dropout before converting tflite model, and it doesn't affect the inference result. +For example, fastspeech2 models: +```python +# tensorflow_tts/models/fastspeech2.py +# ... + def _inference(): + # ... + # f0_embedding = self.f0_dropout( + # self.f0_embeddings(tf.expand_dims(f0_outputs, 2)), training=True + # ) + # energy_embedding = self.energy_dropout( + # self.energy_embeddings(tf.expand_dims(energy_outputs, 2)), training=True + # ) + + f0_embedding = self.f0_embeddings(tf.expand_dims(f0_outputs, 2)) + + energy_embedding = self.energy_embeddings(tf.expand_dims(energy_outputs, 2)) + # ... +``` + +## About Code +- TfliteBase.cpp: A base class for loading tflite-model and creating tflite interpreter. By inheriting from this class, you can implement specific behavior, like Mel-spectrogram and Vocoder. 
+- TTSFrontend.cpp: Text preprocessor converts string to ID based on your desiged phoneme2ID dict, which needs a text to pronunciation module, like g2p for English and pinyin for Chinese. +- TTSBackend.cpp: It contains two-step process - first generating a Mel-spectrogram from phoneme-ID sequence and then generating the audio waveform by Vocoder. + + +## Using the demo +A demo of English or Mandarin TTS and the [tflite-models](https://github.com/lr2582858/TTS_tflite_cpp/releases/tag/0.1.0) are available for linux platform. The pretrained models to be converted are download from the colab notebook ([English](https://colab.research.google.com/drive/1akxtrLZHKuMiQup00tzO2olCaN-y3KiD?usp=sharing#scrollTo=4uv_QngUmFbK) or [Mandarin](https://colab.research.google.com/drive/1Ma3MIcSdLsOxqOKcN1MlElncYMhrOg3J?usp=sharing#scrollTo=KCm6Oj7iLlu5)). Mel-generator and Vocoder select FastSpeech2 and Multiband-MelGAN, respectively. + +**Notes:** +The text2ids function in TTSFrontend.cpp is implemented by using bash command in C++ instead of developing a new pronunciation module (see /demo/text2ids.py). In fact, it is not a recommended method, and you should redevelop a appropriate text2ids module, like the code in examples/cppwin. + +**Firstly**, it should compile a Tensorflow Lite static library. The method see the [reference](https://www.tensorflow.org/lite/guide/build_rpi) from the official guidance of Tensorflow. + +Execute the following command to compile a static library for linux: +```shell +./tensorflow/lite/tools/make/download_dependencies.sh +./tensorflow/lite/tools/make/build_lib.sh (for linux) +``` +(The official also provides different complie methods for other platforms (such as rpi, aarch64, and riscv), see /tensorflow/lite/tools/make/) + +Because this process takes much time, so a static library builded for linux is also available ([libtensorflow-lite.a](https://github.com/lr2582858/TTS_tflite_cpp/releases/tag/0.1.0)). 
+ +The structure of the demo folder should be: +``` +|- [cpptflite]/ +| |- demo/ +| |- src/ +| |- lib/ +| |- flatbuffers/ +| |- tensorflow/lite/ +| |- libtensorflow-lite.a +``` +The two folders of flatbuffers/ and tensorflow/lite/ provide the required header files. + +**Then**, +```shell +cd examples/cpptflite +mkdir build +cd build +``` + +**English Demo (using LJSPEECH dataset)** +```shell +cmake .. -DMAPPER=LJSPEECH +make + +./demo "Bill got in the habit of asking himself “Is that thought true?”" test.wav +``` + +**or Mandarin Demo (using Baker dataset)** +```shell +cmake .. -DMAPPER=BAKER +make + +./demo "这是一个开源的端到端中文语音合成系统" test.wav +``` + + + +## Results +- #### Comparison before and after conversion (English TTS) + ``` + "Bill got in the habit of asking himself “Is that thought true?” \ + And if he wasn’t absolutely certain it was, he just let it go." + ``` +- Before conversion (Python) + + ![ori_mel](./results/lj_ori_mel.png) + + +- After conversion (C++) + + ![tflite_mel](./results/lj_tflite_mel.png) + +- #### Adding #3 in chinese text will create pause prosody in audio +``` +这是一个开源的端到端中文语音合成系统" +``` +![tflite_mel](./results/tflite_mel.png) +``` +"这是一个开源的#3端到端#3中文语音合成系统" +``` +![tflite_mel](./results/tflite_mel2.png) \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/demo/main.cpp b/TensorFlowTTS/examples/cpptflite/demo/main.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4e3c7a9b7e702f376cb83a5a2b26af17f9f57831 --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/demo/main.cpp @@ -0,0 +1,70 @@ +#include +#include +#include +#include "VoxCommon.h" +#include "TTSFrontend.h" +#include "TTSBackend.h" + +typedef struct +{ + const char* mapperJson; + unsigned int sampleRate; +} Processor; + +int main(int argc, char* argv[]) +{ + if (argc != 3) + { + fprintf(stderr, "demo text wavfile\n"); + return 1; + } + + const char* cmd = "python3 ../demo/text2ids.py"; + + Processor proc; +#if LJSPEECH + proc.mapperJson = 
"../../../tensorflow_tts/processor/pretrained/ljspeech_mapper.json"; + proc.sampleRate = 22050; +#elif BAKER + proc.mapperJson = "../../../tensorflow_tts/processor/pretrained/baker_mapper.json"; + proc.sampleRate = 24000; +#endif + + const char* melgenfile = "../models/fastspeech2_quan.tflite"; + const char* vocoderfile = "../models/mb_melgan.tflite"; + + // Init + TTSFrontend ttsfrontend(proc.mapperJson, cmd); + TTSBackend ttsbackend(melgenfile, vocoderfile); + + // Process + ttsfrontend.text2ids(argv[1]); + std::vector phonesIds = ttsfrontend.getPhoneIds(); + + ttsbackend.inference(phonesIds); + MelGenData mel = ttsbackend.getMel(); + std::vector audio = ttsbackend.getAudio(); + + std::cout << "********* Phones' ID *********" << std::endl; + + for (auto iter: phonesIds) + { + std::cout << iter << " "; + } + std::cout << std::endl; + + std::cout << "********* MEL SHAPE **********" << std::endl; + for (auto index : mel.melShape) + { + std::cout << index << " "; + } + std::cout << std::endl; + + std::cout << "********* AUDIO LEN **********" << std::endl; + std::cout << audio.size() << std::endl; + + VoxUtil::ExportWAV(argv[2], audio, proc.sampleRate); + std::cout << "Wavfile: " << argv[2] << " creats." 
<< std::endl; + + return 0; +} \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/demo/text2ids.py b/TensorFlowTTS/examples/cpptflite/demo/text2ids.py new file mode 100644 index 0000000000000000000000000000000000000000..44ba3b8d5285c1264755437a8fc39582070266ad --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/demo/text2ids.py @@ -0,0 +1,23 @@ +import sys +import re + +eng_pat = re.compile("[a-zA-Z]+") + +if __name__ == "__main__": + argvs = sys.argv + + if (len(argvs) != 3): + print("usage: python3 {} mapper.json text".format(argvs[0])) + else: + from tensorflow_tts.inference import AutoProcessor + mapper_json = argvs[1] + processor = AutoProcessor.from_pretrained(pretrained_path=mapper_json) + + input_text = argvs[2] + + if eng_pat.match(input_text): + input_ids = processor.text_to_sequence(input_text) + else: + input_ids = processor.text_to_sequence(input_text, inference=True) + + print(" ".join(str(i) for i in input_ids)) \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/results/lj_ori_mel.png b/TensorFlowTTS/examples/cpptflite/results/lj_ori_mel.png new file mode 100644 index 0000000000000000000000000000000000000000..5725b6c474ba47dfc467c0498f65737ed0bacd1b Binary files /dev/null and b/TensorFlowTTS/examples/cpptflite/results/lj_ori_mel.png differ diff --git a/TensorFlowTTS/examples/cpptflite/results/lj_tflite_mel.png b/TensorFlowTTS/examples/cpptflite/results/lj_tflite_mel.png new file mode 100644 index 0000000000000000000000000000000000000000..7e21bd52596b21a12ebab0fdd4f76e7bf39e5de5 Binary files /dev/null and b/TensorFlowTTS/examples/cpptflite/results/lj_tflite_mel.png differ diff --git a/TensorFlowTTS/examples/cpptflite/results/tflite_mel.png b/TensorFlowTTS/examples/cpptflite/results/tflite_mel.png new file mode 100644 index 0000000000000000000000000000000000000000..2896864924c3a9142f9c0d9c2dd4bdad1cb9554a Binary files /dev/null and b/TensorFlowTTS/examples/cpptflite/results/tflite_mel.png differ diff --git 
a/TensorFlowTTS/examples/cpptflite/results/tflite_mel2.png b/TensorFlowTTS/examples/cpptflite/results/tflite_mel2.png new file mode 100644 index 0000000000000000000000000000000000000000..734a854c30e091db2bf6b20b4786d83fe1fc7eea Binary files /dev/null and b/TensorFlowTTS/examples/cpptflite/results/tflite_mel2.png differ diff --git a/TensorFlowTTS/examples/cpptflite/src/AudioFile.h b/TensorFlowTTS/examples/cpptflite/src/AudioFile.h new file mode 100644 index 0000000000000000000000000000000000000000..5ffb21e124621cd35799dde9149583bcf4c3737d --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/AudioFile.h @@ -0,0 +1,1253 @@ +//======================================================================= +/** @file AudioFile.h + * @author Adam Stark + * @copyright Copyright (C) 2017 Adam Stark + * + * This file is part of the 'AudioFile' library + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ +//======================================================================= + +#ifndef _AS_AudioFile_h +#define _AS_AudioFile_h + +#include +#include +#include +#include +#include +#include +#include +#include + +// disable some warnings on Windows +#if defined (_MSC_VER) + __pragma(warning (push)) + __pragma(warning (disable : 4244)) + __pragma(warning (disable : 4457)) + __pragma(warning (disable : 4458)) + __pragma(warning (disable : 4389)) + __pragma(warning (disable : 4996)) +#elif defined (__GNUC__) + _Pragma("GCC diagnostic push") + _Pragma("GCC diagnostic ignored \"-Wconversion\"") + _Pragma("GCC diagnostic ignored \"-Wsign-compare\"") + _Pragma("GCC diagnostic ignored \"-Wshadow\"") +#endif + +//============================================================= +/** The different types of audio file, plus some other types to + * indicate a failure to load a file, or that one hasn't been + * loaded yet + */ +enum class AudioFileFormat +{ + Error, + NotLoaded, + Wave, + Aiff +}; + +//============================================================= +template +class AudioFile +{ +public: + + //============================================================= + typedef std::vector > AudioBuffer; + + //============================================================= + /** Constructor */ + AudioFile(); + + //============================================================= + /** Loads an audio file from a given file path. + * @Returns true if the file was successfully loaded + */ + bool load (std::string filePath); + + /** Saves an audio file to a given file path. 
+ * @Returns true if the file was successfully saved + */ + bool save (std::string filePath, AudioFileFormat format = AudioFileFormat::Wave); + + //============================================================= + /** @Returns the sample rate */ + uint32_t getSampleRate() const; + + /** @Returns the number of audio channels in the buffer */ + int getNumChannels() const; + + /** @Returns true if the audio file is mono */ + bool isMono() const; + + /** @Returns true if the audio file is stereo */ + bool isStereo() const; + + /** @Returns the bit depth of each sample */ + int getBitDepth() const; + + /** @Returns the number of samples per channel */ + int getNumSamplesPerChannel() const; + + /** @Returns the length in seconds of the audio file based on the number of samples and sample rate */ + double getLengthInSeconds() const; + + /** Prints a summary of the audio file to the console */ + void printSummary() const; + + //============================================================= + + /** Set the audio buffer for this AudioFile by copying samples from another buffer. + * @Returns true if the buffer was copied successfully. + */ + bool setAudioBuffer (AudioBuffer& newBuffer); + + /** Sets the audio buffer to a given number of channels and number of samples per channel. This will try to preserve + * the existing audio, adding zeros to any new channels or new samples in a given channel. + */ + void setAudioBufferSize (int numChannels, int numSamples); + + /** Sets the number of samples per channel in the audio buffer. This will try to preserve + * the existing audio, adding zeros to new samples in a given channel if the number of samples is increased. + */ + void setNumSamplesPerChannel (int numSamples); + + /** Sets the number of channels. New channels will have the correct number of samples and be initialised to zero */ + void setNumChannels (int numChannels); + + /** Sets the bit depth for the audio file. 
If you use the save() function, this bit depth rate will be used */ + void setBitDepth (int numBitsPerSample); + + /** Sets the sample rate for the audio file. If you use the save() function, this sample rate will be used */ + void setSampleRate (uint32_t newSampleRate); + + //============================================================= + /** Sets whether the library should log error messages to the console. By default this is true */ + void shouldLogErrorsToConsole (bool logErrors); + + //============================================================= + /** A vector of vectors holding the audio samples for the AudioFile. You can + * access the samples by channel and then by sample index, i.e: + * + * samples[channel][sampleIndex] + */ + AudioBuffer samples; + + //============================================================= + /** An optional iXML chunk that can be added to the AudioFile. + */ + std::string iXMLChunk; + +private: + + //============================================================= + enum class Endianness + { + LittleEndian, + BigEndian + }; + + //============================================================= + AudioFileFormat determineAudioFileFormat (std::vector& fileData); + bool decodeWaveFile (std::vector& fileData); + bool decodeAiffFile (std::vector& fileData); + + //============================================================= + bool saveToWaveFile (std::string filePath); + bool saveToAiffFile (std::string filePath); + + //============================================================= + void clearAudioBuffer(); + + //============================================================= + int32_t fourBytesToInt (std::vector& source, int startIndex, Endianness endianness = Endianness::LittleEndian); + int16_t twoBytesToInt (std::vector& source, int startIndex, Endianness endianness = Endianness::LittleEndian); + int getIndexOfString (std::vector& source, std::string s); + int getIndexOfChunk (std::vector& source, const std::string& chunkHeaderID, int 
startIndex, Endianness endianness = Endianness::LittleEndian); + + //============================================================= + T sixteenBitIntToSample (int16_t sample); + int16_t sampleToSixteenBitInt (T sample); + + //============================================================= + uint8_t sampleToSingleByte (T sample); + T singleByteToSample (uint8_t sample); + + uint32_t getAiffSampleRate (std::vector& fileData, int sampleRateStartIndex); + bool tenByteMatch (std::vector& v1, int startIndex1, std::vector& v2, int startIndex2); + void addSampleRateToAiffData (std::vector& fileData, uint32_t sampleRate); + T clamp (T v1, T minValue, T maxValue); + + //============================================================= + void addStringToFileData (std::vector& fileData, std::string s); + void addInt32ToFileData (std::vector& fileData, int32_t i, Endianness endianness = Endianness::LittleEndian); + void addInt16ToFileData (std::vector& fileData, int16_t i, Endianness endianness = Endianness::LittleEndian); + + //============================================================= + bool writeDataToFile (std::vector& fileData, std::string filePath); + + //============================================================= + void reportError (std::string errorMessage); + + //============================================================= + AudioFileFormat audioFileFormat; + uint32_t sampleRate; + int bitDepth; + bool logErrorsToConsole {true}; +}; + + +//============================================================= +// Pre-defined 10-byte representations of common sample rates +static std::unordered_map > aiffSampleRateTable = { + {8000, {64, 11, 250, 0, 0, 0, 0, 0, 0, 0}}, + {11025, {64, 12, 172, 68, 0, 0, 0, 0, 0, 0}}, + {16000, {64, 12, 250, 0, 0, 0, 0, 0, 0, 0}}, + {22050, {64, 13, 172, 68, 0, 0, 0, 0, 0, 0}}, + {32000, {64, 13, 250, 0, 0, 0, 0, 0, 0, 0}}, + {37800, {64, 14, 147, 168, 0, 0, 0, 0, 0, 0}}, + {44056, {64, 14, 172, 24, 0, 0, 0, 0, 0, 0}}, + {44100, {64, 14, 172, 68, 
0, 0, 0, 0, 0, 0}}, + {47250, {64, 14, 184, 146, 0, 0, 0, 0, 0, 0}}, + {48000, {64, 14, 187, 128, 0, 0, 0, 0, 0, 0}}, + {50000, {64, 14, 195, 80, 0, 0, 0, 0, 0, 0}}, + {50400, {64, 14, 196, 224, 0, 0, 0, 0, 0, 0}}, + {88200, {64, 15, 172, 68, 0, 0, 0, 0, 0, 0}}, + {96000, {64, 15, 187, 128, 0, 0, 0, 0, 0, 0}}, + {176400, {64, 16, 172, 68, 0, 0, 0, 0, 0, 0}}, + {192000, {64, 16, 187, 128, 0, 0, 0, 0, 0, 0}}, + {352800, {64, 17, 172, 68, 0, 0, 0, 0, 0, 0}}, + {2822400, {64, 20, 172, 68, 0, 0, 0, 0, 0, 0}}, + {5644800, {64, 21, 172, 68, 0, 0, 0, 0, 0, 0}} +}; + +//============================================================= +enum WavAudioFormat +{ + PCM = 0x0001, + IEEEFloat = 0x0003, + ALaw = 0x0006, + MULaw = 0x0007, + Extensible = 0xFFFE +}; + +//============================================================= +enum AIFFAudioFormat +{ + Uncompressed, + Compressed, + Error +}; + +//============================================================= +/* IMPLEMENTATION */ +//============================================================= + +//============================================================= +template +AudioFile::AudioFile() +{ + static_assert(std::is_floating_point::value, "ERROR: This version of AudioFile only supports floating point sample formats"); + + bitDepth = 16; + sampleRate = 44100; + samples.resize (1); + samples[0].resize (0); + audioFileFormat = AudioFileFormat::NotLoaded; +} + +//============================================================= +template +uint32_t AudioFile::getSampleRate() const +{ + return sampleRate; +} + +//============================================================= +template +int AudioFile::getNumChannels() const +{ + return (int)samples.size(); +} + +//============================================================= +template +bool AudioFile::isMono() const +{ + return getNumChannels() == 1; +} + +//============================================================= +template +bool AudioFile::isStereo() const +{ + return getNumChannels() 
== 2; +} + +//============================================================= +template +int AudioFile::getBitDepth() const +{ + return bitDepth; +} + +//============================================================= +template +int AudioFile::getNumSamplesPerChannel() const +{ + if (samples.size() > 0) + return (int) samples[0].size(); + else + return 0; +} + +//============================================================= +template +double AudioFile::getLengthInSeconds() const +{ + return (double)getNumSamplesPerChannel() / (double)sampleRate; +} + +//============================================================= +template +void AudioFile::printSummary() const +{ + std::cout << "|======================================|" << std::endl; + std::cout << "Num Channels: " << getNumChannels() << std::endl; + std::cout << "Num Samples Per Channel: " << getNumSamplesPerChannel() << std::endl; + std::cout << "Sample Rate: " << sampleRate << std::endl; + std::cout << "Bit Depth: " << bitDepth << std::endl; + std::cout << "Length in Seconds: " << getLengthInSeconds() << std::endl; + std::cout << "|======================================|" << std::endl; +} + +//============================================================= +template +bool AudioFile::setAudioBuffer (AudioBuffer& newBuffer) +{ + int numChannels = (int)newBuffer.size(); + + if (numChannels <= 0) + { + assert (false && "The buffer your are trying to use has no channels"); + return false; + } + + size_t numSamples = newBuffer[0].size(); + + // set the number of channels + samples.resize (newBuffer.size()); + + for (int k = 0; k < getNumChannels(); k++) + { + assert (newBuffer[k].size() == numSamples); + + samples[k].resize (numSamples); + + for (size_t i = 0; i < numSamples; i++) + { + samples[k][i] = newBuffer[k][i]; + } + } + + return true; +} + +//============================================================= +template +void AudioFile::setAudioBufferSize (int numChannels, int numSamples) +{ + samples.resize 
(numChannels); + setNumSamplesPerChannel (numSamples); +} + +//============================================================= +template +void AudioFile::setNumSamplesPerChannel (int numSamples) +{ + int originalSize = getNumSamplesPerChannel(); + + for (int i = 0; i < getNumChannels();i++) + { + samples[i].resize (numSamples); + + // set any new samples to zero + if (numSamples > originalSize) + std::fill (samples[i].begin() + originalSize, samples[i].end(), (T)0.); + } +} + +//============================================================= +template +void AudioFile::setNumChannels (int numChannels) +{ + int originalNumChannels = getNumChannels(); + int originalNumSamplesPerChannel = getNumSamplesPerChannel(); + + samples.resize (numChannels); + + // make sure any new channels are set to the right size + // and filled with zeros + if (numChannels > originalNumChannels) + { + for (int i = originalNumChannels; i < numChannels; i++) + { + samples[i].resize (originalNumSamplesPerChannel); + std::fill (samples[i].begin(), samples[i].end(), (T)0.); + } + } +} + +//============================================================= +template +void AudioFile::setBitDepth (int numBitsPerSample) +{ + bitDepth = numBitsPerSample; +} + +//============================================================= +template +void AudioFile::setSampleRate (uint32_t newSampleRate) +{ + sampleRate = newSampleRate; +} + +//============================================================= +template +void AudioFile::shouldLogErrorsToConsole (bool logErrors) +{ + logErrorsToConsole = logErrors; +} + +//============================================================= +template +bool AudioFile::load (std::string filePath) +{ + std::ifstream file (filePath, std::ios::binary); + + // check the file exists + if (! 
file.good()) + { + reportError ("ERROR: File doesn't exist or otherwise can't load file\n" + filePath); + return false; + } + + file.unsetf (std::ios::skipws); + std::istream_iterator begin (file), end; + std::vector fileData (begin, end); + + // get audio file format + audioFileFormat = determineAudioFileFormat (fileData); + + if (audioFileFormat == AudioFileFormat::Wave) + { + return decodeWaveFile (fileData); + } + else if (audioFileFormat == AudioFileFormat::Aiff) + { + return decodeAiffFile (fileData); + } + else + { + reportError ("Audio File Type: Error"); + return false; + } +} + +//============================================================= +template +bool AudioFile::decodeWaveFile (std::vector& fileData) +{ + // ----------------------------------------------------------- + // HEADER CHUNK + std::string headerChunkID (fileData.begin(), fileData.begin() + 4); + //int32_t fileSizeInBytes = fourBytesToInt (fileData, 4) + 8; + std::string format (fileData.begin() + 8, fileData.begin() + 12); + + // ----------------------------------------------------------- + // try and find the start points of key chunks + int indexOfDataChunk = getIndexOfChunk (fileData, "data", 12); + int indexOfFormatChunk = getIndexOfChunk (fileData, "fmt ", 12); + int indexOfXMLChunk = getIndexOfChunk (fileData, "iXML", 12); + + // if we can't find the data or format chunks, or the IDs/formats don't seem to be as expected + // then it is unlikely we'll able to read this file, so abort + if (indexOfDataChunk == -1 || indexOfFormatChunk == -1 || headerChunkID != "RIFF" || format != "WAVE") + { + reportError ("ERROR: this doesn't seem to be a valid .WAV file"); + return false; + } + + // ----------------------------------------------------------- + // FORMAT CHUNK + int f = indexOfFormatChunk; + std::string formatChunkID (fileData.begin() + f, fileData.begin() + f + 4); + //int32_t formatChunkSize = fourBytesToInt (fileData, f + 4); + int16_t audioFormat = twoBytesToInt (fileData, f + 8); 
+ int16_t numChannels = twoBytesToInt (fileData, f + 10); + sampleRate = (uint32_t) fourBytesToInt (fileData, f + 12); + int32_t numBytesPerSecond = fourBytesToInt (fileData, f + 16); + int16_t numBytesPerBlock = twoBytesToInt (fileData, f + 20); + bitDepth = (int) twoBytesToInt (fileData, f + 22); + + int numBytesPerSample = bitDepth / 8; + + // check that the audio format is PCM or Float + if (audioFormat != WavAudioFormat::PCM && audioFormat != WavAudioFormat::IEEEFloat) + { + reportError ("ERROR: this .WAV file is encoded in a format that this library does not support at present"); + return false; + } + + // check the number of channels is mono or stereo + if (numChannels < 1 || numChannels > 128) + { + reportError ("ERROR: this WAV file seems to be an invalid number of channels (or corrupted?)"); + return false; + } + + // check header data is consistent + if ((numBytesPerSecond != (numChannels * sampleRate * bitDepth) / 8) || (numBytesPerBlock != (numChannels * numBytesPerSample))) + { + reportError ("ERROR: the header data in this WAV file seems to be inconsistent"); + return false; + } + + // check bit depth is either 8, 16, 24 or 32 bit + if (bitDepth != 8 && bitDepth != 16 && bitDepth != 24 && bitDepth != 32) + { + reportError ("ERROR: this file has a bit depth that is not 8, 16, 24 or 32 bits"); + return false; + } + + // ----------------------------------------------------------- + // DATA CHUNK + int d = indexOfDataChunk; + std::string dataChunkID (fileData.begin() + d, fileData.begin() + d + 4); + int32_t dataChunkSize = fourBytesToInt (fileData, d + 4); + + int numSamples = dataChunkSize / (numChannels * bitDepth / 8); + int samplesStartIndex = indexOfDataChunk + 8; + + clearAudioBuffer(); + samples.resize (numChannels); + + for (int i = 0; i < numSamples; i++) + { + for (int channel = 0; channel < numChannels; channel++) + { + int sampleIndex = samplesStartIndex + (numBytesPerBlock * i) + channel * numBytesPerSample; + + if (bitDepth == 8) + { + T 
sample = singleByteToSample (fileData[sampleIndex]); + samples[channel].push_back (sample); + } + else if (bitDepth == 16) + { + int16_t sampleAsInt = twoBytesToInt (fileData, sampleIndex); + T sample = sixteenBitIntToSample (sampleAsInt); + samples[channel].push_back (sample); + } + else if (bitDepth == 24) + { + int32_t sampleAsInt = 0; + sampleAsInt = (fileData[sampleIndex + 2] << 16) | (fileData[sampleIndex + 1] << 8) | fileData[sampleIndex]; + + if (sampleAsInt & 0x800000) // if the 24th bit is set, this is a negative number in 24-bit world + sampleAsInt = sampleAsInt | ~0xFFFFFF; // so make sure sign is extended to the 32 bit float + + T sample = (T)sampleAsInt / (T)8388608.; + samples[channel].push_back (sample); + } + else if (bitDepth == 32) + { + int32_t sampleAsInt = fourBytesToInt (fileData, sampleIndex); + T sample; + + if (audioFormat == WavAudioFormat::IEEEFloat) + sample = (T)reinterpret_cast (sampleAsInt); + else // assume PCM + sample = (T) sampleAsInt / static_cast (std::numeric_limits::max()); + + samples[channel].push_back (sample); + } + else + { + assert (false); + } + } + } + + // ----------------------------------------------------------- + // iXML CHUNK + if (indexOfXMLChunk != -1) + { + int32_t chunkSize = fourBytesToInt (fileData, indexOfXMLChunk + 4); + iXMLChunk = std::string ((const char*) &fileData[indexOfXMLChunk + 8], chunkSize); + } + + return true; +} + +//============================================================= +template +bool AudioFile::decodeAiffFile (std::vector& fileData) +{ + // ----------------------------------------------------------- + // HEADER CHUNK + std::string headerChunkID (fileData.begin(), fileData.begin() + 4); + //int32_t fileSizeInBytes = fourBytesToInt (fileData, 4, Endianness::BigEndian) + 8; + std::string format (fileData.begin() + 8, fileData.begin() + 12); + + int audioFormat = format == "AIFF" ? AIFFAudioFormat::Uncompressed : format == "AIFC" ? 
AIFFAudioFormat::Compressed : AIFFAudioFormat::Error; + + // ----------------------------------------------------------- + // try and find the start points of key chunks + int indexOfCommChunk = getIndexOfChunk (fileData, "COMM", 12, Endianness::BigEndian); + int indexOfSoundDataChunk = getIndexOfChunk (fileData, "SSND", 12, Endianness::BigEndian); + int indexOfXMLChunk = getIndexOfChunk (fileData, "iXML", 12, Endianness::BigEndian); + + // if we can't find the data or format chunks, or the IDs/formats don't seem to be as expected + // then it is unlikely we'll able to read this file, so abort + if (indexOfSoundDataChunk == -1 || indexOfCommChunk == -1 || headerChunkID != "FORM" || audioFormat == AIFFAudioFormat::Error) + { + reportError ("ERROR: this doesn't seem to be a valid AIFF file"); + return false; + } + + // ----------------------------------------------------------- + // COMM CHUNK + int p = indexOfCommChunk; + std::string commChunkID (fileData.begin() + p, fileData.begin() + p + 4); + //int32_t commChunkSize = fourBytesToInt (fileData, p + 4, Endianness::BigEndian); + int16_t numChannels = twoBytesToInt (fileData, p + 8, Endianness::BigEndian); + int32_t numSamplesPerChannel = fourBytesToInt (fileData, p + 10, Endianness::BigEndian); + bitDepth = (int) twoBytesToInt (fileData, p + 14, Endianness::BigEndian); + sampleRate = getAiffSampleRate (fileData, p + 16); + + // check the sample rate was properly decoded + if (sampleRate == 0) + { + reportError ("ERROR: this AIFF file has an unsupported sample rate"); + return false; + } + + // check the number of channels is mono or stereo + if (numChannels < 1 ||numChannels > 2) + { + reportError ("ERROR: this AIFF file seems to be neither mono nor stereo (perhaps multi-track, or corrupted?)"); + return false; + } + + // check bit depth is either 8, 16, 24 or 32-bit + if (bitDepth != 8 && bitDepth != 16 && bitDepth != 24 && bitDepth != 32) + { + reportError ("ERROR: this file has a bit depth that is not 8, 16, 24 
or 32 bits"); + return false; + } + + // ----------------------------------------------------------- + // SSND CHUNK + int s = indexOfSoundDataChunk; + std::string soundDataChunkID (fileData.begin() + s, fileData.begin() + s + 4); + int32_t soundDataChunkSize = fourBytesToInt (fileData, s + 4, Endianness::BigEndian); + int32_t offset = fourBytesToInt (fileData, s + 8, Endianness::BigEndian); + //int32_t blockSize = fourBytesToInt (fileData, s + 12, Endianness::BigEndian); + + int numBytesPerSample = bitDepth / 8; + int numBytesPerFrame = numBytesPerSample * numChannels; + int totalNumAudioSampleBytes = numSamplesPerChannel * numBytesPerFrame; + int samplesStartIndex = s + 16 + (int)offset; + + // sanity check the data + if ((soundDataChunkSize - 8) != totalNumAudioSampleBytes || totalNumAudioSampleBytes > static_cast(fileData.size() - samplesStartIndex)) + { + reportError ("ERROR: the metadatafor this file doesn't seem right"); + return false; + } + + clearAudioBuffer(); + samples.resize (numChannels); + + for (int i = 0; i < numSamplesPerChannel; i++) + { + for (int channel = 0; channel < numChannels; channel++) + { + int sampleIndex = samplesStartIndex + (numBytesPerFrame * i) + channel * numBytesPerSample; + + if (bitDepth == 8) + { + int8_t sampleAsSigned8Bit = (int8_t)fileData[sampleIndex]; + T sample = (T)sampleAsSigned8Bit / (T)128.; + samples[channel].push_back (sample); + } + else if (bitDepth == 16) + { + int16_t sampleAsInt = twoBytesToInt (fileData, sampleIndex, Endianness::BigEndian); + T sample = sixteenBitIntToSample (sampleAsInt); + samples[channel].push_back (sample); + } + else if (bitDepth == 24) + { + int32_t sampleAsInt = 0; + sampleAsInt = (fileData[sampleIndex] << 16) | (fileData[sampleIndex + 1] << 8) | fileData[sampleIndex + 2]; + + if (sampleAsInt & 0x800000) // if the 24th bit is set, this is a negative number in 24-bit world + sampleAsInt = sampleAsInt | ~0xFFFFFF; // so make sure sign is extended to the 32 bit float + + T sample = 
(T)sampleAsInt / (T)8388608.; + samples[channel].push_back (sample); + } + else if (bitDepth == 32) + { + int32_t sampleAsInt = fourBytesToInt (fileData, sampleIndex, Endianness::BigEndian); + T sample; + + if (audioFormat == AIFFAudioFormat::Compressed) + sample = (T)reinterpret_cast (sampleAsInt); + else // assume uncompressed + sample = (T) sampleAsInt / static_cast (std::numeric_limits::max()); + + samples[channel].push_back (sample); + } + else + { + assert (false); + } + } + } + + // ----------------------------------------------------------- + // iXML CHUNK + if (indexOfXMLChunk != -1) + { + int32_t chunkSize = fourBytesToInt (fileData, indexOfXMLChunk + 4); + iXMLChunk = std::string ((const char*) &fileData[indexOfXMLChunk + 8], chunkSize); + } + + return true; +} + +//============================================================= +template +uint32_t AudioFile::getAiffSampleRate (std::vector& fileData, int sampleRateStartIndex) +{ + for (auto it : aiffSampleRateTable) + { + if (tenByteMatch (fileData, sampleRateStartIndex, it.second, 0)) + return it.first; + } + + return 0; +} + +//============================================================= +template +bool AudioFile::tenByteMatch (std::vector& v1, int startIndex1, std::vector& v2, int startIndex2) +{ + for (int i = 0; i < 10; i++) + { + if (v1[startIndex1 + i] != v2[startIndex2 + i]) + return false; + } + + return true; +} + +//============================================================= +template +void AudioFile::addSampleRateToAiffData (std::vector& fileData, uint32_t sampleRate) +{ + if (aiffSampleRateTable.count (sampleRate) > 0) + { + for (int i = 0; i < 10; i++) + fileData.push_back (aiffSampleRateTable[sampleRate][i]); + } +} + +//============================================================= +template +bool AudioFile::save (std::string filePath, AudioFileFormat format) +{ + if (format == AudioFileFormat::Wave) + { + return saveToWaveFile (filePath); + } + else if (format == AudioFileFormat::Aiff) + 
{ + return saveToAiffFile (filePath); + } + + return false; +} + +//============================================================= +template +bool AudioFile::saveToWaveFile (std::string filePath) +{ + std::vector fileData; + + int32_t dataChunkSize = getNumSamplesPerChannel() * (getNumChannels() * bitDepth / 8); + int16_t audioFormat = bitDepth == 32 ? WavAudioFormat::IEEEFloat : WavAudioFormat::PCM; + int32_t formatChunkSize = audioFormat == WavAudioFormat::PCM ? 16 : 18; + int32_t iXMLChunkSize = static_cast (iXMLChunk.size()); + + // ----------------------------------------------------------- + // HEADER CHUNK + addStringToFileData (fileData, "RIFF"); + + // The file size in bytes is the header chunk size (4, not counting RIFF and WAVE) + the format + // chunk size (24) + the metadata part of the data chunk plus the actual data chunk size + int32_t fileSizeInBytes = 4 + formatChunkSize + 8 + 8 + dataChunkSize; + if (iXMLChunkSize > 0) + { + fileSizeInBytes += (8 + iXMLChunkSize); + } + + addInt32ToFileData (fileData, fileSizeInBytes); + + addStringToFileData (fileData, "WAVE"); + + // ----------------------------------------------------------- + // FORMAT CHUNK + addStringToFileData (fileData, "fmt "); + addInt32ToFileData (fileData, formatChunkSize); // format chunk size (16 for PCM) + addInt16ToFileData (fileData, audioFormat); // audio format + addInt16ToFileData (fileData, (int16_t)getNumChannels()); // num channels + addInt32ToFileData (fileData, (int32_t)sampleRate); // sample rate + + int32_t numBytesPerSecond = (int32_t) ((getNumChannels() * sampleRate * bitDepth) / 8); + addInt32ToFileData (fileData, numBytesPerSecond); + + int16_t numBytesPerBlock = getNumChannels() * (bitDepth / 8); + addInt16ToFileData (fileData, numBytesPerBlock); + + addInt16ToFileData (fileData, (int16_t)bitDepth); + + if (audioFormat == WavAudioFormat::IEEEFloat) + addInt16ToFileData (fileData, 0); // extension size + + // 
----------------------------------------------------------- + // DATA CHUNK + addStringToFileData (fileData, "data"); + addInt32ToFileData (fileData, dataChunkSize); + + for (int i = 0; i < getNumSamplesPerChannel(); i++) + { + for (int channel = 0; channel < getNumChannels(); channel++) + { + if (bitDepth == 8) + { + uint8_t byte = sampleToSingleByte (samples[channel][i]); + fileData.push_back (byte); + } + else if (bitDepth == 16) + { + int16_t sampleAsInt = sampleToSixteenBitInt (samples[channel][i]); + addInt16ToFileData (fileData, sampleAsInt); + } + else if (bitDepth == 24) + { + int32_t sampleAsIntAgain = (int32_t) (samples[channel][i] * (T)8388608.); + + uint8_t bytes[3]; + bytes[2] = (uint8_t) (sampleAsIntAgain >> 16) & 0xFF; + bytes[1] = (uint8_t) (sampleAsIntAgain >> 8) & 0xFF; + bytes[0] = (uint8_t) sampleAsIntAgain & 0xFF; + + fileData.push_back (bytes[0]); + fileData.push_back (bytes[1]); + fileData.push_back (bytes[2]); + } + else if (bitDepth == 32) + { + int32_t sampleAsInt; + + if (audioFormat == WavAudioFormat::IEEEFloat) + sampleAsInt = (int32_t) reinterpret_cast (samples[channel][i]); + else // assume PCM + sampleAsInt = (int32_t) (samples[channel][i] * std::numeric_limits::max()); + + addInt32ToFileData (fileData, sampleAsInt, Endianness::LittleEndian); + } + else + { + assert (false && "Trying to write a file with unsupported bit depth"); + return false; + } + } + } + + // ----------------------------------------------------------- + // iXML CHUNK + if (iXMLChunkSize > 0) + { + addStringToFileData (fileData, "iXML"); + addInt32ToFileData (fileData, iXMLChunkSize); + addStringToFileData (fileData, iXMLChunk); + } + + // check that the various sizes we put in the metadata are correct + if (fileSizeInBytes != static_cast (fileData.size() - 8) || dataChunkSize != (getNumSamplesPerChannel() * getNumChannels() * (bitDepth / 8))) + { + reportError ("ERROR: couldn't save file to " + filePath); + return false; + } + + // try to write the file + return 
writeDataToFile (fileData, filePath); +} + +//============================================================= +template +bool AudioFile::saveToAiffFile (std::string filePath) +{ + std::vector fileData; + + int32_t numBytesPerSample = bitDepth / 8; + int32_t numBytesPerFrame = numBytesPerSample * getNumChannels(); + int32_t totalNumAudioSampleBytes = getNumSamplesPerChannel() * numBytesPerFrame; + int32_t soundDataChunkSize = totalNumAudioSampleBytes + 8; + int32_t iXMLChunkSize = static_cast (iXMLChunk.size()); + + // ----------------------------------------------------------- + // HEADER CHUNK + addStringToFileData (fileData, "FORM"); + + // The file size in bytes is the header chunk size (4, not counting FORM and AIFF) + the COMM + // chunk size (26) + the metadata part of the SSND chunk plus the actual data chunk size + int32_t fileSizeInBytes = 4 + 26 + 16 + totalNumAudioSampleBytes; + if (iXMLChunkSize > 0) + { + fileSizeInBytes += (8 + iXMLChunkSize); + } + + addInt32ToFileData (fileData, fileSizeInBytes, Endianness::BigEndian); + + addStringToFileData (fileData, "AIFF"); + + // ----------------------------------------------------------- + // COMM CHUNK + addStringToFileData (fileData, "COMM"); + addInt32ToFileData (fileData, 18, Endianness::BigEndian); // commChunkSize + addInt16ToFileData (fileData, getNumChannels(), Endianness::BigEndian); // num channels + addInt32ToFileData (fileData, getNumSamplesPerChannel(), Endianness::BigEndian); // num samples per channel + addInt16ToFileData (fileData, bitDepth, Endianness::BigEndian); // bit depth + addSampleRateToAiffData (fileData, sampleRate); + + // ----------------------------------------------------------- + // SSND CHUNK + addStringToFileData (fileData, "SSND"); + addInt32ToFileData (fileData, soundDataChunkSize, Endianness::BigEndian); + addInt32ToFileData (fileData, 0, Endianness::BigEndian); // offset + addInt32ToFileData (fileData, 0, Endianness::BigEndian); // block size + + for (int i = 0; i < 
getNumSamplesPerChannel(); i++) + { + for (int channel = 0; channel < getNumChannels(); channel++) + { + if (bitDepth == 8) + { + uint8_t byte = sampleToSingleByte (samples[channel][i]); + fileData.push_back (byte); + } + else if (bitDepth == 16) + { + int16_t sampleAsInt = sampleToSixteenBitInt (samples[channel][i]); + addInt16ToFileData (fileData, sampleAsInt, Endianness::BigEndian); + } + else if (bitDepth == 24) + { + int32_t sampleAsIntAgain = (int32_t) (samples[channel][i] * (T)8388608.); + + uint8_t bytes[3]; + bytes[0] = (uint8_t) (sampleAsIntAgain >> 16) & 0xFF; + bytes[1] = (uint8_t) (sampleAsIntAgain >> 8) & 0xFF; + bytes[2] = (uint8_t) sampleAsIntAgain & 0xFF; + + fileData.push_back (bytes[0]); + fileData.push_back (bytes[1]); + fileData.push_back (bytes[2]); + } + else if (bitDepth == 32) + { + // write samples as signed integers (no implementation yet for floating point, but looking at WAV implementation should help) + int32_t sampleAsInt = (int32_t) (samples[channel][i] * std::numeric_limits::max()); + addInt32ToFileData (fileData, sampleAsInt, Endianness::BigEndian); + } + else + { + assert (false && "Trying to write a file with unsupported bit depth"); + return false; + } + } + } + + // ----------------------------------------------------------- + // iXML CHUNK + if (iXMLChunkSize > 0) + { + addStringToFileData (fileData, "iXML"); + addInt32ToFileData (fileData, iXMLChunkSize); + addStringToFileData (fileData, iXMLChunk); + } + + // check that the various sizes we put in the metadata are correct + if (fileSizeInBytes != static_cast (fileData.size() - 8) || soundDataChunkSize != getNumSamplesPerChannel() * numBytesPerFrame + 8) + { + reportError ("ERROR: couldn't save file to " + filePath); + return false; + } + + // try to write the file + return writeDataToFile (fileData, filePath); +} + +//============================================================= +template +bool AudioFile::writeDataToFile (std::vector& fileData, std::string filePath) +{ + 
std::ofstream outputFile (filePath, std::ios::binary); + + if (outputFile.is_open()) + { + for (size_t i = 0; i < fileData.size(); i++) + { + char value = (char) fileData[i]; + outputFile.write (&value, sizeof (char)); + } + + outputFile.close(); + + return true; + } + + return false; +} + +//============================================================= +template +void AudioFile::addStringToFileData (std::vector& fileData, std::string s) +{ + for (size_t i = 0; i < s.length();i++) + fileData.push_back ((uint8_t) s[i]); +} + +//============================================================= +template +void AudioFile::addInt32ToFileData (std::vector& fileData, int32_t i, Endianness endianness) +{ + uint8_t bytes[4]; + + if (endianness == Endianness::LittleEndian) + { + bytes[3] = (i >> 24) & 0xFF; + bytes[2] = (i >> 16) & 0xFF; + bytes[1] = (i >> 8) & 0xFF; + bytes[0] = i & 0xFF; + } + else + { + bytes[0] = (i >> 24) & 0xFF; + bytes[1] = (i >> 16) & 0xFF; + bytes[2] = (i >> 8) & 0xFF; + bytes[3] = i & 0xFF; + } + + for (int i = 0; i < 4; i++) + fileData.push_back (bytes[i]); +} + +//============================================================= +template +void AudioFile::addInt16ToFileData (std::vector& fileData, int16_t i, Endianness endianness) +{ + uint8_t bytes[2]; + + if (endianness == Endianness::LittleEndian) + { + bytes[1] = (i >> 8) & 0xFF; + bytes[0] = i & 0xFF; + } + else + { + bytes[0] = (i >> 8) & 0xFF; + bytes[1] = i & 0xFF; + } + + fileData.push_back (bytes[0]); + fileData.push_back (bytes[1]); +} + +//============================================================= +template +void AudioFile::clearAudioBuffer() +{ + for (size_t i = 0; i < samples.size();i++) + { + samples[i].clear(); + } + + samples.clear(); +} + +//============================================================= +template +AudioFileFormat AudioFile::determineAudioFileFormat (std::vector& fileData) +{ + std::string header (fileData.begin(), fileData.begin() + 4); + + if (header == "RIFF") + 
return AudioFileFormat::Wave; + else if (header == "FORM") + return AudioFileFormat::Aiff; + else + return AudioFileFormat::Error; +} + +//============================================================= +template +int32_t AudioFile::fourBytesToInt (std::vector& source, int startIndex, Endianness endianness) +{ + int32_t result; + + if (endianness == Endianness::LittleEndian) + result = (source[startIndex + 3] << 24) | (source[startIndex + 2] << 16) | (source[startIndex + 1] << 8) | source[startIndex]; + else + result = (source[startIndex] << 24) | (source[startIndex + 1] << 16) | (source[startIndex + 2] << 8) | source[startIndex + 3]; + + return result; +} + +//============================================================= +template +int16_t AudioFile::twoBytesToInt (std::vector& source, int startIndex, Endianness endianness) +{ + int16_t result; + + if (endianness == Endianness::LittleEndian) + result = (source[startIndex + 1] << 8) | source[startIndex]; + else + result = (source[startIndex] << 8) | source[startIndex + 1]; + + return result; +} + +//============================================================= +template +int AudioFile::getIndexOfString (std::vector& source, std::string stringToSearchFor) +{ + int index = -1; + int stringLength = (int)stringToSearchFor.length(); + + for (size_t i = 0; i < source.size() - stringLength;i++) + { + std::string section (source.begin() + i, source.begin() + i + stringLength); + + if (section == stringToSearchFor) + { + index = static_cast (i); + break; + } + } + + return index; +} + +//============================================================= +template +int AudioFile::getIndexOfChunk (std::vector& source, const std::string& chunkHeaderID, int startIndex, Endianness endianness) +{ + constexpr int dataLen = 4; + if (chunkHeaderID.size() != dataLen) + { + assert (false && "Invalid chunk header ID string"); + return -1; + } + + int i = startIndex; + while (i < source.size() - dataLen) + { + if (memcmp (&source[i], 
chunkHeaderID.data(), dataLen) == 0) + { + return i; + } + + i += dataLen; + auto chunkSize = fourBytesToInt (source, i, endianness); + i += (dataLen + chunkSize); + } + + return -1; +} + +//============================================================= +template +T AudioFile::sixteenBitIntToSample (int16_t sample) +{ + return static_cast (sample) / static_cast (32768.); +} + +//============================================================= +template +int16_t AudioFile::sampleToSixteenBitInt (T sample) +{ + sample = clamp (sample, -1., 1.); + return static_cast (sample * 32767.); +} + +//============================================================= +template +uint8_t AudioFile::sampleToSingleByte (T sample) +{ + sample = clamp (sample, -1., 1.); + sample = (sample + 1.) / 2.; + return static_cast (sample * 255.); +} + +//============================================================= +template +T AudioFile::singleByteToSample (uint8_t sample) +{ + return static_cast (sample - 128) / static_cast (128.); +} + +//============================================================= +template +T AudioFile::clamp (T value, T minValue, T maxValue) +{ + value = std::min (value, maxValue); + value = std::max (value, minValue); + return value; +} + +//============================================================= +template +void AudioFile::reportError (std::string errorMessage) +{ + if (logErrorsToConsole) + std::cout << errorMessage << std::endl; +} + +#if defined (_MSC_VER) + __pragma(warning (pop)) +#elif defined (__GNUC__) + _Pragma("GCC diagnostic pop") +#endif + +#endif /* AudioFile_h */ diff --git a/TensorFlowTTS/examples/cpptflite/src/MelGenerateTF.cpp b/TensorFlowTTS/examples/cpptflite/src/MelGenerateTF.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4d03694d65f61a6c12197535caa2e6faf25bddb3 --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/MelGenerateTF.cpp @@ -0,0 +1,51 @@ +#include +#include "MelGenerateTF.h" + +MelGenData 
MelGenerateTF::infer(const std::vector inputIds) +{ + + MelGenData output; + + int32_t idsLen = inputIds.size(); + + std::vector> inputIndexsShape{ {1, idsLen}, {1}, {1}, {1}, {1} }; + + int32_t shapeI = 0; + for (auto index : inputIndexs) + { + interpreter->ResizeInputTensor(index, inputIndexsShape[shapeI]); + shapeI++; + } + + TFLITE_MINIMAL_CHECK(interpreter->AllocateTensors() == kTfLiteOk); + + int32_t* input_ids_ptr = interpreter->typed_tensor(inputIndexs[0]); + memcpy(input_ids_ptr, inputIds.data(), int_size * idsLen); + + int32_t* speaker_ids_ptr = interpreter->typed_tensor(inputIndexs[1]); + memcpy(speaker_ids_ptr, _speakerId.data(), int_size); + + float* speed_ratios_ptr = interpreter->typed_tensor(inputIndexs[2]); + memcpy(speed_ratios_ptr, _speedRatio.data(), float_size); + + float* speed_ratios2_ptr = interpreter->typed_tensor(inputIndexs[3]); + memcpy(speed_ratios2_ptr, _f0Ratio.data(), float_size); + + float* speed_ratios3_ptr = interpreter->typed_tensor(inputIndexs[4]); + memcpy(speed_ratios3_ptr, _enegyRatio.data(), float_size); + + TFLITE_MINIMAL_CHECK(interpreter->Invoke() == kTfLiteOk); + + TfLiteTensor* melGenTensor = interpreter->tensor(ouptIndex); + + for (int i=0; idims->size; i++) + { + output.melShape.push_back(melGenTensor->dims->data[i]); + } + + output.bytes = melGenTensor->bytes; + + output.melData = interpreter->typed_tensor(ouptIndex); + + return output; +} \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/src/MelGenerateTF.h b/TensorFlowTTS/examples/cpptflite/src/MelGenerateTF.h new file mode 100644 index 0000000000000000000000000000000000000000..2ca051f5d16f290c04758a991bfb960715be715d --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/MelGenerateTF.h @@ -0,0 +1,27 @@ +#ifndef MELGENERATETF_H +#define MELGENERATETF_H + +#include "TfliteBase.h" + +class MelGenerateTF : public TfliteBase +{ +public: + + MelGenerateTF(const char* modelFilename):TfliteBase(modelFilename), + 
inputIndexs(interpreter->inputs()), + ouptIndex(interpreter->outputs()[1]) {}; + + MelGenData infer(const std::vector inputIds); + +private: + std::vector _speakerId{0}; + std::vector _speedRatio{1.0}; + std::vector _f0Ratio{1.0}; + std::vector _enegyRatio{1.0}; + + const std::vector inputIndexs; + const int32_t ouptIndex; + +}; + +#endif // MELGENERATETF_H \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/src/TTSBackend.cpp b/TensorFlowTTS/examples/cpptflite/src/TTSBackend.cpp new file mode 100644 index 0000000000000000000000000000000000000000..816bd3bd8ac277ede9ee794d5ed0ee88ca313f82 --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/TTSBackend.cpp @@ -0,0 +1,7 @@ +#include "TTSBackend.h" + +void TTSBackend::inference(std::vector phonesIds) +{ + _mel = MelGen.infer(phonesIds); + _audio = Vocoder.infer(_mel); +} \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/src/TTSBackend.h b/TensorFlowTTS/examples/cpptflite/src/TTSBackend.h new file mode 100644 index 0000000000000000000000000000000000000000..e62cdf4a2928f6b4d24b95cc99d5820a50effdf1 --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/TTSBackend.h @@ -0,0 +1,33 @@ +#ifndef TTSBACKEND_H +#define TTSBACKEND_H + +#include +#include +#include "MelGenerateTF.h" +#include "VocoderTF.h" + +class TTSBackend +{ +public: + TTSBackend(const char* melgenfile, const char* vocoderfile): + MelGen(melgenfile), Vocoder(vocoderfile) + { + std::cout << "TTSBackend Init" << std::endl; + std::cout << melgenfile << std::endl; + std::cout << vocoderfile << std::endl; + }; + + void inference(std::vector phonesIds); + + MelGenData getMel() const {return _mel;} + std::vector getAudio() const {return _audio;} + +private: + MelGenerateTF MelGen; + VocoderTF Vocoder; + + MelGenData _mel; + std::vector _audio; +}; + +#endif // TTSBACKEND_H \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/src/TTSFrontend.cpp 
b/TensorFlowTTS/examples/cpptflite/src/TTSFrontend.cpp new file mode 100644 index 0000000000000000000000000000000000000000..20fd010219b0bc9f85a02b74bfe576da06c774e8 --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/TTSFrontend.cpp @@ -0,0 +1,43 @@ +#include "TTSFrontend.h" + +void TTSFrontend::text2ids(const std::string &text) +{ + _phonesIds = strSplit(getCmdResult(text)); +} + +std::string TTSFrontend::getCmdResult(const std::string &text) +{ + char buf[1000] = {0}; + FILE *pf = NULL; + + if( (pf = popen((_strCmd + " " + _mapperJson + " \"" + text + "\"").c_str(), "r")) == NULL ) + { + return ""; + } + + while(fgets(buf, sizeof(buf), pf)) + { + continue; + } + + std::string strResult(buf); + pclose(pf); + + return strResult; +} + +std::vector TTSFrontend::strSplit(const std::string &idStr) +{ + std::vector idsVector; + + std::regex rgx ("\\s+"); + std::sregex_token_iterator iter(idStr.begin(), idStr.end(), rgx, -1); + std::sregex_token_iterator end; + + while (iter != end) { + idsVector.push_back(stoi(*iter)); + ++iter; + } + + return idsVector; +} \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/src/TTSFrontend.h b/TensorFlowTTS/examples/cpptflite/src/TTSFrontend.h new file mode 100644 index 0000000000000000000000000000000000000000..957282e515b077e383909375f8e871cb8517a0c3 --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/TTSFrontend.h @@ -0,0 +1,44 @@ +#ifndef TTSFRONTEND_H +#define TTSFRONTEND_H + +#include +#include +#include +#include +#include + +class TTSFrontend +{ +public: + + /** + * Converting text to phoneIDs. + * A tmporary method using command to process text in this demo, + * which should to be replaced by a pronunciation processing module. 
+ *@param strCmd Command to call the method of processor.text_to_sequence() + */ + TTSFrontend(const std::string &mapperJson, + const std::string &strCmd): + _mapperJson(mapperJson), + _strCmd(strCmd) + { + std::cout << "TTSFrontend Init" << std::endl; + std::cout << _mapperJson << std::endl; + std::cout << _strCmd << std::endl; + }; + + void text2ids(const std::string &text); + + std::vector getPhoneIds() const {return _phonesIds;} +private: + + const std::string _mapperJson; + const std::string _strCmd; + + std::vector _phonesIds; + + std::string getCmdResult(const std::string &text); + std::vector strSplit(const std::string &idStr); +}; + +#endif // TTSFRONTEND_H \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/src/TfliteBase.cpp b/TensorFlowTTS/examples/cpptflite/src/TfliteBase.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a56302a004c5f1e44eec1bc1c7a41768b7d5528b --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/TfliteBase.cpp @@ -0,0 +1,24 @@ +#include "TfliteBase.h" + +TfliteBase::TfliteBase(const char* modelFilename) +{ + interpreterBuild(modelFilename); +} + +TfliteBase::~TfliteBase() +{ + ; +} + +void TfliteBase::interpreterBuild(const char* modelFilename) +{ + model = tflite::FlatBufferModel::BuildFromFile(modelFilename); + + TFLITE_MINIMAL_CHECK(model != nullptr); + + tflite::InterpreterBuilder builder(*model, resolver); + + builder(&interpreter); + + TFLITE_MINIMAL_CHECK(interpreter != nullptr); +} diff --git a/TensorFlowTTS/examples/cpptflite/src/TfliteBase.h b/TensorFlowTTS/examples/cpptflite/src/TfliteBase.h new file mode 100644 index 0000000000000000000000000000000000000000..1a5057998ceb3e2c9064ad857619391ef705113a --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/TfliteBase.h @@ -0,0 +1,40 @@ +#ifndef TFLITEBASE_H +#define TFLITEBASE_H + +#include "tensorflow/lite/interpreter.h" +#include "tensorflow/lite/kernels/register.h" +#include "tensorflow/lite/model.h" +#include 
"tensorflow/lite/optional_debug_tools.h" + +#define TFLITE_MINIMAL_CHECK(x) \ + if (!(x)) { \ + fprintf(stderr, "Error at %s:%d\n", __FILE__, __LINE__); \ + exit(1); \ + } + +typedef struct +{ + float *melData; + std::vector melShape; + int32_t bytes; +} MelGenData; + +class TfliteBase +{ +public: + uint32_t int_size = sizeof(int32_t); + uint32_t float_size = sizeof(float); + + std::unique_ptr interpreter; + + TfliteBase(const char* modelFilename); + ~TfliteBase(); + +private: + std::unique_ptr model; + tflite::ops::builtin::BuiltinOpResolver resolver; + + void interpreterBuild(const char* modelFilename); +}; + +#endif // TFLITEBASE_H \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/src/VocoderTF.cpp b/TensorFlowTTS/examples/cpptflite/src/VocoderTF.cpp new file mode 100644 index 0000000000000000000000000000000000000000..6cc542e29954848e281832558b9122a34b106cc3 --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/VocoderTF.cpp @@ -0,0 +1,27 @@ +#include "VocoderTF.h" + +std::vector VocoderTF::infer(const MelGenData mel) +{ + std::vector audio; + + interpreter->ResizeInputTensor(inputIndex, mel.melShape); + TFLITE_MINIMAL_CHECK(interpreter->AllocateTensors() == kTfLiteOk); + + float* melDataPtr = interpreter->typed_input_tensor(inputIndex); + memcpy(melDataPtr, mel.melData, mel.bytes); + + TFLITE_MINIMAL_CHECK(interpreter->Invoke() == kTfLiteOk); + + TfLiteTensor* audioTensor = interpreter->tensor(outputIndex); + + float* outputPtr = interpreter->typed_output_tensor(0); + + int32_t audio_len = audioTensor->bytes / float_size; + + for (int i=0; iinputs()[0]), + outputIndex(interpreter->outputs()[0]) {}; + + std::vector infer(const MelGenData mel); + +private: + + const int32_t inputIndex; + const int32_t outputIndex; +}; + +#endif // VOCODERTF_H \ No newline at end of file diff --git a/TensorFlowTTS/examples/cpptflite/src/VoxCommon.cpp b/TensorFlowTTS/examples/cpptflite/src/VoxCommon.cpp new file mode 100644 index 
0000000000000000000000000000000000000000..b372630e111921f1ce058211c9a6b636eb282f31 --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/VoxCommon.cpp @@ -0,0 +1,21 @@ +#include "VoxCommon.h" + +void VoxUtil::ExportWAV(const std::string & Filename, const std::vector& Data, unsigned SampleRate) { + AudioFile::AudioBuffer Buffer; + Buffer.resize(1); + + + Buffer[0] = Data; + size_t BufSz = Data.size(); + + + AudioFile File; + File.setAudioBuffer(Buffer); + File.setAudioBufferSize(1, (int)BufSz); + File.setNumSamplesPerChannel((int)BufSz); + File.setNumChannels(1); + File.setBitDepth(32); + File.setSampleRate(SampleRate); + + File.save(Filename, AudioFileFormat::Wave); +} diff --git a/TensorFlowTTS/examples/cpptflite/src/VoxCommon.h b/TensorFlowTTS/examples/cpptflite/src/VoxCommon.h new file mode 100644 index 0000000000000000000000000000000000000000..05ac4f8dd2b56aba81e06f217edf90e797b8ec4d --- /dev/null +++ b/TensorFlowTTS/examples/cpptflite/src/VoxCommon.h @@ -0,0 +1,25 @@ +#pragma once +/* + VoxCommon.hpp : Defines common data structures and constants to be used with TensorVox +*/ +#include +#include +#include "AudioFile.h" +// #include "ext/CppFlow/include/Tensor.h" +// #include + +#define IF_RETURN(cond,ret) if (cond){return ret;} +#define VX_IF_EXCEPT(cond,ex) if (cond){throw std::invalid_argument(ex);} + + +template +struct TFTensor { + std::vector Data; + std::vector Shape; + size_t TotalSize; +}; + +namespace VoxUtil { + + void ExportWAV(const std::string& Filename, const std::vector& Data, unsigned SampleRate); +} diff --git a/TensorFlowTTS/examples/cppwin/.gitattributes b/TensorFlowTTS/examples/cppwin/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..1ff0c423042b46cb1d617b81efb715defbe8054d --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/.gitattributes @@ -0,0 +1,63 @@ +############################################################################### +# Set default behavior to automatically normalize line endings. 
+############################################################################### +* text=auto + +############################################################################### +# Set default behavior for command prompt diff. +# +# This is need for earlier builds of msysgit that does not have it on by +# default for csharp files. +# Note: This is only used by command line +############################################################################### +#*.cs diff=csharp + +############################################################################### +# Set the merge driver for project and solution files +# +# Merging from the command prompt will add diff markers to the files if there +# are conflicts (Merging from VS is not affected by the settings below, in VS +# the diff markers are never inserted). Diff markers may cause the following +# file extensions to fail to load in VS. An alternative would be to treat +# these files as binary and thus will always conflict and require user +# intervention with every merge. To do so, just uncomment the entries below +############################################################################### +#*.sln merge=binary +#*.csproj merge=binary +#*.vbproj merge=binary +#*.vcxproj merge=binary +#*.vcproj merge=binary +#*.dbproj merge=binary +#*.fsproj merge=binary +#*.lsproj merge=binary +#*.wixproj merge=binary +#*.modelproj merge=binary +#*.sqlproj merge=binary +#*.wwaproj merge=binary + +############################################################################### +# behavior for image files +# +# image files are treated as binary by default. +############################################################################### +#*.jpg binary +#*.png binary +#*.gif binary + +############################################################################### +# diff behavior for common document formats +# +# Convert binary document formats to text before diffing them. This feature +# is only available from the command line. 
Turn it on by uncommenting the +# entries below. +############################################################################### +#*.doc diff=astextplain +#*.DOC diff=astextplain +#*.docx diff=astextplain +#*.DOCX diff=astextplain +#*.dot diff=astextplain +#*.DOT diff=astextplain +#*.pdf diff=astextplain +#*.PDF diff=astextplain +#*.rtf diff=astextplain +#*.RTF diff=astextplain diff --git a/TensorFlowTTS/examples/cppwin/.gitignore b/TensorFlowTTS/examples/cppwin/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..796f0de8a5a8d28376778b4493f7627ebdda09b9 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/.gitignore @@ -0,0 +1,262 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +# Libraries +*.dll +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# DNX +project.lock.json +project.fragment.lock.json +artifacts/ + +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ 
+*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# TODO: Comment the next line if you want to checkin your web deploy settings +# but database connection strings (with potential passwords) will be unencrypted +#*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/packages/* +# except build/, which is used as an MSBuild target. 
+!**/packages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/packages/repositories.config +# NuGet v3's project.json files produces more ignoreable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +node_modules/ +orleans.codegen.cs + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. 
Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc \ No newline at end of file diff --git a/TensorFlowTTS/examples/cppwin/README.md b/TensorFlowTTS/examples/cppwin/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ef483105ec6cf56df97f697dc46b293e9d20af26 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/README.md @@ -0,0 +1,85 @@ +# TensorflowTTS C++ Inference + +This contains code (and libs) necessary to make portable programs for inference with FastSpeech2 (MFA-aligned phonetic) and MB-MelGAN on desktop, along with a simple example. + +The program requires two things: +1. An exported and packed TTS model (FS2 + MB-MelGAN). Colab notebook linked below will show +2. A G2P-RNN model. See [here](https://github.com/ZDisket/TensorVox/tree/master/g2p_train) for details. 
+ +If you want to convert your model to the format that this program expects, you can check out the notebook: [](https://colab.research.google.com/drive/1EEkJSq9Koo_eI0Sotc_t_CTducybdV9h?usp=sharing) + +It includes a set of easily understandable and modular classes including a simple English text preprocessor, so you can easily copy and integrate them into your program. + +Inference is even easier than in Python. First you need a Phonemizer, then the voice. + + #include "Voice.h" + std::string LanguagePath = "g2p/English" + std::string VoicePath = "LJ"; + + Phonemizer Phony; + Phony.Initialize(LanguagePath); + + // The Voice class takes a pointer to the Phonemizer to use it. + // Don't let it go out of scope! + Voice LJSpeech(VoicePath,VoicePath,&Phony); + + std::vector AudioData = LJSpeech.Vocalize("I love see plus plus" + LJSpeech.GetInfo().EndPadding); + VoxUtil::ExportWAV("voc1.wav", AudioData, LJSpeech.GetInfo().SampleRate); + + + +# Using the demo + +The demo program is available for download to use for Windows and Linux (Ubuntu 18.04), both x64. +It can take command line arguments (see code for details), but defaults should be fine for mere LJSpeech testing. + +To use it, do the following depending on platform: + +## Using the precompiled demo for Windows + 1. Download the [Windows x64 binary and LJSpeech model](https://drive.google.com/file/d/19ZaiBDtEkyrov_SfVHQUIHgVjVWv2Msu/view?usp=sharing) + 2. Extract to whatever directory you like + 3. Run + +## Using the precompiled demo for Linux +Tested in Ubuntu 18.04 LTS +1. Download the [Linux x64 binary and LJSpeech model](https://drive.google.com/file/d/1IgN9KMq2ccF-QSJX_Z1n94mtMDitnFs4/view?usp=sharing) +2. Extract to whatever directory you like +3. Navigate with terminal +4. `LD_LIBRARY_PATH=lib ./TensorflowTTSCppInference` + +For compiling it yourself, see **Compiling** below + +# Compiling +Compiling the demo depends on what platform. Currently two have been tested: +1. Windows 10 x64; MSVC 2019 +2. 
Linux(Ubuntu) x64: GCC 7.5.0 + +Note that to test out your shiny new build afterwards you'll have to download the LJSpeech model (or make one yourself), it's bundled in any of the above precompiled demo download links. + +## Dependencies +Download the [dependencies](https://drive.google.com/file/d/167LJXVO2dbFVc1Mmqacrq4LBaUIG9paH/view?usp=sharing) (hint: it's just Tensorflow C API) and drop the deps folder into the same place as the .sln and .pro; it has both Linux and Windows versions. + +The rest (such as CppFlow and AudioFile) are included in the source code + +## Windows +Use the Visual Studio solution file. + +## Ubuntu +Tested with compiler `gcc version 7.5.0 (Ubuntu 7.5.0-3ubuntu1~18.04)` . +1. `sudo apt install qt5-default` +2. `qmake TensorflowTTSCppInference.pro` +3. `make` + + +### Notes when compiling + 1. Tensorflow library malfunctions in debug builds, so only build release. + +## Externals (and thanks) + + - **Tensorflow C API**: [https://www.tensorflow.org/install/lang_c](https://www.tensorflow.org/install/lang_c) + - **CppFlow** (TF C API -> C++ wrapper): [https://github.com/serizba/cppflow](https://github.com/serizba/cppflow) + - **AudioFile** (for WAV export): [https://github.com/adamstark/AudioFile](https://github.com/adamstark/AudioFile) + - [nlohmann/json: JSON for Modern C++ ](https://github.com/nlohmann/json) + - [jarro2783/cxxopts: Lightweight C++ command line option parser)](https://github.com/jarro2783/cxxopts) + + diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference.pro b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference.pro new file mode 100644 index 0000000000000000000000000000000000000000..a9d722ee86e5d77d92587095cf416f811e175a3c --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference.pro @@ -0,0 +1,41 @@ +TEMPLATE = app +CONFIG += console c++14 +CONFIG -= app_bundle +CONFIG -= qt +TARGET = TFTTSCppInfer + +HEADERS += \ + TensorflowTTSCppInference/EnglishPhoneticProcessor.h \ + 
TensorflowTTSCppInference/FastSpeech2.h \ + TensorflowTTSCppInference/MultiBandMelGAN.h \ + TensorflowTTSCppInference/TextTokenizer.h \ + TensorflowTTSCppInference/Voice.h \ + TensorflowTTSCppInference/VoxCommon.hpp \ + TensorflowTTSCppInference/ext/AudioFile.hpp \ + TensorflowTTSCppInference/ext/CppFlow/include/Model.h \ + TensorflowTTSCppInference/ext/CppFlow/include/Tensor.h \ + TensorflowTTSCppInference/ext/ZCharScanner.h \ + TensorflowTTSCppInference/phonemizer.h \ + TensorflowTTSCppInference/tfg2p.h \ + +SOURCES += \ + TensorflowTTSCppInference/EnglishPhoneticProcessor.cpp \ + TensorflowTTSCppInference/FastSpeech2.cpp \ + TensorflowTTSCppInference/MultiBandMelGAN.cpp \ + TensorflowTTSCppInference/TensorflowTTSCppInference.cpp \ + TensorflowTTSCppInference/TextTokenizer.cpp \ + TensorflowTTSCppInference/Voice.cpp \ + TensorflowTTSCppInference/VoxCommon.cpp \ + TensorflowTTSCppInference/ext/CppFlow/src/Model.cpp \ + TensorflowTTSCppInference/ext/CppFlow/src/Tensor.cpp \ + TensorflowTTSCppInference/phonemizer.cpp \ + TensorflowTTSCppInference/tfg2p.cpp \ + TensorflowTTSCppInference/ext/ZCharScanner.cpp \ + +INCLUDEPATH += $$PWD/deps/include +LIBS += -L$$PWD/deps/lib -ltensorflow + +# GCC shits itself on memcp in AudioFile.hpp (l-1186) unless we add this +QMAKE_CXXFLAGS += -fpermissive + + diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference.sln b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference.sln new file mode 100644 index 0000000000000000000000000000000000000000..5598f9e0ec8f1ffcce23db1a4186b9419924cb16 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference.sln @@ -0,0 +1,31 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.28307.136 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "TensorflowTTSCppInference", "TensorflowTTSCppInference\TensorflowTTSCppInference.vcxproj", 
"{67C98279-9BA3-49F7-9FE4-2C0DF77A2875}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {67C98279-9BA3-49F7-9FE4-2C0DF77A2875}.Debug|x64.ActiveCfg = Debug|x64 + {67C98279-9BA3-49F7-9FE4-2C0DF77A2875}.Debug|x64.Build.0 = Debug|x64 + {67C98279-9BA3-49F7-9FE4-2C0DF77A2875}.Debug|x86.ActiveCfg = Debug|Win32 + {67C98279-9BA3-49F7-9FE4-2C0DF77A2875}.Debug|x86.Build.0 = Debug|Win32 + {67C98279-9BA3-49F7-9FE4-2C0DF77A2875}.Release|x64.ActiveCfg = Release|x64 + {67C98279-9BA3-49F7-9FE4-2C0DF77A2875}.Release|x64.Build.0 = Release|x64 + {67C98279-9BA3-49F7-9FE4-2C0DF77A2875}.Release|x86.ActiveCfg = Release|Win32 + {67C98279-9BA3-49F7-9FE4-2C0DF77A2875}.Release|x86.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {08E7CCCB-028D-4BFC-9CDC-E8957E50F8EA} + EndGlobalSection +EndGlobal diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/EnglishPhoneticProcessor.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/EnglishPhoneticProcessor.cpp new file mode 100644 index 0000000000000000000000000000000000000000..fb86df03b5dadd428832971deba1052ca07cb9af --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/EnglishPhoneticProcessor.cpp @@ -0,0 +1,97 @@ +#include "EnglishPhoneticProcessor.h" +#include "VoxCommon.hpp" + +using namespace std; + +bool EnglishPhoneticProcessor::Initialize(Phonemizer* InPhn) +{ + + + Phoner = InPhn; + Tokenizer.SetAllowedChars(Phoner->GetGraphemeChars()); + + + + return true; +} + +std::string EnglishPhoneticProcessor::ProcessTextPhonetic(const std::string& InText, const std::vector &InPhonemes,ETTSLanguage::Enum InLanguage) +{ + if 
(!Phoner) + return "ERROR"; + + + + vector Words = Tokenizer.Tokenize(InText,InLanguage); + + string Assemble = ""; + // Make a copy of the dict passed. + + for (size_t w = 0; w < Words.size();w++) + { + const string& Word = Words[w]; + + if (Word.find("@") != std::string::npos){ + std::string AddPh = Word.substr(1); // Remove the @ + size_t OutId = 0; + if (VoxUtil::FindInVec(AddPh,InPhonemes,OutId)) + { + Assemble.append(InPhonemes[OutId]); + Assemble.append(" "); + + + } + + continue; + + } + + + + + size_t OverrideIdx = 0; + + + + std::string Res = Phoner->ProcessWord(Word,0.001f); + + // Cache the word in the override dict so next time we don't have to research it + + Assemble.append(Res); + Assemble.append(" "); + + + + + + } + + + // Delete last space if there is + + + if (Assemble[Assemble.size() - 1] == ' ') + Assemble.pop_back(); + + + return Assemble; +} + +EnglishPhoneticProcessor::EnglishPhoneticProcessor() +{ + Phoner = nullptr; +} + +EnglishPhoneticProcessor::EnglishPhoneticProcessor(Phonemizer *InPhn) +{ + Initialize(InPhn); + +} + + + +EnglishPhoneticProcessor::~EnglishPhoneticProcessor() +{ + if (Phoner) + delete Phoner; +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/EnglishPhoneticProcessor.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/EnglishPhoneticProcessor.h new file mode 100644 index 0000000000000000000000000000000000000000..2fcac823d755faff5cbd8e1517fb6c740828f48b --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/EnglishPhoneticProcessor.h @@ -0,0 +1,23 @@ +#pragma once +#include "TextTokenizer.h" +#include "phonemizer.h" + +class EnglishPhoneticProcessor +{ +private: + TextTokenizer Tokenizer; + Phonemizer* Phoner; + + inline bool FileExists(const std::string& name) { + std::ifstream f(name.c_str()); + return f.good(); + } + +public: + bool Initialize(Phonemizer *InPhn); + std::string ProcessTextPhonetic(const std::string& InText, const std::vector 
&InPhonemes,ETTSLanguage::Enum InLanguage); + EnglishPhoneticProcessor(); + EnglishPhoneticProcessor(Phonemizer *InPhn); + ~EnglishPhoneticProcessor(); +}; + diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/FastSpeech2.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/FastSpeech2.cpp new file mode 100644 index 0000000000000000000000000000000000000000..dea309136d4d41646add136877fca0a270bc2f86 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/FastSpeech2.cpp @@ -0,0 +1,93 @@ +#include "FastSpeech2.h" +#include + + +FastSpeech2::FastSpeech2() +{ + FastSpeech = nullptr; +} + +FastSpeech2::FastSpeech2(const std::string & SavedModelFolder) +{ + Initialize(SavedModelFolder); +} + + +bool FastSpeech2::Initialize(const std::string & SavedModelFolder) +{ + try { + FastSpeech = new Model(SavedModelFolder); + } + catch (...) { + FastSpeech = nullptr; + return false; + + } + return true; +} + +TFTensor FastSpeech2::DoInference(const std::vector& InputIDs, int32_t SpeakerID, float Speed, float Energy, float F0, int32_t EmotionID) +{ + if (!FastSpeech) + throw std::invalid_argument("Tried to do inference on unloaded or invalid model!"); + + // Convenience reference so that we don't have to constantly derefer pointers. + Model& Mdl = *FastSpeech; + + // Define the tensors + Tensor input_ids{ Mdl,"serving_default_input_ids" }; + Tensor energy_ratios{ Mdl,"serving_default_energy_ratios" }; + Tensor f0_ratios{ Mdl,"serving_default_f0_ratios" }; + Tensor speaker_ids{ Mdl,"serving_default_speaker_ids" }; + Tensor speed_ratios{ Mdl,"serving_default_speed_ratios" }; + Tensor* emotion_ids = nullptr; + + // This is a multi-emotion model + if (EmotionID != -1) + { + emotion_ids = new Tensor{Mdl,"serving_default_emotion_ids"}; + emotion_ids->set_data(std::vector{EmotionID}); + + } + + + // This is the shape of the input IDs, our equivalent to tf.expand_dims. 
+ std::vector InputIDShape = { 1, (int64_t)InputIDs.size() }; + + input_ids.set_data(InputIDs, InputIDShape); + energy_ratios.set_data(std::vector{ Energy }); + f0_ratios.set_data(std::vector{F0}); + speaker_ids.set_data(std::vector{SpeakerID}); + speed_ratios.set_data(std::vector{Speed}); + + // Define output tensor + Tensor output{ Mdl,"StatefulPartitionedCall" }; + + + // Vector of input tensors + std::vector inputs = { &input_ids,&speaker_ids,&speed_ratios,&f0_ratios,&energy_ratios }; + + if (EmotionID != -1) + inputs.push_back(emotion_ids); + + + // Do inference + FastSpeech->run(inputs, output); + + // Define output and return it + TFTensor Output = VoxUtil::CopyTensor(output); + + // We allocated the emotion_ids tensor dynamically, delete it + if (emotion_ids) + delete emotion_ids; + + // We could just straight out define it in the return statement, but I like it more this way + + return Output; +} + +FastSpeech2::~FastSpeech2() +{ + if (FastSpeech) + delete FastSpeech; +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/FastSpeech2.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/FastSpeech2.h new file mode 100644 index 0000000000000000000000000000000000000000..7203c2b3938d01ed9d4d644faa963bda9f92e968 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/FastSpeech2.h @@ -0,0 +1,37 @@ +#pragma once + +#include "ext/CppFlow/include/Model.h" +#include "VoxCommon.hpp" +class FastSpeech2 +{ +private: + Model* FastSpeech; + +public: + FastSpeech2(); + FastSpeech2(const std::string& SavedModelFolder); + + /* + Initialize and load the model + + -> SavedModelFolder: Folder where the .pb, variables, and other characteristics of the exported SavedModel + <- Returns: (bool)Success + */ + bool Initialize(const std::string& SavedModelFolder); + + /* + Do inference on a FastSpeech2 model. + + -> InputIDs: Input IDs of tokens for inference + -> SpeakerID: ID of the speaker in the model to do inference on. 
If single speaker, always leave at 0. If multispeaker, refer to your model. + -> Speed, Energy, F0: Parameters for FS2 inference. Leave at 1.f for defaults + + <- Returns: TFTensor with shape {1,,80} containing contents of mel spectrogram. + */ + TFTensor DoInference(const std::vector& InputIDs, int32_t SpeakerID = 0, float Speed = 1.f, float Energy = 1.f, float F0 = 1.f,int32_t EmotionID = -1); + + + + ~FastSpeech2(); +}; + diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/MultiBandMelGAN.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/MultiBandMelGAN.cpp new file mode 100644 index 0000000000000000000000000000000000000000..443ba4ae56b5466f4e8db11795d8864a3d8f2fc8 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/MultiBandMelGAN.cpp @@ -0,0 +1,54 @@ +#include "MultiBandMelGAN.h" +#include +#define IF_EXCEPT(cond,ex) if (cond){throw std::invalid_argument(ex);} + + + +bool MultiBandMelGAN::Initialize(const std::string & VocoderPath) +{ + try { + MelGAN = new Model(VocoderPath); + } + catch (...) { + MelGAN = nullptr; + return false; + + } + return true; + + +} + +TFTensor MultiBandMelGAN::DoInference(const TFTensor& InMel) +{ + IF_EXCEPT(!MelGAN, "Tried to infer MB-MelGAN on uninitialized model!!!!") + + // Convenience reference so that we don't have to constantly derefer pointers. 
+ Model& Mdl = *MelGAN; + + Tensor input_mels{ Mdl,"serving_default_mels" }; + input_mels.set_data(InMel.Data, InMel.Shape); + + Tensor out_audio{ Mdl,"StatefulPartitionedCall" }; + + MelGAN->run(input_mels, out_audio); + + TFTensor RetTensor = VoxUtil::CopyTensor(out_audio); + + return RetTensor; + + +} + +MultiBandMelGAN::MultiBandMelGAN() +{ + MelGAN = nullptr; +} + + +MultiBandMelGAN::~MultiBandMelGAN() +{ + if (MelGAN) + delete MelGAN; + +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/MultiBandMelGAN.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/MultiBandMelGAN.h new file mode 100644 index 0000000000000000000000000000000000000000..1459cfd96fd3eca2a06cb882e5c98fead0a4678b --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/MultiBandMelGAN.h @@ -0,0 +1,23 @@ +#pragma once + +#include "ext/CppFlow/include/Model.h" +#include "VoxCommon.hpp" +class MultiBandMelGAN +{ +private: + Model* MelGAN; + + +public: + bool Initialize(const std::string& VocoderPath); + + + // Do MultiBand MelGAN inference including PQMF + // -> InMel: Mel spectrogram (shape [1, xx, 80]) + // <- Returns: Tensor data [4, xx, 1] + TFTensor DoInference(const TFTensor& InMel); + + MultiBandMelGAN(); + ~MultiBandMelGAN(); +}; + diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TensorflowTTSCppInference.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TensorflowTTSCppInference.cpp new file mode 100644 index 0000000000000000000000000000000000000000..03569885ed85c83a242411a3d436906bb2e6d8b4 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TensorflowTTSCppInference.cpp @@ -0,0 +1,178 @@ + +#include +#include "Voice.h" +#define LOGF(txt) std::cout << txt << "\n" +#include "phonemizer.h" +#include "ext/ZCharScanner.h" +#include +#include +#include +#include "ext/cxxopts.hpp" + +std::vector GetTxtFile(const std::string& InFn) { + + std::vector Ret; + std::ifstream InFile(InFn); + + if 
(!InFile.good()) + return Ret; + + + std::string Line; + while (std::getline(InFile, Line)) + { + Ret.push_back(Line); + + + + } + InFile.close(); + + return Ret; + + + + +} + +std::vector SuperWordSplit(const std::string& InStr, int MaxLen) +{ + ZStringDelimiter Del1(InStr); + Del1.AddDelimiter(" "); + + std::vector RawWords = Del1.GetTokens(); + int AmtWords = RawWords.size(); + + int Idx = 0; + std::string CurrentStr = ""; + + std::vector SplitStrs; + + while (Idx < AmtWords) + { + if (CurrentStr.size() > 0) + CurrentStr.append(" "); + + std::string CuWord = RawWords[Idx]; + // phonetic input has to be uppercase + if (CuWord.find("@") == std::string::npos) + { + std::transform(CuWord.begin(), CuWord.end(), CuWord.begin(), + [](unsigned char c) { return std::tolower(c); }); + } + + + CurrentStr.append(CuWord); + + if (CurrentStr.length() > MaxLen) { + SplitStrs.push_back(CurrentStr); + CurrentStr = ""; + + } + + + Idx += 1; + + // Add the last string + if (Idx == AmtWords) + SplitStrs.push_back(CurrentStr); + + + + + + + } + + return SplitStrs; + +} + +int main(int argc, char* argv[]) +{ + cxxopts::Options options("TFTTSInfer", "Inference with TensorflowTTS models in command line"); + options.add_options() + ("v,voice", "Path to the voice folder", cxxopts::value()->default_value("LJ")) // a bool parameter + ("l,language", "Path to the language folder for G2P", cxxopts::value()->default_value("g2p/English")) + ("o,output", "Name of .wav file output of all infers", cxxopts::value()->default_value("AllAud.wav")) + ("m,maxlen", "Optional, max length of split for TTS. 
Default is 180", cxxopts::value()->default_value("180")) + ; + + auto Args = options.parse(argc, argv); + + std::string Name = Args["voice"].as(); + std::string Lang = Args["language"].as(); + std::string OutputFileName = Args["output"].as(); + int MaxLen = Args["maxlen"].as(); + + if (OutputFileName.find(".wav") == std::string::npos) + OutputFileName += ".wav"; + + + + LOGF("Loading voice..."); + + // Load phonemizer + Phonemizer StdPh; + + bool G2pInit = StdPh.Initialize(Lang); + if (!G2pInit) { + LOGF("Could not initialize language and/or G2P model! See if the path is correct and try again!"); + return -2; + + } + + // Load the voice itself + Voice CurrentVox(Name,Name,&StdPh); + std::vector AllAud; + + // Begin interactive console + bool Running = true; + while (Running) + { + std::string Prompt = ""; + + LOGF("Type a prompt, or type EXIT to exit "); + + std::getline(std::cin, Prompt); + if (Prompt == "EXIT") { + Running = false; + break; + } + std::vector Audata; + + // Split the prompt into chunks (if the user inputs like that) + for (const auto& Spli : SuperWordSplit(Prompt, MaxLen)) { + std::vector ImmediateAudata = CurrentVox.Vocalize(Prompt + CurrentVox.GetInfo().EndPadding); + // Insert the audio data to the end of the mid-level audata vector + Audata.insert(Audata.end(), ImmediateAudata.begin(), ImmediateAudata.end()); + + + } + + + + + std::string Filename = Prompt.substr(0, std::min(16, (int)Prompt.size())) + ".wav"; + + VoxUtil::ExportWAV(Filename, Audata, CurrentVox.GetInfo().SampleRate); + + // Insert the audio into the AllAud vector + AllAud.insert(AllAud.end(), Audata.begin(), Audata.end()); + + LOGF("Saved to " + Filename); + + + + + } + + + // Export all the audio + VoxUtil::ExportWAV(OutputFileName, AllAud, CurrentVox.GetInfo().SampleRate); + LOGF("Saved ALL to " + OutputFileName); + + std::cout << "Hello TensorflowTTS!\n"; + return 0; + +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TensorflowTTSCppInference.vcxproj 
b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TensorflowTTSCppInference.vcxproj new file mode 100644 index 0000000000000000000000000000000000000000..f839977e2364ef2d2442caec05a9341a04d00675 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TensorflowTTSCppInference.vcxproj @@ -0,0 +1,197 @@ + + + + + Debug + Win32 + + + Release + Win32 + + + Debug + x64 + + + Release + x64 + + + + 15.0 + {67C98279-9BA3-49F7-9FE4-2C0DF77A2875} + Win32Proj + TensorflowTTSCppInference + 10.0 + + + + Application + true + v142 + Unicode + + + Application + false + v142 + true + Unicode + + + Application + true + v142 + Unicode + + + Application + false + v142 + true + Unicode + + + + + + + + + + + + + + + + + + + + + true + $(SolutionDir)deps\include;$(IncludePath) + $(SolutionDir)deps\lib;$(LibraryPath) + + + true + $(SolutionDir)deps\include;$(IncludePath) + $(SolutionDir)deps\lib;$(LibraryPath) + + + false + $(SolutionDir)deps\include;$(IncludePath) + $(SolutionDir)deps\lib;$(LibraryPath) + + + false + $(SolutionDir)deps\include;$(IncludePath) + $(SolutionDir)deps\lib;$(LibraryPath) + + + + NotUsing + Level3 + Disabled + true + _CRT_SECURE_NO_WARNINGS;WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + pch.h + + + Console + true + /FORCE %(AdditionalOptions) + + + + + NotUsing + Level3 + Disabled + true + _CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + pch.h + + + Console + true + /FORCE %(AdditionalOptions) + + + + + NotUsing + Level3 + MaxSpeed + true + true + true + _CRT_SECURE_NO_WARNINGS;WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + pch.h + + + Console + true + true + true + /FORCE %(AdditionalOptions) + + + + + NotUsing + Level3 + MaxSpeed + true + true + true + _CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + pch.h + + + Console + true + true + true + tensorflow.lib;%(AdditionalDependencies) + /FORCE %(AdditionalOptions) + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + \ No newline at end of file diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TensorflowTTSCppInference.vcxproj.filters b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TensorflowTTSCppInference.vcxproj.filters new file mode 100644 index 0000000000000000000000000000000000000000..d4bd08ab3816d0bbf861d6d73afda3bf0a007b9c --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TensorflowTTSCppInference.vcxproj.filters @@ -0,0 +1,99 @@ + + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hh;hpp;hxx;hm;inl;inc;ipp;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + {8afed9e3-30cb-49ab-9644-423338335a59} + + + {9706a0f7-2bc3-4d3b-913f-e9a21d1f039a} + + + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + ext + + + ext + + + ext\CppFlow + + + ext\CppFlow + + + Header Files + + + Header Files + + + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + ext + + + ext\CppFlow + + + ext\CppFlow + + + Source Files + + + Source Files + + + \ No newline at end of file diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TextTokenizer.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TextTokenizer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..fa83cc54aeca3368dea2462e56b40811a0bb4591 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TextTokenizer.cpp @@ -0,0 +1,178 @@ +#include "TextTokenizer.h" +#include "ext/ZCharScanner.h" +#include +#include +#include +#include +const std::vector first14 = { "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "eleven", "twelve", 
"thirteen", "fourteen" }; +const std::vector prefixes = { "twen", "thir", "for", "fif", "six", "seven", "eigh", "nine" }; + +// Punctuation, this gets auto-converted to SIL +const std::string punctuation = ",.-;"; + + +using namespace std; + +void TextTokenizer::SetAllowedChars(const std::string &value) +{ + AllowedChars = value; +} + +string TextTokenizer::IntToStr(int number) +{ + if (number < 0) + { + return "minus " + IntToStr(-number); + } + if (number <= 14) + return first14.at(number); + if (number < 20) + return prefixes.at(number - 12) + "teen"; + if (number < 100) { + unsigned int remainder = number - (static_cast(number / 10) * 10); + return prefixes.at(number / 10 - 2) + (0 != remainder ? "ty " + IntToStr(remainder) : "ty"); + } + if (number < 1000) { + unsigned int remainder = number - (static_cast(number / 100) * 100); + return first14.at(number / 100) + (0 != remainder ? " hundred " + IntToStr(remainder) : " hundred"); + } + if (number < 1000000) { + unsigned int thousands = static_cast(number / 1000); + unsigned int remainder = number - (thousands * 1000); + return IntToStr(thousands) + (0 != remainder ? " thousand " + IntToStr(remainder) : " thousand"); + } + if (number < 1000000000) { + unsigned int millions = static_cast(number / 1000000); + unsigned int remainder = number - (millions * 1000000); + return IntToStr(millions) + (0 != remainder ? 
" million " + IntToStr(remainder) : " million"); + } + throw std::out_of_range("inttostr() value too large"); +} + + +vector TextTokenizer::ExpandNumbers(const std::vector& SpaceTokens) +{ + vector RetVec; + RetVec.reserve(SpaceTokens.size()); + + for (auto& Token : SpaceTokens) { + char* p; + long converted = strtol(Token.c_str(), &p, 10); + if (*p) { + RetVec.push_back(Token); + } + else { + if (converted > 1000000000) + continue; + + string IntStr = IntToStr((int)converted); + ZStringDelimiter DelInt(IntStr); + DelInt.AddDelimiter(" "); + + std::vector NumToks = DelInt.GetTokens(); + + // If a number results in one word the delimiter may not add it. + if (NumToks.empty()) + NumToks.push_back(IntStr); + + for (const auto& NumTok : NumToks) + RetVec.push_back(NumTok); + + + } + } + + return RetVec; + +} + +TextTokenizer::TextTokenizer() +{ +} + +TextTokenizer::~TextTokenizer() +{ +} + +vector TextTokenizer::Tokenize(const std::string & InTxt,ETTSLanguage::Enum Language) +{ + vector ProcessedTokens; + + + ZStringDelimiter Delim(InTxt); + Delim.AddDelimiter(" "); + + vector DelimitedTokens = Delim.GetTokens(); + + // Single word handler + if (!Delim.szTokens()) + DelimitedTokens.push_back(InTxt); + + if (Language == ETTSLanguage::English) + DelimitedTokens = ExpandNumbers(DelimitedTokens); + + + + + // We know that the new vector is going to be at least this size so we reserve + ProcessedTokens.reserve(DelimitedTokens.size()); + + /* + In this step we go through the string and only allow qualified character to pass through. 
+ */ + for (size_t TokCtr = 0; TokCtr < DelimitedTokens.size();TokCtr++) + { + const auto& tok = DelimitedTokens[TokCtr]; + string AppTok = ""; + + + if (tok.find("@") != string::npos) + { + + ProcessedTokens.push_back(tok); + continue; + + } + + for (size_t s = 0;s < tok.size();s++) + { + + + if (AllowedChars.find(tok[s]) != std::string::npos) + AppTok += tok[s]; + + + // Prevent an ending period from adding another SIL + bool LastElem = TokCtr == DelimitedTokens.size() - 1 && s == tok.size() - 1; + // Punctuation handler + // This time we explicitly add a token to the vector + if (punctuation.find(tok[s]) != string::npos && !LastElem) { + // First, if the assembled string isn't empty, we add it in its current state + // Otherwise, the SIL could end up appearing before the word. + if (!AppTok.empty()) { + ProcessedTokens.push_back(AppTok); + AppTok = ""; + } + ProcessedTokens.push_back("@SIL"); + } + + + + + + + } + if (!AppTok.empty()) + ProcessedTokens.push_back(AppTok); + + } + // Prevent out of range error if the user inputs one word + if (ProcessedTokens.size() > 1) + { + if (ProcessedTokens[ProcessedTokens.size() - 1] == "SIL") + ProcessedTokens.pop_back(); + } + + + return ProcessedTokens; +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TextTokenizer.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TextTokenizer.h new file mode 100644 index 0000000000000000000000000000000000000000..42d47ac5c2856a5a521d5a17ead6deb5e1f98e2f --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/TextTokenizer.h @@ -0,0 +1,20 @@ +#pragma once +#include +#include +#include "VoxCommon.hpp" + +class TextTokenizer +{ +private: + std::string AllowedChars; + std::string IntToStr(int number); + + std::vector ExpandNumbers(const std::vector& SpaceTokens); +public: + TextTokenizer(); + ~TextTokenizer(); + + std::vector Tokenize(const std::string& InTxt,ETTSLanguage::Enum Language = ETTSLanguage::English); + void 
SetAllowedChars(const std::string &value); +}; + diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/Voice.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/Voice.cpp new file mode 100644 index 0000000000000000000000000000000000000000..09f101660aa9d0f9dbf9c4bc7f518c7c049c6a74 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/Voice.cpp @@ -0,0 +1,174 @@ +#include "Voice.h" +#include "ext/ZCharScanner.h" + + + +std::vector Voice::PhonemesToID(const std::string & InTxt) +{ + ZStringDelimiter Delim(InTxt); + Delim.AddDelimiter(" "); + + std::vector VecPhones; + VecPhones.reserve(Delim.szTokens()); + + for (const auto& Pho : Delim.GetTokens()) + { + size_t ArrID = 0; + + if (VoxUtil::FindInVec(Pho, Phonemes, ArrID)) + VecPhones.push_back(PhonemeIDs[ArrID]); + else + std::cout << "Voice::PhonemesToID() WARNING: Unknown phoneme " << Pho << std::endl; + + + + } + + + return VecPhones; + +} + +void Voice::ReadPhonemes(const std::string &PhonemePath) +{ + std::ifstream Phone(PhonemePath); + + std::string Line; + while (std::getline(Phone, Line)) + { + if (Line.find("\t") == std::string::npos) + continue; + + + ZStringDelimiter Deline(Line); + Deline.AddDelimiter("\t"); + + Phonemes.push_back(Deline[0]); + PhonemeIDs.push_back(stoi(Deline[1])); + + + + + } + +} + +void Voice::ReadSpeakers(const std::string &SpeakerPath) +{ + Speakers = GetLinedFile(SpeakerPath); + +} + +void Voice::ReadEmotions(const std::string &EmotionPath) +{ + Emotions = GetLinedFile(EmotionPath); + +} + +void Voice::ReadModelInfo(const std::string &ModelInfoPath) +{ + + ModelInfo = ""; + std::vector MiLines = GetLinedFile(ModelInfoPath); + + for (const std::string& ss : MiLines) + ModelInfo += ss + "\n"; + + +} + +std::vector Voice::GetLinedFile(const std::string &Path) +{ + std::vector RetLines; + std::ifstream Fi(Path); + + if (!Fi.good()) // File not exists, ret empty vec + return RetLines; + + std::string Line; + while (std::getline(Fi, Line)) + 
{ + if (Line.size() > 1) + RetLines.push_back(Line); + + + } + + return RetLines; + +} + +Voice::Voice(const std::string & VoxPath, const std::string &inName, Phonemizer *InPhn) +{ + MelPredictor.Initialize(VoxPath + "/melgen"); + Vocoder.Initialize(VoxPath + "/vocoder"); + + if (InPhn) + Processor.Initialize(InPhn); + + + VoxInfo = VoxUtil::ReadModelJSON(VoxPath + "/info.json"); + Name = inName; + ReadPhonemes(VoxPath + "/phonemes.txt"); + ReadSpeakers(VoxPath + "/speakers.txt"); + ReadEmotions(VoxPath + "/emotions.txt"); + + + ReadModelInfo(VoxPath + "/info.txt"); + + + + + + + +} + +void Voice::AddPhonemizer(Phonemizer *InPhn) +{ + Processor.Initialize(InPhn); + + +} + + +std::vector Voice::Vocalize(const std::string & Prompt, float Speed, int32_t SpeakerID, float Energy, float F0, int32_t EmotionID) +{ + + std::string PhoneticTxt = Processor.ProcessTextPhonetic(Prompt,Phonemes,(ETTSLanguage::Enum)VoxInfo.Language); + + TFTensor Mel = MelPredictor.DoInference(PhonemesToID(PhoneticTxt), SpeakerID, Speed, Energy, F0,EmotionID); + + TFTensor AuData = Vocoder.DoInference(Mel); + + + int64_t Width = AuData.Shape[0]; + int64_t Height = AuData.Shape[1]; + int64_t Depth = AuData.Shape[2]; + //int z = 0; + + std::vector AudioData; + AudioData.resize(Height); + + // Code to access 1D array as if it were 3D + for (int64_t x = 0; x < Width;x++) + { + for (int64_t z = 0;z < Depth;z++) + { + for (int64_t y = 0; y < Height;y++) { + int64_t Index = x * Height * Depth + y * Depth + z; + AudioData[(size_t)y] = AuData.Data[(size_t)Index]; + + } + + } + } + + + return AudioData; +} + + +Voice::~Voice() +{ +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/Voice.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/Voice.h new file mode 100644 index 0000000000000000000000000000000000000000..9f14bee76438eb35863f1e846d519e74241e6b54 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/Voice.h @@ -0,0 +1,73 @@ +#pragma once + +#include 
"FastSpeech2.h" +#include "MultiBandMelGAN.h" +#include "EnglishPhoneticProcessor.h" + + +class Voice +{ +private: + FastSpeech2 MelPredictor; + MultiBandMelGAN Vocoder; + EnglishPhoneticProcessor Processor; + VoiceInfo VoxInfo; + + + + std::vector Phonemes; + std::vector PhonemeIDs; + + + + std::vector PhonemesToID(const std::string& InTxt); + + std::vector Speakers; + std::vector Emotions; + + void ReadPhonemes(const std::string& PhonemePath); + + void ReadSpeakers(const std::string& SpeakerPath); + + void ReadEmotions(const std::string& EmotionPath); + + + void ReadModelInfo(const std::string& ModelInfoPath); + + std::vector GetLinedFile(const std::string& Path); + + + std::string ModelInfo; + +public: + /* Voice constructor, arguments obligatory. + -> VoxPath: Path of folder where models are contained. + -- Must be a folder without an ending slash with UNIX slashes, can be relative or absolute (eg: MyVoices/Karen) + -- The folder must contain the following elements: + --- melgen: Folder generated where a FastSpeech2 model was saved as SavedModel, with .pb, variables, etc + --- vocoder: Folder where a Multi-Band MelGAN model was saved as SavedModel. + --- info.json: Model information + --- phonemes.txt: Tab delimited file containing PHONEME \t ID, for inputting to the FS2 model. 
+ + --- If multispeaker, a lined .txt file called speakers.txt + --- If multi-emotion, a lined .txt file called emotions.txt + + */ + Voice(const std::string& VoxPath, const std::string& inName,Phonemizer* InPhn); + + void AddPhonemizer(Phonemizer* InPhn); + + + std::vector Vocalize(const std::string& Prompt, float Speed = 1.f, int32_t SpeakerID = 0, float Energy = 1.f, float F0 = 1.f,int32_t EmotionID = -1); + + std::string Name; + inline const VoiceInfo& GetInfo(){return VoxInfo;} + + inline const std::vector& GetSpeakers(){return Speakers;} + inline const std::vector& GetEmotions(){return Emotions;} + + inline const std::string& GetModelInfo(){return ModelInfo;} + + ~Voice(); +}; + diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/VoxCommon.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/VoxCommon.cpp new file mode 100644 index 0000000000000000000000000000000000000000..5ac1eec3c573951b2b6ded8841392e6a3db3e9b9 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/VoxCommon.cpp @@ -0,0 +1,83 @@ +#include "VoxCommon.hpp" +#include "ext/json.hpp" +using namespace nlohmann; + +const std::vector Text2MelNames = {"FastSpeech2","Tacotron2"}; +const std::vector VocoderNames = {"Multi-Band MelGAN"}; +const std::vector RepoNames = {"TensorflowTTS"}; + +const std::vector LanguageNames = {"English","Spanish"}; + + +void VoxUtil::ExportWAV(const std::string & Filename, const std::vector& Data, unsigned SampleRate) { + AudioFile::AudioBuffer Buffer; + Buffer.resize(1); + + + Buffer[0] = Data; + size_t BufSz = Data.size(); + + + AudioFile File; + File.setAudioBuffer(Buffer); + File.setAudioBufferSize(1, (int)BufSz); + File.setNumSamplesPerChannel((int)BufSz); + File.setNumChannels(1); + File.setBitDepth(32); + File.setSampleRate(SampleRate); + + File.save(Filename, AudioFileFormat::Wave); + + + +} + +VoiceInfo VoxUtil::ReadModelJSON(const std::string &InfoFilename) +{ + const size_t MaxNoteSize = 80; + + std::ifstream 
JFile(InfoFilename); + json JS; + + JFile >> JS; + + + JFile.close(); + + auto Arch = JS["architecture"]; + + ArchitectureInfo CuArch; + CuArch.Repo = Arch["repo"].get(); + CuArch.Text2Mel = Arch["text2mel"].get(); + CuArch.Vocoder = Arch["vocoder"].get(); + + // Now fill the strings + CuArch.s_Repo = RepoNames[CuArch.Repo]; + CuArch.s_Text2Mel = Text2MelNames[CuArch.Text2Mel]; + CuArch.s_Vocoder = VocoderNames[CuArch.Vocoder]; + + + uint32_t Lang = JS["language"].get(); + VoiceInfo Inf{JS["name"].get(), + JS["author"].get(), + JS["version"].get(), + JS["description"].get(), + CuArch, + JS["note"].get(), + JS["sarate"].get(), + Lang, + LanguageNames[Lang], + " " + JS["pad"].get()}; // Add a space for separation since we directly append the value to the prompt + + if (Inf.Note.size() > MaxNoteSize) + Inf.Note = Inf.Note.substr(0,MaxNoteSize); + + return Inf; + + + + + + + +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/VoxCommon.hpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/VoxCommon.hpp new file mode 100644 index 0000000000000000000000000000000000000000..b8d41971beb1c84edfe320b6c5fbc6314b36fa57 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/VoxCommon.hpp @@ -0,0 +1,138 @@ +#pragma once +/* + VoxCommon.hpp : Defines common data structures and constants to be used with TensorVox +*/ +#include +#include +#include "ext/AudioFile.hpp" +#include "ext/CppFlow/include/Tensor.h" + +#define IF_RETURN(cond,ret) if (cond){return ret;} + + + +template +struct TFTensor { + std::vector Data; + std::vector Shape; + size_t TotalSize; + +}; + + +namespace ETTSRepo { +enum Enum{ + TensorflowTTS = 0, + MozillaTTS // not implemented yet +}; + +} +namespace EText2MelModel { +enum Enum{ + FastSpeech2 = 0, + Tacotron2 // not implemented yet +}; + +} + +namespace EVocoderModel{ +enum Enum{ + MultiBandMelGAN = 0 +}; +} + +namespace ETTSLanguage{ +enum Enum{ + English = 0, + Spanish +}; + +} + + + +struct ArchitectureInfo{ 
+ int Repo; + int Text2Mel; + int Vocoder; + + // String versions of the info, for displaying. + // We want boilerplate int index to str conversion code to be low. + std::string s_Repo; + std::string s_Text2Mel; + std::string s_Vocoder; + +}; +struct VoiceInfo{ + std::string Name; + std::string Author; + int32_t Version; + std::string Description; + ArchitectureInfo Architecture; + std::string Note; + + uint32_t SampleRate; + + uint32_t Language; + std::string s_Language; + + std::string EndPadding; + + + +}; + +namespace VoxUtil { + + VoiceInfo ReadModelJSON(const std::string& InfoFilename); + + + template + TFTensor CopyTensor(Tensor& InTens) + { + std::vector Data = InTens.get_data(); + std::vector Shape = InTens.get_shape(); + size_t TotalSize = 1; + for (const int64_t& Dim : Shape) + TotalSize *= Dim; + + return TFTensor{Data, Shape, TotalSize}; + + + } + + template + bool FindInVec(V In, const std::vector& Vec, size_t& OutIdx, size_t start = 0) { + for (size_t xx = start;xx < Vec.size();xx++) + { + if (Vec[xx] == In) { + OutIdx = xx; + return true; + + } + + } + + + return false; + + } + template + bool FindInVec2(V In, const std::vector& Vec, size_t& OutIdx, size_t start = 0) { + for (size_t xx = start;xx < Vec.size();xx++) + { + if (Vec[xx] == In) { + OutIdx = xx; + return true; + + } + + } + + + return false; + + } + + void ExportWAV(const std::string& Filename, const std::vector& Data, unsigned SampleRate); +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/AudioFile.hpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/AudioFile.hpp new file mode 100644 index 0000000000000000000000000000000000000000..5ffb21e124621cd35799dde9149583bcf4c3737d --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/AudioFile.hpp @@ -0,0 +1,1253 @@ +//======================================================================= +/** @file AudioFile.h + * @author Adam Stark + * @copyright Copyright (C) 2017 Adam Stark + * 
+ * This file is part of the 'AudioFile' library + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ +//======================================================================= + +#ifndef _AS_AudioFile_h +#define _AS_AudioFile_h + +#include +#include +#include +#include +#include +#include +#include +#include + +// disable some warnings on Windows +#if defined (_MSC_VER) + __pragma(warning (push)) + __pragma(warning (disable : 4244)) + __pragma(warning (disable : 4457)) + __pragma(warning (disable : 4458)) + __pragma(warning (disable : 4389)) + __pragma(warning (disable : 4996)) +#elif defined (__GNUC__) + _Pragma("GCC diagnostic push") + _Pragma("GCC diagnostic ignored \"-Wconversion\"") + _Pragma("GCC diagnostic ignored \"-Wsign-compare\"") + _Pragma("GCC diagnostic ignored \"-Wshadow\"") +#endif + +//============================================================= +/** The different types of audio file, plus some other types to + * indicate a failure to load a file, or that one hasn't been + * loaded yet + */ +enum class AudioFileFormat +{ + Error, + NotLoaded, + Wave, + Aiff +}; + +//============================================================= +template +class AudioFile +{ +public: + + //============================================================= + typedef std::vector > AudioBuffer; + + //============================================================= + /** Constructor */ + AudioFile(); + + 
//============================================================= + /** Loads an audio file from a given file path. + * @Returns true if the file was successfully loaded + */ + bool load (std::string filePath); + + /** Saves an audio file to a given file path. + * @Returns true if the file was successfully saved + */ + bool save (std::string filePath, AudioFileFormat format = AudioFileFormat::Wave); + + //============================================================= + /** @Returns the sample rate */ + uint32_t getSampleRate() const; + + /** @Returns the number of audio channels in the buffer */ + int getNumChannels() const; + + /** @Returns true if the audio file is mono */ + bool isMono() const; + + /** @Returns true if the audio file is stereo */ + bool isStereo() const; + + /** @Returns the bit depth of each sample */ + int getBitDepth() const; + + /** @Returns the number of samples per channel */ + int getNumSamplesPerChannel() const; + + /** @Returns the length in seconds of the audio file based on the number of samples and sample rate */ + double getLengthInSeconds() const; + + /** Prints a summary of the audio file to the console */ + void printSummary() const; + + //============================================================= + + /** Set the audio buffer for this AudioFile by copying samples from another buffer. + * @Returns true if the buffer was copied successfully. + */ + bool setAudioBuffer (AudioBuffer& newBuffer); + + /** Sets the audio buffer to a given number of channels and number of samples per channel. This will try to preserve + * the existing audio, adding zeros to any new channels or new samples in a given channel. + */ + void setAudioBufferSize (int numChannels, int numSamples); + + /** Sets the number of samples per channel in the audio buffer. This will try to preserve + * the existing audio, adding zeros to new samples in a given channel if the number of samples is increased. 
+ */ + void setNumSamplesPerChannel (int numSamples); + + /** Sets the number of channels. New channels will have the correct number of samples and be initialised to zero */ + void setNumChannels (int numChannels); + + /** Sets the bit depth for the audio file. If you use the save() function, this bit depth rate will be used */ + void setBitDepth (int numBitsPerSample); + + /** Sets the sample rate for the audio file. If you use the save() function, this sample rate will be used */ + void setSampleRate (uint32_t newSampleRate); + + //============================================================= + /** Sets whether the library should log error messages to the console. By default this is true */ + void shouldLogErrorsToConsole (bool logErrors); + + //============================================================= + /** A vector of vectors holding the audio samples for the AudioFile. You can + * access the samples by channel and then by sample index, i.e: + * + * samples[channel][sampleIndex] + */ + AudioBuffer samples; + + //============================================================= + /** An optional iXML chunk that can be added to the AudioFile. 
+ */ + std::string iXMLChunk; + +private: + + //============================================================= + enum class Endianness + { + LittleEndian, + BigEndian + }; + + //============================================================= + AudioFileFormat determineAudioFileFormat (std::vector& fileData); + bool decodeWaveFile (std::vector& fileData); + bool decodeAiffFile (std::vector& fileData); + + //============================================================= + bool saveToWaveFile (std::string filePath); + bool saveToAiffFile (std::string filePath); + + //============================================================= + void clearAudioBuffer(); + + //============================================================= + int32_t fourBytesToInt (std::vector& source, int startIndex, Endianness endianness = Endianness::LittleEndian); + int16_t twoBytesToInt (std::vector& source, int startIndex, Endianness endianness = Endianness::LittleEndian); + int getIndexOfString (std::vector& source, std::string s); + int getIndexOfChunk (std::vector& source, const std::string& chunkHeaderID, int startIndex, Endianness endianness = Endianness::LittleEndian); + + //============================================================= + T sixteenBitIntToSample (int16_t sample); + int16_t sampleToSixteenBitInt (T sample); + + //============================================================= + uint8_t sampleToSingleByte (T sample); + T singleByteToSample (uint8_t sample); + + uint32_t getAiffSampleRate (std::vector& fileData, int sampleRateStartIndex); + bool tenByteMatch (std::vector& v1, int startIndex1, std::vector& v2, int startIndex2); + void addSampleRateToAiffData (std::vector& fileData, uint32_t sampleRate); + T clamp (T v1, T minValue, T maxValue); + + //============================================================= + void addStringToFileData (std::vector& fileData, std::string s); + void addInt32ToFileData (std::vector& fileData, int32_t i, Endianness endianness = 
Endianness::LittleEndian); + void addInt16ToFileData (std::vector& fileData, int16_t i, Endianness endianness = Endianness::LittleEndian); + + //============================================================= + bool writeDataToFile (std::vector& fileData, std::string filePath); + + //============================================================= + void reportError (std::string errorMessage); + + //============================================================= + AudioFileFormat audioFileFormat; + uint32_t sampleRate; + int bitDepth; + bool logErrorsToConsole {true}; +}; + + +//============================================================= +// Pre-defined 10-byte representations of common sample rates +static std::unordered_map > aiffSampleRateTable = { + {8000, {64, 11, 250, 0, 0, 0, 0, 0, 0, 0}}, + {11025, {64, 12, 172, 68, 0, 0, 0, 0, 0, 0}}, + {16000, {64, 12, 250, 0, 0, 0, 0, 0, 0, 0}}, + {22050, {64, 13, 172, 68, 0, 0, 0, 0, 0, 0}}, + {32000, {64, 13, 250, 0, 0, 0, 0, 0, 0, 0}}, + {37800, {64, 14, 147, 168, 0, 0, 0, 0, 0, 0}}, + {44056, {64, 14, 172, 24, 0, 0, 0, 0, 0, 0}}, + {44100, {64, 14, 172, 68, 0, 0, 0, 0, 0, 0}}, + {47250, {64, 14, 184, 146, 0, 0, 0, 0, 0, 0}}, + {48000, {64, 14, 187, 128, 0, 0, 0, 0, 0, 0}}, + {50000, {64, 14, 195, 80, 0, 0, 0, 0, 0, 0}}, + {50400, {64, 14, 196, 224, 0, 0, 0, 0, 0, 0}}, + {88200, {64, 15, 172, 68, 0, 0, 0, 0, 0, 0}}, + {96000, {64, 15, 187, 128, 0, 0, 0, 0, 0, 0}}, + {176400, {64, 16, 172, 68, 0, 0, 0, 0, 0, 0}}, + {192000, {64, 16, 187, 128, 0, 0, 0, 0, 0, 0}}, + {352800, {64, 17, 172, 68, 0, 0, 0, 0, 0, 0}}, + {2822400, {64, 20, 172, 68, 0, 0, 0, 0, 0, 0}}, + {5644800, {64, 21, 172, 68, 0, 0, 0, 0, 0, 0}} +}; + +//============================================================= +enum WavAudioFormat +{ + PCM = 0x0001, + IEEEFloat = 0x0003, + ALaw = 0x0006, + MULaw = 0x0007, + Extensible = 0xFFFE +}; + +//============================================================= +enum AIFFAudioFormat +{ + Uncompressed, + Compressed, + 
Error +}; + +//============================================================= +/* IMPLEMENTATION */ +//============================================================= + +//============================================================= +template +AudioFile::AudioFile() +{ + static_assert(std::is_floating_point::value, "ERROR: This version of AudioFile only supports floating point sample formats"); + + bitDepth = 16; + sampleRate = 44100; + samples.resize (1); + samples[0].resize (0); + audioFileFormat = AudioFileFormat::NotLoaded; +} + +//============================================================= +template +uint32_t AudioFile::getSampleRate() const +{ + return sampleRate; +} + +//============================================================= +template +int AudioFile::getNumChannels() const +{ + return (int)samples.size(); +} + +//============================================================= +template +bool AudioFile::isMono() const +{ + return getNumChannels() == 1; +} + +//============================================================= +template +bool AudioFile::isStereo() const +{ + return getNumChannels() == 2; +} + +//============================================================= +template +int AudioFile::getBitDepth() const +{ + return bitDepth; +} + +//============================================================= +template +int AudioFile::getNumSamplesPerChannel() const +{ + if (samples.size() > 0) + return (int) samples[0].size(); + else + return 0; +} + +//============================================================= +template +double AudioFile::getLengthInSeconds() const +{ + return (double)getNumSamplesPerChannel() / (double)sampleRate; +} + +//============================================================= +template +void AudioFile::printSummary() const +{ + std::cout << "|======================================|" << std::endl; + std::cout << "Num Channels: " << getNumChannels() << std::endl; + std::cout << "Num Samples Per Channel: " << getNumSamplesPerChannel() 
<< std::endl; + std::cout << "Sample Rate: " << sampleRate << std::endl; + std::cout << "Bit Depth: " << bitDepth << std::endl; + std::cout << "Length in Seconds: " << getLengthInSeconds() << std::endl; + std::cout << "|======================================|" << std::endl; +} + +//============================================================= +template +bool AudioFile::setAudioBuffer (AudioBuffer& newBuffer) +{ + int numChannels = (int)newBuffer.size(); + + if (numChannels <= 0) + { + assert (false && "The buffer your are trying to use has no channels"); + return false; + } + + size_t numSamples = newBuffer[0].size(); + + // set the number of channels + samples.resize (newBuffer.size()); + + for (int k = 0; k < getNumChannels(); k++) + { + assert (newBuffer[k].size() == numSamples); + + samples[k].resize (numSamples); + + for (size_t i = 0; i < numSamples; i++) + { + samples[k][i] = newBuffer[k][i]; + } + } + + return true; +} + +//============================================================= +template +void AudioFile::setAudioBufferSize (int numChannels, int numSamples) +{ + samples.resize (numChannels); + setNumSamplesPerChannel (numSamples); +} + +//============================================================= +template +void AudioFile::setNumSamplesPerChannel (int numSamples) +{ + int originalSize = getNumSamplesPerChannel(); + + for (int i = 0; i < getNumChannels();i++) + { + samples[i].resize (numSamples); + + // set any new samples to zero + if (numSamples > originalSize) + std::fill (samples[i].begin() + originalSize, samples[i].end(), (T)0.); + } +} + +//============================================================= +template +void AudioFile::setNumChannels (int numChannels) +{ + int originalNumChannels = getNumChannels(); + int originalNumSamplesPerChannel = getNumSamplesPerChannel(); + + samples.resize (numChannels); + + // make sure any new channels are set to the right size + // and filled with zeros + if (numChannels > originalNumChannels) + { + for 
(int i = originalNumChannels; i < numChannels; i++) + { + samples[i].resize (originalNumSamplesPerChannel); + std::fill (samples[i].begin(), samples[i].end(), (T)0.); + } + } +} + +//============================================================= +template +void AudioFile::setBitDepth (int numBitsPerSample) +{ + bitDepth = numBitsPerSample; +} + +//============================================================= +template +void AudioFile::setSampleRate (uint32_t newSampleRate) +{ + sampleRate = newSampleRate; +} + +//============================================================= +template +void AudioFile::shouldLogErrorsToConsole (bool logErrors) +{ + logErrorsToConsole = logErrors; +} + +//============================================================= +template +bool AudioFile::load (std::string filePath) +{ + std::ifstream file (filePath, std::ios::binary); + + // check the file exists + if (! file.good()) + { + reportError ("ERROR: File doesn't exist or otherwise can't load file\n" + filePath); + return false; + } + + file.unsetf (std::ios::skipws); + std::istream_iterator begin (file), end; + std::vector fileData (begin, end); + + // get audio file format + audioFileFormat = determineAudioFileFormat (fileData); + + if (audioFileFormat == AudioFileFormat::Wave) + { + return decodeWaveFile (fileData); + } + else if (audioFileFormat == AudioFileFormat::Aiff) + { + return decodeAiffFile (fileData); + } + else + { + reportError ("Audio File Type: Error"); + return false; + } +} + +//============================================================= +template +bool AudioFile::decodeWaveFile (std::vector& fileData) +{ + // ----------------------------------------------------------- + // HEADER CHUNK + std::string headerChunkID (fileData.begin(), fileData.begin() + 4); + //int32_t fileSizeInBytes = fourBytesToInt (fileData, 4) + 8; + std::string format (fileData.begin() + 8, fileData.begin() + 12); + + // ----------------------------------------------------------- + // try and 
find the start points of key chunks + int indexOfDataChunk = getIndexOfChunk (fileData, "data", 12); + int indexOfFormatChunk = getIndexOfChunk (fileData, "fmt ", 12); + int indexOfXMLChunk = getIndexOfChunk (fileData, "iXML", 12); + + // if we can't find the data or format chunks, or the IDs/formats don't seem to be as expected + // then it is unlikely we'll able to read this file, so abort + if (indexOfDataChunk == -1 || indexOfFormatChunk == -1 || headerChunkID != "RIFF" || format != "WAVE") + { + reportError ("ERROR: this doesn't seem to be a valid .WAV file"); + return false; + } + + // ----------------------------------------------------------- + // FORMAT CHUNK + int f = indexOfFormatChunk; + std::string formatChunkID (fileData.begin() + f, fileData.begin() + f + 4); + //int32_t formatChunkSize = fourBytesToInt (fileData, f + 4); + int16_t audioFormat = twoBytesToInt (fileData, f + 8); + int16_t numChannels = twoBytesToInt (fileData, f + 10); + sampleRate = (uint32_t) fourBytesToInt (fileData, f + 12); + int32_t numBytesPerSecond = fourBytesToInt (fileData, f + 16); + int16_t numBytesPerBlock = twoBytesToInt (fileData, f + 20); + bitDepth = (int) twoBytesToInt (fileData, f + 22); + + int numBytesPerSample = bitDepth / 8; + + // check that the audio format is PCM or Float + if (audioFormat != WavAudioFormat::PCM && audioFormat != WavAudioFormat::IEEEFloat) + { + reportError ("ERROR: this .WAV file is encoded in a format that this library does not support at present"); + return false; + } + + // check the number of channels is mono or stereo + if (numChannels < 1 || numChannels > 128) + { + reportError ("ERROR: this WAV file seems to be an invalid number of channels (or corrupted?)"); + return false; + } + + // check header data is consistent + if ((numBytesPerSecond != (numChannels * sampleRate * bitDepth) / 8) || (numBytesPerBlock != (numChannels * numBytesPerSample))) + { + reportError ("ERROR: the header data in this WAV file seems to be inconsistent"); + 
return false; + } + + // check bit depth is either 8, 16, 24 or 32 bit + if (bitDepth != 8 && bitDepth != 16 && bitDepth != 24 && bitDepth != 32) + { + reportError ("ERROR: this file has a bit depth that is not 8, 16, 24 or 32 bits"); + return false; + } + + // ----------------------------------------------------------- + // DATA CHUNK + int d = indexOfDataChunk; + std::string dataChunkID (fileData.begin() + d, fileData.begin() + d + 4); + int32_t dataChunkSize = fourBytesToInt (fileData, d + 4); + + int numSamples = dataChunkSize / (numChannels * bitDepth / 8); + int samplesStartIndex = indexOfDataChunk + 8; + + clearAudioBuffer(); + samples.resize (numChannels); + + for (int i = 0; i < numSamples; i++) + { + for (int channel = 0; channel < numChannels; channel++) + { + int sampleIndex = samplesStartIndex + (numBytesPerBlock * i) + channel * numBytesPerSample; + + if (bitDepth == 8) + { + T sample = singleByteToSample (fileData[sampleIndex]); + samples[channel].push_back (sample); + } + else if (bitDepth == 16) + { + int16_t sampleAsInt = twoBytesToInt (fileData, sampleIndex); + T sample = sixteenBitIntToSample (sampleAsInt); + samples[channel].push_back (sample); + } + else if (bitDepth == 24) + { + int32_t sampleAsInt = 0; + sampleAsInt = (fileData[sampleIndex + 2] << 16) | (fileData[sampleIndex + 1] << 8) | fileData[sampleIndex]; + + if (sampleAsInt & 0x800000) // if the 24th bit is set, this is a negative number in 24-bit world + sampleAsInt = sampleAsInt | ~0xFFFFFF; // so make sure sign is extended to the 32 bit float + + T sample = (T)sampleAsInt / (T)8388608.; + samples[channel].push_back (sample); + } + else if (bitDepth == 32) + { + int32_t sampleAsInt = fourBytesToInt (fileData, sampleIndex); + T sample; + + if (audioFormat == WavAudioFormat::IEEEFloat) + sample = (T)reinterpret_cast (sampleAsInt); + else // assume PCM + sample = (T) sampleAsInt / static_cast (std::numeric_limits::max()); + + samples[channel].push_back (sample); + } + else + { + assert 
(false); + } + } + } + + // ----------------------------------------------------------- + // iXML CHUNK + if (indexOfXMLChunk != -1) + { + int32_t chunkSize = fourBytesToInt (fileData, indexOfXMLChunk + 4); + iXMLChunk = std::string ((const char*) &fileData[indexOfXMLChunk + 8], chunkSize); + } + + return true; +} + +//============================================================= +template +bool AudioFile::decodeAiffFile (std::vector& fileData) +{ + // ----------------------------------------------------------- + // HEADER CHUNK + std::string headerChunkID (fileData.begin(), fileData.begin() + 4); + //int32_t fileSizeInBytes = fourBytesToInt (fileData, 4, Endianness::BigEndian) + 8; + std::string format (fileData.begin() + 8, fileData.begin() + 12); + + int audioFormat = format == "AIFF" ? AIFFAudioFormat::Uncompressed : format == "AIFC" ? AIFFAudioFormat::Compressed : AIFFAudioFormat::Error; + + // ----------------------------------------------------------- + // try and find the start points of key chunks + int indexOfCommChunk = getIndexOfChunk (fileData, "COMM", 12, Endianness::BigEndian); + int indexOfSoundDataChunk = getIndexOfChunk (fileData, "SSND", 12, Endianness::BigEndian); + int indexOfXMLChunk = getIndexOfChunk (fileData, "iXML", 12, Endianness::BigEndian); + + // if we can't find the data or format chunks, or the IDs/formats don't seem to be as expected + // then it is unlikely we'll able to read this file, so abort + if (indexOfSoundDataChunk == -1 || indexOfCommChunk == -1 || headerChunkID != "FORM" || audioFormat == AIFFAudioFormat::Error) + { + reportError ("ERROR: this doesn't seem to be a valid AIFF file"); + return false; + } + + // ----------------------------------------------------------- + // COMM CHUNK + int p = indexOfCommChunk; + std::string commChunkID (fileData.begin() + p, fileData.begin() + p + 4); + //int32_t commChunkSize = fourBytesToInt (fileData, p + 4, Endianness::BigEndian); + int16_t numChannels = twoBytesToInt (fileData, p + 
8, Endianness::BigEndian); + int32_t numSamplesPerChannel = fourBytesToInt (fileData, p + 10, Endianness::BigEndian); + bitDepth = (int) twoBytesToInt (fileData, p + 14, Endianness::BigEndian); + sampleRate = getAiffSampleRate (fileData, p + 16); + + // check the sample rate was properly decoded + if (sampleRate == 0) + { + reportError ("ERROR: this AIFF file has an unsupported sample rate"); + return false; + } + + // check the number of channels is mono or stereo + if (numChannels < 1 ||numChannels > 2) + { + reportError ("ERROR: this AIFF file seems to be neither mono nor stereo (perhaps multi-track, or corrupted?)"); + return false; + } + + // check bit depth is either 8, 16, 24 or 32-bit + if (bitDepth != 8 && bitDepth != 16 && bitDepth != 24 && bitDepth != 32) + { + reportError ("ERROR: this file has a bit depth that is not 8, 16, 24 or 32 bits"); + return false; + } + + // ----------------------------------------------------------- + // SSND CHUNK + int s = indexOfSoundDataChunk; + std::string soundDataChunkID (fileData.begin() + s, fileData.begin() + s + 4); + int32_t soundDataChunkSize = fourBytesToInt (fileData, s + 4, Endianness::BigEndian); + int32_t offset = fourBytesToInt (fileData, s + 8, Endianness::BigEndian); + //int32_t blockSize = fourBytesToInt (fileData, s + 12, Endianness::BigEndian); + + int numBytesPerSample = bitDepth / 8; + int numBytesPerFrame = numBytesPerSample * numChannels; + int totalNumAudioSampleBytes = numSamplesPerChannel * numBytesPerFrame; + int samplesStartIndex = s + 16 + (int)offset; + + // sanity check the data + if ((soundDataChunkSize - 8) != totalNumAudioSampleBytes || totalNumAudioSampleBytes > static_cast(fileData.size() - samplesStartIndex)) + { + reportError ("ERROR: the metadatafor this file doesn't seem right"); + return false; + } + + clearAudioBuffer(); + samples.resize (numChannels); + + for (int i = 0; i < numSamplesPerChannel; i++) + { + for (int channel = 0; channel < numChannels; channel++) + { + int 
sampleIndex = samplesStartIndex + (numBytesPerFrame * i) + channel * numBytesPerSample; + + if (bitDepth == 8) + { + int8_t sampleAsSigned8Bit = (int8_t)fileData[sampleIndex]; + T sample = (T)sampleAsSigned8Bit / (T)128.; + samples[channel].push_back (sample); + } + else if (bitDepth == 16) + { + int16_t sampleAsInt = twoBytesToInt (fileData, sampleIndex, Endianness::BigEndian); + T sample = sixteenBitIntToSample (sampleAsInt); + samples[channel].push_back (sample); + } + else if (bitDepth == 24) + { + int32_t sampleAsInt = 0; + sampleAsInt = (fileData[sampleIndex] << 16) | (fileData[sampleIndex + 1] << 8) | fileData[sampleIndex + 2]; + + if (sampleAsInt & 0x800000) // if the 24th bit is set, this is a negative number in 24-bit world + sampleAsInt = sampleAsInt | ~0xFFFFFF; // so make sure sign is extended to the 32 bit float + + T sample = (T)sampleAsInt / (T)8388608.; + samples[channel].push_back (sample); + } + else if (bitDepth == 32) + { + int32_t sampleAsInt = fourBytesToInt (fileData, sampleIndex, Endianness::BigEndian); + T sample; + + if (audioFormat == AIFFAudioFormat::Compressed) + sample = (T)reinterpret_cast (sampleAsInt); + else // assume uncompressed + sample = (T) sampleAsInt / static_cast (std::numeric_limits::max()); + + samples[channel].push_back (sample); + } + else + { + assert (false); + } + } + } + + // ----------------------------------------------------------- + // iXML CHUNK + if (indexOfXMLChunk != -1) + { + int32_t chunkSize = fourBytesToInt (fileData, indexOfXMLChunk + 4); + iXMLChunk = std::string ((const char*) &fileData[indexOfXMLChunk + 8], chunkSize); + } + + return true; +} + +//============================================================= +template +uint32_t AudioFile::getAiffSampleRate (std::vector& fileData, int sampleRateStartIndex) +{ + for (auto it : aiffSampleRateTable) + { + if (tenByteMatch (fileData, sampleRateStartIndex, it.second, 0)) + return it.first; + } + + return 0; +} + 
+//============================================================= +template +bool AudioFile::tenByteMatch (std::vector& v1, int startIndex1, std::vector& v2, int startIndex2) +{ + for (int i = 0; i < 10; i++) + { + if (v1[startIndex1 + i] != v2[startIndex2 + i]) + return false; + } + + return true; +} + +//============================================================= +template +void AudioFile::addSampleRateToAiffData (std::vector& fileData, uint32_t sampleRate) +{ + if (aiffSampleRateTable.count (sampleRate) > 0) + { + for (int i = 0; i < 10; i++) + fileData.push_back (aiffSampleRateTable[sampleRate][i]); + } +} + +//============================================================= +template +bool AudioFile::save (std::string filePath, AudioFileFormat format) +{ + if (format == AudioFileFormat::Wave) + { + return saveToWaveFile (filePath); + } + else if (format == AudioFileFormat::Aiff) + { + return saveToAiffFile (filePath); + } + + return false; +} + +//============================================================= +template +bool AudioFile::saveToWaveFile (std::string filePath) +{ + std::vector fileData; + + int32_t dataChunkSize = getNumSamplesPerChannel() * (getNumChannels() * bitDepth / 8); + int16_t audioFormat = bitDepth == 32 ? WavAudioFormat::IEEEFloat : WavAudioFormat::PCM; + int32_t formatChunkSize = audioFormat == WavAudioFormat::PCM ? 
16 : 18; + int32_t iXMLChunkSize = static_cast (iXMLChunk.size()); + + // ----------------------------------------------------------- + // HEADER CHUNK + addStringToFileData (fileData, "RIFF"); + + // The file size in bytes is the header chunk size (4, not counting RIFF and WAVE) + the format + // chunk size (24) + the metadata part of the data chunk plus the actual data chunk size + int32_t fileSizeInBytes = 4 + formatChunkSize + 8 + 8 + dataChunkSize; + if (iXMLChunkSize > 0) + { + fileSizeInBytes += (8 + iXMLChunkSize); + } + + addInt32ToFileData (fileData, fileSizeInBytes); + + addStringToFileData (fileData, "WAVE"); + + // ----------------------------------------------------------- + // FORMAT CHUNK + addStringToFileData (fileData, "fmt "); + addInt32ToFileData (fileData, formatChunkSize); // format chunk size (16 for PCM) + addInt16ToFileData (fileData, audioFormat); // audio format + addInt16ToFileData (fileData, (int16_t)getNumChannels()); // num channels + addInt32ToFileData (fileData, (int32_t)sampleRate); // sample rate + + int32_t numBytesPerSecond = (int32_t) ((getNumChannels() * sampleRate * bitDepth) / 8); + addInt32ToFileData (fileData, numBytesPerSecond); + + int16_t numBytesPerBlock = getNumChannels() * (bitDepth / 8); + addInt16ToFileData (fileData, numBytesPerBlock); + + addInt16ToFileData (fileData, (int16_t)bitDepth); + + if (audioFormat == WavAudioFormat::IEEEFloat) + addInt16ToFileData (fileData, 0); // extension size + + // ----------------------------------------------------------- + // DATA CHUNK + addStringToFileData (fileData, "data"); + addInt32ToFileData (fileData, dataChunkSize); + + for (int i = 0; i < getNumSamplesPerChannel(); i++) + { + for (int channel = 0; channel < getNumChannels(); channel++) + { + if (bitDepth == 8) + { + uint8_t byte = sampleToSingleByte (samples[channel][i]); + fileData.push_back (byte); + } + else if (bitDepth == 16) + { + int16_t sampleAsInt = sampleToSixteenBitInt (samples[channel][i]); + 
addInt16ToFileData (fileData, sampleAsInt); + } + else if (bitDepth == 24) + { + int32_t sampleAsIntAgain = (int32_t) (samples[channel][i] * (T)8388608.); + + uint8_t bytes[3]; + bytes[2] = (uint8_t) (sampleAsIntAgain >> 16) & 0xFF; + bytes[1] = (uint8_t) (sampleAsIntAgain >> 8) & 0xFF; + bytes[0] = (uint8_t) sampleAsIntAgain & 0xFF; + + fileData.push_back (bytes[0]); + fileData.push_back (bytes[1]); + fileData.push_back (bytes[2]); + } + else if (bitDepth == 32) + { + int32_t sampleAsInt; + + if (audioFormat == WavAudioFormat::IEEEFloat) + sampleAsInt = (int32_t) reinterpret_cast (samples[channel][i]); + else // assume PCM + sampleAsInt = (int32_t) (samples[channel][i] * std::numeric_limits::max()); + + addInt32ToFileData (fileData, sampleAsInt, Endianness::LittleEndian); + } + else + { + assert (false && "Trying to write a file with unsupported bit depth"); + return false; + } + } + } + + // ----------------------------------------------------------- + // iXML CHUNK + if (iXMLChunkSize > 0) + { + addStringToFileData (fileData, "iXML"); + addInt32ToFileData (fileData, iXMLChunkSize); + addStringToFileData (fileData, iXMLChunk); + } + + // check that the various sizes we put in the metadata are correct + if (fileSizeInBytes != static_cast (fileData.size() - 8) || dataChunkSize != (getNumSamplesPerChannel() * getNumChannels() * (bitDepth / 8))) + { + reportError ("ERROR: couldn't save file to " + filePath); + return false; + } + + // try to write the file + return writeDataToFile (fileData, filePath); +} + +//============================================================= +template +bool AudioFile::saveToAiffFile (std::string filePath) +{ + std::vector fileData; + + int32_t numBytesPerSample = bitDepth / 8; + int32_t numBytesPerFrame = numBytesPerSample * getNumChannels(); + int32_t totalNumAudioSampleBytes = getNumSamplesPerChannel() * numBytesPerFrame; + int32_t soundDataChunkSize = totalNumAudioSampleBytes + 8; + int32_t iXMLChunkSize = static_cast 
(iXMLChunk.size()); + + // ----------------------------------------------------------- + // HEADER CHUNK + addStringToFileData (fileData, "FORM"); + + // The file size in bytes is the header chunk size (4, not counting FORM and AIFF) + the COMM + // chunk size (26) + the metadata part of the SSND chunk plus the actual data chunk size + int32_t fileSizeInBytes = 4 + 26 + 16 + totalNumAudioSampleBytes; + if (iXMLChunkSize > 0) + { + fileSizeInBytes += (8 + iXMLChunkSize); + } + + addInt32ToFileData (fileData, fileSizeInBytes, Endianness::BigEndian); + + addStringToFileData (fileData, "AIFF"); + + // ----------------------------------------------------------- + // COMM CHUNK + addStringToFileData (fileData, "COMM"); + addInt32ToFileData (fileData, 18, Endianness::BigEndian); // commChunkSize + addInt16ToFileData (fileData, getNumChannels(), Endianness::BigEndian); // num channels + addInt32ToFileData (fileData, getNumSamplesPerChannel(), Endianness::BigEndian); // num samples per channel + addInt16ToFileData (fileData, bitDepth, Endianness::BigEndian); // bit depth + addSampleRateToAiffData (fileData, sampleRate); + + // ----------------------------------------------------------- + // SSND CHUNK + addStringToFileData (fileData, "SSND"); + addInt32ToFileData (fileData, soundDataChunkSize, Endianness::BigEndian); + addInt32ToFileData (fileData, 0, Endianness::BigEndian); // offset + addInt32ToFileData (fileData, 0, Endianness::BigEndian); // block size + + for (int i = 0; i < getNumSamplesPerChannel(); i++) + { + for (int channel = 0; channel < getNumChannels(); channel++) + { + if (bitDepth == 8) + { + uint8_t byte = sampleToSingleByte (samples[channel][i]); + fileData.push_back (byte); + } + else if (bitDepth == 16) + { + int16_t sampleAsInt = sampleToSixteenBitInt (samples[channel][i]); + addInt16ToFileData (fileData, sampleAsInt, Endianness::BigEndian); + } + else if (bitDepth == 24) + { + int32_t sampleAsIntAgain = (int32_t) (samples[channel][i] * (T)8388608.); + + 
uint8_t bytes[3]; + bytes[0] = (uint8_t) (sampleAsIntAgain >> 16) & 0xFF; + bytes[1] = (uint8_t) (sampleAsIntAgain >> 8) & 0xFF; + bytes[2] = (uint8_t) sampleAsIntAgain & 0xFF; + + fileData.push_back (bytes[0]); + fileData.push_back (bytes[1]); + fileData.push_back (bytes[2]); + } + else if (bitDepth == 32) + { + // write samples as signed integers (no implementation yet for floating point, but looking at WAV implementation should help) + int32_t sampleAsInt = (int32_t) (samples[channel][i] * std::numeric_limits::max()); + addInt32ToFileData (fileData, sampleAsInt, Endianness::BigEndian); + } + else + { + assert (false && "Trying to write a file with unsupported bit depth"); + return false; + } + } + } + + // ----------------------------------------------------------- + // iXML CHUNK + if (iXMLChunkSize > 0) + { + addStringToFileData (fileData, "iXML"); + addInt32ToFileData (fileData, iXMLChunkSize); + addStringToFileData (fileData, iXMLChunk); + } + + // check that the various sizes we put in the metadata are correct + if (fileSizeInBytes != static_cast (fileData.size() - 8) || soundDataChunkSize != getNumSamplesPerChannel() * numBytesPerFrame + 8) + { + reportError ("ERROR: couldn't save file to " + filePath); + return false; + } + + // try to write the file + return writeDataToFile (fileData, filePath); +} + +//============================================================= +template +bool AudioFile::writeDataToFile (std::vector& fileData, std::string filePath) +{ + std::ofstream outputFile (filePath, std::ios::binary); + + if (outputFile.is_open()) + { + for (size_t i = 0; i < fileData.size(); i++) + { + char value = (char) fileData[i]; + outputFile.write (&value, sizeof (char)); + } + + outputFile.close(); + + return true; + } + + return false; +} + +//============================================================= +template +void AudioFile::addStringToFileData (std::vector& fileData, std::string s) +{ + for (size_t i = 0; i < s.length();i++) + fileData.push_back 
((uint8_t) s[i]); +} + +//============================================================= +template +void AudioFile::addInt32ToFileData (std::vector& fileData, int32_t i, Endianness endianness) +{ + uint8_t bytes[4]; + + if (endianness == Endianness::LittleEndian) + { + bytes[3] = (i >> 24) & 0xFF; + bytes[2] = (i >> 16) & 0xFF; + bytes[1] = (i >> 8) & 0xFF; + bytes[0] = i & 0xFF; + } + else + { + bytes[0] = (i >> 24) & 0xFF; + bytes[1] = (i >> 16) & 0xFF; + bytes[2] = (i >> 8) & 0xFF; + bytes[3] = i & 0xFF; + } + + for (int i = 0; i < 4; i++) + fileData.push_back (bytes[i]); +} + +//============================================================= +template +void AudioFile::addInt16ToFileData (std::vector& fileData, int16_t i, Endianness endianness) +{ + uint8_t bytes[2]; + + if (endianness == Endianness::LittleEndian) + { + bytes[1] = (i >> 8) & 0xFF; + bytes[0] = i & 0xFF; + } + else + { + bytes[0] = (i >> 8) & 0xFF; + bytes[1] = i & 0xFF; + } + + fileData.push_back (bytes[0]); + fileData.push_back (bytes[1]); +} + +//============================================================= +template +void AudioFile::clearAudioBuffer() +{ + for (size_t i = 0; i < samples.size();i++) + { + samples[i].clear(); + } + + samples.clear(); +} + +//============================================================= +template +AudioFileFormat AudioFile::determineAudioFileFormat (std::vector& fileData) +{ + std::string header (fileData.begin(), fileData.begin() + 4); + + if (header == "RIFF") + return AudioFileFormat::Wave; + else if (header == "FORM") + return AudioFileFormat::Aiff; + else + return AudioFileFormat::Error; +} + +//============================================================= +template +int32_t AudioFile::fourBytesToInt (std::vector& source, int startIndex, Endianness endianness) +{ + int32_t result; + + if (endianness == Endianness::LittleEndian) + result = (source[startIndex + 3] << 24) | (source[startIndex + 2] << 16) | (source[startIndex + 1] << 8) | source[startIndex]; + 
else + result = (source[startIndex] << 24) | (source[startIndex + 1] << 16) | (source[startIndex + 2] << 8) | source[startIndex + 3]; + + return result; +} + +//============================================================= +template +int16_t AudioFile::twoBytesToInt (std::vector& source, int startIndex, Endianness endianness) +{ + int16_t result; + + if (endianness == Endianness::LittleEndian) + result = (source[startIndex + 1] << 8) | source[startIndex]; + else + result = (source[startIndex] << 8) | source[startIndex + 1]; + + return result; +} + +//============================================================= +template +int AudioFile::getIndexOfString (std::vector& source, std::string stringToSearchFor) +{ + int index = -1; + int stringLength = (int)stringToSearchFor.length(); + + for (size_t i = 0; i < source.size() - stringLength;i++) + { + std::string section (source.begin() + i, source.begin() + i + stringLength); + + if (section == stringToSearchFor) + { + index = static_cast (i); + break; + } + } + + return index; +} + +//============================================================= +template +int AudioFile::getIndexOfChunk (std::vector& source, const std::string& chunkHeaderID, int startIndex, Endianness endianness) +{ + constexpr int dataLen = 4; + if (chunkHeaderID.size() != dataLen) + { + assert (false && "Invalid chunk header ID string"); + return -1; + } + + int i = startIndex; + while (i < source.size() - dataLen) + { + if (memcmp (&source[i], chunkHeaderID.data(), dataLen) == 0) + { + return i; + } + + i += dataLen; + auto chunkSize = fourBytesToInt (source, i, endianness); + i += (dataLen + chunkSize); + } + + return -1; +} + +//============================================================= +template +T AudioFile::sixteenBitIntToSample (int16_t sample) +{ + return static_cast (sample) / static_cast (32768.); +} + +//============================================================= +template +int16_t AudioFile::sampleToSixteenBitInt (T sample) +{ + 
sample = clamp (sample, -1., 1.); + return static_cast (sample * 32767.); +} + +//============================================================= +template +uint8_t AudioFile::sampleToSingleByte (T sample) +{ + sample = clamp (sample, -1., 1.); + sample = (sample + 1.) / 2.; + return static_cast (sample * 255.); +} + +//============================================================= +template +T AudioFile::singleByteToSample (uint8_t sample) +{ + return static_cast (sample - 128) / static_cast (128.); +} + +//============================================================= +template +T AudioFile::clamp (T value, T minValue, T maxValue) +{ + value = std::min (value, maxValue); + value = std::max (value, minValue); + return value; +} + +//============================================================= +template +void AudioFile::reportError (std::string errorMessage) +{ + if (logErrorsToConsole) + std::cout << errorMessage << std::endl; +} + +#if defined (_MSC_VER) + __pragma(warning (pop)) +#elif defined (__GNUC__) + _Pragma("GCC diagnostic pop") +#endif + +#endif /* AudioFile_h */ diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/include/Model.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/include/Model.h new file mode 100644 index 0000000000000000000000000000000000000000..42d6e3590847d0b5be1e5fe6b666e075bd3b6818 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/include/Model.h @@ -0,0 +1,70 @@ +// +// Created by sergio on 12/05/19. +// + +#ifndef CPPFLOW_MODEL_H +#define CPPFLOW_MODEL_H + +#include +#include +#include +#include +#include +#include +#include +#pragma warning(push, 0) +#include +#include "Tensor.h" +#pragma warning(pop) +class Tensor; + +class Model { +public: + // Pass a path to the model file and optional Tensorflow config options. See examples/load_model/main.cpp. 
+ explicit Model(const std::string& model_filename, const std::vector& config_options = {}); + + // Rule of five, moving is easy as the pointers can be copied, copying not as i have no idea how to copy + // the contents of the pointer (i guess dereferencing won't do a deep copy) + Model(const Model &model) = delete; + Model(Model &&model) = default; + Model& operator=(const Model &model) = delete; + Model& operator=(Model &&model) = default; + + ~Model(); + + void init(); + void restore(const std::string& ckpt); + void save(const std::string& ckpt); + void restore_savedmodel(const std::string& savedmdl); + std::vector get_operations() const; + + // Original Run + void run(const std::vector& inputs, const std::vector& outputs); + + // Run with references + void run(Tensor& input, const std::vector& outputs); + void run(const std::vector& inputs, Tensor& output); + void run(Tensor& input, Tensor& output); + + // Run with pointers + void run(Tensor* input, const std::vector& outputs); + void run(const std::vector& inputs, Tensor* output); + void run(Tensor* input, Tensor* output); + +private: + TF_Graph* graph; + TF_Session* session; + TF_Status* status; + + // Read a file from a string + static TF_Buffer* read(const std::string&); + + bool status_check(bool throw_exc) const; + void error_check(bool condition, const std::string &error) const; + +public: + friend class Tensor; +}; + + +#endif //CPPFLOW_MODEL_H diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/include/Tensor.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/include/Tensor.h new file mode 100644 index 0000000000000000000000000000000000000000..7fb9e523060ac238aa6cdd2bd3fa86b8c5cf97bd --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/include/Tensor.h @@ -0,0 +1,75 @@ +// +// Created by sergio on 13/05/19. 
+// + +#ifndef CPPFLOW_TENSOR_H +#define CPPFLOW_TENSOR_H + +#include +#include +#include +#include +#include +#include + +// Prevent warnings from Tensorflow C API headers + +#pragma warning(push, 0) +#include +#include "Model.h" +#pragma warning(pop) + +class Model; + +class Tensor { +public: + Tensor(const Model& model, const std::string& operation); + + // Rule of five, moving is easy as the pointers can be copied, copying not as i have no idea how to copy + // the contents of the pointer (i guess dereferencing won't do a deep copy) + Tensor(const Tensor &tensor) = delete; + Tensor(Tensor &&tensor) = default; + Tensor& operator=(const Tensor &tensor) = delete; + Tensor& operator=(Tensor &&tensor) = default; + + ~Tensor(); + + void clean(); + + template + void set_data(std::vector new_data); + + template + void set_data(std::vector new_data, const std::vector& new_shape); + + void set_data(const std::string & new_data, Model & inmodel); + template + std::vector get_data(); + + std::vector get_shape(); + +private: + TF_Tensor* val; + TF_Output op; + TF_DataType type; + std::vector shape; + std::unique_ptr> actual_shape; + void* data; + int flag; + + // Aux functions + void error_check(bool condition, const std::string& error); + + + + + template + static TF_DataType deduce_type(); + + void deduce_shape(); + +public: + friend class Model; +}; + +#endif //CPPFLOW_TENSOR_H diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/src/Model.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/src/Model.cpp new file mode 100644 index 0000000000000000000000000000000000000000..9be0732ca24a1841fcd27397756be6301d3a2f5a --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/src/Model.cpp @@ -0,0 +1,278 @@ +// +// Created by sergio on 12/05/19. 
+// + +#include "../include/Model.h" + +Model::Model(const std::string& model_filename, const std::vector& config_options) { + this->status = TF_NewStatus(); + this->graph = TF_NewGraph(); + + // Create the session. + TF_SessionOptions* sess_opts = TF_NewSessionOptions(); + + if (!config_options.empty()) + { + TF_SetConfig(sess_opts, static_cast(config_options.data()), config_options.size(), this->status); + this->status_check(true); + } + + TF_Buffer* RunOpts = NULL; + + const char* tags = "serve"; + int ntags = 1; + + this->session = TF_LoadSessionFromSavedModel(sess_opts, RunOpts, model_filename.c_str(), &tags, ntags, this->graph, NULL, this->status); + if (TF_GetCode(this->status) == TF_OK) + { + printf("TF_LoadSessionFromSavedModel OK\n"); + } + else + { + printf("%s", TF_Message(this->status)); + } + TF_DeleteSessionOptions(sess_opts); + + // Check the status + this->status_check(true); + + // Create the graph + TF_Graph* g = this->graph; + + + this->status_check(true); +} + +Model::~Model() { + TF_DeleteSession(this->session, this->status); + TF_DeleteGraph(this->graph); + this->status_check(true); + TF_DeleteStatus(this->status); +} + + +void Model::init() { + TF_Operation* init_op[1] = {TF_GraphOperationByName(this->graph, "init")}; + + this->error_check(init_op[0]!= nullptr, "Error: No operation named \"init\" exists"); + + TF_SessionRun(this->session, nullptr, nullptr, nullptr, 0, nullptr, nullptr, 0, init_op, 1, nullptr, this->status); + this->status_check(true); +} + +void Model::save(const std::string &ckpt) { + // Encode file_name to tensor + size_t size = 8 + TF_StringEncodedSize(ckpt.length()); + TF_Tensor* t = TF_AllocateTensor(TF_STRING, nullptr, 0, size); + char* data = static_cast(TF_TensorData(t)); + for (int i=0; i<8; i++) {data[i]=0;} + TF_StringEncode(ckpt.c_str(), ckpt.size(), data + 8, size - 8, status); + + memset(data, 0, 8); // 8-byte offset of first string. 
+ TF_StringEncode(ckpt.c_str(), ckpt.length(), (char*)(data + 8), size - 8, status); + + // Check errors + if (!this->status_check(false)) { + TF_DeleteTensor(t); + std::cerr << "Error during filename " << ckpt << " encoding" << std::endl; + this->status_check(true); + } + + TF_Output output_file; + output_file.oper = TF_GraphOperationByName(this->graph, "save/Const"); + output_file.index = 0; + TF_Output inputs[1] = {output_file}; + + TF_Tensor* input_values[1] = {t}; + const TF_Operation* restore_op[1] = {TF_GraphOperationByName(this->graph, "save/control_dependency")}; + if (!restore_op[0]) { + TF_DeleteTensor(t); + this->error_check(false, "Error: No operation named \"save/control_dependencyl\" exists"); + } + + + TF_SessionRun(this->session, nullptr, inputs, input_values, 1, nullptr, nullptr, 0, restore_op, 1, nullptr, this->status); + TF_DeleteTensor(t); + + this->status_check(true); +} + +void Model::restore_savedmodel(const std::string & savedmdl) +{ + + + +} + +void Model::restore(const std::string& ckpt) { + + // Encode file_name to tensor + size_t size = 8 + TF_StringEncodedSize(ckpt.size()); + TF_Tensor* t = TF_AllocateTensor(TF_STRING, nullptr, 0, size); + char* data = static_cast(TF_TensorData(t)); + for (int i=0; i<8; i++) {data[i]=0;} + TF_StringEncode(ckpt.c_str(), ckpt.size(), data + 8, size - 8, status); + + // Check errors + if (!this->status_check(false)) { + TF_DeleteTensor(t); + std::cerr << "Error during filename " << ckpt << " encoding" << std::endl; + this->status_check(true); + } + + TF_Output output_file; + output_file.oper = TF_GraphOperationByName(this->graph, "save/Const"); + output_file.index = 0; + TF_Output inputs[1] = {output_file}; + + TF_Tensor* input_values[1] = {t}; + const TF_Operation* restore_op[1] = {TF_GraphOperationByName(this->graph, "save/restore_all")}; + if (!restore_op[0]) { + TF_DeleteTensor(t); + this->error_check(false, "Error: No operation named \"save/restore_all\" exists"); + } + + + + 
TF_SessionRun(this->session, nullptr, inputs, input_values, 1, nullptr, nullptr, 0, restore_op, 1, nullptr, this->status); + TF_DeleteTensor(t); + + this->status_check(true); +} + +TF_Buffer *Model::read(const std::string& filename) { + std::ifstream file (filename, std::ios::binary | std::ios::ate); + + // Error opening the file + if (!file.is_open()) { + std::cerr << "Unable to open file: " << filename << std::endl; + return nullptr; + } + + + // Cursor is at the end to get size + auto size = file.tellg(); + // Move cursor to the beginning + file.seekg (0, std::ios::beg); + + // Read + auto data = new char [size]; + file.seekg (0, std::ios::beg); + file.read (data, size); + + // Error reading the file + if (!file) { + std::cerr << "Unable to read the full file: " << filename << std::endl; + return nullptr; + } + + + // Create tensorflow buffer from read data + TF_Buffer* buffer = TF_NewBufferFromString(data, size); + + // Close file and remove data + file.close(); + delete[] data; + + return buffer; +} + +std::vector Model::get_operations() const { + std::vector result; + size_t pos = 0; + TF_Operation* oper; + + // Iterate through the operations of a graph + while ((oper = TF_GraphNextOperation(this->graph, &pos)) != nullptr) { + result.emplace_back(TF_OperationName(oper)); + } + + return result; +} + +void Model::run(const std::vector& inputs, const std::vector& outputs) { + + this->error_check(std::all_of(inputs.begin(), inputs.end(), [](const Tensor* i){return i->flag == 1;}), + "Error: Not all elements from the inputs are full"); + + this->error_check(std::all_of(outputs.begin(), outputs.end(), [](const Tensor* o){return o->flag != -1;}), + "Error: Not all outputs Tensors are valid"); + + + // Clean previous stored outputs + std::for_each(outputs.begin(), outputs.end(), [](Tensor* o){o->clean();}); + + // Get input operations + std::vector io(inputs.size()); + std::transform(inputs.begin(), inputs.end(), io.begin(), [](const Tensor* i) {return i->op;}); + + 
// Get input values + std::vector iv(inputs.size()); + std::transform(inputs.begin(), inputs.end(), iv.begin(), [](const Tensor* i) {return i->val;}); + + // Get output operations + std::vector oo(outputs.size()); + std::transform(outputs.begin(), outputs.end(), oo.begin(), [](const Tensor* o) {return o->op;}); + + // Prepare output recipients + auto ov = new TF_Tensor*[outputs.size()]; + + TF_SessionRun(this->session, nullptr, io.data(), iv.data(), inputs.size(), oo.data(), ov, outputs.size(), nullptr, 0, nullptr, this->status); + this->status_check(true); + + // Save results on outputs and mark as full + for (std::size_t i=0; ival = ov[i]; + outputs[i]->flag = 1; + outputs[i]->deduce_shape(); + } + + // Mark input as empty + std::for_each(inputs.begin(), inputs.end(), [] (Tensor* i) {i->clean();}); + + delete[] ov; +} + +void Model::run(Tensor &input, Tensor &output) { + this->run(&input, &output); +} + +void Model::run(const std::vector &inputs, Tensor &output) { + this->run(inputs, &output); +} + +void Model::run(Tensor &input, const std::vector &outputs) { + this->run(&input, outputs); +} + +void Model::run(Tensor *input, Tensor *output) { + this->run(std::vector({input}), std::vector({output})); +} + +void Model::run(const std::vector &inputs, Tensor *output) { + this->run(inputs, std::vector({output})); +} + +void Model::run(Tensor *input, const std::vector &outputs) { + this->run(std::vector({input}), outputs); +} + +bool Model::status_check(bool throw_exc) const { + + if (TF_GetCode(this->status) != TF_OK) { + if (throw_exc) { + const char* errmsg = TF_Message(status); + printf(errmsg); + throw std::runtime_error(errmsg); + } else { + return false; + } + } + return true; +} + +void Model::error_check(bool condition, const std::string &error) const { + if (!condition) { + throw std::runtime_error(error); + } +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/src/Tensor.cpp 
b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/src/Tensor.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ffb6f5a46efa762c995a53878ea4218e9a69dec3 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/CppFlow/src/Tensor.cpp @@ -0,0 +1,312 @@ +// +// Created by sergio on 13/05/19. +// + +#include "../include/Tensor.h" + +#include +// Disable "loss of data" warnings +#pragma warning( disable : 4267) +#include + +Tensor::Tensor(const Model& model, const std::string& operation) { + + // Get operation by the name + this->op.oper = TF_GraphOperationByName(model.graph, operation.c_str()); + this->op.index = 0; + + // Operation did not exists + error_check(this->op.oper != nullptr, "No operation named \"" + operation + "\" exists" ); + + // DIMENSIONS + + // Get number of dimensions + int n_dims = TF_GraphGetTensorNumDims(model.graph, this->op, model.status); + + // DataType + this->type = TF_OperationOutputType(this->op); + + // If is not a scalar + if (n_dims > 0) { + // Get dimensions + auto *dims = new int64_t[n_dims]; + TF_GraphGetTensorShape(model.graph, this->op, dims, n_dims, model.status); + + // Check error on Model Status + model.status_check(true); + + this->shape = std::vector(dims, dims + n_dims); + + // Only one dimension can be unknown using this constructor + // error_check(std::count(this->shape.begin(), this->shape.end(), -1) <= 1, "At most one dimension can be unknown"); + + delete[] dims; + } + + this->flag = 0; + this->val = nullptr; + this->data = nullptr; +} + +Tensor::~Tensor() { + this->clean(); +} + + + +void Tensor::clean() { + if (this->flag == 1) { + TF_DeleteTensor(this->val); + this->flag = 0; + } + this->data = nullptr; +} + +void Tensor::error_check(bool condition, const std::string &error) { + if (!condition) { + this->flag = -1; + throw std::runtime_error(error); + } +} + +template +void Tensor::set_data(std::vector new_data) { + + //Non empty tensor + if 
(this->flag == 1) { + TF_DeleteTensor(this->val); + this->flag = 0; + } + + // Check Tensor is valid + this->error_check(this->flag != -1, "Tensor is not valid"); + + // Check type + this->error_check(deduce_type() == this->type, "Provided type is different from Tensor expected type"); + + // Dimensions must be known + this->error_check(!this->shape.empty(), "Shape of the input Tensor is not known, please provide a shape"); + + // At most one dimension can be unknown + this->error_check(std::count(this->shape.begin(), this->shape.end(), -1) >= -1, "At most one dimension can be unknown, please provide a shape"); + + // Check number of elements + auto exp_size = std::abs(std::accumulate(this->shape.begin(), this->shape.end(), 1, std::multiplies())); + + this->error_check(new_data.size() % exp_size == 0, "Expected and provided number of elements do not match"); + + // Deallocator + auto d = [](void* ddata, size_t, void*) {free(static_cast(ddata));}; + + + // Calculate actual shape of unknown dimensions + this->actual_shape = std::make_unique(shape.begin(), shape.end()); + std::replace_if (actual_shape->begin(), actual_shape->end(), [](int64_t r) {return r==-1;}, new_data.size()/exp_size); + + // Saves data on class + this->data = malloc(sizeof(T) * new_data.size()); + memcpy(this->data, new_data.data(), sizeof(T) * new_data.size()); + + this->val = TF_NewTensor(this->type, actual_shape->data(), actual_shape->size(), this->data, sizeof(T) * new_data.size(), d, nullptr); + + + this->error_check(this->val != nullptr, "An error occurred allocating the Tensor memory"); + + this->flag = 1; +} + +void Tensor::set_data(const std::string& new_data,Model& inmodel) { + + //Non empty tensor + if (this->flag == 1) { + TF_DeleteTensor(this->val); + this->flag = 0; + } + + // Check Tensor is valid + + this->error_check(this->type == TF_STRING,"Tensor is not string"); + this->error_check(this->flag != -1, "Tensor is not valid"); + + std::string input_str = new_data; // any input 
string + size_t encoded_size = TF_StringEncodedSize(input_str.size()); + size_t total_size = 8 + encoded_size; // 8 extra bytes - for start_offset + char *input_encoded = (char*)malloc(total_size); + for (int i = 0; i < 8; ++i) { // fills start_offset + input_encoded[i] = 0; + } + TF_StringEncode(input_str.c_str(), input_str.size(), input_encoded + 8, encoded_size, inmodel.status); // fills the rest of tensor data + if (TF_GetCode(inmodel.status) != TF_OK) { + fprintf(stderr, "ERROR: something wrong with encoding: %s", TF_Message(inmodel.status)); + } + + + this->shape = { (int64_t)total_size }; + + + // Dimensions must be known + this->error_check(!this->shape.empty(), "Shape of the input Tensor is not known, please provide a shape"); + + // At most one dimension can be unknown + this->error_check(std::count(this->shape.begin(), this->shape.end(), -1) >= -1, "At most one dimension can be unknown, please provide a shape"); + + // Check number of elements + + + // Deallocator + auto d = [](void* ddata, size_t, void*) {free(static_cast(ddata));}; + + + // Calculate actual shape of unknown dimensions + this->actual_shape = std::make_unique(shape.begin(), shape.end()); + + // Saves data on class + this->data = malloc(sizeof(char) * total_size); + memcpy(this->data, input_encoded, sizeof(char) * total_size); + + this->val = TF_NewTensor(this->type,NULL, 0, this->data, sizeof(char) * total_size, d, nullptr); + + + this->error_check(this->val != nullptr, "An error occurred allocating the Tensor memory"); + + this->flag = 1; + free(input_encoded); +} + + + +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape) { + + this->error_check(this->shape.empty() || this->shape.size() == new_shape.size(), "Provided shape has different number of dimensions"); + auto old_shape = this->shape; + + this->shape = new_shape; + this->set_data(new_data); + + this->shape = old_shape; +} + +template +std::vector Tensor::get_data() { + + // Check Tensor is valid + 
this->error_check(this->flag != -1, "Tensor is not valid"); + + // Check type + this->error_check(deduce_type() == this->type, "Expected return type is different from Tensor type"); + + // Tensor is not empty + this->error_check(this->flag != 0, "Tensor is empty"); + + + // Check tensor data is not empty + auto raw_data = TF_TensorData(this->val); + this->error_check(raw_data != nullptr, "Tensor data is empty"); + + size_t size = TF_TensorByteSize(this->val) / TF_DataTypeSize(TF_TensorType(this->val)); + + // Convert to correct type + const auto T_data = static_cast(raw_data); + return std::vector(T_data, T_data + size); +} + +std::vector Tensor::get_shape() { + return shape; +} + +template +TF_DataType Tensor::deduce_type() { + if (std::is_same::value) + return TF_FLOAT; + if (std::is_same::value) + return TF_DOUBLE; + if (std::is_same::value) + return TF_INT32; + if (std::is_same::value) + return TF_UINT8; + if (std::is_same::value) + return TF_INT16; + if (std::is_same::value) + return TF_INT8; + if (std::is_same::value) + return TF_INT64; +// if constexpr (std::is_same::value) +// return TF_BOOL; + if (std::is_same::value) + return TF_UINT16; + if (std::is_same::value) + return TF_UINT32; + if (std::is_same::value) + return TF_UINT64; + if (std::is_same::value) + return TF_STRING; + + throw std::runtime_error{"Could not deduce type!"}; +} + +void Tensor::deduce_shape() { + // Get number of dimensions + int n_dims = TF_NumDims(this->val); + + // If is not a scalar + if (n_dims > 0) { + // Get dimensions + this->shape = std::vector(n_dims, -1); + for (int i=0; ishape[i] = TF_Dim(this->val, i); + } + } +} + + +// VALID deduce_type TEMPLATES +template TF_DataType Tensor::deduce_type(); +template TF_DataType Tensor::deduce_type(); +//template TF_DataType Tensor::deduce_type(); +template TF_DataType Tensor::deduce_type(); +template TF_DataType Tensor::deduce_type(); +template TF_DataType Tensor::deduce_type(); +template TF_DataType Tensor::deduce_type(); +template 
TF_DataType Tensor::deduce_type(); +template TF_DataType Tensor::deduce_type(); +template TF_DataType Tensor::deduce_type(); +template TF_DataType Tensor::deduce_type(); + +// VALID get_data TEMPLATES +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); +template std::vector Tensor::get_data(); + +// VALID set_data TEMPLATES +template void Tensor::set_data(std::vector new_data); +template void Tensor::set_data(std::vector new_data); +//template void Tensor::set_data(std::vector new_data); +template void Tensor::set_data(std::vector new_data); +template void Tensor::set_data(std::vector new_data); +template void Tensor::set_data(std::vector new_data); +template void Tensor::set_data(std::vector new_data); +template void Tensor::set_data(std::vector new_data); +template void Tensor::set_data(std::vector new_data); +template void Tensor::set_data(std::vector new_data); +template void Tensor::set_data(std::vector new_data); + +// VALID set_data TEMPLATES +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); +//template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); +template void Tensor::set_data(std::vector new_data, const 
std::vector& new_shape); +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); +template void Tensor::set_data(std::vector new_data, const std::vector& new_shape); diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/ZCharScanner.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/ZCharScanner.cpp new file mode 100644 index 0000000000000000000000000000000000000000..95596bce3d88d697a8e4994bace8546d96bd70aa --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/ZCharScanner.cpp @@ -0,0 +1,203 @@ +#include "ZCharScanner.h" +#include +using namespace std; + +int ZStringDelimiter::key_search(const GString& s, const GString& key) +{ + int count = 0; + size_t pos = 0; + while ((pos = s.find(key, pos)) != GString::npos) { + ++count; + ++pos; + } + return count; +} +void ZStringDelimiter::UpdateTokens() +{ + if (!m_vDelimiters.size() || m_sString == "") + return; + + m_vTokens.clear(); + + + vector::iterator dIt = m_vDelimiters.begin(); + while (dIt != m_vDelimiters.end()) + { + GString delimiter = *dIt; + + + DelimStr(m_sString, delimiter, true); + + + ++dIt; + } + + + +} + + +void ZStringDelimiter::DelimStr(const GString & s, const GString & delimiter, const bool & removeEmptyEntries) +{ + BarRange(0, s.length()); + for (size_t start = 0, end; start < s.length(); start = end + delimiter.length()) + { + size_t position = s.find(delimiter, start); + end = position != GString::npos ? 
position : s.length(); + + GString token = s.substr(start, end - start); + if (!removeEmptyEntries || !token.empty()) + { + if (token != s) + m_vTokens.push_back(token); + + } + Bar(position); + } + + // dadwwdawdaawdwadwd +} + +void ZStringDelimiter::BarRange(const int & min, const int & max) +{ +#ifdef _AFX_ALL_WARNINGS + if (PgBar) + m_pBar->SetRange32(min, max); + + +#endif +} + +void ZStringDelimiter::Bar(const int & pos) +{ +#ifdef _AFX_ALL_WARNINGS + if (PgBar) + m_pBar->SetPos(pos); + + +#endif +} + +ZStringDelimiter::ZStringDelimiter() +{ + m_sString = ""; + tokenIndex = 0; + PgBar = false; +} + + +bool ZStringDelimiter::GetFirstToken(GString & in_out) +{ + if (m_vTokens.size() >= 1) { + in_out = m_vTokens[0]; + return true; + } + else { + return false; + } +} + +bool ZStringDelimiter::GetNextToken(GString & in_sOut) +{ + if (tokenIndex > m_vTokens.size() - 1) + return false; + + in_sOut = m_vTokens[tokenIndex]; + ++tokenIndex; + + return true; +} + +GString ZStringDelimiter::operator[](const size_t & in_index) +{ + if (in_index > m_vTokens.size()) + throw std::out_of_range("ZStringDelimiter tried to access token higher than size"); + + return m_vTokens[in_index]; + +} +GString ZStringDelimiter::Reassemble(const GString& delim, const int& nelem) +{ + GString Result = ""; + TokenIterator RasIt = m_vTokens.begin(); + int r = 0; + if (nelem == -1) { + while (RasIt != m_vTokens.end()) + { + + if (r != 0) + Result.append(delim); + + Result.append(*RasIt); + + ++r; + + + ++RasIt; + } + } + else { + while (RasIt != m_vTokens.end() && r < nelem) + { + + if (r != 0) + Result.append(delim); + + Result.append(*RasIt); + + ++r; + ++RasIt; + } + } + + return Result; + +} + +GString ZStringDelimiter::Reassemble(const GString & delim, const std::vector& Strs,int nelem) +{ + GString Result = ""; + TokenIterator RasIt = Strs.begin(); + int r = 0; + if (nelem == -1) { + while (RasIt != Strs.end()) + { + + if (r != 0) + Result.append(delim); + + Result.append(*RasIt); + + 
++r; + + + ++RasIt; + } + } + else { + while (RasIt != Strs.end() && r < nelem) + { + + if (r != 0) + Result.append(delim); + + Result.append(*RasIt); + + ++r; + ++RasIt; + } + } + + return Result; +} + +void ZStringDelimiter::AddDelimiter(const GString & in_Delim) +{ + m_vDelimiters.push_back(in_Delim); + UpdateTokens(); + +} + +ZStringDelimiter::~ZStringDelimiter() +{ +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/ZCharScanner.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/ZCharScanner.h new file mode 100644 index 0000000000000000000000000000000000000000..a7fef3c9456cdf6910c6f8e4ff784ef3669e2aae --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/ZCharScanner.h @@ -0,0 +1,79 @@ +#pragma once + +#define GBasicCharScanner ZStringDelimiter + +#include +#include + +#define ZSDEL_USE_STD_STRING +#ifndef ZSDEL_USE_STD_STRING +#include "golem_string.h" +#else +#define GString std::string +#endif + +typedef std::vector::const_iterator TokenIterator; + +// ZStringDelimiter +// ============== +// Simple class to delimit and split strings. 
+// You can use operator[] to access them +// Or you can use the itBegin() and itEnd() to get some iterators +// ================= +class ZStringDelimiter +{ +private: + int key_search(const GString & s, const GString & key); + void UpdateTokens(); + std::vector m_vTokens; + std::vector m_vDelimiters; + + GString m_sString; + + void DelimStr(const GString& s, const GString& delimiter, const bool& removeEmptyEntries = false); + void BarRange(const int& min, const int& max); + void Bar(const int& pos); + size_t tokenIndex; +public: + ZStringDelimiter(); + bool PgBar; + +#ifdef _AFX_ALL_WARNINGS + CProgressCtrl* m_pBar; +#endif + + ZStringDelimiter(const GString& in_iStr) { + m_sString = in_iStr; + PgBar = false; + + } + + bool GetFirstToken(GString& in_out); + bool GetNextToken(GString& in_sOut); + + // std::String alts + + size_t szTokens() { return m_vTokens.size(); } + GString operator[](const size_t& in_index); + + GString Reassemble(const GString & delim, const int & nelem = -1); + + // Override to reassemble provided tokens. 
+ GString Reassemble(const GString & delim, const std::vector& Strs,int nelem = -1); + + // Get a const reference to the tokens + const std::vector& GetTokens() { return m_vTokens; } + + TokenIterator itBegin() { return m_vTokens.begin(); } + TokenIterator itEnd() { return m_vTokens.end(); } + + void SetText(const GString& in_Txt) { + m_sString = in_Txt; + if (m_vDelimiters.size()) + UpdateTokens(); + } + void AddDelimiter(const GString& in_Delim); + + ~ZStringDelimiter(); +}; + diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/cxxopts.hpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/cxxopts.hpp new file mode 100644 index 0000000000000000000000000000000000000000..6d230f062309b5d0d7ed7c996a08fc2f1d084415 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/cxxopts.hpp @@ -0,0 +1,2114 @@ +/* + +Copyright (c) 2014, 2015, 2016, 2017 Jarryd Beck + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ +*/ + +#ifndef CXXOPTS_HPP_INCLUDED +#define CXXOPTS_HPP_INCLUDED + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef __cpp_lib_optional +#include +#define CXXOPTS_HAS_OPTIONAL +#endif + +#ifndef CXXOPTS_VECTOR_DELIMITER +#define CXXOPTS_VECTOR_DELIMITER ',' +#endif + +#define CXXOPTS__VERSION_MAJOR 2 +#define CXXOPTS__VERSION_MINOR 2 +#define CXXOPTS__VERSION_PATCH 0 + +namespace cxxopts +{ + static constexpr struct { + uint8_t major, minor, patch; + } version = { + CXXOPTS__VERSION_MAJOR, + CXXOPTS__VERSION_MINOR, + CXXOPTS__VERSION_PATCH + }; +} + +//when we ask cxxopts to use Unicode, help strings are processed using ICU, +//which results in the correct lengths being computed for strings when they +//are formatted for the help output +//it is necessary to make sure that can be found by the +//compiler, and that icu-uc is linked in to the binary. + +#ifdef CXXOPTS_USE_UNICODE +#include + +namespace cxxopts +{ + typedef icu::UnicodeString String; + + inline + String + toLocalString(std::string s) + { + return icu::UnicodeString::fromUTF8(std::move(s)); + } + + class UnicodeStringIterator : public + std::iterator + { + public: + + UnicodeStringIterator(const icu::UnicodeString* string, int32_t pos) + : s(string) + , i(pos) + { + } + + value_type + operator*() const + { + return s->char32At(i); + } + + bool + operator==(const UnicodeStringIterator& rhs) const + { + return s == rhs.s && i == rhs.i; + } + + bool + operator!=(const UnicodeStringIterator& rhs) const + { + return !(*this == rhs); + } + + UnicodeStringIterator& + operator++() + { + ++i; + return *this; + } + + UnicodeStringIterator + operator+(int32_t v) + { + return UnicodeStringIterator(s, i + v); + } + + private: + const icu::UnicodeString* s; + int32_t i; + }; + + inline + String& + stringAppend(String&s, String a) + { + return s.append(std::move(a)); + } + + inline + String& + stringAppend(String& s, int n, 
UChar32 c) + { + for (int i = 0; i != n; ++i) + { + s.append(c); + } + + return s; + } + + template + String& + stringAppend(String& s, Iterator begin, Iterator end) + { + while (begin != end) + { + s.append(*begin); + ++begin; + } + + return s; + } + + inline + size_t + stringLength(const String& s) + { + return s.length(); + } + + inline + std::string + toUTF8String(const String& s) + { + std::string result; + s.toUTF8String(result); + + return result; + } + + inline + bool + empty(const String& s) + { + return s.isEmpty(); + } +} + +namespace std +{ + inline + cxxopts::UnicodeStringIterator + begin(const icu::UnicodeString& s) + { + return cxxopts::UnicodeStringIterator(&s, 0); + } + + inline + cxxopts::UnicodeStringIterator + end(const icu::UnicodeString& s) + { + return cxxopts::UnicodeStringIterator(&s, s.length()); + } +} + +//ifdef CXXOPTS_USE_UNICODE +#else + +namespace cxxopts +{ + typedef std::string String; + + template + T + toLocalString(T&& t) + { + return std::forward(t); + } + + inline + size_t + stringLength(const String& s) + { + return s.length(); + } + + inline + String& + stringAppend(String&s, String a) + { + return s.append(std::move(a)); + } + + inline + String& + stringAppend(String& s, size_t n, char c) + { + return s.append(n, c); + } + + template + String& + stringAppend(String& s, Iterator begin, Iterator end) + { + return s.append(begin, end); + } + + template + std::string + toUTF8String(T&& t) + { + return std::forward(t); + } + + inline + bool + empty(const std::string& s) + { + return s.empty(); + } +} + +//ifdef CXXOPTS_USE_UNICODE +#endif + +namespace cxxopts +{ + namespace + { +#ifdef _WIN32 + const std::string LQUOTE("\'"); + const std::string RQUOTE("\'"); +#else + const std::string LQUOTE("‘"); + const std::string RQUOTE("’"); +#endif + } + + class Value : public std::enable_shared_from_this + { + public: + + virtual ~Value() = default; + + virtual + std::shared_ptr + clone() const = 0; + + virtual void + parse(const 
std::string& text) const = 0; + + virtual void + parse() const = 0; + + virtual bool + has_default() const = 0; + + virtual bool + is_container() const = 0; + + virtual bool + has_implicit() const = 0; + + virtual std::string + get_default_value() const = 0; + + virtual std::string + get_implicit_value() const = 0; + + virtual std::shared_ptr + default_value(const std::string& value) = 0; + + virtual std::shared_ptr + implicit_value(const std::string& value) = 0; + + virtual std::shared_ptr + no_implicit_value() = 0; + + virtual bool + is_boolean() const = 0; + }; + + class OptionException : public std::exception + { + public: + OptionException(const std::string& message) + : m_message(message) + { + } + + virtual const char* + what() const noexcept + { + return m_message.c_str(); + } + + private: + std::string m_message; + }; + + class OptionSpecException : public OptionException + { + public: + + OptionSpecException(const std::string& message) + : OptionException(message) + { + } + }; + + class OptionParseException : public OptionException + { + public: + OptionParseException(const std::string& message) + : OptionException(message) + { + } + }; + + class option_exists_error : public OptionSpecException + { + public: + option_exists_error(const std::string& option) + : OptionSpecException("Option " + LQUOTE + option + RQUOTE + " already exists") + { + } + }; + + class invalid_option_format_error : public OptionSpecException + { + public: + invalid_option_format_error(const std::string& format) + : OptionSpecException("Invalid option format " + LQUOTE + format + RQUOTE) + { + } + }; + + class option_syntax_exception : public OptionParseException { + public: + option_syntax_exception(const std::string& text) + : OptionParseException("Argument " + LQUOTE + text + RQUOTE + + " starts with a - but has incorrect syntax") + { + } + }; + + class option_not_exists_exception : public OptionParseException + { + public: + option_not_exists_exception(const std::string& option) 
+ : OptionParseException("Option " + LQUOTE + option + RQUOTE + " does not exist") + { + } + }; + + class missing_argument_exception : public OptionParseException + { + public: + missing_argument_exception(const std::string& option) + : OptionParseException( + "Option " + LQUOTE + option + RQUOTE + " is missing an argument" + ) + { + } + }; + + class option_requires_argument_exception : public OptionParseException + { + public: + option_requires_argument_exception(const std::string& option) + : OptionParseException( + "Option " + LQUOTE + option + RQUOTE + " requires an argument" + ) + { + } + }; + + class option_not_has_argument_exception : public OptionParseException + { + public: + option_not_has_argument_exception + ( + const std::string& option, + const std::string& arg + ) + : OptionParseException( + "Option " + LQUOTE + option + RQUOTE + + " does not take an argument, but argument " + + LQUOTE + arg + RQUOTE + " given" + ) + { + } + }; + + class option_not_present_exception : public OptionParseException + { + public: + option_not_present_exception(const std::string& option) + : OptionParseException("Option " + LQUOTE + option + RQUOTE + " not present") + { + } + }; + + class argument_incorrect_type : public OptionParseException + { + public: + argument_incorrect_type + ( + const std::string& arg + ) + : OptionParseException( + "Argument " + LQUOTE + arg + RQUOTE + " failed to parse" + ) + { + } + }; + + class option_required_exception : public OptionParseException + { + public: + option_required_exception(const std::string& option) + : OptionParseException( + "Option " + LQUOTE + option + RQUOTE + " is required but not present" + ) + { + } + }; + + namespace values + { + namespace + { + std::basic_regex integer_pattern + ("(-)?(0x)?([0-9a-zA-Z]+)|((0x)?0)"); + std::basic_regex truthy_pattern + ("(t|T)(rue)?|1"); + std::basic_regex falsy_pattern + ("(f|F)(alse)?|0"); + } + + namespace detail + { + template + struct SignedCheck; + + template + struct 
SignedCheck + { + template + void + operator()(bool negative, U u, const std::string& text) + { + if (negative) + { + if (u > static_cast((std::numeric_limits::min)())) + { + throw argument_incorrect_type(text); + } + } + else + { + if (u > static_cast((std::numeric_limits::max)())) + { + throw argument_incorrect_type(text); + } + } + } + }; + + template + struct SignedCheck + { + template + void + operator()(bool, U, const std::string&) {} + }; + + template + void + check_signed_range(bool negative, U value, const std::string& text) + { + SignedCheck::is_signed>()(negative, value, text); + } + } + + template + R + checked_negate(T&& t, const std::string&, std::true_type) + { + // if we got to here, then `t` is a positive number that fits into + // `R`. So to avoid MSVC C4146, we first cast it to `R`. + // See https://github.com/jarro2783/cxxopts/issues/62 for more details. + return -static_cast(t-1)-1; + } + + template + T + checked_negate(T&&, const std::string& text, std::false_type) + { + throw argument_incorrect_type(text); + } + + template + void + integer_parser(const std::string& text, T& value) + { + std::smatch match; + std::regex_match(text, match, integer_pattern); + + if (match.length() == 0) + { + throw argument_incorrect_type(text); + } + + if (match.length(4) > 0) + { + value = 0; + return; + } + + using US = typename std::make_unsigned::type; + + constexpr bool is_signed = std::numeric_limits::is_signed; + const bool negative = match.length(1) > 0; + const uint8_t base = match.length(2) > 0 ? 
16 : 10; + + auto value_match = match[3]; + + US result = 0; + + for (auto iter = value_match.first; iter != value_match.second; ++iter) + { + US digit = 0; + + if (*iter >= '0' && *iter <= '9') + { + digit = static_cast(*iter - '0'); + } + else if (base == 16 && *iter >= 'a' && *iter <= 'f') + { + digit = static_cast(*iter - 'a' + 10); + } + else if (base == 16 && *iter >= 'A' && *iter <= 'F') + { + digit = static_cast(*iter - 'A' + 10); + } + else + { + throw argument_incorrect_type(text); + } + + US next = result * base + digit; + if (result > next) + { + throw argument_incorrect_type(text); + } + + result = next; + } + + detail::check_signed_range(negative, result, text); + + if (negative) + { + value = checked_negate(result, + text, + std::integral_constant()); + } + else + { + value = static_cast(result); + } + } + + template + void stringstream_parser(const std::string& text, T& value) + { + std::stringstream in(text); + in >> value; + if (!in) { + throw argument_incorrect_type(text); + } + } + + inline + void + parse_value(const std::string& text, uint8_t& value) + { + integer_parser(text, value); + } + + inline + void + parse_value(const std::string& text, int8_t& value) + { + integer_parser(text, value); + } + + inline + void + parse_value(const std::string& text, uint16_t& value) + { + integer_parser(text, value); + } + + inline + void + parse_value(const std::string& text, int16_t& value) + { + integer_parser(text, value); + } + + inline + void + parse_value(const std::string& text, uint32_t& value) + { + integer_parser(text, value); + } + + inline + void + parse_value(const std::string& text, int32_t& value) + { + integer_parser(text, value); + } + + inline + void + parse_value(const std::string& text, uint64_t& value) + { + integer_parser(text, value); + } + + inline + void + parse_value(const std::string& text, int64_t& value) + { + integer_parser(text, value); + } + + inline + void + parse_value(const std::string& text, bool& value) + { + 
std::smatch result; + std::regex_match(text, result, truthy_pattern); + + if (!result.empty()) + { + value = true; + return; + } + + std::regex_match(text, result, falsy_pattern); + if (!result.empty()) + { + value = false; + return; + } + + throw argument_incorrect_type(text); + } + + inline + void + parse_value(const std::string& text, std::string& value) + { + value = text; + } + + // The fallback parser. It uses the stringstream parser to parse all types + // that have not been overloaded explicitly. It has to be placed in the + // source code before all other more specialized templates. + template + void + parse_value(const std::string& text, T& value) { + stringstream_parser(text, value); + } + + template + void + parse_value(const std::string& text, std::vector& value) + { + std::stringstream in(text); + std::string token; + while(in.eof() == false && std::getline(in, token, CXXOPTS_VECTOR_DELIMITER)) { + T v; + parse_value(token, v); + value.emplace_back(std::move(v)); + } + } + +#ifdef CXXOPTS_HAS_OPTIONAL + template + void + parse_value(const std::string& text, std::optional& value) + { + T result; + parse_value(text, result); + value = std::move(result); + } +#endif + + template + struct type_is_container + { + static constexpr bool value = false; + }; + + template + struct type_is_container> + { + static constexpr bool value = true; + }; + + template + class abstract_value : public Value + { + using Self = abstract_value; + + public: + abstract_value() + : m_result(std::make_shared()) + , m_store(m_result.get()) + { + } + + abstract_value(T* t) + : m_store(t) + { + } + + virtual ~abstract_value() = default; + + abstract_value(const abstract_value& rhs) + { + if (rhs.m_result) + { + m_result = std::make_shared(); + m_store = m_result.get(); + } + else + { + m_store = rhs.m_store; + } + + m_default = rhs.m_default; + m_implicit = rhs.m_implicit; + m_default_value = rhs.m_default_value; + m_implicit_value = rhs.m_implicit_value; + } + + void + parse(const 
std::string& text) const + { + parse_value(text, *m_store); + } + + bool + is_container() const + { + return type_is_container::value; + } + + void + parse() const + { + parse_value(m_default_value, *m_store); + } + + bool + has_default() const + { + return m_default; + } + + bool + has_implicit() const + { + return m_implicit; + } + + std::shared_ptr + default_value(const std::string& value) + { + m_default = true; + m_default_value = value; + return shared_from_this(); + } + + std::shared_ptr + implicit_value(const std::string& value) + { + m_implicit = true; + m_implicit_value = value; + return shared_from_this(); + } + + std::shared_ptr + no_implicit_value() + { + m_implicit = false; + return shared_from_this(); + } + + std::string + get_default_value() const + { + return m_default_value; + } + + std::string + get_implicit_value() const + { + return m_implicit_value; + } + + bool + is_boolean() const + { + return std::is_same::value; + } + + const T& + get() const + { + if (m_store == nullptr) + { + return *m_result; + } + else + { + return *m_store; + } + } + + protected: + std::shared_ptr m_result; + T* m_store; + + bool m_default = false; + bool m_implicit = false; + + std::string m_default_value; + std::string m_implicit_value; + }; + + template + class standard_value : public abstract_value + { + public: + using abstract_value::abstract_value; + + std::shared_ptr + clone() const + { + return std::make_shared>(*this); + } + }; + + template <> + class standard_value : public abstract_value + { + public: + ~standard_value() = default; + + standard_value() + { + set_default_and_implicit(); + } + + standard_value(bool* b) + : abstract_value(b) + { + set_default_and_implicit(); + } + + std::shared_ptr + clone() const + { + return std::make_shared>(*this); + } + + private: + + void + set_default_and_implicit() + { + m_default = true; + m_default_value = "false"; + m_implicit = true; + m_implicit_value = "true"; + } + }; + } + + template + std::shared_ptr + 
value() + { + return std::make_shared>(); + } + + template + std::shared_ptr + value(T& t) + { + return std::make_shared>(&t); + } + + class OptionAdder; + + class OptionDetails + { + public: + OptionDetails + ( + const std::string& short_, + const std::string& long_, + const String& desc, + std::shared_ptr val + ) + : m_short(short_) + , m_long(long_) + , m_desc(desc) + , m_value(val) + , m_count(0) + { + } + + OptionDetails(const OptionDetails& rhs) + : m_desc(rhs.m_desc) + , m_count(rhs.m_count) + { + m_value = rhs.m_value->clone(); + } + + OptionDetails(OptionDetails&& rhs) = default; + + const String& + description() const + { + return m_desc; + } + + const Value& value() const { + return *m_value; + } + + std::shared_ptr + make_storage() const + { + return m_value->clone(); + } + + const std::string& + short_name() const + { + return m_short; + } + + const std::string& + long_name() const + { + return m_long; + } + + private: + std::string m_short; + std::string m_long; + String m_desc; + std::shared_ptr m_value; + int m_count; + }; + + struct HelpOptionDetails + { + std::string s; + std::string l; + String desc; + bool has_default; + std::string default_value; + bool has_implicit; + std::string implicit_value; + std::string arg_help; + bool is_container; + bool is_boolean; + }; + + struct HelpGroupDetails + { + std::string name; + std::string description; + std::vector options; + }; + + class OptionValue + { + public: + void + parse + ( + std::shared_ptr details, + const std::string& text + ) + { + ensure_value(details); + ++m_count; + m_value->parse(text); + } + + void + parse_default(std::shared_ptr details) + { + ensure_value(details); + m_default = true; + m_value->parse(); + } + + size_t + count() const noexcept + { + return m_count; + } + + // TODO: maybe default options should count towards the number of arguments + bool + has_default() const noexcept + { + return m_default; + } + + template + const T& + as() const + { + if (m_value == nullptr) { + 
throw std::domain_error("No value"); + } + +#ifdef CXXOPTS_NO_RTTI + return static_cast&>(*m_value).get(); +#else + return dynamic_cast&>(*m_value).get(); +#endif + } + + private: + void + ensure_value(std::shared_ptr details) + { + if (m_value == nullptr) + { + m_value = details->make_storage(); + } + } + + std::shared_ptr m_value; + size_t m_count = 0; + bool m_default = false; + }; + + class KeyValue + { + public: + KeyValue(std::string key_, std::string value_) + : m_key(std::move(key_)) + , m_value(std::move(value_)) + { + } + + const + std::string& + key() const + { + return m_key; + } + + const + std::string& + value() const + { + return m_value; + } + + template + T + as() const + { + T result; + values::parse_value(m_value, result); + return result; + } + + private: + std::string m_key; + std::string m_value; + }; + + class ParseResult + { + public: + + ParseResult( + const std::shared_ptr< + std::unordered_map> + >, + std::vector, + bool allow_unrecognised, + int&, char**&); + + size_t + count(const std::string& o) const + { + auto iter = m_options->find(o); + if (iter == m_options->end()) + { + return 0; + } + + auto riter = m_results.find(iter->second); + + return riter->second.count(); + } + + const OptionValue& + operator[](const std::string& option) const + { + auto iter = m_options->find(option); + + if (iter == m_options->end()) + { + throw option_not_present_exception(option); + } + + auto riter = m_results.find(iter->second); + + return riter->second; + } + + const std::vector& + arguments() const + { + return m_sequential; + } + + private: + + void + parse(int& argc, char**& argv); + + void + add_to_option(const std::string& option, const std::string& arg); + + bool + consume_positional(std::string a); + + void + parse_option + ( + std::shared_ptr value, + const std::string& name, + const std::string& arg = "" + ); + + void + parse_default(std::shared_ptr details); + + void + checked_parse_arg + ( + int argc, + char* argv[], + int& current, + 
std::shared_ptr value, + const std::string& name + ); + + const std::shared_ptr< + std::unordered_map> + > m_options; + std::vector m_positional; + std::vector::iterator m_next_positional; + std::unordered_set m_positional_set; + std::unordered_map, OptionValue> m_results; + + bool m_allow_unrecognised; + + std::vector m_sequential; + }; + + class Options + { + typedef std::unordered_map> + OptionMap; + public: + + Options(std::string program, std::string help_string = "") + : m_program(std::move(program)) + , m_help_string(toLocalString(std::move(help_string))) + , m_custom_help("[OPTION...]") + , m_positional_help("positional parameters") + , m_show_positional(false) + , m_allow_unrecognised(false) + , m_options(std::make_shared()) + , m_next_positional(m_positional.end()) + { + } + + Options& + positional_help(std::string help_text) + { + m_positional_help = std::move(help_text); + return *this; + } + + Options& + custom_help(std::string help_text) + { + m_custom_help = std::move(help_text); + return *this; + } + + Options& + show_positional_help() + { + m_show_positional = true; + return *this; + } + + Options& + allow_unrecognised_options() + { + m_allow_unrecognised = true; + return *this; + } + + ParseResult + parse(int& argc, char**& argv); + + OptionAdder + add_options(std::string group = ""); + + void + add_option + ( + const std::string& group, + const std::string& s, + const std::string& l, + std::string desc, + std::shared_ptr value, + std::string arg_help + ); + + //parse positional arguments into the given option + void + parse_positional(std::string option); + + void + parse_positional(std::vector options); + + void + parse_positional(std::initializer_list options); + + template + void + parse_positional(Iterator begin, Iterator end) { + parse_positional(std::vector{begin, end}); + } + + std::string + help(const std::vector& groups = {}) const; + + const std::vector + groups() const; + + const HelpGroupDetails& + group_help(const std::string& group) 
const; + + private: + + void + add_one_option + ( + const std::string& option, + std::shared_ptr details + ); + + String + help_one_group(const std::string& group) const; + + void + generate_group_help + ( + String& result, + const std::vector& groups + ) const; + + void + generate_all_groups_help(String& result) const; + + std::string m_program; + String m_help_string; + std::string m_custom_help; + std::string m_positional_help; + bool m_show_positional; + bool m_allow_unrecognised; + + std::shared_ptr m_options; + std::vector m_positional; + std::vector::iterator m_next_positional; + std::unordered_set m_positional_set; + + //mapping from groups to help options + std::map m_help; + }; + + class OptionAdder + { + public: + + OptionAdder(Options& options, std::string group) + : m_options(options), m_group(std::move(group)) + { + } + + OptionAdder& + operator() + ( + const std::string& opts, + const std::string& desc, + std::shared_ptr value + = ::cxxopts::value(), + std::string arg_help = "" + ); + + private: + Options& m_options; + std::string m_group; + }; + + namespace + { + constexpr int OPTION_LONGEST = 30; + constexpr int OPTION_DESC_GAP = 2; + + std::basic_regex option_matcher + ("--([[:alnum:]][-_[:alnum:]]+)(=(.*))?|-([[:alnum:]]+)"); + + std::basic_regex option_specifier + ("(([[:alnum:]]),)?[ ]*([[:alnum:]][-_[:alnum:]]*)?"); + + String + format_option + ( + const HelpOptionDetails& o + ) + { + auto& s = o.s; + auto& l = o.l; + + String result = " "; + + if (s.size() > 0) + { + result += "-" + toLocalString(s) + ","; + } + else + { + result += " "; + } + + if (l.size() > 0) + { + result += " --" + toLocalString(l); + } + + auto arg = o.arg_help.size() > 0 ? 
toLocalString(o.arg_help) : "arg"; + + if (!o.is_boolean) + { + if (o.has_implicit) + { + result += " [=" + arg + "(=" + toLocalString(o.implicit_value) + ")]"; + } + else + { + result += " " + arg; + } + } + + return result; + } + + String + format_description + ( + const HelpOptionDetails& o, + size_t start, + size_t width + ) + { + auto desc = o.desc; + + if (o.has_default && (!o.is_boolean || o.default_value != "false")) + { + desc += toLocalString(" (default: " + o.default_value + ")"); + } + + String result; + + auto current = std::begin(desc); + auto startLine = current; + auto lastSpace = current; + + auto size = size_t{}; + + while (current != std::end(desc)) + { + if (*current == ' ') + { + lastSpace = current; + } + + if (*current == '\n') + { + startLine = current + 1; + lastSpace = startLine; + } + else if (size > width) + { + if (lastSpace == startLine) + { + stringAppend(result, startLine, current + 1); + stringAppend(result, "\n"); + stringAppend(result, start, ' '); + startLine = current + 1; + lastSpace = startLine; + } + else + { + stringAppend(result, startLine, lastSpace); + stringAppend(result, "\n"); + stringAppend(result, start, ' '); + startLine = lastSpace + 1; + lastSpace = startLine; + } + size = 0; + } + else + { + ++size; + } + + ++current; + } + + //append whatever is left + stringAppend(result, startLine, current); + + return result; + } + } + +inline +ParseResult::ParseResult +( + const std::shared_ptr< + std::unordered_map> + > options, + std::vector positional, + bool allow_unrecognised, + int& argc, char**& argv +) +: m_options(options) +, m_positional(std::move(positional)) +, m_next_positional(m_positional.begin()) +, m_allow_unrecognised(allow_unrecognised) +{ + parse(argc, argv); +} + +inline +OptionAdder +Options::add_options(std::string group) +{ + return OptionAdder(*this, std::move(group)); +} + +inline +OptionAdder& +OptionAdder::operator() +( + const std::string& opts, + const std::string& desc, + std::shared_ptr value, 
+ std::string arg_help +) +{ + std::match_results result; + std::regex_match(opts.c_str(), result, option_specifier); + + if (result.empty()) + { + throw invalid_option_format_error(opts); + } + + const auto& short_match = result[2]; + const auto& long_match = result[3]; + + if (!short_match.length() && !long_match.length()) + { + throw invalid_option_format_error(opts); + } else if (long_match.length() == 1 && short_match.length()) + { + throw invalid_option_format_error(opts); + } + + auto option_names = [] + ( + const std::sub_match& short_, + const std::sub_match& long_ + ) + { + if (long_.length() == 1) + { + return std::make_tuple(long_.str(), short_.str()); + } + else + { + return std::make_tuple(short_.str(), long_.str()); + } + }(short_match, long_match); + + m_options.add_option + ( + m_group, + std::get<0>(option_names), + std::get<1>(option_names), + desc, + value, + std::move(arg_help) + ); + + return *this; +} + +inline +void +ParseResult::parse_default(std::shared_ptr details) +{ + m_results[details].parse_default(details); +} + +inline +void +ParseResult::parse_option +( + std::shared_ptr value, + const std::string& /*name*/, + const std::string& arg +) +{ + auto& result = m_results[value]; + result.parse(value, arg); + + m_sequential.emplace_back(value->long_name(), arg); +} + +inline +void +ParseResult::checked_parse_arg +( + int argc, + char* argv[], + int& current, + std::shared_ptr value, + const std::string& name +) +{ + if (current + 1 >= argc) + { + if (value->value().has_implicit()) + { + parse_option(value, name, value->value().get_implicit_value()); + } + else + { + throw missing_argument_exception(name); + } + } + else + { + if (value->value().has_implicit()) + { + parse_option(value, name, value->value().get_implicit_value()); + } + else + { + parse_option(value, name, argv[current + 1]); + ++current; + } + } +} + +inline +void +ParseResult::add_to_option(const std::string& option, const std::string& arg) +{ + auto iter = 
m_options->find(option); + + if (iter == m_options->end()) + { + throw option_not_exists_exception(option); + } + + parse_option(iter->second, option, arg); +} + +inline +bool +ParseResult::consume_positional(std::string a) +{ + while (m_next_positional != m_positional.end()) + { + auto iter = m_options->find(*m_next_positional); + if (iter != m_options->end()) + { + auto& result = m_results[iter->second]; + if (!iter->second->value().is_container()) + { + if (result.count() == 0) + { + add_to_option(*m_next_positional, a); + ++m_next_positional; + return true; + } + else + { + ++m_next_positional; + continue; + } + } + else + { + add_to_option(*m_next_positional, a); + return true; + } + } + else + { + throw option_not_exists_exception(*m_next_positional); + } + } + + return false; +} + +inline +void +Options::parse_positional(std::string option) +{ + parse_positional(std::vector{std::move(option)}); +} + +inline +void +Options::parse_positional(std::vector options) +{ + m_positional = std::move(options); + m_next_positional = m_positional.begin(); + + m_positional_set.insert(m_positional.begin(), m_positional.end()); +} + +inline +void +Options::parse_positional(std::initializer_list options) +{ + parse_positional(std::vector(std::move(options))); +} + +inline +ParseResult +Options::parse(int& argc, char**& argv) +{ + ParseResult result(m_options, m_positional, m_allow_unrecognised, argc, argv); + return result; +} + +inline +void +ParseResult::parse(int& argc, char**& argv) +{ + int current = 1; + + int nextKeep = 1; + + bool consume_remaining = false; + + while (current != argc) + { + if (strcmp(argv[current], "--") == 0) + { + consume_remaining = true; + ++current; + break; + } + + std::match_results result; + std::regex_match(argv[current], result, option_matcher); + + if (result.empty()) + { + //not a flag + + // but if it starts with a `-`, then it's an error + if (argv[current][0] == '-' && argv[current][1] != '\0') { + if (!m_allow_unrecognised) { + throw 
option_syntax_exception(argv[current]); + } + } + + //if true is returned here then it was consumed, otherwise it is + //ignored + if (consume_positional(argv[current])) + { + } + else + { + argv[nextKeep] = argv[current]; + ++nextKeep; + } + //if we return from here then it was parsed successfully, so continue + } + else + { + //short or long option? + if (result[4].length() != 0) + { + const std::string& s = result[4]; + + for (std::size_t i = 0; i != s.size(); ++i) + { + std::string name(1, s[i]); + auto iter = m_options->find(name); + + if (iter == m_options->end()) + { + if (m_allow_unrecognised) + { + continue; + } + else + { + //error + throw option_not_exists_exception(name); + } + } + + auto value = iter->second; + + if (i + 1 == s.size()) + { + //it must be the last argument + checked_parse_arg(argc, argv, current, value, name); + } + else if (value->value().has_implicit()) + { + parse_option(value, name, value->value().get_implicit_value()); + } + else + { + //error + throw option_requires_argument_exception(name); + } + } + } + else if (result[1].length() != 0) + { + const std::string& name = result[1]; + + auto iter = m_options->find(name); + + if (iter == m_options->end()) + { + if (m_allow_unrecognised) + { + // keep unrecognised options in argument list, skip to next argument + argv[nextKeep] = argv[current]; + ++nextKeep; + ++current; + continue; + } + else + { + //error + throw option_not_exists_exception(name); + } + } + + auto opt = iter->second; + + //equals provided for long option? 
+ if (result[2].length() != 0) + { + //parse the option given + + parse_option(opt, name, result[3]); + } + else + { + //parse the next argument + checked_parse_arg(argc, argv, current, opt, name); + } + } + + } + + ++current; + } + + for (auto& opt : *m_options) + { + auto& detail = opt.second; + auto& value = detail->value(); + + auto& store = m_results[detail]; + + if(value.has_default() && !store.count() && !store.has_default()){ + parse_default(detail); + } + } + + if (consume_remaining) + { + while (current < argc) + { + if (!consume_positional(argv[current])) { + break; + } + ++current; + } + + //adjust argv for any that couldn't be swallowed + while (current != argc) { + argv[nextKeep] = argv[current]; + ++nextKeep; + ++current; + } + } + + argc = nextKeep; + +} + +inline +void +Options::add_option +( + const std::string& group, + const std::string& s, + const std::string& l, + std::string desc, + std::shared_ptr value, + std::string arg_help +) +{ + auto stringDesc = toLocalString(std::move(desc)); + auto option = std::make_shared(s, l, stringDesc, value); + + if (s.size() > 0) + { + add_one_option(s, option); + } + + if (l.size() > 0) + { + add_one_option(l, option); + } + + //add the help details + auto& options = m_help[group]; + + options.options.emplace_back(HelpOptionDetails{s, l, stringDesc, + value->has_default(), value->get_default_value(), + value->has_implicit(), value->get_implicit_value(), + std::move(arg_help), + value->is_container(), + value->is_boolean()}); +} + +inline +void +Options::add_one_option +( + const std::string& option, + std::shared_ptr details +) +{ + auto in = m_options->emplace(option, details); + + if (!in.second) + { + throw option_exists_error(option); + } +} + +inline +String +Options::help_one_group(const std::string& g) const +{ + typedef std::vector> OptionHelp; + + auto group = m_help.find(g); + if (group == m_help.end()) + { + return ""; + } + + OptionHelp format; + + size_t longest = 0; + + String result; + + if 
(!g.empty()) + { + result += toLocalString(" " + g + " options:\n"); + } + + for (const auto& o : group->second.options) + { + if (m_positional_set.find(o.l) != m_positional_set.end() && + !m_show_positional) + { + continue; + } + + auto s = format_option(o); + longest = (std::max)(longest, stringLength(s)); + format.push_back(std::make_pair(s, String())); + } + + longest = (std::min)(longest, static_cast(OPTION_LONGEST)); + + //widest allowed description + auto allowed = size_t{76} - longest - OPTION_DESC_GAP; + + auto fiter = format.begin(); + for (const auto& o : group->second.options) + { + if (m_positional_set.find(o.l) != m_positional_set.end() && + !m_show_positional) + { + continue; + } + + auto d = format_description(o, longest + OPTION_DESC_GAP, allowed); + + result += fiter->first; + if (stringLength(fiter->first) > longest) + { + result += '\n'; + result += toLocalString(std::string(longest + OPTION_DESC_GAP, ' ')); + } + else + { + result += toLocalString(std::string(longest + OPTION_DESC_GAP - + stringLength(fiter->first), + ' ')); + } + result += d; + result += '\n'; + + ++fiter; + } + + return result; +} + +inline +void +Options::generate_group_help +( + String& result, + const std::vector& print_groups +) const +{ + for (size_t i = 0; i != print_groups.size(); ++i) + { + const String& group_help_text = help_one_group(print_groups[i]); + if (empty(group_help_text)) + { + continue; + } + result += group_help_text; + if (i < print_groups.size() - 1) + { + result += '\n'; + } + } +} + +inline +void +Options::generate_all_groups_help(String& result) const +{ + std::vector all_groups; + all_groups.reserve(m_help.size()); + + for (auto& group : m_help) + { + all_groups.push_back(group.first); + } + + generate_group_help(result, all_groups); +} + +inline +std::string +Options::help(const std::vector& help_groups) const +{ + String result = m_help_string + "\nUsage:\n " + + toLocalString(m_program) + " " + toLocalString(m_custom_help); + + if 
(m_positional.size() > 0 && m_positional_help.size() > 0) { + result += " " + toLocalString(m_positional_help); + } + + result += "\n\n"; + + if (help_groups.size() == 0) + { + generate_all_groups_help(result); + } + else + { + generate_group_help(result, help_groups); + } + + return toUTF8String(result); +} + +inline +const std::vector +Options::groups() const +{ + std::vector g; + + std::transform( + m_help.begin(), + m_help.end(), + std::back_inserter(g), + [] (const std::map::value_type& pair) + { + return pair.first; + } + ); + + return g; +} + +inline +const HelpGroupDetails& +Options::group_help(const std::string& group) const +{ + return m_help.at(group); +} + +} + +#endif //CXXOPTS_HPP_INCLUDED diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/json.hpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/json.hpp new file mode 100644 index 0000000000000000000000000000000000000000..f7252a6b5510a231965ddb5e68808191e980a921 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/ext/json.hpp @@ -0,0 +1,25447 @@ +/* + __ _____ _____ _____ + __| | __| | | | JSON for Modern C++ +| | |__ | | | | | | version 3.9.1 +|_____|_____|_____|_|___| https://github.com/nlohmann/json + +Licensed under the MIT License . +SPDX-License-Identifier: MIT +Copyright (c) 2013-2019 Niels Lohmann . + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +#ifndef INCLUDE_NLOHMANN_JSON_HPP_ +#define INCLUDE_NLOHMANN_JSON_HPP_ + +#define NLOHMANN_JSON_VERSION_MAJOR 3 +#define NLOHMANN_JSON_VERSION_MINOR 9 +#define NLOHMANN_JSON_VERSION_PATCH 1 + +#include // all_of, find, for_each +#include // nullptr_t, ptrdiff_t, size_t +#include // hash, less +#include // initializer_list +#include // istream, ostream +#include // random_access_iterator_tag +#include // unique_ptr +#include // accumulate +#include // string, stoi, to_string +#include // declval, forward, move, pair, swap +#include // vector + +// #include + + +#include + +// #include + + +#include // transform +#include // array +#include // forward_list +#include // inserter, front_inserter, end +#include // map +#include // string +#include // tuple, make_tuple +#include // is_arithmetic, is_same, is_enum, underlying_type, is_convertible +#include // unordered_map +#include // pair, declval +#include // valarray + +// #include + + +#include // exception +#include // runtime_error +#include // to_string + +// #include + + +#include // size_t + +namespace nlohmann +{ +namespace detail +{ +/// struct to capture the start position of the current token +struct position_t +{ + /// the total number of characters read + std::size_t chars_read_total = 0; + /// the number of characters read in the current line + std::size_t chars_read_current_line = 0; + /// the number of lines read + std::size_t lines_read = 0; + + /// conversion to size_t to preserve SAX interface + constexpr operator size_t() const + { 
+ return chars_read_total; + } +}; + +} // namespace detail +} // namespace nlohmann + +// #include + + +#include // pair +// #include +/* Hedley - https://nemequ.github.io/hedley + * Created by Evan Nemerson + * + * To the extent possible under law, the author(s) have dedicated all + * copyright and related and neighboring rights to this software to + * the public domain worldwide. This software is distributed without + * any warranty. + * + * For details, see . + * SPDX-License-Identifier: CC0-1.0 + */ + +#if !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < 13) +#if defined(JSON_HEDLEY_VERSION) + #undef JSON_HEDLEY_VERSION +#endif +#define JSON_HEDLEY_VERSION 13 + +#if defined(JSON_HEDLEY_STRINGIFY_EX) + #undef JSON_HEDLEY_STRINGIFY_EX +#endif +#define JSON_HEDLEY_STRINGIFY_EX(x) #x + +#if defined(JSON_HEDLEY_STRINGIFY) + #undef JSON_HEDLEY_STRINGIFY +#endif +#define JSON_HEDLEY_STRINGIFY(x) JSON_HEDLEY_STRINGIFY_EX(x) + +#if defined(JSON_HEDLEY_CONCAT_EX) + #undef JSON_HEDLEY_CONCAT_EX +#endif +#define JSON_HEDLEY_CONCAT_EX(a,b) a##b + +#if defined(JSON_HEDLEY_CONCAT) + #undef JSON_HEDLEY_CONCAT +#endif +#define JSON_HEDLEY_CONCAT(a,b) JSON_HEDLEY_CONCAT_EX(a,b) + +#if defined(JSON_HEDLEY_CONCAT3_EX) + #undef JSON_HEDLEY_CONCAT3_EX +#endif +#define JSON_HEDLEY_CONCAT3_EX(a,b,c) a##b##c + +#if defined(JSON_HEDLEY_CONCAT3) + #undef JSON_HEDLEY_CONCAT3 +#endif +#define JSON_HEDLEY_CONCAT3(a,b,c) JSON_HEDLEY_CONCAT3_EX(a,b,c) + +#if defined(JSON_HEDLEY_VERSION_ENCODE) + #undef JSON_HEDLEY_VERSION_ENCODE +#endif +#define JSON_HEDLEY_VERSION_ENCODE(major,minor,revision) (((major) * 1000000) + ((minor) * 1000) + (revision)) + +#if defined(JSON_HEDLEY_VERSION_DECODE_MAJOR) + #undef JSON_HEDLEY_VERSION_DECODE_MAJOR +#endif +#define JSON_HEDLEY_VERSION_DECODE_MAJOR(version) ((version) / 1000000) + +#if defined(JSON_HEDLEY_VERSION_DECODE_MINOR) + #undef JSON_HEDLEY_VERSION_DECODE_MINOR +#endif +#define JSON_HEDLEY_VERSION_DECODE_MINOR(version) (((version) % 1000000) 
/ 1000) + +#if defined(JSON_HEDLEY_VERSION_DECODE_REVISION) + #undef JSON_HEDLEY_VERSION_DECODE_REVISION +#endif +#define JSON_HEDLEY_VERSION_DECODE_REVISION(version) ((version) % 1000) + +#if defined(JSON_HEDLEY_GNUC_VERSION) + #undef JSON_HEDLEY_GNUC_VERSION +#endif +#if defined(__GNUC__) && defined(__GNUC_PATCHLEVEL__) + #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, __GNUC_PATCHLEVEL__) +#elif defined(__GNUC__) + #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, 0) +#endif + +#if defined(JSON_HEDLEY_GNUC_VERSION_CHECK) + #undef JSON_HEDLEY_GNUC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_GNUC_VERSION) + #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GNUC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_MSVC_VERSION) + #undef JSON_HEDLEY_MSVC_VERSION +#endif +#if defined(_MSC_FULL_VER) && (_MSC_FULL_VER >= 140000000) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 10000000, (_MSC_FULL_VER % 10000000) / 100000, (_MSC_FULL_VER % 100000) / 100) +#elif defined(_MSC_FULL_VER) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 1000000, (_MSC_FULL_VER % 1000000) / 10000, (_MSC_FULL_VER % 10000) / 10) +#elif defined(_MSC_VER) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_VER / 100, _MSC_VER % 100, 0) +#endif + +#if defined(JSON_HEDLEY_MSVC_VERSION_CHECK) + #undef JSON_HEDLEY_MSVC_VERSION_CHECK +#endif +#if !defined(_MSC_VER) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (0) +#elif defined(_MSC_VER) && (_MSC_VER >= 1400) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_FULL_VER >= ((major * 10000000) + (minor * 100000) + (patch))) +#elif defined(_MSC_VER) && (_MSC_VER >= 1200) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) 
(_MSC_FULL_VER >= ((major * 1000000) + (minor * 10000) + (patch))) +#else + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_VER >= ((major * 100) + (minor))) +#endif + +#if defined(JSON_HEDLEY_INTEL_VERSION) + #undef JSON_HEDLEY_INTEL_VERSION +#endif +#if defined(__INTEL_COMPILER) && defined(__INTEL_COMPILER_UPDATE) + #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, __INTEL_COMPILER_UPDATE) +#elif defined(__INTEL_COMPILER) + #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, 0) +#endif + +#if defined(JSON_HEDLEY_INTEL_VERSION_CHECK) + #undef JSON_HEDLEY_INTEL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_INTEL_VERSION) + #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_INTEL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_PGI_VERSION) + #undef JSON_HEDLEY_PGI_VERSION +#endif +#if defined(__PGI) && defined(__PGIC__) && defined(__PGIC_MINOR__) && defined(__PGIC_PATCHLEVEL__) + #define JSON_HEDLEY_PGI_VERSION JSON_HEDLEY_VERSION_ENCODE(__PGIC__, __PGIC_MINOR__, __PGIC_PATCHLEVEL__) +#endif + +#if defined(JSON_HEDLEY_PGI_VERSION_CHECK) + #undef JSON_HEDLEY_PGI_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_PGI_VERSION) + #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PGI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_SUNPRO_VERSION) + #undef JSON_HEDLEY_SUNPRO_VERSION +#endif +#if defined(__SUNPRO_C) && (__SUNPRO_C > 0x1000) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_C >> 16) & 0xf) * 10) + ((__SUNPRO_C >> 12) & 0xf), (((__SUNPRO_C >> 8) & 0xf) * 10) + ((__SUNPRO_C >> 4) & 0xf), (__SUNPRO_C & 0xf) * 10) +#elif 
defined(__SUNPRO_C) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_C >> 8) & 0xf, (__SUNPRO_C >> 4) & 0xf, (__SUNPRO_C) & 0xf) +#elif defined(__SUNPRO_CC) && (__SUNPRO_CC > 0x1000) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_CC >> 16) & 0xf) * 10) + ((__SUNPRO_CC >> 12) & 0xf), (((__SUNPRO_CC >> 8) & 0xf) * 10) + ((__SUNPRO_CC >> 4) & 0xf), (__SUNPRO_CC & 0xf) * 10) +#elif defined(__SUNPRO_CC) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_CC >> 8) & 0xf, (__SUNPRO_CC >> 4) & 0xf, (__SUNPRO_CC) & 0xf) +#endif + +#if defined(JSON_HEDLEY_SUNPRO_VERSION_CHECK) + #undef JSON_HEDLEY_SUNPRO_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_SUNPRO_VERSION) + #define JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_SUNPRO_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION) + #undef JSON_HEDLEY_EMSCRIPTEN_VERSION +#endif +#if defined(__EMSCRIPTEN__) + #define JSON_HEDLEY_EMSCRIPTEN_VERSION JSON_HEDLEY_VERSION_ENCODE(__EMSCRIPTEN_major__, __EMSCRIPTEN_minor__, __EMSCRIPTEN_tiny__) +#endif + +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK) + #undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION) + #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_EMSCRIPTEN_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_ARM_VERSION) + #undef JSON_HEDLEY_ARM_VERSION +#endif +#if defined(__CC_ARM) && defined(__ARMCOMPILER_VERSION) + #define JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCOMPILER_VERSION / 1000000, (__ARMCOMPILER_VERSION % 1000000) / 10000, (__ARMCOMPILER_VERSION % 10000) / 100) +#elif defined(__CC_ARM) && defined(__ARMCC_VERSION) + #define 
JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCC_VERSION / 1000000, (__ARMCC_VERSION % 1000000) / 10000, (__ARMCC_VERSION % 10000) / 100) +#endif + +#if defined(JSON_HEDLEY_ARM_VERSION_CHECK) + #undef JSON_HEDLEY_ARM_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_ARM_VERSION) + #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_ARM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_IBM_VERSION) + #undef JSON_HEDLEY_IBM_VERSION +#endif +#if defined(__ibmxl__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ibmxl_version__, __ibmxl_release__, __ibmxl_modification__) +#elif defined(__xlC__) && defined(__xlC_ver__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, (__xlC_ver__ >> 8) & 0xff) +#elif defined(__xlC__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, 0) +#endif + +#if defined(JSON_HEDLEY_IBM_VERSION_CHECK) + #undef JSON_HEDLEY_IBM_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_IBM_VERSION) + #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IBM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_VERSION) + #undef JSON_HEDLEY_TI_VERSION +#endif +#if \ + defined(__TI_COMPILER_VERSION__) && \ + ( \ + defined(__TMS470__) || defined(__TI_ARM__) || \ + defined(__MSP430__) || \ + defined(__TMS320C2000__) \ + ) +#if (__TI_COMPILER_VERSION__ >= 16000000) + #define JSON_HEDLEY_TI_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif +#endif + +#if defined(JSON_HEDLEY_TI_VERSION_CHECK) + #undef JSON_HEDLEY_TI_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_VERSION) + #define 
JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL2000_VERSION) + #undef JSON_HEDLEY_TI_CL2000_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__TMS320C2000__) + #define JSON_HEDLEY_TI_CL2000_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL2000_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL2000_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL2000_VERSION) + #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL2000_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL430_VERSION) + #undef JSON_HEDLEY_TI_CL430_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__MSP430__) + #define JSON_HEDLEY_TI_CL430_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL430_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL430_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL430_VERSION) + #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL430_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION) + #undef JSON_HEDLEY_TI_ARMCL_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && (defined(__TMS470__) || defined(__TI_ARM__)) + #define JSON_HEDLEY_TI_ARMCL_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) 
+#endif + +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION_CHECK) + #undef JSON_HEDLEY_TI_ARMCL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION) + #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_ARMCL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL6X_VERSION) + #undef JSON_HEDLEY_TI_CL6X_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__TMS320C6X__) + #define JSON_HEDLEY_TI_CL6X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL6X_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL6X_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL6X_VERSION) + #define JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL6X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL7X_VERSION) + #undef JSON_HEDLEY_TI_CL7X_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__C7000__) + #define JSON_HEDLEY_TI_CL7X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL7X_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL7X_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL7X_VERSION) + #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL7X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION) + #undef JSON_HEDLEY_TI_CLPRU_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__PRU__) + #define JSON_HEDLEY_TI_CLPRU_VERSION 
JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CLPRU_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION) + #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CLPRU_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_CRAY_VERSION) + #undef JSON_HEDLEY_CRAY_VERSION +#endif +#if defined(_CRAYC) + #if defined(_RELEASE_PATCHLEVEL) + #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, _RELEASE_PATCHLEVEL) + #else + #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, 0) + #endif +#endif + +#if defined(JSON_HEDLEY_CRAY_VERSION_CHECK) + #undef JSON_HEDLEY_CRAY_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_CRAY_VERSION) + #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_CRAY_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_IAR_VERSION) + #undef JSON_HEDLEY_IAR_VERSION +#endif +#if defined(__IAR_SYSTEMS_ICC__) + #if __VER__ > 1000 + #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE((__VER__ / 1000000), ((__VER__ / 1000) % 1000), (__VER__ % 1000)) + #else + #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE(VER / 100, __VER__ % 100, 0) + #endif +#endif + +#if defined(JSON_HEDLEY_IAR_VERSION_CHECK) + #undef JSON_HEDLEY_IAR_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_IAR_VERSION) + #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IAR_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if 
defined(JSON_HEDLEY_TINYC_VERSION) + #undef JSON_HEDLEY_TINYC_VERSION +#endif +#if defined(__TINYC__) + #define JSON_HEDLEY_TINYC_VERSION JSON_HEDLEY_VERSION_ENCODE(__TINYC__ / 1000, (__TINYC__ / 100) % 10, __TINYC__ % 100) +#endif + +#if defined(JSON_HEDLEY_TINYC_VERSION_CHECK) + #undef JSON_HEDLEY_TINYC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TINYC_VERSION) + #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TINYC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_DMC_VERSION) + #undef JSON_HEDLEY_DMC_VERSION +#endif +#if defined(__DMC__) + #define JSON_HEDLEY_DMC_VERSION JSON_HEDLEY_VERSION_ENCODE(__DMC__ >> 8, (__DMC__ >> 4) & 0xf, __DMC__ & 0xf) +#endif + +#if defined(JSON_HEDLEY_DMC_VERSION_CHECK) + #undef JSON_HEDLEY_DMC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_DMC_VERSION) + #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_DMC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_COMPCERT_VERSION) + #undef JSON_HEDLEY_COMPCERT_VERSION +#endif +#if defined(__COMPCERT_VERSION__) + #define JSON_HEDLEY_COMPCERT_VERSION JSON_HEDLEY_VERSION_ENCODE(__COMPCERT_VERSION__ / 10000, (__COMPCERT_VERSION__ / 100) % 100, __COMPCERT_VERSION__ % 100) +#endif + +#if defined(JSON_HEDLEY_COMPCERT_VERSION_CHECK) + #undef JSON_HEDLEY_COMPCERT_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_COMPCERT_VERSION) + #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_COMPCERT_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_PELLES_VERSION) + #undef JSON_HEDLEY_PELLES_VERSION +#endif +#if defined(__POCC__) + #define JSON_HEDLEY_PELLES_VERSION 
JSON_HEDLEY_VERSION_ENCODE(__POCC__ / 100, __POCC__ % 100, 0) +#endif + +#if defined(JSON_HEDLEY_PELLES_VERSION_CHECK) + #undef JSON_HEDLEY_PELLES_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_PELLES_VERSION) + #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PELLES_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_GCC_VERSION) + #undef JSON_HEDLEY_GCC_VERSION +#endif +#if \ + defined(JSON_HEDLEY_GNUC_VERSION) && \ + !defined(__clang__) && \ + !defined(JSON_HEDLEY_INTEL_VERSION) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_ARM_VERSION) && \ + !defined(JSON_HEDLEY_TI_VERSION) && \ + !defined(JSON_HEDLEY_TI_ARMCL_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL430_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL2000_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL6X_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL7X_VERSION) && \ + !defined(JSON_HEDLEY_TI_CLPRU_VERSION) && \ + !defined(__COMPCERT__) + #define JSON_HEDLEY_GCC_VERSION JSON_HEDLEY_GNUC_VERSION +#endif + +#if defined(JSON_HEDLEY_GCC_VERSION_CHECK) + #undef JSON_HEDLEY_GCC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_GCC_VERSION) + #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GCC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_ATTRIBUTE +#endif +#if defined(__has_attribute) + #define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) __has_attribute(attribute) +#else + #define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_ATTRIBUTE +#endif +#if defined(__has_attribute) + #define JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) __has_attribute(attribute) +#else + #define 
JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_ATTRIBUTE +#endif +#if defined(__has_attribute) + #define JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) __has_attribute(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE +#endif +#if \ + defined(__has_cpp_attribute) && \ + defined(__cplusplus) && \ + (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS) + #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS +#endif +#if !defined(__cplusplus) || !defined(__has_cpp_attribute) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) +#elif \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_IAR_VERSION) && \ + (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) && \ + (!defined(JSON_HEDLEY_MSVC_VERSION) || JSON_HEDLEY_MSVC_VERSION_CHECK(19,20,0)) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(ns::attribute) +#else + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE +#endif +#if defined(__has_cpp_attribute) && defined(__cplusplus) + #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE) + #undef 
JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE +#endif +#if defined(__has_cpp_attribute) && defined(__cplusplus) + #define JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_BUILTIN) + #undef JSON_HEDLEY_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_HAS_BUILTIN(builtin) __has_builtin(builtin) +#else + #define JSON_HEDLEY_HAS_BUILTIN(builtin) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_BUILTIN) + #undef JSON_HEDLEY_GNUC_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin) +#else + #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_BUILTIN) + #undef JSON_HEDLEY_GCC_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin) +#else + #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_FEATURE) + #undef JSON_HEDLEY_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_HAS_FEATURE(feature) __has_feature(feature) +#else + #define JSON_HEDLEY_HAS_FEATURE(feature) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_FEATURE) + #undef JSON_HEDLEY_GNUC_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature) +#else + #define JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_FEATURE) + #undef JSON_HEDLEY_GCC_HAS_FEATURE +#endif +#if defined(__has_feature) + #define 
JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature) +#else + #define JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_EXTENSION) + #undef JSON_HEDLEY_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_HAS_EXTENSION(extension) __has_extension(extension) +#else + #define JSON_HEDLEY_HAS_EXTENSION(extension) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_EXTENSION) + #undef JSON_HEDLEY_GNUC_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension) +#else + #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_EXTENSION) + #undef JSON_HEDLEY_GCC_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension) +#else + #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE 
+#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_WARNING) + #undef JSON_HEDLEY_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_HAS_WARNING(warning) __has_warning(warning) +#else + #define JSON_HEDLEY_HAS_WARNING(warning) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_WARNING) + #undef JSON_HEDLEY_GNUC_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning) +#else + #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_WARNING) + #undef JSON_HEDLEY_GCC_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning) +#else + #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +/* JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ is for + HEDLEY INTERNAL USE ONLY. API subject to change without notice. 
*/ +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ +#endif +#if defined(__cplusplus) +# if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat") +# if JSON_HEDLEY_HAS_WARNING("-Wc++17-extensions") +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + _Pragma("clang diagnostic ignored \"-Wc++17-extensions\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# endif +# endif +#endif +#if !defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(x) x +#endif + +#if defined(JSON_HEDLEY_CONST_CAST) + #undef JSON_HEDLEY_CONST_CAST +#endif +#if defined(__cplusplus) +# define JSON_HEDLEY_CONST_CAST(T, expr) (const_cast(expr)) +#elif \ + JSON_HEDLEY_HAS_WARNING("-Wcast-qual") || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_CONST_CAST(T, expr) (__extension__ ({ \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL \ + ((T) (expr)); \ + JSON_HEDLEY_DIAGNOSTIC_POP \ + })) +#else +# define JSON_HEDLEY_CONST_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_REINTERPRET_CAST) + #undef JSON_HEDLEY_REINTERPRET_CAST +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) (reinterpret_cast(expr)) +#else + #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_STATIC_CAST) + #undef JSON_HEDLEY_STATIC_CAST +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_STATIC_CAST(T, expr) (static_cast(expr)) +#else + #define JSON_HEDLEY_STATIC_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_CPP_CAST) 
+ #undef JSON_HEDLEY_CPP_CAST +#endif +#if defined(__cplusplus) +# if JSON_HEDLEY_HAS_WARNING("-Wold-style-cast") +# define JSON_HEDLEY_CPP_CAST(T, expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wold-style-cast\"") \ + ((T) (expr)) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# elif JSON_HEDLEY_IAR_VERSION_CHECK(8,3,0) +# define JSON_HEDLEY_CPP_CAST(T, expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("diag_suppress=Pe137") \ + JSON_HEDLEY_DIAGNOSTIC_POP \ +# else +# define JSON_HEDLEY_CPP_CAST(T, expr) ((T) (expr)) +# endif +#else +# define JSON_HEDLEY_CPP_CAST(T, expr) (expr) +#endif + +#if \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \ + defined(__clang__) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,17) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(8,0,0) || \ + (JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) && defined(__C99_PRAGMA_OPERATOR)) + #define JSON_HEDLEY_PRAGMA(value) _Pragma(#value) +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_PRAGMA(value) __pragma(value) +#else + #define JSON_HEDLEY_PRAGMA(value) +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_PUSH) + #undef JSON_HEDLEY_DIAGNOSTIC_PUSH +#endif +#if defined(JSON_HEDLEY_DIAGNOSTIC_POP) + #undef JSON_HEDLEY_DIAGNOSTIC_POP +#endif +#if defined(__clang__) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("clang diagnostic push") + #define JSON_HEDLEY_DIAGNOSTIC_POP 
_Pragma("clang diagnostic pop") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("GCC diagnostic push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("GCC diagnostic pop") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH __pragma(warning(push)) + #define JSON_HEDLEY_DIAGNOSTIC_POP __pragma(warning(pop)) +#elif JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("pop") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,4,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("diag_push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("diag_pop") +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)") +#else + #define JSON_HEDLEY_DIAGNOSTIC_PUSH + #define JSON_HEDLEY_DIAGNOSTIC_POP +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wdeprecated-declarations") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warning(disable:1478 1786)") +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1444") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) + #define 
JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED __pragma(warning(disable:4996)) +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1291,1718") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && !defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,E_DEPRECATED_ATT,E_DEPRECATED_ATT_MESS)") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,symdeprecated,symdeprecated2)") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress=Pe1444,Pe1215") +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warn(disable:2241)") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("clang 
diagnostic ignored \"-Wunknown-pragmas\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("warning(disable:161)") +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 1675") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("GCC diagnostic ignored \"-Wunknown-pragmas\"") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS __pragma(warning(disable:4068)) +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(16,9,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163") +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress=Pe161") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-attributes") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("clang diagnostic ignored \"-Wunknown-attributes\"") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("warning(disable:1292)") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:5030)) 
+#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("error_messages(off,attrskipunsup)") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1173") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress=Pe1097") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wcast-qual") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("clang diagnostic ignored \"-Wcast-qual\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("warning(disable:2203 2331)") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("GCC diagnostic ignored \"-Wcast-qual\"") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#endif + +#if defined(JSON_HEDLEY_DEPRECATED) + #undef JSON_HEDLEY_DEPRECATED +#endif +#if defined(JSON_HEDLEY_DEPRECATED_FOR) + #undef JSON_HEDLEY_DEPRECATED_FOR +#endif +#if JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) + #define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated("Since " # since)) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated("Since " #since "; use " #replacement)) +#elif defined(__cplusplus) && (__cplusplus >= 201402L) + #define JSON_HEDLEY_DEPRECATED(since) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since)]]) + #define 
JSON_HEDLEY_DEPRECATED_FOR(since, replacement) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since "; use " #replacement)]]) +#elif \ + JSON_HEDLEY_HAS_EXTENSION(attribute_deprecated_with_message) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0) + #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__("Since " #since))) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__("Since " #since "; use " #replacement))) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(deprecated) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__)) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + 
JSON_HEDLEY_PELLES_VERSION_CHECK(6,50,0) + #define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated) +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DEPRECATED(since) _Pragma("deprecated") + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) _Pragma("deprecated") +#else + #define JSON_HEDLEY_DEPRECATED(since) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) +#endif + +#if defined(JSON_HEDLEY_UNAVAILABLE) + #undef JSON_HEDLEY_UNAVAILABLE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(warning) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_UNAVAILABLE(available_since) __attribute__((__warning__("Not available until " #available_since))) +#else + #define JSON_HEDLEY_UNAVAILABLE(available_since) +#endif + +#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT) + #undef JSON_HEDLEY_WARN_UNUSED_RESULT +#endif +#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT_MSG) + #undef JSON_HEDLEY_WARN_UNUSED_RESULT_MSG +#endif +#if (JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard) >= 201907L) + #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard(msg)]]) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard) + #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(warn_unused_result) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + 
(JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_WARN_UNUSED_RESULT __attribute__((__warn_unused_result__)) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) __attribute__((__warn_unused_result__)) +#elif defined(_Check_return_) /* SAL */ + #define JSON_HEDLEY_WARN_UNUSED_RESULT _Check_return_ + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) _Check_return_ +#else + #define JSON_HEDLEY_WARN_UNUSED_RESULT + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) +#endif + +#if defined(JSON_HEDLEY_SENTINEL) + #undef JSON_HEDLEY_SENTINEL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(sentinel) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) + #define JSON_HEDLEY_SENTINEL(position) __attribute__((__sentinel__(position))) +#else + #define JSON_HEDLEY_SENTINEL(position) +#endif + +#if defined(JSON_HEDLEY_NO_RETURN) + #undef JSON_HEDLEY_NO_RETURN +#endif +#if JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_NO_RETURN __noreturn +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_NO_RETURN __attribute__((__noreturn__)) +#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L + #define JSON_HEDLEY_NO_RETURN _Noreturn +#elif defined(__cplusplus) && (__cplusplus >= 201103L) + #define JSON_HEDLEY_NO_RETURN JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[noreturn]]) +#elif 
\ + JSON_HEDLEY_HAS_ATTRIBUTE(noreturn) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,2,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_NO_RETURN __attribute__((__noreturn__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_NO_RETURN _Pragma("does_not_return") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) + #define JSON_HEDLEY_NO_RETURN __declspec(noreturn) +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && defined(__cplusplus) + #define JSON_HEDLEY_NO_RETURN _Pragma("FUNC_NEVER_RETURNS;") +#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0) + #define JSON_HEDLEY_NO_RETURN __attribute((noreturn)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0) + #define JSON_HEDLEY_NO_RETURN __declspec(noreturn) +#else + #define JSON_HEDLEY_NO_RETURN +#endif + +#if defined(JSON_HEDLEY_NO_ESCAPE) + #undef JSON_HEDLEY_NO_ESCAPE +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(noescape) + #define JSON_HEDLEY_NO_ESCAPE __attribute__((__noescape__)) +#else + #define JSON_HEDLEY_NO_ESCAPE +#endif + +#if defined(JSON_HEDLEY_UNREACHABLE) + #undef JSON_HEDLEY_UNREACHABLE +#endif +#if defined(JSON_HEDLEY_UNREACHABLE_RETURN) + #undef JSON_HEDLEY_UNREACHABLE_RETURN +#endif +#if 
defined(JSON_HEDLEY_ASSUME) + #undef JSON_HEDLEY_ASSUME +#endif +#if \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_ASSUME(expr) __assume(expr) +#elif JSON_HEDLEY_HAS_BUILTIN(__builtin_assume) + #define JSON_HEDLEY_ASSUME(expr) __builtin_assume(expr) +#elif \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) + #if defined(__cplusplus) + #define JSON_HEDLEY_ASSUME(expr) std::_nassert(expr) + #else + #define JSON_HEDLEY_ASSUME(expr) _nassert(expr) + #endif +#endif +#if \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_unreachable) && (!defined(JSON_HEDLEY_ARM_VERSION))) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,10,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,5) + #define JSON_HEDLEY_UNREACHABLE() __builtin_unreachable() +#elif defined(JSON_HEDLEY_ASSUME) + #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0) +#endif +#if !defined(JSON_HEDLEY_ASSUME) + #if defined(JSON_HEDLEY_UNREACHABLE) + #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, ((expr) ? 
1 : (JSON_HEDLEY_UNREACHABLE(), 1))) + #else + #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, expr) + #endif +#endif +#if defined(JSON_HEDLEY_UNREACHABLE) + #if \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (JSON_HEDLEY_STATIC_CAST(void, JSON_HEDLEY_ASSUME(0)), (value)) + #else + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) JSON_HEDLEY_UNREACHABLE() + #endif +#else + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (value) +#endif +#if !defined(JSON_HEDLEY_UNREACHABLE) + #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0) +#endif + +JSON_HEDLEY_DIAGNOSTIC_PUSH +#if JSON_HEDLEY_HAS_WARNING("-Wpedantic") + #pragma clang diagnostic ignored "-Wpedantic" +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat-pedantic") && defined(__cplusplus) + #pragma clang diagnostic ignored "-Wc++98-compat-pedantic" +#endif +#if JSON_HEDLEY_GCC_HAS_WARNING("-Wvariadic-macros",4,0,0) + #if defined(__clang__) + #pragma clang diagnostic ignored "-Wvariadic-macros" + #elif defined(JSON_HEDLEY_GCC_VERSION) + #pragma GCC diagnostic ignored "-Wvariadic-macros" + #endif +#endif +#if defined(JSON_HEDLEY_NON_NULL) + #undef JSON_HEDLEY_NON_NULL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(nonnull) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) + #define JSON_HEDLEY_NON_NULL(...) __attribute__((__nonnull__(__VA_ARGS__))) +#else + #define JSON_HEDLEY_NON_NULL(...) 
+#endif +JSON_HEDLEY_DIAGNOSTIC_POP + +#if defined(JSON_HEDLEY_PRINTF_FORMAT) + #undef JSON_HEDLEY_PRINTF_FORMAT +#endif +#if defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && !defined(__USE_MINGW_ANSI_STDIO) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(ms_printf, string_idx, first_to_check))) +#elif defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && defined(__USE_MINGW_ANSI_STDIO) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(gnu_printf, string_idx, first_to_check))) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(format) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(__printf__, string_idx, first_to_check))) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(6,0,0) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __declspec(vaformat(printf,string_idx,first_to_check)) +#else + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) +#endif + +#if defined(JSON_HEDLEY_CONSTEXPR) + #undef JSON_HEDLEY_CONSTEXPR +#endif +#if 
defined(__cplusplus) + #if __cplusplus >= 201103L + #define JSON_HEDLEY_CONSTEXPR JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(constexpr) + #endif +#endif +#if !defined(JSON_HEDLEY_CONSTEXPR) + #define JSON_HEDLEY_CONSTEXPR +#endif + +#if defined(JSON_HEDLEY_PREDICT) + #undef JSON_HEDLEY_PREDICT +#endif +#if defined(JSON_HEDLEY_LIKELY) + #undef JSON_HEDLEY_LIKELY +#endif +#if defined(JSON_HEDLEY_UNLIKELY) + #undef JSON_HEDLEY_UNLIKELY +#endif +#if defined(JSON_HEDLEY_UNPREDICTABLE) + #undef JSON_HEDLEY_UNPREDICTABLE +#endif +#if JSON_HEDLEY_HAS_BUILTIN(__builtin_unpredictable) + #define JSON_HEDLEY_UNPREDICTABLE(expr) __builtin_unpredictable((expr)) +#endif +#if \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_expect_with_probability) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(9,0,0) +# define JSON_HEDLEY_PREDICT(expr, value, probability) __builtin_expect_with_probability( (expr), (value), (probability)) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) __builtin_expect_with_probability(!!(expr), 1 , (probability)) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) __builtin_expect_with_probability(!!(expr), 0 , (probability)) +# define JSON_HEDLEY_LIKELY(expr) __builtin_expect (!!(expr), 1 ) +# define JSON_HEDLEY_UNLIKELY(expr) __builtin_expect (!!(expr), 0 ) +#elif \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_expect) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,27) || \ + 
JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) +# define JSON_HEDLEY_PREDICT(expr, expected, probability) \ + (((probability) >= 0.9) ? __builtin_expect((expr), (expected)) : (JSON_HEDLEY_STATIC_CAST(void, expected), (expr))) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) \ + (__extension__ ({ \ + double hedley_probability_ = (probability); \ + ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 1) : ((hedley_probability_ <= 0.1) ? __builtin_expect(!!(expr), 0) : !!(expr))); \ + })) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) \ + (__extension__ ({ \ + double hedley_probability_ = (probability); \ + ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 0) : ((hedley_probability_ <= 0.1) ? __builtin_expect(!!(expr), 1) : !!(expr))); \ + })) +# define JSON_HEDLEY_LIKELY(expr) __builtin_expect(!!(expr), 1) +# define JSON_HEDLEY_UNLIKELY(expr) __builtin_expect(!!(expr), 0) +#else +# define JSON_HEDLEY_PREDICT(expr, expected, probability) (JSON_HEDLEY_STATIC_CAST(void, expected), (expr)) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) (!!(expr)) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) (!!(expr)) +# define JSON_HEDLEY_LIKELY(expr) (!!(expr)) +# define JSON_HEDLEY_UNLIKELY(expr) (!!(expr)) +#endif +#if !defined(JSON_HEDLEY_UNPREDICTABLE) + #define JSON_HEDLEY_UNPREDICTABLE(expr) JSON_HEDLEY_PREDICT(expr, 1, 0.5) +#endif + +#if defined(JSON_HEDLEY_MALLOC) + #undef JSON_HEDLEY_MALLOC +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(malloc) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || 
\ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_MALLOC __attribute__((__malloc__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_MALLOC _Pragma("returns_new_memory") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(14, 0, 0) + #define JSON_HEDLEY_MALLOC __declspec(restrict) +#else + #define JSON_HEDLEY_MALLOC +#endif + +#if defined(JSON_HEDLEY_PURE) + #undef JSON_HEDLEY_PURE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(pure) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(2,96,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) +# define JSON_HEDLEY_PURE __attribute__((__pure__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) +# define JSON_HEDLEY_PURE _Pragma("does_not_write_global_data") +#elif 
defined(__cplusplus) && \ + ( \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) \ + ) +# define JSON_HEDLEY_PURE _Pragma("FUNC_IS_PURE;") +#else +# define JSON_HEDLEY_PURE +#endif + +#if defined(JSON_HEDLEY_CONST) + #undef JSON_HEDLEY_CONST +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(const) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(2,5,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_CONST __attribute__((__const__)) +#elif \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_CONST _Pragma("no_side_effect") +#else + #define JSON_HEDLEY_CONST JSON_HEDLEY_PURE +#endif + +#if defined(JSON_HEDLEY_RESTRICT) + #undef JSON_HEDLEY_RESTRICT +#endif +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && !defined(__cplusplus) + #define JSON_HEDLEY_RESTRICT restrict +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + 
JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,4) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \ + defined(__clang__) + #define JSON_HEDLEY_RESTRICT __restrict +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,3,0) && !defined(__cplusplus) + #define JSON_HEDLEY_RESTRICT _Restrict +#else + #define JSON_HEDLEY_RESTRICT +#endif + +#if defined(JSON_HEDLEY_INLINE) + #undef JSON_HEDLEY_INLINE +#endif +#if \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \ + (defined(__cplusplus) && (__cplusplus >= 199711L)) + #define JSON_HEDLEY_INLINE inline +#elif \ + defined(JSON_HEDLEY_GCC_VERSION) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(6,2,0) + #define JSON_HEDLEY_INLINE __inline__ +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,1,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_INLINE __inline +#else + #define JSON_HEDLEY_INLINE +#endif + +#if defined(JSON_HEDLEY_ALWAYS_INLINE) + #undef JSON_HEDLEY_ALWAYS_INLINE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(always_inline) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + 
JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) +# define JSON_HEDLEY_ALWAYS_INLINE __attribute__((__always_inline__)) JSON_HEDLEY_INLINE +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) +# define JSON_HEDLEY_ALWAYS_INLINE __forceinline +#elif defined(__cplusplus) && \ + ( \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) \ + ) +# define JSON_HEDLEY_ALWAYS_INLINE _Pragma("FUNC_ALWAYS_INLINE;") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) +# define JSON_HEDLEY_ALWAYS_INLINE _Pragma("inline=forced") +#else +# define JSON_HEDLEY_ALWAYS_INLINE JSON_HEDLEY_INLINE +#endif + +#if defined(JSON_HEDLEY_NEVER_INLINE) + #undef JSON_HEDLEY_NEVER_INLINE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(noinline) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + 
JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_NEVER_INLINE __attribute__((__noinline__)) +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) + #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(10,2,0) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("noinline") +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && defined(__cplusplus) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("FUNC_CANNOT_INLINE;") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("inline=never") +#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0) + #define JSON_HEDLEY_NEVER_INLINE __attribute((noinline)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0) + #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline) +#else + #define JSON_HEDLEY_NEVER_INLINE +#endif + +#if defined(JSON_HEDLEY_PRIVATE) + #undef JSON_HEDLEY_PRIVATE +#endif +#if defined(JSON_HEDLEY_PUBLIC) + #undef JSON_HEDLEY_PUBLIC +#endif +#if defined(JSON_HEDLEY_IMPORT) + #undef JSON_HEDLEY_IMPORT +#endif +#if defined(_WIN32) || defined(__CYGWIN__) +# define JSON_HEDLEY_PRIVATE +# define JSON_HEDLEY_PUBLIC __declspec(dllexport) +# define JSON_HEDLEY_IMPORT __declspec(dllimport) +#else +# if \ + JSON_HEDLEY_HAS_ATTRIBUTE(visibility) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + ( \ + defined(__TI_EABI__) && \ + ( \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + 
JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) \ + ) \ + ) +# define JSON_HEDLEY_PRIVATE __attribute__((__visibility__("hidden"))) +# define JSON_HEDLEY_PUBLIC __attribute__((__visibility__("default"))) +# else +# define JSON_HEDLEY_PRIVATE +# define JSON_HEDLEY_PUBLIC +# endif +# define JSON_HEDLEY_IMPORT extern +#endif + +#if defined(JSON_HEDLEY_NO_THROW) + #undef JSON_HEDLEY_NO_THROW +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(nothrow) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_NO_THROW __attribute__((__nothrow__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) + #define JSON_HEDLEY_NO_THROW __declspec(nothrow) +#else + #define JSON_HEDLEY_NO_THROW +#endif + +#if defined(JSON_HEDLEY_FALL_THROUGH) + #undef JSON_HEDLEY_FALL_THROUGH +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(fallthrough) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(7,0,0) + #define JSON_HEDLEY_FALL_THROUGH __attribute__((__fallthrough__)) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(clang,fallthrough) + #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[clang::fallthrough]]) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(fallthrough) + #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[fallthrough]]) +#elif defined(__fallthrough) /* SAL */ + #define JSON_HEDLEY_FALL_THROUGH __fallthrough +#else + #define JSON_HEDLEY_FALL_THROUGH +#endif + +#if defined(JSON_HEDLEY_RETURNS_NON_NULL) + #undef JSON_HEDLEY_RETURNS_NON_NULL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(returns_nonnull) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) + #define JSON_HEDLEY_RETURNS_NON_NULL __attribute__((__returns_nonnull__)) +#elif defined(_Ret_notnull_) /* SAL */ + #define JSON_HEDLEY_RETURNS_NON_NULL _Ret_notnull_ +#else + #define JSON_HEDLEY_RETURNS_NON_NULL +#endif + +#if defined(JSON_HEDLEY_ARRAY_PARAM) + #undef JSON_HEDLEY_ARRAY_PARAM +#endif +#if \ + 
defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && \ + !defined(__STDC_NO_VLA__) && \ + !defined(__cplusplus) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_TINYC_VERSION) + #define JSON_HEDLEY_ARRAY_PARAM(name) (name) +#else + #define JSON_HEDLEY_ARRAY_PARAM(name) +#endif + +#if defined(JSON_HEDLEY_IS_CONSTANT) + #undef JSON_HEDLEY_IS_CONSTANT +#endif +#if defined(JSON_HEDLEY_REQUIRE_CONSTEXPR) + #undef JSON_HEDLEY_REQUIRE_CONSTEXPR +#endif +/* JSON_HEDLEY_IS_CONSTEXPR_ is for + HEDLEY INTERNAL USE ONLY. API subject to change without notice. */ +#if defined(JSON_HEDLEY_IS_CONSTEXPR_) + #undef JSON_HEDLEY_IS_CONSTEXPR_ +#endif +#if \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_constant_p) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,19) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) && !defined(__cplusplus)) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) + #define JSON_HEDLEY_IS_CONSTANT(expr) __builtin_constant_p(expr) +#endif +#if !defined(__cplusplus) +# if \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_types_compatible_p) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,24) +#if defined(__INTPTR_TYPE__) + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0)), int*) +#else + #include + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? 
(void*) ((intptr_t) ((expr) * 0)) : (int*) 0)), int*) +#endif +# elif \ + ( \ + defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) && \ + !defined(JSON_HEDLEY_SUNPRO_VERSION) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_IAR_VERSION)) || \ + JSON_HEDLEY_HAS_EXTENSION(c_generic_selections) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,3,0) +#if defined(__INTPTR_TYPE__) + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0), int*: 1, void*: 0) +#else + #include + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((intptr_t) * 0) : (int*) 0), int*: 1, void*: 0) +#endif +# elif \ + defined(JSON_HEDLEY_GCC_VERSION) || \ + defined(JSON_HEDLEY_INTEL_VERSION) || \ + defined(JSON_HEDLEY_TINYC_VERSION) || \ + defined(JSON_HEDLEY_TI_ARMCL_VERSION) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(18,12,0) || \ + defined(JSON_HEDLEY_TI_CL2000_VERSION) || \ + defined(JSON_HEDLEY_TI_CL6X_VERSION) || \ + defined(JSON_HEDLEY_TI_CL7X_VERSION) || \ + defined(JSON_HEDLEY_TI_CLPRU_VERSION) || \ + defined(__clang__) +# define JSON_HEDLEY_IS_CONSTEXPR_(expr) ( \ + sizeof(void) != \ + sizeof(*( \ + 1 ? \ + ((void*) ((expr) * 0L) ) : \ +((struct { char v[sizeof(void) * 2]; } *) 1) \ + ) \ + ) \ + ) +# endif +#endif +#if defined(JSON_HEDLEY_IS_CONSTEXPR_) + #if !defined(JSON_HEDLEY_IS_CONSTANT) + #define JSON_HEDLEY_IS_CONSTANT(expr) JSON_HEDLEY_IS_CONSTEXPR_(expr) + #endif + #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (JSON_HEDLEY_IS_CONSTEXPR_(expr) ? 
(expr) : (-1)) +#else + #if !defined(JSON_HEDLEY_IS_CONSTANT) + #define JSON_HEDLEY_IS_CONSTANT(expr) (0) + #endif + #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (expr) +#endif + +#if defined(JSON_HEDLEY_BEGIN_C_DECLS) + #undef JSON_HEDLEY_BEGIN_C_DECLS +#endif +#if defined(JSON_HEDLEY_END_C_DECLS) + #undef JSON_HEDLEY_END_C_DECLS +#endif +#if defined(JSON_HEDLEY_C_DECL) + #undef JSON_HEDLEY_C_DECL +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_BEGIN_C_DECLS extern "C" { + #define JSON_HEDLEY_END_C_DECLS } + #define JSON_HEDLEY_C_DECL extern "C" +#else + #define JSON_HEDLEY_BEGIN_C_DECLS + #define JSON_HEDLEY_END_C_DECLS + #define JSON_HEDLEY_C_DECL +#endif + +#if defined(JSON_HEDLEY_STATIC_ASSERT) + #undef JSON_HEDLEY_STATIC_ASSERT +#endif +#if \ + !defined(__cplusplus) && ( \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) || \ + JSON_HEDLEY_HAS_FEATURE(c_static_assert) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(6,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + defined(_Static_assert) \ + ) +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) _Static_assert(expr, message) +#elif \ + (defined(__cplusplus) && (__cplusplus >= 201103L)) || \ + JSON_HEDLEY_MSVC_VERSION_CHECK(16,0,0) +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(static_assert(expr, message)) +#else +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) +#endif + +#if defined(JSON_HEDLEY_NULL) + #undef JSON_HEDLEY_NULL +#endif +#if defined(__cplusplus) + #if __cplusplus >= 201103L + #define JSON_HEDLEY_NULL JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(nullptr) + #elif defined(NULL) + #define JSON_HEDLEY_NULL NULL + #else + #define JSON_HEDLEY_NULL JSON_HEDLEY_STATIC_CAST(void*, 0) + #endif +#elif defined(NULL) + #define JSON_HEDLEY_NULL NULL +#else + #define JSON_HEDLEY_NULL ((void*) 0) +#endif + +#if defined(JSON_HEDLEY_MESSAGE) + #undef JSON_HEDLEY_MESSAGE +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") +# 
define JSON_HEDLEY_MESSAGE(msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \ + JSON_HEDLEY_PRAGMA(message msg) \ + JSON_HEDLEY_DIAGNOSTIC_POP +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message msg) +#elif JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(_CRI message msg) +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#else +# define JSON_HEDLEY_MESSAGE(msg) +#endif + +#if defined(JSON_HEDLEY_WARNING) + #undef JSON_HEDLEY_WARNING +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") +# define JSON_HEDLEY_WARNING(msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \ + JSON_HEDLEY_PRAGMA(clang warning msg) \ + JSON_HEDLEY_DIAGNOSTIC_POP +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,8,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(GCC warning msg) +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#else +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_MESSAGE(msg) +#endif + +#if defined(JSON_HEDLEY_REQUIRE) + #undef JSON_HEDLEY_REQUIRE +#endif +#if defined(JSON_HEDLEY_REQUIRE_MSG) + #undef JSON_HEDLEY_REQUIRE_MSG +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(diagnose_if) +# if JSON_HEDLEY_HAS_WARNING("-Wgcc-compat") +# define JSON_HEDLEY_REQUIRE(expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ + __attribute__((diagnose_if(!(expr), #expr, "error"))) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + 
_Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ + __attribute__((diagnose_if(!(expr), msg, "error"))) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_REQUIRE(expr) __attribute__((diagnose_if(!(expr), #expr, "error"))) +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) __attribute__((diagnose_if(!(expr), msg, "error"))) +# endif +#else +# define JSON_HEDLEY_REQUIRE(expr) +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) +#endif + +#if defined(JSON_HEDLEY_FLAGS) + #undef JSON_HEDLEY_FLAGS +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(flag_enum) + #define JSON_HEDLEY_FLAGS __attribute__((__flag_enum__)) +#endif + +#if defined(JSON_HEDLEY_FLAGS_CAST) + #undef JSON_HEDLEY_FLAGS_CAST +#endif +#if JSON_HEDLEY_INTEL_VERSION_CHECK(19,0,0) +# define JSON_HEDLEY_FLAGS_CAST(T, expr) (__extension__ ({ \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("warning(disable:188)") \ + ((T) (expr)); \ + JSON_HEDLEY_DIAGNOSTIC_POP \ + })) +#else +# define JSON_HEDLEY_FLAGS_CAST(T, expr) JSON_HEDLEY_STATIC_CAST(T, expr) +#endif + +#if defined(JSON_HEDLEY_EMPTY_BASES) + #undef JSON_HEDLEY_EMPTY_BASES +#endif +#if JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,23918) && !JSON_HEDLEY_MSVC_VERSION_CHECK(20,0,0) + #define JSON_HEDLEY_EMPTY_BASES __declspec(empty_bases) +#else + #define JSON_HEDLEY_EMPTY_BASES +#endif + +/* Remaining macros are deprecated. 
*/ + +#if defined(JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK) + #undef JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK +#endif +#if defined(__clang__) + #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) (0) +#else + #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_CLANG_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_BUILTIN) + #undef JSON_HEDLEY_CLANG_HAS_BUILTIN +#endif +#define JSON_HEDLEY_CLANG_HAS_BUILTIN(builtin) JSON_HEDLEY_HAS_BUILTIN(builtin) + +#if defined(JSON_HEDLEY_CLANG_HAS_FEATURE) + #undef JSON_HEDLEY_CLANG_HAS_FEATURE +#endif +#define JSON_HEDLEY_CLANG_HAS_FEATURE(feature) JSON_HEDLEY_HAS_FEATURE(feature) + +#if defined(JSON_HEDLEY_CLANG_HAS_EXTENSION) + #undef JSON_HEDLEY_CLANG_HAS_EXTENSION +#endif +#define JSON_HEDLEY_CLANG_HAS_EXTENSION(extension) JSON_HEDLEY_HAS_EXTENSION(extension) + +#if defined(JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_DECLSPEC_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_WARNING) + #undef JSON_HEDLEY_CLANG_HAS_WARNING +#endif +#define JSON_HEDLEY_CLANG_HAS_WARNING(warning) JSON_HEDLEY_HAS_WARNING(warning) + +#endif /* !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < X) */ + + +// This file contains all internal macro definitions +// You MUST include macro_unscope.hpp at the end of json.hpp to undef all of them + +// exclude unsupported compilers +#if !defined(JSON_SKIP_UNSUPPORTED_COMPILER_CHECK) + #if 
defined(__clang__) + #if (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) < 30400 + #error "unsupported Clang version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #elif defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER)) + #if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 40800 + #error "unsupported GCC version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #endif +#endif + +// C++ language standard detection +#if (defined(__cplusplus) && __cplusplus >= 202002L) || (defined(_MSVC_LANG) && _MSVC_LANG >= 202002L) + #define JSON_HAS_CPP_20 + #define JSON_HAS_CPP_17 + #define JSON_HAS_CPP_14 +#elif (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464 + #define JSON_HAS_CPP_17 + #define JSON_HAS_CPP_14 +#elif (defined(__cplusplus) && __cplusplus >= 201402L) || (defined(_HAS_CXX14) && _HAS_CXX14 == 1) + #define JSON_HAS_CPP_14 +#endif + +// disable float-equal warnings on GCC/clang +#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__) + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + +// disable documentation warnings on clang +#if defined(__clang__) + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wdocumentation" +#endif + +// allow to disable exceptions +#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && !defined(JSON_NOEXCEPTION) + #define JSON_THROW(exception) throw exception + #define JSON_TRY try + #define JSON_CATCH(exception) catch(exception) + #define JSON_INTERNAL_CATCH(exception) catch(exception) +#else + #include + #define JSON_THROW(exception) std::abort() + #define JSON_TRY if(true) + #define JSON_CATCH(exception) if(false) + #define JSON_INTERNAL_CATCH(exception) if(false) +#endif + +// override exception macros +#if defined(JSON_THROW_USER) + #undef JSON_THROW + #define JSON_THROW 
JSON_THROW_USER +#endif +#if defined(JSON_TRY_USER) + #undef JSON_TRY + #define JSON_TRY JSON_TRY_USER +#endif +#if defined(JSON_CATCH_USER) + #undef JSON_CATCH + #define JSON_CATCH JSON_CATCH_USER + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_CATCH_USER +#endif +#if defined(JSON_INTERNAL_CATCH_USER) + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_INTERNAL_CATCH_USER +#endif + +// allow to override assert +#if !defined(JSON_ASSERT) + #include // assert + #define JSON_ASSERT(x) assert(x) +#endif + +/*! +@brief macro to briefly define a mapping between an enum and JSON +@def NLOHMANN_JSON_SERIALIZE_ENUM +@since version 3.4.0 +*/ +#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...) \ + template \ + inline void to_json(BasicJsonType& j, const ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [e](const std::pair& ej_pair) -> bool \ + { \ + return ej_pair.first == e; \ + }); \ + j = ((it != std::end(m)) ? it : std::begin(m))->second; \ + } \ + template \ + inline void from_json(const BasicJsonType& j, ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [&j](const std::pair& ej_pair) -> bool \ + { \ + return ej_pair.second == j; \ + }); \ + e = ((it != std::end(m)) ? it : std::begin(m))->first; \ + } + +// Ugly macros to avoid uglier copy-paste when specializing basic_json. They +// may be removed in the future once the class is split. 
+ +#define NLOHMANN_BASIC_JSON_TPL_DECLARATION \ + template class ObjectType, \ + template class ArrayType, \ + class StringType, class BooleanType, class NumberIntegerType, \ + class NumberUnsignedType, class NumberFloatType, \ + template class AllocatorType, \ + template class JSONSerializer, \ + class BinaryType> + +#define NLOHMANN_BASIC_JSON_TPL \ + basic_json + +// Macros to simplify conversion from/to types + +#define NLOHMANN_JSON_EXPAND( x ) x +#define NLOHMANN_JSON_GET_MACRO(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64, NAME,...) NAME +#define NLOHMANN_JSON_PASTE(...) NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_GET_MACRO(__VA_ARGS__, \ + NLOHMANN_JSON_PASTE64, \ + NLOHMANN_JSON_PASTE63, \ + NLOHMANN_JSON_PASTE62, \ + NLOHMANN_JSON_PASTE61, \ + NLOHMANN_JSON_PASTE60, \ + NLOHMANN_JSON_PASTE59, \ + NLOHMANN_JSON_PASTE58, \ + NLOHMANN_JSON_PASTE57, \ + NLOHMANN_JSON_PASTE56, \ + NLOHMANN_JSON_PASTE55, \ + NLOHMANN_JSON_PASTE54, \ + NLOHMANN_JSON_PASTE53, \ + NLOHMANN_JSON_PASTE52, \ + NLOHMANN_JSON_PASTE51, \ + NLOHMANN_JSON_PASTE50, \ + NLOHMANN_JSON_PASTE49, \ + NLOHMANN_JSON_PASTE48, \ + NLOHMANN_JSON_PASTE47, \ + NLOHMANN_JSON_PASTE46, \ + NLOHMANN_JSON_PASTE45, \ + NLOHMANN_JSON_PASTE44, \ + NLOHMANN_JSON_PASTE43, \ + NLOHMANN_JSON_PASTE42, \ + NLOHMANN_JSON_PASTE41, \ + NLOHMANN_JSON_PASTE40, \ + NLOHMANN_JSON_PASTE39, \ + NLOHMANN_JSON_PASTE38, \ + NLOHMANN_JSON_PASTE37, \ + NLOHMANN_JSON_PASTE36, \ + NLOHMANN_JSON_PASTE35, \ + NLOHMANN_JSON_PASTE34, \ + NLOHMANN_JSON_PASTE33, \ + NLOHMANN_JSON_PASTE32, \ + NLOHMANN_JSON_PASTE31, \ + NLOHMANN_JSON_PASTE30, \ + NLOHMANN_JSON_PASTE29, \ + NLOHMANN_JSON_PASTE28, \ + NLOHMANN_JSON_PASTE27, \ + NLOHMANN_JSON_PASTE26, \ + NLOHMANN_JSON_PASTE25, \ + 
NLOHMANN_JSON_PASTE24, \ + NLOHMANN_JSON_PASTE23, \ + NLOHMANN_JSON_PASTE22, \ + NLOHMANN_JSON_PASTE21, \ + NLOHMANN_JSON_PASTE20, \ + NLOHMANN_JSON_PASTE19, \ + NLOHMANN_JSON_PASTE18, \ + NLOHMANN_JSON_PASTE17, \ + NLOHMANN_JSON_PASTE16, \ + NLOHMANN_JSON_PASTE15, \ + NLOHMANN_JSON_PASTE14, \ + NLOHMANN_JSON_PASTE13, \ + NLOHMANN_JSON_PASTE12, \ + NLOHMANN_JSON_PASTE11, \ + NLOHMANN_JSON_PASTE10, \ + NLOHMANN_JSON_PASTE9, \ + NLOHMANN_JSON_PASTE8, \ + NLOHMANN_JSON_PASTE7, \ + NLOHMANN_JSON_PASTE6, \ + NLOHMANN_JSON_PASTE5, \ + NLOHMANN_JSON_PASTE4, \ + NLOHMANN_JSON_PASTE3, \ + NLOHMANN_JSON_PASTE2, \ + NLOHMANN_JSON_PASTE1)(__VA_ARGS__)) +#define NLOHMANN_JSON_PASTE2(func, v1) func(v1) +#define NLOHMANN_JSON_PASTE3(func, v1, v2) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE2(func, v2) +#define NLOHMANN_JSON_PASTE4(func, v1, v2, v3) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE3(func, v2, v3) +#define NLOHMANN_JSON_PASTE5(func, v1, v2, v3, v4) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE4(func, v2, v3, v4) +#define NLOHMANN_JSON_PASTE6(func, v1, v2, v3, v4, v5) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE5(func, v2, v3, v4, v5) +#define NLOHMANN_JSON_PASTE7(func, v1, v2, v3, v4, v5, v6) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE6(func, v2, v3, v4, v5, v6) +#define NLOHMANN_JSON_PASTE8(func, v1, v2, v3, v4, v5, v6, v7) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE7(func, v2, v3, v4, v5, v6, v7) +#define NLOHMANN_JSON_PASTE9(func, v1, v2, v3, v4, v5, v6, v7, v8) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE8(func, v2, v3, v4, v5, v6, v7, v8) +#define NLOHMANN_JSON_PASTE10(func, v1, v2, v3, v4, v5, v6, v7, v8, v9) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE9(func, v2, v3, v4, v5, v6, v7, v8, v9) +#define NLOHMANN_JSON_PASTE11(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE10(func, v2, v3, v4, v5, v6, v7, v8, v9, v10) +#define NLOHMANN_JSON_PASTE12(func, v1, v2, v3, v4, 
v5, v6, v7, v8, v9, v10, v11) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE11(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) +#define NLOHMANN_JSON_PASTE13(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE12(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) +#define NLOHMANN_JSON_PASTE14(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE13(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13) +#define NLOHMANN_JSON_PASTE15(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE14(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) +#define NLOHMANN_JSON_PASTE16(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE15(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) +#define NLOHMANN_JSON_PASTE17(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE16(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) +#define NLOHMANN_JSON_PASTE18(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE17(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) +#define NLOHMANN_JSON_PASTE19(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE18(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) +#define NLOHMANN_JSON_PASTE20(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE19(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) +#define NLOHMANN_JSON_PASTE21(func, 
v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE20(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) +#define NLOHMANN_JSON_PASTE22(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE21(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) +#define NLOHMANN_JSON_PASTE23(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE22(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) +#define NLOHMANN_JSON_PASTE24(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE23(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23) +#define NLOHMANN_JSON_PASTE25(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE24(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24) +#define NLOHMANN_JSON_PASTE26(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE25(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25) +#define NLOHMANN_JSON_PASTE27(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE26(func, v2, v3, v4, v5, v6, v7, 
v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26) +#define NLOHMANN_JSON_PASTE28(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE27(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27) +#define NLOHMANN_JSON_PASTE29(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE28(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28) +#define NLOHMANN_JSON_PASTE30(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE29(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29) +#define NLOHMANN_JSON_PASTE31(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE30(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30) +#define NLOHMANN_JSON_PASTE32(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE31(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31) +#define NLOHMANN_JSON_PASTE33(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, 
v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE32(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32) +#define NLOHMANN_JSON_PASTE34(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE33(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33) +#define NLOHMANN_JSON_PASTE35(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE34(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34) +#define NLOHMANN_JSON_PASTE36(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE35(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35) +#define NLOHMANN_JSON_PASTE37(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE36(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36) +#define 
NLOHMANN_JSON_PASTE38(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE37(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37) +#define NLOHMANN_JSON_PASTE39(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE38(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38) +#define NLOHMANN_JSON_PASTE40(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE39(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39) +#define NLOHMANN_JSON_PASTE41(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE40(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40) +#define NLOHMANN_JSON_PASTE42(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, 
v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE41(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41) +#define NLOHMANN_JSON_PASTE43(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE42(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42) +#define NLOHMANN_JSON_PASTE44(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE43(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43) +#define NLOHMANN_JSON_PASTE45(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE44(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44) +#define NLOHMANN_JSON_PASTE46(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, 
v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE45(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45) +#define NLOHMANN_JSON_PASTE47(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE46(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46) +#define NLOHMANN_JSON_PASTE48(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE47(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47) +#define NLOHMANN_JSON_PASTE49(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE48(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48) +#define 
NLOHMANN_JSON_PASTE50(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE49(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49) +#define NLOHMANN_JSON_PASTE51(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE50(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50) +#define NLOHMANN_JSON_PASTE52(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE51(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51) +#define NLOHMANN_JSON_PASTE53(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, 
v52) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE52(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52) +#define NLOHMANN_JSON_PASTE54(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE53(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53) +#define NLOHMANN_JSON_PASTE55(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE54(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54) +#define NLOHMANN_JSON_PASTE56(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE55(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, 
v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55) +#define NLOHMANN_JSON_PASTE57(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE56(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56) +#define NLOHMANN_JSON_PASTE58(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE57(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57) +#define NLOHMANN_JSON_PASTE59(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE58(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, 
v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58) +#define NLOHMANN_JSON_PASTE60(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE59(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59) +#define NLOHMANN_JSON_PASTE61(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE60(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60) +#define NLOHMANN_JSON_PASTE62(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE61(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, 
v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61) +#define NLOHMANN_JSON_PASTE63(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE62(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62) +#define NLOHMANN_JSON_PASTE64(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE63(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63) + +#define NLOHMANN_JSON_TO(v1) nlohmann_json_j[#v1] = nlohmann_json_t.v1; +#define NLOHMANN_JSON_FROM(v1) nlohmann_json_j.at(#v1).get_to(nlohmann_json_t.v1); + +/*! +@brief macro +@def NLOHMANN_DEFINE_TYPE_INTRUSIVE +@since version 3.9.0 +*/ +#define NLOHMANN_DEFINE_TYPE_INTRUSIVE(Type, ...) 
\ + friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + friend void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) } + +/*! +@brief macro +@def NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE +@since version 3.9.0 +*/ +#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE(Type, ...) \ + inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + inline void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) } + +#ifndef JSON_USE_IMPLICIT_CONVERSIONS + #define JSON_USE_IMPLICIT_CONVERSIONS 1 +#endif + +#if JSON_USE_IMPLICIT_CONVERSIONS + #define JSON_EXPLICIT +#else + #define JSON_EXPLICIT explicit +#endif + + +namespace nlohmann +{ +namespace detail +{ +//////////////// +// exceptions // +//////////////// + +/*! +@brief general exception of the @ref basic_json class + +This class is an extension of `std::exception` objects with a member @a id for +exception ids. It is used as the base class for all exceptions thrown by the +@ref basic_json class. This class can hence be used as "wildcard" to catch +exceptions. + +Subclasses: +- @ref parse_error for exceptions indicating a parse error +- @ref invalid_iterator for exceptions indicating errors with iterators +- @ref type_error for exceptions indicating executing a member function with + a wrong type +- @ref out_of_range for exceptions indicating access out of the defined range +- @ref other_error for exceptions indicating other library errors + +@internal +@note To have nothrow-copy-constructible exceptions, we internally use + `std::runtime_error` which can cope with arbitrary-length error messages. 
+ Intermediate strings are built with static functions and then passed to + the actual constructor. +@endinternal + +@liveexample{The following code shows how arbitrary library exceptions can be +caught.,exception} + +@since version 3.0.0 +*/ +class exception : public std::exception +{ + public: + /// returns the explanatory string + JSON_HEDLEY_RETURNS_NON_NULL + const char* what() const noexcept override + { + return m.what(); + } + + /// the id of the exception + const int id; + + protected: + JSON_HEDLEY_NON_NULL(3) + exception(int id_, const char* what_arg) : id(id_), m(what_arg) {} + + static std::string name(const std::string& ename, int id_) + { + return "[json.exception." + ename + "." + std::to_string(id_) + "] "; + } + + private: + /// an exception object as storage for error messages + std::runtime_error m; +}; + +/*! +@brief exception indicating a parse error + +This exception is thrown by the library when a parse error occurs. Parse errors +can occur during the deserialization of JSON text, CBOR, MessagePack, as well +as when using JSON Patch. + +Member @a byte holds the byte index of the last read character in the input +file. + +Exceptions have ids 1xx. + +name / id | example message | description +------------------------------ | --------------- | ------------------------- +json.exception.parse_error.101 | parse error at 2: unexpected end of input; expected string literal | This error indicates a syntax error while deserializing a JSON text. The error message describes that an unexpected token (character) was encountered, and the member @a byte indicates the error position. +json.exception.parse_error.102 | parse error at 14: missing or wrong low surrogate | JSON uses the `\uxxxx` format to describe Unicode characters. Code points above above 0xFFFF are split into two `\uxxxx` entries ("surrogate pairs"). This error indicates that the surrogate pair is incomplete or contains an invalid code point. 
+json.exception.parse_error.103 | parse error: code points above 0x10FFFF are invalid | Unicode supports code points up to 0x10FFFF. Code points above 0x10FFFF are invalid. +json.exception.parse_error.104 | parse error: JSON patch must be an array of objects | [RFC 6902](https://tools.ietf.org/html/rfc6902) requires a JSON Patch document to be a JSON document that represents an array of objects. +json.exception.parse_error.105 | parse error: operation must have string member 'op' | An operation of a JSON Patch document must contain exactly one "op" member, whose value indicates the operation to perform. Its value must be one of "add", "remove", "replace", "move", "copy", or "test"; other values are errors. +json.exception.parse_error.106 | parse error: array index '01' must not begin with '0' | An array index in a JSON Pointer ([RFC 6901](https://tools.ietf.org/html/rfc6901)) may be `0` or any number without a leading `0`. +json.exception.parse_error.107 | parse error: JSON pointer must be empty or begin with '/' - was: 'foo' | A JSON Pointer must be a Unicode string containing a sequence of zero or more reference tokens, each prefixed by a `/` character. +json.exception.parse_error.108 | parse error: escape character '~' must be followed with '0' or '1' | In a JSON Pointer, only `~0` and `~1` are valid escape sequences. +json.exception.parse_error.109 | parse error: array index 'one' is not a number | A JSON Pointer array index must be a number. +json.exception.parse_error.110 | parse error at 1: cannot read 2 bytes from vector | When parsing CBOR or MessagePack, the byte vector ends before the complete value has been read. +json.exception.parse_error.112 | parse error at 1: error reading CBOR; last byte: 0xF8 | Not all types of CBOR or MessagePack are supported. This exception occurs if an unsupported byte was read. 
+json.exception.parse_error.113 | parse error at 2: expected a CBOR string; last byte: 0x98 | While parsing a map key, a value that is not a string has been read. +json.exception.parse_error.114 | parse error: Unsupported BSON record type 0x0F | The parsing of the corresponding BSON record type is not implemented (yet). +json.exception.parse_error.115 | parse error at byte 5: syntax error while parsing UBJSON high-precision number: invalid number text: 1A | A UBJSON high-precision number could not be parsed. + +@note For an input with n bytes, 1 is the index of the first character and n+1 + is the index of the terminating null byte or the end of file. This also + holds true when reading a byte vector (CBOR or MessagePack). + +@liveexample{The following code shows how a `parse_error` exception can be +caught.,parse_error} + +@sa - @ref exception for the base class of the library exceptions +@sa - @ref invalid_iterator for exceptions indicating errors with iterators +@sa - @ref type_error for exceptions indicating executing a member function with + a wrong type +@sa - @ref out_of_range for exceptions indicating access out of the defined range +@sa - @ref other_error for exceptions indicating other library errors + +@since version 3.0.0 +*/ +class parse_error : public exception +{ + public: + /*! 
+ @brief create a parse error exception + @param[in] id_ the id of the exception + @param[in] pos the position where the error occurred (or with + chars_read_total=0 if the position cannot be + determined) + @param[in] what_arg the explanatory string + @return parse_error object + */ + static parse_error create(int id_, const position_t& pos, const std::string& what_arg) + { + std::string w = exception::name("parse_error", id_) + "parse error" + + position_string(pos) + ": " + what_arg; + return parse_error(id_, pos.chars_read_total, w.c_str()); + } + + static parse_error create(int id_, std::size_t byte_, const std::string& what_arg) + { + std::string w = exception::name("parse_error", id_) + "parse error" + + (byte_ != 0 ? (" at byte " + std::to_string(byte_)) : "") + + ": " + what_arg; + return parse_error(id_, byte_, w.c_str()); + } + + /*! + @brief byte index of the parse error + + The byte index of the last read character in the input file. + + @note For an input with n bytes, 1 is the index of the first character and + n+1 is the index of the terminating null byte or the end of file. + This also holds true when reading a byte vector (CBOR or MessagePack). + */ + const std::size_t byte; + + private: + parse_error(int id_, std::size_t byte_, const char* what_arg) + : exception(id_, what_arg), byte(byte_) {} + + static std::string position_string(const position_t& pos) + { + return " at line " + std::to_string(pos.lines_read + 1) + + ", column " + std::to_string(pos.chars_read_current_line); + } +}; + +/*! +@brief exception indicating errors with iterators + +This exception is thrown if iterators passed to a library function do not match +the expected semantics. + +Exceptions have ids 2xx. 
+ +name / id | example message | description +----------------------------------- | --------------- | ------------------------- +json.exception.invalid_iterator.201 | iterators are not compatible | The iterators passed to constructor @ref basic_json(InputIT first, InputIT last) are not compatible, meaning they do not belong to the same container. Therefore, the range (@a first, @a last) is invalid. +json.exception.invalid_iterator.202 | iterator does not fit current value | In an erase or insert function, the passed iterator @a pos does not belong to the JSON value for which the function was called. It hence does not define a valid position for the deletion/insertion. +json.exception.invalid_iterator.203 | iterators do not fit current value | Either iterator passed to function @ref erase(IteratorType first, IteratorType last) does not belong to the JSON value from which values shall be erased. It hence does not define a valid range to delete values from. +json.exception.invalid_iterator.204 | iterators out of range | When an iterator range for a primitive type (number, boolean, or string) is passed to a constructor or an erase function, this range has to be exactly (@ref begin(), @ref end()), because this is the only way the single stored value is expressed. All other ranges are invalid. +json.exception.invalid_iterator.205 | iterator out of range | When an iterator for a primitive type (number, boolean, or string) is passed to an erase function, the iterator has to be the @ref begin() iterator, because it is the only way to address the stored value. All other iterators are invalid. +json.exception.invalid_iterator.206 | cannot construct with iterators from null | The iterators passed to constructor @ref basic_json(InputIT first, InputIT last) belong to a JSON null value and hence to not define a valid range. 
+json.exception.invalid_iterator.207 | cannot use key() for non-object iterators | The key() member function can only be used on iterators belonging to a JSON object, because other types do not have a concept of a key. +json.exception.invalid_iterator.208 | cannot use operator[] for object iterators | The operator[] to specify a concrete offset cannot be used on iterators belonging to a JSON object, because JSON objects are unordered. +json.exception.invalid_iterator.209 | cannot use offsets with object iterators | The offset operators (+, -, +=, -=) cannot be used on iterators belonging to a JSON object, because JSON objects are unordered. +json.exception.invalid_iterator.210 | iterators do not fit | The iterator range passed to the insert function are not compatible, meaning they do not belong to the same container. Therefore, the range (@a first, @a last) is invalid. +json.exception.invalid_iterator.211 | passed iterators may not belong to container | The iterator range passed to the insert function must not be a subrange of the container to insert to. +json.exception.invalid_iterator.212 | cannot compare iterators of different containers | When two iterators are compared, they must belong to the same container. +json.exception.invalid_iterator.213 | cannot compare order of object iterators | The order of object iterators cannot be compared, because JSON objects are unordered. +json.exception.invalid_iterator.214 | cannot get value | Cannot get value for iterator: Either the iterator belongs to a null value or it is an iterator to a primitive type (number, boolean, or string), but the iterator is different to @ref begin(). 
+ +@liveexample{The following code shows how an `invalid_iterator` exception can be +caught.,invalid_iterator} + +@sa - @ref exception for the base class of the library exceptions +@sa - @ref parse_error for exceptions indicating a parse error +@sa - @ref type_error for exceptions indicating executing a member function with + a wrong type +@sa - @ref out_of_range for exceptions indicating access out of the defined range +@sa - @ref other_error for exceptions indicating other library errors + +@since version 3.0.0 +*/ +class invalid_iterator : public exception +{ + public: + static invalid_iterator create(int id_, const std::string& what_arg) + { + std::string w = exception::name("invalid_iterator", id_) + what_arg; + return invalid_iterator(id_, w.c_str()); + } + + private: + JSON_HEDLEY_NON_NULL(3) + invalid_iterator(int id_, const char* what_arg) + : exception(id_, what_arg) {} +}; + +/*! +@brief exception indicating executing a member function with a wrong type + +This exception is thrown in case of a type error; that is, a library function is +executed on a JSON value whose type does not match the expected semantics. + +Exceptions have ids 3xx. + +name / id | example message | description +----------------------------- | --------------- | ------------------------- +json.exception.type_error.301 | cannot create object from initializer list | To create an object from an initializer list, the initializer list must consist only of a list of pairs whose first element is a string. When this constraint is violated, an array is created instead. +json.exception.type_error.302 | type must be object, but is array | During implicit or explicit value conversion, the JSON type must be compatible to the target type. For instance, a JSON string can only be converted into string types, but not into numbers or boolean types. 
+json.exception.type_error.303 | incompatible ReferenceType for get_ref, actual type is object | To retrieve a reference to a value stored in a @ref basic_json object with @ref get_ref, the type of the reference must match the value type. For instance, for a JSON array, the @a ReferenceType must be @ref array_t &. +json.exception.type_error.304 | cannot use at() with string | The @ref at() member functions can only be executed for certain JSON types. +json.exception.type_error.305 | cannot use operator[] with string | The @ref operator[] member functions can only be executed for certain JSON types. +json.exception.type_error.306 | cannot use value() with string | The @ref value() member functions can only be executed for certain JSON types. +json.exception.type_error.307 | cannot use erase() with string | The @ref erase() member functions can only be executed for certain JSON types. +json.exception.type_error.308 | cannot use push_back() with string | The @ref push_back() and @ref operator+= member functions can only be executed for certain JSON types. +json.exception.type_error.309 | cannot use insert() with | The @ref insert() member functions can only be executed for certain JSON types. +json.exception.type_error.310 | cannot use swap() with number | The @ref swap() member functions can only be executed for certain JSON types. +json.exception.type_error.311 | cannot use emplace_back() with string | The @ref emplace_back() member function can only be executed for certain JSON types. +json.exception.type_error.312 | cannot use update() with string | The @ref update() member functions can only be executed for certain JSON types. +json.exception.type_error.313 | invalid value to unflatten | The @ref unflatten function converts an object whose keys are JSON Pointers back into an arbitrary nested JSON value. The JSON Pointers must not overlap, because then the resulting value would not be well defined. 
+json.exception.type_error.314 | only objects can be unflattened | The @ref unflatten function only works for an object whose keys are JSON Pointers. +json.exception.type_error.315 | values in object must be primitive | The @ref unflatten function only works for an object whose keys are JSON Pointers and whose values are primitive. +json.exception.type_error.316 | invalid UTF-8 byte at index 10: 0x7E | The @ref dump function only works with UTF-8 encoded strings; that is, if you assign a `std::string` to a JSON value, make sure it is UTF-8 encoded. | +json.exception.type_error.317 | JSON value cannot be serialized to requested format | The dynamic type of the object cannot be represented in the requested serialization format (e.g. a raw `true` or `null` JSON object cannot be serialized to BSON) | + +@liveexample{The following code shows how a `type_error` exception can be +caught.,type_error} + +@sa - @ref exception for the base class of the library exceptions +@sa - @ref parse_error for exceptions indicating a parse error +@sa - @ref invalid_iterator for exceptions indicating errors with iterators +@sa - @ref out_of_range for exceptions indicating access out of the defined range +@sa - @ref other_error for exceptions indicating other library errors + +@since version 3.0.0 +*/ +class type_error : public exception +{ + public: + static type_error create(int id_, const std::string& what_arg) + { + std::string w = exception::name("type_error", id_) + what_arg; + return type_error(id_, w.c_str()); + } + + private: + JSON_HEDLEY_NON_NULL(3) + type_error(int id_, const char* what_arg) : exception(id_, what_arg) {} +}; + +/*! +@brief exception indicating access out of the defined range + +This exception is thrown in case a library function is called on an input +parameter that exceeds the expected range, for instance in case of array +indices or nonexisting object keys. + +Exceptions have ids 4xx. 
+ +name / id | example message | description +------------------------------- | --------------- | ------------------------- +json.exception.out_of_range.401 | array index 3 is out of range | The provided array index @a i is larger than @a size-1. +json.exception.out_of_range.402 | array index '-' (3) is out of range | The special array index `-` in a JSON Pointer never describes a valid element of the array, but the index past the end. That is, it can only be used to add elements at this position, but not to read it. +json.exception.out_of_range.403 | key 'foo' not found | The provided key was not found in the JSON object. +json.exception.out_of_range.404 | unresolved reference token 'foo' | A reference token in a JSON Pointer could not be resolved. +json.exception.out_of_range.405 | JSON pointer has no parent | The JSON Patch operations 'remove' and 'add' can not be applied to the root element of the JSON value. +json.exception.out_of_range.406 | number overflow parsing '10E1000' | A parsed number could not be stored as without changing it to NaN or INF. +json.exception.out_of_range.407 | number overflow serializing '9223372036854775808' | UBJSON and BSON only support integer numbers up to 9223372036854775807. (until version 3.8.0) | +json.exception.out_of_range.408 | excessive array size: 8658170730974374167 | The size (following `#`) of an UBJSON array or object exceeds the maximal capacity. 
| +json.exception.out_of_range.409 | BSON key cannot contain code point U+0000 (at byte 2) | Key identifiers to be serialized to BSON cannot contain code point U+0000, since the key is stored as zero-terminated c-string | + +@liveexample{The following code shows how an `out_of_range` exception can be +caught.,out_of_range} + +@sa - @ref exception for the base class of the library exceptions +@sa - @ref parse_error for exceptions indicating a parse error +@sa - @ref invalid_iterator for exceptions indicating errors with iterators +@sa - @ref type_error for exceptions indicating executing a member function with + a wrong type +@sa - @ref other_error for exceptions indicating other library errors + +@since version 3.0.0 +*/ +class out_of_range : public exception +{ + public: + static out_of_range create(int id_, const std::string& what_arg) + { + std::string w = exception::name("out_of_range", id_) + what_arg; + return out_of_range(id_, w.c_str()); + } + + private: + JSON_HEDLEY_NON_NULL(3) + out_of_range(int id_, const char* what_arg) : exception(id_, what_arg) {} +}; + +/*! +@brief exception indicating other library errors + +This exception is thrown in case of errors that cannot be classified with the +other exception types. + +Exceptions have ids 5xx. + +name / id | example message | description +------------------------------ | --------------- | ------------------------- +json.exception.other_error.501 | unsuccessful: {"op":"test","path":"/baz", "value":"bar"} | A JSON Patch operation 'test' failed. The unsuccessful operation is also printed. 
+ +@sa - @ref exception for the base class of the library exceptions +@sa - @ref parse_error for exceptions indicating a parse error +@sa - @ref invalid_iterator for exceptions indicating errors with iterators +@sa - @ref type_error for exceptions indicating executing a member function with + a wrong type +@sa - @ref out_of_range for exceptions indicating access out of the defined range + +@liveexample{The following code shows how an `other_error` exception can be +caught.,other_error} + +@since version 3.0.0 +*/ +class other_error : public exception +{ + public: + static other_error create(int id_, const std::string& what_arg) + { + std::string w = exception::name("other_error", id_) + what_arg; + return other_error(id_, w.c_str()); + } + + private: + JSON_HEDLEY_NON_NULL(3) + other_error(int id_, const char* what_arg) : exception(id_, what_arg) {} +}; +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + + +#include // size_t +#include // conditional, enable_if, false_type, integral_constant, is_constructible, is_integral, is_same, remove_cv, remove_reference, true_type + +namespace nlohmann +{ +namespace detail +{ +// alias templates to reduce boilerplate +template +using enable_if_t = typename std::enable_if::type; + +template +using uncvref_t = typename std::remove_cv::type>::type; + +// implementation of C++14 index_sequence and affiliates +// source: https://stackoverflow.com/a/32223343 +template +struct index_sequence +{ + using type = index_sequence; + using value_type = std::size_t; + static constexpr std::size_t size() noexcept + { + return sizeof...(Ints); + } +}; + +template +struct merge_and_renumber; + +template +struct merge_and_renumber, index_sequence> + : index_sequence < I1..., (sizeof...(I1) + I2)... 
> {}; + +template +struct make_index_sequence + : merge_and_renumber < typename make_index_sequence < N / 2 >::type, + typename make_index_sequence < N - N / 2 >::type > {}; + +template<> struct make_index_sequence<0> : index_sequence<> {}; +template<> struct make_index_sequence<1> : index_sequence<0> {}; + +template +using index_sequence_for = make_index_sequence; + +// dispatch utility (taken from ranges-v3) +template struct priority_tag : priority_tag < N - 1 > {}; +template<> struct priority_tag<0> {}; + +// taken from ranges-v3 +template +struct static_const +{ + static constexpr T value{}; +}; + +template +constexpr T static_const::value; +} // namespace detail +} // namespace nlohmann + +// #include + + +#include // numeric_limits +#include // false_type, is_constructible, is_integral, is_same, true_type +#include // declval + +// #include + + +#include // random_access_iterator_tag + +// #include + + +namespace nlohmann +{ +namespace detail +{ +template struct make_void +{ + using type = void; +}; +template using void_t = typename make_void::type; +} // namespace detail +} // namespace nlohmann + +// #include + + +namespace nlohmann +{ +namespace detail +{ +template +struct iterator_types {}; + +template +struct iterator_types < + It, + void_t> +{ + using difference_type = typename It::difference_type; + using value_type = typename It::value_type; + using pointer = typename It::pointer; + using reference = typename It::reference; + using iterator_category = typename It::iterator_category; +}; + +// This is required as some compilers implement std::iterator_traits in a way that +// doesn't work with SFINAE. See https://github.com/nlohmann/json/issues/1341. 
+template +struct iterator_traits +{ +}; + +template +struct iterator_traits < T, enable_if_t < !std::is_pointer::value >> + : iterator_types +{ +}; + +template +struct iterator_traits::value>> +{ + using iterator_category = std::random_access_iterator_tag; + using value_type = T; + using difference_type = ptrdiff_t; + using pointer = T*; + using reference = T&; +}; +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + +// #include + + +#include + +// #include + + +// https://en.cppreference.com/w/cpp/experimental/is_detected +namespace nlohmann +{ +namespace detail +{ +struct nonesuch +{ + nonesuch() = delete; + ~nonesuch() = delete; + nonesuch(nonesuch const&) = delete; + nonesuch(nonesuch const&&) = delete; + void operator=(nonesuch const&) = delete; + void operator=(nonesuch&&) = delete; +}; + +template class Op, + class... Args> +struct detector +{ + using value_t = std::false_type; + using type = Default; +}; + +template class Op, class... Args> +struct detector>, Op, Args...> +{ + using value_t = std::true_type; + using type = Op; +}; + +template class Op, class... Args> +using is_detected = typename detector::value_t; + +template class Op, class... Args> +using detected_t = typename detector::type; + +template class Op, class... Args> +using detected_or = detector; + +template class Op, class... Args> +using detected_or_t = typename detected_or::type; + +template class Op, class... Args> +using is_detected_exact = std::is_same>; + +template class Op, class... Args> +using is_detected_convertible = + std::is_convertible, To>; +} // namespace detail +} // namespace nlohmann + +// #include +#ifndef INCLUDE_NLOHMANN_JSON_FWD_HPP_ +#define INCLUDE_NLOHMANN_JSON_FWD_HPP_ + +#include // int64_t, uint64_t +#include // map +#include // allocator +#include // string +#include // vector + +/*! +@brief namespace for Niels Lohmann +@see https://github.com/nlohmann +@since version 1.0.0 +*/ +namespace nlohmann +{ +/*! 
+@brief default JSONSerializer template argument + +This serializer ignores the template arguments and uses ADL +([argument-dependent lookup](https://en.cppreference.com/w/cpp/language/adl)) +for serialization. +*/ +template +struct adl_serializer; + +template class ObjectType = + std::map, + template class ArrayType = std::vector, + class StringType = std::string, class BooleanType = bool, + class NumberIntegerType = std::int64_t, + class NumberUnsignedType = std::uint64_t, + class NumberFloatType = double, + template class AllocatorType = std::allocator, + template class JSONSerializer = + adl_serializer, + class BinaryType = std::vector> +class basic_json; + +/*! +@brief JSON Pointer + +A JSON pointer defines a string syntax for identifying a specific value +within a JSON document. It can be used with functions `at` and +`operator[]`. Furthermore, JSON pointers are the base for JSON patches. + +@sa [RFC 6901](https://tools.ietf.org/html/rfc6901) + +@since version 2.0.0 +*/ +template +class json_pointer; + +/*! +@brief default JSON class + +This type is the default specialization of the @ref basic_json class which +uses the standard template types. + +@since version 1.0.0 +*/ +using json = basic_json<>; + +template +struct ordered_map; + +/*! +@brief ordered JSON class + +This type preserves the insertion order of object keys. + +@since version 3.9.0 +*/ +using ordered_json = basic_json; + +} // namespace nlohmann + +#endif // INCLUDE_NLOHMANN_JSON_FWD_HPP_ + + +namespace nlohmann +{ +/*! +@brief detail namespace with internal helper functions + +This namespace collects functions that should not be exposed, +implementations of some @ref basic_json methods, and meta-programming helpers. + +@since version 2.1.0 +*/ +namespace detail +{ +///////////// +// helpers // +///////////// + +// Note to maintainers: +// +// Every trait in this file expects a non CV-qualified type. +// The only exceptions are in the 'aliases for detected' section +// (i.e. 
those of the form: decltype(T::member_function(std::declval()))) +// +// In this case, T has to be properly CV-qualified to constraint the function arguments +// (e.g. to_json(BasicJsonType&, const T&)) + +template struct is_basic_json : std::false_type {}; + +NLOHMANN_BASIC_JSON_TPL_DECLARATION +struct is_basic_json : std::true_type {}; + +////////////////////// +// json_ref helpers // +////////////////////// + +template +class json_ref; + +template +struct is_json_ref : std::false_type {}; + +template +struct is_json_ref> : std::true_type {}; + +////////////////////////// +// aliases for detected // +////////////////////////// + +template +using mapped_type_t = typename T::mapped_type; + +template +using key_type_t = typename T::key_type; + +template +using value_type_t = typename T::value_type; + +template +using difference_type_t = typename T::difference_type; + +template +using pointer_t = typename T::pointer; + +template +using reference_t = typename T::reference; + +template +using iterator_category_t = typename T::iterator_category; + +template +using iterator_t = typename T::iterator; + +template +using to_json_function = decltype(T::to_json(std::declval()...)); + +template +using from_json_function = decltype(T::from_json(std::declval()...)); + +template +using get_template_function = decltype(std::declval().template get()); + +// trait checking if JSONSerializer::from_json(json const&, udt&) exists +template +struct has_from_json : std::false_type {}; + +// trait checking if j.get is valid +// use this trait instead of std::is_constructible or std::is_convertible, +// both rely on, or make use of implicit conversions, and thus fail when T +// has several constructors/operator= (see https://github.com/nlohmann/json/issues/958) +template +struct is_getable +{ + static constexpr bool value = is_detected::value; +}; + +template +struct has_from_json < BasicJsonType, T, + enable_if_t < !is_basic_json::value >> +{ + using serializer = typename 
BasicJsonType::template json_serializer; + + static constexpr bool value = + is_detected_exact::value; +}; + +// This trait checks if JSONSerializer::from_json(json const&) exists +// this overload is used for non-default-constructible user-defined-types +template +struct has_non_default_from_json : std::false_type {}; + +template +struct has_non_default_from_json < BasicJsonType, T, enable_if_t < !is_basic_json::value >> +{ + using serializer = typename BasicJsonType::template json_serializer; + + static constexpr bool value = + is_detected_exact::value; +}; + +// This trait checks if BasicJsonType::json_serializer::to_json exists +// Do not evaluate the trait when T is a basic_json type, to avoid template instantiation infinite recursion. +template +struct has_to_json : std::false_type {}; + +template +struct has_to_json < BasicJsonType, T, enable_if_t < !is_basic_json::value >> +{ + using serializer = typename BasicJsonType::template json_serializer; + + static constexpr bool value = + is_detected_exact::value; +}; + + +/////////////////// +// is_ functions // +/////////////////// + +template +struct is_iterator_traits : std::false_type {}; + +template +struct is_iterator_traits> +{ + private: + using traits = iterator_traits; + + public: + static constexpr auto value = + is_detected::value && + is_detected::value && + is_detected::value && + is_detected::value && + is_detected::value; +}; + +// source: https://stackoverflow.com/a/37193089/4116453 + +template +struct is_complete_type : std::false_type {}; + +template +struct is_complete_type : std::true_type {}; + +template +struct is_compatible_object_type_impl : std::false_type {}; + +template +struct is_compatible_object_type_impl < + BasicJsonType, CompatibleObjectType, + enable_if_t < is_detected::value&& + is_detected::value >> +{ + + using object_t = typename BasicJsonType::object_t; + + // macOS's is_constructible does not play well with nonesuch... 
+ static constexpr bool value = + std::is_constructible::value && + std::is_constructible::value; +}; + +template +struct is_compatible_object_type + : is_compatible_object_type_impl {}; + +template +struct is_constructible_object_type_impl : std::false_type {}; + +template +struct is_constructible_object_type_impl < + BasicJsonType, ConstructibleObjectType, + enable_if_t < is_detected::value&& + is_detected::value >> +{ + using object_t = typename BasicJsonType::object_t; + + static constexpr bool value = + (std::is_default_constructible::value && + (std::is_move_assignable::value || + std::is_copy_assignable::value) && + (std::is_constructible::value && + std::is_same < + typename object_t::mapped_type, + typename ConstructibleObjectType::mapped_type >::value)) || + (has_from_json::value || + has_non_default_from_json < + BasicJsonType, + typename ConstructibleObjectType::mapped_type >::value); +}; + +template +struct is_constructible_object_type + : is_constructible_object_type_impl {}; + +template +struct is_compatible_string_type_impl : std::false_type {}; + +template +struct is_compatible_string_type_impl < + BasicJsonType, CompatibleStringType, + enable_if_t::value >> +{ + static constexpr auto value = + std::is_constructible::value; +}; + +template +struct is_compatible_string_type + : is_compatible_string_type_impl {}; + +template +struct is_constructible_string_type_impl : std::false_type {}; + +template +struct is_constructible_string_type_impl < + BasicJsonType, ConstructibleStringType, + enable_if_t::value >> +{ + static constexpr auto value = + std::is_constructible::value; +}; + +template +struct is_constructible_string_type + : is_constructible_string_type_impl {}; + +template +struct is_compatible_array_type_impl : std::false_type {}; + +template +struct is_compatible_array_type_impl < + BasicJsonType, CompatibleArrayType, + enable_if_t < is_detected::value&& + is_detected::value&& +// This is needed because json_reverse_iterator has a ::iterator 
type... +// Therefore it is detected as a CompatibleArrayType. +// The real fix would be to have an Iterable concept. + !is_iterator_traits < + iterator_traits>::value >> +{ + static constexpr bool value = + std::is_constructible::value; +}; + +template +struct is_compatible_array_type + : is_compatible_array_type_impl {}; + +template +struct is_constructible_array_type_impl : std::false_type {}; + +template +struct is_constructible_array_type_impl < + BasicJsonType, ConstructibleArrayType, + enable_if_t::value >> + : std::true_type {}; + +template +struct is_constructible_array_type_impl < + BasicJsonType, ConstructibleArrayType, + enable_if_t < !std::is_same::value&& + std::is_default_constructible::value&& +(std::is_move_assignable::value || + std::is_copy_assignable::value)&& +is_detected::value&& +is_detected::value&& +is_complete_type < +detected_t>::value >> +{ + static constexpr bool value = + // This is needed because json_reverse_iterator has a ::iterator type, + // furthermore, std::back_insert_iterator (and other iterators) have a + // base class `iterator`... Therefore it is detected as a + // ConstructibleArrayType. The real fix would be to have an Iterable + // concept. + !is_iterator_traits>::value && + + (std::is_same::value || + has_from_json::value || + has_non_default_from_json < + BasicJsonType, typename ConstructibleArrayType::value_type >::value); +}; + +template +struct is_constructible_array_type + : is_constructible_array_type_impl {}; + +template +struct is_compatible_integer_type_impl : std::false_type {}; + +template +struct is_compatible_integer_type_impl < + RealIntegerType, CompatibleNumberIntegerType, + enable_if_t < std::is_integral::value&& + std::is_integral::value&& + !std::is_same::value >> +{ + // is there an assert somewhere on overflows? 
+ using RealLimits = std::numeric_limits; + using CompatibleLimits = std::numeric_limits; + + static constexpr auto value = + std::is_constructible::value && + CompatibleLimits::is_integer && + RealLimits::is_signed == CompatibleLimits::is_signed; +}; + +template +struct is_compatible_integer_type + : is_compatible_integer_type_impl {}; + +template +struct is_compatible_type_impl: std::false_type {}; + +template +struct is_compatible_type_impl < + BasicJsonType, CompatibleType, + enable_if_t::value >> +{ + static constexpr bool value = + has_to_json::value; +}; + +template +struct is_compatible_type + : is_compatible_type_impl {}; + +// https://en.cppreference.com/w/cpp/types/conjunction +template struct conjunction : std::true_type { }; +template struct conjunction : B1 { }; +template +struct conjunction +: std::conditional, B1>::type {}; + +template +struct is_constructible_tuple : std::false_type {}; + +template +struct is_constructible_tuple> : conjunction...> {}; +} // namespace detail +} // namespace nlohmann + +// #include + + +#include // array +#include // size_t +#include // uint8_t +#include // string + +namespace nlohmann +{ +namespace detail +{ +/////////////////////////// +// JSON type enumeration // +/////////////////////////// + +/*! +@brief the JSON type enumeration + +This enumeration collects the different JSON types. It is internally used to +distinguish the stored values, and the functions @ref basic_json::is_null(), +@ref basic_json::is_object(), @ref basic_json::is_array(), +@ref basic_json::is_string(), @ref basic_json::is_boolean(), +@ref basic_json::is_number() (with @ref basic_json::is_number_integer(), +@ref basic_json::is_number_unsigned(), and @ref basic_json::is_number_float()), +@ref basic_json::is_discarded(), @ref basic_json::is_primitive(), and +@ref basic_json::is_structured() rely on it. 
+ +@note There are three enumeration entries (number_integer, number_unsigned, and +number_float), because the library distinguishes these three types for numbers: +@ref basic_json::number_unsigned_t is used for unsigned integers, +@ref basic_json::number_integer_t is used for signed integers, and +@ref basic_json::number_float_t is used for floating-point numbers or to +approximate integers which do not fit in the limits of their respective type. + +@sa @ref basic_json::basic_json(const value_t value_type) -- create a JSON +value with the default value for a given type + +@since version 1.0.0 +*/ +enum class value_t : std::uint8_t +{ + null, ///< null value + object, ///< object (unordered set of name/value pairs) + array, ///< array (ordered collection of values) + string, ///< string value + boolean, ///< boolean value + number_integer, ///< number value (signed integer) + number_unsigned, ///< number value (unsigned integer) + number_float, ///< number value (floating-point) + binary, ///< binary array (ordered collection of bytes) + discarded ///< discarded by the parser callback function +}; + +/*! +@brief comparison operator for JSON types + +Returns an ordering that is similar to Python: +- order: null < boolean < number < object < array < string < binary +- furthermore, each type is not smaller than itself +- discarded values are not comparable +- binary is represented as a b"" string in python and directly comparable to a + string; however, making a binary array directly comparable with a string would + be surprising behavior in a JSON file. 
+ +@since version 1.0.0 +*/ +inline bool operator<(const value_t lhs, const value_t rhs) noexcept +{ + static constexpr std::array order = {{ + 0 /* null */, 3 /* object */, 4 /* array */, 5 /* string */, + 1 /* boolean */, 2 /* integer */, 2 /* unsigned */, 2 /* float */, + 6 /* binary */ + } + }; + + const auto l_index = static_cast(lhs); + const auto r_index = static_cast(rhs); + return l_index < order.size() && r_index < order.size() && order[l_index] < order[r_index]; +} +} // namespace detail +} // namespace nlohmann + + +namespace nlohmann +{ +namespace detail +{ +template +void from_json(const BasicJsonType& j, typename std::nullptr_t& n) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_null())) + { + JSON_THROW(type_error::create(302, "type must be null, but is " + std::string(j.type_name()))); + } + n = nullptr; +} + +// overloads for basic_json template parameters +template < typename BasicJsonType, typename ArithmeticType, + enable_if_t < std::is_arithmetic::value&& + !std::is_same::value, + int > = 0 > +void get_arithmetic_value(const BasicJsonType& j, ArithmeticType& val) +{ + switch (static_cast(j)) + { + case value_t::number_unsigned: + { + val = static_cast(*j.template get_ptr()); + break; + } + case value_t::number_integer: + { + val = static_cast(*j.template get_ptr()); + break; + } + case value_t::number_float: + { + val = static_cast(*j.template get_ptr()); + break; + } + + default: + JSON_THROW(type_error::create(302, "type must be number, but is " + std::string(j.type_name()))); + } +} + +template +void from_json(const BasicJsonType& j, typename BasicJsonType::boolean_t& b) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_boolean())) + { + JSON_THROW(type_error::create(302, "type must be boolean, but is " + std::string(j.type_name()))); + } + b = *j.template get_ptr(); +} + +template +void from_json(const BasicJsonType& j, typename BasicJsonType::string_t& s) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_string())) + { + JSON_THROW(type_error::create(302, "type must be string, 
but is " + std::string(j.type_name()))); + } + s = *j.template get_ptr(); +} + +template < + typename BasicJsonType, typename ConstructibleStringType, + enable_if_t < + is_constructible_string_type::value&& + !std::is_same::value, + int > = 0 > +void from_json(const BasicJsonType& j, ConstructibleStringType& s) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_string())) + { + JSON_THROW(type_error::create(302, "type must be string, but is " + std::string(j.type_name()))); + } + + s = *j.template get_ptr(); +} + +template +void from_json(const BasicJsonType& j, typename BasicJsonType::number_float_t& val) +{ + get_arithmetic_value(j, val); +} + +template +void from_json(const BasicJsonType& j, typename BasicJsonType::number_unsigned_t& val) +{ + get_arithmetic_value(j, val); +} + +template +void from_json(const BasicJsonType& j, typename BasicJsonType::number_integer_t& val) +{ + get_arithmetic_value(j, val); +} + +template::value, int> = 0> +void from_json(const BasicJsonType& j, EnumType& e) +{ + typename std::underlying_type::type val; + get_arithmetic_value(j, val); + e = static_cast(val); +} + +// forward_list doesn't have an insert method +template::value, int> = 0> +void from_json(const BasicJsonType& j, std::forward_list& l) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(j.type_name()))); + } + l.clear(); + std::transform(j.rbegin(), j.rend(), + std::front_inserter(l), [](const BasicJsonType & i) + { + return i.template get(); + }); +} + +// valarray doesn't have an insert method +template::value, int> = 0> +void from_json(const BasicJsonType& j, std::valarray& l) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(j.type_name()))); + } + l.resize(j.size()); + std::transform(j.begin(), j.end(), std::begin(l), + [](const BasicJsonType & elem) + { + return elem.template get(); + }); +} + +template +auto 
from_json(const BasicJsonType& j, T (&arr)[N]) +-> decltype(j.template get(), void()) +{ + for (std::size_t i = 0; i < N; ++i) + { + arr[i] = j.at(i).template get(); + } +} + +template +void from_json_array_impl(const BasicJsonType& j, typename BasicJsonType::array_t& arr, priority_tag<3> /*unused*/) +{ + arr = *j.template get_ptr(); +} + +template +auto from_json_array_impl(const BasicJsonType& j, std::array& arr, + priority_tag<2> /*unused*/) +-> decltype(j.template get(), void()) +{ + for (std::size_t i = 0; i < N; ++i) + { + arr[i] = j.at(i).template get(); + } +} + +template +auto from_json_array_impl(const BasicJsonType& j, ConstructibleArrayType& arr, priority_tag<1> /*unused*/) +-> decltype( + arr.reserve(std::declval()), + j.template get(), + void()) +{ + using std::end; + + ConstructibleArrayType ret; + ret.reserve(j.size()); + std::transform(j.begin(), j.end(), + std::inserter(ret, end(ret)), [](const BasicJsonType & i) + { + // get() returns *this, this won't call a from_json + // method when value_type is BasicJsonType + return i.template get(); + }); + arr = std::move(ret); +} + +template +void from_json_array_impl(const BasicJsonType& j, ConstructibleArrayType& arr, + priority_tag<0> /*unused*/) +{ + using std::end; + + ConstructibleArrayType ret; + std::transform( + j.begin(), j.end(), std::inserter(ret, end(ret)), + [](const BasicJsonType & i) + { + // get() returns *this, this won't call a from_json + // method when value_type is BasicJsonType + return i.template get(); + }); + arr = std::move(ret); +} + +template < typename BasicJsonType, typename ConstructibleArrayType, + enable_if_t < + is_constructible_array_type::value&& + !is_constructible_object_type::value&& + !is_constructible_string_type::value&& + !std::is_same::value&& + !is_basic_json::value, + int > = 0 > +auto from_json(const BasicJsonType& j, ConstructibleArrayType& arr) +-> decltype(from_json_array_impl(j, arr, priority_tag<3> {}), +j.template get(), +void()) +{ + if 
(JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, "type must be array, but is " + + std::string(j.type_name()))); + } + + from_json_array_impl(j, arr, priority_tag<3> {}); +} + +template +void from_json(const BasicJsonType& j, typename BasicJsonType::binary_t& bin) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_binary())) + { + JSON_THROW(type_error::create(302, "type must be binary, but is " + std::string(j.type_name()))); + } + + bin = *j.template get_ptr(); +} + +template::value, int> = 0> +void from_json(const BasicJsonType& j, ConstructibleObjectType& obj) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_object())) + { + JSON_THROW(type_error::create(302, "type must be object, but is " + std::string(j.type_name()))); + } + + ConstructibleObjectType ret; + auto inner_object = j.template get_ptr(); + using value_type = typename ConstructibleObjectType::value_type; + std::transform( + inner_object->begin(), inner_object->end(), + std::inserter(ret, ret.begin()), + [](typename BasicJsonType::object_t::value_type const & p) + { + return value_type(p.first, p.second.template get()); + }); + obj = std::move(ret); +} + +// overload for arithmetic types, not chosen for basic_json template arguments +// (BooleanType, etc..); note: Is it really necessary to provide explicit +// overloads for boolean_t etc. in case of a custom BooleanType which is not +// an arithmetic type? 
+template < typename BasicJsonType, typename ArithmeticType, + enable_if_t < + std::is_arithmetic::value&& + !std::is_same::value&& + !std::is_same::value&& + !std::is_same::value&& + !std::is_same::value, + int > = 0 > +void from_json(const BasicJsonType& j, ArithmeticType& val) +{ + switch (static_cast(j)) + { + case value_t::number_unsigned: + { + val = static_cast(*j.template get_ptr()); + break; + } + case value_t::number_integer: + { + val = static_cast(*j.template get_ptr()); + break; + } + case value_t::number_float: + { + val = static_cast(*j.template get_ptr()); + break; + } + case value_t::boolean: + { + val = static_cast(*j.template get_ptr()); + break; + } + + default: + JSON_THROW(type_error::create(302, "type must be number, but is " + std::string(j.type_name()))); + } +} + +template +void from_json(const BasicJsonType& j, std::pair& p) +{ + p = {j.at(0).template get(), j.at(1).template get()}; +} + +template +void from_json_tuple_impl(const BasicJsonType& j, Tuple& t, index_sequence /*unused*/) +{ + t = std::make_tuple(j.at(Idx).template get::type>()...); +} + +template +void from_json(const BasicJsonType& j, std::tuple& t) +{ + from_json_tuple_impl(j, t, index_sequence_for {}); +} + +template < typename BasicJsonType, typename Key, typename Value, typename Compare, typename Allocator, + typename = enable_if_t < !std::is_constructible < + typename BasicJsonType::string_t, Key >::value >> +void from_json(const BasicJsonType& j, std::map& m) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(j.type_name()))); + } + m.clear(); + for (const auto& p : j) + { + if (JSON_HEDLEY_UNLIKELY(!p.is_array())) + { + JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(p.type_name()))); + } + m.emplace(p.at(0).template get(), p.at(1).template get()); + } +} + +template < typename BasicJsonType, typename Key, typename Value, typename Hash, typename KeyEqual, 
typename Allocator, + typename = enable_if_t < !std::is_constructible < + typename BasicJsonType::string_t, Key >::value >> +void from_json(const BasicJsonType& j, std::unordered_map& m) +{ + if (JSON_HEDLEY_UNLIKELY(!j.is_array())) + { + JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(j.type_name()))); + } + m.clear(); + for (const auto& p : j) + { + if (JSON_HEDLEY_UNLIKELY(!p.is_array())) + { + JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(p.type_name()))); + } + m.emplace(p.at(0).template get(), p.at(1).template get()); + } +} + +struct from_json_fn +{ + template + auto operator()(const BasicJsonType& j, T& val) const + noexcept(noexcept(from_json(j, val))) + -> decltype(from_json(j, val), void()) + { + return from_json(j, val); + } +}; +} // namespace detail + +/// namespace to hold default `from_json` function +/// to see why this is required: +/// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2015/n4381.html +namespace +{ +constexpr const auto& from_json = detail::static_const::value; +} // namespace +} // namespace nlohmann + +// #include + + +#include // copy +#include // begin, end +#include // string +#include // tuple, get +#include // is_same, is_constructible, is_floating_point, is_enum, underlying_type +#include // move, forward, declval, pair +#include // valarray +#include // vector + +// #include + + +#include // size_t +#include // input_iterator_tag +#include // string, to_string +#include // tuple_size, get, tuple_element + +// #include + +// #include + + +namespace nlohmann +{ +namespace detail +{ +template +void int_to_string( string_type& target, std::size_t value ) +{ + // For ADL + using std::to_string; + target = to_string(value); +} +template class iteration_proxy_value +{ + public: + using difference_type = std::ptrdiff_t; + using value_type = iteration_proxy_value; + using pointer = value_type * ; + using reference = value_type & ; + using iterator_category = 
std::input_iterator_tag; + using string_type = typename std::remove_cv< typename std::remove_reference().key() ) >::type >::type; + + private: + /// the iterator + IteratorType anchor; + /// an index for arrays (used to create key names) + std::size_t array_index = 0; + /// last stringified array index + mutable std::size_t array_index_last = 0; + /// a string representation of the array index + mutable string_type array_index_str = "0"; + /// an empty string (to return a reference for primitive values) + const string_type empty_str = ""; + + public: + explicit iteration_proxy_value(IteratorType it) noexcept : anchor(it) {} + + /// dereference operator (needed for range-based for) + iteration_proxy_value& operator*() + { + return *this; + } + + /// increment operator (needed for range-based for) + iteration_proxy_value& operator++() + { + ++anchor; + ++array_index; + + return *this; + } + + /// equality operator (needed for InputIterator) + bool operator==(const iteration_proxy_value& o) const + { + return anchor == o.anchor; + } + + /// inequality operator (needed for range-based for) + bool operator!=(const iteration_proxy_value& o) const + { + return anchor != o.anchor; + } + + /// return key of the iterator + const string_type& key() const + { + JSON_ASSERT(anchor.m_object != nullptr); + + switch (anchor.m_object->type()) + { + // use integer array index as key + case value_t::array: + { + if (array_index != array_index_last) + { + int_to_string( array_index_str, array_index ); + array_index_last = array_index; + } + return array_index_str; + } + + // use key from the object + case value_t::object: + return anchor.key(); + + // use an empty key for all primitive types + default: + return empty_str; + } + } + + /// return value of the iterator + typename IteratorType::reference value() const + { + return anchor.value(); + } +}; + +/// proxy class for the items() function +template class iteration_proxy +{ + private: + /// the container to iterate + typename 
IteratorType::reference container; + + public: + /// construct iteration proxy from a container + explicit iteration_proxy(typename IteratorType::reference cont) noexcept + : container(cont) {} + + /// return iterator begin (needed for range-based for) + iteration_proxy_value begin() noexcept + { + return iteration_proxy_value(container.begin()); + } + + /// return iterator end (needed for range-based for) + iteration_proxy_value end() noexcept + { + return iteration_proxy_value(container.end()); + } +}; +// Structured Bindings Support +// For further reference see https://blog.tartanllama.xyz/structured-bindings/ +// And see https://github.com/nlohmann/json/pull/1391 +template = 0> +auto get(const nlohmann::detail::iteration_proxy_value& i) -> decltype(i.key()) +{ + return i.key(); +} +// Structured Bindings Support +// For further reference see https://blog.tartanllama.xyz/structured-bindings/ +// And see https://github.com/nlohmann/json/pull/1391 +template = 0> +auto get(const nlohmann::detail::iteration_proxy_value& i) -> decltype(i.value()) +{ + return i.value(); +} +} // namespace detail +} // namespace nlohmann + +// The Addition to the STD Namespace is required to add +// Structured Bindings Support to the iteration_proxy_value class +// For further reference see https://blog.tartanllama.xyz/structured-bindings/ +// And see https://github.com/nlohmann/json/pull/1391 +namespace std +{ +#if defined(__clang__) + // Fix: https://github.com/nlohmann/json/issues/1401 + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wmismatched-tags" +#endif +template +class tuple_size<::nlohmann::detail::iteration_proxy_value> + : public std::integral_constant {}; + +template +class tuple_element> +{ + public: + using type = decltype( + get(std::declval < + ::nlohmann::detail::iteration_proxy_value> ())); +}; +#if defined(__clang__) + #pragma clang diagnostic pop +#endif +} // namespace std + +// #include + +// #include + +// #include + + +namespace nlohmann 
+{ +namespace detail +{ +////////////////// +// constructors // +////////////////// + +template struct external_constructor; + +template<> +struct external_constructor +{ + template + static void construct(BasicJsonType& j, typename BasicJsonType::boolean_t b) noexcept + { + j.m_type = value_t::boolean; + j.m_value = b; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor +{ + template + static void construct(BasicJsonType& j, const typename BasicJsonType::string_t& s) + { + j.m_type = value_t::string; + j.m_value = s; + j.assert_invariant(); + } + + template + static void construct(BasicJsonType& j, typename BasicJsonType::string_t&& s) + { + j.m_type = value_t::string; + j.m_value = std::move(s); + j.assert_invariant(); + } + + template < typename BasicJsonType, typename CompatibleStringType, + enable_if_t < !std::is_same::value, + int > = 0 > + static void construct(BasicJsonType& j, const CompatibleStringType& str) + { + j.m_type = value_t::string; + j.m_value.string = j.template create(str); + j.assert_invariant(); + } +}; + +template<> +struct external_constructor +{ + template + static void construct(BasicJsonType& j, const typename BasicJsonType::binary_t& b) + { + j.m_type = value_t::binary; + typename BasicJsonType::binary_t value{b}; + j.m_value = value; + j.assert_invariant(); + } + + template + static void construct(BasicJsonType& j, typename BasicJsonType::binary_t&& b) + { + j.m_type = value_t::binary; + typename BasicJsonType::binary_t value{std::move(b)}; + j.m_value = value; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor +{ + template + static void construct(BasicJsonType& j, typename BasicJsonType::number_float_t val) noexcept + { + j.m_type = value_t::number_float; + j.m_value = val; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor +{ + template + static void construct(BasicJsonType& j, typename BasicJsonType::number_unsigned_t val) noexcept + { + j.m_type = 
value_t::number_unsigned; + j.m_value = val; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor +{ + template + static void construct(BasicJsonType& j, typename BasicJsonType::number_integer_t val) noexcept + { + j.m_type = value_t::number_integer; + j.m_value = val; + j.assert_invariant(); + } +}; + +template<> +struct external_constructor +{ + template + static void construct(BasicJsonType& j, const typename BasicJsonType::array_t& arr) + { + j.m_type = value_t::array; + j.m_value = arr; + j.assert_invariant(); + } + + template + static void construct(BasicJsonType& j, typename BasicJsonType::array_t&& arr) + { + j.m_type = value_t::array; + j.m_value = std::move(arr); + j.assert_invariant(); + } + + template < typename BasicJsonType, typename CompatibleArrayType, + enable_if_t < !std::is_same::value, + int > = 0 > + static void construct(BasicJsonType& j, const CompatibleArrayType& arr) + { + using std::begin; + using std::end; + j.m_type = value_t::array; + j.m_value.array = j.template create(begin(arr), end(arr)); + j.assert_invariant(); + } + + template + static void construct(BasicJsonType& j, const std::vector& arr) + { + j.m_type = value_t::array; + j.m_value = value_t::array; + j.m_value.array->reserve(arr.size()); + for (const bool x : arr) + { + j.m_value.array->push_back(x); + } + j.assert_invariant(); + } + + template::value, int> = 0> + static void construct(BasicJsonType& j, const std::valarray& arr) + { + j.m_type = value_t::array; + j.m_value = value_t::array; + j.m_value.array->resize(arr.size()); + if (arr.size() > 0) + { + std::copy(std::begin(arr), std::end(arr), j.m_value.array->begin()); + } + j.assert_invariant(); + } +}; + +template<> +struct external_constructor +{ + template + static void construct(BasicJsonType& j, const typename BasicJsonType::object_t& obj) + { + j.m_type = value_t::object; + j.m_value = obj; + j.assert_invariant(); + } + + template + static void construct(BasicJsonType& j, typename 
BasicJsonType::object_t&& obj) + { + j.m_type = value_t::object; + j.m_value = std::move(obj); + j.assert_invariant(); + } + + template < typename BasicJsonType, typename CompatibleObjectType, + enable_if_t < !std::is_same::value, int > = 0 > + static void construct(BasicJsonType& j, const CompatibleObjectType& obj) + { + using std::begin; + using std::end; + + j.m_type = value_t::object; + j.m_value.object = j.template create(begin(obj), end(obj)); + j.assert_invariant(); + } +}; + +///////////// +// to_json // +///////////// + +template::value, int> = 0> +void to_json(BasicJsonType& j, T b) noexcept +{ + external_constructor::construct(j, b); +} + +template::value, int> = 0> +void to_json(BasicJsonType& j, const CompatibleString& s) +{ + external_constructor::construct(j, s); +} + +template +void to_json(BasicJsonType& j, typename BasicJsonType::string_t&& s) +{ + external_constructor::construct(j, std::move(s)); +} + +template::value, int> = 0> +void to_json(BasicJsonType& j, FloatType val) noexcept +{ + external_constructor::construct(j, static_cast(val)); +} + +template::value, int> = 0> +void to_json(BasicJsonType& j, CompatibleNumberUnsignedType val) noexcept +{ + external_constructor::construct(j, static_cast(val)); +} + +template::value, int> = 0> +void to_json(BasicJsonType& j, CompatibleNumberIntegerType val) noexcept +{ + external_constructor::construct(j, static_cast(val)); +} + +template::value, int> = 0> +void to_json(BasicJsonType& j, EnumType e) noexcept +{ + using underlying_type = typename std::underlying_type::type; + external_constructor::construct(j, static_cast(e)); +} + +template +void to_json(BasicJsonType& j, const std::vector& e) +{ + external_constructor::construct(j, e); +} + +template < typename BasicJsonType, typename CompatibleArrayType, + enable_if_t < is_compatible_array_type::value&& + !is_compatible_object_type::value&& + !is_compatible_string_type::value&& + !std::is_same::value&& + !is_basic_json::value, + int > = 0 > +void 
to_json(BasicJsonType& j, const CompatibleArrayType& arr) +{ + external_constructor::construct(j, arr); +} + +template +void to_json(BasicJsonType& j, const typename BasicJsonType::binary_t& bin) +{ + external_constructor::construct(j, bin); +} + +template::value, int> = 0> +void to_json(BasicJsonType& j, const std::valarray& arr) +{ + external_constructor::construct(j, std::move(arr)); +} + +template +void to_json(BasicJsonType& j, typename BasicJsonType::array_t&& arr) +{ + external_constructor::construct(j, std::move(arr)); +} + +template < typename BasicJsonType, typename CompatibleObjectType, + enable_if_t < is_compatible_object_type::value&& !is_basic_json::value, int > = 0 > +void to_json(BasicJsonType& j, const CompatibleObjectType& obj) +{ + external_constructor::construct(j, obj); +} + +template +void to_json(BasicJsonType& j, typename BasicJsonType::object_t&& obj) +{ + external_constructor::construct(j, std::move(obj)); +} + +template < + typename BasicJsonType, typename T, std::size_t N, + enable_if_t < !std::is_constructible::value, + int > = 0 > +void to_json(BasicJsonType& j, const T(&arr)[N]) +{ + external_constructor::construct(j, arr); +} + +template < typename BasicJsonType, typename T1, typename T2, enable_if_t < std::is_constructible::value&& std::is_constructible::value, int > = 0 > +void to_json(BasicJsonType& j, const std::pair& p) +{ + j = { p.first, p.second }; +} + +// for https://github.com/nlohmann/json/pull/1134 +template>::value, int> = 0> +void to_json(BasicJsonType& j, const T& b) +{ + j = { {b.key(), b.value()} }; +} + +template +void to_json_tuple_impl(BasicJsonType& j, const Tuple& t, index_sequence /*unused*/) +{ + j = { std::get(t)... 
}; +} + +template::value, int > = 0> +void to_json(BasicJsonType& j, const T& t) +{ + to_json_tuple_impl(j, t, make_index_sequence::value> {}); +} + +struct to_json_fn +{ + template + auto operator()(BasicJsonType& j, T&& val) const noexcept(noexcept(to_json(j, std::forward(val)))) + -> decltype(to_json(j, std::forward(val)), void()) + { + return to_json(j, std::forward(val)); + } +}; +} // namespace detail + +/// namespace to hold default `to_json` function +namespace +{ +constexpr const auto& to_json = detail::static_const::value; +} // namespace +} // namespace nlohmann + + +namespace nlohmann +{ + +template +struct adl_serializer +{ + /*! + @brief convert a JSON value to any value type + + This function is usually called by the `get()` function of the + @ref basic_json class (either explicit or via conversion operators). + + @param[in] j JSON value to read from + @param[in,out] val value to write to + */ + template + static auto from_json(BasicJsonType&& j, ValueType& val) noexcept( + noexcept(::nlohmann::from_json(std::forward(j), val))) + -> decltype(::nlohmann::from_json(std::forward(j), val), void()) + { + ::nlohmann::from_json(std::forward(j), val); + } + + /*! + @brief convert any value type to a JSON value + + This function is usually called by the constructors of the @ref basic_json + class. + + @param[in,out] j JSON value to write to + @param[in] val value to read from + */ + template + static auto to_json(BasicJsonType& j, ValueType&& val) noexcept( + noexcept(::nlohmann::to_json(j, std::forward(val)))) + -> decltype(::nlohmann::to_json(j, std::forward(val)), void()) + { + ::nlohmann::to_json(j, std::forward(val)); + } +}; + +} // namespace nlohmann + +// #include + + +#include // uint8_t +#include // tie +#include // move + +namespace nlohmann +{ + +/*! +@brief an internal type for a backed binary type + +This type extends the template parameter @a BinaryType provided to `basic_json` +with a subtype used by BSON and MessagePack. 
This type exists so that the user +does not have to specify a type themselves with a specific naming scheme in +order to override the binary type. + +@tparam BinaryType container to store bytes (`std::vector` by + default) + +@since version 3.8.0 +*/ +template +class byte_container_with_subtype : public BinaryType +{ + public: + /// the type of the underlying container + using container_type = BinaryType; + + byte_container_with_subtype() noexcept(noexcept(container_type())) + : container_type() + {} + + byte_container_with_subtype(const container_type& b) noexcept(noexcept(container_type(b))) + : container_type(b) + {} + + byte_container_with_subtype(container_type&& b) noexcept(noexcept(container_type(std::move(b)))) + : container_type(std::move(b)) + {} + + byte_container_with_subtype(const container_type& b, std::uint8_t subtype) noexcept(noexcept(container_type(b))) + : container_type(b) + , m_subtype(subtype) + , m_has_subtype(true) + {} + + byte_container_with_subtype(container_type&& b, std::uint8_t subtype) noexcept(noexcept(container_type(std::move(b)))) + : container_type(std::move(b)) + , m_subtype(subtype) + , m_has_subtype(true) + {} + + bool operator==(const byte_container_with_subtype& rhs) const + { + return std::tie(static_cast(*this), m_subtype, m_has_subtype) == + std::tie(static_cast(rhs), rhs.m_subtype, rhs.m_has_subtype); + } + + bool operator!=(const byte_container_with_subtype& rhs) const + { + return !(rhs == *this); + } + + /*! + @brief sets the binary subtype + + Sets the binary subtype of the value, also flags a binary JSON value as + having a subtype, which has implications for serialization. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. 
+ + @sa @ref subtype() -- return the binary subtype + @sa @ref clear_subtype() -- clears the binary subtype + @sa @ref has_subtype() -- returns whether or not the binary value has a + subtype + + @since version 3.8.0 + */ + void set_subtype(std::uint8_t subtype) noexcept + { + m_subtype = subtype; + m_has_subtype = true; + } + + /*! + @brief return the binary subtype + + Returns the numerical subtype of the value if it has a subtype. If it does + not have a subtype, this function will return size_t(-1) as a sentinel + value. + + @return the numerical subtype of the binary value + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @sa @ref set_subtype() -- sets the binary subtype + @sa @ref clear_subtype() -- clears the binary subtype + @sa @ref has_subtype() -- returns whether or not the binary value has a + subtype + + @since version 3.8.0 + */ + constexpr std::uint8_t subtype() const noexcept + { + return m_subtype; + } + + /*! + @brief return whether the value has a subtype + + @return whether the value has a subtype + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @sa @ref subtype() -- return the binary subtype + @sa @ref set_subtype() -- sets the binary subtype + @sa @ref clear_subtype() -- clears the binary subtype + + @since version 3.8.0 + */ + constexpr bool has_subtype() const noexcept + { + return m_has_subtype; + } + + /*! + @brief clears the binary subtype + + Clears the binary subtype and flags the value as not having a subtype, which + has implications for serialization; for instance MessagePack will prefer the + bin family over the ext family. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. 
+ + @sa @ref subtype() -- return the binary subtype + @sa @ref set_subtype() -- sets the binary subtype + @sa @ref has_subtype() -- returns whether or not the binary value has a + subtype + + @since version 3.8.0 + */ + void clear_subtype() noexcept + { + m_subtype = 0; + m_has_subtype = false; + } + + private: + std::uint8_t m_subtype = 0; + bool m_has_subtype = false; +}; + +} // namespace nlohmann + +// #include + +// #include + +// #include + +// #include + + +#include // size_t, uint8_t +#include // hash + +namespace nlohmann +{ +namespace detail +{ + +// boost::hash_combine +inline std::size_t combine(std::size_t seed, std::size_t h) noexcept +{ + seed ^= h + 0x9e3779b9 + (seed << 6U) + (seed >> 2U); + return seed; +} + +/*! +@brief hash a JSON value + +The hash function tries to rely on std::hash where possible. Furthermore, the +type of the JSON value is taken into account to have different hash values for +null, 0, 0U, and false, etc. + +@tparam BasicJsonType basic_json specialization +@param j JSON value to hash +@return hash value of j +*/ +template +std::size_t hash(const BasicJsonType& j) +{ + using string_t = typename BasicJsonType::string_t; + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + + const auto type = static_cast(j.type()); + switch (j.type()) + { + case BasicJsonType::value_t::null: + case BasicJsonType::value_t::discarded: + { + return combine(type, 0); + } + + case BasicJsonType::value_t::object: + { + auto seed = combine(type, j.size()); + for (const auto& element : j.items()) + { + const auto h = std::hash {}(element.key()); + seed = combine(seed, h); + seed = combine(seed, hash(element.value())); + } + return seed; + } + + case BasicJsonType::value_t::array: + { + auto seed = combine(type, j.size()); + for (const auto& element : j) + { + seed = combine(seed, hash(element)); + } + 
return seed; + } + + case BasicJsonType::value_t::string: + { + const auto h = std::hash {}(j.template get_ref()); + return combine(type, h); + } + + case BasicJsonType::value_t::boolean: + { + const auto h = std::hash {}(j.template get()); + return combine(type, h); + } + + case BasicJsonType::value_t::number_integer: + { + const auto h = std::hash {}(j.template get()); + return combine(type, h); + } + + case nlohmann::detail::value_t::number_unsigned: + { + const auto h = std::hash {}(j.template get()); + return combine(type, h); + } + + case nlohmann::detail::value_t::number_float: + { + const auto h = std::hash {}(j.template get()); + return combine(type, h); + } + + case nlohmann::detail::value_t::binary: + { + auto seed = combine(type, j.get_binary().size()); + const auto h = std::hash {}(j.get_binary().has_subtype()); + seed = combine(seed, h); + seed = combine(seed, j.get_binary().subtype()); + for (const auto byte : j.get_binary()) + { + seed = combine(seed, std::hash {}(byte)); + } + return seed; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } +} + +} // namespace detail +} // namespace nlohmann + +// #include + + +#include // generate_n +#include // array +#include // ldexp +#include // size_t +#include // uint8_t, uint16_t, uint32_t, uint64_t +#include // snprintf +#include // memcpy +#include // back_inserter +#include // numeric_limits +#include // char_traits, string +#include // make_pair, move + +// #include + +// #include + + +#include // array +#include // size_t +#include //FILE * +#include // strlen +#include // istream +#include // begin, end, iterator_traits, random_access_iterator_tag, distance, next +#include // shared_ptr, make_shared, addressof +#include // accumulate +#include // string, char_traits +#include // enable_if, is_base_of, is_pointer, is_integral, remove_pointer +#include // pair, declval + +// #include + +// #include + + +namespace nlohmann +{ +namespace detail +{ +/// the supported input 
formats +enum class input_format_t { json, cbor, msgpack, ubjson, bson }; + +//////////////////// +// input adapters // +//////////////////// + +/*! +Input adapter for stdio file access. This adapter read only 1 byte and do not use any + buffer. This adapter is a very low level adapter. +*/ +class file_input_adapter +{ + public: + using char_type = char; + + JSON_HEDLEY_NON_NULL(2) + explicit file_input_adapter(std::FILE* f) noexcept + : m_file(f) + {} + + // make class move-only + file_input_adapter(const file_input_adapter&) = delete; + file_input_adapter(file_input_adapter&&) = default; + file_input_adapter& operator=(const file_input_adapter&) = delete; + file_input_adapter& operator=(file_input_adapter&&) = delete; + + std::char_traits::int_type get_character() noexcept + { + return std::fgetc(m_file); + } + + private: + /// the file pointer to read from + std::FILE* m_file; +}; + + +/*! +Input adapter for a (caching) istream. Ignores a UFT Byte Order Mark at +beginning of input. Does not support changing the underlying std::streambuf +in mid-input. Maintains underlying std::istream and std::streambuf to support +subsequent use of standard std::istream operations to process any input +characters following those used in parsing the JSON input. Clears the +std::istream flags; any input errors (e.g., EOF) will be detected by the first +subsequent call for input from the std::istream. 
+*/ +class input_stream_adapter +{ + public: + using char_type = char; + + ~input_stream_adapter() + { + // clear stream flags; we use underlying streambuf I/O, do not + // maintain ifstream flags, except eof + if (is != nullptr) + { + is->clear(is->rdstate() & std::ios::eofbit); + } + } + + explicit input_stream_adapter(std::istream& i) + : is(&i), sb(i.rdbuf()) + {} + + // delete because of pointer members + input_stream_adapter(const input_stream_adapter&) = delete; + input_stream_adapter& operator=(input_stream_adapter&) = delete; + input_stream_adapter& operator=(input_stream_adapter&& rhs) = delete; + + input_stream_adapter(input_stream_adapter&& rhs) noexcept : is(rhs.is), sb(rhs.sb) + { + rhs.is = nullptr; + rhs.sb = nullptr; + } + + // std::istream/std::streambuf use std::char_traits::to_int_type, to + // ensure that std::char_traits::eof() and the character 0xFF do not + // end up as the same value, eg. 0xFFFFFFFF. + std::char_traits::int_type get_character() + { + auto res = sb->sbumpc(); + // set eof manually, as we don't use the istream interface. + if (JSON_HEDLEY_UNLIKELY(res == EOF)) + { + is->clear(is->rdstate() | std::ios::eofbit); + } + return res; + } + + private: + /// the associated input stream + std::istream* is = nullptr; + std::streambuf* sb = nullptr; +}; + +// General-purpose iterator-based adapter. It might not be as fast as +// theoretically possible for some containers, but it is extremely versatile. 
+template +class iterator_input_adapter +{ + public: + using char_type = typename std::iterator_traits::value_type; + + iterator_input_adapter(IteratorType first, IteratorType last) + : current(std::move(first)), end(std::move(last)) {} + + typename std::char_traits::int_type get_character() + { + if (JSON_HEDLEY_LIKELY(current != end)) + { + auto result = std::char_traits::to_int_type(*current); + std::advance(current, 1); + return result; + } + else + { + return std::char_traits::eof(); + } + } + + private: + IteratorType current; + IteratorType end; + + template + friend struct wide_string_input_helper; + + bool empty() const + { + return current == end; + } + +}; + + +template +struct wide_string_input_helper; + +template +struct wide_string_input_helper +{ + // UTF-32 + static void fill_buffer(BaseInputAdapter& input, + std::array::int_type, 4>& utf8_bytes, + size_t& utf8_bytes_index, + size_t& utf8_bytes_filled) + { + utf8_bytes_index = 0; + + if (JSON_HEDLEY_UNLIKELY(input.empty())) + { + utf8_bytes[0] = std::char_traits::eof(); + utf8_bytes_filled = 1; + } + else + { + // get the current character + const auto wc = input.get_character(); + + // UTF-32 to UTF-8 encoding + if (wc < 0x80) + { + utf8_bytes[0] = static_cast::int_type>(wc); + utf8_bytes_filled = 1; + } + else if (wc <= 0x7FF) + { + utf8_bytes[0] = static_cast::int_type>(0xC0u | ((static_cast(wc) >> 6u) & 0x1Fu)); + utf8_bytes[1] = static_cast::int_type>(0x80u | (static_cast(wc) & 0x3Fu)); + utf8_bytes_filled = 2; + } + else if (wc <= 0xFFFF) + { + utf8_bytes[0] = static_cast::int_type>(0xE0u | ((static_cast(wc) >> 12u) & 0x0Fu)); + utf8_bytes[1] = static_cast::int_type>(0x80u | ((static_cast(wc) >> 6u) & 0x3Fu)); + utf8_bytes[2] = static_cast::int_type>(0x80u | (static_cast(wc) & 0x3Fu)); + utf8_bytes_filled = 3; + } + else if (wc <= 0x10FFFF) + { + utf8_bytes[0] = static_cast::int_type>(0xF0u | ((static_cast(wc) >> 18u) & 0x07u)); + utf8_bytes[1] = static_cast::int_type>(0x80u | 
((static_cast(wc) >> 12u) & 0x3Fu)); + utf8_bytes[2] = static_cast::int_type>(0x80u | ((static_cast(wc) >> 6u) & 0x3Fu)); + utf8_bytes[3] = static_cast::int_type>(0x80u | (static_cast(wc) & 0x3Fu)); + utf8_bytes_filled = 4; + } + else + { + // unknown character + utf8_bytes[0] = static_cast::int_type>(wc); + utf8_bytes_filled = 1; + } + } + } +}; + +template +struct wide_string_input_helper +{ + // UTF-16 + static void fill_buffer(BaseInputAdapter& input, + std::array::int_type, 4>& utf8_bytes, + size_t& utf8_bytes_index, + size_t& utf8_bytes_filled) + { + utf8_bytes_index = 0; + + if (JSON_HEDLEY_UNLIKELY(input.empty())) + { + utf8_bytes[0] = std::char_traits::eof(); + utf8_bytes_filled = 1; + } + else + { + // get the current character + const auto wc = input.get_character(); + + // UTF-16 to UTF-8 encoding + if (wc < 0x80) + { + utf8_bytes[0] = static_cast::int_type>(wc); + utf8_bytes_filled = 1; + } + else if (wc <= 0x7FF) + { + utf8_bytes[0] = static_cast::int_type>(0xC0u | ((static_cast(wc) >> 6u))); + utf8_bytes[1] = static_cast::int_type>(0x80u | (static_cast(wc) & 0x3Fu)); + utf8_bytes_filled = 2; + } + else if (0xD800 > wc || wc >= 0xE000) + { + utf8_bytes[0] = static_cast::int_type>(0xE0u | ((static_cast(wc) >> 12u))); + utf8_bytes[1] = static_cast::int_type>(0x80u | ((static_cast(wc) >> 6u) & 0x3Fu)); + utf8_bytes[2] = static_cast::int_type>(0x80u | (static_cast(wc) & 0x3Fu)); + utf8_bytes_filled = 3; + } + else + { + if (JSON_HEDLEY_UNLIKELY(!input.empty())) + { + const auto wc2 = static_cast(input.get_character()); + const auto charcode = 0x10000u + (((static_cast(wc) & 0x3FFu) << 10u) | (wc2 & 0x3FFu)); + utf8_bytes[0] = static_cast::int_type>(0xF0u | (charcode >> 18u)); + utf8_bytes[1] = static_cast::int_type>(0x80u | ((charcode >> 12u) & 0x3Fu)); + utf8_bytes[2] = static_cast::int_type>(0x80u | ((charcode >> 6u) & 0x3Fu)); + utf8_bytes[3] = static_cast::int_type>(0x80u | (charcode & 0x3Fu)); + utf8_bytes_filled = 4; + } + else + { + utf8_bytes[0] = 
static_cast::int_type>(wc); + utf8_bytes_filled = 1; + } + } + } + } +}; + +// Wraps another input apdater to convert wide character types into individual bytes. +template +class wide_string_input_adapter +{ + public: + using char_type = char; + + wide_string_input_adapter(BaseInputAdapter base) + : base_adapter(base) {} + + typename std::char_traits::int_type get_character() noexcept + { + // check if buffer needs to be filled + if (utf8_bytes_index == utf8_bytes_filled) + { + fill_buffer(); + + JSON_ASSERT(utf8_bytes_filled > 0); + JSON_ASSERT(utf8_bytes_index == 0); + } + + // use buffer + JSON_ASSERT(utf8_bytes_filled > 0); + JSON_ASSERT(utf8_bytes_index < utf8_bytes_filled); + return utf8_bytes[utf8_bytes_index++]; + } + + private: + BaseInputAdapter base_adapter; + + template + void fill_buffer() + { + wide_string_input_helper::fill_buffer(base_adapter, utf8_bytes, utf8_bytes_index, utf8_bytes_filled); + } + + /// a buffer for UTF-8 bytes + std::array::int_type, 4> utf8_bytes = {{0, 0, 0, 0}}; + + /// index to the utf8_codes array for the next valid byte + std::size_t utf8_bytes_index = 0; + /// number of valid bytes in the utf8_codes array + std::size_t utf8_bytes_filled = 0; +}; + + +template +struct iterator_input_adapter_factory +{ + using iterator_type = IteratorType; + using char_type = typename std::iterator_traits::value_type; + using adapter_type = iterator_input_adapter; + + static adapter_type create(IteratorType first, IteratorType last) + { + return adapter_type(std::move(first), std::move(last)); + } +}; + +template +struct is_iterator_of_multibyte +{ + using value_type = typename std::iterator_traits::value_type; + enum + { + value = sizeof(value_type) > 1 + }; +}; + +template +struct iterator_input_adapter_factory::value>> +{ + using iterator_type = IteratorType; + using char_type = typename std::iterator_traits::value_type; + using base_adapter_type = iterator_input_adapter; + using adapter_type = wide_string_input_adapter; + + static 
adapter_type create(IteratorType first, IteratorType last) + { + return adapter_type(base_adapter_type(std::move(first), std::move(last))); + } +}; + +// General purpose iterator-based input +template +typename iterator_input_adapter_factory::adapter_type input_adapter(IteratorType first, IteratorType last) +{ + using factory_type = iterator_input_adapter_factory; + return factory_type::create(first, last); +} + +// Convenience shorthand from container to iterator +template +auto input_adapter(const ContainerType& container) -> decltype(input_adapter(begin(container), end(container))) +{ + // Enable ADL + using std::begin; + using std::end; + + return input_adapter(begin(container), end(container)); +} + +// Special cases with fast paths +inline file_input_adapter input_adapter(std::FILE* file) +{ + return file_input_adapter(file); +} + +inline input_stream_adapter input_adapter(std::istream& stream) +{ + return input_stream_adapter(stream); +} + +inline input_stream_adapter input_adapter(std::istream&& stream) +{ + return input_stream_adapter(stream); +} + +using contiguous_bytes_input_adapter = decltype(input_adapter(std::declval(), std::declval())); + +// Null-delimited strings, and the like. +template < typename CharT, + typename std::enable_if < + std::is_pointer::value&& + !std::is_array::value&& + std::is_integral::type>::value&& + sizeof(typename std::remove_pointer::type) == 1, + int >::type = 0 > +contiguous_bytes_input_adapter input_adapter(CharT b) +{ + auto length = std::strlen(reinterpret_cast(b)); + const auto* ptr = reinterpret_cast(b); + return input_adapter(ptr, ptr + length); +} + +template +auto input_adapter(T (&array)[N]) -> decltype(input_adapter(array, array + N)) +{ + return input_adapter(array, array + N); +} + +// This class only handles inputs of input_buffer_adapter type. +// It's required so that expressions like {ptr, len} can be implicitely casted +// to the correct adapter. 
+class span_input_adapter +{ + public: + template < typename CharT, + typename std::enable_if < + std::is_pointer::value&& + std::is_integral::type>::value&& + sizeof(typename std::remove_pointer::type) == 1, + int >::type = 0 > + span_input_adapter(CharT b, std::size_t l) + : ia(reinterpret_cast(b), reinterpret_cast(b) + l) {} + + template::iterator_category, std::random_access_iterator_tag>::value, + int>::type = 0> + span_input_adapter(IteratorType first, IteratorType last) + : ia(input_adapter(first, last)) {} + + contiguous_bytes_input_adapter&& get() + { + return std::move(ia); + } + + private: + contiguous_bytes_input_adapter ia; +}; +} // namespace detail +} // namespace nlohmann + +// #include + + +#include +#include // string +#include // move +#include // vector + +// #include + +// #include + + +namespace nlohmann +{ + +/*! +@brief SAX interface + +This class describes the SAX interface used by @ref nlohmann::json::sax_parse. +Each function is called in different situations while the input is parsed. The +boolean return value informs the parser whether to continue processing the +input. +*/ +template +struct json_sax +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + + /*! + @brief a null value was read + @return whether parsing should proceed + */ + virtual bool null() = 0; + + /*! + @brief a boolean value was read + @param[in] val boolean value + @return whether parsing should proceed + */ + virtual bool boolean(bool val) = 0; + + /*! + @brief an integer number was read + @param[in] val integer value + @return whether parsing should proceed + */ + virtual bool number_integer(number_integer_t val) = 0; + + /*! 
+ @brief an unsigned integer number was read + @param[in] val unsigned integer value + @return whether parsing should proceed + */ + virtual bool number_unsigned(number_unsigned_t val) = 0; + + /*! + @brief an floating-point number was read + @param[in] val floating-point value + @param[in] s raw token value + @return whether parsing should proceed + */ + virtual bool number_float(number_float_t val, const string_t& s) = 0; + + /*! + @brief a string was read + @param[in] val string value + @return whether parsing should proceed + @note It is safe to move the passed string. + */ + virtual bool string(string_t& val) = 0; + + /*! + @brief a binary string was read + @param[in] val binary value + @return whether parsing should proceed + @note It is safe to move the passed binary. + */ + virtual bool binary(binary_t& val) = 0; + + /*! + @brief the beginning of an object was read + @param[in] elements number of object elements or -1 if unknown + @return whether parsing should proceed + @note binary formats may report the number of elements + */ + virtual bool start_object(std::size_t elements) = 0; + + /*! + @brief an object key was read + @param[in] val object key + @return whether parsing should proceed + @note It is safe to move the passed string. + */ + virtual bool key(string_t& val) = 0; + + /*! + @brief the end of an object was read + @return whether parsing should proceed + */ + virtual bool end_object() = 0; + + /*! + @brief the beginning of an array was read + @param[in] elements number of array elements or -1 if unknown + @return whether parsing should proceed + @note binary formats may report the number of elements + */ + virtual bool start_array(std::size_t elements) = 0; + + /*! + @brief the end of an array was read + @return whether parsing should proceed + */ + virtual bool end_array() = 0; + + /*! 
+ @brief a parse error occurred + @param[in] position the position in the input where the error occurs + @param[in] last_token the last read token + @param[in] ex an exception object describing the error + @return whether parsing should proceed (must return false) + */ + virtual bool parse_error(std::size_t position, + const std::string& last_token, + const detail::exception& ex) = 0; + + virtual ~json_sax() = default; +}; + + +namespace detail +{ +/*! +@brief SAX implementation to create a JSON value from SAX events + +This class implements the @ref json_sax interface and processes the SAX events +to create a JSON value which makes it basically a DOM parser. The structure or +hierarchy of the JSON value is managed by the stack `ref_stack` which contains +a pointer to the respective array or object for each recursion depth. + +After successful parsing, the value that is passed by reference to the +constructor contains the parsed value. + +@tparam BasicJsonType the JSON type +*/ +template +class json_sax_dom_parser +{ + public: + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + + /*! 
+ @param[in, out] r reference to a JSON value that is manipulated while + parsing + @param[in] allow_exceptions_ whether parse errors yield exceptions + */ + explicit json_sax_dom_parser(BasicJsonType& r, const bool allow_exceptions_ = true) + : root(r), allow_exceptions(allow_exceptions_) + {} + + // make class move-only + json_sax_dom_parser(const json_sax_dom_parser&) = delete; + json_sax_dom_parser(json_sax_dom_parser&&) = default; + json_sax_dom_parser& operator=(const json_sax_dom_parser&) = delete; + json_sax_dom_parser& operator=(json_sax_dom_parser&&) = default; + ~json_sax_dom_parser() = default; + + bool null() + { + handle_value(nullptr); + return true; + } + + bool boolean(bool val) + { + handle_value(val); + return true; + } + + bool number_integer(number_integer_t val) + { + handle_value(val); + return true; + } + + bool number_unsigned(number_unsigned_t val) + { + handle_value(val); + return true; + } + + bool number_float(number_float_t val, const string_t& /*unused*/) + { + handle_value(val); + return true; + } + + bool string(string_t& val) + { + handle_value(val); + return true; + } + + bool binary(binary_t& val) + { + handle_value(std::move(val)); + return true; + } + + bool start_object(std::size_t len) + { + ref_stack.push_back(handle_value(BasicJsonType::value_t::object)); + + if (JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, + "excessive object size: " + std::to_string(len))); + } + + return true; + } + + bool key(string_t& val) + { + // add null at given key and store the reference for later + object_element = &(ref_stack.back()->m_value.object->operator[](val)); + return true; + } + + bool end_object() + { + ref_stack.pop_back(); + return true; + } + + bool start_array(std::size_t len) + { + ref_stack.push_back(handle_value(BasicJsonType::value_t::array)); + + if (JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) && len > ref_stack.back()->max_size())) + { + 
JSON_THROW(out_of_range::create(408, + "excessive array size: " + std::to_string(len))); + } + + return true; + } + + bool end_array() + { + ref_stack.pop_back(); + return true; + } + + template + bool parse_error(std::size_t /*unused*/, const std::string& /*unused*/, + const Exception& ex) + { + errored = true; + static_cast(ex); + if (allow_exceptions) + { + JSON_THROW(ex); + } + return false; + } + + constexpr bool is_errored() const + { + return errored; + } + + private: + /*! + @invariant If the ref stack is empty, then the passed value will be the new + root. + @invariant If the ref stack contains a value, then it is an array or an + object to which we can add elements + */ + template + JSON_HEDLEY_RETURNS_NON_NULL + BasicJsonType* handle_value(Value&& v) + { + if (ref_stack.empty()) + { + root = BasicJsonType(std::forward(v)); + return &root; + } + + JSON_ASSERT(ref_stack.back()->is_array() || ref_stack.back()->is_object()); + + if (ref_stack.back()->is_array()) + { + ref_stack.back()->m_value.array->emplace_back(std::forward(v)); + return &(ref_stack.back()->m_value.array->back()); + } + + JSON_ASSERT(ref_stack.back()->is_object()); + JSON_ASSERT(object_element); + *object_element = BasicJsonType(std::forward(v)); + return object_element; + } + + /// the parsed JSON value + BasicJsonType& root; + /// stack to model hierarchy of values + std::vector ref_stack {}; + /// helper to hold the reference for the next object element + BasicJsonType* object_element = nullptr; + /// whether a syntax error occurred + bool errored = false; + /// whether to throw exceptions in case of errors + const bool allow_exceptions = false; +}; + +template +class json_sax_dom_callback_parser +{ + public: + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = 
typename BasicJsonType::binary_t; + using parser_callback_t = typename BasicJsonType::parser_callback_t; + using parse_event_t = typename BasicJsonType::parse_event_t; + + json_sax_dom_callback_parser(BasicJsonType& r, + const parser_callback_t cb, + const bool allow_exceptions_ = true) + : root(r), callback(cb), allow_exceptions(allow_exceptions_) + { + keep_stack.push_back(true); + } + + // make class move-only + json_sax_dom_callback_parser(const json_sax_dom_callback_parser&) = delete; + json_sax_dom_callback_parser(json_sax_dom_callback_parser&&) = default; + json_sax_dom_callback_parser& operator=(const json_sax_dom_callback_parser&) = delete; + json_sax_dom_callback_parser& operator=(json_sax_dom_callback_parser&&) = default; + ~json_sax_dom_callback_parser() = default; + + bool null() + { + handle_value(nullptr); + return true; + } + + bool boolean(bool val) + { + handle_value(val); + return true; + } + + bool number_integer(number_integer_t val) + { + handle_value(val); + return true; + } + + bool number_unsigned(number_unsigned_t val) + { + handle_value(val); + return true; + } + + bool number_float(number_float_t val, const string_t& /*unused*/) + { + handle_value(val); + return true; + } + + bool string(string_t& val) + { + handle_value(val); + return true; + } + + bool binary(binary_t& val) + { + handle_value(std::move(val)); + return true; + } + + bool start_object(std::size_t len) + { + // check callback for object start + const bool keep = callback(static_cast(ref_stack.size()), parse_event_t::object_start, discarded); + keep_stack.push_back(keep); + + auto val = handle_value(BasicJsonType::value_t::object, true); + ref_stack.push_back(val.second); + + // check object limit + if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, "excessive object size: " + std::to_string(len))); + } + + return true; + } + + bool key(string_t& val) + { + BasicJsonType k = 
BasicJsonType(val); + + // check callback for key + const bool keep = callback(static_cast(ref_stack.size()), parse_event_t::key, k); + key_keep_stack.push_back(keep); + + // add discarded value at given key and store the reference for later + if (keep && ref_stack.back()) + { + object_element = &(ref_stack.back()->m_value.object->operator[](val) = discarded); + } + + return true; + } + + bool end_object() + { + if (ref_stack.back() && !callback(static_cast(ref_stack.size()) - 1, parse_event_t::object_end, *ref_stack.back())) + { + // discard object + *ref_stack.back() = discarded; + } + + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(!keep_stack.empty()); + ref_stack.pop_back(); + keep_stack.pop_back(); + + if (!ref_stack.empty() && ref_stack.back() && ref_stack.back()->is_structured()) + { + // remove discarded value + for (auto it = ref_stack.back()->begin(); it != ref_stack.back()->end(); ++it) + { + if (it->is_discarded()) + { + ref_stack.back()->erase(it); + break; + } + } + } + + return true; + } + + bool start_array(std::size_t len) + { + const bool keep = callback(static_cast(ref_stack.size()), parse_event_t::array_start, discarded); + keep_stack.push_back(keep); + + auto val = handle_value(BasicJsonType::value_t::array, true); + ref_stack.push_back(val.second); + + // check array limit + if (ref_stack.back() && JSON_HEDLEY_UNLIKELY(len != std::size_t(-1) && len > ref_stack.back()->max_size())) + { + JSON_THROW(out_of_range::create(408, "excessive array size: " + std::to_string(len))); + } + + return true; + } + + bool end_array() + { + bool keep = true; + + if (ref_stack.back()) + { + keep = callback(static_cast(ref_stack.size()) - 1, parse_event_t::array_end, *ref_stack.back()); + if (!keep) + { + // discard array + *ref_stack.back() = discarded; + } + } + + JSON_ASSERT(!ref_stack.empty()); + JSON_ASSERT(!keep_stack.empty()); + ref_stack.pop_back(); + keep_stack.pop_back(); + + // remove discarded value + if (!keep && !ref_stack.empty() && 
ref_stack.back()->is_array()) + { + ref_stack.back()->m_value.array->pop_back(); + } + + return true; + } + + template + bool parse_error(std::size_t /*unused*/, const std::string& /*unused*/, + const Exception& ex) + { + errored = true; + static_cast(ex); + if (allow_exceptions) + { + JSON_THROW(ex); + } + return false; + } + + constexpr bool is_errored() const + { + return errored; + } + + private: + /*! + @param[in] v value to add to the JSON value we build during parsing + @param[in] skip_callback whether we should skip calling the callback + function; this is required after start_array() and + start_object() SAX events, because otherwise we would call the + callback function with an empty array or object, respectively. + + @invariant If the ref stack is empty, then the passed value will be the new + root. + @invariant If the ref stack contains a value, then it is an array or an + object to which we can add elements + + @return pair of boolean (whether value should be kept) and pointer (to the + passed value in the ref_stack hierarchy; nullptr if not kept) + */ + template + std::pair handle_value(Value&& v, const bool skip_callback = false) + { + JSON_ASSERT(!keep_stack.empty()); + + // do not handle this value if we know it would be added to a discarded + // container + if (!keep_stack.back()) + { + return {false, nullptr}; + } + + // create value + auto value = BasicJsonType(std::forward(v)); + + // check callback + const bool keep = skip_callback || callback(static_cast(ref_stack.size()), parse_event_t::value, value); + + // do not handle this value if we just learnt it shall be discarded + if (!keep) + { + return {false, nullptr}; + } + + if (ref_stack.empty()) + { + root = std::move(value); + return {true, &root}; + } + + // skip this value if we already decided to skip the parent + // (https://github.com/nlohmann/json/issues/971#issuecomment-413678360) + if (!ref_stack.back()) + { + return {false, nullptr}; + } + + // we now only expect arrays and objects 
+ JSON_ASSERT(ref_stack.back()->is_array() || ref_stack.back()->is_object()); + + // array + if (ref_stack.back()->is_array()) + { + ref_stack.back()->m_value.array->push_back(std::move(value)); + return {true, &(ref_stack.back()->m_value.array->back())}; + } + + // object + JSON_ASSERT(ref_stack.back()->is_object()); + // check if we should store an element for the current key + JSON_ASSERT(!key_keep_stack.empty()); + const bool store_element = key_keep_stack.back(); + key_keep_stack.pop_back(); + + if (!store_element) + { + return {false, nullptr}; + } + + JSON_ASSERT(object_element); + *object_element = std::move(value); + return {true, object_element}; + } + + /// the parsed JSON value + BasicJsonType& root; + /// stack to model hierarchy of values + std::vector ref_stack {}; + /// stack to manage which values to keep + std::vector keep_stack {}; + /// stack to manage which object keys to keep + std::vector key_keep_stack {}; + /// helper to hold the reference for the next object element + BasicJsonType* object_element = nullptr; + /// whether a syntax error occurred + bool errored = false; + /// callback function + const parser_callback_t callback = nullptr; + /// whether to throw exceptions in case of errors + const bool allow_exceptions = true; + /// a discarded value for the callback + BasicJsonType discarded = BasicJsonType::value_t::discarded; +}; + +template +class json_sax_acceptor +{ + public: + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + + bool null() + { + return true; + } + + bool boolean(bool /*unused*/) + { + return true; + } + + bool number_integer(number_integer_t /*unused*/) + { + return true; + } + + bool number_unsigned(number_unsigned_t /*unused*/) + { + return true; + } + + bool 
number_float(number_float_t /*unused*/, const string_t& /*unused*/) + { + return true; + } + + bool string(string_t& /*unused*/) + { + return true; + } + + bool binary(binary_t& /*unused*/) + { + return true; + } + + bool start_object(std::size_t /*unused*/ = std::size_t(-1)) + { + return true; + } + + bool key(string_t& /*unused*/) + { + return true; + } + + bool end_object() + { + return true; + } + + bool start_array(std::size_t /*unused*/ = std::size_t(-1)) + { + return true; + } + + bool end_array() + { + return true; + } + + bool parse_error(std::size_t /*unused*/, const std::string& /*unused*/, const detail::exception& /*unused*/) + { + return false; + } +}; +} // namespace detail + +} // namespace nlohmann + +// #include + + +#include // array +#include // localeconv +#include // size_t +#include // snprintf +#include // strtof, strtod, strtold, strtoll, strtoull +#include // initializer_list +#include // char_traits, string +#include // move +#include // vector + +// #include + +// #include + +// #include + + +namespace nlohmann +{ +namespace detail +{ +/////////// +// lexer // +/////////// + +template +class lexer_base +{ + public: + /// token types for the parser + enum class token_type + { + uninitialized, ///< indicating the scanner is uninitialized + literal_true, ///< the `true` literal + literal_false, ///< the `false` literal + literal_null, ///< the `null` literal + value_string, ///< a string -- use get_string() for actual value + value_unsigned, ///< an unsigned integer -- use get_number_unsigned() for actual value + value_integer, ///< a signed integer -- use get_number_integer() for actual value + value_float, ///< an floating point number -- use get_number_float() for actual value + begin_array, ///< the character for array begin `[` + begin_object, ///< the character for object begin `{` + end_array, ///< the character for array end `]` + end_object, ///< the character for object end `}` + name_separator, ///< the name separator `:` + 
value_separator, ///< the value separator `,` + parse_error, ///< indicating a parse error + end_of_input, ///< indicating the end of the input buffer + literal_or_value ///< a literal or the begin of a value (only for diagnostics) + }; + + /// return name of values of type token_type (only used for errors) + JSON_HEDLEY_RETURNS_NON_NULL + JSON_HEDLEY_CONST + static const char* token_type_name(const token_type t) noexcept + { + switch (t) + { + case token_type::uninitialized: + return ""; + case token_type::literal_true: + return "true literal"; + case token_type::literal_false: + return "false literal"; + case token_type::literal_null: + return "null literal"; + case token_type::value_string: + return "string literal"; + case token_type::value_unsigned: + case token_type::value_integer: + case token_type::value_float: + return "number literal"; + case token_type::begin_array: + return "'['"; + case token_type::begin_object: + return "'{'"; + case token_type::end_array: + return "']'"; + case token_type::end_object: + return "'}'"; + case token_type::name_separator: + return "':'"; + case token_type::value_separator: + return "','"; + case token_type::parse_error: + return ""; + case token_type::end_of_input: + return "end of input"; + case token_type::literal_or_value: + return "'[', '{', or a literal"; + // LCOV_EXCL_START + default: // catch non-enum values + return "unknown token"; + // LCOV_EXCL_STOP + } + } +}; +/*! +@brief lexical analysis + +This class organizes the lexical analysis during JSON deserialization. 
+*/ +template +class lexer : public lexer_base +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using char_type = typename InputAdapterType::char_type; + using char_int_type = typename std::char_traits::int_type; + + public: + using token_type = typename lexer_base::token_type; + + explicit lexer(InputAdapterType&& adapter, bool ignore_comments_ = false) + : ia(std::move(adapter)) + , ignore_comments(ignore_comments_) + , decimal_point_char(static_cast(get_decimal_point())) + {} + + // delete because of pointer members + lexer(const lexer&) = delete; + lexer(lexer&&) = default; + lexer& operator=(lexer&) = delete; + lexer& operator=(lexer&&) = default; + ~lexer() = default; + + private: + ///////////////////// + // locales + ///////////////////// + + /// return the locale-dependent decimal point + JSON_HEDLEY_PURE + static char get_decimal_point() noexcept + { + const auto* loc = localeconv(); + JSON_ASSERT(loc != nullptr); + return (loc->decimal_point == nullptr) ? '.' : *(loc->decimal_point); + } + + ///////////////////// + // scan functions + ///////////////////// + + /*! + @brief get codepoint from 4 hex characters following `\u` + + For input "\u c1 c2 c3 c4" the codepoint is: + (c1 * 0x1000) + (c2 * 0x0100) + (c3 * 0x0010) + c4 + = (c1 << 12) + (c2 << 8) + (c3 << 4) + (c4 << 0) + + Furthermore, the possible characters '0'..'9', 'A'..'F', and 'a'..'f' + must be converted to the integers 0x0..0x9, 0xA..0xF, 0xA..0xF, resp. The + conversion is done by subtracting the offset (0x30, 0x37, and 0x57) + between the ASCII value of the character and the desired integer value. + + @return codepoint (0x0000..0xFFFF) or -1 in case of an error (e.g. 
EOF or + non-hex character) + */ + int get_codepoint() + { + // this function only makes sense after reading `\u` + JSON_ASSERT(current == 'u'); + int codepoint = 0; + + const auto factors = { 12u, 8u, 4u, 0u }; + for (const auto factor : factors) + { + get(); + + if (current >= '0' && current <= '9') + { + codepoint += static_cast((static_cast(current) - 0x30u) << factor); + } + else if (current >= 'A' && current <= 'F') + { + codepoint += static_cast((static_cast(current) - 0x37u) << factor); + } + else if (current >= 'a' && current <= 'f') + { + codepoint += static_cast((static_cast(current) - 0x57u) << factor); + } + else + { + return -1; + } + } + + JSON_ASSERT(0x0000 <= codepoint && codepoint <= 0xFFFF); + return codepoint; + } + + /*! + @brief check if the next byte(s) are inside a given range + + Adds the current byte and, for each passed range, reads a new byte and + checks if it is inside the range. If a violation was detected, set up an + error message and return false. Otherwise, return true. + + @param[in] ranges list of integers; interpreted as list of pairs of + inclusive lower and upper bound, respectively + + @pre The passed list @a ranges must have 2, 4, or 6 elements; that is, + 1, 2, or 3 pairs. This precondition is enforced by an assertion. + + @return true if and only if no range violation was detected + */ + bool next_byte_in_range(std::initializer_list ranges) + { + JSON_ASSERT(ranges.size() == 2 || ranges.size() == 4 || ranges.size() == 6); + add(current); + + for (auto range = ranges.begin(); range != ranges.end(); ++range) + { + get(); + if (JSON_HEDLEY_LIKELY(*range <= current && current <= *(++range))) + { + add(current); + } + else + { + error_message = "invalid string: ill-formed UTF-8 byte"; + return false; + } + } + + return true; + } + + /*! + @brief scan a string literal + + This function scans a string according to Sect. 7 of RFC 7159. While + scanning, bytes are escaped and copied into buffer token_buffer. 
Then the + function returns successfully, token_buffer is *not* null-terminated (as it + may contain \0 bytes), and token_buffer.size() is the number of bytes in the + string. + + @return token_type::value_string if string could be successfully scanned, + token_type::parse_error otherwise + + @note In case of errors, variable error_message contains a textual + description. + */ + token_type scan_string() + { + // reset token_buffer (ignore opening quote) + reset(); + + // we entered the function by reading an open quote + JSON_ASSERT(current == '\"'); + + while (true) + { + // get next character + switch (get()) + { + // end of file while parsing string + case std::char_traits::eof(): + { + error_message = "invalid string: missing closing quote"; + return token_type::parse_error; + } + + // closing quote + case '\"': + { + return token_type::value_string; + } + + // escapes + case '\\': + { + switch (get()) + { + // quotation mark + case '\"': + add('\"'); + break; + // reverse solidus + case '\\': + add('\\'); + break; + // solidus + case '/': + add('/'); + break; + // backspace + case 'b': + add('\b'); + break; + // form feed + case 'f': + add('\f'); + break; + // line feed + case 'n': + add('\n'); + break; + // carriage return + case 'r': + add('\r'); + break; + // tab + case 't': + add('\t'); + break; + + // unicode escapes + case 'u': + { + const int codepoint1 = get_codepoint(); + int codepoint = codepoint1; // start with codepoint1 + + if (JSON_HEDLEY_UNLIKELY(codepoint1 == -1)) + { + error_message = "invalid string: '\\u' must be followed by 4 hex digits"; + return token_type::parse_error; + } + + // check if code point is a high surrogate + if (0xD800 <= codepoint1 && codepoint1 <= 0xDBFF) + { + // expect next \uxxxx entry + if (JSON_HEDLEY_LIKELY(get() == '\\' && get() == 'u')) + { + const int codepoint2 = get_codepoint(); + + if (JSON_HEDLEY_UNLIKELY(codepoint2 == -1)) + { + error_message = "invalid string: '\\u' must be followed by 4 hex digits"; + 
return token_type::parse_error; + } + + // check if codepoint2 is a low surrogate + if (JSON_HEDLEY_LIKELY(0xDC00 <= codepoint2 && codepoint2 <= 0xDFFF)) + { + // overwrite codepoint + codepoint = static_cast( + // high surrogate occupies the most significant 22 bits + (static_cast(codepoint1) << 10u) + // low surrogate occupies the least significant 15 bits + + static_cast(codepoint2) + // there is still the 0xD800, 0xDC00 and 0x10000 noise + // in the result so we have to subtract with: + // (0xD800 << 10) + DC00 - 0x10000 = 0x35FDC00 + - 0x35FDC00u); + } + else + { + error_message = "invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF"; + return token_type::parse_error; + } + } + else + { + error_message = "invalid string: surrogate U+D800..U+DBFF must be followed by U+DC00..U+DFFF"; + return token_type::parse_error; + } + } + else + { + if (JSON_HEDLEY_UNLIKELY(0xDC00 <= codepoint1 && codepoint1 <= 0xDFFF)) + { + error_message = "invalid string: surrogate U+DC00..U+DFFF must follow U+D800..U+DBFF"; + return token_type::parse_error; + } + } + + // result of the above calculation yields a proper codepoint + JSON_ASSERT(0x00 <= codepoint && codepoint <= 0x10FFFF); + + // translate codepoint into bytes + if (codepoint < 0x80) + { + // 1-byte characters: 0xxxxxxx (ASCII) + add(static_cast(codepoint)); + } + else if (codepoint <= 0x7FF) + { + // 2-byte characters: 110xxxxx 10xxxxxx + add(static_cast(0xC0u | (static_cast(codepoint) >> 6u))); + add(static_cast(0x80u | (static_cast(codepoint) & 0x3Fu))); + } + else if (codepoint <= 0xFFFF) + { + // 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx + add(static_cast(0xE0u | (static_cast(codepoint) >> 12u))); + add(static_cast(0x80u | ((static_cast(codepoint) >> 6u) & 0x3Fu))); + add(static_cast(0x80u | (static_cast(codepoint) & 0x3Fu))); + } + else + { + // 4-byte characters: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + add(static_cast(0xF0u | (static_cast(codepoint) >> 18u))); + add(static_cast(0x80u | 
((static_cast(codepoint) >> 12u) & 0x3Fu))); + add(static_cast(0x80u | ((static_cast(codepoint) >> 6u) & 0x3Fu))); + add(static_cast(0x80u | (static_cast(codepoint) & 0x3Fu))); + } + + break; + } + + // other characters after escape + default: + error_message = "invalid string: forbidden character after backslash"; + return token_type::parse_error; + } + + break; + } + + // invalid control characters + case 0x00: + { + error_message = "invalid string: control character U+0000 (NUL) must be escaped to \\u0000"; + return token_type::parse_error; + } + + case 0x01: + { + error_message = "invalid string: control character U+0001 (SOH) must be escaped to \\u0001"; + return token_type::parse_error; + } + + case 0x02: + { + error_message = "invalid string: control character U+0002 (STX) must be escaped to \\u0002"; + return token_type::parse_error; + } + + case 0x03: + { + error_message = "invalid string: control character U+0003 (ETX) must be escaped to \\u0003"; + return token_type::parse_error; + } + + case 0x04: + { + error_message = "invalid string: control character U+0004 (EOT) must be escaped to \\u0004"; + return token_type::parse_error; + } + + case 0x05: + { + error_message = "invalid string: control character U+0005 (ENQ) must be escaped to \\u0005"; + return token_type::parse_error; + } + + case 0x06: + { + error_message = "invalid string: control character U+0006 (ACK) must be escaped to \\u0006"; + return token_type::parse_error; + } + + case 0x07: + { + error_message = "invalid string: control character U+0007 (BEL) must be escaped to \\u0007"; + return token_type::parse_error; + } + + case 0x08: + { + error_message = "invalid string: control character U+0008 (BS) must be escaped to \\u0008 or \\b"; + return token_type::parse_error; + } + + case 0x09: + { + error_message = "invalid string: control character U+0009 (HT) must be escaped to \\u0009 or \\t"; + return token_type::parse_error; + } + + case 0x0A: + { + error_message = "invalid string: control 
character U+000A (LF) must be escaped to \\u000A or \\n"; + return token_type::parse_error; + } + + case 0x0B: + { + error_message = "invalid string: control character U+000B (VT) must be escaped to \\u000B"; + return token_type::parse_error; + } + + case 0x0C: + { + error_message = "invalid string: control character U+000C (FF) must be escaped to \\u000C or \\f"; + return token_type::parse_error; + } + + case 0x0D: + { + error_message = "invalid string: control character U+000D (CR) must be escaped to \\u000D or \\r"; + return token_type::parse_error; + } + + case 0x0E: + { + error_message = "invalid string: control character U+000E (SO) must be escaped to \\u000E"; + return token_type::parse_error; + } + + case 0x0F: + { + error_message = "invalid string: control character U+000F (SI) must be escaped to \\u000F"; + return token_type::parse_error; + } + + case 0x10: + { + error_message = "invalid string: control character U+0010 (DLE) must be escaped to \\u0010"; + return token_type::parse_error; + } + + case 0x11: + { + error_message = "invalid string: control character U+0011 (DC1) must be escaped to \\u0011"; + return token_type::parse_error; + } + + case 0x12: + { + error_message = "invalid string: control character U+0012 (DC2) must be escaped to \\u0012"; + return token_type::parse_error; + } + + case 0x13: + { + error_message = "invalid string: control character U+0013 (DC3) must be escaped to \\u0013"; + return token_type::parse_error; + } + + case 0x14: + { + error_message = "invalid string: control character U+0014 (DC4) must be escaped to \\u0014"; + return token_type::parse_error; + } + + case 0x15: + { + error_message = "invalid string: control character U+0015 (NAK) must be escaped to \\u0015"; + return token_type::parse_error; + } + + case 0x16: + { + error_message = "invalid string: control character U+0016 (SYN) must be escaped to \\u0016"; + return token_type::parse_error; + } + + case 0x17: + { + error_message = "invalid string: control 
character U+0017 (ETB) must be escaped to \\u0017"; + return token_type::parse_error; + } + + case 0x18: + { + error_message = "invalid string: control character U+0018 (CAN) must be escaped to \\u0018"; + return token_type::parse_error; + } + + case 0x19: + { + error_message = "invalid string: control character U+0019 (EM) must be escaped to \\u0019"; + return token_type::parse_error; + } + + case 0x1A: + { + error_message = "invalid string: control character U+001A (SUB) must be escaped to \\u001A"; + return token_type::parse_error; + } + + case 0x1B: + { + error_message = "invalid string: control character U+001B (ESC) must be escaped to \\u001B"; + return token_type::parse_error; + } + + case 0x1C: + { + error_message = "invalid string: control character U+001C (FS) must be escaped to \\u001C"; + return token_type::parse_error; + } + + case 0x1D: + { + error_message = "invalid string: control character U+001D (GS) must be escaped to \\u001D"; + return token_type::parse_error; + } + + case 0x1E: + { + error_message = "invalid string: control character U+001E (RS) must be escaped to \\u001E"; + return token_type::parse_error; + } + + case 0x1F: + { + error_message = "invalid string: control character U+001F (US) must be escaped to \\u001F"; + return token_type::parse_error; + } + + // U+0020..U+007F (except U+0022 (quote) and U+005C (backspace)) + case 0x20: + case 0x21: + case 0x23: + case 0x24: + case 0x25: + case 0x26: + case 0x27: + case 0x28: + case 0x29: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: + case 0x30: + case 0x31: + case 0x32: + case 0x33: + case 0x34: + case 0x35: + case 0x36: + case 0x37: + case 0x38: + case 0x39: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + 
case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + case 0x58: + case 0x59: + case 0x5A: + case 0x5B: + case 0x5D: + case 0x5E: + case 0x5F: + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + case 0x78: + case 0x79: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: + { + add(current); + break; + } + + // U+0080..U+07FF: bytes C2..DF 80..BF + case 0xC2: + case 0xC3: + case 0xC4: + case 0xC5: + case 0xC6: + case 0xC7: + case 0xC8: + case 0xC9: + case 0xCA: + case 0xCB: + case 0xCC: + case 0xCD: + case 0xCE: + case 0xCF: + case 0xD0: + case 0xD1: + case 0xD2: + case 0xD3: + case 0xD4: + case 0xD5: + case 0xD6: + case 0xD7: + case 0xD8: + case 0xD9: + case 0xDA: + case 0xDB: + case 0xDC: + case 0xDD: + case 0xDE: + case 0xDF: + { + if (JSON_HEDLEY_UNLIKELY(!next_byte_in_range({0x80, 0xBF}))) + { + return token_type::parse_error; + } + break; + } + + // U+0800..U+0FFF: bytes E0 A0..BF 80..BF + case 0xE0: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+1000..U+CFFF: bytes E1..EC 80..BF 80..BF + // U+E000..U+FFFF: bytes EE..EF 80..BF 80..BF + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + case 0xEE: + case 0xEF: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+D000..U+D7FF: bytes ED 80..9F 80..BF + case 0xED: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0x9F, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // 
U+10000..U+3FFFF F0 90..BF 80..BF 80..BF + case 0xF0: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+40000..U+FFFFF F1..F3 80..BF 80..BF 80..BF + case 0xF1: + case 0xF2: + case 0xF3: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // U+100000..U+10FFFF F4 80..8F 80..BF 80..BF + case 0xF4: + { + if (JSON_HEDLEY_UNLIKELY(!(next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF})))) + { + return token_type::parse_error; + } + break; + } + + // remaining bytes (80..C1 and F5..FF) are ill-formed + default: + { + error_message = "invalid string: ill-formed UTF-8 byte"; + return token_type::parse_error; + } + } + } + } + + /*! + * @brief scan a comment + * @return whether comment could be scanned successfully + */ + bool scan_comment() + { + switch (get()) + { + // single-line comments skip input until a newline or EOF is read + case '/': + { + while (true) + { + switch (get()) + { + case '\n': + case '\r': + case std::char_traits::eof(): + case '\0': + return true; + + default: + break; + } + } + } + + // multi-line comments skip input until */ is read + case '*': + { + while (true) + { + switch (get()) + { + case std::char_traits::eof(): + case '\0': + { + error_message = "invalid comment; missing closing '*/'"; + return false; + } + + case '*': + { + switch (get()) + { + case '/': + return true; + + default: + { + unget(); + continue; + } + } + } + + default: + continue; + } + } + } + + // unexpected character after reading '/' + default: + { + error_message = "invalid comment; expecting '/' or '*' after '/'"; + return false; + } + } + } + + JSON_HEDLEY_NON_NULL(2) + static void strtof(float& f, const char* str, char** endptr) noexcept + { + f = std::strtof(str, endptr); + } + + JSON_HEDLEY_NON_NULL(2) + static void strtof(double& f, const char* str, char** 
endptr) noexcept + { + f = std::strtod(str, endptr); + } + + JSON_HEDLEY_NON_NULL(2) + static void strtof(long double& f, const char* str, char** endptr) noexcept + { + f = std::strtold(str, endptr); + } + + /*! + @brief scan a number literal + + This function scans a string according to Sect. 6 of RFC 7159. + + The function is realized with a deterministic finite state machine derived + from the grammar described in RFC 7159. Starting in state "init", the + input is read and used to determined the next state. Only state "done" + accepts the number. State "error" is a trap state to model errors. In the + table below, "anything" means any character but the ones listed before. + + state | 0 | 1-9 | e E | + | - | . | anything + ---------|----------|----------|----------|---------|---------|----------|----------- + init | zero | any1 | [error] | [error] | minus | [error] | [error] + minus | zero | any1 | [error] | [error] | [error] | [error] | [error] + zero | done | done | exponent | done | done | decimal1 | done + any1 | any1 | any1 | exponent | done | done | decimal1 | done + decimal1 | decimal2 | decimal2 | [error] | [error] | [error] | [error] | [error] + decimal2 | decimal2 | decimal2 | exponent | done | done | done | done + exponent | any2 | any2 | [error] | sign | sign | [error] | [error] + sign | any2 | any2 | [error] | [error] | [error] | [error] | [error] + any2 | any2 | any2 | done | done | done | done | done + + The state machine is realized with one label per state (prefixed with + "scan_number_") and `goto` statements between them. The state machine + contains cycles, but any cycle can be left when EOF is read. Therefore, + the function is guaranteed to terminate. + + During scanning, the read bytes are stored in token_buffer. This string is + then converted to a signed integer, an unsigned integer, or a + floating-point number. 
+ + @return token_type::value_unsigned, token_type::value_integer, or + token_type::value_float if number could be successfully scanned, + token_type::parse_error otherwise + + @note The scanner is independent of the current locale. Internally, the + locale's decimal point is used instead of `.` to work with the + locale-dependent converters. + */ + token_type scan_number() // lgtm [cpp/use-of-goto] + { + // reset token_buffer to store the number's bytes + reset(); + + // the type of the parsed number; initially set to unsigned; will be + // changed if minus sign, decimal point or exponent is read + token_type number_type = token_type::value_unsigned; + + // state (init): we just found out we need to scan a number + switch (current) + { + case '-': + { + add(current); + goto scan_number_minus; + } + + case '0': + { + add(current); + goto scan_number_zero; + } + + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any1; + } + + // all other characters are rejected outside scan_number() + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } + +scan_number_minus: + // state: we just parsed a leading minus sign + number_type = token_type::value_integer; + switch (get()) + { + case '0': + { + add(current); + goto scan_number_zero; + } + + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any1; + } + + default: + { + error_message = "invalid number; expected digit after '-'"; + return token_type::parse_error; + } + } + +scan_number_zero: + // state: we just parse a zero (maybe with a leading minus sign) + switch (get()) + { + case '.': + { + add(decimal_point_char); + goto scan_number_decimal1; + } + + case 'e': + case 'E': + { + add(current); + goto scan_number_exponent; + } + + default: + goto scan_number_done; + } + +scan_number_any1: + // state: we just parsed a 
number 0-9 (maybe with a leading minus sign) + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any1; + } + + case '.': + { + add(decimal_point_char); + goto scan_number_decimal1; + } + + case 'e': + case 'E': + { + add(current); + goto scan_number_exponent; + } + + default: + goto scan_number_done; + } + +scan_number_decimal1: + // state: we just parsed a decimal point + number_type = token_type::value_float; + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_decimal2; + } + + default: + { + error_message = "invalid number; expected digit after '.'"; + return token_type::parse_error; + } + } + +scan_number_decimal2: + // we just parsed at least one number after a decimal point + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_decimal2; + } + + case 'e': + case 'E': + { + add(current); + goto scan_number_exponent; + } + + default: + goto scan_number_done; + } + +scan_number_exponent: + // we just parsed an exponent + number_type = token_type::value_float; + switch (get()) + { + case '+': + case '-': + { + add(current); + goto scan_number_sign; + } + + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any2; + } + + default: + { + error_message = + "invalid number; expected '+', '-', or digit after exponent"; + return token_type::parse_error; + } + } + +scan_number_sign: + // we just parsed an exponent sign + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto 
scan_number_any2; + } + + default: + { + error_message = "invalid number; expected digit after exponent sign"; + return token_type::parse_error; + } + } + +scan_number_any2: + // we just parsed a number after the exponent or exponent sign + switch (get()) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + { + add(current); + goto scan_number_any2; + } + + default: + goto scan_number_done; + } + +scan_number_done: + // unget the character after the number (we only read it to know that + // we are done scanning a number) + unget(); + + char* endptr = nullptr; + errno = 0; + + // try to parse integers first and fall back to floats + if (number_type == token_type::value_unsigned) + { + const auto x = std::strtoull(token_buffer.data(), &endptr, 10); + + // we checked the number format before + JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size()); + + if (errno == 0) + { + value_unsigned = static_cast(x); + if (value_unsigned == x) + { + return token_type::value_unsigned; + } + } + } + else if (number_type == token_type::value_integer) + { + const auto x = std::strtoll(token_buffer.data(), &endptr, 10); + + // we checked the number format before + JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size()); + + if (errno == 0) + { + value_integer = static_cast(x); + if (value_integer == x) + { + return token_type::value_integer; + } + } + } + + // this code is reached if we parse a floating-point number or if an + // integer conversion above failed + strtof(value_float, token_buffer.data(), &endptr); + + // we checked the number format before + JSON_ASSERT(endptr == token_buffer.data() + token_buffer.size()); + + return token_type::value_float; + } + + /*! 
+ @param[in] literal_text the literal text to expect + @param[in] length the length of the passed literal text + @param[in] return_type the token type to return on success + */ + JSON_HEDLEY_NON_NULL(2) + token_type scan_literal(const char_type* literal_text, const std::size_t length, + token_type return_type) + { + JSON_ASSERT(std::char_traits::to_char_type(current) == literal_text[0]); + for (std::size_t i = 1; i < length; ++i) + { + if (JSON_HEDLEY_UNLIKELY(std::char_traits::to_char_type(get()) != literal_text[i])) + { + error_message = "invalid literal"; + return token_type::parse_error; + } + } + return return_type; + } + + ///////////////////// + // input management + ///////////////////// + + /// reset token_buffer; current character is beginning of token + void reset() noexcept + { + token_buffer.clear(); + token_string.clear(); + token_string.push_back(std::char_traits::to_char_type(current)); + } + + /* + @brief get next character from the input + + This function provides the interface to the used input adapter. It does + not throw in case the input reached EOF, but returns a + `std::char_traits::eof()` in that case. Stores the scanned characters + for use in error messages. + + @return character read from the input + */ + char_int_type get() + { + ++position.chars_read_total; + ++position.chars_read_current_line; + + if (next_unget) + { + // just reset the next_unget variable and work with current + next_unget = false; + } + else + { + current = ia.get_character(); + } + + if (JSON_HEDLEY_LIKELY(current != std::char_traits::eof())) + { + token_string.push_back(std::char_traits::to_char_type(current)); + } + + if (current == '\n') + { + ++position.lines_read; + position.chars_read_current_line = 0; + } + + return current; + } + + /*! + @brief unget current character (read it again on next get) + + We implement unget by setting variable next_unget to true. 
The input is not + changed - we just simulate ungetting by modifying chars_read_total, + chars_read_current_line, and token_string. The next call to get() will + behave as if the unget character is read again. + */ + void unget() + { + next_unget = true; + + --position.chars_read_total; + + // in case we "unget" a newline, we have to also decrement the lines_read + if (position.chars_read_current_line == 0) + { + if (position.lines_read > 0) + { + --position.lines_read; + } + } + else + { + --position.chars_read_current_line; + } + + if (JSON_HEDLEY_LIKELY(current != std::char_traits::eof())) + { + JSON_ASSERT(!token_string.empty()); + token_string.pop_back(); + } + } + + /// add a character to token_buffer + void add(char_int_type c) + { + token_buffer.push_back(static_cast(c)); + } + + public: + ///////////////////// + // value getters + ///////////////////// + + /// return integer value + constexpr number_integer_t get_number_integer() const noexcept + { + return value_integer; + } + + /// return unsigned integer value + constexpr number_unsigned_t get_number_unsigned() const noexcept + { + return value_unsigned; + } + + /// return floating-point value + constexpr number_float_t get_number_float() const noexcept + { + return value_float; + } + + /// return current string value (implicitly resets the token; useful only once) + string_t& get_string() + { + return token_buffer; + } + + ///////////////////// + // diagnostics + ///////////////////// + + /// return position of last read token + constexpr position_t get_position() const noexcept + { + return position; + } + + /// return the last read token (for errors only). Will never contain EOF + /// (an arbitrary value that is not a valid char value, often -1), because + /// 255 may legitimately occur. May contain NUL, which should be escaped. 
+ std::string get_token_string() const + { + // escape control characters + std::string result; + for (const auto c : token_string) + { + if (static_cast(c) <= '\x1F') + { + // escape control characters + std::array cs{{}}; + (std::snprintf)(cs.data(), cs.size(), "", static_cast(c)); + result += cs.data(); + } + else + { + // add character as is + result.push_back(static_cast(c)); + } + } + + return result; + } + + /// return syntax error message + JSON_HEDLEY_RETURNS_NON_NULL + constexpr const char* get_error_message() const noexcept + { + return error_message; + } + + ///////////////////// + // actual scanner + ///////////////////// + + /*! + @brief skip the UTF-8 byte order mark + @return true iff there is no BOM or the correct BOM has been skipped + */ + bool skip_bom() + { + if (get() == 0xEF) + { + // check if we completely parse the BOM + return get() == 0xBB && get() == 0xBF; + } + + // the first character is not the beginning of the BOM; unget it to + // process is later + unget(); + return true; + } + + void skip_whitespace() + { + do + { + get(); + } + while (current == ' ' || current == '\t' || current == '\n' || current == '\r'); + } + + token_type scan() + { + // initially, skip the BOM + if (position.chars_read_total == 0 && !skip_bom()) + { + error_message = "invalid BOM; must be 0xEF 0xBB 0xBF if given"; + return token_type::parse_error; + } + + // read next character and ignore whitespace + skip_whitespace(); + + // ignore comments + while (ignore_comments && current == '/') + { + if (!scan_comment()) + { + return token_type::parse_error; + } + + // skip following whitespace + skip_whitespace(); + } + + switch (current) + { + // structural characters + case '[': + return token_type::begin_array; + case ']': + return token_type::end_array; + case '{': + return token_type::begin_object; + case '}': + return token_type::end_object; + case ':': + return token_type::name_separator; + case ',': + return token_type::value_separator; + + // literals + 
case 't': + { + std::array true_literal = {{'t', 'r', 'u', 'e'}}; + return scan_literal(true_literal.data(), true_literal.size(), token_type::literal_true); + } + case 'f': + { + std::array false_literal = {{'f', 'a', 'l', 's', 'e'}}; + return scan_literal(false_literal.data(), false_literal.size(), token_type::literal_false); + } + case 'n': + { + std::array null_literal = {{'n', 'u', 'l', 'l'}}; + return scan_literal(null_literal.data(), null_literal.size(), token_type::literal_null); + } + + // string + case '\"': + return scan_string(); + + // number + case '-': + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + return scan_number(); + + // end of input (the null byte is needed when parsing from + // string literals) + case '\0': + case std::char_traits::eof(): + return token_type::end_of_input; + + // error + default: + error_message = "invalid literal"; + return token_type::parse_error; + } + } + + private: + /// input adapter + InputAdapterType ia; + + /// whether comments should be ignored (true) or signaled as errors (false) + const bool ignore_comments = false; + + /// the current character + char_int_type current = std::char_traits::eof(); + + /// whether the next get() call should just return current + bool next_unget = false; + + /// the start position of the current token + position_t position {}; + + /// raw input token string (for error messages) + std::vector token_string {}; + + /// buffer for variable-length tokens (numbers, strings) + string_t token_buffer {}; + + /// a description of occurred lexer errors + const char* error_message = ""; + + // number values + number_integer_t value_integer = 0; + number_unsigned_t value_unsigned = 0; + number_float_t value_float = 0; + + /// the decimal point + const char_int_type decimal_point_char = '.'; +}; +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + + +#include // size_t +#include // declval 
+#include // string + +// #include + +// #include + + +namespace nlohmann +{ +namespace detail +{ +template +using null_function_t = decltype(std::declval().null()); + +template +using boolean_function_t = + decltype(std::declval().boolean(std::declval())); + +template +using number_integer_function_t = + decltype(std::declval().number_integer(std::declval())); + +template +using number_unsigned_function_t = + decltype(std::declval().number_unsigned(std::declval())); + +template +using number_float_function_t = decltype(std::declval().number_float( + std::declval(), std::declval())); + +template +using string_function_t = + decltype(std::declval().string(std::declval())); + +template +using binary_function_t = + decltype(std::declval().binary(std::declval())); + +template +using start_object_function_t = + decltype(std::declval().start_object(std::declval())); + +template +using key_function_t = + decltype(std::declval().key(std::declval())); + +template +using end_object_function_t = decltype(std::declval().end_object()); + +template +using start_array_function_t = + decltype(std::declval().start_array(std::declval())); + +template +using end_array_function_t = decltype(std::declval().end_array()); + +template +using parse_error_function_t = decltype(std::declval().parse_error( + std::declval(), std::declval(), + std::declval())); + +template +struct is_sax +{ + private: + static_assert(is_basic_json::value, + "BasicJsonType must be of type basic_json<...>"); + + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using exception_t = typename BasicJsonType::exception; + + public: + static constexpr bool value = + is_detected_exact::value && + is_detected_exact::value && + is_detected_exact::value && + 
is_detected_exact::value && + is_detected_exact::value && + is_detected_exact::value && + is_detected_exact::value && + is_detected_exact::value && + is_detected_exact::value && + is_detected_exact::value && + is_detected_exact::value && + is_detected_exact::value && + is_detected_exact::value; +}; + +template +struct is_sax_static_asserts +{ + private: + static_assert(is_basic_json::value, + "BasicJsonType must be of type basic_json<...>"); + + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using exception_t = typename BasicJsonType::exception; + + public: + static_assert(is_detected_exact::value, + "Missing/invalid function: bool null()"); + static_assert(is_detected_exact::value, + "Missing/invalid function: bool boolean(bool)"); + static_assert(is_detected_exact::value, + "Missing/invalid function: bool boolean(bool)"); + static_assert( + is_detected_exact::value, + "Missing/invalid function: bool number_integer(number_integer_t)"); + static_assert( + is_detected_exact::value, + "Missing/invalid function: bool number_unsigned(number_unsigned_t)"); + static_assert(is_detected_exact::value, + "Missing/invalid function: bool number_float(number_float_t, const string_t&)"); + static_assert( + is_detected_exact::value, + "Missing/invalid function: bool string(string_t&)"); + static_assert( + is_detected_exact::value, + "Missing/invalid function: bool binary(binary_t&)"); + static_assert(is_detected_exact::value, + "Missing/invalid function: bool start_object(std::size_t)"); + static_assert(is_detected_exact::value, + "Missing/invalid function: bool key(string_t&)"); + static_assert(is_detected_exact::value, + "Missing/invalid function: bool end_object()"); + static_assert(is_detected_exact::value, + 
"Missing/invalid function: bool start_array(std::size_t)"); + static_assert(is_detected_exact::value, + "Missing/invalid function: bool end_array()"); + static_assert( + is_detected_exact::value, + "Missing/invalid function: bool parse_error(std::size_t, const " + "std::string&, const exception&)"); +}; +} // namespace detail +} // namespace nlohmann + +// #include + + +namespace nlohmann +{ +namespace detail +{ + +/// how to treat CBOR tags +enum class cbor_tag_handler_t +{ + error, ///< throw a parse_error exception in case of a tag + ignore ///< ignore tags +}; + +/*! +@brief determine system byte order + +@return true if and only if system's byte order is little endian + +@note from https://stackoverflow.com/a/1001328/266378 +*/ +static inline bool little_endianess(int num = 1) noexcept +{ + return *reinterpret_cast(&num) == 1; +} + + +/////////////////// +// binary reader // +/////////////////// + +/*! +@brief deserialization of CBOR, MessagePack, and UBJSON values +*/ +template> +class binary_reader +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using json_sax_t = SAX; + using char_type = typename InputAdapterType::char_type; + using char_int_type = typename std::char_traits::int_type; + + public: + /*! + @brief create a binary reader + + @param[in] adapter input adapter to read from + */ + explicit binary_reader(InputAdapterType&& adapter) : ia(std::move(adapter)) + { + (void)detail::is_sax_static_asserts {}; + } + + // make class move-only + binary_reader(const binary_reader&) = delete; + binary_reader(binary_reader&&) = default; + binary_reader& operator=(const binary_reader&) = delete; + binary_reader& operator=(binary_reader&&) = default; + ~binary_reader() = default; + + /*! 
+ @param[in] format the binary format to parse + @param[in] sax_ a SAX event processor + @param[in] strict whether to expect the input to be consumed completed + @param[in] tag_handler how to treat CBOR tags + + @return + */ + JSON_HEDLEY_NON_NULL(3) + bool sax_parse(const input_format_t format, + json_sax_t* sax_, + const bool strict = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + sax = sax_; + bool result = false; + + switch (format) + { + case input_format_t::bson: + result = parse_bson_internal(); + break; + + case input_format_t::cbor: + result = parse_cbor_internal(true, tag_handler); + break; + + case input_format_t::msgpack: + result = parse_msgpack_internal(); + break; + + case input_format_t::ubjson: + result = parse_ubjson_internal(); + break; + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } + + // strict mode: next byte must be EOF + if (result && strict) + { + if (format == input_format_t::ubjson) + { + get_ignore_noop(); + } + else + { + get(); + } + + if (JSON_HEDLEY_UNLIKELY(current != std::char_traits::eof())) + { + return sax->parse_error(chars_read, get_token_string(), + parse_error::create(110, chars_read, exception_message(format, "expected end of input; last byte: 0x" + get_token_string(), "value"))); + } + } + + return result; + } + + private: + ////////// + // BSON // + ////////// + + /*! + @brief Reads in a BSON-object and passes it to the SAX-parser. + @return whether a valid BSON-value was passed to the SAX parser + */ + bool parse_bson_internal() + { + std::int32_t document_size{}; + get_number(input_format_t::bson, document_size); + + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(std::size_t(-1)))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_bson_element_list(/*is_array*/false))) + { + return false; + } + + return sax->end_object(); + } + + /*! + @brief Parses a C-style string from the BSON input. 
+ @param[in, out] result A reference to the string variable where the read + string is to be stored. + @return `true` if the \x00-byte indicating the end of the string was + encountered before the EOF; false` indicates an unexpected EOF. + */ + bool get_bson_cstr(string_t& result) + { + auto out = std::back_inserter(result); + while (true) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::bson, "cstring"))) + { + return false; + } + if (current == 0x00) + { + return true; + } + *out++ = static_cast(current); + } + } + + /*! + @brief Parses a zero-terminated string of length @a len from the BSON + input. + @param[in] len The length (including the zero-byte at the end) of the + string to be read. + @param[in, out] result A reference to the string variable where the read + string is to be stored. + @tparam NumberType The type of the length @a len + @pre len >= 1 + @return `true` if the string was successfully parsed + */ + template + bool get_bson_string(const NumberType len, string_t& result) + { + if (JSON_HEDLEY_UNLIKELY(len < 1)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::bson, "string length must be at least 1, is " + std::to_string(len), "string"))); + } + + return get_string(input_format_t::bson, len - static_cast(1), result) && get() != std::char_traits::eof(); + } + + /*! + @brief Parses a byte array input of length @a len from the BSON input. + @param[in] len The length of the byte array to be read. + @param[in, out] result A reference to the binary variable where the read + array is to be stored. 
+ @tparam NumberType The type of the length @a len + @pre len >= 0 + @return `true` if the byte array was successfully parsed + */ + template + bool get_bson_binary(const NumberType len, binary_t& result) + { + if (JSON_HEDLEY_UNLIKELY(len < 0)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::bson, "byte array length cannot be negative, is " + std::to_string(len), "binary"))); + } + + // All BSON binary values have a subtype + std::uint8_t subtype{}; + get_number(input_format_t::bson, subtype); + result.set_subtype(subtype); + + return get_binary(input_format_t::bson, len, result); + } + + /*! + @brief Read a BSON document element of the given @a element_type. + @param[in] element_type The BSON element type, c.f. http://bsonspec.org/spec.html + @param[in] element_type_parse_position The position in the input stream, + where the `element_type` was read. + @warning Not all BSON element types are supported yet. An unsupported + @a element_type will give rise to a parse_error.114: + Unsupported BSON record type 0x... 
+ @return whether a valid BSON-object/array was passed to the SAX parser + */ + bool parse_bson_element_internal(const char_int_type element_type, + const std::size_t element_type_parse_position) + { + switch (element_type) + { + case 0x01: // double + { + double number{}; + return get_number(input_format_t::bson, number) && sax->number_float(static_cast(number), ""); + } + + case 0x02: // string + { + std::int32_t len{}; + string_t value; + return get_number(input_format_t::bson, len) && get_bson_string(len, value) && sax->string(value); + } + + case 0x03: // object + { + return parse_bson_internal(); + } + + case 0x04: // array + { + return parse_bson_array(); + } + + case 0x05: // binary + { + std::int32_t len{}; + binary_t value; + return get_number(input_format_t::bson, len) && get_bson_binary(len, value) && sax->binary(value); + } + + case 0x08: // boolean + { + return sax->boolean(get() != 0); + } + + case 0x0A: // null + { + return sax->null(); + } + + case 0x10: // int32 + { + std::int32_t value{}; + return get_number(input_format_t::bson, value) && sax->number_integer(value); + } + + case 0x12: // int64 + { + std::int64_t value{}; + return get_number(input_format_t::bson, value) && sax->number_integer(value); + } + + default: // anything else not supported (yet) + { + std::array cr{{}}; + (std::snprintf)(cr.data(), cr.size(), "%.2hhX", static_cast(element_type)); + return sax->parse_error(element_type_parse_position, std::string(cr.data()), parse_error::create(114, element_type_parse_position, "Unsupported BSON record type 0x" + std::string(cr.data()))); + } + } + } + + /*! + @brief Read a BSON element list (as specified in the BSON-spec) + + The same binary layout is used for objects and arrays, hence it must be + indicated with the argument @a is_array which one is expected + (true --> array, false --> object). 
+ + @param[in] is_array Determines if the element list being read is to be + treated as an object (@a is_array == false), or as an + array (@a is_array == true). + @return whether a valid BSON-object/array was passed to the SAX parser + */ + bool parse_bson_element_list(const bool is_array) + { + string_t key; + + while (auto element_type = get()) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::bson, "element list"))) + { + return false; + } + + const std::size_t element_type_parse_position = chars_read; + if (JSON_HEDLEY_UNLIKELY(!get_bson_cstr(key))) + { + return false; + } + + if (!is_array && !sax->key(key)) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_bson_element_internal(element_type, element_type_parse_position))) + { + return false; + } + + // get_bson_cstr only appends + key.clear(); + } + + return true; + } + + /*! + @brief Reads an array from the BSON input and passes it to the SAX-parser. + @return whether a valid BSON-array was passed to the SAX parser + */ + bool parse_bson_array() + { + std::int32_t document_size{}; + get_number(input_format_t::bson, document_size); + + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(std::size_t(-1)))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_bson_element_list(/*is_array*/true))) + { + return false; + } + + return sax->end_array(); + } + + ////////// + // CBOR // + ////////// + + /*! + @param[in] get_char whether a new character should be retrieved from the + input (true) or whether the last read character should + be considered instead (false) + @param[in] tag_handler how CBOR tags should be treated + + @return whether a valid CBOR value was passed to the SAX parser + */ + bool parse_cbor_internal(const bool get_char, + const cbor_tag_handler_t tag_handler) + { + switch (get_char ? 
get() : current) + { + // EOF + case std::char_traits::eof(): + return unexpect_eof(input_format_t::cbor, "value"); + + // Integer 0x00..0x17 (0..23) + case 0x00: + case 0x01: + case 0x02: + case 0x03: + case 0x04: + case 0x05: + case 0x06: + case 0x07: + case 0x08: + case 0x09: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: + case 0x10: + case 0x11: + case 0x12: + case 0x13: + case 0x14: + case 0x15: + case 0x16: + case 0x17: + return sax->number_unsigned(static_cast(current)); + + case 0x18: // Unsigned integer (one-byte uint8_t follows) + { + std::uint8_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + case 0x19: // Unsigned integer (two-byte uint16_t follows) + { + std::uint16_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + case 0x1A: // Unsigned integer (four-byte uint32_t follows) + { + std::uint32_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + case 0x1B: // Unsigned integer (eight-byte uint64_t follows) + { + std::uint64_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_unsigned(number); + } + + // Negative integer -1-0x00..-1-0x17 (-1..-24) + case 0x20: + case 0x21: + case 0x22: + case 0x23: + case 0x24: + case 0x25: + case 0x26: + case 0x27: + case 0x28: + case 0x29: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: + case 0x30: + case 0x31: + case 0x32: + case 0x33: + case 0x34: + case 0x35: + case 0x36: + case 0x37: + return sax->number_integer(static_cast(0x20 - 1 - current)); + + case 0x38: // Negative integer (one-byte uint8_t follows) + { + std::uint8_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast(-1) - number); + } + + case 0x39: // Negative integer -1-n (two-byte uint16_t follows) + { + std::uint16_t number{}; + return get_number(input_format_t::cbor, 
number) && sax->number_integer(static_cast(-1) - number); + } + + case 0x3A: // Negative integer -1-n (four-byte uint32_t follows) + { + std::uint32_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast(-1) - number); + } + + case 0x3B: // Negative integer -1-n (eight-byte uint64_t follows) + { + std::uint64_t number{}; + return get_number(input_format_t::cbor, number) && sax->number_integer(static_cast(-1) + - static_cast(number)); + } + + // Binary data (0x00..0x17 bytes follow) + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + case 0x58: // Binary data (one-byte uint8_t for n follows) + case 0x59: // Binary data (two-byte uint16_t for n follow) + case 0x5A: // Binary data (four-byte uint32_t for n follow) + case 0x5B: // Binary data (eight-byte uint64_t for n follow) + case 0x5F: // Binary data (indefinite length) + { + binary_t b; + return get_cbor_binary(b) && sax->binary(b); + } + + // UTF-8 string (0x00..0x17 bytes follow) + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + case 0x78: // UTF-8 string (one-byte uint8_t for n follows) + case 0x79: // UTF-8 string (two-byte uint16_t for n follow) + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) + case 0x7F: // UTF-8 string (indefinite length) + { + string_t s; + return get_cbor_string(s) && sax->string(s); + } + + // array (0x00..0x17 data items follow) + case 0x80: + case 
0x81: + case 0x82: + case 0x83: + case 0x84: + case 0x85: + case 0x86: + case 0x87: + case 0x88: + case 0x89: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: + case 0x90: + case 0x91: + case 0x92: + case 0x93: + case 0x94: + case 0x95: + case 0x96: + case 0x97: + return get_cbor_array(static_cast(static_cast(current) & 0x1Fu), tag_handler); + + case 0x98: // array (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast(len), tag_handler); + } + + case 0x99: // array (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast(len), tag_handler); + } + + case 0x9A: // array (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast(len), tag_handler); + } + + case 0x9B: // array (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_array(static_cast(len), tag_handler); + } + + case 0x9F: // array (indefinite length) + return get_cbor_array(std::size_t(-1), tag_handler); + + // map (0x00..0x17 pairs of data items follow) + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + return get_cbor_object(static_cast(static_cast(current) & 0x1Fu), tag_handler); + + case 0xB8: // map (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_object(static_cast(len), tag_handler); + } + + case 0xB9: // map (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && 
get_cbor_object(static_cast(len), tag_handler); + } + + case 0xBA: // map (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_object(static_cast(len), tag_handler); + } + + case 0xBB: // map (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && get_cbor_object(static_cast(len), tag_handler); + } + + case 0xBF: // map (indefinite length) + return get_cbor_object(std::size_t(-1), tag_handler); + + case 0xC6: // tagged item + case 0xC7: + case 0xC8: + case 0xC9: + case 0xCA: + case 0xCB: + case 0xCC: + case 0xCD: + case 0xCE: + case 0xCF: + case 0xD0: + case 0xD1: + case 0xD2: + case 0xD3: + case 0xD4: + case 0xD8: // tagged item (1 bytes follow) + case 0xD9: // tagged item (2 bytes follow) + case 0xDA: // tagged item (4 bytes follow) + case 0xDB: // tagged item (8 bytes follow) + { + switch (tag_handler) + { + case cbor_tag_handler_t::error: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::cbor, "invalid byte: 0x" + last_token, "value"))); + } + + case cbor_tag_handler_t::ignore: + { + switch (current) + { + case 0xD8: + { + std::uint8_t len{}; + get_number(input_format_t::cbor, len); + break; + } + case 0xD9: + { + std::uint16_t len{}; + get_number(input_format_t::cbor, len); + break; + } + case 0xDA: + { + std::uint32_t len{}; + get_number(input_format_t::cbor, len); + break; + } + case 0xDB: + { + std::uint64_t len{}; + get_number(input_format_t::cbor, len); + break; + } + default: + break; + } + return parse_cbor_internal(true, tag_handler); + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } + } + + case 0xF4: // false + return sax->boolean(false); + + case 0xF5: // true + return sax->boolean(true); + + case 0xF6: // null + return sax->null(); + + case 0xF9: // Half-Precision Float (two-byte IEEE 
754) + { + const auto byte1_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "number"))) + { + return false; + } + const auto byte2_raw = get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "number"))) + { + return false; + } + + const auto byte1 = static_cast(byte1_raw); + const auto byte2 = static_cast(byte2_raw); + + // code from RFC 7049, Appendix D, Figure 3: + // As half-precision floating-point numbers were only added + // to IEEE 754 in 2008, today's programming platforms often + // still only have limited support for them. It is very + // easy to include at least decoding support for them even + // without such support. An example of a small decoder for + // half-precision floating-point numbers in the C language + // is shown in Fig. 3. + const auto half = static_cast((byte1 << 8u) + byte2); + const double val = [&half] + { + const int exp = (half >> 10u) & 0x1Fu; + const unsigned int mant = half & 0x3FFu; + JSON_ASSERT(0 <= exp&& exp <= 32); + JSON_ASSERT(mant <= 1024); + switch (exp) + { + case 0: + return std::ldexp(mant, -24); + case 31: + return (mant == 0) + ? std::numeric_limits::infinity() + : std::numeric_limits::quiet_NaN(); + default: + return std::ldexp(mant + 1024, exp - 25); + } + }(); + return sax->number_float((half & 0x8000u) != 0 + ? 
static_cast(-val) + : static_cast(val), ""); + } + + case 0xFA: // Single-Precision Float (four-byte IEEE 754) + { + float number{}; + return get_number(input_format_t::cbor, number) && sax->number_float(static_cast(number), ""); + } + + case 0xFB: // Double-Precision Float (eight-byte IEEE 754) + { + double number{}; + return get_number(input_format_t::cbor, number) && sax->number_float(static_cast(number), ""); + } + + default: // anything else (0xFF is handled inside the other types) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::cbor, "invalid byte: 0x" + last_token, "value"))); + } + } + } + + /*! + @brief reads a CBOR string + + This function first reads starting bytes to determine the expected + string length and then copies this number of bytes into a string. + Additionally, CBOR's strings with indefinite lengths are supported. + + @param[out] result created string + + @return whether string creation completed + */ + bool get_cbor_string(string_t& result) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "string"))) + { + return false; + } + + switch (current) + { + // UTF-8 string (0x00..0x17 bytes follow) + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + { + return get_string(input_format_t::cbor, static_cast(current) & 0x1Fu, result); + } + + case 0x78: // UTF-8 string (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x79: // UTF-8 string (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) 
&& get_string(input_format_t::cbor, len, result); + } + + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && get_string(input_format_t::cbor, len, result); + } + + case 0x7F: // UTF-8 string (indefinite length) + { + while (get() != 0xFF) + { + string_t chunk; + if (!get_cbor_string(chunk)) + { + return false; + } + result.append(chunk); + } + return true; + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format_t::cbor, "expected length specification (0x60-0x7B) or indefinite string type (0x7F); last byte: 0x" + last_token, "string"))); + } + } + } + + /*! + @brief reads a CBOR byte array + + This function first reads starting bytes to determine the expected + byte array length and then copies this number of bytes into the byte array. + Additionally, CBOR's byte arrays with indefinite lengths are supported. 
+ + @param[out] result created byte array + + @return whether byte array creation completed + */ + bool get_cbor_binary(binary_t& result) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::cbor, "binary"))) + { + return false; + } + + switch (current) + { + // Binary data (0x00..0x17 bytes follow) + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + { + return get_binary(input_format_t::cbor, static_cast(current) & 0x1Fu, result); + } + + case 0x58: // Binary data (one-byte uint8_t for n follows) + { + std::uint8_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x59: // Binary data (two-byte uint16_t for n follow) + { + std::uint16_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x5A: // Binary data (four-byte uint32_t for n follow) + { + std::uint32_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x5B: // Binary data (eight-byte uint64_t for n follow) + { + std::uint64_t len{}; + return get_number(input_format_t::cbor, len) && + get_binary(input_format_t::cbor, len, result); + } + + case 0x5F: // Binary data (indefinite length) + { + while (get() != 0xFF) + { + binary_t chunk; + if (!get_cbor_binary(chunk)) + { + return false; + } + result.insert(result.end(), chunk.begin(), chunk.end()); + } + return true; + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format_t::cbor, "expected length specification (0x40-0x5B) or indefinite binary array type (0x5F); last byte: 
0x" + last_token, "binary"))); + } + } + } + + /*! + @param[in] len the length of the array or std::size_t(-1) for an + array of indefinite size + @param[in] tag_handler how CBOR tags should be treated + @return whether array creation completed + */ + bool get_cbor_array(const std::size_t len, + const cbor_tag_handler_t tag_handler) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(len))) + { + return false; + } + + if (len != std::size_t(-1)) + { + for (std::size_t i = 0; i < len; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(true, tag_handler))) + { + return false; + } + } + } + else + { + while (get() != 0xFF) + { + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(false, tag_handler))) + { + return false; + } + } + } + + return sax->end_array(); + } + + /*! + @param[in] len the length of the object or std::size_t(-1) for an + object of indefinite size + @param[in] tag_handler how CBOR tags should be treated + @return whether object creation completed + */ + bool get_cbor_object(const std::size_t len, + const cbor_tag_handler_t tag_handler) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(len))) + { + return false; + } + + string_t key; + if (len != std::size_t(-1)) + { + for (std::size_t i = 0; i < len; ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!get_cbor_string(key) || !sax->key(key))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(true, tag_handler))) + { + return false; + } + key.clear(); + } + } + else + { + while (get() != 0xFF) + { + if (JSON_HEDLEY_UNLIKELY(!get_cbor_string(key) || !sax->key(key))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_cbor_internal(true, tag_handler))) + { + return false; + } + key.clear(); + } + } + + return sax->end_object(); + } + + ///////////// + // MsgPack // + ///////////// + + /*! 
+ @return whether a valid MessagePack value was passed to the SAX parser + */ + bool parse_msgpack_internal() + { + switch (get()) + { + // EOF + case std::char_traits::eof(): + return unexpect_eof(input_format_t::msgpack, "value"); + + // positive fixint + case 0x00: + case 0x01: + case 0x02: + case 0x03: + case 0x04: + case 0x05: + case 0x06: + case 0x07: + case 0x08: + case 0x09: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: + case 0x10: + case 0x11: + case 0x12: + case 0x13: + case 0x14: + case 0x15: + case 0x16: + case 0x17: + case 0x18: + case 0x19: + case 0x1A: + case 0x1B: + case 0x1C: + case 0x1D: + case 0x1E: + case 0x1F: + case 0x20: + case 0x21: + case 0x22: + case 0x23: + case 0x24: + case 0x25: + case 0x26: + case 0x27: + case 0x28: + case 0x29: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: + case 0x30: + case 0x31: + case 0x32: + case 0x33: + case 0x34: + case 0x35: + case 0x36: + case 0x37: + case 0x38: + case 0x39: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: + case 0x40: + case 0x41: + case 0x42: + case 0x43: + case 0x44: + case 0x45: + case 0x46: + case 0x47: + case 0x48: + case 0x49: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: + case 0x50: + case 0x51: + case 0x52: + case 0x53: + case 0x54: + case 0x55: + case 0x56: + case 0x57: + case 0x58: + case 0x59: + case 0x5A: + case 0x5B: + case 0x5C: + case 0x5D: + case 0x5E: + case 0x5F: + case 0x60: + case 0x61: + case 0x62: + case 0x63: + case 0x64: + case 0x65: + case 0x66: + case 0x67: + case 0x68: + case 0x69: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: + case 0x70: + case 0x71: + case 0x72: + case 0x73: + case 0x74: + case 0x75: + case 0x76: + case 0x77: + case 0x78: + case 0x79: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: + return sax->number_unsigned(static_cast(current)); + + // fixmap + case 0x80: 
+ case 0x81: + case 0x82: + case 0x83: + case 0x84: + case 0x85: + case 0x86: + case 0x87: + case 0x88: + case 0x89: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: + return get_msgpack_object(static_cast(static_cast(current) & 0x0Fu)); + + // fixarray + case 0x90: + case 0x91: + case 0x92: + case 0x93: + case 0x94: + case 0x95: + case 0x96: + case 0x97: + case 0x98: + case 0x99: + case 0x9A: + case 0x9B: + case 0x9C: + case 0x9D: + case 0x9E: + case 0x9F: + return get_msgpack_array(static_cast(static_cast(current) & 0x0Fu)); + + // fixstr + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: + case 0xD9: // str 8 + case 0xDA: // str 16 + case 0xDB: // str 32 + { + string_t s; + return get_msgpack_string(s) && sax->string(s); + } + + case 0xC0: // nil + return sax->null(); + + case 0xC2: // false + return sax->boolean(false); + + case 0xC3: // true + return sax->boolean(true); + + case 0xC4: // bin 8 + case 0xC5: // bin 16 + case 0xC6: // bin 32 + case 0xC7: // ext 8 + case 0xC8: // ext 16 + case 0xC9: // ext 32 + case 0xD4: // fixext 1 + case 0xD5: // fixext 2 + case 0xD6: // fixext 4 + case 0xD7: // fixext 8 + case 0xD8: // fixext 16 + { + binary_t b; + return get_msgpack_binary(b) && sax->binary(b); + } + + case 0xCA: // float 32 + { + float number{}; + return get_number(input_format_t::msgpack, number) && sax->number_float(static_cast(number), ""); + } + + case 0xCB: // float 64 + { + double number{}; + return get_number(input_format_t::msgpack, number) && sax->number_float(static_cast(number), ""); + } + + case 0xCC: // uint 8 + { + std::uint8_t number{}; + return 
get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xCD: // uint 16 + { + std::uint16_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xCE: // uint 32 + { + std::uint32_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xCF: // uint 64 + { + std::uint64_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_unsigned(number); + } + + case 0xD0: // int 8 + { + std::int8_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xD1: // int 16 + { + std::int16_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xD2: // int 32 + { + std::int32_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xD3: // int 64 + { + std::int64_t number{}; + return get_number(input_format_t::msgpack, number) && sax->number_integer(number); + } + + case 0xDC: // array 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_array(static_cast(len)); + } + + case 0xDD: // array 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_array(static_cast(len)); + } + + case 0xDE: // map 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_object(static_cast(len)); + } + + case 0xDF: // map 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && get_msgpack_object(static_cast(len)); + } + + // negative fixint + case 0xE0: + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + case 0xED: + case 0xEE: + case 0xEF: + case 0xF0: + case 0xF1: + case 0xF2: + case 0xF3: + case 0xF4: + case 0xF5: + case 
0xF6: + case 0xF7: + case 0xF8: + case 0xF9: + case 0xFA: + case 0xFB: + case 0xFC: + case 0xFD: + case 0xFE: + case 0xFF: + return sax->number_integer(static_cast(current)); + + default: // anything else + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::msgpack, "invalid byte: 0x" + last_token, "value"))); + } + } + } + + /*! + @brief reads a MessagePack string + + This function first reads starting bytes to determine the expected + string length and then copies this number of bytes into a string. + + @param[out] result created string + + @return whether string creation completed + */ + bool get_msgpack_string(string_t& result) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::msgpack, "string"))) + { + return false; + } + + switch (current) + { + // fixstr + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: + { + return get_string(input_format_t::msgpack, static_cast(current) & 0x1Fu, result); + } + + case 0xD9: // str 8 + { + std::uint8_t len{}; + return get_number(input_format_t::msgpack, len) && get_string(input_format_t::msgpack, len, result); + } + + case 0xDA: // str 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && get_string(input_format_t::msgpack, len, result); + } + + case 0xDB: // str 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && get_string(input_format_t::msgpack, len, result); + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, 
parse_error::create(113, chars_read, exception_message(input_format_t::msgpack, "expected length specification (0xA0-0xBF, 0xD9-0xDB); last byte: 0x" + last_token, "string"))); + } + } + } + + /*! + @brief reads a MessagePack byte array + + This function first reads starting bytes to determine the expected + byte array length and then copies this number of bytes into a byte array. + + @param[out] result created byte array + + @return whether byte array creation completed + */ + bool get_msgpack_binary(binary_t& result) + { + // helper function to set the subtype + auto assign_and_return_true = [&result](std::int8_t subtype) + { + result.set_subtype(static_cast(subtype)); + return true; + }; + + switch (current) + { + case 0xC4: // bin 8 + { + std::uint8_t len{}; + return get_number(input_format_t::msgpack, len) && + get_binary(input_format_t::msgpack, len, result); + } + + case 0xC5: // bin 16 + { + std::uint16_t len{}; + return get_number(input_format_t::msgpack, len) && + get_binary(input_format_t::msgpack, len, result); + } + + case 0xC6: // bin 32 + { + std::uint32_t len{}; + return get_number(input_format_t::msgpack, len) && + get_binary(input_format_t::msgpack, len, result); + } + + case 0xC7: // ext 8 + { + std::uint8_t len{}; + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, len) && + get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, len, result) && + assign_and_return_true(subtype); + } + + case 0xC8: // ext 16 + { + std::uint16_t len{}; + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, len) && + get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, len, result) && + assign_and_return_true(subtype); + } + + case 0xC9: // ext 32 + { + std::uint32_t len{}; + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, len) && + get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, len, result) && + 
assign_and_return_true(subtype); + } + + case 0xD4: // fixext 1 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 1, result) && + assign_and_return_true(subtype); + } + + case 0xD5: // fixext 2 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 2, result) && + assign_and_return_true(subtype); + } + + case 0xD6: // fixext 4 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 4, result) && + assign_and_return_true(subtype); + } + + case 0xD7: // fixext 8 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 8, result) && + assign_and_return_true(subtype); + } + + case 0xD8: // fixext 16 + { + std::int8_t subtype{}; + return get_number(input_format_t::msgpack, subtype) && + get_binary(input_format_t::msgpack, 16, result) && + assign_and_return_true(subtype); + } + + default: // LCOV_EXCL_LINE + return false; // LCOV_EXCL_LINE + } + } + + /*! + @param[in] len the length of the array + @return whether array creation completed + */ + bool get_msgpack_array(const std::size_t len) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(len))) + { + return false; + } + + for (std::size_t i = 0; i < len; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!parse_msgpack_internal())) + { + return false; + } + } + + return sax->end_array(); + } + + /*! 
+ @param[in] len the length of the object + @return whether object creation completed + */ + bool get_msgpack_object(const std::size_t len) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(len))) + { + return false; + } + + string_t key; + for (std::size_t i = 0; i < len; ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!get_msgpack_string(key) || !sax->key(key))) + { + return false; + } + + if (JSON_HEDLEY_UNLIKELY(!parse_msgpack_internal())) + { + return false; + } + key.clear(); + } + + return sax->end_object(); + } + + //////////// + // UBJSON // + //////////// + + /*! + @param[in] get_char whether a new character should be retrieved from the + input (true, default) or whether the last read + character should be considered instead + + @return whether a valid UBJSON value was passed to the SAX parser + */ + bool parse_ubjson_internal(const bool get_char = true) + { + return get_ubjson_value(get_char ? get_ignore_noop() : current); + } + + /*! + @brief reads a UBJSON string + + This function is either called after reading the 'S' byte explicitly + indicating a string, or in case of an object key where the 'S' byte can be + left out. + + @param[out] result created string + @param[in] get_char whether a new character should be retrieved from the + input (true, default) or whether the last read + character should be considered instead + + @return whether string creation completed + */ + bool get_ubjson_string(string_t& result, const bool get_char = true) + { + if (get_char) + { + get(); // TODO(niels): may we ignore N here? 
+ } + + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::ubjson, "value"))) + { + return false; + } + + switch (current) + { + case 'U': + { + std::uint8_t len{}; + return get_number(input_format_t::ubjson, len) && get_string(input_format_t::ubjson, len, result); + } + + case 'i': + { + std::int8_t len{}; + return get_number(input_format_t::ubjson, len) && get_string(input_format_t::ubjson, len, result); + } + + case 'I': + { + std::int16_t len{}; + return get_number(input_format_t::ubjson, len) && get_string(input_format_t::ubjson, len, result); + } + + case 'l': + { + std::int32_t len{}; + return get_number(input_format_t::ubjson, len) && get_string(input_format_t::ubjson, len, result); + } + + case 'L': + { + std::int64_t len{}; + return get_number(input_format_t::ubjson, len) && get_string(input_format_t::ubjson, len, result); + } + + default: + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format_t::ubjson, "expected length type specification (U, i, I, l, L); last byte: 0x" + last_token, "string"))); + } + } + + /*! 
+ @param[out] result determined size + @return whether size determination completed + */ + bool get_ubjson_size_value(std::size_t& result) + { + switch (get_ignore_noop()) + { + case 'U': + { + std::uint8_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format_t::ubjson, number))) + { + return false; + } + result = static_cast(number); + return true; + } + + case 'i': + { + std::int8_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format_t::ubjson, number))) + { + return false; + } + result = static_cast(number); + return true; + } + + case 'I': + { + std::int16_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format_t::ubjson, number))) + { + return false; + } + result = static_cast(number); + return true; + } + + case 'l': + { + std::int32_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format_t::ubjson, number))) + { + return false; + } + result = static_cast(number); + return true; + } + + case 'L': + { + std::int64_t number{}; + if (JSON_HEDLEY_UNLIKELY(!get_number(input_format_t::ubjson, number))) + { + return false; + } + result = static_cast(number); + return true; + } + + default: + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format_t::ubjson, "expected length type specification (U, i, I, l, L) after '#'; last byte: 0x" + last_token, "size"))); + } + } + } + + /*! + @brief determine the type and size for a container + + In the optimized UBJSON format, a type and a size can be provided to allow + for a more compact representation. 
+ + @param[out] result pair of the size and the type + + @return whether pair creation completed + */ + bool get_ubjson_size_type(std::pair& result) + { + result.first = string_t::npos; // size + result.second = 0; // type + + get_ignore_noop(); + + if (current == '$') + { + result.second = get(); // must not ignore 'N', because 'N' maybe the type + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::ubjson, "type"))) + { + return false; + } + + get_ignore_noop(); + if (JSON_HEDLEY_UNLIKELY(current != '#')) + { + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::ubjson, "value"))) + { + return false; + } + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::ubjson, "expected '#' after type information; last byte: 0x" + last_token, "size"))); + } + + return get_ubjson_size_value(result.first); + } + + if (current == '#') + { + return get_ubjson_size_value(result.first); + } + + return true; + } + + /*! 
+ @param prefix the previously read or set type prefix + @return whether value creation completed + */ + bool get_ubjson_value(const char_int_type prefix) + { + switch (prefix) + { + case std::char_traits::eof(): // EOF + return unexpect_eof(input_format_t::ubjson, "value"); + + case 'T': // true + return sax->boolean(true); + case 'F': // false + return sax->boolean(false); + + case 'Z': // null + return sax->null(); + + case 'U': + { + std::uint8_t number{}; + return get_number(input_format_t::ubjson, number) && sax->number_unsigned(number); + } + + case 'i': + { + std::int8_t number{}; + return get_number(input_format_t::ubjson, number) && sax->number_integer(number); + } + + case 'I': + { + std::int16_t number{}; + return get_number(input_format_t::ubjson, number) && sax->number_integer(number); + } + + case 'l': + { + std::int32_t number{}; + return get_number(input_format_t::ubjson, number) && sax->number_integer(number); + } + + case 'L': + { + std::int64_t number{}; + return get_number(input_format_t::ubjson, number) && sax->number_integer(number); + } + + case 'd': + { + float number{}; + return get_number(input_format_t::ubjson, number) && sax->number_float(static_cast(number), ""); + } + + case 'D': + { + double number{}; + return get_number(input_format_t::ubjson, number) && sax->number_float(static_cast(number), ""); + } + + case 'H': + { + return get_ubjson_high_precision_number(); + } + + case 'C': // char + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::ubjson, "char"))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(current > 127)) + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format_t::ubjson, "byte after 'C' must be in range 0x00..0x7F; last byte: 0x" + last_token, "char"))); + } + string_t s(1, static_cast(current)); + return sax->string(s); + } + + case 'S': // string + { + string_t s; + return 
get_ubjson_string(s) && sax->string(s); + } + + case '[': // array + return get_ubjson_array(); + + case '{': // object + return get_ubjson_object(); + + default: // anything else + { + auto last_token = get_token_string(); + return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::ubjson, "invalid byte: 0x" + last_token, "value"))); + } + } + } + + /*! + @return whether array creation completed + */ + bool get_ubjson_array() + { + std::pair size_and_type; + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_type(size_and_type))) + { + return false; + } + + if (size_and_type.first != string_t::npos) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(size_and_type.first))) + { + return false; + } + + if (size_and_type.second != 0) + { + if (size_and_type.second != 'N') + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_value(size_and_type.second))) + { + return false; + } + } + } + } + else + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal())) + { + return false; + } + } + } + } + else + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(std::size_t(-1)))) + { + return false; + } + + while (current != ']') + { + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal(false))) + { + return false; + } + get_ignore_noop(); + } + } + + return sax->end_array(); + } + + /*! 
+ @return whether object creation completed + */ + bool get_ubjson_object() + { + std::pair size_and_type; + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_size_type(size_and_type))) + { + return false; + } + + string_t key; + if (size_and_type.first != string_t::npos) + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(size_and_type.first))) + { + return false; + } + + if (size_and_type.second != 0) + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_string(key) || !sax->key(key))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_value(size_and_type.second))) + { + return false; + } + key.clear(); + } + } + else + { + for (std::size_t i = 0; i < size_and_type.first; ++i) + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_string(key) || !sax->key(key))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal())) + { + return false; + } + key.clear(); + } + } + } + else + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(std::size_t(-1)))) + { + return false; + } + + while (current != '}') + { + if (JSON_HEDLEY_UNLIKELY(!get_ubjson_string(key, false) || !sax->key(key))) + { + return false; + } + if (JSON_HEDLEY_UNLIKELY(!parse_ubjson_internal())) + { + return false; + } + get_ignore_noop(); + key.clear(); + } + } + + return sax->end_object(); + } + + // Note, no reader for UBJSON binary types is implemented because they do + // not exist + + bool get_ubjson_high_precision_number() + { + // get size of following number string + std::size_t size{}; + auto res = get_ubjson_size_value(size); + if (JSON_HEDLEY_UNLIKELY(!res)) + { + return res; + } + + // get number string + std::vector number_vector; + for (std::size_t i = 0; i < size; ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(input_format_t::ubjson, "number"))) + { + return false; + } + number_vector.push_back(static_cast(current)); + } + + // parse number string + auto number_ia = detail::input_adapter(std::forward(number_vector)); + auto 
number_lexer = detail::lexer(std::move(number_ia), false); + const auto result_number = number_lexer.scan(); + const auto number_string = number_lexer.get_token_string(); + const auto result_remainder = number_lexer.scan(); + + using token_type = typename detail::lexer_base::token_type; + + if (JSON_HEDLEY_UNLIKELY(result_remainder != token_type::end_of_input)) + { + return sax->parse_error(chars_read, number_string, parse_error::create(115, chars_read, exception_message(input_format_t::ubjson, "invalid number text: " + number_lexer.get_token_string(), "high-precision number"))); + } + + switch (result_number) + { + case token_type::value_integer: + return sax->number_integer(number_lexer.get_number_integer()); + case token_type::value_unsigned: + return sax->number_unsigned(number_lexer.get_number_unsigned()); + case token_type::value_float: + return sax->number_float(number_lexer.get_number_float(), std::move(number_string)); + default: + return sax->parse_error(chars_read, number_string, parse_error::create(115, chars_read, exception_message(input_format_t::ubjson, "invalid number text: " + number_lexer.get_token_string(), "high-precision number"))); + } + } + + /////////////////////// + // Utility functions // + /////////////////////// + + /*! + @brief get next character from the input + + This function provides the interface to the used input adapter. It does + not throw in case the input reached EOF, but returns a -'ve valued + `std::char_traits::eof()` in that case. + + @return character read from the input + */ + char_int_type get() + { + ++chars_read; + return current = ia.get_character(); + } + + /*! 
+ @return character read from the input after ignoring all 'N' entries + */ + char_int_type get_ignore_noop() + { + do + { + get(); + } + while (current == 'N'); + + return current; + } + + /* + @brief read a number from the input + + @tparam NumberType the type of the number + @param[in] format the current format (for diagnostics) + @param[out] result number of type @a NumberType + + @return whether conversion completed + + @note This function needs to respect the system's endianess, because + bytes in CBOR, MessagePack, and UBJSON are stored in network order + (big endian) and therefore need reordering on little endian systems. + */ + template + bool get_number(const input_format_t format, NumberType& result) + { + // step 1: read input into array with system's byte order + std::array vec; + for (std::size_t i = 0; i < sizeof(NumberType); ++i) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "number"))) + { + return false; + } + + // reverse byte order prior to conversion if necessary + if (is_little_endian != InputIsLittleEndian) + { + vec[sizeof(NumberType) - i - 1] = static_cast(current); + } + else + { + vec[i] = static_cast(current); // LCOV_EXCL_LINE + } + } + + // step 2: convert array into number of type T and return + std::memcpy(&result, vec.data(), sizeof(NumberType)); + return true; + } + + /*! + @brief create a string by reading characters from the input + + @tparam NumberType the type of the number + @param[in] format the current format (for diagnostics) + @param[in] len number of characters to read + @param[out] result string created by reading @a len bytes + + @return whether string creation completed + + @note We can not reserve @a len bytes for the result, because @a len + may be too large. Usually, @ref unexpect_eof() detects the end of + the input before we run out of string memory. 
+ */ + template + bool get_string(const input_format_t format, + const NumberType len, + string_t& result) + { + bool success = true; + for (NumberType i = 0; i < len; i++) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "string"))) + { + success = false; + break; + } + result.push_back(static_cast(current)); + }; + return success; + } + + /*! + @brief create a byte array by reading bytes from the input + + @tparam NumberType the type of the number + @param[in] format the current format (for diagnostics) + @param[in] len number of bytes to read + @param[out] result byte array created by reading @a len bytes + + @return whether byte array creation completed + + @note We can not reserve @a len bytes for the result, because @a len + may be too large. Usually, @ref unexpect_eof() detects the end of + the input before we run out of memory. + */ + template + bool get_binary(const input_format_t format, + const NumberType len, + binary_t& result) + { + bool success = true; + for (NumberType i = 0; i < len; i++) + { + get(); + if (JSON_HEDLEY_UNLIKELY(!unexpect_eof(format, "binary"))) + { + success = false; + break; + } + result.push_back(static_cast(current)); + } + return success; + } + + /*! + @param[in] format the current format (for diagnostics) + @param[in] context further context information (for diagnostics) + @return whether the last read character is not EOF + */ + JSON_HEDLEY_NON_NULL(3) + bool unexpect_eof(const input_format_t format, const char* context) const + { + if (JSON_HEDLEY_UNLIKELY(current == std::char_traits::eof())) + { + return sax->parse_error(chars_read, "", + parse_error::create(110, chars_read, exception_message(format, "unexpected end of input", context))); + } + return true; + } + + /*! + @return a string representation of the last read byte + */ + std::string get_token_string() const + { + std::array cr{{}}; + (std::snprintf)(cr.data(), cr.size(), "%.2hhX", static_cast(current)); + return std::string{cr.data()}; + } + + /*! 
+ @param[in] format the current format + @param[in] detail a detailed error message + @param[in] context further context information + @return a message string to use in the parse_error exceptions + */ + std::string exception_message(const input_format_t format, + const std::string& detail, + const std::string& context) const + { + std::string error_msg = "syntax error while parsing "; + + switch (format) + { + case input_format_t::cbor: + error_msg += "CBOR"; + break; + + case input_format_t::msgpack: + error_msg += "MessagePack"; + break; + + case input_format_t::ubjson: + error_msg += "UBJSON"; + break; + + case input_format_t::bson: + error_msg += "BSON"; + break; + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } + + return error_msg + " " + context + ": " + detail; + } + + private: + /// input adapter + InputAdapterType ia; + + /// the current character + char_int_type current = std::char_traits::eof(); + + /// the number of characters read + std::size_t chars_read = 0; + + /// whether we can assume little endianess + const bool is_little_endian = little_endianess(); + + /// the SAX parser + json_sax_t* sax = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + +// #include + + +#include // isfinite +#include // uint8_t +#include // function +#include // string +#include // move +#include // vector + +// #include + +// #include + +// #include + +// #include + +// #include + +// #include + +// #include + + +namespace nlohmann +{ +namespace detail +{ +//////////// +// parser // +//////////// + +enum class parse_event_t : uint8_t +{ + /// the parser read `{` and started to process a JSON object + object_start, + /// the parser read `}` and finished processing a JSON object + object_end, + /// the parser read `[` and started to process a JSON array + array_start, + /// the parser read `]` and finished processing a JSON array + array_end, + /// the parser read a key of a value in an object + key, + 
/// the parser finished reading a JSON value + value +}; + +template +using parser_callback_t = + std::function; + +/*! +@brief syntax analysis + +This class implements a recursive descent parser. +*/ +template +class parser +{ + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using number_float_t = typename BasicJsonType::number_float_t; + using string_t = typename BasicJsonType::string_t; + using lexer_t = lexer; + using token_type = typename lexer_t::token_type; + + public: + /// a parser reading from an input adapter + explicit parser(InputAdapterType&& adapter, + const parser_callback_t cb = nullptr, + const bool allow_exceptions_ = true, + const bool skip_comments = false) + : callback(cb) + , m_lexer(std::move(adapter), skip_comments) + , allow_exceptions(allow_exceptions_) + { + // read first token + get_token(); + } + + /*! + @brief public parser interface + + @param[in] strict whether to expect the last token to be EOF + @param[in,out] result parsed JSON value + + @throw parse_error.101 in case of an unexpected token + @throw parse_error.102 if to_unicode fails or surrogate error + @throw parse_error.103 if to_unicode fails + */ + void parse(const bool strict, BasicJsonType& result) + { + if (callback) + { + json_sax_dom_callback_parser sdp(result, callback, allow_exceptions); + sax_parse_internal(&sdp); + result.assert_invariant(); + + // in strict mode, input must be completely read + if (strict && (get_token() != token_type::end_of_input)) + { + sdp.parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::end_of_input, "value"))); + } + + // in case of an error, return discarded value + if (sdp.is_errored()) + { + result = value_t::discarded; + return; + } + + // set top-level value to null if it was discarded by the callback + // function + if (result.is_discarded()) + { 
+ result = nullptr; + } + } + else + { + json_sax_dom_parser sdp(result, allow_exceptions); + sax_parse_internal(&sdp); + result.assert_invariant(); + + // in strict mode, input must be completely read + if (strict && (get_token() != token_type::end_of_input)) + { + sdp.parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::end_of_input, "value"))); + } + + // in case of an error, return discarded value + if (sdp.is_errored()) + { + result = value_t::discarded; + return; + } + } + } + + /*! + @brief public accept interface + + @param[in] strict whether to expect the last token to be EOF + @return whether the input is a proper JSON text + */ + bool accept(const bool strict = true) + { + json_sax_acceptor sax_acceptor; + return sax_parse(&sax_acceptor, strict); + } + + template + JSON_HEDLEY_NON_NULL(2) + bool sax_parse(SAX* sax, const bool strict = true) + { + (void)detail::is_sax_static_asserts {}; + const bool result = sax_parse_internal(sax); + + // strict mode: next byte must be EOF + if (result && strict && (get_token() != token_type::end_of_input)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::end_of_input, "value"))); + } + + return result; + } + + private: + template + JSON_HEDLEY_NON_NULL(2) + bool sax_parse_internal(SAX* sax) + { + // stack to remember the hierarchy of structured values we are parsing + // true = array; false = object + std::vector states; + // value to avoid a goto (see comment where set to true) + bool skip_to_state_evaluation = false; + + while (true) + { + if (!skip_to_state_evaluation) + { + // invariant: get_token() was called before each iteration + switch (last_token) + { + case token_type::begin_object: + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_object(std::size_t(-1)))) + { + return false; + } + + // closing } -> we 
are done + if (get_token() == token_type::end_object) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_object())) + { + return false; + } + break; + } + + // parse key + if (JSON_HEDLEY_UNLIKELY(last_token != token_type::value_string)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::value_string, "object key"))); + } + if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string()))) + { + return false; + } + + // parse separator (:) + if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::name_separator, "object separator"))); + } + + // remember we are now inside an object + states.push_back(false); + + // parse values + get_token(); + continue; + } + + case token_type::begin_array: + { + if (JSON_HEDLEY_UNLIKELY(!sax->start_array(std::size_t(-1)))) + { + return false; + } + + // closing ] -> we are done + if (get_token() == token_type::end_array) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_array())) + { + return false; + } + break; + } + + // remember we are now inside an array + states.push_back(true); + + // parse values (no need to call get_token) + continue; + } + + case token_type::value_float: + { + const auto res = m_lexer.get_number_float(); + + if (JSON_HEDLEY_UNLIKELY(!std::isfinite(res))) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + out_of_range::create(406, "number overflow parsing '" + m_lexer.get_token_string() + "'")); + } + + if (JSON_HEDLEY_UNLIKELY(!sax->number_float(res, m_lexer.get_string()))) + { + return false; + } + + break; + } + + case token_type::literal_false: + { + if (JSON_HEDLEY_UNLIKELY(!sax->boolean(false))) + { + return false; + } + break; + } + + case token_type::literal_null: + { + if 
(JSON_HEDLEY_UNLIKELY(!sax->null())) + { + return false; + } + break; + } + + case token_type::literal_true: + { + if (JSON_HEDLEY_UNLIKELY(!sax->boolean(true))) + { + return false; + } + break; + } + + case token_type::value_integer: + { + if (JSON_HEDLEY_UNLIKELY(!sax->number_integer(m_lexer.get_number_integer()))) + { + return false; + } + break; + } + + case token_type::value_string: + { + if (JSON_HEDLEY_UNLIKELY(!sax->string(m_lexer.get_string()))) + { + return false; + } + break; + } + + case token_type::value_unsigned: + { + if (JSON_HEDLEY_UNLIKELY(!sax->number_unsigned(m_lexer.get_number_unsigned()))) + { + return false; + } + break; + } + + case token_type::parse_error: + { + // using "uninitialized" to avoid "expected" message + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::uninitialized, "value"))); + } + + default: // the last token was unexpected + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::literal_or_value, "value"))); + } + } + } + else + { + skip_to_state_evaluation = false; + } + + // we reached this line after we successfully parsed a value + if (states.empty()) + { + // empty stack: we reached the end of the hierarchy: done + return true; + } + + if (states.back()) // array + { + // comma -> next value + if (get_token() == token_type::value_separator) + { + // parse a new value + get_token(); + continue; + } + + // closing ] + if (JSON_HEDLEY_LIKELY(last_token == token_type::end_array)) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_array())) + { + return false; + } + + // We are done with this array. Before we can parse a + // new value, we need to evaluate the new state first. + // By setting skip_to_state_evaluation to false, we + // are effectively jumping to the beginning of this if. 
+ JSON_ASSERT(!states.empty()); + states.pop_back(); + skip_to_state_evaluation = true; + continue; + } + + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::end_array, "array"))); + } + else // object + { + // comma -> next value + if (get_token() == token_type::value_separator) + { + // parse key + if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::value_string)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::value_string, "object key"))); + } + + if (JSON_HEDLEY_UNLIKELY(!sax->key(m_lexer.get_string()))) + { + return false; + } + + // parse separator (:) + if (JSON_HEDLEY_UNLIKELY(get_token() != token_type::name_separator)) + { + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::name_separator, "object separator"))); + } + + // parse values + get_token(); + continue; + } + + // closing } + if (JSON_HEDLEY_LIKELY(last_token == token_type::end_object)) + { + if (JSON_HEDLEY_UNLIKELY(!sax->end_object())) + { + return false; + } + + // We are done with this object. Before we can parse a + // new value, we need to evaluate the new state first. + // By setting skip_to_state_evaluation to false, we + // are effectively jumping to the beginning of this if. 
+ JSON_ASSERT(!states.empty()); + states.pop_back(); + skip_to_state_evaluation = true; + continue; + } + + return sax->parse_error(m_lexer.get_position(), + m_lexer.get_token_string(), + parse_error::create(101, m_lexer.get_position(), + exception_message(token_type::end_object, "object"))); + } + } + } + + /// get next token from lexer + token_type get_token() + { + return last_token = m_lexer.scan(); + } + + std::string exception_message(const token_type expected, const std::string& context) + { + std::string error_msg = "syntax error "; + + if (!context.empty()) + { + error_msg += "while parsing " + context + " "; + } + + error_msg += "- "; + + if (last_token == token_type::parse_error) + { + error_msg += std::string(m_lexer.get_error_message()) + "; last read: '" + + m_lexer.get_token_string() + "'"; + } + else + { + error_msg += "unexpected " + std::string(lexer_t::token_type_name(last_token)); + } + + if (expected != token_type::uninitialized) + { + error_msg += "; expected " + std::string(lexer_t::token_type_name(expected)); + } + + return error_msg; + } + + private: + /// callback function + const parser_callback_t callback = nullptr; + /// the type of the last read token + token_type last_token = token_type::uninitialized; + /// the lexer + lexer_t m_lexer; + /// whether to throw exceptions in case of errors + const bool allow_exceptions = true; +}; +} // namespace detail +} // namespace nlohmann + +// #include + + +// #include + + +#include // ptrdiff_t +#include // numeric_limits + +namespace nlohmann +{ +namespace detail +{ +/* +@brief an iterator for primitive JSON types + +This class models an iterator for primitive JSON types (boolean, number, +string). It's only purpose is to allow the iterator/const_iterator classes +to "iterate" over primitive values. Internally, the iterator is modeled by +a `difference_type` variable. Value begin_value (`0`) models the begin, +end_value (`1`) models past the end. 
+*/ +class primitive_iterator_t +{ + private: + using difference_type = std::ptrdiff_t; + static constexpr difference_type begin_value = 0; + static constexpr difference_type end_value = begin_value + 1; + + /// iterator as signed integer type + difference_type m_it = (std::numeric_limits::min)(); + + public: + constexpr difference_type get_value() const noexcept + { + return m_it; + } + + /// set iterator to a defined beginning + void set_begin() noexcept + { + m_it = begin_value; + } + + /// set iterator to a defined past the end + void set_end() noexcept + { + m_it = end_value; + } + + /// return whether the iterator can be dereferenced + constexpr bool is_begin() const noexcept + { + return m_it == begin_value; + } + + /// return whether the iterator is at end + constexpr bool is_end() const noexcept + { + return m_it == end_value; + } + + friend constexpr bool operator==(primitive_iterator_t lhs, primitive_iterator_t rhs) noexcept + { + return lhs.m_it == rhs.m_it; + } + + friend constexpr bool operator<(primitive_iterator_t lhs, primitive_iterator_t rhs) noexcept + { + return lhs.m_it < rhs.m_it; + } + + primitive_iterator_t operator+(difference_type n) noexcept + { + auto result = *this; + result += n; + return result; + } + + friend constexpr difference_type operator-(primitive_iterator_t lhs, primitive_iterator_t rhs) noexcept + { + return lhs.m_it - rhs.m_it; + } + + primitive_iterator_t& operator++() noexcept + { + ++m_it; + return *this; + } + + primitive_iterator_t const operator++(int) noexcept + { + auto result = *this; + ++m_it; + return result; + } + + primitive_iterator_t& operator--() noexcept + { + --m_it; + return *this; + } + + primitive_iterator_t const operator--(int) noexcept + { + auto result = *this; + --m_it; + return result; + } + + primitive_iterator_t& operator+=(difference_type n) noexcept + { + m_it += n; + return *this; + } + + primitive_iterator_t& operator-=(difference_type n) noexcept + { + m_it -= n; + return *this; + } +}; +} 
// namespace detail +} // namespace nlohmann + + +namespace nlohmann +{ +namespace detail +{ +/*! +@brief an iterator value + +@note This structure could easily be a union, but MSVC currently does not allow +unions members with complex constructors, see https://github.com/nlohmann/json/pull/105. +*/ +template struct internal_iterator +{ + /// iterator for JSON objects + typename BasicJsonType::object_t::iterator object_iterator {}; + /// iterator for JSON arrays + typename BasicJsonType::array_t::iterator array_iterator {}; + /// generic iterator for all other types + primitive_iterator_t primitive_iterator {}; +}; +} // namespace detail +} // namespace nlohmann + +// #include + + +#include // iterator, random_access_iterator_tag, bidirectional_iterator_tag, advance, next +#include // conditional, is_const, remove_const + +// #include + +// #include + +// #include + +// #include + +// #include + +// #include + +// #include + + +namespace nlohmann +{ +namespace detail +{ +// forward declare, to be able to friend it later on +template class iteration_proxy; +template class iteration_proxy_value; + +/*! +@brief a template for a bidirectional iterator for the @ref basic_json class +This class implements a both iterators (iterator and const_iterator) for the +@ref basic_json class. +@note An iterator is called *initialized* when a pointer to a JSON value has + been set (e.g., by a constructor or a copy assignment). If the iterator is + default-constructed, it is *uninitialized* and most methods are undefined. + **The library uses assertions to detect calls on uninitialized iterators.** +@requirement The class satisfies the following concept requirements: +- +[BidirectionalIterator](https://en.cppreference.com/w/cpp/named_req/BidirectionalIterator): + The iterator that can be moved can be moved in both directions (i.e. + incremented and decremented). 
+@since version 1.0.0, simplified in version 2.0.9, change to bidirectional + iterators in version 3.0.0 (see https://github.com/nlohmann/json/issues/593) +*/ +template +class iter_impl +{ + /// allow basic_json to access private members + friend iter_impl::value, typename std::remove_const::type, const BasicJsonType>::type>; + friend BasicJsonType; + friend iteration_proxy; + friend iteration_proxy_value; + + using object_t = typename BasicJsonType::object_t; + using array_t = typename BasicJsonType::array_t; + // make sure BasicJsonType is basic_json or const basic_json + static_assert(is_basic_json::type>::value, + "iter_impl only accepts (const) basic_json"); + + public: + + /// The std::iterator class template (used as a base class to provide typedefs) is deprecated in C++17. + /// The C++ Standard has never required user-defined iterators to derive from std::iterator. + /// A user-defined iterator should provide publicly accessible typedefs named + /// iterator_category, value_type, difference_type, pointer, and reference. + /// Note that value_type is required to be non-const, even for constant iterators. + using iterator_category = std::bidirectional_iterator_tag; + + /// the type of the values when the iterator is dereferenced + using value_type = typename BasicJsonType::value_type; + /// a type to represent differences between iterators + using difference_type = typename BasicJsonType::difference_type; + /// defines a pointer to the type iterated over (value_type) + using pointer = typename std::conditional::value, + typename BasicJsonType::const_pointer, + typename BasicJsonType::pointer>::type; + /// defines a reference to the type iterated over (value_type) + using reference = + typename std::conditional::value, + typename BasicJsonType::const_reference, + typename BasicJsonType::reference>::type; + + /// default constructor + iter_impl() = default; + + /*! 
+ @brief constructor for a given JSON instance + @param[in] object pointer to a JSON object for this iterator + @pre object != nullptr + @post The iterator is initialized; i.e. `m_object != nullptr`. + */ + explicit iter_impl(pointer object) noexcept : m_object(object) + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + m_it.object_iterator = typename object_t::iterator(); + break; + } + + case value_t::array: + { + m_it.array_iterator = typename array_t::iterator(); + break; + } + + default: + { + m_it.primitive_iterator = primitive_iterator_t(); + break; + } + } + } + + /*! + @note The conventional copy constructor and copy assignment are implicitly + defined. Combined with the following converting constructor and + assignment, they support: (1) copy from iterator to iterator, (2) + copy from const iterator to const iterator, and (3) conversion from + iterator to const iterator. However conversion from const iterator + to iterator is not defined. + */ + + /*! + @brief const copy constructor + @param[in] other const iterator to copy from + @note This copy constructor had to be defined explicitly to circumvent a bug + occurring on msvc v19.0 compiler (VS 2015) debug build. For more + information refer to: https://github.com/nlohmann/json/issues/1608 + */ + iter_impl(const iter_impl& other) noexcept + : m_object(other.m_object), m_it(other.m_it) + {} + + /*! + @brief converting assignment + @param[in] other const iterator to copy from + @return const/non-const iterator + @note It is not checked whether @a other is initialized. + */ + iter_impl& operator=(const iter_impl& other) noexcept + { + m_object = other.m_object; + m_it = other.m_it; + return *this; + } + + /*! + @brief converting constructor + @param[in] other non-const iterator to copy from + @note It is not checked whether @a other is initialized. 
+ */ + iter_impl(const iter_impl::type>& other) noexcept + : m_object(other.m_object), m_it(other.m_it) + {} + + /*! + @brief converting assignment + @param[in] other non-const iterator to copy from + @return const/non-const iterator + @note It is not checked whether @a other is initialized. + */ + iter_impl& operator=(const iter_impl::type>& other) noexcept + { + m_object = other.m_object; + m_it = other.m_it; + return *this; + } + + private: + /*! + @brief set the iterator to the first value + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + void set_begin() noexcept + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + m_it.object_iterator = m_object->m_value.object->begin(); + break; + } + + case value_t::array: + { + m_it.array_iterator = m_object->m_value.array->begin(); + break; + } + + case value_t::null: + { + // set to end so begin()==end() is true: null is empty + m_it.primitive_iterator.set_end(); + break; + } + + default: + { + m_it.primitive_iterator.set_begin(); + break; + } + } + } + + /*! + @brief set the iterator past the last value + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + void set_end() noexcept + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + m_it.object_iterator = m_object->m_value.object->end(); + break; + } + + case value_t::array: + { + m_it.array_iterator = m_object->m_value.array->end(); + break; + } + + default: + { + m_it.primitive_iterator.set_end(); + break; + } + } + } + + public: + /*! + @brief return a reference to the value pointed to by the iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
+ */ + reference operator*() const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + JSON_ASSERT(m_it.object_iterator != m_object->m_value.object->end()); + return m_it.object_iterator->second; + } + + case value_t::array: + { + JSON_ASSERT(m_it.array_iterator != m_object->m_value.array->end()); + return *m_it.array_iterator; + } + + case value_t::null: + JSON_THROW(invalid_iterator::create(214, "cannot get value")); + + default: + { + if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.is_begin())) + { + return *m_object; + } + + JSON_THROW(invalid_iterator::create(214, "cannot get value")); + } + } + } + + /*! + @brief dereference the iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + pointer operator->() const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + JSON_ASSERT(m_it.object_iterator != m_object->m_value.object->end()); + return &(m_it.object_iterator->second); + } + + case value_t::array: + { + JSON_ASSERT(m_it.array_iterator != m_object->m_value.array->end()); + return &*m_it.array_iterator; + } + + default: + { + if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.is_begin())) + { + return m_object; + } + + JSON_THROW(invalid_iterator::create(214, "cannot get value")); + } + } + } + + /*! + @brief post-increment (it++) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl const operator++(int) + { + auto result = *this; + ++(*this); + return result; + } + + /*! + @brief pre-increment (++it) + @pre The iterator is initialized; i.e. `m_object != nullptr`. 
+ */ + iter_impl& operator++() + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + std::advance(m_it.object_iterator, 1); + break; + } + + case value_t::array: + { + std::advance(m_it.array_iterator, 1); + break; + } + + default: + { + ++m_it.primitive_iterator; + break; + } + } + + return *this; + } + + /*! + @brief post-decrement (it--) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl const operator--(int) + { + auto result = *this; + --(*this); + return result; + } + + /*! + @brief pre-decrement (--it) + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl& operator--() + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + { + std::advance(m_it.object_iterator, -1); + break; + } + + case value_t::array: + { + std::advance(m_it.array_iterator, -1); + break; + } + + default: + { + --m_it.primitive_iterator; + break; + } + } + + return *this; + } + + /*! + @brief comparison: equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator==(const iter_impl& other) const + { + // if objects are not the same, the comparison is undefined + if (JSON_HEDLEY_UNLIKELY(m_object != other.m_object)) + { + JSON_THROW(invalid_iterator::create(212, "cannot compare iterators of different containers")); + } + + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + return (m_it.object_iterator == other.m_it.object_iterator); + + case value_t::array: + return (m_it.array_iterator == other.m_it.array_iterator); + + default: + return (m_it.primitive_iterator == other.m_it.primitive_iterator); + } + } + + /*! + @brief comparison: not equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator!=(const iter_impl& other) const + { + return !operator==(other); + } + + /*! + @brief comparison: smaller + @pre The iterator is initialized; i.e. 
`m_object != nullptr`. + */ + bool operator<(const iter_impl& other) const + { + // if objects are not the same, the comparison is undefined + if (JSON_HEDLEY_UNLIKELY(m_object != other.m_object)) + { + JSON_THROW(invalid_iterator::create(212, "cannot compare iterators of different containers")); + } + + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(213, "cannot compare order of object iterators")); + + case value_t::array: + return (m_it.array_iterator < other.m_it.array_iterator); + + default: + return (m_it.primitive_iterator < other.m_it.primitive_iterator); + } + } + + /*! + @brief comparison: less than or equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator<=(const iter_impl& other) const + { + return !other.operator < (*this); + } + + /*! + @brief comparison: greater than + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator>(const iter_impl& other) const + { + return !operator<=(other); + } + + /*! + @brief comparison: greater than or equal + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + bool operator>=(const iter_impl& other) const + { + return !operator<(other); + } + + /*! + @brief add to iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl& operator+=(difference_type i) + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(209, "cannot use offsets with object iterators")); + + case value_t::array: + { + std::advance(m_it.array_iterator, i); + break; + } + + default: + { + m_it.primitive_iterator += i; + break; + } + } + + return *this; + } + + /*! + @brief subtract from iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl& operator-=(difference_type i) + { + return operator+=(-i); + } + + /*! 
+ @brief add to iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator+(difference_type i) const + { + auto result = *this; + result += i; + return result; + } + + /*! + @brief addition of distance and iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + friend iter_impl operator+(difference_type i, const iter_impl& it) + { + auto result = it; + result += i; + return result; + } + + /*! + @brief subtract from iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + iter_impl operator-(difference_type i) const + { + auto result = *this; + result -= i; + return result; + } + + /*! + @brief return difference + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + difference_type operator-(const iter_impl& other) const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(209, "cannot use offsets with object iterators")); + + case value_t::array: + return m_it.array_iterator - other.m_it.array_iterator; + + default: + return m_it.primitive_iterator - other.m_it.primitive_iterator; + } + } + + /*! + @brief access to successor + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + reference operator[](difference_type n) const + { + JSON_ASSERT(m_object != nullptr); + + switch (m_object->m_type) + { + case value_t::object: + JSON_THROW(invalid_iterator::create(208, "cannot use operator[] for object iterators")); + + case value_t::array: + return *std::next(m_it.array_iterator, n); + + case value_t::null: + JSON_THROW(invalid_iterator::create(214, "cannot get value")); + + default: + { + if (JSON_HEDLEY_LIKELY(m_it.primitive_iterator.get_value() == -n)) + { + return *m_object; + } + + JSON_THROW(invalid_iterator::create(214, "cannot get value")); + } + } + } + + /*! + @brief return the key of an object iterator + @pre The iterator is initialized; i.e. 
`m_object != nullptr`. + */ + const typename object_t::key_type& key() const + { + JSON_ASSERT(m_object != nullptr); + + if (JSON_HEDLEY_LIKELY(m_object->is_object())) + { + return m_it.object_iterator->first; + } + + JSON_THROW(invalid_iterator::create(207, "cannot use key() for non-object iterators")); + } + + /*! + @brief return the value of an iterator + @pre The iterator is initialized; i.e. `m_object != nullptr`. + */ + reference value() const + { + return operator*(); + } + + private: + /// associated JSON instance + pointer m_object = nullptr; + /// the actual iterator of the associated instance + internal_iterator::type> m_it {}; +}; +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + + +#include // ptrdiff_t +#include // reverse_iterator +#include // declval + +namespace nlohmann +{ +namespace detail +{ +////////////////////// +// reverse_iterator // +////////////////////// + +/*! +@brief a template for a reverse iterator class + +@tparam Base the base iterator type to reverse. Valid types are @ref +iterator (to create @ref reverse_iterator) and @ref const_iterator (to +create @ref const_reverse_iterator). + +@requirement The class satisfies the following concept requirements: +- +[BidirectionalIterator](https://en.cppreference.com/w/cpp/named_req/BidirectionalIterator): + The iterator that can be moved can be moved in both directions (i.e. + incremented and decremented). +- [OutputIterator](https://en.cppreference.com/w/cpp/named_req/OutputIterator): + It is possible to write to the pointed-to element (only if @a Base is + @ref iterator). 
+ +@since version 1.0.0 +*/ +template +class json_reverse_iterator : public std::reverse_iterator +{ + public: + using difference_type = std::ptrdiff_t; + /// shortcut to the reverse iterator adapter + using base_iterator = std::reverse_iterator; + /// the reference type for the pointed-to element + using reference = typename Base::reference; + + /// create reverse iterator from iterator + explicit json_reverse_iterator(const typename base_iterator::iterator_type& it) noexcept + : base_iterator(it) {} + + /// create reverse iterator from base class + explicit json_reverse_iterator(const base_iterator& it) noexcept : base_iterator(it) {} + + /// post-increment (it++) + json_reverse_iterator const operator++(int) + { + return static_cast(base_iterator::operator++(1)); + } + + /// pre-increment (++it) + json_reverse_iterator& operator++() + { + return static_cast(base_iterator::operator++()); + } + + /// post-decrement (it--) + json_reverse_iterator const operator--(int) + { + return static_cast(base_iterator::operator--(1)); + } + + /// pre-decrement (--it) + json_reverse_iterator& operator--() + { + return static_cast(base_iterator::operator--()); + } + + /// add to iterator + json_reverse_iterator& operator+=(difference_type i) + { + return static_cast(base_iterator::operator+=(i)); + } + + /// add to iterator + json_reverse_iterator operator+(difference_type i) const + { + return static_cast(base_iterator::operator+(i)); + } + + /// subtract from iterator + json_reverse_iterator operator-(difference_type i) const + { + return static_cast(base_iterator::operator-(i)); + } + + /// return difference + difference_type operator-(const json_reverse_iterator& other) const + { + return base_iterator(*this) - base_iterator(other); + } + + /// access to successor + reference operator[](difference_type n) const + { + return *(this->operator+(n)); + } + + /// return the key of an object iterator + auto key() const -> decltype(std::declval().key()) + { + auto it = 
--this->base(); + return it.key(); + } + + /// return the value of an iterator + reference value() const + { + auto it = --this->base(); + return it.operator * (); + } +}; +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + + +#include // all_of +#include // isdigit +#include // max +#include // accumulate +#include // string +#include // move +#include // vector + +// #include + +// #include + +// #include + + +namespace nlohmann +{ +template +class json_pointer +{ + // allow basic_json to access private members + NLOHMANN_BASIC_JSON_TPL_DECLARATION + friend class basic_json; + + public: + /*! + @brief create JSON pointer + + Create a JSON pointer according to the syntax described in + [Section 3 of RFC6901](https://tools.ietf.org/html/rfc6901#section-3). + + @param[in] s string representing the JSON pointer; if omitted, the empty + string is assumed which references the whole JSON value + + @throw parse_error.107 if the given JSON pointer @a s is nonempty and does + not begin with a slash (`/`); see example below + + @throw parse_error.108 if a tilde (`~`) in the given JSON pointer @a s is + not followed by `0` (representing `~`) or `1` (representing `/`); see + example below + + @liveexample{The example shows the construction several valid JSON pointers + as well as the exceptional behavior.,json_pointer} + + @since version 2.0.0 + */ + explicit json_pointer(const std::string& s = "") + : reference_tokens(split(s)) + {} + + /*! 
+ @brief return a string representation of the JSON pointer + + @invariant For each JSON pointer `ptr`, it holds: + @code {.cpp} + ptr == json_pointer(ptr.to_string()); + @endcode + + @return a string representation of the JSON pointer + + @liveexample{The example shows the result of `to_string`.,json_pointer__to_string} + + @since version 2.0.0 + */ + std::string to_string() const + { + return std::accumulate(reference_tokens.begin(), reference_tokens.end(), + std::string{}, + [](const std::string & a, const std::string & b) + { + return a + "/" + escape(b); + }); + } + + /// @copydoc to_string() + operator std::string() const + { + return to_string(); + } + + /*! + @brief append another JSON pointer at the end of this JSON pointer + + @param[in] ptr JSON pointer to append + @return JSON pointer with @a ptr appended + + @liveexample{The example shows the usage of `operator/=`.,json_pointer__operator_add} + + @complexity Linear in the length of @a ptr. + + @sa @ref operator/=(std::string) to append a reference token + @sa @ref operator/=(std::size_t) to append an array index + @sa @ref operator/(const json_pointer&, const json_pointer&) for a binary operator + + @since version 3.6.0 + */ + json_pointer& operator/=(const json_pointer& ptr) + { + reference_tokens.insert(reference_tokens.end(), + ptr.reference_tokens.begin(), + ptr.reference_tokens.end()); + return *this; + } + + /*! + @brief append an unescaped reference token at the end of this JSON pointer + + @param[in] token reference token to append + @return JSON pointer with @a token appended without escaping @a token + + @liveexample{The example shows the usage of `operator/=`.,json_pointer__operator_add} + + @complexity Amortized constant. 
+ + @sa @ref operator/=(const json_pointer&) to append a JSON pointer + @sa @ref operator/=(std::size_t) to append an array index + @sa @ref operator/(const json_pointer&, std::size_t) for a binary operator + + @since version 3.6.0 + */ + json_pointer& operator/=(std::string token) + { + push_back(std::move(token)); + return *this; + } + + /*! + @brief append an array index at the end of this JSON pointer + + @param[in] array_idx array index to append + @return JSON pointer with @a array_idx appended + + @liveexample{The example shows the usage of `operator/=`.,json_pointer__operator_add} + + @complexity Amortized constant. + + @sa @ref operator/=(const json_pointer&) to append a JSON pointer + @sa @ref operator/=(std::string) to append a reference token + @sa @ref operator/(const json_pointer&, std::string) for a binary operator + + @since version 3.6.0 + */ + json_pointer& operator/=(std::size_t array_idx) + { + return *this /= std::to_string(array_idx); + } + + /*! + @brief create a new JSON pointer by appending the right JSON pointer at the end of the left JSON pointer + + @param[in] lhs JSON pointer + @param[in] rhs JSON pointer + @return a new JSON pointer with @a rhs appended to @a lhs + + @liveexample{The example shows the usage of `operator/`.,json_pointer__operator_add_binary} + + @complexity Linear in the length of @a lhs and @a rhs. + + @sa @ref operator/=(const json_pointer&) to append a JSON pointer + + @since version 3.6.0 + */ + friend json_pointer operator/(const json_pointer& lhs, + const json_pointer& rhs) + { + return json_pointer(lhs) /= rhs; + } + + /*! + @brief create a new JSON pointer by appending the unescaped token at the end of the JSON pointer + + @param[in] ptr JSON pointer + @param[in] token reference token + @return a new JSON pointer with unescaped @a token appended to @a ptr + + @liveexample{The example shows the usage of `operator/`.,json_pointer__operator_add_binary} + + @complexity Linear in the length of @a ptr. 
+ + @sa @ref operator/=(std::string) to append a reference token + + @since version 3.6.0 + */ + friend json_pointer operator/(const json_pointer& ptr, std::string token) + { + return json_pointer(ptr) /= std::move(token); + } + + /*! + @brief create a new JSON pointer by appending the array-index-token at the end of the JSON pointer + + @param[in] ptr JSON pointer + @param[in] array_idx array index + @return a new JSON pointer with @a array_idx appended to @a ptr + + @liveexample{The example shows the usage of `operator/`.,json_pointer__operator_add_binary} + + @complexity Linear in the length of @a ptr. + + @sa @ref operator/=(std::size_t) to append an array index + + @since version 3.6.0 + */ + friend json_pointer operator/(const json_pointer& ptr, std::size_t array_idx) + { + return json_pointer(ptr) /= array_idx; + } + + /*! + @brief returns the parent of this JSON pointer + + @return parent of this JSON pointer; in case this JSON pointer is the root, + the root itself is returned + + @complexity Linear in the length of the JSON pointer. + + @liveexample{The example shows the result of `parent_pointer` for different + JSON Pointers.,json_pointer__parent_pointer} + + @since version 3.6.0 + */ + json_pointer parent_pointer() const + { + if (empty()) + { + return *this; + } + + json_pointer res = *this; + res.pop_back(); + return res; + } + + /*! + @brief remove last reference token + + @pre not `empty()` + + @liveexample{The example shows the usage of `pop_back`.,json_pointer__pop_back} + + @complexity Constant. + + @throw out_of_range.405 if JSON pointer has no parent + + @since version 3.6.0 + */ + void pop_back() + { + if (JSON_HEDLEY_UNLIKELY(empty())) + { + JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent")); + } + + reference_tokens.pop_back(); + } + + /*! 
+ @brief return last reference token + + @pre not `empty()` + @return last reference token + + @liveexample{The example shows the usage of `back`.,json_pointer__back} + + @complexity Constant. + + @throw out_of_range.405 if JSON pointer has no parent + + @since version 3.6.0 + */ + const std::string& back() const + { + if (JSON_HEDLEY_UNLIKELY(empty())) + { + JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent")); + } + + return reference_tokens.back(); + } + + /*! + @brief append an unescaped token at the end of the reference pointer + + @param[in] token token to add + + @complexity Amortized constant. + + @liveexample{The example shows the result of `push_back` for different + JSON Pointers.,json_pointer__push_back} + + @since version 3.6.0 + */ + void push_back(const std::string& token) + { + reference_tokens.push_back(token); + } + + /// @copydoc push_back(const std::string&) + void push_back(std::string&& token) + { + reference_tokens.push_back(std::move(token)); + } + + /*! + @brief return whether pointer points to the root document + + @return true iff the JSON pointer points to the root document + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @liveexample{The example shows the result of `empty` for different JSON + Pointers.,json_pointer__empty} + + @since version 3.6.0 + */ + bool empty() const noexcept + { + return reference_tokens.empty(); + } + + private: + /*! 
+ @param[in] s reference token to be converted into an array index + + @return integer representation of @a s + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index begins not with a digit + @throw out_of_range.404 if string @a s could not be converted to an integer + @throw out_of_range.410 if an array index exceeds size_type + */ + static typename BasicJsonType::size_type array_index(const std::string& s) + { + using size_type = typename BasicJsonType::size_type; + + // error condition (cf. RFC 6901, Sect. 4) + if (JSON_HEDLEY_UNLIKELY(s.size() > 1 && s[0] == '0')) + { + JSON_THROW(detail::parse_error::create(106, 0, + "array index '" + s + + "' must not begin with '0'")); + } + + // error condition (cf. RFC 6901, Sect. 4) + if (JSON_HEDLEY_UNLIKELY(s.size() > 1 && !(s[0] >= '1' && s[0] <= '9'))) + { + JSON_THROW(detail::parse_error::create(109, 0, "array index '" + s + "' is not a number")); + } + + std::size_t processed_chars = 0; + unsigned long long res = 0; + JSON_TRY + { + res = std::stoull(s, &processed_chars); + } + JSON_CATCH(std::out_of_range&) + { + JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + s + "'")); + } + + // check if the string was completely read + if (JSON_HEDLEY_UNLIKELY(processed_chars != s.size())) + { + JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + s + "'")); + } + + // only triggered on special platforms (like 32bit), see also + // https://github.com/nlohmann/json/pull/2203 + if (res >= static_cast((std::numeric_limits::max)())) + { + JSON_THROW(detail::out_of_range::create(410, "array index " + s + " exceeds size_type")); // LCOV_EXCL_LINE + } + + return static_cast(res); + } + + json_pointer top() const + { + if (JSON_HEDLEY_UNLIKELY(empty())) + { + JSON_THROW(detail::out_of_range::create(405, "JSON pointer has no parent")); + } + + json_pointer result = *this; + result.reference_tokens = {reference_tokens[0]}; + return 
result; + } + + /*! + @brief create and return a reference to the pointed to value + + @complexity Linear in the number of reference tokens. + + @throw parse_error.109 if array index is not a number + @throw type_error.313 if value cannot be unflattened + */ + BasicJsonType& get_and_create(BasicJsonType& j) const + { + auto result = &j; + + // in case no reference tokens exist, return a reference to the JSON value + // j which will be overwritten by a primitive value + for (const auto& reference_token : reference_tokens) + { + switch (result->type()) + { + case detail::value_t::null: + { + if (reference_token == "0") + { + // start a new array if reference token is 0 + result = &result->operator[](0); + } + else + { + // start a new object otherwise + result = &result->operator[](reference_token); + } + break; + } + + case detail::value_t::object: + { + // create an entry in the object + result = &result->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + // create an entry in the array + result = &result->operator[](array_index(reference_token)); + break; + } + + /* + The following code is only reached if there exists a reference + token _and_ the current value is primitive. In this case, we have + an error situation, because primitive values may only occur as + single value; that is, with an empty list of reference tokens. + */ + default: + JSON_THROW(detail::type_error::create(313, "invalid value to unflatten")); + } + } + + return *result; + } + + /*! + @brief return a reference to the pointed to value + + @note This version does not throw if a value is not present, but tries to + create nested values instead. For instance, calling this function + with pointer `"/this/that"` on a null value is equivalent to calling + `operator[]("this").operator[]("that")` on that value, effectively + changing the null value to an object. 
+ + @param[in] ptr a JSON value + + @return reference to the JSON value pointed to by the JSON pointer + + @complexity Linear in the length of the JSON pointer. + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + BasicJsonType& get_unchecked(BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + // convert null values to arrays or objects before continuing + if (ptr->is_null()) + { + // check if reference token is a number + const bool nums = + std::all_of(reference_token.begin(), reference_token.end(), + [](const unsigned char x) + { + return std::isdigit(x); + }); + + // change value to array for numbers or "-" or to object otherwise + *ptr = (nums || reference_token == "-") + ? detail::value_t::array + : detail::value_t::object; + } + + switch (ptr->type()) + { + case detail::value_t::object: + { + // use unchecked object access + ptr = &ptr->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + if (reference_token == "-") + { + // explicitly treat "-" as index beyond the end + ptr = &ptr->operator[](ptr->m_value.array->size()); + } + else + { + // convert array index to number; unchecked access + ptr = &ptr->operator[](array_index(reference_token)); + } + break; + } + + default: + JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + reference_token + "'")); + } + } + + return *ptr; + } + + /*! 
+ @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + BasicJsonType& get_checked(BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + // note: at performs range check + ptr = &ptr->at(reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" always fails the range check + JSON_THROW(detail::out_of_range::create(402, + "array index '-' (" + std::to_string(ptr->m_value.array->size()) + + ") is out of range")); + } + + // note: at performs range check + ptr = &ptr->at(array_index(reference_token)); + break; + } + + default: + JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + reference_token + "'")); + } + } + + return *ptr; + } + + /*! 
+ @brief return a const reference to the pointed to value + + @param[in] ptr a JSON value + + @return const reference to the JSON value pointed to by the JSON + pointer + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + const BasicJsonType& get_unchecked(const BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + // use unchecked object access + ptr = &ptr->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" cannot be used for const access + JSON_THROW(detail::out_of_range::create(402, + "array index '-' (" + std::to_string(ptr->m_value.array->size()) + + ") is out of range")); + } + + // use unchecked array access + ptr = &ptr->operator[](array_index(reference_token)); + break; + } + + default: + JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + reference_token + "'")); + } + } + + return *ptr; + } + + /*! 
+ @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + */ + const BasicJsonType& get_checked(const BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + // note: at performs range check + ptr = &ptr->at(reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" always fails the range check + JSON_THROW(detail::out_of_range::create(402, + "array index '-' (" + std::to_string(ptr->m_value.array->size()) + + ") is out of range")); + } + + // note: at performs range check + ptr = &ptr->at(array_index(reference_token)); + break; + } + + default: + JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + reference_token + "'")); + } + } + + return *ptr; + } + + /*! 
+ @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + */ + bool contains(const BasicJsonType* ptr) const + { + for (const auto& reference_token : reference_tokens) + { + switch (ptr->type()) + { + case detail::value_t::object: + { + if (!ptr->contains(reference_token)) + { + // we did not find the key in the object + return false; + } + + ptr = &ptr->operator[](reference_token); + break; + } + + case detail::value_t::array: + { + if (JSON_HEDLEY_UNLIKELY(reference_token == "-")) + { + // "-" always fails the range check + return false; + } + if (JSON_HEDLEY_UNLIKELY(reference_token.size() == 1 && !("0" <= reference_token && reference_token <= "9"))) + { + // invalid char + return false; + } + if (JSON_HEDLEY_UNLIKELY(reference_token.size() > 1)) + { + if (JSON_HEDLEY_UNLIKELY(!('1' <= reference_token[0] && reference_token[0] <= '9'))) + { + // first char should be between '1' and '9' + return false; + } + for (std::size_t i = 1; i < reference_token.size(); i++) + { + if (JSON_HEDLEY_UNLIKELY(!('0' <= reference_token[i] && reference_token[i] <= '9'))) + { + // other char should be between '0' and '9' + return false; + } + } + } + + const auto idx = array_index(reference_token); + if (idx >= ptr->size()) + { + // index out of range + return false; + } + + ptr = &ptr->operator[](idx); + break; + } + + default: + { + // we do not expect primitive values if there is still a + // reference token to process + return false; + } + } + } + + // no reference token left means we found a primitive value + return true; + } + + /*! + @brief split the string input to reference tokens + + @note This function is only called by the json_pointer constructor. + All exceptions below are documented there. 
+ + @throw parse_error.107 if the pointer is not empty or begins with '/' + @throw parse_error.108 if character '~' is not followed by '0' or '1' + */ + static std::vector split(const std::string& reference_string) + { + std::vector result; + + // special case: empty reference string -> no reference tokens + if (reference_string.empty()) + { + return result; + } + + // check if nonempty reference string begins with slash + if (JSON_HEDLEY_UNLIKELY(reference_string[0] != '/')) + { + JSON_THROW(detail::parse_error::create(107, 1, + "JSON pointer must be empty or begin with '/' - was: '" + + reference_string + "'")); + } + + // extract the reference tokens: + // - slash: position of the last read slash (or end of string) + // - start: position after the previous slash + for ( + // search for the first slash after the first character + std::size_t slash = reference_string.find_first_of('/', 1), + // set the beginning of the first reference token + start = 1; + // we can stop if start == 0 (if slash == std::string::npos) + start != 0; + // set the beginning of the next reference token + // (will eventually be 0 if slash == std::string::npos) + start = (slash == std::string::npos) ? 0 : slash + 1, + // find next slash + slash = reference_string.find_first_of('/', start)) + { + // use the text between the beginning of the reference token + // (start) and the last slash (slash). 
+ auto reference_token = reference_string.substr(start, slash - start); + + // check reference tokens are properly escaped + for (std::size_t pos = reference_token.find_first_of('~'); + pos != std::string::npos; + pos = reference_token.find_first_of('~', pos + 1)) + { + JSON_ASSERT(reference_token[pos] == '~'); + + // ~ must be followed by 0 or 1 + if (JSON_HEDLEY_UNLIKELY(pos == reference_token.size() - 1 || + (reference_token[pos + 1] != '0' && + reference_token[pos + 1] != '1'))) + { + JSON_THROW(detail::parse_error::create(108, 0, "escape character '~' must be followed with '0' or '1'")); + } + } + + // finally, store the reference token + unescape(reference_token); + result.push_back(reference_token); + } + + return result; + } + + /*! + @brief replace all occurrences of a substring by another string + + @param[in,out] s the string to manipulate; changed so that all + occurrences of @a f are replaced with @a t + @param[in] f the substring to replace with @a t + @param[in] t the string to replace @a f + + @pre The search string @a f must not be empty. **This precondition is + enforced with an assertion.** + + @since version 2.0.0 + */ + static void replace_substring(std::string& s, const std::string& f, + const std::string& t) + { + JSON_ASSERT(!f.empty()); + for (auto pos = s.find(f); // find first occurrence of f + pos != std::string::npos; // make sure f was found + s.replace(pos, f.size(), t), // replace with t, and + pos = s.find(f, pos + t.size())) // find next occurrence of f + {} + } + + /// escape "~" to "~0" and "/" to "~1" + static std::string escape(std::string s) + { + replace_substring(s, "~", "~0"); + replace_substring(s, "/", "~1"); + return s; + } + + /// unescape "~1" to tilde and "~0" to slash (order is important!) + static void unescape(std::string& s) + { + replace_substring(s, "~1", "/"); + replace_substring(s, "~0", "~"); + } + + /*! 
+ @param[in] reference_string the reference string to the current value + @param[in] value the value to consider + @param[in,out] result the result object to insert values to + + @note Empty objects or arrays are flattened to `null`. + */ + static void flatten(const std::string& reference_string, + const BasicJsonType& value, + BasicJsonType& result) + { + switch (value.type()) + { + case detail::value_t::array: + { + if (value.m_value.array->empty()) + { + // flatten empty array as null + result[reference_string] = nullptr; + } + else + { + // iterate array and use index as reference string + for (std::size_t i = 0; i < value.m_value.array->size(); ++i) + { + flatten(reference_string + "/" + std::to_string(i), + value.m_value.array->operator[](i), result); + } + } + break; + } + + case detail::value_t::object: + { + if (value.m_value.object->empty()) + { + // flatten empty object as null + result[reference_string] = nullptr; + } + else + { + // iterate object and use keys as reference string + for (const auto& element : *value.m_value.object) + { + flatten(reference_string + "/" + escape(element.first), element.second, result); + } + } + break; + } + + default: + { + // add primitive value with its reference string + result[reference_string] = value; + break; + } + } + } + + /*! 
+ @param[in] value flattened JSON + + @return unflattened JSON + + @throw parse_error.109 if array index is not a number + @throw type_error.314 if value is not an object + @throw type_error.315 if object values are not primitive + @throw type_error.313 if value cannot be unflattened + */ + static BasicJsonType + unflatten(const BasicJsonType& value) + { + if (JSON_HEDLEY_UNLIKELY(!value.is_object())) + { + JSON_THROW(detail::type_error::create(314, "only objects can be unflattened")); + } + + BasicJsonType result; + + // iterate the JSON object values + for (const auto& element : *value.m_value.object) + { + if (JSON_HEDLEY_UNLIKELY(!element.second.is_primitive())) + { + JSON_THROW(detail::type_error::create(315, "values in object must be primitive")); + } + + // assign value to reference pointed to by JSON pointer; Note that if + // the JSON pointer is "" (i.e., points to the whole value), function + // get_and_create returns a reference to result itself. An assignment + // will then create a primitive value. + json_pointer(element.first).get_and_create(result) = element.second; + } + + return result; + } + + /*! + @brief compares two JSON pointers for equality + + @param[in] lhs JSON pointer to compare + @param[in] rhs JSON pointer to compare + @return whether @a lhs is equal to @a rhs + + @complexity Linear in the length of the JSON pointer + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + */ + friend bool operator==(json_pointer const& lhs, + json_pointer const& rhs) noexcept + { + return lhs.reference_tokens == rhs.reference_tokens; + } + + /*! + @brief compares two JSON pointers for inequality + + @param[in] lhs JSON pointer to compare + @param[in] rhs JSON pointer to compare + @return whether @a lhs is not equal @a rhs + + @complexity Linear in the length of the JSON pointer + + @exceptionsafety No-throw guarantee: this function never throws exceptions. 
+ */ + friend bool operator!=(json_pointer const& lhs, + json_pointer const& rhs) noexcept + { + return !(lhs == rhs); + } + + /// the reference tokens + std::vector reference_tokens; +}; +} // namespace nlohmann + +// #include + + +#include +#include + +// #include + + +namespace nlohmann +{ +namespace detail +{ +template +class json_ref +{ + public: + using value_type = BasicJsonType; + + json_ref(value_type&& value) + : owned_value(std::move(value)) + , value_ref(&owned_value) + , is_rvalue(true) + {} + + json_ref(const value_type& value) + : value_ref(const_cast(&value)) + , is_rvalue(false) + {} + + json_ref(std::initializer_list init) + : owned_value(init) + , value_ref(&owned_value) + , is_rvalue(true) + {} + + template < + class... Args, + enable_if_t::value, int> = 0 > + json_ref(Args && ... args) + : owned_value(std::forward(args)...) + , value_ref(&owned_value) + , is_rvalue(true) + {} + + // class should be movable only + json_ref(json_ref&&) = default; + json_ref(const json_ref&) = delete; + json_ref& operator=(const json_ref&) = delete; + json_ref& operator=(json_ref&&) = delete; + ~json_ref() = default; + + value_type moved_or_copied() const + { + if (is_rvalue) + { + return std::move(*value_ref); + } + return *value_ref; + } + + value_type const& operator*() const + { + return *static_cast(value_ref); + } + + value_type const* operator->() const + { + return static_cast(value_ref); + } + + private: + mutable value_type owned_value = nullptr; + value_type* value_ref = nullptr; + const bool is_rvalue = true; +}; +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + +// #include + +// #include + + +#include // reverse +#include // array +#include // uint8_t, uint16_t, uint32_t, uint64_t +#include // memcpy +#include // numeric_limits +#include // string +#include // isnan, isinf + +// #include + +// #include + +// #include + + +#include // copy +#include // size_t +#include // streamsize +#include // back_inserter +#include // 
shared_ptr, make_shared +#include // basic_ostream +#include // basic_string +#include // vector +// #include + + +namespace nlohmann +{ +namespace detail +{ +/// abstract output adapter interface +template struct output_adapter_protocol +{ + virtual void write_character(CharType c) = 0; + virtual void write_characters(const CharType* s, std::size_t length) = 0; + virtual ~output_adapter_protocol() = default; +}; + +/// a type to simplify interfaces +template +using output_adapter_t = std::shared_ptr>; + +/// output adapter for byte vectors +template +class output_vector_adapter : public output_adapter_protocol +{ + public: + explicit output_vector_adapter(std::vector& vec) noexcept + : v(vec) + {} + + void write_character(CharType c) override + { + v.push_back(c); + } + + JSON_HEDLEY_NON_NULL(2) + void write_characters(const CharType* s, std::size_t length) override + { + std::copy(s, s + length, std::back_inserter(v)); + } + + private: + std::vector& v; +}; + +/// output adapter for output streams +template +class output_stream_adapter : public output_adapter_protocol +{ + public: + explicit output_stream_adapter(std::basic_ostream& s) noexcept + : stream(s) + {} + + void write_character(CharType c) override + { + stream.put(c); + } + + JSON_HEDLEY_NON_NULL(2) + void write_characters(const CharType* s, std::size_t length) override + { + stream.write(s, static_cast(length)); + } + + private: + std::basic_ostream& stream; +}; + +/// output adapter for basic_string +template> +class output_string_adapter : public output_adapter_protocol +{ + public: + explicit output_string_adapter(StringType& s) noexcept + : str(s) + {} + + void write_character(CharType c) override + { + str.push_back(c); + } + + JSON_HEDLEY_NON_NULL(2) + void write_characters(const CharType* s, std::size_t length) override + { + str.append(s, length); + } + + private: + StringType& str; +}; + +template> +class output_adapter +{ + public: + output_adapter(std::vector& vec) + : 
oa(std::make_shared>(vec)) {} + + output_adapter(std::basic_ostream& s) + : oa(std::make_shared>(s)) {} + + output_adapter(StringType& s) + : oa(std::make_shared>(s)) {} + + operator output_adapter_t() + { + return oa; + } + + private: + output_adapter_t oa = nullptr; +}; +} // namespace detail +} // namespace nlohmann + + +namespace nlohmann +{ +namespace detail +{ +/////////////////// +// binary writer // +/////////////////// + +/*! +@brief serialization to CBOR and MessagePack values +*/ +template +class binary_writer +{ + using string_t = typename BasicJsonType::string_t; + using binary_t = typename BasicJsonType::binary_t; + using number_float_t = typename BasicJsonType::number_float_t; + + public: + /*! + @brief create a binary writer + + @param[in] adapter output adapter to write to + */ + explicit binary_writer(output_adapter_t adapter) : oa(adapter) + { + JSON_ASSERT(oa); + } + + /*! + @param[in] j JSON value to serialize + @pre j.type() == value_t::object + */ + void write_bson(const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::object: + { + write_bson_object(*j.m_value.object); + break; + } + + default: + { + JSON_THROW(type_error::create(317, "to serialize to BSON, top-level type must be object, but is " + std::string(j.type_name()))); + } + } + } + + /*! + @param[in] j JSON value to serialize + */ + void write_cbor(const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::null: + { + oa->write_character(to_char_type(0xF6)); + break; + } + + case value_t::boolean: + { + oa->write_character(j.m_value.boolean + ? to_char_type(0xF5) + : to_char_type(0xF4)); + break; + } + + case value_t::number_integer: + { + if (j.m_value.number_integer >= 0) + { + // CBOR does not differentiate between positive signed + // integers and unsigned integers. Therefore, we used the + // code from the value_t::number_unsigned case here. 
+ if (j.m_value.number_integer <= 0x17) + { + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_integer <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x18)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_integer <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x19)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_integer <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x1A)); + write_number(static_cast(j.m_value.number_integer)); + } + else + { + oa->write_character(to_char_type(0x1B)); + write_number(static_cast(j.m_value.number_integer)); + } + } + else + { + // The conversions below encode the sign in the first + // byte, and the value is converted to a positive number. + const auto positive_number = -1 - j.m_value.number_integer; + if (j.m_value.number_integer >= -24) + { + write_number(static_cast(0x20 + positive_number)); + } + else if (positive_number <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x38)); + write_number(static_cast(positive_number)); + } + else if (positive_number <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x39)); + write_number(static_cast(positive_number)); + } + else if (positive_number <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x3A)); + write_number(static_cast(positive_number)); + } + else + { + oa->write_character(to_char_type(0x3B)); + write_number(static_cast(positive_number)); + } + } + break; + } + + case value_t::number_unsigned: + { + if (j.m_value.number_unsigned <= 0x17) + { + write_number(static_cast(j.m_value.number_unsigned)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x18)); + write_number(static_cast(j.m_value.number_unsigned)); + } + else if (j.m_value.number_unsigned <= 
(std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x19)); + write_number(static_cast(j.m_value.number_unsigned)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x1A)); + write_number(static_cast(j.m_value.number_unsigned)); + } + else + { + oa->write_character(to_char_type(0x1B)); + write_number(static_cast(j.m_value.number_unsigned)); + } + break; + } + + case value_t::number_float: + { + if (std::isnan(j.m_value.number_float)) + { + // NaN is 0xf97e00 in CBOR + oa->write_character(to_char_type(0xF9)); + oa->write_character(to_char_type(0x7E)); + oa->write_character(to_char_type(0x00)); + } + else if (std::isinf(j.m_value.number_float)) + { + // Infinity is 0xf97c00, -Infinity is 0xf9fc00 + oa->write_character(to_char_type(0xf9)); + oa->write_character(j.m_value.number_float > 0 ? to_char_type(0x7C) : to_char_type(0xFC)); + oa->write_character(to_char_type(0x00)); + } + else + { + write_compact_float(j.m_value.number_float, detail::input_format_t::cbor); + } + break; + } + + case value_t::string: + { + // step 1: write control byte and the string length + const auto N = j.m_value.string->size(); + if (N <= 0x17) + { + write_number(static_cast(0x60 + N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x78)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x79)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x7A)); + write_number(static_cast(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x7B)); + write_number(static_cast(N)); + } + // LCOV_EXCL_STOP + + // step 2: write the string + oa->write_characters( + reinterpret_cast(j.m_value.string->c_str()), + j.m_value.string->size()); + break; + } + + case value_t::array: + { + // 
step 1: write control byte and the array size + const auto N = j.m_value.array->size(); + if (N <= 0x17) + { + write_number(static_cast(0x80 + N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x98)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x99)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x9A)); + write_number(static_cast(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x9B)); + write_number(static_cast(N)); + } + // LCOV_EXCL_STOP + + // step 2: write each element + for (const auto& el : *j.m_value.array) + { + write_cbor(el); + } + break; + } + + case value_t::binary: + { + if (j.m_value.binary->has_subtype()) + { + write_number(static_cast(0xd8)); + write_number(j.m_value.binary->subtype()); + } + + // step 1: write control byte and the binary array size + const auto N = j.m_value.binary->size(); + if (N <= 0x17) + { + write_number(static_cast(0x40 + N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x58)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x59)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x5A)); + write_number(static_cast(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0x5B)); + write_number(static_cast(N)); + } + // LCOV_EXCL_STOP + + // step 2: write each element + oa->write_characters( + reinterpret_cast(j.m_value.binary->data()), + N); + + break; + } + + case value_t::object: + { + // step 1: write control byte and the object size + const auto N = j.m_value.object->size(); + if (N <= 0x17) + { + 
write_number(static_cast(0xA0 + N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0xB8)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0xB9)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0xBA)); + write_number(static_cast(N)); + } + // LCOV_EXCL_START + else if (N <= (std::numeric_limits::max)()) + { + oa->write_character(to_char_type(0xBB)); + write_number(static_cast(N)); + } + // LCOV_EXCL_STOP + + // step 2: write each element + for (const auto& el : *j.m_value.object) + { + write_cbor(el.first); + write_cbor(el.second); + } + break; + } + + default: + break; + } + } + + /*! + @param[in] j JSON value to serialize + */ + void write_msgpack(const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::null: // nil + { + oa->write_character(to_char_type(0xC0)); + break; + } + + case value_t::boolean: // true and false + { + oa->write_character(j.m_value.boolean + ? to_char_type(0xC3) + : to_char_type(0xC2)); + break; + } + + case value_t::number_integer: + { + if (j.m_value.number_integer >= 0) + { + // MessagePack does not differentiate between positive + // signed integers and unsigned integers. Therefore, we used + // the code from the value_t::number_unsigned case here. 
+ if (j.m_value.number_unsigned < 128) + { + // positive fixnum + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + // uint 8 + oa->write_character(to_char_type(0xCC)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + // uint 16 + oa->write_character(to_char_type(0xCD)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + // uint 32 + oa->write_character(to_char_type(0xCE)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + // uint 64 + oa->write_character(to_char_type(0xCF)); + write_number(static_cast(j.m_value.number_integer)); + } + } + else + { + if (j.m_value.number_integer >= -32) + { + // negative fixnum + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits::min)() && + j.m_value.number_integer <= (std::numeric_limits::max)()) + { + // int 8 + oa->write_character(to_char_type(0xD0)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits::min)() && + j.m_value.number_integer <= (std::numeric_limits::max)()) + { + // int 16 + oa->write_character(to_char_type(0xD1)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits::min)() && + j.m_value.number_integer <= (std::numeric_limits::max)()) + { + // int 32 + oa->write_character(to_char_type(0xD2)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_integer >= (std::numeric_limits::min)() && + j.m_value.number_integer <= (std::numeric_limits::max)()) + { + // int 64 + oa->write_character(to_char_type(0xD3)); + 
write_number(static_cast(j.m_value.number_integer)); + } + } + break; + } + + case value_t::number_unsigned: + { + if (j.m_value.number_unsigned < 128) + { + // positive fixnum + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + // uint 8 + oa->write_character(to_char_type(0xCC)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + // uint 16 + oa->write_character(to_char_type(0xCD)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + // uint 32 + oa->write_character(to_char_type(0xCE)); + write_number(static_cast(j.m_value.number_integer)); + } + else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) + { + // uint 64 + oa->write_character(to_char_type(0xCF)); + write_number(static_cast(j.m_value.number_integer)); + } + break; + } + + case value_t::number_float: + { + write_compact_float(j.m_value.number_float, detail::input_format_t::msgpack); + break; + } + + case value_t::string: + { + // step 1: write control byte and the string length + const auto N = j.m_value.string->size(); + if (N <= 31) + { + // fixstr + write_number(static_cast(0xA0 | N)); + } + else if (N <= (std::numeric_limits::max)()) + { + // str 8 + oa->write_character(to_char_type(0xD9)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + // str 16 + oa->write_character(to_char_type(0xDA)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + // str 32 + oa->write_character(to_char_type(0xDB)); + write_number(static_cast(N)); + } + + // step 2: write the string + oa->write_characters( + reinterpret_cast(j.m_value.string->c_str()), + j.m_value.string->size()); + break; + } + + case value_t::array: + { + // step 1: write control byte and the array size + const auto N = 
j.m_value.array->size(); + if (N <= 15) + { + // fixarray + write_number(static_cast(0x90 | N)); + } + else if (N <= (std::numeric_limits::max)()) + { + // array 16 + oa->write_character(to_char_type(0xDC)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + // array 32 + oa->write_character(to_char_type(0xDD)); + write_number(static_cast(N)); + } + + // step 2: write each element + for (const auto& el : *j.m_value.array) + { + write_msgpack(el); + } + break; + } + + case value_t::binary: + { + // step 0: determine if the binary type has a set subtype to + // determine whether or not to use the ext or fixext types + const bool use_ext = j.m_value.binary->has_subtype(); + + // step 1: write control byte and the byte string length + const auto N = j.m_value.binary->size(); + if (N <= (std::numeric_limits::max)()) + { + std::uint8_t output_type{}; + bool fixed = true; + if (use_ext) + { + switch (N) + { + case 1: + output_type = 0xD4; // fixext 1 + break; + case 2: + output_type = 0xD5; // fixext 2 + break; + case 4: + output_type = 0xD6; // fixext 4 + break; + case 8: + output_type = 0xD7; // fixext 8 + break; + case 16: + output_type = 0xD8; // fixext 16 + break; + default: + output_type = 0xC7; // ext 8 + fixed = false; + break; + } + + } + else + { + output_type = 0xC4; // bin 8 + fixed = false; + } + + oa->write_character(to_char_type(output_type)); + if (!fixed) + { + write_number(static_cast(N)); + } + } + else if (N <= (std::numeric_limits::max)()) + { + std::uint8_t output_type = use_ext + ? 0xC8 // ext 16 + : 0xC5; // bin 16 + + oa->write_character(to_char_type(output_type)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + std::uint8_t output_type = use_ext + ? 
0xC9 // ext 32 + : 0xC6; // bin 32 + + oa->write_character(to_char_type(output_type)); + write_number(static_cast(N)); + } + + // step 1.5: if this is an ext type, write the subtype + if (use_ext) + { + write_number(static_cast(j.m_value.binary->subtype())); + } + + // step 2: write the byte string + oa->write_characters( + reinterpret_cast(j.m_value.binary->data()), + N); + + break; + } + + case value_t::object: + { + // step 1: write control byte and the object size + const auto N = j.m_value.object->size(); + if (N <= 15) + { + // fixmap + write_number(static_cast(0x80 | (N & 0xF))); + } + else if (N <= (std::numeric_limits::max)()) + { + // map 16 + oa->write_character(to_char_type(0xDE)); + write_number(static_cast(N)); + } + else if (N <= (std::numeric_limits::max)()) + { + // map 32 + oa->write_character(to_char_type(0xDF)); + write_number(static_cast(N)); + } + + // step 2: write each element + for (const auto& el : *j.m_value.object) + { + write_msgpack(el.first); + write_msgpack(el.second); + } + break; + } + + default: + break; + } + } + + /*! + @param[in] j JSON value to serialize + @param[in] use_count whether to use '#' prefixes (optimized format) + @param[in] use_type whether to use '$' prefixes (optimized format) + @param[in] add_prefix whether prefixes need to be used for this value + */ + void write_ubjson(const BasicJsonType& j, const bool use_count, + const bool use_type, const bool add_prefix = true) + { + switch (j.type()) + { + case value_t::null: + { + if (add_prefix) + { + oa->write_character(to_char_type('Z')); + } + break; + } + + case value_t::boolean: + { + if (add_prefix) + { + oa->write_character(j.m_value.boolean + ? 
to_char_type('T') + : to_char_type('F')); + } + break; + } + + case value_t::number_integer: + { + write_number_with_ubjson_prefix(j.m_value.number_integer, add_prefix); + break; + } + + case value_t::number_unsigned: + { + write_number_with_ubjson_prefix(j.m_value.number_unsigned, add_prefix); + break; + } + + case value_t::number_float: + { + write_number_with_ubjson_prefix(j.m_value.number_float, add_prefix); + break; + } + + case value_t::string: + { + if (add_prefix) + { + oa->write_character(to_char_type('S')); + } + write_number_with_ubjson_prefix(j.m_value.string->size(), true); + oa->write_characters( + reinterpret_cast(j.m_value.string->c_str()), + j.m_value.string->size()); + break; + } + + case value_t::array: + { + if (add_prefix) + { + oa->write_character(to_char_type('[')); + } + + bool prefix_required = true; + if (use_type && !j.m_value.array->empty()) + { + JSON_ASSERT(use_count); + const CharType first_prefix = ubjson_prefix(j.front()); + const bool same_prefix = std::all_of(j.begin() + 1, j.end(), + [this, first_prefix](const BasicJsonType & v) + { + return ubjson_prefix(v) == first_prefix; + }); + + if (same_prefix) + { + prefix_required = false; + oa->write_character(to_char_type('$')); + oa->write_character(first_prefix); + } + } + + if (use_count) + { + oa->write_character(to_char_type('#')); + write_number_with_ubjson_prefix(j.m_value.array->size(), true); + } + + for (const auto& el : *j.m_value.array) + { + write_ubjson(el, use_count, use_type, prefix_required); + } + + if (!use_count) + { + oa->write_character(to_char_type(']')); + } + + break; + } + + case value_t::binary: + { + if (add_prefix) + { + oa->write_character(to_char_type('[')); + } + + if (use_type && !j.m_value.binary->empty()) + { + JSON_ASSERT(use_count); + oa->write_character(to_char_type('$')); + oa->write_character('U'); + } + + if (use_count) + { + oa->write_character(to_char_type('#')); + write_number_with_ubjson_prefix(j.m_value.binary->size(), true); + } + + if 
(use_type) + { + oa->write_characters( + reinterpret_cast(j.m_value.binary->data()), + j.m_value.binary->size()); + } + else + { + for (size_t i = 0; i < j.m_value.binary->size(); ++i) + { + oa->write_character(to_char_type('U')); + oa->write_character(j.m_value.binary->data()[i]); + } + } + + if (!use_count) + { + oa->write_character(to_char_type(']')); + } + + break; + } + + case value_t::object: + { + if (add_prefix) + { + oa->write_character(to_char_type('{')); + } + + bool prefix_required = true; + if (use_type && !j.m_value.object->empty()) + { + JSON_ASSERT(use_count); + const CharType first_prefix = ubjson_prefix(j.front()); + const bool same_prefix = std::all_of(j.begin(), j.end(), + [this, first_prefix](const BasicJsonType & v) + { + return ubjson_prefix(v) == first_prefix; + }); + + if (same_prefix) + { + prefix_required = false; + oa->write_character(to_char_type('$')); + oa->write_character(first_prefix); + } + } + + if (use_count) + { + oa->write_character(to_char_type('#')); + write_number_with_ubjson_prefix(j.m_value.object->size(), true); + } + + for (const auto& el : *j.m_value.object) + { + write_number_with_ubjson_prefix(el.first.size(), true); + oa->write_characters( + reinterpret_cast(el.first.c_str()), + el.first.size()); + write_ubjson(el.second, use_count, use_type, prefix_required); + } + + if (!use_count) + { + oa->write_character(to_char_type('}')); + } + + break; + } + + default: + break; + } + } + + private: + ////////// + // BSON // + ////////// + + /*! + @return The size of a BSON document entry header, including the id marker + and the entry name size (and its null-terminator). 
+ */ + static std::size_t calc_bson_entry_header_size(const string_t& name) + { + const auto it = name.find(static_cast(0)); + if (JSON_HEDLEY_UNLIKELY(it != BasicJsonType::string_t::npos)) + { + JSON_THROW(out_of_range::create(409, + "BSON key cannot contain code point U+0000 (at byte " + std::to_string(it) + ")")); + } + + return /*id*/ 1ul + name.size() + /*zero-terminator*/1u; + } + + /*! + @brief Writes the given @a element_type and @a name to the output adapter + */ + void write_bson_entry_header(const string_t& name, + const std::uint8_t element_type) + { + oa->write_character(to_char_type(element_type)); // boolean + oa->write_characters( + reinterpret_cast(name.c_str()), + name.size() + 1u); + } + + /*! + @brief Writes a BSON element with key @a name and boolean value @a value + */ + void write_bson_boolean(const string_t& name, + const bool value) + { + write_bson_entry_header(name, 0x08); + oa->write_character(value ? to_char_type(0x01) : to_char_type(0x00)); + } + + /*! + @brief Writes a BSON element with key @a name and double value @a value + */ + void write_bson_double(const string_t& name, + const double value) + { + write_bson_entry_header(name, 0x01); + write_number(value); + } + + /*! + @return The size of the BSON-encoded string in @a value + */ + static std::size_t calc_bson_string_size(const string_t& value) + { + return sizeof(std::int32_t) + value.size() + 1ul; + } + + /*! + @brief Writes a BSON element with key @a name and string value @a value + */ + void write_bson_string(const string_t& name, + const string_t& value) + { + write_bson_entry_header(name, 0x02); + + write_number(static_cast(value.size() + 1ul)); + oa->write_characters( + reinterpret_cast(value.c_str()), + value.size() + 1); + } + + /*! + @brief Writes a BSON element with key @a name and null value + */ + void write_bson_null(const string_t& name) + { + write_bson_entry_header(name, 0x0A); + } + + /*! 
+ @return The size of the BSON-encoded integer @a value + */ + static std::size_t calc_bson_integer_size(const std::int64_t value) + { + return (std::numeric_limits::min)() <= value && value <= (std::numeric_limits::max)() + ? sizeof(std::int32_t) + : sizeof(std::int64_t); + } + + /*! + @brief Writes a BSON element with key @a name and integer @a value + */ + void write_bson_integer(const string_t& name, + const std::int64_t value) + { + if ((std::numeric_limits::min)() <= value && value <= (std::numeric_limits::max)()) + { + write_bson_entry_header(name, 0x10); // int32 + write_number(static_cast(value)); + } + else + { + write_bson_entry_header(name, 0x12); // int64 + write_number(static_cast(value)); + } + } + + /*! + @return The size of the BSON-encoded unsigned integer in @a j + */ + static constexpr std::size_t calc_bson_unsigned_size(const std::uint64_t value) noexcept + { + return (value <= static_cast((std::numeric_limits::max)())) + ? sizeof(std::int32_t) + : sizeof(std::int64_t); + } + + /*! + @brief Writes a BSON element with key @a name and unsigned @a value + */ + void write_bson_unsigned(const string_t& name, + const std::uint64_t value) + { + if (value <= static_cast((std::numeric_limits::max)())) + { + write_bson_entry_header(name, 0x10 /* int32 */); + write_number(static_cast(value)); + } + else if (value <= static_cast((std::numeric_limits::max)())) + { + write_bson_entry_header(name, 0x12 /* int64 */); + write_number(static_cast(value)); + } + else + { + JSON_THROW(out_of_range::create(407, "integer number " + std::to_string(value) + " cannot be represented by BSON as it does not fit int64")); + } + } + + /*! + @brief Writes a BSON element with key @a name and object @a value + */ + void write_bson_object_entry(const string_t& name, + const typename BasicJsonType::object_t& value) + { + write_bson_entry_header(name, 0x03); // object + write_bson_object(value); + } + + /*! 
+ @return The size of the BSON-encoded array @a value + */ + static std::size_t calc_bson_array_size(const typename BasicJsonType::array_t& value) + { + std::size_t array_index = 0ul; + + const std::size_t embedded_document_size = std::accumulate(std::begin(value), std::end(value), std::size_t(0), [&array_index](std::size_t result, const typename BasicJsonType::array_t::value_type & el) + { + return result + calc_bson_element_size(std::to_string(array_index++), el); + }); + + return sizeof(std::int32_t) + embedded_document_size + 1ul; + } + + /*! + @return The size of the BSON-encoded binary array @a value + */ + static std::size_t calc_bson_binary_size(const typename BasicJsonType::binary_t& value) + { + return sizeof(std::int32_t) + value.size() + 1ul; + } + + /*! + @brief Writes a BSON element with key @a name and array @a value + */ + void write_bson_array(const string_t& name, + const typename BasicJsonType::array_t& value) + { + write_bson_entry_header(name, 0x04); // array + write_number(static_cast(calc_bson_array_size(value))); + + std::size_t array_index = 0ul; + + for (const auto& el : value) + { + write_bson_element(std::to_string(array_index++), el); + } + + oa->write_character(to_char_type(0x00)); + } + + /*! + @brief Writes a BSON element with key @a name and binary value @a value + */ + void write_bson_binary(const string_t& name, + const binary_t& value) + { + write_bson_entry_header(name, 0x05); + + write_number(static_cast(value.size())); + write_number(value.has_subtype() ? value.subtype() : std::uint8_t(0x00)); + + oa->write_characters(reinterpret_cast(value.data()), value.size()); + } + + /*! + @brief Calculates the size necessary to serialize the JSON value @a j with its @a name + @return The calculated size for the BSON document entry for @a j with the given @a name. 
+ */ + static std::size_t calc_bson_element_size(const string_t& name, + const BasicJsonType& j) + { + const auto header_size = calc_bson_entry_header_size(name); + switch (j.type()) + { + case value_t::object: + return header_size + calc_bson_object_size(*j.m_value.object); + + case value_t::array: + return header_size + calc_bson_array_size(*j.m_value.array); + + case value_t::binary: + return header_size + calc_bson_binary_size(*j.m_value.binary); + + case value_t::boolean: + return header_size + 1ul; + + case value_t::number_float: + return header_size + 8ul; + + case value_t::number_integer: + return header_size + calc_bson_integer_size(j.m_value.number_integer); + + case value_t::number_unsigned: + return header_size + calc_bson_unsigned_size(j.m_value.number_unsigned); + + case value_t::string: + return header_size + calc_bson_string_size(*j.m_value.string); + + case value_t::null: + return header_size + 0ul; + + // LCOV_EXCL_START + default: + JSON_ASSERT(false); + return 0ul; + // LCOV_EXCL_STOP + } + } + + /*! + @brief Serializes the JSON value @a j to BSON and associates it with the + key @a name. 
+ @param name The name to associate with the JSON entity @a j within the + current BSON document + @return The size of the BSON entry + */ + void write_bson_element(const string_t& name, + const BasicJsonType& j) + { + switch (j.type()) + { + case value_t::object: + return write_bson_object_entry(name, *j.m_value.object); + + case value_t::array: + return write_bson_array(name, *j.m_value.array); + + case value_t::binary: + return write_bson_binary(name, *j.m_value.binary); + + case value_t::boolean: + return write_bson_boolean(name, j.m_value.boolean); + + case value_t::number_float: + return write_bson_double(name, j.m_value.number_float); + + case value_t::number_integer: + return write_bson_integer(name, j.m_value.number_integer); + + case value_t::number_unsigned: + return write_bson_unsigned(name, j.m_value.number_unsigned); + + case value_t::string: + return write_bson_string(name, *j.m_value.string); + + case value_t::null: + return write_bson_null(name); + + // LCOV_EXCL_START + default: + JSON_ASSERT(false); + return; + // LCOV_EXCL_STOP + } + } + + /*! + @brief Calculates the size of the BSON serialization of the given + JSON-object @a j. + @param[in] j JSON value to serialize + @pre j.type() == value_t::object + */ + static std::size_t calc_bson_object_size(const typename BasicJsonType::object_t& value) + { + std::size_t document_size = std::accumulate(value.begin(), value.end(), std::size_t(0), + [](size_t result, const typename BasicJsonType::object_t::value_type & el) + { + return result += calc_bson_element_size(el.first, el.second); + }); + + return sizeof(std::int32_t) + document_size + 1ul; + } + + /*! 
+ @param[in] j JSON value to serialize + @pre j.type() == value_t::object + */ + void write_bson_object(const typename BasicJsonType::object_t& value) + { + write_number(static_cast(calc_bson_object_size(value))); + + for (const auto& el : value) + { + write_bson_element(el.first, el.second); + } + + oa->write_character(to_char_type(0x00)); + } + + ////////// + // CBOR // + ////////// + + static constexpr CharType get_cbor_float_prefix(float /*unused*/) + { + return to_char_type(0xFA); // Single-Precision Float + } + + static constexpr CharType get_cbor_float_prefix(double /*unused*/) + { + return to_char_type(0xFB); // Double-Precision Float + } + + ///////////// + // MsgPack // + ///////////// + + static constexpr CharType get_msgpack_float_prefix(float /*unused*/) + { + return to_char_type(0xCA); // float 32 + } + + static constexpr CharType get_msgpack_float_prefix(double /*unused*/) + { + return to_char_type(0xCB); // float 64 + } + + //////////// + // UBJSON // + //////////// + + // UBJSON: write number (floating point) + template::value, int>::type = 0> + void write_number_with_ubjson_prefix(const NumberType n, + const bool add_prefix) + { + if (add_prefix) + { + oa->write_character(get_ubjson_float_prefix(n)); + } + write_number(n); + } + + // UBJSON: write number (unsigned integer) + template::value, int>::type = 0> + void write_number_with_ubjson_prefix(const NumberType n, + const bool add_prefix) + { + if (n <= static_cast((std::numeric_limits::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('i')); // int8 + } + write_number(static_cast(n)); + } + else if (n <= (std::numeric_limits::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('U')); // uint8 + } + write_number(static_cast(n)); + } + else if (n <= static_cast((std::numeric_limits::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('I')); // int16 + } + write_number(static_cast(n)); + } + else if (n <= 
static_cast((std::numeric_limits::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('l')); // int32 + } + write_number(static_cast(n)); + } + else if (n <= static_cast((std::numeric_limits::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('L')); // int64 + } + write_number(static_cast(n)); + } + else + { + if (add_prefix) + { + oa->write_character(to_char_type('H')); // high-precision number + } + + const auto number = BasicJsonType(n).dump(); + write_number_with_ubjson_prefix(number.size(), true); + for (std::size_t i = 0; i < number.size(); ++i) + { + oa->write_character(to_char_type(static_cast(number[i]))); + } + } + } + + // UBJSON: write number (signed integer) + template < typename NumberType, typename std::enable_if < + std::is_signed::value&& + !std::is_floating_point::value, int >::type = 0 > + void write_number_with_ubjson_prefix(const NumberType n, + const bool add_prefix) + { + if ((std::numeric_limits::min)() <= n && n <= (std::numeric_limits::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('i')); // int8 + } + write_number(static_cast(n)); + } + else if (static_cast((std::numeric_limits::min)()) <= n && n <= static_cast((std::numeric_limits::max)())) + { + if (add_prefix) + { + oa->write_character(to_char_type('U')); // uint8 + } + write_number(static_cast(n)); + } + else if ((std::numeric_limits::min)() <= n && n <= (std::numeric_limits::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('I')); // int16 + } + write_number(static_cast(n)); + } + else if ((std::numeric_limits::min)() <= n && n <= (std::numeric_limits::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('l')); // int32 + } + write_number(static_cast(n)); + } + else if ((std::numeric_limits::min)() <= n && n <= (std::numeric_limits::max)()) + { + if (add_prefix) + { + oa->write_character(to_char_type('L')); // int64 + } + write_number(static_cast(n)); + } + // LCOV_EXCL_START + else + { + if 
(add_prefix) + { + oa->write_character(to_char_type('H')); // high-precision number + } + + const auto number = BasicJsonType(n).dump(); + write_number_with_ubjson_prefix(number.size(), true); + for (std::size_t i = 0; i < number.size(); ++i) + { + oa->write_character(to_char_type(static_cast(number[i]))); + } + } + // LCOV_EXCL_STOP + } + + /*! + @brief determine the type prefix of container values + */ + CharType ubjson_prefix(const BasicJsonType& j) const noexcept + { + switch (j.type()) + { + case value_t::null: + return 'Z'; + + case value_t::boolean: + return j.m_value.boolean ? 'T' : 'F'; + + case value_t::number_integer: + { + if ((std::numeric_limits::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits::max)()) + { + return 'i'; + } + if ((std::numeric_limits::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits::max)()) + { + return 'U'; + } + if ((std::numeric_limits::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits::max)()) + { + return 'I'; + } + if ((std::numeric_limits::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits::max)()) + { + return 'l'; + } + if ((std::numeric_limits::min)() <= j.m_value.number_integer && j.m_value.number_integer <= (std::numeric_limits::max)()) + { + return 'L'; + } + // anything else is treated as high-precision number + return 'H'; // LCOV_EXCL_LINE + } + + case value_t::number_unsigned: + { + if (j.m_value.number_unsigned <= static_cast((std::numeric_limits::max)())) + { + return 'i'; + } + if (j.m_value.number_unsigned <= static_cast((std::numeric_limits::max)())) + { + return 'U'; + } + if (j.m_value.number_unsigned <= static_cast((std::numeric_limits::max)())) + { + return 'I'; + } + if (j.m_value.number_unsigned <= static_cast((std::numeric_limits::max)())) + { + return 'l'; + } + if (j.m_value.number_unsigned <= static_cast((std::numeric_limits::max)())) + { + return 'L'; + 
} + // anything else is treated as high-precision number + return 'H'; // LCOV_EXCL_LINE + } + + case value_t::number_float: + return get_ubjson_float_prefix(j.m_value.number_float); + + case value_t::string: + return 'S'; + + case value_t::array: // fallthrough + case value_t::binary: + return '['; + + case value_t::object: + return '{'; + + default: // discarded values + return 'N'; + } + } + + static constexpr CharType get_ubjson_float_prefix(float /*unused*/) + { + return 'd'; // float 32 + } + + static constexpr CharType get_ubjson_float_prefix(double /*unused*/) + { + return 'D'; // float 64 + } + + /////////////////////// + // Utility functions // + /////////////////////// + + /* + @brief write a number to output input + @param[in] n number of type @a NumberType + @tparam NumberType the type of the number + @tparam OutputIsLittleEndian Set to true if output data is + required to be little endian + + @note This function needs to respect the system's endianess, because bytes + in CBOR, MessagePack, and UBJSON are stored in network order (big + endian) and therefore need reordering on little endian systems. + */ + template + void write_number(const NumberType n) + { + // step 1: write number to array of length NumberType + std::array vec; + std::memcpy(vec.data(), &n, sizeof(NumberType)); + + // step 2: write array to output (with possible reordering) + if (is_little_endian != OutputIsLittleEndian) + { + // reverse byte order prior to conversion if necessary + std::reverse(vec.begin(), vec.end()); + } + + oa->write_characters(vec.data(), sizeof(NumberType)); + } + + void write_compact_float(const number_float_t n, detail::input_format_t format) + { + if (static_cast(n) >= static_cast(std::numeric_limits::lowest()) && + static_cast(n) <= static_cast((std::numeric_limits::max)()) && + static_cast(static_cast(n)) == static_cast(n)) + { + oa->write_character(format == detail::input_format_t::cbor + ? 
get_cbor_float_prefix(static_cast(n)) + : get_msgpack_float_prefix(static_cast(n))); + write_number(static_cast(n)); + } + else + { + oa->write_character(format == detail::input_format_t::cbor + ? get_cbor_float_prefix(n) + : get_msgpack_float_prefix(n)); + write_number(n); + } + } + + public: + // The following to_char_type functions are implement the conversion + // between uint8_t and CharType. In case CharType is not unsigned, + // such a conversion is required to allow values greater than 128. + // See for a discussion. + template < typename C = CharType, + enable_if_t < std::is_signed::value && std::is_signed::value > * = nullptr > + static constexpr CharType to_char_type(std::uint8_t x) noexcept + { + return *reinterpret_cast(&x); + } + + template < typename C = CharType, + enable_if_t < std::is_signed::value && std::is_unsigned::value > * = nullptr > + static CharType to_char_type(std::uint8_t x) noexcept + { + static_assert(sizeof(std::uint8_t) == sizeof(CharType), "size of CharType must be equal to std::uint8_t"); + static_assert(std::is_trivial::value, "CharType must be trivial"); + CharType result; + std::memcpy(&result, &x, sizeof(x)); + return result; + } + + template::value>* = nullptr> + static constexpr CharType to_char_type(std::uint8_t x) noexcept + { + return x; + } + + template < typename InputCharType, typename C = CharType, + enable_if_t < + std::is_signed::value && + std::is_signed::value && + std::is_same::type>::value + > * = nullptr > + static constexpr CharType to_char_type(InputCharType x) noexcept + { + return x; + } + + private: + /// whether we can assume little endianess + const bool is_little_endian = little_endianess(); + + /// the output + output_adapter_t oa = nullptr; +}; +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + + +#include // reverse, remove, fill, find, none_of +#include // array +#include // localeconv, lconv +#include // labs, isfinite, isnan, signbit +#include // size_t, ptrdiff_t 
+#include // uint8_t +#include // snprintf +#include // numeric_limits +#include // string, char_traits +#include // is_same +#include // move + +// #include + + +#include // array +#include // signbit, isfinite +#include // intN_t, uintN_t +#include // memcpy, memmove +#include // numeric_limits +#include // conditional + +// #include + + +namespace nlohmann +{ +namespace detail +{ + +/*! +@brief implements the Grisu2 algorithm for binary to decimal floating-point +conversion. + +This implementation is a slightly modified version of the reference +implementation which may be obtained from +http://florian.loitsch.com/publications (bench.tar.gz). + +The code is distributed under the MIT license, Copyright (c) 2009 Florian Loitsch. + +For a detailed description of the algorithm see: + +[1] Loitsch, "Printing Floating-Point Numbers Quickly and Accurately with + Integers", Proceedings of the ACM SIGPLAN 2010 Conference on Programming + Language Design and Implementation, PLDI 2010 +[2] Burger, Dybvig, "Printing Floating-Point Numbers Quickly and Accurately", + Proceedings of the ACM SIGPLAN 1996 Conference on Programming Language + Design and Implementation, PLDI 1996 +*/ +namespace dtoa_impl +{ + +template +Target reinterpret_bits(const Source source) +{ + static_assert(sizeof(Target) == sizeof(Source), "size mismatch"); + + Target target; + std::memcpy(&target, &source, sizeof(Source)); + return target; +} + +struct diyfp // f * 2^e +{ + static constexpr int kPrecision = 64; // = q + + std::uint64_t f = 0; + int e = 0; + + constexpr diyfp(std::uint64_t f_, int e_) noexcept : f(f_), e(e_) {} + + /*! + @brief returns x - y + @pre x.e == y.e and x.f >= y.f + */ + static diyfp sub(const diyfp& x, const diyfp& y) noexcept + { + JSON_ASSERT(x.e == y.e); + JSON_ASSERT(x.f >= y.f); + + return {x.f - y.f, x.e}; + } + + /*! + @brief returns x * y + @note The result is rounded. (Only the upper q bits are returned.) 
+ */ + static diyfp mul(const diyfp& x, const diyfp& y) noexcept + { + static_assert(kPrecision == 64, "internal error"); + + // Computes: + // f = round((x.f * y.f) / 2^q) + // e = x.e + y.e + q + + // Emulate the 64-bit * 64-bit multiplication: + // + // p = u * v + // = (u_lo + 2^32 u_hi) (v_lo + 2^32 v_hi) + // = (u_lo v_lo ) + 2^32 ((u_lo v_hi ) + (u_hi v_lo )) + 2^64 (u_hi v_hi ) + // = (p0 ) + 2^32 ((p1 ) + (p2 )) + 2^64 (p3 ) + // = (p0_lo + 2^32 p0_hi) + 2^32 ((p1_lo + 2^32 p1_hi) + (p2_lo + 2^32 p2_hi)) + 2^64 (p3 ) + // = (p0_lo ) + 2^32 (p0_hi + p1_lo + p2_lo ) + 2^64 (p1_hi + p2_hi + p3) + // = (p0_lo ) + 2^32 (Q ) + 2^64 (H ) + // = (p0_lo ) + 2^32 (Q_lo + 2^32 Q_hi ) + 2^64 (H ) + // + // (Since Q might be larger than 2^32 - 1) + // + // = (p0_lo + 2^32 Q_lo) + 2^64 (Q_hi + H) + // + // (Q_hi + H does not overflow a 64-bit int) + // + // = p_lo + 2^64 p_hi + + const std::uint64_t u_lo = x.f & 0xFFFFFFFFu; + const std::uint64_t u_hi = x.f >> 32u; + const std::uint64_t v_lo = y.f & 0xFFFFFFFFu; + const std::uint64_t v_hi = y.f >> 32u; + + const std::uint64_t p0 = u_lo * v_lo; + const std::uint64_t p1 = u_lo * v_hi; + const std::uint64_t p2 = u_hi * v_lo; + const std::uint64_t p3 = u_hi * v_hi; + + const std::uint64_t p0_hi = p0 >> 32u; + const std::uint64_t p1_lo = p1 & 0xFFFFFFFFu; + const std::uint64_t p1_hi = p1 >> 32u; + const std::uint64_t p2_lo = p2 & 0xFFFFFFFFu; + const std::uint64_t p2_hi = p2 >> 32u; + + std::uint64_t Q = p0_hi + p1_lo + p2_lo; + + // The full product might now be computed as + // + // p_hi = p3 + p2_hi + p1_hi + (Q >> 32) + // p_lo = p0_lo + (Q << 32) + // + // But in this particular case here, the full p_lo is not required. + // Effectively we only need to add the highest bit in p_lo to p_hi (and + // Q_hi + 1 does not overflow). + + Q += std::uint64_t{1} << (64u - 32u - 1u); // round, ties up + + const std::uint64_t h = p3 + p2_hi + p1_hi + (Q >> 32u); + + return {h, x.e + y.e + 64}; + } + + /*! 
+ @brief normalize x such that the significand is >= 2^(q-1) + @pre x.f != 0 + */ + static diyfp normalize(diyfp x) noexcept + { + JSON_ASSERT(x.f != 0); + + while ((x.f >> 63u) == 0) + { + x.f <<= 1u; + x.e--; + } + + return x; + } + + /*! + @brief normalize x such that the result has the exponent E + @pre e >= x.e and the upper e - x.e bits of x.f must be zero. + */ + static diyfp normalize_to(const diyfp& x, const int target_exponent) noexcept + { + const int delta = x.e - target_exponent; + + JSON_ASSERT(delta >= 0); + JSON_ASSERT(((x.f << delta) >> delta) == x.f); + + return {x.f << delta, target_exponent}; + } +}; + +struct boundaries +{ + diyfp w; + diyfp minus; + diyfp plus; +}; + +/*! +Compute the (normalized) diyfp representing the input number 'value' and its +boundaries. + +@pre value must be finite and positive +*/ +template +boundaries compute_boundaries(FloatType value) +{ + JSON_ASSERT(std::isfinite(value)); + JSON_ASSERT(value > 0); + + // Convert the IEEE representation into a diyfp. + // + // If v is denormal: + // value = 0.F * 2^(1 - bias) = ( F) * 2^(1 - bias - (p-1)) + // If v is normalized: + // value = 1.F * 2^(E - bias) = (2^(p-1) + F) * 2^(E - bias - (p-1)) + + static_assert(std::numeric_limits::is_iec559, + "internal error: dtoa_short requires an IEEE-754 floating-point implementation"); + + constexpr int kPrecision = std::numeric_limits::digits; // = p (includes the hidden bit) + constexpr int kBias = std::numeric_limits::max_exponent - 1 + (kPrecision - 1); + constexpr int kMinExp = 1 - kBias; + constexpr std::uint64_t kHiddenBit = std::uint64_t{1} << (kPrecision - 1); // = 2^(p-1) + + using bits_type = typename std::conditional::type; + + const std::uint64_t bits = reinterpret_bits(value); + const std::uint64_t E = bits >> (kPrecision - 1); + const std::uint64_t F = bits & (kHiddenBit - 1); + + const bool is_denormal = E == 0; + const diyfp v = is_denormal + ? 
diyfp(F, kMinExp) + : diyfp(F + kHiddenBit, static_cast(E) - kBias); + + // Compute the boundaries m- and m+ of the floating-point value + // v = f * 2^e. + // + // Determine v- and v+, the floating-point predecessor and successor if v, + // respectively. + // + // v- = v - 2^e if f != 2^(p-1) or e == e_min (A) + // = v - 2^(e-1) if f == 2^(p-1) and e > e_min (B) + // + // v+ = v + 2^e + // + // Let m- = (v- + v) / 2 and m+ = (v + v+) / 2. All real numbers _strictly_ + // between m- and m+ round to v, regardless of how the input rounding + // algorithm breaks ties. + // + // ---+-------------+-------------+-------------+-------------+--- (A) + // v- m- v m+ v+ + // + // -----------------+------+------+-------------+-------------+--- (B) + // v- m- v m+ v+ + + const bool lower_boundary_is_closer = F == 0 && E > 1; + const diyfp m_plus = diyfp(2 * v.f + 1, v.e - 1); + const diyfp m_minus = lower_boundary_is_closer + ? diyfp(4 * v.f - 1, v.e - 2) // (B) + : diyfp(2 * v.f - 1, v.e - 1); // (A) + + // Determine the normalized w+ = m+. + const diyfp w_plus = diyfp::normalize(m_plus); + + // Determine w- = m- such that e_(w-) = e_(w+). + const diyfp w_minus = diyfp::normalize_to(m_minus, w_plus.e); + + return {diyfp::normalize(v), w_minus, w_plus}; +} + +// Given normalized diyfp w, Grisu needs to find a (normalized) cached +// power-of-ten c, such that the exponent of the product c * w = f * 2^e lies +// within a certain range [alpha, gamma] (Definition 3.2 from [1]) +// +// alpha <= e = e_c + e_w + q <= gamma +// +// or +// +// f_c * f_w * 2^alpha <= f_c 2^(e_c) * f_w 2^(e_w) * 2^q +// <= f_c * f_w * 2^gamma +// +// Since c and w are normalized, i.e. 2^(q-1) <= f < 2^q, this implies +// +// 2^(q-1) * 2^(q-1) * 2^alpha <= c * w * 2^q < 2^q * 2^q * 2^gamma +// +// or +// +// 2^(q - 2 + alpha) <= c * w < 2^(q + gamma) +// +// The choice of (alpha,gamma) determines the size of the table and the form of +// the digit generation procedure. 
Using (alpha,gamma)=(-60,-32) works out well +// in practice: +// +// The idea is to cut the number c * w = f * 2^e into two parts, which can be +// processed independently: An integral part p1, and a fractional part p2: +// +// f * 2^e = ( (f div 2^-e) * 2^-e + (f mod 2^-e) ) * 2^e +// = (f div 2^-e) + (f mod 2^-e) * 2^e +// = p1 + p2 * 2^e +// +// The conversion of p1 into decimal form requires a series of divisions and +// modulos by (a power of) 10. These operations are faster for 32-bit than for +// 64-bit integers, so p1 should ideally fit into a 32-bit integer. This can be +// achieved by choosing +// +// -e >= 32 or e <= -32 := gamma +// +// In order to convert the fractional part +// +// p2 * 2^e = p2 / 2^-e = d[-1] / 10^1 + d[-2] / 10^2 + ... +// +// into decimal form, the fraction is repeatedly multiplied by 10 and the digits +// d[-i] are extracted in order: +// +// (10 * p2) div 2^-e = d[-1] +// (10 * p2) mod 2^-e = d[-2] / 10^1 + ... +// +// The multiplication by 10 must not overflow. It is sufficient to choose +// +// 10 * p2 < 16 * p2 = 2^4 * p2 <= 2^64. +// +// Since p2 = f mod 2^-e < 2^-e, +// +// -e <= 60 or e >= -60 := alpha + +constexpr int kAlpha = -60; +constexpr int kGamma = -32; + +struct cached_power // c = f * 2^e ~= 10^k +{ + std::uint64_t f; + int e; + int k; +}; + +/*! +For a normalized diyfp w = f * 2^e, this function returns a (normalized) cached +power-of-ten c = f_c * 2^e_c, such that the exponent of the product w * c +satisfies (Definition 3.2 from [1]) + + alpha <= e_c + e + q <= gamma. +*/ +inline cached_power get_cached_power_for_binary_exponent(int e) +{ + // Now + // + // alpha <= e_c + e + q <= gamma (1) + // ==> f_c * 2^alpha <= c * 2^e * 2^q + // + // and since the c's are normalized, 2^(q-1) <= f_c, + // + // ==> 2^(q - 1 + alpha) <= c * 2^(e + q) + // ==> 2^(alpha - e - 1) <= c + // + // If c were an exact power of ten, i.e. 
c = 10^k, one may determine k as + // + // k = ceil( log_10( 2^(alpha - e - 1) ) ) + // = ceil( (alpha - e - 1) * log_10(2) ) + // + // From the paper: + // "In theory the result of the procedure could be wrong since c is rounded, + // and the computation itself is approximated [...]. In practice, however, + // this simple function is sufficient." + // + // For IEEE double precision floating-point numbers converted into + // normalized diyfp's w = f * 2^e, with q = 64, + // + // e >= -1022 (min IEEE exponent) + // -52 (p - 1) + // -52 (p - 1, possibly normalize denormal IEEE numbers) + // -11 (normalize the diyfp) + // = -1137 + // + // and + // + // e <= +1023 (max IEEE exponent) + // -52 (p - 1) + // -11 (normalize the diyfp) + // = 960 + // + // This binary exponent range [-1137,960] results in a decimal exponent + // range [-307,324]. One does not need to store a cached power for each + // k in this range. For each such k it suffices to find a cached power + // such that the exponent of the product lies in [alpha,gamma]. + // This implies that the difference of the decimal exponents of adjacent + // table entries must be less than or equal to + // + // floor( (gamma - alpha) * log_10(2) ) = 8. + // + // (A smaller distance gamma-alpha would require a larger table.) + + // NB: + // Actually this function returns c, such that -60 <= e_c + e + 64 <= -34. 
+ + constexpr int kCachedPowersMinDecExp = -300; + constexpr int kCachedPowersDecStep = 8; + + static constexpr std::array kCachedPowers = + { + { + { 0xAB70FE17C79AC6CA, -1060, -300 }, + { 0xFF77B1FCBEBCDC4F, -1034, -292 }, + { 0xBE5691EF416BD60C, -1007, -284 }, + { 0x8DD01FAD907FFC3C, -980, -276 }, + { 0xD3515C2831559A83, -954, -268 }, + { 0x9D71AC8FADA6C9B5, -927, -260 }, + { 0xEA9C227723EE8BCB, -901, -252 }, + { 0xAECC49914078536D, -874, -244 }, + { 0x823C12795DB6CE57, -847, -236 }, + { 0xC21094364DFB5637, -821, -228 }, + { 0x9096EA6F3848984F, -794, -220 }, + { 0xD77485CB25823AC7, -768, -212 }, + { 0xA086CFCD97BF97F4, -741, -204 }, + { 0xEF340A98172AACE5, -715, -196 }, + { 0xB23867FB2A35B28E, -688, -188 }, + { 0x84C8D4DFD2C63F3B, -661, -180 }, + { 0xC5DD44271AD3CDBA, -635, -172 }, + { 0x936B9FCEBB25C996, -608, -164 }, + { 0xDBAC6C247D62A584, -582, -156 }, + { 0xA3AB66580D5FDAF6, -555, -148 }, + { 0xF3E2F893DEC3F126, -529, -140 }, + { 0xB5B5ADA8AAFF80B8, -502, -132 }, + { 0x87625F056C7C4A8B, -475, -124 }, + { 0xC9BCFF6034C13053, -449, -116 }, + { 0x964E858C91BA2655, -422, -108 }, + { 0xDFF9772470297EBD, -396, -100 }, + { 0xA6DFBD9FB8E5B88F, -369, -92 }, + { 0xF8A95FCF88747D94, -343, -84 }, + { 0xB94470938FA89BCF, -316, -76 }, + { 0x8A08F0F8BF0F156B, -289, -68 }, + { 0xCDB02555653131B6, -263, -60 }, + { 0x993FE2C6D07B7FAC, -236, -52 }, + { 0xE45C10C42A2B3B06, -210, -44 }, + { 0xAA242499697392D3, -183, -36 }, + { 0xFD87B5F28300CA0E, -157, -28 }, + { 0xBCE5086492111AEB, -130, -20 }, + { 0x8CBCCC096F5088CC, -103, -12 }, + { 0xD1B71758E219652C, -77, -4 }, + { 0x9C40000000000000, -50, 4 }, + { 0xE8D4A51000000000, -24, 12 }, + { 0xAD78EBC5AC620000, 3, 20 }, + { 0x813F3978F8940984, 30, 28 }, + { 0xC097CE7BC90715B3, 56, 36 }, + { 0x8F7E32CE7BEA5C70, 83, 44 }, + { 0xD5D238A4ABE98068, 109, 52 }, + { 0x9F4F2726179A2245, 136, 60 }, + { 0xED63A231D4C4FB27, 162, 68 }, + { 0xB0DE65388CC8ADA8, 189, 76 }, + { 0x83C7088E1AAB65DB, 216, 84 }, + { 0xC45D1DF942711D9A, 242, 92 }, + { 
0x924D692CA61BE758, 269, 100 }, + { 0xDA01EE641A708DEA, 295, 108 }, + { 0xA26DA3999AEF774A, 322, 116 }, + { 0xF209787BB47D6B85, 348, 124 }, + { 0xB454E4A179DD1877, 375, 132 }, + { 0x865B86925B9BC5C2, 402, 140 }, + { 0xC83553C5C8965D3D, 428, 148 }, + { 0x952AB45CFA97A0B3, 455, 156 }, + { 0xDE469FBD99A05FE3, 481, 164 }, + { 0xA59BC234DB398C25, 508, 172 }, + { 0xF6C69A72A3989F5C, 534, 180 }, + { 0xB7DCBF5354E9BECE, 561, 188 }, + { 0x88FCF317F22241E2, 588, 196 }, + { 0xCC20CE9BD35C78A5, 614, 204 }, + { 0x98165AF37B2153DF, 641, 212 }, + { 0xE2A0B5DC971F303A, 667, 220 }, + { 0xA8D9D1535CE3B396, 694, 228 }, + { 0xFB9B7CD9A4A7443C, 720, 236 }, + { 0xBB764C4CA7A44410, 747, 244 }, + { 0x8BAB8EEFB6409C1A, 774, 252 }, + { 0xD01FEF10A657842C, 800, 260 }, + { 0x9B10A4E5E9913129, 827, 268 }, + { 0xE7109BFBA19C0C9D, 853, 276 }, + { 0xAC2820D9623BF429, 880, 284 }, + { 0x80444B5E7AA7CF85, 907, 292 }, + { 0xBF21E44003ACDD2D, 933, 300 }, + { 0x8E679C2F5E44FF8F, 960, 308 }, + { 0xD433179D9C8CB841, 986, 316 }, + { 0x9E19DB92B4E31BA9, 1013, 324 }, + } + }; + + // This computation gives exactly the same results for k as + // k = ceil((kAlpha - e - 1) * 0.30102999566398114) + // for |e| <= 1500, but doesn't require floating-point operations. + // NB: log_10(2) ~= 78913 / 2^18 + JSON_ASSERT(e >= -1500); + JSON_ASSERT(e <= 1500); + const int f = kAlpha - e - 1; + const int k = (f * 78913) / (1 << 18) + static_cast(f > 0); + + const int index = (-kCachedPowersMinDecExp + k + (kCachedPowersDecStep - 1)) / kCachedPowersDecStep; + JSON_ASSERT(index >= 0); + JSON_ASSERT(static_cast(index) < kCachedPowers.size()); + + const cached_power cached = kCachedPowers[static_cast(index)]; + JSON_ASSERT(kAlpha <= cached.e + e + 64); + JSON_ASSERT(kGamma >= cached.e + e + 64); + + return cached; +} + +/*! +For n != 0, returns k, such that pow10 := 10^(k-1) <= n < 10^k. +For n == 0, returns 1 and sets pow10 := 1. 
+*/ +inline int find_largest_pow10(const std::uint32_t n, std::uint32_t& pow10) +{ + // LCOV_EXCL_START + if (n >= 1000000000) + { + pow10 = 1000000000; + return 10; + } + // LCOV_EXCL_STOP + else if (n >= 100000000) + { + pow10 = 100000000; + return 9; + } + else if (n >= 10000000) + { + pow10 = 10000000; + return 8; + } + else if (n >= 1000000) + { + pow10 = 1000000; + return 7; + } + else if (n >= 100000) + { + pow10 = 100000; + return 6; + } + else if (n >= 10000) + { + pow10 = 10000; + return 5; + } + else if (n >= 1000) + { + pow10 = 1000; + return 4; + } + else if (n >= 100) + { + pow10 = 100; + return 3; + } + else if (n >= 10) + { + pow10 = 10; + return 2; + } + else + { + pow10 = 1; + return 1; + } +} + +inline void grisu2_round(char* buf, int len, std::uint64_t dist, std::uint64_t delta, + std::uint64_t rest, std::uint64_t ten_k) +{ + JSON_ASSERT(len >= 1); + JSON_ASSERT(dist <= delta); + JSON_ASSERT(rest <= delta); + JSON_ASSERT(ten_k > 0); + + // <--------------------------- delta ----> + // <---- dist ---------> + // --------------[------------------+-------------------]-------------- + // M- w M+ + // + // ten_k + // <------> + // <---- rest ----> + // --------------[------------------+----+--------------]-------------- + // w V + // = buf * 10^k + // + // ten_k represents a unit-in-the-last-place in the decimal representation + // stored in buf. + // Decrement buf by ten_k while this takes buf closer to w. + + // The tests are written in this order to avoid overflow in unsigned + // integer arithmetic. + + while (rest < dist + && delta - rest >= ten_k + && (rest + ten_k < dist || dist - rest > rest + ten_k - dist)) + { + JSON_ASSERT(buf[len - 1] != '0'); + buf[len - 1]--; + rest += ten_k; + } +} + +/*! +Generates V = buffer * 10^decimal_exponent, such that M- <= V <= M+. +M- and M+ must be normalized and share the same exponent -60 <= e <= -32. 
+*/ +inline void grisu2_digit_gen(char* buffer, int& length, int& decimal_exponent, + diyfp M_minus, diyfp w, diyfp M_plus) +{ + static_assert(kAlpha >= -60, "internal error"); + static_assert(kGamma <= -32, "internal error"); + + // Generates the digits (and the exponent) of a decimal floating-point + // number V = buffer * 10^decimal_exponent in the range [M-, M+]. The diyfp's + // w, M- and M+ share the same exponent e, which satisfies alpha <= e <= gamma. + // + // <--------------------------- delta ----> + // <---- dist ---------> + // --------------[------------------+-------------------]-------------- + // M- w M+ + // + // Grisu2 generates the digits of M+ from left to right and stops as soon as + // V is in [M-,M+]. + + JSON_ASSERT(M_plus.e >= kAlpha); + JSON_ASSERT(M_plus.e <= kGamma); + + std::uint64_t delta = diyfp::sub(M_plus, M_minus).f; // (significand of (M+ - M-), implicit exponent is e) + std::uint64_t dist = diyfp::sub(M_plus, w ).f; // (significand of (M+ - w ), implicit exponent is e) + + // Split M+ = f * 2^e into two parts p1 and p2 (note: e < 0): + // + // M+ = f * 2^e + // = ((f div 2^-e) * 2^-e + (f mod 2^-e)) * 2^e + // = ((p1 ) * 2^-e + (p2 )) * 2^e + // = p1 + p2 * 2^e + + const diyfp one(std::uint64_t{1} << -M_plus.e, M_plus.e); + + auto p1 = static_cast(M_plus.f >> -one.e); // p1 = f div 2^-e (Since -e >= 32, p1 fits into a 32-bit int.) 
+ std::uint64_t p2 = M_plus.f & (one.f - 1); // p2 = f mod 2^-e + + // 1) + // + // Generate the digits of the integral part p1 = d[n-1]...d[1]d[0] + + JSON_ASSERT(p1 > 0); + + std::uint32_t pow10; + const int k = find_largest_pow10(p1, pow10); + + // 10^(k-1) <= p1 < 10^k, pow10 = 10^(k-1) + // + // p1 = (p1 div 10^(k-1)) * 10^(k-1) + (p1 mod 10^(k-1)) + // = (d[k-1] ) * 10^(k-1) + (p1 mod 10^(k-1)) + // + // M+ = p1 + p2 * 2^e + // = d[k-1] * 10^(k-1) + (p1 mod 10^(k-1)) + p2 * 2^e + // = d[k-1] * 10^(k-1) + ((p1 mod 10^(k-1)) * 2^-e + p2) * 2^e + // = d[k-1] * 10^(k-1) + ( rest) * 2^e + // + // Now generate the digits d[n] of p1 from left to right (n = k-1,...,0) + // + // p1 = d[k-1]...d[n] * 10^n + d[n-1]...d[0] + // + // but stop as soon as + // + // rest * 2^e = (d[n-1]...d[0] * 2^-e + p2) * 2^e <= delta * 2^e + + int n = k; + while (n > 0) + { + // Invariants: + // M+ = buffer * 10^n + (p1 + p2 * 2^e) (buffer = 0 for n = k) + // pow10 = 10^(n-1) <= p1 < 10^n + // + const std::uint32_t d = p1 / pow10; // d = p1 div 10^(n-1) + const std::uint32_t r = p1 % pow10; // r = p1 mod 10^(n-1) + // + // M+ = buffer * 10^n + (d * 10^(n-1) + r) + p2 * 2^e + // = (buffer * 10 + d) * 10^(n-1) + (r + p2 * 2^e) + // + JSON_ASSERT(d <= 9); + buffer[length++] = static_cast('0' + d); // buffer := buffer * 10 + d + // + // M+ = buffer * 10^(n-1) + (r + p2 * 2^e) + // + p1 = r; + n--; + // + // M+ = buffer * 10^n + (p1 + p2 * 2^e) + // pow10 = 10^n + // + + // Now check if enough digits have been generated. + // Compute + // + // p1 + p2 * 2^e = (p1 * 2^-e + p2) * 2^e = rest * 2^e + // + // Note: + // Since rest and delta share the same exponent e, it suffices to + // compare the significands. + const std::uint64_t rest = (std::uint64_t{p1} << -one.e) + p2; + if (rest <= delta) + { + // V = buffer * 10^n, with M- <= V <= M+. + + decimal_exponent += n; + + // We may now just stop. But instead look if the buffer could be + // decremented to bring V closer to w. 
+ // + // pow10 = 10^n is now 1 ulp in the decimal representation V. + // The rounding procedure works with diyfp's with an implicit + // exponent of e. + // + // 10^n = (10^n * 2^-e) * 2^e = ulp * 2^e + // + const std::uint64_t ten_n = std::uint64_t{pow10} << -one.e; + grisu2_round(buffer, length, dist, delta, rest, ten_n); + + return; + } + + pow10 /= 10; + // + // pow10 = 10^(n-1) <= p1 < 10^n + // Invariants restored. + } + + // 2) + // + // The digits of the integral part have been generated: + // + // M+ = d[k-1]...d[1]d[0] + p2 * 2^e + // = buffer + p2 * 2^e + // + // Now generate the digits of the fractional part p2 * 2^e. + // + // Note: + // No decimal point is generated: the exponent is adjusted instead. + // + // p2 actually represents the fraction + // + // p2 * 2^e + // = p2 / 2^-e + // = d[-1] / 10^1 + d[-2] / 10^2 + ... + // + // Now generate the digits d[-m] of p1 from left to right (m = 1,2,...) + // + // p2 * 2^e = d[-1]d[-2]...d[-m] * 10^-m + // + 10^-m * (d[-m-1] / 10^1 + d[-m-2] / 10^2 + ...) + // + // using + // + // 10^m * p2 = ((10^m * p2) div 2^-e) * 2^-e + ((10^m * p2) mod 2^-e) + // = ( d) * 2^-e + ( r) + // + // or + // 10^m * p2 * 2^e = d + r * 2^e + // + // i.e. + // + // M+ = buffer + p2 * 2^e + // = buffer + 10^-m * (d + r * 2^e) + // = (buffer * 10^m + d) * 10^-m + 10^-m * r * 2^e + // + // and stop as soon as 10^-m * r * 2^e <= delta * 2^e + + JSON_ASSERT(p2 > delta); + + int m = 0; + for (;;) + { + // Invariant: + // M+ = buffer * 10^-m + 10^-m * (d[-m-1] / 10 + d[-m-2] / 10^2 + ...) 
* 2^e + // = buffer * 10^-m + 10^-m * (p2 ) * 2^e + // = buffer * 10^-m + 10^-m * (1/10 * (10 * p2) ) * 2^e + // = buffer * 10^-m + 10^-m * (1/10 * ((10*p2 div 2^-e) * 2^-e + (10*p2 mod 2^-e)) * 2^e + // + JSON_ASSERT(p2 <= (std::numeric_limits::max)() / 10); + p2 *= 10; + const std::uint64_t d = p2 >> -one.e; // d = (10 * p2) div 2^-e + const std::uint64_t r = p2 & (one.f - 1); // r = (10 * p2) mod 2^-e + // + // M+ = buffer * 10^-m + 10^-m * (1/10 * (d * 2^-e + r) * 2^e + // = buffer * 10^-m + 10^-m * (1/10 * (d + r * 2^e)) + // = (buffer * 10 + d) * 10^(-m-1) + 10^(-m-1) * r * 2^e + // + JSON_ASSERT(d <= 9); + buffer[length++] = static_cast('0' + d); // buffer := buffer * 10 + d + // + // M+ = buffer * 10^(-m-1) + 10^(-m-1) * r * 2^e + // + p2 = r; + m++; + // + // M+ = buffer * 10^-m + 10^-m * p2 * 2^e + // Invariant restored. + + // Check if enough digits have been generated. + // + // 10^-m * p2 * 2^e <= delta * 2^e + // p2 * 2^e <= 10^m * delta * 2^e + // p2 <= 10^m * delta + delta *= 10; + dist *= 10; + if (p2 <= delta) + { + break; + } + } + + // V = buffer * 10^-m, with M- <= V <= M+. + + decimal_exponent -= m; + + // 1 ulp in the decimal representation is now 10^-m. + // Since delta and dist are now scaled by 10^m, we need to do the + // same with ulp in order to keep the units in sync. + // + // 10^m * 10^-m = 1 = 2^-e * 2^e = ten_m * 2^e + // + const std::uint64_t ten_m = one.f; + grisu2_round(buffer, length, dist, delta, p2, ten_m); + + // By construction this algorithm generates the shortest possible decimal + // number (Loitsch, Theorem 6.2) which rounds back to w. + // For an input number of precision p, at least + // + // N = 1 + ceil(p * log_10(2)) + // + // decimal digits are sufficient to identify all binary floating-point + // numbers (Matula, "In-and-Out conversions"). + // This implies that the algorithm does not produce more than N decimal + // digits. 
+ // + // N = 17 for p = 53 (IEEE double precision) + // N = 9 for p = 24 (IEEE single precision) +} + +/*! +v = buf * 10^decimal_exponent +len is the length of the buffer (number of decimal digits) +The buffer must be large enough, i.e. >= max_digits10. +*/ +JSON_HEDLEY_NON_NULL(1) +inline void grisu2(char* buf, int& len, int& decimal_exponent, + diyfp m_minus, diyfp v, diyfp m_plus) +{ + JSON_ASSERT(m_plus.e == m_minus.e); + JSON_ASSERT(m_plus.e == v.e); + + // --------(-----------------------+-----------------------)-------- (A) + // m- v m+ + // + // --------------------(-----------+-----------------------)-------- (B) + // m- v m+ + // + // First scale v (and m- and m+) such that the exponent is in the range + // [alpha, gamma]. + + const cached_power cached = get_cached_power_for_binary_exponent(m_plus.e); + + const diyfp c_minus_k(cached.f, cached.e); // = c ~= 10^-k + + // The exponent of the products is = v.e + c_minus_k.e + q and is in the range [alpha,gamma] + const diyfp w = diyfp::mul(v, c_minus_k); + const diyfp w_minus = diyfp::mul(m_minus, c_minus_k); + const diyfp w_plus = diyfp::mul(m_plus, c_minus_k); + + // ----(---+---)---------------(---+---)---------------(---+---)---- + // w- w w+ + // = c*m- = c*v = c*m+ + // + // diyfp::mul rounds its result and c_minus_k is approximated too. w, w- and + // w+ are now off by a small amount. + // In fact: + // + // w - v * 10^k < 1 ulp + // + // To account for this inaccuracy, add resp. subtract 1 ulp. + // + // --------+---[---------------(---+---)---------------]---+-------- + // w- M- w M+ w+ + // + // Now any number in [M-, M+] (bounds included) will round to w when input, + // regardless of how the input rounding algorithm breaks ties. + // + // And digit_gen generates the shortest possible such number in [M-, M+]. + // Note that this does not mean that Grisu2 always generates the shortest + // possible number in the interval (m-, m+). 
+ const diyfp M_minus(w_minus.f + 1, w_minus.e); + const diyfp M_plus (w_plus.f - 1, w_plus.e ); + + decimal_exponent = -cached.k; // = -(-k) = k + + grisu2_digit_gen(buf, len, decimal_exponent, M_minus, w, M_plus); +} + +/*! +v = buf * 10^decimal_exponent +len is the length of the buffer (number of decimal digits) +The buffer must be large enough, i.e. >= max_digits10. +*/ +template +JSON_HEDLEY_NON_NULL(1) +void grisu2(char* buf, int& len, int& decimal_exponent, FloatType value) +{ + static_assert(diyfp::kPrecision >= std::numeric_limits::digits + 3, + "internal error: not enough precision"); + + JSON_ASSERT(std::isfinite(value)); + JSON_ASSERT(value > 0); + + // If the neighbors (and boundaries) of 'value' are always computed for double-precision + // numbers, all float's can be recovered using strtod (and strtof). However, the resulting + // decimal representations are not exactly "short". + // + // The documentation for 'std::to_chars' (https://en.cppreference.com/w/cpp/utility/to_chars) + // says "value is converted to a string as if by std::sprintf in the default ("C") locale" + // and since sprintf promotes float's to double's, I think this is exactly what 'std::to_chars' + // does. + // On the other hand, the documentation for 'std::to_chars' requires that "parsing the + // representation using the corresponding std::from_chars function recovers value exactly". That + // indicates that single precision floating-point numbers should be recovered using + // 'std::strtof'. + // + // NB: If the neighbors are computed for single-precision numbers, there is a single float + // (7.0385307e-26f) which can't be recovered using strtod. The resulting double precision + // value is off by 1 ulp. +#if 0 + const boundaries w = compute_boundaries(static_cast(value)); +#else + const boundaries w = compute_boundaries(value); +#endif + + grisu2(buf, len, decimal_exponent, w.minus, w.w, w.plus); +} + +/*! 
+@brief appends a decimal representation of e to buf +@return a pointer to the element following the exponent. +@pre -1000 < e < 1000 +*/ +JSON_HEDLEY_NON_NULL(1) +JSON_HEDLEY_RETURNS_NON_NULL +inline char* append_exponent(char* buf, int e) +{ + JSON_ASSERT(e > -1000); + JSON_ASSERT(e < 1000); + + if (e < 0) + { + e = -e; + *buf++ = '-'; + } + else + { + *buf++ = '+'; + } + + auto k = static_cast(e); + if (k < 10) + { + // Always print at least two digits in the exponent. + // This is for compatibility with printf("%g"). + *buf++ = '0'; + *buf++ = static_cast('0' + k); + } + else if (k < 100) + { + *buf++ = static_cast('0' + k / 10); + k %= 10; + *buf++ = static_cast('0' + k); + } + else + { + *buf++ = static_cast('0' + k / 100); + k %= 100; + *buf++ = static_cast('0' + k / 10); + k %= 10; + *buf++ = static_cast('0' + k); + } + + return buf; +} + +/*! +@brief prettify v = buf * 10^decimal_exponent + +If v is in the range [10^min_exp, 10^max_exp) it will be printed in fixed-point +notation. Otherwise it will be printed in exponential notation. + +@pre min_exp < 0 +@pre max_exp > 0 +*/ +JSON_HEDLEY_NON_NULL(1) +JSON_HEDLEY_RETURNS_NON_NULL +inline char* format_buffer(char* buf, int len, int decimal_exponent, + int min_exp, int max_exp) +{ + JSON_ASSERT(min_exp < 0); + JSON_ASSERT(max_exp > 0); + + const int k = len; + const int n = len + decimal_exponent; + + // v = buf * 10^(n-k) + // k is the length of the buffer (number of decimal digits) + // n is the position of the decimal point relative to the start of the buffer. 
+ + if (k <= n && n <= max_exp) + { + // digits[000] + // len <= max_exp + 2 + + std::memset(buf + k, '0', static_cast(n) - static_cast(k)); + // Make it look like a floating-point number (#362, #378) + buf[n + 0] = '.'; + buf[n + 1] = '0'; + return buf + (static_cast(n) + 2); + } + + if (0 < n && n <= max_exp) + { + // dig.its + // len <= max_digits10 + 1 + + JSON_ASSERT(k > n); + + std::memmove(buf + (static_cast(n) + 1), buf + n, static_cast(k) - static_cast(n)); + buf[n] = '.'; + return buf + (static_cast(k) + 1U); + } + + if (min_exp < n && n <= 0) + { + // 0.[000]digits + // len <= 2 + (-min_exp - 1) + max_digits10 + + std::memmove(buf + (2 + static_cast(-n)), buf, static_cast(k)); + buf[0] = '0'; + buf[1] = '.'; + std::memset(buf + 2, '0', static_cast(-n)); + return buf + (2U + static_cast(-n) + static_cast(k)); + } + + if (k == 1) + { + // dE+123 + // len <= 1 + 5 + + buf += 1; + } + else + { + // d.igitsE+123 + // len <= max_digits10 + 1 + 5 + + std::memmove(buf + 2, buf + 1, static_cast(k) - 1); + buf[1] = '.'; + buf += 1 + static_cast(k); + } + + *buf++ = 'e'; + return append_exponent(buf, n - 1); +} + +} // namespace dtoa_impl + +/*! +@brief generates a decimal representation of the floating-point number value in [first, last). + +The format of the resulting decimal representation is similar to printf's %g +format. Returns an iterator pointing past-the-end of the decimal representation. + +@note The input number must be finite, i.e. NaN's and Inf's are not supported. +@note The buffer must be large enough. +@note The result is NOT null-terminated. +*/ +template +JSON_HEDLEY_NON_NULL(1, 2) +JSON_HEDLEY_RETURNS_NON_NULL +char* to_chars(char* first, const char* last, FloatType value) +{ + static_cast(last); // maybe unused - fix warning + JSON_ASSERT(std::isfinite(value)); + + // Use signbit(value) instead of (value < 0) since signbit works for -0. 
+ if (std::signbit(value)) + { + value = -value; + *first++ = '-'; + } + + if (value == 0) // +-0 + { + *first++ = '0'; + // Make it look like a floating-point number (#362, #378) + *first++ = '.'; + *first++ = '0'; + return first; + } + + JSON_ASSERT(last - first >= std::numeric_limits::max_digits10); + + // Compute v = buffer * 10^decimal_exponent. + // The decimal digits are stored in the buffer, which needs to be interpreted + // as an unsigned decimal integer. + // len is the length of the buffer, i.e. the number of decimal digits. + int len = 0; + int decimal_exponent = 0; + dtoa_impl::grisu2(first, len, decimal_exponent, value); + + JSON_ASSERT(len <= std::numeric_limits::max_digits10); + + // Format the buffer like printf("%.*g", prec, value) + constexpr int kMinExp = -4; + // Use digits10 here to increase compatibility with version 2. + constexpr int kMaxExp = std::numeric_limits::digits10; + + JSON_ASSERT(last - first >= kMaxExp + 2); + JSON_ASSERT(last - first >= 2 + (-kMinExp - 1) + std::numeric_limits::max_digits10); + JSON_ASSERT(last - first >= std::numeric_limits::max_digits10 + 6); + + return dtoa_impl::format_buffer(first, len, decimal_exponent, kMinExp, kMaxExp); +} + +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + +// #include + +// #include + +// #include + +// #include + + +namespace nlohmann +{ +namespace detail +{ +/////////////////// +// serialization // +/////////////////// + +/// how to treat decoding errors +enum class error_handler_t +{ + strict, ///< throw a type_error exception in case of invalid UTF-8 + replace, ///< replace invalid UTF-8 sequences with U+FFFD + ignore ///< ignore invalid UTF-8 sequences +}; + +template +class serializer +{ + using string_t = typename BasicJsonType::string_t; + using number_float_t = typename BasicJsonType::number_float_t; + using number_integer_t = typename BasicJsonType::number_integer_t; + using number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using 
binary_char_t = typename BasicJsonType::binary_t::value_type; + static constexpr std::uint8_t UTF8_ACCEPT = 0; + static constexpr std::uint8_t UTF8_REJECT = 1; + + public: + /*! + @param[in] s output stream to serialize to + @param[in] ichar indentation character to use + @param[in] error_handler_ how to react on decoding errors + */ + serializer(output_adapter_t s, const char ichar, + error_handler_t error_handler_ = error_handler_t::strict) + : o(std::move(s)) + , loc(std::localeconv()) + , thousands_sep(loc->thousands_sep == nullptr ? '\0' : std::char_traits::to_char_type(* (loc->thousands_sep))) + , decimal_point(loc->decimal_point == nullptr ? '\0' : std::char_traits::to_char_type(* (loc->decimal_point))) + , indent_char(ichar) + , indent_string(512, indent_char) + , error_handler(error_handler_) + {} + + // delete because of pointer members + serializer(const serializer&) = delete; + serializer& operator=(const serializer&) = delete; + serializer(serializer&&) = delete; + serializer& operator=(serializer&&) = delete; + ~serializer() = default; + + /*! + @brief internal implementation of the serialization function + + This function is called by the public member function dump and organizes + the serialization internally. The indentation level is propagated as + additional parameter. In case of arrays and objects, the function is + called recursively. + + - strings and object keys are escaped using `escape_string()` + - integer numbers are converted implicitly via `operator<<` + - floating-point numbers are converted to a string using `"%g"` format + - binary values are serialized as objects containing the subtype and the + byte array + + @param[in] val value to serialize + @param[in] pretty_print whether the output shall be pretty-printed + @param[in] ensure_ascii If @a ensure_ascii is true, all non-ASCII characters + in the output are escaped with `\uXXXX` sequences, and the result consists + of ASCII characters only. 
+ @param[in] indent_step the indent level + @param[in] current_indent the current indent level (only used internally) + */ + void dump(const BasicJsonType& val, + const bool pretty_print, + const bool ensure_ascii, + const unsigned int indent_step, + const unsigned int current_indent = 0) + { + switch (val.m_type) + { + case value_t::object: + { + if (val.m_value.object->empty()) + { + o->write_characters("{}", 2); + return; + } + + if (pretty_print) + { + o->write_characters("{\n", 2); + + // variable to hold indentation for recursive calls + const auto new_indent = current_indent + indent_step; + if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent)) + { + indent_string.resize(indent_string.size() * 2, ' '); + } + + // first n-1 elements + auto i = val.m_value.object->cbegin(); + for (std::size_t cnt = 0; cnt < val.m_value.object->size() - 1; ++cnt, ++i) + { + o->write_characters(indent_string.c_str(), new_indent); + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\": ", 3); + dump(i->second, true, ensure_ascii, indent_step, new_indent); + o->write_characters(",\n", 2); + } + + // last element + JSON_ASSERT(i != val.m_value.object->cend()); + JSON_ASSERT(std::next(i) == val.m_value.object->cend()); + o->write_characters(indent_string.c_str(), new_indent); + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\": ", 3); + dump(i->second, true, ensure_ascii, indent_step, new_indent); + + o->write_character('\n'); + o->write_characters(indent_string.c_str(), current_indent); + o->write_character('}'); + } + else + { + o->write_character('{'); + + // first n-1 elements + auto i = val.m_value.object->cbegin(); + for (std::size_t cnt = 0; cnt < val.m_value.object->size() - 1; ++cnt, ++i) + { + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\":", 2); + dump(i->second, false, ensure_ascii, indent_step, current_indent); + o->write_character(','); + 
} + + // last element + JSON_ASSERT(i != val.m_value.object->cend()); + JSON_ASSERT(std::next(i) == val.m_value.object->cend()); + o->write_character('\"'); + dump_escaped(i->first, ensure_ascii); + o->write_characters("\":", 2); + dump(i->second, false, ensure_ascii, indent_step, current_indent); + + o->write_character('}'); + } + + return; + } + + case value_t::array: + { + if (val.m_value.array->empty()) + { + o->write_characters("[]", 2); + return; + } + + if (pretty_print) + { + o->write_characters("[\n", 2); + + // variable to hold indentation for recursive calls + const auto new_indent = current_indent + indent_step; + if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent)) + { + indent_string.resize(indent_string.size() * 2, ' '); + } + + // first n-1 elements + for (auto i = val.m_value.array->cbegin(); + i != val.m_value.array->cend() - 1; ++i) + { + o->write_characters(indent_string.c_str(), new_indent); + dump(*i, true, ensure_ascii, indent_step, new_indent); + o->write_characters(",\n", 2); + } + + // last element + JSON_ASSERT(!val.m_value.array->empty()); + o->write_characters(indent_string.c_str(), new_indent); + dump(val.m_value.array->back(), true, ensure_ascii, indent_step, new_indent); + + o->write_character('\n'); + o->write_characters(indent_string.c_str(), current_indent); + o->write_character(']'); + } + else + { + o->write_character('['); + + // first n-1 elements + for (auto i = val.m_value.array->cbegin(); + i != val.m_value.array->cend() - 1; ++i) + { + dump(*i, false, ensure_ascii, indent_step, current_indent); + o->write_character(','); + } + + // last element + JSON_ASSERT(!val.m_value.array->empty()); + dump(val.m_value.array->back(), false, ensure_ascii, indent_step, current_indent); + + o->write_character(']'); + } + + return; + } + + case value_t::string: + { + o->write_character('\"'); + dump_escaped(*val.m_value.string, ensure_ascii); + o->write_character('\"'); + return; + } + + case value_t::binary: + { + if 
(pretty_print) + { + o->write_characters("{\n", 2); + + // variable to hold indentation for recursive calls + const auto new_indent = current_indent + indent_step; + if (JSON_HEDLEY_UNLIKELY(indent_string.size() < new_indent)) + { + indent_string.resize(indent_string.size() * 2, ' '); + } + + o->write_characters(indent_string.c_str(), new_indent); + + o->write_characters("\"bytes\": [", 10); + + if (!val.m_value.binary->empty()) + { + for (auto i = val.m_value.binary->cbegin(); + i != val.m_value.binary->cend() - 1; ++i) + { + dump_integer(*i); + o->write_characters(", ", 2); + } + dump_integer(val.m_value.binary->back()); + } + + o->write_characters("],\n", 3); + o->write_characters(indent_string.c_str(), new_indent); + + o->write_characters("\"subtype\": ", 11); + if (val.m_value.binary->has_subtype()) + { + dump_integer(val.m_value.binary->subtype()); + } + else + { + o->write_characters("null", 4); + } + o->write_character('\n'); + o->write_characters(indent_string.c_str(), current_indent); + o->write_character('}'); + } + else + { + o->write_characters("{\"bytes\":[", 10); + + if (!val.m_value.binary->empty()) + { + for (auto i = val.m_value.binary->cbegin(); + i != val.m_value.binary->cend() - 1; ++i) + { + dump_integer(*i); + o->write_character(','); + } + dump_integer(val.m_value.binary->back()); + } + + o->write_characters("],\"subtype\":", 12); + if (val.m_value.binary->has_subtype()) + { + dump_integer(val.m_value.binary->subtype()); + o->write_character('}'); + } + else + { + o->write_characters("null}", 5); + } + } + return; + } + + case value_t::boolean: + { + if (val.m_value.boolean) + { + o->write_characters("true", 4); + } + else + { + o->write_characters("false", 5); + } + return; + } + + case value_t::number_integer: + { + dump_integer(val.m_value.number_integer); + return; + } + + case value_t::number_unsigned: + { + dump_integer(val.m_value.number_unsigned); + return; + } + + case value_t::number_float: + { + 
dump_float(val.m_value.number_float); + return; + } + + case value_t::discarded: + { + o->write_characters("", 11); + return; + } + + case value_t::null: + { + o->write_characters("null", 4); + return; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } + } + + private: + /*! + @brief dump escaped string + + Escape a string by replacing certain special characters by a sequence of an + escape character (backslash) and another character and other control + characters by a sequence of "\u" followed by a four-digit hex + representation. The escaped string is written to output stream @a o. + + @param[in] s the string to escape + @param[in] ensure_ascii whether to escape non-ASCII characters with + \uXXXX sequences + + @complexity Linear in the length of string @a s. + */ + void dump_escaped(const string_t& s, const bool ensure_ascii) + { + std::uint32_t codepoint; + std::uint8_t state = UTF8_ACCEPT; + std::size_t bytes = 0; // number of bytes written to string_buffer + + // number of bytes written at the point of the last valid byte + std::size_t bytes_after_last_accept = 0; + std::size_t undumped_chars = 0; + + for (std::size_t i = 0; i < s.size(); ++i) + { + const auto byte = static_cast(s[i]); + + switch (decode(state, codepoint, byte)) + { + case UTF8_ACCEPT: // decode found a new code point + { + switch (codepoint) + { + case 0x08: // backspace + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'b'; + break; + } + + case 0x09: // horizontal tab + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 't'; + break; + } + + case 0x0A: // newline + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'n'; + break; + } + + case 0x0C: // formfeed + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'f'; + break; + } + + case 0x0D: // carriage return + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'r'; + break; + } + + case 0x22: // quotation mark + { + string_buffer[bytes++] = '\\'; + 
string_buffer[bytes++] = '\"'; + break; + } + + case 0x5C: // reverse solidus + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = '\\'; + break; + } + + default: + { + // escape control characters (0x00..0x1F) or, if + // ensure_ascii parameter is used, non-ASCII characters + if ((codepoint <= 0x1F) || (ensure_ascii && (codepoint >= 0x7F))) + { + if (codepoint <= 0xFFFF) + { + (std::snprintf)(string_buffer.data() + bytes, 7, "\\u%04x", + static_cast(codepoint)); + bytes += 6; + } + else + { + (std::snprintf)(string_buffer.data() + bytes, 13, "\\u%04x\\u%04x", + static_cast(0xD7C0u + (codepoint >> 10u)), + static_cast(0xDC00u + (codepoint & 0x3FFu))); + bytes += 12; + } + } + else + { + // copy byte to buffer (all previous bytes + // been copied have in default case above) + string_buffer[bytes++] = s[i]; + } + break; + } + } + + // write buffer and reset index; there must be 13 bytes + // left, as this is the maximal number of bytes to be + // written ("\uxxxx\uxxxx\0") for one code point + if (string_buffer.size() - bytes < 13) + { + o->write_characters(string_buffer.data(), bytes); + bytes = 0; + } + + // remember the byte position of this accept + bytes_after_last_accept = bytes; + undumped_chars = 0; + break; + } + + case UTF8_REJECT: // decode found invalid UTF-8 byte + { + switch (error_handler) + { + case error_handler_t::strict: + { + std::string sn(3, '\0'); + (std::snprintf)(&sn[0], sn.size(), "%.2X", byte); + JSON_THROW(type_error::create(316, "invalid UTF-8 byte at index " + std::to_string(i) + ": 0x" + sn)); + } + + case error_handler_t::ignore: + case error_handler_t::replace: + { + // in case we saw this character the first time, we + // would like to read it again, because the byte + // may be OK for itself, but just not OK for the + // previous sequence + if (undumped_chars > 0) + { + --i; + } + + // reset length buffer to the last accepted index; + // thus removing/ignoring the invalid characters + bytes = bytes_after_last_accept; + + 
if (error_handler == error_handler_t::replace) + { + // add a replacement character + if (ensure_ascii) + { + string_buffer[bytes++] = '\\'; + string_buffer[bytes++] = 'u'; + string_buffer[bytes++] = 'f'; + string_buffer[bytes++] = 'f'; + string_buffer[bytes++] = 'f'; + string_buffer[bytes++] = 'd'; + } + else + { + string_buffer[bytes++] = detail::binary_writer::to_char_type('\xEF'); + string_buffer[bytes++] = detail::binary_writer::to_char_type('\xBF'); + string_buffer[bytes++] = detail::binary_writer::to_char_type('\xBD'); + } + + // write buffer and reset index; there must be 13 bytes + // left, as this is the maximal number of bytes to be + // written ("\uxxxx\uxxxx\0") for one code point + if (string_buffer.size() - bytes < 13) + { + o->write_characters(string_buffer.data(), bytes); + bytes = 0; + } + + bytes_after_last_accept = bytes; + } + + undumped_chars = 0; + + // continue processing the string + state = UTF8_ACCEPT; + break; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } + break; + } + + default: // decode found yet incomplete multi-byte code point + { + if (!ensure_ascii) + { + // code point will not be escaped - copy byte to buffer + string_buffer[bytes++] = s[i]; + } + ++undumped_chars; + break; + } + } + } + + // we finished processing the string + if (JSON_HEDLEY_LIKELY(state == UTF8_ACCEPT)) + { + // write buffer + if (bytes > 0) + { + o->write_characters(string_buffer.data(), bytes); + } + } + else + { + // we finish reading, but do not accept: string was incomplete + switch (error_handler) + { + case error_handler_t::strict: + { + std::string sn(3, '\0'); + (std::snprintf)(&sn[0], sn.size(), "%.2X", static_cast(s.back())); + JSON_THROW(type_error::create(316, "incomplete UTF-8 string; last byte: 0x" + sn)); + } + + case error_handler_t::ignore: + { + // write all accepted bytes + o->write_characters(string_buffer.data(), bytes_after_last_accept); + break; + } + + case error_handler_t::replace: + { + // write all 
accepted bytes + o->write_characters(string_buffer.data(), bytes_after_last_accept); + // add a replacement character + if (ensure_ascii) + { + o->write_characters("\\ufffd", 6); + } + else + { + o->write_characters("\xEF\xBF\xBD", 3); + } + break; + } + + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } + } + } + + /*! + @brief count digits + + Count the number of decimal (base 10) digits for an input unsigned integer. + + @param[in] x unsigned integer number to count its digits + @return number of decimal digits + */ + inline unsigned int count_digits(number_unsigned_t x) noexcept + { + unsigned int n_digits = 1; + for (;;) + { + if (x < 10) + { + return n_digits; + } + if (x < 100) + { + return n_digits + 1; + } + if (x < 1000) + { + return n_digits + 2; + } + if (x < 10000) + { + return n_digits + 3; + } + x = x / 10000u; + n_digits += 4; + } + } + + /*! + @brief dump an integer + + Dump a given integer to output stream @a o. Works internally with + @a number_buffer. 
+ + @param[in] x integer number (signed or unsigned) to dump + @tparam NumberType either @a number_integer_t or @a number_unsigned_t + */ + template < typename NumberType, detail::enable_if_t < + std::is_same::value || + std::is_same::value || + std::is_same::value, + int > = 0 > + void dump_integer(NumberType x) + { + static constexpr std::array, 100> digits_to_99 + { + { + {{'0', '0'}}, {{'0', '1'}}, {{'0', '2'}}, {{'0', '3'}}, {{'0', '4'}}, {{'0', '5'}}, {{'0', '6'}}, {{'0', '7'}}, {{'0', '8'}}, {{'0', '9'}}, + {{'1', '0'}}, {{'1', '1'}}, {{'1', '2'}}, {{'1', '3'}}, {{'1', '4'}}, {{'1', '5'}}, {{'1', '6'}}, {{'1', '7'}}, {{'1', '8'}}, {{'1', '9'}}, + {{'2', '0'}}, {{'2', '1'}}, {{'2', '2'}}, {{'2', '3'}}, {{'2', '4'}}, {{'2', '5'}}, {{'2', '6'}}, {{'2', '7'}}, {{'2', '8'}}, {{'2', '9'}}, + {{'3', '0'}}, {{'3', '1'}}, {{'3', '2'}}, {{'3', '3'}}, {{'3', '4'}}, {{'3', '5'}}, {{'3', '6'}}, {{'3', '7'}}, {{'3', '8'}}, {{'3', '9'}}, + {{'4', '0'}}, {{'4', '1'}}, {{'4', '2'}}, {{'4', '3'}}, {{'4', '4'}}, {{'4', '5'}}, {{'4', '6'}}, {{'4', '7'}}, {{'4', '8'}}, {{'4', '9'}}, + {{'5', '0'}}, {{'5', '1'}}, {{'5', '2'}}, {{'5', '3'}}, {{'5', '4'}}, {{'5', '5'}}, {{'5', '6'}}, {{'5', '7'}}, {{'5', '8'}}, {{'5', '9'}}, + {{'6', '0'}}, {{'6', '1'}}, {{'6', '2'}}, {{'6', '3'}}, {{'6', '4'}}, {{'6', '5'}}, {{'6', '6'}}, {{'6', '7'}}, {{'6', '8'}}, {{'6', '9'}}, + {{'7', '0'}}, {{'7', '1'}}, {{'7', '2'}}, {{'7', '3'}}, {{'7', '4'}}, {{'7', '5'}}, {{'7', '6'}}, {{'7', '7'}}, {{'7', '8'}}, {{'7', '9'}}, + {{'8', '0'}}, {{'8', '1'}}, {{'8', '2'}}, {{'8', '3'}}, {{'8', '4'}}, {{'8', '5'}}, {{'8', '6'}}, {{'8', '7'}}, {{'8', '8'}}, {{'8', '9'}}, + {{'9', '0'}}, {{'9', '1'}}, {{'9', '2'}}, {{'9', '3'}}, {{'9', '4'}}, {{'9', '5'}}, {{'9', '6'}}, {{'9', '7'}}, {{'9', '8'}}, {{'9', '9'}}, + } + }; + + // special case for "0" + if (x == 0) + { + o->write_character('0'); + return; + } + + // use a pointer to fill the buffer + auto buffer_ptr = number_buffer.begin(); + + const bool 
is_negative = std::is_same::value && !(x >= 0); // see issue #755 + number_unsigned_t abs_value; + + unsigned int n_chars; + + if (is_negative) + { + *buffer_ptr = '-'; + abs_value = remove_sign(static_cast(x)); + + // account one more byte for the minus sign + n_chars = 1 + count_digits(abs_value); + } + else + { + abs_value = static_cast(x); + n_chars = count_digits(abs_value); + } + + // spare 1 byte for '\0' + JSON_ASSERT(n_chars < number_buffer.size() - 1); + + // jump to the end to generate the string from backward + // so we later avoid reversing the result + buffer_ptr += n_chars; + + // Fast int2ascii implementation inspired by "Fastware" talk by Andrei Alexandrescu + // See: https://www.youtube.com/watch?v=o4-CwDo2zpg + while (abs_value >= 100) + { + const auto digits_index = static_cast((abs_value % 100)); + abs_value /= 100; + *(--buffer_ptr) = digits_to_99[digits_index][1]; + *(--buffer_ptr) = digits_to_99[digits_index][0]; + } + + if (abs_value >= 10) + { + const auto digits_index = static_cast(abs_value); + *(--buffer_ptr) = digits_to_99[digits_index][1]; + *(--buffer_ptr) = digits_to_99[digits_index][0]; + } + else + { + *(--buffer_ptr) = static_cast('0' + abs_value); + } + + o->write_characters(number_buffer.data(), n_chars); + } + + /*! + @brief dump a floating-point number + + Dump a given floating-point number to output stream @a o. Works internally + with @a number_buffer. + + @param[in] x floating-point number to dump + */ + void dump_float(number_float_t x) + { + // NaN / inf + if (!std::isfinite(x)) + { + o->write_characters("null", 4); + return; + } + + // If number_float_t is an IEEE-754 single or double precision number, + // use the Grisu2 algorithm to produce short numbers which are + // guaranteed to round-trip, using strtof and strtod, resp. + // + // NB: The test below works if == . 
+ static constexpr bool is_ieee_single_or_double + = (std::numeric_limits::is_iec559 && std::numeric_limits::digits == 24 && std::numeric_limits::max_exponent == 128) || + (std::numeric_limits::is_iec559 && std::numeric_limits::digits == 53 && std::numeric_limits::max_exponent == 1024); + + dump_float(x, std::integral_constant()); + } + + void dump_float(number_float_t x, std::true_type /*is_ieee_single_or_double*/) + { + char* begin = number_buffer.data(); + char* end = ::nlohmann::detail::to_chars(begin, begin + number_buffer.size(), x); + + o->write_characters(begin, static_cast(end - begin)); + } + + void dump_float(number_float_t x, std::false_type /*is_ieee_single_or_double*/) + { + // get number of digits for a float -> text -> float round-trip + static constexpr auto d = std::numeric_limits::max_digits10; + + // the actual conversion + std::ptrdiff_t len = (std::snprintf)(number_buffer.data(), number_buffer.size(), "%.*g", d, x); + + // negative value indicates an error + JSON_ASSERT(len > 0); + // check if buffer was large enough + JSON_ASSERT(static_cast(len) < number_buffer.size()); + + // erase thousands separator + if (thousands_sep != '\0') + { + const auto end = std::remove(number_buffer.begin(), + number_buffer.begin() + len, thousands_sep); + std::fill(end, number_buffer.end(), '\0'); + JSON_ASSERT((end - number_buffer.begin()) <= len); + len = (end - number_buffer.begin()); + } + + // convert decimal point to '.' + if (decimal_point != '\0' && decimal_point != '.') + { + const auto dec_pos = std::find(number_buffer.begin(), number_buffer.end(), decimal_point); + if (dec_pos != number_buffer.end()) + { + *dec_pos = '.'; + } + } + + o->write_characters(number_buffer.data(), static_cast(len)); + + // determine if need to append ".0" + const bool value_is_int_like = + std::none_of(number_buffer.begin(), number_buffer.begin() + len + 1, + [](char c) + { + return c == '.' 
|| c == 'e'; + }); + + if (value_is_int_like) + { + o->write_characters(".0", 2); + } + } + + /*! + @brief check whether a string is UTF-8 encoded + + The function checks each byte of a string whether it is UTF-8 encoded. The + result of the check is stored in the @a state parameter. The function must + be called initially with state 0 (accept). State 1 means the string must + be rejected, because the current byte is not allowed. If the string is + completely processed, but the state is non-zero, the string ended + prematurely; that is, the last byte indicated more bytes should have + followed. + + @param[in,out] state the state of the decoding + @param[in,out] codep codepoint (valid only if resulting state is UTF8_ACCEPT) + @param[in] byte next byte to decode + @return new state + + @note The function has been edited: a std::array is used. + + @copyright Copyright (c) 2008-2009 Bjoern Hoehrmann + @sa http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + */ + static std::uint8_t decode(std::uint8_t& state, std::uint32_t& codep, const std::uint8_t byte) noexcept + { + static const std::array utf8d = + { + { + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 00..1F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 20..3F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 40..5F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 60..7F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 80..9F + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, // A0..BF + 8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // C0..DF + 0xA, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, // E0..EF + 0xB, 0x6, 0x6, 0x6, 0x5, 0x8, 
0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, // F0..FF + 0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, // s0..s0 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, // s1..s2 + 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, // s3..s4 + 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, // s5..s6 + 1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 // s7..s8 + } + }; + + const std::uint8_t type = utf8d[byte]; + + codep = (state != UTF8_ACCEPT) + ? (byte & 0x3fu) | (codep << 6u) + : (0xFFu >> type) & (byte); + + std::size_t index = 256u + static_cast(state) * 16u + static_cast(type); + JSON_ASSERT(index < 400); + state = utf8d[index]; + return state; + } + + /* + * Overload to make the compiler happy while it is instantiating + * dump_integer for number_unsigned_t. + * Must never be called. + */ + number_unsigned_t remove_sign(number_unsigned_t x) + { + JSON_ASSERT(false); // LCOV_EXCL_LINE + return x; // LCOV_EXCL_LINE + } + + /* + * Helper function for dump_integer + * + * This function takes a negative signed integer and returns its absolute + * value as unsigned integer. The plus/minus shuffling is necessary as we can + * not directly remove the sign of an arbitrary signed integer as the + * absolute values of INT_MIN and INT_MAX are usually not the same. See + * #1708 for details. 
+ */ + inline number_unsigned_t remove_sign(number_integer_t x) noexcept + { + JSON_ASSERT(x < 0 && x < (std::numeric_limits::max)()); + return static_cast(-(x + 1)) + 1; + } + + private: + /// the output of the serializer + output_adapter_t o = nullptr; + + /// a (hopefully) large enough character buffer + std::array number_buffer{{}}; + + /// the locale + const std::lconv* loc = nullptr; + /// the locale's thousand separator character + const char thousands_sep = '\0'; + /// the locale's decimal point character + const char decimal_point = '\0'; + + /// string buffer + std::array string_buffer{{}}; + + /// the indentation character + const char indent_char; + /// the indentation string + string_t indent_string; + + /// error_handler how to react on decoding errors + const error_handler_t error_handler; +}; +} // namespace detail +} // namespace nlohmann + +// #include + +// #include + +// #include + + +#include // less +#include // allocator +#include // pair +#include // vector + +namespace nlohmann +{ + +/// ordered_map: a minimal map-like container that preserves insertion order +/// for use within nlohmann::basic_json +template , + class Allocator = std::allocator>> + struct ordered_map : std::vector, Allocator> +{ + using key_type = Key; + using mapped_type = T; + using Container = std::vector, Allocator>; + using typename Container::iterator; + using typename Container::const_iterator; + using typename Container::size_type; + using typename Container::value_type; + + // Explicit constructors instead of `using Container::Container` + // otherwise older compilers choke on it (GCC <= 5.5, xcode <= 9.4) + ordered_map(const Allocator& alloc = Allocator()) : Container{alloc} {} + template + ordered_map(It first, It last, const Allocator& alloc = Allocator()) + : Container{first, last, alloc} {} + ordered_map(std::initializer_list init, const Allocator& alloc = Allocator() ) + : Container{init, alloc} {} + + std::pair emplace(const key_type& key, T&& t) + { + for 
(auto it = this->begin(); it != this->end(); ++it) + { + if (it->first == key) + { + return {it, false}; + } + } + Container::emplace_back(key, t); + return {--this->end(), true}; + } + + T& operator[](const Key& key) + { + return emplace(key, T{}).first->second; + } + + const T& operator[](const Key& key) const + { + return at(key); + } + + T& at(const Key& key) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (it->first == key) + { + return it->second; + } + } + + throw std::out_of_range("key not found"); + } + + const T& at(const Key& key) const + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (it->first == key) + { + return it->second; + } + } + + throw std::out_of_range("key not found"); + } + + size_type erase(const Key& key) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (it->first == key) + { + // Since we cannot move const Keys, re-construct them in place + for (auto next = it; ++next != this->end(); ++it) + { + it->~value_type(); // Destroy but keep allocation + new (&*it) value_type{std::move(*next)}; + } + Container::pop_back(); + return 1; + } + } + return 0; + } + + iterator erase(iterator pos) + { + auto it = pos; + + // Since we cannot move const Keys, re-construct them in place + for (auto next = it; ++next != this->end(); ++it) + { + it->~value_type(); // Destroy but keep allocation + new (&*it) value_type{std::move(*next)}; + } + Container::pop_back(); + return pos; + } + + size_type count(const Key& key) const + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (it->first == key) + { + return 1; + } + } + return 0; + } + + iterator find(const Key& key) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (it->first == key) + { + return it; + } + } + return Container::end(); + } + + const_iterator find(const Key& key) const + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (it->first == key) + { + return it; + } + } + return 
Container::end(); + } + + std::pair insert( value_type&& value ) + { + return emplace(value.first, std::move(value.second)); + } + + std::pair insert( const value_type& value ) + { + for (auto it = this->begin(); it != this->end(); ++it) + { + if (it->first == value.first) + { + return {it, false}; + } + } + Container::push_back(value); + return {--this->end(), true}; + } +}; + +} // namespace nlohmann + + +/*! +@brief namespace for Niels Lohmann +@see https://github.com/nlohmann +@since version 1.0.0 +*/ +namespace nlohmann +{ + +/*! +@brief a class to store JSON values + +@tparam ObjectType type for JSON objects (`std::map` by default; will be used +in @ref object_t) +@tparam ArrayType type for JSON arrays (`std::vector` by default; will be used +in @ref array_t) +@tparam StringType type for JSON strings and object keys (`std::string` by +default; will be used in @ref string_t) +@tparam BooleanType type for JSON booleans (`bool` by default; will be used +in @ref boolean_t) +@tparam NumberIntegerType type for JSON integer numbers (`int64_t` by +default; will be used in @ref number_integer_t) +@tparam NumberUnsignedType type for JSON unsigned integer numbers (@c +`uint64_t` by default; will be used in @ref number_unsigned_t) +@tparam NumberFloatType type for JSON floating-point numbers (`double` by +default; will be used in @ref number_float_t) +@tparam BinaryType type for packed binary data for compatibility with binary +serialization formats (`std::vector` by default; will be used in +@ref binary_t) +@tparam AllocatorType type of the allocator to use (`std::allocator` by +default) +@tparam JSONSerializer the serializer to resolve internal calls to `to_json()` +and `from_json()` (@ref adl_serializer by default) + +@requirement The class satisfies the following concept requirements: +- Basic + - [DefaultConstructible](https://en.cppreference.com/w/cpp/named_req/DefaultConstructible): + JSON values can be default constructed. The result will be a JSON null + value. 
  - [MoveConstructible](https://en.cppreference.com/w/cpp/named_req/MoveConstructible):
    A JSON value can be constructed from an rvalue argument.
  - [CopyConstructible](https://en.cppreference.com/w/cpp/named_req/CopyConstructible):
    A JSON value can be copy-constructed from an lvalue expression.
  - [MoveAssignable](https://en.cppreference.com/w/cpp/named_req/MoveAssignable):
    A JSON value can be assigned from an rvalue argument.
  - [CopyAssignable](https://en.cppreference.com/w/cpp/named_req/CopyAssignable):
    A JSON value can be copy-assigned from an lvalue expression.
  - [Destructible](https://en.cppreference.com/w/cpp/named_req/Destructible):
    JSON values can be destructed.
- Layout
  - [StandardLayoutType](https://en.cppreference.com/w/cpp/named_req/StandardLayoutType):
    JSON values have
    [standard layout](https://en.cppreference.com/w/cpp/language/data_members#Standard_layout):
    All non-static data members are private and standard layout types, the
    class has no virtual functions or (virtual) base classes.
- Library-wide
  - [EqualityComparable](https://en.cppreference.com/w/cpp/named_req/EqualityComparable):
    JSON values can be compared with `==`, see @ref
    operator==(const_reference,const_reference).
  - [LessThanComparable](https://en.cppreference.com/w/cpp/named_req/LessThanComparable):
    JSON values can be compared with `<`, see @ref
    operator<(const_reference,const_reference).
  - [Swappable](https://en.cppreference.com/w/cpp/named_req/Swappable):
    Any JSON lvalue or rvalue can be swapped with any lvalue or rvalue of
    other compatible types, using unqualified function call @ref swap().
  - [NullablePointer](https://en.cppreference.com/w/cpp/named_req/NullablePointer):
    JSON values can be compared against `std::nullptr_t` objects which are used
    to model the `null` value.
- Container
  - [Container](https://en.cppreference.com/w/cpp/named_req/Container):
    JSON values can be used like STL containers and provide iterator access.
+ - [ReversibleContainer](https://en.cppreference.com/w/cpp/named_req/ReversibleContainer); + JSON values can be used like STL containers and provide reverse iterator + access. + +@invariant The member variables @a m_value and @a m_type have the following +relationship: +- If `m_type == value_t::object`, then `m_value.object != nullptr`. +- If `m_type == value_t::array`, then `m_value.array != nullptr`. +- If `m_type == value_t::string`, then `m_value.string != nullptr`. +The invariants are checked by member function assert_invariant(). + +@internal +@note ObjectType trick from https://stackoverflow.com/a/9860911 +@endinternal + +@see [RFC 7159: The JavaScript Object Notation (JSON) Data Interchange +Format](http://rfc7159.net/rfc7159) + +@since version 1.0.0 + +@nosubgrouping +*/ +NLOHMANN_BASIC_JSON_TPL_DECLARATION +class basic_json +{ + private: + template friend struct detail::external_constructor; + friend ::nlohmann::json_pointer; + + template + friend class ::nlohmann::detail::parser; + friend ::nlohmann::detail::serializer; + template + friend class ::nlohmann::detail::iter_impl; + template + friend class ::nlohmann::detail::binary_writer; + template + friend class ::nlohmann::detail::binary_reader; + template + friend class ::nlohmann::detail::json_sax_dom_parser; + template + friend class ::nlohmann::detail::json_sax_dom_callback_parser; + + /// workaround type for MSVC + using basic_json_t = NLOHMANN_BASIC_JSON_TPL; + + // convenience aliases for types residing in namespace detail; + using lexer = ::nlohmann::detail::lexer_base; + + template + static ::nlohmann::detail::parser parser( + InputAdapterType adapter, + detail::parser_callback_tcb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false + ) + { + return ::nlohmann::detail::parser(std::move(adapter), + std::move(cb), allow_exceptions, ignore_comments); + } + + using primitive_iterator_t = ::nlohmann::detail::primitive_iterator_t; + template + using internal_iterator 
= ::nlohmann::detail::internal_iterator; + template + using iter_impl = ::nlohmann::detail::iter_impl; + template + using iteration_proxy = ::nlohmann::detail::iteration_proxy; + template using json_reverse_iterator = ::nlohmann::detail::json_reverse_iterator; + + template + using output_adapter_t = ::nlohmann::detail::output_adapter_t; + + template + using binary_reader = ::nlohmann::detail::binary_reader; + template using binary_writer = ::nlohmann::detail::binary_writer; + + using serializer = ::nlohmann::detail::serializer; + + public: + using value_t = detail::value_t; + /// JSON Pointer, see @ref nlohmann::json_pointer + using json_pointer = ::nlohmann::json_pointer; + template + using json_serializer = JSONSerializer; + /// how to treat decoding errors + using error_handler_t = detail::error_handler_t; + /// how to treat CBOR tags + using cbor_tag_handler_t = detail::cbor_tag_handler_t; + /// helper type for initializer lists of basic_json values + using initializer_list_t = std::initializer_list>; + + using input_format_t = detail::input_format_t; + /// SAX interface type, see @ref nlohmann::json_sax + using json_sax_t = json_sax; + + //////////////// + // exceptions // + //////////////// + + /// @name exceptions + /// Classes to implement user-defined exceptions. + /// @{ + + /// @copydoc detail::exception + using exception = detail::exception; + /// @copydoc detail::parse_error + using parse_error = detail::parse_error; + /// @copydoc detail::invalid_iterator + using invalid_iterator = detail::invalid_iterator; + /// @copydoc detail::type_error + using type_error = detail::type_error; + /// @copydoc detail::out_of_range + using out_of_range = detail::out_of_range; + /// @copydoc detail::other_error + using other_error = detail::other_error; + + /// @} + + + ///////////////////// + // container types // + ///////////////////// + + /// @name container types + /// The canonic container types to use @ref basic_json like any other STL + /// container. 
+ /// @{ + + /// the type of elements in a basic_json container + using value_type = basic_json; + + /// the type of an element reference + using reference = value_type&; + /// the type of an element const reference + using const_reference = const value_type&; + + /// a type to represent differences between iterators + using difference_type = std::ptrdiff_t; + /// a type to represent container sizes + using size_type = std::size_t; + + /// the allocator type + using allocator_type = AllocatorType; + + /// the type of an element pointer + using pointer = typename std::allocator_traits::pointer; + /// the type of an element const pointer + using const_pointer = typename std::allocator_traits::const_pointer; + + /// an iterator for a basic_json container + using iterator = iter_impl; + /// a const iterator for a basic_json container + using const_iterator = iter_impl; + /// a reverse iterator for a basic_json container + using reverse_iterator = json_reverse_iterator; + /// a const reverse iterator for a basic_json container + using const_reverse_iterator = json_reverse_iterator; + + /// @} + + + /*! + @brief returns the allocator associated with the container + */ + static allocator_type get_allocator() + { + return allocator_type(); + } + + /*! + @brief returns version information on the library + + This function returns a JSON object with information about the library, + including the version number and information on the platform and compiler. + + @return JSON object holding version information + key | description + ----------- | --------------- + `compiler` | Information on the used compiler. It is an object with the following keys: `c++` (the used C++ standard), `family` (the compiler family; possible values are `clang`, `icc`, `gcc`, `ilecpp`, `msvc`, `pgcpp`, `sunpro`, and `unknown`), and `version` (the compiler version). + `copyright` | The copyright line for the library as string. + `name` | The name of the library as string. 
+ `platform` | The used platform as string. Possible values are `win32`, `linux`, `apple`, `unix`, and `unknown`. + `url` | The URL of the project as string. + `version` | The version of the library. It is an object with the following keys: `major`, `minor`, and `patch` as defined by [Semantic Versioning](http://semver.org), and `string` (the version string). + + @liveexample{The following code shows an example output of the `meta()` + function.,meta} + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. + + @complexity Constant. + + @since 2.1.0 + */ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json meta() + { + basic_json result; + + result["copyright"] = "(C) 2013-2020 Niels Lohmann"; + result["name"] = "JSON for Modern C++"; + result["url"] = "https://github.com/nlohmann/json"; + result["version"]["string"] = + std::to_string(NLOHMANN_JSON_VERSION_MAJOR) + "." + + std::to_string(NLOHMANN_JSON_VERSION_MINOR) + "." + + std::to_string(NLOHMANN_JSON_VERSION_PATCH); + result["version"]["major"] = NLOHMANN_JSON_VERSION_MAJOR; + result["version"]["minor"] = NLOHMANN_JSON_VERSION_MINOR; + result["version"]["patch"] = NLOHMANN_JSON_VERSION_PATCH; + +#ifdef _WIN32 + result["platform"] = "win32"; +#elif defined __linux__ + result["platform"] = "linux"; +#elif defined __APPLE__ + result["platform"] = "apple"; +#elif defined __unix__ + result["platform"] = "unix"; +#else + result["platform"] = "unknown"; +#endif + +#if defined(__ICC) || defined(__INTEL_COMPILER) + result["compiler"] = {{"family", "icc"}, {"version", __INTEL_COMPILER}}; +#elif defined(__clang__) + result["compiler"] = {{"family", "clang"}, {"version", __clang_version__}}; +#elif defined(__GNUC__) || defined(__GNUG__) + result["compiler"] = {{"family", "gcc"}, {"version", std::to_string(__GNUC__) + "." + std::to_string(__GNUC_MINOR__) + "." 
+ std::to_string(__GNUC_PATCHLEVEL__)}}; +#elif defined(__HP_cc) || defined(__HP_aCC) + result["compiler"] = "hp" +#elif defined(__IBMCPP__) + result["compiler"] = {{"family", "ilecpp"}, {"version", __IBMCPP__}}; +#elif defined(_MSC_VER) + result["compiler"] = {{"family", "msvc"}, {"version", _MSC_VER}}; +#elif defined(__PGI) + result["compiler"] = {{"family", "pgcpp"}, {"version", __PGI}}; +#elif defined(__SUNPRO_CC) + result["compiler"] = {{"family", "sunpro"}, {"version", __SUNPRO_CC}}; +#else + result["compiler"] = {{"family", "unknown"}, {"version", "unknown"}}; +#endif + +#ifdef __cplusplus + result["compiler"]["c++"] = std::to_string(__cplusplus); +#else + result["compiler"]["c++"] = "unknown"; +#endif + return result; + } + + + /////////////////////////// + // JSON value data types // + /////////////////////////// + + /// @name JSON value data types + /// The data types to store a JSON value. These types are derived from + /// the template arguments passed to class @ref basic_json. + /// @{ + +#if defined(JSON_HAS_CPP_14) + // Use transparent comparator if possible, combined with perfect forwarding + // on find() and count() calls prevents unnecessary string construction. + using object_comparator_t = std::less<>; +#else + using object_comparator_t = std::less; +#endif + + /*! + @brief a type for an object + + [RFC 7159](http://rfc7159.net/rfc7159) describes JSON objects as follows: + > An object is an unordered collection of zero or more name/value pairs, + > where a name is a string and a value is a string, number, boolean, null, + > object, or array. + + To store objects in C++, a type is defined by the template parameters + described below. + + @tparam ObjectType the container to store objects (e.g., `std::map` or + `std::unordered_map`) + @tparam StringType the type of the keys or names (e.g., `std::string`). + The comparison function `std::less` is used to order elements + inside the container. 
+ @tparam AllocatorType the allocator to use for objects (e.g., + `std::allocator`) + + #### Default type + + With the default values for @a ObjectType (`std::map`), @a StringType + (`std::string`), and @a AllocatorType (`std::allocator`), the default + value for @a object_t is: + + @code {.cpp} + std::map< + std::string, // key_type + basic_json, // value_type + std::less, // key_compare + std::allocator> // allocator_type + > + @endcode + + #### Behavior + + The choice of @a object_t influences the behavior of the JSON class. With + the default type, objects have the following behavior: + + - When all names are unique, objects will be interoperable in the sense + that all software implementations receiving that object will agree on + the name-value mappings. + - When the names within an object are not unique, it is unspecified which + one of the values for a given key will be chosen. For instance, + `{"key": 2, "key": 1}` could be equal to either `{"key": 1}` or + `{"key": 2}`. + - Internally, name/value pairs are stored in lexicographical order of the + names. Objects will also be serialized (see @ref dump) in this order. + For instance, `{"b": 1, "a": 2}` and `{"a": 2, "b": 1}` will be stored + and serialized as `{"a": 2, "b": 1}`. + - When comparing objects, the order of the name/value pairs is irrelevant. + This makes objects interoperable in the sense that they will not be + affected by these differences. For instance, `{"b": 1, "a": 2}` and + `{"a": 2, "b": 1}` will be treated as equal. + + #### Limits + + [RFC 7159](http://rfc7159.net/rfc7159) specifies: + > An implementation may set limits on the maximum depth of nesting. + + In this class, the object's limit of nesting is not explicitly constrained. + However, a maximum depth of nesting may be introduced by the compiler or + runtime environment. A theoretical limit can be queried by calling the + @ref max_size function of a JSON object. 
+ + #### Storage + + Objects are stored as pointers in a @ref basic_json type. That is, for any + access to object values, a pointer of type `object_t*` must be + dereferenced. + + @sa @ref array_t -- type for an array value + + @since version 1.0.0 + + @note The order name/value pairs are added to the object is *not* + preserved by the library. Therefore, iterating an object may return + name/value pairs in a different order than they were originally stored. In + fact, keys will be traversed in alphabetical order as `std::map` with + `std::less` is used by default. Please note this behavior conforms to [RFC + 7159](http://rfc7159.net/rfc7159), because any order implements the + specified "unordered" nature of JSON objects. + */ + using object_t = ObjectType>>; + + /*! + @brief a type for an array + + [RFC 7159](http://rfc7159.net/rfc7159) describes JSON arrays as follows: + > An array is an ordered sequence of zero or more values. + + To store objects in C++, a type is defined by the template parameters + explained below. + + @tparam ArrayType container type to store arrays (e.g., `std::vector` or + `std::list`) + @tparam AllocatorType allocator to use for arrays (e.g., `std::allocator`) + + #### Default type + + With the default values for @a ArrayType (`std::vector`) and @a + AllocatorType (`std::allocator`), the default value for @a array_t is: + + @code {.cpp} + std::vector< + basic_json, // value_type + std::allocator // allocator_type + > + @endcode + + #### Limits + + [RFC 7159](http://rfc7159.net/rfc7159) specifies: + > An implementation may set limits on the maximum depth of nesting. + + In this class, the array's limit of nesting is not explicitly constrained. + However, a maximum depth of nesting may be introduced by the compiler or + runtime environment. A theoretical limit can be queried by calling the + @ref max_size function of a JSON array. + + #### Storage + + Arrays are stored as pointers in a @ref basic_json type. 
That is, for any + access to array values, a pointer of type `array_t*` must be dereferenced. + + @sa @ref object_t -- type for an object value + + @since version 1.0.0 + */ + using array_t = ArrayType>; + + /*! + @brief a type for a string + + [RFC 7159](http://rfc7159.net/rfc7159) describes JSON strings as follows: + > A string is a sequence of zero or more Unicode characters. + + To store objects in C++, a type is defined by the template parameter + described below. Unicode values are split by the JSON class into + byte-sized characters during deserialization. + + @tparam StringType the container to store strings (e.g., `std::string`). + Note this container is used for keys/names in objects, see @ref object_t. + + #### Default type + + With the default values for @a StringType (`std::string`), the default + value for @a string_t is: + + @code {.cpp} + std::string + @endcode + + #### Encoding + + Strings are stored in UTF-8 encoding. Therefore, functions like + `std::string::size()` or `std::string::length()` return the number of + bytes in the string rather than the number of characters or glyphs. + + #### String comparison + + [RFC 7159](http://rfc7159.net/rfc7159) states: + > Software implementations are typically required to test names of object + > members for equality. Implementations that transform the textual + > representation into sequences of Unicode code units and then perform the + > comparison numerically, code unit by code unit, are interoperable in the + > sense that implementations will agree in all cases on equality or + > inequality of two strings. For example, implementations that compare + > strings with escaped characters unconverted may incorrectly find that + > `"a\\b"` and `"a\u005Cb"` are not equal. + + This implementation is interoperable as it does compare strings code unit + by code unit. + + #### Storage + + String values are stored as pointers in a @ref basic_json type. 
That is, + for any access to string values, a pointer of type `string_t*` must be + dereferenced. + + @since version 1.0.0 + */ + using string_t = StringType; + + /*! + @brief a type for a boolean + + [RFC 7159](http://rfc7159.net/rfc7159) implicitly describes a boolean as a + type which differentiates the two literals `true` and `false`. + + To store objects in C++, a type is defined by the template parameter @a + BooleanType which chooses the type to use. + + #### Default type + + With the default values for @a BooleanType (`bool`), the default value for + @a boolean_t is: + + @code {.cpp} + bool + @endcode + + #### Storage + + Boolean values are stored directly inside a @ref basic_json type. + + @since version 1.0.0 + */ + using boolean_t = BooleanType; + + /*! + @brief a type for a number (integer) + + [RFC 7159](http://rfc7159.net/rfc7159) describes numbers as follows: + > The representation of numbers is similar to that used in most + > programming languages. A number is represented in base 10 using decimal + > digits. It contains an integer component that may be prefixed with an + > optional minus sign, which may be followed by a fraction part and/or an + > exponent part. Leading zeros are not allowed. (...) Numeric values that + > cannot be represented in the grammar below (such as Infinity and NaN) + > are not permitted. + + This description includes both integer and floating-point numbers. + However, C++ allows more precise storage if it is known whether the number + is a signed integer, an unsigned integer or a floating-point number. + Therefore, three different types, @ref number_integer_t, @ref + number_unsigned_t and @ref number_float_t are used. + + To store integer numbers in C++, a type is defined by the template + parameter @a NumberIntegerType which chooses the type to use. 
+ + #### Default type + + With the default values for @a NumberIntegerType (`int64_t`), the default + value for @a number_integer_t is: + + @code {.cpp} + int64_t + @endcode + + #### Default behavior + + - The restrictions about leading zeros is not enforced in C++. Instead, + leading zeros in integer literals lead to an interpretation as octal + number. Internally, the value will be stored as decimal number. For + instance, the C++ integer literal `010` will be serialized to `8`. + During deserialization, leading zeros yield an error. + - Not-a-number (NaN) values will be serialized to `null`. + + #### Limits + + [RFC 7159](http://rfc7159.net/rfc7159) specifies: + > An implementation may set limits on the range and precision of numbers. + + When the default type is used, the maximal integer number that can be + stored is `9223372036854775807` (INT64_MAX) and the minimal integer number + that can be stored is `-9223372036854775808` (INT64_MIN). Integer numbers + that are out of range will yield over/underflow when used in a + constructor. During deserialization, too large or small integer numbers + will be automatically be stored as @ref number_unsigned_t or @ref + number_float_t. + + [RFC 7159](http://rfc7159.net/rfc7159) further states: + > Note that when such software is used, numbers that are integers and are + > in the range \f$[-2^{53}+1, 2^{53}-1]\f$ are interoperable in the sense + > that implementations will agree exactly on their numeric values. + + As this range is a subrange of the exactly supported range [INT64_MIN, + INT64_MAX], this class's integer type is interoperable. + + #### Storage + + Integer number values are stored directly inside a @ref basic_json type. + + @sa @ref number_float_t -- type for number values (floating-point) + + @sa @ref number_unsigned_t -- type for number values (unsigned integer) + + @since version 1.0.0 + */ + using number_integer_t = NumberIntegerType; + + /*! 
+ @brief a type for a number (unsigned) + + [RFC 7159](http://rfc7159.net/rfc7159) describes numbers as follows: + > The representation of numbers is similar to that used in most + > programming languages. A number is represented in base 10 using decimal + > digits. It contains an integer component that may be prefixed with an + > optional minus sign, which may be followed by a fraction part and/or an + > exponent part. Leading zeros are not allowed. (...) Numeric values that + > cannot be represented in the grammar below (such as Infinity and NaN) + > are not permitted. + + This description includes both integer and floating-point numbers. + However, C++ allows more precise storage if it is known whether the number + is a signed integer, an unsigned integer or a floating-point number. + Therefore, three different types, @ref number_integer_t, @ref + number_unsigned_t and @ref number_float_t are used. + + To store unsigned integer numbers in C++, a type is defined by the + template parameter @a NumberUnsignedType which chooses the type to use. + + #### Default type + + With the default values for @a NumberUnsignedType (`uint64_t`), the + default value for @a number_unsigned_t is: + + @code {.cpp} + uint64_t + @endcode + + #### Default behavior + + - The restrictions about leading zeros is not enforced in C++. Instead, + leading zeros in integer literals lead to an interpretation as octal + number. Internally, the value will be stored as decimal number. For + instance, the C++ integer literal `010` will be serialized to `8`. + During deserialization, leading zeros yield an error. + - Not-a-number (NaN) values will be serialized to `null`. + + #### Limits + + [RFC 7159](http://rfc7159.net/rfc7159) specifies: + > An implementation may set limits on the range and precision of numbers. + + When the default type is used, the maximal integer number that can be + stored is `18446744073709551615` (UINT64_MAX) and the minimal integer + number that can be stored is `0`. 
Integer numbers that are out of range + will yield over/underflow when used in a constructor. During + deserialization, too large or small integer numbers will be automatically + be stored as @ref number_integer_t or @ref number_float_t. + + [RFC 7159](http://rfc7159.net/rfc7159) further states: + > Note that when such software is used, numbers that are integers and are + > in the range \f$[-2^{53}+1, 2^{53}-1]\f$ are interoperable in the sense + > that implementations will agree exactly on their numeric values. + + As this range is a subrange (when considered in conjunction with the + number_integer_t type) of the exactly supported range [0, UINT64_MAX], + this class's integer type is interoperable. + + #### Storage + + Integer number values are stored directly inside a @ref basic_json type. + + @sa @ref number_float_t -- type for number values (floating-point) + @sa @ref number_integer_t -- type for number values (integer) + + @since version 2.0.0 + */ + using number_unsigned_t = NumberUnsignedType; + + /*! + @brief a type for a number (floating-point) + + [RFC 7159](http://rfc7159.net/rfc7159) describes numbers as follows: + > The representation of numbers is similar to that used in most + > programming languages. A number is represented in base 10 using decimal + > digits. It contains an integer component that may be prefixed with an + > optional minus sign, which may be followed by a fraction part and/or an + > exponent part. Leading zeros are not allowed. (...) Numeric values that + > cannot be represented in the grammar below (such as Infinity and NaN) + > are not permitted. + + This description includes both integer and floating-point numbers. + However, C++ allows more precise storage if it is known whether the number + is a signed integer, an unsigned integer or a floating-point number. + Therefore, three different types, @ref number_integer_t, @ref + number_unsigned_t and @ref number_float_t are used. 
+ + To store floating-point numbers in C++, a type is defined by the template + parameter @a NumberFloatType which chooses the type to use. + + #### Default type + + With the default values for @a NumberFloatType (`double`), the default + value for @a number_float_t is: + + @code {.cpp} + double + @endcode + + #### Default behavior + + - The restrictions about leading zeros is not enforced in C++. Instead, + leading zeros in floating-point literals will be ignored. Internally, + the value will be stored as decimal number. For instance, the C++ + floating-point literal `01.2` will be serialized to `1.2`. During + deserialization, leading zeros yield an error. + - Not-a-number (NaN) values will be serialized to `null`. + + #### Limits + + [RFC 7159](http://rfc7159.net/rfc7159) states: + > This specification allows implementations to set limits on the range and + > precision of numbers accepted. Since software that implements IEEE + > 754-2008 binary64 (double precision) numbers is generally available and + > widely used, good interoperability can be achieved by implementations + > that expect no more precision or range than these provide, in the sense + > that implementations will approximate JSON numbers within the expected + > precision. + + This implementation does exactly follow this approach, as it uses double + precision floating-point numbers. Note values smaller than + `-1.79769313486232e+308` and values greater than `1.79769313486232e+308` + will be stored as NaN internally and be serialized to `null`. + + #### Storage + + Floating-point number values are stored directly inside a @ref basic_json + type. + + @sa @ref number_integer_t -- type for number values (integer) + + @sa @ref number_unsigned_t -- type for number values (unsigned integer) + + @since version 1.0.0 + */ + using number_float_t = NumberFloatType; + + /*! 
+ @brief a type for a packed binary type + + This type is a type designed to carry binary data that appears in various + serialized formats, such as CBOR's Major Type 2, MessagePack's bin, and + BSON's generic binary subtype. This type is NOT a part of standard JSON and + exists solely for compatibility with these binary types. As such, it is + simply defined as an ordered sequence of zero or more byte values. + + Additionally, as an implementation detail, the subtype of the binary data is + carried around as a `std::uint8_t`, which is compatible with both of the + binary data formats that use binary subtyping, (though the specific + numbering is incompatible with each other, and it is up to the user to + translate between them). + + [CBOR's RFC 7049](https://tools.ietf.org/html/rfc7049) describes this type + as: + > Major type 2: a byte string. The string's length in bytes is represented + > following the rules for positive integers (major type 0). + + [MessagePack's documentation on the bin type + family](https://github.com/msgpack/msgpack/blob/master/spec.md#bin-format-family) + describes this type as: + > Bin format family stores an byte array in 2, 3, or 5 bytes of extra bytes + > in addition to the size of the byte array. + + [BSON's specifications](http://bsonspec.org/spec.html) describe several + binary types; however, this type is intended to represent the generic binary + type which has the description: + > Generic binary subtype - This is the most commonly used binary subtype and + > should be the 'default' for drivers and tools. + + None of these impose any limitations on the internal representation other + than the basic unit of storage be some type of array whose parts are + decomposable into bytes. + + The default representation of this binary format is a + `std::vector`, which is a very common way to represent a byte + array in modern C++. 
+ + #### Default type + + The default values for @a BinaryType is `std::vector` + + #### Storage + + Binary Arrays are stored as pointers in a @ref basic_json type. That is, + for any access to array values, a pointer of the type `binary_t*` must be + dereferenced. + + #### Notes on subtypes + + - CBOR + - Binary values are represented as byte strings. No subtypes are + supported and will be ignored when CBOR is written. + - MessagePack + - If a subtype is given and the binary array contains exactly 1, 2, 4, 8, + or 16 elements, the fixext family (fixext1, fixext2, fixext4, fixext8) + is used. For other sizes, the ext family (ext8, ext16, ext32) is used. + The subtype is then added as singed 8-bit integer. + - If no subtype is given, the bin family (bin8, bin16, bin32) is used. + - BSON + - If a subtype is given, it is used and added as unsigned 8-bit integer. + - If no subtype is given, the generic binary subtype 0x00 is used. + + @sa @ref binary -- create a binary array + + @since version 3.8.0 + */ + using binary_t = nlohmann::byte_container_with_subtype; + /// @} + + private: + + /// helper for exception-safe object creation + template + JSON_HEDLEY_RETURNS_NON_NULL + static T* create(Args&& ... args) + { + AllocatorType alloc; + using AllocatorTraits = std::allocator_traits>; + + auto deleter = [&](T * object) + { + AllocatorTraits::deallocate(alloc, object, 1); + }; + std::unique_ptr object(AllocatorTraits::allocate(alloc, 1), deleter); + AllocatorTraits::construct(alloc, object.get(), std::forward(args)...); + JSON_ASSERT(object != nullptr); + return object.release(); + } + + //////////////////////// + // JSON value storage // + //////////////////////// + + /*! + @brief a JSON value + + The actual storage for a JSON value of the @ref basic_json class. This + union combines the different storage types for the JSON value types + defined in @ref value_t. 
+ + JSON type | value_t type | used type + --------- | --------------- | ------------------------ + object | object | pointer to @ref object_t + array | array | pointer to @ref array_t + string | string | pointer to @ref string_t + boolean | boolean | @ref boolean_t + number | number_integer | @ref number_integer_t + number | number_unsigned | @ref number_unsigned_t + number | number_float | @ref number_float_t + binary | binary | pointer to @ref binary_t + null | null | *no value is stored* + + @note Variable-length types (objects, arrays, and strings) are stored as + pointers. The size of the union should not exceed 64 bits if the default + value types are used. + + @since version 1.0.0 + */ + union json_value + { + /// object (stored with pointer to save storage) + object_t* object; + /// array (stored with pointer to save storage) + array_t* array; + /// string (stored with pointer to save storage) + string_t* string; + /// binary (stored with pointer to save storage) + binary_t* binary; + /// boolean + boolean_t boolean; + /// number (integer) + number_integer_t number_integer; + /// number (unsigned integer) + number_unsigned_t number_unsigned; + /// number (floating-point) + number_float_t number_float; + + /// default constructor (for null values) + json_value() = default; + /// constructor for booleans + json_value(boolean_t v) noexcept : boolean(v) {} + /// constructor for numbers (integer) + json_value(number_integer_t v) noexcept : number_integer(v) {} + /// constructor for numbers (unsigned) + json_value(number_unsigned_t v) noexcept : number_unsigned(v) {} + /// constructor for numbers (floating-point) + json_value(number_float_t v) noexcept : number_float(v) {} + /// constructor for empty values of a given type + json_value(value_t t) + { + switch (t) + { + case value_t::object: + { + object = create(); + break; + } + + case value_t::array: + { + array = create(); + break; + } + + case value_t::string: + { + string = create(""); + break; + } + + case 
value_t::binary: + { + binary = create(); + break; + } + + case value_t::boolean: + { + boolean = boolean_t(false); + break; + } + + case value_t::number_integer: + { + number_integer = number_integer_t(0); + break; + } + + case value_t::number_unsigned: + { + number_unsigned = number_unsigned_t(0); + break; + } + + case value_t::number_float: + { + number_float = number_float_t(0.0); + break; + } + + case value_t::null: + { + object = nullptr; // silence warning, see #821 + break; + } + + default: + { + object = nullptr; // silence warning, see #821 + if (JSON_HEDLEY_UNLIKELY(t == value_t::null)) + { + JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.9.1")); // LCOV_EXCL_LINE + } + break; + } + } + } + + /// constructor for strings + json_value(const string_t& value) + { + string = create(value); + } + + /// constructor for rvalue strings + json_value(string_t&& value) + { + string = create(std::move(value)); + } + + /// constructor for objects + json_value(const object_t& value) + { + object = create(value); + } + + /// constructor for rvalue objects + json_value(object_t&& value) + { + object = create(std::move(value)); + } + + /// constructor for arrays + json_value(const array_t& value) + { + array = create(value); + } + + /// constructor for rvalue arrays + json_value(array_t&& value) + { + array = create(std::move(value)); + } + + /// constructor for binary arrays + json_value(const typename binary_t::container_type& value) + { + binary = create(value); + } + + /// constructor for rvalue binary arrays + json_value(typename binary_t::container_type&& value) + { + binary = create(std::move(value)); + } + + /// constructor for binary arrays (internal type) + json_value(const binary_t& value) + { + binary = create(value); + } + + /// constructor for rvalue binary arrays (internal type) + json_value(binary_t&& value) + { + binary = create(std::move(value)); + } + + void destroy(value_t t) noexcept + { + // flatten the current 
json_value to a heap-allocated stack + std::vector stack; + + // move the top-level items to stack + if (t == value_t::array) + { + stack.reserve(array->size()); + std::move(array->begin(), array->end(), std::back_inserter(stack)); + } + else if (t == value_t::object) + { + stack.reserve(object->size()); + for (auto&& it : *object) + { + stack.push_back(std::move(it.second)); + } + } + + while (!stack.empty()) + { + // move the last item to local variable to be processed + basic_json current_item(std::move(stack.back())); + stack.pop_back(); + + // if current_item is array/object, move + // its children to the stack to be processed later + if (current_item.is_array()) + { + std::move(current_item.m_value.array->begin(), current_item.m_value.array->end(), + std::back_inserter(stack)); + + current_item.m_value.array->clear(); + } + else if (current_item.is_object()) + { + for (auto&& it : *current_item.m_value.object) + { + stack.push_back(std::move(it.second)); + } + + current_item.m_value.object->clear(); + } + + // it's now safe that current_item get destructed + // since it doesn't have any children + } + + switch (t) + { + case value_t::object: + { + AllocatorType alloc; + std::allocator_traits::destroy(alloc, object); + std::allocator_traits::deallocate(alloc, object, 1); + break; + } + + case value_t::array: + { + AllocatorType alloc; + std::allocator_traits::destroy(alloc, array); + std::allocator_traits::deallocate(alloc, array, 1); + break; + } + + case value_t::string: + { + AllocatorType alloc; + std::allocator_traits::destroy(alloc, string); + std::allocator_traits::deallocate(alloc, string, 1); + break; + } + + case value_t::binary: + { + AllocatorType alloc; + std::allocator_traits::destroy(alloc, binary); + std::allocator_traits::deallocate(alloc, binary, 1); + break; + } + + default: + { + break; + } + } + } + }; + + /*! + @brief checks the class invariants + + This function asserts the class invariants. 
It needs to be called at the + end of every constructor to make sure that created objects respect the + invariant. Furthermore, it has to be called each time the type of a JSON + value is changed, because the invariant expresses a relationship between + @a m_type and @a m_value. + */ + void assert_invariant() const noexcept + { + JSON_ASSERT(m_type != value_t::object || m_value.object != nullptr); + JSON_ASSERT(m_type != value_t::array || m_value.array != nullptr); + JSON_ASSERT(m_type != value_t::string || m_value.string != nullptr); + JSON_ASSERT(m_type != value_t::binary || m_value.binary != nullptr); + } + + public: + ////////////////////////// + // JSON parser callback // + ////////////////////////// + + /*! + @brief parser event types + + The parser callback distinguishes the following events: + - `object_start`: the parser read `{` and started to process a JSON object + - `key`: the parser read a key of a value in an object + - `object_end`: the parser read `}` and finished processing a JSON object + - `array_start`: the parser read `[` and started to process a JSON array + - `array_end`: the parser read `]` and finished processing a JSON array + - `value`: the parser finished reading a JSON value + + @image html callback_events.png "Example when certain parse events are triggered" + + @sa @ref parser_callback_t for more information and examples + */ + using parse_event_t = detail::parse_event_t; + + /*! + @brief per-element parser callback type + + With a parser callback function, the result of parsing a JSON text can be + influenced. When passed to @ref parse, it is called on certain events + (passed as @ref parse_event_t via parameter @a event) with a set recursion + depth @a depth and context JSON value @a parsed. The return value of the + callback function is a boolean indicating whether the element that emitted + the callback shall be kept or not. 
+ + We distinguish six scenarios (determined by the event type) in which the + callback function can be called. The following table describes the values + of the parameters @a depth, @a event, and @a parsed. + + parameter @a event | description | parameter @a depth | parameter @a parsed + ------------------ | ----------- | ------------------ | ------------------- + parse_event_t::object_start | the parser read `{` and started to process a JSON object | depth of the parent of the JSON object | a JSON value with type discarded + parse_event_t::key | the parser read a key of a value in an object | depth of the currently parsed JSON object | a JSON string containing the key + parse_event_t::object_end | the parser read `}` and finished processing a JSON object | depth of the parent of the JSON object | the parsed JSON object + parse_event_t::array_start | the parser read `[` and started to process a JSON array | depth of the parent of the JSON array | a JSON value with type discarded + parse_event_t::array_end | the parser read `]` and finished processing a JSON array | depth of the parent of the JSON array | the parsed JSON array + parse_event_t::value | the parser finished reading a JSON value | depth of the value | the parsed JSON value + + @image html callback_events.png "Example when certain parse events are triggered" + + Discarding a value (i.e., returning `false`) has different effects + depending on the context in which function was called: + + - Discarded values in structured types are skipped. That is, the parser + will behave as if the discarded value was never read. + - In case a value outside a structured type is skipped, it is replaced + with `null`. This case happens if the top-level element is skipped. 
+ + @param[in] depth the depth of the recursion during parsing + + @param[in] event an event of type parse_event_t indicating the context in + the callback function has been called + + @param[in,out] parsed the current intermediate parse result; note that + writing to this value has no effect for parse_event_t::key events + + @return Whether the JSON value which called the function during parsing + should be kept (`true`) or not (`false`). In the latter case, it is either + skipped completely or replaced by an empty discarded object. + + @sa @ref parse for examples + + @since version 1.0.0 + */ + using parser_callback_t = detail::parser_callback_t; + + ////////////////// + // constructors // + ////////////////// + + /// @name constructors and destructors + /// Constructors of class @ref basic_json, copy/move constructor, copy + /// assignment, static functions creating objects, and the destructor. + /// @{ + + /*! + @brief create an empty value with a given type + + Create an empty JSON value with a given type. The value will be default + initialized with an empty value which depends on the type: + + Value type | initial value + ----------- | ------------- + null | `null` + boolean | `false` + string | `""` + number | `0` + object | `{}` + array | `[]` + binary | empty array + + @param[in] v the type of the value to create + + @complexity Constant. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. + + @liveexample{The following code shows the constructor for different @ref + value_t values,basic_json__value_t} + + @sa @ref clear() -- restores the postcondition of this constructor + + @since version 1.0.0 + */ + basic_json(const value_t v) + : m_type(v), m_value(v) + { + assert_invariant(); + } + + /*! + @brief create a null object + + Create a `null` JSON value. It either takes a null pointer as parameter + (explicitly creating `null`) or no parameter (implicitly creating `null`). 
+ The passed null pointer itself is not read -- it is only used to choose + the right constructor. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this constructor never throws + exceptions. + + @liveexample{The following code shows the constructor with and without a + null pointer parameter.,basic_json__nullptr_t} + + @since version 1.0.0 + */ + basic_json(std::nullptr_t = nullptr) noexcept + : basic_json(value_t::null) + { + assert_invariant(); + } + + /*! + @brief create a JSON value + + This is a "catch all" constructor for all compatible JSON types; that is, + types for which a `to_json()` method exists. The constructor forwards the + parameter @a val to that method (to `json_serializer::to_json` method + with `U = uncvref_t`, to be exact). + + Template type @a CompatibleType includes, but is not limited to, the + following types: + - **arrays**: @ref array_t and all kinds of compatible containers such as + `std::vector`, `std::deque`, `std::list`, `std::forward_list`, + `std::array`, `std::valarray`, `std::set`, `std::unordered_set`, + `std::multiset`, and `std::unordered_multiset` with a `value_type` from + which a @ref basic_json value can be constructed. + - **objects**: @ref object_t and all kinds of compatible associative + containers such as `std::map`, `std::unordered_map`, `std::multimap`, + and `std::unordered_multimap` with a `key_type` compatible to + @ref string_t and a `value_type` from which a @ref basic_json value can + be constructed. + - **strings**: @ref string_t, string literals, and all compatible string + containers can be used. + - **numbers**: @ref number_integer_t, @ref number_unsigned_t, + @ref number_float_t, and all convertible number types such as `int`, + `size_t`, `int64_t`, `float` or `double` can be used. + - **boolean**: @ref boolean_t / `bool` can be used. 
+ - **binary**: @ref binary_t / `std::vector` may be used, + unfortunately because string literals cannot be distinguished from binary + character arrays by the C++ type system, all types compatible with `const + char*` will be directed to the string constructor instead. This is both + for backwards compatibility, and due to the fact that a binary type is not + a standard JSON type. + + See the examples below. + + @tparam CompatibleType a type such that: + - @a CompatibleType is not derived from `std::istream`, + - @a CompatibleType is not @ref basic_json (to avoid hijacking copy/move + constructors), + - @a CompatibleType is not a different @ref basic_json type (i.e. with different template arguments) + - @a CompatibleType is not a @ref basic_json nested type (e.g., + @ref json_pointer, @ref iterator, etc ...) + - @ref @ref json_serializer has a + `to_json(basic_json_t&, CompatibleType&&)` method + + @tparam U = `uncvref_t` + + @param[in] val the value to be forwarded to the respective constructor + + @complexity Usually linear in the size of the passed @a val, also + depending on the implementation of the called `to_json()` + method. + + @exceptionsafety Depends on the called constructor. For types directly + supported by the library (i.e., all types for which no `to_json()` function + was provided), strong guarantee holds: if an exception is thrown, there are + no changes to any JSON value. + + @liveexample{The following code shows the constructor with several + compatible types.,basic_json__CompatibleType} + + @since version 2.1.0 + */ + template < typename CompatibleType, + typename U = detail::uncvref_t, + detail::enable_if_t < + !detail::is_basic_json::value && detail::is_compatible_type::value, int > = 0 > + basic_json(CompatibleType && val) noexcept(noexcept( + JSONSerializer::to_json(std::declval(), + std::forward(val)))) + { + JSONSerializer::to_json(*this, std::forward(val)); + assert_invariant(); + } + + /*! 
+ @brief create a JSON value from an existing one + + This is a constructor for existing @ref basic_json types. + It does not hijack copy/move constructors, since the parameter has different + template arguments than the current ones. + + The constructor tries to convert the internal @ref m_value of the parameter. + + @tparam BasicJsonType a type such that: + - @a BasicJsonType is a @ref basic_json type. + - @a BasicJsonType has different template arguments than @ref basic_json_t. + + @param[in] val the @ref basic_json value to be converted. + + @complexity Usually linear in the size of the passed @a val, also + depending on the implementation of the called `to_json()` + method. + + @exceptionsafety Depends on the called constructor. For types directly + supported by the library (i.e., all types for which no `to_json()` function + was provided), strong guarantee holds: if an exception is thrown, there are + no changes to any JSON value. + + @since version 3.2.0 + */ + template < typename BasicJsonType, + detail::enable_if_t < + detail::is_basic_json::value&& !std::is_same::value, int > = 0 > + basic_json(const BasicJsonType& val) + { + using other_boolean_t = typename BasicJsonType::boolean_t; + using other_number_float_t = typename BasicJsonType::number_float_t; + using other_number_integer_t = typename BasicJsonType::number_integer_t; + using other_number_unsigned_t = typename BasicJsonType::number_unsigned_t; + using other_string_t = typename BasicJsonType::string_t; + using other_object_t = typename BasicJsonType::object_t; + using other_array_t = typename BasicJsonType::array_t; + using other_binary_t = typename BasicJsonType::binary_t; + + switch (val.type()) + { + case value_t::boolean: + JSONSerializer::to_json(*this, val.template get()); + break; + case value_t::number_float: + JSONSerializer::to_json(*this, val.template get()); + break; + case value_t::number_integer: + JSONSerializer::to_json(*this, val.template get()); + break; + case 
value_t::number_unsigned: + JSONSerializer::to_json(*this, val.template get()); + break; + case value_t::string: + JSONSerializer::to_json(*this, val.template get_ref()); + break; + case value_t::object: + JSONSerializer::to_json(*this, val.template get_ref()); + break; + case value_t::array: + JSONSerializer::to_json(*this, val.template get_ref()); + break; + case value_t::binary: + JSONSerializer::to_json(*this, val.template get_ref()); + break; + case value_t::null: + *this = nullptr; + break; + case value_t::discarded: + m_type = value_t::discarded; + break; + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } + assert_invariant(); + } + + /*! + @brief create a container (array or object) from an initializer list + + Creates a JSON value of type array or object from the passed initializer + list @a init. In case @a type_deduction is `true` (default), the type of + the JSON value to be created is deducted from the initializer list @a init + according to the following rules: + + 1. If the list is empty, an empty JSON object value `{}` is created. + 2. If the list consists of pairs whose first element is a string, a JSON + object value is created where the first elements of the pairs are + treated as keys and the second elements are as values. + 3. In all other cases, an array is created. + + The rules aim to create the best fit between a C++ initializer list and + JSON values. The rationale is as follows: + + 1. The empty initializer list is written as `{}` which is exactly an empty + JSON object. + 2. C++ has no way of describing mapped types other than to list a list of + pairs. As JSON requires that keys must be of type string, rule 2 is the + weakest constraint one can pose on initializer lists to interpret them + as an object. + 3. In all other cases, the initializer list could not be interpreted as + JSON object type, so interpreting it as JSON array type is safe. 
+ + With the rules described above, the following JSON values cannot be + expressed by an initializer list: + + - the empty array (`[]`): use @ref array(initializer_list_t) + with an empty initializer list in this case + - arrays whose elements satisfy rule 2: use @ref + array(initializer_list_t) with the same initializer list + in this case + + @note When used without parentheses around an empty initializer list, @ref + basic_json() is called instead of this function, yielding the JSON null + value. + + @param[in] init initializer list with JSON values + + @param[in] type_deduction internal parameter; when set to `true`, the type + of the JSON value is deducted from the initializer list @a init; when set + to `false`, the type provided via @a manual_type is forced. This mode is + used by the functions @ref array(initializer_list_t) and + @ref object(initializer_list_t). + + @param[in] manual_type internal parameter; when @a type_deduction is set + to `false`, the created JSON value will use the provided type (only @ref + value_t::array and @ref value_t::object are valid); when @a type_deduction + is set to `true`, this parameter has no effect + + @throw type_error.301 if @a type_deduction is `false`, @a manual_type is + `value_t::object`, but @a init contains an element which is not a pair + whose first element is a string. In this case, the constructor could not + create an object. If @a type_deduction would have be `true`, an array + would have been created. See @ref object(initializer_list_t) + for an example. + + @complexity Linear in the size of the initializer list @a init. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. 
+ + @liveexample{The example below shows how JSON values are created from + initializer lists.,basic_json__list_init_t} + + @sa @ref array(initializer_list_t) -- create a JSON array + value from an initializer list + @sa @ref object(initializer_list_t) -- create a JSON object + value from an initializer list + + @since version 1.0.0 + */ + basic_json(initializer_list_t init, + bool type_deduction = true, + value_t manual_type = value_t::array) + { + // check if each element is an array with two elements whose first + // element is a string + bool is_an_object = std::all_of(init.begin(), init.end(), + [](const detail::json_ref& element_ref) + { + return element_ref->is_array() && element_ref->size() == 2 && (*element_ref)[0].is_string(); + }); + + // adjust type if type deduction is not wanted + if (!type_deduction) + { + // if array is wanted, do not create an object though possible + if (manual_type == value_t::array) + { + is_an_object = false; + } + + // if object is wanted but impossible, throw an exception + if (JSON_HEDLEY_UNLIKELY(manual_type == value_t::object && !is_an_object)) + { + JSON_THROW(type_error::create(301, "cannot create object from initializer list")); + } + } + + if (is_an_object) + { + // the initializer list is a list of pairs -> create object + m_type = value_t::object; + m_value = value_t::object; + + std::for_each(init.begin(), init.end(), [this](const detail::json_ref& element_ref) + { + auto element = element_ref.moved_or_copied(); + m_value.object->emplace( + std::move(*((*element.m_value.array)[0].m_value.string)), + std::move((*element.m_value.array)[1])); + }); + } + else + { + // the initializer list describes an array -> create array + m_type = value_t::array; + m_value.array = create(init.begin(), init.end()); + } + + assert_invariant(); + } + + /*! + @brief explicitly create a binary array (without subtype) + + Creates a JSON binary array value from a given binary container. 
Binary + values are part of various binary formats, such as CBOR, MessagePack, and + BSON. This constructor is used to create a value for serialization to those + formats. + + @note Note, this function exists because of the difficulty in correctly + specifying the correct template overload in the standard value ctor, as both + JSON arrays and JSON binary arrays are backed with some form of a + `std::vector`. Because JSON binary arrays are a non-standard extension it + was decided that it would be best to prevent automatic initialization of a + binary array type, for backwards compatibility and so it does not happen on + accident. + + @param[in] init container containing bytes to use as binary type + + @return JSON binary array value + + @complexity Linear in the size of @a init. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. + + @since version 3.8.0 + */ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(const typename binary_t::container_type& init) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = init; + return res; + } + + /*! + @brief explicitly create a binary array (with subtype) + + Creates a JSON binary array value from a given binary container. Binary + values are part of various binary formats, such as CBOR, MessagePack, and + BSON. This constructor is used to create a value for serialization to those + formats. + + @note Note, this function exists because of the difficulty in correctly + specifying the correct template overload in the standard value ctor, as both + JSON arrays and JSON binary arrays are backed with some form of a + `std::vector`. Because JSON binary arrays are a non-standard extension it + was decided that it would be best to prevent automatic initialization of a + binary array type, for backwards compatibility and so it does not happen on + accident. 
+ + @param[in] init container containing bytes to use as binary type + @param[in] subtype subtype to use in MessagePack and BSON + + @return JSON binary array value + + @complexity Linear in the size of @a init. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. + + @since version 3.8.0 + */ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(const typename binary_t::container_type& init, std::uint8_t subtype) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = binary_t(init, subtype); + return res; + } + + /// @copydoc binary(const typename binary_t::container_type&) + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(typename binary_t::container_type&& init) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = std::move(init); + return res; + } + + /// @copydoc binary(const typename binary_t::container_type&, std::uint8_t) + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json binary(typename binary_t::container_type&& init, std::uint8_t subtype) + { + auto res = basic_json(); + res.m_type = value_t::binary; + res.m_value = binary_t(std::move(init), subtype); + return res; + } + + /*! + @brief explicitly create an array from an initializer list + + Creates a JSON array value from a given initializer list. That is, given a + list of values `a, b, c`, creates the JSON value `[a, b, c]`. If the + initializer list is empty, the empty array `[]` is created. + + @note This function is only needed to express two edge cases that cannot + be realized with the initializer list constructor (@ref + basic_json(initializer_list_t, bool, value_t)). These cases + are: + 1. creating an array whose elements are all pairs whose first element is a + string -- in this case, the initializer list constructor would create an + object, taking the first elements as keys + 2. 
creating an empty array -- passing the empty initializer list to the + initializer list constructor yields an empty object + + @param[in] init initializer list with JSON values to create an array from + (optional) + + @return JSON array value + + @complexity Linear in the size of @a init. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. + + @liveexample{The following code shows an example for the `array` + function.,array} + + @sa @ref basic_json(initializer_list_t, bool, value_t) -- + create a JSON value from an initializer list + @sa @ref object(initializer_list_t) -- create a JSON object + value from an initializer list + + @since version 1.0.0 + */ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json array(initializer_list_t init = {}) + { + return basic_json(init, false, value_t::array); + } + + /*! + @brief explicitly create an object from an initializer list + + Creates a JSON object value from a given initializer list. The initializer + lists elements must be pairs, and their first elements must be strings. If + the initializer list is empty, the empty object `{}` is created. + + @note This function is only added for symmetry reasons. In contrast to the + related function @ref array(initializer_list_t), there are + no cases which can only be expressed by this function. That is, any + initializer list @a init can also be passed to the initializer list + constructor @ref basic_json(initializer_list_t, bool, value_t). + + @param[in] init initializer list to create an object from (optional) + + @return JSON object value + + @throw type_error.301 if @a init is not a list of pairs whose first + elements are strings. In this case, no object can be created. When such a + value is passed to @ref basic_json(initializer_list_t, bool, value_t), + an array would have been created from the passed initializer list @a init. + See example below. + + @complexity Linear in the size of @a init. 
+ + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. + + @liveexample{The following code shows an example for the `object` + function.,object} + + @sa @ref basic_json(initializer_list_t, bool, value_t) -- + create a JSON value from an initializer list + @sa @ref array(initializer_list_t) -- create a JSON array + value from an initializer list + + @since version 1.0.0 + */ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json object(initializer_list_t init = {}) + { + return basic_json(init, false, value_t::object); + } + + /*! + @brief construct an array with count copies of given value + + Constructs a JSON array value by creating @a cnt copies of a passed value. + In case @a cnt is `0`, an empty array is created. + + @param[in] cnt the number of JSON copies of @a val to create + @param[in] val the JSON value to copy + + @post `std::distance(begin(),end()) == cnt` holds. + + @complexity Linear in @a cnt. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. + + @liveexample{The following code shows examples for the @ref + basic_json(size_type\, const basic_json&) + constructor.,basic_json__size_type_basic_json} + + @since version 1.0.0 + */ + basic_json(size_type cnt, const basic_json& val) + : m_type(value_t::array) + { + m_value.array = create(cnt, val); + assert_invariant(); + } + + /*! + @brief construct a JSON container given an iterator range + + Constructs the JSON value with the contents of the range `[first, last)`. + The semantics depends on the different types a JSON value can have: + - In case of a null type, invalid_iterator.206 is thrown. + - In case of other primitive types (number, boolean, or string), @a first + must be `begin()` and @a last must be `end()`. In this case, the value is + copied. Otherwise, invalid_iterator.204 is thrown. 
+ - In case of structured types (array, object), the constructor behaves as + similar versions for `std::vector` or `std::map`; that is, a JSON array + or object is constructed from the values in the range. + + @tparam InputIT an input iterator type (@ref iterator or @ref + const_iterator) + + @param[in] first begin of the range to copy from (included) + @param[in] last end of the range to copy from (excluded) + + @pre Iterators @a first and @a last must be initialized. **This + precondition is enforced with an assertion (see warning).** If + assertions are switched off, a violation of this precondition yields + undefined behavior. + + @pre Range `[first, last)` is valid. Usually, this precondition cannot be + checked efficiently. Only certain edge cases are detected; see the + description of the exceptions below. A violation of this precondition + yields undefined behavior. + + @warning A precondition is enforced with a runtime assertion that will + result in calling `std::abort` if this precondition is not met. + Assertions can be disabled by defining `NDEBUG` at compile time. + See https://en.cppreference.com/w/cpp/error/assert for more + information. + + @throw invalid_iterator.201 if iterators @a first and @a last are not + compatible (i.e., do not belong to the same JSON value). In this case, + the range `[first, last)` is undefined. + @throw invalid_iterator.204 if iterators @a first and @a last belong to a + primitive type (number, boolean, or string), but @a first does not point + to the first element any more. In this case, the range `[first, last)` is + undefined. See example code below. + @throw invalid_iterator.206 if iterators @a first and @a last belong to a + null value. In this case, the range `[first, last)` is undefined. + + @complexity Linear in distance between @a first and @a last. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. 
+ + @liveexample{The example below shows several ways to create JSON values by + specifying a subrange with iterators.,basic_json__InputIt_InputIt} + + @since version 1.0.0 + */ + template < class InputIT, typename std::enable_if < + std::is_same::value || + std::is_same::value, int >::type = 0 > + basic_json(InputIT first, InputIT last) + { + JSON_ASSERT(first.m_object != nullptr); + JSON_ASSERT(last.m_object != nullptr); + + // make sure iterator fits the current value + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(201, "iterators are not compatible")); + } + + // copy type from first iterator + m_type = first.m_object->m_type; + + // check if iterator range is complete for primitive values + switch (m_type) + { + case value_t::boolean: + case value_t::number_float: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::string: + { + if (JSON_HEDLEY_UNLIKELY(!first.m_it.primitive_iterator.is_begin() + || !last.m_it.primitive_iterator.is_end())) + { + JSON_THROW(invalid_iterator::create(204, "iterators out of range")); + } + break; + } + + default: + break; + } + + switch (m_type) + { + case value_t::number_integer: + { + m_value.number_integer = first.m_object->m_value.number_integer; + break; + } + + case value_t::number_unsigned: + { + m_value.number_unsigned = first.m_object->m_value.number_unsigned; + break; + } + + case value_t::number_float: + { + m_value.number_float = first.m_object->m_value.number_float; + break; + } + + case value_t::boolean: + { + m_value.boolean = first.m_object->m_value.boolean; + break; + } + + case value_t::string: + { + m_value = *first.m_object->m_value.string; + break; + } + + case value_t::object: + { + m_value.object = create(first.m_it.object_iterator, + last.m_it.object_iterator); + break; + } + + case value_t::array: + { + m_value.array = create(first.m_it.array_iterator, + last.m_it.array_iterator); + break; + } + + case value_t::binary: + { 
+ m_value = *first.m_object->m_value.binary; + break; + } + + default: + JSON_THROW(invalid_iterator::create(206, "cannot construct with iterators from " + + std::string(first.m_object->type_name()))); + } + + assert_invariant(); + } + + + /////////////////////////////////////// + // other constructors and destructor // + /////////////////////////////////////// + + template, + std::is_same>::value, int> = 0 > + basic_json(const JsonRef& ref) : basic_json(ref.moved_or_copied()) {} + + /*! + @brief copy constructor + + Creates a copy of a given JSON value. + + @param[in] other the JSON value to copy + + @post `*this == other` + + @complexity Linear in the size of @a other. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes to any JSON value. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is linear. + - As postcondition, it holds: `other == basic_json(other)`. 
+ + @liveexample{The following code shows an example for the copy + constructor.,basic_json__basic_json} + + @since version 1.0.0 + */ + basic_json(const basic_json& other) + : m_type(other.m_type) + { + // check of passed value is valid + other.assert_invariant(); + + switch (m_type) + { + case value_t::object: + { + m_value = *other.m_value.object; + break; + } + + case value_t::array: + { + m_value = *other.m_value.array; + break; + } + + case value_t::string: + { + m_value = *other.m_value.string; + break; + } + + case value_t::boolean: + { + m_value = other.m_value.boolean; + break; + } + + case value_t::number_integer: + { + m_value = other.m_value.number_integer; + break; + } + + case value_t::number_unsigned: + { + m_value = other.m_value.number_unsigned; + break; + } + + case value_t::number_float: + { + m_value = other.m_value.number_float; + break; + } + + case value_t::binary: + { + m_value = *other.m_value.binary; + break; + } + + default: + break; + } + + assert_invariant(); + } + + /*! + @brief move constructor + + Move constructor. Constructs a JSON value with the contents of the given + value @a other using move semantics. It "steals" the resources from @a + other and leaves it as JSON null value. + + @param[in,out] other value to move to this object + + @post `*this` has the same value as @a other before the call. + @post @a other is a JSON null value. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this constructor never throws + exceptions. + + @requirement This function helps `basic_json` satisfying the + [MoveConstructible](https://en.cppreference.com/w/cpp/named_req/MoveConstructible) + requirements. 
+ + @liveexample{The code below shows the move constructor explicitly called + via std::move.,basic_json__moveconstructor} + + @since version 1.0.0 + */ + basic_json(basic_json&& other) noexcept + : m_type(std::move(other.m_type)), + m_value(std::move(other.m_value)) + { + // check that passed value is valid + other.assert_invariant(); + + // invalidate payload + other.m_type = value_t::null; + other.m_value = {}; + + assert_invariant(); + } + + /*! + @brief copy assignment + + Copy assignment operator. Copies a JSON value via the "copy and swap" + strategy: It is expressed in terms of the copy constructor, destructor, + and the `swap()` member function. + + @param[in] other value to copy from + + @complexity Linear. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is linear. + + @liveexample{The code below shows and example for the copy assignment. It + creates a copy of value `a` which is then swapped with `b`. Finally\, the + copy of `a` (which is the null value after the swap) is + destroyed.,basic_json__copyassignment} + + @since version 1.0.0 + */ + basic_json& operator=(basic_json other) noexcept ( + std::is_nothrow_move_constructible::value&& + std::is_nothrow_move_assignable::value&& + std::is_nothrow_move_constructible::value&& + std::is_nothrow_move_assignable::value + ) + { + // check that passed value is valid + other.assert_invariant(); + + using std::swap; + swap(m_type, other.m_type); + swap(m_value, other.m_value); + + assert_invariant(); + return *this; + } + + /*! + @brief destructor + + Destroys the JSON value and frees all allocated memory. + + @complexity Linear. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is linear. + - All stored elements are destroyed and all memory is freed. 
+ + @since version 1.0.0 + */ + ~basic_json() noexcept + { + assert_invariant(); + m_value.destroy(m_type); + } + + /// @} + + public: + /////////////////////// + // object inspection // + /////////////////////// + + /// @name object inspection + /// Functions to inspect the type of a JSON value. + /// @{ + + /*! + @brief serialization + + Serialization function for JSON values. The function tries to mimic + Python's `json.dumps()` function, and currently supports its @a indent + and @a ensure_ascii parameters. + + @param[in] indent If indent is nonnegative, then array elements and object + members will be pretty-printed with that indent level. An indent level of + `0` will only insert newlines. `-1` (the default) selects the most compact + representation. + @param[in] indent_char The character to use for indentation if @a indent is + greater than `0`. The default is ` ` (space). + @param[in] ensure_ascii If @a ensure_ascii is true, all non-ASCII characters + in the output are escaped with `\uXXXX` sequences, and the result consists + of ASCII characters only. + @param[in] error_handler how to react on decoding errors; there are three + possible values: `strict` (throws and exception in case a decoding error + occurs; default), `replace` (replace invalid UTF-8 sequences with U+FFFD), + and `ignore` (ignore invalid UTF-8 sequences during serialization; all + bytes are copied to the output unchanged). + + @return string containing the serialization of the JSON value + + @throw type_error.316 if a string stored inside the JSON value is not + UTF-8 encoded and @a error_handler is set to strict + + @note Binary values are serialized as object containing two keys: + - "bytes": an array of bytes as integers + - "subtype": the subtype as integer or "null" if the binary has no subtype + + @complexity Linear. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. 
+ + @liveexample{The following example shows the effect of different @a indent\, + @a indent_char\, and @a ensure_ascii parameters to the result of the + serialization.,dump} + + @see https://docs.python.org/2/library/json.html#json.dump + + @since version 1.0.0; indentation character @a indent_char, option + @a ensure_ascii and exceptions added in version 3.0.0; error + handlers added in version 3.4.0; serialization of binary values added + in version 3.8.0. + */ + string_t dump(const int indent = -1, + const char indent_char = ' ', + const bool ensure_ascii = false, + const error_handler_t error_handler = error_handler_t::strict) const + { + string_t result; + serializer s(detail::output_adapter(result), indent_char, error_handler); + + if (indent >= 0) + { + s.dump(*this, true, ensure_ascii, static_cast(indent)); + } + else + { + s.dump(*this, false, ensure_ascii, 0); + } + + return result; + } + + /*! + @brief return the type of the JSON value (explicit) + + Return the type of the JSON value as a value from the @ref value_t + enumeration. + + @return the type of the JSON value + Value type | return value + ------------------------- | ------------------------- + null | value_t::null + boolean | value_t::boolean + string | value_t::string + number (integer) | value_t::number_integer + number (unsigned integer) | value_t::number_unsigned + number (floating-point) | value_t::number_float + object | value_t::object + array | value_t::array + binary | value_t::binary + discarded | value_t::discarded + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `type()` for all JSON + types.,type} + + @sa @ref operator value_t() -- return the type of the JSON value (implicit) + @sa @ref type_name() -- return the type as string + + @since version 1.0.0 + */ + constexpr value_t type() const noexcept + { + return m_type; + } + + /*! 
+ @brief return whether type is primitive + + This function returns true if and only if the JSON type is primitive + (string, number, boolean, or null). + + @return `true` if type is primitive (string, number, boolean, or null), + `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_primitive()` for all JSON + types.,is_primitive} + + @sa @ref is_structured() -- returns whether JSON value is structured + @sa @ref is_null() -- returns whether JSON value is `null` + @sa @ref is_string() -- returns whether JSON value is a string + @sa @ref is_boolean() -- returns whether JSON value is a boolean + @sa @ref is_number() -- returns whether JSON value is a number + @sa @ref is_binary() -- returns whether JSON value is a binary array + + @since version 1.0.0 + */ + constexpr bool is_primitive() const noexcept + { + return is_null() || is_string() || is_boolean() || is_number() || is_binary(); + } + + /*! + @brief return whether type is structured + + This function returns true if and only if the JSON type is structured + (array or object). + + @return `true` if type is structured (array or object), `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_structured()` for all JSON + types.,is_structured} + + @sa @ref is_primitive() -- returns whether value is primitive + @sa @ref is_array() -- returns whether value is an array + @sa @ref is_object() -- returns whether value is an object + + @since version 1.0.0 + */ + constexpr bool is_structured() const noexcept + { + return is_array() || is_object(); + } + + /*! + @brief return whether value is null + + This function returns true if and only if the JSON value is null. + + @return `true` if type is null, `false` otherwise. + + @complexity Constant. 
+ + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_null()` for all JSON + types.,is_null} + + @since version 1.0.0 + */ + constexpr bool is_null() const noexcept + { + return m_type == value_t::null; + } + + /*! + @brief return whether value is a boolean + + This function returns true if and only if the JSON value is a boolean. + + @return `true` if type is boolean, `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_boolean()` for all JSON + types.,is_boolean} + + @since version 1.0.0 + */ + constexpr bool is_boolean() const noexcept + { + return m_type == value_t::boolean; + } + + /*! + @brief return whether value is a number + + This function returns true if and only if the JSON value is a number. This + includes both integer (signed and unsigned) and floating-point values. + + @return `true` if type is number (regardless whether integer, unsigned + integer or floating-type), `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_number()` for all JSON + types.,is_number} + + @sa @ref is_number_integer() -- check if value is an integer or unsigned + integer number + @sa @ref is_number_unsigned() -- check if value is an unsigned integer + number + @sa @ref is_number_float() -- check if value is a floating-point number + + @since version 1.0.0 + */ + constexpr bool is_number() const noexcept + { + return is_number_integer() || is_number_float(); + } + + /*! + @brief return whether value is an integer number + + This function returns true if and only if the JSON value is a signed or + unsigned integer number. This excludes floating-point values. 
+ + @return `true` if type is an integer or unsigned integer number, `false` + otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_number_integer()` for all + JSON types.,is_number_integer} + + @sa @ref is_number() -- check if value is a number + @sa @ref is_number_unsigned() -- check if value is an unsigned integer + number + @sa @ref is_number_float() -- check if value is a floating-point number + + @since version 1.0.0 + */ + constexpr bool is_number_integer() const noexcept + { + return m_type == value_t::number_integer || m_type == value_t::number_unsigned; + } + + /*! + @brief return whether value is an unsigned integer number + + This function returns true if and only if the JSON value is an unsigned + integer number. This excludes floating-point and signed integer values. + + @return `true` if type is an unsigned integer number, `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_number_unsigned()` for all + JSON types.,is_number_unsigned} + + @sa @ref is_number() -- check if value is a number + @sa @ref is_number_integer() -- check if value is an integer or unsigned + integer number + @sa @ref is_number_float() -- check if value is a floating-point number + + @since version 2.0.0 + */ + constexpr bool is_number_unsigned() const noexcept + { + return m_type == value_t::number_unsigned; + } + + /*! + @brief return whether value is a floating-point number + + This function returns true if and only if the JSON value is a + floating-point number. This excludes signed and unsigned integer values. + + @return `true` if type is a floating-point number, `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. 
+ + @liveexample{The following code exemplifies `is_number_float()` for all + JSON types.,is_number_float} + + @sa @ref is_number() -- check if value is number + @sa @ref is_number_integer() -- check if value is an integer number + @sa @ref is_number_unsigned() -- check if value is an unsigned integer + number + + @since version 1.0.0 + */ + constexpr bool is_number_float() const noexcept + { + return m_type == value_t::number_float; + } + + /*! + @brief return whether value is an object + + This function returns true if and only if the JSON value is an object. + + @return `true` if type is object, `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_object()` for all JSON + types.,is_object} + + @since version 1.0.0 + */ + constexpr bool is_object() const noexcept + { + return m_type == value_t::object; + } + + /*! + @brief return whether value is an array + + This function returns true if and only if the JSON value is an array. + + @return `true` if type is array, `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_array()` for all JSON + types.,is_array} + + @since version 1.0.0 + */ + constexpr bool is_array() const noexcept + { + return m_type == value_t::array; + } + + /*! + @brief return whether value is a string + + This function returns true if and only if the JSON value is a string. + + @return `true` if type is string, `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_string()` for all JSON + types.,is_string} + + @since version 1.0.0 + */ + constexpr bool is_string() const noexcept + { + return m_type == value_t::string; + } + + /*! 
+ @brief return whether value is a binary array + + This function returns true if and only if the JSON value is a binary array. + + @return `true` if type is binary array, `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_binary()` for all JSON + types.,is_binary} + + @since version 3.8.0 + */ + constexpr bool is_binary() const noexcept + { + return m_type == value_t::binary; + } + + /*! + @brief return whether value is discarded + + This function returns true if and only if the JSON value was discarded + during parsing with a callback function (see @ref parser_callback_t). + + @note This function will always be `false` for JSON values after parsing. + That is, discarded values can only occur during parsing, but will be + removed when inside a structured value or replaced by null in other cases. + + @return `true` if type is discarded, `false` otherwise. + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. + + @liveexample{The following code exemplifies `is_discarded()` for all JSON + types.,is_discarded} + + @since version 1.0.0 + */ + constexpr bool is_discarded() const noexcept + { + return m_type == value_t::discarded; + } + + /*! + @brief return the type of the JSON value (implicit) + + Implicitly return the type of the JSON value as a value from the @ref + value_t enumeration. + + @return the type of the JSON value + + @complexity Constant. + + @exceptionsafety No-throw guarantee: this member function never throws + exceptions. 
+ + @liveexample{The following code exemplifies the @ref value_t operator for + all JSON types.,operator__value_t} + + @sa @ref type() -- return the type of the JSON value (explicit) + @sa @ref type_name() -- return the type as string + + @since version 1.0.0 + */ + constexpr operator value_t() const noexcept + { + return m_type; + } + + /// @} + + private: + ////////////////// + // value access // + ////////////////// + + /// get a boolean (explicit) + boolean_t get_impl(boolean_t* /*unused*/) const + { + if (JSON_HEDLEY_LIKELY(is_boolean())) + { + return m_value.boolean; + } + + JSON_THROW(type_error::create(302, "type must be boolean, but is " + std::string(type_name()))); + } + + /// get a pointer to the value (object) + object_t* get_impl_ptr(object_t* /*unused*/) noexcept + { + return is_object() ? m_value.object : nullptr; + } + + /// get a pointer to the value (object) + constexpr const object_t* get_impl_ptr(const object_t* /*unused*/) const noexcept + { + return is_object() ? m_value.object : nullptr; + } + + /// get a pointer to the value (array) + array_t* get_impl_ptr(array_t* /*unused*/) noexcept + { + return is_array() ? m_value.array : nullptr; + } + + /// get a pointer to the value (array) + constexpr const array_t* get_impl_ptr(const array_t* /*unused*/) const noexcept + { + return is_array() ? m_value.array : nullptr; + } + + /// get a pointer to the value (string) + string_t* get_impl_ptr(string_t* /*unused*/) noexcept + { + return is_string() ? m_value.string : nullptr; + } + + /// get a pointer to the value (string) + constexpr const string_t* get_impl_ptr(const string_t* /*unused*/) const noexcept + { + return is_string() ? m_value.string : nullptr; + } + + /// get a pointer to the value (boolean) + boolean_t* get_impl_ptr(boolean_t* /*unused*/) noexcept + { + return is_boolean() ? 
&m_value.boolean : nullptr; + } + + /// get a pointer to the value (boolean) + constexpr const boolean_t* get_impl_ptr(const boolean_t* /*unused*/) const noexcept + { + return is_boolean() ? &m_value.boolean : nullptr; + } + + /// get a pointer to the value (integer number) + number_integer_t* get_impl_ptr(number_integer_t* /*unused*/) noexcept + { + return is_number_integer() ? &m_value.number_integer : nullptr; + } + + /// get a pointer to the value (integer number) + constexpr const number_integer_t* get_impl_ptr(const number_integer_t* /*unused*/) const noexcept + { + return is_number_integer() ? &m_value.number_integer : nullptr; + } + + /// get a pointer to the value (unsigned number) + number_unsigned_t* get_impl_ptr(number_unsigned_t* /*unused*/) noexcept + { + return is_number_unsigned() ? &m_value.number_unsigned : nullptr; + } + + /// get a pointer to the value (unsigned number) + constexpr const number_unsigned_t* get_impl_ptr(const number_unsigned_t* /*unused*/) const noexcept + { + return is_number_unsigned() ? &m_value.number_unsigned : nullptr; + } + + /// get a pointer to the value (floating-point number) + number_float_t* get_impl_ptr(number_float_t* /*unused*/) noexcept + { + return is_number_float() ? &m_value.number_float : nullptr; + } + + /// get a pointer to the value (floating-point number) + constexpr const number_float_t* get_impl_ptr(const number_float_t* /*unused*/) const noexcept + { + return is_number_float() ? &m_value.number_float : nullptr; + } + + /// get a pointer to the value (binary) + binary_t* get_impl_ptr(binary_t* /*unused*/) noexcept + { + return is_binary() ? m_value.binary : nullptr; + } + + /// get a pointer to the value (binary) + constexpr const binary_t* get_impl_ptr(const binary_t* /*unused*/) const noexcept + { + return is_binary() ? m_value.binary : nullptr; + } + + /*! 
+ @brief helper function to implement get_ref() + + This function helps to implement get_ref() without code duplication for + const and non-const overloads + + @tparam ThisType will be deduced as `basic_json` or `const basic_json` + + @throw type_error.303 if ReferenceType does not match underlying value + type of the current JSON + */ + template + static ReferenceType get_ref_impl(ThisType& obj) + { + // delegate the call to get_ptr<>() + auto ptr = obj.template get_ptr::type>(); + + if (JSON_HEDLEY_LIKELY(ptr != nullptr)) + { + return *ptr; + } + + JSON_THROW(type_error::create(303, "incompatible ReferenceType for get_ref, actual type is " + std::string(obj.type_name()))); + } + + public: + /// @name value access + /// Direct access to the stored value of a JSON value. + /// @{ + + /*! + @brief get special-case overload + + This overloads avoids a lot of template boilerplate, it can be seen as the + identity method + + @tparam BasicJsonType == @ref basic_json + + @return a copy of *this + + @complexity Constant. + + @since version 2.1.0 + */ + template::type, basic_json_t>::value, + int> = 0> + basic_json get() const + { + return *this; + } + + /*! + @brief get special-case overload + + This overloads converts the current @ref basic_json in a different + @ref basic_json type + + @tparam BasicJsonType == @ref basic_json + + @return a copy of *this, converted into @tparam BasicJsonType + + @complexity Depending on the implementation of the called `from_json()` + method. + + @since version 3.2.0 + */ + template < typename BasicJsonType, detail::enable_if_t < + !std::is_same::value&& + detail::is_basic_json::value, int > = 0 > + BasicJsonType get() const + { + return *this; + } + + /*! 
+ @brief get a value (explicit) + + Explicit type conversion between the JSON value and a compatible value + which is [CopyConstructible](https://en.cppreference.com/w/cpp/named_req/CopyConstructible) + and [DefaultConstructible](https://en.cppreference.com/w/cpp/named_req/DefaultConstructible). + The value is converted by calling the @ref json_serializer + `from_json()` method. + + The function is equivalent to executing + @code {.cpp} + ValueType ret; + JSONSerializer::from_json(*this, ret); + return ret; + @endcode + + This overloads is chosen if: + - @a ValueType is not @ref basic_json, + - @ref json_serializer has a `from_json()` method of the form + `void from_json(const basic_json&, ValueType&)`, and + - @ref json_serializer does not have a `from_json()` method of + the form `ValueType from_json(const basic_json&)` + + @tparam ValueTypeCV the provided value type + @tparam ValueType the returned value type + + @return copy of the JSON value, converted to @a ValueType + + @throw what @ref json_serializer `from_json()` method throws + + @liveexample{The example below shows several conversions from JSON values + to other types. 
There a few things to note: (1) Floating-point numbers can + be converted to integers\, (2) A JSON array can be converted to a standard + `std::vector`\, (3) A JSON object can be converted to C++ + associative containers such as `std::unordered_map`.,get__ValueType_const} + + @since version 2.1.0 + */ + template < typename ValueTypeCV, typename ValueType = detail::uncvref_t, + detail::enable_if_t < + !detail::is_basic_json::value && + detail::has_from_json::value && + !detail::has_non_default_from_json::value, + int > = 0 > + ValueType get() const noexcept(noexcept( + JSONSerializer::from_json(std::declval(), std::declval()))) + { + // we cannot static_assert on ValueTypeCV being non-const, because + // there is support for get(), which is why we + // still need the uncvref + static_assert(!std::is_reference::value, + "get() cannot be used with reference types, you might want to use get_ref()"); + static_assert(std::is_default_constructible::value, + "types must be DefaultConstructible when used with get()"); + + ValueType ret; + JSONSerializer::from_json(*this, ret); + return ret; + } + + /*! + @brief get a value (explicit); special case + + Explicit type conversion between the JSON value and a compatible value + which is **not** [CopyConstructible](https://en.cppreference.com/w/cpp/named_req/CopyConstructible) + and **not** [DefaultConstructible](https://en.cppreference.com/w/cpp/named_req/DefaultConstructible). + The value is converted by calling the @ref json_serializer + `from_json()` method. + + The function is equivalent to executing + @code {.cpp} + return JSONSerializer::from_json(*this); + @endcode + + This overloads is chosen if: + - @a ValueType is not @ref basic_json and + - @ref json_serializer has a `from_json()` method of the form + `ValueType from_json(const basic_json&)` + + @note If @ref json_serializer has both overloads of + `from_json()`, this one is chosen. 
+ + @tparam ValueTypeCV the provided value type + @tparam ValueType the returned value type + + @return copy of the JSON value, converted to @a ValueType + + @throw what @ref json_serializer `from_json()` method throws + + @since version 2.1.0 + */ + template < typename ValueTypeCV, typename ValueType = detail::uncvref_t, + detail::enable_if_t < !std::is_same::value && + detail::has_non_default_from_json::value, + int > = 0 > + ValueType get() const noexcept(noexcept( + JSONSerializer::from_json(std::declval()))) + { + static_assert(!std::is_reference::value, + "get() cannot be used with reference types, you might want to use get_ref()"); + return JSONSerializer::from_json(*this); + } + + /*! + @brief get a value (explicit) + + Explicit type conversion between the JSON value and a compatible value. + The value is filled into the input parameter by calling the @ref json_serializer + `from_json()` method. + + The function is equivalent to executing + @code {.cpp} + ValueType v; + JSONSerializer::from_json(*this, v); + @endcode + + This overloads is chosen if: + - @a ValueType is not @ref basic_json, + - @ref json_serializer has a `from_json()` method of the form + `void from_json(const basic_json&, ValueType&)`, and + + @tparam ValueType the input parameter type. + + @return the input parameter, allowing chaining calls. + + @throw what @ref json_serializer `from_json()` method throws + + @liveexample{The example below shows several conversions from JSON values + to other types. 
There a few things to note: (1) Floating-point numbers can + be converted to integers\, (2) A JSON array can be converted to a standard + `std::vector`\, (3) A JSON object can be converted to C++ + associative containers such as `std::unordered_map`.,get_to} + + @since version 3.3.0 + */ + template < typename ValueType, + detail::enable_if_t < + !detail::is_basic_json::value&& + detail::has_from_json::value, + int > = 0 > + ValueType & get_to(ValueType& v) const noexcept(noexcept( + JSONSerializer::from_json(std::declval(), v))) + { + JSONSerializer::from_json(*this, v); + return v; + } + + // specialization to allow to call get_to with a basic_json value + // see https://github.com/nlohmann/json/issues/2175 + template::value, + int> = 0> + ValueType & get_to(ValueType& v) const + { + v = *this; + return v; + } + + template < + typename T, std::size_t N, + typename Array = T (&)[N], + detail::enable_if_t < + detail::has_from_json::value, int > = 0 > + Array get_to(T (&v)[N]) const + noexcept(noexcept(JSONSerializer::from_json( + std::declval(), v))) + { + JSONSerializer::from_json(*this, v); + return v; + } + + + /*! + @brief get a pointer value (implicit) + + Implicit pointer access to the internally stored JSON value. No copies are + made. + + @warning Writing data to the pointee of the result yields an undefined + state. + + @tparam PointerType pointer type; must be a pointer to @ref array_t, @ref + object_t, @ref string_t, @ref boolean_t, @ref number_integer_t, + @ref number_unsigned_t, or @ref number_float_t. Enforced by a static + assertion. + + @return pointer to the internally stored JSON value if the requested + pointer type @a PointerType fits to the JSON value; `nullptr` otherwise + + @complexity Constant. + + @liveexample{The example below shows how pointers to internal values of a + JSON value can be requested. 
Note that no type conversions are made and a + `nullptr` is returned if the value and the requested pointer type does not + match.,get_ptr} + + @since version 1.0.0 + */ + template::value, int>::type = 0> + auto get_ptr() noexcept -> decltype(std::declval().get_impl_ptr(std::declval())) + { + // delegate the call to get_impl_ptr<>() + return get_impl_ptr(static_cast(nullptr)); + } + + /*! + @brief get a pointer value (implicit) + @copydoc get_ptr() + */ + template < typename PointerType, typename std::enable_if < + std::is_pointer::value&& + std::is_const::type>::value, int >::type = 0 > + constexpr auto get_ptr() const noexcept -> decltype(std::declval().get_impl_ptr(std::declval())) + { + // delegate the call to get_impl_ptr<>() const + return get_impl_ptr(static_cast(nullptr)); + } + + /*! + @brief get a pointer value (explicit) + + Explicit pointer access to the internally stored JSON value. No copies are + made. + + @warning The pointer becomes invalid if the underlying JSON object + changes. + + @tparam PointerType pointer type; must be a pointer to @ref array_t, @ref + object_t, @ref string_t, @ref boolean_t, @ref number_integer_t, + @ref number_unsigned_t, or @ref number_float_t. + + @return pointer to the internally stored JSON value if the requested + pointer type @a PointerType fits to the JSON value; `nullptr` otherwise + + @complexity Constant. + + @liveexample{The example below shows how pointers to internal values of a + JSON value can be requested. Note that no type conversions are made and a + `nullptr` is returned if the value and the requested pointer type does not + match.,get__PointerType} + + @sa @ref get_ptr() for explicit pointer-member access + + @since version 1.0.0 + */ + template::value, int>::type = 0> + auto get() noexcept -> decltype(std::declval().template get_ptr()) + { + // delegate the call to get_ptr + return get_ptr(); + } + + /*! 
+ @brief get a pointer value (explicit) + @copydoc get() + */ + template::value, int>::type = 0> + constexpr auto get() const noexcept -> decltype(std::declval().template get_ptr()) + { + // delegate the call to get_ptr + return get_ptr(); + } + + /*! + @brief get a reference value (implicit) + + Implicit reference access to the internally stored JSON value. No copies + are made. + + @warning Writing data to the referee of the result yields an undefined + state. + + @tparam ReferenceType reference type; must be a reference to @ref array_t, + @ref object_t, @ref string_t, @ref boolean_t, @ref number_integer_t, or + @ref number_float_t. Enforced by static assertion. + + @return reference to the internally stored JSON value if the requested + reference type @a ReferenceType fits to the JSON value; throws + type_error.303 otherwise + + @throw type_error.303 in case passed type @a ReferenceType is incompatible + with the stored JSON value; see example below + + @complexity Constant. + + @liveexample{The example shows several calls to `get_ref()`.,get_ref} + + @since version 1.1.0 + */ + template::value, int>::type = 0> + ReferenceType get_ref() + { + // delegate call to get_ref_impl + return get_ref_impl(*this); + } + + /*! + @brief get a reference value (implicit) + @copydoc get_ref() + */ + template < typename ReferenceType, typename std::enable_if < + std::is_reference::value&& + std::is_const::type>::value, int >::type = 0 > + ReferenceType get_ref() const + { + // delegate call to get_ref_impl + return get_ref_impl(*this); + } + + /*! + @brief get a value (implicit) + + Implicit type conversion between the JSON value and a compatible value. + The call is realized by calling @ref get() const. + + @tparam ValueType non-pointer type compatible to the JSON value, for + instance `int` for JSON integer numbers, `bool` for JSON booleans, or + `std::vector` types for JSON arrays. 
The character type of @ref string_t + as well as an initializer list of this type is excluded to avoid + ambiguities as these types implicitly convert to `std::string`. + + @return copy of the JSON value, converted to type @a ValueType + + @throw type_error.302 in case passed type @a ValueType is incompatible + to the JSON value type (e.g., the JSON value is of type boolean, but a + string is requested); see example below + + @complexity Linear in the size of the JSON value. + + @liveexample{The example below shows several conversions from JSON values + to other types. There a few things to note: (1) Floating-point numbers can + be converted to integers\, (2) A JSON array can be converted to a standard + `std::vector`\, (3) A JSON object can be converted to C++ + associative containers such as `std::unordered_map`.,operator__ValueType} + + @since version 1.0.0 + */ + template < typename ValueType, typename std::enable_if < + !std::is_pointer::value&& + !std::is_same>::value&& + !std::is_same::value&& + !detail::is_basic_json::value + && !std::is_same>::value +#if defined(JSON_HAS_CPP_17) && (defined(__GNUC__) || (defined(_MSC_VER) && _MSC_VER >= 1910 && _MSC_VER <= 1914)) + && !std::is_same::value +#endif + && detail::is_detected::value + , int >::type = 0 > + JSON_EXPLICIT operator ValueType() const + { + // delegate the call to get<>() const + return get(); + } + + /*! 
+ @return reference to the binary value + + @throw type_error.302 if the value is not binary + + @sa @ref is_binary() to check if the value is binary + + @since version 3.8.0 + */ + binary_t& get_binary() + { + if (!is_binary()) + { + JSON_THROW(type_error::create(302, "type must be binary, but is " + std::string(type_name()))); + } + + return *get_ptr(); + } + + /// @copydoc get_binary() + const binary_t& get_binary() const + { + if (!is_binary()) + { + JSON_THROW(type_error::create(302, "type must be binary, but is " + std::string(type_name()))); + } + + return *get_ptr(); + } + + /// @} + + + //////////////////// + // element access // + //////////////////// + + /// @name element access + /// Access to the JSON value. + /// @{ + + /*! + @brief access specified array element with bounds checking + + Returns a reference to the element at specified location @a idx, with + bounds checking. + + @param[in] idx index of the element to access + + @return reference to the element at index @a idx + + @throw type_error.304 if the JSON value is not an array; in this case, + calling `at` with an index makes no sense. See example below. + @throw out_of_range.401 if the index @a idx is out of range of the array; + that is, `idx >= size()`. See example below. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Constant. + + @since version 1.0.0 + + @liveexample{The example below shows how array elements can be read and + written using `at()`. 
It also demonstrates the different exceptions that + can be thrown.,at__size_type} + */ + reference at(size_type idx) + { + // at only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + JSON_TRY + { + return m_value.array->at(idx); + } + JSON_CATCH (std::out_of_range&) + { + // create better exception explanation + JSON_THROW(out_of_range::create(401, "array index " + std::to_string(idx) + " is out of range")); + } + } + else + { + JSON_THROW(type_error::create(304, "cannot use at() with " + std::string(type_name()))); + } + } + + /*! + @brief access specified array element with bounds checking + + Returns a const reference to the element at specified location @a idx, + with bounds checking. + + @param[in] idx index of the element to access + + @return const reference to the element at index @a idx + + @throw type_error.304 if the JSON value is not an array; in this case, + calling `at` with an index makes no sense. See example below. + @throw out_of_range.401 if the index @a idx is out of range of the array; + that is, `idx >= size()`. See example below. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Constant. + + @since version 1.0.0 + + @liveexample{The example below shows how array elements can be read using + `at()`. It also demonstrates the different exceptions that can be thrown., + at__size_type_const} + */ + const_reference at(size_type idx) const + { + // at only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + JSON_TRY + { + return m_value.array->at(idx); + } + JSON_CATCH (std::out_of_range&) + { + // create better exception explanation + JSON_THROW(out_of_range::create(401, "array index " + std::to_string(idx) + " is out of range")); + } + } + else + { + JSON_THROW(type_error::create(304, "cannot use at() with " + std::string(type_name()))); + } + } + + /*! 
+ @brief access specified object element with bounds checking + + Returns a reference to the element at with specified key @a key, with + bounds checking. + + @param[in] key key of the element to access + + @return reference to the element at key @a key + + @throw type_error.304 if the JSON value is not an object; in this case, + calling `at` with a key makes no sense. See example below. + @throw out_of_range.403 if the key @a key is is not stored in the object; + that is, `find(key) == end()`. See example below. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Logarithmic in the size of the container. + + @sa @ref operator[](const typename object_t::key_type&) for unchecked + access by reference + @sa @ref value() for access by value with a default value + + @since version 1.0.0 + + @liveexample{The example below shows how object elements can be read and + written using `at()`. It also demonstrates the different exceptions that + can be thrown.,at__object_t_key_type} + */ + reference at(const typename object_t::key_type& key) + { + // at only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + JSON_TRY + { + return m_value.object->at(key); + } + JSON_CATCH (std::out_of_range&) + { + // create better exception explanation + JSON_THROW(out_of_range::create(403, "key '" + key + "' not found")); + } + } + else + { + JSON_THROW(type_error::create(304, "cannot use at() with " + std::string(type_name()))); + } + } + + /*! + @brief access specified object element with bounds checking + + Returns a const reference to the element at with specified key @a key, + with bounds checking. + + @param[in] key key of the element to access + + @return const reference to the element at key @a key + + @throw type_error.304 if the JSON value is not an object; in this case, + calling `at` with a key makes no sense. See example below. 
+ @throw out_of_range.403 if the key @a key is is not stored in the object; + that is, `find(key) == end()`. See example below. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Logarithmic in the size of the container. + + @sa @ref operator[](const typename object_t::key_type&) for unchecked + access by reference + @sa @ref value() for access by value with a default value + + @since version 1.0.0 + + @liveexample{The example below shows how object elements can be read using + `at()`. It also demonstrates the different exceptions that can be thrown., + at__object_t_key_type_const} + */ + const_reference at(const typename object_t::key_type& key) const + { + // at only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + JSON_TRY + { + return m_value.object->at(key); + } + JSON_CATCH (std::out_of_range&) + { + // create better exception explanation + JSON_THROW(out_of_range::create(403, "key '" + key + "' not found")); + } + } + else + { + JSON_THROW(type_error::create(304, "cannot use at() with " + std::string(type_name()))); + } + } + + /*! + @brief access specified array element + + Returns a reference to the element at specified location @a idx. + + @note If @a idx is beyond the range of the array (i.e., `idx >= size()`), + then the array is silently filled up with `null` values to make `idx` a + valid reference to the last stored element. + + @param[in] idx index of the element to access + + @return reference to the element at index @a idx + + @throw type_error.305 if the JSON value is not an array or null; in that + cases, using the [] operator with an index makes no sense. + + @complexity Constant if @a idx is in the range of the array. Otherwise + linear in `idx - size()`. + + @liveexample{The example below shows how array elements can be read and + written using `[]` operator. 
Note the addition of `null` + values.,operatorarray__size_type} + + @since version 1.0.0 + */ + reference operator[](size_type idx) + { + // implicitly convert null value to an empty array + if (is_null()) + { + m_type = value_t::array; + m_value.array = create(); + assert_invariant(); + } + + // operator[] only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + // fill up array with null values if given idx is outside range + if (idx >= m_value.array->size()) + { + m_value.array->insert(m_value.array->end(), + idx - m_value.array->size() + 1, + basic_json()); + } + + return m_value.array->operator[](idx); + } + + JSON_THROW(type_error::create(305, "cannot use operator[] with a numeric argument with " + std::string(type_name()))); + } + + /*! + @brief access specified array element + + Returns a const reference to the element at specified location @a idx. + + @param[in] idx index of the element to access + + @return const reference to the element at index @a idx + + @throw type_error.305 if the JSON value is not an array; in that case, + using the [] operator with an index makes no sense. + + @complexity Constant. + + @liveexample{The example below shows how array elements can be read using + the `[]` operator.,operatorarray__size_type_const} + + @since version 1.0.0 + */ + const_reference operator[](size_type idx) const + { + // const operator[] only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + return m_value.array->operator[](idx); + } + + JSON_THROW(type_error::create(305, "cannot use operator[] with a numeric argument with " + std::string(type_name()))); + } + + /*! + @brief access specified object element + + Returns a reference to the element at with specified key @a key. + + @note If @a key is not found in the object, then it is silently added to + the object and filled with a `null` value to make `key` a valid reference. + In case the value was `null` before, it is converted to an object. 
+ + @param[in] key key of the element to access + + @return reference to the element at key @a key + + @throw type_error.305 if the JSON value is not an object or null; in that + cases, using the [] operator with a key makes no sense. + + @complexity Logarithmic in the size of the container. + + @liveexample{The example below shows how object elements can be read and + written using the `[]` operator.,operatorarray__key_type} + + @sa @ref at(const typename object_t::key_type&) for access by reference + with range checking + @sa @ref value() for access by value with a default value + + @since version 1.0.0 + */ + reference operator[](const typename object_t::key_type& key) + { + // implicitly convert null value to an empty object + if (is_null()) + { + m_type = value_t::object; + m_value.object = create(); + assert_invariant(); + } + + // operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + return m_value.object->operator[](key); + } + + JSON_THROW(type_error::create(305, "cannot use operator[] with a string argument with " + std::string(type_name()))); + } + + /*! + @brief read-only access specified object element + + Returns a const reference to the element at with specified key @a key. No + bounds checking is performed. + + @warning If the element with key @a key does not exist, the behavior is + undefined. + + @param[in] key key of the element to access + + @return const reference to the element at key @a key + + @pre The element with key @a key must exist. **This precondition is + enforced with an assertion.** + + @throw type_error.305 if the JSON value is not an object; in that case, + using the [] operator with a key makes no sense. + + @complexity Logarithmic in the size of the container. 
+ + @liveexample{The example below shows how object elements can be read using + the `[]` operator.,operatorarray__key_type_const} + + @sa @ref at(const typename object_t::key_type&) for access by reference + with range checking + @sa @ref value() for access by value with a default value + + @since version 1.0.0 + */ + const_reference operator[](const typename object_t::key_type& key) const + { + // const operator[] only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + JSON_ASSERT(m_value.object->find(key) != m_value.object->end()); + return m_value.object->find(key)->second; + } + + JSON_THROW(type_error::create(305, "cannot use operator[] with a string argument with " + std::string(type_name()))); + } + + /*! + @brief access specified object element + + Returns a reference to the element at with specified key @a key. + + @note If @a key is not found in the object, then it is silently added to + the object and filled with a `null` value to make `key` a valid reference. + In case the value was `null` before, it is converted to an object. + + @param[in] key key of the element to access + + @return reference to the element at key @a key + + @throw type_error.305 if the JSON value is not an object or null; in that + cases, using the [] operator with a key makes no sense. + + @complexity Logarithmic in the size of the container. 
+ + @liveexample{The example below shows how object elements can be read and + written using the `[]` operator.,operatorarray__key_type} + + @sa @ref at(const typename object_t::key_type&) for access by reference + with range checking + @sa @ref value() for access by value with a default value + + @since version 1.1.0 + */ + template + JSON_HEDLEY_NON_NULL(2) + reference operator[](T* key) + { + // implicitly convert null to object + if (is_null()) + { + m_type = value_t::object; + m_value = value_t::object; + assert_invariant(); + } + + // at only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + return m_value.object->operator[](key); + } + + JSON_THROW(type_error::create(305, "cannot use operator[] with a string argument with " + std::string(type_name()))); + } + + /*! + @brief read-only access specified object element + + Returns a const reference to the element at with specified key @a key. No + bounds checking is performed. + + @warning If the element with key @a key does not exist, the behavior is + undefined. + + @param[in] key key of the element to access + + @return const reference to the element at key @a key + + @pre The element with key @a key must exist. **This precondition is + enforced with an assertion.** + + @throw type_error.305 if the JSON value is not an object; in that case, + using the [] operator with a key makes no sense. + + @complexity Logarithmic in the size of the container. 
+ + @liveexample{The example below shows how object elements can be read using + the `[]` operator.,operatorarray__key_type_const} + + @sa @ref at(const typename object_t::key_type&) for access by reference + with range checking + @sa @ref value() for access by value with a default value + + @since version 1.1.0 + */ + template + JSON_HEDLEY_NON_NULL(2) + const_reference operator[](T* key) const + { + // at only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + JSON_ASSERT(m_value.object->find(key) != m_value.object->end()); + return m_value.object->find(key)->second; + } + + JSON_THROW(type_error::create(305, "cannot use operator[] with a string argument with " + std::string(type_name()))); + } + + /*! + @brief access specified object element with default value + + Returns either a copy of an object's element at the specified key @a key + or a given default value if no element with key @a key exists. + + The function is basically equivalent to executing + @code {.cpp} + try { + return at(key); + } catch(out_of_range) { + return default_value; + } + @endcode + + @note Unlike @ref at(const typename object_t::key_type&), this function + does not throw if the given key @a key was not found. + + @note Unlike @ref operator[](const typename object_t::key_type& key), this + function does not implicitly add an element to the position defined by @a + key. This function is furthermore also applicable to const objects. + + @param[in] key key of the element to access + @param[in] default_value the value to return if @a key is not found + + @tparam ValueType type compatible to JSON values, for instance `int` for + JSON integer numbers, `bool` for JSON booleans, or `std::vector` types for + JSON arrays. Note the type of the expected value at @a key and the default + value @a default_value must be compatible. 
+ + @return copy of the element at key @a key or @a default_value if @a key + is not found + + @throw type_error.302 if @a default_value does not match the type of the + value at @a key + @throw type_error.306 if the JSON value is not an object; in that case, + using `value()` with a key makes no sense. + + @complexity Logarithmic in the size of the container. + + @liveexample{The example below shows how object elements can be queried + with a default value.,basic_json__value} + + @sa @ref at(const typename object_t::key_type&) for access by reference + with range checking + @sa @ref operator[](const typename object_t::key_type&) for unchecked + access by reference + + @since version 1.0.0 + */ + // using std::is_convertible in a std::enable_if will fail when using explicit conversions + template < class ValueType, typename std::enable_if < + detail::is_getable::value + && !std::is_same::value, int >::type = 0 > + ValueType value(const typename object_t::key_type& key, const ValueType& default_value) const + { + // at only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + // if key is found, return value and given default value otherwise + const auto it = find(key); + if (it != end()) + { + return it->template get(); + } + + return default_value; + } + + JSON_THROW(type_error::create(306, "cannot use value() with " + std::string(type_name()))); + } + + /*! + @brief overload for a default value of type const char* + @copydoc basic_json::value(const typename object_t::key_type&, const ValueType&) const + */ + string_t value(const typename object_t::key_type& key, const char* default_value) const + { + return value(key, string_t(default_value)); + } + + /*! + @brief access specified object element via JSON Pointer with default value + + Returns either a copy of an object's element at the specified key @a key + or a given default value if no element with key @a key exists. 
+ + The function is basically equivalent to executing + @code {.cpp} + try { + return at(ptr); + } catch(out_of_range) { + return default_value; + } + @endcode + + @note Unlike @ref at(const json_pointer&), this function does not throw + if the given key @a key was not found. + + @param[in] ptr a JSON pointer to the element to access + @param[in] default_value the value to return if @a ptr found no value + + @tparam ValueType type compatible to JSON values, for instance `int` for + JSON integer numbers, `bool` for JSON booleans, or `std::vector` types for + JSON arrays. Note the type of the expected value at @a key and the default + value @a default_value must be compatible. + + @return copy of the element at key @a key or @a default_value if @a key + is not found + + @throw type_error.302 if @a default_value does not match the type of the + value at @a ptr + @throw type_error.306 if the JSON value is not an object; in that case, + using `value()` with a key makes no sense. + + @complexity Logarithmic in the size of the container. + + @liveexample{The example below shows how object elements can be queried + with a default value.,basic_json__value_ptr} + + @sa @ref operator[](const json_pointer&) for unchecked access by reference + + @since version 2.0.2 + */ + template::value, int>::type = 0> + ValueType value(const json_pointer& ptr, const ValueType& default_value) const + { + // at only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + // if pointer resolves a value, return it or use default value + JSON_TRY + { + return ptr.get_checked(this).template get(); + } + JSON_INTERNAL_CATCH (out_of_range&) + { + return default_value; + } + } + + JSON_THROW(type_error::create(306, "cannot use value() with " + std::string(type_name()))); + } + + /*! 
+ @brief overload for a default value of type const char* + @copydoc basic_json::value(const json_pointer&, ValueType) const + */ + JSON_HEDLEY_NON_NULL(3) + string_t value(const json_pointer& ptr, const char* default_value) const + { + return value(ptr, string_t(default_value)); + } + + /*! + @brief access the first element + + Returns a reference to the first element in the container. For a JSON + container `c`, the expression `c.front()` is equivalent to `*c.begin()`. + + @return In case of a structured type (array or object), a reference to the + first element is returned. In case of number, string, boolean, or binary + values, a reference to the value is returned. + + @complexity Constant. + + @pre The JSON value must not be `null` (would throw `std::out_of_range`) + or an empty array or object (undefined behavior, **guarded by + assertions**). + @post The JSON value remains unchanged. + + @throw invalid_iterator.214 when called on `null` value + + @liveexample{The following code shows an example for `front()`.,front} + + @sa @ref back() -- access the last element + + @since version 1.0.0 + */ + reference front() + { + return *begin(); + } + + /*! + @copydoc basic_json::front() + */ + const_reference front() const + { + return *cbegin(); + } + + /*! + @brief access the last element + + Returns a reference to the last element in the container. For a JSON + container `c`, the expression `c.back()` is equivalent to + @code {.cpp} + auto tmp = c.end(); + --tmp; + return *tmp; + @endcode + + @return In case of a structured type (array or object), a reference to the + last element is returned. In case of number, string, boolean, or binary + values, a reference to the value is returned. + + @complexity Constant. + + @pre The JSON value must not be `null` (would throw `std::out_of_range`) + or an empty array or object (undefined behavior, **guarded by + assertions**). + @post The JSON value remains unchanged. 
+ + @throw invalid_iterator.214 when called on a `null` value. See example + below. + + @liveexample{The following code shows an example for `back()`.,back} + + @sa @ref front() -- access the first element + + @since version 1.0.0 + */ + reference back() + { + auto tmp = end(); + --tmp; + return *tmp; + } + + /*! + @copydoc basic_json::back() + */ + const_reference back() const + { + auto tmp = cend(); + --tmp; + return *tmp; + } + + /*! + @brief remove element given an iterator + + Removes the element specified by iterator @a pos. The iterator @a pos must + be valid and dereferenceable. Thus the `end()` iterator (which is valid, + but is not dereferenceable) cannot be used as a value for @a pos. + + If called on a primitive type other than `null`, the resulting JSON value + will be `null`. + + @param[in] pos iterator to the element to remove + @return Iterator following the last removed element. If the iterator @a + pos refers to the last element, the `end()` iterator is returned. + + @tparam IteratorType an @ref iterator or @ref const_iterator + + @post Invalidates iterators and references at or after the point of the + erase, including the `end()` iterator. 
+ + @throw type_error.307 if called on a `null` value; example: `"cannot use + erase() with null"` + @throw invalid_iterator.202 if called on an iterator which does not belong + to the current JSON value; example: `"iterator does not fit current + value"` + @throw invalid_iterator.205 if called on a primitive type with invalid + iterator (i.e., any iterator which is not `begin()`); example: `"iterator + out of range"` + + @complexity The complexity depends on the type: + - objects: amortized constant + - arrays: linear in distance between @a pos and the end of the container + - strings and binary: linear in the length of the member + - other types: constant + + @liveexample{The example shows the result of `erase()` for different JSON + types.,erase__IteratorType} + + @sa @ref erase(IteratorType, IteratorType) -- removes the elements in + the given range + @sa @ref erase(const typename object_t::key_type&) -- removes the element + from an object at the given key + @sa @ref erase(const size_type) -- removes the element from an array at + the given index + + @since version 1.0.0 + */ + template < class IteratorType, typename std::enable_if < + std::is_same::value || + std::is_same::value, int >::type + = 0 > + IteratorType erase(IteratorType pos) + { + // make sure iterator fits the current value + if (JSON_HEDLEY_UNLIKELY(this != pos.m_object)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value")); + } + + IteratorType result = end(); + + switch (m_type) + { + case value_t::boolean: + case value_t::number_float: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::string: + case value_t::binary: + { + if (JSON_HEDLEY_UNLIKELY(!pos.m_it.primitive_iterator.is_begin())) + { + JSON_THROW(invalid_iterator::create(205, "iterator out of range")); + } + + if (is_string()) + { + AllocatorType alloc; + std::allocator_traits::destroy(alloc, m_value.string); + std::allocator_traits::deallocate(alloc, m_value.string, 
1); + m_value.string = nullptr; + } + else if (is_binary()) + { + AllocatorType alloc; + std::allocator_traits::destroy(alloc, m_value.binary); + std::allocator_traits::deallocate(alloc, m_value.binary, 1); + m_value.binary = nullptr; + } + + m_type = value_t::null; + assert_invariant(); + break; + } + + case value_t::object: + { + result.m_it.object_iterator = m_value.object->erase(pos.m_it.object_iterator); + break; + } + + case value_t::array: + { + result.m_it.array_iterator = m_value.array->erase(pos.m_it.array_iterator); + break; + } + + default: + JSON_THROW(type_error::create(307, "cannot use erase() with " + std::string(type_name()))); + } + + return result; + } + + /*! + @brief remove elements given an iterator range + + Removes the element specified by the range `[first; last)`. The iterator + @a first does not need to be dereferenceable if `first == last`: erasing + an empty range is a no-op. + + If called on a primitive type other than `null`, the resulting JSON value + will be `null`. + + @param[in] first iterator to the beginning of the range to remove + @param[in] last iterator past the end of the range to remove + @return Iterator following the last removed element. If the iterator @a + second refers to the last element, the `end()` iterator is returned. + + @tparam IteratorType an @ref iterator or @ref const_iterator + + @post Invalidates iterators and references at or after the point of the + erase, including the `end()` iterator. 
+ + @throw type_error.307 if called on a `null` value; example: `"cannot use + erase() with null"` + @throw invalid_iterator.203 if called on iterators which does not belong + to the current JSON value; example: `"iterators do not fit current value"` + @throw invalid_iterator.204 if called on a primitive type with invalid + iterators (i.e., if `first != begin()` and `last != end()`); example: + `"iterators out of range"` + + @complexity The complexity depends on the type: + - objects: `log(size()) + std::distance(first, last)` + - arrays: linear in the distance between @a first and @a last, plus linear + in the distance between @a last and end of the container + - strings and binary: linear in the length of the member + - other types: constant + + @liveexample{The example shows the result of `erase()` for different JSON + types.,erase__IteratorType_IteratorType} + + @sa @ref erase(IteratorType) -- removes the element at a given position + @sa @ref erase(const typename object_t::key_type&) -- removes the element + from an object at the given key + @sa @ref erase(const size_type) -- removes the element from an array at + the given index + + @since version 1.0.0 + */ + template < class IteratorType, typename std::enable_if < + std::is_same::value || + std::is_same::value, int >::type + = 0 > + IteratorType erase(IteratorType first, IteratorType last) + { + // make sure iterator fits the current value + if (JSON_HEDLEY_UNLIKELY(this != first.m_object || this != last.m_object)) + { + JSON_THROW(invalid_iterator::create(203, "iterators do not fit current value")); + } + + IteratorType result = end(); + + switch (m_type) + { + case value_t::boolean: + case value_t::number_float: + case value_t::number_integer: + case value_t::number_unsigned: + case value_t::string: + case value_t::binary: + { + if (JSON_HEDLEY_LIKELY(!first.m_it.primitive_iterator.is_begin() + || !last.m_it.primitive_iterator.is_end())) + { + JSON_THROW(invalid_iterator::create(204, "iterators out of 
range")); + } + + if (is_string()) + { + AllocatorType alloc; + std::allocator_traits::destroy(alloc, m_value.string); + std::allocator_traits::deallocate(alloc, m_value.string, 1); + m_value.string = nullptr; + } + else if (is_binary()) + { + AllocatorType alloc; + std::allocator_traits::destroy(alloc, m_value.binary); + std::allocator_traits::deallocate(alloc, m_value.binary, 1); + m_value.binary = nullptr; + } + + m_type = value_t::null; + assert_invariant(); + break; + } + + case value_t::object: + { + result.m_it.object_iterator = m_value.object->erase(first.m_it.object_iterator, + last.m_it.object_iterator); + break; + } + + case value_t::array: + { + result.m_it.array_iterator = m_value.array->erase(first.m_it.array_iterator, + last.m_it.array_iterator); + break; + } + + default: + JSON_THROW(type_error::create(307, "cannot use erase() with " + std::string(type_name()))); + } + + return result; + } + + /*! + @brief remove element from a JSON object given a key + + Removes elements from a JSON object with the key value @a key. + + @param[in] key value of the elements to remove + + @return Number of elements removed. If @a ObjectType is the default + `std::map` type, the return value will always be `0` (@a key was not + found) or `1` (@a key was found). + + @post References and iterators to the erased elements are invalidated. + Other references and iterators are not affected. 
+ + @throw type_error.307 when called on a type other than JSON object; + example: `"cannot use erase() with null"` + + @complexity `log(size()) + count(key)` + + @liveexample{The example shows the effect of `erase()`.,erase__key_type} + + @sa @ref erase(IteratorType) -- removes the element at a given position + @sa @ref erase(IteratorType, IteratorType) -- removes the elements in + the given range + @sa @ref erase(const size_type) -- removes the element from an array at + the given index + + @since version 1.0.0 + */ + size_type erase(const typename object_t::key_type& key) + { + // this erase only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + return m_value.object->erase(key); + } + + JSON_THROW(type_error::create(307, "cannot use erase() with " + std::string(type_name()))); + } + + /*! + @brief remove element from a JSON array given an index + + Removes element from a JSON array at the index @a idx. + + @param[in] idx index of the element to remove + + @throw type_error.307 when called on a type other than JSON object; + example: `"cannot use erase() with null"` + @throw out_of_range.401 when `idx >= size()`; example: `"array index 17 + is out of range"` + + @complexity Linear in distance between @a idx and the end of the container. 
+ + @liveexample{The example shows the effect of `erase()`.,erase__size_type} + + @sa @ref erase(IteratorType) -- removes the element at a given position + @sa @ref erase(IteratorType, IteratorType) -- removes the elements in + the given range + @sa @ref erase(const typename object_t::key_type&) -- removes the element + from an object at the given key + + @since version 1.0.0 + */ + void erase(const size_type idx) + { + // this erase only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + if (JSON_HEDLEY_UNLIKELY(idx >= size())) + { + JSON_THROW(out_of_range::create(401, "array index " + std::to_string(idx) + " is out of range")); + } + + m_value.array->erase(m_value.array->begin() + static_cast(idx)); + } + else + { + JSON_THROW(type_error::create(307, "cannot use erase() with " + std::string(type_name()))); + } + } + + /// @} + + + //////////// + // lookup // + //////////// + + /// @name lookup + /// @{ + + /*! + @brief find an element in a JSON object + + Finds an element in a JSON object with key equivalent to @a key. If the + element is not found or the JSON value is not an object, end() is + returned. + + @note This method always returns @ref end() when executed on a JSON type + that is not an object. + + @param[in] key key value of the element to search for. + + @return Iterator to an element with key equivalent to @a key. If no such + element is found or the JSON value is not an object, past-the-end (see + @ref end()) iterator is returned. + + @complexity Logarithmic in the size of the JSON object. + + @liveexample{The example shows how `find()` is used.,find__key_type} + + @sa @ref contains(KeyT&&) const -- checks whether a key exists + + @since version 1.0.0 + */ + template + iterator find(KeyT&& key) + { + auto result = end(); + + if (is_object()) + { + result.m_it.object_iterator = m_value.object->find(std::forward(key)); + } + + return result; + } + + /*! 
+ @brief find an element in a JSON object + @copydoc find(KeyT&&) + */ + template + const_iterator find(KeyT&& key) const + { + auto result = cend(); + + if (is_object()) + { + result.m_it.object_iterator = m_value.object->find(std::forward(key)); + } + + return result; + } + + /*! + @brief returns the number of occurrences of a key in a JSON object + + Returns the number of elements with key @a key. If ObjectType is the + default `std::map` type, the return value will always be `0` (@a key was + not found) or `1` (@a key was found). + + @note This method always returns `0` when executed on a JSON type that is + not an object. + + @param[in] key key value of the element to count + + @return Number of elements with key @a key. If the JSON value is not an + object, the return value will be `0`. + + @complexity Logarithmic in the size of the JSON object. + + @liveexample{The example shows how `count()` is used.,count} + + @since version 1.0.0 + */ + template + size_type count(KeyT&& key) const + { + // return 0 for all nonobject types + return is_object() ? m_value.object->count(std::forward(key)) : 0; + } + + /*! + @brief check the existence of an element in a JSON object + + Check whether an element exists in a JSON object with key equivalent to + @a key. If the element is not found or the JSON value is not an object, + false is returned. + + @note This method always returns false when executed on a JSON type + that is not an object. + + @param[in] key key value to check its existence. + + @return true if an element with specified @a key exists. If no such + element with such key is found or the JSON value is not an object, + false is returned. + + @complexity Logarithmic in the size of the JSON object. 
+ + @liveexample{The following code shows an example for `contains()`.,contains} + + @sa @ref find(KeyT&&) -- returns an iterator to an object element + @sa @ref contains(const json_pointer&) const -- checks the existence for a JSON pointer + + @since version 3.6.0 + */ + template < typename KeyT, typename std::enable_if < + !std::is_same::type, json_pointer>::value, int >::type = 0 > + bool contains(KeyT && key) const + { + return is_object() && m_value.object->find(std::forward(key)) != m_value.object->end(); + } + + /*! + @brief check the existence of an element in a JSON object given a JSON pointer + + Check whether the given JSON pointer @a ptr can be resolved in the current + JSON value. + + @note This method can be executed on any JSON value type. + + @param[in] ptr JSON pointer to check its existence. + + @return true if the JSON pointer can be resolved to a stored value, false + otherwise. + + @post If `j.contains(ptr)` returns true, it is safe to call `j[ptr]`. + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + + @complexity Logarithmic in the size of the JSON object. + + @liveexample{The following code shows an example for `contains()`.,contains_json_pointer} + + @sa @ref contains(KeyT &&) const -- checks the existence of a key + + @since version 3.7.0 + */ + bool contains(const json_pointer& ptr) const + { + return ptr.contains(this); + } + + /// @} + + + /////////////// + // iterators // + /////////////// + + /// @name iterators + /// @{ + + /*! + @brief returns an iterator to the first element + + Returns an iterator to the first element. + + @image html range-begin-end.svg "Illustration from cppreference.com" + + @return iterator to the first element + + @complexity Constant. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is constant. 
+ + @liveexample{The following code shows an example for `begin()`.,begin} + + @sa @ref cbegin() -- returns a const iterator to the beginning + @sa @ref end() -- returns an iterator to the end + @sa @ref cend() -- returns a const iterator to the end + + @since version 1.0.0 + */ + iterator begin() noexcept + { + iterator result(this); + result.set_begin(); + return result; + } + + /*! + @copydoc basic_json::cbegin() + */ + const_iterator begin() const noexcept + { + return cbegin(); + } + + /*! + @brief returns a const iterator to the first element + + Returns a const iterator to the first element. + + @image html range-begin-end.svg "Illustration from cppreference.com" + + @return const iterator to the first element + + @complexity Constant. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is constant. + - Has the semantics of `const_cast(*this).begin()`. + + @liveexample{The following code shows an example for `cbegin()`.,cbegin} + + @sa @ref begin() -- returns an iterator to the beginning + @sa @ref end() -- returns an iterator to the end + @sa @ref cend() -- returns a const iterator to the end + + @since version 1.0.0 + */ + const_iterator cbegin() const noexcept + { + const_iterator result(this); + result.set_begin(); + return result; + } + + /*! + @brief returns an iterator to one past the last element + + Returns an iterator to one past the last element. + + @image html range-begin-end.svg "Illustration from cppreference.com" + + @return iterator one past the last element + + @complexity Constant. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is constant. 
+ + @liveexample{The following code shows an example for `end()`.,end} + + @sa @ref cend() -- returns a const iterator to the end + @sa @ref begin() -- returns an iterator to the beginning + @sa @ref cbegin() -- returns a const iterator to the beginning + + @since version 1.0.0 + */ + iterator end() noexcept + { + iterator result(this); + result.set_end(); + return result; + } + + /*! + @copydoc basic_json::cend() + */ + const_iterator end() const noexcept + { + return cend(); + } + + /*! + @brief returns a const iterator to one past the last element + + Returns a const iterator to one past the last element. + + @image html range-begin-end.svg "Illustration from cppreference.com" + + @return const iterator one past the last element + + @complexity Constant. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is constant. + - Has the semantics of `const_cast(*this).end()`. + + @liveexample{The following code shows an example for `cend()`.,cend} + + @sa @ref end() -- returns an iterator to the end + @sa @ref begin() -- returns an iterator to the beginning + @sa @ref cbegin() -- returns a const iterator to the beginning + + @since version 1.0.0 + */ + const_iterator cend() const noexcept + { + const_iterator result(this); + result.set_end(); + return result; + } + + /*! + @brief returns an iterator to the reverse-beginning + + Returns an iterator to the reverse-beginning; that is, the last element. + + @image html range-rbegin-rend.svg "Illustration from cppreference.com" + + @complexity Constant. + + @requirement This function helps `basic_json` satisfying the + [ReversibleContainer](https://en.cppreference.com/w/cpp/named_req/ReversibleContainer) + requirements: + - The complexity is constant. + - Has the semantics of `reverse_iterator(end())`. 
+ + @liveexample{The following code shows an example for `rbegin()`.,rbegin} + + @sa @ref crbegin() -- returns a const reverse iterator to the beginning + @sa @ref rend() -- returns a reverse iterator to the end + @sa @ref crend() -- returns a const reverse iterator to the end + + @since version 1.0.0 + */ + reverse_iterator rbegin() noexcept + { + return reverse_iterator(end()); + } + + /*! + @copydoc basic_json::crbegin() + */ + const_reverse_iterator rbegin() const noexcept + { + return crbegin(); + } + + /*! + @brief returns an iterator to the reverse-end + + Returns an iterator to the reverse-end; that is, one before the first + element. + + @image html range-rbegin-rend.svg "Illustration from cppreference.com" + + @complexity Constant. + + @requirement This function helps `basic_json` satisfying the + [ReversibleContainer](https://en.cppreference.com/w/cpp/named_req/ReversibleContainer) + requirements: + - The complexity is constant. + - Has the semantics of `reverse_iterator(begin())`. + + @liveexample{The following code shows an example for `rend()`.,rend} + + @sa @ref crend() -- returns a const reverse iterator to the end + @sa @ref rbegin() -- returns a reverse iterator to the beginning + @sa @ref crbegin() -- returns a const reverse iterator to the beginning + + @since version 1.0.0 + */ + reverse_iterator rend() noexcept + { + return reverse_iterator(begin()); + } + + /*! + @copydoc basic_json::crend() + */ + const_reverse_iterator rend() const noexcept + { + return crend(); + } + + /*! + @brief returns a const reverse iterator to the last element + + Returns a const iterator to the reverse-beginning; that is, the last + element. + + @image html range-rbegin-rend.svg "Illustration from cppreference.com" + + @complexity Constant. + + @requirement This function helps `basic_json` satisfying the + [ReversibleContainer](https://en.cppreference.com/w/cpp/named_req/ReversibleContainer) + requirements: + - The complexity is constant. 
+ - Has the semantics of `const_cast(*this).rbegin()`. + + @liveexample{The following code shows an example for `crbegin()`.,crbegin} + + @sa @ref rbegin() -- returns a reverse iterator to the beginning + @sa @ref rend() -- returns a reverse iterator to the end + @sa @ref crend() -- returns a const reverse iterator to the end + + @since version 1.0.0 + */ + const_reverse_iterator crbegin() const noexcept + { + return const_reverse_iterator(cend()); + } + + /*! + @brief returns a const reverse iterator to one before the first + + Returns a const reverse iterator to the reverse-end; that is, one before + the first element. + + @image html range-rbegin-rend.svg "Illustration from cppreference.com" + + @complexity Constant. + + @requirement This function helps `basic_json` satisfying the + [ReversibleContainer](https://en.cppreference.com/w/cpp/named_req/ReversibleContainer) + requirements: + - The complexity is constant. + - Has the semantics of `const_cast(*this).rend()`. + + @liveexample{The following code shows an example for `crend()`.,crend} + + @sa @ref rend() -- returns a reverse iterator to the end + @sa @ref rbegin() -- returns a reverse iterator to the beginning + @sa @ref crbegin() -- returns a const reverse iterator to the beginning + + @since version 1.0.0 + */ + const_reverse_iterator crend() const noexcept + { + return const_reverse_iterator(cbegin()); + } + + public: + /*! + @brief wrapper to access iterator member functions in range-based for + + This function allows to access @ref iterator::key() and @ref + iterator::value() during range-based for loops. In these loops, a + reference to the JSON values is returned, so there is no access to the + underlying iterator. 
+ + For loop without iterator_wrapper: + + @code{cpp} + for (auto it = j_object.begin(); it != j_object.end(); ++it) + { + std::cout << "key: " << it.key() << ", value:" << it.value() << '\n'; + } + @endcode + + Range-based for loop without iterator proxy: + + @code{cpp} + for (auto it : j_object) + { + // "it" is of type json::reference and has no key() member + std::cout << "value: " << it << '\n'; + } + @endcode + + Range-based for loop with iterator proxy: + + @code{cpp} + for (auto it : json::iterator_wrapper(j_object)) + { + std::cout << "key: " << it.key() << ", value:" << it.value() << '\n'; + } + @endcode + + @note When iterating over an array, `key()` will return the index of the + element as string (see example). + + @param[in] ref reference to a JSON value + @return iteration proxy object wrapping @a ref with an interface to use in + range-based for loops + + @liveexample{The following code shows how the wrapper is used,iterator_wrapper} + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Constant. + + @note The name of this function is not yet final and may change in the + future. + + @deprecated This stream operator is deprecated and will be removed in + future 4.0.0 of the library. Please use @ref items() instead; + that is, replace `json::iterator_wrapper(j)` with `j.items()`. + */ + JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items()) + static iteration_proxy iterator_wrapper(reference ref) noexcept + { + return ref.items(); + } + + /*! + @copydoc iterator_wrapper(reference) + */ + JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items()) + static iteration_proxy iterator_wrapper(const_reference ref) noexcept + { + return ref.items(); + } + + /*! + @brief helper to access iterator member functions in range-based for + + This function allows to access @ref iterator::key() and @ref + iterator::value() during range-based for loops. 
In these loops, a + reference to the JSON values is returned, so there is no access to the + underlying iterator. + + For loop without `items()` function: + + @code{cpp} + for (auto it = j_object.begin(); it != j_object.end(); ++it) + { + std::cout << "key: " << it.key() << ", value:" << it.value() << '\n'; + } + @endcode + + Range-based for loop without `items()` function: + + @code{cpp} + for (auto it : j_object) + { + // "it" is of type json::reference and has no key() member + std::cout << "value: " << it << '\n'; + } + @endcode + + Range-based for loop with `items()` function: + + @code{cpp} + for (auto& el : j_object.items()) + { + std::cout << "key: " << el.key() << ", value:" << el.value() << '\n'; + } + @endcode + + The `items()` function also allows to use + [structured bindings](https://en.cppreference.com/w/cpp/language/structured_binding) + (C++17): + + @code{cpp} + for (auto& [key, val] : j_object.items()) + { + std::cout << "key: " << key << ", value:" << val << '\n'; + } + @endcode + + @note When iterating over an array, `key()` will return the index of the + element as string (see example). For primitive types (e.g., numbers), + `key()` returns an empty string. + + @warning Using `items()` on temporary objects is dangerous. Make sure the + object's lifetime exeeds the iteration. See + for more + information. + + @return iteration proxy object wrapping @a ref with an interface to use in + range-based for loops + + @liveexample{The following code shows how the function is used.,items} + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Constant. + + @since version 3.1.0, structured bindings support since 3.5.0. + */ + iteration_proxy items() noexcept + { + return iteration_proxy(*this); + } + + /*! 
+ @copydoc items() + */ + iteration_proxy items() const noexcept + { + return iteration_proxy(*this); + } + + /// @} + + + ////////////// + // capacity // + ////////////// + + /// @name capacity + /// @{ + + /*! + @brief checks whether the container is empty. + + Checks if a JSON value has no elements (i.e. whether its @ref size is `0`). + + @return The return value depends on the different types and is + defined as follows: + Value type | return value + ----------- | ------------- + null | `true` + boolean | `false` + string | `false` + number | `false` + binary | `false` + object | result of function `object_t::empty()` + array | result of function `array_t::empty()` + + @liveexample{The following code uses `empty()` to check if a JSON + object contains any elements.,empty} + + @complexity Constant, as long as @ref array_t and @ref object_t satisfy + the Container concept; that is, their `empty()` functions have constant + complexity. + + @iterators No changes. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @note This function does not return whether a string stored as JSON value + is empty - it returns whether the JSON container itself is empty which is + false in the case of a string. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is constant. + - Has the semantics of `begin() == end()`. + + @sa @ref size() -- returns the number of elements + + @since version 1.0.0 + */ + bool empty() const noexcept + { + switch (m_type) + { + case value_t::null: + { + // null values are empty + return true; + } + + case value_t::array: + { + // delegate call to array_t::empty() + return m_value.array->empty(); + } + + case value_t::object: + { + // delegate call to object_t::empty() + return m_value.object->empty(); + } + + default: + { + // all other types are nonempty + return false; + } + } + } + + /*! 
+ @brief returns the number of elements + + Returns the number of elements in a JSON value. + + @return The return value depends on the different types and is + defined as follows: + Value type | return value + ----------- | ------------- + null | `0` + boolean | `1` + string | `1` + number | `1` + binary | `1` + object | result of function object_t::size() + array | result of function array_t::size() + + @liveexample{The following code calls `size()` on the different value + types.,size} + + @complexity Constant, as long as @ref array_t and @ref object_t satisfy + the Container concept; that is, their size() functions have constant + complexity. + + @iterators No changes. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @note This function does not return the length of a string stored as JSON + value - it returns the number of elements in the JSON value which is 1 in + the case of a string. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is constant. + - Has the semantics of `std::distance(begin(), end())`. + + @sa @ref empty() -- checks whether the container is empty + @sa @ref max_size() -- returns the maximal number of elements + + @since version 1.0.0 + */ + size_type size() const noexcept + { + switch (m_type) + { + case value_t::null: + { + // null values are empty + return 0; + } + + case value_t::array: + { + // delegate call to array_t::size() + return m_value.array->size(); + } + + case value_t::object: + { + // delegate call to object_t::size() + return m_value.object->size(); + } + + default: + { + // all other types have size 1 + return 1; + } + } + } + + /*! + @brief returns the maximum possible number of elements + + Returns the maximum number of elements a JSON value is able to hold due to + system or library implementation limitations, i.e. `std::distance(begin(), + end())` for the JSON value. 
+ + @return The return value depends on the different types and is + defined as follows: + Value type | return value + ----------- | ------------- + null | `0` (same as `size()`) + boolean | `1` (same as `size()`) + string | `1` (same as `size()`) + number | `1` (same as `size()`) + binary | `1` (same as `size()`) + object | result of function `object_t::max_size()` + array | result of function `array_t::max_size()` + + @liveexample{The following code calls `max_size()` on the different value + types. Note the output is implementation specific.,max_size} + + @complexity Constant, as long as @ref array_t and @ref object_t satisfy + the Container concept; that is, their `max_size()` functions have constant + complexity. + + @iterators No changes. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @requirement This function helps `basic_json` satisfying the + [Container](https://en.cppreference.com/w/cpp/named_req/Container) + requirements: + - The complexity is constant. + - Has the semantics of returning `b.size()` where `b` is the largest + possible JSON value. + + @sa @ref size() -- returns the number of elements + + @since version 1.0.0 + */ + size_type max_size() const noexcept + { + switch (m_type) + { + case value_t::array: + { + // delegate call to array_t::max_size() + return m_value.array->max_size(); + } + + case value_t::object: + { + // delegate call to object_t::max_size() + return m_value.object->max_size(); + } + + default: + { + // all other types have max_size() == size() + return size(); + } + } + } + + /// @} + + + /////////////// + // modifiers // + /////////////// + + /// @name modifiers + /// @{ + + /*! 
+ @brief clears the contents + + Clears the content of a JSON value and resets it to the default value as + if @ref basic_json(value_t) would have been called with the current value + type from @ref type(): + + Value type | initial value + ----------- | ------------- + null | `null` + boolean | `false` + string | `""` + number | `0` + binary | An empty byte vector + object | `{}` + array | `[]` + + @post Has the same effect as calling + @code {.cpp} + *this = basic_json(type()); + @endcode + + @liveexample{The example below shows the effect of `clear()` to different + JSON types.,clear} + + @complexity Linear in the size of the JSON value. + + @iterators All iterators, pointers and references related to this container + are invalidated. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @sa @ref basic_json(value_t) -- constructor that creates an object with the + same value than calling `clear()` + + @since version 1.0.0 + */ + void clear() noexcept + { + switch (m_type) + { + case value_t::number_integer: + { + m_value.number_integer = 0; + break; + } + + case value_t::number_unsigned: + { + m_value.number_unsigned = 0; + break; + } + + case value_t::number_float: + { + m_value.number_float = 0.0; + break; + } + + case value_t::boolean: + { + m_value.boolean = false; + break; + } + + case value_t::string: + { + m_value.string->clear(); + break; + } + + case value_t::binary: + { + m_value.binary->clear(); + break; + } + + case value_t::array: + { + m_value.array->clear(); + break; + } + + case value_t::object: + { + m_value.object->clear(); + break; + } + + default: + break; + } + } + + /*! + @brief add an object to an array + + Appends the given element @a val to the end of the JSON value. If the + function is called on a JSON null value, an empty array is created before + appending @a val. 
+ + @param[in] val the value to add to the JSON array + + @throw type_error.308 when called on a type other than JSON array or + null; example: `"cannot use push_back() with number"` + + @complexity Amortized constant. + + @liveexample{The example shows how `push_back()` and `+=` can be used to + add elements to a JSON array. Note how the `null` value was silently + converted to a JSON array.,push_back} + + @since version 1.0.0 + */ + void push_back(basic_json&& val) + { + // push_back only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_array()))) + { + JSON_THROW(type_error::create(308, "cannot use push_back() with " + std::string(type_name()))); + } + + // transform null object into an array + if (is_null()) + { + m_type = value_t::array; + m_value = value_t::array; + assert_invariant(); + } + + // add element to array (move semantics) + m_value.array->push_back(std::move(val)); + // if val is moved from, basic_json move constructor marks it null so we do not call the destructor + } + + /*! + @brief add an object to an array + @copydoc push_back(basic_json&&) + */ + reference operator+=(basic_json&& val) + { + push_back(std::move(val)); + return *this; + } + + /*! + @brief add an object to an array + @copydoc push_back(basic_json&&) + */ + void push_back(const basic_json& val) + { + // push_back only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_array()))) + { + JSON_THROW(type_error::create(308, "cannot use push_back() with " + std::string(type_name()))); + } + + // transform null object into an array + if (is_null()) + { + m_type = value_t::array; + m_value = value_t::array; + assert_invariant(); + } + + // add element to array + m_value.array->push_back(val); + } + + /*! + @brief add an object to an array + @copydoc push_back(basic_json&&) + */ + reference operator+=(const basic_json& val) + { + push_back(val); + return *this; + } + + /*! 
+ @brief add an object to an object + + Inserts the given element @a val to the JSON object. If the function is + called on a JSON null value, an empty object is created before inserting + @a val. + + @param[in] val the value to add to the JSON object + + @throw type_error.308 when called on a type other than JSON object or + null; example: `"cannot use push_back() with number"` + + @complexity Logarithmic in the size of the container, O(log(`size()`)). + + @liveexample{The example shows how `push_back()` and `+=` can be used to + add elements to a JSON object. Note how the `null` value was silently + converted to a JSON object.,push_back__object_t__value} + + @since version 1.0.0 + */ + void push_back(const typename object_t::value_type& val) + { + // push_back only works for null objects or objects + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_object()))) + { + JSON_THROW(type_error::create(308, "cannot use push_back() with " + std::string(type_name()))); + } + + // transform null object into an object + if (is_null()) + { + m_type = value_t::object; + m_value = value_t::object; + assert_invariant(); + } + + // add element to array + m_value.object->insert(val); + } + + /*! + @brief add an object to an object + @copydoc push_back(const typename object_t::value_type&) + */ + reference operator+=(const typename object_t::value_type& val) + { + push_back(val); + return *this; + } + + /*! + @brief add an object to an object + + This function allows to use `push_back` with an initializer list. In case + + 1. the current value is an object, + 2. the initializer list @a init contains only two elements, and + 3. the first element of @a init is a string, + + @a init is converted into an object element and added using + @ref push_back(const typename object_t::value_type&). Otherwise, @a init + is converted to a JSON value and added using @ref push_back(basic_json&&). + + @param[in] init an initializer list + + @complexity Linear in the size of the initializer list @a init. 
+ + @note This function is required to resolve an ambiguous overload error, + because pairs like `{"key", "value"}` can be both interpreted as + `object_t::value_type` or `std::initializer_list`, see + https://github.com/nlohmann/json/issues/235 for more information. + + @liveexample{The example shows how initializer lists are treated as + objects when possible.,push_back__initializer_list} + */ + void push_back(initializer_list_t init) + { + if (is_object() && init.size() == 2 && (*init.begin())->is_string()) + { + basic_json&& key = init.begin()->moved_or_copied(); + push_back(typename object_t::value_type( + std::move(key.get_ref()), (init.begin() + 1)->moved_or_copied())); + } + else + { + push_back(basic_json(init)); + } + } + + /*! + @brief add an object to an object + @copydoc push_back(initializer_list_t) + */ + reference operator+=(initializer_list_t init) + { + push_back(init); + return *this; + } + + /*! + @brief add an object to an array + + Creates a JSON value from the passed parameters @a args to the end of the + JSON value. If the function is called on a JSON null value, an empty array + is created before appending the value created from @a args. + + @param[in] args arguments to forward to a constructor of @ref basic_json + @tparam Args compatible types to create a @ref basic_json object + + @return reference to the inserted element + + @throw type_error.311 when called on a type other than JSON array or + null; example: `"cannot use emplace_back() with number"` + + @complexity Amortized constant. + + @liveexample{The example shows how `push_back()` can be used to add + elements to a JSON array. Note how the `null` value was silently converted + to a JSON array.,emplace_back} + + @since version 2.0.8, returns reference since 3.7.0 + */ + template + reference emplace_back(Args&& ... 
args) + { + // emplace_back only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_array()))) + { + JSON_THROW(type_error::create(311, "cannot use emplace_back() with " + std::string(type_name()))); + } + + // transform null object into an array + if (is_null()) + { + m_type = value_t::array; + m_value = value_t::array; + assert_invariant(); + } + + // add element to array (perfect forwarding) +#ifdef JSON_HAS_CPP_17 + return m_value.array->emplace_back(std::forward(args)...); +#else + m_value.array->emplace_back(std::forward(args)...); + return m_value.array->back(); +#endif + } + + /*! + @brief add an object to an object if key does not exist + + Inserts a new element into a JSON object constructed in-place with the + given @a args if there is no element with the key in the container. If the + function is called on a JSON null value, an empty object is created before + appending the value created from @a args. + + @param[in] args arguments to forward to a constructor of @ref basic_json + @tparam Args compatible types to create a @ref basic_json object + + @return a pair consisting of an iterator to the inserted element, or the + already-existing element if no insertion happened, and a bool + denoting whether the insertion took place. + + @throw type_error.311 when called on a type other than JSON object or + null; example: `"cannot use emplace() with number"` + + @complexity Logarithmic in the size of the container, O(log(`size()`)). + + @liveexample{The example shows how `emplace()` can be used to add elements + to a JSON object. Note how the `null` value was silently converted to a + JSON object. Further note how no value is added if there was already one + value stored with the same key.,emplace} + + @since version 2.0.8 + */ + template + std::pair emplace(Args&& ... 
args) + { + // emplace only works for null objects or arrays + if (JSON_HEDLEY_UNLIKELY(!(is_null() || is_object()))) + { + JSON_THROW(type_error::create(311, "cannot use emplace() with " + std::string(type_name()))); + } + + // transform null object into an object + if (is_null()) + { + m_type = value_t::object; + m_value = value_t::object; + assert_invariant(); + } + + // add element to array (perfect forwarding) + auto res = m_value.object->emplace(std::forward(args)...); + // create result iterator and set iterator to the result of emplace + auto it = begin(); + it.m_it.object_iterator = res.first; + + // return pair of iterator and boolean + return {it, res.second}; + } + + /// Helper for insertion of an iterator + /// @note: This uses std::distance to support GCC 4.8, + /// see https://github.com/nlohmann/json/pull/1257 + template + iterator insert_iterator(const_iterator pos, Args&& ... args) + { + iterator result(this); + JSON_ASSERT(m_value.array != nullptr); + + auto insert_pos = std::distance(m_value.array->begin(), pos.m_it.array_iterator); + m_value.array->insert(pos.m_it.array_iterator, std::forward(args)...); + result.m_it.array_iterator = m_value.array->begin() + insert_pos; + + // This could have been written as: + // result.m_it.array_iterator = m_value.array->insert(pos.m_it.array_iterator, cnt, val); + // but the return value of insert is missing in GCC 4.8, so it is written this way instead. + + return result; + } + + /*! + @brief inserts element + + Inserts element @a val before iterator @a pos. + + @param[in] pos iterator before which the content will be inserted; may be + the end() iterator + @param[in] val element to insert + @return iterator pointing to the inserted @a val. 
+ + @throw type_error.309 if called on JSON values other than arrays; + example: `"cannot use insert() with string"` + @throw invalid_iterator.202 if @a pos is not an iterator of *this; + example: `"iterator does not fit current value"` + + @complexity Constant plus linear in the distance between @a pos and end of + the container. + + @liveexample{The example shows how `insert()` is used.,insert} + + @since version 1.0.0 + */ + iterator insert(const_iterator pos, const basic_json& val) + { + // insert only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value")); + } + + // insert to array and return iterator + return insert_iterator(pos, val); + } + + JSON_THROW(type_error::create(309, "cannot use insert() with " + std::string(type_name()))); + } + + /*! + @brief inserts element + @copydoc insert(const_iterator, const basic_json&) + */ + iterator insert(const_iterator pos, basic_json&& val) + { + return insert(pos, val); + } + + /*! + @brief inserts elements + + Inserts @a cnt copies of @a val before iterator @a pos. + + @param[in] pos iterator before which the content will be inserted; may be + the end() iterator + @param[in] cnt number of copies of @a val to insert + @param[in] val element to insert + @return iterator pointing to the first element inserted, or @a pos if + `cnt==0` + + @throw type_error.309 if called on JSON values other than arrays; example: + `"cannot use insert() with string"` + @throw invalid_iterator.202 if @a pos is not an iterator of *this; + example: `"iterator does not fit current value"` + + @complexity Linear in @a cnt plus linear in the distance between @a pos + and end of the container. 
+ + @liveexample{The example shows how `insert()` is used.,insert__count} + + @since version 1.0.0 + */ + iterator insert(const_iterator pos, size_type cnt, const basic_json& val) + { + // insert only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value")); + } + + // insert to array and return iterator + return insert_iterator(pos, cnt, val); + } + + JSON_THROW(type_error::create(309, "cannot use insert() with " + std::string(type_name()))); + } + + /*! + @brief inserts elements + + Inserts elements from range `[first, last)` before iterator @a pos. + + @param[in] pos iterator before which the content will be inserted; may be + the end() iterator + @param[in] first begin of the range of elements to insert + @param[in] last end of the range of elements to insert + + @throw type_error.309 if called on JSON values other than arrays; example: + `"cannot use insert() with string"` + @throw invalid_iterator.202 if @a pos is not an iterator of *this; + example: `"iterator does not fit current value"` + @throw invalid_iterator.210 if @a first and @a last do not belong to the + same JSON value; example: `"iterators do not fit"` + @throw invalid_iterator.211 if @a first or @a last are iterators into + container for which insert is called; example: `"passed iterators may not + belong to container"` + + @return iterator pointing to the first element inserted, or @a pos if + `first==last` + + @complexity Linear in `std::distance(first, last)` plus linear in the + distance between @a pos and end of the container. 
+ + @liveexample{The example shows how `insert()` is used.,insert__range} + + @since version 1.0.0 + */ + iterator insert(const_iterator pos, const_iterator first, const_iterator last) + { + // insert only works for arrays + if (JSON_HEDLEY_UNLIKELY(!is_array())) + { + JSON_THROW(type_error::create(309, "cannot use insert() with " + std::string(type_name()))); + } + + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value")); + } + + // check if range iterators belong to the same JSON object + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(210, "iterators do not fit")); + } + + if (JSON_HEDLEY_UNLIKELY(first.m_object == this)) + { + JSON_THROW(invalid_iterator::create(211, "passed iterators may not belong to container")); + } + + // insert to array and return iterator + return insert_iterator(pos, first.m_it.array_iterator, last.m_it.array_iterator); + } + + /*! + @brief inserts elements + + Inserts elements from initializer list @a ilist before iterator @a pos. + + @param[in] pos iterator before which the content will be inserted; may be + the end() iterator + @param[in] ilist initializer list to insert the values from + + @throw type_error.309 if called on JSON values other than arrays; example: + `"cannot use insert() with string"` + @throw invalid_iterator.202 if @a pos is not an iterator of *this; + example: `"iterator does not fit current value"` + + @return iterator pointing to the first element inserted, or @a pos if + `ilist` is empty + + @complexity Linear in `ilist.size()` plus linear in the distance between + @a pos and end of the container. 
+ + @liveexample{The example shows how `insert()` is used.,insert__ilist} + + @since version 1.0.0 + */ + iterator insert(const_iterator pos, initializer_list_t ilist) + { + // insert only works for arrays + if (JSON_HEDLEY_UNLIKELY(!is_array())) + { + JSON_THROW(type_error::create(309, "cannot use insert() with " + std::string(type_name()))); + } + + // check if iterator pos fits to this JSON value + if (JSON_HEDLEY_UNLIKELY(pos.m_object != this)) + { + JSON_THROW(invalid_iterator::create(202, "iterator does not fit current value")); + } + + // insert to array and return iterator + return insert_iterator(pos, ilist.begin(), ilist.end()); + } + + /*! + @brief inserts elements + + Inserts elements from range `[first, last)`. + + @param[in] first begin of the range of elements to insert + @param[in] last end of the range of elements to insert + + @throw type_error.309 if called on JSON values other than objects; example: + `"cannot use insert() with string"` + @throw invalid_iterator.202 if iterator @a first or @a last does does not + point to an object; example: `"iterators first and last must point to + objects"` + @throw invalid_iterator.210 if @a first and @a last do not belong to the + same JSON value; example: `"iterators do not fit"` + + @complexity Logarithmic: `O(N*log(size() + N))`, where `N` is the number + of elements to insert. 
+ + @liveexample{The example shows how `insert()` is used.,insert__range_object} + + @since version 3.0.0 + */ + void insert(const_iterator first, const_iterator last) + { + // insert only works for objects + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(309, "cannot use insert() with " + std::string(type_name()))); + } + + // check if range iterators belong to the same JSON object + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(210, "iterators do not fit")); + } + + // passed iterators must belong to objects + if (JSON_HEDLEY_UNLIKELY(!first.m_object->is_object())) + { + JSON_THROW(invalid_iterator::create(202, "iterators first and last must point to objects")); + } + + m_value.object->insert(first.m_it.object_iterator, last.m_it.object_iterator); + } + + /*! + @brief updates a JSON object from another object, overwriting existing keys + + Inserts all values from JSON object @a j and overwrites existing keys. + + @param[in] j JSON object to read values from + + @throw type_error.312 if called on JSON values other than objects; example: + `"cannot use update() with string"` + + @complexity O(N*log(size() + N)), where N is the number of elements to + insert. 
+ + @liveexample{The example shows how `update()` is used.,update} + + @sa https://docs.python.org/3.6/library/stdtypes.html#dict.update + + @since version 3.0.0 + */ + void update(const_reference j) + { + // implicitly convert null value to an empty object + if (is_null()) + { + m_type = value_t::object; + m_value.object = create(); + assert_invariant(); + } + + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(312, "cannot use update() with " + std::string(type_name()))); + } + if (JSON_HEDLEY_UNLIKELY(!j.is_object())) + { + JSON_THROW(type_error::create(312, "cannot use update() with " + std::string(j.type_name()))); + } + + for (auto it = j.cbegin(); it != j.cend(); ++it) + { + m_value.object->operator[](it.key()) = it.value(); + } + } + + /*! + @brief updates a JSON object from another object, overwriting existing keys + + Inserts all values from from range `[first, last)` and overwrites existing + keys. + + @param[in] first begin of the range of elements to insert + @param[in] last end of the range of elements to insert + + @throw type_error.312 if called on JSON values other than objects; example: + `"cannot use update() with string"` + @throw invalid_iterator.202 if iterator @a first or @a last does does not + point to an object; example: `"iterators first and last must point to + objects"` + @throw invalid_iterator.210 if @a first and @a last do not belong to the + same JSON value; example: `"iterators do not fit"` + + @complexity O(N*log(size() + N)), where N is the number of elements to + insert. 
+ + @liveexample{The example shows how `update()` is used__range.,update} + + @sa https://docs.python.org/3.6/library/stdtypes.html#dict.update + + @since version 3.0.0 + */ + void update(const_iterator first, const_iterator last) + { + // implicitly convert null value to an empty object + if (is_null()) + { + m_type = value_t::object; + m_value.object = create(); + assert_invariant(); + } + + if (JSON_HEDLEY_UNLIKELY(!is_object())) + { + JSON_THROW(type_error::create(312, "cannot use update() with " + std::string(type_name()))); + } + + // check if range iterators belong to the same JSON object + if (JSON_HEDLEY_UNLIKELY(first.m_object != last.m_object)) + { + JSON_THROW(invalid_iterator::create(210, "iterators do not fit")); + } + + // passed iterators must belong to objects + if (JSON_HEDLEY_UNLIKELY(!first.m_object->is_object() + || !last.m_object->is_object())) + { + JSON_THROW(invalid_iterator::create(202, "iterators first and last must point to objects")); + } + + for (auto it = first; it != last; ++it) + { + m_value.object->operator[](it.key()) = it.value(); + } + } + + /*! + @brief exchanges the values + + Exchanges the contents of the JSON value with those of @a other. Does not + invoke any move, copy, or swap operations on individual elements. All + iterators and references remain valid. The past-the-end iterator is + invalidated. + + @param[in,out] other JSON value to exchange the contents with + + @complexity Constant. + + @liveexample{The example below shows how JSON values can be swapped with + `swap()`.,swap__reference} + + @since version 1.0.0 + */ + void swap(reference other) noexcept ( + std::is_nothrow_move_constructible::value&& + std::is_nothrow_move_assignable::value&& + std::is_nothrow_move_constructible::value&& + std::is_nothrow_move_assignable::value + ) + { + std::swap(m_type, other.m_type); + std::swap(m_value, other.m_value); + assert_invariant(); + } + + /*! 
+ @brief exchanges the values + + Exchanges the contents of the JSON value from @a left with those of @a right. Does not + invoke any move, copy, or swap operations on individual elements. All + iterators and references remain valid. The past-the-end iterator is + invalidated. implemented as a friend function callable via ADL. + + @param[in,out] left JSON value to exchange the contents with + @param[in,out] right JSON value to exchange the contents with + + @complexity Constant. + + @liveexample{The example below shows how JSON values can be swapped with + `swap()`.,swap__reference} + + @since version 1.0.0 + */ + friend void swap(reference left, reference right) noexcept ( + std::is_nothrow_move_constructible::value&& + std::is_nothrow_move_assignable::value&& + std::is_nothrow_move_constructible::value&& + std::is_nothrow_move_assignable::value + ) + { + left.swap(right); + } + + /*! + @brief exchanges the values + + Exchanges the contents of a JSON array with those of @a other. Does not + invoke any move, copy, or swap operations on individual elements. All + iterators and references remain valid. The past-the-end iterator is + invalidated. + + @param[in,out] other array to exchange the contents with + + @throw type_error.310 when JSON value is not an array; example: `"cannot + use swap() with string"` + + @complexity Constant. + + @liveexample{The example below shows how arrays can be swapped with + `swap()`.,swap__array_t} + + @since version 1.0.0 + */ + void swap(array_t& other) + { + // swap only works for arrays + if (JSON_HEDLEY_LIKELY(is_array())) + { + std::swap(*(m_value.array), other); + } + else + { + JSON_THROW(type_error::create(310, "cannot use swap() with " + std::string(type_name()))); + } + } + + /*! + @brief exchanges the values + + Exchanges the contents of a JSON object with those of @a other. Does not + invoke any move, copy, or swap operations on individual elements. All + iterators and references remain valid. 
The past-the-end iterator is + invalidated. + + @param[in,out] other object to exchange the contents with + + @throw type_error.310 when JSON value is not an object; example: + `"cannot use swap() with string"` + + @complexity Constant. + + @liveexample{The example below shows how objects can be swapped with + `swap()`.,swap__object_t} + + @since version 1.0.0 + */ + void swap(object_t& other) + { + // swap only works for objects + if (JSON_HEDLEY_LIKELY(is_object())) + { + std::swap(*(m_value.object), other); + } + else + { + JSON_THROW(type_error::create(310, "cannot use swap() with " + std::string(type_name()))); + } + } + + /*! + @brief exchanges the values + + Exchanges the contents of a JSON string with those of @a other. Does not + invoke any move, copy, or swap operations on individual elements. All + iterators and references remain valid. The past-the-end iterator is + invalidated. + + @param[in,out] other string to exchange the contents with + + @throw type_error.310 when JSON value is not a string; example: `"cannot + use swap() with boolean"` + + @complexity Constant. + + @liveexample{The example below shows how strings can be swapped with + `swap()`.,swap__string_t} + + @since version 1.0.0 + */ + void swap(string_t& other) + { + // swap only works for strings + if (JSON_HEDLEY_LIKELY(is_string())) + { + std::swap(*(m_value.string), other); + } + else + { + JSON_THROW(type_error::create(310, "cannot use swap() with " + std::string(type_name()))); + } + } + + /*! + @brief exchanges the values + + Exchanges the contents of a JSON string with those of @a other. Does not + invoke any move, copy, or swap operations on individual elements. All + iterators and references remain valid. The past-the-end iterator is + invalidated. + + @param[in,out] other binary to exchange the contents with + + @throw type_error.310 when JSON value is not a string; example: `"cannot + use swap() with boolean"` + + @complexity Constant. 
+ + @liveexample{The example below shows how strings can be swapped with + `swap()`.,swap__binary_t} + + @since version 3.8.0 + */ + void swap(binary_t& other) + { + // swap only works for strings + if (JSON_HEDLEY_LIKELY(is_binary())) + { + std::swap(*(m_value.binary), other); + } + else + { + JSON_THROW(type_error::create(310, "cannot use swap() with " + std::string(type_name()))); + } + } + + /// @copydoc swap(binary_t) + void swap(typename binary_t::container_type& other) + { + // swap only works for strings + if (JSON_HEDLEY_LIKELY(is_binary())) + { + std::swap(*(m_value.binary), other); + } + else + { + JSON_THROW(type_error::create(310, "cannot use swap() with " + std::string(type_name()))); + } + } + + /// @} + + public: + ////////////////////////////////////////// + // lexicographical comparison operators // + ////////////////////////////////////////// + + /// @name lexicographical comparison operators + /// @{ + + /*! + @brief comparison: equal + + Compares two JSON values for equality according to the following rules: + - Two JSON values are equal if (1) they are from the same type and (2) + their stored values are the same according to their respective + `operator==`. + - Integer and floating-point numbers are automatically converted before + comparison. Note that two NaN values are always treated as unequal. + - Two JSON null values are equal. + + @note Floating-point inside JSON values numbers are compared with + `json::number_float_t::operator==` which is `double::operator==` by + default. 
To compare floating-point while respecting an epsilon, an alternative + [comparison function](https://github.com/mariokonrad/marnav/blob/master/include/marnav/math/floatingpoint.hpp#L34-#L39) + could be used, for instance + @code {.cpp} + template::value, T>::type> + inline bool is_same(T a, T b, T epsilon = std::numeric_limits::epsilon()) noexcept + { + return std::abs(a - b) <= epsilon; + } + @endcode + Or you can self-defined operator equal function like this: + @code {.cpp} + bool my_equal(const_reference lhs, const_reference rhs) { + const auto lhs_type lhs.type(); + const auto rhs_type rhs.type(); + if (lhs_type == rhs_type) { + switch(lhs_type) + // self_defined case + case value_t::number_float: + return std::abs(lhs - rhs) <= std::numeric_limits::epsilon(); + // other cases remain the same with the original + ... + } + ... + } + @endcode + + @note NaN values never compare equal to themselves or to other NaN values. + + @param[in] lhs first JSON value to consider + @param[in] rhs second JSON value to consider + @return whether the values @a lhs and @a rhs are equal + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @complexity Linear. 
+ + @liveexample{The example demonstrates comparing several JSON + types.,operator__equal} + + @since version 1.0.0 + */ + friend bool operator==(const_reference lhs, const_reference rhs) noexcept + { + const auto lhs_type = lhs.type(); + const auto rhs_type = rhs.type(); + + if (lhs_type == rhs_type) + { + switch (lhs_type) + { + case value_t::array: + return *lhs.m_value.array == *rhs.m_value.array; + + case value_t::object: + return *lhs.m_value.object == *rhs.m_value.object; + + case value_t::null: + return true; + + case value_t::string: + return *lhs.m_value.string == *rhs.m_value.string; + + case value_t::boolean: + return lhs.m_value.boolean == rhs.m_value.boolean; + + case value_t::number_integer: + return lhs.m_value.number_integer == rhs.m_value.number_integer; + + case value_t::number_unsigned: + return lhs.m_value.number_unsigned == rhs.m_value.number_unsigned; + + case value_t::number_float: + return lhs.m_value.number_float == rhs.m_value.number_float; + + case value_t::binary: + return *lhs.m_value.binary == *rhs.m_value.binary; + + default: + return false; + } + } + else if (lhs_type == value_t::number_integer && rhs_type == value_t::number_float) + { + return static_cast(lhs.m_value.number_integer) == rhs.m_value.number_float; + } + else if (lhs_type == value_t::number_float && rhs_type == value_t::number_integer) + { + return lhs.m_value.number_float == static_cast(rhs.m_value.number_integer); + } + else if (lhs_type == value_t::number_unsigned && rhs_type == value_t::number_float) + { + return static_cast(lhs.m_value.number_unsigned) == rhs.m_value.number_float; + } + else if (lhs_type == value_t::number_float && rhs_type == value_t::number_unsigned) + { + return lhs.m_value.number_float == static_cast(rhs.m_value.number_unsigned); + } + else if (lhs_type == value_t::number_unsigned && rhs_type == value_t::number_integer) + { + return static_cast(lhs.m_value.number_unsigned) == rhs.m_value.number_integer; + } + else if (lhs_type == 
value_t::number_integer && rhs_type == value_t::number_unsigned) + { + return lhs.m_value.number_integer == static_cast(rhs.m_value.number_unsigned); + } + + return false; + } + + /*! + @brief comparison: equal + @copydoc operator==(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator==(const_reference lhs, const ScalarType rhs) noexcept + { + return lhs == basic_json(rhs); + } + + /*! + @brief comparison: equal + @copydoc operator==(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator==(const ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) == rhs; + } + + /*! + @brief comparison: not equal + + Compares two JSON values for inequality by calculating `not (lhs == rhs)`. + + @param[in] lhs first JSON value to consider + @param[in] rhs second JSON value to consider + @return whether the values @a lhs and @a rhs are not equal + + @complexity Linear. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @liveexample{The example demonstrates comparing several JSON + types.,operator__notequal} + + @since version 1.0.0 + */ + friend bool operator!=(const_reference lhs, const_reference rhs) noexcept + { + return !(lhs == rhs); + } + + /*! + @brief comparison: not equal + @copydoc operator!=(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator!=(const_reference lhs, const ScalarType rhs) noexcept + { + return lhs != basic_json(rhs); + } + + /*! + @brief comparison: not equal + @copydoc operator!=(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator!=(const ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) != rhs; + } + + /*! 
+ @brief comparison: less than + + Compares whether one JSON value @a lhs is less than another JSON value @a + rhs according to the following rules: + - If @a lhs and @a rhs have the same type, the values are compared using + the default `<` operator. + - Integer and floating-point numbers are automatically converted before + comparison + - In case @a lhs and @a rhs have different types, the values are ignored + and the order of the types is considered, see + @ref operator<(const value_t, const value_t). + + @param[in] lhs first JSON value to consider + @param[in] rhs second JSON value to consider + @return whether @a lhs is less than @a rhs + + @complexity Linear. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @liveexample{The example demonstrates comparing several JSON + types.,operator__less} + + @since version 1.0.0 + */ + friend bool operator<(const_reference lhs, const_reference rhs) noexcept + { + const auto lhs_type = lhs.type(); + const auto rhs_type = rhs.type(); + + if (lhs_type == rhs_type) + { + switch (lhs_type) + { + case value_t::array: + // note parentheses are necessary, see + // https://github.com/nlohmann/json/issues/1530 + return (*lhs.m_value.array) < (*rhs.m_value.array); + + case value_t::object: + return (*lhs.m_value.object) < (*rhs.m_value.object); + + case value_t::null: + return false; + + case value_t::string: + return (*lhs.m_value.string) < (*rhs.m_value.string); + + case value_t::boolean: + return (lhs.m_value.boolean) < (rhs.m_value.boolean); + + case value_t::number_integer: + return (lhs.m_value.number_integer) < (rhs.m_value.number_integer); + + case value_t::number_unsigned: + return (lhs.m_value.number_unsigned) < (rhs.m_value.number_unsigned); + + case value_t::number_float: + return (lhs.m_value.number_float) < (rhs.m_value.number_float); + + case value_t::binary: + return (*lhs.m_value.binary) < (*rhs.m_value.binary); + + default: + return false; + } + } + else if (lhs_type == 
value_t::number_integer && rhs_type == value_t::number_float) + { + return static_cast(lhs.m_value.number_integer) < rhs.m_value.number_float; + } + else if (lhs_type == value_t::number_float && rhs_type == value_t::number_integer) + { + return lhs.m_value.number_float < static_cast(rhs.m_value.number_integer); + } + else if (lhs_type == value_t::number_unsigned && rhs_type == value_t::number_float) + { + return static_cast(lhs.m_value.number_unsigned) < rhs.m_value.number_float; + } + else if (lhs_type == value_t::number_float && rhs_type == value_t::number_unsigned) + { + return lhs.m_value.number_float < static_cast(rhs.m_value.number_unsigned); + } + else if (lhs_type == value_t::number_integer && rhs_type == value_t::number_unsigned) + { + return lhs.m_value.number_integer < static_cast(rhs.m_value.number_unsigned); + } + else if (lhs_type == value_t::number_unsigned && rhs_type == value_t::number_integer) + { + return static_cast(lhs.m_value.number_unsigned) < rhs.m_value.number_integer; + } + + // We only reach this line if we cannot compare values. In that case, + // we compare types. Note we have to call the operator explicitly, + // because MSVC has problems otherwise. + return operator<(lhs_type, rhs_type); + } + + /*! + @brief comparison: less than + @copydoc operator<(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator<(const_reference lhs, const ScalarType rhs) noexcept + { + return lhs < basic_json(rhs); + } + + /*! + @brief comparison: less than + @copydoc operator<(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator<(const ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) < rhs; + } + + /*! + @brief comparison: less than or equal + + Compares whether one JSON value @a lhs is less than or equal to another + JSON value by calculating `not (rhs < lhs)`. 
+ + @param[in] lhs first JSON value to consider + @param[in] rhs second JSON value to consider + @return whether @a lhs is less than or equal to @a rhs + + @complexity Linear. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @liveexample{The example demonstrates comparing several JSON + types.,operator__greater} + + @since version 1.0.0 + */ + friend bool operator<=(const_reference lhs, const_reference rhs) noexcept + { + return !(rhs < lhs); + } + + /*! + @brief comparison: less than or equal + @copydoc operator<=(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator<=(const_reference lhs, const ScalarType rhs) noexcept + { + return lhs <= basic_json(rhs); + } + + /*! + @brief comparison: less than or equal + @copydoc operator<=(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator<=(const ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) <= rhs; + } + + /*! + @brief comparison: greater than + + Compares whether one JSON value @a lhs is greater than another + JSON value by calculating `not (lhs <= rhs)`. + + @param[in] lhs first JSON value to consider + @param[in] rhs second JSON value to consider + @return whether @a lhs is greater than to @a rhs + + @complexity Linear. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @liveexample{The example demonstrates comparing several JSON + types.,operator__lessequal} + + @since version 1.0.0 + */ + friend bool operator>(const_reference lhs, const_reference rhs) noexcept + { + return !(lhs <= rhs); + } + + /*! + @brief comparison: greater than + @copydoc operator>(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator>(const_reference lhs, const ScalarType rhs) noexcept + { + return lhs > basic_json(rhs); + } + + /*! 
+ @brief comparison: greater than + @copydoc operator>(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator>(const ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) > rhs; + } + + /*! + @brief comparison: greater than or equal + + Compares whether one JSON value @a lhs is greater than or equal to another + JSON value by calculating `not (lhs < rhs)`. + + @param[in] lhs first JSON value to consider + @param[in] rhs second JSON value to consider + @return whether @a lhs is greater than or equal to @a rhs + + @complexity Linear. + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @liveexample{The example demonstrates comparing several JSON + types.,operator__greaterequal} + + @since version 1.0.0 + */ + friend bool operator>=(const_reference lhs, const_reference rhs) noexcept + { + return !(lhs < rhs); + } + + /*! + @brief comparison: greater than or equal + @copydoc operator>=(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator>=(const_reference lhs, const ScalarType rhs) noexcept + { + return lhs >= basic_json(rhs); + } + + /*! + @brief comparison: greater than or equal + @copydoc operator>=(const_reference, const_reference) + */ + template::value, int>::type = 0> + friend bool operator>=(const ScalarType lhs, const_reference rhs) noexcept + { + return basic_json(lhs) >= rhs; + } + + /// @} + + /////////////////// + // serialization // + /////////////////// + + /// @name serialization + /// @{ + + /*! + @brief serialize to stream + + Serialize the given JSON value @a j to the output stream @a o. The JSON + value will be serialized using the @ref dump member function. + + - The indentation of the output can be controlled with the member variable + `width` of the output stream @a o. 
For instance, using the manipulator + `std::setw(4)` on @a o sets the indentation level to `4` and the + serialization result is the same as calling `dump(4)`. + + - The indentation character can be controlled with the member variable + `fill` of the output stream @a o. For instance, the manipulator + `std::setfill('\\t')` sets indentation to use a tab character rather than + the default space character. + + @param[in,out] o stream to serialize to + @param[in] j JSON value to serialize + + @return the stream @a o + + @throw type_error.316 if a string stored inside the JSON value is not + UTF-8 encoded + + @complexity Linear. + + @liveexample{The example below shows the serialization with different + parameters to `width` to adjust the indentation level.,operator_serialize} + + @since version 1.0.0; indentation character added in version 3.0.0 + */ + friend std::ostream& operator<<(std::ostream& o, const basic_json& j) + { + // read width member and use it as indentation parameter if nonzero + const bool pretty_print = o.width() > 0; + const auto indentation = pretty_print ? o.width() : 0; + + // reset width to 0 for subsequent calls to this stream + o.width(0); + + // do the actual serialization + serializer s(detail::output_adapter(o), o.fill()); + s.dump(j, pretty_print, false, static_cast(indentation)); + return o; + } + + /*! + @brief serialize to stream + @deprecated This stream operator is deprecated and will be removed in + future 4.0.0 of the library. Please use + @ref operator<<(std::ostream&, const basic_json&) + instead; that is, replace calls like `j >> o;` with `o << j;`. + @since version 1.0.0; deprecated since version 3.0.0 + */ + JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator<<(std::ostream&, const basic_json&)) + friend std::ostream& operator>>(const basic_json& j, std::ostream& o) + { + return o << j; + } + + /// @} + + + ///////////////////// + // deserialization // + ///////////////////// + + /// @name deserialization + /// @{ + + /*! 
+ @brief deserialize from a compatible input + + @tparam InputType A compatible input, for instance + - an std::istream object + - a FILE pointer + - a C-style array of characters + - a pointer to a null-terminated string of single byte characters + - an object obj for which begin(obj) and end(obj) produces a valid pair of + iterators. + + @param[in] i input to read from + @param[in] cb a parser callback function of type @ref parser_callback_t + which is used to control the deserialization by filtering unwanted values + (optional) + @param[in] allow_exceptions whether to throw exceptions in case of a + parse error (optional, true by default) + @param[in] ignore_comments whether comments should be ignored and treated + like whitespace (true) or yield a parse error (true); (optional, false by + default) + + @return deserialized JSON value; in case of a parse error and + @a allow_exceptions set to `false`, the return value will be + value_t::discarded. + + @throw parse_error.101 if a parse error occurs; example: `""unexpected end + of input; expected string literal""` + @throw parse_error.102 if to_unicode fails or surrogate error + @throw parse_error.103 if to_unicode fails + + @complexity Linear in the length of the input. The parser is a predictive + LL(1) parser. The complexity can be higher if the parser callback function + @a cb or reading from the input @a i has a super-linear complexity. + + @note A UTF-8 byte order mark is silently ignored. 
+ + @liveexample{The example below demonstrates the `parse()` function reading + from an array.,parse__array__parser_callback_t} + + @liveexample{The example below demonstrates the `parse()` function with + and without callback function.,parse__string__parser_callback_t} + + @liveexample{The example below demonstrates the `parse()` function with + and without callback function.,parse__istream__parser_callback_t} + + @liveexample{The example below demonstrates the `parse()` function reading + from a contiguous container.,parse__contiguouscontainer__parser_callback_t} + + @since version 2.0.3 (contiguous containers); version 3.9.0 allowed to + ignore comments. + */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json parse(InputType&& i, + const parser_callback_t cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false) + { + basic_json result; + parser(detail::input_adapter(std::forward(i)), cb, allow_exceptions, ignore_comments).parse(true, result); + return result; + } + + /*! + @brief deserialize from a pair of character iterators + + The value_type of the iterator must be a integral type with size of 1, 2 or + 4 bytes, which will be interpreted respectively as UTF-8, UTF-16 and UTF-32. + + @param[in] first iterator to start of character range + @param[in] last iterator to end of character range + @param[in] cb a parser callback function of type @ref parser_callback_t + which is used to control the deserialization by filtering unwanted values + (optional) + @param[in] allow_exceptions whether to throw exceptions in case of a + parse error (optional, true by default) + @param[in] ignore_comments whether comments should be ignored and treated + like whitespace (true) or yield a parse error (true); (optional, false by + default) + + @return deserialized JSON value; in case of a parse error and + @a allow_exceptions set to `false`, the return value will be + value_t::discarded. 
+ + @throw parse_error.101 if a parse error occurs; example: `""unexpected end + of input; expected string literal""` + @throw parse_error.102 if to_unicode fails or surrogate error + @throw parse_error.103 if to_unicode fails + */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json parse(IteratorType first, + IteratorType last, + const parser_callback_t cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false) + { + basic_json result; + parser(detail::input_adapter(std::move(first), std::move(last)), cb, allow_exceptions, ignore_comments).parse(true, result); + return result; + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, parse(ptr, ptr + len)) + static basic_json parse(detail::span_input_adapter&& i, + const parser_callback_t cb = nullptr, + const bool allow_exceptions = true, + const bool ignore_comments = false) + { + basic_json result; + parser(i.get(), cb, allow_exceptions, ignore_comments).parse(true, result); + return result; + } + + /*! + @brief check if the input is valid JSON + + Unlike the @ref parse(InputType&&, const parser_callback_t,const bool) + function, this function neither throws an exception in case of invalid JSON + input (i.e., a parse error) nor creates diagnostic information. + + @tparam InputType A compatible input, for instance + - an std::istream object + - a FILE pointer + - a C-style array of characters + - a pointer to a null-terminated string of single byte characters + - an object obj for which begin(obj) and end(obj) produces a valid pair of + iterators. + + @param[in] i input to read from + @param[in] ignore_comments whether comments should be ignored and treated + like whitespace (true) or yield a parse error (true); (optional, false by + default) + + @return Whether the input read from @a i is valid JSON. + + @complexity Linear in the length of the input. The parser is a predictive + LL(1) parser. + + @note A UTF-8 byte order mark is silently ignored. 
+ + @liveexample{The example below demonstrates the `accept()` function reading + from a string.,accept__string} + */ + template + static bool accept(InputType&& i, + const bool ignore_comments = false) + { + return parser(detail::input_adapter(std::forward(i)), nullptr, false, ignore_comments).accept(true); + } + + template + static bool accept(IteratorType first, IteratorType last, + const bool ignore_comments = false) + { + return parser(detail::input_adapter(std::move(first), std::move(last)), nullptr, false, ignore_comments).accept(true); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, accept(ptr, ptr + len)) + static bool accept(detail::span_input_adapter&& i, + const bool ignore_comments = false) + { + return parser(i.get(), nullptr, false, ignore_comments).accept(true); + } + + /*! + @brief generate SAX events + + The SAX event lister must follow the interface of @ref json_sax. + + This function reads from a compatible input. Examples are: + - an std::istream object + - a FILE pointer + - a C-style array of characters + - a pointer to a null-terminated string of single byte characters + - an object obj for which begin(obj) and end(obj) produces a valid pair of + iterators. + + @param[in] i input to read from + @param[in,out] sax SAX event listener + @param[in] format the format to parse (JSON, CBOR, MessagePack, or UBJSON) + @param[in] strict whether the input has to be consumed completely + @param[in] ignore_comments whether comments should be ignored and treated + like whitespace (true) or yield a parse error (true); (optional, false by + default); only applies to the JSON file format. + + @return return value of the last processed SAX event + + @throw parse_error.101 if a parse error occurs; example: `""unexpected end + of input; expected string literal""` + @throw parse_error.102 if to_unicode fails or surrogate error + @throw parse_error.103 if to_unicode fails + + @complexity Linear in the length of the input. 
The parser is a predictive + LL(1) parser. The complexity can be higher if the SAX consumer @a sax has + a super-linear complexity. + + @note A UTF-8 byte order mark is silently ignored. + + @liveexample{The example below demonstrates the `sax_parse()` function + reading from string and processing the events with a user-defined SAX + event consumer.,sax_parse} + + @since version 3.2.0 + */ + template + JSON_HEDLEY_NON_NULL(2) + static bool sax_parse(InputType&& i, SAX* sax, + input_format_t format = input_format_t::json, + const bool strict = true, + const bool ignore_comments = false) + { + auto ia = detail::input_adapter(std::forward(i)); + return format == input_format_t::json + ? parser(std::move(ia), nullptr, true, ignore_comments).sax_parse(sax, strict) + : detail::binary_reader(std::move(ia)).sax_parse(format, sax, strict); + } + + template + JSON_HEDLEY_NON_NULL(3) + static bool sax_parse(IteratorType first, IteratorType last, SAX* sax, + input_format_t format = input_format_t::json, + const bool strict = true, + const bool ignore_comments = false) + { + auto ia = detail::input_adapter(std::move(first), std::move(last)); + return format == input_format_t::json + ? parser(std::move(ia), nullptr, true, ignore_comments).sax_parse(sax, strict) + : detail::binary_reader(std::move(ia)).sax_parse(format, sax, strict); + } + + template + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, sax_parse(ptr, ptr + len, ...)) + JSON_HEDLEY_NON_NULL(2) + static bool sax_parse(detail::span_input_adapter&& i, SAX* sax, + input_format_t format = input_format_t::json, + const bool strict = true, + const bool ignore_comments = false) + { + auto ia = i.get(); + return format == input_format_t::json + ? parser(std::move(ia), nullptr, true, ignore_comments).sax_parse(sax, strict) + : detail::binary_reader(std::move(ia)).sax_parse(format, sax, strict); + } + + /*! + @brief deserialize from stream + @deprecated This stream operator is deprecated and will be removed in + version 4.0.0 of the library. 
Please use + @ref operator>>(std::istream&, basic_json&) + instead; that is, replace calls like `j << i;` with `i >> j;`. + @since version 1.0.0; deprecated since version 3.0.0 + */ + JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator>>(std::istream&, basic_json&)) + friend std::istream& operator<<(basic_json& j, std::istream& i) + { + return operator>>(i, j); + } + + /*! + @brief deserialize from stream + + Deserializes an input stream to a JSON value. + + @param[in,out] i input stream to read a serialized JSON value from + @param[in,out] j JSON value to write the deserialized input to + + @throw parse_error.101 in case of an unexpected token + @throw parse_error.102 if to_unicode fails or surrogate error + @throw parse_error.103 if to_unicode fails + + @complexity Linear in the length of the input. The parser is a predictive + LL(1) parser. + + @note A UTF-8 byte order mark is silently ignored. + + @liveexample{The example below shows how a JSON value is constructed by + reading a serialization from a stream.,operator_deserialize} + + @sa parse(std::istream&, const parser_callback_t) for a variant with a + parser callback function to filter values while parsing + + @since version 1.0.0 + */ + friend std::istream& operator>>(std::istream& i, basic_json& j) + { + parser(detail::input_adapter(i)).parse(false, j); + return i; + } + + /// @} + + /////////////////////////// + // convenience functions // + /////////////////////////// + + /*! + @brief return the type as string + + Returns the type name as string to be used in error messages - usually to + indicate that a function was called on a wrong JSON type. 
+ + @return a string representation of a the @a m_type member: + Value type | return value + ----------- | ------------- + null | `"null"` + boolean | `"boolean"` + string | `"string"` + number | `"number"` (for all number types) + object | `"object"` + array | `"array"` + binary | `"binary"` + discarded | `"discarded"` + + @exceptionsafety No-throw guarantee: this function never throws exceptions. + + @complexity Constant. + + @liveexample{The following code exemplifies `type_name()` for all JSON + types.,type_name} + + @sa @ref type() -- return the type of the JSON value + @sa @ref operator value_t() -- return the type of the JSON value (implicit) + + @since version 1.0.0, public since 2.1.0, `const char*` and `noexcept` + since 3.0.0 + */ + JSON_HEDLEY_RETURNS_NON_NULL + const char* type_name() const noexcept + { + { + switch (m_type) + { + case value_t::null: + return "null"; + case value_t::object: + return "object"; + case value_t::array: + return "array"; + case value_t::string: + return "string"; + case value_t::boolean: + return "boolean"; + case value_t::binary: + return "binary"; + case value_t::discarded: + return "discarded"; + default: + return "number"; + } + } + } + + + private: + ////////////////////// + // member variables // + ////////////////////// + + /// the type of the current element + value_t m_type = value_t::null; + + /// the value of the current element + json_value m_value = {}; + + ////////////////////////////////////////// + // binary serialization/deserialization // + ////////////////////////////////////////// + + /// @name binary serialization/deserialization support + /// @{ + + public: + /*! + @brief create a CBOR serialization of a given JSON value + + Serializes a given JSON value @a j to a byte vector using the CBOR (Concise + Binary Object Representation) serialization format. CBOR is a binary + serialization format which aims to be more compact than JSON itself, yet + more efficient to parse. 
+ + The library uses the following mapping from JSON values types to + CBOR types according to the CBOR specification (RFC 7049): + + JSON value type | value/range | CBOR type | first byte + --------------- | ------------------------------------------ | ---------------------------------- | --------------- + null | `null` | Null | 0xF6 + boolean | `true` | True | 0xF5 + boolean | `false` | False | 0xF4 + number_integer | -9223372036854775808..-2147483649 | Negative integer (8 bytes follow) | 0x3B + number_integer | -2147483648..-32769 | Negative integer (4 bytes follow) | 0x3A + number_integer | -32768..-129 | Negative integer (2 bytes follow) | 0x39 + number_integer | -128..-25 | Negative integer (1 byte follow) | 0x38 + number_integer | -24..-1 | Negative integer | 0x20..0x37 + number_integer | 0..23 | Integer | 0x00..0x17 + number_integer | 24..255 | Unsigned integer (1 byte follow) | 0x18 + number_integer | 256..65535 | Unsigned integer (2 bytes follow) | 0x19 + number_integer | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1A + number_integer | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1B + number_unsigned | 0..23 | Integer | 0x00..0x17 + number_unsigned | 24..255 | Unsigned integer (1 byte follow) | 0x18 + number_unsigned | 256..65535 | Unsigned integer (2 bytes follow) | 0x19 + number_unsigned | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1A + number_unsigned | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1B + number_float | *any value representable by a float* | Single-Precision Float | 0xFA + number_float | *any value NOT representable by a float* | Double-Precision Float | 0xFB + string | *length*: 0..23 | UTF-8 string | 0x60..0x77 + string | *length*: 23..255 | UTF-8 string (1 byte follow) | 0x78 + string | *length*: 256..65535 | UTF-8 string (2 bytes follow) | 0x79 + string | *length*: 65536..4294967295 | UTF-8 string (4 bytes follow) | 0x7A + string | *length*: 
4294967296..18446744073709551615 | UTF-8 string (8 bytes follow) | 0x7B + array | *size*: 0..23 | array | 0x80..0x97 + array | *size*: 23..255 | array (1 byte follow) | 0x98 + array | *size*: 256..65535 | array (2 bytes follow) | 0x99 + array | *size*: 65536..4294967295 | array (4 bytes follow) | 0x9A + array | *size*: 4294967296..18446744073709551615 | array (8 bytes follow) | 0x9B + object | *size*: 0..23 | map | 0xA0..0xB7 + object | *size*: 23..255 | map (1 byte follow) | 0xB8 + object | *size*: 256..65535 | map (2 bytes follow) | 0xB9 + object | *size*: 65536..4294967295 | map (4 bytes follow) | 0xBA + object | *size*: 4294967296..18446744073709551615 | map (8 bytes follow) | 0xBB + binary | *size*: 0..23 | byte string | 0x40..0x57 + binary | *size*: 23..255 | byte string (1 byte follow) | 0x58 + binary | *size*: 256..65535 | byte string (2 bytes follow) | 0x59 + binary | *size*: 65536..4294967295 | byte string (4 bytes follow) | 0x5A + binary | *size*: 4294967296..18446744073709551615 | byte string (8 bytes follow) | 0x5B + + @note The mapping is **complete** in the sense that any JSON value type + can be converted to a CBOR value. + + @note If NaN or Infinity are stored inside a JSON number, they are + serialized properly. This behavior differs from the @ref dump() + function which serializes NaN or Infinity to `null`. + + @note The following CBOR types are not used in the conversion: + - UTF-8 strings terminated by "break" (0x7F) + - arrays terminated by "break" (0x9F) + - maps terminated by "break" (0xBF) + - byte strings terminated by "break" (0x5F) + - date/time (0xC0..0xC1) + - bignum (0xC2..0xC3) + - decimal fraction (0xC4) + - bigfloat (0xC5) + - expected conversions (0xD5..0xD7) + - simple values (0xE0..0xF3, 0xF8) + - undefined (0xF7) + - half-precision floats (0xF9) + - break (0xFF) + + @param[in] j JSON value to serialize + @return CBOR serialization as byte vector + + @complexity Linear in the size of the JSON value @a j. 
+ + @liveexample{The example shows the serialization of a JSON value to a byte + vector in CBOR format.,to_cbor} + + @sa http://cbor.io + @sa @ref from_cbor(detail::input_adapter&&, const bool, const bool, const cbor_tag_handler_t) for the + analogous deserialization + @sa @ref to_msgpack(const basic_json&) for the related MessagePack format + @sa @ref to_ubjson(const basic_json&, const bool, const bool) for the + related UBJSON format + + @since version 2.0.9; compact representation of floating-point numbers + since version 3.8.0 + */ + static std::vector to_cbor(const basic_json& j) + { + std::vector result; + to_cbor(j, result); + return result; + } + + static void to_cbor(const basic_json& j, detail::output_adapter o) + { + binary_writer(o).write_cbor(j); + } + + static void to_cbor(const basic_json& j, detail::output_adapter o) + { + binary_writer(o).write_cbor(j); + } + + /*! + @brief create a MessagePack serialization of a given JSON value + + Serializes a given JSON value @a j to a byte vector using the MessagePack + serialization format. MessagePack is a binary serialization format which + aims to be more compact than JSON itself, yet more efficient to parse. 
+ + The library uses the following mapping from JSON values types to + MessagePack types according to the MessagePack specification: + + JSON value type | value/range | MessagePack type | first byte + --------------- | --------------------------------- | ---------------- | ---------- + null | `null` | nil | 0xC0 + boolean | `true` | true | 0xC3 + boolean | `false` | false | 0xC2 + number_integer | -9223372036854775808..-2147483649 | int64 | 0xD3 + number_integer | -2147483648..-32769 | int32 | 0xD2 + number_integer | -32768..-129 | int16 | 0xD1 + number_integer | -128..-33 | int8 | 0xD0 + number_integer | -32..-1 | negative fixint | 0xE0..0xFF + number_integer | 0..127 | positive fixint | 0x00..0x7F + number_integer | 128..255 | uint 8 | 0xCC + number_integer | 256..65535 | uint 16 | 0xCD + number_integer | 65536..4294967295 | uint 32 | 0xCE + number_integer | 4294967296..18446744073709551615 | uint 64 | 0xCF + number_unsigned | 0..127 | positive fixint | 0x00..0x7F + number_unsigned | 128..255 | uint 8 | 0xCC + number_unsigned | 256..65535 | uint 16 | 0xCD + number_unsigned | 65536..4294967295 | uint 32 | 0xCE + number_unsigned | 4294967296..18446744073709551615 | uint 64 | 0xCF + number_float | *any value representable by a float* | float 32 | 0xCA + number_float | *any value NOT representable by a float* | float 64 | 0xCB + string | *length*: 0..31 | fixstr | 0xA0..0xBF + string | *length*: 32..255 | str 8 | 0xD9 + string | *length*: 256..65535 | str 16 | 0xDA + string | *length*: 65536..4294967295 | str 32 | 0xDB + array | *size*: 0..15 | fixarray | 0x90..0x9F + array | *size*: 16..65535 | array 16 | 0xDC + array | *size*: 65536..4294967295 | array 32 | 0xDD + object | *size*: 0..15 | fix map | 0x80..0x8F + object | *size*: 16..65535 | map 16 | 0xDE + object | *size*: 65536..4294967295 | map 32 | 0xDF + binary | *size*: 0..255 | bin 8 | 0xC4 + binary | *size*: 256..65535 | bin 16 | 0xC5 + binary | *size*: 65536..4294967295 | bin 32 | 0xC6 + + @note The mapping 
is **complete** in the sense that any JSON value type + can be converted to a MessagePack value. + + @note The following values can **not** be converted to a MessagePack value: + - strings with more than 4294967295 bytes + - byte strings with more than 4294967295 bytes + - arrays with more than 4294967295 elements + - objects with more than 4294967295 elements + + @note Any MessagePack output created @ref to_msgpack can be successfully + parsed by @ref from_msgpack. + + @note If NaN or Infinity are stored inside a JSON number, they are + serialized properly. This behavior differs from the @ref dump() + function which serializes NaN or Infinity to `null`. + + @param[in] j JSON value to serialize + @return MessagePack serialization as byte vector + + @complexity Linear in the size of the JSON value @a j. + + @liveexample{The example shows the serialization of a JSON value to a byte + vector in MessagePack format.,to_msgpack} + + @sa http://msgpack.org + @sa @ref from_msgpack for the analogous deserialization + @sa @ref to_cbor(const basic_json& for the related CBOR format + @sa @ref to_ubjson(const basic_json&, const bool, const bool) for the + related UBJSON format + + @since version 2.0.9 + */ + static std::vector to_msgpack(const basic_json& j) + { + std::vector result; + to_msgpack(j, result); + return result; + } + + static void to_msgpack(const basic_json& j, detail::output_adapter o) + { + binary_writer(o).write_msgpack(j); + } + + static void to_msgpack(const basic_json& j, detail::output_adapter o) + { + binary_writer(o).write_msgpack(j); + } + + /*! + @brief create a UBJSON serialization of a given JSON value + + Serializes a given JSON value @a j to a byte vector using the UBJSON + (Universal Binary JSON) serialization format. UBJSON aims to be more compact + than JSON itself, yet more efficient to parse. 
+ + The library uses the following mapping from JSON values types to + UBJSON types according to the UBJSON specification: + + JSON value type | value/range | UBJSON type | marker + --------------- | --------------------------------- | ----------- | ------ + null | `null` | null | `Z` + boolean | `true` | true | `T` + boolean | `false` | false | `F` + number_integer | -9223372036854775808..-2147483649 | int64 | `L` + number_integer | -2147483648..-32769 | int32 | `l` + number_integer | -32768..-129 | int16 | `I` + number_integer | -128..127 | int8 | `i` + number_integer | 128..255 | uint8 | `U` + number_integer | 256..32767 | int16 | `I` + number_integer | 32768..2147483647 | int32 | `l` + number_integer | 2147483648..9223372036854775807 | int64 | `L` + number_unsigned | 0..127 | int8 | `i` + number_unsigned | 128..255 | uint8 | `U` + number_unsigned | 256..32767 | int16 | `I` + number_unsigned | 32768..2147483647 | int32 | `l` + number_unsigned | 2147483648..9223372036854775807 | int64 | `L` + number_unsigned | 2147483649..18446744073709551615 | high-precision | `H` + number_float | *any value* | float64 | `D` + string | *with shortest length indicator* | string | `S` + array | *see notes on optimized format* | array | `[` + object | *see notes on optimized format* | map | `{` + + @note The mapping is **complete** in the sense that any JSON value type + can be converted to a UBJSON value. + + @note The following values can **not** be converted to a UBJSON value: + - strings with more than 9223372036854775807 bytes (theoretical) + + @note The following markers are not used in the conversion: + - `Z`: no-op values are not created. + - `C`: single-byte strings are serialized with `S` markers. + + @note Any UBJSON output created @ref to_ubjson can be successfully parsed + by @ref from_ubjson. + + @note If NaN or Infinity are stored inside a JSON number, they are + serialized properly. 
This behavior differs from the @ref dump() + function which serializes NaN or Infinity to `null`. + + @note The optimized formats for containers are supported: Parameter + @a use_size adds size information to the beginning of a container and + removes the closing marker. Parameter @a use_type further checks + whether all elements of a container have the same type and adds the + type marker to the beginning of the container. The @a use_type + parameter must only be used together with @a use_size = true. Note + that @a use_size = true alone may result in larger representations - + the benefit of this parameter is that the receiving side is + immediately informed on the number of elements of the container. + + @note If the JSON data contains the binary type, the value stored is a list + of integers, as suggested by the UBJSON documentation. In particular, + this means that serialization and the deserialization of a JSON + containing binary values into UBJSON and back will result in a + different JSON object. + + @param[in] j JSON value to serialize + @param[in] use_size whether to add size annotations to container types + @param[in] use_type whether to add type annotations to container types + (must be combined with @a use_size = true) + @return UBJSON serialization as byte vector + + @complexity Linear in the size of the JSON value @a j. 
+ + @liveexample{The example shows the serialization of a JSON value to a byte + vector in UBJSON format.,to_ubjson} + + @sa http://ubjson.org + @sa @ref from_ubjson(detail::input_adapter&&, const bool, const bool) for the + analogous deserialization + @sa @ref to_cbor(const basic_json& for the related CBOR format + @sa @ref to_msgpack(const basic_json&) for the related MessagePack format + + @since version 3.1.0 + */ + static std::vector to_ubjson(const basic_json& j, + const bool use_size = false, + const bool use_type = false) + { + std::vector result; + to_ubjson(j, result, use_size, use_type); + return result; + } + + static void to_ubjson(const basic_json& j, detail::output_adapter o, + const bool use_size = false, const bool use_type = false) + { + binary_writer(o).write_ubjson(j, use_size, use_type); + } + + static void to_ubjson(const basic_json& j, detail::output_adapter o, + const bool use_size = false, const bool use_type = false) + { + binary_writer(o).write_ubjson(j, use_size, use_type); + } + + + /*! + @brief Serializes the given JSON object `j` to BSON and returns a vector + containing the corresponding BSON-representation. + + BSON (Binary JSON) is a binary format in which zero or more ordered key/value pairs are + stored as a single entity (a so-called document). 
+ + The library uses the following mapping from JSON values types to BSON types: + + JSON value type | value/range | BSON type | marker + --------------- | --------------------------------- | ----------- | ------ + null | `null` | null | 0x0A + boolean | `true`, `false` | boolean | 0x08 + number_integer | -9223372036854775808..-2147483649 | int64 | 0x12 + number_integer | -2147483648..2147483647 | int32 | 0x10 + number_integer | 2147483648..9223372036854775807 | int64 | 0x12 + number_unsigned | 0..2147483647 | int32 | 0x10 + number_unsigned | 2147483648..9223372036854775807 | int64 | 0x12 + number_unsigned | 9223372036854775808..18446744073709551615| -- | -- + number_float | *any value* | double | 0x01 + string | *any value* | string | 0x02 + array | *any value* | document | 0x04 + object | *any value* | document | 0x03 + binary | *any value* | binary | 0x05 + + @warning The mapping is **incomplete**, since only JSON-objects (and things + contained therein) can be serialized to BSON. + Also, integers larger than 9223372036854775807 cannot be serialized to BSON, + and the keys may not contain U+0000, since they are serialized a + zero-terminated c-strings. + + @throw out_of_range.407 if `j.is_number_unsigned() && j.get() > 9223372036854775807` + @throw out_of_range.409 if a key in `j` contains a NULL (U+0000) + @throw type_error.317 if `!j.is_object()` + + @pre The input `j` is required to be an object: `j.is_object() == true`. + + @note Any BSON output created via @ref to_bson can be successfully parsed + by @ref from_bson. + + @param[in] j JSON value to serialize + @return BSON serialization as byte vector + + @complexity Linear in the size of the JSON value @a j. 
+ + @liveexample{The example shows the serialization of a JSON value to a byte + vector in BSON format.,to_bson} + + @sa http://bsonspec.org/spec.html + @sa @ref from_bson(detail::input_adapter&&, const bool strict) for the + analogous deserialization + @sa @ref to_ubjson(const basic_json&, const bool, const bool) for the + related UBJSON format + @sa @ref to_cbor(const basic_json&) for the related CBOR format + @sa @ref to_msgpack(const basic_json&) for the related MessagePack format + */ + static std::vector to_bson(const basic_json& j) + { + std::vector result; + to_bson(j, result); + return result; + } + + /*! + @brief Serializes the given JSON object `j` to BSON and forwards the + corresponding BSON-representation to the given output_adapter `o`. + @param j The JSON object to convert to BSON. + @param o The output adapter that receives the binary BSON representation. + @pre The input `j` shall be an object: `j.is_object() == true` + @sa @ref to_bson(const basic_json&) + */ + static void to_bson(const basic_json& j, detail::output_adapter o) + { + binary_writer(o).write_bson(j); + } + + /*! + @copydoc to_bson(const basic_json&, detail::output_adapter) + */ + static void to_bson(const basic_json& j, detail::output_adapter o) + { + binary_writer(o).write_bson(j); + } + + + /*! + @brief create a JSON value from an input in CBOR format + + Deserializes a given input @a i to a JSON value using the CBOR (Concise + Binary Object Representation) serialization format. 
+ + The library maps CBOR types to JSON value types as follows: + + CBOR type | JSON value type | first byte + ---------------------- | --------------- | ---------- + Integer | number_unsigned | 0x00..0x17 + Unsigned integer | number_unsigned | 0x18 + Unsigned integer | number_unsigned | 0x19 + Unsigned integer | number_unsigned | 0x1A + Unsigned integer | number_unsigned | 0x1B + Negative integer | number_integer | 0x20..0x37 + Negative integer | number_integer | 0x38 + Negative integer | number_integer | 0x39 + Negative integer | number_integer | 0x3A + Negative integer | number_integer | 0x3B + Byte string | binary | 0x40..0x57 + Byte string | binary | 0x58 + Byte string | binary | 0x59 + Byte string | binary | 0x5A + Byte string | binary | 0x5B + UTF-8 string | string | 0x60..0x77 + UTF-8 string | string | 0x78 + UTF-8 string | string | 0x79 + UTF-8 string | string | 0x7A + UTF-8 string | string | 0x7B + UTF-8 string | string | 0x7F + array | array | 0x80..0x97 + array | array | 0x98 + array | array | 0x99 + array | array | 0x9A + array | array | 0x9B + array | array | 0x9F + map | object | 0xA0..0xB7 + map | object | 0xB8 + map | object | 0xB9 + map | object | 0xBA + map | object | 0xBB + map | object | 0xBF + False | `false` | 0xF4 + True | `true` | 0xF5 + Null | `null` | 0xF6 + Half-Precision Float | number_float | 0xF9 + Single-Precision Float | number_float | 0xFA + Double-Precision Float | number_float | 0xFB + + @warning The mapping is **incomplete** in the sense that not all CBOR + types can be converted to a JSON value. The following CBOR types + are not supported and will yield parse errors (parse_error.112): + - date/time (0xC0..0xC1) + - bignum (0xC2..0xC3) + - decimal fraction (0xC4) + - bigfloat (0xC5) + - expected conversions (0xD5..0xD7) + - simple values (0xE0..0xF3, 0xF8) + - undefined (0xF7) + + @warning CBOR allows map keys of any type, whereas JSON only allows + strings as keys in object values. 
Therefore, CBOR maps with keys + other than UTF-8 strings are rejected (parse_error.113). + + @note Any CBOR output created @ref to_cbor can be successfully parsed by + @ref from_cbor. + + @param[in] i an input in CBOR format convertible to an input adapter + @param[in] strict whether to expect the input to be consumed until EOF + (true by default) + @param[in] allow_exceptions whether to throw exceptions in case of a + parse error (optional, true by default) + @param[in] tag_handler how to treat CBOR tags (optional, error by default) + + @return deserialized JSON value; in case of a parse error and + @a allow_exceptions set to `false`, the return value will be + value_t::discarded. + + @throw parse_error.110 if the given input ends prematurely or the end of + file was not reached when @a strict was set to true + @throw parse_error.112 if unsupported features from CBOR were + used in the given input @a v or if the input is not valid CBOR + @throw parse_error.113 if a string was expected as map key, but not found + + @complexity Linear in the size of the input @a i. + + @liveexample{The example shows the deserialization of a byte vector in CBOR + format to a JSON value.,from_cbor} + + @sa http://cbor.io + @sa @ref to_cbor(const basic_json&) for the analogous serialization + @sa @ref from_msgpack(detail::input_adapter&&, const bool, const bool) for the + related MessagePack format + @sa @ref from_ubjson(detail::input_adapter&&, const bool, const bool) for the + related UBJSON format + + @since version 2.0.9; parameter @a start_index since 2.1.1; changed to + consume input adapters, removed start_index parameter, and added + @a strict parameter since 3.0.0; added @a allow_exceptions parameter + since 3.2.0; added @a tag_handler parameter since 3.9.0. 
+ */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_cbor(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward(i)); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::cbor, &sdp, strict, tag_handler); + return res ? result : basic_json(value_t::discarded); + } + + /*! + @copydoc from_cbor(detail::input_adapter&&, const bool, const bool, const cbor_tag_handler_t) + */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_cbor(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::cbor, &sdp, strict, tag_handler); + return res ? 
result : basic_json(value_t::discarded); + } + + template + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_cbor(ptr, ptr + len)) + static basic_json from_cbor(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + return from_cbor(ptr, ptr + len, strict, allow_exceptions, tag_handler); + } + + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_cbor(ptr, ptr + len)) + static basic_json from_cbor(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true, + const cbor_tag_handler_t tag_handler = cbor_tag_handler_t::error) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = i.get(); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::cbor, &sdp, strict, tag_handler); + return res ? result : basic_json(value_t::discarded); + } + + /*! + @brief create a JSON value from an input in MessagePack format + + Deserializes a given input @a i to a JSON value using the MessagePack + serialization format. 
+ + The library maps MessagePack types to JSON value types as follows: + + MessagePack type | JSON value type | first byte + ---------------- | --------------- | ---------- + positive fixint | number_unsigned | 0x00..0x7F + fixmap | object | 0x80..0x8F + fixarray | array | 0x90..0x9F + fixstr | string | 0xA0..0xBF + nil | `null` | 0xC0 + false | `false` | 0xC2 + true | `true` | 0xC3 + float 32 | number_float | 0xCA + float 64 | number_float | 0xCB + uint 8 | number_unsigned | 0xCC + uint 16 | number_unsigned | 0xCD + uint 32 | number_unsigned | 0xCE + uint 64 | number_unsigned | 0xCF + int 8 | number_integer | 0xD0 + int 16 | number_integer | 0xD1 + int 32 | number_integer | 0xD2 + int 64 | number_integer | 0xD3 + str 8 | string | 0xD9 + str 16 | string | 0xDA + str 32 | string | 0xDB + array 16 | array | 0xDC + array 32 | array | 0xDD + map 16 | object | 0xDE + map 32 | object | 0xDF + bin 8 | binary | 0xC4 + bin 16 | binary | 0xC5 + bin 32 | binary | 0xC6 + ext 8 | binary | 0xC7 + ext 16 | binary | 0xC8 + ext 32 | binary | 0xC9 + fixext 1 | binary | 0xD4 + fixext 2 | binary | 0xD5 + fixext 4 | binary | 0xD6 + fixext 8 | binary | 0xD7 + fixext 16 | binary | 0xD8 + negative fixint | number_integer | 0xE0-0xFF + + @note Any MessagePack output created @ref to_msgpack can be successfully + parsed by @ref from_msgpack. + + @param[in] i an input in MessagePack format convertible to an input + adapter + @param[in] strict whether to expect the input to be consumed until EOF + (true by default) + @param[in] allow_exceptions whether to throw exceptions in case of a + parse error (optional, true by default) + + @return deserialized JSON value; in case of a parse error and + @a allow_exceptions set to `false`, the return value will be + value_t::discarded. 
+ + @throw parse_error.110 if the given input ends prematurely or the end of + file was not reached when @a strict was set to true + @throw parse_error.112 if unsupported features from MessagePack were + used in the given input @a i or if the input is not valid MessagePack + @throw parse_error.113 if a string was expected as map key, but not found + + @complexity Linear in the size of the input @a i. + + @liveexample{The example shows the deserialization of a byte vector in + MessagePack format to a JSON value.,from_msgpack} + + @sa http://msgpack.org + @sa @ref to_msgpack(const basic_json&) for the analogous serialization + @sa @ref from_cbor(detail::input_adapter&&, const bool, const bool, const cbor_tag_handler_t) for the + related CBOR format + @sa @ref from_ubjson(detail::input_adapter&&, const bool, const bool) for + the related UBJSON format + @sa @ref from_bson(detail::input_adapter&&, const bool, const bool) for + the related BSON format + + @since version 2.0.9; parameter @a start_index since 2.1.1; changed to + consume input adapters, removed start_index parameter, and added + @a strict parameter since 3.0.0; added @a allow_exceptions parameter + since 3.2.0 + */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_msgpack(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward(i)); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::msgpack, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /*! 
+ @copydoc from_msgpack(detail::input_adapter&&, const bool, const bool) + */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_msgpack(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::msgpack, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + + template + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_msgpack(ptr, ptr + len)) + static basic_json from_msgpack(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true) + { + return from_msgpack(ptr, ptr + len, strict, allow_exceptions); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_msgpack(ptr, ptr + len)) + static basic_json from_msgpack(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = i.get(); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::msgpack, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + + /*! + @brief create a JSON value from an input in UBJSON format + + Deserializes a given input @a i to a JSON value using the UBJSON (Universal + Binary JSON) serialization format. 
+ + The library maps UBJSON types to JSON value types as follows: + + UBJSON type | JSON value type | marker + ----------- | --------------------------------------- | ------ + no-op | *no value, next value is read* | `N` + null | `null` | `Z` + false | `false` | `F` + true | `true` | `T` + float32 | number_float | `d` + float64 | number_float | `D` + uint8 | number_unsigned | `U` + int8 | number_integer | `i` + int16 | number_integer | `I` + int32 | number_integer | `l` + int64 | number_integer | `L` + high-precision number | number_integer, number_unsigned, or number_float - depends on number string | 'H' + string | string | `S` + char | string | `C` + array | array (optimized values are supported) | `[` + object | object (optimized values are supported) | `{` + + @note The mapping is **complete** in the sense that any UBJSON value can + be converted to a JSON value. + + @param[in] i an input in UBJSON format convertible to an input adapter + @param[in] strict whether to expect the input to be consumed until EOF + (true by default) + @param[in] allow_exceptions whether to throw exceptions in case of a + parse error (optional, true by default) + + @return deserialized JSON value; in case of a parse error and + @a allow_exceptions set to `false`, the return value will be + value_t::discarded. + + @throw parse_error.110 if the given input ends prematurely or the end of + file was not reached when @a strict was set to true + @throw parse_error.112 if a parse error occurs + @throw parse_error.113 if a string could not be parsed successfully + + @complexity Linear in the size of the input @a i. 
+ + @liveexample{The example shows the deserialization of a byte vector in + UBJSON format to a JSON value.,from_ubjson} + + @sa http://ubjson.org + @sa @ref to_ubjson(const basic_json&, const bool, const bool) for the + analogous serialization + @sa @ref from_cbor(detail::input_adapter&&, const bool, const bool, const cbor_tag_handler_t) for the + related CBOR format + @sa @ref from_msgpack(detail::input_adapter&&, const bool, const bool) for + the related MessagePack format + @sa @ref from_bson(detail::input_adapter&&, const bool, const bool) for + the related BSON format + + @since version 3.1.0; added @a allow_exceptions parameter since 3.2.0 + */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_ubjson(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward(i)); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::ubjson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /*! + @copydoc from_ubjson(detail::input_adapter&&, const bool, const bool) + */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_ubjson(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::ubjson, &sdp, strict); + return res ? 
result : basic_json(value_t::discarded); + } + + template + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_ubjson(ptr, ptr + len)) + static basic_json from_ubjson(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true) + { + return from_ubjson(ptr, ptr + len, strict, allow_exceptions); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_ubjson(ptr, ptr + len)) + static basic_json from_ubjson(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = i.get(); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::ubjson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + + /*! + @brief Create a JSON value from an input in BSON format + + Deserializes a given input @a i to a JSON value using the BSON (Binary JSON) + serialization format. + + The library maps BSON record types to JSON value types as follows: + + BSON type | BSON marker byte | JSON value type + --------------- | ---------------- | --------------------------- + double | 0x01 | number_float + string | 0x02 | string + document | 0x03 | object + array | 0x04 | array + binary | 0x05 | still unsupported + undefined | 0x06 | still unsupported + ObjectId | 0x07 | still unsupported + boolean | 0x08 | boolean + UTC Date-Time | 0x09 | still unsupported + null | 0x0A | null + Regular Expr. | 0x0B | still unsupported + DB Pointer | 0x0C | still unsupported + JavaScript Code | 0x0D | still unsupported + Symbol | 0x0E | still unsupported + JavaScript Code | 0x0F | still unsupported + int32 | 0x10 | number_integer + Timestamp | 0x11 | still unsupported + 128-bit decimal float | 0x13 | still unsupported + Max Key | 0x7F | still unsupported + Min Key | 0xFF | still unsupported + + @warning The mapping is **incomplete**. 
The unsupported mappings + are indicated in the table above. + + @param[in] i an input in BSON format convertible to an input adapter + @param[in] strict whether to expect the input to be consumed until EOF + (true by default) + @param[in] allow_exceptions whether to throw exceptions in case of a + parse error (optional, true by default) + + @return deserialized JSON value; in case of a parse error and + @a allow_exceptions set to `false`, the return value will be + value_t::discarded. + + @throw parse_error.114 if an unsupported BSON record type is encountered + + @complexity Linear in the size of the input @a i. + + @liveexample{The example shows the deserialization of a byte vector in + BSON format to a JSON value.,from_bson} + + @sa http://bsonspec.org/spec.html + @sa @ref to_bson(const basic_json&) for the analogous serialization + @sa @ref from_cbor(detail::input_adapter&&, const bool, const bool, const cbor_tag_handler_t) for the + related CBOR format + @sa @ref from_msgpack(detail::input_adapter&&, const bool, const bool) for + the related MessagePack format + @sa @ref from_ubjson(detail::input_adapter&&, const bool, const bool) for the + related UBJSON format + */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bson(InputType&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::forward(i)); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::bson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + /*! 
+ @copydoc from_bson(detail::input_adapter&&, const bool, const bool) + */ + template + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json from_bson(IteratorType first, IteratorType last, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = detail::input_adapter(std::move(first), std::move(last)); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::bson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + + template + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_bson(ptr, ptr + len)) + static basic_json from_bson(const T* ptr, std::size_t len, + const bool strict = true, + const bool allow_exceptions = true) + { + return from_bson(ptr, ptr + len, strict, allow_exceptions); + } + + JSON_HEDLEY_WARN_UNUSED_RESULT + JSON_HEDLEY_DEPRECATED_FOR(3.8.0, from_bson(ptr, ptr + len)) + static basic_json from_bson(detail::span_input_adapter&& i, + const bool strict = true, + const bool allow_exceptions = true) + { + basic_json result; + detail::json_sax_dom_parser sdp(result, allow_exceptions); + auto ia = i.get(); + const bool res = binary_reader(std::move(ia)).sax_parse(input_format_t::bson, &sdp, strict); + return res ? result : basic_json(value_t::discarded); + } + /// @} + + ////////////////////////// + // JSON Pointer support // + ////////////////////////// + + /// @name JSON Pointer functions + /// @{ + + /*! + @brief access specified element via JSON Pointer + + Uses a JSON pointer to retrieve a reference to the respective JSON value. + No bound checking is performed. Similar to @ref operator[](const typename + object_t::key_type&), `null` values are created in arrays and objects if + necessary. + + In particular: + - If the JSON pointer points to an object key that does not exist, it + is created an filled with a `null` value before a reference to it + is returned. 
+ - If the JSON pointer points to an array index that does not exist, it + is created an filled with a `null` value before a reference to it + is returned. All indices between the current maximum and the given + index are also filled with `null`. + - The special value `-` is treated as a synonym for the index past the + end. + + @param[in] ptr a JSON pointer + + @return reference to the element pointed to by @a ptr + + @complexity Constant. + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.404 if the JSON pointer can not be resolved + + @liveexample{The behavior is shown in the example.,operatorjson_pointer} + + @since version 2.0.0 + */ + reference operator[](const json_pointer& ptr) + { + return ptr.get_unchecked(this); + } + + /*! + @brief access specified element via JSON Pointer + + Uses a JSON pointer to retrieve a reference to the respective JSON value. + No bound checking is performed. The function does not change the JSON + value; no `null` values are created. In particular, the special value + `-` yields an exception. + + @param[in] ptr JSON pointer to the desired element + + @return const reference to the element pointed to by @a ptr + + @complexity Constant. + + @throw parse_error.106 if an array index begins with '0' + @throw parse_error.109 if an array index was not a number + @throw out_of_range.402 if the array index '-' is used + @throw out_of_range.404 if the JSON pointer can not be resolved + + @liveexample{The behavior is shown in the example.,operatorjson_pointer_const} + + @since version 2.0.0 + */ + const_reference operator[](const json_pointer& ptr) const + { + return ptr.get_unchecked(this); + } + + /*! + @brief access specified element via JSON Pointer + + Returns a reference to the element at with specified JSON pointer @a ptr, + with bounds checking. 
+ + @param[in] ptr JSON pointer to the desired element + + @return reference to the element pointed to by @a ptr + + @throw parse_error.106 if an array index in the passed JSON pointer @a ptr + begins with '0'. See example below. + + @throw parse_error.109 if an array index in the passed JSON pointer @a ptr + is not a number. See example below. + + @throw out_of_range.401 if an array index in the passed JSON pointer @a ptr + is out of range. See example below. + + @throw out_of_range.402 if the array index '-' is used in the passed JSON + pointer @a ptr. As `at` provides checked access (and no elements are + implicitly inserted), the index '-' is always invalid. See example below. + + @throw out_of_range.403 if the JSON pointer describes a key of an object + which cannot be found. See example below. + + @throw out_of_range.404 if the JSON pointer @a ptr can not be resolved. + See example below. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Constant. + + @since version 2.0.0 + + @liveexample{The behavior is shown in the example.,at_json_pointer} + */ + reference at(const json_pointer& ptr) + { + return ptr.get_checked(this); + } + + /*! + @brief access specified element via JSON Pointer + + Returns a const reference to the element at with specified JSON pointer @a + ptr, with bounds checking. + + @param[in] ptr JSON pointer to the desired element + + @return reference to the element pointed to by @a ptr + + @throw parse_error.106 if an array index in the passed JSON pointer @a ptr + begins with '0'. See example below. + + @throw parse_error.109 if an array index in the passed JSON pointer @a ptr + is not a number. See example below. + + @throw out_of_range.401 if an array index in the passed JSON pointer @a ptr + is out of range. See example below. + + @throw out_of_range.402 if the array index '-' is used in the passed JSON + pointer @a ptr. 
As `at` provides checked access (and no elements are + implicitly inserted), the index '-' is always invalid. See example below. + + @throw out_of_range.403 if the JSON pointer describes a key of an object + which cannot be found. See example below. + + @throw out_of_range.404 if the JSON pointer @a ptr can not be resolved. + See example below. + + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Constant. + + @since version 2.0.0 + + @liveexample{The behavior is shown in the example.,at_json_pointer_const} + */ + const_reference at(const json_pointer& ptr) const + { + return ptr.get_checked(this); + } + + /*! + @brief return flattened JSON value + + The function creates a JSON object whose keys are JSON pointers (see [RFC + 6901](https://tools.ietf.org/html/rfc6901)) and whose values are all + primitive. The original JSON value can be restored using the @ref + unflatten() function. + + @return an object that maps JSON pointers to primitive values + + @note Empty objects and arrays are flattened to `null` and will not be + reconstructed correctly by the @ref unflatten() function. + + @complexity Linear in the size the JSON value. + + @liveexample{The following code shows how a JSON object is flattened to an + object whose keys consist of JSON pointers.,flatten} + + @sa @ref unflatten() for the reverse function + + @since version 2.0.0 + */ + basic_json flatten() const + { + basic_json result(value_t::object); + json_pointer::flatten("", *this, result); + return result; + } + + /*! + @brief unflatten a previously flattened JSON value + + The function restores the arbitrary nesting of a JSON value that has been + flattened before using the @ref flatten() function. The JSON value must + meet certain constraints: + 1. The value must be an object. + 2. The keys must be JSON pointers (see + [RFC 6901](https://tools.ietf.org/html/rfc6901)) + 3. The mapped values must be primitive JSON types. 
+ + @return the original JSON from a flattened version + + @note Empty objects and arrays are flattened by @ref flatten() to `null` + values and can not unflattened to their original type. Apart from + this example, for a JSON value `j`, the following is always true: + `j == j.flatten().unflatten()`. + + @complexity Linear in the size the JSON value. + + @throw type_error.314 if value is not an object + @throw type_error.315 if object values are not primitive + + @liveexample{The following code shows how a flattened JSON object is + unflattened into the original nested JSON object.,unflatten} + + @sa @ref flatten() for the reverse function + + @since version 2.0.0 + */ + basic_json unflatten() const + { + return json_pointer::unflatten(*this); + } + + /// @} + + ////////////////////////// + // JSON Patch functions // + ////////////////////////// + + /// @name JSON Patch functions + /// @{ + + /*! + @brief applies a JSON patch + + [JSON Patch](http://jsonpatch.com) defines a JSON document structure for + expressing a sequence of operations to apply to a JSON) document. With + this function, a JSON Patch is applied to the current JSON value by + executing all operations from the patch. + + @param[in] json_patch JSON patch document + @return patched document + + @note The application of a patch is atomic: Either all operations succeed + and the patched document is returned or an exception is thrown. In + any case, the original value is not changed: the patch is applied + to a copy of the value. + + @throw parse_error.104 if the JSON patch does not consist of an array of + objects + + @throw parse_error.105 if the JSON patch is malformed (e.g., mandatory + attributes are missing); example: `"operation add must have member path"` + + @throw out_of_range.401 if an array index is out of range. 
+ + @throw out_of_range.403 if a JSON pointer inside the patch could not be + resolved successfully in the current JSON value; example: `"key baz not + found"` + + @throw out_of_range.405 if JSON pointer has no parent ("add", "remove", + "move") + + @throw other_error.501 if "test" operation was unsuccessful + + @complexity Linear in the size of the JSON value and the length of the + JSON patch. As usually only a fraction of the JSON value is affected by + the patch, the complexity can usually be neglected. + + @liveexample{The following code shows how a JSON patch is applied to a + value.,patch} + + @sa @ref diff -- create a JSON patch by comparing two JSON values + + @sa [RFC 6902 (JSON Patch)](https://tools.ietf.org/html/rfc6902) + @sa [RFC 6901 (JSON Pointer)](https://tools.ietf.org/html/rfc6901) + + @since version 2.0.0 + */ + basic_json patch(const basic_json& json_patch) const + { + // make a working copy to apply the patch to + basic_json result = *this; + + // the valid JSON Patch operations + enum class patch_operations {add, remove, replace, move, copy, test, invalid}; + + const auto get_op = [](const std::string & op) + { + if (op == "add") + { + return patch_operations::add; + } + if (op == "remove") + { + return patch_operations::remove; + } + if (op == "replace") + { + return patch_operations::replace; + } + if (op == "move") + { + return patch_operations::move; + } + if (op == "copy") + { + return patch_operations::copy; + } + if (op == "test") + { + return patch_operations::test; + } + + return patch_operations::invalid; + }; + + // wrapper for "add" operation; add value at ptr + const auto operation_add = [&result](json_pointer & ptr, basic_json val) + { + // adding to the root of the target document means replacing it + if (ptr.empty()) + { + result = val; + return; + } + + // make sure the top element of the pointer exists + json_pointer top_pointer = ptr.top(); + if (top_pointer != ptr) + { + result.at(top_pointer); + } + + // get reference to 
parent of JSON pointer ptr + const auto last_path = ptr.back(); + ptr.pop_back(); + basic_json& parent = result[ptr]; + + switch (parent.m_type) + { + case value_t::null: + case value_t::object: + { + // use operator[] to add value + parent[last_path] = val; + break; + } + + case value_t::array: + { + if (last_path == "-") + { + // special case: append to back + parent.push_back(val); + } + else + { + const auto idx = json_pointer::array_index(last_path); + if (JSON_HEDLEY_UNLIKELY(idx > parent.size())) + { + // avoid undefined behavior + JSON_THROW(out_of_range::create(401, "array index " + std::to_string(idx) + " is out of range")); + } + + // default case: insert add offset + parent.insert(parent.begin() + static_cast(idx), val); + } + break; + } + + // if there exists a parent it cannot be primitive + default: // LCOV_EXCL_LINE + JSON_ASSERT(false); // LCOV_EXCL_LINE + } + }; + + // wrapper for "remove" operation; remove value at ptr + const auto operation_remove = [&result](json_pointer & ptr) + { + // get reference to parent of JSON pointer ptr + const auto last_path = ptr.back(); + ptr.pop_back(); + basic_json& parent = result.at(ptr); + + // remove child + if (parent.is_object()) + { + // perform range check + auto it = parent.find(last_path); + if (JSON_HEDLEY_LIKELY(it != parent.end())) + { + parent.erase(it); + } + else + { + JSON_THROW(out_of_range::create(403, "key '" + last_path + "' not found")); + } + } + else if (parent.is_array()) + { + // note erase performs range check + parent.erase(json_pointer::array_index(last_path)); + } + }; + + // type check: top level value must be an array + if (JSON_HEDLEY_UNLIKELY(!json_patch.is_array())) + { + JSON_THROW(parse_error::create(104, 0, "JSON patch must be an array of objects")); + } + + // iterate and apply the operations + for (const auto& val : json_patch) + { + // wrapper to get a value for an operation + const auto get_value = [&val](const std::string & op, + const std::string & member, + bool 
string_type) -> basic_json & + { + // find value + auto it = val.m_value.object->find(member); + + // context-sensitive error message + const auto error_msg = (op == "op") ? "operation" : "operation '" + op + "'"; + + // check if desired value is present + if (JSON_HEDLEY_UNLIKELY(it == val.m_value.object->end())) + { + JSON_THROW(parse_error::create(105, 0, error_msg + " must have member '" + member + "'")); + } + + // check if result is of type string + if (JSON_HEDLEY_UNLIKELY(string_type && !it->second.is_string())) + { + JSON_THROW(parse_error::create(105, 0, error_msg + " must have string member '" + member + "'")); + } + + // no error: return value + return it->second; + }; + + // type check: every element of the array must be an object + if (JSON_HEDLEY_UNLIKELY(!val.is_object())) + { + JSON_THROW(parse_error::create(104, 0, "JSON patch must be an array of objects")); + } + + // collect mandatory members + const auto op = get_value("op", "op", true).template get(); + const auto path = get_value(op, "path", true).template get(); + json_pointer ptr(path); + + switch (get_op(op)) + { + case patch_operations::add: + { + operation_add(ptr, get_value("add", "value", false)); + break; + } + + case patch_operations::remove: + { + operation_remove(ptr); + break; + } + + case patch_operations::replace: + { + // the "path" location must exist - use at() + result.at(ptr) = get_value("replace", "value", false); + break; + } + + case patch_operations::move: + { + const auto from_path = get_value("move", "from", true).template get(); + json_pointer from_ptr(from_path); + + // the "from" location must exist - use at() + basic_json v = result.at(from_ptr); + + // The move operation is functionally identical to a + // "remove" operation on the "from" location, followed + // immediately by an "add" operation at the target + // location with the value that was just removed. 
+ operation_remove(from_ptr); + operation_add(ptr, v); + break; + } + + case patch_operations::copy: + { + const auto from_path = get_value("copy", "from", true).template get(); + const json_pointer from_ptr(from_path); + + // the "from" location must exist - use at() + basic_json v = result.at(from_ptr); + + // The copy is functionally identical to an "add" + // operation at the target location using the value + // specified in the "from" member. + operation_add(ptr, v); + break; + } + + case patch_operations::test: + { + bool success = false; + JSON_TRY + { + // check if "value" matches the one at "path" + // the "path" location must exist - use at() + success = (result.at(ptr) == get_value("test", "value", false)); + } + JSON_INTERNAL_CATCH (out_of_range&) + { + // ignore out of range errors: success remains false + } + + // throw an exception if test fails + if (JSON_HEDLEY_UNLIKELY(!success)) + { + JSON_THROW(other_error::create(501, "unsuccessful: " + val.dump())); + } + + break; + } + + default: + { + // op must be "add", "remove", "replace", "move", "copy", or + // "test" + JSON_THROW(parse_error::create(105, 0, "operation value '" + op + "' is invalid")); + } + } + } + + return result; + } + + /*! + @brief creates a diff as a JSON patch + + Creates a [JSON Patch](http://jsonpatch.com) so that value @a source can + be changed into the value @a target by calling @ref patch function. + + @invariant For two JSON values @a source and @a target, the following code + yields always `true`: + @code {.cpp} + source.patch(diff(source, target)) == target; + @endcode + + @note Currently, only `remove`, `add`, and `replace` operations are + generated. + + @param[in] source JSON value to compare from + @param[in] target JSON value to compare against + @param[in] path helper value to create JSON pointers + + @return a JSON patch to convert the @a source to @a target + + @complexity Linear in the lengths of @a source and @a target. 
+ + @liveexample{The following code shows how a JSON patch is created as a + diff for two JSON values.,diff} + + @sa @ref patch -- apply a JSON patch + @sa @ref merge_patch -- apply a JSON Merge Patch + + @sa [RFC 6902 (JSON Patch)](https://tools.ietf.org/html/rfc6902) + + @since version 2.0.0 + */ + JSON_HEDLEY_WARN_UNUSED_RESULT + static basic_json diff(const basic_json& source, const basic_json& target, + const std::string& path = "") + { + // the patch + basic_json result(value_t::array); + + // if the values are the same, return empty patch + if (source == target) + { + return result; + } + + if (source.type() != target.type()) + { + // different types: replace value + result.push_back( + { + {"op", "replace"}, {"path", path}, {"value", target} + }); + return result; + } + + switch (source.type()) + { + case value_t::array: + { + // first pass: traverse common elements + std::size_t i = 0; + while (i < source.size() && i < target.size()) + { + // recursive call to compare array values at index i + auto temp_diff = diff(source[i], target[i], path + "/" + std::to_string(i)); + result.insert(result.end(), temp_diff.begin(), temp_diff.end()); + ++i; + } + + // i now reached the end of at least one array + // in a second pass, traverse the remaining elements + + // remove my remaining elements + const auto end_index = static_cast(result.size()); + while (i < source.size()) + { + // add operations in reverse order to avoid invalid + // indices + result.insert(result.begin() + end_index, object( + { + {"op", "remove"}, + {"path", path + "/" + std::to_string(i)} + })); + ++i; + } + + // add other remaining elements + while (i < target.size()) + { + result.push_back( + { + {"op", "add"}, + {"path", path + "/-"}, + {"value", target[i]} + }); + ++i; + } + + break; + } + + case value_t::object: + { + // first pass: traverse this object's elements + for (auto it = source.cbegin(); it != source.cend(); ++it) + { + // escape the key name to be used in a JSON patch + const 
auto key = json_pointer::escape(it.key()); + + if (target.find(it.key()) != target.end()) + { + // recursive call to compare object values at key it + auto temp_diff = diff(it.value(), target[it.key()], path + "/" + key); + result.insert(result.end(), temp_diff.begin(), temp_diff.end()); + } + else + { + // found a key that is not in o -> remove it + result.push_back(object( + { + {"op", "remove"}, {"path", path + "/" + key} + })); + } + } + + // second pass: traverse other object's elements + for (auto it = target.cbegin(); it != target.cend(); ++it) + { + if (source.find(it.key()) == source.end()) + { + // found a key that is not in this -> add it + const auto key = json_pointer::escape(it.key()); + result.push_back( + { + {"op", "add"}, {"path", path + "/" + key}, + {"value", it.value()} + }); + } + } + + break; + } + + default: + { + // both primitive type: replace value + result.push_back( + { + {"op", "replace"}, {"path", path}, {"value", target} + }); + break; + } + } + + return result; + } + + /// @} + + //////////////////////////////// + // JSON Merge Patch functions // + //////////////////////////////// + + /// @name JSON Merge Patch functions + /// @{ + + /*! + @brief applies a JSON Merge Patch + + The merge patch format is primarily intended for use with the HTTP PATCH + method as a means of describing a set of modifications to a target + resource's content. This function applies a merge patch to the current + JSON value. 
+ + The function implements the following algorithm from Section 2 of + [RFC 7396 (JSON Merge Patch)](https://tools.ietf.org/html/rfc7396): + + ``` + define MergePatch(Target, Patch): + if Patch is an Object: + if Target is not an Object: + Target = {} // Ignore the contents and set it to an empty Object + for each Name/Value pair in Patch: + if Value is null: + if Name exists in Target: + remove the Name/Value pair from Target + else: + Target[Name] = MergePatch(Target[Name], Value) + return Target + else: + return Patch + ``` + + Thereby, `Target` is the current object; that is, the patch is applied to + the current value. + + @param[in] apply_patch the patch to apply + + @complexity Linear in the lengths of @a patch. + + @liveexample{The following code shows how a JSON Merge Patch is applied to + a JSON document.,merge_patch} + + @sa @ref patch -- apply a JSON patch + @sa [RFC 7396 (JSON Merge Patch)](https://tools.ietf.org/html/rfc7396) + + @since version 3.0.0 + */ + void merge_patch(const basic_json& apply_patch) + { + if (apply_patch.is_object()) + { + if (!is_object()) + { + *this = object(); + } + for (auto it = apply_patch.begin(); it != apply_patch.end(); ++it) + { + if (it.value().is_null()) + { + erase(it.key()); + } + else + { + operator[](it.key()).merge_patch(it.value()); + } + } + } + else + { + *this = apply_patch; + } + } + + /// @} +}; + +/*! +@brief user-defined to_string function for JSON values + +This function implements a user-defined to_string for JSON objects. + +@param[in] j a JSON object +@return a std::string object +*/ + +NLOHMANN_BASIC_JSON_TPL_DECLARATION +std::string to_string(const NLOHMANN_BASIC_JSON_TPL& j) +{ + return j.dump(); +} +} // namespace nlohmann + +/////////////////////// +// nonmember support // +/////////////////////// + +// specialization of std::swap, and std::hash +namespace std +{ + +/// hash value for JSON objects +template<> +struct hash +{ + /*! 
+ @brief return a hash value for a JSON object + + @since version 1.0.0 + */ + std::size_t operator()(const nlohmann::json& j) const + { + return nlohmann::detail::hash(j); + } +}; + +/// specialization for std::less +/// @note: do not remove the space after '<', +/// see https://github.com/nlohmann/json/pull/679 +template<> +struct less<::nlohmann::detail::value_t> +{ + /*! + @brief compare two value_t enum values + @since version 3.0.0 + */ + bool operator()(nlohmann::detail::value_t lhs, + nlohmann::detail::value_t rhs) const noexcept + { + return nlohmann::detail::operator<(lhs, rhs); + } +}; + +// C++20 prohibit function specialization in the std namespace. +#ifndef JSON_HAS_CPP_20 + +/*! +@brief exchanges the values of two JSON objects + +@since version 1.0.0 +*/ +template<> +inline void swap(nlohmann::json& j1, nlohmann::json& j2) noexcept( + is_nothrow_move_constructible::value&& + is_nothrow_move_assignable::value + ) +{ + j1.swap(j2); +} + +#endif + +} // namespace std + +/*! +@brief user-defined string literal for JSON values + +This operator implements a user-defined string literal for JSON objects. It +can be used by adding `"_json"` to a string literal and returns a JSON object +if no parse error occurred. + +@param[in] s a string representation of a JSON object +@param[in] n the length of string @a s +@return a JSON object + +@since version 1.0.0 +*/ +JSON_HEDLEY_NON_NULL(1) +inline nlohmann::json operator "" _json(const char* s, std::size_t n) +{ + return nlohmann::json::parse(s, s + n); +} + +/*! +@brief user-defined string literal for JSON pointer + +This operator implements a user-defined string literal for JSON Pointers. It +can be used by adding `"_json_pointer"` to a string literal and returns a JSON pointer +object if no parse error occurred. 
+ +@param[in] s a string representation of a JSON Pointer +@param[in] n the length of string @a s +@return a JSON pointer object + +@since version 2.0.0 +*/ +JSON_HEDLEY_NON_NULL(1) +inline nlohmann::json::json_pointer operator "" _json_pointer(const char* s, std::size_t n) +{ + return nlohmann::json::json_pointer(std::string(s, n)); +} + +// #include + + +// restore GCC/clang diagnostic settings +#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__) + #pragma GCC diagnostic pop +#endif +#if defined(__clang__) + #pragma GCC diagnostic pop +#endif + +// clean up +#undef JSON_ASSERT +#undef JSON_INTERNAL_CATCH +#undef JSON_CATCH +#undef JSON_THROW +#undef JSON_TRY +#undef JSON_HAS_CPP_14 +#undef JSON_HAS_CPP_17 +#undef NLOHMANN_BASIC_JSON_TPL_DECLARATION +#undef NLOHMANN_BASIC_JSON_TPL +#undef JSON_EXPLICIT + +// #include +#undef JSON_HEDLEY_ALWAYS_INLINE +#undef JSON_HEDLEY_ARM_VERSION +#undef JSON_HEDLEY_ARM_VERSION_CHECK +#undef JSON_HEDLEY_ARRAY_PARAM +#undef JSON_HEDLEY_ASSUME +#undef JSON_HEDLEY_BEGIN_C_DECLS +#undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE +#undef JSON_HEDLEY_CLANG_HAS_BUILTIN +#undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_CLANG_HAS_EXTENSION +#undef JSON_HEDLEY_CLANG_HAS_FEATURE +#undef JSON_HEDLEY_CLANG_HAS_WARNING +#undef JSON_HEDLEY_COMPCERT_VERSION +#undef JSON_HEDLEY_COMPCERT_VERSION_CHECK +#undef JSON_HEDLEY_CONCAT +#undef JSON_HEDLEY_CONCAT3 +#undef JSON_HEDLEY_CONCAT3_EX +#undef JSON_HEDLEY_CONCAT_EX +#undef JSON_HEDLEY_CONST +#undef JSON_HEDLEY_CONSTEXPR +#undef JSON_HEDLEY_CONST_CAST +#undef JSON_HEDLEY_CPP_CAST +#undef JSON_HEDLEY_CRAY_VERSION +#undef JSON_HEDLEY_CRAY_VERSION_CHECK +#undef JSON_HEDLEY_C_DECL +#undef JSON_HEDLEY_DEPRECATED +#undef JSON_HEDLEY_DEPRECATED_FOR +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#undef 
JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#undef JSON_HEDLEY_DIAGNOSTIC_POP +#undef JSON_HEDLEY_DIAGNOSTIC_PUSH +#undef JSON_HEDLEY_DMC_VERSION +#undef JSON_HEDLEY_DMC_VERSION_CHECK +#undef JSON_HEDLEY_EMPTY_BASES +#undef JSON_HEDLEY_EMSCRIPTEN_VERSION +#undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK +#undef JSON_HEDLEY_END_C_DECLS +#undef JSON_HEDLEY_FLAGS +#undef JSON_HEDLEY_FLAGS_CAST +#undef JSON_HEDLEY_GCC_HAS_ATTRIBUTE +#undef JSON_HEDLEY_GCC_HAS_BUILTIN +#undef JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_GCC_HAS_EXTENSION +#undef JSON_HEDLEY_GCC_HAS_FEATURE +#undef JSON_HEDLEY_GCC_HAS_WARNING +#undef JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK +#undef JSON_HEDLEY_GCC_VERSION +#undef JSON_HEDLEY_GCC_VERSION_CHECK +#undef JSON_HEDLEY_GNUC_HAS_ATTRIBUTE +#undef JSON_HEDLEY_GNUC_HAS_BUILTIN +#undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_GNUC_HAS_EXTENSION +#undef JSON_HEDLEY_GNUC_HAS_FEATURE +#undef JSON_HEDLEY_GNUC_HAS_WARNING +#undef JSON_HEDLEY_GNUC_VERSION +#undef JSON_HEDLEY_GNUC_VERSION_CHECK +#undef JSON_HEDLEY_HAS_ATTRIBUTE +#undef JSON_HEDLEY_HAS_BUILTIN +#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE +#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS +#undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE +#undef JSON_HEDLEY_HAS_EXTENSION +#undef JSON_HEDLEY_HAS_FEATURE +#undef JSON_HEDLEY_HAS_WARNING +#undef JSON_HEDLEY_IAR_VERSION +#undef JSON_HEDLEY_IAR_VERSION_CHECK +#undef JSON_HEDLEY_IBM_VERSION +#undef JSON_HEDLEY_IBM_VERSION_CHECK +#undef JSON_HEDLEY_IMPORT +#undef JSON_HEDLEY_INLINE +#undef JSON_HEDLEY_INTEL_VERSION +#undef JSON_HEDLEY_INTEL_VERSION_CHECK +#undef JSON_HEDLEY_IS_CONSTANT +#undef JSON_HEDLEY_IS_CONSTEXPR_ +#undef JSON_HEDLEY_LIKELY +#undef JSON_HEDLEY_MALLOC +#undef JSON_HEDLEY_MESSAGE +#undef JSON_HEDLEY_MSVC_VERSION +#undef JSON_HEDLEY_MSVC_VERSION_CHECK +#undef 
JSON_HEDLEY_NEVER_INLINE +#undef JSON_HEDLEY_NON_NULL +#undef JSON_HEDLEY_NO_ESCAPE +#undef JSON_HEDLEY_NO_RETURN +#undef JSON_HEDLEY_NO_THROW +#undef JSON_HEDLEY_NULL +#undef JSON_HEDLEY_PELLES_VERSION +#undef JSON_HEDLEY_PELLES_VERSION_CHECK +#undef JSON_HEDLEY_PGI_VERSION +#undef JSON_HEDLEY_PGI_VERSION_CHECK +#undef JSON_HEDLEY_PREDICT +#undef JSON_HEDLEY_PRINTF_FORMAT +#undef JSON_HEDLEY_PRIVATE +#undef JSON_HEDLEY_PUBLIC +#undef JSON_HEDLEY_PURE +#undef JSON_HEDLEY_REINTERPRET_CAST +#undef JSON_HEDLEY_REQUIRE +#undef JSON_HEDLEY_REQUIRE_CONSTEXPR +#undef JSON_HEDLEY_REQUIRE_MSG +#undef JSON_HEDLEY_RESTRICT +#undef JSON_HEDLEY_RETURNS_NON_NULL +#undef JSON_HEDLEY_SENTINEL +#undef JSON_HEDLEY_STATIC_ASSERT +#undef JSON_HEDLEY_STATIC_CAST +#undef JSON_HEDLEY_STRINGIFY +#undef JSON_HEDLEY_STRINGIFY_EX +#undef JSON_HEDLEY_SUNPRO_VERSION +#undef JSON_HEDLEY_SUNPRO_VERSION_CHECK +#undef JSON_HEDLEY_TINYC_VERSION +#undef JSON_HEDLEY_TINYC_VERSION_CHECK +#undef JSON_HEDLEY_TI_ARMCL_VERSION +#undef JSON_HEDLEY_TI_ARMCL_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL2000_VERSION +#undef JSON_HEDLEY_TI_CL2000_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL430_VERSION +#undef JSON_HEDLEY_TI_CL430_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL6X_VERSION +#undef JSON_HEDLEY_TI_CL6X_VERSION_CHECK +#undef JSON_HEDLEY_TI_CL7X_VERSION +#undef JSON_HEDLEY_TI_CL7X_VERSION_CHECK +#undef JSON_HEDLEY_TI_CLPRU_VERSION +#undef JSON_HEDLEY_TI_CLPRU_VERSION_CHECK +#undef JSON_HEDLEY_TI_VERSION +#undef JSON_HEDLEY_TI_VERSION_CHECK +#undef JSON_HEDLEY_UNAVAILABLE +#undef JSON_HEDLEY_UNLIKELY +#undef JSON_HEDLEY_UNPREDICTABLE +#undef JSON_HEDLEY_UNREACHABLE +#undef JSON_HEDLEY_UNREACHABLE_RETURN +#undef JSON_HEDLEY_VERSION +#undef JSON_HEDLEY_VERSION_DECODE_MAJOR +#undef JSON_HEDLEY_VERSION_DECODE_MINOR +#undef JSON_HEDLEY_VERSION_DECODE_REVISION +#undef JSON_HEDLEY_VERSION_ENCODE +#undef JSON_HEDLEY_WARNING +#undef JSON_HEDLEY_WARN_UNUSED_RESULT +#undef JSON_HEDLEY_WARN_UNUSED_RESULT_MSG +#undef 
JSON_HEDLEY_FALL_THROUGH + + + +#endif // INCLUDE_NLOHMANN_JSON_HPP_ diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/phonemizer.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/phonemizer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..80ad636f994f5a1e3dcff07bb9db2683e6897793 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/phonemizer.cpp @@ -0,0 +1,228 @@ +#include "phonemizer.h" +#include +#include "ext/ZCharScanner.h" + +int32_t GetID(const std::vector& In, const std::string &InStr) +{ + for (const IdStr& It : In) + if (It.STR == InStr) + return It.ID; + + return -1; +} + +std::string GetSTR(const std::vector& In, int32_t InID) +{ + for (const IdStr& It : In) + if (It.ID == InID) + return It.STR; + + return ""; + +} + +std::vector Phonemizer::GetDelimitedFile(const std::string &InFname) +{ + + + std::ifstream InFile (InFname); + + int32_t CuID; + std::string Tok; + std::vector RetVec; + + + std::string Line; + while (std::getline(InFile, Line)) { + + if (Line.find("\t") == std::string::npos) + continue; + + + ZStringDelimiter Deline(Line); + Deline.AddDelimiter("\t"); + + CuID = stoi(Deline[1]); + Tok = Deline[0]; + + + RetVec.push_back(IdStr{CuID,Tok}); + + } + + return RetVec; + + +} + +void Phonemizer::LoadDictionary(const std::string &InDictFn) +{ + + + + std::ifstream InFile (InDictFn); + + std::string Word; + std::string Phn; + + + if (Dictionary.size()) + Dictionary.clear(); + + + + + std::string Line; + while (std::getline(InFile, Line)) { + + if (Line.find("\t") == std::string::npos) + continue; + + + ZStringDelimiter Deline(Line); + Deline.AddDelimiter("\t"); + + Word = Deline[0]; + Phn = Deline[1]; + + + Dictionary.push_back(StrStr{Word,Phn}); + + } + // Sort so lookup can be a bit optimized + std::sort(Dictionary.begin(),Dictionary.end()); + + +} + +std::string Phonemizer::DictLookup(const std::string &InWord) +{ + + for (size_t w = 0 ; w < Dictionary.size();w++) + 
{ + const StrStr& Entr = Dictionary[w]; + + if (Entr.Word.size() != InWord.size()) + continue; + + if (Entr.Word == InWord) + return Entr.Phn; + + } + + return ""; + +} + + + + +Phonemizer::Phonemizer() +{ + +} + +bool Phonemizer::Initialize(const std::string InPath) +{ + // Load indices + try { + CharId = GetDelimitedFile(InPath + "/char2id.txt"); + PhnId = GetDelimitedFile(InPath + "/phn2id.txt"); + + // Load model + G2pModel.Initialize(InPath + "/model"); + + LoadDictionary(InPath + "/dict.txt"); + } + catch (...){ + return false; + } + + + + + return true; + + +} + +std::string Phonemizer::ProcessWord(const std::string &InWord,float Temperature) +{ + // First we try dictionary lookup + // This is because the g2p model can be unreliable, we only want to use it for novel sentences + + std::string PhnDict = DictLookup(InWord); + if (!PhnDict.empty()) + return PhnDict; + + std::vector InIndexes; + InIndexes.reserve(InWord.size()); + + // Turn word into indices + for (const char ch : InWord) + { + std::string Single(1,ch); + int32_t Idx = GetID(CharId,Single); + + if (Idx != -1) + InIndexes.push_back(Idx); + + + } + + TFTensor PhnPrediction = G2pModel.DoInference(InIndexes,Temperature); + + + std::string RetStr = ""; + bool FirstIter = true; + + for (int32_t PhnIdx : PhnPrediction.Data) + { + std::string PhnTxt = GetSTR(PhnId,PhnIdx); + if (!PhnTxt.empty()) + { + if (!FirstIter) + RetStr.append(" "); + + RetStr.append(PhnTxt); + + } + + FirstIter = false; + } + + + + return RetStr; + +} + +std::string Phonemizer::GetPhnLanguage() const +{ + return PhnLanguage; +} + +void Phonemizer::SetPhnLanguage(const std::string &value) +{ + + PhnLanguage = value; +} + +std::string Phonemizer::GetGraphemeChars() +{ + + std::string RetAllowed = ""; + for (const IdStr& Idx : CharId) + RetAllowed.append(Idx.STR); + + return RetAllowed; + +} + + + + +bool operator<(const StrStr &right, const StrStr &left) +{ + return right.Word.length() < left.Word.length(); +} diff --git 
a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/phonemizer.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/phonemizer.h new file mode 100644 index 0000000000000000000000000000000000000000..ae389d09e53b0e3be1b59563f4778d6543085289 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/phonemizer.h @@ -0,0 +1,69 @@ +#ifndef PHONEMIZER_H +#define PHONEMIZER_H +#include "tfg2p.h" +#include +#include +#include + +struct IdStr{ + int32_t ID; + std::string STR; +}; + + +struct StrStr{ + std::string Word; + std::string Phn; +}; + + +class Phonemizer +{ +private: + TFG2P G2pModel; + + std::vector CharId; + std::vector PhnId; + + + + + + + std::vector GetDelimitedFile(const std::string& InFname); + + + // Sorry, can't use set, unordered_map or any other types. (I tried) + std::vector Dictionary; + + void LoadDictionary(const std::string& InDictFn); + + std::string DictLookup(const std::string& InWord); + + + + std::string PhnLanguage; +public: + Phonemizer(); + /* + * Initialize a phonemizer + * Expects: + * - Two files consisting in TOKEN \t ID: + * -- char2id.txt: Translation from input character to ID the model can accept + * -- phn2id.txt: Translation from output ID from the model to phoneme + * - A model/ folder where a G2P-Tensorflow model was saved as SavedModel + * - dict.txt: Phonetic dictionary. First it searches the word there and if it can't be found then it uses the model. 
+ + */ + bool Initialize(const std::string InPath); + std::string ProcessWord(const std::string& InWord, float Temperature = 0.1f); + std::string GetPhnLanguage() const; + void SetPhnLanguage(const std::string &value); + + std::string GetGraphemeChars(); + +}; + + +bool operator<(const StrStr& right,const StrStr& left); +#endif // PHONEMIZER_H diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/tfg2p.cpp b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/tfg2p.cpp new file mode 100644 index 0000000000000000000000000000000000000000..bc10ec4dc566d5e2c58e507282aa2921a0ce28ff --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/tfg2p.cpp @@ -0,0 +1,69 @@ +#include "tfg2p.h" +#include +TFG2P::TFG2P() +{ + G2P = nullptr; + +} + +TFG2P::TFG2P(const std::string &SavedModelFolder) +{ + G2P = nullptr; + + Initialize(SavedModelFolder); +} + +bool TFG2P::Initialize(const std::string &SavedModelFolder) +{ + try { + + G2P = new Model(SavedModelFolder); + + } + catch (...) { + G2P = nullptr; + return false; + + } + return true; +} + +TFTensor TFG2P::DoInference(const std::vector &InputIDs, float Temperature) +{ + if (!G2P) + throw std::invalid_argument("Tried to do inference on unloaded or invalid model!"); + + // Convenience reference so that we don't have to constantly derefer pointers. + Model& Mdl = *G2P; + + + // Convenience reference so that we don't have to constantly derefer pointers. 
+ + Tensor input_ids{ Mdl,"serving_default_input_ids" }; + Tensor input_len{Mdl,"serving_default_input_len"}; + Tensor input_temp{Mdl,"serving_default_input_temperature"}; + + input_ids.set_data(InputIDs, std::vector{(int64_t)InputIDs.size()}); + input_len.set_data(std::vector{(int32_t)InputIDs.size()}); + input_temp.set_data(std::vector{Temperature}); + + + + std::vector Inputs {&input_ids,&input_len,&input_temp}; + Tensor out_ids{ Mdl,"StatefulPartitionedCall" }; + + Mdl.run(Inputs, out_ids); + + TFTensor RetTensor = VoxUtil::CopyTensor(out_ids); + + return RetTensor; + + +} + +TFG2P::~TFG2P() +{ + if (G2P) + delete G2P; + +} diff --git a/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/tfg2p.h b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/tfg2p.h new file mode 100644 index 0000000000000000000000000000000000000000..3ab4837af50ef6f8b51e1f6d2a3501c7fda518a9 --- /dev/null +++ b/TensorFlowTTS/examples/cppwin/TensorflowTTSCppInference/tfg2p.h @@ -0,0 +1,38 @@ +#ifndef TFG2P_H +#define TFG2P_H +#include "ext/CppFlow/include/Model.h" +#include "VoxCommon.hpp" + + +class TFG2P +{ +private: + Model* G2P; + +public: + TFG2P(); + TFG2P(const std::string& SavedModelFolder); + + /* + Initialize and load the model + + -> SavedModelFolder: Folder where the .pb, variables, and other characteristics of the exported SavedModel + <- Returns: (bool)Success + */ + bool Initialize(const std::string& SavedModelFolder); + + /* + Do inference on a G2P-TF-RNN model. + + -> InputIDs: Input IDs of tokens for inference + -> Temperature: Temperature of the RNN, values higher than 0.1 cause instability. 
+ + <- Returns: TFTensor containing phoneme IDs + */ + TFTensor DoInference(const std::vector& InputIDs, float Temperature = 0.1f); + + ~TFG2P(); + +}; + +#endif // TFG2P_H diff --git a/TensorFlowTTS/examples/fastspeech/README.md b/TensorFlowTTS/examples/fastspeech/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8fcfdb81f37efd4b2fdff81b6b54e463fa4a3714 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech/README.md @@ -0,0 +1,94 @@ +# FastSpeech: Fast, Robust and Controllable Text to Speech +Based on the script [`train_fastspeech.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/fastspeech/train_fastspeech.py). + +## Training FastSpeech from scratch with LJSpeech dataset. +This example code show you how to train FastSpeech from scratch with Tensorflow 2 based on custom training loop and tf.function. The data used for this example is LJSpeech, you can download the dataset at [link](https://keithito.com/LJ-Speech-Dataset/). + +### Step 1: Create Tensorflow based Dataloader (tf.dataset) +First, you need define data loader based on AbstractDataset class (see [`abstract_dataset.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/tensorflow_tts/datasets/abstract_dataset.py)). On this example, a dataloader read dataset from path. I use suffix to classify what file is a charactor, duration and mel-spectrogram (see [`fastspeech_dataset.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/fastspeech/fastspeech_dataset.py)). If you already have preprocessed version of your target dataset, you don't need to use this example dataloader, you just need refer my dataloader and modify **generator function** to adapt with your case. Normally, a generator function should return [charactor_ids, duration, mel]. 
Pls see tacotron2-example to know how to extract durations [Extract Duration](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/tacotron2#step-4-extract-duration-from-alignments-for-fastspeech) + +### Step 2: Training from scratch +After you redefine your dataloader, pls modify an input arguments, train_dataset and valid_dataset from [`train_fastspeech.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/fastspeech/train_fastspeech.py). Here is an example command line to training fastspeech from scratch: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/fastspeech/train_fastspeech.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/fastspeech/exp/train.fastspeech.v1/ \ + --config ./examples/fastspeech/conf/fastspeech.v1.yaml \ + --use-norm 1 + --mixed_precision 0 \ + --resume "" +``` + +IF you want to use MultiGPU to training you can replace `CUDA_VISIBLE_DEVICES=0` by `CUDA_VISIBLE_DEVICES=0,1,2,3` for example. You also need to tune the `batch_size` for each GPU (in config file) by yourself to maximize the performance. Note that MultiGPU now support for Training but not yet support for Decode. 
+ +In case you want to resume the training progress, please following below example command line: + +```bash +--resume ./examples/fastspeech/exp/train.fastspeech.v1/checkpoints/ckpt-100000 +``` + +If you want to finetune a model, use `--pretrained` like this with your model filename +```bash +--pretrained pretrained.h5 +``` + +### Step 3: Decode mel-spectrogram from folder ids +To running inference on folder ids (charactor), run below command line: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/tacotron2/decode_fastspeech.py \ + --rootdir ./dump/valid/ \ + --outdir ./prediction/fastspeech-200k/ \ + --checkpoint ./examples/fastspeech/exp/train.fastspeech.v1/checkpoints/model-200000.h5 \ + --config ./examples/fastspeech/conf/fastspeech.v1.yaml \ + --batch-size 32 +``` + +## Finetune FastSpeech with ljspeech pretrained on other languages +Here is an example show you how to use pretrained ljspeech to training with other languages. This does not guarantee a better model or faster convergence in all cases but it will improve if there is a correlation between target language and pretrained language. The only thing you need to do before finetune on other languages is re-define embedding layers. You can do it by following code: + +```python +pretrained_config = ... +fastspeech = TFFastSpeech(pretrained_config) +fastspeech._build() +fastspeech.summary() +fastspeech.load_weights(PRETRAINED_PATH) + +# re-define here +pretrained_config.vocab_size = NEW_VOCAB_SIZE +new_embedding_layers = TFFastSpeechEmbeddings(pretrained_config, name='embeddings') +fastspeech.embeddings = new_embedding_layers +# re-build model +fastspeech._build() +fastspeech.summary() + +... # training as normal. 
+``` + +## Results +Here is a learning curves of fastspeech based on this config [`fastspeech.v1.yaml`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/fastspeech/conf/fastspeech.v1.yaml) + +### Learning curves + + +## Some important notes + +* **DO NOT** apply any activation function on intermediate layer (TFFastSpeechIntermediate). +* There is no different between num_hidden_layers = 6 and num_hidden_layers = 4. +* I use mish rather than relu. +* For extract durations, i use my tacotron2.v1 at 40k steps with window masking (front=4, back=4). Let say, at that steps it's not a strong tacotron-2 model. If you want to improve the quality of fastspeech model, you may consider use my latest checkpoint tacotron2. + + +## Pretrained Models and Audio samples +| Model | Conf | Lang | Fs [Hz] | Mel range [Hz] | FFT / Hop / Win [pt] | # iters | +| :------ | :---: | :---: | :----: | :--------: | :---------------: | :-----: | +| [fastspeech.v1](https://drive.google.com/drive/folders/1f69ujszFeGnIy7PMwc8AkUckhIaT2OD0?usp=sharing) | [link](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/fastspeech/conf/fastspeech.v1.yaml) | EN | 22.05k | 80-7600 | 1024 / 256 / None | 195k | +| [fastspeech.v3](https://drive.google.com/drive/folders/1ITxTJDrS1I0K8S_x0s0tNbym748p9FUI?usp=sharing) | [link](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/fastspeech/conf/fastspeech.v3.yaml) | EN | 22.05k | 80-7600 | 1024 / 256 / None | 150k | + + +## Reference + +1. https://github.com/xcmyz/FastSpeech +2. 
[FastSpeech: Fast, Robust and Controllable Text to Speech](https://arxiv.org/abs/1905.09263) \ No newline at end of file diff --git a/TensorFlowTTS/examples/fastspeech/conf/fastspeech.v1.yaml b/TensorFlowTTS/examples/fastspeech/conf/fastspeech.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..653a85127e4fa74c7b8a94348af49697482f1ead --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech/conf/fastspeech.v1.yaml @@ -0,0 +1,79 @@ +# This is the hyperparameter configuration file for FastSpeech v1. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but a best checkpoint is around 150k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. +format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "fastspeech" + +fastspeech_params: + n_speakers: 1 + encoder_hidden_size: 384 + encoder_num_hidden_layers: 4 + encoder_num_attention_heads: 2 + encoder_attention_head_size: 192 # hidden_size // num_attention_heads + encoder_intermediate_size: 1024 + encoder_intermediate_kernel_size: 3 + encoder_hidden_act: "mish" + decoder_hidden_size: 384 + decoder_num_hidden_layers: 4 + decoder_num_attention_heads: 2 + decoder_attention_head_size: 192 # hidden_size // num_attention_heads + decoder_intermediate_size: 1024 + decoder_intermediate_kernel_size: 3 + decoder_hidden_act: "mish" + num_duration_conv_layers: 2 + duration_predictor_filters: 256 + duration_predictor_kernel_sizes: 3 + num_mels: 80 + hidden_dropout_prob: 0.1 + attention_probs_dropout_prob: 0.1 + duration_predictor_dropout_probs: 0.1 + max_position_embeddings: 2048 + initializer_range: 0.02 
+ output_attentions: False + output_hidden_states: False + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with asuming that gradient_accumulation_steps is 1 +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00005 + decay_steps: 150000 # < train_max_steps is recommend. + warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|encoder|decoder' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. + +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/fastspeech/conf/fastspeech.v3.yaml b/TensorFlowTTS/examples/fastspeech/conf/fastspeech.v3.yaml new file mode 100644 index 0000000000000000000000000000000000000000..34920aa490611b2c88a228ac9503e9c28a934b49 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech/conf/fastspeech.v3.yaml @@ -0,0 +1,79 @@ +# This is the hyperparameter configuration file for FastSpeech v1. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but a best checkpoint is around 150k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. +format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "fastspeech" + +fastspeech_params: + n_speakers: 1 + encoder_hidden_size: 384 + encoder_num_hidden_layers: 4 + encoder_num_attention_heads: 2 + encoder_attention_head_size: 192 # hidden_size // num_attention_heads + encoder_intermediate_size: 1024 + encoder_intermediate_kernel_size: 3 + encoder_hidden_act: "mish" + decoder_hidden_size: 384 + decoder_num_hidden_layers: 4 + decoder_num_attention_heads: 2 + decoder_attention_head_size: 192 # hidden_size // num_attention_heads + decoder_intermediate_size: 1024 + decoder_intermediate_kernel_size: 3 + decoder_hidden_act: "mish" + num_duration_conv_layers: 2 + duration_predictor_filters: 256 + duration_predictor_kernel_sizes: 3 + num_mels: 80 + hidden_dropout_prob: 0.2 + attention_probs_dropout_prob: 0.1 + duration_predictor_dropout_probs: 0.2 + max_position_embeddings: 2048 + initializer_range: 0.02 + output_attentions: False + output_hidden_states: False + +########################################################### 
+# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00005 + decay_steps: 150000 # < train_max_steps is recommend. + warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|encoder|decoder' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. + +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/fastspeech/decode_fastspeech.py b/TensorFlowTTS/examples/fastspeech/decode_fastspeech.py new file mode 100644 index 0000000000000000000000000000000000000000..215dca848c060c8d9e061adc4ef21a66a64abb48 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech/decode_fastspeech.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Decode trained FastSpeech from folders.""" + +import argparse +import logging +import os +import sys + +sys.path.append(".") + +import numpy as np +import tensorflow as tf +import yaml +from tqdm import tqdm + +from examples.fastspeech.fastspeech_dataset import CharactorDataset +from tensorflow_tts.configs import FastSpeechConfig +from tensorflow_tts.models import TFFastSpeech + + +def main(): + """Run fastspeech decoding from folder.""" + parser = argparse.ArgumentParser( + description="Decode soft-mel features from charactor with trained FastSpeech " + "(See detail in examples/fastspeech/decode_fastspeech.py)." + ) + parser.add_argument( + "--rootdir", + default=None, + type=str, + required=True, + help="directory including ids/durations files.", + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save generated speech." + ) + parser.add_argument( + "--checkpoint", type=str, required=True, help="checkpoint file to be loaded." 
+ ) + parser.add_argument( + "--config", + default=None, + type=str, + required=True, + help="yaml format configuration file. if not explicitly provided, " + "it will be searched in the checkpoint directory. (default=None)", + ) + parser.add_argument( + "--batch-size", + default=8, + type=int, + required=False, + help="Batch size for inference.", + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + args = parser.parse_args() + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # load config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + + if config["format"] == "npy": + char_query = "*-ids.npy" + char_load_fn = np.load + else: + raise ValueError("Only npy is supported.") + + # define data-loader + dataset = CharactorDataset( + root_dir=args.rootdir, + charactor_query=char_query, + charactor_load_fn=char_load_fn, + ) + dataset = dataset.create(batch_size=args.batch_size) + + # define model and load checkpoint + fastspeech = TFFastSpeech( + config=FastSpeechConfig(**config["fastspeech_params"]), name="fastspeech" + ) + fastspeech._build() + fastspeech.load_weights(args.checkpoint) + + for data in tqdm(dataset, desc="Decoding"): + utt_ids = data["utt_ids"] + char_ids = data["input_ids"] + + # fastspeech inference. 
+ masked_mel_before, masked_mel_after, duration_outputs = fastspeech.inference( + char_ids, + speaker_ids=tf.zeros(shape=[tf.shape(char_ids)[0]], dtype=tf.int32), + speed_ratios=tf.ones(shape=[tf.shape(char_ids)[0]], dtype=tf.float32), + ) + + # convert to numpy + masked_mel_befores = masked_mel_before.numpy() + masked_mel_afters = masked_mel_after.numpy() + + for (utt_id, mel_before, mel_after, durations) in zip( + utt_ids, masked_mel_befores, masked_mel_afters, duration_outputs + ): + # real len of mel predicted + real_length = durations.numpy().sum() + utt_id = utt_id.numpy().decode("utf-8") + # save to folder. + np.save( + os.path.join(args.outdir, f"{utt_id}-fs-before-feats.npy"), + mel_before[:real_length, :].astype(np.float32), + allow_pickle=False, + ) + np.save( + os.path.join(args.outdir, f"{utt_id}-fs-after-feats.npy"), + mel_after[:real_length, :].astype(np.float32), + allow_pickle=False, + ) + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/fastspeech/fastspeech_dataset.py b/TensorFlowTTS/examples/fastspeech/fastspeech_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..65bf66bcc25951f56fa96af8026a1ecfe7ddbf6d --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech/fastspeech_dataset.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Dataset modules.""" + +import itertools +import logging +import os +import random + +import numpy as np +import tensorflow as tf + +from tensorflow_tts.datasets.abstract_dataset import AbstractDataset +from tensorflow_tts.utils import find_files + + +class CharactorDurationMelDataset(AbstractDataset): + """Tensorflow Charactor Mel dataset.""" + + def __init__( + self, + root_dir, + charactor_query="*-ids.npy", + mel_query="*-norm-feats.npy", + duration_query="*-durations.npy", + charactor_load_fn=np.load, + mel_load_fn=np.load, + duration_load_fn=np.load, + mel_length_threshold=0, + ): + """Initialize dataset. + + Args: + root_dir (str): Root directory including dumped files. + charactor_query (str): Query to find charactor files in root_dir. + mel_query (str): Query to find feature files in root_dir. + duration_query (str): Query to find duration files in root_dir. + charactor_load_fn (func): Function to load charactor file. + mel_load_fn (func): Function to load feature file. + duration_load_fn (func): Function to load duration file. + mel_length_threshold (int): Threshold to remove short feature files. + return_utt_id (bool): Whether to return the utterance id with arrays. + + """ + # find all of charactor and mel files. + charactor_files = sorted(find_files(root_dir, charactor_query)) + mel_files = sorted(find_files(root_dir, mel_query)) + duration_files = sorted(find_files(root_dir, duration_query)) + + # assert the number of files + assert len(mel_files) != 0, f"Not found any mels files in ${root_dir}." + assert ( + len(mel_files) == len(charactor_files) == len(duration_files) + ), f"Number of charactor, mel and duration files are different \ + ({len(mel_files)} vs {len(charactor_files)} vs {len(duration_files)})." 
+ + if ".npy" in charactor_query: + suffix = charactor_query[1:] + utt_ids = [os.path.basename(f).replace(suffix, "") for f in charactor_files] + + # set global params + self.utt_ids = utt_ids + self.mel_files = mel_files + self.charactor_files = charactor_files + self.duration_files = duration_files + self.mel_load_fn = mel_load_fn + self.charactor_load_fn = charactor_load_fn + self.duration_load_fn = duration_load_fn + self.mel_length_threshold = mel_length_threshold + + def get_args(self): + return [self.utt_ids] + + def generator(self, utt_ids): + for i, utt_id in enumerate(utt_ids): + mel_file = self.mel_files[i] + charactor_file = self.charactor_files[i] + duration_file = self.duration_files[i] + + items = { + "utt_ids": utt_id, + "mel_files": mel_file, + "charactor_files": charactor_file, + "duration_files": duration_file, + } + + yield items + + @tf.function + def _load_data(self, items): + mel = tf.numpy_function(np.load, [items["mel_files"]], tf.float32) + charactor = tf.numpy_function(np.load, [items["charactor_files"]], tf.int32) + duration = tf.numpy_function(np.load, [items["duration_files"]], tf.int32) + + items = { + "utt_ids": items["utt_ids"], + "input_ids": charactor, + "speaker_ids": 0, + "duration_gts": duration, + "mel_gts": mel, + "mel_lengths": len(mel), + } + + return items + + def create( + self, + allow_cache=False, + batch_size=1, + is_shuffle=False, + map_fn=None, + reshuffle_each_iteration=True, + ): + """Create tf.dataset function.""" + output_types = self.get_output_dtypes() + datasets = tf.data.Dataset.from_generator( + self.generator, output_types=output_types, args=(self.get_args()) + ) + + # load data + datasets = datasets.map( + lambda items: self._load_data(items), tf.data.experimental.AUTOTUNE + ) + + datasets = datasets.filter( + lambda x: x["mel_lengths"] > self.mel_length_threshold + ) + + if allow_cache: + datasets = datasets.cache() + + if is_shuffle: + datasets = datasets.shuffle( + self.get_len_dataset(), + 
reshuffle_each_iteration=reshuffle_each_iteration, + ) + + # define padded_shapes + padded_shapes = { + "utt_ids": [], + "input_ids": [None], + "speaker_ids": [], + "duration_gts": [None], + "mel_gts": [None, None], + "mel_lengths": [], + } + + datasets = datasets.padded_batch(batch_size, padded_shapes=padded_shapes) + datasets = datasets.prefetch(tf.data.experimental.AUTOTUNE) + return datasets + + def get_output_dtypes(self): + output_types = { + "utt_ids": tf.string, + "mel_files": tf.string, + "charactor_files": tf.string, + "duration_files": tf.string, + } + return output_types + + def get_len_dataset(self): + return len(self.utt_ids) + + def __name__(self): + return "CharactorDurationMelDataset" + + +class CharactorDataset(AbstractDataset): + """Tensorflow Charactor dataset.""" + + def __init__( + self, root_dir, charactor_query="*-ids.npy", charactor_load_fn=np.load, + ): + """Initialize dataset. + + Args: + root_dir (str): Root directory including dumped files. + charactor_query (str): Query to find charactor files in root_dir. + charactor_load_fn (func): Function to load charactor file. + return_utt_id (bool): Whether to return the utterance id with arrays. + + """ + # find all of charactor and mel files. + charactor_files = sorted(find_files(root_dir, charactor_query)) + + # assert the number of files + assert ( + len(charactor_files) != 0 + ), f"Not found any char or duration files in ${root_dir}." 
+ if ".npy" in charactor_query: + suffix = charactor_query[1:] + utt_ids = [os.path.basename(f).replace(suffix, "") for f in charactor_files] + + # set global params + self.utt_ids = utt_ids + self.charactor_files = charactor_files + self.charactor_load_fn = charactor_load_fn + + def get_args(self): + return [self.utt_ids] + + def generator(self, utt_ids): + for i, utt_id in enumerate(utt_ids): + charactor_file = self.charactor_files[i] + charactor = self.charactor_load_fn(charactor_file) + + items = {"utt_ids": utt_id, "input_ids": charactor} + + yield items + + def create( + self, + allow_cache=False, + batch_size=1, + is_shuffle=False, + map_fn=None, + reshuffle_each_iteration=True, + ): + """Create tf.dataset function.""" + output_types = self.get_output_dtypes() + datasets = tf.data.Dataset.from_generator( + self.generator, output_types=output_types, args=(self.get_args()) + ) + + if allow_cache: + datasets = datasets.cache() + + if is_shuffle: + datasets = datasets.shuffle( + self.get_len_dataset(), + reshuffle_each_iteration=reshuffle_each_iteration, + ) + + # define padded shapes + padded_shapes = {"utt_ids": [], "input_ids": [None]} + + datasets = datasets.padded_batch( + batch_size, padded_shapes=padded_shapes, drop_remainder=True + ) + datasets = datasets.prefetch(tf.data.experimental.AUTOTUNE) + return datasets + + def get_output_dtypes(self): + output_types = {"utt_ids": tf.string, "input_ids": tf.int32} + return output_types + + def get_len_dataset(self): + return len(self.utt_ids) + + def __name__(self): + return "CharactorDataset" diff --git a/TensorFlowTTS/examples/fastspeech/fig/fastspeech.v1.png b/TensorFlowTTS/examples/fastspeech/fig/fastspeech.v1.png new file mode 100644 index 0000000000000000000000000000000000000000..b7ff382736015bbd3f082cf704cb710a1aa52177 Binary files /dev/null and b/TensorFlowTTS/examples/fastspeech/fig/fastspeech.v1.png differ diff --git a/TensorFlowTTS/examples/fastspeech/train_fastspeech.py 
# -*- coding: utf-8 -*-
# Copyright 2020 Minh Nguyen (@dathudeptrai)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Train FastSpeech."""

import tensorflow as tf

# Enable memory growth before anything else touches the GPUs, so TF
# allocates GPU memory on demand instead of reserving it all up front.
physical_devices = tf.config.list_physical_devices("GPU")
for device in physical_devices:
    tf.config.experimental.set_memory_growth(device, True)

import argparse
import logging
import os
import sys

sys.path.append(".")

import numpy as np
import yaml

import tensorflow_tts
import tensorflow_tts.configs.fastspeech as FASTSPEECH_CONFIG
from examples.fastspeech.fastspeech_dataset import CharactorDurationMelDataset
from tensorflow_tts.models import TFFastSpeech
from tensorflow_tts.optimizers import AdamWeightDecay, WarmUp
from tensorflow_tts.trainers import Seq2SeqBasedTrainer
from tensorflow_tts.utils import calculate_2d_loss, calculate_3d_loss, return_strategy


class FastSpeechTrainer(Seq2SeqBasedTrainer):
    """FastSpeech Trainer class based on Seq2SeqBasedTrainer."""

    def __init__(
        self, config, strategy, steps=0, epochs=0, is_mixed_precision=False,
    ):
        """Initialize trainer.

        Args:
            config (dict): Config dict loaded from yaml format configuration file.
            strategy (tf.distribute.Strategy): Distribution strategy to train under.
            steps (int): Initial global steps.
            epochs (int): Initial global epochs.
            is_mixed_precision (bool): Use mixed precision or not.
        """
        super(FastSpeechTrainer, self).__init__(
            steps=steps,
            epochs=epochs,
            config=config,
            strategy=strategy,
            is_mixed_precision=is_mixed_precision,
        )
        # define metrics to aggregate data; tf.summary logs them.
        self.list_metrics_name = ["duration_loss", "mel_loss_before", "mel_loss_after"]
        self.init_train_eval_metrics(self.list_metrics_name)
        self.reset_states_train()
        self.reset_states_eval()

        self.config = config

    def compile(self, model, optimizer):
        """Attach model/optimizer and build the per-example loss functions."""
        super().compile(model, optimizer)
        # Reduction.NONE keeps a per-example loss vector so the distributed
        # trainer can reduce across replicas itself.
        self.mse = tf.keras.losses.MeanSquaredError(
            reduction=tf.keras.losses.Reduction.NONE
        )
        self.mae = tf.keras.losses.MeanAbsoluteError(
            reduction=tf.keras.losses.Reduction.NONE
        )

    def compute_per_example_losses(self, batch, outputs):
        """Compute per example losses and return dict_metrics_losses.

        Note that every element of the loss MUST have shape [batch_size] and
        the keys of dict_metrics_losses MUST be in self.list_metrics_name.

        Args:
            batch: dictionary batch input returned from the dataloader.
            outputs: outputs of the model.

        Returns:
            per_example_losses: per example losses for each GPU, shape [B].
            dict_metrics_losses: dictionary of named losses.
        """
        mel_before, mel_after, duration_outputs = outputs

        # durations are regressed in log space; the +1 avoids log(0).
        log_duration = tf.math.log(
            tf.cast(tf.math.add(batch["duration_gts"], 1), tf.float32)
        )
        duration_loss = self.mse(log_duration, duration_outputs)
        mel_loss_before = calculate_3d_loss(batch["mel_gts"], mel_before, self.mae)
        mel_loss_after = calculate_3d_loss(batch["mel_gts"], mel_after, self.mae)

        per_example_losses = duration_loss + mel_loss_before + mel_loss_after

        dict_metrics_losses = {
            "duration_loss": duration_loss,
            "mel_loss_before": mel_loss_before,
            "mel_loss_after": mel_loss_after,
        }

        return per_example_losses, dict_metrics_losses

    def generate_and_save_intermediate_result(self, batch):
        """Generate and save intermediate result (mel spectrogram plots)."""
        import matplotlib.pyplot as plt

        # predict with tf.function.
        outputs = self.one_step_predict(batch)

        mels_before, mels_after, *_ = outputs
        mel_gts = batch["mel_gts"]
        utt_ids = batch["utt_ids"]

        # convert to numpy; under a multi-device strategy the outputs are
        # PerReplica values, so take the sample from the first replica.
        try:
            mels_before = mels_before.values[0].numpy()
            mels_after = mels_after.values[0].numpy()
            mel_gts = mel_gts.values[0].numpy()
            utt_ids = utt_ids.values[0].numpy()
        except Exception:
            # single-device tensors have no .values attribute.
            mels_before = mels_before.numpy()
            mels_after = mels_after.numpy()
            mel_gts = mel_gts.numpy()
            utt_ids = utt_ids.numpy()

        # make sure the prediction directory for this step exists.
        dirname = os.path.join(self.config["outdir"], f"predictions/{self.steps}steps")
        os.makedirs(dirname, exist_ok=True)

        for idx, (mel_gt, mel_before, mel_after) in enumerate(
            zip(mel_gts, mels_before, mels_after), 0
        ):
            mel_gt = tf.reshape(mel_gt, (-1, 80)).numpy()  # [length, 80]
            mel_before = tf.reshape(mel_before, (-1, 80)).numpy()  # [length, 80]
            mel_after = tf.reshape(mel_after, (-1, 80)).numpy()  # [length, 80]

            # plot figure and save it.
            utt_id = utt_ids[idx].decode("utf-8")
            figname = os.path.join(dirname, f"{utt_id}.png")
            fig = plt.figure(figsize=(10, 8))
            ax1 = fig.add_subplot(311)
            ax2 = fig.add_subplot(312)
            ax3 = fig.add_subplot(313)
            im = ax1.imshow(np.rot90(mel_gt), aspect="auto", interpolation="none")
            ax1.set_title("Target Mel-Spectrogram")
            fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax1)
            ax2.set_title("Predicted Mel-before-Spectrogram")
            im = ax2.imshow(np.rot90(mel_before), aspect="auto", interpolation="none")
            fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax2)
            ax3.set_title("Predicted Mel-after-Spectrogram")
            im = ax3.imshow(np.rot90(mel_after), aspect="auto", interpolation="none")
            fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax3)
            plt.tight_layout()
            plt.savefig(figname)
            plt.close()


def main():
    """Run training process."""
    parser = argparse.ArgumentParser(
        description="Train FastSpeech (See detail in tensorflow_tts/bin/train-fastspeech.py)"
    )
    parser.add_argument(
        "--train-dir",
        default=None,
        type=str,
        help="directory including training data. ",
    )
    parser.add_argument(
        "--dev-dir",
        default=None,
        type=str,
        help="directory including development data. ",
    )
    parser.add_argument(
        # fixed help-string typo: "usr" -> "use"
        "--use-norm", default=1, type=int, help="use norm-mels for train or raw."
    )
    parser.add_argument(
        "--outdir", type=str, required=True, help="directory to save checkpoints."
    )
    parser.add_argument(
        "--config", type=str, required=True, help="yaml format configuration file."
    )
    parser.add_argument(
        "--resume",
        default="",
        type=str,
        nargs="?",
        help='checkpoint file path to resume training. (default="")',
    )
    parser.add_argument(
        "--verbose",
        type=int,
        default=1,
        help="logging level. higher is more logging. (default=1)",
    )
    parser.add_argument(
        "--mixed_precision",
        default=0,
        type=int,
        help="using mixed precision for generator or not.",
    )
    parser.add_argument(
        "--pretrained",
        default="",
        type=str,
        nargs="?",
        help="pretrained checkpoint file to load weights from. Auto-skips non-matching layers",
    )
    args = parser.parse_args()

    # return strategy
    STRATEGY = return_strategy()

    # set mixed precision config
    if args.mixed_precision == 1:
        tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True})

    args.mixed_precision = bool(args.mixed_precision)
    args.use_norm = bool(args.use_norm)

    # set logger: verbose >= 2 -> DEBUG, 1 -> INFO, 0 -> WARN.
    if args.verbose > 1:
        logging.basicConfig(
            level=logging.DEBUG,
            stream=sys.stdout,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    elif args.verbose > 0:
        logging.basicConfig(
            level=logging.INFO,
            stream=sys.stdout,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    else:
        logging.basicConfig(
            level=logging.WARN,
            stream=sys.stdout,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
        logging.warning("Skip DEBUG/INFO messages")

    # check directory existence
    os.makedirs(args.outdir, exist_ok=True)

    # check arguments
    if args.train_dir is None:
        raise ValueError("Please specify --train-dir")
    if args.dev_dir is None:
        # fixed: the error previously named a nonexistent "--valid-dir" flag.
        raise ValueError("Please specify --dev-dir")

    # load and save config.
    # NOTE(review): yaml.Loader can construct arbitrary Python objects; the
    # config file is assumed trusted here, but yaml.safe_load would be safer.
    with open(args.config) as f:
        config = yaml.load(f, Loader=yaml.Loader)
    config.update(vars(args))
    config["version"] = tensorflow_tts.__version__
    with open(os.path.join(args.outdir, "config.yml"), "w") as f:
        yaml.dump(config, f, Dumper=yaml.Dumper)
    for key, value in config.items():
        logging.info(f"{key} = {value}")

    # get dataset
    if config["remove_short_samples"]:
        mel_length_threshold = config["mel_length_threshold"]
    else:
        mel_length_threshold = None

    if config["format"] == "npy":
        charactor_query = "*-ids.npy"
        mel_query = "*-raw-feats.npy" if not args.use_norm else "*-norm-feats.npy"
        duration_query = "*-durations.npy"
        charactor_load_fn = np.load
        mel_load_fn = np.load
        duration_load_fn = np.load
    else:
        raise ValueError("Only npy are supported.")

    # define train/valid dataset; the train batch is scaled by the number of
    # replicas and by gradient accumulation steps.
    train_dataset = CharactorDurationMelDataset(
        root_dir=args.train_dir,
        charactor_query=charactor_query,
        mel_query=mel_query,
        duration_query=duration_query,
        charactor_load_fn=charactor_load_fn,
        mel_load_fn=mel_load_fn,
        duration_load_fn=duration_load_fn,
        mel_length_threshold=mel_length_threshold,
    ).create(
        is_shuffle=config["is_shuffle"],
        allow_cache=config["allow_cache"],
        batch_size=config["batch_size"]
        * STRATEGY.num_replicas_in_sync
        * config["gradient_accumulation_steps"],
    )

    valid_dataset = CharactorDurationMelDataset(
        root_dir=args.dev_dir,
        charactor_query=charactor_query,
        mel_query=mel_query,
        duration_query=duration_query,
        charactor_load_fn=charactor_load_fn,
        mel_load_fn=mel_load_fn,
        duration_load_fn=duration_load_fn,
    ).create(
        is_shuffle=config["is_shuffle"],
        allow_cache=config["allow_cache"],
        batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync,
    )

    # define trainer
    trainer = FastSpeechTrainer(
        config=config,
        strategy=STRATEGY,
        steps=0,
        epochs=0,
        is_mixed_precision=args.mixed_precision,
    )

    with STRATEGY.scope():
        # define model
        fastspeech = TFFastSpeech(
            config=FASTSPEECH_CONFIG.FastSpeechConfig(**config["fastspeech_params"])
        )
        fastspeech._build()
        fastspeech.summary()

        # `nargs="?"` can leave --pretrained empty; treat anything longer
        # than one character as a real checkpoint path.
        if len(args.pretrained) > 1:
            fastspeech.load_weights(args.pretrained, by_name=True, skip_mismatch=True)
            logging.info(
                f"Successfully loaded pretrained weight from {args.pretrained}."
            )

        # AdamW for fastspeech: polynomial decay wrapped in a linear warmup.
        learning_rate_fn = tf.keras.optimizers.schedules.PolynomialDecay(
            initial_learning_rate=config["optimizer_params"]["initial_learning_rate"],
            decay_steps=config["optimizer_params"]["decay_steps"],
            end_learning_rate=config["optimizer_params"]["end_learning_rate"],
        )

        learning_rate_fn = WarmUp(
            initial_learning_rate=config["optimizer_params"]["initial_learning_rate"],
            decay_schedule_fn=learning_rate_fn,
            warmup_steps=int(
                config["train_max_steps"]
                * config["optimizer_params"]["warmup_proportion"]
            ),
        )

        optimizer = AdamWeightDecay(
            learning_rate=learning_rate_fn,
            weight_decay_rate=config["optimizer_params"]["weight_decay"],
            beta_1=0.9,
            beta_2=0.98,
            epsilon=1e-6,
            exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"],
        )

        # touch iterations so the optimizer creates its slot variables
        # inside the strategy scope.
        _ = optimizer.iterations

        # compile trainer
        trainer.compile(model=fastspeech, optimizer=optimizer)

        # start training; on Ctrl-C, save a checkpoint before exiting.
        try:
            trainer.fit(
                train_dataset,
                valid_dataset,
                saved_path=os.path.join(config["outdir"], "checkpoints/"),
                resume=args.resume,
            )
        except KeyboardInterrupt:
            trainer.save_checkpoint()
            logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.")


if __name__ == "__main__":
    main()
b/TensorFlowTTS/examples/fastspeech2/README.md @@ -0,0 +1,70 @@ +# FastSpeech 2: Fast and High-Quality End-to-End Text to Speech +Based on the script [`train_fastspeech2.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/fastspeech2/train_fastspeech2.py). + +## Training FastSpeech2 from scratch with LJSpeech dataset. +This example code show you how to train FastSpeech from scratch with Tensorflow 2 based on custom training loop and tf.function. The data used for this example is LJSpeech, you can download the dataset at [link](https://keithito.com/LJ-Speech-Dataset/). + +### Step 1: Create Tensorflow based Dataloader (tf.dataset) +First, you need define data loader based on AbstractDataset class (see [`abstract_dataset.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/tensorflow_tts/datasets/abstract_dataset.py)). On this example, a dataloader read dataset from path. I use suffix to classify what file is a charactor, duration and mel-spectrogram (see [`fastspeech2_dataset.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/fastspeech2/fastspeech2_dataset.py)). If you already have preprocessed version of your target dataset, you don't need to use this example dataloader, you just need refer my dataloader and modify **generator function** to adapt with your case. Normally, a generator function should return [charactor_ids, duration, f0, energy, mel]. Pls see tacotron2-example to know how to extract durations [Extract Duration](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/tacotron2#step-4-extract-duration-from-alignments-for-fastspeech) + +### Step 2: Training from scratch +After you redefine your dataloader, pls modify an input arguments, train_dataset and valid_dataset from [`train_fastspeech2.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/fastspeech2/train_fastspeech2.py). 
Here is an example command line to training fastspeech2 from scratch: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/fastspeech2/train_fastspeech2.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/fastspeech2/exp/train.fastspeech2.v1/ \ + --config ./examples/fastspeech2/conf/fastspeech2.v1.yaml \ + --use-norm 1 \ + --f0-stat ./dump/stats_f0.npy \ + --energy-stat ./dump/stats_energy.npy \ + --mixed_precision 1 \ + --resume "" +``` + +IF you want to use MultiGPU to training you can replace `CUDA_VISIBLE_DEVICES=0` by `CUDA_VISIBLE_DEVICES=0,1,2,3` for example. You also need to tune the `batch_size` for each GPU (in config file) by yourself to maximize the performance. Note that MultiGPU now support for Training but not yet support for Decode. + +In case you want to resume the training progress, please following below example command line: + +```bash +--resume ./examples/fastspeech2/exp/train.fastspeech2.v1/checkpoints/ckpt-100000 +``` + +If you want to finetune a model, use `--pretrained` like this with your model filename +```bash +--pretrained pretrained.h5 +``` + +You can also define `var_train_expr` in config file to let model training only on some layers in case you want to fine-tune on your dataset with the same pretrained language and processor. For example, `var_train_expr: "embeddings|encoder|decoder"` means we just training all variables that `embeddings`, `encoder`, `decoder` exist in its name. + + +### Step 3: Decode mel-spectrogram from folder ids + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/fastspeech2/decode_fastspeech2.py \ + --rootdir ./dump/valid \ + --outdir ./predictions/fastspeech2.v1/ \ + --config ./examples/fastspeech2/conf/fastspeech2.v1.yaml \ + --checkpoint ./examples/fastspeech2/checkpoints/model-150000.h5 \ + --batch-size 8 +``` + +## What's difference ? + +* It's not ez for the model to learn predict f0/energy on mel level as paper did. 
Instead, i average f0/energy based on duration to get f0/energy on charactor level then sum it into encoder_hidden_state before pass though Length-Regulator. +* I apply mean/std normalization for both f0/energy. Note that before calculate mean and std values over all training set, i remove all outliers from f0 and energy. +* Instead using 256 bins for F0 and energy as FastSpeech2 paper, i let model learn to predict real f0/energy value then pass it though one layer Conv1D with kernel_size 9 to upsamples f0/energy scalar to vector as **[FastPitch](https://arxiv.org/abs/2006.06873)** paper suggest. +* There are other modifications to make it work, let read the code carefully to make sure you won't miss anything :D. + +## Pretrained Models and Audio samples +| Model | Conf | Lang | Fs [Hz] | Mel range [Hz] | FFT / Hop / Win [pt] | # iters | +| :------ | :---: | :---: | :----: | :--------: | :---------------: | :-----: | +| [fastspeech2.v1](https://drive.google.com/drive/folders/158vFyC2pxw9xKdxp-C5WPEtgtUiWZYE0?usp=sharing) | [link](https://github.com/TensorSpeech/TensorFlowTTS/blob/master/examples/fastspeech2/conf/fastspeech2.v1.yaml) | EN | 22.05k | 80-7600 | 1024 / 256 / None | 150k | +| [fastspeech2.kss.v1](https://drive.google.com/drive/folders/1DU952--jVnJ5SZDSINRs7dVVSpdB7tC_?usp=sharing) | [link](https://github.com/TensorSpeech/TensorFlowTTS/blob/master/examples/fastspeech2/conf/fastspeech2.kss.v1.yaml) | KO | 22.05k | 80-7600 | 1024 / 256 / None | 200k | +| [fastspeech2.kss.v2](https://drive.google.com/drive/folders/1G3-AJnEsu2rYXYgo2iGIVJfCqqfbpwMu?usp=sharing) | [link](https://github.com/TensorSpeech/TensorFlowTTS/blob/master/examples/fastspeech2/conf/fastspeech2.kss.v2.yaml) | KO | 22.05k | 80-7600 | 1024 / 256 / None | 200k | + +## Reference + +1. [FastSpeech 2: Fast and High-Quality End-to-End Text to Speech](https://arxiv.org/abs/2006.04558) +2. 
[FastPitch: Parallel Text-to-speech with Pitch Prediction](https://arxiv.org/abs/2006.06873) \ No newline at end of file diff --git a/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.baker.v2.yaml b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.baker.v2.yaml new file mode 100644 index 0000000000000000000000000000000000000000..eb2d92a37fe80347a34c6ef70698e37c188e2346 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.baker.v2.yaml @@ -0,0 +1,81 @@ +# This is the hyperparameter configuration file for FastSpeech2 v2. +# the different of v2 and v1 is that v2 apply linformer technique. +# Please make sure this is adjusted for the Baker dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but a best checkpoint is around 150k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. 
+format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "fastspeech2" + +fastspeech2_params: + dataset: baker + n_speakers: 1 + encoder_hidden_size: 256 + encoder_num_hidden_layers: 3 + encoder_num_attention_heads: 2 + encoder_attention_head_size: 16 # in v1, = 384//2 + encoder_intermediate_size: 1024 + encoder_intermediate_kernel_size: 3 + encoder_hidden_act: "mish" + decoder_hidden_size: 256 + decoder_num_hidden_layers: 3 + decoder_num_attention_heads: 2 + decoder_attention_head_size: 16 # in v1, = 384//2 + decoder_intermediate_size: 1024 + decoder_intermediate_kernel_size: 3 + decoder_hidden_act: "mish" + variant_prediction_num_conv_layers: 2 + variant_predictor_filter: 256 + variant_predictor_kernel_size: 3 + variant_predictor_dropout_rate: 0.5 + num_mels: 80 + hidden_dropout_prob: 0.2 + attention_probs_dropout_prob: 0.1 + max_position_embeddings: 2048 + initializer_range: 0.02 + output_attentions: False + output_hidden_states: False + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00005 + decay_steps: 150000 # < train_max_steps is recommend. 
+ warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|encoder|decoder' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +delay_f0_energy_steps: 3 # 2 steps use LR outputs only then 1 steps LR + F0 + Energy. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.jsut.v1.yaml b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.jsut.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..afbdf10c4e277cb0b90e8b92a296f4d9a409c4d7 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.jsut.v1.yaml @@ -0,0 +1,81 @@ +# This is the hyperparameter configuration file for FastSpeech2 v2. +# the difference between v2 and v1 is that v2 applies the linformer technique. +# Please make sure this is adjusted for the JSUT dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but a best checkpoint is around 150k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 300 # Hop size. 
+format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "fastspeech2" + +fastspeech2_params: + dataset: jsut + n_speakers: 1 + encoder_hidden_size: 256 + encoder_num_hidden_layers: 3 + encoder_num_attention_heads: 2 + encoder_attention_head_size: 16 # in v1, = 384//2 + encoder_intermediate_size: 1024 + encoder_intermediate_kernel_size: 3 + encoder_hidden_act: "mish" + decoder_hidden_size: 256 + decoder_num_hidden_layers: 3 + decoder_num_attention_heads: 2 + decoder_attention_head_size: 16 # in v1, = 384//2 + decoder_intermediate_size: 1024 + decoder_intermediate_kernel_size: 3 + decoder_hidden_act: "mish" + variant_prediction_num_conv_layers: 2 + variant_predictor_filter: 256 + variant_predictor_kernel_size: 3 + variant_predictor_dropout_rate: 0.5 + num_mels: 80 + hidden_dropout_prob: 0.2 + attention_probs_dropout_prob: 0.1 + max_position_embeddings: 2048 + initializer_range: 0.02 + output_attentions: False + output_hidden_states: False + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00005 + decay_steps: 150000 # < train_max_steps is recommend. 
+ warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|encoder|decoder' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +delay_f0_energy_steps: 3 # 2 steps use LR outputs only then 1 steps LR + F0 + Energy. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.kss.v1.yaml b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.kss.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f04676de1b20eb3eb3e814cffd69c1f92b40e634 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.kss.v1.yaml @@ -0,0 +1,79 @@ +# This is the hyperparameter configuration file for FastSpeech2 v1. +# Please make sure this is adjusted for the KSS dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but a best checkpoint is around 150k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. 
+format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "fastspeech2" + +fastspeech2_params: + dataset: "kss" + n_speakers: 1 + encoder_hidden_size: 384 + encoder_num_hidden_layers: 4 + encoder_num_attention_heads: 2 + encoder_attention_head_size: 192 # hidden_size // num_attention_heads + encoder_intermediate_size: 1024 + encoder_intermediate_kernel_size: 3 + encoder_hidden_act: "mish" + decoder_hidden_size: 384 + decoder_num_hidden_layers: 4 + decoder_num_attention_heads: 2 + decoder_attention_head_size: 192 # hidden_size // num_attention_heads + decoder_intermediate_size: 1024 + decoder_intermediate_kernel_size: 3 + decoder_hidden_act: "mish" + variant_prediction_num_conv_layers: 2 + variant_predictor_filter: 256 + variant_predictor_kernel_size: 3 + variant_predictor_dropout_rate: 0.5 + num_mels: 80 + hidden_dropout_prob: 0.2 + attention_probs_dropout_prob: 0.1 + max_position_embeddings: 2048 + initializer_range: 0.02 + output_attentions: False + output_hidden_states: False + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00005 + decay_steps: 150000 # < train_max_steps is recommend. 
+ warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|encoder|decoder' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. \ No newline at end of file diff --git a/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.kss.v2.yaml b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.kss.v2.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0391bebbdf6673bd9cc2ed5ce6b6a061c25c0b17 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.kss.v2.yaml @@ -0,0 +1,81 @@ +# This is the hyperparameter configuration file for FastSpeech2 v2. +# the different of v2 and v1 is that v2 apply linformer technique. +# Please make sure this is adjusted for the KSS dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but a best checkpoint is around 150k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. 
+format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "fastspeech2" + +fastspeech2_params: + dataset: "kss" + n_speakers: 1 + encoder_hidden_size: 256 + encoder_num_hidden_layers: 3 + encoder_num_attention_heads: 2 + encoder_attention_head_size: 16 # in v1, = 384//2 + encoder_intermediate_size: 1024 + encoder_intermediate_kernel_size: 3 + encoder_hidden_act: "mish" + decoder_hidden_size: 256 + decoder_num_hidden_layers: 3 + decoder_num_attention_heads: 2 + decoder_attention_head_size: 16 # in v1, = 384//2 + decoder_intermediate_size: 1024 + decoder_intermediate_kernel_size: 3 + decoder_hidden_act: "mish" + variant_prediction_num_conv_layers: 2 + variant_predictor_filter: 256 + variant_predictor_kernel_size: 3 + variant_predictor_dropout_rate: 0.5 + num_mels: 80 + hidden_dropout_prob: 0.2 + attention_probs_dropout_prob: 0.1 + max_position_embeddings: 2048 + initializer_range: 0.02 + output_attentions: False + output_hidden_states: False + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00005 + decay_steps: 150000 # < train_max_steps is recommend. 
+ warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|encoder|decoder' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +delay_f0_energy_steps: 3 # 2 steps use LR outputs only then 1 steps LR + F0 + Energy. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.v1.yaml b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..76bf57ca04bd4a470c8fa407081c783bc96fb52a --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.v1.yaml @@ -0,0 +1,78 @@ +# This is the hyperparameter configuration file for FastSpeech2 v1. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but a best checkpoint is around 150k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. 
+format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "fastspeech2" + +fastspeech2_params: + n_speakers: 1 + encoder_hidden_size: 384 + encoder_num_hidden_layers: 4 + encoder_num_attention_heads: 2 + encoder_attention_head_size: 192 # hidden_size // num_attention_heads + encoder_intermediate_size: 1024 + encoder_intermediate_kernel_size: 3 + encoder_hidden_act: "mish" + decoder_hidden_size: 384 + decoder_num_hidden_layers: 4 + decoder_num_attention_heads: 2 + decoder_attention_head_size: 192 # hidden_size // num_attention_heads + decoder_intermediate_size: 1024 + decoder_intermediate_kernel_size: 3 + decoder_hidden_act: "mish" + variant_prediction_num_conv_layers: 2 + variant_predictor_filter: 256 + variant_predictor_kernel_size: 3 + variant_predictor_dropout_rate: 0.5 + num_mels: 80 + hidden_dropout_prob: 0.2 + attention_probs_dropout_prob: 0.1 + max_position_embeddings: 2048 + initializer_range: 0.02 + output_attentions: False + output_hidden_states: False + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00005 + decay_steps: 150000 # < train_max_steps is recommend. 
+ warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|encoder|decoder' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. \ No newline at end of file diff --git a/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.v2.yaml b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.v2.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3e373c80cae89820b08134185dd58c19d23e0722 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2/conf/fastspeech2.v2.yaml @@ -0,0 +1,80 @@ +# This is the hyperparameter configuration file for FastSpeech2 v2. +# the different of v2 and v1 is that v2 apply linformer technique. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but a best checkpoint is around 150k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. 
+format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "fastspeech2" + +fastspeech2_params: + n_speakers: 1 + encoder_hidden_size: 256 + encoder_num_hidden_layers: 3 + encoder_num_attention_heads: 2 + encoder_attention_head_size: 16 # in v1, = 384//2 + encoder_intermediate_size: 1024 + encoder_intermediate_kernel_size: 3 + encoder_hidden_act: "mish" + decoder_hidden_size: 256 + decoder_num_hidden_layers: 3 + decoder_num_attention_heads: 2 + decoder_attention_head_size: 16 # in v1, = 384//2 + decoder_intermediate_size: 1024 + decoder_intermediate_kernel_size: 3 + decoder_hidden_act: "mish" + variant_prediction_num_conv_layers: 2 + variant_predictor_filter: 256 + variant_predictor_kernel_size: 3 + variant_predictor_dropout_rate: 0.5 + num_mels: 80 + hidden_dropout_prob: 0.2 + attention_probs_dropout_prob: 0.1 + max_position_embeddings: 2048 + initializer_range: 0.02 + output_attentions: False + output_hidden_states: False + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1 +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00005 + decay_steps: 150000 # < train_max_steps is recommend. 
+ warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|encoder|decoder' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +delay_f0_energy_steps: 3 # 2 steps use LR outputs only then 1 steps LR + F0 + Energy. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/fastspeech2/decode_fastspeech2.py b/TensorFlowTTS/examples/fastspeech2/decode_fastspeech2.py new file mode 100644 index 0000000000000000000000000000000000000000..6c9fe4a7872273d16f6a7430af39b17036b23e32 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2/decode_fastspeech2.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""Decode trained FastSpeech from folders.

Batch-decodes mel-spectrograms from dumped charactor-id files using a trained
FastSpeech2 checkpoint and writes before/after-postnet features to disk.
"""

import argparse
import logging
import os
import sys

sys.path.append(".")

import numpy as np
import tensorflow as tf
import yaml
from tqdm import tqdm

from examples.fastspeech.fastspeech_dataset import CharactorDataset
from tensorflow_tts.configs import FastSpeech2Config
from tensorflow_tts.models import TFFastSpeech2


def main():
    """Run fastspeech2 decoding from folder.

    Reads ``*-ids.npy`` charactor files from ``--rootdir``, restores the model
    from ``--checkpoint``, runs batched inference, and saves per-utterance
    ``{utt_id}-fs-before-feats.npy`` / ``{utt_id}-fs-after-feats.npy`` arrays
    (float32, shape [frames, num_mels]) into ``--outdir``.
    """
    parser = argparse.ArgumentParser(
        description="Decode soft-mel features from charactor with trained FastSpeech "
        "(See detail in examples/fastspeech2/decode_fastspeech2.py)."
    )
    parser.add_argument(
        "--rootdir",
        default=None,
        type=str,
        required=True,
        help="directory including ids/durations files.",
    )
    parser.add_argument(
        "--outdir", type=str, required=True, help="directory to save generated speech."
    )
    parser.add_argument(
        "--checkpoint", type=str, required=True, help="checkpoint file to be loaded."
    )
    parser.add_argument(
        "--config",
        default=None,
        type=str,
        required=True,
        help="yaml format configuration file. if not explicitly provided, "
        "it will be searched in the checkpoint directory. (default=None)",
    )
    parser.add_argument(
        "--batch-size",
        default=8,
        type=int,
        required=False,
        help="Batch size for inference.",
    )
    parser.add_argument(
        "--verbose",
        type=int,
        default=1,
        help="logging level. higher is more logging. (default=1)",
    )
    args = parser.parse_args()

    # set logger: verbosity 2+ -> DEBUG, 1 -> INFO, 0 -> WARN only.
    if args.verbose > 1:
        logging.basicConfig(
            level=logging.DEBUG,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    elif args.verbose > 0:
        logging.basicConfig(
            level=logging.INFO,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    else:
        logging.basicConfig(
            level=logging.WARN,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
        logging.warning("Skip DEBUG/INFO messages")

    # check directory existence
    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)

    # load config; command-line arguments override yaml values of the same key.
    with open(args.config) as f:
        config = yaml.load(f, Loader=yaml.Loader)
    config.update(vars(args))

    if config["format"] == "npy":
        char_query = "*-ids.npy"
        char_load_fn = np.load
    else:
        raise ValueError("Only npy is supported.")

    # define data-loader
    dataset = CharactorDataset(
        root_dir=args.rootdir,
        charactor_query=char_query,
        charactor_load_fn=char_load_fn,
    )
    dataset = dataset.create(batch_size=args.batch_size)

    # define model and load checkpoint.
    # _build() runs a dummy forward pass so weights exist before load_weights.
    fastspeech2 = TFFastSpeech2(
        config=FastSpeech2Config(**config["fastspeech2_params"]), name="fastspeech2"
    )
    fastspeech2._build()
    fastspeech2.load_weights(args.checkpoint)

    for data in tqdm(dataset, desc="Decoding"):
        utt_ids = data["utt_ids"]
        char_ids = data["input_ids"]

        # fastspeech inference. Speaker id 0 with all ratios at 1.0 is assumed
        # to mean "neutral" speed/f0/energy controls — confirm against
        # TFFastSpeech2.inference.
        (
            masked_mel_before,
            masked_mel_after,
            duration_outputs,
            _,
            _,
        ) = fastspeech2.inference(
            char_ids,
            speaker_ids=tf.zeros(shape=[tf.shape(char_ids)[0]], dtype=tf.int32),
            speed_ratios=tf.ones(shape=[tf.shape(char_ids)[0]], dtype=tf.float32),
            f0_ratios=tf.ones(shape=[tf.shape(char_ids)[0]], dtype=tf.float32),
            energy_ratios=tf.ones(shape=[tf.shape(char_ids)[0]], dtype=tf.float32),
        )

        # convert to numpy
        masked_mel_befores = masked_mel_before.numpy()
        masked_mel_afters = masked_mel_after.numpy()

        for (utt_id, mel_before, mel_after, durations) in zip(
            utt_ids, masked_mel_befores, masked_mel_afters, duration_outputs
        ):
            # real len of mel predicted: sum of per-charactor durations
            # (assumes inference returns integral frame counts — TODO confirm,
            # cf. extractfs_postnets.py which rounds explicitly).
            real_length = durations.numpy().sum()
            utt_id = utt_id.numpy().decode("utf-8")
            # save to folder, dropping padded frames beyond real_length.
            np.save(
                os.path.join(args.outdir, f"{utt_id}-fs-before-feats.npy"),
                mel_before[:real_length, :].astype(np.float32),
                allow_pickle=False,
            )
            np.save(
                os.path.join(args.outdir, f"{utt_id}-fs-after-feats.npy"),
                mel_after[:real_length, :].astype(np.float32),
                allow_pickle=False,
            )


if __name__ == "__main__":
    main()
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decode trained FastSpeech from folders.

Extracts teacher-forced postnet mel outputs for every utterance in a dump
folder, e.g. to train a vocoder on predicted (rather than ground-truth) mels.
"""

import argparse
import logging
import os
import sys

sys.path.append(".")

import numpy as np
import tensorflow as tf
import yaml
from tqdm import tqdm

from examples.fastspeech2.fastspeech2_dataset import CharactorDurationF0EnergyMelDataset
from tensorflow_tts.configs import FastSpeech2Config
from tensorflow_tts.models import TFFastSpeech2


def main():
    """Run fastspeech2 decoding from folder.

    Writes one ``{utt_id}-postnet.npy`` (float32, [mel_len, num_mels]) per
    utterance into ``--outdir``/postnets, trimmed to the ground-truth mel
    length.
    """
    parser = argparse.ArgumentParser(
        description="Decode soft-mel features from charactor with trained FastSpeech "
        "(See detail in examples/fastspeech2/decode_fastspeech2.py)."
    )
    parser.add_argument(
        "--rootdir",
        default=None,
        type=str,
        required=True,
        help="directory including ids/durations files.",
    )
    parser.add_argument(
        "--outdir", type=str, required=True, help="directory to save generated speech."
    )
    parser.add_argument(
        "--checkpoint", type=str, required=True, help="checkpoint file to be loaded."
    )
    parser.add_argument(
        "--config",
        default=None,
        type=str,
        required=True,
        help="yaml format configuration file. if not explicitly provided, "
        "it will be searched in the checkpoint directory. (default=None)",
    )
    parser.add_argument(
        "--batch-size",
        default=8,
        type=int,
        required=False,
        help="Batch size for inference.",
    )
    parser.add_argument(
        "--verbose",
        type=int,
        default=1,
        help="logging level. higher is more logging. (default=1)",
    )
    args = parser.parse_args()

    # set logger: verbosity 2+ -> DEBUG, 1 -> INFO, 0 -> WARN only.
    if args.verbose > 1:
        logging.basicConfig(
            level=logging.DEBUG,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    elif args.verbose > 0:
        logging.basicConfig(
            level=logging.INFO,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    else:
        logging.basicConfig(
            level=logging.WARN,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
        logging.warning("Skip DEBUG/INFO messages")

    # check directory existence
    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)

    # load config

    outdpost = os.path.join(args.outdir, "postnets")

    if not os.path.exists(outdpost):
        os.makedirs(outdpost)

    with open(args.config) as f:
        config = yaml.load(f, Loader=yaml.Loader)
    config.update(vars(args))

    if config["format"] == "npy":
        char_query = "*-ids.npy"
        char_load_fn = np.load
    else:
        raise ValueError("Only npy is supported.")

    # define data-loader
    dataset = CharactorDurationF0EnergyMelDataset(
        root_dir=args.rootdir,
        charactor_query=char_query,
        charactor_load_fn=char_load_fn,
    )
    dataset = dataset.create(
        batch_size=1
    )  # force batch size to 1 otherwise it may miss certain files

    # define model and load checkpoint.
    # _build() creates the weights via a dummy forward pass before loading.
    fastspeech2 = TFFastSpeech2(
        config=FastSpeech2Config(**config["fastspeech2_params"]), name="fastspeech2"
    )
    fastspeech2._build()
    fastspeech2.load_weights(args.checkpoint)
    # wrap in tf.function for graph-mode speed across variable-length batches.
    fastspeech2 = tf.function(fastspeech2, experimental_relax_shapes=True)

    for data in tqdm(dataset, desc="Decoding"):
        utt_ids = data["utt_ids"]
        char_ids = data["input_ids"]
        mel_lens = data["mel_lengths"]

        # fastspeech inference. NOTE(review): this is a teacher-forced call
        # (training=True with ground-truth duration/f0/energy from the batch),
        # and it forwards every batch key — including utt_ids/mel_gts — via
        # **data; verify the model's call signature tolerates the extras.
        masked_mel_before, masked_mel_after, duration_outputs, _, _ = fastspeech2(
            **data, training=True
        )

        # convert to numpy
        masked_mel_befores = masked_mel_before.numpy()
        masked_mel_afters = masked_mel_after.numpy()

        for (utt_id, mel_before, mel_after, durations, mel_len) in zip(
            utt_ids, masked_mel_befores, masked_mel_afters, duration_outputs, mel_lens
        ):
            # real len of mel predicted. NOTE(review): real_length is computed
            # but unused — the save below trims with ground-truth mel_len.
            real_length = np.around(durations.numpy().sum()).astype(int)
            utt_id = utt_id.numpy().decode("utf-8")

            np.save(
                os.path.join(outdpost, f"{utt_id}-postnet.npy"),
                mel_after[:mel_len, :].astype(np.float32),
                allow_pickle=False,
            )


if __name__ == "__main__":
    main()
"""Dataset modules."""

import itertools
import logging
import os
import random

import numpy as np
import tensorflow as tf

from tensorflow_tts.datasets.abstract_dataset import AbstractDataset
from tensorflow_tts.utils import find_files


def average_by_duration(x, durs):
    """Average frame-level values into charactor-level values.

    Args:
        x (np.ndarray): Frame-level values (e.g. f0/energy), shape [n_frames].
        durs (np.ndarray): Integer frame counts per charactor, summing to
            the number of frames covered.

    Returns:
        np.ndarray: float32 array of shape [len(durs)] where entry i is the
        mean of the nonzero frames assigned to charactor i (0.0 if all its
        frames are zero, avoiding np.mean([]) = nan).
    """
    mel_len = durs.sum()
    # cumulative frame offsets: durs_cum[i]..durs_cum[i+1] spans charactor i.
    durs_cum = np.cumsum(np.pad(durs, (1, 0)))

    # calculate charactor f0/energy
    x_char = np.zeros((durs.shape[0],), dtype=np.float32)
    # zip truncates to len(durs) pairs; range(mel_len) only bounds idx.
    for idx, start, end in zip(range(mel_len), durs_cum[:-1], durs_cum[1:]):
        values = x[start:end][np.where(x[start:end] != 0.0)[0]]
        x_char[idx] = np.mean(values) if len(values) > 0 else 0.0  # np.mean([]) = nan.

    return x_char.astype(np.float32)


def tf_average_by_duration(x, durs):
    """Graph-compatible wrapper around :func:`average_by_duration`."""
    outs = tf.numpy_function(average_by_duration, [x, durs], tf.float32)
    return outs


class CharactorDurationF0EnergyMelDataset(AbstractDataset):
    """Tensorflow Charactor Duration F0 Energy Mel dataset."""

    def __init__(
        self,
        root_dir,
        charactor_query="*-ids.npy",
        mel_query="*-norm-feats.npy",
        duration_query="*-durations.npy",
        f0_query="*-raw-f0.npy",
        energy_query="*-raw-energy.npy",
        f0_stat="./dump/stats_f0.npy",
        energy_stat="./dump/stats_energy.npy",
        charactor_load_fn=np.load,
        mel_load_fn=np.load,
        duration_load_fn=np.load,
        f0_load_fn=np.load,
        energy_load_fn=np.load,
        mel_length_threshold=0,
    ):
        """Initialize dataset.

        Args:
            root_dir (str): Root directory including dumped files.
            charactor_query (str): Query to find charactor files in root_dir.
            mel_query (str): Query to find feature files in root_dir.
            duration_query (str): Query to find duration files in root_dir.
            f0_query (str): Query to find f0 files in root_dir.
            energy_query (str): Query to find energy files in root_dir.
            f0_stat (str): str path of f0_stat ([mean, std] npy file).
            energy_stat (str): str path of energy_stat ([mean, std] npy file).
            charactor_load_fn (func): Function to load charactor file.
            mel_load_fn (func): Function to load feature file.
            duration_load_fn (func): Function to load duration file.
            f0_load_fn (func): Function to load f0 file.
            energy_load_fn (func): Function to load energy file.
            mel_length_threshold (int): Threshold to remove short feature
                files. None is treated as 0 (keep everything).

        Raises:
            ValueError: If charactor_query is not an .npy query.
        """
        # find all of charactor and mel files.
        charactor_files = sorted(find_files(root_dir, charactor_query))
        mel_files = sorted(find_files(root_dir, mel_query))
        duration_files = sorted(find_files(root_dir, duration_query))
        f0_files = sorted(find_files(root_dir, f0_query))
        energy_files = sorted(find_files(root_dir, energy_query))

        # assert the number of files
        assert len(mel_files) != 0, f"Not found any mel files in {root_dir}."
        assert (
            len(mel_files)
            == len(charactor_files)
            == len(duration_files)
            == len(f0_files)
            == len(energy_files)
        ), (
            "Number of charactor, mel, duration, f0 and energy files are "
            f"different: {len(charactor_files)}, {len(mel_files)}, "
            f"{len(duration_files)}, {len(f0_files)}, {len(energy_files)}."
        )

        if ".npy" in charactor_query:
            suffix = charactor_query[1:]
            utt_ids = [os.path.basename(f).replace(suffix, "") for f in charactor_files]
        else:
            # fail fast instead of hitting a NameError on utt_ids below.
            raise ValueError("Only .npy charactor queries are supported.")

        # set global params
        self.utt_ids = utt_ids
        self.mel_files = mel_files
        self.charactor_files = charactor_files
        self.duration_files = duration_files
        self.f0_files = f0_files
        self.energy_files = energy_files
        # NOTE(review): _load_data below loads with np.load directly, so these
        # load_fn hooks are stored but currently unused — confirm intent.
        self.mel_load_fn = mel_load_fn
        self.charactor_load_fn = charactor_load_fn
        self.duration_load_fn = duration_load_fn
        self.f0_load_fn = f0_load_fn
        self.energy_load_fn = energy_load_fn
        # treat None as 0 so the length filter in create() never compares
        # against None (callers pass None when short samples are kept).
        self.mel_length_threshold = (
            mel_length_threshold if mel_length_threshold is not None else 0
        )

        self.f0_stat = np.load(f0_stat)  # [mean, std]
        self.energy_stat = np.load(energy_stat)  # [mean, std]

    def get_args(self):
        """Return generator arguments (the utterance-id list)."""
        return [self.utt_ids]

    def _norm_mean_std(self, x, mean, std):
        """Mean/std-normalize x, keeping exact zeros (unvoiced frames) at 0."""
        zero_idxs = np.where(x == 0.0)[0]
        x = (x - mean) / std
        x[zero_idxs] = 0.0
        return x

    def _norm_mean_std_tf(self, x, mean, std):
        """Graph-compatible wrapper around _norm_mean_std."""
        x = tf.numpy_function(self._norm_mean_std, [x, mean, std], tf.float32)
        return x

    def generator(self, utt_ids):
        """Yield per-utterance dicts of file paths (loading happens later)."""
        for i, utt_id in enumerate(utt_ids):
            mel_file = self.mel_files[i]
            charactor_file = self.charactor_files[i]
            duration_file = self.duration_files[i]
            f0_file = self.f0_files[i]
            energy_file = self.energy_files[i]

            items = {
                "utt_ids": utt_id,
                "mel_files": mel_file,
                "charactor_files": charactor_file,
                "duration_files": duration_file,
                "f0_files": f0_file,
                "energy_files": energy_file,
            }

            yield items

    @tf.function
    def _load_data(self, items):
        """Load arrays from paths, normalize f0/energy and pool them per charactor."""
        mel = tf.numpy_function(np.load, [items["mel_files"]], tf.float32)
        charactor = tf.numpy_function(np.load, [items["charactor_files"]], tf.int32)
        duration = tf.numpy_function(np.load, [items["duration_files"]], tf.int32)
        f0 = tf.numpy_function(np.load, [items["f0_files"]], tf.float32)
        energy = tf.numpy_function(np.load, [items["energy_files"]], tf.float32)

        f0 = self._norm_mean_std_tf(f0, self.f0_stat[0], self.f0_stat[1])
        energy = self._norm_mean_std_tf(
            energy, self.energy_stat[0], self.energy_stat[1]
        )

        # calculate charactor f0/energy
        f0 = tf_average_by_duration(f0, duration)
        energy = tf_average_by_duration(energy, duration)

        items = {
            "utt_ids": items["utt_ids"],
            "input_ids": charactor,
            "speaker_ids": 0,  # single-speaker dataset.
            "duration_gts": duration,
            "f0_gts": f0,
            "energy_gts": energy,
            "mel_gts": mel,
            "mel_lengths": len(mel),
        }

        return items

    def create(
        self,
        allow_cache=False,
        batch_size=1,
        is_shuffle=False,
        map_fn=None,
        reshuffle_each_iteration=True,
    ):
        """Create tf.dataset function."""
        output_types = self.get_output_dtypes()
        datasets = tf.data.Dataset.from_generator(
            self.generator, output_types=output_types, args=self.get_args()
        )

        # load data
        datasets = datasets.map(
            lambda items: self._load_data(items), tf.data.experimental.AUTOTUNE
        )

        # drop utterances shorter than the threshold (0 keeps everything).
        datasets = datasets.filter(
            lambda x: x["mel_lengths"] > self.mel_length_threshold
        )

        if allow_cache:
            datasets = datasets.cache()

        if is_shuffle:
            datasets = datasets.shuffle(
                self.get_len_dataset(),
                reshuffle_each_iteration=reshuffle_each_iteration,
            )

        # define padded shapes; None dims are padded per batch.
        padded_shapes = {
            "utt_ids": [],
            "input_ids": [None],
            "speaker_ids": [],
            "duration_gts": [None],
            "f0_gts": [None],
            "energy_gts": [None],
            "mel_gts": [None, None],
            "mel_lengths": [],
        }

        datasets = datasets.padded_batch(
            batch_size, padded_shapes=padded_shapes, drop_remainder=True
        )
        datasets = datasets.prefetch(tf.data.experimental.AUTOTUNE)
        return datasets

    def get_output_dtypes(self):
        """Return dtypes of the generator's (path-only) items."""
        output_types = {
            "utt_ids": tf.string,
            "mel_files": tf.string,
            "charactor_files": tf.string,
            "duration_files": tf.string,
            "f0_files": tf.string,
            "energy_files": tf.string,
        }
        return output_types

    def get_len_dataset(self):
        """Return number of utterances."""
        return len(self.utt_ids)

    def __name__(self):
        return "CharactorDurationF0EnergyMelDataset"
"""Train FastSpeech2."""

import tensorflow as tf

# enable memory growth on every visible GPU before anything allocates.
physical_devices = tf.config.list_physical_devices("GPU")
for i in range(len(physical_devices)):
    tf.config.experimental.set_memory_growth(physical_devices[i], True)

import sys

sys.path.append(".")

import argparse
import logging
import os

import numpy as np
import yaml
from tqdm import tqdm

import tensorflow_tts
from examples.fastspeech2.fastspeech2_dataset import CharactorDurationF0EnergyMelDataset
from examples.fastspeech.train_fastspeech import FastSpeechTrainer
from tensorflow_tts.configs import FastSpeech2Config
from tensorflow_tts.models import TFFastSpeech2
from tensorflow_tts.optimizers import AdamWeightDecay, WarmUp
from tensorflow_tts.trainers import Seq2SeqBasedTrainer
from tensorflow_tts.utils import calculate_2d_loss, calculate_3d_loss, return_strategy


class FastSpeech2Trainer(Seq2SeqBasedTrainer):
    """FastSpeech2 Trainer class based on FastSpeechTrainer."""

    def __init__(
        self, config, strategy, steps=0, epochs=0, is_mixed_precision=False,
    ):
        """Initialize trainer.
        Args:
            steps (int): Initial global steps.
            epochs (int): Initial global epochs.
            config (dict): Config dict loaded from yaml format configuration file.
            strategy (tf.distribute.Strategy): Distribution strategy in use.
            is_mixed_precision (bool): Use mixed precision or not.
        """
        super(FastSpeech2Trainer, self).__init__(
            steps=steps,
            epochs=epochs,
            config=config,
            strategy=strategy,
            is_mixed_precision=is_mixed_precision,
        )
        # define metrics to aggregates data and use tf.summary logs them
        self.list_metrics_name = [
            "duration_loss",
            "f0_loss",
            "energy_loss",
            "mel_loss_before",
            "mel_loss_after",
        ]
        self.init_train_eval_metrics(self.list_metrics_name)
        self.reset_states_train()
        self.reset_states_eval()

    def compile(self, model, optimizer):
        """Attach model/optimizer and build per-example loss objects.

        Reduction.NONE keeps per-example losses so the distributed trainer can
        reduce them itself across replicas.
        """
        super().compile(model, optimizer)
        self.mse = tf.keras.losses.MeanSquaredError(
            reduction=tf.keras.losses.Reduction.NONE
        )
        self.mae = tf.keras.losses.MeanAbsoluteError(
            reduction=tf.keras.losses.Reduction.NONE
        )

    def compute_per_example_losses(self, batch, outputs):
        """Compute per example losses and return dict_metrics_losses
        Note that all element of the loss MUST has a shape [batch_size] and
        the keys of dict_metrics_losses MUST be in self.list_metrics_name.

        Args:
            batch: dictionary batch input return from dataloader
            outputs: outputs of the model

        Returns:
            per_example_losses: per example losses for each GPU, shape [B]
            dict_metrics_losses: dictionary loss.
        """
        mel_before, mel_after, duration_outputs, f0_outputs, energy_outputs = outputs

        # duration predictor regresses log(duration + 1), so compare in log space.
        log_duration = tf.math.log(
            tf.cast(tf.math.add(batch["duration_gts"], 1), tf.float32)
        )
        duration_loss = calculate_2d_loss(log_duration, duration_outputs, self.mse)
        f0_loss = calculate_2d_loss(batch["f0_gts"], f0_outputs, self.mse)
        energy_loss = calculate_2d_loss(batch["energy_gts"], energy_outputs, self.mse)
        mel_loss_before = calculate_3d_loss(batch["mel_gts"], mel_before, self.mae)
        mel_loss_after = calculate_3d_loss(batch["mel_gts"], mel_after, self.mae)

        # unweighted sum of the five terms, shape [B].
        per_example_losses = (
            duration_loss + f0_loss + energy_loss + mel_loss_before + mel_loss_after
        )

        dict_metrics_losses = {
            "duration_loss": duration_loss,
            "f0_loss": f0_loss,
            "energy_loss": energy_loss,
            "mel_loss_before": mel_loss_before,
            "mel_loss_after": mel_loss_after,
        }

        return per_example_losses, dict_metrics_losses

    def generate_and_save_intermediate_result(self, batch):
        """Generate and save intermediate result."""
        import matplotlib.pyplot as plt

        # predict with tf.function.
        outputs = self.one_step_predict(batch)

        mels_before, mels_after, *_ = outputs
        mel_gts = batch["mel_gts"]
        utt_ids = batch["utt_ids"]

        # convert to tensor.
        # here we just take a sample at first replica: under a multi-device
        # strategy outputs are PerReplica (.values); otherwise plain tensors.
        try:
            mels_before = mels_before.values[0].numpy()
            mels_after = mels_after.values[0].numpy()
            mel_gts = mel_gts.values[0].numpy()
            utt_ids = utt_ids.values[0].numpy()
        except Exception:
            mels_before = mels_before.numpy()
            mels_after = mels_after.numpy()
            mel_gts = mel_gts.numpy()
            utt_ids = utt_ids.numpy()

        # check directory
        dirname = os.path.join(self.config["outdir"], f"predictions/{self.steps}steps")
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        for idx, (mel_gt, mel_before, mel_after) in enumerate(
            zip(mel_gts, mels_before, mels_after), 0
        ):
            # flatten to [length, 80]; assumes 80 mel bins — TODO confirm
            # against config["fastspeech2_params"]["num_mels"].
            mel_gt = tf.reshape(mel_gt, (-1, 80)).numpy()  # [length, 80]
            mel_before = tf.reshape(mel_before, (-1, 80)).numpy()  # [length, 80]
            mel_after = tf.reshape(mel_after, (-1, 80)).numpy()  # [length, 80]

            # plot figure and save it
            utt_id = utt_ids[idx]
            figname = os.path.join(dirname, f"{utt_id}.png")
            fig = plt.figure(figsize=(10, 8))
            ax1 = fig.add_subplot(311)
            ax2 = fig.add_subplot(312)
            ax3 = fig.add_subplot(313)
            im = ax1.imshow(np.rot90(mel_gt), aspect="auto", interpolation="none")
            ax1.set_title("Target Mel-Spectrogram")
            fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax1)
            ax2.set_title("Predicted Mel-before-Spectrogram")
            im = ax2.imshow(np.rot90(mel_before), aspect="auto", interpolation="none")
            fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax2)
            ax3.set_title("Predicted Mel-after-Spectrogram")
            im = ax3.imshow(np.rot90(mel_after), aspect="auto", interpolation="none")
            fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax3)
            plt.tight_layout()
            plt.savefig(figname)
            plt.close()
", + ) + parser.add_argument( + "--dev-dir", + default=None, + type=str, + help="directory including development data. ", + ) + parser.add_argument( + "--use-norm", default=1, type=int, help="usr norm-mels for train or raw." + ) + parser.add_argument( + "--f0-stat", + default="./dump/stats_f0.npy", + type=str, + required=True, + help="f0-stat path.", + ) + parser.add_argument( + "--energy-stat", + default="./dump/stats_energy.npy", + type=str, + required=True, + help="energy-stat path.", + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save checkpoints." + ) + parser.add_argument( + "--config", type=str, required=True, help="yaml format configuration file." + ) + parser.add_argument( + "--resume", + default="", + type=str, + nargs="?", + help='checkpoint file path to resume training. (default="")', + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + parser.add_argument( + "--mixed_precision", + default=0, + type=int, + help="using mixed precision for generator or not.", + ) + parser.add_argument( + "--pretrained", + default="", + type=str, + nargs="?", + help="pretrained weights .h5 file to load weights from. 
Auto-skips non-matching layers", + ) + + args = parser.parse_args() + + # return strategy + STRATEGY = return_strategy() + + # set mixed precision config + if args.mixed_precision == 1: + tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True}) + + args.mixed_precision = bool(args.mixed_precision) + args.use_norm = bool(args.use_norm) + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # check arguments + if args.train_dir is None: + raise ValueError("Please specify --train-dir") + if args.dev_dir is None: + raise ValueError("Please specify --valid-dir") + + # load and save config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + config["version"] = tensorflow_tts.__version__ + with open(os.path.join(args.outdir, "config.yml"), "w") as f: + yaml.dump(config, f, Dumper=yaml.Dumper) + for key, value in config.items(): + logging.info(f"{key} = {value}") + + # get dataset + if config["remove_short_samples"]: + mel_length_threshold = config["mel_length_threshold"] + else: + mel_length_threshold = None + + if config["format"] == "npy": + charactor_query = "*-ids.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + duration_query = "*-durations.npy" + f0_query = "*-raw-f0.npy" + energy_query = "*-raw-energy.npy" + else: + raise ValueError("Only npy are supported.") + + # 
define train/valid dataset + train_dataset = CharactorDurationF0EnergyMelDataset( + root_dir=args.train_dir, + charactor_query=charactor_query, + mel_query=mel_query, + duration_query=duration_query, + f0_query=f0_query, + energy_query=energy_query, + f0_stat=args.f0_stat, + energy_stat=args.energy_stat, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] + * STRATEGY.num_replicas_in_sync + * config["gradient_accumulation_steps"], + ) + + valid_dataset = CharactorDurationF0EnergyMelDataset( + root_dir=args.dev_dir, + charactor_query=charactor_query, + mel_query=mel_query, + duration_query=duration_query, + f0_query=f0_query, + energy_query=energy_query, + f0_stat=args.f0_stat, + energy_stat=args.energy_stat, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync, + ) + + # define trainer + trainer = FastSpeech2Trainer( + config=config, + strategy=STRATEGY, + steps=0, + epochs=0, + is_mixed_precision=args.mixed_precision, + ) + + with STRATEGY.scope(): + # define model + fastspeech = TFFastSpeech2( + config=FastSpeech2Config(**config["fastspeech2_params"]) + ) + fastspeech._build() + fastspeech.summary() + if len(args.pretrained) > 1: + fastspeech.load_weights(args.pretrained, by_name=True, skip_mismatch=True) + logging.info( + f"Successfully loaded pretrained weight from {args.pretrained}." 
+ ) + + # AdamW for fastspeech + learning_rate_fn = tf.keras.optimizers.schedules.PolynomialDecay( + initial_learning_rate=config["optimizer_params"]["initial_learning_rate"], + decay_steps=config["optimizer_params"]["decay_steps"], + end_learning_rate=config["optimizer_params"]["end_learning_rate"], + ) + + learning_rate_fn = WarmUp( + initial_learning_rate=config["optimizer_params"]["initial_learning_rate"], + decay_schedule_fn=learning_rate_fn, + warmup_steps=int( + config["train_max_steps"] + * config["optimizer_params"]["warmup_proportion"] + ), + ) + + optimizer = AdamWeightDecay( + learning_rate=learning_rate_fn, + weight_decay_rate=config["optimizer_params"]["weight_decay"], + beta_1=0.9, + beta_2=0.98, + epsilon=1e-6, + exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"], + ) + + _ = optimizer.iterations + + # compile trainer + trainer.compile(model=fastspeech, optimizer=optimizer) + + # start training + try: + trainer.fit( + train_dataset, + valid_dataset, + saved_path=os.path.join(config["outdir"], "checkpoints/"), + resume=args.resume, + ) + except KeyboardInterrupt: + trainer.save_checkpoint() + logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.") + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/fastspeech2_libritts/README.md b/TensorFlowTTS/examples/fastspeech2_libritts/README.md new file mode 100644 index 0000000000000000000000000000000000000000..83d1e2bdeb49e66cdfa9b57c9085e673551a848f --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2_libritts/README.md @@ -0,0 +1,64 @@ +# Fast speech 2 multi-speaker english lang based + +## Prepare +Everything is done from main repo folder so TensorflowTTS/ + +0. 
Optional* [Download](http://www.openslr.org/60/) and prepare libritts (helper to prepare libri in examples/fastspeech2_libritts/libri_experiment/prepare_libri.ipynb) +- Dataset structure after finish this step: + ``` + |- TensorFlowTTS/ + | |- LibriTTS/ + | |- |- train-clean-100/ + | |- |- SPEAKERS.txt + | |- |- ... + | |- libritts/ + | |- |- 200/ + | |- |- |- 200_124139_000001_000000.txt + | |- |- |- 200_124139_000001_000000.wav + | |- |- |- ... + | |- |- 250/ + | |- |- ... + | |- tensorflow_tts/ + | |- models/ + | |- ... + ``` +1. Extract Duration (use examples/mfa_extraction or pretrained tacotron2) +2. Optional* build docker +- ``` + bash examples/fastspeech2_libritts/scripts/build.sh + ``` +3. Optional* run docker +- ``` + bash examples/fastspeech2_libritts/scripts/interactive.sh + ``` +4. Preprocessing: +- ``` + tensorflow-tts-preprocess --rootdir ./libritts \ + --outdir ./dump_libritts \ + --config preprocess/libritts_preprocess.yaml \ + --dataset libritts + ``` + +5. Normalization: +- ``` + tensorflow-tts-normalize --rootdir ./dump_libritts \ + --outdir ./dump_libritts \ + --config preprocess/libritts_preprocess.yaml \ + --dataset libritts + ``` + +6. Change CharactorDurationF0EnergyMelDataset speaker mapper in fastspeech2_dataset to match your dataset (if you use libri with mfa_extraction you didnt need to change anything) +7. Change train_libri.sh to match your dataset and run: +- ``` + bash examples/fastspeech2_libritts/scripts/train_libri.sh + ``` +8. Optional* If u have problems with tensor sizes mismatch check step 5 in `examples/mfa_extraction` directory + +## Comments + +This version is using popular train.txt '|' split used in other repos. 
Training files should looks like this => + +Wav Path | Text | Speaker Name + +Wav Path2 | Text | Speaker Name + diff --git a/TensorFlowTTS/examples/fastspeech2_libritts/conf/fastspeech2libritts.yaml b/TensorFlowTTS/examples/fastspeech2_libritts/conf/fastspeech2libritts.yaml new file mode 100644 index 0000000000000000000000000000000000000000..347071d11342a141382ff7a9da847d36e3d7b1d9 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2_libritts/conf/fastspeech2libritts.yaml @@ -0,0 +1,79 @@ +# This is the hyperparameter configuration file for FastSpeech2 v1. +# Please make sure this is adjusted for the LibriTTS dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but a best checkpoint is around 150k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 300 # Hop size. +format: "npy" + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: fastspeech2 + +fastspeech2_params: + dataset: "libritts" + n_speakers: 20 + encoder_hidden_size: 384 + encoder_num_hidden_layers: 4 + encoder_num_attention_heads: 2 + encoder_attention_head_size: 192 # hidden_size // num_attention_heads + encoder_intermediate_size: 1024 + encoder_intermediate_kernel_size: 3 + encoder_hidden_act: "mish" + decoder_hidden_size: 384 + decoder_num_hidden_layers: 4 + decoder_num_attention_heads: 2 + decoder_attention_head_size: 192 # hidden_size // num_attention_heads + decoder_intermediate_size: 1024 + decoder_intermediate_kernel_size: 3 + decoder_hidden_act: "mish" + variant_prediction_num_conv_layers: 2 + variant_predictor_filter: 256 + variant_predictor_kernel_size: 3 + variant_predictor_dropout_rate: 0.5 + num_mels: 80 + hidden_dropout_prob: 0.2 + 
attention_probs_dropout_prob: 0.1 + max_position_embeddings: 2048 + initializer_range: 0.02 + output_attentions: False + output_hidden_states: False + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 32 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 48 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.0001 + end_learning_rate: 0.00001 + decay_steps: 120000 # < train_max_steps is recommend. + warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|encoder|decoder' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 150000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 5000 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +########################################################### +# OTHER SETTING # +########################################################### +use_griffin: true # Use GL on evaluation or not. +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/fastspeech2_libritts/fastspeech2_dataset.py b/TensorFlowTTS/examples/fastspeech2_libritts/fastspeech2_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..3c528c8f4f15cafe29af171a3cb2dbd3a4b34190 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2_libritts/fastspeech2_dataset.py @@ -0,0 +1,256 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Dataset modules.""" + +import os +import numpy as np +import tensorflow as tf + +from tensorflow_tts.datasets.abstract_dataset import AbstractDataset +from tensorflow_tts.utils import find_files + + +def average_by_duration(x, durs): + mel_len = durs.sum() + durs_cum = np.cumsum(np.pad(durs, (1, 0))) + + # calculate charactor f0/energy + x_char = np.zeros((durs.shape[0],), dtype=np.float32) + for idx, start, end in zip(range(mel_len), durs_cum[:-1], durs_cum[1:]): + values = x[start:end][np.where(x[start:end] != 0.0)[0]] + x_char[idx] = np.mean(values) if len(values) > 0 else 0.0 # np.mean([]) = nan. 
+ + return x_char.astype(np.float32) + + +def tf_average_by_duration(x, durs): + outs = tf.numpy_function(average_by_duration, [x, durs], tf.float32) + return outs + + +class CharactorDurationF0EnergyMelDataset(AbstractDataset): + """Tensorflow Charactor Duration F0 Energy Mel dataset.""" + + def __init__( + self, + root_dir, + charactor_query="*-ids.npy", + mel_query="*-norm-feats.npy", + duration_query="*-durations.npy", + f0_query="*-raw-f0.npy", + energy_query="*-raw-energy.npy", + f0_stat="./dump/stats_f0.npy", + energy_stat="./dump/stats_energy.npy", + charactor_load_fn=np.load, + mel_load_fn=np.load, + duration_load_fn=np.load, + f0_load_fn=np.load, + energy_load_fn=np.load, + mel_length_threshold=0, + speakers_map=None + ): + """Initialize dataset. + + Args: + root_dir (str): Root directory including dumped files. + charactor_query (str): Query to find charactor files in root_dir. + mel_query (str): Query to find feature files in root_dir. + duration_query (str): Query to find duration files in root_dir. + f0_query (str): Query to find f0 files in root_dir. + energy_query (str): Query to find energy files in root_dir. + f0_stat (str): str path of f0_stat. + energy_stat (str): str path of energy_stat. + charactor_load_fn (func): Function to load charactor file. + mel_load_fn (func): Function to load feature file. + duration_load_fn (func): Function to load duration file. + f0_load_fn (func): Function to load f0 file. + energy_load_fn (func): Function to load energy file. + mel_length_threshold (int): Threshold to remove short feature files. + speakers_map (dict): Speakers map generated in dataset preprocessing + + """ + # find all of charactor and mel files. 
+ charactor_files = sorted(find_files(root_dir, charactor_query)) + mel_files = sorted(find_files(root_dir, mel_query)) + duration_files = sorted(find_files(root_dir, duration_query)) + f0_files = sorted(find_files(root_dir, f0_query)) + energy_files = sorted(find_files(root_dir, energy_query)) + + # assert the number of files + assert len(mel_files) != 0, f"Not found any mels files in ${root_dir}." + assert ( + len(mel_files) + == len(charactor_files) + == len(duration_files) + == len(f0_files) + == len(energy_files) + ), f"Number of charactor, mel, duration, f0 and energy files are different" + + assert speakers_map != None, f"No speakers map found. Did you set --dataset_mapping?" + + if ".npy" in charactor_query: + suffix = charactor_query[1:] + utt_ids = [os.path.basename(f).replace(suffix, "") for f in charactor_files] + + # set global params + self.utt_ids = utt_ids + self.mel_files = mel_files + self.charactor_files = charactor_files + self.duration_files = duration_files + self.f0_files = f0_files + self.energy_files = energy_files + self.mel_load_fn = mel_load_fn + self.charactor_load_fn = charactor_load_fn + self.duration_load_fn = duration_load_fn + self.f0_load_fn = f0_load_fn + self.energy_load_fn = energy_load_fn + self.mel_length_threshold = mel_length_threshold + self.speakers_map = speakers_map + self.speakers = [self.speakers_map[i.split("_")[0]] for i in self.utt_ids] + print("Speaker: utt_id", list(zip(self.speakers, self.utt_ids))) + self.f0_stat = np.load(f0_stat) + self.energy_stat = np.load(energy_stat) + + def get_args(self): + return [self.utt_ids] + + def _norm_mean_std(self, x, mean, std): + zero_idxs = np.where(x == 0.0)[0] + x = (x - mean) / std + x[zero_idxs] = 0.0 + return x + + def _norm_mean_std_tf(self, x, mean, std): + x = tf.numpy_function(self._norm_mean_std, [x, mean, std], tf.float32) + return x + + def generator(self, utt_ids): + for i, utt_id in enumerate(utt_ids): + mel_file = self.mel_files[i] + charactor_file = 
self.charactor_files[i] + duration_file = self.duration_files[i] + f0_file = self.f0_files[i] + energy_file = self.energy_files[i] + speaker_id = self.speakers[i] + + items = { + "utt_ids": utt_id, + "mel_files": mel_file, + "charactor_files": charactor_file, + "duration_files": duration_file, + "f0_files": f0_file, + "energy_files": energy_file, + "speaker_ids": speaker_id, + } + + yield items + + @tf.function + def _load_data(self, items): + mel = tf.numpy_function(np.load, [items["mel_files"]], tf.float32) + charactor = tf.numpy_function(np.load, [items["charactor_files"]], tf.int32) + duration = tf.numpy_function(np.load, [items["duration_files"]], tf.int32) + f0 = tf.numpy_function(np.load, [items["f0_files"]], tf.float32) + energy = tf.numpy_function(np.load, [items["energy_files"]], tf.float32) + + f0 = self._norm_mean_std_tf(f0, self.f0_stat[0], self.f0_stat[1]) + energy = self._norm_mean_std_tf( + energy, self.energy_stat[0], self.energy_stat[1] + ) + + # calculate charactor f0/energy + f0 = tf_average_by_duration(f0, duration) + energy = tf_average_by_duration(energy, duration) + + items = { + "utt_ids": items["utt_ids"], + "input_ids": charactor, + "speaker_ids": items["speaker_ids"], + "duration_gts": duration, + "f0_gts": f0, + "energy_gts": energy, + "mel_gts": mel, + "mel_lengths": len(mel), + } + + return items + + def create( + self, + allow_cache=False, + batch_size=1, + is_shuffle=False, + map_fn=None, + reshuffle_each_iteration=True, + ): + """Create tf.dataset function.""" + output_types = self.get_output_dtypes() + datasets = tf.data.Dataset.from_generator( + self.generator, output_types=output_types, args=(self.get_args()) + ) + + # load data + datasets = datasets.map( + lambda items: self._load_data(items), tf.data.experimental.AUTOTUNE + ) + + datasets = datasets.filter( + lambda x: x["mel_lengths"] > self.mel_length_threshold + ) + + if allow_cache: + datasets = datasets.cache() + + if is_shuffle: + datasets = datasets.shuffle( + 
self.get_len_dataset(), + reshuffle_each_iteration=reshuffle_each_iteration, + ) + + # define padded shapes + padded_shapes = { + "utt_ids": [], + "input_ids": [None], + "speaker_ids": [], + "duration_gts": [None], + "f0_gts": [None], + "energy_gts": [None], + "mel_gts": [None, None], + "mel_lengths": [], + } + + datasets = datasets.padded_batch( + batch_size, padded_shapes=padded_shapes, drop_remainder=True + ) + datasets = datasets.prefetch(tf.data.experimental.AUTOTUNE) + return datasets + + def get_output_dtypes(self): + output_types = { + "utt_ids": tf.string, + "mel_files": tf.string, + "charactor_files": tf.string, + "duration_files": tf.string, + "f0_files": tf.string, + "energy_files": tf.string, + "speaker_ids": tf.int32, + } + return output_types + + def get_len_dataset(self): + return len(self.utt_ids) + + def __name__(self): + return "CharactorDurationF0EnergyMelDataset" diff --git a/TensorFlowTTS/examples/fastspeech2_libritts/libri_experiment/prepare_libri.ipynb b/TensorFlowTTS/examples/fastspeech2_libritts/libri_experiment/prepare_libri.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..451608ac4a3b2ae6c465ccf8329b3193a4a5d7c0 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2_libritts/libri_experiment/prepare_libri.ipynb @@ -0,0 +1,155 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import random\n", + "import shutil\n", + "import sys\n", + "\n", + "libri_path = \"....../LibriTTS\" # absolute path to TensorFlowTTS.\n", + "dataset_path = \"....../libritts\" # Change to your paths. 
This is a output of re-format dataset.\n", + "subset = \"train-clean-100\"" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "with open(os.path.join(libri_path, \"SPEAKERS.txt\")) as f:\n", + " data = f.readlines()\n", + " \n", + "dataset_info = {}\n", + "max_speakers = 20 # Max number of speakers to train on\n", + "min_len = 20 # Min len of speaker narration time\n", + "max_file_len = 11 # max audio file lenght\n", + "min_file_len = 2 # min audio file lenght" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "possible_dataset = [i.split(\"|\") for i in data[12:] if i.split(\"|\")[2].strip() == subset and float(i.split(\"|\")[3].strip()) >= min_len]" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "ids = [i[0].strip() for i in possible_dataset]" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "import soundfile as sf" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "possible_map = {}\n", + "subset_path = os.path.join(libri_path, subset)\n", + "for i in os.listdir(subset_path):\n", + " if i in ids:\n", + " id_path = os.path.join(subset_path, i)\n", + " id_dur = 0\n", + " id_included = []\n", + " \n", + " for k in os.listdir(id_path):\n", + " for j in os.listdir(os.path.join(id_path, k)):\n", + " if \".wav\" in j:\n", + " f_path = os.path.join(id_path, k, j)\n", + " sf_file = sf.SoundFile(f_path)\n", + " dur = len(sf_file) / sf_file.samplerate\n", + " if max_file_len < dur or dur < min_file_len:\n", + " continue\n", + " else:\n", + " id_included.append(f_path)\n", + " id_dur += dur\n", + " possible_map[i] = {\"dur\": id_dur, \"included\": id_included}\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ 
+ "poss_speakers = {k: v[\"included\"] for k, v in possible_map.items() if v[\"dur\"]/60 >= min_len}" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "to_move = list(poss_speakers.keys())\n", + "random.shuffle(to_move)\n", + "to_move = to_move[:max_speakers]" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "for sp_id, v in poss_speakers.items():\n", + " if sp_id in to_move:\n", + " for j in v:\n", + " f_name = j.split(os.path.sep)[-1]\n", + " text_f_name = f_name.split(\".wav\")[0] + \".txt\"\n", + " os.makedirs(os.path.join(dataset_path, sp_id), exist_ok=True)\n", + " shutil.copy(j, os.path.join(dataset_path, sp_id, f_name))\n", + " shutil.copy(j.replace(\".wav\", \".normalized.txt\"), os.path.join(dataset_path, sp_id, text_f_name))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/TensorFlowTTS/examples/fastspeech2_libritts/scripts/build.sh b/TensorFlowTTS/examples/fastspeech2_libritts/scripts/build.sh new file mode 100644 index 0000000000000000000000000000000000000000..40be0e4ce5e1055b632f4f10d9badc7237bc4ff3 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2_libritts/scripts/build.sh @@ -0,0 +1,2 @@ +#!/bin/bash +docker build --rm -t tftts -f examples/fastspeech2_libritts/scripts/docker/Dockerfile . 
diff --git a/TensorFlowTTS/examples/fastspeech2_libritts/scripts/docker/Dockerfile b/TensorFlowTTS/examples/fastspeech2_libritts/scripts/docker/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..38c6944d43cca02ff347aa74884e3695c06c2a56 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2_libritts/scripts/docker/Dockerfile @@ -0,0 +1,7 @@ +FROM tensorflow/tensorflow:2.2.0-gpu +RUN apt-get update +RUN apt-get install -y zsh tmux wget git libsndfile1 +ADD . /workspace/tts +WORKDIR /workspace/tts +RUN pip install . + diff --git a/TensorFlowTTS/examples/fastspeech2_libritts/scripts/interactive.sh b/TensorFlowTTS/examples/fastspeech2_libritts/scripts/interactive.sh new file mode 100644 index 0000000000000000000000000000000000000000..b92d92836e34210cf6e075ff3372d6f087063d9a --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2_libritts/scripts/interactive.sh @@ -0,0 +1,2 @@ +#!/bin/bash +docker run --gpus all --shm-size=1g --ulimit memlock=-1 --ulimit stack=67108864 -it --rm --ipc=host -p 8888:8888 -v $PWD:/workspace/tts/ tftts bash diff --git a/TensorFlowTTS/examples/fastspeech2_libritts/scripts/train_libri.sh b/TensorFlowTTS/examples/fastspeech2_libritts/scripts/train_libri.sh new file mode 100644 index 0000000000000000000000000000000000000000..a3cdc8e0c3407ea59cdf3a997cc8179b177a2819 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2_libritts/scripts/train_libri.sh @@ -0,0 +1,11 @@ +CUDA_VISIBLE_DEVICES=0 python examples/fastspeech2_libritts/train_fastspeech2.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/fastspeech2_libritts/outdir_libri/ \ + --config ./examples/fastspeech2_libritts/conf/fastspeech2libritts.yaml \ + --use-norm 1 \ + --f0-stat ./dump/stats_f0.npy \ + --energy-stat ./dump/stats_energy.npy \ + --mixed_precision 1 \ + --dataset_config preprocess/libritts_preprocess.yaml \ + --dataset_stats dump/stats.npy \ No newline at end of file diff --git 
a/TensorFlowTTS/examples/fastspeech2_libritts/train_fastspeech2.py b/TensorFlowTTS/examples/fastspeech2_libritts/train_fastspeech2.py new file mode 100644 index 0000000000000000000000000000000000000000..0f9367acb1cddb9a07d7c60d313c6340de5fc7b9 --- /dev/null +++ b/TensorFlowTTS/examples/fastspeech2_libritts/train_fastspeech2.py @@ -0,0 +1,489 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Train FastSpeech2.""" + +import tensorflow as tf + +physical_devices = tf.config.list_physical_devices("GPU") +for i in range(len(physical_devices)): + tf.config.experimental.set_memory_growth(physical_devices[i], True) + +import sys + +sys.path.append(".") + +import argparse +import logging +import os + +import numpy as np +import yaml +import json + +import tensorflow_tts +from examples.fastspeech2_libritts.fastspeech2_dataset import ( + CharactorDurationF0EnergyMelDataset, +) +from tensorflow_tts.configs import FastSpeech2Config +from tensorflow_tts.models import TFFastSpeech2 +from tensorflow_tts.optimizers import AdamWeightDecay, WarmUp +from tensorflow_tts.trainers import Seq2SeqBasedTrainer +from tensorflow_tts.utils import ( + calculate_2d_loss, + calculate_3d_loss, + return_strategy, + TFGriffinLim, +) + + +class FastSpeech2Trainer(Seq2SeqBasedTrainer): + """FastSpeech2 Trainer class based on FastSpeechTrainer.""" + + def __init__( + self, + config, + strategy, + steps=0, + epochs=0, + 
is_mixed_precision=False, + stats_path: str = "", + dataset_config: str = "", + ): + """Initialize trainer. + Args: + steps (int): Initial global steps. + epochs (int): Initial global epochs. + config (dict): Config dict loaded from yaml format configuration file. + is_mixed_precision (bool): Use mixed precision or not. + """ + super(FastSpeech2Trainer, self).__init__( + steps=steps, + epochs=epochs, + config=config, + strategy=strategy, + is_mixed_precision=is_mixed_precision, + ) + # define metrics to aggregates data and use tf.summary logs them + self.list_metrics_name = [ + "duration_loss", + "f0_loss", + "energy_loss", + "mel_loss_before", + "mel_loss_after", + ] + self.init_train_eval_metrics(self.list_metrics_name) + self.reset_states_train() + self.reset_states_eval() + self.use_griffin = config.get("use_griffin", False) + self.griffin_lim_tf = None + if self.use_griffin: + logging.info( + f"Load griff stats from {stats_path} and config from {dataset_config}" + ) + self.griff_conf = yaml.load(open(dataset_config), Loader=yaml.Loader) + self.prepare_grim(stats_path, self.griff_conf) + + def prepare_grim(self, stats_path, config): + if not stats_path: + raise KeyError("stats path need to exist") + self.griffin_lim_tf = TFGriffinLim(stats_path, config) + + def compile(self, model, optimizer): + super().compile(model, optimizer) + self.mse = tf.keras.losses.MeanSquaredError( + reduction=tf.keras.losses.Reduction.NONE + ) + self.mae = tf.keras.losses.MeanAbsoluteError( + reduction=tf.keras.losses.Reduction.NONE + ) + + def compute_per_example_losses(self, batch, outputs): + """Compute per example losses and return dict_metrics_losses + Note that all element of the loss MUST has a shape [batch_size] and + the keys of dict_metrics_losses MUST be in self.list_metrics_name. 
+ + Args: + batch: dictionary batch input return from dataloader + outputs: outputs of the model + + Returns: + per_example_losses: per example losses for each GPU, shape [B] + dict_metrics_losses: dictionary loss. + """ + mel_before, mel_after, duration_outputs, f0_outputs, energy_outputs = outputs + + log_duration = tf.math.log( + tf.cast(tf.math.add(batch["duration_gts"], 1), tf.float32) + ) + duration_loss = calculate_2d_loss(log_duration, duration_outputs, self.mse) + f0_loss = calculate_2d_loss(batch["f0_gts"], f0_outputs, self.mse) + energy_loss = calculate_2d_loss(batch["energy_gts"], energy_outputs, self.mse) + mel_loss_before = calculate_3d_loss(batch["mel_gts"], mel_before, self.mae) + mel_loss_after = calculate_3d_loss(batch["mel_gts"], mel_after, self.mae) + + per_example_losses = ( + duration_loss + f0_loss + energy_loss + mel_loss_before + mel_loss_after + ) + + dict_metrics_losses = { + "duration_loss": duration_loss, + "f0_loss": f0_loss, + "energy_loss": energy_loss, + "mel_loss_before": mel_loss_before, + "mel_loss_after": mel_loss_after, + } + + return per_example_losses, dict_metrics_losses + + def generate_and_save_intermediate_result(self, batch): + """Generate and save intermediate result.""" + import matplotlib.pyplot as plt + + # predict with tf.function. + outputs = self.one_step_predict(batch) + + mels_before, mels_after, *_ = outputs + mel_gts = batch["mel_gts"] + utt_ids = batch["utt_ids"] + + # convert to tensor. + # here we just take a sample at first replica. 
+ try: + mels_before = mels_before.values[0].numpy() + mels_after = mels_after.values[0].numpy() + mel_gts = mel_gts.values[0].numpy() + utt_ids = utt_ids.values[0].numpy() + except Exception: + mels_before = mels_before.numpy() + mels_after = mels_after.numpy() + mel_gts = mel_gts.numpy() + utt_ids = utt_ids.numpy() + + # check directory + if self.use_griffin: + griff_dir_name = os.path.join( + self.config["outdir"], f"predictions/{self.steps}_wav" + ) + if not os.path.exists(griff_dir_name): + os.makedirs(griff_dir_name) + + dirname = os.path.join(self.config["outdir"], f"predictions/{self.steps}steps") + if not os.path.exists(dirname): + os.makedirs(dirname) + + for idx, (mel_gt, mel_before, mel_after) in enumerate( + zip(mel_gts, mels_before, mels_after), 0 + ): + + if self.use_griffin: + utt_id = utt_ids[idx] + grif_before = self.griffin_lim_tf( + tf.reshape(mel_before, [-1, 80])[tf.newaxis, :], n_iter=32 + ) + grif_after = self.griffin_lim_tf( + tf.reshape(mel_after, [-1, 80])[tf.newaxis, :], n_iter=32 + ) + grif_gt = self.griffin_lim_tf( + tf.reshape(mel_gt, [-1, 80])[tf.newaxis, :], n_iter=32 + ) + self.griffin_lim_tf.save_wav( + grif_before, griff_dir_name, f"{utt_id}_before" + ) + self.griffin_lim_tf.save_wav( + grif_after, griff_dir_name, f"{utt_id}_after" + ) + self.griffin_lim_tf.save_wav(grif_gt, griff_dir_name, f"{utt_id}_gt") + + utt_id = utt_ids[idx] + mel_gt = tf.reshape(mel_gt, (-1, 80)).numpy() # [length, 80] + mel_before = tf.reshape(mel_before, (-1, 80)).numpy() # [length, 80] + mel_after = tf.reshape(mel_after, (-1, 80)).numpy() # [length, 80] + + # plot figure and save it + figname = os.path.join(dirname, f"{utt_id}.png") + fig = plt.figure(figsize=(10, 8)) + ax1 = fig.add_subplot(311) + ax2 = fig.add_subplot(312) + ax3 = fig.add_subplot(313) + im = ax1.imshow(np.rot90(mel_gt), aspect="auto", interpolation="none") + ax1.set_title("Target Mel-Spectrogram") + fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax1) +
ax2.set_title("Predicted Mel-before-Spectrogram") + im = ax2.imshow(np.rot90(mel_before), aspect="auto", interpolation="none") + fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax2) + ax3.set_title("Predicted Mel-after-Spectrogram") + im = ax3.imshow(np.rot90(mel_after), aspect="auto", interpolation="none") + fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax3) + plt.tight_layout() + plt.savefig(figname) + plt.close() + + +def main(): + """Run training process.""" + parser = argparse.ArgumentParser( + description="Train FastSpeech (See detail in tensorflow_tts/bin/train-fastspeech.py)" + ) + parser.add_argument( + "--train-dir", + default="dump/train", + type=str, + help="directory including training data. ", + ) + parser.add_argument( + "--dev-dir", + default="dump/valid", + type=str, + help="directory including development data. ", + ) + parser.add_argument( + "--use-norm", default=1, type=int, help="usr norm-mels for train or raw." + ) + parser.add_argument( + "--f0-stat", default="./dump/stats_f0.npy", type=str, help="f0-stat path.", + ) + parser.add_argument( + "--energy-stat", + default="./dump/stats_energy.npy", + type=str, + help="energy-stat path.", + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save checkpoints." + ) + parser.add_argument( + "--config", type=str, required=True, help="yaml format configuration file." + ) + parser.add_argument( + "--resume", + default="", + type=str, + nargs="?", + help='checkpoint file path to resume training. (default="")', + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. 
(default=1)", + ) + parser.add_argument( + "--mixed_precision", + default=1, + type=int, + help="using mixed precision for generator or not.", + ) + parser.add_argument( + "--dataset_config", default="preprocess/libritts_preprocess.yaml", type=str, + ) + parser.add_argument( + "--dataset_stats", default="dump/stats.npy", type=str, + ) + parser.add_argument( + "--dataset_mapping", default="dump/libritts_mapper.npy", type=str, + ) + parser.add_argument( + "--pretrained", + default="", + type=str, + nargs="?", + help="pretrained weights .h5 file to load weights from. Auto-skips non-matching layers", + ) + args = parser.parse_args() + + # return strategy + STRATEGY = return_strategy() + + # set mixed precision config + if args.mixed_precision == 1: + tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True}) + + args.mixed_precision = bool(args.mixed_precision) + args.use_norm = bool(args.use_norm) + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # check arguments + if args.train_dir is None: + raise ValueError("Please specify --train-dir") + if args.dev_dir is None: + raise ValueError("Please specify --valid-dir") + + # load and save config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + config["version"] = tensorflow_tts.__version__ + with open(os.path.join(args.outdir, "config.yml"), "w") as f: + 
yaml.dump(config, f, Dumper=yaml.Dumper) + for key, value in config.items(): + logging.info(f"{key} = {value}") + + # get dataset + if config["remove_short_samples"]: + mel_length_threshold = config["mel_length_threshold"] + else: + mel_length_threshold = None + + if config["format"] == "npy": + charactor_query = "*-ids.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + duration_query = "*-durations.npy" + f0_query = "*-raw-f0.npy" + energy_query = "*-raw-energy.npy" + else: + raise ValueError("Only npy are supported.") + + # load speakers map from dataset map + with open(args.dataset_mapping) as f: + dataset_mapping = json.load(f) + speakers_map = dataset_mapping["speakers_map"] + + # Check n_speakers matches number of speakers in speakers_map + n_speakers = config["fastspeech2_params"]["n_speakers"] + assert n_speakers == len( + speakers_map + ), f"Number of speakers in dataset does not match n_speakers in config" + + # define train/valid dataset + train_dataset = CharactorDurationF0EnergyMelDataset( + root_dir=args.train_dir, + charactor_query=charactor_query, + mel_query=mel_query, + duration_query=duration_query, + f0_query=f0_query, + energy_query=energy_query, + f0_stat=args.f0_stat, + energy_stat=args.energy_stat, + mel_length_threshold=mel_length_threshold, + speakers_map=speakers_map, + ).create( + is_shuffle=config["is_shuffle"], + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] + * STRATEGY.num_replicas_in_sync + * config["gradient_accumulation_steps"], + ) + + valid_dataset = CharactorDurationF0EnergyMelDataset( + root_dir=args.dev_dir, + charactor_query=charactor_query, + mel_query=mel_query, + duration_query=duration_query, + f0_query=f0_query, + energy_query=energy_query, + f0_stat=args.f0_stat, + energy_stat=args.energy_stat, + mel_length_threshold=mel_length_threshold, + speakers_map=speakers_map, + ).create( + is_shuffle=config["is_shuffle"], + allow_cache=config["allow_cache"], + 
batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync, + ) + + # define trainer + trainer = FastSpeech2Trainer( + config=config, + strategy=STRATEGY, + steps=0, + epochs=0, + is_mixed_precision=args.mixed_precision, + stats_path=args.dataset_stats, + dataset_config=args.dataset_config, + ) + + with STRATEGY.scope(): + # define model + fastspeech = TFFastSpeech2( + config=FastSpeech2Config(**config["fastspeech2_params"]) + ) + fastspeech._build() + fastspeech.summary() + + if len(args.pretrained) > 1: + fastspeech.load_weights(args.pretrained, by_name=True, skip_mismatch=True) + logging.info( + f"Successfully loaded pretrained weight from {args.pretrained}." + ) + + # AdamW for fastspeech + learning_rate_fn = tf.keras.optimizers.schedules.PolynomialDecay( + initial_learning_rate=config["optimizer_params"]["initial_learning_rate"], + decay_steps=config["optimizer_params"]["decay_steps"], + end_learning_rate=config["optimizer_params"]["end_learning_rate"], + ) + + learning_rate_fn = WarmUp( + initial_learning_rate=config["optimizer_params"]["initial_learning_rate"], + decay_schedule_fn=learning_rate_fn, + warmup_steps=int( + config["train_max_steps"] + * config["optimizer_params"]["warmup_proportion"] + ), + ) + + optimizer = AdamWeightDecay( + learning_rate=learning_rate_fn, + weight_decay_rate=config["optimizer_params"]["weight_decay"], + beta_1=0.9, + beta_2=0.98, + epsilon=1e-6, + exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"], + ) + + _ = optimizer.iterations + + # compile trainer + trainer.compile(model=fastspeech, optimizer=optimizer) + + # start training + try: + trainer.fit( + train_dataset, + valid_dataset, + saved_path=os.path.join(config["outdir"], "checkpoints/"), + resume=args.resume, + ) + except KeyboardInterrupt: + trainer.save_checkpoint() + logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.") + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/hifigan/README.md 
b/TensorFlowTTS/examples/hifigan/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c2e7ca35f3d4ef3a8ec98cb09c4805ed72e38207 --- /dev/null +++ b/TensorFlowTTS/examples/hifigan/README.md @@ -0,0 +1,65 @@ +# HiFi-GAN: Generative Adversarial Networks for Efficient and High Fidelity Speech Synthesis +Based on the script [`train_hifigan.py`](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/hifigan/train_hifigan.py). + +## Training HiFi-GAN from scratch with LJSpeech dataset. +This example code shows you how to train HiFi-GAN from scratch with TensorFlow 2 based on a custom training loop and tf.function. The data used for this example is LJSpeech, you can download the dataset at [link](https://keithito.com/LJ-Speech-Dataset/). + +### Step 1: Create Tensorflow based Dataloader (tf.dataset) +First, you need to define a data loader based on the AbstractDataset class (see [`abstract_dataset.py`](https://github.com/tensorspeech/TensorFlowTTS/tree/master/tensorflow_tts/datasets/abstract_dataset.py)). In this example, a dataloader reads the dataset from a path. I use a suffix to classify which file is audio and which is a mel-spectrogram (see [`audio_mel_dataset.py`](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/melgan/audio_mel_dataset.py)). If you already have a preprocessed version of your target dataset, you don't need to use this example dataloader, you just need to refer to my dataloader and modify the **generator function** to adapt to your case. Normally, a generator function should return [audio, mel]. + +### Step 2: Training from scratch +After you re-define your dataloader, please modify the input arguments, train_dataset and valid_dataset from [`train_hifigan.py`](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/hifigan/train_hifigan.py).
Here is an example command line for training HiFi-GAN from scratch: + +First, you need to train the generator with only the stft loss: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/hifigan/train_hifigan.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/hifigan/exp/train.hifigan.v1/ \ + --config ./examples/hifigan/conf/hifigan.v1.yaml \ + --use-norm 1 \ + --generator_mixed_precision 1 \ + --resume "" +``` + +Then resume and start training generator + discriminator: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/hifigan/train_hifigan.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/hifigan/exp/train.hifigan.v1/ \ + --config ./examples/hifigan/conf/hifigan.v1.yaml \ + --use-norm 1 \ + --resume ./examples/hifigan/exp/train.hifigan.v1/checkpoints/ckpt-100000 +``` + +If you want to use MultiGPU for training you can replace `CUDA_VISIBLE_DEVICES=0` by `CUDA_VISIBLE_DEVICES=0,1,2,3` for example. You also need to tune the `batch_size` for each GPU (in config file) by yourself to maximize the performance. Note that MultiGPU is now supported for training but not yet for decoding. + +In case you want to resume the training progress, please follow the below example command line: + +```bash +--resume ./examples/hifigan/exp/train.hifigan.v1/checkpoints/ckpt-100000 +``` + +If you want to finetune a model, use `--pretrained` like this with the filename of the generator +```bash +--pretrained ptgenerator.h5 +``` + +**IMPORTANT NOTES**: + +- When training generator only, we enable mixed precision to speed up the training process. +- We don't apply mixed precision when training both generator and discriminator. (The discriminator includes group-convolution, which makes the discriminator slower when mixed precision is enabled.) +- 100k here is the *discriminator_train_start_steps* parameter from [hifigan.v1.yaml](https://github.com/tensorspeech/TensorflowTTS/tree/master/examples/hifigan/conf/hifigan.v1.yaml) + + +## Reference + +1.
https://github.com/descriptinc/melgan-neurips +2. https://github.com/kan-bayashi/ParallelWaveGAN +3. https://github.com/tensorflow/addons +4. [HiFi-GAN: Generative Adversarial Networks for Efficient and High Fidelity Speech Synthesis](https://arxiv.org/abs/2010.05646) +5. [MelGAN: Generative Adversarial Networks for Conditional Waveform Synthesis](https://arxiv.org/abs/1910.06711) +6. [Parallel WaveGAN: A fast waveform generation model based on generative adversarial networks with multi-resolution spectrogram](https://arxiv.org/abs/1910.11480) \ No newline at end of file diff --git a/TensorFlowTTS/examples/hifigan/conf/hifigan.v1.yaml b/TensorFlowTTS/examples/hifigan/conf/hifigan.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..26ea9dcc385e7f0d7c60534483ea7f819a319913 --- /dev/null +++ b/TensorFlowTTS/examples/hifigan/conf/hifigan.v1.yaml @@ -0,0 +1,116 @@ + +# This is the hyperparameter configuration file for Hifigan. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 4000k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 # Sampling rate of dataset. +hop_size: 256 # Hop size. 
+format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "hifigan_generator" + +hifigan_generator_params: + out_channels: 1 + kernel_size: 7 + filters: 512 + use_bias: true + upsample_scales: [8, 8, 2, 2] + stacks: 3 + stack_kernel_size: [3, 7, 11] + stack_dilation_rate: [[1, 3, 5], [1, 3, 5], [1, 3, 5]] + use_final_nolinear_activation: true + is_weight_norm: false + +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +hifigan_discriminator_params: + out_channels: 1 # Number of output channels (number of subbands). + period_scales: [2, 3, 5, 7, 11] # List of period scales. + n_layers: 5 # Number of layer of each period discriminator. + kernel_size: 5 # Kernel size. + strides: 3 # Strides + filters: 8 # In Conv filters of each period discriminator + filter_scales: 4 # Filter scales. + max_filters: 1024 # maximum filters of period discriminator's conv. + is_weight_norm: false # Use weight-norm or not. + +melgan_discriminator_params: + out_channels: 1 # Number of output channels. + scales: 3 # Number of multi-scales. + downsample_pooling: "AveragePooling1D" # Pooling type for the input downsampling. + downsample_pooling_params: # Parameters of the above pooling function. + pool_size: 4 + strides: 2 + kernel_sizes: [5, 3] # List of kernel size. + filters: 16 # Number of channels of the initial conv layer. + max_downsample_filters: 1024 # Maximum number of channels of downsampling layers. + downsample_scales: [4, 4, 4, 4] # List of downsampling scales. + nonlinear_activation: "LeakyReLU" # Nonlinear activation function. + nonlinear_activation_params: # Parameters of nonlinear activation function. + alpha: 0.2 + is_weight_norm: false # Use weight-norm or not. 
+ +########################################################### +# STFT LOSS SETTING # +########################################################### +stft_loss_params: + fft_lengths: [1024, 2048, 512] # List of FFT size for STFT-based loss. + frame_steps: [120, 240, 50] # List of hop size for STFT-based loss + frame_lengths: [600, 1200, 240] # List of window length for STFT-based loss. + +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_feat_match: 10.0 +lambda_adv: 4.0 + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +batch_max_steps: 8192 # Length of each audio in batch for training. Make sure divisible by hop_size. +batch_max_steps_valid: 81920 # Length of each audio for validation. Make sure divisible by hop_size. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000, 600000, 700000] + values: [0.000125, 0.000125, 0.0000625, 0.0000625, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +discriminator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000] + values: [0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +gradient_accumulation_steps: 1 # should be even number or 1.
+########################################################### +# INTERVAL SETTING # +########################################################### +discriminator_train_start_steps: 100000 # steps begin training discriminator +train_max_steps: 4000000 # Number of training steps. +save_interval_steps: 20000 # Interval steps to save checkpoint. +eval_interval_steps: 5000 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. + +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/hifigan/conf/hifigan.v2.yaml b/TensorFlowTTS/examples/hifigan/conf/hifigan.v2.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6bf942520ca4f1a722a5d429ff4e8e2ce7427383 --- /dev/null +++ b/TensorFlowTTS/examples/hifigan/conf/hifigan.v2.yaml @@ -0,0 +1,116 @@ + +# This is the hyperparameter configuration file for Hifigan. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 4000k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 # Sampling rate of dataset. +hop_size: 256 # Hop size. 
+format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "hifigan_generator" + +hifigan_generator_params: + out_channels: 1 + kernel_size: 7 + filters: 128 + use_bias: true + upsample_scales: [8, 8, 2, 2] + stacks: 3 + stack_kernel_size: [3, 7, 11] + stack_dilation_rate: [[1, 3, 5], [1, 3, 5], [1, 3, 5]] + use_final_nolinear_activation: true + is_weight_norm: false + +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +hifigan_discriminator_params: + out_channels: 1 # Number of output channels (number of subbands). + period_scales: [2, 3, 5, 7, 11] # List of period scales. + n_layers: 5 # Number of layer of each period discriminator. + kernel_size: 5 # Kernel size. + strides: 3 # Strides + filters: 8 # In Conv filters of each period discriminator + filter_scales: 4 # Filter scales. + max_filters: 512 # maximum filters of period discriminator's conv. + is_weight_norm: false # Use weight-norm or not. + +melgan_discriminator_params: + out_channels: 1 # Number of output channels. + scales: 3 # Number of multi-scales. + downsample_pooling: "AveragePooling1D" # Pooling type for the input downsampling. + downsample_pooling_params: # Parameters of the above pooling function. + pool_size: 4 + strides: 2 + kernel_sizes: [5, 3] # List of kernel size. + filters: 16 # Number of channels of the initial conv layer. + max_downsample_filters: 512 # Maximum number of channels of downsampling layers. + downsample_scales: [4, 4, 4, 4] # List of downsampling scales. + nonlinear_activation: "LeakyReLU" # Nonlinear activation function. + nonlinear_activation_params: # Parameters of nonlinear activation function. + alpha: 0.2 + is_weight_norm: false # Use weight-norm or not. 
+ +########################################################### +# STFT LOSS SETTING # +########################################################### +stft_loss_params: + fft_lengths: [1024, 2048, 512] # List of FFT size for STFT-based loss. + frame_steps: [120, 240, 50] # List of hop size for STFT-based loss + frame_lengths: [600, 1200, 240] # List of window length for STFT-based loss. + +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_feat_match: 10.0 +lambda_adv: 4.0 + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +batch_max_steps: 8192 # Length of each audio in batch for training. Make sure divisible by hop_size. +batch_max_steps_valid: 81920 # Length of each audio for validation. Make sure divisible by hop_size. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000, 600000, 700000] + values: [0.000125, 0.000125, 0.0000625, 0.0000625, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +discriminator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000] + values: [0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +gradient_accumulation_steps: 1 # should be even number or 1.
+########################################################### +# INTERVAL SETTING # +########################################################### +discriminator_train_start_steps: 100000 # steps begin training discriminator +train_max_steps: 4000000 # Number of training steps. +save_interval_steps: 20000 # Interval steps to save checkpoint. +eval_interval_steps: 5000 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. + +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/hifigan/train_hifigan.py b/TensorFlowTTS/examples/hifigan/train_hifigan.py new file mode 100644 index 0000000000000000000000000000000000000000..362c10723156faf716771b10597b548b3ec5bb69 --- /dev/null +++ b/TensorFlowTTS/examples/hifigan/train_hifigan.py @@ -0,0 +1,325 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Train Hifigan.""" + +import tensorflow as tf + +physical_devices = tf.config.list_physical_devices("GPU") +for i in range(len(physical_devices)): + tf.config.experimental.set_memory_growth(physical_devices[i], True) + +import sys + +sys.path.append(".") + +import argparse +import logging +import os + +import numpy as np +import soundfile as sf +import yaml +from tqdm import tqdm + +import tensorflow_tts +from examples.melgan.audio_mel_dataset import AudioMelDataset +from examples.melgan.train_melgan import collater +from examples.melgan_stft.train_melgan_stft import MultiSTFTMelganTrainer +from tensorflow_tts.configs import ( + HifiGANDiscriminatorConfig, + HifiGANGeneratorConfig, + MelGANDiscriminatorConfig, +) +from tensorflow_tts.models import ( + TFHifiGANGenerator, + TFHifiGANMultiPeriodDiscriminator, + TFMelGANMultiScaleDiscriminator, +) +from tensorflow_tts.utils import return_strategy + + +class TFHifiGANDiscriminator(tf.keras.Model): + def __init__(self, multiperiod_dis, multiscale_dis, **kwargs): + super().__init__(**kwargs) + self.multiperiod_dis = multiperiod_dis + self.multiscale_dis = multiscale_dis + + def call(self, x): + outs = [] + period_outs = self.multiperiod_dis(x) + scale_outs = self.multiscale_dis(x) + outs.extend(period_outs) + outs.extend(scale_outs) + return outs + + +def main(): + """Run training process.""" + parser = argparse.ArgumentParser( + description="Train Hifigan (See detail in examples/hifigan/train_hifigan.py)" + ) + parser.add_argument( + "--train-dir", + default=None, + type=str, + help="directory including training data. ", + ) + parser.add_argument( + "--dev-dir", + default=None, + type=str, + help="directory including development data. ", + ) + parser.add_argument( + "--use-norm", default=1, type=int, help="use norm mels for training or raw." + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save checkpoints." 
+ ) + parser.add_argument( + "--config", type=str, required=True, help="yaml format configuration file." + ) + parser.add_argument( + "--resume", + default="", + type=str, + nargs="?", + help='checkpoint file path to resume training. (default="")', + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + parser.add_argument( + "--generator_mixed_precision", + default=0, + type=int, + help="using mixed precision for generator or not.", + ) + parser.add_argument( + "--discriminator_mixed_precision", + default=0, + type=int, + help="using mixed precision for discriminator or not.", + ) + parser.add_argument( + "--pretrained", + default="", + type=str, + nargs="?", + help="path of .h5 melgan generator to load weights from", + ) + args = parser.parse_args() + + # return strategy + STRATEGY = return_strategy() + + # set mixed precision config + if args.generator_mixed_precision == 1 or args.discriminator_mixed_precision == 1: + tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True}) + + args.generator_mixed_precision = bool(args.generator_mixed_precision) + args.discriminator_mixed_precision = bool(args.discriminator_mixed_precision) + + args.use_norm = bool(args.use_norm) + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # check arguments + if args.train_dir is None: + raise 
ValueError("Please specify --train-dir") + if args.dev_dir is None: + raise ValueError("Please specify either --valid-dir") + + # load and save config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + config["version"] = tensorflow_tts.__version__ + with open(os.path.join(args.outdir, "config.yml"), "w") as f: + yaml.dump(config, f, Dumper=yaml.Dumper) + for key, value in config.items(): + logging.info(f"{key} = {value}") + + # get dataset + if config["remove_short_samples"]: + mel_length_threshold = config["batch_max_steps"] // config[ + "hop_size" + ] + 2 * config["hifigan_generator_params"].get("aux_context_window", 0) + else: + mel_length_threshold = None + + if config["format"] == "npy": + audio_query = "*-wave.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + audio_load_fn = np.load + mel_load_fn = np.load + else: + raise ValueError("Only npy are supported.") + + # define train/valid dataset + train_dataset = AudioMelDataset( + root_dir=args.train_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant(config["batch_max_steps"], dtype=tf.int32), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] + * STRATEGY.num_replicas_in_sync + * config["gradient_accumulation_steps"], + ) + + valid_dataset = AudioMelDataset( + root_dir=args.dev_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant( + config["batch_max_steps_valid"], dtype=tf.int32 + ), + 
hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync, + ) + + # define trainer + trainer = MultiSTFTMelganTrainer( + steps=0, + epochs=0, + config=config, + strategy=STRATEGY, + is_generator_mixed_precision=args.generator_mixed_precision, + is_discriminator_mixed_precision=args.discriminator_mixed_precision, + ) + + with STRATEGY.scope(): + # define generator and discriminator + generator = TFHifiGANGenerator( + HifiGANGeneratorConfig(**config["hifigan_generator_params"]), + name="hifigan_generator", + ) + + multiperiod_discriminator = TFHifiGANMultiPeriodDiscriminator( + HifiGANDiscriminatorConfig(**config["hifigan_discriminator_params"]), + name="hifigan_multiperiod_discriminator", + ) + multiscale_discriminator = TFMelGANMultiScaleDiscriminator( + MelGANDiscriminatorConfig( + **config["melgan_discriminator_params"], + name="melgan_multiscale_discriminator", + ) + ) + + discriminator = TFHifiGANDiscriminator( + multiperiod_discriminator, + multiscale_discriminator, + name="hifigan_discriminator", + ) + + # dummy input to build model. + fake_mels = tf.random.uniform(shape=[1, 100, 80], dtype=tf.float32) + y_hat = generator(fake_mels) + discriminator(y_hat) + + if len(args.pretrained) > 1: + generator.load_weights(args.pretrained) + logging.info( + f"Successfully loaded pretrained weight from {args.pretrained}." 
+ ) + + generator.summary() + discriminator.summary() + + # define optimizer + generator_lr_fn = getattr( + tf.keras.optimizers.schedules, config["generator_optimizer_params"]["lr_fn"] + )(**config["generator_optimizer_params"]["lr_params"]) + discriminator_lr_fn = getattr( + tf.keras.optimizers.schedules, + config["discriminator_optimizer_params"]["lr_fn"], + )(**config["discriminator_optimizer_params"]["lr_params"]) + + gen_optimizer = tf.keras.optimizers.Adam( + learning_rate=generator_lr_fn, + amsgrad=config["generator_optimizer_params"]["amsgrad"], + ) + dis_optimizer = tf.keras.optimizers.Adam( + learning_rate=discriminator_lr_fn, + amsgrad=config["discriminator_optimizer_params"]["amsgrad"], + ) + + trainer.compile( + gen_model=generator, + dis_model=discriminator, + gen_optimizer=gen_optimizer, + dis_optimizer=dis_optimizer, + ) + + # start training + try: + trainer.fit( + train_dataset, + valid_dataset, + saved_path=os.path.join(config["outdir"], "checkpoints/"), + resume=args.resume, + ) + except KeyboardInterrupt: + trainer.save_checkpoint() + logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.") + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/ios/.gitignore b/TensorFlowTTS/examples/ios/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..418f5928dcb8ec2522a8b2e9d51eadcd5795873b --- /dev/null +++ b/TensorFlowTTS/examples/ios/.gitignore @@ -0,0 +1,3 @@ +Pods +*.xcworkspace +xcuserdata diff --git a/TensorFlowTTS/examples/ios/Podfile b/TensorFlowTTS/examples/ios/Podfile new file mode 100644 index 0000000000000000000000000000000000000000..b6089754c00b680759344123743a72806b7bdab1 --- /dev/null +++ b/TensorFlowTTS/examples/ios/Podfile @@ -0,0 +1,6 @@ +platform :ios, '14.0' + +target 'TF_TTS_Demo' do + pod 'TensorFlowLiteSwift' + pod 'TensorFlowLiteSelectTfOps' +end diff --git a/TensorFlowTTS/examples/ios/Podfile.lock b/TensorFlowTTS/examples/ios/Podfile.lock new file mode 100644 index 
0000000000000000000000000000000000000000..227c148e7f11bcee99bdaa06f8ccdc0077343ff2 --- /dev/null +++ b/TensorFlowTTS/examples/ios/Podfile.lock @@ -0,0 +1,28 @@ +PODS: + - TensorFlowLiteC (2.4.0): + - TensorFlowLiteC/Core (= 2.4.0) + - TensorFlowLiteC/Core (2.4.0) + - TensorFlowLiteSelectTfOps (2.4.0) + - TensorFlowLiteSwift (2.4.0): + - TensorFlowLiteSwift/Core (= 2.4.0) + - TensorFlowLiteSwift/Core (2.4.0): + - TensorFlowLiteC (= 2.4.0) + +DEPENDENCIES: + - TensorFlowLiteSelectTfOps + - TensorFlowLiteSwift + +SPEC REPOS: + trunk: + - TensorFlowLiteC + - TensorFlowLiteSelectTfOps + - TensorFlowLiteSwift + +SPEC CHECKSUMS: + TensorFlowLiteC: 09f8ac75a76caeadb19bcfa694e97454cc1ecf87 + TensorFlowLiteSelectTfOps: f8053d3ec72032887b832d2d060015d8b7031144 + TensorFlowLiteSwift: f062dc1178120100d825d7799fd9f115b4a37fee + +PODFILE CHECKSUM: 12da12fb22671b6cdc578320043f5d310043aded + +COCOAPODS: 1.10.1 diff --git a/TensorFlowTTS/examples/ios/README.md b/TensorFlowTTS/examples/ios/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1b7e1c286b91bf27cc042dfc4833a03332f69cc3 --- /dev/null +++ b/TensorFlowTTS/examples/ios/README.md @@ -0,0 +1,15 @@ +# iOS Demo + +This app demonstrates using FastSpeech2 and MB MelGAN models on iOS. + +## How to build + +Download LJ Speech TFLite models from https://github.com/luan78zaoha/TTS_tflite_cpp/releases/tag/0.1.0 and unpack into TF_TTS_Demo directory containing Swift files. + +It uses [CocoaPods](https://cocoapods.org) to link with TensorFlowSwift. + +``` +pod install +open TF_TTS_Demo.xcworkspace +``` + diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo.xcodeproj/project.pbxproj b/TensorFlowTTS/examples/ios/TF_TTS_Demo.xcodeproj/project.pbxproj new file mode 100644 index 0000000000000000000000000000000000000000..561a366e7f9989070b1a98c176f48eff7852bb7f --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo.xcodeproj/project.pbxproj @@ -0,0 +1,459 @@ +// !$*UTF8*$! 
+{ + archiveVersion = 1; + classes = { + }; + objectVersion = 50; + objects = { + +/* Begin PBXBuildFile section */ + 4C85B38B26002B7D003EF7CF /* TF_TTS_DemoApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C85B38A26002B7D003EF7CF /* TF_TTS_DemoApp.swift */; }; + 4C85B38D26002B7D003EF7CF /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C85B38C26002B7D003EF7CF /* ContentView.swift */; }; + 4C85B38F26002B7E003EF7CF /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 4C85B38E26002B7E003EF7CF /* Assets.xcassets */; }; + 4C85B39226002B7E003EF7CF /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 4C85B39126002B7E003EF7CF /* Preview Assets.xcassets */; }; + 4C85B3A026002E34003EF7CF /* TTS.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C85B39F26002E34003EF7CF /* TTS.swift */; }; + 4C85B3AB26002FAE003EF7CF /* ljspeech_mapper.json in Resources */ = {isa = PBXBuildFile; fileRef = 4C85B3AA26002FAE003EF7CF /* ljspeech_mapper.json */; }; + 4C85B3AF26003109003EF7CF /* MBMelGAN.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C85B3AD26003108003EF7CF /* MBMelGAN.swift */; }; + 4C85B3B026003109003EF7CF /* FastSpeech2.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C85B3AE26003109003EF7CF /* FastSpeech2.swift */; }; + 4C85B3B726004180003EF7CF /* fastspeech2_quan.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 4C85B3B52600417F003EF7CF /* fastspeech2_quan.tflite */; }; + 4C85B3B826004180003EF7CF /* mb_melgan.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 4C85B3B626004180003EF7CF /* mb_melgan.tflite */; }; + 7962621A2FBFE4F4350BE4CE /* libPods-TF_TTS_Demo.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9C1863705175E1518089A8CA /* libPods-TF_TTS_Demo.a */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + 4C85B38726002B7D003EF7CF /* TF_TTS_Demo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = 
TF_TTS_Demo.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 4C85B38A26002B7D003EF7CF /* TF_TTS_DemoApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TF_TTS_DemoApp.swift; sourceTree = ""; }; + 4C85B38C26002B7D003EF7CF /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; + 4C85B38E26002B7E003EF7CF /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 4C85B39126002B7E003EF7CF /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 4C85B39326002B7E003EF7CF /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 4C85B39F26002E34003EF7CF /* TTS.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TTS.swift; sourceTree = ""; }; + 4C85B3AA26002FAE003EF7CF /* ljspeech_mapper.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; name = ljspeech_mapper.json; path = ../../../tensorflow_tts/processor/pretrained/ljspeech_mapper.json; sourceTree = ""; }; + 4C85B3AD26003108003EF7CF /* MBMelGAN.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MBMelGAN.swift; sourceTree = ""; }; + 4C85B3AE26003109003EF7CF /* FastSpeech2.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FastSpeech2.swift; sourceTree = ""; }; + 4C85B3B52600417F003EF7CF /* fastspeech2_quan.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = fastspeech2_quan.tflite; sourceTree = ""; }; + 4C85B3B626004180003EF7CF /* mb_melgan.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = mb_melgan.tflite; sourceTree = ""; }; + 4C85B3BB26004876003EF7CF /* README.md */ = {isa = 
PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = ""; }; + 7D7970556264AB38256CF749 /* Pods-TF TTS Demo.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-TF TTS Demo.release.xcconfig"; path = "Target Support Files/Pods-TF TTS Demo/Pods-TF TTS Demo.release.xcconfig"; sourceTree = ""; }; + 99A43F21F9647E3677352A5B /* Pods-TF TTS Demo.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-TF TTS Demo.debug.xcconfig"; path = "Target Support Files/Pods-TF TTS Demo/Pods-TF TTS Demo.debug.xcconfig"; sourceTree = ""; }; + 9C1863705175E1518089A8CA /* libPods-TF_TTS_Demo.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-TF_TTS_Demo.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + B3A5C735C30B2D5FC8C19020 /* Pods-TF_TTS_Demo.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-TF_TTS_Demo.debug.xcconfig"; path = "Target Support Files/Pods-TF_TTS_Demo/Pods-TF_TTS_Demo.debug.xcconfig"; sourceTree = ""; }; + D5A15A5967523BB8A50C844F /* Pods-TF_TTS_Demo.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-TF_TTS_Demo.release.xcconfig"; path = "Target Support Files/Pods-TF_TTS_Demo/Pods-TF_TTS_Demo.release.xcconfig"; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 4C85B38426002B7D003EF7CF /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 7962621A2FBFE4F4350BE4CE /* libPods-TF_TTS_Demo.a in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 4C85B37E26002B7D003EF7CF = { + isa = PBXGroup; + children = ( + 4C85B3BB26004876003EF7CF /* README.md */, + 
4C85B38926002B7D003EF7CF /* TF_TTS_Demo */, + 4C85B38826002B7D003EF7CF /* Products */, + F10AFF36A4BAA04D30FB3F20 /* Pods */, + 98E5496B57C69E944B3260B8 /* Frameworks */, + ); + sourceTree = ""; + }; + 4C85B38826002B7D003EF7CF /* Products */ = { + isa = PBXGroup; + children = ( + 4C85B38726002B7D003EF7CF /* TF_TTS_Demo.app */, + ); + name = Products; + sourceTree = ""; + }; + 4C85B38926002B7D003EF7CF /* TF_TTS_Demo */ = { + isa = PBXGroup; + children = ( + 4C85B3A326002E8B003EF7CF /* TTS */, + 4C85B38A26002B7D003EF7CF /* TF_TTS_DemoApp.swift */, + 4C85B38C26002B7D003EF7CF /* ContentView.swift */, + 4C85B38E26002B7E003EF7CF /* Assets.xcassets */, + 4C85B39326002B7E003EF7CF /* Info.plist */, + 4C85B39026002B7E003EF7CF /* Preview Content */, + ); + path = TF_TTS_Demo; + sourceTree = ""; + }; + 4C85B39026002B7E003EF7CF /* Preview Content */ = { + isa = PBXGroup; + children = ( + 4C85B39126002B7E003EF7CF /* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + 4C85B3A326002E8B003EF7CF /* TTS */ = { + isa = PBXGroup; + children = ( + 4C85B3AA26002FAE003EF7CF /* ljspeech_mapper.json */, + 4C85B3A426002EDB003EF7CF /* Models */, + 4C85B39F26002E34003EF7CF /* TTS.swift */, + 4C85B3AE26003109003EF7CF /* FastSpeech2.swift */, + 4C85B3AD26003108003EF7CF /* MBMelGAN.swift */, + ); + name = TTS; + sourceTree = ""; + }; + 4C85B3A426002EDB003EF7CF /* Models */ = { + isa = PBXGroup; + children = ( + 4C85B3B52600417F003EF7CF /* fastspeech2_quan.tflite */, + 4C85B3B626004180003EF7CF /* mb_melgan.tflite */, + ); + name = Models; + sourceTree = ""; + }; + 98E5496B57C69E944B3260B8 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 9C1863705175E1518089A8CA /* libPods-TF_TTS_Demo.a */, + ); + name = Frameworks; + sourceTree = ""; + }; + F10AFF36A4BAA04D30FB3F20 /* Pods */ = { + isa = PBXGroup; + children = ( + 99A43F21F9647E3677352A5B /* Pods-TF TTS Demo.debug.xcconfig */, + 7D7970556264AB38256CF749 /* Pods-TF TTS Demo.release.xcconfig */, + 
B3A5C735C30B2D5FC8C19020 /* Pods-TF_TTS_Demo.debug.xcconfig */, + D5A15A5967523BB8A50C844F /* Pods-TF_TTS_Demo.release.xcconfig */, + ); + path = Pods; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 4C85B38626002B7D003EF7CF /* TF_TTS_Demo */ = { + isa = PBXNativeTarget; + buildConfigurationList = 4C85B39626002B7E003EF7CF /* Build configuration list for PBXNativeTarget "TF_TTS_Demo" */; + buildPhases = ( + 79059AB8757019D2EFF2A6BA /* [CP] Check Pods Manifest.lock */, + 4C85B38326002B7D003EF7CF /* Sources */, + 4C85B38426002B7D003EF7CF /* Frameworks */, + 4C85B38526002B7D003EF7CF /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = TF_TTS_Demo; + productName = "TF TTS Demo"; + productReference = 4C85B38726002B7D003EF7CF /* TF_TTS_Demo.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 4C85B37F26002B7D003EF7CF /* Project object */ = { + isa = PBXProject; + attributes = { + LastSwiftUpdateCheck = 1240; + LastUpgradeCheck = 1240; + TargetAttributes = { + 4C85B38626002B7D003EF7CF = { + CreatedOnToolsVersion = 12.4; + }; + }; + }; + buildConfigurationList = 4C85B38226002B7D003EF7CF /* Build configuration list for PBXProject "TF_TTS_Demo" */; + compatibilityVersion = "Xcode 9.3"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 4C85B37E26002B7D003EF7CF; + productRefGroup = 4C85B38826002B7D003EF7CF /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 4C85B38626002B7D003EF7CF /* TF_TTS_Demo */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 4C85B38526002B7D003EF7CF /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 4C85B3AB26002FAE003EF7CF /* ljspeech_mapper.json in Resources */, + 4C85B39226002B7E003EF7CF /* Preview Assets.xcassets in Resources */, + 
4C85B3B826004180003EF7CF /* mb_melgan.tflite in Resources */, + 4C85B38F26002B7E003EF7CF /* Assets.xcassets in Resources */, + 4C85B3B726004180003EF7CF /* fastspeech2_quan.tflite in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXShellScriptBuildPhase section */ + 79059AB8757019D2EFF2A6BA /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-TF_TTS_Demo-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; +/* End PBXShellScriptBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 4C85B38326002B7D003EF7CF /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 4C85B3B026003109003EF7CF /* FastSpeech2.swift in Sources */, + 4C85B3AF26003109003EF7CF /* MBMelGAN.swift in Sources */, + 4C85B38D26002B7D003EF7CF /* ContentView.swift in Sources */, + 4C85B3A026002E34003EF7CF /* TTS.swift in Sources */, + 4C85B38B26002B7D003EF7CF /* TF_TTS_DemoApp.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin XCBuildConfiguration section */ + 4C85B39426002B7E003EF7CF /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + 
CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 14.4; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 4C85B39526002B7E003EF7CF /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + 
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 14.4; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 4C85B39726002B7E003EF7CF /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = B3A5C735C30B2D5FC8C19020 /* Pods-TF_TTS_Demo.debug.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_ASSET_PATHS = "\"TF_TTS_Demo/Preview Content\""; + DEVELOPMENT_TEAM = ""; + ENABLE_PREVIEWS = YES; + INFOPLIST_FILE = TF_TTS_Demo/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + OTHER_LDFLAGS = ( + "$(inherited)", + "-ObjC", + "-l\"TensorFlowLiteSwift\"", + "-l\"c++\"", + "-framework", + "\"TensorFlowLiteC\"", + "-framework", + "\"TensorFlowLiteSelectTfOps\"", + "-weak_framework", + "\"CoreML\"", + "-force_load", + "$(SRCROOT)/Pods/TensorFlowLiteSelectTfOps/Frameworks/TensorFlowLiteSelectTfOps.framework/TensorFlowLiteSelectTfOps", + 
); + PRODUCT_BUNDLE_IDENTIFIER = "io.github.tensorspeech.TF-TTS-Demo"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 4C85B39826002B7E003EF7CF /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = D5A15A5967523BB8A50C844F /* Pods-TF_TTS_Demo.release.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_ASSET_PATHS = "\"TF_TTS_Demo/Preview Content\""; + DEVELOPMENT_TEAM = ""; + ENABLE_PREVIEWS = YES; + INFOPLIST_FILE = TF_TTS_Demo/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + OTHER_LDFLAGS = ( + "$(inherited)", + "-ObjC", + "-l\"TensorFlowLiteSwift\"", + "-l\"c++\"", + "-framework", + "\"TensorFlowLiteC\"", + "-framework", + "\"TensorFlowLiteSelectTfOps\"", + "-weak_framework", + "\"CoreML\"", + "-force_load", + "$(SRCROOT)/Pods/TensorFlowLiteSelectTfOps/Frameworks/TensorFlowLiteSelectTfOps.framework/TensorFlowLiteSelectTfOps", + ); + PRODUCT_BUNDLE_IDENTIFIER = "io.github.tensorspeech.TF-TTS-Demo"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 4C85B38226002B7D003EF7CF /* Build configuration list for PBXProject "TF_TTS_Demo" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 4C85B39426002B7E003EF7CF /* Debug */, + 4C85B39526002B7E003EF7CF /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 4C85B39626002B7E003EF7CF /* Build configuration list for PBXNativeTarget "TF_TTS_Demo" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 4C85B39726002B7E003EF7CF /* Debug */, + 4C85B39826002B7E003EF7CF /* Release */, + ); + 
defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 4C85B37F26002B7D003EF7CF /* Project object */; +} diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/Assets.xcassets/AccentColor.colorset/Contents.json b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 0000000000000000000000000000000000000000..eb8789700816459c1e1480e0b34781d9fb78a1ca --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/Assets.xcassets/AppIcon.appiconset/Contents.json b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000000000000000000000000000000000000..9221b9bb1a35f5de270a41afa01305478221ae32 --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,98 @@ +{ + "images" : [ + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "20x20" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "20x20" + }, + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "29x29" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "29x29" + }, + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "40x40" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "40x40" + }, + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "60x60" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "60x60" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "20x20" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "20x20" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "29x29" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "29x29" + }, + { + "idiom" : 
"ipad", + "scale" : "1x", + "size" : "40x40" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "40x40" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "76x76" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "76x76" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "83.5x83.5" + }, + { + "idiom" : "ios-marketing", + "scale" : "1x", + "size" : "1024x1024" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/Assets.xcassets/Contents.json b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Assets.xcassets/Contents.json new file mode 100644 index 0000000000000000000000000000000000000000..73c00596a7fca3f3d4bdd64053b69d86745f9e10 --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/ContentView.swift b/TensorFlowTTS/examples/ios/TF_TTS_Demo/ContentView.swift new file mode 100644 index 0000000000000000000000000000000000000000..19e0356389e44128eed9ea7289272167e84625f3 --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/ContentView.swift @@ -0,0 +1,32 @@ +// +// ContentView.swift +// TF TTS Demo +// +// Created by 안창범 on 2021/03/16. +// + +import SwiftUI + +struct ContentView: View { + @StateObject var tts = TTS() + + @State var text = "The Rhodes Must Fall campaigners said the announcement was hopeful, but warned they would remain cautious until the college had actually carried out the removal." 
+ + var body: some View { + VStack { + TextEditor(text: $text) + Button { + tts.speak(string: text) + } label: { + Label("Speak", systemImage: "speaker.1") + } + } + .padding() + } +} + +struct ContentView_Previews: PreviewProvider { + static var previews: some View { + ContentView() + } +} diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/FastSpeech2.swift b/TensorFlowTTS/examples/ios/TF_TTS_Demo/FastSpeech2.swift new file mode 100644 index 0000000000000000000000000000000000000000..9d31ba735b58b7ede3e3f60264b7ac1b1b0789d3 --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/FastSpeech2.swift @@ -0,0 +1,44 @@ +// +// FastSpeech2.swift +// HelloTensorFlowTTS +// +// Created by 안창범 on 2021/03/09. +// + +import Foundation +import TensorFlowLite + +class FastSpeech2 { + let interpreter: Interpreter + + var speakerId: Int32 = 0 + + var f0Ratio: Float = 1 + + var energyRatio: Float = 1 + + init(url: URL) throws { + var options = Interpreter.Options() + options.threadCount = 5 + interpreter = try Interpreter(modelPath: url.path, options: options) + } + + func getMelSpectrogram(inputIds: [Int32], speedRatio: Float) throws -> Tensor { + try interpreter.resizeInput(at: 0, to: [1, inputIds.count]) + try interpreter.allocateTensors() + + let data = inputIds.withUnsafeBufferPointer(Data.init) + try interpreter.copy(data, toInputAt: 0) + try interpreter.copy(Data(bytes: &speakerId, count: 4), toInputAt: 1) + var speedRatio = speedRatio + try interpreter.copy(Data(bytes: &speedRatio, count: 4), toInputAt: 2) + try interpreter.copy(Data(bytes: &f0Ratio, count: 4), toInputAt: 3) + try interpreter.copy(Data(bytes: &energyRatio, count: 4), toInputAt: 4) + + let t0 = Date() + try interpreter.invoke() + print("fastspeech2: \(Date().timeIntervalSince(t0))s") + + return try interpreter.output(at: 1) + } +} diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/Info.plist b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Info.plist new file mode 100644 index 
0000000000000000000000000000000000000000..efc211a0c1b1a3878ff9bb28cb1b7df71704f541 --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Info.plist @@ -0,0 +1,50 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + $(PRODUCT_BUNDLE_PACKAGE_TYPE) + CFBundleShortVersionString + 1.0 + CFBundleVersion + 1 + LSRequiresIPhoneOS + + UIApplicationSceneManifest + + UIApplicationSupportsMultipleScenes + + + UIApplicationSupportsIndirectInputEvents + + UILaunchScreen + + UIRequiredDeviceCapabilities + + armv7 + + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + + diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/MBMelGAN.swift b/TensorFlowTTS/examples/ios/TF_TTS_Demo/MBMelGAN.swift new file mode 100644 index 0000000000000000000000000000000000000000..af0349409590ddb9ff79c59b3d741d56ef2cb19c --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/MBMelGAN.swift @@ -0,0 +1,32 @@ +// +// MBMelGAN.swift +// HelloTensorFlowTTS +// +// Created by 안창범 on 2021/03/09. 
+// + +import Foundation +import TensorFlowLite + +class MBMelGan { + let interpreter: Interpreter + + init(url: URL) throws { + var options = Interpreter.Options() + options.threadCount = 5 + interpreter = try Interpreter(modelPath: url.path, options: options) + } + + func getAudio(input: Tensor) throws -> Data { + try interpreter.resizeInput(at: 0, to: input.shape) + try interpreter.allocateTensors() + + try interpreter.copy(input.data, toInputAt: 0) + + let t0 = Date() + try interpreter.invoke() + print("mbmelgan: \(Date().timeIntervalSince(t0))s") + + return try interpreter.output(at: 0).data + } +} diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/Preview Content/Preview Assets.xcassets/Contents.json b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Preview Content/Preview Assets.xcassets/Contents.json new file mode 100644 index 0000000000000000000000000000000000000000..73c00596a7fca3f3d4bdd64053b69d86745f9e10 --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/Preview Content/Preview Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/TF_TTS_DemoApp.swift b/TensorFlowTTS/examples/ios/TF_TTS_Demo/TF_TTS_DemoApp.swift new file mode 100644 index 0000000000000000000000000000000000000000..b7439df28f50b80065b02ab5c574ccbdb9633c9d --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/TF_TTS_DemoApp.swift @@ -0,0 +1,17 @@ +// +// TF_TTS_DemoApp.swift +// TF TTS Demo +// +// Created by 안창범 on 2021/03/16. 
+// + +import SwiftUI + +@main +struct TF_TTS_DemoApp: App { + var body: some Scene { + WindowGroup { + ContentView() + } + } +} diff --git a/TensorFlowTTS/examples/ios/TF_TTS_Demo/TTS.swift b/TensorFlowTTS/examples/ios/TF_TTS_Demo/TTS.swift new file mode 100644 index 0000000000000000000000000000000000000000..cc516051d9dc6414bcb2b9ac8d22e7bc3900a077 --- /dev/null +++ b/TensorFlowTTS/examples/ios/TF_TTS_Demo/TTS.swift @@ -0,0 +1,96 @@ +// +// TTS.swift +// TF TTS Demo +// +// Created by 안창범 on 2021/03/16. +// + +import Foundation +import AVFoundation + +public class TTS { + var rate: Float = 1.0 + + private let fastSpeech2 = try! FastSpeech2(url: Bundle.main.url(forResource: "fastspeech2_quan", withExtension: "tflite")!) + + private let mbMelGan = try! MBMelGan(url: Bundle.main.url(forResource: "mb_melgan", withExtension: "tflite")!) + + /// Mel spectrogram hop size + public let hopSize = 256 + + /// Vocoder sample rate + let sampleRate = 22_050 + + private let sampleBufferRenderSynchronizer = AVSampleBufferRenderSynchronizer() + + private let sampleBufferAudioRenderer = AVSampleBufferAudioRenderer() + + init() { + sampleBufferRenderSynchronizer.addRenderer(sampleBufferAudioRenderer) + } + + public func speak(string: String) { + let input_ids = text_to_sequence(string) + + do { + let melSpectrogram = try fastSpeech2.getMelSpectrogram(inputIds: input_ids, speedRatio: 2 - rate) + + let data = try mbMelGan.getAudio(input: melSpectrogram) + print(data) + + let blockBuffer = try CMBlockBuffer(length: data.count) + try data.withUnsafeBytes { try blockBuffer.replaceDataBytes(with: $0) } + + let audioStreamBasicDescription = AudioStreamBasicDescription(mSampleRate: Float64(sampleRate), mFormatID: kAudioFormatLinearPCM, mFormatFlags: kAudioFormatFlagIsFloat, mBytesPerPacket: 4, mFramesPerPacket: 1, mBytesPerFrame: 4, mChannelsPerFrame: 1, mBitsPerChannel: 32, mReserved: 0) + + let formatDescription = try CMFormatDescription(audioStreamBasicDescription: 
audioStreamBasicDescription) + + let delay: TimeInterval = 1 + + let sampleBuffer = try CMSampleBuffer(dataBuffer: blockBuffer, + formatDescription: formatDescription, + numSamples: data.count / 4, + presentationTimeStamp: sampleBufferRenderSynchronizer.currentTime() + + CMTime(seconds: delay, preferredTimescale: CMTimeScale(sampleRate)), + packetDescriptions: []) + + sampleBufferAudioRenderer.enqueue(sampleBuffer) + + sampleBufferRenderSynchronizer.rate = 1 + } + catch { + print(error) + } + } + + lazy var eos_id = symbolIds["eos"]! + + lazy var symbolIds: [String: Int32] = try! loadMapper(url: Bundle.main.url(forResource: "ljspeech_mapper", withExtension: "json")!).symbol_to_id + + public func text_to_sequence(_ text: String) -> [Int32] { + var sequence: [Int32] = [] + sequence += symbols_to_sequence(text) + sequence.append(eos_id) + return sequence + } + + func symbols_to_sequence(_ text: String) -> [Int32] { + return text.unicodeScalars.compactMap { symbolIds[String($0)] } + } + + func loadMapper(url: URL) throws -> Mapper { + let data = try Data(contentsOf: url) + return try JSONDecoder().decode(Mapper.self, from: data) + } +} + +extension TTS: ObservableObject { + +} + +public struct Mapper: Codable { + public let symbol_to_id: [String: Int32] + public let id_to_symbol: [String: String] + public let speakers_map: [String: Int32] + public let processor_name: String +} diff --git a/TensorFlowTTS/examples/melgan/README.md b/TensorFlowTTS/examples/melgan/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ea5ebd62fba14cd584f86fd6722beb14855d178c --- /dev/null +++ b/TensorFlowTTS/examples/melgan/README.md @@ -0,0 +1,78 @@ +# MelGAN: Generative Adversarial Networks for Conditional Waveform Synthesis +Based on the script [`train_melgan.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/melgan/train_melgan.py). + +## Training MelGAN from scratch with LJSpeech dataset. 
+This example code show you how to train MelGAN from scratch with Tensorflow 2 based on custom training loop and tf.function. The data used for this example is LJSpeech, you can download the dataset at [link](https://keithito.com/LJ-Speech-Dataset/). + +### Step 1: Create Tensorflow based Dataloader (tf.dataset) +First, you need define data loader based on AbstractDataset class (see [`abstract_dataset.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/tensorflow_tts/datasets/abstract_dataset.py)). On this example, a dataloader read dataset from path. I use suffix to classify what file is a audio and mel-spectrogram (see [`audio_mel_dataset.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/melgan/audio_mel_dataset.py)). If you already have preprocessed version of your target dataset, you don't need to use this example dataloader, you just need refer my dataloader and modify **generator function** to adapt with your case. Normally, a generator function should return [audio, mel]. + +### Step 2: Training from scratch +After you redefine your dataloader, pls modify an input arguments, train_dataset and valid_dataset from [`train_melgan.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/melgan/train_melgan.py). Here is an example command line to training tacotron-2 from scratch: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/melgan/train_melgan.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/melgan/exp/train.melgan.v1/ \ + --config ./examples/melgan/conf/melgan.v1.yaml \ + --use-norm 1 + --generator_mixed_precision 0 \ + --resume "" +``` + +IF you want to use MultiGPU to training you can replace `CUDA_VISIBLE_DEVICES=0` by `CUDA_VISIBLE_DEVICES=0,1,2,3` for example. You also need to tune the `batch_size` for each GPU (in config file) by yourself to maximize the performance. Note that MultiGPU now support for Training but not yet support for Decode. 
+ +In case you want to resume the training progress, please following below example command line: + +```bash +--resume ./examples/melgan/exp/train.melgan.v1/checkpoints/ckpt-100000 +``` + +If you want to finetune a model, use `--pretrained` like this with the filename of the generator +```bash +--pretrained ptgenerator.h5 +``` + + +### Step 3: Decode audio from folder mel-spectrogram +To running inference on folder mel-spectrogram (eg tacotron2.v1), run below command line: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/melgan/decode_melgan.py \ + --rootdir ./prediction/tacotron2.v1/ \ + --outdir ./prediction/tacotron2.v1_melgan.v1/ \ + --checkpoint ./examples/melgan/exp/train.melgan.v1/checkpoints/model-1500000.h5 \ + --config ./examples/melgan/conf/melgan.v1.yaml \ + --batch-size 32 + --use-norm 1 +``` + + +## Finetune MelGAN with ljspeech pretrained on other languages +Just load pretrained model and training from scratch with other languages. **DO NOT FORGET** re-preprocessing on your dataset if needed. A hop_size should be 256 if you want to use our pretrained. + +## Learning Cuves +Here is a learning curves of melgan based on this config [`melgan.v1.yaml`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/melgan/conf/melgan.v1.yaml) + + + +## Some important notes + +* We don't need use learning rate decay for melgan. +* A weight-norm tensorflow based layer have many problem about ability to save graph, multi-gpu and convergence problem, i will investigate a solution but at this time, pls set is_weight_norm is False on config. +* After one step generator, **DO NOT FORGET** re-generate y_hat for discriminator training. +* Mixed precision make Group Convolution training slower on Discriminator, both pytorch (apex) and tensorflow also has this problems. 
+ +## Pretrained Models and Audio samples +| Model | Conf | Lang | Fs [Hz] | Mel range [Hz] | FFT / Hop / Win [pt] | # iters | +| :------ | :---: | :---: | :----: | :--------: | :---------------: | :-----: | +| [melgan.v1](https://drive.google.com/drive/u/1/folders/1mBwGVchwtNkgFsURl7g4nMiqx4gquAC2) | [link](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/melgan/conf/melgan.v1.yaml) | EN | 22.05k | 80-7600 | 1024 / 256 / None | 1500k | + + +## Reference + +1. https://github.com/descriptinc/melgan-neurips +2. https://github.com/kan-bayashi/ParallelWaveGAN +3. https://github.com/tensorflow/addons +4. [MelGAN: Generative Adversarial Networks for Conditional Waveform Synthesis](https://arxiv.org/abs/1910.06711) \ No newline at end of file diff --git a/TensorFlowTTS/examples/melgan/audio_mel_dataset.py b/TensorFlowTTS/examples/melgan/audio_mel_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..edde888bbeb8423cb0b178499c67e8055a87bc2f --- /dev/null +++ b/TensorFlowTTS/examples/melgan/audio_mel_dataset.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Dataset modules.""" + +import logging +import os + +import numpy as np +import tensorflow as tf + +from tensorflow_tts.datasets.abstract_dataset import AbstractDataset +from tensorflow_tts.utils import find_files + + +class AudioMelDataset(AbstractDataset): + """Tensorflow Audio Mel dataset.""" + + def __init__( + self, + root_dir, + audio_query="*-wave.npy", + mel_query="*-raw-feats.npy", + audio_load_fn=np.load, + mel_load_fn=np.load, + audio_length_threshold=0, + mel_length_threshold=0, + ): + """Initialize dataset. + Args: + root_dir (str): Root directory including dumped files. + audio_query (str): Query to find audio files in root_dir. + mel_query (str): Query to find feature files in root_dir. + audio_load_fn (func): Function to load audio file. + mel_load_fn (func): Function to load feature file. + audio_length_threshold (int): Threshold to remove short audio files. + mel_length_threshold (int): Threshold to remove short feature files. + return_utt_id (bool): Whether to return the utterance id with arrays. + """ + # find all of audio and mel files. + audio_files = sorted(find_files(root_dir, audio_query)) + mel_files = sorted(find_files(root_dir, mel_query)) + + # assert the number of files + assert len(audio_files) != 0, f"Not found any audio files in ${root_dir}." + assert len(audio_files) == len( + mel_files + ), f"Number of audio and mel files are different ({len(audio_files)} vs {len(mel_files)})." 
+ + if ".npy" in audio_query: + suffix = audio_query[1:] + utt_ids = [os.path.basename(f).replace(suffix, "") for f in audio_files] + + # set global params + self.utt_ids = utt_ids + self.audio_files = audio_files + self.mel_files = mel_files + self.audio_load_fn = audio_load_fn + self.mel_load_fn = mel_load_fn + self.audio_length_threshold = audio_length_threshold + self.mel_length_threshold = mel_length_threshold + + def get_args(self): + return [self.utt_ids] + + def generator(self, utt_ids): + for i, utt_id in enumerate(utt_ids): + audio_file = self.audio_files[i] + mel_file = self.mel_files[i] + + items = { + "utt_ids": utt_id, + "audio_files": audio_file, + "mel_files": mel_file, + } + + yield items + + @tf.function + def _load_data(self, items): + audio = tf.numpy_function(np.load, [items["audio_files"]], tf.float32) + mel = tf.numpy_function(np.load, [items["mel_files"]], tf.float32) + + items = { + "utt_ids": items["utt_ids"], + "audios": audio, + "mels": mel, + "mel_lengths": len(mel), + "audio_lengths": len(audio), + } + + return items + + def create( + self, + allow_cache=False, + batch_size=1, + is_shuffle=False, + map_fn=None, + reshuffle_each_iteration=True, + ): + """Create tf.dataset function.""" + output_types = self.get_output_dtypes() + datasets = tf.data.Dataset.from_generator( + self.generator, output_types=output_types, args=(self.get_args()) + ) + options = tf.data.Options() + options.experimental_distribute.auto_shard_policy = tf.data.experimental.AutoShardPolicy.OFF + datasets = datasets.with_options(options) + # load dataset + datasets = datasets.map( + lambda items: self._load_data(items), tf.data.experimental.AUTOTUNE + ) + + datasets = datasets.filter( + lambda x: x["mel_lengths"] > self.mel_length_threshold + ) + datasets = datasets.filter( + lambda x: x["audio_lengths"] > self.audio_length_threshold + ) + + if allow_cache: + datasets = datasets.cache() + + if is_shuffle: + datasets = datasets.shuffle( + self.get_len_dataset(), + 
reshuffle_each_iteration=reshuffle_each_iteration, + ) + + if batch_size > 1 and map_fn is None: + raise ValueError("map function must define when batch_size > 1.") + + if map_fn is not None: + datasets = datasets.map(map_fn, tf.data.experimental.AUTOTUNE) + + # define padded shapes + padded_shapes = { + "utt_ids": [], + "audios": [None], + "mels": [None, 80], + "mel_lengths": [], + "audio_lengths": [], + } + + # define padded values + padding_values = { + "utt_ids": "", + "audios": 0.0, + "mels": 0.0, + "mel_lengths": 0, + "audio_lengths": 0, + } + + datasets = datasets.padded_batch( + batch_size, + padded_shapes=padded_shapes, + padding_values=padding_values, + drop_remainder=True, + ) + datasets = datasets.prefetch(tf.data.experimental.AUTOTUNE) + return datasets + + def get_output_dtypes(self): + output_types = { + "utt_ids": tf.string, + "audio_files": tf.string, + "mel_files": tf.string, + } + return output_types + + def get_len_dataset(self): + return len(self.utt_ids) + + def __name__(self): + return "AudioMelDataset" diff --git a/TensorFlowTTS/examples/melgan/conf/melgan.v1.yaml b/TensorFlowTTS/examples/melgan/conf/melgan.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..70294c73095df4f4426fce859e7c2062c26ac00b --- /dev/null +++ b/TensorFlowTTS/examples/melgan/conf/melgan.v1.yaml @@ -0,0 +1,89 @@ + +# This is the hyperparameter configuration file for MelGAN. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 4000k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 # Sampling rate of dataset. +hop_size: 256 # Hop size. 
+format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "melgan_generator" + +melgan_generator_params: + out_channels: 1 # Number of output channels. + kernel_size: 7 # Kernel size of initial and final conv layers. + filters: 512 # Initial number of channels for conv layers. + upsample_scales: [8, 8, 2, 2] # List of Upsampling scales. + stack_kernel_size: 3 # Kernel size of dilated conv layers in residual stack. + stacks: 3 # Number of stacks in a single residual stack module. + is_weight_norm: false # Use weight-norm or not. + +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +melgan_discriminator_params: + out_channels: 1 # Number of output channels. + scales: 3 # Number of multi-scales. + downsample_pooling: "AveragePooling1D" # Pooling type for the input downsampling. + downsample_pooling_params: # Parameters of the above pooling function. + pool_size: 4 + strides: 2 + kernel_sizes: [5, 3] # List of kernel size. + filters: 16 # Number of channels of the initial conv layer. + max_downsample_filters: 1024 # Maximum number of channels of downsampling layers. + downsample_scales: [4, 4, 4, 4] # List of downsampling scales. + nonlinear_activation: "LeakyReLU" # Nonlinear activation function. + nonlinear_activation_params: # Parameters of nonlinear activation function. + alpha: 0.2 + is_weight_norm: false # Use weight-norm or not. 
+ +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_feat_match: 10.0 + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +batch_max_steps: 8192 # Length of each audio in batch for training. Make sure dividable by hop_size. +batch_max_steps_valid: 81920 # Length of each audio for validation. Make sure dividable by hope_size. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr: 0.0001 # Generator's learning rate. + beta_1: 0.5 + beta_2: 0.9 + +discriminator_optimizer_params: + lr: 0.0001 # Discriminator's learning rate. + beta_1: 0.5 + beta_2: 0.9 + +gradient_accumulation_steps: 1 +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 4000000 # Number of training steps. +save_interval_steps: 3 # Interval steps to save checkpoint. +eval_interval_steps: 2 # Interval steps to evaluate the network. +log_interval_steps: 1 # Interval steps to record the training log. +discriminator_train_start_steps: 0 # step to start training discriminator. + +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/melgan/decode_melgan.py b/TensorFlowTTS/examples/melgan/decode_melgan.py new file mode 100644 index 0000000000000000000000000000000000000000..5e3aed0cc92d331351e14bf9f55912de2be19662 --- /dev/null +++ b/TensorFlowTTS/examples/melgan/decode_melgan.py @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Decode trained Melgan from folder.""" + +import argparse +import logging +import os +import sys + +sys.path.append(".") + +import numpy as np +import soundfile as sf +import yaml +from tqdm import tqdm + +from tensorflow_tts.configs import MelGANGeneratorConfig +from tensorflow_tts.datasets import MelDataset +from tensorflow_tts.models import TFMelGANGenerator + + +def main(): + """Run melgan decoding from folder.""" + parser = argparse.ArgumentParser( + description="Generate Audio from melspectrogram with trained melgan " + "(See detail in example/melgan/decode_melgan.py)." + ) + parser.add_argument( + "--rootdir", + default=None, + type=str, + required=True, + help="directory including ids/durations files.", + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save generated speech." + ) + parser.add_argument( + "--checkpoint", type=str, required=True, help="checkpoint file to be loaded." + ) + parser.add_argument( + "--use-norm", type=int, default=1, help="Use norm or raw melspectrogram." 
+ ) + parser.add_argument("--batch-size", type=int, default=8, help="batch_size.") + parser.add_argument( + "--config", + default=None, + type=str, + required=True, + help="yaml format configuration file. if not explicitly provided, " + "it will be searched in the checkpoint directory. (default=None)", + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + args = parser.parse_args() + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # load config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + + if config["format"] == "npy": + mel_query = "*-norm-feats.npy" if args.use_norm == 1 else "*-raw-feats.npy" + mel_load_fn = np.load + else: + raise ValueError("Only npy is supported.") + + # define data-loader + dataset = MelDataset( + root_dir=args.rootdir, + mel_query=mel_query, + mel_load_fn=mel_load_fn, + ) + dataset = dataset.create(batch_size=args.batch_size) + + # define model and load checkpoint + melgan = TFMelGANGenerator( + config=MelGANGeneratorConfig(**config["melgan_generator_params"]), name="melgan_generator" + ) + melgan._build() + melgan.load_weights(args.checkpoint) + + for data in tqdm(dataset, desc="[Decoding]"): + utt_ids, mels, mel_lengths = data["utt_ids"], data["mels"], data["mel_lengths"] + # melgan inference. + generated_audios = melgan(mels) + + # convert to numpy. 
+ generated_audios = generated_audios.numpy() # [B, T] + + # save to outdir + for i, audio in enumerate(generated_audios): + utt_id = utt_ids[i].numpy().decode("utf-8") + sf.write( + os.path.join(args.outdir, f"{utt_id}.wav"), + audio[: mel_lengths[i].numpy() * config["hop_size"]], + config["sampling_rate"], + "PCM_16", + ) + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/melgan/fig/melgan.v1.png b/TensorFlowTTS/examples/melgan/fig/melgan.v1.png new file mode 100644 index 0000000000000000000000000000000000000000..4ff0453322f5a42ed3b7fe10c87b1e5e682290bf Binary files /dev/null and b/TensorFlowTTS/examples/melgan/fig/melgan.v1.png differ diff --git a/TensorFlowTTS/examples/melgan/train_melgan.py b/TensorFlowTTS/examples/melgan/train_melgan.py new file mode 100644 index 0000000000000000000000000000000000000000..ccb7ed3a14e3e84c5be029ecb95d49354619f12b --- /dev/null +++ b/TensorFlowTTS/examples/melgan/train_melgan.py @@ -0,0 +1,522 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Train MelGAN.""" + +import tensorflow as tf + +physical_devices = tf.config.list_physical_devices("GPU") +for i in range(len(physical_devices)): + tf.config.experimental.set_memory_growth(physical_devices[i], True) + +import sys + +sys.path.append(".") + +import argparse +import logging +import os + +import numpy as np +import soundfile as sf +import yaml +from tqdm import tqdm + +import tensorflow_tts +import tensorflow_tts.configs.melgan as MELGAN_CONFIG +from examples.melgan.audio_mel_dataset import AudioMelDataset +from tensorflow_tts.losses import TFMelSpectrogram +from tensorflow_tts.models import TFMelGANGenerator, TFMelGANMultiScaleDiscriminator +from tensorflow_tts.trainers import GanBasedTrainer +from tensorflow_tts.utils import calculate_2d_loss, calculate_3d_loss, return_strategy + + +class MelganTrainer(GanBasedTrainer): + """Melgan Trainer class based on GanBasedTrainer.""" + + def __init__( + self, + config, + strategy, + steps=0, + epochs=0, + is_generator_mixed_precision=False, + is_discriminator_mixed_precision=False, + ): + """Initialize trainer. + + Args: + steps (int): Initial global steps. + epochs (int): Initial global epochs. + config (dict): Config dict loaded from yaml format configuration file. + is_generator_mixed_precision (bool): Use mixed precision for generator or not. + is_discriminator_mixed_precision (bool): Use mixed precision for discriminator or not. 
+ + + """ + super(MelganTrainer, self).__init__( + steps, + epochs, + config, + strategy, + is_generator_mixed_precision, + is_discriminator_mixed_precision, + ) + # define metrics to aggregates data and use tf.summary logs them + self.list_metrics_name = [ + "adversarial_loss", + "fm_loss", + "gen_loss", + "real_loss", + "fake_loss", + "dis_loss", + "mels_spectrogram_loss", + ] + self.init_train_eval_metrics(self.list_metrics_name) + self.reset_states_train() + self.reset_states_eval() + + self.config = config + + def compile(self, gen_model, dis_model, gen_optimizer, dis_optimizer): + super().compile(gen_model, dis_model, gen_optimizer, dis_optimizer) + # define loss + self.mse_loss = tf.keras.losses.MeanSquaredError( + reduction=tf.keras.losses.Reduction.NONE + ) + self.mae_loss = tf.keras.losses.MeanAbsoluteError( + reduction=tf.keras.losses.Reduction.NONE + ) + self.mels_loss = TFMelSpectrogram() + + def compute_per_example_generator_losses(self, batch, outputs): + """Compute per example generator losses and return dict_metrics_losses + Note that all element of the loss MUST has a shape [batch_size] and + the keys of dict_metrics_losses MUST be in self.list_metrics_name. + + Args: + batch: dictionary batch input return from dataloader + outputs: outputs of the model + + Returns: + per_example_losses: per example losses for each GPU, shape [B] + dict_metrics_losses: dictionary loss. 
+ """ + audios = batch["audios"] + y_hat = outputs + + p_hat = self._discriminator(y_hat) + p = self._discriminator(tf.expand_dims(audios, 2)) + adv_loss = 0.0 + for i in range(len(p_hat)): + adv_loss += calculate_3d_loss( + tf.ones_like(p_hat[i][-1]), p_hat[i][-1], loss_fn=self.mse_loss + ) + adv_loss /= i + 1 + + # define feature-matching loss + fm_loss = 0.0 + for i in range(len(p_hat)): + for j in range(len(p_hat[i]) - 1): + fm_loss += calculate_3d_loss( + p[i][j], p_hat[i][j], loss_fn=self.mae_loss + ) + fm_loss /= (i + 1) * (j + 1) + adv_loss += self.config["lambda_feat_match"] * fm_loss + + per_example_losses = adv_loss + + dict_metrics_losses = { + "adversarial_loss": adv_loss, + "fm_loss": fm_loss, + "gen_loss": adv_loss, + "mels_spectrogram_loss": calculate_2d_loss( + audios, tf.squeeze(y_hat, -1), loss_fn=self.mels_loss + ), + } + + return per_example_losses, dict_metrics_losses + + def compute_per_example_discriminator_losses(self, batch, gen_outputs): + audios = batch["audios"] + y_hat = gen_outputs + + y = tf.expand_dims(audios, 2) + p = self._discriminator(y) + p_hat = self._discriminator(y_hat) + + real_loss = 0.0 + fake_loss = 0.0 + for i in range(len(p)): + real_loss += calculate_3d_loss( + tf.ones_like(p[i][-1]), p[i][-1], loss_fn=self.mse_loss + ) + fake_loss += calculate_3d_loss( + tf.zeros_like(p_hat[i][-1]), p_hat[i][-1], loss_fn=self.mse_loss + ) + real_loss /= i + 1 + fake_loss /= i + 1 + dis_loss = real_loss + fake_loss + + # calculate per_example_losses and dict_metrics_losses + per_example_losses = dis_loss + + dict_metrics_losses = { + "real_loss": real_loss, + "fake_loss": fake_loss, + "dis_loss": dis_loss, + } + + return per_example_losses, dict_metrics_losses + + def generate_and_save_intermediate_result(self, batch): + """Generate and save intermediate result.""" + import matplotlib.pyplot as plt + + # generate + y_batch_ = self.one_step_predict(batch) + y_batch = batch["audios"] + utt_ids = batch["utt_ids"] + + # convert to tensor. 
+ # here we just take a sample at first replica. + try: + y_batch_ = y_batch_.values[0].numpy() + y_batch = y_batch.values[0].numpy() + utt_ids = utt_ids.values[0].numpy() + except Exception: + y_batch_ = y_batch_.numpy() + y_batch = y_batch.numpy() + utt_ids = utt_ids.numpy() + + # check directory + dirname = os.path.join(self.config["outdir"], f"predictions/{self.steps}steps") + if not os.path.exists(dirname): + os.makedirs(dirname) + + for idx, (y, y_) in enumerate(zip(y_batch, y_batch_), 0): + # convert to ndarray + y, y_ = tf.reshape(y, [-1]).numpy(), tf.reshape(y_, [-1]).numpy() + + # plit figure and save it + utt_id = utt_ids[idx] + figname = os.path.join(dirname, f"{utt_id}.png") + plt.subplot(2, 1, 1) + plt.plot(y) + plt.title("groundtruth speech") + plt.subplot(2, 1, 2) + plt.plot(y_) + plt.title(f"generated speech @ {self.steps} steps") + plt.tight_layout() + plt.savefig(figname) + plt.close() + + # save as wavefile + y = np.clip(y, -1, 1) + y_ = np.clip(y_, -1, 1) + sf.write( + figname.replace(".png", "_ref.wav"), + y, + self.config["sampling_rate"], + "PCM_16", + ) + sf.write( + figname.replace(".png", "_gen.wav"), + y_, + self.config["sampling_rate"], + "PCM_16", + ) + + +def collater( + items, + batch_max_steps=tf.constant(8192, dtype=tf.int32), + hop_size=tf.constant(256, dtype=tf.int32), +): + """Initialize collater (mapping function) for Tensorflow Audio-Mel Dataset. + + Args: + batch_max_steps (int): The maximum length of input signal in batch. + hop_size (int): Hop size of auxiliary features. 
+ + """ + audio, mel = items["audios"], items["mels"] + + if batch_max_steps is None: + batch_max_steps = (tf.shape(audio)[0] // hop_size) * hop_size + + batch_max_frames = batch_max_steps // hop_size + if len(audio) < len(mel) * hop_size: + audio = tf.pad(audio, [[0, len(mel) * hop_size - len(audio)]]) + + if len(mel) > batch_max_frames: + # randomly pickup with the batch_max_steps length of the part + interval_start = 0 + interval_end = len(mel) - batch_max_frames + start_frame = tf.random.uniform( + shape=[], minval=interval_start, maxval=interval_end, dtype=tf.int32 + ) + start_step = start_frame * hop_size + audio = audio[start_step : start_step + batch_max_steps] + mel = mel[start_frame : start_frame + batch_max_frames, :] + else: + audio = tf.pad(audio, [[0, batch_max_steps - len(audio)]]) + mel = tf.pad(mel, [[0, batch_max_frames - len(mel)], [0, 0]]) + + items = { + "utt_ids": items["utt_ids"], + "audios": audio, + "mels": mel, + "mel_lengths": len(mel), + "audio_lengths": len(audio), + } + + return items + + +def main(): + """Run training process.""" + parser = argparse.ArgumentParser( + description="Train MelGAN (See detail in tensorflow_tts/bin/train-melgan.py)" + ) + parser.add_argument( + "--train-dir", + default=None, + type=str, + help="directory including training data. ", + ) + parser.add_argument( + "--dev-dir", + default=None, + type=str, + help="directory including development data. ", + ) + parser.add_argument( + "--use-norm", default=1, type=int, help="use norm mels for training or raw." + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save checkpoints." + ) + parser.add_argument( + "--config", type=str, required=True, help="yaml format configuration file." + ) + parser.add_argument( + "--resume", + default="", + type=str, + nargs="?", + help='checkpoint file path to resume training. (default="")', + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. 
higher is more logging. (default=1)", + ) + parser.add_argument( + "--generator_mixed_precision", + default=0, + type=int, + help="using mixed precision for generator or not.", + ) + parser.add_argument( + "--discriminator_mixed_precision", + default=0, + type=int, + help="using mixed precision for discriminator or not.", + ) + parser.add_argument( + "--pretrained", + default="", + type=str, + nargs="?", + help="path of .h5 melgan generator to load weights from", + ) + args = parser.parse_args() + + # return strategy + STRATEGY = return_strategy() + + # set mixed precision config + if args.generator_mixed_precision == 1 or args.discriminator_mixed_precision == 1: + tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True}) + + args.generator_mixed_precision = bool(args.generator_mixed_precision) + args.discriminator_mixed_precision = bool(args.discriminator_mixed_precision) + + args.use_norm = bool(args.use_norm) + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # check arguments + if args.train_dir is None: + raise ValueError("Please specify --train-dir") + if args.dev_dir is None: + raise ValueError("Please specify either --valid-dir") + + # load and save config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + config["version"] = tensorflow_tts.__version__ + with open(os.path.join(args.outdir, 
"config.yml"), "w") as f: + yaml.dump(config, f, Dumper=yaml.Dumper) + for key, value in config.items(): + logging.info(f"{key} = {value}") + + # get dataset + if config["remove_short_samples"]: + mel_length_threshold = config["batch_max_steps"] // config[ + "hop_size" + ] + 2 * config["melgan_generator_params"].get("aux_context_window", 0) + else: + mel_length_threshold = None + + if config["format"] == "npy": + audio_query = "*-wave.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + audio_load_fn = np.load + mel_load_fn = np.load + else: + raise ValueError("Only npy are supported.") + + # define train/valid dataset + train_dataset = AudioMelDataset( + root_dir=args.train_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant(config["batch_max_steps"], dtype=tf.int32), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] + * STRATEGY.num_replicas_in_sync + * config["gradient_accumulation_steps"], + ) + + valid_dataset = AudioMelDataset( + root_dir=args.dev_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant( + config["batch_max_steps_valid"], dtype=tf.int32 + ), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync, + ) + + # define trainer + trainer = MelganTrainer( + steps=0, + epochs=0, + config=config, + strategy=STRATEGY, + is_generator_mixed_precision=args.generator_mixed_precision, + 
is_discriminator_mixed_precision=args.discriminator_mixed_precision, + ) + + # define generator and discriminator + with STRATEGY.scope(): + generator = TFMelGANGenerator( + MELGAN_CONFIG.MelGANGeneratorConfig(**config["melgan_generator_params"]), + name="melgan_generator", + ) + + discriminator = TFMelGANMultiScaleDiscriminator( + MELGAN_CONFIG.MelGANDiscriminatorConfig( + **config["melgan_discriminator_params"] + ), + name="melgan_discriminator", + ) + + # dummy input to build model. + fake_mels = tf.random.uniform(shape=[1, 100, 80], dtype=tf.float32) + y_hat = generator(fake_mels) + discriminator(y_hat) + + if len(args.pretrained) > 1: + generator.load_weights(args.pretrained) + logging.info( + f"Successfully loaded pretrained weight from {args.pretrained}." + ) + + generator.summary() + discriminator.summary() + + gen_optimizer = tf.keras.optimizers.Adam(**config["generator_optimizer_params"]) + dis_optimizer = tf.keras.optimizers.Adam( + **config["discriminator_optimizer_params"] + ) + + trainer.compile( + gen_model=generator, + dis_model=discriminator, + gen_optimizer=gen_optimizer, + dis_optimizer=dis_optimizer, + ) + + # start training + try: + trainer.fit( + train_dataset, + valid_dataset, + saved_path=os.path.join(config["outdir"], "checkpoints/"), + resume=args.resume, + ) + except KeyboardInterrupt: + trainer.save_checkpoint() + logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.") + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/melgan_stft/README.md b/TensorFlowTTS/examples/melgan_stft/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1ea34227f6b805210e1f267f6667746cc44bdb1a --- /dev/null +++ b/TensorFlowTTS/examples/melgan_stft/README.md @@ -0,0 +1,85 @@ +# MelGAN STFT: MelGAN With Multi Resolution STFT Loss +Based on the script [`train_melgan_stft.py`](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/melgan_stft/train_melgan_stft.py). 
+ +## Training MelGAN STFT from scratch with LJSpeech dataset. +This example code show you how to train MelGAN from scratch with Tensorflow 2 based on custom training loop and tf.function. The data used for this example is LJSpeech, you can download the dataset at [link](https://keithito.com/LJ-Speech-Dataset/). + +### Step 1: Create Tensorflow based Dataloader (tf.dataset) +Please see detail at [examples/melgan/](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/melgan#step-1-create-tensorflow-based-dataloader-tfdataset) + +### Step 2: Training from scratch +After you re-define your dataloader, pls modify an input arguments, train_dataset and valid_dataset from [`train_melgan_stft.py`](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/melgan_stft/train_melgan_stft.py). Here is an example command line to training melgan-stft from scratch: + +First, you need training generator with only stft loss: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/melgan_stft/train_melgan_stft.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/melgan_stft/exp/train.melgan_stft.v1/ \ + --config ./examples/melgan_stft/conf/melgan_stft.v1.yaml \ + --use-norm 1 + --generator_mixed_precision 1 \ + --resume "" +``` + +Then resume and start training generator + discriminator: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/melgan_stft/train_melgan_stft.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/melgan_stft/exp/train.melgan_stft.v1/ \ + --config ./examples/melgan_stft/conf/melgan_stft.v1.yaml \ + --use-norm 1 + --resume ./examples/melgan_stft/exp/train.melgan_stft.v1/checkpoints/ckpt-100000 +``` + +IF you want to use MultiGPU to training you can replace `CUDA_VISIBLE_DEVICES=0` by `CUDA_VISIBLE_DEVICES=0,1,2,3` for example. You also need to tune the `batch_size` for each GPU (in config file) by yourself to maximize the performance. 
Note that MultiGPU is currently supported for training but not yet for decoding.
+ +## Pretrained Models and Audio samples +| Model | Conf | Lang | Fs [Hz] | Mel range [Hz] | FFT / Hop / Win [pt] | # iters | +| :------ | :---: | :---: | :----: | :--------: | :---------------: | :-----: | +| [melgan_stft.v1](https://drive.google.com/drive/folders/1xUkDjbciupEkM3N4obiJAYySTo6J9z6b?usp=sharing) | [link](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/melgan_stft/conf/melgan_stft.v1.yaml) | EN | 22.05k | 80-7600 | 1024 / 256 / None | 1900k | + + +## Reference + +1. https://github.com/descriptinc/melgan-neurips +2. https://github.com/kan-bayashi/ParallelWaveGAN +3. https://github.com/tensorflow/addons +4. [MelGAN: Generative Adversarial Networks for Conditional Waveform Synthesis](https://arxiv.org/abs/1910.06711) +5. [Parallel WaveGAN: A fast waveform generation model based on generative adversarial networks with multi-resolution spectrogram](https://arxiv.org/abs/1910.11480) \ No newline at end of file diff --git a/TensorFlowTTS/examples/melgan_stft/conf/melgan_stft.v1.yaml b/TensorFlowTTS/examples/melgan_stft/conf/melgan_stft.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4660e586066c8246a94fe2e8a82801b397a69159 --- /dev/null +++ b/TensorFlowTTS/examples/melgan_stft/conf/melgan_stft.v1.yaml @@ -0,0 +1,102 @@ + +# This is the hyperparameter configuration file for MelGAN with Multi Resolution STFT. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 4000k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 +hop_size: 256 # Hop size. 
+format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "melgan_generator" + +melgan_generator_params: + out_channels: 1 # Number of output channels. + kernel_size: 7 # Kernel size of initial and final conv layers. + filters: 512 # Initial number of channels for conv layers. + upsample_scales: [8, 8, 2, 2] # List of Upsampling scales. + stack_kernel_size: 3 # Kernel size of dilated conv layers in residual stack. + stacks: 3 # Number of stacks in a single residual stack module. + is_weight_norm: false + +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +melgan_discriminator_params: + out_channels: 1 # Number of output channels. + scales: 3 # Number of multi-scales. + downsample_pooling: "AveragePooling1D" # Pooling type for the input downsampling. + downsample_pooling_params: # Parameters of the above pooling function. + pool_size: 4 + strides: 2 + kernel_sizes: [5, 3] # List of kernel size. + filters: 16 # Number of channels of the initial conv layer. + max_downsample_filters: 1024 # Maximum number of channels of downsampling layers. + downsample_scales: [4, 4, 4, 4] # List of downsampling scales. + nonlinear_activation: "LeakyReLU" # Nonlinear activation function. + nonlinear_activation_params: # Parameters of nonlinear activation function. + alpha: 0.2 + is_weight_norm: false + +########################################################### +# STFT LOSS SETTING # +########################################################### +stft_loss_params: + fft_lengths: [1024, 2048, 512] # List of FFT size for STFT-based loss. + frame_steps: [120, 240, 50] # List of hop size for STFT-based loss + frame_lengths: [600, 1200, 240] # List of window length for STFT-based loss. 
+ + +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_feat_match: 10.0 +lambda_adv: 4.0 + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 16 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +batch_max_steps: 8192 # Length of each audio in batch for training. Make sure dividable by hop_size. +batch_max_steps_valid: 81920 # Length of each audio for validation. Make sure dividable by hope_size. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000] # = discriminator_train_start_steps. + values: [0.0005, 0.0001] # learning rate each interval. + + +discriminator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [0] # after resume and start training discriminator, global steps is 100k, but local discriminator step is 0 + values: [0.0001, 0.0001] # learning rate each interval. + +gradient_accumulation_steps: 1 +########################################################### +# INTERVAL SETTING # +########################################################### +discriminator_train_start_steps: 100000 # steps begin training discriminator +train_max_steps: 4000000 # Number of training steps. +save_interval_steps: 20000 # Interval steps to save checkpoint. +eval_interval_steps: 5000 # Interval steps to evaluate the network. 
+log_interval_steps: 200 # Interval steps to record the training log. + +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/melgan_stft/fig/melgan.stft.v1.eval.png b/TensorFlowTTS/examples/melgan_stft/fig/melgan.stft.v1.eval.png new file mode 100644 index 0000000000000000000000000000000000000000..354379640e248386e7d3bd25797e5a0c72cb4b3c Binary files /dev/null and b/TensorFlowTTS/examples/melgan_stft/fig/melgan.stft.v1.eval.png differ diff --git a/TensorFlowTTS/examples/melgan_stft/fig/melgan.stft.v1.train.png b/TensorFlowTTS/examples/melgan_stft/fig/melgan.stft.v1.train.png new file mode 100644 index 0000000000000000000000000000000000000000..350425707a2c1e6c8e59581613a84b37f9384e38 Binary files /dev/null and b/TensorFlowTTS/examples/melgan_stft/fig/melgan.stft.v1.train.png differ diff --git a/TensorFlowTTS/examples/melgan_stft/train_melgan_stft.py b/TensorFlowTTS/examples/melgan_stft/train_melgan_stft.py new file mode 100644 index 0000000000000000000000000000000000000000..a2820f2003981c31001b50aa462bb66b937d6f82 --- /dev/null +++ b/TensorFlowTTS/examples/melgan_stft/train_melgan_stft.py @@ -0,0 +1,402 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Train MelGAN Multi Resolution STFT Loss.""" + +import tensorflow as tf + +physical_devices = tf.config.list_physical_devices("GPU") +for i in range(len(physical_devices)): + tf.config.experimental.set_memory_growth(physical_devices[i], True) + +import sys + +sys.path.append(".") + +import argparse +import logging +import os + +import numpy as np +import yaml + +import tensorflow_tts +import tensorflow_tts.configs.melgan as MELGAN_CONFIG +from examples.melgan.audio_mel_dataset import AudioMelDataset +from examples.melgan.train_melgan import MelganTrainer, collater +from tensorflow_tts.losses import TFMultiResolutionSTFT +from tensorflow_tts.models import TFMelGANGenerator, TFMelGANMultiScaleDiscriminator +from tensorflow_tts.utils import calculate_2d_loss, calculate_3d_loss, return_strategy + + +class MultiSTFTMelganTrainer(MelganTrainer): + """Multi STFT Melgan Trainer class based on MelganTrainer.""" + + def __init__( + self, + config, + strategy, + steps=0, + epochs=0, + is_generator_mixed_precision=False, + is_discriminator_mixed_precision=False, + ): + """Initialize trainer. + + Args: + steps (int): Initial global steps. + epochs (int): Initial global epochs. + config (dict): Config dict loaded from yaml format configuration file. + is_generator_mixed_precision (bool): Use mixed precision for generator or not. + is_discriminator_mixed_precision (bool): Use mixed precision for discriminator or not. 
+ + """ + super(MultiSTFTMelganTrainer, self).__init__( + config=config, + steps=steps, + epochs=epochs, + strategy=strategy, + is_generator_mixed_precision=is_generator_mixed_precision, + is_discriminator_mixed_precision=is_discriminator_mixed_precision, + ) + + self.list_metrics_name = [ + "adversarial_loss", + "fm_loss", + "gen_loss", + "real_loss", + "fake_loss", + "dis_loss", + "spectral_convergence_loss", + "log_magnitude_loss", + ] + + self.init_train_eval_metrics(self.list_metrics_name) + self.reset_states_train() + self.reset_states_eval() + + def compile(self, gen_model, dis_model, gen_optimizer, dis_optimizer): + super().compile(gen_model, dis_model, gen_optimizer, dis_optimizer) + # define loss + self.stft_loss = TFMultiResolutionSTFT(**self.config["stft_loss_params"]) + + def compute_per_example_generator_losses(self, batch, outputs): + """Compute per example generator losses and return dict_metrics_losses + Note that all element of the loss MUST has a shape [batch_size] and + the keys of dict_metrics_losses MUST be in self.list_metrics_name. + + Args: + batch: dictionary batch input return from dataloader + outputs: outputs of the model + + Returns: + per_example_losses: per example losses for each GPU, shape [B] + dict_metrics_losses: dictionary loss. 
+ """ + dict_metrics_losses = {} + per_example_losses = 0.0 + + audios = batch["audios"] + y_hat = outputs + + # calculate multi-resolution stft loss + sc_loss, mag_loss = calculate_2d_loss( + audios, tf.squeeze(y_hat, -1), self.stft_loss + ) + + # trick to prevent loss expoded here + sc_loss = tf.where(sc_loss >= 15.0, 0.0, sc_loss) + mag_loss = tf.where(mag_loss >= 15.0, 0.0, mag_loss) + + # compute generator loss + gen_loss = 0.5 * (sc_loss + mag_loss) + + if self.steps >= self.config["discriminator_train_start_steps"]: + p_hat = self._discriminator(y_hat) + p = self._discriminator(tf.expand_dims(audios, 2)) + adv_loss = 0.0 + for i in range(len(p_hat)): + adv_loss += calculate_3d_loss( + tf.ones_like(p_hat[i][-1]), p_hat[i][-1], loss_fn=self.mse_loss + ) + adv_loss /= i + 1 + + # define feature-matching loss + fm_loss = 0.0 + for i in range(len(p_hat)): + for j in range(len(p_hat[i]) - 1): + fm_loss += calculate_3d_loss( + p[i][j], p_hat[i][j], loss_fn=self.mae_loss + ) + fm_loss /= (i + 1) * (j + 1) + adv_loss += self.config["lambda_feat_match"] * fm_loss + gen_loss += self.config["lambda_adv"] * adv_loss + + dict_metrics_losses.update({"adversarial_loss": adv_loss}) + dict_metrics_losses.update({"fm_loss": fm_loss}) + + dict_metrics_losses.update({"gen_loss": gen_loss}) + dict_metrics_losses.update({"spectral_convergence_loss": sc_loss}) + dict_metrics_losses.update({"log_magnitude_loss": mag_loss}) + + per_example_losses = gen_loss + return per_example_losses, dict_metrics_losses + + +def main(): + """Run training process.""" + parser = argparse.ArgumentParser( + description="Train MelGAN (See detail in tensorflow_tts/bin/train-melgan.py)" + ) + parser.add_argument( + "--train-dir", + default=None, + type=str, + help="directory including training data. ", + ) + parser.add_argument( + "--dev-dir", + default=None, + type=str, + help="directory including development data. 
", + ) + parser.add_argument( + "--use-norm", default=1, type=int, help="use norm mels for training or raw." + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save checkpoints." + ) + parser.add_argument( + "--config", type=str, required=True, help="yaml format configuration file." + ) + parser.add_argument( + "--resume", + default="", + type=str, + nargs="?", + help='checkpoint file path to resume training. (default="")', + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + parser.add_argument( + "--generator_mixed_precision", + default=0, + type=int, + help="using mixed precision for generator or not.", + ) + parser.add_argument( + "--discriminator_mixed_precision", + default=0, + type=int, + help="using mixed precision for discriminator or not.", + ) + parser.add_argument( + "--pretrained", + default="", + type=str, + nargs="?", + help="path of .h5 melgan generator to load weights from", + ) + args = parser.parse_args() + + # return strategy + STRATEGY = return_strategy() + + # set mixed precision config + if args.generator_mixed_precision == 1 or args.discriminator_mixed_precision == 1: + tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True}) + + args.generator_mixed_precision = bool(args.generator_mixed_precision) + args.discriminator_mixed_precision = bool(args.discriminator_mixed_precision) + + args.use_norm = bool(args.use_norm) + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: 
%(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # check arguments + if args.train_dir is None: + raise ValueError("Please specify --train-dir") + if args.dev_dir is None: + raise ValueError("Please specify either --valid-dir") + + # load and save config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + config["version"] = tensorflow_tts.__version__ + with open(os.path.join(args.outdir, "config.yml"), "w") as f: + yaml.dump(config, f, Dumper=yaml.Dumper) + for key, value in config.items(): + logging.info(f"{key} = {value}") + + # get dataset + if config["remove_short_samples"]: + mel_length_threshold = config["batch_max_steps"] // config[ + "hop_size" + ] + 2 * config["melgan_generator_params"].get("aux_context_window", 0) + else: + mel_length_threshold = None + + if config["format"] == "npy": + audio_query = "*-wave.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + audio_load_fn = np.load + mel_load_fn = np.load + else: + raise ValueError("Only npy are supported.") + + # define train/valid dataset + train_dataset = AudioMelDataset( + root_dir=args.train_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant(config["batch_max_steps"], dtype=tf.int32), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] + * STRATEGY.num_replicas_in_sync + * config["gradient_accumulation_steps"], + ) + + valid_dataset = AudioMelDataset( + root_dir=args.dev_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + 
mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant( + config["batch_max_steps_valid"], dtype=tf.int32 + ), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync, + ) + + # define trainer + trainer = MultiSTFTMelganTrainer( + steps=0, + epochs=0, + config=config, + strategy=STRATEGY, + is_generator_mixed_precision=args.generator_mixed_precision, + is_discriminator_mixed_precision=args.discriminator_mixed_precision, + ) + + with STRATEGY.scope(): + # define generator and discriminator + generator = TFMelGANGenerator( + MELGAN_CONFIG.MelGANGeneratorConfig(**config["melgan_generator_params"]), + name="melgan_generator", + ) + + discriminator = TFMelGANMultiScaleDiscriminator( + MELGAN_CONFIG.MelGANDiscriminatorConfig( + **config["melgan_discriminator_params"] + ), + name="melgan_discriminator", + ) + + # dummy input to build model. + fake_mels = tf.random.uniform(shape=[1, 100, 80], dtype=tf.float32) + y_hat = generator(fake_mels) + discriminator(y_hat) + + if len(args.pretrained) > 1: + generator.load_weights(args.pretrained) + logging.info( + f"Successfully loaded pretrained weight from {args.pretrained}." 
+ ) + + generator.summary() + discriminator.summary() + + # define optimizer + generator_lr_fn = getattr( + tf.keras.optimizers.schedules, config["generator_optimizer_params"]["lr_fn"] + )(**config["generator_optimizer_params"]["lr_params"]) + discriminator_lr_fn = getattr( + tf.keras.optimizers.schedules, + config["discriminator_optimizer_params"]["lr_fn"], + )(**config["discriminator_optimizer_params"]["lr_params"]) + + gen_optimizer = tf.keras.optimizers.Adam( + learning_rate=generator_lr_fn, amsgrad=False + ) + dis_optimizer = tf.keras.optimizers.Adam( + learning_rate=discriminator_lr_fn, amsgrad=False + ) + + trainer.compile( + gen_model=generator, + dis_model=discriminator, + gen_optimizer=gen_optimizer, + dis_optimizer=dis_optimizer, + ) + + # start training + try: + trainer.fit( + train_dataset, + valid_dataset, + saved_path=os.path.join(config["outdir"], "checkpoints/"), + resume=args.resume, + ) + except KeyboardInterrupt: + trainer.save_checkpoint() + logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.") + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/mfa_extraction/README.md b/TensorFlowTTS/examples/mfa_extraction/README.md new file mode 100644 index 0000000000000000000000000000000000000000..504bcb33f498a09e4f14b409e85257ebacb7e312 --- /dev/null +++ b/TensorFlowTTS/examples/mfa_extraction/README.md @@ -0,0 +1,69 @@ +# MFA based extraction for FastSpeech + +## Prepare +Everything is done from main repo folder so TensorflowTTS/ + +0. Optional* Modify MFA scripts to work with your language (https://montreal-forced-aligner.readthedocs.io/en/latest/pretrained_models.html) + +1. Download pretrained mfa, lexicon and run extract textgrids: + +- ``` + bash examples/mfa_extraction/scripts/prepare_mfa.sh + ``` + +- ``` + python examples/mfa_extraction/run_mfa.py \ + --corpus_directory ./libritts \ + --output_directory ./mfa/parsed \ + --jobs 8 + ``` + + After this step, the TextGrids is allocated at `./mfa/parsed`. 
+ +2. Extract duration from textgrid files: +- ``` + python examples/mfa_extraction/txt_grid_parser.py \ + --yaml_path examples/fastspeech2_libritts/conf/fastspeech2libritts.yaml \ + --dataset_path ./libritts \ + --text_grid_path ./mfa/parsed \ + --output_durations_path ./libritts/durations \ + --sample_rate 24000 + ``` + +- Dataset structure after finish this step: + ``` + |- TensorFlowTTS/ + | |- LibriTTS/ + | |- |- train-clean-100/ + | |- |- SPEAKERS.txt + | |- |- ... + | |- dataset/ + | |- |- 200/ + | |- |- |- 200_124139_000001_000000.txt + | |- |- |- 200_124139_000001_000000.wav + | |- |- |- ... + | |- |- 250/ + | |- |- ... + | |- |- durations/ + | |- |- train.txt + | |- tensorflow_tts/ + | |- models/ + | |- ... + ``` +3. Optional* add your own dataset parser based on tensorflow_tts/processor/experiment/example_dataset.py ( If base processor dataset didnt match yours ) + +4. Run preprocess and normalization (Step 4,5 in `examples/fastspeech2_libritts/README.MD`) + +5. Run fix mismatch to fix few frames difference in audio and duration files: + +- ``` + python examples/mfa_extraction/fix_mismatch.py \ + --base_path ./dump \ + --trimmed_dur_path ./dataset/trimmed-durations \ + --dur_path ./dataset/durations + ``` + +## Problems with MFA extraction +Looks like MFA have problems with trimmed files it works better (in my experiments) with ~100ms of silence at start and end + +Short files can get a lot of false positive like only silence extraction (LibriTTS example) so i would get only samples >2s diff --git a/TensorFlowTTS/examples/mfa_extraction/fix_mismatch.py b/TensorFlowTTS/examples/mfa_extraction/fix_mismatch.py new file mode 100644 index 0000000000000000000000000000000000000000..501524c0dce1562019c568d3239506a3bcc851f8 --- /dev/null +++ b/TensorFlowTTS/examples/mfa_extraction/fix_mismatch.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Fix mismatch between sum durations and mel lengths.""" + +import numpy as np +import os +from tqdm import tqdm +import click +import logging +import sys + + +logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", +) + + +@click.command() +@click.option("--base_path", default="dump") +@click.option("--trimmed_dur_path", default="dataset/trimmed-durations") +@click.option("--dur_path", default="dataset/durations") +@click.option("--use_norm", default="f") +def fix(base_path: str, dur_path: str, trimmed_dur_path: str, use_norm: str): + for t in ["train", "valid"]: + mfa_longer = [] + mfa_shorter = [] + big_diff = [] + not_fixed = [] + pre_path = os.path.join(base_path, t) + os.makedirs(os.path.join(pre_path, "fix_dur"), exist_ok=True) + + logging.info(f"FIXING {t} set ...\n") + for i in tqdm(os.listdir(os.path.join(pre_path, "ids"))): + if use_norm == "t": + mel = np.load( + os.path.join( + pre_path, "norm-feats", f"{i.split('-')[0]}-norm-feats.npy" + ) + ) + else: + mel = np.load( + os.path.join( + pre_path, "raw-feats", f"{i.split('-')[0]}-raw-feats.npy" + ) + ) + + try: + dur = np.load( + os.path.join(trimmed_dur_path, f"{i.split('-')[0]}-durations.npy") + ) + except: + dur = np.load( + os.path.join(dur_path, f"{i.split('-')[0]}-durations.npy") + ) + + l_mel = len(mel) + dur_s = np.sum(dur) + cloned = np.array(dur, copy=True) + diff = 
abs(l_mel - dur_s) + + if abs(l_mel - dur_s) > 30: # more then 300 ms + big_diff.append([i, abs(l_mel - dur_s)]) + + if dur_s > l_mel: + for j in range(1, len(dur) - 1): + if diff == 0: + break + dur_val = cloned[-j] + + if dur_val >= diff: + cloned[-j] -= diff + diff -= dur_val + break + else: + cloned[-j] = 0 + diff -= dur_val + + if j == len(dur) - 2: + not_fixed.append(i) + + mfa_longer.append(abs(l_mel - dur_s)) + elif dur_s < l_mel: + cloned[-1] += diff + mfa_shorter.append(abs(l_mel - dur_s)) + + np.save( + os.path.join(pre_path, "fix_dur", f"{i.split('-')[0]}-durations.npy"), + cloned.astype(np.int32), + allow_pickle=False, + ) + + logging.info( + f"{t} stats: number of mfa with longer duration: {len(mfa_longer)}, total diff: {sum(mfa_longer)}" + f", mean diff: {sum(mfa_longer)/len(mfa_longer) if len(mfa_longer) > 0 else 0}" + ) + logging.info( + f"{t} stats: number of mfa with shorter duration: {len(mfa_shorter)}, total diff: {sum(mfa_shorter)}" + f", mean diff: {sum(mfa_shorter)/len(mfa_shorter) if len(mfa_shorter) > 0 else 0}" + ) + logging.info( + f"{t} stats: number of files with a ''big'' duration diff: {len(big_diff)} if number>1 you should check it" + ) + logging.info(f"{t} stats: not fixed len: {len(not_fixed)}\n") + + +if __name__ == "__main__": + fix() diff --git a/TensorFlowTTS/examples/mfa_extraction/requirements.txt b/TensorFlowTTS/examples/mfa_extraction/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..34d1c8a45c051962a4697f8e42efabca714baf40 --- /dev/null +++ b/TensorFlowTTS/examples/mfa_extraction/requirements.txt @@ -0,0 +1,3 @@ +textgrid +click +g2p_en \ No newline at end of file diff --git a/TensorFlowTTS/examples/mfa_extraction/run_mfa.py b/TensorFlowTTS/examples/mfa_extraction/run_mfa.py new file mode 100644 index 0000000000000000000000000000000000000000..85c88ea154886990f666e7b1141f5035e6ca7531 --- /dev/null +++ b/TensorFlowTTS/examples/mfa_extraction/run_mfa.py @@ -0,0 +1,53 @@ +# -*- coding: 
utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Runing mfa to extract textgrids.""" + +from subprocess import call +from pathlib import Path + +import click +import os + + +@click.command() +@click.option("--mfa_path", default=os.path.join('mfa', 'montreal-forced-aligner', 'bin', 'mfa_align')) +@click.option("--corpus_directory", default="libritts") +@click.option("--lexicon", default=os.path.join('mfa', 'lexicon', 'librispeech-lexicon.txt')) +@click.option("--acoustic_model_path", default=os.path.join('mfa', 'montreal-forced-aligner', 'pretrained_models', 'english.zip')) +@click.option("--output_directory", default=os.path.join('mfa', 'parsed')) +@click.option("--jobs", default="8") +def run_mfa( + mfa_path: str, + corpus_directory: str, + lexicon: str, + acoustic_model_path: str, + output_directory: str, + jobs: str, +): + Path(output_directory).mkdir(parents=True, exist_ok=True) + call( + [ + f".{os.path.sep}{mfa_path}", + corpus_directory, + lexicon, + acoustic_model_path, + output_directory, + f"-j {jobs}" + ] + ) + + +if __name__ == "__main__": + run_mfa() diff --git a/TensorFlowTTS/examples/mfa_extraction/scripts/prepare_mfa.sh b/TensorFlowTTS/examples/mfa_extraction/scripts/prepare_mfa.sh new file mode 100644 index 0000000000000000000000000000000000000000..b5e2ab155fe24d2dcfdb49344aca9b68b0263409 --- /dev/null +++ b/TensorFlowTTS/examples/mfa_extraction/scripts/prepare_mfa.sh @@ -0,0 +1,9 @@ +#!/bin/bash 
+mkdir mfa +cd mfa +wget https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner/releases/download/v1.1.0-beta.2/montreal-forced-aligner_linux.tar.gz +tar -zxvf montreal-forced-aligner_linux.tar.gz +cd mfa +mkdir lexicon +cd lexicon +wget http://www.openslr.org/resources/11/librispeech-lexicon.txt \ No newline at end of file diff --git a/TensorFlowTTS/examples/mfa_extraction/txt_grid_parser.py b/TensorFlowTTS/examples/mfa_extraction/txt_grid_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..aef218f5f875537817629d7414a06b53e18d70a7 --- /dev/null +++ b/TensorFlowTTS/examples/mfa_extraction/txt_grid_parser.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Create training file and durations from textgrids.""" + +import os +from dataclasses import dataclass +from pathlib import Path + +import click +import numpy as np +import textgrid +import yaml +from tqdm import tqdm + +import logging +import sys + + +logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", +) + + +@dataclass +class TxtGridParser: + sample_rate: int + multi_speaker: bool + txt_grid_path: str + hop_size: int + output_durations_path: str + dataset_path: str + training_file: str = "train.txt" + phones_mapper = {"sil": "SIL", "sp": "SIL", "spn": "SIL", "": "END"} + """ '' -> is last token in every cases i encounter so u can change it for END but there is a safety check + so it'll fail always when empty string isn't last char in ur dataset just chang it to silence then + """ + sil_phones = set(phones_mapper.keys()) + + def parse(self): + speakers = ( + [ + i + for i in os.listdir(self.txt_grid_path) + if os.path.isdir(os.path.join(self.txt_grid_path, i)) + ] + if self.multi_speaker + else [] + ) + data = [] + + if speakers: + for speaker in speakers: + file_list = os.listdir(os.path.join(self.txt_grid_path, speaker)) + self.parse_text_grid(file_list, data, speaker) + else: + file_list = os.listdir(self.txt_grid_path) + self.parse_text_grid(file_list, data, "") + + with open(os.path.join(self.dataset_path, self.training_file), "w") as f: + f.writelines(data) + + def parse_text_grid(self, file_list: list, data: list, speaker_name: str): + logging.info( + f"\n Parse: {len(file_list)} files, speaker name: {speaker_name} \n" + ) + for f_name in tqdm(file_list): + text_grid = textgrid.TextGrid.fromFile( + os.path.join(self.txt_grid_path, speaker_name, f_name) + ) + pha = text_grid[1] + durations = [] + phs = [] + for iterator, interval in enumerate(pha.intervals): + mark = interval.mark + + if mark in self.sil_phones: + mark = self.phones_mapper[mark] + if mark == "END": + 
assert iterator == pha.intervals.__len__() - 1 + # check if empty ph is always last example in your dataset if not fix it + + dur = interval.duration() * (self.sample_rate / self.hop_size) + durations.append(round(dur)) + phs.append(mark) + + full_ph = " ".join(phs) + + assert full_ph.split(" ").__len__() == durations.__len__() # safety check + + base_name = f_name.split(".TextGrid")[0] + np.save( + os.path.join(self.output_durations_path, f"{base_name}-durations.npy"), + np.array(durations).astype(np.int32), + allow_pickle=False, + ) + data.append(f"{speaker_name}/{base_name}|{full_ph}|{speaker_name}\n") + + +@click.command() +@click.option( + "--yaml_path", default="examples/fastspeech2_libritts/conf/fastspeech2libritts.yaml" +) +@click.option("--dataset_path", default="dataset", type=str, help="Dataset directory") +@click.option("--text_grid_path", default="mfa/parsed", type=str) +@click.option("--output_durations_path", default="dataset/durations") +@click.option("--sample_rate", default=24000, type=int) +@click.option("--multi_speakers", default=1, type=int, help="Use multi-speaker version") +@click.option("--train_file", default="train.txt") +def main( + yaml_path: str, + dataset_path: str, + text_grid_path: str, + output_durations_path: str, + sample_rate: int, + multi_speakers: int, + train_file: str, +): + + with open(yaml_path) as file: + attrs = yaml.load(file) + hop_size = attrs["hop_size"] + + Path(output_durations_path).mkdir(parents=True, exist_ok=True) + + txt_grid_parser = TxtGridParser( + sample_rate=sample_rate, + multi_speaker=bool(multi_speakers), + txt_grid_path=text_grid_path, + hop_size=hop_size, + output_durations_path=output_durations_path, + training_file=train_file, + dataset_path=dataset_path, + ) + txt_grid_parser.parse() + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/multiband_melgan/README.md b/TensorFlowTTS/examples/multiband_melgan/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..7ddfa5bf47dbe1445e2b9f15b951cb4544abba24 --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan/README.md @@ -0,0 +1,94 @@ +# Multi-band MelGAN: Faster Waveform Generation for High-Quality Text-to-Speech +Based on the script [`train_multiband_melgan.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/multiband_melgan/train_multiband_melgan.py). + +## Training Multi-band MelGAN from scratch with LJSpeech dataset. +This example code show you how to train MelGAN from scratch with Tensorflow 2 based on custom training loop and tf.function. The data used for this example is LJSpeech, you can download the dataset at [link](https://keithito.com/LJ-Speech-Dataset/). + +### Step 1: Create Tensorflow based Dataloader (tf.dataset) +Please see detail at [examples/melgan/](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/melgan#step-1-create-tensorflow-based-dataloader-tfdataset) + +### Step 2: Training from scratch +After you re-define your dataloader, pls modify an input arguments, train_dataset and valid_dataset from [`train_multiband_melgan.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/multiband_melgan/train_multiband_melgan.py). 
Here is an example command line to training melgan-stft from scratch: + +First, you need training generator with only stft loss: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/multiband_melgan/train_multiband_melgan.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/multiband_melgan/exp/train.multiband_melgan.v1/ \ + --config ./examples/multiband_melgan/conf/multiband_melgan.v1.yaml \ + --use-norm 1 \ + --generator_mixed_precision 1 \ + --resume "" +``` + +Then resume and start training generator + discriminator: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/multiband_melgan/train_multiband_melgan.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/multiband_melgan/exp/train.multiband_melgan.v1/ \ + --config ./examples/multiband_melgan/conf/multiband_melgan.v1.yaml \ + --use-norm 1 \ + --resume ./examples/multiband_melgan/exp/train.multiband_melgan.v1/checkpoints/ckpt-200000 +``` + +IF you want to use MultiGPU to training you can replace `CUDA_VISIBLE_DEVICES=0` by `CUDA_VISIBLE_DEVICES=0,1,2,3` for example. You also need to tune the `batch_size` for each GPU (in config file) by yourself to maximize the performance. Note that MultiGPU now support for Training but not yet support for Decode. + +In case you want to resume the training progress, please following below example command line: + +```bash +--resume ./examples/multiband_melgan/exp/train.multiband_melgan.v1/checkpoints/ckpt-100000 +``` + +If you want to finetune a model, use `--pretrained` like this with the filename of the generator +```bash +--pretrained ptgenerator.h5 +``` + +**IMPORTANT NOTES**: + +- If Your Dataset is 16K, upsample_scales = [2, 4, 8] worked. +- If Your Dataset is > 16K (22K, 24K, ...), upsample_scales = [2, 4, 8] didn't worked, used [8, 4, 2] instead. +- Mixed precision make Group Convolution training slower on Discriminator, both pytorch (apex) and tensorflow also has this problems. 
So, **DO NOT USE** mixed precision when discriminator enable. + +### Step 3: Decode audio from folder mel-spectrogram +To running inference on folder mel-spectrogram (eg valid folder), run below command line: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/multiband_melgan/decode_mb_melgan.py \ + --rootdir ./dump/valid/ \ + --outdir ./prediction/multiband_melgan.v1/ \ + --checkpoint ./examples/multiband_melgan/exp/train.multiband_melgan.v1/checkpoints/generator-940000.h5 \ + --config ./examples/multiband_melgan/conf/multiband_melgan.v1.yaml \ + --batch-size 32 \ + --use-norm 1 +``` + +## Finetune MelGAN STFT with ljspeech pretrained on other languages +Just load pretrained model and training from scratch with other languages. **DO NOT FORGET** re-preprocessing on your dataset if needed. A hop_size should be 256 if you want to use our pretrained. + +## Learning Curves +Here is a learning curves of melgan based on this config [`multiband_melgan.v1.yaml`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/multiband_melgan/conf/multiband_melgan.v1.yaml) + + + + + +## Pretrained Models and Audio samples +| Model | Conf | Lang | Fs [Hz] | Mel range [Hz] | FFT / Hop / Win [pt] | # iters | Notes | +| :------ | :---: | :---: | :----: | :--------: | :---------------: | :-----: | :-----: | +| [multiband_melgan.v1](https://drive.google.com/drive/folders/1Hg82YnPbX6dfF7DxVs4c96RBaiFbh-cT?usp=sharing) | [link](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/multiband_melgan/conf/multiband_melgan.v1.yaml) | EN | 22.05k | 80-7600 | 1024 / 256 / None | 940K | -| +| [multiband_melgan.v1](https://drive.google.com/drive/folders/199XCXER51PWf_VzUpOwxfY_8XDfeXuZl?usp=sharing) | [link](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/multiband_melgan/conf/multiband_melgan.v1.yaml) | KO | 22.05k | 80-7600 | 1024 / 256 / None | 1000K | -| +| 
[multiband_melgan.v1_24k](https://drive.google.com/drive/folders/14H6Oa8kGxlIhfZZFf6JFzWL5NVHDpKai?usp=sharing) | [link](https://drive.google.com/file/d/1l2jBwTWVVsRuT5FLDOIDToEhqWBmuCMe/view?usp=sharing) | EN | 24k | 80-7600 | 2048 / 300 / 1200 | 1000K | Converted from [kan-bayashi's model](https://drive.google.com/drive/folders/1jfB15igea6tOQ0hZJGIvnpf3QyNhTLnq?usp=sharing); good universal vocoder| + + + + +## Reference + +1. https://github.com/kan-bayashi/ParallelWaveGAN +2. [Parallel WaveGAN: A fast waveform generation model based on generative adversarial networks with multi-resolution spectrogram](https://arxiv.org/abs/1910.11480) +3. [Multi-band MelGAN: Faster Waveform Generation for High-Quality Text-to-Speech](https://arxiv.org/abs/2005.05106) diff --git a/TensorFlowTTS/examples/multiband_melgan/conf/multiband_melgan.baker.v1.yaml b/TensorFlowTTS/examples/multiband_melgan/conf/multiband_melgan.baker.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..378cc72b908bda6564f402af303408ddc586dd78 --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan/conf/multiband_melgan.baker.v1.yaml @@ -0,0 +1,107 @@ + +# This is the hyperparameter configuration file for Multi-Band MelGAN. +# Please make sure this is adjusted for the Baker dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 1000k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 24000 +hop_size: 300 # Hop size. +format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "multiband_melgan_generator" + +multiband_melgan_generator_params: + out_channels: 4 # Number of output channels (number of subbands). 
+ kernel_size: 7 # Kernel size of initial and final conv layers. + filters: 384 # Initial number of channels for conv layers. + upsample_scales: [3, 5, 5] # List of Upsampling scales. + stack_kernel_size: 3 # Kernel size of dilated conv layers in residual stack. + stacks: 4 # Number of stacks in a single residual stack module. + is_weight_norm: false # Use weight-norm or not. + +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +multiband_melgan_discriminator_params: + out_channels: 1 # Number of output channels. + scales: 3 # Number of multi-scales. + downsample_pooling: "AveragePooling1D" # Pooling type for the input downsampling. + downsample_pooling_params: # Parameters of the above pooling function. + pool_size: 4 + strides: 2 + kernel_sizes: [5, 3] # List of kernel size. + filters: 16 # Number of channels of the initial conv layer. + max_downsample_filters: 512 # Maximum number of channels of downsampling layers. + downsample_scales: [4, 4, 4] # List of downsampling scales. + nonlinear_activation: "LeakyReLU" # Nonlinear activation function. + nonlinear_activation_params: # Parameters of nonlinear activation function. + alpha: 0.2 + is_weight_norm: false # Use weight-norm or not. + +########################################################### +# STFT LOSS SETTING # +########################################################### +stft_loss_params: + fft_lengths: [1024, 2048, 512] # List of FFT size for STFT-based loss. + frame_steps: [120, 240, 50] # List of hop size for STFT-based loss + frame_lengths: [600, 1200, 240] # List of window length for STFT-based loss. + +subband_stft_loss_params: + fft_lengths: [384, 683, 171] # List of FFT size for STFT-based loss. + frame_steps: [30, 60, 10] # List of hop size for STFT-based loss + frame_lengths: [150, 300, 60] # List of window length for STFT-based loss. 
+ +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_feat_match: 10.0 # Loss balancing coefficient for feature matching loss +lambda_adv: 2.5 # Loss balancing coefficient for adversarial loss. + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 64 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +batch_max_steps: 9600 # Length of each audio in batch for training. Make sure dividable by hop_size. +batch_max_steps_valid: 48000 # Length of each audio for validation. Make sure dividable by hope_size. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000, 600000, 700000] + values: [0.001, 0.0005, 0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +discriminator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000] + values: [0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +gradient_accumulation_steps: 1 +########################################################### +# INTERVAL SETTING # +########################################################### +discriminator_train_start_steps: 200000 # steps begin training discriminator +train_max_steps: 4000000 # Number of training steps. 
+save_interval_steps: 20000 # Interval steps to save checkpoint. +eval_interval_steps: 5000 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. + +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/multiband_melgan/conf/multiband_melgan.synpaflex.v1.yaml b/TensorFlowTTS/examples/multiband_melgan/conf/multiband_melgan.synpaflex.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..686783bf49bf5a9c45e36c223ee862fa82b9ec0a --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan/conf/multiband_melgan.synpaflex.v1.yaml @@ -0,0 +1,108 @@ + +# This is the hyperparameter configuration file for Multi-Band MelGAN. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 1000k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 +hop_size: 256 # Hop size. +format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "multiband_melgan_generator" + +multiband_melgan_generator_params: + out_channels: 4 # Number of output channels (number of subbands). + kernel_size: 7 # Kernel size of initial and final conv layers. + filters: 384 # Initial number of channels for conv layers. + upsample_scales: [8, 4, 2] # List of Upsampling scales. + stack_kernel_size: 3 # Kernel size of dilated conv layers in residual stack. + stacks: 4 # Number of stacks in a single residual stack module. 
+ is_weight_norm: false # Use weight-norm or not. + +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +multiband_melgan_discriminator_params: + out_channels: 1 # Number of output channels. + scales: 3 # Number of multi-scales. + downsample_pooling: "AveragePooling1D" # Pooling type for the input downsampling. + downsample_pooling_params: # Parameters of the above pooling function. + pool_size: 4 + strides: 2 + kernel_sizes: [5, 3] # List of kernel size. + filters: 16 # Number of channels of the initial conv layer. + max_downsample_filters: 512 # Maximum number of channels of downsampling layers. + downsample_scales: [4, 4, 4] # List of downsampling scales. + nonlinear_activation: "LeakyReLU" # Nonlinear activation function. + nonlinear_activation_params: # Parameters of nonlinear activation function. + alpha: 0.2 + is_weight_norm: false # Use weight-norm or not. + +########################################################### +# STFT LOSS SETTING # +########################################################### +stft_loss_params: + fft_lengths: [1024, 2048, 512] # List of FFT size for STFT-based loss. + frame_steps: [120, 240, 50] # List of hop size for STFT-based loss + frame_lengths: [600, 1200, 240] # List of window length for STFT-based loss. + +subband_stft_loss_params: + fft_lengths: [384, 683, 171] # List of FFT size for STFT-based loss. + frame_steps: [30, 60, 10] # List of hop size for STFT-based loss + frame_lengths: [150, 300, 60] # List of window length for STFT-based loss. + +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_feat_match: 10.0 # Loss balancing coefficient for feature matching loss +lambda_adv: 2.5 # Loss balancing coefficient for adversarial loss. 
+ +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 64 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +batch_max_steps: 8192 # Length of each audio in batch for training. Make sure dividable by hop_size. +batch_max_steps_valid: 8192 # Length of each audio for validation. Make sure dividable by hope_size. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000, 600000, 700000] + values: [0.0005, 0.0005, 0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +discriminator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000] + values: [0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + + amsgrad: false + +gradient_accumulation_steps: 1 +########################################################### +# INTERVAL SETTING # +########################################################### +discriminator_train_start_steps: 200000 # steps begin training discriminator +train_max_steps: 4000000 # Number of training steps. +save_interval_steps: 20000 # Interval steps to save checkpoint. +eval_interval_steps: 5000 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. 
+ +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/multiband_melgan/conf/multiband_melgan.v1.yaml b/TensorFlowTTS/examples/multiband_melgan/conf/multiband_melgan.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..628fc395547d51387382d12eb13fe29dceab9190 --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan/conf/multiband_melgan.v1.yaml @@ -0,0 +1,107 @@ + +# This is the hyperparameter configuration file for Multi-Band MelGAN. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 1000k iters. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 +hop_size: 256 # Hop size. +format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "multiband_melgan_generator" + +multiband_melgan_generator_params: + out_channels: 4 # Number of output channels (number of subbands). + kernel_size: 7 # Kernel size of initial and final conv layers. + filters: 384 # Initial number of channels for conv layers. + upsample_scales: [8, 4, 2] # List of Upsampling scales. + stack_kernel_size: 3 # Kernel size of dilated conv layers in residual stack. + stacks: 4 # Number of stacks in a single residual stack module. + is_weight_norm: false # Use weight-norm or not. 
+ +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +multiband_melgan_discriminator_params: + out_channels: 1 # Number of output channels. + scales: 3 # Number of multi-scales. + downsample_pooling: "AveragePooling1D" # Pooling type for the input downsampling. + downsample_pooling_params: # Parameters of the above pooling function. + pool_size: 4 + strides: 2 + kernel_sizes: [5, 3] # List of kernel size. + filters: 16 # Number of channels of the initial conv layer. + max_downsample_filters: 512 # Maximum number of channels of downsampling layers. + downsample_scales: [4, 4, 4] # List of downsampling scales. + nonlinear_activation: "LeakyReLU" # Nonlinear activation function. + nonlinear_activation_params: # Parameters of nonlinear activation function. + alpha: 0.2 + is_weight_norm: false # Use weight-norm or not. + +########################################################### +# STFT LOSS SETTING # +########################################################### +stft_loss_params: + fft_lengths: [1024, 2048, 512] # List of FFT size for STFT-based loss. + frame_steps: [120, 240, 50] # List of hop size for STFT-based loss + frame_lengths: [600, 1200, 240] # List of window length for STFT-based loss. + +subband_stft_loss_params: + fft_lengths: [384, 683, 171] # List of FFT size for STFT-based loss. + frame_steps: [30, 60, 10] # List of hop size for STFT-based loss + frame_lengths: [150, 300, 60] # List of window length for STFT-based loss. + +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_feat_match: 10.0 # Loss balancing coefficient for feature matching loss +lambda_adv: 2.5 # Loss balancing coefficient for adversarial loss. 
+ +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 64 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +batch_max_steps: 8192 # Length of each audio in batch for training. Make sure dividable by hop_size. +batch_max_steps_valid: 8192 # Length of each audio for validation. Make sure dividable by hope_size. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000, 600000, 700000] + values: [0.0005, 0.0005, 0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +discriminator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000] + values: [0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +gradient_accumulation_steps: 1 +########################################################### +# INTERVAL SETTING # +########################################################### +discriminator_train_start_steps: 200000 # steps begin training discriminator +train_max_steps: 4000000 # Number of training steps. +save_interval_steps: 20000 # Interval steps to save checkpoint. +eval_interval_steps: 5000 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. 
+ +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/multiband_melgan/decode_mb_melgan.py b/TensorFlowTTS/examples/multiband_melgan/decode_mb_melgan.py new file mode 100644 index 0000000000000000000000000000000000000000..fcaff9bfa619f01fa0fe770e59d1f259f3627efd --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan/decode_mb_melgan.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Decode trained Mb-Melgan from folder.""" + +import argparse +import logging +import os + +import numpy as np +import soundfile as sf +import yaml +from tqdm import tqdm + +from tensorflow_tts.configs import MultiBandMelGANGeneratorConfig +from tensorflow_tts.datasets import MelDataset +from tensorflow_tts.models import TFPQMF, TFMelGANGenerator + + +def main(): + """Run melgan decoding from folder.""" + parser = argparse.ArgumentParser( + description="Generate Audio from melspectrogram with trained melgan " + "(See detail in example/melgan/decode_melgan.py)." 
+ ) + parser.add_argument( + "--rootdir", + default=None, + type=str, + required=True, + help="directory including ids/durations files.", + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save generated speech." + ) + parser.add_argument( + "--checkpoint", type=str, required=True, help="checkpoint file to be loaded." + ) + parser.add_argument( + "--use-norm", type=int, default=1, help="Use norm or raw melspectrogram." + ) + parser.add_argument("--batch-size", type=int, default=8, help="batch_size.") + parser.add_argument( + "--config", + default=None, + type=str, + required=True, + help="yaml format configuration file. if not explicitly provided, " + "it will be searched in the checkpoint directory. (default=None)", + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + args = parser.parse_args() + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # load config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + + if config["format"] == "npy": + mel_query = "*-fs-after-feats.npy" if "fastspeech" in args.rootdir else "*-norm-feats.npy" if args.use_norm == 1 else "*-raw-feats.npy" + mel_load_fn = np.load + else: + raise ValueError("Only npy is supported.") + + # define data-loader + dataset = MelDataset( + root_dir=args.rootdir, + mel_query=mel_query, + 
mel_load_fn=mel_load_fn, + ) + dataset = dataset.create(batch_size=args.batch_size) + + # define model and load checkpoint + mb_melgan = TFMelGANGenerator( + config=MultiBandMelGANGeneratorConfig(**config["multiband_melgan_generator_params"]), + name="multiband_melgan_generator", + ) + mb_melgan._build() + mb_melgan.load_weights(args.checkpoint) + + pqmf = TFPQMF( + config=MultiBandMelGANGeneratorConfig(**config["multiband_melgan_generator_params"]), name="pqmf" + ) + + for data in tqdm(dataset, desc="[Decoding]"): + utt_ids, mels, mel_lengths = data["utt_ids"], data["mels"], data["mel_lengths"] + + # melgan inference. + generated_subbands = mb_melgan(mels) + generated_audios = pqmf.synthesis(generated_subbands) + + # convert to numpy. + generated_audios = generated_audios.numpy() # [B, T] + + # save to outdir + for i, audio in enumerate(generated_audios): + utt_id = utt_ids[i].numpy().decode("utf-8") + sf.write( + os.path.join(args.outdir, f"{utt_id}.wav"), + audio[: mel_lengths[i].numpy() * config["hop_size"]], + config["sampling_rate"], + "PCM_16", + ) + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/multiband_melgan/fig/eval.png b/TensorFlowTTS/examples/multiband_melgan/fig/eval.png new file mode 100644 index 0000000000000000000000000000000000000000..6304601c264efaea2bb003a817951f8f7a32c3a7 Binary files /dev/null and b/TensorFlowTTS/examples/multiband_melgan/fig/eval.png differ diff --git a/TensorFlowTTS/examples/multiband_melgan/fig/train.png b/TensorFlowTTS/examples/multiband_melgan/fig/train.png new file mode 100644 index 0000000000000000000000000000000000000000..95db83d66c56f975f994c3ad22f598c94b2a87a1 Binary files /dev/null and b/TensorFlowTTS/examples/multiband_melgan/fig/train.png differ diff --git a/TensorFlowTTS/examples/multiband_melgan/train_multiband_melgan.py b/TensorFlowTTS/examples/multiband_melgan/train_multiband_melgan.py new file mode 100644 index 
0000000000000000000000000000000000000000..a162db6b3d3416c21428bcf8ec741f7d035c45db --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan/train_multiband_melgan.py @@ -0,0 +1,523 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Train Multi-Band MelGAN.""" + +import tensorflow as tf + +physical_devices = tf.config.list_physical_devices("GPU") +for i in range(len(physical_devices)): + tf.config.experimental.set_memory_growth(physical_devices[i], True) + +import sys + +sys.path.append(".") + +import argparse +import logging +import os + +import numpy as np +import soundfile as sf +import yaml +from tensorflow.keras.mixed_precision import experimental as mixed_precision + +import tensorflow_tts +from examples.melgan.audio_mel_dataset import AudioMelDataset +from examples.melgan.train_melgan import MelganTrainer, collater +from tensorflow_tts.configs import ( + MultiBandMelGANDiscriminatorConfig, + MultiBandMelGANGeneratorConfig, +) +from tensorflow_tts.losses import TFMultiResolutionSTFT +from tensorflow_tts.models import ( + TFPQMF, + TFMelGANGenerator, + TFMelGANMultiScaleDiscriminator, +) +from tensorflow_tts.utils import calculate_2d_loss, calculate_3d_loss, return_strategy + + +class MultiBandMelganTrainer(MelganTrainer): + """Multi-Band MelGAN Trainer class based on MelganTrainer.""" + + def __init__( + self, + config, + strategy, + steps=0, + epochs=0, + 
is_generator_mixed_precision=False, + is_discriminator_mixed_precision=False, + ): + """Initialize trainer. + + Args: + steps (int): Initial global steps. + epochs (int): Initial global epochs. + config (dict): Config dict loaded from yaml format configuration file. + is_generator_mixed_precision (bool): Use mixed precision for generator or not. + is_discriminator_mixed_precision (bool): Use mixed precision for discriminator or not. + + """ + super(MultiBandMelganTrainer, self).__init__( + config=config, + steps=steps, + epochs=epochs, + strategy=strategy, + is_generator_mixed_precision=is_generator_mixed_precision, + is_discriminator_mixed_precision=is_discriminator_mixed_precision, + ) + + # define metrics to aggregates data and use tf.summary logs them + self.list_metrics_name = [ + "adversarial_loss", + "subband_spectral_convergence_loss", + "subband_log_magnitude_loss", + "fullband_spectral_convergence_loss", + "fullband_log_magnitude_loss", + "gen_loss", + "real_loss", + "fake_loss", + "dis_loss", + ] + + self.init_train_eval_metrics(self.list_metrics_name) + self.reset_states_train() + self.reset_states_eval() + + def compile(self, gen_model, dis_model, gen_optimizer, dis_optimizer, pqmf): + super().compile(gen_model, dis_model, gen_optimizer, dis_optimizer) + # define loss + self.sub_band_stft_loss = TFMultiResolutionSTFT( + **self.config["subband_stft_loss_params"] + ) + self.full_band_stft_loss = TFMultiResolutionSTFT( + **self.config["stft_loss_params"] + ) + + # define pqmf module + self.pqmf = pqmf + + def compute_per_example_generator_losses(self, batch, outputs): + """Compute per example generator losses and return dict_metrics_losses + Note that all element of the loss MUST has a shape [batch_size] and + the keys of dict_metrics_losses MUST be in self.list_metrics_name. 
+ + Args: + batch: dictionary batch input return from dataloader + outputs: outputs of the model + + Returns: + per_example_losses: per example losses for each GPU, shape [B] + dict_metrics_losses: dictionary loss. + """ + dict_metrics_losses = {} + per_example_losses = 0.0 + + audios = batch["audios"] + y_mb_hat = outputs + y_hat = self.pqmf.synthesis(y_mb_hat) + + y_mb = self.pqmf.analysis(tf.expand_dims(audios, -1)) + y_mb = tf.transpose(y_mb, (0, 2, 1)) # [B, subbands, T//subbands] + y_mb = tf.reshape(y_mb, (-1, tf.shape(y_mb)[-1])) # [B * subbands, T'] + + y_mb_hat = tf.transpose(y_mb_hat, (0, 2, 1)) # [B, subbands, T//subbands] + y_mb_hat = tf.reshape( + y_mb_hat, (-1, tf.shape(y_mb_hat)[-1]) + ) # [B * subbands, T'] + + # calculate sub/full band spectral_convergence and log mag loss. + sub_sc_loss, sub_mag_loss = calculate_2d_loss( + y_mb, y_mb_hat, self.sub_band_stft_loss + ) + sub_sc_loss = tf.reduce_mean( + tf.reshape(sub_sc_loss, [-1, self.pqmf.subbands]), -1 + ) + sub_mag_loss = tf.reduce_mean( + tf.reshape(sub_mag_loss, [-1, self.pqmf.subbands]), -1 + ) + full_sc_loss, full_mag_loss = calculate_2d_loss( + audios, tf.squeeze(y_hat, -1), self.full_band_stft_loss + ) + + # define generator loss + gen_loss = 0.5 * (sub_sc_loss + sub_mag_loss) + 0.5 * ( + full_sc_loss + full_mag_loss + ) + + if self.steps >= self.config["discriminator_train_start_steps"]: + p_hat = self._discriminator(y_hat) + p = self._discriminator(tf.expand_dims(audios, 2)) + adv_loss = 0.0 + for i in range(len(p_hat)): + adv_loss += calculate_3d_loss( + tf.ones_like(p_hat[i][-1]), p_hat[i][-1], loss_fn=self.mse_loss + ) + adv_loss /= i + 1 + gen_loss += self.config["lambda_adv"] * adv_loss + + dict_metrics_losses.update({"adversarial_loss": adv_loss},) + + dict_metrics_losses.update({"gen_loss": gen_loss}) + dict_metrics_losses.update({"subband_spectral_convergence_loss": sub_sc_loss}) + dict_metrics_losses.update({"subband_log_magnitude_loss": sub_mag_loss}) + 
dict_metrics_losses.update({"fullband_spectral_convergence_loss": full_sc_loss}) + dict_metrics_losses.update({"fullband_log_magnitude_loss": full_mag_loss}) + + per_example_losses = gen_loss + return per_example_losses, dict_metrics_losses + + def compute_per_example_discriminator_losses(self, batch, gen_outputs): + """Compute per example discriminator losses and return dict_metrics_losses + Note that all element of the loss MUST has a shape [batch_size] and + the keys of dict_metrics_losses MUST be in self.list_metrics_name. + + Args: + batch: dictionary batch input return from dataloader + outputs: outputs of the model + + Returns: + per_example_losses: per example losses for each GPU, shape [B] + dict_metrics_losses: dictionary loss. + """ + y_mb_hat = gen_outputs + y_hat = self.pqmf.synthesis(y_mb_hat) + ( + per_example_losses, + dict_metrics_losses, + ) = super().compute_per_example_discriminator_losses(batch, y_hat) + return per_example_losses, dict_metrics_losses + + def generate_and_save_intermediate_result(self, batch): + """Generate and save intermediate result.""" + import matplotlib.pyplot as plt + + y_mb_batch_ = self.one_step_predict(batch) # [B, T // subbands, subbands] + y_batch = batch["audios"] + utt_ids = batch["utt_ids"] + + # convert to tensor. + # here we just take a sample at first replica. 
+ try: + y_mb_batch_ = y_mb_batch_.values[0].numpy() + y_batch = y_batch.values[0].numpy() + utt_ids = utt_ids.values[0].numpy() + except Exception: + y_mb_batch_ = y_mb_batch_.numpy() + y_batch = y_batch.numpy() + utt_ids = utt_ids.numpy() + + y_batch_ = self.pqmf.synthesis(y_mb_batch_).numpy() # [B, T, 1] + + # check directory + dirname = os.path.join(self.config["outdir"], f"predictions/{self.steps}steps") + if not os.path.exists(dirname): + os.makedirs(dirname) + + for idx, (y, y_) in enumerate(zip(y_batch, y_batch_), 0): + # convert to ndarray + y, y_ = tf.reshape(y, [-1]).numpy(), tf.reshape(y_, [-1]).numpy() + + # plit figure and save it + utt_id = utt_ids[idx] + figname = os.path.join(dirname, f"{utt_id}.png") + plt.subplot(2, 1, 1) + plt.plot(y) + plt.title("groundtruth speech") + plt.subplot(2, 1, 2) + plt.plot(y_) + plt.title(f"generated speech @ {self.steps} steps") + plt.tight_layout() + plt.savefig(figname) + plt.close() + + # save as wavefile + y = np.clip(y, -1, 1) + y_ = np.clip(y_, -1, 1) + sf.write( + figname.replace(".png", "_ref.wav"), + y, + self.config["sampling_rate"], + "PCM_16", + ) + sf.write( + figname.replace(".png", "_gen.wav"), + y_, + self.config["sampling_rate"], + "PCM_16", + ) + + +def main(): + """Run training process.""" + parser = argparse.ArgumentParser( + description="Train MultiBand MelGAN (See detail in examples/multiband_melgan/train_multiband_melgan.py)" + ) + parser.add_argument( + "--train-dir", + default=None, + type=str, + help="directory including training data. ", + ) + parser.add_argument( + "--dev-dir", + default=None, + type=str, + help="directory including development data. ", + ) + parser.add_argument( + "--use-norm", default=1, type=int, help="use norm mels for training or raw." + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save checkpoints." + ) + parser.add_argument( + "--config", type=str, required=True, help="yaml format configuration file." 
+ ) + parser.add_argument( + "--resume", + default="", + type=str, + nargs="?", + help='checkpoint file path to resume training. (default="")', + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + parser.add_argument( + "--generator_mixed_precision", + default=0, + type=int, + help="using mixed precision for generator or not.", + ) + parser.add_argument( + "--discriminator_mixed_precision", + default=0, + type=int, + help="using mixed precision for discriminator or not.", + ) + parser.add_argument( + "--pretrained", + default="", + type=str, + nargs="?", + help="path of .h5 mb-melgan generator to load weights from", + ) + args = parser.parse_args() + + # return strategy + STRATEGY = return_strategy() + + # set mixed precision config + if args.generator_mixed_precision == 1 or args.discriminator_mixed_precision == 1: + tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True}) + + args.generator_mixed_precision = bool(args.generator_mixed_precision) + args.discriminator_mixed_precision = bool(args.discriminator_mixed_precision) + + args.use_norm = bool(args.use_norm) + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # check arguments + if args.train_dir is None: + raise ValueError("Please specify --train-dir") + if args.dev_dir is None: + raise ValueError("Please specify 
either --valid-dir") + + # load and save config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + config["version"] = tensorflow_tts.__version__ + with open(os.path.join(args.outdir, "config.yml"), "w") as f: + yaml.dump(config, f, Dumper=yaml.Dumper) + for key, value in config.items(): + logging.info(f"{key} = {value}") + + # get dataset + if config["remove_short_samples"]: + mel_length_threshold = config["batch_max_steps"] // config[ + "hop_size" + ] + 2 * config["multiband_melgan_generator_params"].get("aux_context_window", 0) + else: + mel_length_threshold = None + + if config["format"] == "npy": + audio_query = "*-wave.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + audio_load_fn = np.load + mel_load_fn = np.load + else: + raise ValueError("Only npy are supported.") + + # define train/valid dataset + train_dataset = AudioMelDataset( + root_dir=args.train_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant(config["batch_max_steps"], dtype=tf.int32), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] + * STRATEGY.num_replicas_in_sync + * config["gradient_accumulation_steps"], + ) + + valid_dataset = AudioMelDataset( + root_dir=args.dev_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant( + config["batch_max_steps_valid"], dtype=tf.int32 + ), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + 
batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync, + ) + + # define trainer + trainer = MultiBandMelganTrainer( + steps=0, + epochs=0, + config=config, + strategy=STRATEGY, + is_generator_mixed_precision=args.generator_mixed_precision, + is_discriminator_mixed_precision=args.discriminator_mixed_precision, + ) + + with STRATEGY.scope(): + # define generator and discriminator + generator = TFMelGANGenerator( + MultiBandMelGANGeneratorConfig( + **config["multiband_melgan_generator_params"] + ), + name="multi_band_melgan_generator", + ) + + discriminator = TFMelGANMultiScaleDiscriminator( + MultiBandMelGANDiscriminatorConfig( + **config["multiband_melgan_discriminator_params"] + ), + name="multi_band_melgan_discriminator", + ) + + pqmf = TFPQMF( + MultiBandMelGANGeneratorConfig( + **config["multiband_melgan_generator_params"] + ), + dtype=tf.float32, + name="pqmf", + ) + + # dummy input to build model. + fake_mels = tf.random.uniform(shape=[1, 100, 80], dtype=tf.float32) + y_mb_hat = generator(fake_mels) + y_hat = pqmf.synthesis(y_mb_hat) + discriminator(y_hat) + + if len(args.pretrained) > 1: + generator.load_weights(args.pretrained) + logging.info( + f"Successfully loaded pretrained weight from {args.pretrained}." 
+ ) + + generator.summary() + discriminator.summary() + + # define optimizer + generator_lr_fn = getattr( + tf.keras.optimizers.schedules, config["generator_optimizer_params"]["lr_fn"] + )(**config["generator_optimizer_params"]["lr_params"]) + discriminator_lr_fn = getattr( + tf.keras.optimizers.schedules, + config["discriminator_optimizer_params"]["lr_fn"], + )(**config["discriminator_optimizer_params"]["lr_params"]) + + gen_optimizer = tf.keras.optimizers.Adam( + learning_rate=generator_lr_fn, + amsgrad=config["generator_optimizer_params"]["amsgrad"], + ) + dis_optimizer = tf.keras.optimizers.Adam( + learning_rate=discriminator_lr_fn, + amsgrad=config["discriminator_optimizer_params"]["amsgrad"], + ) + + trainer.compile( + gen_model=generator, + dis_model=discriminator, + gen_optimizer=gen_optimizer, + dis_optimizer=dis_optimizer, + pqmf=pqmf, + ) + + # start training + try: + trainer.fit( + train_dataset, + valid_dataset, + saved_path=os.path.join(config["outdir"], "checkpoints/"), + resume=args.resume, + ) + except KeyboardInterrupt: + trainer.save_checkpoint() + logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.") + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/multiband_melgan_hf/README.md b/TensorFlowTTS/examples/multiband_melgan_hf/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9aca74c725c8112be473f8555116df0a00c98a11 --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan_hf/README.md @@ -0,0 +1,94 @@ + +# Multi-band MelGAN: Faster Waveform Generation for High-Quality Text-to-Speech +Based on the script [`train_multiband_melgan_hf.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/multiband_melgan_hf/train_multiband_melgan_hf.py). + +## Training Multi-band MelGAN from scratch with LJSpeech dataset. +This example code show you how to train MelGAN from scratch with Tensorflow 2 based on custom training loop and tf.function. 
The data used for this example is LJSpeech Ultimate, you can download the dataset at [link](https://machineexperiments.tumblr.com/post/662408083204685824/ljspeech-ultimate). + +### Step 1: Create Tensorflow based Dataloader (tf.dataset) +Please see detail at [examples/melgan/](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/melgan#step-1-create-tensorflow-based-dataloader-tfdataset) + +### Step 2: Training from scratch +After you re-define your dataloader, pls modify an input arguments, train_dataset and valid_dataset from [`train_multiband_melgan_hf.py`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/multiband_melgan_hf/train_multiband_melgan_hf.py). Here is an example command line to training melgan-stft from scratch: + +First, you need training generator with only stft loss: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/multiband_melgan_hf/train_multiband_melgan_hf.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/multiband_melgan_hf/exp/train.multiband_melgan_hf.v1/ \ + --config ./examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1.yml \ + --use-norm 1 \ + --generator_mixed_precision 1 \ + --resume "" +``` + +Then resume and start training generator + discriminator: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/multiband_melgan_hf/train_multiband_melgan_hf.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/multiband_melgan_hf/exp/train.multiband_melgan_hf.v1/ \ + --config ./examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1.yml \ + --use-norm 1 \ + --resume ./examples/multiband_melgan_hf/exp/train.multiband_melgan_hf.v1/checkpoints/ckpt-200000 +``` + +IF you want to use MultiGPU to training you can replace `CUDA_VISIBLE_DEVICES=0` by `CUDA_VISIBLE_DEVICES=0,1,2,3` for example. You also need to tune the `batch_size` for each GPU (in config file) by yourself to maximize the performance. 
Note that MultiGPU now support for Training but not yet support for Decode. + +In case you want to resume the training progress, please following below example command line: + +```bash +--resume ./examples/multiband_melgan_hf/exp/train.multiband_melgan_hf.v1/checkpoints/ckpt-100000 +``` + +If you want to finetune a model, use `--pretrained` like this with the filename of the generator and discriminator, separated by comma. +```bash +--pretrained ptgenerator.h5,ptdiscriminator.h5 +``` +It is recommended that you first train text2mel model then extract postnets so that vocoder learns to compensate for flaws, if you do so, append `--postnets 1` to arguments + + + +**IMPORTANT NOTES**: + +- If Your Dataset is 16K, upsample_scales = [2, 4, 8] worked. +- If Your Dataset is > 16K (22K, 24K, ...), upsample_scales = [2, 4, 8] didn't worked, used [8, 4, 2] instead. +- Mixed precision make Group Convolution training slower on Discriminator, both pytorch (apex) and tensorflow also has this problems. So, **DO NOT USE** mixed precision when discriminator enable. + +### Step 3: Decode audio from folder mel-spectrogram +To running inference on folder mel-spectrogram (eg valid folder), run below command line: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/multiband_melgan_hf/decode_mb_melgan.py \ + --rootdir ./dump/valid/ \ + --outdir ./prediction/multiband_melgan_hf.v1/ \ + --checkpoint ./examples/multiband_melgan_hf/exp/train.multiband_melgan_hf.v1/checkpoints/generator-920000.h5 \ + --config ./examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1.yml \ + --batch-size 32 \ + --use-norm 1 +``` + +## Finetune MelGAN STFT with ljspeech pretrained on other languages +Just load pretrained model and training from scratch with other languages. **DO NOT FORGET** re-preprocessing on your dataset if needed. A hop_size should be 512 if you want to use our pretrained. 
+ +## Learning Curves +Here is a learning curves of melgan based on this config [`multiband_melgan_hf.v1.yaml`](https://github.com/dathudeptrai/TensorflowTTS/tree/master/examples/multiband_melgan_hf/conf/multiband_melgan_hf.v1.yaml) + + + + + +## Pretrained Models and Audio samples +| Model | Conf | Lang | Fs [Hz] | Mel range [Hz] | FFT / Hop / Win [pt] | # iters | Notes | +| :------ | :---: | :---: | :----: | :--------: | :---------------: | :-----: | :-----: | +| [multiband_melgan_hf.lju.v1](https://drive.google.com/drive/folders/1tOMzik_Nr4eY63gooKYSmNTJyXC6Pp55?usp=sharing) | [link](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1.yml) | EN | 44.1k | 20-11025 | 2048 / 512 / 2048 | 920K | -| + + +## Reference + +1. https://github.com/kan-bayashi/ParallelWaveGAN +2. [Parallel WaveGAN: A fast waveform generation model based on generative adversarial networks with multi-resolution spectrogram](https://arxiv.org/abs/1910.11480) +3. [Multi-band MelGAN: Faster Waveform Generation for High-Quality Text-to-Speech](https://arxiv.org/abs/2005.05106) diff --git a/TensorFlowTTS/examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1.yml b/TensorFlowTTS/examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1.yml new file mode 100644 index 0000000000000000000000000000000000000000..574f99f113e427bddd12c333691999092049f1f2 --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1.yml @@ -0,0 +1,121 @@ + +# This is the hyperparameter configuration file for Multi-Band MelGAN + MPD +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 1000k iters. 
+ +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 44100 +hop_size: 512 # Hop size. +format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "multiband_melgan_generator" + +multiband_melgan_generator_params: + out_channels: 4 # Number of output channels (number of subbands). + kernel_size: 7 # Kernel size of initial and final conv layers. + filters: 384 # Initial number of channels for conv layers. + upsample_scales: [4, 4, 8] # List of Upsampling scales. + stack_kernel_size: 3 # Kernel size of dilated conv layers in residual stack. + stacks: 4 # Number of stacks in a single residual stack module. + is_weight_norm: false # Use weight-norm or not. + +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +multiband_melgan_discriminator_params: + out_channels: 1 # Number of output channels. + scales: 3 # Number of multi-scales. + downsample_pooling: "AveragePooling1D" # Pooling type for the input downsampling. + downsample_pooling_params: # Parameters of the above pooling function. + pool_size: 4 + strides: 2 + kernel_sizes: [5, 3] # List of kernel size. + filters: 16 # Number of channels of the initial conv layer. + max_downsample_filters: 512 # Maximum number of channels of downsampling layers. + downsample_scales: [4, 4, 4] # List of downsampling scales. + nonlinear_activation: "LeakyReLU" # Nonlinear activation function. + nonlinear_activation_params: # Parameters of nonlinear activation function. + alpha: 0.2 + is_weight_norm: false # Use weight-norm or not. + +hifigan_discriminator_params: + out_channels: 1 # Number of output channels (number of subbands). 
+ period_scales: [3, 5, 7, 11, 17, 23, 37] # List of period scales. + n_layers: 5 # Number of layer of each period discriminator. + kernel_size: 5 # Kernel size. + strides: 3 # Strides + filters: 8 # In Conv filters of each period discriminator + filter_scales: 4 # Filter scales. + max_filters: 512 # maximum filters of period discriminator's conv. + is_weight_norm: false # Use weight-norm or not. + + +########################################################### +# STFT LOSS SETTING # +########################################################### +stft_loss_params: + fft_lengths: [1024, 2048, 512] # List of FFT size for STFT-based loss. + frame_steps: [120, 240, 50] # List of hop size for STFT-based loss + frame_lengths: [600, 1200, 240] # List of window length for STFT-based loss. + +subband_stft_loss_params: + fft_lengths: [384, 683, 171] # List of FFT size for STFT-based loss. + frame_steps: [30, 60, 10] # List of hop size for STFT-based loss + frame_lengths: [150, 300, 60] # List of window length for STFT-based loss. + +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_feat_match: 10.0 # Loss balancing coefficient for feature matching loss +lambda_adv: 2.5 # Loss balancing coefficient for adversarial loss. + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 64 # Batch size. +batch_max_steps: 8192 # Length of each audio in batch for training. Make sure dividable by hop_size. +batch_max_steps_valid: 81920 # Length of each audio for validation. Make sure dividable by hope_size. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. 
+ +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000, 600000, 700000] + values: [0.0005, 0.0005, 0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + amsgrad: false + +discriminator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000, 300000, 400000, 500000] + values: [0.00025, 0.000125, 0.0000625, 0.00003125, 0.000015625, 0.000001] + + amsgrad: false + +########################################################### +# INTERVAL SETTING # +########################################################### +discriminator_train_start_steps: 200000 # steps begin training discriminator +train_max_steps: 1500000 # Number of training steps. +save_interval_steps: 20000 # Interval steps to save checkpoint. +eval_interval_steps: 5000 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. + + +gradient_accumulation_steps: 1 +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1ft.yml b/TensorFlowTTS/examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1ft.yml new file mode 100644 index 0000000000000000000000000000000000000000..9a50e573a2dd62faea3a46d8034da1dc92c2951f --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan_hf/conf/multiband_melgan_hf.lju.v1ft.yml @@ -0,0 +1,118 @@ + +#This is the hyperparameter configuration file for finetuning MB-MelGAN + MPD. 
It is intended to be used for finetuning generator and discriminator +#Trains fast, adapts to new voice within 30k steps, although it is beneficial to keep training beyond that for about 200k, depending on dataset size. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 44100 +hop_size: 512 # Hop size. +format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "multiband_melgan_generator" + +multiband_melgan_generator_params: + out_channels: 4 # Number of output channels (number of subbands). + kernel_size: 7 # Kernel size of initial and final conv layers. + filters: 384 # Initial number of channels for conv layers. + upsample_scales: [4, 4, 8] # List of Upsampling scales. + stack_kernel_size: 3 # Kernel size of dilated conv layers in residual stack. + stacks: 4 # Number of stacks in a single residual stack module. + is_weight_norm: false # Use weight-norm or not. + +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +multiband_melgan_discriminator_params: + out_channels: 1 # Number of output channels. + scales: 3 # Number of multi-scales. + downsample_pooling: "AveragePooling1D" # Pooling type for the input downsampling. + downsample_pooling_params: # Parameters of the above pooling function. + pool_size: 4 + strides: 2 + kernel_sizes: [5, 3] # List of kernel size. + filters: 16 # Number of channels of the initial conv layer. + max_downsample_filters: 512 # Maximum number of channels of downsampling layers. + downsample_scales: [4, 4, 4] # List of downsampling scales. + nonlinear_activation: "LeakyReLU" # Nonlinear activation function. 
+ nonlinear_activation_params: # Parameters of nonlinear activation function. + alpha: 0.2 + is_weight_norm: false # Use weight-norm or not. + +hifigan_discriminator_params: + out_channels: 1 # Number of output channels (number of subbands). + period_scales: [3, 5, 7, 11, 17, 23, 37] # List of period scales. + n_layers: 5 # Number of layer of each period discriminator. + kernel_size: 5 # Kernel size. + strides: 3 # Strides + filters: 8 # In Conv filters of each period discriminator + filter_scales: 4 # Filter scales. + max_filters: 512 # maximum filters of period discriminator's conv. + is_weight_norm: false # Use weight-norm or not. + + +########################################################### +# STFT LOSS SETTING # +########################################################### +stft_loss_params: + fft_lengths: [1024, 2048, 512] # List of FFT size for STFT-based loss. + frame_steps: [120, 240, 50] # List of hop size for STFT-based loss + frame_lengths: [600, 1200, 240] # List of window length for STFT-based loss. + +subband_stft_loss_params: + fft_lengths: [384, 683, 171] # List of FFT size for STFT-based loss. + frame_steps: [30, 60, 10] # List of hop size for STFT-based loss + frame_lengths: [150, 300, 60] # List of window length for STFT-based loss. + +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_feat_match: 10.0 # Loss balancing coefficient for feature matching loss +lambda_adv: 2.5 # Loss balancing coefficient for adversarial loss. + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 64 # Batch size. +batch_max_steps: 8192 # Length of each audio in batch for training. Make sure dividable by hop_size. +batch_max_steps_valid: 81920 # Length of each audio for validation. Make sure dividable by hope_size. 
+remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000] + values: [0.00003125, 0.00003125, 0.00003125] + amsgrad: false + +discriminator_optimizer_params: + lr_fn: "PiecewiseConstantDecay" + lr_params: + boundaries: [100000, 200000] + values: [0.000015625, 0.000015625, 0.000015625] + amsgrad: false + +########################################################### +# INTERVAL SETTING # +########################################################### +discriminator_train_start_steps: 0 # steps begin training discriminator +train_max_steps: 300000 # Number of training steps. +save_interval_steps: 3500 # Interval steps to save checkpoint. +eval_interval_steps: 2000 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. + + +gradient_accumulation_steps: 1 +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/multiband_melgan_hf/decode_mb_melgan.py b/TensorFlowTTS/examples/multiband_melgan_hf/decode_mb_melgan.py new file mode 100644 index 0000000000000000000000000000000000000000..fcaff9bfa619f01fa0fe770e59d1f259f3627efd --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan_hf/decode_mb_melgan.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Decode trained Mb-Melgan from folder.""" + +import argparse +import logging +import os + +import numpy as np +import soundfile as sf +import yaml +from tqdm import tqdm + +from tensorflow_tts.configs import MultiBandMelGANGeneratorConfig +from tensorflow_tts.datasets import MelDataset +from tensorflow_tts.models import TFPQMF, TFMelGANGenerator + + +def main(): + """Run melgan decoding from folder.""" + parser = argparse.ArgumentParser( + description="Generate Audio from melspectrogram with trained melgan " + "(See detail in example/melgan/decode_melgan.py)." + ) + parser.add_argument( + "--rootdir", + default=None, + type=str, + required=True, + help="directory including ids/durations files.", + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save generated speech." + ) + parser.add_argument( + "--checkpoint", type=str, required=True, help="checkpoint file to be loaded." 
+ ) + parser.add_argument( + "--use-norm", type=int, default=1, help="Use norm or raw melspectrogram." + ) + parser.add_argument("--batch-size", type=int, default=8, help="batch_size.") + parser.add_argument( + "--config", + default=None, + type=str, + required=True, + help="yaml format configuration file. if not explicitly provided, " + "it will be searched in the checkpoint directory. (default=None)", + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + args = parser.parse_args() + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # load config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + + if config["format"] == "npy": + mel_query = "*-fs-after-feats.npy" if "fastspeech" in args.rootdir else "*-norm-feats.npy" if args.use_norm == 1 else "*-raw-feats.npy" + mel_load_fn = np.load + else: + raise ValueError("Only npy is supported.") + + # define data-loader + dataset = MelDataset( + root_dir=args.rootdir, + mel_query=mel_query, + mel_load_fn=mel_load_fn, + ) + dataset = dataset.create(batch_size=args.batch_size) + + # define model and load checkpoint + mb_melgan = TFMelGANGenerator( + config=MultiBandMelGANGeneratorConfig(**config["multiband_melgan_generator_params"]), + name="multiband_melgan_generator", + ) + mb_melgan._build() + mb_melgan.load_weights(args.checkpoint) + + pqmf = TFPQMF( + 
config=MultiBandMelGANGeneratorConfig(**config["multiband_melgan_generator_params"]), name="pqmf" + ) + + for data in tqdm(dataset, desc="[Decoding]"): + utt_ids, mels, mel_lengths = data["utt_ids"], data["mels"], data["mel_lengths"] + + # melgan inference. + generated_subbands = mb_melgan(mels) + generated_audios = pqmf.synthesis(generated_subbands) + + # convert to numpy. + generated_audios = generated_audios.numpy() # [B, T] + + # save to outdir + for i, audio in enumerate(generated_audios): + utt_id = utt_ids[i].numpy().decode("utf-8") + sf.write( + os.path.join(args.outdir, f"{utt_id}.wav"), + audio[: mel_lengths[i].numpy() * config["hop_size"]], + config["sampling_rate"], + "PCM_16", + ) + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/multiband_melgan_hf/fig/eval.png b/TensorFlowTTS/examples/multiband_melgan_hf/fig/eval.png new file mode 100644 index 0000000000000000000000000000000000000000..6304601c264efaea2bb003a817951f8f7a32c3a7 Binary files /dev/null and b/TensorFlowTTS/examples/multiband_melgan_hf/fig/eval.png differ diff --git a/TensorFlowTTS/examples/multiband_melgan_hf/fig/train.png b/TensorFlowTTS/examples/multiband_melgan_hf/fig/train.png new file mode 100644 index 0000000000000000000000000000000000000000..95db83d66c56f975f994c3ad22f598c94b2a87a1 Binary files /dev/null and b/TensorFlowTTS/examples/multiband_melgan_hf/fig/train.png differ diff --git a/TensorFlowTTS/examples/multiband_melgan_hf/train_multiband_melgan_hf.py b/TensorFlowTTS/examples/multiband_melgan_hf/train_multiband_melgan_hf.py new file mode 100644 index 0000000000000000000000000000000000000000..0f9ff61cf5060fbc2ca48ceca4c809fd195e02f1 --- /dev/null +++ b/TensorFlowTTS/examples/multiband_melgan_hf/train_multiband_melgan_hf.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Train Multi-Band MelGAN + MPD.""" + +import tensorflow as tf + +physical_devices = tf.config.list_physical_devices("GPU") +for i in range(len(physical_devices)): + tf.config.experimental.set_memory_growth(physical_devices[i], True) + +import sys + +sys.path.append(".") + +import argparse +import logging +import os + +import numpy as np +import soundfile as sf +import yaml +from tensorflow.keras.mixed_precision import experimental as mixed_precision + +import tensorflow_tts +from examples.melgan.audio_mel_dataset import AudioMelDataset +from examples.hifigan.train_hifigan import TFHifiGANDiscriminator +from examples.melgan.train_melgan import MelganTrainer, collater +from tensorflow_tts.configs import ( + MultiBandMelGANDiscriminatorConfig, + MultiBandMelGANGeneratorConfig, + HifiGANDiscriminatorConfig, +) +from tensorflow_tts.losses import TFMultiResolutionSTFT +from tensorflow_tts.models import ( + TFPQMF, + TFMelGANGenerator, + TFMelGANMultiScaleDiscriminator, + TFHifiGANMultiPeriodDiscriminator, +) +from tensorflow_tts.utils import calculate_2d_loss, calculate_3d_loss, return_strategy + + +class MultiBandMelganTrainer(MelganTrainer): + """Multi-Band MelGAN Trainer class based on MelganTrainer.""" + + def __init__( + self, + config, + strategy, + steps=0, + epochs=0, + is_generator_mixed_precision=False, + is_discriminator_mixed_precision=False, + ): + """Initialize trainer. + + Args: + steps (int): Initial global steps. + epochs (int): Initial global epochs. + config (dict): Config dict loaded from yaml format configuration file. 
+ is_generator_mixed_precision (bool): Use mixed precision for generator or not. + is_discriminator_mixed_precision (bool): Use mixed precision for discriminator or not. + + """ + super(MultiBandMelganTrainer, self).__init__( + config=config, + steps=steps, + epochs=epochs, + strategy=strategy, + is_generator_mixed_precision=is_generator_mixed_precision, + is_discriminator_mixed_precision=is_discriminator_mixed_precision, + ) + + # define metrics to aggregates data and use tf.summary logs them + self.list_metrics_name = [ + "adversarial_loss", + "subband_spectral_convergence_loss", + "subband_log_magnitude_loss", + "fullband_spectral_convergence_loss", + "fullband_log_magnitude_loss", + "gen_loss", + "real_loss", + "fake_loss", + "dis_loss", + ] + + self.init_train_eval_metrics(self.list_metrics_name) + self.reset_states_train() + self.reset_states_eval() + + def compile(self, gen_model, dis_model, gen_optimizer, dis_optimizer, pqmf): + super().compile(gen_model, dis_model, gen_optimizer, dis_optimizer) + # define loss + self.sub_band_stft_loss = TFMultiResolutionSTFT( + **self.config["subband_stft_loss_params"] + ) + self.full_band_stft_loss = TFMultiResolutionSTFT( + **self.config["stft_loss_params"] + ) + + # define pqmf module + self.pqmf = pqmf + + def compute_per_example_generator_losses(self, batch, outputs): + """Compute per example generator losses and return dict_metrics_losses + Note that all element of the loss MUST has a shape [batch_size] and + the keys of dict_metrics_losses MUST be in self.list_metrics_name. + + Args: + batch: dictionary batch input return from dataloader + outputs: outputs of the model + + Returns: + per_example_losses: per example losses for each GPU, shape [B] + dict_metrics_losses: dictionary loss. 
+ """ + dict_metrics_losses = {} + per_example_losses = 0.0 + + audios = batch["audios"] + y_mb_hat = outputs + y_hat = self.pqmf.synthesis(y_mb_hat) + + y_mb = self.pqmf.analysis(tf.expand_dims(audios, -1)) + y_mb = tf.transpose(y_mb, (0, 2, 1)) # [B, subbands, T//subbands] + y_mb = tf.reshape(y_mb, (-1, tf.shape(y_mb)[-1])) # [B * subbands, T'] + + y_mb_hat = tf.transpose(y_mb_hat, (0, 2, 1)) # [B, subbands, T//subbands] + y_mb_hat = tf.reshape( + y_mb_hat, (-1, tf.shape(y_mb_hat)[-1]) + ) # [B * subbands, T'] + + # calculate sub/full band spectral_convergence and log mag loss. + sub_sc_loss, sub_mag_loss = calculate_2d_loss( + y_mb, y_mb_hat, self.sub_band_stft_loss + ) + sub_sc_loss = tf.reduce_mean( + tf.reshape(sub_sc_loss, [-1, self.pqmf.subbands]), -1 + ) + sub_mag_loss = tf.reduce_mean( + tf.reshape(sub_mag_loss, [-1, self.pqmf.subbands]), -1 + ) + full_sc_loss, full_mag_loss = calculate_2d_loss( + audios, tf.squeeze(y_hat, -1), self.full_band_stft_loss + ) + + # define generator loss + gen_loss = 0.5 * (sub_sc_loss + sub_mag_loss) + 0.5 * ( + full_sc_loss + full_mag_loss + ) + + if self.steps >= self.config["discriminator_train_start_steps"]: + p_hat = self._discriminator(y_hat) + p = self._discriminator(tf.expand_dims(audios, 2)) + adv_loss = 0.0 + for i in range(len(p_hat)): + adv_loss += calculate_3d_loss( + tf.ones_like(p_hat[i][-1]), p_hat[i][-1], loss_fn=self.mse_loss + ) + adv_loss /= i + 1 + gen_loss += self.config["lambda_adv"] * adv_loss + + dict_metrics_losses.update( + {"adversarial_loss": adv_loss}, + ) + + dict_metrics_losses.update({"gen_loss": gen_loss}) + dict_metrics_losses.update({"subband_spectral_convergence_loss": sub_sc_loss}) + dict_metrics_losses.update({"subband_log_magnitude_loss": sub_mag_loss}) + dict_metrics_losses.update({"fullband_spectral_convergence_loss": full_sc_loss}) + dict_metrics_losses.update({"fullband_log_magnitude_loss": full_mag_loss}) + + per_example_losses = gen_loss + return per_example_losses, 
dict_metrics_losses + + def compute_per_example_discriminator_losses(self, batch, gen_outputs): + """Compute per example discriminator losses and return dict_metrics_losses + Note that all element of the loss MUST has a shape [batch_size] and + the keys of dict_metrics_losses MUST be in self.list_metrics_name. + + Args: + batch: dictionary batch input return from dataloader + outputs: outputs of the model + + Returns: + per_example_losses: per example losses for each GPU, shape [B] + dict_metrics_losses: dictionary loss. + """ + y_mb_hat = gen_outputs + y_hat = self.pqmf.synthesis(y_mb_hat) + ( + per_example_losses, + dict_metrics_losses, + ) = super().compute_per_example_discriminator_losses(batch, y_hat) + return per_example_losses, dict_metrics_losses + + def generate_and_save_intermediate_result(self, batch): + """Generate and save intermediate result.""" + import matplotlib.pyplot as plt + + y_mb_batch_ = self.one_step_predict(batch) # [B, T // subbands, subbands] + y_batch = batch["audios"] + utt_ids = batch["utt_ids"] + + # convert to tensor. + # here we just take a sample at first replica. 
+ try: + y_mb_batch_ = y_mb_batch_.values[0].numpy() + y_batch = y_batch.values[0].numpy() + utt_ids = utt_ids.values[0].numpy() + except Exception: + y_mb_batch_ = y_mb_batch_.numpy() + y_batch = y_batch.numpy() + utt_ids = utt_ids.numpy() + + y_batch_ = self.pqmf.synthesis(y_mb_batch_).numpy() # [B, T, 1] + + # check directory + dirname = os.path.join(self.config["outdir"], f"predictions/{self.steps}steps") + if not os.path.exists(dirname): + os.makedirs(dirname) + + for idx, (y, y_) in enumerate(zip(y_batch, y_batch_), 0): + # convert to ndarray + y, y_ = tf.reshape(y, [-1]).numpy(), tf.reshape(y_, [-1]).numpy() + + # plit figure and save it + utt_id = utt_ids[idx] + figname = os.path.join(dirname, f"{utt_id}.png") + plt.subplot(2, 1, 1) + plt.plot(y) + plt.title("groundtruth speech") + plt.subplot(2, 1, 2) + plt.plot(y_) + plt.title(f"generated speech @ {self.steps} steps") + plt.tight_layout() + plt.savefig(figname) + plt.close() + + # save as wavefile + y = np.clip(y, -1, 1) + y_ = np.clip(y_, -1, 1) + sf.write( + figname.replace(".png", "_ref.wav"), + y, + self.config["sampling_rate"], + "PCM_16", + ) + sf.write( + figname.replace(".png", "_gen.wav"), + y_, + self.config["sampling_rate"], + "PCM_16", + ) + + +def main(): + """Run training process.""" + parser = argparse.ArgumentParser( + description="Train MultiBand MelGAN (See detail in examples/multiband_melgan/train_multiband_melgan.py)" + ) + parser.add_argument( + "--train-dir", + default=None, + type=str, + help="directory including training data. ", + ) + parser.add_argument( + "--dev-dir", + default=None, + type=str, + help="directory including development data. ", + ) + parser.add_argument( + "--use-norm", default=1, type=int, help="use norm mels for training or raw." + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save checkpoints." + ) + parser.add_argument( + "--config", type=str, required=True, help="yaml format configuration file." 
+ ) + parser.add_argument( + "--resume", + default="", + type=str, + nargs="?", + help='checkpoint file path to resume training. (default="")', + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + parser.add_argument( + "--generator_mixed_precision", + default=0, + type=int, + help="using mixed precision for generator or not.", + ) + parser.add_argument( + "--discriminator_mixed_precision", + default=0, + type=int, + help="using mixed precision for discriminator or not.", + ) + parser.add_argument( + "--postnets", + default=0, + type=int, + help="using postnets instead of gt mels or not.", + ) + parser.add_argument( + "--pretrained", + default="", + type=str, + nargs="?", + help="path of .h5 mb-melgan generator and discriminator to load weights from. must be comma delineated, like ptgen.h5,ptdisc.h5", + ) + args = parser.parse_args() + + # return strategy + STRATEGY = return_strategy() + + # set mixed precision config + if args.generator_mixed_precision == 1 or args.discriminator_mixed_precision == 1: + tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True}) + + args.generator_mixed_precision = bool(args.generator_mixed_precision) + args.discriminator_mixed_precision = bool(args.discriminator_mixed_precision) + + args.use_norm = bool(args.use_norm) + args.postnets = bool(args.postnets) + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + 
if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # check arguments + if args.train_dir is None: + raise ValueError("Please specify --train-dir") + if args.dev_dir is None: + raise ValueError("Please specify either --valid-dir") + + # load and save config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + config["version"] = tensorflow_tts.__version__ + with open(os.path.join(args.outdir, "config.yml"), "w") as f: + yaml.dump(config, f, Dumper=yaml.Dumper) + for key, value in config.items(): + logging.info(f"{key} = {value}") + + # get dataset + if config["remove_short_samples"]: + mel_length_threshold = config["batch_max_steps"] // config[ + "hop_size" + ] + 2 * config["multiband_melgan_generator_params"].get("aux_context_window", 0) + else: + mel_length_threshold = None + + if config["format"] == "npy": + audio_query = "*-wave.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + audio_load_fn = np.load + mel_load_fn = np.load + else: + raise ValueError("Only npy are supported.") + + if args.postnets is True: + mel_query = "*-postnet.npy" + logging.info("Using postnets") + else: + logging.info("Using GT Mels") + + # define train/valid dataset + + train_dataset = AudioMelDataset( + root_dir=args.train_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant(config["batch_max_steps"], dtype=tf.int32), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] + * STRATEGY.num_replicas_in_sync + * config["gradient_accumulation_steps"], + ) + + valid_dataset = AudioMelDataset( + root_dir=args.dev_dir, + audio_query=audio_query, + mel_query=mel_query, + 
audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant( + config["batch_max_steps_valid"], dtype=tf.int32 + ), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync, + ) + + # define trainer + trainer = MultiBandMelganTrainer( + steps=0, + epochs=0, + config=config, + strategy=STRATEGY, + is_generator_mixed_precision=args.generator_mixed_precision, + is_discriminator_mixed_precision=args.discriminator_mixed_precision, + ) + + with STRATEGY.scope(): + # define generator and discriminator + generator = TFMelGANGenerator( + MultiBandMelGANGeneratorConfig( + **config["multiband_melgan_generator_params"] + ), + name="multi_band_melgan_generator", + ) + + multiscale_discriminator = TFMelGANMultiScaleDiscriminator( + MultiBandMelGANDiscriminatorConfig( + **config["multiband_melgan_discriminator_params"] + ), + name="multi_band_melgan_discriminator", + ) + multiperiod_discriminator = TFHifiGANMultiPeriodDiscriminator( + HifiGANDiscriminatorConfig(**config["hifigan_discriminator_params"]), + name="hifigan_multiperiod_discriminator", + ) + + pqmf = TFPQMF( + MultiBandMelGANGeneratorConfig( + **config["multiband_melgan_generator_params"] + ), + dtype=tf.float32, + name="pqmf", + ) + discriminator = TFHifiGANDiscriminator( + multiperiod_discriminator, + multiscale_discriminator, + name="hifigan_discriminator", + ) + + # dummy input to build model. 
+ fake_mels = tf.random.uniform(shape=[1, 100, 80], dtype=tf.float32) + y_mb_hat = generator(fake_mels) + y_hat = pqmf.synthesis(y_mb_hat) + discriminator(y_hat) + + if len(args.pretrained) > 1: + pt_splits = args.pretrained.split(",") + generator.load_weights(pt_splits[0]) + discriminator.load_weights(pt_splits[1]) + logging.info( + f"Successfully loaded pretrained weight from {args.pretrained}." + ) + + generator.summary() + discriminator.summary() + + # define optimizer + generator_lr_fn = getattr( + tf.keras.optimizers.schedules, config["generator_optimizer_params"]["lr_fn"] + )(**config["generator_optimizer_params"]["lr_params"]) + discriminator_lr_fn = getattr( + tf.keras.optimizers.schedules, + config["discriminator_optimizer_params"]["lr_fn"], + )(**config["discriminator_optimizer_params"]["lr_params"]) + + gen_optimizer = tf.keras.optimizers.Adam( + learning_rate=generator_lr_fn, + amsgrad=config["generator_optimizer_params"]["amsgrad"], + ) + dis_optimizer = tf.keras.optimizers.Adam( + learning_rate=discriminator_lr_fn, + amsgrad=config["discriminator_optimizer_params"]["amsgrad"], + ) + + _ = gen_optimizer.iterations + _ = dis_optimizer.iterations + + trainer.compile( + gen_model=generator, + dis_model=discriminator, + gen_optimizer=gen_optimizer, + dis_optimizer=dis_optimizer, + pqmf=pqmf, + ) + + # start training + try: + trainer.fit( + train_dataset, + valid_dataset, + saved_path=os.path.join(config["outdir"], "checkpoints/"), + resume=args.resume, + ) + except KeyboardInterrupt: + trainer.save_checkpoint() + logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.") + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/parallel_wavegan/README.md b/TensorFlowTTS/examples/parallel_wavegan/README.md new file mode 100644 index 0000000000000000000000000000000000000000..79e6c3256b6f78e38420cf48f7630332b620c841 --- /dev/null +++ b/TensorFlowTTS/examples/parallel_wavegan/README.md @@ -0,0 +1,71 @@ +# Parallel WaveGAN: A 
fast waveform generation model based on generative adversarial networks with multi-resolution spectrogram +Based on the script [`train_parallel_wavegan.py`](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/parallel_wavegan/train_parallel_wavegan.py). + + +## Convert pretrained weight from Pytorch Parallel WaveGAN to TensorFlow Parallel WaveGAN to Accelerate Inference Speed and Deployability + +We recommand users use pytorch Parallel WaveGAN from [ParallelWaveGAN](https://github.com/kan-bayashi/ParallelWaveGAN) to training for convenient and very stable. After finish training, you can convert the pytorch's weight to this tensorflow pwgan version to accelerate inference speech and enhance deployability. You can use the pretrained weight from [here](https://github.com/kan-bayashi/ParallelWaveGAN#results) then use [convert_pwgan_from_pytorch_to_tensorflow](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/parallel_wavegan/convert_pwgan_from_pytorch_to_tensorflow.ipynp) notebook to convert it. Note that the pwgan pretrained weight from pytorch repo can be use as vocoder with our text2mel model because they uses the same preprocessing procedure (for example on ljspeech dataset). In case you want training pwgan with tensorflow, let take a look below instruction, it's not fully testing yet, we tried to train around 150k steps and everything is fine. + +## Training Parallel WaveGAN from scratch with LJSpeech dataset. +This example code show you how to train Parallel WaveGAN from scratch with Tensorflow 2 based on custom training loop and tf.function. The data used for this example is LJSpeech, you can download the dataset at [link](https://keithito.com/LJ-Speech-Dataset/). 
+ +### Step 1: Create Tensorflow based Dataloader (tf.dataset) +Please see detail at [examples/melgan/](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/melgan#step-1-create-tensorflow-based-dataloader-tfdataset) + +### Step 2: Training from scratch +After you re-define your dataloader, pls modify an input arguments, train_dataset and valid_dataset from [`train_parallel_wavegan.py`](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/parallel_wavegan/train_parallel_wavegan.py). Here is an example command line to training Parallel WaveGAN from scratch: + +First, you need training generator 100K steps with only stft loss: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/parallel_wavegan/train_parallel_wavegan.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/parallel_wavegan/exp/train.parallel_wavegan.v1/ \ + --config ./examples/parallel_wavegan/conf/parallel_wavegan.v1.yaml \ + --use-norm 1 \ + --generator_mixed_precision 1 \ + --resume "" +``` + +Then resume and start training generator + discriminator: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/parallel_wavegan/parallel_wavegan.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/parallel_wavegan/exp/train.parallel_wavegan.v1/ \ + --config ./examples/parallel_wavegan/conf/parallel_wavegan.v1.yaml \ + --use-norm 1 \ + --resume ./examples/parallel_wavegan/exp/train.parallel_wavegan.v1/checkpoints/ckpt-100000 +``` + +IF you want to use MultiGPU to training you can replace `CUDA_VISIBLE_DEVICES=0` by `CUDA_VISIBLE_DEVICES=0,1,2,3` for example. You also need to tune the `batch_size` for each GPU (in config file) by yourself to maximize the performance. Note that MultiGPU now support for Training but not yet support for Decode. 
+ +In case you want to resume the training progress, please following below example command line: + +```bash +--resume ./examples/parallel_wavegan/exp/train.parallel_wavegan.v1/checkpoints/ckpt-100000 +``` + +### Step 3: Decode audio from folder mel-spectrogram +To running inference on folder mel-spectrogram (eg valid folder), run below command line: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/parallel_wavegan/decode_parallel_wavegan.py \ + --rootdir ./dump/valid/ \ + --outdir ./prediction/parallel_wavegan.v1/ \ + --checkpoint ./examples/parallel_wavegan/exp/train.parallel_wavegan.v1/checkpoints/generator-400000.h5 \ + --config ./examples/parallel_wavegan/conf/parallel_wavegan.v1.yaml \ + --batch-size 32 \ + --use-norm 1 +``` + +## Finetune Parallel WaveGAN with ljspeech pretrained on other languages +Just load pretrained model and training from scratch with other languages. **DO NOT FORGET** re-preprocessing on your dataset if needed. A hop_size should be 256 if you want to use our pretrained. + + +## Reference + +1. https://github.com/kan-bayashi/ParallelWaveGAN +2. [Parallel WaveGAN: A fast waveform generation model based on generative adversarial networks with multi-resolution spectrogram](https://arxiv.org/abs/1910.11480) \ No newline at end of file diff --git a/TensorFlowTTS/examples/parallel_wavegan/conf/parallel_wavegan.v1.yaml b/TensorFlowTTS/examples/parallel_wavegan/conf/parallel_wavegan.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..684086d04e3cfcd1e3902ffcbd3b393baa96749e --- /dev/null +++ b/TensorFlowTTS/examples/parallel_wavegan/conf/parallel_wavegan.v1.yaml @@ -0,0 +1,106 @@ + +# This is the hyperparameter configuration file for ParallelWavegan. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 4000k iters. 
+ +# Original: https://github.com/kan-bayashi/ParallelWaveGAN/blob/master/egs/ljspeech/voc1/conf/parallel_wavegan.v1.yaml + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 +hop_size: 256 # Hop size. +format: "npy" + + +########################################################### +# GENERATOR NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "parallel_wavegan_generator" + +parallel_wavegan_generator_params: + out_channels: 1 # Number of output channels. + kernel_size: 3 # Kernel size of dilated convolution. + n_layers: 30 # Number of residual block layers. + stacks: 3 # Number of stacks i.e., dilation cycles. + residual_channels: 64 # Number of channels in residual conv. + gate_channels: 128 # Number of channels in gated conv. + skip_channels: 64 # Number of channels in skip conv. + aux_channels: 80 # Number of channels for auxiliary feature conv. + # Must be the same as num_mels. + aux_context_window: 2 # Context window size for auxiliary feature. + # If set to 2, previous 2 and future 2 frames will be considered. + dropout: 0.0 # Dropout rate. 0.0 means no dropout applied. + upsample_params: # Upsampling network parameters. + upsample_scales: [4, 4, 4, 4] # Upsampling scales. Prodcut of these must be the same as hop size. + +########################################################### +# DISCRIMINATOR NETWORK ARCHITECTURE SETTING # +########################################################### +parallel_wavegan_discriminator_params: + out_channels: 1 # Number of output channels. + kernel_size: 3 # Number of output channels. + n_layers: 10 # Number of conv layers. + conv_channels: 64 # Number of chnn layers. + use_bias: true # Whether to use bias parameter in conv. + nonlinear_activation: "LeakyReLU" # Nonlinear function after each conv. 
+ nonlinear_activation_params: # Nonlinear function parameters + alpha: 0.2 # Alpha in LeakyReLU. + +########################################################### +# STFT LOSS SETTING # +########################################################### +stft_loss_params: + fft_lengths: [1024, 2048, 512] # List of FFT size for STFT-based loss. + frame_steps: [120, 240, 50] # List of hop size for STFT-based loss + frame_lengths: [600, 1200, 240] # List of window length for STFT-based loss. + + +########################################################### +# ADVERSARIAL LOSS SETTING # +########################################################### +lambda_adv: 4.0 # Loss balancing coefficient. + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 6 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +batch_max_steps: 25600 # Length of each audio in batch for training. Make sure dividable by hop_size. +batch_max_steps_valid: 81920 # Length of each audio for validation. Make sure dividable by hope_size. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +is_shuffle: true # shuffle dataset after each epoch. 
+ +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +generator_optimizer_params: + lr_fn: "ExponentialDecay" + lr_params: + initial_learning_rate: 0.0005 + decay_steps: 200000 + decay_rate: 0.5 + + +discriminator_optimizer_params: + lr_fn: "ExponentialDecay" + lr_params: + initial_learning_rate: 0.0005 + decay_steps: 200000 + decay_rate: 0.5 + +gradient_accumulation_steps: 1 +########################################################### +# INTERVAL SETTING # +########################################################### +discriminator_train_start_steps: 100000 # steps begin training discriminator +train_max_steps: 400000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 2000 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. + +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of batch to be saved as intermediate results. diff --git a/TensorFlowTTS/examples/parallel_wavegan/convert_pwgan_from_pytorch_to_tensorflow.ipynb b/TensorFlowTTS/examples/parallel_wavegan/convert_pwgan_from_pytorch_to_tensorflow.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..bb84f7f721396d55f6e33736f375a8f8ab4bc1b0 --- /dev/null +++ b/TensorFlowTTS/examples/parallel_wavegan/convert_pwgan_from_pytorch_to_tensorflow.ipynb @@ -0,0 +1,474 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Copyright 2020 The TensorFlowTTS Team. 
All Rights Reserved.\n", + "Licensed under the Apache License, Version 2.0 (the \"License\");" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install parallel_wavegan" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/lap13548/anaconda3/envs/tensorflow-tts/lib/python3.7/site-packages/tensorflow_addons/utils/ensure_tf_install.py:68: UserWarning: Tensorflow Addons supports using Python ops for all Tensorflow versions above or equal to 2.2.0 and strictly below 2.3.0 (nightly versions are not supported). \n", + " The versions of TensorFlow you are currently using is 2.3.0 and is not supported. \n", + "Some things might work, some things might not.\n", + "If you were to encounter a bug, do not file an issue.\n", + "If you want to make sure you're using a tested and supported configuration, either change the TensorFlow version or the TensorFlow Addons's version. 
\n", + "You can find the compatibility matrix in TensorFlow Addon's readme:\n", + "https://github.com/tensorflow/addons\n", + " UserWarning,\n" + ] + } + ], + "source": [ + "import tensorflow as tf\n", + "import torch\n", + "\n", + "from tensorflow_tts.models import TFParallelWaveGANGenerator\n", + "from tensorflow_tts.configs import ParallelWaveGANGeneratorConfig\n", + "\n", + "from parallel_wavegan.models import ParallelWaveGANGenerator\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "tf_model = TFParallelWaveGANGenerator(config=ParallelWaveGANGeneratorConfig(), name=\"parallel_wavegan_generator\")" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "tf_model._build()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model: \"parallel_wavegan_generator\"\n", + "_________________________________________________________________\n", + "Layer (type) Output Shape Param # \n", + "=================================================================\n", + "first_convolution (TFConv1d1 multiple 128 \n", + "_________________________________________________________________\n", + "tf_conv_in_upsample_net_work multiple 32036 \n", + "_________________________________________________________________\n", + "residual_block_._0 (TFResidu multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._1 (TFResidu multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._2 (TFResidu multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._3 (TFResidu multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._4 (TFResidu 
multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._5 (TFResidu multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._6 (TFResidu multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._7 (TFResidu multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._8 (TFResidu multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._9 (TFResidu multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._10 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._11 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._12 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._13 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._14 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._15 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._16 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._17 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._18 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._19 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._20 (TFResid 
multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._21 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._22 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._23 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._24 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._25 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._26 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._27 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._28 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "residual_block_._29 (TFResid multiple 43264 \n", + "_________________________________________________________________\n", + "re_lu (ReLU) multiple 0 \n", + "_________________________________________________________________\n", + "tf_conv1d1x1 (TFConv1d1x1) multiple 4160 \n", + "_________________________________________________________________\n", + "re_lu_1 (ReLU) multiple 0 \n", + "_________________________________________________________________\n", + "tf_conv1d1x1_1 (TFConv1d1x1) multiple 65 \n", + "_________________________________________________________________\n", + "activation_12 (Activation) multiple 0 \n", + "=================================================================\n", + "Total params: 1,334,309\n", + "Trainable params: 1,334,309\n", + "Non-trainable params: 0\n", + "_________________________________________________________________\n" + ] + } + ], + "source": [ + 
"tf_model.summary()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "torch_checkpoints = torch.load(\"./checkpoint-400000steps.pkl\", map_location=torch.device('cpu'))" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "torch_generator_weights = torch_checkpoints[\"model\"][\"generator\"]" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "torch_model = ParallelWaveGANGenerator()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch_model.load_state_dict(torch_checkpoints[\"model\"][\"generator\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "torch_model.remove_weight_norm()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1334309" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_parameters = filter(lambda p: p.requires_grad, torch_model.parameters())\n", + "params = sum([np.prod(p.size()) for p in model_parameters])\n", + "params" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "# in pytorch, in convolution layer, the order is bias -> weight, in tf it is weight -> bias. 
We need re-order.\n", + "\n", + "def convert_weights_pytorch_to_tensorflow(weights_pytorch):\n", + " \"\"\"\n", + " Convert pytorch Conv1d weight variable to tensorflow Conv2D weights.\n", + " 1D: Pytorch (f_output, f_input, kernel_size) -> TF (kernel_size, f_input, 1, f_output)\n", + " 2D: Pytorch (f_output, f_input, kernel_size_h, kernel_size_w) -> TF (kernel_size_w, kernel_size_h, f_input, 1, f_output)\n", + " \"\"\"\n", + " if len(weights_pytorch.shape) == 3: # conv1d-kernel\n", + " weights_tensorflow = np.transpose(weights_pytorch, (0,2,1)) # [f_output, kernel_size, f_input]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (1,0,2)) # [kernel-size, f_output, f_input]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (0,2,1)) # [kernel-size, f_input, f_output]\n", + " return weights_tensorflow\n", + " elif len(weights_pytorch.shape) == 1: # conv1d-bias\n", + " return weights_pytorch\n", + " elif len(weights_pytorch.shape) == 4: # conv2d-kernel\n", + " weights_tensorflow = np.transpose(weights_pytorch, (0,2,1,3)) # [f_output, kernel_size_h, f_input, kernel_size_w]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (1,0,2,3)) # [kernel-size_h, f_output, f_input, kernel-size-w]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (0,2,1,3)) # [kernel_size_h, f_input, f_output, kernel-size-w]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (0,1,3,2)) # [kernel_size_h, f_input, kernel-size-w, f_output]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (0,2,1,3)) # [kernel_size_h, kernel-size-w, f_input, f_output]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (1,0,2,3)) # [kernel-size_w, kernel_size_h, f_input, f_output]\n", + " return weights_tensorflow\n", + "\n", + "torch_weights = []\n", + "all_keys = list(torch_model.state_dict().keys())\n", + "all_values = list(torch_model.state_dict().values())\n", + "\n", + "idx_already_append = []\n", + "\n", + "for i in range(len(all_keys) 
-1):\n", + " if i not in idx_already_append:\n", + " if all_keys[i].split(\".\")[0:-1] == all_keys[i + 1].split(\".\")[0:-1]:\n", + " if all_keys[i].split(\".\")[-1] == \"bias\" and all_keys[i + 1].split(\".\")[-1] == \"weight\":\n", + " torch_weights.append(convert_weights_pytorch_to_tensorflow(all_values[i + 1].cpu().detach().numpy()))\n", + " torch_weights.append(convert_weights_pytorch_to_tensorflow(all_values[i].cpu().detach().numpy()))\n", + " idx_already_append.append(i)\n", + " idx_already_append.append(i + 1)\n", + " else:\n", + " if i not in idx_already_append:\n", + " torch_weights.append(convert_weights_pytorch_to_tensorflow(all_values[i].cpu().detach().numpy()))\n", + " idx_already_append.append(i)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "tf_var = tf_model.trainable_variables" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "for i, var in enumerate(tf_var):\n", + " tf.keras.backend.set_value(var, torch_weights[i])" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "feats = np.load(\"LJ001-0009-norm-feats.npy\")" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "feats = np.expand_dims(feats, 0)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "audio = tf_model(feats)[0, :, 0]" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": 
"execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAD4CAYAAADvsV2wAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy86wFpkAAAACXBIWXMAAAsTAAALEwEAmpwYAAA100lEQVR4nO2dd5wUVbbHf2eGnNMQlDCogCBIGhGzEgREZc2wuqJrWHfV5+q+3YdiRFRc1FXXnMUVMawBBQUJoiAIQ5IMQ86MZBjynPdHVw81PVXVFW6F7j7fz2c+0111q+7p6upT9557AjEzBEEQhPQnK2wBBEEQhGAQhS8IgpAhiMIXBEHIEEThC4IgZAii8AVBEDKEcmELYEa9evU4Nzc3bDEEQRBSitmzZ//GzDlG+yKr8HNzc5Gfnx+2GIIgCCkFEa012ycmHUEQhAxBFL4gCEKGIApfEAQhQxCFLwiCkCGIwhcEQcgQROELgiBkCKLwBUEQMgRR+C5YtGk3Hv9mcdhiCIIgOCKygVdRpu+LUwEAp9SvhgFdmoYsjSAIgj3SeoS//9BRDPrvr9hz8Igv5x8qo3xBEFKItFb4I6avxahZ6zH8u2W+nH//4WO+nFcQBMEP0lrhz167EwDwwQzT1BKCIAgZQ1or/J1Fh5Wf8+ixYuXnFARBCIK0Vvh+MHHptrBFEARBcIUShU9EvYloGREVENEgkzbXEtFiIlpERCNV9JuM/YeOKj9ncTErP6cgCEIQeFb4RJQN4GUAfQC0ATCAiNoktGkB4H4A5zDzaQD+6rVfOyzdstf3PiYs3up7H4IgCCpQMcLvAqCAmVcx82EAowD0S2hzG4CXmXknADBzytpFDh4t7Zlz6wgp0iIIQmqgQuGfCGC97v0GbZuelgBaEtE0IppBRL2NTkREtxNRPhHlFxYWKhDtOLmDxig5z9Bvlig5jyAIQtAEtWhbDkALABcCGADgTSKqldiImd9g5jxmzsvJMSzJGDrb96v3/BEEQQgCFQp/I4AmuveNtW16NgAYzcxHmHk1gOWIPQAC5YMZa3H4qLhVCoKQmahQ+LMAtCCi5kRUAUB/AKMT2nyJ2OgeRFQPMRPPKgV9O+KhLxfitSkrg+5WEAQhEnhW+Mx8FMBdAMYBWALgE2ZeRERDiOhyrdk4ANuJaDGAyQD+zszbvfbthnwt+lYQBCHTUJItk5nHAhibsO1h3WsGcJ/2Fyo/Lle7GCwIgpAqSKStIAhChiAKXxAEIUMQhS8IgpAhiMIXBEHIEEThC4IgZAii8AVBEDIEUfgR5eeVv/lWi1cQ/OSdqauRO2gMnhnnT2lRwT2i8CPI7gNH8Ps3f8EdH8wOWxRBcMSGnUUY8s1iAMBLkwtClkZIREnglaCGY8UMAnBEK6O4LIB8/oKgkl1FMiuNMjLCjxAnPzAW5z49CawV1ZLMnIIgqEQUfkT4ueA3AMCm3QfxSf76JK0FIZocPHIseSMhNEThR4T5G3aXvB4ui11CivL21NVhiyBYIAo/IhQdVl9wXRCCZtPug2GLIFggCj8i/HuSeDQIqc/89bvCFkGwQBS+IAhChiAKX0hptu09iENHZaFQEOwgCl9Iabo8MVEC1ATBJqLwhZRl8+4DAIDJy6SKmRAst7w3C/1enha2GI4RhS9g464DmJeCi23z1u0KvM9jxYwpywux+4BElGYyE5duS8kFakmtEAE4Hlprso+IfO3/nGGTAABrhvX1tR/V7AwhjP/FiSvwwsQVAFLvegmCjPAjwC+rd5jum6pF4ApleeCLBYH3GVf2gpCKiMKPAAcswtEPHBYPFEEQ1CAKP+KYG3vKcujoMbEtC4Jgiih8jVvem4XcQWOwq8h5hsriYidq2Rkjpq+x3bbVg9+h/WPjfZNFEJyyQzK+RgolCp+IehPRMiIqIKJBFu2uIiImojwV/apk4tJtAI
B7P57n+NgHv1roqe/CvYdM900r2O74fMu3usuj/9MKcW8U1LJp14GwRRB0eFb4RJQN4GUAfQC0ATCAiNoYtKsO4B4Av3jt00/c+HSP/GWdpz7/8dmvno4HSqelfeSrRa7O8dW8TZ7lSGck9a9zLv331LBFEHSoGOF3AVDAzKuY+TCAUQD6GbR7HMDTACSdng/0ev7HktfTVzmfFQjJsfCeFSxIRX/1dEWFwj8RgL5ixwZtWwlE1AlAE2YeY3UiIrqdiPKJKL+wMP3NC3/7ZL4rE5IRa7cXeT7H/kOSotkKTlhCj0f6CjHW7zC+Bycv2xawJMExfNzSsEVwhO+LtkSUBeA5AH9L1paZ32DmPGbOy8nJ8Vs0pWzbexBjft1sq+1nszfg3Kcn4b9zNuCLuRt9lsw+3y7cErYInvht3yHc8t4s7PYpIGvCktKK65iPi/WpiNmg4+2f0rcoysuTV4YtgiNUKPyNAJro3jfWtsWpDqAtgB+IaA2ArgBGR3Hh1gs3vj0Td46cg302Rsn/++l8bNgpo0PVvPHjKkxcug2jZnlbUzFj+77Si+vbLBbbM5Eb3jZenturaOaYeP0F56hQ+LMAtCCi5kRUAUB/AKPjO5l5NzPXY+ZcZs4FMAPA5cycr6DvyLDqt/0AgDd/XBWyJM5ItUpb+w8dtfVQ9YMtCdWcrnzl51DkyERem7ISnYdOwGezN4QtSkrjWeEz81EAdwEYB2AJgE+YeRERDSGiy72eP1U4fLQYQOqF3qeSWWLNb/tx2iPj0PaRcYb7t+3x1x/g9RR7mKcTw76N2co/mbU+SUv/sXKjjjpKbPjMPJaZWzLzycz8hLbtYWYebdD2wqiP7hfoCoqnAkZT3XSLuF1ZuA8XPvODZZsvNbfS/LU7DfcfPHIMn83eYJmsTog2M9eY550KilRW+JIt04BdB7xFBx44fAyVK2QrkiY5b00tuyg2e+0OdDu1QdJjU0X1rUvihaT3MPp+8VbDNm0fGYejxYy6VSvgolPrK5VPEFIBSa3gA9NXBZvh8vUp7j0F3p+2Rp0gPpLoEpmInQXUo5r56tGvF2HbXgkHETIPUfgZzuLNe8IWwRa/rLKeyjtJJ7F2e5GS6GaVTFi8FbmDxvi+DuEXP6Sxr306IQo/DVBplonqIu4hbVHcjD85rGu7UZFb7NFj1nLZ5YMZawEAi1LkAZzIpl3Re1Bt2FmE3EFj8Em+2oXeZLPNKCMKH9GPmBy3aEuSqljq+rrhrUinOlLGim37lJxnawQX8I4eK8bSLcE+OI4Wq3nwqeJYMePcpycDAF6aVKD03J/mp65rqCh8ABOXRHs6+qcPZvvmf5xYPTGqeXg+n+P8878zdXUZ33m3mKUN8LJ+YsSRJDMZO/xz3DL0fv4nrCpU81BTQdCJ5x7SZbBdZ/LduUX1+YJEFH6K8HefbM6qTBt+s+egs2Cre0bNxZBvFqPrUxOV9L/fJEBtxPS1Ss4fVyL3jJrn+VyTtVTfP7jI/OqWZLPMVYX7gxFEw2sG23Ql4xX+V/M2Yu320jcjwd+i4UFQZLM0YjxCONVIttagT/V88MgxdHligt8ieWK19j1Ylbu0S9xcNeSbxZ7PpYqPfUp3ITgjoxX+qsJ9uGfUPLyZkNwplRdl4jz9nXEWv5G/rEuLKkROTATrdxRJ3hufSRbM9r6HmZCZOU1wTkYr/CWbjV359jk0HwTFbJMIUiP2HCj7GUbP34QHvliAi3QRq6k/lwkfidwFflrhT+zJ3HU7Hc9Cez43xRdZ0oGMVvhm/PnDOZ6O98skdNWr3pJ1vajl+dl94Aj2Hkzt1AtOVKzf6vhVjwu3bktSRol4iVDVXPHKzxj4zkxHx6jywEpHMlLh281bn8hOm6YQP01CM1fbyyWS6H2TSNy1LBPGpl8qqDlgtWbwz++WuT7v0i17cPG/fkzeUBAUkJEK/86R7kbwSwL2bTbi2ten22qXzMqQ6nb8NQ6m+SoKab/rUwqKzY
rcRoFYEr2wUkdnEpN8ms0EQUYq/EylQDfVfWlyLBglVW34MxzEC9j1WLJClT+/n3QeOgHdn/0hbDGECJOxCn9X0WEcURQWn8qkqknnzZ/s56Yfb5I90wmp4rm1dU9pb6SgI26FaJOxCv+/czbir4oKiKcyeyPokcTMSf3sExWbG5w88FPVEUelySgqpGqCuSiQsQp/3CL/CnZHLXBrx/7DJdGXqcBtI2bj5AfG+t6Pk5QaZgW6o0607kQ1FLqobRu2L/+RY8WRcN/NWIVv19slHbj53Zm4+b1ZYYtRioJte/H1/E2G+yYsKW2CKY5ABs+NChZ+BTXsKnLuUhxm4N1v+w6hxeBv8e60NZi9dgfenVa2YFFQZKzCD4swHjTzI1KycVXhvpJ0wj2e+xF3fzS3pBawFV793J3AzLbdb6NEsopg6cTDusRoqUDcS+yz2Rtw1avT8djXi0NzAhCFHzBBZdqLWk3btdv3o9uzU/DM+OWlth+2sKPHa4eO99H8lkjz+8ei4+Pf46OZweR+UWVyMfu+v10Q3LULipUBJ2JThb7YUI+QooFF4QuBEFfesxKKUL//8xrTY8IsxnL/5wsC6cfvTzhmgbsgwyiQLl50Rw3u47DiJUTh+0E6rpT5xPBxy3AgiZ98+Bb85KiqfCUc562fnNu6zb+H8O6iJ8YsCa3vRETh+0EqaKiAOXIsdlGMEsAN/iKY0XQiydJPOGHaymgWjomCZ4hbdh1wvpYy2aQGwIczwkvP7CTpod8oUfhE1JuIlhFRARENMth/HxEtJqJfiWgiETVT0W+moupHvN9kWumHV8yz483zzXxukuvmhYkr8N3CzSnhA18o6ZcjQbHJzbIsSYK6fYeOYtmW1E9ilwzPCp+IsgG8DKAPgDYABhBRm4RmcwHkMfPpAD4D8E+v/WYyc9fvUnKe+Sbnee775YbbvaDPYGjXNv/RzHW44z9zfItyVfkgiepI2mpRPOrsduF+6fZraPvIOPR6/kdMXpY68SpuUDHC7wKggJlXMfNhAKMA9NM3YObJzBx3T5kBoLGCfn0lyq55R4+pUS4fmniixPPsqKK4mEt5kURlMW5XUenv2Evd1TAXmAHz1A9HFN0rYTBq1npl59Kb7/YdOmr6ff3LxWBnZYRqBydDhcI/EYD+m9mgbTPjFgDfGu0gotuJKJ+I8gsLg6vHaUSyKaAVh44GW7DZLUs2BZNnZfCX3vym/Xo+JLqIbvUQsv/WVHfBNHaWEWat2YHvFvrnbRPV2Yk7jD9L/CPu3H8YbR8Zh34vTy153/fFn0ra/eoiZmX++l2urmHigCMIAl20JaIbAOQBGG60n5nfYOY8Zs7LyckJUjSlPDE2OqvyYcPMnn3al2z258H0W0KIvhe9p3JGmDjKvOa16bjjP9Ypvd2m87jp3Zlofv9YR+mmo4zZdxhPjfHD8pjJZuHG2D31zYLNWORx4HPfJ/PxnxnOSzg+/NUiT/26QYXC3wigie59Y21bKYioB4DBAC5n5rRe4dqwMxZZ9/X8TWUWRlUs7ikbkQXgPmo3f39Y3P3R3BJbsZerqnKM/IJWmcxZ/84kmLl6B+4cOQc/aF4tZgvnfrH34JGk7rgqifu9fzRTnZlIz8w1zj1xwshkqkLhzwLQgoiaE1EFAP0BjNY3IKKOAF5HTNmnxKrI9JXbXStWZuDdaatx90dz8UCCy6FZcfFQCGAmP8vFDyFIvp6/qSTVspmHhx1c3ytJ9m/be9zMtHHXAUxeus1wMfNthyala1+fXqry2wfT1zg63ivtHh2P1g9/V+rzqSDZ9UxMbaJq8LR4k3NTUBhJFj0rfGY+CuAuAOMALAHwCTMvIqIhRHS51mw4gGoAPiWieUQ02uR0keGFiSvQ/H73GRsf+3oxgPCz9FnhtDi0U4oOG7t9RjURWeKP30k0pF9ppues3VXyet32Itz83iy0HzK+TLt8Bw/WKNnsnxuv3iPMCUaXws31SZV0D+VUnISZxw
IYm7DtYd3rHir6CYPcQWOwZlhf18fPWbdLnTAapDJiyEf2HDBWglFN1Zz4O99VdBjVKtr7iRiFz6tA7wCganZolBo8yEfAsG+Pf45Rs9ajQY1KuKvbKSif7d3goOJZVnT4GKra/N5TDYm0tcGkpd4rJqkkSiM0K7o+NdF22zA9m+IKMAJZmMugt3PP08VN2Mkyasa4RWXv5yBTUL+WkP30hYkr8NRYNQ8zFTEbpz0yToEkyTlSHLx7sih8G/zxvXwA4ftapwtGzyt9vd2giQeFebHhuyXZXM1Mok9nu198/MJggXZPyJXP3lGUI97pV2g2Wd5z0P9ss6tCMAOJwnfACxOce08IqUMUJ05m99zgL1IrJ3xUMfvOP1EY9BUlROHbZN32opSKqIsyRqOqKJSFnLMueh5FWzKofuvF/wo+R7yZeXRohDJcqkQUvk1uHTErMkkwoyKHSrIicCd+ML108EzUisikO8u37rOd3sIs0C1/jbOKcqniAKGKCPzMUoPlW/dFcsqfLkRhhH8s4QsePs48w6fgDzNW2UsznVij9odl2zB77U68P915xGsmkbYKP1U8WdwQvmr0htFXE4WBVqKniqpF+tlrd+C2EfmG51OVCC9duOndWbba6b1xmBk3vTsLV736s19ipQ1prPDDliCzcZqILCsCCj9xhP/Tit+UnPcvH87B94u3GqbVkFlEWa58ZVpS045+RjitIBrFZ8wCDa0IemCatgo/DBe7oEiFT2YViGQ8mg9f4/t1z2zdY54/yUtWVj12FUcqzHznrNuFUx/6zrKNfoS/NwAXSjvoA8rsstinxIBmpLHCD1sCf9lVdBh3jZwTiL+wG6wUS3RNOmFL4B67t7tfEcFB89W8Tdi29yCOFTOe/Na9R43KB+BOFwVbFgeUojxO2ip8P6okRWl09PqPq/DNr5vLeJaoIujI1wjoe99nhW5qtFqRO2hMyesoXL9kPPXtErylJarzyrSC39DliYl4dvwyrN9hLzeTUTSxym/cjVlyUsBpRtJX4UdHN/syCvf78/kZBWj0MCYiLNiw21XFIVX4rfCHfqPetzuex96ue2FiDYAgeX3KKmX+7fHI7IlL7CtMIw8gq6/c6QAv28U09duFZfMa+Ul6ZgiCPz9et2dc+1sR2jWuqVYW7fNl+WQL8Xr5rI5/0iBvCgG47KWp3jr1iN8P0aMJNqNNCrKGFmm5duymGXZTqCOKxD+3k9/51786qxo2rWA7zm1Rz/4BKTDNkhF+ilJcovD9PX9Q+PXgcoLf5m29Z8mRY8U4e9gkz+dcsW2vdj57wr88eWXyRinECgc5mJxGyjutcRyFezgZaavw/VBYbnOe+7OeEPsf1RG+U6LwW/Hb3KH/jC0GG5Z1dsw9o+YpOU8mkFj8BLCetW91WJwlCq7FyUhjhR+2BP4S/3x+KUqvD6l0cot9yqYXCDNjyvJC01TDftXmFfzhwS+dJaiTEX6IRMqjZsoqx9NDK5j1Jh3rm8xtnnOvD8z3fl7jqH1WyMOjLbvNR3OvT7HnWfLxrPUY+M5MvDS5wHB/3G3Pqi8/sJs7f9326FZn84PrXp/uSE+s216EXUXmnlapkJcnLRX+hMVbcfsHs8MWo4QxCzbjRReFqc1gMBZsjNXQ/C7JKr+Z8knGTosb2w52XeXi7Nin1mXRKcsVBEA9/NUiAMBzSTyNnJROVMEOk0RjifzPqLk+SxItflm9A7PXmmdITXwWnD98MjoM+d60/Y/LC8s8NKMSFBYnLRX+rSPyDe11YaIy8+IXczZiwYaYwp+ZJDugWz/fBz3nW3c2RXhxUhrUGrA5wPvSoABJFIjOnDg47LpF/rwyeZqNjbsO4Pzhk0ttm1aQ/DiVs/9kpKXCjyI/rihUdq5PZ2+wbSN3aynxWmjcqUUt7AWvxOyLbrBjOmFm/LA82GAbu5aGKJlBo4bb+BA7Zp5lW9Sk17CDKPyAWL/jAJ6foC6oyG6IfHZImtSp6gg7Pf
KrP7gzfSUjsWD7A18sxMKN9hZvv/l1kxIZ7Opx0ffqidpCrij8AHk+hBKJbl1JveLUS2fJlnA9WFYqjizerS3Q3vxe6XS/H81cZ/scd420Z1O3WkgEjpvLktny4+tCgjrsqPt+L0/zXY44ovDTGGbGUg/Txe0e/NKdjhbXRtRD5Iu5G1wdN+DNGZ76jadMsEOnx80XEgFg5C+xh8z9n//qSaZMxu3sJwqV3PRETBxBJc3vH+vp+M5DJ7hOdjVlubo1izC59+P5ro7zmvb2wmd+sN3WjnVv296D2H8o+eJg0Enzok78ejjR9/ryi1Fz1VSi8ImoNxEtI6ICIhpksL8iEX2s7f+FiHJV9Cv4z9AxS/D21NVhi5GSfDUvOt44XZ6YiKk2PEZaPfgdcgeNCdx1NKq0evA7fJK/3lH1s46Pf1/S3q6612c+9RPyujJPRNkAlgPoCWADgFkABjDzYl2bvwA4nZnvIKL+AK5g5uuszpuXl8f5+fmO5dlVdNjSV1ZQz6OXtUHHprXRtE4V1KxcHgBw0gPeZhdCdDixVmUM/V1bNK9XFfWqV8T2fYfQuHYVEGIeQAePFKP1w9YFSwT7vHfzGbiwVX3XxxPRbGbOM9ynQOGfBeBRZu6lvb8fAJj5KV2bcVqb6URUDsAWADls0blbhb+ycB+6PzvF8XGCIAhRYs2wvq6Os1L4Kkw6JwJYr3u/Qdtm2IaZjwLYDaCugaC3E1E+EeUXFrqzAefWrerqOEEQhHQnUvnwmfkNAG8AsRG+m3OE5XeeCYy/93zUr14R1SqWQ7ls67FCUDZJwT/u69kS1+Q1RqOalS3bHTxyLGkN2lSnVpXy2OWghGH7JrXwyvWd8OGMtXjlB3spqV//Q2f0Oq2hWxFtoULhbwTQRPe+sbbNqM0GzaRTE0A0Ss0LSVnxRB+UT6LghbLc3e0U/HuSPwFdTunYtBZyqlXE+MVbk7Zd+nhvVCqfbfvc6XxvjLz1TJx9Sj30f2M6Zqyyl65lwn3n45T61QEAl7RrZEvhuzXfOEXFNzULQAsiak5EFQD0BzA6oc1oAAO111cDmGRlvxeSU6NS8me1iptozbC+rn7Q/+jdynPfqc493Vu4PvarO8+x3XbhY72StvniL+fY+h5XPXmJI2UPpPes+uxTYhWvDh6xX+E+ruwBoGK5aD0MPUuj2eTvAjAOwBIAnzDzIiIaQkSXa83eBlCXiAoA3AegjOum4IzL2p/gex9eHhi1q1RQKEl4/K1nS1fH/eeWM1EuOwv9Orj7nlo1rJ68kUa1itYP/0cvaxP7f/lpSc8VdprqqFKlgrOHYJy09MNn5rHM3JKZT2bmJ7RtDzPzaO31QWa+hplPYeYuzKymdH2KcfM5uUrOU7VCNhrUqGSrbaemtZT06RS7Odijzt0uR+nxWqgv9O9Yarvdh2il8tn4ey81s6T2TWoBAHKqV7Rup7jucjrRv0tTV8el3QhfsMewK9vhkcuSj7DskJ1FJdPoE2paK/6wkjcdOZYeCt8N9/ZwNytI5M6LTlFynjpV7c22ZHRvTqsG9mZcbw8s7Q15Yi3rBW8AeO7a9q5kckNaKvzhV58etghlcDtCMEJvM728Q6IHbGnCUvjxUaVd6icZfaYCrRvVsNXugpY5PktSmmY2XZVF3ZtTu0p5W+26t25Q6r2dn9+ZJ5XxUPeNtFT41+Q1Qf6DPcIWwzeydRmZkt5QLn/FXU+q4+5AjdMdmgfuuOBkT/155YX+HTyfo0dr6+jIetViDzU7tnQhGG46O9d035NXtCt5nVO9Iv50/kl45ybDeCYAwKkG6y52bPjJZukqSUuFDwB1bU5jUxG9s0UyX6eL2zSwbmDCuZp3gluc5revF/II/9SG9kbnVsTLJJqVS4z/9pvWqeK5r5I+h/ZRdq77L2mt7FypwJphfS2dCxrWPH5PEhHuv6Q1up1q/nuq6NC7SX/uoEhbhR+l1fHbzmuu9HzlHORcveVcd333butvAE
giYZuPW9SvhvNaeHvIjVsU83H/duFmw/3ltQ+p0o2xgsJFwTNyvc3q0o2LHOazyY6OyjElbRV+lGiqON3D1Z0bl7xO9lwL68Fn1W27E8uae8KuDJSVRahYzniE9toNnWydo6PmEXVm89I22VPqVwMAPHWV2rUlo+soOIMtEh87/e3c0LWZV3F8RxS+A3LrupyKK44xa1KnCrpr9uLePoViexXZ6vgzm5cdSUZjcGQsdO+2jWwd/d7NXdC3XSO8meCpka0pDv3C9Ph7z7flwWGFGzdft3EBUcbIdm7Eqicv8VWOKzs1Tt4oZETh2+TJK9qV+FY7pVtrd3Z0M7IoZnNeM6yvY28Yu/gZBt2sXtkZT5RMcG6pWbk8Xr6+U5lAqItPi33/8UVbAGjZoDqmDermqT833h12PXbCpOCJPnjjD51ttc2tWwXPXdvBVlsjt9NMi/cXhW+T35/p3q2yvGIDdRDmj8ouF6DiWIlodD3CtuED/v347+3REnMe6pk08MkpbmYIqWAGKpedZTsf/Bm5dVC/hvvrWt1GihI/uUtRrIVdROE7oKXN4ItEaiv2GAoiQKaJQk+SRBoauKFlEYUelehF31uZ1rKyyHbwk99EXeG/cn1svSTZmOaf2npIo1qVS82ckpEYK2F0LwbJAA8DSTdEKj1yVIknp+rcrLar41VnE4zAYNgTXQ1METUql0f7JrUwc7W9jIRRo6WD3DdhEraCS8Yl7eytl1zduTEOHyvGtXlNkjfWcXn7E7DEY71hlQQ9s5URfhKm/t9FJTZZp77lgjEVDB6AXZrXwVsD8/ClgyyRqgkjgeuz11iH1V/Z0TqSOl1JZrbMyiLc0LVZiVtq39PtPSgGnh0tT5qgdYoo/CQ0rn3ctBGVdcVzPAZFBUE5i6GLmUmqRqXy6JCwCK0ysCgZzetVC6yvOMnSMdg137n2IIsAf+/VCiufvAQrnjj+XTuNVXj5952wbGhvPHRpG8t2VSrEBm/x2JiwF22DHuGLSccBYd8ccVTZg5vXq4rVv+1Xcq5EVHndqAwsSkYDD4t/DW1mL03EyA9cHyxn9yqelFMNa7YXuZIhaJ6/rgO6NK+Ds4dNAhDzX8/OImR7HO1WLJeNM3KTm13tZCy1SrlghOvoaVH4QlBc3bkxho9bFrYYkeG0E0ovaNaymTALAM4+WV0CLLuJuvRc1CoHk5ZuUyaDH3Q7tT7+0btVSRoLP6o8OTWRmJmO7BQYKn0eR81LEJNOmlM+QvHXUTFRmRG0fGclKG0nMymVrrJ9TBYuB1hkXL3oVGdpAMLgnZvOUJKzyA1mXlTxGIlEnHrCuZ3RBu0WKgrfB/q0bYj7TCol1a0aTJIwKxt6nKgvQn/+57ND7f8xB1ktq1R0mTjL4DuoY5LQSy/Ph7eeWWpfVMyNUcUswtjMg85L3I0TnJaT9IoofAvevLF0iLzdh/h5LXLQuHYsKKZjQsWpwX2DyUhop5/WjaLhSvj479oabg+7Vmp8gc8OTnzB9VjlcgGAS3WlLIliBcZ/+sdFKbFwn8o4DTyM9tDpOGLDN8HIvpisdmgcIuDS02P+vnddVLpEntcIVrvY6ae5QYoDldSuUh47i44kbXfaCcbT/LBnIGGV/NOP+nJ0DxLS9nkNirvxrGYYMX1tqW2v20xl4JYgc74b0cBh/9UrOVtH6RVwdlm3yAjfAXZ/aISYd8ngvm1Q08UCnAqiUK5u9oM9bbUzSxEQpIeOEeUUB8zZpbKuYHbLBsddRa3kcWJCTqzKBPhfezUo04VZ+opOTd0FTdrlH4rqD/uNKPyA8Wpq7WszElG/iGi2UOy33dfuQ8esIHurFIle9RO7Dx2z71KfSjuO3fqsKikOaJEhrEjiVEn+JwrfB6y+e6/RnN2TlNGLo9e1YeaaN4sUNVJEmYid2+E/t5yJEX/s4ur85xvUz21Ys5LvJpxEjqXRqvIfUiDvvRmi8FMMu8mv9KPmQ0eLDdsE8RN87roOhushVn
VdX/9D54xNKWDEuS3qGSpuO5g96nsluCn6fS+o1PdhD6bLJcyYk+Xjt5vqOQg8KXwiqkNE3xPRCu1/GUMZEXUgoulEtIiIfiWi67z0mep4zZzZwuZ0XGUgkB9Uq1gOz13bHqPvKps7p9dpDfHcdR18lyExmjIMPRKm8hp525nJGyniwb7WKQ+ccEJNb4VjkpEs8C3x4ZWsNKSZyTIMvI7wBwGYyMwtAEzU3idSBOBGZj4NQG8AzxNRLY/9pixB1Q3V2xSn/t9FZfbfeFaw01KjwJcrOzXG6Y1rBSqHnnrVSj98o7DQHSRnn1wPF7icOTjl4jZqiwD5yX0XWy/AJq5HJMvYGfaMRI9Xhd8PwPva6/cB/C6xATMvZ+YV2utNALYBCOYuC4mw3QkTaVy7Cn55oDu66EoLEoLNDnmvSSBamKj6+F4SvKWRadsSlQ9TpymRjUpqWtEzSYW6xAjsdiG577rBq8JvwMybtddbAFheKSLqAqACgJUm+28nonwiyi8sLPQomnuGmgQC2cYnfe8mx0qcBjUqlcoPErSeqapFonpJUBYE+Q/2wNWdG5fUQLBD2O6jViTa6tOBDgnBjMn44BZnpqtkAX9XOaxdazYAvLBV8OPepHcqEU0gooUGf/307Tg2XDTVI0TUCMAHAG5mZsNVRGZ+g5nzmDkvJye8SUDYZc/M+J/uLQy3D+lnLwXAxW1K//jDcGErlxUd5Xh912YgQqkc/PWqVcQz17S3HWQXFdzGMrTRgt5yXEYKh8E5DtennD6Qk5WiVGWTv87hTEUFSe9qZu5hto+IthJRI2berCl0w3R9RFQDwBgAg5l5hmtpFVK3agVs33/Yl3P7ZdAxO++NZ+Xi4a8Wlbw387s/r2XpcHyz1AF28vA45YSalTGgS9NIubTVqVoBq59Sn7ExDNyaTP7WsyUubtMAbSNe+lCPk4A4s5xWXnD6AEknG/5oAAO11wMBfJXYgIgqAPgCwAhm/sxjf8pIdK3S4ySHShTRF23R4zZnuwqysghPXdmuZEQZJBeFMHW2S6Xy4UcTd/Q5CjVM7rjg5LBFQA2HaRr8xOvdNgxATyJaAaCH9h5ElEdEb2ltrgVwPoCbiGie9tfBY7+eub+PeXKxHjaDm8wIO+quj0leD71cVnbIc1ukV2KuMB4ydjmlfnU8c037SKXNTieisL7SNELVyDxdDWbezszdmbkFM/dg5h3a9nxmvlV7/R9mLs/MHXR/8xTI7gmrBZOwFbYZF7ay9yCyE6QTz+ZphNvMj1HFT6+pT+84y/M5ru7c2HRWJvhPGFHfFctllfKaC4rwH38hUcsk57gK/FIvuRbZLe/tcdxWaRWYcqlW7LliwHm4w8TP57equIowCqhHnaDq9HYJKDZGz7KhfVA3hIFVxip8PwljgvA/3U8p8S6wmkI+c017TPzbBSnnhWIHs6Rg0ZyvlUbUfVmeuaZ9IP3oUyePv/f8QPoMC1H4PnByTrXkjQBc2UldvhgiwqzBPZLWCa1UPjupfE4DW/xGnyLYio//1NV4h8ET+B4TF1chOuR5HHnbzSyrjzZu6XMm0b/1bIn/hljJLf2GeTbIa+avV0L7JrVstTsjtw4+n7PRV1ncEIZt0QqzMnSJmJnpEtV9bt0q+GuPaCn8P57THI+MXoSTc7wXpTk5pypWFu7H3d1OUSCZd8xiBPzGSaGYPm0b+hqXMnNwdxw6Uuy5eI1XMnKE//GfvC+0qaBqGppV/MCJeXvxkOQRsn84KzdyC/PdtCLkVoXKk9GsbhX0aN0Ap9SPzYg62Bx4qMCqelpYgYwVHHg+vXpDZzxymf0axnF+HtTNVrv61SuFruyBDFX4YddKjZfOu9TmlDPTcWLfNoqhOM8nN9PL2xsXxnZDkzpVMP+Ri3HLuc1dn2PK3y/CWwPzMPR37XDLuc0DS4wGwHLGZHeGpop4LeqzTvbfvfiEkGYvbpEhZgiMvK0rCvceQlYW4fL2J2D0/E1hi5Ty6E
f2l7RriLELtpS8b93IHz/8gWerjRquWVlNgE5O9Yp46FJ16YjtYFVkJ+hU3T3bNMD8Ry5Wdj2NePPGPOz0KVLfTzJyhG+EKl/cJnWSP/GrVixX4mKZaiOEMDByWfx7Qg1R/cj+3wM6ldpXtWI5zHyge8n71opKJ1YuL+MlO/QMITWyn8oeiH2ma8+IlnODHUTha1jZIJ3Qq036ZScMG6NIWatkX3qTXdxTo74urUTnXOeL9t1P9RZ9LQhRQBS+RjwgySu3nOfeBisY85hFOcRk1PSQUlqPUaWyiK37horVOks65+pJNUTha8TD7+2YZKyoW9VZ9NyJtaJT/iyqGC36cZKl3PjiZ1gugcJxwnaSEI4jRkiNWlVjI8G+7Zx7Xix9vDcquUxVcP2ZzfCQLrWx4Bwjf/M/XXASFm/a48nNUY9kPgiH//75bBwrZlz7+nQAaoMVMxFR+Bo1KpXHr49ejGoBp0a2m8dc1RpDIhe2ysEPy8KrLmYHs/z89apVxG/7DqGzQSBd/eqV8NHtpSNvOzSphXnrdylLpiYmneP4lQso8bsV85A3ROHrCCtvdVxxmfHTPy5KWoXHLTef07yUwn/thk4WrcPBrODFrMHdMX/DbtsBRu/f3AXLtu6NRMpcwRnZWYRjxYxsecp6Qu58mFeIsov3e9B6dNSkThXXJqNkdGhcq9T73m1TJxiMiBxFk9asUt512gijNYOoFatPZ+Y+3BO3ndc8lFTG6YSM8AFUCDgSUPCO2NSjhd9pHGpUKo/BfYMNJktHRNMh/IInqooiC/5x/ZlqFn/TlWZ1/VljEtQiCh/wnCXPKqzcDg9cYl5u0XfEKmGL+tXL3iNiThZSjYxW+C/9viMAoEV9e/nWzfCaHKqiLCKmJLV9rJqWLoSRVkEwJ6M1TXzF3+sIXfCf56/rUOq9n7nL7eKX51Q68WL/jmGLIOjI6EXb7q0b4Pozm+KeiBXDEMqSWDvAbkF3IVwqV8ic2smpQEYr/ArlsvDEFe3CFgM1LDL72S3T5pZUmdyELWbj2pKiQUh9MtqkExWs6mh2kyyNZZg1uEfgfYbtySUIKhCF75DairIv2iVIPZMqo9iwSuYJQqrjSeETUR0i+p6IVmj/TRNdEFENItpARC956TNsJN4nHKIwwK5UXsZHQmrj9Q4eBGAiM7cAMFF7b8bjAH702F/G4XfeF70ejYJStUNYUbbXn6m2pKEgBI1XbdIPwPva6/cB/M6oERF1BtAAwHiP/WUcfQLMbdOsjkRLWpFYVlEQUg2vCr8BM2/WXm9BTKmXgoiyADwL4H+TnYyIbieifCLKLyyMdsreoAiyeEQ8EC2KRGH2ITmXrGkoKUIiT9I7mIgmENFCg79++nYcS4htNNn+C4CxzLwhWV/M/AYz5zFzXk5Oju0PESQ3n+1PCcNHLwsnMZQ+C2ctiRy1JAoPnSjz0KWl7+HculVCkkQwI6m7AzOb+sAR0VYiasTMm4moEYBtBs3OAnAeEf0FQDUAFYhoHzNb2fsjS4emtXw5b3ZIo0evaSGC4uSc4+kvwlo81btmrniiTygyRJm+pzfCnSOPv//ur+eHJ4xgiFf/ttEABgIYpv3/KrEBM18ff01ENwHIS1Vl7yetG5r74guxbIyLh/RC5fLZkfCJT5UHZZj4VcNBcI/Xu3YYgJ5EtAJAD+09iCiPiN7yKlwmkZfrrjBHJlGlQrlIKHtBSFU8jfCZeTuA7gbb8wHcarD9PQDveekzbLxWx7Li3wM64u6P5vp2fsE7Q/qdhrXbi8IWI7KsfPISzN+wC01qi/0+ikjIokPOOqmub+e+rP0JqFQ+G7eNyPetD8EbN56VG7YIkSY7i9BJCo1HFjFEOsRvk8L5Lev5en4jTohAqmFBEPxHFH7EqFguG4uH9Aq0z5/v7441w/oG2qcgCMGT1gq/ZQNvlawEQRDSibRW+OPvvQALHwt2tKwCCj37uyAI6UhaK3wAqJSC9WKlSpAgCH6Q9l
465bKzcFJOVawq3A8AeOX6TiFLZI9TG1ZHvWpSM1UQBHWkvcIHgJcGdMJrU1biX9d1CDQZmRckLF0QBNVkhMJvc0INvDjAn0yQE+67wJfzCoIgqCb1DNwR45T64gkkCEJqIApfEAQhQxCFLwiCkCGIwhcEQcgQROELgiBkCKLwBUEQMgRR+IIgCBmCKHxBEIQMQRS+IAhChiAKXxAEIUMQhe+C/+t9atgiCIIgOCYjcumo5pZzm2Puup0YekXbsEURBEGwjSh8F1Qol4U3bswLWwxBEARHiElHEAQhQ/Ck8ImoDhF9T0QrtP+1Tdo1JaLxRLSEiBYTUa6XfgVBEATneB3hDwIwkZlbAJiovTdiBIDhzNwaQBcA2zz2KwiCIDjEq8LvB+B97fX7AH6X2ICI2gAox8zfAwAz72PmIo/9CoIgCA7xqvAbMPNm7fUWAA0M2rQEsIuIPieiuUQ0nIgMq3QT0e1ElE9E+YWFhR5FEwRBEPQk9dIhogkAGhrsGqx/w8xMRGzSx3kAOgJYB+BjADcBeDuxITO/AeANAMjLyzM6lyAIguCSpAqfmXuY7SOirUTUiJk3E1EjGNvmNwCYx8yrtGO+BNAVBgpfEARB8A+vJp3RAAZqrwcC+MqgzSwAtYgoR3vfDcBij/0KgiAIDiFm95YTIqoL4BMATQGsBXAtM+8gojwAdzDzrVq7ngCeBUAAZgO4nZkPJzl3oXZOt9QD8JuH48MgFWUGRO4gSUWZAZE7SJoxc47RDk8KP8oQUT4zp1Q4bCrKDIjcQZKKMgMid1SQSFtBEIQMQRS+IAhChpDOCv+NsAVwQSrKDIjcQZKKMgMidyRIWxu+IAiCUJp0HuELgiAIOkThC4IgZAhpp/CJqDcRLSOiAiIyy97pZ/9NiGiylgZ6ERHdo21/lIg2EtE87e8S3TH3a/IuI6JeyT4LETUnol+07R8TUQVFsq8hogWafPnaNsMU2BTjRU2GX4mok+48A7X2K4hooG57Z+38BdqxpEDmVrprOo+I9hDRX6N4vYnoHSLaRkQLddt8v75mfXiQeTgRLdXk+oKIamnbc4nogO6av+ZWNqvP70Fu3+8JIqqovS/Q9uc6kdt3mDlt/gBkA1gJ4CQAFQDMB9AmYBkaAeikva4OYDmANgAeBfC/Bu3baHJWBNBckz/b6rMgFuzWX3v9GoA/K5J9DYB6Cdv+CWCQ9noQgKe115cA+BaxYLquAH7RttcBsEr7X1t7XVvbN1NrS9qxfXz4/rcAaBbF6w3gfACdACwM8vqa9eFB5osRy4ALAE/rZM7Vt0s4jyPZzD6/R7l9vycA/AXAa9rr/gA+VnmPe/1LtxF+FwAFzLyKY5G8oxBL4RwYzLyZmedor/cCWALgRItD+gEYxcyHmHk1gALEPofhZ9FGRt0AfKYdb5iWWiFmKbD7ARjBMWYglj6jEYBeAL5n5h3MvBPA9wB6a/tqMPMMjv0aRvggd3cAK5nZKkI7tOvNzD8C2GEgj9/XN2kacycyM/N4Zj6qvZ0BoLHVOVzKZvb5Xcttgcp7Qv95PgPQPT6biQLppvBPBLBe934DrJWtr2jTuY4AftE23aVNT9/RTavNZDbbXhfALt0PTuVnZADjiWg2Ed2ubTNLge1U7hO114nbVdIfwEe691G/3kAw19dOGnO3/BGxkXic5hRLgz6FiM7TtrmRza/fst/3RMkx2v7dWvtIkG4KPzIQUTUA/wXwV2beA+BVACcD6ABgM2K5haLGuczcCUAfAHcS0fn6ndroLJJ+vJoN9XIAn2qbUuF6lyKI66uyDyIaDOAogA+1TZsBNGXmjgDuAzCSiGqEIZsJKXdPqCbdFP5GAE107xtr2wKFiMojpuw/ZObPAYCZtzLzMWYuBvAmYtNFwFxms+3bEZvelkvY7hlm3qj93wbgC03GrfGpNJVOge1U7o0oPfVX/d30ATCHmbdqnyHy11sjiOtr1odriOgmAJcCuF5T1NBMItu117
MRs3+3dCmb8t9yQPdEyTHa/ppa+0iQbgp/FoAW2gp6BcSm+KODFECz170NYAkzP6fbrrc/XgEg7j0wGkB/bXW/OYAWiC1wGX4W7cc1GcDV2vFmaamdyl2ViKrHXyO2MLcQ5imwRwO4UfOm6ApgtzY1HwfgYiKqrU2ZLwYwTtu3h4i6atfoRhVy6xgAnTkn6tdbRxDX104ac9sQUW8A/wBwOevKlRJRDmnV7IjoJMSu7SqXspl9fi9yB3FP6D/P1QAmxR+IkcDvVeGg/xBb3V+O2OhicAj9n4vYtPRXAPO0v0sAfABggbZ9NIBGumMGa/Iug85zxeyzIOY1MBOxxaVPAVRUIPdJiHkhzAewKN4fYvbHiQBWAJgAoI62nQC8rMm2AECe7lx/1GQrAHCzbnseYj+ylQBeghbprUD2qoiNomrqtkXueiP2QNoM4Ahidt9bgri+Zn14kLkAMTt1/P6Oe6Vcpd078wDMAXCZW9msPr8HuX2/JwBU0t4XaPtP8kvXuPmT1AqCIAgZQrqZdARBEAQTROELgiBkCKLwBUEQMgRR+IIgCBmCKHxBEIQMQRS+IAhChiAKXxAEIUP4f2bBtmAW3wmyAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.plot(audio)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "from IPython.display import Audio" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Audio(audio, rate=22050)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/TensorFlowTTS/examples/parallel_wavegan/decode_parallel_wavegan.py b/TensorFlowTTS/examples/parallel_wavegan/decode_parallel_wavegan.py new file mode 100644 index 0000000000000000000000000000000000000000..d5a05cd78a769bcb2c19a3c78aa998f268a53ec8 --- /dev/null +++ b/TensorFlowTTS/examples/parallel_wavegan/decode_parallel_wavegan.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Decode trained Mb-Melgan from folder.""" + +import argparse +import logging +import os + +import numpy as np +import soundfile as sf +import yaml +from tqdm import tqdm + +from tensorflow_tts.configs import ParallelWaveGANGeneratorConfig +from tensorflow_tts.datasets import MelDataset +from tensorflow_tts.models import TFParallelWaveGANGenerator + + +def main(): + """Run parallel_wavegan decoding from folder.""" + parser = argparse.ArgumentParser( + description="Generate Audio from melspectrogram with trained melgan " + "(See detail in examples/parallel_wavegan/decode_parallel_wavegan.py)." + ) + parser.add_argument( + "--rootdir", + default=None, + type=str, + required=True, + help="directory including ids/durations files.", + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save generated speech." + ) + parser.add_argument( + "--checkpoint", type=str, required=True, help="checkpoint file to be loaded." + ) + parser.add_argument( + "--use-norm", type=int, default=1, help="Use norm or raw melspectrogram." + ) + parser.add_argument("--batch-size", type=int, default=8, help="batch_size.") + parser.add_argument( + "--config", + default=None, + type=str, + required=True, + help="yaml format configuration file. if not explicitly provided, " + "it will be searched in the checkpoint directory. (default=None)", + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. 
(default=1)", + ) + args = parser.parse_args() + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # load config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + + if config["format"] == "npy": + mel_query = "*-fs-after-feats.npy" if "fastspeech" in args.rootdir else "*-norm-feats.npy" if args.use_norm == 1 else "*-raw-feats.npy" + mel_load_fn = np.load + else: + raise ValueError("Only npy is supported.") + + # define data-loader + dataset = MelDataset( + root_dir=args.rootdir, + mel_query=mel_query, + mel_load_fn=mel_load_fn, + ) + dataset = dataset.create(batch_size=args.batch_size) + + # define model and load checkpoint + parallel_wavegan = TFParallelWaveGANGenerator( + config=ParallelWaveGANGeneratorConfig(**config["parallel_wavegan_generator_params"]), + name="parallel_wavegan_generator", + ) + parallel_wavegan._build() + parallel_wavegan.load_weights(args.checkpoint) + + for data in tqdm(dataset, desc="[Decoding]"): + utt_ids, mels, mel_lengths = data["utt_ids"], data["mels"], data["mel_lengths"] + + # pwgan inference. + generated_audios = parallel_wavegan.inference(mels) + + # convert to numpy. 
+ generated_audios = generated_audios.numpy() # [B, T] + + # save to outdir + for i, audio in enumerate(generated_audios): + utt_id = utt_ids[i].numpy().decode("utf-8") + sf.write( + os.path.join(args.outdir, f"{utt_id}.wav"), + audio[: mel_lengths[i].numpy() * config["hop_size"]], + config["sampling_rate"], + "PCM_16", + ) + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/parallel_wavegan/train_parallel_wavegan.py b/TensorFlowTTS/examples/parallel_wavegan/train_parallel_wavegan.py new file mode 100644 index 0000000000000000000000000000000000000000..c4e4bf4170b9e40441c6e64c8bbf491304bc417f --- /dev/null +++ b/TensorFlowTTS/examples/parallel_wavegan/train_parallel_wavegan.py @@ -0,0 +1,474 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Train ParallelWavegan.""" + +import tensorflow as tf + +physical_devices = tf.config.list_physical_devices("GPU") +for i in range(len(physical_devices)): + tf.config.experimental.set_memory_growth(physical_devices[i], True) + +import sys + +sys.path.append(".") + +import argparse +import logging +import os +import soundfile as sf + +import numpy as np +import yaml + +import tensorflow_tts + +from examples.melgan.audio_mel_dataset import AudioMelDataset +from examples.melgan.train_melgan import collater + +from tensorflow_tts.configs import ( + ParallelWaveGANGeneratorConfig, + ParallelWaveGANDiscriminatorConfig, +) +from tensorflow_tts.models import ( + TFParallelWaveGANGenerator, + TFParallelWaveGANDiscriminator, +) + +from tensorflow_tts.trainers import GanBasedTrainer +from tensorflow_tts.losses import TFMultiResolutionSTFT +from tensorflow_tts.utils import calculate_2d_loss, calculate_3d_loss, return_strategy + +from tensorflow_addons.optimizers import RectifiedAdam + + +class ParallelWaveganTrainer(GanBasedTrainer): + """ParallelWaveGAN Trainer class based on GanBasedTrainer.""" + + def __init__( + self, + config, + strategy, + steps=0, + epochs=0, + is_generator_mixed_precision=False, + is_discriminator_mixed_precision=False, + ): + """Initialize trainer. + + Args: + steps (int): Initial global steps. + epochs (int): Initial global epochs. + config (dict): Config dict loaded from yaml format configuration file. + is_generator_mixed_precision (bool): Use mixed precision for generator or not. + is_discriminator_mixed_precision (bool): Use mixed precision for discriminator or not. 
+ + """ + super(ParallelWaveganTrainer, self).__init__( + config=config, + steps=steps, + epochs=epochs, + strategy=strategy, + is_generator_mixed_precision=is_generator_mixed_precision, + is_discriminator_mixed_precision=is_discriminator_mixed_precision, + ) + + self.list_metrics_name = [ + "adversarial_loss", + "gen_loss", + "real_loss", + "fake_loss", + "dis_loss", + "spectral_convergence_loss", + "log_magnitude_loss", + ] + + self.init_train_eval_metrics(self.list_metrics_name) + self.reset_states_train() + self.reset_states_eval() + + def compile(self, gen_model, dis_model, gen_optimizer, dis_optimizer): + super().compile(gen_model, dis_model, gen_optimizer, dis_optimizer) + # define loss + self.stft_loss = TFMultiResolutionSTFT(**self.config["stft_loss_params"]) + self.mse_loss = tf.keras.losses.MeanSquaredError( + reduction=tf.keras.losses.Reduction.NONE + ) + self.mae_loss = tf.keras.losses.MeanAbsoluteError( + reduction=tf.keras.losses.Reduction.NONE + ) + + def compute_per_example_generator_losses(self, batch, outputs): + """Compute per example generator losses and return dict_metrics_losses + Note that all element of the loss MUST has a shape [batch_size] and + the keys of dict_metrics_losses MUST be in self.list_metrics_name. + + Args: + batch: dictionary batch input return from dataloader + outputs: outputs of the model + + Returns: + per_example_losses: per example losses for each GPU, shape [B] + dict_metrics_losses: dictionary loss. 
+ """ + dict_metrics_losses = {} + per_example_losses = 0.0 + + audios = batch["audios"] + y_hat = outputs + + # calculate multi-resolution stft loss + sc_loss, mag_loss = calculate_2d_loss( + audios, tf.squeeze(y_hat, -1), self.stft_loss + ) + gen_loss = 0.5 * (sc_loss + mag_loss) + + if self.steps >= self.config["discriminator_train_start_steps"]: + p_hat = self._discriminator(y_hat) + p = self._discriminator(tf.expand_dims(audios, 2)) + adv_loss = 0.0 + adv_loss += calculate_3d_loss( + tf.ones_like(p_hat), p_hat, loss_fn=self.mse_loss + ) + gen_loss += self.config["lambda_adv"] * adv_loss + + # update dict_metrics_losses + dict_metrics_losses.update({"adversarial_loss": adv_loss}) + + dict_metrics_losses.update({"gen_loss": gen_loss}) + dict_metrics_losses.update({"spectral_convergence_loss": sc_loss}) + dict_metrics_losses.update({"log_magnitude_loss": mag_loss}) + + per_example_losses = gen_loss + return per_example_losses, dict_metrics_losses + + def compute_per_example_discriminator_losses(self, batch, gen_outputs): + audios = batch["audios"] + y_hat = gen_outputs + + y = tf.expand_dims(audios, 2) + p = self._discriminator(y) + p_hat = self._discriminator(y_hat) + + real_loss = 0.0 + fake_loss = 0.0 + + real_loss += calculate_3d_loss(tf.ones_like(p), p, loss_fn=self.mse_loss) + fake_loss += calculate_3d_loss( + tf.zeros_like(p_hat), p_hat, loss_fn=self.mse_loss + ) + + dis_loss = real_loss + fake_loss + + # calculate per_example_losses and dict_metrics_losses + per_example_losses = dis_loss + + dict_metrics_losses = { + "real_loss": real_loss, + "fake_loss": fake_loss, + "dis_loss": dis_loss, + } + + return per_example_losses, dict_metrics_losses + + def generate_and_save_intermediate_result(self, batch): + """Generate and save intermediate result.""" + import matplotlib.pyplot as plt + + # generate + y_batch_ = self.one_step_predict(batch) + y_batch = batch["audios"] + utt_ids = batch["utt_ids"] + + # convert to tensor. 
+ # here we just take a sample at first replica. + try: + y_batch_ = y_batch_.values[0].numpy() + y_batch = y_batch.values[0].numpy() + utt_ids = utt_ids.values[0].numpy() + except Exception: + y_batch_ = y_batch_.numpy() + y_batch = y_batch.numpy() + utt_ids = utt_ids.numpy() + + # check directory + dirname = os.path.join(self.config["outdir"], f"predictions/{self.steps}steps") + if not os.path.exists(dirname): + os.makedirs(dirname) + + for idx, (y, y_) in enumerate(zip(y_batch, y_batch_), 0): + # convert to ndarray + y, y_ = tf.reshape(y, [-1]).numpy(), tf.reshape(y_, [-1]).numpy() + + # plit figure and save it + utt_id = utt_ids[idx] + figname = os.path.join(dirname, f"{utt_id}.png") + plt.subplot(2, 1, 1) + plt.plot(y) + plt.title("groundtruth speech") + plt.subplot(2, 1, 2) + plt.plot(y_) + plt.title(f"generated speech @ {self.steps} steps") + plt.tight_layout() + plt.savefig(figname) + plt.close() + + # save as wavefile + y = np.clip(y, -1, 1) + y_ = np.clip(y_, -1, 1) + sf.write( + figname.replace(".png", "_ref.wav"), + y, + self.config["sampling_rate"], + "PCM_16", + ) + sf.write( + figname.replace(".png", "_gen.wav"), + y_, + self.config["sampling_rate"], + "PCM_16", + ) + + +def main(): + """Run training process.""" + parser = argparse.ArgumentParser( + description="Train ParallelWaveGan (See detail in tensorflow_tts/examples/parallel_wavegan/train_parallel_wavegan.py)" + ) + parser.add_argument( + "--train-dir", + default=None, + type=str, + help="directory including training data. ", + ) + parser.add_argument( + "--dev-dir", + default=None, + type=str, + help="directory including development data. ", + ) + parser.add_argument( + "--use-norm", default=1, type=int, help="use norm mels for training or raw." + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save checkpoints." + ) + parser.add_argument( + "--config", type=str, required=True, help="yaml format configuration file." 
+ ) + parser.add_argument( + "--resume", + default="", + type=str, + nargs="?", + help='checkpoint file path to resume training. (default="")', + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + parser.add_argument( + "--generator_mixed_precision", + default=0, + type=int, + help="using mixed precision for generator or not.", + ) + parser.add_argument( + "--discriminator_mixed_precision", + default=0, + type=int, + help="using mixed precision for discriminator or not.", + ) + args = parser.parse_args() + + # return strategy + STRATEGY = return_strategy() + + # set mixed precision config + if args.generator_mixed_precision == 1 or args.discriminator_mixed_precision == 1: + tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True}) + + args.generator_mixed_precision = bool(args.generator_mixed_precision) + args.discriminator_mixed_precision = bool(args.discriminator_mixed_precision) + + args.use_norm = bool(args.use_norm) + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # check arguments + if args.train_dir is None: + raise ValueError("Please specify --train-dir") + if args.dev_dir is None: + raise ValueError("Please specify either --valid-dir") + + # load and save config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) 
+ config["version"] = tensorflow_tts.__version__ + with open(os.path.join(args.outdir, "config.yml"), "w") as f: + yaml.dump(config, f, Dumper=yaml.Dumper) + for key, value in config.items(): + logging.info(f"{key} = {value}") + + # get dataset + if config["remove_short_samples"]: + mel_length_threshold = config["batch_max_steps"] // config[ + "hop_size" + ] + 2 * config["parallel_wavegan_generator_params"].get("aux_context_window", 0) + else: + mel_length_threshold = None + + if config["format"] == "npy": + audio_query = "*-wave.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + audio_load_fn = np.load + mel_load_fn = np.load + else: + raise ValueError("Only npy are supported.") + + # define train/valid dataset + train_dataset = AudioMelDataset( + root_dir=args.train_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant(config["batch_max_steps"], dtype=tf.int32), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] + * STRATEGY.num_replicas_in_sync + * config["gradient_accumulation_steps"], + ) + + valid_dataset = AudioMelDataset( + root_dir=args.dev_dir, + audio_query=audio_query, + mel_query=mel_query, + audio_load_fn=audio_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + ).create( + is_shuffle=config["is_shuffle"], + map_fn=lambda items: collater( + items, + batch_max_steps=tf.constant( + config["batch_max_steps_valid"], dtype=tf.int32 + ), + hop_size=tf.constant(config["hop_size"], dtype=tf.int32), + ), + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync, + ) + + # define trainer + trainer = ParallelWaveganTrainer( + steps=0, + epochs=0, + 
config=config, + strategy=STRATEGY, + is_generator_mixed_precision=args.generator_mixed_precision, + is_discriminator_mixed_precision=args.discriminator_mixed_precision, + ) + + with STRATEGY.scope(): + # define generator and discriminator + generator = TFParallelWaveGANGenerator( + ParallelWaveGANGeneratorConfig( + **config["parallel_wavegan_generator_params"] + ), + name="parallel_wavegan_generator", + ) + + discriminator = TFParallelWaveGANDiscriminator( + ParallelWaveGANDiscriminatorConfig( + **config["parallel_wavegan_discriminator_params"] + ), + name="parallel_wavegan_discriminator", + ) + + # dummy input to build model. + fake_mels = tf.random.uniform(shape=[1, 100, 80], dtype=tf.float32) + y_hat = generator(fake_mels) + discriminator(y_hat) + + generator.summary() + discriminator.summary() + + # define optimizer + generator_lr_fn = getattr( + tf.keras.optimizers.schedules, config["generator_optimizer_params"]["lr_fn"] + )(**config["generator_optimizer_params"]["lr_params"]) + discriminator_lr_fn = getattr( + tf.keras.optimizers.schedules, + config["discriminator_optimizer_params"]["lr_fn"], + )(**config["discriminator_optimizer_params"]["lr_params"]) + + gen_optimizer = RectifiedAdam(learning_rate=generator_lr_fn, amsgrad=False) + dis_optimizer = RectifiedAdam(learning_rate=discriminator_lr_fn, amsgrad=False) + + trainer.compile( + gen_model=generator, + dis_model=discriminator, + gen_optimizer=gen_optimizer, + dis_optimizer=dis_optimizer, + ) + + # start training + try: + trainer.fit( + train_dataset, + valid_dataset, + saved_path=os.path.join(config["outdir"], "checkpoints/"), + resume=args.resume, + ) + except KeyboardInterrupt: + trainer.save_checkpoint() + logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.") + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/tacotron2/README.md b/TensorFlowTTS/examples/tacotron2/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..47a2dfcf263c69ae993e8d5cbb93f95a3d5e1061 --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/README.md @@ -0,0 +1,141 @@ +# Tacotron 2 +Based on the script [`train_tacotron2.py`](https://github.com/dathudeptrai/TensorflowTTS/blob/master/examples/tacotron2/train_tacotron2.py). + +## Training Tacotron-2 from scratch with LJSpeech dataset. +This example code show you how to train Tactron-2 from scratch with Tensorflow 2 based on custom training loop and tf.function. The data used for this example is LJSpeech, you can download the dataset at [link](https://keithito.com/LJ-Speech-Dataset/). + +### Step 1: Create Tensorflow based Dataloader (tf.dataset) +First, you need define data loader based on AbstractDataset class (see [`abstract_dataset.py`](https://github.com/dathudeptrai/TensorflowTTS/blob/master/tensorflow_tts/datasets/abstract_dataset.py)). On this example, a dataloader read dataset from path. I use suffix to classify what file is a charactor and mel-spectrogram (see [`tacotron_dataset.py`](https://github.com/dathudeptrai/TensorflowTTS/blob/master/examples/tacotron2/tacotron_dataset.py)). If you already have preprocessed version of your target dataset, you don't need to use this example dataloader, you just need refer my dataloader and modify **generator function** to adapt with your case. Normally, a generator function should return [charactor_ids, char_length, mel, mel_length], here i also return guided attention (see [`DC_TTS`](https://arxiv.org/pdf/1710.08969.pdf)) to support training. + +### Step 2: Training from scratch +After you redefine your dataloader, pls modify an input arguments, train_dataset and valid_dataset from [`train_tacotron2.py`](https://github.com/dathudeptrai/TensorflowTTS/blob/master/examples/tacotron2/train_tacotron2.py). 
Here is an example command line to training tacotron-2 from scratch: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/tacotron2/train_tacotron2.py \ + --train-dir ./dump/train/ \ + --dev-dir ./dump/valid/ \ + --outdir ./examples/tacotron2/exp/train.tacotron2.v1/ \ + --config ./examples/tacotron2/conf/tacotron2.v1.yaml \ + --use-norm 1 \ + --mixed_precision 0 \ + --resume "" +``` + +IF you want to use MultiGPU to training you can replace `CUDA_VISIBLE_DEVICES=0` by `CUDA_VISIBLE_DEVICES=0,1,2,3` for example. You also need to tune the `batch_size` for each GPU (in config file) by yourself to maximize the performance. Note that MultiGPU now support for Training but not yet support for Decode. + +In case you want to resume the training progress, please following below example command line: + +```bash +--resume ./examples/tacotron2/exp/train.tacotron2.v1/checkpoints/ckpt-100000 +``` + +If you want to finetune a model, use `--pretrained` like this with your model filename +```bash +--pretrained pretrained.h5 +``` + +### Step 3: Decode mel-spectrogram from folder ids +To running inference on folder ids (charactor), run below command line: + +```bash +CUDA_VISIBLE_DEVICES=0 python examples/tacotron2/decode_tacotron2.py \ + --rootdir ./dump/valid/ \ + --outdir ./prediction/tacotron2-120k/ \ + --checkpoint ./examples/tacotron2/exp/train.tacotron2.v1/checkpoints/model-120000.h5 \ + --config ./examples/tacotron2/conf/tacotron2.v1.yaml \ + --batch-size 32 +``` + +### Step 4: Extract duration from alignments for FastSpeech +You may need to extract durations for student models like fastspeech. 
Here we use teacher forcing with window masking trick to extract durations from alignment maps: + +Extract for valid set: +```bash +CUDA_VISIBLE_DEVICES=0 python examples/tacotron2/extract_duration.py \ + --rootdir ./dump/valid/ \ + --outdir ./dump/valid/durations/ \ + --checkpoint ./examples/tacotron2/exp/train.tacotron2.v1/checkpoints/model-65000.h5 \ + --use-norm 1 \ + --config ./examples/tacotron2/conf/tacotron2.v1.yaml \ + --batch-size 32 + --win-front 3 \ + --win-back 3 +``` + +Extract for training set: +```bash +CUDA_VISIBLE_DEVICES=0 python examples/tacotron2/extract_duration.py \ + --rootdir ./dump/train/ \ + --outdir ./dump/train/durations/ \ + --checkpoint ./examples/tacotron2/exp/train.tacotron2.v1/checkpoints/model-65000.h5 \ + --use-norm 1 \ + --config ./examples/tacotron2/conf/tacotron2.v1.yaml \ + --batch-size 32 + --win-front 3 \ + --win-back 3 +``` + +To extract postnets for training vocoder, follow above steps but with `extract_postnets.py` + +You also can download my extracted durations at 40k steps at [link](https://drive.google.com/drive/u/1/folders/1kaPXRdLg9gZrll9KtvH3-feOBMM8sn3_?usp=drive_open). + +## Finetune Tacotron-2 with ljspeech pretrained on other languages +Here is an example show you how to use pretrained ljspeech to training with other languages. This does not guarantee a better model or faster convergence in all cases but it will improve if there is a correlation between target language and pretrained language. The only thing you need to do before finetune on other languages is re-define embedding layers. You can do it by following code: + +```bash +tacotron_config = Tacotron2Config(**config["tacotron2_params"]) +tacotron_config.vocab_size = NEW_VOCAB_SIZE +tacotron2 = TFTacotron2(config=tacotron_config, training=True, name='tacotron2') +tacotron2._build() +tacotron2.summary() +tacotron2.load_weights("./examples/tacotron2/exp/train.tacotron2.v1/checkpoints/model-120000.h5", by_name=True, skip_mismatch=True) +... 
# training as normal. +``` +You can also define `var_train_expr` in config file to let model training only on some layers in case you want to fine-tune on your dataset with the same pretrained language and processor. For example, `var_train_expr: "embeddings|encoder|decoder"` means we just training all variables that `embeddings`, `encoder`, `decoder` exist in its name. + +## Using Forced Alignment Guided Attention Loss + +Instead of regular guided attention loss you can opt for [Forced Alignment Guided Attention Loss](https://docs.google.com/document/d/1TMH0klOWzlH4Up_GFT2cR4zB0JehAu1pe9zOemZPk7Y/edit#) (FAL), which uses prealignment information from Montreal Forced Aligner to more accurately guide each utterance. This especially helps on harder datasets, like those with long silences. +First see `examples/mfa_extraction`, and once you have extracted durations, run `export_align.py`, like this. + + python examples/tacotron2/export_align.py --dump-dir dump --looseness 3.5 + +You can experiment with different `looseness` values for stricter (lower) or more tolerant masks. **Note that this script assumes you are using r = 1** +After that, simply pass the argument `--fal 1` to the train_tacotron2.py script afterwards. + + +## Results +Here is a result of tacotron2 based on this config [`tacotron2.v1.yaml`](https://github.com/dathudeptrai/TensorflowTTS/blob/tacotron-2-example/examples/tacotron-2/conf/tacotron2.v1.yaml) but with reduction_factor = 7, we will update learning curves for reduction_factor = 1. + +### Alignments progress + + +### Learning curves + + +## Some important notes + +* This implementation use guided attention by default to help a model learn diagonal alignment faster. After 15-20k, you can disble alignment loss. +* Relu activation function is still a best compared with mish and others. +* Support window masking for inference, solve problem with very long sentences. +* The model convergence at around 100k steps. 
+* Scheduled teacher forcing is supported but training with teacher forcing give a best performance based on my experiments. You need to be aware of the importance of applying high dropout for prenet (both training and inference), this will reduce the effect of prev mel, so in an inference stage, a noise of prev mel prediction won't affect too much to a current decoder. +* If an amplitude levels of synthesis audio is lower compared to original speech, you may need multiply mel predicted to global gain constant (eg 1.2). +* Apply input_signature for tacotron make training slower, don't know why, so only use experimental_relax_shapes = True. +* It's funny but training with fixed char_len (200) and mel_len (870) is 2x faster than dynamic shape even it's redundant. But i'm not sure because there is a man report that dynamic shape is faster, pls refer [comment](https://github.com/dathudeptrai/TensorflowTTS/issues/34#issuecomment-642309118), you may need to try both **use_fixed_shapes** is True and False to check by yourself 😅. 
+ +## Pretrained Models and Audio samples +| Model | Conf | Lang | Fs [Hz] | Mel range [Hz] | FFT / Hop / Win [pt] | # iters | reduction factor| +| :------ | :---: | :---: | :----: | :--------: | :---------------: | :-----: | :-----: | +| [tacotron2.v1](https://drive.google.com/open?id=1kaPXRdLg9gZrll9KtvH3-feOBMM8sn3_) | [link](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/tacotron2/conf/tacotron2.v1.yaml) | EN | 22.05k | 80-7600 | 1024 / 256 / None | 65K | 1 +| [tacotron2.v1](https://drive.google.com/drive/folders/1WMBe01BBnYf3sOxMhbvnF2CUHaRTpBXJ?usp=sharing) | [link](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/tacotron2/conf/tacotron2.kss.v1.yaml) | KO | 22.05k | 80-7600 | 1024 / 256 / None | 100K | 1 +| [tacotron2.lju.v1](https://drive.google.com/drive/folders/1tOMzik_Nr4eY63gooKYSmNTJyXC6Pp55?usp=sharing) | [link](https://github.com/tensorspeech/TensorFlowTTS/tree/master/examples/tacotron2/conf/tacotron2.lju.v1.yaml) | EN | 44.1k | 20-11025 | 2048 / 512 / None | 126K | 1 + +## Reference + +1. https://github.com/Rayhane-mamah/Tacotron-2 +2. https://github.com/mozilla/TTS +3. https://github.com/tensorflow/addons +4. https://github.com/espnet/espnet +5. [Natural TTS Synthesis by Conditioning WaveNet on Mel Spectrogram Predictions](https://arxiv.org/abs/1712.05884) +6. [Generating Sequences With Recurrent Neural Networks](https://arxiv.org/abs/1308.0850) \ No newline at end of file diff --git a/TensorFlowTTS/examples/tacotron2/conf/tacotron2.baker.v1.yaml b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.baker.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6f069060a48483667c2b19893b4f274e203fc0c4 --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.baker.v1.yaml @@ -0,0 +1,86 @@ +# This is the hyperparameter configuration file for Tacotron2 v1. +# Please make sure this is adjusted for the Baker dataset. 
If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but 65k iters is enough to get a good models. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. +format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "tacotron2" + +tacotron2_params: + dataset: baker + embedding_hidden_size: 512 + initializer_range: 0.5 + embedding_dropout_prob: 0.1 + n_speakers: 1 + n_conv_encoder: 5 + encoder_conv_filters: 512 + encoder_conv_kernel_sizes: 5 + encoder_conv_activation: 'relu' + encoder_conv_dropout_rate: 0.5 + encoder_lstm_units: 256 + n_prenet_layers: 2 + prenet_units: 256 + prenet_activation: 'relu' + prenet_dropout_rate: 0.5 + n_lstm_decoder: 1 + reduction_factor: 2 + decoder_lstm_units: 1024 + attention_dim: 128 + attention_filters: 32 + attention_kernel: 31 + n_mels: 80 + n_conv_postnet: 5 + postnet_conv_filters: 512 + postnet_conv_kernel_sizes: 5 + postnet_dropout_rate: 0.1 + attention_type: "lsa" + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 32 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. +remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. 
+use_fixed_shapes: true # use_fixed_shapes for training (2x speed-up) + # refer (https://github.com/tensorspeech/TensorflowTTS/issues/34#issuecomment-642309118) + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00001 + decay_steps: 150000 # < train_max_steps is recommend. + warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|decoder_cell' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 100 # Interval steps to record the training log. +start_schedule_teacher_forcing: 200001 # don't need to apply schedule teacher forcing. +start_ratio_value: 0.5 # start ratio of scheduled teacher forcing. +schedule_decay_steps: 50000 # decay step scheduled teacher forcing. +end_ratio_value: 0.0 # end ratio of scheduled teacher forcing. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of results to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/tacotron2/conf/tacotron2.jsut.v1.yaml b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.jsut.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..68b40635d85692938f9df4153dd71a6720d9bceb --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.jsut.v1.yaml @@ -0,0 +1,86 @@ +# This is the hyperparameter configuration file for Tacotron2 v1. +# Please make sure this is adjusted for the Baker dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but 65k iters is enough to get a good models. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 300 # Hop size. +format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "tacotron2" + +tacotron2_params: + dataset: jsut + embedding_hidden_size: 512 + initializer_range: 0.5 + embedding_dropout_prob: 0.1 + n_speakers: 1 + n_conv_encoder: 5 + encoder_conv_filters: 512 + encoder_conv_kernel_sizes: 5 + encoder_conv_activation: 'relu' + encoder_conv_dropout_rate: 0.5 + encoder_lstm_units: 256 + n_prenet_layers: 2 + prenet_units: 256 + prenet_activation: 'relu' + prenet_dropout_rate: 0.5 + n_lstm_decoder: 1 + reduction_factor: 2 + decoder_lstm_units: 1024 + attention_dim: 128 + attention_filters: 32 + attention_kernel: 31 + n_mels: 80 + n_conv_postnet: 5 + postnet_conv_filters: 512 + postnet_conv_kernel_sizes: 5 + postnet_dropout_rate: 0.1 + attention_type: "lsa" + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 32 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. 
+remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +use_fixed_shapes: true # use_fixed_shapes for training (2x speed-up) + # refer (https://github.com/tensorspeech/TensorflowTTS/issues/34#issuecomment-642309118) + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00001 + decay_steps: 150000 # < train_max_steps is recommend. + warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|decoder_cell' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 5000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 100 # Interval steps to record the training log. +start_schedule_teacher_forcing: 200001 # don't need to apply schedule teacher forcing. +start_ratio_value: 0.5 # start ratio of scheduled teacher forcing. +schedule_decay_steps: 50000 # decay step scheduled teacher forcing. +end_ratio_value: 0.0 # end ratio of scheduled teacher forcing. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of results to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/tacotron2/conf/tacotron2.kss.v1.yaml b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.kss.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..dc553fbe5daea8e8fe5b3354c53c82facfcddf1b --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.kss.v1.yaml @@ -0,0 +1,86 @@ +# This is the hyperparameter configuration file for Tacotron2 v1. +# Please make sure this is adjusted for the KSS dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but 65k iters is enough to get a good models. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. +format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "tacotron2" + +tacotron2_params: + dataset: "kss" + embedding_hidden_size: 512 + initializer_range: 0.02 + embedding_dropout_prob: 0.1 + n_speakers: 1 + n_conv_encoder: 5 + encoder_conv_filters: 512 + encoder_conv_kernel_sizes: 5 + encoder_conv_activation: 'relu' + encoder_conv_dropout_rate: 0.5 + encoder_lstm_units: 256 + n_prenet_layers: 2 + prenet_units: 256 + prenet_activation: 'relu' + prenet_dropout_rate: 0.5 + n_lstm_decoder: 1 + reduction_factor: 1 + decoder_lstm_units: 1024 + attention_dim: 128 + attention_filters: 32 + attention_kernel: 31 + n_mels: 80 + n_conv_postnet: 5 + postnet_conv_filters: 512 + postnet_conv_kernel_sizes: 5 + postnet_dropout_rate: 0.1 + attention_type: "lsa" + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 32 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. 
+remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +use_fixed_shapes: true # use_fixed_shapes for training (2x speed-up) + # refer (https://github.com/dathudeptrai/TensorflowTTS/issues/34#issuecomment-642309118) + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00001 + decay_steps: 150000 # < train_max_steps is recommend. + warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|decoder_cell' ) + # must separate by |. if var_train_expr is null then we + # training all variable +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 2000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +start_schedule_teacher_forcing: 200001 # don't need to apply schedule teacher forcing. +start_ratio_value: 0.5 # start ratio of scheduled teacher forcing. +schedule_decay_steps: 50000 # decay step scheduled teacher forcing. +end_ratio_value: 0.0 # end ratio of scheduled teacher forcing. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of results to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/tacotron2/conf/tacotron2.lju.v1.yaml b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.lju.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7d4259315087c2e57683fd3a33aa2b8a67a0a43f --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.lju.v1.yaml @@ -0,0 +1,87 @@ +# This is the hyperparameter configuration file for Tacotron2 v1. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but 65k iters is enough to get a good models. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 512 # Hop size. +format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "tacotron2" + +tacotron2_params: + dataset: ljspeechu + embedding_hidden_size: 512 + initializer_range: 0.02 + embedding_dropout_prob: 0.1 + n_speakers: 1 + n_conv_encoder: 5 + encoder_conv_filters: 512 + encoder_conv_kernel_sizes: 5 + encoder_conv_activation: 'relu' + encoder_conv_dropout_rate: 0.5 + encoder_lstm_units: 256 + n_prenet_layers: 2 + prenet_units: 256 + prenet_activation: 'relu' + prenet_dropout_rate: 0.5 + n_lstm_decoder: 1 + reduction_factor: 1 + decoder_lstm_units: 1024 + attention_dim: 128 + attention_filters: 32 + attention_kernel: 31 + n_mels: 80 + n_conv_postnet: 5 + postnet_conv_filters: 512 + postnet_conv_kernel_sizes: 5 + postnet_dropout_rate: 0.1 + attention_type: "lsa" + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 32 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. 
+remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +use_fixed_shapes: true # use_fixed_shapes for training (2x speed-up) + + # refer (https://github.com/dathudeptrai/TensorflowTTS/issues/34#issuecomment-642309118) + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00001 + decay_steps: 150000 # < train_max_steps is recommend. + warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|decoder_cell' ) + # must separate by |. if var_train_expr is null then we + # training all variables. +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 2000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +start_schedule_teacher_forcing: 200001 # don't need to apply schedule teacher forcing. +start_ratio_value: 0.5 # start ratio of scheduled teacher forcing. +schedule_decay_steps: 50000 # decay step scheduled teacher forcing. +end_ratio_value: 0.0 # end ratio of scheduled teacher forcing. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of results to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/tacotron2/conf/tacotron2.synpaflex.v1.yaml b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.synpaflex.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a0e7c56790031db591287934aee849752e978965 --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.synpaflex.v1.yaml @@ -0,0 +1,86 @@ +# This is the hyperparameter configuration file for Tacotron2 v1. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but 65k iters is enough to get a good models. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. +format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "tacotron2" + +tacotron2_params: + dataset: synpaflex + embedding_hidden_size: 512 + initializer_range: 0.02 + embedding_dropout_prob: 0.1 + n_speakers: 1 + n_conv_encoder: 5 + encoder_conv_filters: 512 + encoder_conv_kernel_sizes: 5 + encoder_conv_activation: 'relu' + encoder_conv_dropout_rate: 0.5 + encoder_lstm_units: 256 + n_prenet_layers: 2 + prenet_units: 256 + prenet_activation: 'relu' + prenet_dropout_rate: 0.5 + n_lstm_decoder: 1 + reduction_factor: 1 + decoder_lstm_units: 1024 + attention_dim: 128 + attention_filters: 32 + attention_kernel: 31 + n_mels: 80 + n_conv_postnet: 5 + postnet_conv_filters: 512 + postnet_conv_kernel_sizes: 5 + postnet_dropout_rate: 0.1 + attention_type: "lsa" + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 32 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. 
+remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +use_fixed_shapes: true # use_fixed_shapes for training (2x speed-up) + # refer (https://github.com/dathudeptrai/TensorflowTTS/issues/34#issuecomment-642309118) + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00001 + decay_steps: 150000 # < train_max_steps is recommend. + warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|decoder_cell' ) + # must separate by |. if var_train_expr is null then we + # training all variables. +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 2000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +start_schedule_teacher_forcing: 200001 # don't need to apply schedule teacher forcing. +start_ratio_value: 0.5 # start ratio of scheduled teacher forcing. +schedule_decay_steps: 50000 # decay step scheduled teacher forcing. +end_ratio_value: 0.0 # end ratio of scheduled teacher forcing. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of results to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/tacotron2/conf/tacotron2.v1.yaml b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.v1.yaml new file mode 100644 index 0000000000000000000000000000000000000000..862b9c904e7611de0c72d2a730c9c3a3b0bfa1ff --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/conf/tacotron2.v1.yaml @@ -0,0 +1,86 @@ +# This is the hyperparameter configuration file for Tacotron2 v1. +# Please make sure this is adjusted for the LJSpeech dataset. If you want to +# apply to the other dataset, you might need to carefully change some parameters. +# This configuration performs 200k iters but 65k iters is enough to get a good models. + +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +hop_size: 256 # Hop size. +format: "npy" + + +########################################################### +# NETWORK ARCHITECTURE SETTING # +########################################################### +model_type: "tacotron2" + +tacotron2_params: + dataset: ljspeech + embedding_hidden_size: 512 + initializer_range: 0.02 + embedding_dropout_prob: 0.1 + n_speakers: 1 + n_conv_encoder: 5 + encoder_conv_filters: 512 + encoder_conv_kernel_sizes: 5 + encoder_conv_activation: 'relu' + encoder_conv_dropout_rate: 0.5 + encoder_lstm_units: 256 + n_prenet_layers: 2 + prenet_units: 256 + prenet_activation: 'relu' + prenet_dropout_rate: 0.5 + n_lstm_decoder: 1 + reduction_factor: 1 + decoder_lstm_units: 1024 + attention_dim: 128 + attention_filters: 32 + attention_kernel: 31 + n_mels: 80 + n_conv_postnet: 5 + postnet_conv_filters: 512 + postnet_conv_kernel_sizes: 5 + postnet_dropout_rate: 0.1 + attention_type: "lsa" + +########################################################### +# DATA LOADER SETTING # +########################################################### +batch_size: 32 # Batch size for each GPU with assuming that gradient_accumulation_steps == 1. 
+remove_short_samples: true # Whether to remove samples the length of which are less than batch_max_steps. +allow_cache: true # Whether to allow cache in dataset. If true, it requires cpu memory. +mel_length_threshold: 32 # remove all targets has mel_length <= 32 +is_shuffle: true # shuffle dataset after each epoch. +use_fixed_shapes: true # use_fixed_shapes for training (2x speed-up) + # refer (https://github.com/dathudeptrai/TensorflowTTS/issues/34#issuecomment-642309118) + +########################################################### +# OPTIMIZER & SCHEDULER SETTING # +########################################################### +optimizer_params: + initial_learning_rate: 0.001 + end_learning_rate: 0.00001 + decay_steps: 150000 # < train_max_steps is recommend. + warmup_proportion: 0.02 + weight_decay: 0.001 + +gradient_accumulation_steps: 1 +var_train_expr: null # trainable variable expr (eg. 'embeddings|decoder_cell' ) + # must separate by |. if var_train_expr is null then we + # training all variables. +########################################################### +# INTERVAL SETTING # +########################################################### +train_max_steps: 200000 # Number of training steps. +save_interval_steps: 2000 # Interval steps to save checkpoint. +eval_interval_steps: 500 # Interval steps to evaluate the network. +log_interval_steps: 200 # Interval steps to record the training log. +start_schedule_teacher_forcing: 200001 # don't need to apply schedule teacher forcing. +start_ratio_value: 0.5 # start ratio of scheduled teacher forcing. +schedule_decay_steps: 50000 # decay step scheduled teacher forcing. +end_ratio_value: 0.0 # end ratio of scheduled teacher forcing. +########################################################### +# OTHER SETTING # +########################################################### +num_save_intermediate_results: 1 # Number of results to be saved as intermediate results. 
diff --git a/TensorFlowTTS/examples/tacotron2/decode_tacotron2.py b/TensorFlowTTS/examples/tacotron2/decode_tacotron2.py new file mode 100644 index 0000000000000000000000000000000000000000..089d7fc64d530a57ad16c9d3a94d1ec243c2acd2 --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/decode_tacotron2.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Decode Tacotron-2.""" + +import argparse +import logging +import os +import sys + +sys.path.append(".") + +import numpy as np +import tensorflow as tf +import yaml +from tqdm import tqdm +import matplotlib.pyplot as plt + +from examples.tacotron2.tacotron_dataset import CharactorMelDataset +from tensorflow_tts.configs import Tacotron2Config +from tensorflow_tts.models import TFTacotron2 + + +def main(): + """Running decode tacotron-2 mel-spectrogram.""" + parser = argparse.ArgumentParser( + description="Decode mel-spectrogram from folder ids with trained Tacotron-2 " + "(See detail in tensorflow_tts/example/tacotron2/decode_tacotron2.py)." + ) + parser.add_argument( + "--rootdir", + default=None, + type=str, + required=True, + help="directory including ids/durations files.", + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save generated speech." + ) + parser.add_argument( + "--checkpoint", type=str, required=True, help="checkpoint file to be loaded." 
+ ) + parser.add_argument( + "--use-norm", default=1, type=int, help="usr norm-mels for train or raw." + ) + parser.add_argument("--batch-size", default=8, type=int, help="batch size.") + parser.add_argument("--win-front", default=3, type=int, help="win-front.") + parser.add_argument("--win-back", default=3, type=int, help="win-front.") + parser.add_argument( + "--config", + default=None, + type=str, + required=True, + help="yaml format configuration file. if not explicitly provided, " + "it will be searched in the checkpoint directory. (default=None)", + ) + parser.add_argument( + "--verbose", + type=int, + default=1, + help="logging level. higher is more logging. (default=1)", + ) + args = parser.parse_args() + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # load config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + + if config["format"] == "npy": + char_query = "*-ids.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + char_load_fn = np.load + mel_load_fn = np.load + else: + raise ValueError("Only npy is supported.") + + # define data-loader + dataset = CharactorMelDataset( + dataset=config["tacotron2_params"]["dataset"], + root_dir=args.rootdir, + charactor_query=char_query, + mel_query=mel_query, + charactor_load_fn=char_load_fn, + mel_load_fn=mel_load_fn, + reduction_factor=config["tacotron2_params"]["reduction_factor"] + ) + 
dataset = dataset.create(allow_cache=True, batch_size=args.batch_size) + + # define model and load checkpoint + tacotron2 = TFTacotron2( + config=Tacotron2Config(**config["tacotron2_params"]), + name="tacotron2", + ) + tacotron2._build() # build model to be able load_weights. + tacotron2.load_weights(args.checkpoint) + + # setup window + tacotron2.setup_window(win_front=args.win_front, win_back=args.win_back) + + for data in tqdm(dataset, desc="[Decoding]"): + utt_ids = data["utt_ids"] + utt_ids = utt_ids.numpy() + + # tacotron2 inference. + ( + mel_outputs, + post_mel_outputs, + stop_outputs, + alignment_historys, + ) = tacotron2.inference( + input_ids=data["input_ids"], + input_lengths=data["input_lengths"], + speaker_ids=data["speaker_ids"], + ) + + # convert to numpy + post_mel_outputs = post_mel_outputs.numpy() + + for i, post_mel_output in enumerate(post_mel_outputs): + stop_token = tf.math.round(tf.nn.sigmoid(stop_outputs[i])) # [T] + real_length = tf.math.reduce_sum( + tf.cast(tf.math.equal(stop_token, 0.0), tf.int32), -1 + ) + post_mel_output = post_mel_output[:real_length, :] + + saved_name = utt_ids[i].decode("utf-8") + + # save D to folder. 
+ np.save( + os.path.join(args.outdir, f"{saved_name}-norm-feats.npy"), + post_mel_output.astype(np.float32), + allow_pickle=False, + ) + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/tacotron2/export_align.py b/TensorFlowTTS/examples/tacotron2/export_align.py new file mode 100644 index 0000000000000000000000000000000000000000..3143ea1c70963cec327f702c53144367b98d866d --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/export_align.py @@ -0,0 +1,168 @@ +import os +import shutil +from tqdm import tqdm +import argparse + +from scipy.ndimage import zoom +from skimage.data import camera +import numpy as np +from scipy.spatial.distance import cdist + + +def safemkdir(dirn): + if not os.path.isdir(dirn): + os.mkdir(dirn) + + +from pathlib import Path + + +def duration_to_alignment(in_duration): + total_len = np.sum(in_duration) + num_chars = len(in_duration) + + attention = np.zeros(shape=(num_chars, total_len), dtype=np.float32) + y_offset = 0 + + for duration_idx, duration_val in enumerate(in_duration): + for y_val in range(0, duration_val): + attention[duration_idx][y_offset + y_val] = 1.0 + + y_offset += duration_val + + return attention + + +def rescale_alignment(in_alignment, in_targcharlen): + current_x = in_alignment.shape[0] + x_ratio = in_targcharlen / current_x + pivot_points = [] + + zoomed = zoom(in_alignment, (x_ratio, 1.0), mode="nearest") + + for x_v in range(0, zoomed.shape[0]): + for y_v in range(0, zoomed.shape[1]): + val = zoomed[x_v][y_v] + if val < 0.5: + val = 0.0 + else: + val = 1.0 + pivot_points.append((x_v, y_v)) + + zoomed[x_v][y_v] = val + + if zoomed.shape[0] != in_targcharlen: + print("Zooming didn't rshape well, explicitly reshaping") + zoomed.resize((in_targcharlen, in_alignment.shape[1])) + + return zoomed, pivot_points + + +def gather_dist(in_mtr, in_points): + # initialize with known size for fast + full_coords = [(0, 0) for x in range(in_mtr.shape[0] * in_mtr.shape[1])] + i = 0 + for x in range(0, 
in_mtr.shape[0]): + for y in range(0, in_mtr.shape[1]): + full_coords[i] = (x, y) + i += 1 + + return cdist(full_coords, in_points, "euclidean") + + +def create_guided(in_align, in_pvt, looseness): + new_att = np.ones(in_align.shape, dtype=np.float32) + # It is dramatically faster that we first gather all the points and calculate than do it manually + # for each point in for loop + dist_arr = gather_dist(in_align, in_pvt) + # Scale looseness based on attention size. (addition works better than mul). Also divide by 100 + # because having user input 3.35 is nicer + real_loose = (looseness / 100) * (new_att.shape[0] + new_att.shape[1]) + g_idx = 0 + for x in range(0, new_att.shape[0]): + for y in range(0, new_att.shape[1]): + min_point_idx = dist_arr[g_idx].argmin() + + closest_pvt = in_pvt[min_point_idx] + distance = dist_arr[g_idx][min_point_idx] / real_loose + distance = np.power(distance, 2) + + g_idx += 1 + + new_att[x, y] = distance + + return np.clip(new_att, 0.0, 1.0) + + +def get_pivot_points(in_att): + ret_points = [] + for x in range(0, in_att.shape[0]): + for y in range(0, in_att.shape[1]): + if in_att[x, y] > 0.8: + ret_points.append((x, y)) + return ret_points + + +def main(): + parser = argparse.ArgumentParser( + description="Postprocess durations to become alignments" + ) + parser.add_argument( + "--dump-dir", + default="dump", + type=str, + help="Path of dump directory", + ) + parser.add_argument( + "--looseness", + default=3.5, + type=float, + help="Looseness of the generated guided attention map. 
Lower values = tighter", + ) + args = parser.parse_args() + dump_dir = args.dump_dir + dump_sets = ["train", "valid"] + + for d_set in dump_sets: + full_fol = os.path.join(dump_dir, d_set) + align_path = os.path.join(full_fol, "alignments") + + ids_path = os.path.join(full_fol, "ids") + durations_path = os.path.join(full_fol, "durations") + + safemkdir(align_path) + + for duration_fn in tqdm(os.listdir(durations_path)): + if not ".npy" in duration_fn: + continue + + id_fn = duration_fn.replace("-durations", "-ids") + + id_path = os.path.join(ids_path, id_fn) + duration_path = os.path.join(durations_path, duration_fn) + + duration_arr = np.load(duration_path) + id_arr = np.load(id_path) + + id_true_size = len(id_arr) + + align = duration_to_alignment(duration_arr) + + if align.shape[0] != id_true_size: + align, points = rescale_alignment(align, id_true_size) + else: + points = get_pivot_points(align) + + if len(points) == 0: + print("WARNING points are empty for", id_fn) + + align = create_guided(align, points, args.looseness) + + align_fn = id_fn.replace("-ids", "-alignment") + align_full_fn = os.path.join(align_path, align_fn) + + np.save(align_full_fn, align.astype("float32")) + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/examples/tacotron2/extract_duration.py b/TensorFlowTTS/examples/tacotron2/extract_duration.py new file mode 100644 index 0000000000000000000000000000000000000000..91bc27eac114b81724f4826e460ebc9aa5b28e30 --- /dev/null +++ b/TensorFlowTTS/examples/tacotron2/extract_duration.py @@ -0,0 +1,236 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Extract durations based-on tacotron-2 alignments for FastSpeech.""" + +import argparse +import logging +import os +from numba import jit +import sys + +sys.path.append(".") + +import matplotlib.pyplot as plt +import numpy as np +import tensorflow as tf +import yaml +from tqdm import tqdm + +from examples.tacotron2.tacotron_dataset import CharactorMelDataset +from tensorflow_tts.configs import Tacotron2Config +from tensorflow_tts.models import TFTacotron2 + + +@jit(nopython=True) +def get_duration_from_alignment(alignment): + D = np.array([0 for _ in range(np.shape(alignment)[0])]) + + for i in range(np.shape(alignment)[1]): + max_index = list(alignment[:, i]).index(alignment[:, i].max()) + D[max_index] = D[max_index] + 1 + + return D + + +def main(): + """Running extract tacotron-2 durations.""" + parser = argparse.ArgumentParser( + description="Extract durations from charactor with trained Tacotron-2 " + "(See detail in tensorflow_tts/example/tacotron-2/extract_duration.py)." + ) + parser.add_argument( + "--rootdir", + default=None, + type=str, + required=True, + help="directory including ids/durations files.", + ) + parser.add_argument( + "--outdir", type=str, required=True, help="directory to save generated speech." + ) + parser.add_argument( + "--checkpoint", type=str, required=True, help="checkpoint file to be loaded." + ) + parser.add_argument( + "--use-norm", default=1, type=int, help="usr norm-mels for train or raw." 
def main():
    """Extract per-character durations from a trained Tacotron-2 model.

    Runs teacher-forced inference over the dumped dataset, converts each
    attention matrix to durations, trims/pads durations so they sum exactly
    to the real mel length, and saves one ``*-durations.npy`` per utterance.
    """
    parser = argparse.ArgumentParser(
        description="Extract durations from charactor with trained Tacotron-2 "
        "(See detail in tensorflow_tts/example/tacotron-2/extract_duration.py)."
    )
    parser.add_argument(
        "--rootdir",
        default=None,
        type=str,
        required=True,
        help="directory including ids/durations files.",
    )
    parser.add_argument(
        "--outdir", type=str, required=True, help="directory to save generated speech."
    )
    parser.add_argument(
        "--checkpoint", type=str, required=True, help="checkpoint file to be loaded."
    )
    parser.add_argument(
        "--use-norm", default=1, type=int, help="use norm-mels for train or raw."
    )
    parser.add_argument("--batch-size", default=8, type=int, help="batch size.")
    parser.add_argument("--win-front", default=2, type=int, help="win-front.")
    # fixed: help text previously said "win-front." for this flag too.
    parser.add_argument("--win-back", default=2, type=int, help="win-back.")
    parser.add_argument(
        "--use-window-mask", default=1, type=int, help="toggle window masking."
    )
    parser.add_argument("--save-alignment", default=0, type=int, help="save-alignment.")
    parser.add_argument(
        "--config",
        default=None,
        type=str,
        required=True,
        help="yaml format configuration file. if not explicitly provided, "
        "it will be searched in the checkpoint directory. (default=None)",
    )
    parser.add_argument(
        "--verbose",
        type=int,
        default=1,
        help="logging level. higher is more logging. (default=1)",
    )
    args = parser.parse_args()

    # set logger
    if args.verbose > 1:
        logging.basicConfig(
            level=logging.DEBUG,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    elif args.verbose > 0:
        logging.basicConfig(
            level=logging.INFO,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    else:
        logging.basicConfig(
            level=logging.WARN,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
        logging.warning("Skip DEBUG/INFO messages")

    # check directory existence
    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)

    # load config
    with open(args.config) as f:
        config = yaml.load(f, Loader=yaml.Loader)
    config.update(vars(args))

    if config["format"] == "npy":
        char_query = "*-ids.npy"
        # BUG FIX: `args.use_norm is False` was always False because
        # use_norm is an int, so raw feats could never be selected.
        mel_query = "*-raw-feats.npy" if args.use_norm == 0 else "*-norm-feats.npy"
        char_load_fn = np.load
        mel_load_fn = np.load
    else:
        raise ValueError("Only npy is supported.")

    # define data-loader
    dataset = CharactorMelDataset(
        dataset=config["tacotron2_params"]["dataset"],
        root_dir=args.rootdir,
        charactor_query=char_query,
        mel_query=mel_query,
        charactor_load_fn=char_load_fn,
        mel_load_fn=mel_load_fn,
        reduction_factor=config["tacotron2_params"]["reduction_factor"],
        use_fixed_shapes=True,
    )
    dataset = dataset.create(
        allow_cache=True, batch_size=args.batch_size, drop_remainder=False
    )

    # define model and load checkpoint
    tacotron2 = TFTacotron2(
        config=Tacotron2Config(**config["tacotron2_params"]),
        name="tacotron2",
    )
    tacotron2._build()  # build model to be able load_weights.
    tacotron2.load_weights(args.checkpoint)

    # apply tf.function for tacotron2.
    tacotron2 = tf.function(tacotron2, experimental_relax_shapes=True)

    for data in tqdm(dataset, desc="[Extract Duration]"):
        utt_ids = data["utt_ids"].numpy()
        input_lengths = data["input_lengths"]
        real_mel_lengths = data["real_mel_lengths"]
        # the model's signature does not accept real_mel_lengths.
        del data["real_mel_lengths"]

        # tacotron2 teacher-forced inference (training=True).
        mel_outputs, post_mel_outputs, stop_outputs, alignment_historys = tacotron2(
            **data,
            use_window_mask=args.use_window_mask,
            win_front=args.win_front,
            win_back=args.win_back,
            training=True,
        )

        # convert to numpy
        alignment_historys = alignment_historys.numpy()

        for i, alignment in enumerate(alignment_historys):
            real_char_length = input_lengths[i].numpy()
            real_mel_length = real_mel_lengths[i].numpy()
            # mel frames were padded to a multiple of reduction_factor,
            # so the alignment length is the rounded-up frame count.
            alignment_mel_length = int(
                np.ceil(
                    real_mel_length / config["tacotron2_params"]["reduction_factor"]
                )
            )
            alignment = alignment[:real_char_length, :alignment_mel_length]
            d = get_duration_from_alignment(alignment)  # [max_char_len]

            d = d * config["tacotron2_params"]["reduction_factor"]
            assert (
                np.sum(d) >= real_mel_length
            ), f"{d}, {np.sum(d)}, {alignment_mel_length}, {real_mel_length}"
            if np.sum(d) > real_mel_length:
                # redistribute the padding surplus from the edge characters
                # so durations sum exactly to the real mel length.
                rest = np.sum(d) - real_mel_length
                if d[-1] > rest:
                    d[-1] -= rest
                elif d[0] > rest:
                    d[0] -= rest
                else:
                    d[-1] -= rest // 2
                    d[0] -= rest - rest // 2

                assert d[-1] >= 0 and d[0] >= 0, f"{d}, {np.sum(d)}, {real_mel_length}"

            saved_name = utt_ids[i].decode("utf-8")

            # check a length compatible
            assert (
                len(d) == real_char_length
            ), f"different between len_char and len_durations, {len(d)} and {real_char_length}"

            assert (
                np.sum(d) == real_mel_length
            ), f"different between sum_durations and len_mel, {np.sum(d)} and {real_mel_length}"

            # save D to folder.
            np.save(
                os.path.join(args.outdir, f"{saved_name}-durations.npy"),
                d.astype(np.int32),
                allow_pickle=False,
            )

            # save alignment to debug.
            if args.save_alignment == 1:
                figname = os.path.join(args.outdir, f"{saved_name}_alignment.png")
                fig = plt.figure(figsize=(8, 6))
                ax = fig.add_subplot(111)
                ax.set_title(f"Alignment of {saved_name}")
                im = ax.imshow(
                    alignment, aspect="auto", origin="lower", interpolation="none"
                )
                fig.colorbar(im, ax=ax)
                xlabel = "Decoder timestep"
                plt.xlabel(xlabel)
                plt.ylabel("Encoder timestep")
                plt.tight_layout()
                plt.savefig(figname)
                plt.close()
@jit(nopython=True)
def get_duration_from_alignment(alignment):
    """Convert a Tacotron-2 attention matrix into per-character durations.

    Each decoder timestep (a column of ``alignment``) is attributed to the
    single character (row) with the highest attention weight; a character's
    duration is the number of decoder steps assigned to it.

    Args:
        alignment: float array of shape [char_length, mel_length // r]
            holding attention weights.

    Returns:
        int32 array of shape [char_length]. Entries sum to the number of
        columns of ``alignment``.
    """
    # Pre-allocate instead of building a Python list; supported by numba
    # in nopython mode.
    durations = np.zeros(alignment.shape[0], dtype=np.int32)
    for t in range(alignment.shape[1]):
        # np.argmax returns the FIRST row holding the maximal weight, which
        # matches the original list(col).index(col.max()) tie-breaking.
        durations[np.argmax(alignment[:, t])] += 1
    return durations
def main():
    """Extract teacher-forced postnet mel-spectrograms from Tacotron-2.

    Runs teacher-forced inference over the dumped dataset, validates the
    extracted durations against the real mel lengths, and saves one
    ``*-postnet.npy`` (trimmed to the real length) per utterance.
    """
    parser = argparse.ArgumentParser(
        description="Extract durations from charactor with trained Tacotron-2 "
        "(See detail in tensorflow_tts/example/tacotron-2/extract_duration.py)."
    )
    parser.add_argument(
        "--rootdir",
        default=None,
        type=str,
        required=True,
        help="directory including ids/durations files.",
    )
    parser.add_argument(
        "--outdir", type=str, required=True, help="directory to save generated mels."
    )
    parser.add_argument(
        "--checkpoint", type=str, required=True, help="checkpoint file to be loaded."
    )
    parser.add_argument(
        "--use-norm", default=1, type=int, help="use norm-mels for train or raw."
    )
    parser.add_argument("--batch-size", default=32, type=int, help="batch size.")
    parser.add_argument("--win-front", default=3, type=int, help="win-front.")
    # fixed: help text previously said "win-front." for this flag too.
    parser.add_argument("--win-back", default=3, type=int, help="win-back.")
    parser.add_argument(
        "--use-window-mask", default=1, type=int, help="toggle window masking."
    )
    parser.add_argument("--save-alignment", default=0, type=int, help="save-alignment.")
    parser.add_argument(
        "--config",
        default=None,
        type=str,
        required=True,
        help="yaml format configuration file. if not explicitly provided, "
        "it will be searched in the checkpoint directory. (default=None)",
    )
    parser.add_argument(
        "--verbose",
        type=int,
        default=1,
        help="logging level. higher is more logging. (default=1)",
    )
    args = parser.parse_args()

    # set logger
    if args.verbose > 1:
        logging.basicConfig(
            level=logging.DEBUG,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    elif args.verbose > 0:
        logging.basicConfig(
            level=logging.INFO,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
    else:
        logging.basicConfig(
            level=logging.WARN,
            format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
        )
        logging.warning("Skip DEBUG/INFO messages")

    # check directory existence
    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)

    # load config
    with open(args.config) as f:
        config = yaml.load(f, Loader=yaml.Loader)
    config.update(vars(args))

    if config["format"] == "npy":
        char_query = "*-ids.npy"
        # BUG FIX: `args.use_norm is False` was always False because
        # use_norm is an int, so raw feats could never be selected.
        mel_query = "*-raw-feats.npy" if args.use_norm == 0 else "*-norm-feats.npy"
        char_load_fn = np.load
        mel_load_fn = np.load
    else:
        raise ValueError("Only npy is supported.")

    # define data-loader
    dataset = CharactorMelDataset(
        dataset=config["tacotron2_params"]["dataset"],
        root_dir=args.rootdir,
        charactor_query=char_query,
        mel_query=mel_query,
        charactor_load_fn=char_load_fn,
        mel_load_fn=mel_load_fn,
        reduction_factor=config["tacotron2_params"]["reduction_factor"],
        use_fixed_shapes=True,
    )
    dataset = dataset.create(
        allow_cache=True, batch_size=args.batch_size, drop_remainder=False
    )

    # define model and load checkpoint
    tacotron2 = TFTacotron2(
        config=Tacotron2Config(**config["tacotron2_params"]),
        name="tacotron2",
    )
    tacotron2._build()  # build model to be able load_weights.
    tacotron2.load_weights(args.checkpoint)

    # apply tf.function for tacotron2.
    tacotron2 = tf.function(tacotron2, experimental_relax_shapes=True)

    for data in tqdm(dataset, desc="[Extract Postnets]"):
        utt_ids = data["utt_ids"].numpy()
        input_lengths = data["input_lengths"]
        real_mel_lengths = data["real_mel_lengths"]
        # the model's signature does not accept real_mel_lengths.
        del data["real_mel_lengths"]

        # tacotron2 teacher-forced inference (training=True).
        mel_outputs, post_mel_outputs, stop_outputs, alignment_historys = tacotron2(
            **data,
            use_window_mask=args.use_window_mask,
            win_front=args.win_front,
            win_back=args.win_back,
            training=True,
        )

        # convert to numpy
        alignment_historys = alignment_historys.numpy()
        post_mel_outputs = post_mel_outputs.numpy()

        outdpost = os.path.join(args.outdir, "postnets")

        if not os.path.exists(outdpost):
            os.makedirs(outdpost)

        for i, alignment in enumerate(alignment_historys):
            real_char_length = input_lengths[i].numpy()
            real_mel_length = real_mel_lengths[i].numpy()
            # mel frames were padded to a multiple of reduction_factor,
            # so the alignment length is the rounded-up frame count.
            alignment_mel_length = int(
                np.ceil(
                    real_mel_length / config["tacotron2_params"]["reduction_factor"]
                )
            )
            alignment = alignment[:real_char_length, :alignment_mel_length]
            d = get_duration_from_alignment(alignment)  # [max_char_len]

            d = d * config["tacotron2_params"]["reduction_factor"]
            assert (
                np.sum(d) >= real_mel_length
            ), f"{d}, {np.sum(d)}, {alignment_mel_length}, {real_mel_length}"
            if np.sum(d) > real_mel_length:
                # redistribute the padding surplus from the edge characters
                # so durations sum exactly to the real mel length.
                rest = np.sum(d) - real_mel_length
                if d[-1] > rest:
                    d[-1] -= rest
                elif d[0] > rest:
                    d[0] -= rest
                else:
                    d[-1] -= rest // 2
                    d[0] -= rest - rest // 2

                assert d[-1] >= 0 and d[0] >= 0, f"{d}, {np.sum(d)}, {real_mel_length}"

            saved_name = utt_ids[i].decode("utf-8")

            # check a length compatible
            assert (
                len(d) == real_char_length
            ), f"different between len_char and len_durations, {len(d)} and {real_char_length}"

            assert (
                np.sum(d) == real_mel_length
            ), f"different between sum_durations and len_mel, {np.sum(d)} and {real_mel_length}"

            # save postnet mel to folder, trimmed to the real frame count.
            # (original `[:][:real_mel_length]` contained a redundant no-op
            # copy; a single time-axis slice is equivalent.)
            np.save(
                os.path.join(outdpost, f"{saved_name}-postnet.npy"),
                post_mel_outputs[i][:real_mel_length].astype(np.float32),
                allow_pickle=False,
            )

            # save alignment to debug.
            if args.save_alignment == 1:
                figname = os.path.join(args.outdir, f"{saved_name}_alignment.png")
                fig = plt.figure(figsize=(8, 6))
                ax = fig.add_subplot(111)
                ax.set_title(f"Alignment of {saved_name}")
                im = ax.imshow(
                    alignment, aspect="auto", origin="lower", interpolation="none"
                )
                fig.colorbar(im, ax=ax)
                xlabel = "Decoder timestep"
                plt.xlabel(xlabel)
                plt.ylabel("Encoder timestep")
                plt.tight_layout()
                plt.savefig(figname)
                plt.close()
class CharactorMelDataset(AbstractDataset):
    """Tensorflow Charactor Mel dataset.

    Pairs charactor-id files with mel-spectrogram files (matched by sorted
    filename) and yields padded batches for Tacotron-2 training. Attention
    supervision comes either from pre-computed forced-alignment (FAL) files
    or, when no align query is given, from an on-the-fly guided-attention
    soft mask.
    """

    def __init__(
        self,
        dataset,
        root_dir,
        charactor_query="*-ids.npy",
        mel_query="*-norm-feats.npy",
        align_query="",
        charactor_load_fn=np.load,
        mel_load_fn=np.load,
        mel_length_threshold=0,
        reduction_factor=1,
        mel_pad_value=0.0,
        char_pad_value=0,
        ga_pad_value=-1.0,
        g=0.2,
        use_fixed_shapes=False,
    ):
        """Initialize dataset.

        Args:
            root_dir (str): Root directory including dumped files.
            charactor_query (str): Query to find charactor files in root_dir.
            mel_query (str): Query to find feature files in root_dir.
            charactor_load_fn (func): Function to load charactor file.
            align_query (str): Query to find FAL files in root_dir. If empty, we use stock guided attention loss.
            mel_load_fn (func): Function to load feature file.
            mel_length_threshold (int): Threshold to remove short feature files.
            reduction_factor (int): Reduction factor on Tacotron-2 paper.
            mel_pad_value (float): Padding value for mel-spectrogram.
            char_pad_value (int): Padding value for charactor.
            ga_pad_value (float): Padding value for guided attention.
            g (float): G value for guided attention.
            use_fixed_shapes (bool): Use fixed shape for mel targets or not.

        """
        # find all of charactor and mel files.
        charactor_files = sorted(find_files(root_dir, charactor_query))
        mel_files = sorted(find_files(root_dir, mel_query))

        mel_lengths = [mel_load_fn(f).shape[0] for f in mel_files]
        char_lengths = [charactor_load_fn(f).shape[0] for f in charactor_files]

        # assert the number of files
        assert len(mel_files) != 0, f"Not found any mels files in ${root_dir}."
        assert (
            len(mel_files) == len(charactor_files) == len(mel_lengths)
        ), f"Number of charactor, mel and duration files are different \
            ({len(mel_files)} vs {len(charactor_files)} vs {len(mel_lengths)})."

        self.align_files = []

        # BUG FIX: all FAL gates now use "> 0" consistently. The original
        # mixed "len(...) > 1" here and in the loaders with "len(...) < 1"
        # in create(); a dataset with exactly one align file would neither
        # load FAL nor compute guided attention, leaving g_attentions=None.
        if len(align_query) > 0:
            align_files = sorted(find_files(root_dir, align_query))
            assert len(align_files) == len(
                mel_files
            ), f"Number of align files ({len(align_files)}) and mel files ({len(mel_files)}) are different"
            logging.info("Using FAL loss")
            self.align_files = align_files
        else:
            logging.info("Using guided attention loss")

        if ".npy" in charactor_query:
            suffix = charactor_query[1:]
            utt_ids = [os.path.basename(f).replace(suffix, "") for f in charactor_files]

        # set global params
        self.utt_ids = utt_ids
        self.mel_files = mel_files
        self.charactor_files = charactor_files
        self.mel_load_fn = mel_load_fn
        self.charactor_load_fn = charactor_load_fn
        self.mel_lengths = mel_lengths
        self.char_lengths = char_lengths
        self.reduction_factor = reduction_factor
        self.mel_length_threshold = mel_length_threshold
        self.mel_pad_value = mel_pad_value
        self.char_pad_value = char_pad_value
        self.ga_pad_value = ga_pad_value
        self.g = g
        self.use_fixed_shapes = use_fixed_shapes
        self.max_char_length = np.max(char_lengths)

        # round the maximum mel length up to a multiple of reduction_factor
        # so fixed-shape batches stay compatible with the decoder.
        if np.max(mel_lengths) % self.reduction_factor == 0:
            self.max_mel_length = np.max(mel_lengths)
        else:
            self.max_mel_length = (
                np.max(mel_lengths)
                + self.reduction_factor
                - np.max(mel_lengths) % self.reduction_factor
            )

    def get_args(self):
        return [self.utt_ids]

    def generator(self, utt_ids):
        """Yield one dict of file paths per utterance."""
        for i, utt_id in enumerate(utt_ids):
            mel_file = self.mel_files[i]
            charactor_file = self.charactor_files[i]
            # empty string marks "no FAL file"; see _load_data.
            align_file = self.align_files[i] if len(self.align_files) > 0 else ""

            items = {
                "utt_ids": utt_id,
                "mel_files": mel_file,
                "charactor_files": charactor_file,
                "align_files": align_file,
            }

            yield items

    @tf.function
    def _load_data(self, items):
        """Load arrays from disk and pad mel to a reduction-factor multiple."""
        mel = tf.numpy_function(np.load, [items["mel_files"]], tf.float32)
        charactor = tf.numpy_function(np.load, [items["charactor_files"]], tf.int32)
        g_att = (
            tf.numpy_function(np.load, [items["align_files"]], tf.float32)
            if len(self.align_files) > 0
            else None
        )

        mel_length = len(mel)
        char_length = len(charactor)
        # padding mel to make its length is multiple of reduction factor.
        real_mel_length = mel_length
        remainder = mel_length % self.reduction_factor
        if remainder != 0:
            new_mel_length = mel_length + self.reduction_factor - remainder
            mel = tf.pad(
                mel,
                [[0, new_mel_length - mel_length], [0, 0]],
                constant_values=self.mel_pad_value,
            )
            mel_length = new_mel_length

        items = {
            "utt_ids": items["utt_ids"],
            "input_ids": charactor,
            "input_lengths": char_length,
            "speaker_ids": 0,
            "mel_gts": mel,
            "mel_lengths": mel_length,
            "real_mel_lengths": real_mel_length,
            "g_attentions": g_att,
        }

        return items

    def _guided_attention(self, items):
        """Guided attention. Refer to page 3 on the paper (https://arxiv.org/abs/1710.08969)."""
        items = items.copy()
        mel_len = items["mel_lengths"] // self.reduction_factor
        char_len = items["input_lengths"]
        xv, yv = tf.meshgrid(tf.range(char_len), tf.range(mel_len), indexing="ij")
        f32_matrix = tf.cast(yv / mel_len - xv / char_len, tf.float32)
        # soft penalty mask: ~0 near the diagonal, ->1 far from it.
        items["g_attentions"] = 1.0 - tf.math.exp(
            -(f32_matrix ** 2) / (2 * self.g ** 2)
        )
        return items

    def create(
        self,
        allow_cache=False,
        batch_size=1,
        is_shuffle=False,
        map_fn=None,
        reshuffle_each_iteration=True,
        drop_remainder=True,
    ):
        """Create tf.dataset function."""
        output_types = self.get_output_dtypes()
        datasets = tf.data.Dataset.from_generator(
            self.generator, output_types=output_types, args=(self.get_args())
        )

        # load data
        datasets = datasets.map(
            lambda items: self._load_data(items), tf.data.experimental.AUTOTUNE
        )

        # calculate guided attention on the fly when no FAL files are used.
        if len(self.align_files) < 1:
            datasets = datasets.map(
                lambda items: self._guided_attention(items),
                tf.data.experimental.AUTOTUNE,
            )

        datasets = datasets.filter(
            lambda x: x["mel_lengths"] > self.mel_length_threshold
        )

        if allow_cache:
            datasets = datasets.cache()

        if is_shuffle:
            datasets = datasets.shuffle(
                self.get_len_dataset(),
                reshuffle_each_iteration=reshuffle_each_iteration,
            )

        # define padding value.
        padding_values = {
            "utt_ids": " ",
            "input_ids": self.char_pad_value,
            "input_lengths": 0,
            "speaker_ids": 0,
            "mel_gts": self.mel_pad_value,
            "mel_lengths": 0,
            "real_mel_lengths": 0,
            "g_attentions": self.ga_pad_value,
        }

        # define padded shapes.
        padded_shapes = {
            "utt_ids": [],
            "input_ids": [None]
            if self.use_fixed_shapes is False
            else [self.max_char_length],
            "input_lengths": [],
            "speaker_ids": [],
            "mel_gts": [None, 80]
            if self.use_fixed_shapes is False
            else [self.max_mel_length, 80],
            "mel_lengths": [],
            "real_mel_lengths": [],
            "g_attentions": [None, None]
            if self.use_fixed_shapes is False
            else [self.max_char_length, self.max_mel_length // self.reduction_factor],
        }

        datasets = datasets.padded_batch(
            batch_size,
            padded_shapes=padded_shapes,
            padding_values=padding_values,
            drop_remainder=drop_remainder,
        )
        datasets = datasets.prefetch(tf.data.experimental.AUTOTUNE)
        return datasets

    def get_output_dtypes(self):
        output_types = {
            "utt_ids": tf.string,
            "mel_files": tf.string,
            "charactor_files": tf.string,
            "align_files": tf.string,
        }
        return output_types

    def get_len_dataset(self):
        return len(self.utt_ids)

    def __name__(self):
        return "CharactorMelDataset"
class Tacotron2Trainer(Seq2SeqBasedTrainer):
    """Tacotron2 Trainer class based on Seq2SeqBasedTrainer.

    Adds Tacotron-2 specific losses (mel before/after postnet, stop-token,
    guided attention) on top of the generic seq2seq training loop.
    """

    def __init__(
        self,
        config,
        strategy,
        steps=0,
        epochs=0,
        is_mixed_precision=False,
    ):
        """Initialize trainer.

        Args:
            steps (int): Initial global steps.
            epochs (int): Initial global epochs.
            config (dict): Config dict loaded from yaml format configuration file.
            is_mixed_precision (bool): Use mixed precision or not.

        """
        super(Tacotron2Trainer, self).__init__(
            steps=steps,
            epochs=epochs,
            config=config,
            strategy=strategy,
            is_mixed_precision=is_mixed_precision,
        )
        # define metrics to aggregates data and use tf.summary logs them
        self.list_metrics_name = [
            "stop_token_loss",
            "mel_loss_before",
            "mel_loss_after",
            "guided_attention_loss",
        ]
        self.init_train_eval_metrics(self.list_metrics_name)
        self.reset_states_train()
        self.reset_states_eval()

        self.config = config

    def compile(self, model, optimizer):
        """Attach model/optimizer and build per-example loss functions.

        Reduction.NONE keeps per-example losses so the distributed trainer
        can aggregate them itself.
        """
        super().compile(model, optimizer)
        self.binary_crossentropy = tf.keras.losses.BinaryCrossentropy(
            from_logits=True, reduction=tf.keras.losses.Reduction.NONE
        )
        self.mse = tf.keras.losses.MeanSquaredError(
            reduction=tf.keras.losses.Reduction.NONE
        )
        self.mae = tf.keras.losses.MeanAbsoluteError(
            reduction=tf.keras.losses.Reduction.NONE
        )

    def _train_step(self, batch):
        """Here we re-define _train_step because apply input_signature make
        the training progress slower on my experiment. Note that input_signature
        is apply on based_trainer by default.
        """
        # lazily wrap the step functions in tf.function once, on first use.
        if self._already_apply_input_signature is False:
            self.one_step_forward = tf.function(
                self._one_step_forward, experimental_relax_shapes=True
            )
            self.one_step_evaluate = tf.function(
                self._one_step_evaluate, experimental_relax_shapes=True
            )
            self.one_step_predict = tf.function(
                self._one_step_predict, experimental_relax_shapes=True
            )
            self._already_apply_input_signature = True

        # run one_step_forward
        self.one_step_forward(batch)

        # update counts
        self.steps += 1
        self.tqdm.update(1)
        self._check_train_finish()

    def _one_step_evaluate_per_replica(self, batch):
        """One step evaluate per GPU

        Tacotron-2 used teacher-forcing when training and evaluation.
        So we need pass `training=True` for inference step.

        """
        outputs = self._model(**batch, training=True)
        _, dict_metrics_losses = self.compute_per_example_losses(batch, outputs)

        self.update_eval_metrics(dict_metrics_losses)

    def _one_step_predict_per_replica(self, batch):
        """One step predict per GPU

        Tacotron-2 used teacher-forcing when training and evaluation.
        So we need pass `training=True` for inference step.

        """
        outputs = self._model(**batch, training=True)
        return outputs

    def compute_per_example_losses(self, batch, outputs):
        """Compute per example losses and return dict_metrics_losses
        Note that all element of the loss MUST has a shape [batch_size] and
        the keys of dict_metrics_losses MUST be in self.list_metrics_name.

        Args:
            batch: dictionary batch input return from dataloader
            outputs: outputs of the model

        Returns:
            per_example_losses: per example losses for each GPU, shape [B]
            dict_metrics_losses: dictionary loss.
        """
        (
            decoder_output,
            post_mel_outputs,
            stop_token_predictions,
            alignment_historys,
        ) = outputs

        mel_loss_before = calculate_3d_loss(
            batch["mel_gts"], decoder_output, loss_fn=self.mae
        )
        mel_loss_after = calculate_3d_loss(
            batch["mel_gts"], post_mel_outputs, loss_fn=self.mae
        )

        # calculate stop_loss
        max_mel_length = (
            tf.reduce_max(batch["mel_lengths"])
            if self.config["use_fixed_shapes"] is False
            else [self.config["max_mel_length"]]
        )
        # stop target is 0 while frames are valid and 1 from the first
        # padded frame onward.
        stop_gts = tf.expand_dims(
            tf.range(tf.reduce_max(max_mel_length), dtype=tf.int32), 0
        )  # [1, max_len]
        stop_gts = tf.tile(
            stop_gts, [tf.shape(batch["mel_lengths"])[0], 1]
        )  # [B, max_len]
        stop_gts = tf.cast(
            tf.math.greater_equal(stop_gts, tf.expand_dims(batch["mel_lengths"], 1)),
            tf.float32,
        )

        stop_token_loss = calculate_2d_loss(
            stop_gts, stop_token_predictions, loss_fn=self.binary_crossentropy
        )

        # calculate guided attention loss.
        # ga_pad_value (-1.0) marks padded cells; mask them out of the mean.
        attention_masks = tf.cast(
            tf.math.not_equal(batch["g_attentions"], -1.0), tf.float32
        )
        loss_att = tf.reduce_sum(
            tf.abs(alignment_historys * batch["g_attentions"]) * attention_masks,
            axis=[1, 2],
        )
        loss_att /= tf.reduce_sum(attention_masks, axis=[1, 2])

        per_example_losses = (
            stop_token_loss + mel_loss_before + mel_loss_after + loss_att
        )

        dict_metrics_losses = {
            "stop_token_loss": stop_token_loss,
            "mel_loss_before": mel_loss_before,
            "mel_loss_after": mel_loss_after,
            "guided_attention_loss": loss_att,
        }

        return per_example_losses, dict_metrics_losses

    def generate_and_save_intermediate_result(self, batch):
        """Generate and save intermediate result.

        Plots ground-truth vs predicted mels and the attention alignment for
        one replica's worth of samples under outdir/predictions/<steps>steps.
        """
        import matplotlib.pyplot as plt

        # predict with tf.function for faster.
        outputs = self.one_step_predict(batch)
        (
            decoder_output,
            mel_outputs,
            stop_token_predictions,
            alignment_historys,
        ) = outputs
        mel_gts = batch["mel_gts"]
        utt_ids = batch["utt_ids"]

        # convert to tensor.
        # here we just take a sample at first replica.
        # under a distribution strategy the outputs are PerReplica values
        # exposing .values; on a single device they are plain tensors, so
        # fall back to direct .numpy() when .values is absent.
        try:
            mels_before = decoder_output.values[0].numpy()
            mels_after = mel_outputs.values[0].numpy()
            mel_gts = mel_gts.values[0].numpy()
            alignment_historys = alignment_historys.values[0].numpy()
            utt_ids = utt_ids.values[0].numpy()
        except Exception:
            mels_before = decoder_output.numpy()
            mels_after = mel_outputs.numpy()
            mel_gts = mel_gts.numpy()
            alignment_historys = alignment_historys.numpy()
            utt_ids = utt_ids.numpy()

        # check directory
        dirname = os.path.join(self.config["outdir"], f"predictions/{self.steps}steps")
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        for idx, (mel_gt, mel_before, mel_after, alignment_history) in enumerate(
            zip(mel_gts, mels_before, mels_after, alignment_historys), 0
        ):
            mel_gt = tf.reshape(mel_gt, (-1, 80)).numpy()  # [length, 80]
            mel_before = tf.reshape(mel_before, (-1, 80)).numpy()  # [length, 80]
            mel_after = tf.reshape(mel_after, (-1, 80)).numpy()  # [length, 80]

            # plot figure and save it
            utt_id = utt_ids[idx]
            figname = os.path.join(dirname, f"{utt_id}.png")
            fig = plt.figure(figsize=(10, 8))
            ax1 = fig.add_subplot(311)
            ax2 = fig.add_subplot(312)
            ax3 = fig.add_subplot(313)
            im = ax1.imshow(np.rot90(mel_gt), aspect="auto", interpolation="none")
            ax1.set_title("Target Mel-Spectrogram")
            fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax1)
            ax2.set_title(f"Predicted Mel-before-Spectrogram @ {self.steps} steps")
            im = ax2.imshow(np.rot90(mel_before), aspect="auto", interpolation="none")
            fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax2)
            ax3.set_title(f"Predicted Mel-after-Spectrogram @ {self.steps} steps")
            im = ax3.imshow(np.rot90(mel_after), aspect="auto", interpolation="none")
            fig.colorbar(mappable=im, shrink=0.65, orientation="horizontal", ax=ax3)
            plt.tight_layout()
            plt.savefig(figname)
            plt.close()

            # plot alignment
            # NOTE(review): mel figures are named by utt_id but alignment
            # figures by batch index — presumably intentional; confirm.
            figname = os.path.join(dirname, f"{idx}_alignment.png")
            fig = plt.figure(figsize=(8, 6))
            ax = fig.add_subplot(111)
            ax.set_title(f"Alignment @ {self.steps} steps")
            im = ax.imshow(
                alignment_history, aspect="auto", origin="lower", interpolation="none"
            )
            fig.colorbar(im, ax=ax)
            xlabel = "Decoder timestep"
            plt.xlabel(xlabel)
            plt.ylabel("Encoder timestep")
            plt.tight_layout()
            plt.savefig(figname)
            plt.close()
Auto-skips non-matching layers", + ) + parser.add_argument( + "--use-fal", + default=0, + type=int, + help="Use forced alignment guided attention loss or regular", + ) + args = parser.parse_args() + + # return strategy + STRATEGY = return_strategy() + + # set mixed precision config + if args.mixed_precision == 1: + tf.config.optimizer.set_experimental_options({"auto_mixed_precision": True}) + + args.mixed_precision = bool(args.mixed_precision) + args.use_norm = bool(args.use_norm) + args.use_fal = bool(args.use_fal) + + # set logger + if args.verbose > 1: + logging.basicConfig( + level=logging.DEBUG, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + elif args.verbose > 0: + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + else: + logging.basicConfig( + level=logging.WARN, + stream=sys.stdout, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", + ) + logging.warning("Skip DEBUG/INFO messages") + + # check directory existence + if not os.path.exists(args.outdir): + os.makedirs(args.outdir) + + # check arguments + if args.train_dir is None: + raise ValueError("Please specify --train-dir") + if args.dev_dir is None: + raise ValueError("Please specify --valid-dir") + + # load and save config + with open(args.config) as f: + config = yaml.load(f, Loader=yaml.Loader) + config.update(vars(args)) + config["version"] = tensorflow_tts.__version__ + + # get dataset + if config["remove_short_samples"]: + mel_length_threshold = config["mel_length_threshold"] + else: + mel_length_threshold = 0 + + if config["format"] == "npy": + charactor_query = "*-ids.npy" + mel_query = "*-raw-feats.npy" if args.use_norm is False else "*-norm-feats.npy" + align_query = "*-alignment.npy" if args.use_fal is True else "" + charactor_load_fn = np.load + mel_load_fn = np.load + else: + raise ValueError("Only npy are supported.") + 
+ train_dataset = CharactorMelDataset( + dataset=config["tacotron2_params"]["dataset"], + root_dir=args.train_dir, + charactor_query=charactor_query, + mel_query=mel_query, + charactor_load_fn=charactor_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + reduction_factor=config["tacotron2_params"]["reduction_factor"], + use_fixed_shapes=config["use_fixed_shapes"], + align_query=align_query, + ) + + # update max_mel_length and max_char_length to config + config.update({"max_mel_length": int(train_dataset.max_mel_length)}) + config.update({"max_char_length": int(train_dataset.max_char_length)}) + + with open(os.path.join(args.outdir, "config.yml"), "w") as f: + yaml.dump(config, f, Dumper=yaml.Dumper) + for key, value in config.items(): + logging.info(f"{key} = {value}") + + train_dataset = train_dataset.create( + is_shuffle=config["is_shuffle"], + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] + * STRATEGY.num_replicas_in_sync + * config["gradient_accumulation_steps"], + ) + + valid_dataset = CharactorMelDataset( + dataset=config["tacotron2_params"]["dataset"], + root_dir=args.dev_dir, + charactor_query=charactor_query, + mel_query=mel_query, + charactor_load_fn=charactor_load_fn, + mel_load_fn=mel_load_fn, + mel_length_threshold=mel_length_threshold, + reduction_factor=config["tacotron2_params"]["reduction_factor"], + use_fixed_shapes=False, # don't need apply fixed shape for evaluation. + align_query=align_query, + ).create( + is_shuffle=config["is_shuffle"], + allow_cache=config["allow_cache"], + batch_size=config["batch_size"] * STRATEGY.num_replicas_in_sync, + ) + + # define trainer + trainer = Tacotron2Trainer( + config=config, + strategy=STRATEGY, + steps=0, + epochs=0, + is_mixed_precision=args.mixed_precision, + ) + + with STRATEGY.scope(): + # define model. 
+ tacotron_config = Tacotron2Config(**config["tacotron2_params"]) + tacotron2 = TFTacotron2(config=tacotron_config, name="tacotron2") + tacotron2._build() + tacotron2.summary() + + if len(args.pretrained) > 1: + tacotron2.load_weights(args.pretrained, by_name=True, skip_mismatch=True) + logging.info( + f"Successfully loaded pretrained weight from {args.pretrained}." + ) + + # AdamW for tacotron2 + learning_rate_fn = tf.keras.optimizers.schedules.PolynomialDecay( + initial_learning_rate=config["optimizer_params"]["initial_learning_rate"], + decay_steps=config["optimizer_params"]["decay_steps"], + end_learning_rate=config["optimizer_params"]["end_learning_rate"], + ) + + learning_rate_fn = WarmUp( + initial_learning_rate=config["optimizer_params"]["initial_learning_rate"], + decay_schedule_fn=learning_rate_fn, + warmup_steps=int( + config["train_max_steps"] + * config["optimizer_params"]["warmup_proportion"] + ), + ) + + optimizer = AdamWeightDecay( + learning_rate=learning_rate_fn, + weight_decay_rate=config["optimizer_params"]["weight_decay"], + beta_1=0.9, + beta_2=0.98, + epsilon=1e-6, + exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"], + ) + + _ = optimizer.iterations + + # compile trainer + trainer.compile(model=tacotron2, optimizer=optimizer) + + # start training + try: + trainer.fit( + train_dataset, + valid_dataset, + saved_path=os.path.join(config["outdir"], "checkpoints/"), + resume=args.resume, + ) + except KeyboardInterrupt: + trainer.save_checkpoint() + logging.info(f"Successfully saved checkpoint @ {trainer.steps}steps.") + + +if __name__ == "__main__": + main() diff --git a/TensorFlowTTS/notebooks/Parallel_WaveGAN_TFLite.ipynb b/TensorFlowTTS/notebooks/Parallel_WaveGAN_TFLite.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..5b1b78c93db04b2d7ce7a17efab0c1e2559771cb --- /dev/null +++ b/TensorFlowTTS/notebooks/Parallel_WaveGAN_TFLite.ipynb @@ -0,0 +1,536 @@ +{ + "cells": [ + { + "cell_type": "markdown", + 
"metadata": {}, + "source": [ + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/tulasiram58827/TTS_TFLite/blob/main/Parallel_WaveGAN_TFLite.ipynb)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "qu_1y5_ZDxpU" + }, + "source": [ + "This notebook contains code to convert TensorFlow ParallelWaveGAN to TFLite" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "1KQie-EQDzEL" + }, + "source": [ + "## Acknowledgments" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "h-qWgadcDzCW" + }, + "source": [ + "- Pretrained model(in PyTorch) downloaded from [Parallel WaveGAN Repository](https://github.com/kan-bayashi/ParallelWaveGAN#results)\n", + "\n", + "- Converted PyTorch weights to Tensorflow Compatible using [Tensorflow TTS Repository](https://github.com/TensorSpeech/TensorFlowTTS) with this [Notebook](https://github.com/TensorSpeech/TensorFlowTTS/blob/master/examples/parallel_wavegan/convert_pwgan_from_pytorch_to_tensorflow.ipynb)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "pBE0GfYwEwoT" + }, + "source": [ + "## Imports" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "PpGT8_mm8vs-" + }, + "outputs": [], + "source": [ + "!git clone https://github.com/TensorSpeech/TensorFlowTTS.git\n", + "!cd TensorFlowTTS\n", + "!pip install /content/TensorFlowTTS/" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "2tI8NSz_886Z" + }, + "outputs": [], + "source": [ + "!pip install parallel_wavegan" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "6iQq9Gkn9MYT" + }, + "outputs": [], + "source": [ + "import tensorflow as tf\n", + "import torch\n", + "import sys\n", + "sys.path.append('/content/TensorFlowTTS')\n", + "from tensorflow_tts.models import TFParallelWaveGANGenerator\n", + "from tensorflow_tts.configs import 
ParallelWaveGANGeneratorConfig\n", + "\n", + "from parallel_wavegan.models import ParallelWaveGANGenerator\n", + "import numpy as np\n", + "\n", + "from IPython.display import Audio" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BIr9zN74E3PU" + }, + "source": [ + "## Intialize Model" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "id": "UoFriagU9NBx" + }, + "outputs": [], + "source": [ + "tf_model = TFParallelWaveGANGenerator(config=ParallelWaveGANGeneratorConfig(), name=\"parallel_wavegan_generator\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "id": "mayxwoLp9fiR" + }, + "outputs": [], + "source": [ + "tf_model._build()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "P__OyD23E8jN" + }, + "source": [ + "## Load PyTorch Checkpoints" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "gZq9ibuzHbI9", + "outputId": "660ebfd7-6ed9-49e2-b3b1-9c26894694f4" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloading...\n", + "From: https://drive.google.com/uc?id=1wPwO9K-0Yq-GYcXbHseaqt8kUpa_ojJf\n", + "To: /content/checkpoint-400000steps.pkl\n", + "\r", + "0.00B [00:00, ?B/s]\r", + "17.5MB [00:00, 154MB/s]\n" + ] + } + ], + "source": [ + "!gdown --id 1wPwO9K-0Yq-GYcXbHseaqt8kUpa_ojJf -O checkpoint-400000steps.pkl" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "id": "GoeX-YLQ9kaf" + }, + "outputs": [], + "source": [ + "torch_checkpoints = torch.load(\"checkpoint-400000steps.pkl\", map_location=torch.device('cpu'))\n", + "torch_generator_weights = torch_checkpoints[\"model\"][\"generator\"]\n", + "torch_model = ParallelWaveGANGenerator()\n", + "torch_model.load_state_dict(torch_checkpoints[\"model\"][\"generator\"])\n", + "torch_model.remove_weight_norm()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, 
+ "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "3NSfX33w99WW", + "outputId": "436460ea-2969-4f89-ba4a-801f0b60abff" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "1334309" + ] + }, + "execution_count": 9, + "metadata": { + "tags": [] + }, + "output_type": "execute_result" + } + ], + "source": [ + "model_parameters = filter(lambda p: p.requires_grad, torch_model.parameters())\n", + "params = sum([np.prod(p.size()) for p in model_parameters])\n", + "params" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "x7t7hPgiE_pR" + }, + "source": [ + "## Convert PyTorch weights to TensorFlow" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "id": "Y4vOfByl-ASZ" + }, + "outputs": [], + "source": [ + "# in pytorch, in convolution layer, the order is bias -> weight, in tf it is weight -> bias. We need re-order.\n", + "\n", + "def convert_weights_pytorch_to_tensorflow(weights_pytorch):\n", + " \"\"\"\n", + " Convert pytorch Conv1d weight variable to tensorflow Conv2D weights.\n", + " 1D: Pytorch (f_output, f_input, kernel_size) -> TF (kernel_size, f_input, 1, f_output)\n", + " 2D: Pytorch (f_output, f_input, kernel_size_h, kernel_size_w) -> TF (kernel_size_w, kernel_size_h, f_input, 1, f_output)\n", + " \"\"\"\n", + " if len(weights_pytorch.shape) == 3: # conv1d-kernel\n", + " weights_tensorflow = np.transpose(weights_pytorch, (0,2,1)) # [f_output, kernel_size, f_input]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (1,0,2)) # [kernel-size, f_output, f_input]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (0,2,1)) # [kernel-size, f_input, f_output]\n", + " return weights_tensorflow\n", + " elif len(weights_pytorch.shape) == 1: # conv1d-bias\n", + " return weights_pytorch\n", + " elif len(weights_pytorch.shape) == 4: # conv2d-kernel\n", + " weights_tensorflow = np.transpose(weights_pytorch, (0,2,1,3)) # [f_output, kernel_size_h, f_input, kernel_size_w]\n", + " 
weights_tensorflow = np.transpose(weights_tensorflow, (1,0,2,3)) # [kernel-size_h, f_output, f_input, kernel-size-w]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (0,2,1,3)) # [kernel_size_h, f_input, f_output, kernel-size-w]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (0,1,3,2)) # [kernel_size_h, f_input, kernel-size-w, f_output]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (0,2,1,3)) # [kernel_size_h, kernel-size-w, f_input, f_output]\n", + " weights_tensorflow = np.transpose(weights_tensorflow, (1,0,2,3)) # [kernel-size_w, kernel_size_h, f_input, f_output]\n", + " return weights_tensorflow\n", + "\n", + "torch_weights = []\n", + "all_keys = list(torch_model.state_dict().keys())\n", + "all_values = list(torch_model.state_dict().values())\n", + "\n", + "idx_already_append = []\n", + "\n", + "for i in range(len(all_keys) -1):\n", + " if i not in idx_already_append:\n", + " if all_keys[i].split(\".\")[0:-1] == all_keys[i + 1].split(\".\")[0:-1]:\n", + " if all_keys[i].split(\".\")[-1] == \"bias\" and all_keys[i + 1].split(\".\")[-1] == \"weight\":\n", + " torch_weights.append(convert_weights_pytorch_to_tensorflow(all_values[i + 1].cpu().detach().numpy()))\n", + " torch_weights.append(convert_weights_pytorch_to_tensorflow(all_values[i].cpu().detach().numpy()))\n", + " idx_already_append.append(i)\n", + " idx_already_append.append(i + 1)\n", + " else:\n", + " if i not in idx_already_append:\n", + " torch_weights.append(convert_weights_pytorch_to_tensorflow(all_values[i].cpu().detach().numpy()))\n", + " idx_already_append.append(i)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "id": "168kydzc-SxJ" + }, + "outputs": [], + "source": [ + "tf_var = tf_model.trainable_variables\n", + "for i, var in enumerate(tf_var):\n", + " tf.keras.backend.set_value(var, torch_weights[i])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "p8D70bCeFOAA" + }, + "source": [ + "## Convert to 
TFLite" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "id": "mTnGufeuH3io" + }, + "outputs": [], + "source": [ + "def convert_to_tflite(quantization):\n", + " pwg_concrete_function = tf_model.inference.get_concrete_function()\n", + " converter = tf.lite.TFLiteConverter.from_concrete_functions([pwg_concrete_function])\n", + " converter.optimizations = [tf.lite.Optimize.DEFAULT]\n", + " converter.target_spec.supported_ops = [tf.lite.OpsSet.SELECT_TF_OPS]\n", + " if quantization == 'float16':\n", + " converter.target_spec.supported_types = [tf.float16]\n", + " tf_lite_model = converter.convert()\n", + " model_name = f'parallel_wavegan_{quantization}.tflite'\n", + " with open(model_name, 'wb') as f:\n", + " f.write(tf_lite_model)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zSaic3flIJX7" + }, + "source": [ + "#### Dynamic Range Quantization" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "id": "6STZKNqg-vxS" + }, + "outputs": [], + "source": [ + "quantization = 'dr' #@param [\"dr\", \"float16\"]\n", + "convert_to_tflite(quantization)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "VB5H4bUmIUFR", + "outputId": "e53b77e9-d680-424a-a1e7-5a6abb723866" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "5.7M\tparallel_wavegan_dr.tflite\n" + ] + } + ], + "source": [ + "!du -sh parallel_wavegan_dr.tflite" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tb_FF8fNINWr" + }, + "source": [ + "#### Float16 Quantization" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "19kqBUnQ_KG3", + "outputId": "4ab46a0f-98cb-44ea-8b8c-a337b5fc8d35" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "3.2M\tparallel_wavegan_float16.tflite\n" 
+ ] + } + ], + "source": [ + "quantization = 'float16'\n", + "convert_to_tflite(quantization)\n", + "!du -sh parallel_wavegan_float16.tflite" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7Kab76pmFifJ" + }, + "source": [ + "## Download Sample Output of Tacotron2" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "IwuNQ_Z1Fm0d", + "outputId": "a5e18dc0-573d-468e-f215-87d5bc86e67e" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloading...\n", + "From: https://drive.google.com/uc?id=1LmU3j8yedwBzXKVDo9tCvozLM4iwkRnP\n", + "To: /content/tac_output.npy\n", + "\r", + " 0% 0.00/36.0k [00:00\n", + " \n", + " Your browser does not support the audio element.\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 31, + "metadata": { + "tags": [] + }, + "output_type": "execute_result" + } + ], + "source": [ + "output = output[0, :, 0]\n", + "\n", + "Audio(output, rate=22050)" + ] + } + ], + "metadata": { + "colab": { + "collapsed_sections": [], + "name": "Tensorflow_TTS_PWGAN.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.3" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/TensorFlowTTS/notebooks/TensorFlowTTS_FastSpeech_with_TFLite.ipynb b/TensorFlowTTS/notebooks/TensorFlowTTS_FastSpeech_with_TFLite.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..64351bc9c42480cb8955d9265da083e1ee66d26e --- /dev/null +++ b/TensorFlowTTS/notebooks/TensorFlowTTS_FastSpeech_with_TFLite.ipynb @@ -0,0 +1,491 @@ +{ + "cells": [ + { + "cell_type": "markdown", + 
"metadata": { + "colab_type": "text", + "id": "view-in-github" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "sSxhJVHSDGGc" + }, + "source": [ + "##### Copyright 2020 The TensorFlow Authors. All Rights Reserved.\n", + "\n", + "Licensed under the Apache License, Version 2.0 (the \"License\");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Q0ySjtvCD6nJ" + }, + "outputs": [], + "source": [ + "#@title Licensed under the Apache License, Version 2.0 (the \"License\"); { display-mode: \"form\" }\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Do75nZO17R_g" + }, + "source": [ + "Authors : [jaeyoo@](https://github.com/jaeyoo), [khanhlvg@](https://github.com/khanhlvg), [abattery@](https://github.com/abattery), [thaink@](https://github.com/thaink) (Google Research) (refactored by [sayakpaul](https://github.com/sayakpaul) (PyImageSearch))\n", + "\n", + "Created : 2020-07-03 KST\n", + "\n", + "Last updated : 2020-07-04 KST\n", + "\n", + "-----\n", + "Change logs\n", + "* 2020-07-04 KST : Update notebook with the latest repo.\n", + " * https://github.com/TensorSpeech/TensorflowTTS/pull/84 merged.\n", + "* 2020-07-03 KST : First implementation (outputs : `fastspeech_quant.tflite`)\n", + " * varied-length input tensor, varied-length output tensor\n", + " * Inference on tflite works well.\n", + "* 2020-12-22 IST: Notebook runs end-to-end on Colab.\n", + "-----\n", + "\n", + "**Status** : successfully converted (`fastspeech_quant.tflite`)\n", + "\n", + "**Disclaimer** \n", + "- This colab doesn't care about the latency, so it compressed the model with quantization. (112 MB -> 28 MB)\n", + "- The TFLite file doesn't have LJSpeechProcessor. 
So you need to run it before feeding input vectors.\n", + "- `tf-nightly>=2.4.0-dev20200630`\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "p5aF0cRBv57s" + }, + "source": [ + "# Generate voice with FastSpeech" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "VJTsCmhciNfz" + }, + "outputs": [], + "source": [ + "!git clone https://github.com/TensorSpeech/TensorFlowTTS.git\n", + "!cd TensorFlowTTS\n", + "!pip install /content/TensorFlowTTS/" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "-vKfQMu7PiVL" + }, + "outputs": [], + "source": [ + "!pip install -q tf-nightly" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "vFZ9aWOnP3y_" + }, + "source": [ + "**Another runtime restart is required.**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "EHHcYEUyon5W", + "outputId": "f89c5c36-a33a-48c2-9fb6-11ced05d4eea" + }, + "outputs": [], + "source": [ + "import numpy as np\n", + "import yaml\n", + "import tensorflow as tf\n", + "\n", + "import sys\n", + "sys.path.append('/content/TensorFlowTTS')\n", + "\n", + "from tensorflow_tts.inference import AutoProcessor\n", + "from tensorflow_tts.inference import AutoConfig\n", + "from tensorflow_tts.inference import TFAutoModel\n", + "\n", + "from IPython.display import Audio\n", + "print(tf.__version__) # check if >= 2.4.0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "nBr1A7MBSm6u" + }, + "outputs": [], + "source": [ + "# initialize melgan model\n", + "melgan = TFAutoModel.from_pretrained(\"tensorspeech/tts-melgan-ljspeech-en\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "VG0PwedpqFhd" + }, + "outputs": [], + "source": [ + "# initialize FastSpeech model.\n", + "fastspeech = 
TFAutoModel.from_pretrained(\"tensorspeech/tts-fastspeech-ljspeech-en\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "7cPCQoxam3vp", + "outputId": "3dcede9a-045c-4599-dea8-0a6c1e38d942" + }, + "outputs": [], + "source": [ + "input_text = \"Recent research at Harvard has shown meditating\\\n", + "for as little as 8 weeks, can actually increase the grey matter in the \\\n", + "parts of the brain responsible for emotional regulation, and learning.\"\n", + "\n", + "processor = AutoProcessor.from_pretrained(\"tensorspeech/tts-fastspeech-ljspeech-en\")\n", + "input_ids = processor.text_to_sequence(input_text.lower())\n", + "\n", + "mel_before, mel_after, duration_outputs, _, _ = fastspeech.inference(\n", + "    input_ids=tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0),\n", + "    speaker_ids=tf.convert_to_tensor([0], dtype=tf.int32),\n", + "    speed_ratios=tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + "    f0_ratios=tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + "    energy_ratios=tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + ")\n", + "\n", + "audio_before = melgan(mel_before)[0, :, 0]\n", + "audio_after = melgan(mel_after)[0, :, 0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "id": "SxVxtZI5sDF-", + "outputId": "b9bbeff9-2b25-4ef2-9979-e6f41859beb3" + }, + "outputs": [], + "source": [ + "Audio(data=audio_before, rate=22050)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "id": "i5gV4y9RpLBA", + "outputId": "fdcbb41f-ad7b-4320-86f2-452385adda1e" + }, + "outputs": [], + "source": [ + "Audio(data=audio_after, rate=22050)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "38xzKgqgwbLl" + }, + "source": [ + "# Convert to 
TFLite" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "j3eBgJr1CfqF" + }, + "outputs": [], + "source": [ + "# Concrete Function\n", + "fastspeech_concrete_function = fastspeech.inference_tflite.get_concrete_function()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "d9CUR0UD8O9w" + }, + "outputs": [], + "source": [ + "converter = tf.lite.TFLiteConverter.from_concrete_functions(\n", + " [fastspeech_concrete_function]\n", + ")\n", + "converter.optimizations = [tf.lite.Optimize.DEFAULT]\n", + "converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS,\n", + " tf.lite.OpsSet.SELECT_TF_OPS]\n", + "tflite_model = converter.convert()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "IjLkV0wlIVq1", + "outputId": "bc6fc0aa-31ab-4188-a851-6135abd8a5cd" + }, + "outputs": [], + "source": [ + "# Save the TF Lite model.\n", + "with open('fastspeech_quant.tflite', 'wb') as f:\n", + " f.write(tflite_model)\n", + "\n", + "print('Model size is %f MBs.' 
% (len(tflite_model) / 1024 / 1024.0) )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "gLoUH69hJkIK" + }, + "outputs": [], + "source": [ + "## Download the TF Lite model\n", + "#from google.colab import files\n", + "#files.download('fastspeech_quant.tflite') " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "1WqL_NEbtL5K" + }, + "source": [ + "# Inference from TFLite" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "JjNnqWlItLXi" + }, + "outputs": [], + "source": [ + "import numpy as np\n", + "import tensorflow as tf\n", + "\n", + "# Load the TFLite model and allocate tensors.\n", + "interpreter = tf.lite.Interpreter(model_path='fastspeech_quant.tflite')\n", + "\n", + "# Get input and output tensors.\n", + "input_details = interpreter.get_input_details()\n", + "output_details = interpreter.get_output_details()\n", + "\n", + "# Prepare input data.\n", + "def prepare_input(input_ids):\n", + " input_ids = tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0)\n", + " return (input_ids,\n", + " tf.convert_to_tensor([0], tf.int32),\n", + " tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + " tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + " tf.convert_to_tensor([1.0], dtype=tf.float32))\n", + "\n", + "# Test the model on random input data.\n", + "def infer(input_text):\n", + " processor = AutoProcessor.from_pretrained(pretrained_path=\"ljspeech_mapper.json\")\n", + " input_ids = processor.text_to_sequence(input_text.lower())\n", + " interpreter.resize_tensor_input(input_details[0]['index'], \n", + " [1, len(input_ids)])\n", + " interpreter.resize_tensor_input(input_details[1]['index'], \n", + " [1])\n", + " interpreter.resize_tensor_input(input_details[2]['index'], \n", + " [1])\n", + " interpreter.resize_tensor_input(input_details[3]['index'], \n", + " [1])\n", + " interpreter.resize_tensor_input(input_details[4]['index'], \n", + " [1])\n", + " 
interpreter.allocate_tensors()\n", + " input_data = prepare_input(input_ids)\n", + " for i, detail in enumerate(input_details):\n", + " input_shape = detail['shape_signature']\n", + " interpreter.set_tensor(detail['index'], input_data[i])\n", + "\n", + " interpreter.invoke()\n", + "\n", + " # The function `get_tensor()` returns a copy of the tensor data.\n", + " # Use `tensor()` in order to get a pointer to the tensor.\n", + " return (interpreter.get_tensor(output_details[0]['index']),\n", + " interpreter.get_tensor(output_details[1]['index']))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "dRgCO2UfdrBe" + }, + "outputs": [], + "source": [ + "input_text = \"Recent research at Harvard has shown meditating\\\n", + "for as little as 8 weeks, can actually increase the grey matter in the \\\n", + "parts of the brain responsible for emotional regulation, and learning.\"\n", + "\n", + "decoder_output_tflite, mel_output_tflite = infer(input_text)\n", + "audio_before_tflite = melgan(decoder_output_tflite)[0, :, 0]\n", + "audio_after_tflite = melgan(mel_output_tflite)[0, :, 0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "id": "vajrYnWAX31f", + "outputId": "6ec5fcf3-c66b-4dac-f749-e7149beb81eb" + }, + "outputs": [], + "source": [ + "Audio(data=audio_before_tflite, rate=22050)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "id": "-eJ5QGc5X_Tc", + "outputId": "85654556-89ee-4b3e-9100-f60390af26f2", + "scrolled": true + }, + "outputs": [], + "source": [ + "Audio(data=audio_after_tflite, rate=22050)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "iht1FDZUd0Ig" + }, + "outputs": [], + "source": [ + "input_text = \"I love TensorFlow Lite converted FastSpeech with quantization. 
\\\n", + "The converted model file is of 28.6 Mega bytes.\"\n", + "\n", + "decoder_output_tflite, mel_output_tflite = infer(input_text)\n", + "audio_before_tflite = melgan(decoder_output_tflite)[0, :, 0]\n", + "audio_after_tflite = melgan(mel_output_tflite)[0, :, 0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "id": "ZJVtr-D3d6rr", + "outputId": "138a0cc3-d367-4a0c-872a-47ca25e12f69" + }, + "outputs": [], + "source": [ + "Audio(data=audio_before_tflite, rate=22050)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "id": "mBU2Zdl1d8ZI", + "outputId": "ce188fbf-5d86-474f-84ee-b22baac6f417" + }, + "outputs": [], + "source": [ + "Audio(data=audio_after_tflite, rate=22050)" + ] + } + ], + "metadata": { + "colab": { + "collapsed_sections": [], + "include_colab_link": true, + "name": "TensorFlowTTS - FastSpeech with TFLite", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/TensorFlowTTS/notebooks/TensorFlowTTS_Tacotron2_with_TFLite.ipynb b/TensorFlowTTS/notebooks/TensorFlowTTS_Tacotron2_with_TFLite.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..420d0667e9f843e7a1c269347e316e928f69a7ef --- /dev/null +++ b/TensorFlowTTS/notebooks/TensorFlowTTS_Tacotron2_with_TFLite.ipynb @@ -0,0 +1,640 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "bFYay4JAJLev" + }, + "source": [ + "##### Copyright 2020 The TensorFlow 
Authors. All Rights Reserved.\n", + "\n", + "Licensed under the Apache License, Version 2.0 (the \"License\");" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Xd2YjZN6JP86" + }, + "outputs": [], + "source": [ + "#@title Licensed under the Apache License, Version 2.0 (the \"License\"); { display-mode: \"form\" }\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Do75nZO17R_g" + }, + "source": [ + "Author : [jaeyoo@](https://github.com/jaeyoo), [khanhlvg@](https://github.com/khanhlvg), [abattery@](https://github.com/abattery), [thaink@](https://github.com/thaink) (Google Research)\n", + "\n", + "Created : 2020-06-30 KST\n", + "\n", + "Last updated : 2020-07-04 KST\n", + "\n", + "-----\n", + "Change logs\n", + "* 2020-07-04 KST : Update notebook with the lastest TensorflowTTS repo.\n", + " * compatible with https://github.com/TensorSpeech/TensorflowTTS/pull/83\n", + "* 2020-07-02 KST : Third implementation (outputs : `tacotron2.tflite`) \n", + " * **varied-length** input tensor, **varied-length** output tensor\n", + "-----\n", + "\n", + "**Status** : successfully converted (`tacotron2.tflite`)\n", + "\n", + "**Disclaimer** \n", + "- This colab doesn't care about the latency, so it compressed the model with quantization. (129 MB -> 33 MB)\n", + "- The TFLite file doesn't have LJSpeechProcessor. 
So you need to run it before feeding input vectors.\n", + "- `tf-nightly>=2.4.0-dev20200630`\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "p5aF0cRBv57s" + }, + "source": [ + "# Generate voice with Tacotron2" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 802 + }, + "colab_type": "code", + "id": "3kDDtdfy-Fcf", + "outputId": "c562941a-b89f-40aa-dbcb-c71ff93ec500" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: tf-nightly in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (2.4.0.dev20200716)\n", + "Requirement already satisfied: astunparse==1.6.3 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (1.6.3)\n", + "Requirement already satisfied: gast==0.3.3 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (0.3.3)\n", + "Requirement already satisfied: wrapt>=1.11.1 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (1.12.1)\n", + "Requirement already satisfied: six>=1.12.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (1.15.0)\n", + "Requirement already satisfied: grpcio>=1.8.6 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (1.30.0)\n", + "Requirement already satisfied: numpy<1.19.0,>=1.16.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (1.18.5)\n", + "Requirement already satisfied: absl-py>=0.7.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (0.9.0)\n", + "Requirement already satisfied: keras-preprocessing<1.2,>=1.1.1 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (1.1.2)\n", + "Requirement already satisfied: protobuf>=3.9.2 in 
/home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (3.12.2)\n", + "Requirement already satisfied: wheel>=0.26 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (0.34.2)\n", + "Requirement already satisfied: opt-einsum>=2.3.2 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (3.2.1)\n", + "Requirement already satisfied: tb-nightly<2.4.0a0,>=2.3.0a0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (2.3.0a20200716)\n", + "Requirement already satisfied: google-pasta>=0.1.8 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (0.2.0)\n", + "Requirement already satisfied: tf-estimator-nightly in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (2.4.0.dev2020071601)\n", + "Requirement already satisfied: h5py<2.11.0,>=2.10.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (2.10.0)\n", + "Requirement already satisfied: termcolor>=1.1.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tf-nightly) (1.1.0)\n", + "Requirement already satisfied: setuptools in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from protobuf>=3.9.2->tf-nightly) (49.2.0.post20200714)\n", + "Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (0.4.1)\n", + "Requirement already satisfied: werkzeug>=0.11.15 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (1.0.1)\n", + "Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (1.7.0)\n", + "Requirement already satisfied: google-auth<2,>=1.6.3 in 
/home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (1.19.1)\n", + "Requirement already satisfied: markdown>=2.6.8 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (3.2.2)\n", + "Requirement already satisfied: requests<3,>=2.21.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (2.24.0)\n", + "Requirement already satisfied: requests-oauthlib>=0.7.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from google-auth-oauthlib<0.5,>=0.4.1->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (1.3.0)\n", + "Requirement already satisfied: pyasn1-modules>=0.2.1 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from google-auth<2,>=1.6.3->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (0.2.8)\n", + "Requirement already satisfied: cachetools<5.0,>=2.0.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from google-auth<2,>=1.6.3->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (4.1.1)\n", + "Requirement already satisfied: rsa<5,>=3.1.4; python_version >= \"3\" in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from google-auth<2,>=1.6.3->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (4.6)\n", + "Requirement already satisfied: importlib-metadata; python_version < \"3.8\" in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from markdown>=2.6.8->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (1.7.0)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from requests<3,>=2.21.0->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (1.25.9)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from requests<3,>=2.21.0->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (2020.6.20)\n", 
+ "Requirement already satisfied: chardet<4,>=3.0.2 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from requests<3,>=2.21.0->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (3.0.4)\n", + "Requirement already satisfied: idna<3,>=2.5 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from requests<3,>=2.21.0->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (2.10)\n", + "Requirement already satisfied: oauthlib>=3.0.0 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (3.1.0)\n", + "Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.3->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (0.4.8)\n", + "Requirement already satisfied: zipp>=0.5 in /home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tb-nightly<2.4.0a0,>=2.3.0a0->tf-nightly) (3.1.0)\n" + ] + } + ], + "source": [ + "!pip install tf-nightly" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 105 + }, + "colab_type": "code", + "id": "EHHcYEUyon5W", + "outputId": "55c16833-e745-4fdb-b12d-378fe7b2849d" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/lap13839/anaconda3/envs/tflite/lib/python3.7/site-packages/tensorflow_addons/utils/ensure_tf_install.py:44: UserWarning: You are currently using a nightly version of TensorFlow (2.4.0-dev20200716). \n", + "TensorFlow Addons offers no support for the nightly versions of TensorFlow. Some things might work, some other might not. 
\n", + "If you encounter a bug, do not file an issue on GitHub.\n", + " UserWarning,\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2.4.0-dev20200716\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "import soundfile as sf\n", + "import yaml\n", + "import tensorflow as tf\n", + "\n", + "from tensorflow_tts.inference import AutoProcessor\n", + "from tensorflow_tts.inference import AutoConfig\n", + "from tensorflow_tts.inference import TFAutoModel\n", + "\n", + "from IPython.display import Audio\n", + "print(tf.__version__)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "nBr1A7MBSm6u" + }, + "outputs": [], + "source": [ + "# initialize melgan model\n", + "melgan = TFAutoModel.from_pretrained(\"tensorspeech/tts-melgan-ljspeech-en\")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 289 + }, + "colab_type": "code", + "id": "n-eiPi6Vmf47", + "outputId": "f0c5e414-d126-4565-a9b3-bcfa2ca747ee" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model: \"tacotron2v2\"\n", + "_________________________________________________________________\n", + "Layer (type) Output Shape Param # \n", + "=================================================================\n", + "encoder (TFTacotronEncoder) multiple 8218624 \n", + "_________________________________________________________________\n", + "decoder_cell (TFTacotronDeco multiple 18246402 \n", + "_________________________________________________________________\n", + "post_net (TFTacotronPostnet) multiple 5460480 \n", + "_________________________________________________________________\n", + "residual_projection (Dense) multiple 41040 \n", + "=================================================================\n", + "Total params: 31,966,546\n", + "Trainable params: 31,956,306\n", + 
"Non-trainable params: 10,240\n", + "_________________________________________________________________\n" + ] + } + ], + "source": [ + "# initialize Tacotron2 model.\n", + "tacotron2 = TFAutoModel.from_pretrained(\"tensorspeech/tts-tacotron2-ljspeech-en\", enable_tflite_convertible=True)\n", + "\n", + "# Newly added :\n", + "tacotron2.setup_window(win_front=6, win_back=6)\n", + "tacotron2.setup_maximum_iterations(3000)\n", + "\n", + "tacotron2.summary()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "38xzKgqgwbLl" + }, + "source": [ + "# Convert to TF Lite" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "j3eBgJr1CfqF" + }, + "outputs": [], + "source": [ + "# Concrete Function\n", + "tacotron2_concrete_function = tacotron2.inference_tflite.get_concrete_function()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "d9CUR0UD8O9w", + "outputId": "93780e00-091f-4589-c688-abeb0b19eab1" + }, + "outputs": [], + "source": [ + "converter = tf.lite.TFLiteConverter.from_concrete_functions(\n", + " [tacotron2_concrete_function]\n", + ")\n", + "converter.optimizations = [tf.lite.Optimize.DEFAULT]\n", + "converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS,\n", + " tf.lite.OpsSet.SELECT_TF_OPS]\n", + "tflite_model = converter.convert()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "IjLkV0wlIVq1", + "outputId": "7085e0d8-844a-42bb-fc49-b777fa3beb03" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model size is 33.242188 MBs.\n" + ] + } + ], + "source": [ + "# Save the TF Lite model.\n", + "with open('tacotron2.tflite', 'wb') as f:\n", + " 
f.write(tflite_model)\n", + "\n", + "print('Model size is %f MBs.' % (len(tflite_model) / 1024 / 1024.0) )" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 17 + }, + "colab_type": "code", + "id": "gLoUH69hJkIK", + "outputId": "fadbe364-c346-492f-dd89-644382b454eb" + }, + "outputs": [], + "source": [ + "# Download the TF Lite model\n", + "# from google.colab import files\n", + "# files.download('tacotron2.tflite') " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "1WqL_NEbtL5K" + }, + "source": [ + "# Inference from TFLite" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "JjNnqWlItLXi" + }, + "outputs": [], + "source": [ + "import numpy as np\n", + "import tensorflow as tf\n", + "\n", + "# Load the TFLite model and allocate tensors.\n", + "interpreter = tf.lite.Interpreter(model_path='tacotron2.tflite')\n", + "interpreter.allocate_tensors()\n", + "\n", + "# Get input and output tensors.\n", + "input_details = interpreter.get_input_details()\n", + "output_details = interpreter.get_output_details()\n", + "\n", + "# Prepare input data.\n", + "def prepare_input(input_ids):\n", + " return (tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0),\n", + " tf.convert_to_tensor([len(input_ids)], tf.int32),\n", + " tf.convert_to_tensor([0], dtype=tf.int32))\n", + " \n", + "# Test the model on random input data.\n", + "def infer(input_text):\n", + " processor = LJSpeechProcessor(None, \"english_cleaners\")\n", + " input_ids = processor.text_to_sequence(input_text.lower())\n", + " input_ids = np.concatenate([input_ids, [len(symbols) - 1]], -1) # eos.\n", + " interpreter.resize_tensor_input(input_details[0]['index'], \n", + " [1, len(input_ids)])\n", + " interpreter.allocate_tensors()\n", + " input_data = prepare_input(input_ids)\n", + " for i, detail in 
enumerate(input_details):\n", + " print(detail)\n", + " input_shape = detail['shape']\n", + " interpreter.set_tensor(detail['index'], input_data[i])\n", + "\n", + " interpreter.invoke()\n", + "\n", + " # The function `get_tensor()` returns a copy of the tensor data.\n", + " # Use `tensor()` in order to get a pointer to the tensor.\n", + " return (interpreter.get_tensor(output_details[0]['index']),\n", + " interpreter.get_tensor(output_details[1]['index']))" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 88 + }, + "colab_type": "code", + "id": "dRgCO2UfdrBe", + "outputId": "12f39cb3-2ce7-4b74-9142-bbca3b8d2373" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'name': 'input_ids', 'index': 0, 'shape': array([1, 1], dtype=int32), 'shape_signature': array([ 1, -1], dtype=int32), 'dtype': , 'quantization': (0.0, 0), 'quantization_parameters': {'scales': array([], dtype=float32), 'zero_points': array([], dtype=int32), 'quantized_dimension': 0}, 'sparsity_parameters': {}}\n", + "{'name': 'input_lengths', 'index': 1, 'shape': array([1], dtype=int32), 'shape_signature': array([1], dtype=int32), 'dtype': , 'quantization': (0.0, 0), 'quantization_parameters': {'scales': array([], dtype=float32), 'zero_points': array([], dtype=int32), 'quantized_dimension': 0}, 'sparsity_parameters': {}}\n", + "{'name': 'speaker_ids', 'index': 2, 'shape': array([1], dtype=int32), 'shape_signature': array([1], dtype=int32), 'dtype': , 'quantization': (0.0, 0), 'quantization_parameters': {'scales': array([], dtype=float32), 'zero_points': array([], dtype=int32), 'quantized_dimension': 0}, 'sparsity_parameters': {}}\n" + ] + } + ], + "source": [ + "input_text = \"Recent research at Harvard has shown meditating\\\n", + "for as little as 8 weeks, can actually increase the grey matter in the \\\n", + "parts of the brain responsible for emotional regulation, and 
learning.\"\n", + "\n", + "decoder_output_tflite, mel_output_tflite = infer(input_text)\n", + "audio_before_tflite = melgan(decoder_output_tflite)[0, :, 0]\n", + "audio_after_tflite = melgan(mel_output_tflite)[0, :, 0]" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "colab_type": "code", + "id": "vajrYnWAX31f", + "outputId": "aefc25c4-3985-4325-a4dd-87ed2db10f3b" + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Audio(data=audio_before_tflite, rate=22050)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "colab_type": "code", + "id": "-eJ5QGc5X_Tc", + "outputId": "2da7480d-a602-444c-f286-26a35730b1fa" + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Audio(data=audio_after_tflite, rate=22050)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 88 + }, + "colab_type": "code", + "id": "iht1FDZUd0Ig", + "outputId": "063f32d6-6d0a-46da-f264-9b6ea4b39ed3" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'name': 'input_ids', 'index': 0, 'shape': array([1, 1], dtype=int32), 'shape_signature': array([ 1, -1], dtype=int32), 'dtype': , 'quantization': (0.0, 0), 'quantization_parameters': {'scales': array([], dtype=float32), 'zero_points': array([], dtype=int32), 'quantized_dimension': 0}, 'sparsity_parameters': {}}\n", + "{'name': 'input_lengths', 'index': 1, 'shape': array([1], dtype=int32), 'shape_signature': 
array([1], dtype=int32), 'dtype': , 'quantization': (0.0, 0), 'quantization_parameters': {'scales': array([], dtype=float32), 'zero_points': array([], dtype=int32), 'quantized_dimension': 0}, 'sparsity_parameters': {}}\n", + "{'name': 'speaker_ids', 'index': 2, 'shape': array([1], dtype=int32), 'shape_signature': array([1], dtype=int32), 'dtype': , 'quantization': (0.0, 0), 'quantization_parameters': {'scales': array([], dtype=float32), 'zero_points': array([], dtype=int32), 'quantized_dimension': 0}, 'sparsity_parameters': {}}\n" + ] + } + ], + "source": [ + "input_text = \"I love TensorFlow Lite converted Tacotron 2.\"\n", + "\n", + "decoder_output_tflite, mel_output_tflite = infer(input_text)\n", + "audio_before_tflite = melgan(decoder_output_tflite)[0, :, 0]\n", + "audio_after_tflite = melgan(mel_output_tflite)[0, :, 0]" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "colab_type": "code", + "id": "ZJVtr-D3d6rr", + "outputId": "2ebad60a-ec4f-4ae8-c013-65e3e6baa259" + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Audio(data=audio_before_tflite, rate=22050)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 75 + }, + "colab_type": "code", + "id": "mBU2Zdl1d8ZI", + "outputId": "00cbd782-b763-4d17-ec1d-12ad713f6e1f" + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Audio(data=audio_after_tflite, rate=22050)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { 
+ "colab": { + "collapsed_sections": [], + "name": "TensorFlowTTS - Tacotron2 with TFLite", + "provenance": [], + "toc_visible": true + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/TensorFlowTTS/notebooks/fastspeech2_inference.ipynb b/TensorFlowTTS/notebooks/fastspeech2_inference.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..ff5fcd0608d5713ed3f8edf27f301488f937ff38 --- /dev/null +++ b/TensorFlowTTS/notebooks/fastspeech2_inference.ipynb @@ -0,0 +1,234 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import tensorflow as tf\n", + "\n", + "from tensorflow_tts.inference import AutoConfig\n", + "from tensorflow_tts.inference import TFAutoModel\n", + "from tensorflow_tts.inference import AutoProcessor" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "processor = AutoProcessor.from_pretrained(\"tensorspeech/tts-fastspeech2-ljspeech-en\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"i love you so much.\"\n", + "input_ids = processor.text_to_sequence(input_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "fastspeech2 = TFAutoModel.from_pretrained(\"tensorspeech/tts-fastspeech2-ljspeech-en\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Save to Pb" + ] + }, + { + "cell_type": 
"code", + "execution_count": 5, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:From /home/lap13548/anaconda3/envs/tensorflow-tts/lib/python3.7/site-packages/tensorflow/python/training/tracking/tracking.py:111: Model.state_updates (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "This property should not be used in TensorFlow 2.0, as updates are applied automatically.\n", + "WARNING:tensorflow:From /home/lap13548/anaconda3/envs/tensorflow-tts/lib/python3.7/site-packages/tensorflow/python/training/tracking/tracking.py:111: Layer.updates (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "This property should not be used in TensorFlow 2.0, as updates are applied automatically.\n", + "INFO:tensorflow:Assets written to: ./test_saved/assets\n" + ] + } + ], + "source": [ + "# save model into pb and do inference. 
Note that signatures should be a tf.function with input_signatures.\n", + "tf.saved_model.save(fastspeech2, \"./test_saved\", signatures=fastspeech2.inference)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Load and Inference" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "fastspeech2 = tf.saved_model.load(\"./test_saved\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"There’s a way to measure the acute emotional intelligence that has never gone out of style.\"\n", + "input_ids = processor.text_to_sequence(input_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "mel_before, mel_after, duration_outputs, _, _ = fastspeech2.inference(\n", + " input_ids=tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0),\n", + " speaker_ids=tf.convert_to_tensor([0], dtype=tf.int32),\n", + " speed_ratios=tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + " f0_ratios =tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + " energy_ratios =tf.convert_to_tensor([1.0], dtype=tf.float32)\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlAAAACuCAYAAAD55TMFAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9ebB123YXhv3GbNZae5/ua27/3n2NWkCyZKBAUjBGtsE2whTEFdMkccAmFk7FgO2YpigqjmPLyJWYxkUSmoTQBDCk6IwNZQMyhMb0jQgokh7iNfe9++69X3PO2WfvvZo558gfY8xm7++7jfQeV0Les+q795x99t5rrdmO8Ru/8RvEzDi1Uzu1Uzu1Uzu1Uzu1D97MD/cNnNqpndqpndqpndqp/ZPWTgbUqZ3aqZ3aqZ3aqZ3aD7KdDKhTO7VTO7VTO7VTO7UfZDsZUKd2aqd2aqd2aqd2aj/IdjKgTu3UTu3UTu3UTu3UfpDtZECd2qmd2qmd2qmd2qn9INvJgDq1U/sR3IjodxHRf6I//1Qi+t4P6bpMRF/1j/H7P6HXcD/Ez38tEf1tItoQ0S//ct/fqZ3aqZ3a+7WTAXVqp/YlNiL6NBHtieiOiN4iov8nEZ1/ua/DzH+Bmb/2A9zPLyaiv/jlvn7z/X9OjZ9vPHr9j+nr3/qP69pN+1UA/hwzXzDzf6Fj8NO/nBcgol9CRP8/NdLeIqL/hoguvpzXOLrel2RUntqpndqH204G1Kmd2pen/WxmPgfwEwD8JAC/7vgNP8oOxu8D8L/KvxDRQwDfDOCdD+n6Hwfw978cX0TSzNFrPw3AfwrgFzLzBYAfC+APfTmu96W0L+ccet5zn9qpndoHb6fFc2qn9mVszPx5AH8KwNcDJRT2vyWi7wfw/frav0JEf4eIronoLxPRN+TPE9GPJ6K/pajHHwQwNH/7ViJ6o/n9dSL6I0T0DhE9JqLfQkQ/FsBvBfAtiohd63t7Ivo/E9FnFU35rUS0ar7rVxLRm0T0BSL6Nz/Ao/4+AD+fiKz+/gsB/FEAc/Odhoh+DRH9Q72/P0REDz5IPxLRVxLRd+nnHhHR7yOie/q37wLwzwH4LfqMfwDAxwD8Cf39V+n7vln795qI/m6LjCmK9h1E9JcA7AB8xdEt/CQA/wMz/20AYOYnzPy7mXmjn/9d2od/WsfqzxPRx5vv/zH6tydE9L1E9POav62I6D8nos8Q0Q0R/UUdi/+PvuVan+NbFE38S0T0G4noCYD/AxFdEdHv0XH/DBH9umwIEZHV735ERP+IiP6dFtV63nMT0b9BRN+jz/EDRPRLm3v9ViJ6g4h+FRG9rXPk5xLRtxHR9+nz/doPMqandmo/6hozn/6d/p3+fQn/AHwawE/Xn1+HICP/sf7OAP40gAcAVhCE6m0A3wTAAvhF+vkeQAfgMwD+PQAewP8MwALgP9Hv+lYAb+jPFsDfBfAbAZxBDK1/Rv/2iwH8xaN7/E0A/iu9jwsAfwLAr9e//csA3oIYfWcAfr/e91e9y/P+OQD/awD/HYCfqa/9NQDfAuANAN+qr/27AP4KgI/q8/02AH9A//YJvYZ7l2t8FYCfoZ97EWJc/Kbje3jeGOjvHwHwGMC3QRzFn6G/v9h8/rMAvg6AA+CPrv9TAewB/EcAfgqA/ujvvwvABsA/q/f4m3Ofax9+DsC/od/9EwA8AvB1+vf/i17/IzqO/xP9jmf6RMcyAPhl+l0rAL8HwB/XcfwEBA38Jfr+fxvAP9A+vw/gz7Tf+bznBvCzAHwlAALw0yCG1U9o5lwA8L/X9/5bEJTx9+v1vw7ACOArfrjX4enf6d+H/e+H/QZO/07//kn/p4f3HYBriAH0fwWw0r8xgH++ee//DWpcNa99rx5c/yyALwCg5m9/Gc83oL5FD7JnDBAcGVB6MG4BfGXz2rcA+Ef68+8E8J3N374GH8yA+l8C+AMAvhbA9+nfWgPqewD8C83nXoUYhO55xsL79PH
PBfC3j+/haAxaA+pXA/i9R9/x3wL4Rc3n/4/vc82fCTE0r3V8fwMAq3/7XQD+y+a95wAixID++QD+wtF3/TYA/yHEmNsD+MbnXO+ZPtGx/GzzuwUwAfhxzWu/FMIHA4DvAvBLm7/9dDxrQL3fc/8xAL+imXP75rkv9Pu+qXn/3wTwc3+41+Hp3+nfh/3vRxMn49RO7Yez/Vxm/jPv8rfPNT9/HMAvIqJf1rzWAXgNcjB9npnbCt+feZfvfB3AZ5g5fIB7exHAGsDfJKL8GkEOY+i1/+YHuOZx+yMA/nMIsvN7n/P3jwP4o0SUmtcigJfbNxHRT4WEPQF5pq8jopcA/BcQJOgCYng8/YD3la/9rxHRz25e8wD+++b3Mi5EdNe8/uOY+bPM/KcA/CkNj/1zAP7fEGP3tx1/npnvNMT2ml77m3L4VJuD9NELELTwH/4gnqWdPy+gIpW5fQaCZkGv376//fm5rxHRz4QYd18D6ec1gL/XvOUxM0f9ea//f6v5+x5iQJ7aqf2Pqp0MqFM7tX/8rTWIPgfgO5j5O47fREJc/ggRUWNEfQzPP2w/B+BjROSeY0Tx0e+PIIfc17FwtI7bmxCDLLePvfujNBdh3hHRnwLwv4GEgJ53j/8mM/+l4z8Q0Sea7/kLePYA/vWQ5/gGZn5MRD8XwG95r9t5zrV/LzP/Wx/kMywJAM9/E3MC8GeVe/X1zZ9Kn5FkXT6AIIifA/DnmflnHH+XGmMjpL/+7vs8w/NefwRB8T4OCdUBMl55XN+EhO+eucfnfR8R9QD+MCQh4I8z80JEfwxiYJ/aqZ3ae7QTifzUTu3Dbb8DwL9NRN9E0s6I6GeRpMf/DxC+yS8nIkdE/yqAn/wu3/PXIIfld+p3DET0U/RvbwH4KBF1QDEAfgeA36jIDojoI0T0L+n7/xCAX0xEP46I1hA04oO2XwvgpzHzp5/zt98K4DsyuZqIXiSin/MBv/cCGhYloo8A+JXv8/63cEgE/38B+NlE9C8psXpQQvRH3+XzB42Ifg4R/QIiuq/j9JMhYda/0rzt24jon9F+/o8B/FVm/hyA/xrA1xDRv05EXv/9JCL6sToWvxPAbyCi1/TevkUNmXcAJDxLaC9NkaA/BOnXC+3bf1+fF/q3X6Hjew8Synyv1kH4V+8ACIpG/YsfpI9O7dT+x95OBtSpndqH2Jj5b0CIuL8FEpL6FITnAmaeAfyr+vtTCJfmj7zL90QAPxtCtv4shHv08/XP3wUhsn+RiB7pa79ar/VXiOgWQi7+Wv2uPwUhmX+Xvue7fhDP8wVmfjfNqd8MIa7/d0S0gRgf3/QBv/o/gpCvbwD8N3iXfmjarwfw60gy7v4DNWR+DsTAeweCCv1KfPA97ylknL4fwC3EQPk/MfPva97z+yHG5hMAPxHA/wIAWDL1/kUAvwCCSH0RwH8GMVQA4D+AhMj+un72PwNgmHkH4DsA/CV9jm9+l3v7ZRBO2w8A+It6H79T//Y7IOT+7wbwtwH8SYhRHp/9mnKvvxxieD0F8D+HjNmpndqpvU+jQ7rFqZ3aqZ3aqb1fI6LfBSH0P6P39SOpKaL0W5n54+/75lM7tVP7QbUTAnVqp3Zqp/ajpJFoTH2bhoA/AkHI/ugP932d2qn9aGwnA+rUTu3UTu1HTyNI+PMpJIT3PRANp1M7tVP7MrcvKYRHRP8yhOdgAfzfmfk7v1w3dmqndmqndmqndmqn9iO1/ZANKJISDt8HUfh9A0KI/IXM/A/e84Ondmqndmqndmqndmr/hLcvJYT3kwF8ipl/QLOH/ktI1supndqpndqpndqpndqP6valCGl+BIeKtm/gfVKU7dkZu/sPqkQbAWCAEkCaZGtC/p1BGRzLKBkRWE2+sCLwOmHlF3RGdAQtJRgwGIQpyaPtQocYjKir5AszgASQXhvQ3/Nr5br6v1ZSjvR3al43+rNpXqN8OT747DMtEkwAzAKYIO+lBCAx5nsG3ZnUZu1tADN
hThZzkGfjQEAiud/cRanp03T4WvJAzInU9j2QRybtLwCGQY7hbIQ1en/NMyV94JAMYrRAoDKWeVwP74WfLxfIR/1D9ZnY6jVWAHsGzOEXEAFE9TVmAicCIoFUYtIsgIkyr5LX71tLP1DzfUQs/+rl5fuYAGKYo+uk1EyEBCDJeFKQ6z3zzEfPzrY+H1v5uwlyn+Wzx40IsSPEXj+T+ytf4HieRSpzmh3D+ojORjgdlPy87bgyCIkJkQkhyUWWaMFR5lyZb83caz5cxvxwDjZrun07AWzkppOvz3SwJ8TDfsyPON0jnJ2NAIDeBCSWtT8uXr5b18gzffK8+0MeD/3Z4Nm5FghmBpIDzJncYJfXZrRIwRz2eXONg+fQ+5kvCN16wZmb0dMi79MxTEyYWJ5jH7x8f9Ln0X6WzmtvML/+nO3maKzyWKQ8hyzXfat5n3Gyzw5G7s9RKnMlssE+dQCA3eKRojm8DpP0ITUXzPfc7DPEdbz5+MZN/TqCrq12v2TUM0JbWBnEAXLtKhv6/L2nXTvlR9k77KK3sHCdI8/7eHN9NoTkCcnr77Y+U3u2tPsjoD8zy+dd/XLZSw6vmpyu/14+3PmIlV3QUSjr2uh5SM95aAYhKn4S2WAXO+yCl30OgDUJziTM0SKGvCDywx/14/F8y78fnUtlPZTX+Nk5edSXz7nxD9Z0P8m3K/eT59+7X4P42bG63b/5iJlffN77vxQD6nnmwDN3RUTfDuDbAcBd3cfrv+zfKwtFDgtCdwt01/LR4SbBbSPsnA5WjUwqQljLYD7+egv7E67x4195Ax9bPQUAXLkd1mbGwhaf2km1iL/xzut45/EF0mQB3dxoIZiJYBaCnXTC7AG3A+zEsFpP3ixcJkE5DKNubrYukNgRwkCIq2qgpA5InmXx6J6aXJ1luQ/sSOiuCeu3GKsnagjuE0xM+PS3Dfjab/o0AOArzh9hSg6f297HZ57cBwDsHq9hdlaeZa7fZyfAbxl+q69NDBMY84XB9tVsPHAx+PL95d8pygYFAMv9hPVrd/jI1Q0uOjmoBhvgTcSSLMYoU+iL20u8/fQCy9Me7lbGyO0Jbgf4u3ovbpR7OTAMdDNhUw9SQBaYmRnhTG7w5issdq8kpPN4MPvIJ7g+wDrZOGIwWHYe9onH6m154/otRn8TYceEu4/IwD3+8Ql0b4brIpyTAXE2oXMR1qRiHI6Lw7iXQ6Lrq+j3slgsowPvpQ/MzsDfyXgOTxjdXSr9nxcwNUYVRUbsDRZ9vrAyMAuj2yS4fSzvYy2/kjcAJsLuZY/Nx02Zb9lQTB2QenlfstKvdqwG1PixGa+99gSfvHyCF3qpXrI2M7yJ8FSlgha22MUO18sab+4vAQCfv7nC7e0KaetBi9yTmQgU9f70ILAjwe0Bdwe4vVzYTVyMB2oOzbzGYy/fMV0R5ksCW8Dfyt+Gpwl+lw4OsbwBfvp/avCt//T3AAA+sXqM2zDgH969iO97R/a7/aM1aNIJnh2ASDAzwer8BGTtg4DlDFgu1ThYMVKXBKfX63ZPLM7eAHavEu7/ZKlm8tX33sGcHD5zex9vvXVP3njrYCYDOwNuK8/WXQP9TUK3TWWef/6fB37M130O3/Tg0/h4/0jHYwIA7FKPT42yj333zUfwmaf3sd31SLOsL54MaDYwo4FptejVGSn7jpd+M0t1KOwk+wQleWYAmO/p2vIJ0DGFAS4fbPG1L7yNj6+fAABe8HfozQILxiYO+Ad3rwIA/t7br2JzswLP4kgBYnDyOsKuQnE+wuTAewu7MzC6/7qdzBkAiF19FEpirLZGdXcLrN9O6G5lvpollbmU18jTr+6x+SQQB4aZda4ugJnpwGgRI1n3HnUq2cp990/lOgDQP42wU6qfgTg+YhTwgWGUOoPpymK6J9ddzvWs4Lqv2hmwe4Yb5Z+8JvvEcm6we9GUaw1PGf1tLGMHAsZ7FtdfC6SvkE776lffxtddvYlPDI/wopOFc2Y
mDCTjZA6qKQFj8hjVOH8nXOLvbl/H33z0OnaTdP699R4Phy0+f3eFR08vZFxmKw5JASTUoVgI1DjOdiLYUZ5RpzLsLOeqWRh2kec1gUHhaE9IXPuzOGnP87r09XT0up4hsdP+cwQmAkUWmwKAmZMab4fO/HOvkxh/5q/+h+9a2upLMaDewGGZgI9CROMOGjP/dgC/HQCGj77ObGSDBdRgGYH+hjE8ld73mwA7ykxJXlZN6gxiZ8C2WubJASsXcO5mXOkueGX3GGjGwg6XuhrPuhnXfcDM1Q5nGCTdZMogRUKMkBcpH0BUPaNZD4IAUBAjIOkmY4L8i0s9CGIH8UIcwHrPbKiYuHnDMzPg9uJh5IMyeQIbg+S5oGsru8AQozMRzuqs8QnsDDhCEBfItTjI4o4+oxgAiGACo9to34+ymbaIQIus5Q0rnBOsSfA2Yu3mci+eEmCBrZEFd9dNuO5XWDqPuGojw4ptZJDGysKiRI0nArQoY74XE3HQ3A7onxosM4FdhSJSZ7AEQuzlAxwNoIeXFZsPdpYDXLxDfV7PcC7BuYhODSjvIryNsE3HxEQYiRGDxTQqsgEgzbYcBIAclG5LcHuZH+WZfUYMqBiOJgBwgozYOb+WYGcxFDIqlRyVPjloJHPH6gblN/J8qRN0Vq6LgmiFVf4cY3ABZ27Cla6RtZnhKR5ssgvbYlBtg4zxdb/CvveYowEbeeZkDBCy0X14n2zr3I9MajhTYwRV9CC/5nbyO1uUAzUjDWypIm6sqJJLWClMMJgFk3FwJsI2awTh0GMWp4Flrdi6bnLLhxxAsKOVcdTP2z3JWjXSjwBwz+8xJYfz7gzvdNJn0VlwZEHxhmyQEcJCoGSKA8CdrKsLO2JQBKqjiKintNWOcRThbIQxqUEsSQ594mLEGn3WODBSp/3WMTjisO8hY2bHanwTG8wJiGuq5I4uwTVoJSBzw3ANG3hdqNYkkGEw5c0VgAHICnKb0Q0OJMblnuB26sDqgZssQLo+KckB7I6QDL+V9ZXvMXXmYC6Vz87yvNmA9XeA2zHM0oBhRg00o2tN5wIlfa/2DVtC7GXcSv+bjGLWg5xYXrczY3giF+lv1JiMfIBKy/57iFyxk/vJexQIiF7uzRxBNWwAMjo/TMLazrgwe5yp1XJGMwwlWCTYjBiC5OfGKTgzE1Z2wdoviEk6tbcBnYnwJsHq3pgSATAQ3nQd37wUWyTtmahKRDHsSz8TYHIfNlGLsnEeGDe5k7j+nvjgb+VjzpQ9N3a6LiIAMuV9gogdGlDH7bkG2lH7Ugyovw7gq4nok5A6TL8AomL7rk0O5rrY3RYYrhOGJxHdtQy6vdmDpgXce9D5IJ9zXiduM3ktwxqGo+o5dxT04Avos+HhFngfEYJFzF5Vks0zcbWakwOMAzjWsBSRbOzS79XLtjiymiPrpkt1gCMQI4E8kFpvh+gA0TJLRhkaQ4tJNkfH6KxuUhSRiORw0EVDRjZyNtVAYVshYKOHTfRUjKM8IezI8HeA3yVBSFAne/KE8Z58eP+KGFCOUjEqLLHcBxKS9svazehcwK5LYLX0UzSIrBt3G9ZjqhsPoP1RF1VuCfK+pGNuZobfiLeTumwkMJAIyaAYtIhUvNsW7cybXzFougTnxXjy2YCyummYunK8tTCGEZE3ETFYOZCgmRmNmUm8rJCvlzfaw00y/z9vniYbUHMUwytv1s37mOgAdUtODNCMPPa3DL+NiJ0pBlToNeRN1Ygjw4o2pbJuPEVFFFJBCRa2sGAs1uIsG85+QdcFWUt56MpziiF10FoUxMrgEuHgIGDI63k9uEkOLbaHm2p2PrKTUfrCcvk+eQ7d+HXdkE/gxZQ9J181O2OtQ2Zy2HWshzrFw+ewU32t05vuTZBr21AMt5hRCsPlwIidbOgmh9EBwHH5njlbhwmIMIIS6Eka2IoB0i6Q7PDY2h+0SH/FAcXJYA3NJVf33vI8C8OpkyGIvMFyTsXgjvcTBh+
KA5dbYoNsZ+b+tzlUd9SYgZQM0qIRgMkKSrmtCKCdZT9laozdpMjFiIJaUMrjxAdoNaBLrtlrKQG0EJyi38PjhOGpoNDlOTqD2Et0I49TciiOcw6lswNAYsDn/Qj5IG4MgOIHJobf6bpeGHaKoDmBYo7nCjKSBovY23LdfB9tGJldNTryg+bxy8vBUYQBHzh+M1sk9jBHVoClhIVdmW/5/5YkbCe3J0avNQnmKIz9TKc/72VdMxWt03kAKnQLA0ICwzQO9vMoAZTQRNzyOSxABB3ZQKx7fDacSr9RHaPkjZzfx7HiI7oEHYfHn9N+yAYUMwci+ncA/LcQm+J3MvPf/6F+36md2qmd2qmd2qmd2j8p7UtBoMDMfxJSa+mDf8agWHVuz+ivI/pHe5hrdRNu7sDTBFqvYNS8Np2F8UY8jiOLMLHBUixpBwOxrjMyYpQUDFRrvaAxhg8tZCKBkLPnmw3U/P78M2fUICMyQOwMUmys3CBx1xSpOObJoyACFTHKqMTzTd2gkOrCFgtbpExo1gc5tpILp6nxmgt6p/F+AKAZ6LYJw+MZ7kbQP9rPQIzgoQe9LnFvUsJ6ghCK36tlAvZBn2aIvAljJst6G9WbOCbutQhV8bYUVrcTEAdFWQZB/SiaAtGaGbCzcDwy+bI4Zk2/kE2wNsHZijiRPgdQkci2z8ucMCxon2PhtgGFOJoc8DxP/LCzZL5R4oL8uX0ExYSw9tW7JoBBh+MJHefmGmZh4Q6OCaSsYIoGsWsIqfmz+sHYfiEEwckhIwCY4BHZlH54v5aRNjZUvM/WU2ZTQxxAhuvV2y+hTYYLOYyt7ysQ/SEKJ6E1wtI8h6WE3gb0XvmELiE48XNL0+XWhhjz3mLmGsKjIAgIUw3NG13jlIDAdW2GZA/7qRCEm3tu12GDsszJKiel05cCZnbYpb6s/5AMQjKH6z/vYfYQjcloXub0CPeLntnHMrk5E6WHMSHuhBs2X8lr+wsDbxKciZUTmDwiGXiKmJq9VtY/Dud+ArAYxETArGHJnYFVzlPmyFGEhPu4jnfhn4ZmTBrkv21lCuR1yhm5rP3h94zuZoHdzjVE3juYlUPy5iCEJ19VH4SJnkkoAD/7t3Jv1KDN+fzhhvuZGEQETgYtfSGvnYNxav4h/yk/nyLic3LljFj0ASIM0nMS7Q0nJEU4AeHaTclJ5EPfE5NBYFMTaPSanBNISkituc+MmrUhyDIeDNb1njfRBIZhAjch/LzX5c8AR/tF/tqMzB8jVsfnCDdnbYvaEYGbDxKjEM/LNRK/7zb+JRlQP+iWO7k52MtEs7rpOwtOTn7P4bAlwixiQNVMNUJMknGz6AmRjHS+bcITjoTPYoiRTNOrEkuog67ZN5wa2JEUbmwgWhMYdkpyUDXxazMJN6EsGg0VZYI5IATz1Mkh2xoMdESaK/H0WDOg8uZ53JhkIZaR1mdKDjDNAi6fzHvqArhdgt0uoK1g+DTOQAigmOD2a31eVxeQtsiSoRXYY1ISeQkxHN8iyUO2oZfnZWIBh0aTabKuMtRq9ECzUw2LhBUhDDiAaCUJgPVwqAfzMxlthApTH2XXSfaZHpDBIkUDjhVqLllEB3OIkWzdBMvGidZYaK5DEmJs7GGB9tsNtLy5Ph9I5+FMsEpA9ZsAdzOBmGH3snvFlUNYWyxrg1nDjBxNOfCysWR0vUgWq2b1UChrKL+/hLbROCOZS2G4mfvSD6Y4B3j3QT96NjPJ+hJjVMOYTYiq3dJyVk9eIxHCE+lMgM/cEB+Fj3TAZ6gnEukBRFGNp1A3c7NISBEAllW5LOzMMIvBTjP9NsuABMIU3WFyj16m7avs4BTSdzDYzAOehjW8vjhQQARhl7qS4TZFhxgNUpB5qANzcK3SJ5zXko5Hk7pYeHlOslFjT5WDN7Im0ShPC8B8z2AzdbhZVqWfexvgNFy6sMU+apgxGjkkmyxNJAItBpgAu1Wu4I6EPnDH1YBioRokyyUkTlFCpnZ
muOkwBe6AAgAcHNT5sxSk7zNFwY4JZlxAcwCcOltqtFBkmEwPyePlnjWqct+WblcezcHhboEwGMRMFYCFiQ52YjgNH5pFkkjaMyNnh1Giw+zEdGhY5uuZQFgaA3th+4xTdBy+A2TdJzaF8xghZ0ubyZwgWaUhGZjsXFoGpSRGX84EJShxm8trOVSc0Dquzzpy2TC2jdPSrr+2PbN/5nmufXbQ2v0Euk1HRpvlnsnqB9coBnHz+z+uEN4PuVlGWMldjQ8IyTksZ2foboXv5O7OC4m8cEisxCzNkoQkCYACYYmHXp8hiQFbCKcDADobZWI0SNRzHeqDAdHL58URK9HXThqrd4REh5u5CVxi3NV6rgdB1Hh77GuMNnaopLu8kS36HcmWwys/nzOpTOg64kfPcuQFFiJ882xsJOsLDweYSyFlmJj0MATmqzo1cj+nZnEGtohc5SLmaBGTbO7UkOspyFgV0nxglRRoMy9QZSsO9kQ6SN/NPBU7M/xWD/pbfVauaBMSI3UGYWVKF9l9Kv1ajEglt1JjYBriMqeOu5WDATKPI8hzmgVNJqeiXmqsFfQrZ59lozB/ufJj5kuVCTi3ygXhZ+ZgmZva7ALdkOVFtwswd3vQEmBu9aDqPdzFAHO/x3Qvp4yK8ZtAB9yhjgIGWsqmmthgNg6DWQ44SymZQ4Oa9aA+Joer02GyAZsJpUeHTfmaQggmgMzB/BVCfkU52mu3i9mTSG30JhQOVFkrjLLB02JgZtKsLO3POWerHh5UOZkhr39iMcopANMic3+raWMxmcPNJT9rQ64t/ZI388ngdu7xZD4rRvxgFkQ2uA0DHk2SIne9HzDuO/DegrJUgmY+mZmg/HNBfLOD1vazkYzCKhdBYK/7k+5FYTAlMy8/r78zuN2s8cX+EmsvnTXYBZ2NJcHldpa9e5w90mRBU82uo0WI0SYQXElgEeTJzk0Che6BbCr/x87CI/LbVDKokIecCC2huDgrsa6vvO9lI80ERuodUu/AtmZpJUvKSUL5PiRdwzp/ESqPtCKjR3MRKBlg+4cG0z19zQMAwUyA3+YMZUHSxWDNBzsfIJx5PJ+VBNG9JR2eDZ4iPIUDoykjUMeGlKfQ/Fx5xC3qXr5DjTROrTxMHV8T6GieVwO6ZqpDkzbqEjGLZOv5LUqWNrGcQ+W8QuNIt9mOcpPPNXIECc/jW7+DwpEz3XzuuRl4ABDf5XVtH6oBJV4BIZzJTcUVY3yJsNsa+E32TpwssJHh9jIidmJQSAcQGyWIR/YckM2iIlAhGczBIYZGoyV7SE3mkM0E4HZRL9lCrqmXbIF5ZRH6JpuL9GBf+ICQbeY80bMHFAtRM4cEwkrIhEI0VO9kznHArhxeziQYZiFzFyQNJbOmHEoZJm2s9Wx0hBUhaMryckFYLq30Voassw5Rk6USVhIeW6ItIYuQLGAiQqqvRTZiQOnhJM8hMhG2yRazk/RT62lQ4x3k9NPYy4aVCa4AENaE5YywrAl+q/0yNZ/VzLCYQ2mW4HXztIRntFSMSYcHcn79KIRXDuHFFIkGuydFLJpnG4WQmw2bsumnbEC1KfxiFMQrh/0Let8Dwe4Z/c1harOkoKcDo8oEhamy82oN4B0QIjDLaUrzAhsTvDVwikohEGIyWJItoe+FLWbdaQyrAQXxaMfkS4hsiRYhCBmYdQNFJNAsG2qW/7BTNiwPJUGyEX3g6emBlB2K5cyULJnsrftdHePSyoZHZbwsGFYPg/xa0QQ7+nghCTdGn/QjoEmHukapGPyAZAbaSQ66JVZ0GACWZBCVPU6LESM71T0mr0M31YPZ7g12U4enc4W4DDECG2yWHm/vJJS+uVsh3XnYbTVOsjwFG5SMu3DOSBail9boO5FVtEMJ7JwIKchhv2TEJ4nDY8fq8LBhhNHh6W6FMMiz7a1HZyJ6zUK8ncSAmvYe5s6JIzFWVCWu5f/ZMIqdOEcS3td9f6zGdR5rOwFuEkkPM2enUsP9lo4
MRP1/iwTpvpzRiLA2mC8HLCvTZHMeOib5NRPFsMlnUM68FkPr8ANtRljyBrEDpgfA+IpmBbskaOdksOzViNyq8bABfM4S3KUakUiHZ10+T3IfGK/GA9XzwWu0pYbwqPxsUdHmrgT3KtpsiBHZYA4ZZYxIbBCZREcRknWM2RQZIABFGsM0RlO7lmKX93UgDUJ5KA7sRHBbA2Kq62HWMYlctfCinqPcGMgAntGVYwaIwK5meCc2FYFa6lhSTIeIVm7HqFRMeK/2oRtQ7o4wvygLz9+bYG3CPHmMW7kVGg3saGD31ajqNiySBwsX0TcJtx0eBDuFuy0S7qIs6rulxzR6pL2r2iSxZk/ZPKF3khWY9SqAqp+U4WUAmK4MpgeE5ZwPUp9pAexsKhQ5KzTZft/coBBUNxiT5LDNyJuZo06OFZwG8LNX6kw9HNB4A5W3QcXwy5ugPBNjvE/Yv6yw7UUEHINcgvW6uGwCMxBmB97pwxlGpxt6NigWNkASmLccHnq4Yjnc4O2oh07WA9IwgaB1eUOQPkm+hjvnSzl87FIPm+SA6SEj+eZQmoyEwTwQs/6Rlw3IjsDwjtzfyqGEgLPha6wYoznEC0gWXjmQ1XAKyTwbyp1zeKHROtqzepYMO1bdmKI5kn8GJBPUGqSHDuMD7ddLhttL6Le/1vvZAW6fyiYg81d4ZsuZGJQAkOyA7uUObpvgNzIZ7HaRDYIap2A0mKKEXfJ68TEeyBbI7RnsUofbMOBukVN3jhZhceC5Qvg0y3gbdY7kRahoa5VtEP4S1wzFti+MGMUAMF9JBpgJQH+t3nNUBLHdVBUhQPRlXsqBYTElX42bYGSj7FM5bNhaRBKCVtK1mToNTXtgFpsFy4Vs+BSqRlH/pCKsOSOzFRrNGk0libP177LRFpu1ORH2o8fNvELQfSxp+Phu7nG9E8Nq2XRwNxZuR0Waw291TTxgLJdyQV5FUJckzJd171wC2eqVA9VLT6uEdK59OkQYy5gnC9pna0fue5pdXSMuYjQJdulgiHGzl7023Xn0NwZ+i6L/k3pgvsdI64TlYXM4BYKZTZW0uTPwd7JvFYQnMhaVRDGq5WWnJBlt+3CY2l720zy5hoLGjffls9tXDcJaHPfcMk8tZ4jnMaNI8HcG/RM9g+5yeDNVpEX5i6bJrkudhQlWzifdV6mPgt4QIxSrFsrDojIvTRQJFNsgozA5EzGV/QSsSG2qzl6nmaCRCVvl0o3cYUq+ShdADKiBFpyZqaz3bEhNwWHSsPTKB8zJYlocwqL3vMjcp9AgUAFFOqM6cyKjkf8PQAQ/XaohXUCMymy0N2dkflYKqcwDiulgHxVBzmedXyYCLIF6V8aocE2nrBsWQYucsccRzkNUkcUhfY/24RpQrOEOnajn6xFn3YLlzGB3JoM+64BNk8WsRtV4Z0Robaxex3LOGGwqysMAcBPWmIyHoYSni3B4dksn6e2qPQLUQTdT3YxsgxwU73+Sw2e+MAUl2L3KWO4H0BCrE6yb1cHEWlRhXMnM8n1UEK5KitTDrYnp0xILdJgXiFH3rE1TZYaI/DIOwmZF8fhAOoAR1oT4UGbq5f1dEYzMejZZPHIMDjdb2bjn2ZU01kJoJSthvuQwKv9hDA7L7GBGU0JaxcgYuXBJhMOUDjwMfVARlTzPSETbb9lwECMpXkYUQifLBmRXEf0gz2ZtwrI4jLc9WEVliA2SJbgxFfFJY/i5CFTu79aQMjYBnkWAVC+bOkIaUfldnA9Hlb8Ime9Qx7PG8BncOYTBYLnS/r0XEFcGFG3lrxjhYdjZFESLIoTXdA/lfvavQA6kvUV3I888XCf4bZIQmO5ZZiFMi8MYXeGueIqw7DQNWg1sNpiSwz522Ckks5894ighmmxA2VGNd0IR8AxrRYhXQFAHxU5Q0jDK+pK5Kvc2K/9kORfDRW4jX8OgYwBTgmm0X8STpPIcT8MZxuTxaD7Ddtaw2mIBBmwX4TuZ57PziORUmwr
ap6TcFWC+r4fhVYDtIzgB01bnUXTwW0k2yWhTYCOoUWwMlI4RHQOJkLPe7Sih+yxsKeMBzJPD7TgUoy8bULvZY7+T5zBbC78RQy7vWW7HWM5JJAvO5UG69QLnIpbZYRlz/ELmphBu9WYSZN9iiFYWgH61oPMyB+eVfHZR5HKZXbFR8n1mZHavIrP2zgq3acNl7Y4PCeky4OKFLdadGvYmYYlyOE+zXGe/7bC808Fv6h4f1iKqahZTnCi/A/rbiO46wO5Uub2E5pu90ahRvOJCiF/uRaBP0h+N5EmZUFk3zwk6Md96hJUaxzeknC0qpHs7J9BdBIUEM0v/U2LYyYsBqWcBO1PDzdmIN4TkGLEj0KquEXGya8Qj2byfM0xoEO1gxTFS8eC1mzGYpRhNALCJA3apQ+IGpaWEtZmxMz2urMTNFhYC+hgclqWiqlETF0pzDEYCkmgPSpcTkmoJtHzi1KsIrc9OEitabQ508/wG6G6r6LC/DYo4RiAbUCkBkVWXKfcB4xnuE3THcLbut9EKxy0xjAIUNAdgCe+OLjV7NH4kIVCM6l0AEls1JIJ02TCYfcASLObeYu6VDHthEDS2nhWQ03nVeskG1JQc7qiHIcadnpLWJPg+YJptIV/mGC1FVLHG4hU3/ARmTJcWdx8x2L+iHt5LE9ZrOagXXfwxGqSFwZ5KVgQiISZFhLL3NItBZWaq2iuTrCvfNWRCkh2PTdWIEeKfPciEIwLYHemh5DnVxOqTBYyKtBndLDsX0blwmDCjC23tF7AiArcYaggrvw9Cbg/NAsv8J3MQq2/+ZWNOw1htqR4mCbeFviJQqUNBKwpSoQ4bLMOoaCYZhrUJQ7/gfJBd25uEKVo8ZWA5U5L7WlWYqYHvISKZ0dTsE5OMwAd0yAMwhsWTVOM/9AZpNEh91ZVKPvM3jIRv53ziHBpOZaislGOIgxpaq4BEDmFl4DNHboCI6AWq/L8kJUzmqwS+r4dIJ1lS095ivFPH49qguzHwd4cbjWyMtqybffTojcD4bUg8NohF/hxyiDZlCF/OhNgD4VwHuRdPM051s7Sj9A+4IrIUATNlLyCHMRnhMgoyO+ex0xAB1fuhVA+TbOA9ns+xjx5PxzV2k+wdaRbShW20vlJSXqIzRdMn84biipFW8t1uCOiHBcYk7PK431oJLx8lNzlSPbGVjEfqI0CMFAxCp+HRYCXkkQiu0RLixWBc6lYckxhQ0+SRRpmsXisM2D0ONJFy+C6va6uZpdGmWqKIIeT7NmOXSb34OvdJEypaDbSUDOIsazsfrjEIDy5zS6Maap1SIFr0O3XA+v4eH7t3XYR4AWCMUppmt8jY3XQDbmcDJFf25OL7c91T7J6wrC3ODCHLaRE3Tmemh60Iy0VCWiXQSr7p7HIEs+zbOSzF2eg1XIwR3wUQMfYELHNec0bXa01kcns1zqcE5DldxHBtQVrYc6FVHBNwD8RmO4l0mHj4NnHWTKHjmEX2J3ZAp9mm505EM3epw6NFINTbMGAfDw0oQyI8e9/tCt8xI1FLsDUErXPB2winjkdyRsSDmQ4iXUQkNnq2zb2G6lTkVR5OQph2Z8RIhgib9teM/jahu5ZruN0CMwYZz9ScOzHJ3lmSBJ41mkt/pVQy+JMByBggJDGcAEmWivHdjaOCciVw+hFkQMFomCVP8mSwJEmTrWgslYMrK6BaF5H6gBisEHmhHqWLmJPFraq+TckisYE3sRgaF92EeHWHpz5iuxGoOe4cAAMKQMqhoCY7MC/WsDLYvmqw/XiEe0Ew/PO1HNLTUrPTitFEqMhITun2VOLASREqWgihIF9yALvRwA+60S6S4ccWZZIvbHEXeglJ6mbLDPEKXIOfAs1irS9l5dssZjcHWxYJUs5IERVhb2NBpSYvm4mjdIiGacvCa1WF+PBW8n0U4dAgfLaWjySKyof1o1JTAqcQX7U8DrmqjmutGFAHmz4TLDG8j5izxICKsKZc7ws615JBiKh
SF/k7qConz0EyoMgwSJWmE7HQzajpDc3hpgi4ycBOtva9y9Zs3QDYGk1p13kUjWw0mkGU+48tEF0NGSUrYaa0jlhfyES6XI8wxFJ25r6igg87jBsPf23LppW6Oo45KSDCIMLof5+zIWViadLw1RFhPHZAuIywl3KKOKfCtcYVBx+Q8DY7hUMAzSRUZe28HxoAfQK5hGVUhGcwEtJg+Z58XaM8hmz07aPHNnaYokPI9bsCAUaG5jDTEgf8CTYAsrCmGiPOR/R+weCro3F73iF2DuxRRDMdJXS6Zq4uZJ/onGQC7haPay9o+DKvhPDdIBFyjwbz7Kp8RjIIwSAo5wTIjhcKER+Qe5XSPakYh9aKGCL7WBw8TmrsmOPnz4eljpBRcWJbqx0QMUbqytwDZH3Jd+pztDwyUgRC1/FyyXj9aoOPrK9LqDMjhu1e3866uu71+5p9Vfj6BLc3cDtFwuYIu0TlFSql4BzgBwv69Yy1ItOrbsHd2GOhuge0tfLaM8jaBOtSoQXEgQGmcn1A0FRKBna0Re7FBAnxtdwrTqjOX75GA3zVrD/N0G7EV2GAZQ0kZ+GmzBNOIv7puch1dJoReRPWeHMUyO1mGTAnd2hAgTG4BXvvC9p8bkfso5ckAD1fvUk49xPuur6E9eYZSLCHhiBTkVXIjRIErLBU3keBYPemZGACQHfDUtroJsDdyRiZcRHjKcRDI+nYAX1ejdBsXAHgLKQbjM7RVA2mlCQ0F58fnuP3usZRM+/7jlM7tVM7tVM7tVM7tVM7aB9uCE95EsarRpNybhbNlAOAKVjMs3iQRe+EUOBosjmbgBETYRe64sXsQifeg0kl9GXAuDeIV5i9qH0ipJlgj0JfOaSXPaD53GB8gWEeTOi1iOwSLZZFyLQZzSkV6ttmxOMjy0DO1mP15hMQl0qOTJ2B2xO6jXpUi4NVKzh7bXehx13ssQtdQaAySbSFuEsNoqWGStzEsPskZOe5htyy7IM7Qm8KaRpSXDcLZLYy/8/VpEoawsuSBZq6btuaSEEyKqjJqjIAkpKCi7y+erJsqojhcs7gs4hhvWBQPkW+N28Pn+HYdzjQjWkcm8SiJ+ZyIdHmMzk8OQepSM6RqhcZcwgE5bVWWyd2JDW6AGRhSwCF92UWnZ9Bii4DQLBOkifuapahpHYL6lSQs5zqTSgpxgBgibHyASv1SuN6xPaiw2a1BlsJlbDnImPwvJTltiWukgeArB8mPnxoI2Evcy4lk0q/BiNp9o2mDwUcZuMpAVeIsr48L4jh+4D5TENGK4M4Aa0LT4lkDTUohjMRV2aPebC4GQRtnn0naFUzxw9KohS0g3XOtQilvJcArHtZTLd90gxPLtwmQ6notDn1fM/9jN4F9DaIiCSAJ1uPdOcRm/I/SBBEerEF0UqJVHeslqDJe1MuIwMIyhPOADoLWA2yHtb9DG8SZt2nACAq2lxI0gAiLDindet1vY0462YMbilrfRksnnYr3O37Et7h1IicEpeyNclpCRlTs6CWq4CPnF3jxe4OT7Rq8Sb0mILD3dKV4rX7XS/yBy0o0KCEpYQXMhJ9FA1Twi8POezLWF2MWPcLOkXTYzKq6dacLUyFG5a3h0Wfy9iEoGikZAfrHpHnjJNaft3KwClayqwRliOQsQpj6j6YCdQNVzX3IbX1OlVLLa6AOOZbNqXAckaH99Hj7fkST+YzvDOdAwC2S4eo8zIjUESMffAIyUg9UwCXrsPj6QzT3oPzudQk1pRkCS0CbfZNpnVz/6VflHTHTZxbMvdQpBsATSgaJSkgZ54X9CnEijbl8F0e5+OWmg291LbJ3yd1O4VHlWPD8jO/D78J/D5/xw9DCC8NjH5QwbhugSXGHG3hAMyzkMjTbIGlTvLjxquAJVrsgy8FELdLh8iE3kYstnKHIpsDghwHgtFQSVuTLhse2RjJYaS0GOySRNw5koRaGNVoeu5CEaOPshGlf8rFH/PBlxaDmTp0twbzRT5wcwZ
BVTs2xFLw0c0Frt8DoFl1OPKhpJyjTNAFoLX6WMIgTU0wZxJ6F5qQDiGpnlMOgcZEsCX6pAcpqPCgMqF00Q3fNBmBtnAiaiZd1uA4EMELOZyDkn1Cg4aueiok7fmKMVxOuDrbHxh9RsOO7XNkA7EVbc31BcuheaQhBogRkoU189/b0E/LIck1/irnSzd5FTLNMLyxhAMuV/kuSSbI+jgUpMCqv6tEa4ryfeSoCW2I2CBNBtNOdtprrOGc9EE+xK1R3TCf6uEZRfBwjpUD1XNASPEAj06cM9pcMSSzUGrWpgHUsNMw6TypoO1sgdFIQoFutHZPhURe5DqWBLNEsKEqeDoLz4qIJXMOcijHVT6R8sZtYDotoKzjtLILrtwevQnYLGJAbfcdYrBwqjgPSJiLbFKhxNzPepA2fcAsWnNzbIRWsyPXRBXmzCVbPCYNHYZoy9rKsiPkUxXstXnOAGYSYzB6c3DtZ5oaFHkWpU54PueXe7x0IXGRlVvQmYBd6LBXo3QxXBJB8j0vxFgYUjdSuT+9D1j7WQjJVgn3yWKKDvvZS0aj3lsb7mp5l8kdbdedcG4Gs2Cli9vQWoQag8Oo9xhGJwKeDf/HJCqhsLa6gZmzQZmdEc3YavkqpoZYW+d8mR1SbLiqRVmbkNXq2TBSIumzTvfrIcnh3NjeFGQMoqdSzw5Jq1K4es+HlIbGcNb9o2ZQy+BKyFLfblC0k8zx9yUU2YGn8xpzcng6rYusxBhq+K5NlolG1te1jvGkn+PRljNtSQZj8NgvHqMmMvBWHLwsXQDIuin81nxOQOrbtdudiTiUOni3RlT/5WbN8/lKR0YVa6iPADHAADGeLAOxqUMYghhP3Bhm+drH7UcSiRwAuI9YK9n3qh+xJItJOSaAELJFsFAOCADirWXehT5jYGBeOcyDxayH+G4SNCr6gLVeb2KHu6nD3XbAcicTgXa2aJXkdNus9WTnVCaEmxhuZxBvvXBngBrLbglyFhChzoqQkWUYk2D1/wBK1hcBxeibjcUyWISVwXyRBzAvRsKsSt+GGD0FXHVj9YZdkoytpohrW0LjYAGXmL/8r/cLLvsRQ04pgYhjTuQwJ4s5x72DgzEJU3TlXpJanYFNMfBSykZlPVzzz1lFtrSjOUkxwe4B19W0ZrogRA8s66aw5nnA+WpCZ+MB56kzUQ4O3RASE8bosVs8bjMHygnROevPAEr+Z4JpUIzc1wZ10zEmwdgIIlMM4GgZCU6E6oohrQq8XnSNcrkZSgZmzmn4Oj+ipNeKsr2+j6noDLVCes8bT0ntNwhWCePRwPgIMoDJBpSVgyDtHLqiy0OYJ4/d0mHXkHoXa9GbUARoIwy2occu+LK+UjSy6fPReEaITEhGS1QnxoQq8HigudQ0tkbLaNRnQyLxekvGkhbMNlQMwegB28k1bvTAeDKvkZiwDbVab9dFRMsFmQHk0Jl9QgwJrOhmEfceEqw6eN5H4RNxU8Yo1X7MTlBQcdE5WOx3cu09hEPlfV2cgigdog521uy8yWLJjhaxoDyzqXIdBVWuHZ86IZBfDBMuvMATgw1wJmJs5B1CMPA+iuGY78VHKfAbqWQn5sLlc3KFmH8393iyXWN31xd04th5NCp54G8J/Q1Er0j/PN44vDOe46HfHvBwDBiJUQu8z0bT4eu4U44KLDgw0syc5VAUZZgWyaoKsWS+2j1hv+sR+6UYj8tixXgKphlwPOOgs6VCki9nKlcHqai+F4eJSjICdbV80nGiQRspaNP3i1K6ZuFlGQVAzvQ89jnxyM4MeDFc8jkyRkGVrqdVkZXIPF06MqCMYey9L6rlZ27G7TiAZlP6eQ4OgQ2mYMWwAmBGyTTH8X6Owz2eNAmqoXdKV1sGuUMR6TgYxMmC8jnHrCVa6l4pf6BDJKpBnQqSpIgRJ+HJHrwvpYPPgNNzjCPGQTkXmaB4r/bhGlAJQDDlHgcb0NmIKTjsjaYJ5/dyU2YhSwIEKvW2Ym+Qkng
x2evb73rZyLoAzqmhJmE/dVi2HmZbRRDdvkoKADXcRKERuZutKPMudeOGYcAzTBfLwjQ2FeMov0ZUq1m3LRMnMywagwXNpiANADT8IwtkqxvZJgwwYOyjf6YuG8VDUm8lcuaJamAGRlwRaCWb5YP1Hi8MdweyCJEJs5PNMy9MUiQmJoNZQ1HHpT1Ko3ptAIfedgsmGp0LmfQXxIO0o4HRsWSCaju1E5owBYvUzHNrEkzHcCYWY9A+p+9F1A0Hei85s4ibSudtFfKKQAHOJfS+on9LsNj5DsvoEF1GXkhU81mN8+bAcVC0IVd3z8T5o8rr5VFzZk4WBW1qw5V6aqnZxBOE2HtEFI5RDLwqc8EIQRDZreo7JTWEkz2sK7ePXmpsqQGVw8/tfKMI8GSQ2k0/HKJxgITuQ9aQUWcpOUIcJKttulJj5Ey8fiKUhJEsN5KVwsvzkXjCt3pgfM7ex9v2AmPweKL6SfPk4LuA827GymmGnM7nESiOUeoJMIBfLYUIftbNZQ6NoeqiiTHLBcUoJHumEpqX0JCs8yJNtLdSA27XSnNIGCjeGURNx2JI/5lZ/gYIcud3osxdBQYlq2k/e2y8hiyToF6P92L0AEBaLFIvMg5O568xXJwC29AilmRxOw3YamjtbjsgbLyE11qaQg5fM+C0REt/DazfSfB3sdzjfNXhC3dXeGW1KcrlvQ1YOSHn7/WgWwxrlnaLEDcK9I3hkZ1dahD2tuwXAHQ3wP62Ay6BYVXlTcKxUUOQBzFc5HWMk708RirGg9f6fa0zaiZUKZYSxpRM4rhisKJX1ElnMeyhU3vkcJpMISFUGgRUkHZCEfV0uwSsDKgJ3ycmbMKAJ7sVdltNlpoNSuZlHjo1CvddLOfmWT/jbt/D7ioiK5VOtA5easagMWSPGzfXyA5f/j4QQC7vB/n7SBMZXPk+SwRjDMwcwCGH4ZpF3xpOwsEohlNZaPbo5p5HCs9G1AFMRk3qJ8Qw+5FkQJkI+GuLzQNZ2Juhx4WfcOZnTJrqKxsOIQYqBxs7PKsmbGUizMFiVB2SsPUCofrKORm6RYyV0MRtF6gWVPUEc2mRNlqTrKAWaZ1gzvRw9lE807YArRoZLexevL9karhOPesQquAejxZ+IyJyWc7e7ZPUqZscNpP01Rd2V0gg3E4D7sYaTpR4e1WuJUZRdi5OswPC4DA+BM6v5HC43+9giUstO6AiL4NdsFKjYOt84ZUdK3Rnjkx9aFQIGoeGU1GfjQxaVBSt6F6lugH5w/+LUJ0O+Y3Fxq1hfCqGqnUR81D1c/Jz7ILwK8w+ZzFVflg+6KFIMXPlO5FuKtakYkROwSIlgncRF71mYVoJbcVgEV32fIyk8FqqRg5QMgzBpm6gk3CkljPhdgGCKFASIyO/L6wIqRfE5RDWZ9mk1zKB+/MJXSclTHLIKCbCfuqwd+6A/4JECLGih1kkdWEDw3WMjw1k5mo8lXWzKGLQlMLIBka7yUaoGrEj0cWB6IPZWcZ6vtTnXSeYToQHM1/P7wC/5SJEm+eFiCrW9f9Y5+c0ecxblRNfCNDvzgd43m+YgaUJqRuTcL6e8GC1AwBc9XvRd0oGG5JDybgk+5FHDc0pP9DbWML1gBgoxnChDlBW5T/QgRO5CxH0bTgjWv7IZZXqrWjluF0qhoOdGN21x/XTsxIKcy6CINpMaePLd8XZIK2oFkU2slBbZCLLkuxmj92oe+rewextleTILaFkX5X1OTH8XUT/ZCqaO92Nx3bqcD2vcKaIpyPJ8EqgusZGj3Rn1Umul2EnZ+bzsp3qPiMZgdyEeroNw24s4tpgpXxJo8bDbGrEIxsXYlCm0ocAsEwdjMpw+FvVDGwcxFwKyASuqDqJNElYM2itBuNarr84LqBH1MiBXUh0CgGkoELLjcHT0vXKUBWLvCJLIRncjAO2mwHpTsddJTOO0SI2ckbe5YzslcO869DtCVkbIjv6B6Fkav4dj4WBZoPrOu8Y3FUjEpY
FWZ5NU+MyO5EGSc+bzhs4Z4DRFNmBfGkkRq6ZK2h/VEcyz46jkFzTWvoERzGc5P8NwMHm8PcPwJN6XwOKiF4H8HsAvCJ3iN/OzL+ZiB4A+IMAPgHg0wB+HjM/fc/vSjIRs2f0dFjBn0tJgMtB8ElrErY2YTI+lx1DdAY8msNDqRPV7HHyReyNdlZVqRmzl0dzTgyeuUuF1CubPnAg45+NVEsghTeyloi/mrBSL6b3tc5Wbku0WIKUOymp5moIiqFbDSjOEHIm641CIPdbRrdRMucmwN/OsGNXjKW3iDHOHvvJY9aYNGYDGD5AJ8Cy6aATIUMAWCCH3vQg4aqrXvjTaV1QJUA2GEepcJxkPBhGK9xnHZfEhFmrd8cmjIFgxEhpBDxzSm/mY5k5FaX1Oi8S2DmEM1uENJNn2FG89SzC6W8N0uCQ+lRUflMk7CCHWBH4IxaEZdfD3eUDSEILdlI+DWTT9Jqi3pIsExNicIVfMI4eYXa4M1wIwdMiIofpzjdGmqKaexE5rIKsGhY2KMYDR0bsDMIZicAfdE47i+SMiMYCCCuR/jiY+049vMsFF/flsH/54q4gLJOiJbdzj3EWj/44nJAR0txfMq4GQcclNuTx0jSkUtSHoWhT5goOzWaZPfumpd5AKgwrquJFiDRr2sgXirOUyFT9tFEU3t2UmnIWkNJIs0dUZ2SkTsKykwXdZUkQYPESzs0HeH6uLGMBQEjvTh2e5rlbQdVyf1Y8614P2hw2O+9nLEp8z0ZVSKbICRTkjqsBKjpCUmC3Ld+Rjf2S8r1h+Nso1QoKKsPoNh776w77rDruE4xlxMnCKLHZBEJkBpNFzAilS1XBXY2JHKqNDbLxjCSJ/r8NzT+j+xZSQYIoAiGIBEvu1zM7Y2UX+IYxPgeLp3cePNt6GQsVYqwobS6SHDtCUt5RGjxsZGAJZV8RI1V4OGedjPtFN+Gsm7Gdu7K2C8psmhJZqOEv24ZQs7ZVdgYXSILMUuteJivRg7RKxXC6UImRqXPYORWM9h0Wb8HOlJBW8gS3lT7TQhqCRFMuTVWNvuQPi/Pulg6b3QC+7eA2edxrf7VGlGiHEYLOt5kJvLdw+7oOc79Yk4qsR/KkIcXWqOISZUgqmpkGBvcJZgjwXUU8YySE2SHaLPBqANUY4UzwUooF2ypAm506MaIatIlIjKhsvebp1FZ+EI0a+VzIA6gSBsfoUjo0wDijVO/RPggCFQD875j5bxHRBYC/SUR/GsAvBvBnmfk7iejXAPg1AH71e32RQNMonuV+6rDrOxiaymIaXBBRxsbyDXqopL7x0LokMfzFlLIDdi8bFAdCUCn3tJ7R+4B5PWPRTT8uTrwJgzKxyh5pcLiAsyegr8VEYJ1lJWsjWiyqoF60odqzQ38uxRiPjWSWyW619pcdI8wUYCIwKTH3Jq3ES9u5+rwBJdbeikOKN1DDQHlic5dKBt/j8exAhyW3jKq1YRsyCYMNuHBTeV+pwK4ncwnvtIT2d+G9yB/rddkZxLXDfGGwnOtrHqBd3gj1oM8Oia0xbkGQhOw7qpHGTBgXh7iv3J9cp86NjKlof6WC2LQGVM4yrE6LBweDafTlMAyTBW0d3NYU1WUzC2rgt1LWJRc8tmMEDFVtl9yMhhXV8LBnC6JnsHFIu7yp6vgRSvg6E+H9EHChfMILL5v03dLjdhaj+2a7wrjrhPOnCEKG1A2h6MB0JqjOVypcn6wR1WbjMGvyReu0WeXinEW4czkwshBhSqYIFkrdSiB1phSvTdkYa3SvzKRoieODGnJGQ+ulPE6U0gxmQeHmRCtCf5iqGj5FIC6E/exxYzXMFS2mUOv6AQDr/YVgiwE6Og9HUbiBsWoq5efOjpQzsSRk3F8LwuutCJvejn3ZCwxndI7KhmOi8lv2VRUdnHkvUkIEALpNhN0HmJCQOl2blqQf5ho6lGWYaoitbc30a/fXogOl894Ql7AeuZyAUPetXN8vG1Bt5hyrk8AaBUh
e1pQYZ6qqrkapNxHnuqc8PNvhen2GtKkbGWv9vuT4wNFlktI/WSXcBAczH2ZuCXdKnNVsEBowVm6RUKXT5BfNUjvQpMo/Hxkdx6H2g60zo+mmloXJSNbgwjMCpUSMxTss3hfHPnkJo9sJpVqCrH957rDKF6xF0mfdj25owLjpJZpx0xh9EQdhwhKhGKg8SCAnZbcmQBMlyz7Y+wCfETQCeJY6mKVsU4I6QE3IciXh4q6rYteJCcFINKYg9s4gdYww1KzUMAMmGI08aEcbA1gDTuZZQeIcxjv4OYIXzdLOmXkxVlHMGMFRhDqz5hMRyXVaBCox+H20oN7XgGLmNwG8qT9viOh7AHwEwM8B8K36tt8N4M/hfQyooo6d0Z5E2C/+wMMb1fMPoYa+iBjw6VCk0bAUNd27Ws9uFN6QpNPKZ5fZYegkxXpxdSdjosOJFWVTLkUjobD5tcF+1SOoIWM6zfZq7jmpCrcYRw3eSjgsFZJDRhp2A44WYf7ZqkWeUOoQpWQK2pF5EdlTOSalMgEcAMqGvh56bC02l6vSp86mZzM09IaW4o2K3IOhVOry+UZUM7feL9g6Pgzb6bMKgVsX9WxlorcfJyCsHJYzKsJ1teJ5/b5cX8oNAa5JmbdWStzkQyFEI0KKR4e9GKo1DCSfFygyZ/V5G9GZqBtc7fvbkInUejOzkQzIBYcH/QKdQxV1owQwZCEfbrp1gwPkAOMuIvWCopTuydXO9aRjEog8LLZkWj2ic0QmbMYeOyUyx40HjVI/zRTbV1DQmI7geeCAD2cpAkkUtotYqktIHWvmTRP6WTH8xYwHVxKDXvkFzITt3OFuL/cSFltFQvN+l6NWjaFtghQvZUqHyQis/88HVZCsGhNRCMEcjUgntFwdkgvl0hSAzO0YjaazZ5ibKrFOmxCdJWNxyahMdoCatS0ljcSIzyjgyi3YBwl/l4yvPJfN4TZhZwltl9diRTb8TsNK2ygKzcyArxyygqJnB8A2grZo+tnKHmoyUV0FdJmpIFBLNAWFKfxOlxAzN6gJ29JyuHfmljpCHFzhLiUn2WzOJDh9Y66heYB+QzKWW7FP6SfZU8rUTITUc+GJAoLMsKph5wNQaqoBtLd4ujss1JyapIAlWsQkSGQr6pmUT1hQFV+jFi2aGzuhS7R8R1JHModu52hBzfwDZCmTAeCT8O8AxEWrF1A1oDjL4AhMLu8bKiVh1uhLDBZ0Z+E2hG4jn7Vj3YdajizbWsReB1tquY6HRYwB2Q8zhwwAgnHgxRzwRjN/zCja5LqIrgsHmdIhChIbJwvSvc1MYji1e2AWXpaKFY1hFJMYyLk2ncoSSBguv8aKLKEYVdyiTBqOYyWjc2N8scGziJQQS/Fe7Xmh5XdtRPQJAD8ewF8F8LIaV9nIeukH812ndmqndmqndmqndmr/pLYPTCInonMAfxjAv8vMt/Q8zYTnf+7bAXw7APiL+4cIBcQy3S0ei3r4c7CYJ+E18NzYdw3yBKBUG6cWrm8gS6u8lDBbTIsviAQg3kFOH808FTcmuH2C3YUi6jU4wnzuwcZhUY2mtHICQXo+yHh6bmOAUYmapNkJhCbkZsXDShbFowrJCVGUgJwBkbIHHlHg01zewsyVK+F2TQp8kw1HAdi/TNg+UL2tlYM1C9yRfhJDOBCZ6yPcJmC79Dj3WfNhKbyoHMI76xZcrwLi4JD2uZ8B8pKqbHJG1cIgtuo5Z+tfUv5jR9UDbbo2pwjHnkGDeETZuwmq99SS+hNXdKDUA3SSSZkaPaUUhbeWSfKAOHveSkZf4YgkgzmogOqc46LqrTogFU+pchnY1gK+2XtkqllyWWyQAqTcAGSuchT9JL9t+lBDOq0XuZwT9t7hTrOvdqPoHcW5ph3brYGZJOM0p0AHkv+E1EhQZB4IuKCM+fdgDXoVIvRdwH4Q5CJzoJJj4GrBvcsdrvqxfHa7dBgXh1nrpPHOwW4N3J2B3ev3BwAJh2V7nofIUg4NUSG
g2izE2jqIJJ4wM9f6ggtJyBc4qCNpjMiL1JIZMveYqaBNgY2EN01TxLsp/ZER8u3SYQwOu6nDmUqMWJMwq05aESdstOYKmhNZi502jxGhArQ1fE1Byjsdo7ci75BKx3GqSEl9WCn3ZHwtgcRMpetyqOmsW3DcZmOLUGbLgWq7vFAUjGT8LucOtNYQ2blked3rRJ8LEARqs/QHUgvbpUOaLVysmdacie7p8JpgAnFFKEpYJ6Wi/2NnSTCwW1OzEVXbKYeXAQ0tpwYlLA8FpS/oV/v8t0qNyEiOnasAqJllfEQcVd64y3pcS9UiTIspXNgiNtvs27kP5AzQazeIn52FsoKt1mO1LJmCC5oszUqjOM7SzhIKgMhoGD0PM5oYksE+yPjkItDOJMy9wzyLGCmAUqYm1yQFUAR1l2iLdlhYBH3CaGA1a9PtanHsrAOXpYRMy6ObA2gWqYrMYyo6TrFBiVJGlRoWY4sg5X06h/AO0Kn6Yx50fpeCxW37QAYUEXmI8fT7mPmP6MtvEdGrzPwmEb0K4O3nfZaZfzuA3w4Aq5df59gzoIVgh36Bd7FwWAAJuYXJAmMlQWbYMW8EgPwMTfU1akBZFVgjC0TdC8IkhYlTMqJzAZkwkhbdyBjMoitipwjaK5fDGgzXDmFdSW6BGamHpNi20gYEgBqjSiF+MqgqyE3sIh8EkUm0MAZgWetrXT1IivaKZcA0nBjI4WBmTXFW5eruLhW+SHu42Dkh9h22RV9EBBe9rSGanHkV28wcFdbcLh2eTBIgPyaT55+NZaSOi3xCTmdPkaGUKdhOQgZSTLjC/KE3kt30nPBfhprjiuH6UJSF5cMG1iQ4G8shl1hF8AzXNHovsDjFRgdqMdijA6EK0oVewk/ocJB23ZVCtJlv02xSmVCdQ1EETdHXhajK4bFrwkvcGFFTDkU4UCD4jUH/RPtLNzU71xR+Exm7FwzmK4uwVoKyS5Vj12yWxzwYSgCiZILmNTfmwQHgtHOsprTPOv6AGqVdAi/18M3rMiYqPKElGdzuJSMIt/LdfmOk+vqd8MNqHwipP2XORy/XMF1E0szc2BmEXnWgSsINg6JR/lTuUwL5JPUhdb0mawCX0LmIi04cgDGIwzYyFakEqAbOGAkbPQgGF2A6LkKtgIQxM26f+287i2jl1NSzM8r7aWvI5fAEkxiN7ZwxsRpGRg0o2ZMq50u0bFD1cYiKKncWfLR9hDEJgV11oAiAyhjkw62VUcmH+n7xWPkFgwtFE8qYhOtgEWcqhj4bFKk6pGr8xo4wXQDLqobmwjnjop/wcn+LlcqMTMlhSRabecBdLia8WwGT0QNcDXrVf6JmTpM6jPJP1+CSRGn6oNgsSpZ1anh4zMcGVI2ltjUBjREpjVmfJSy1yG/e981CRTg4r2uXuVIMJJ2XyyIZnzHYci88WpGGmCv/x85Uifh53UaUrOEs2ZfPLTcS7J1+n2XYPR2Q3Qt3MKL2C0MdRglxApLMIecIF8doWZzy96iEdYPWaMzGE1BsDXCiUnycWfqQWRT1Ae2LhWBmUzJQS+HphQ+EoJ8rtqlje6D5dCRFkI2n8jMAxOd8WaqfKV/fcNg+iAJ5bh8kC48A/D8AfA8z/4bmT/8VgF8E4Dv1/3/8fa9m5BDzKlR3PkxFibws6KhWeUNUy+UL2JBUtgaQuoQsl9/G4k1QXlsRShPmPy+mGFrFE2xkDCixdLYKeQGACdkYQR30KOgXt5apWuAwlU9EBqUcQKu9koU0s+c1EiPsDZYzW++FATaC0hQ1dsOAS2DDiLozysFKqjyeF7XK489JymQAQGKYJYK4Aw3qbfYzrvoRazcfIFCBbfE+cluiKJPnAzITTVPz2hRE4feAI2B03I7BSspkgvq82ZPNG0eyjDhorD5v0KsEZ7gU983zxfRKeFTDyhALsumqwc1ZwI2rUcWjFa+IxdAGgLn32A8e08pi7VVUUlPcnSUpRAogGgb3jMUCZsgogRYS3qr
ekc5po/yJ6GtHZCQsdk2/JChiBHglD7tRMnyEQJ0ND4a5MjLfXOYniTAiE1dHCyiGRN6k88F9zH8qWlD5nsGYk8UudEUDKQSrzgwXdIgdw3cBhoCNagdtVLTWbFxNw9+I8dRtKuoLFi7L2FGTwSMcN+sipkETFc5EwFPK3lRU2kRxEvI+kRiwnkGUihPAJPvNeT8VoyCygXdR0tbz3qFlZzgQtmmVbw98QVj7ygEp6zsAs2b/zc4KCurqxptUIsDaVMYoeSB1AIhqtpPVAy9WjlHmf1Bq10iz3ygXhi0Q1gAPsVxDCOAM9gmhq/djuwTvK6k3kvCfrE14cC6ZnC+tN4IyRNfwkwbc+Yi5E+dIbkae36huTgGA+oy+ojzfchVx1e2xsktJWpD9Q34uWbzBaNp960RR2d9b5WuRgmiRFn7moCx9R6h6eflISKYaTm2qft4HIXxio5p/8myplHLK6BAl0oLOVNcx6xgfkWPEmJBsNwAgLUNEqVZuyGhzvq/64eyo5WeT56NQzyVWVXPJ1Kh9YCIO5lF+nWJTMWLMkiI1erEsFpP1Ot/VedA5nlIsBmgpidPesz5rYip/l+oddPg2g5Jxl43SpPqFyRkYn5MlLEgGBOQy8kWNERRLv+T97nkRMn4OmlRI4pwAMiLCaZ797Lu1D4JA/RQA/zqAv0dEf0df+7UQw+kPEdEvAfBZAP/aB7lgWiWslZSWIf+7ua9ZH/nem8O0EkhRJB+ISRWKuVkhzeBkdMiK0cKoRlVGCVqxyeQNUm+VSJ7Ka9GLl1c2PCOHBiyLtwvx/km9lkK+VEjTmVpCIqMkhriEEwwxbtcOYW2fIfCZiJLKnSxLyQyfkIbsPRrACJTL7YQhUhhZDbcpgMYFTBdwWjPr3rDHZSf973TV9FnaP7paS1AlASzV+nft/w+0kmYLF+p9JCsb0bGRKwu7TmZOEMM1teOXUSNqDhHCvBcZh7Jw1ShddUvNtiFG5yJMF8vGFru8YJs6XqMtascZEQyThMFCMNh1jYo0gN5FmLWgGFtiBO/Aky2ZXFBtscjiseZd1C5cUdR82OhBE1cA9xl6qBdrkSM2uUp7nedhTUhDKojC0EvIMUaDOat1wyEREFAVrfPn29pwQCX2ttIGz2YnAUUHJo/JZLDcdbhu6icuew/aOjWasgHF8Fug2yS4XU5xZ8SVSFeU88KlouA9qQEQeyGX81wdnqThPBO4OEZpEkIwWQbneagbeWciLlWte44W27HDfNvDXqvcyU68cPbArH21twMeEbDquqq3s1i4IAZpDufOXcSqW+BsKuho74I4JNFi1Dqa4cxjXrTckdpk3pB6/s3wcO3fMgdylhBxmQexN5gvGf5qwsWZPNtZt8DbiNtxwKMx18wkRVRqWRlrRM0/JFOyDrdLj/vDTkq5oO5PU3B4vPfgXO8tiIMoad5NSFzr07E6TgBAi8ilXC9rrGw1RHutHvBkv9Z+dbI/N3vFQRp+6yQvUFX/jKrkyah9hIwUAalj+JXWCRwmGNJSRjkppEFX897trej8hWiwaDZ3XKjWHm2TGyIOnMbYiQ5U6hNsPh+IJZHMJfCqhlDtpAZjRnFs3gubcdfoBnGVckgOh9I1QOk3ExqkKguOxsP35X2o0EhY66VOVZ+Mo2gWOpcK0jqNXurhTc+h1jCKEGlyBuSSnOm5VJIBIqngQO7zJF/ABgeCqcTmAIlySUENQyCVcqCs4RRNPftiAmVE6khRvEWn2paNJU7m+YYT0TNrsW0fJAvvL6J203H7F97v889+YfV+p+jgjXhF2TMaqVrLxTFoNuwDA0l5Jq1FS7p4ixDjKsAY8cozehUHIE6abl927houLOVizj3mC8JyLsJoABDPEniIsKuq6mtVibxVv7ZGfh60oChQ054zvA+IEbLpVkgdl0w1NihQahawY2fAkQFTDUF2MiljXxeX/CEfyLUDuXeYrwgPLtXbXG3Qm1gEFIHKEcn3C1SRxXzfuTm
TCj8EkDDGBjgIG+YQXC58KeOmmxtR4TCYyBUez8Ng6kCUArx7QmKfH0n7ICF0UqsrIwxrP6O3Abu1x1aLyqY9PVOHTBBI5VgURW9Z5DN5xNBo+viIe6sR/arWcXxye4Z5qZtJPviY5JmzDlfyh2WIACAFHb+hVSyOSNFrKRh9Pq2/JbwP/awVbgn7VBwPQMIyMZpKHfLiNbPjorjdZjXmlusGJhg4PckdSajK29iEcw492fKFlkEmIWbByI1Dd23gN4CT6aZK2klFYtWAUoTXbw2sbspLyOiAU8IWinBn9qjza4KsNujwTjYANhXNYSeIb2CDJ5Mc1pu5L2haLRcjnjAFoLvVe2GPbSLMa1vn29aJXo4hLIpaLr2VQrz9jHMNE67djMASAi116CwLZ4647DvJA2nWcdbQgSAEONiLYFSAlKsBlRwhXiQ8PB+xUrQ07z/Oxsq3UzmJZXHl63K9xHW34KMX1wCAl4Y7TNHhZhlwM8vkvd6vcHM3SOp6O+4shk25T+gYAQeaZWYm3OwHfMq+UPaPzkQMWnopi5buJo+96+TZyh6gX52qEWDzOiXUAsOGwN6C2B85khISXve5bxhLsJrlnbNqjVyDqcyFmRyM1RI+U0WMchmTvDe7Ut+Ri5RJdvwwxKIdeD5MQoMYu5JVDZ8Q7yXE2RQdORHqlDlYs/+0H12dDEIp0b+1g8LKnWuK9Upt12bRqnGRXFYBV6NrYpHPUd5WWiywklq1eYcxNsF2EcmmEopEVNTCMYyvWXjmCFBgJuHH9RZRNRoXb0XWZEThIma6A5sqOky65xtLwJJJWipbkSxIxTU5hGcz6YBqWOUs5tJtpsoY5JA61egSEYOf83Vt+1CVyCkBZjRF22g3eJx3It6YiWrz4ETT5BnNfRTUCIBYuyyTuVjYOpatVd/1Uj9t43pMyoFKziJ2ELVfPZyjxpopGiE5Q7gXcSAsZ4xwpeGhs4D1esa6n+vBoqhSFs8DRGPn2MhwTfr/aDT0FV1VN25QrlQjaPJoQblerTHpZIOIAx/UEpNJSBWxSAkU5T15owWAm2UoNa8A2fQdJcyg8vp26bBfhExYiNYmgXnB0C/Fq9/2HZ50ETC+ETuVPSlNR97SUcv16QTpq/C4nQi28c7tngoCmI1hONbwWp3pzCREcB9wlzcixyr4SNU7zsUtG6+XFQLnxRySjm3CeTcV1M6AEc4s9j6WUE4YPdJkwDsr3mUmlraIUv6/GovJAibXXusDpkSIK1Pq6CVfD8y8EXIOATGqSCNlNXxUTaDJlrT+49BANvhzSyz9kl9zJsIpND7phtd1AbP34MkUfqKdxNBYUtOvigzLGOr37VnCBJNonAGAmSPMEuF3vhaRVmQnuVSTPpKGpptK7nZMsEsjdQA92DKpNa+RmRB2DrvFF625zdhjfjqge2zhtZCzHTX8b4GQtXACgfYWS+iRkxvsnYXdq4Gr147BYj95GNWCAkTaYLuIEG4rpJnHvaDrIaMGXIi0wh3kQxRS13XLj6EkPMyVX/DSWnLXMz/xzd0V3moSMjiKYZoPqhBFsDLOBm9uL+W1ZPGw3+LCTwgawpu8wy0NUqsua6rNKAVlWw2vElbjJhyZCDc3axAxBq0LODhJPnHN4FkrBjtx9cFKkgXjOdGDplKFF0K26AQ1OlK6V+SzZeWXsu+2en2EzHvici+GxNg6HASUaIiMp45dUxIs9hKO7tZL0QRb+xmRDc66GfOZvHa9XWGv9QULLYUhfRCUCwggKT2Al7q+7D4fdBWRJc6JUQ2vd2GYJR0a4gAQhe9VRWkZZhK+Xamrqv3d+1DQ16iisGmyDbrB5f2JarSk7ydcDE3YPBkpUDx77KyKiRqPaBwAU9ZxTNUwz6h57C0o6HrISBVp1CWlEq4rWk5Z+0kHmCmhLRxMhg4UzXMr35OHO0Zx4t7DiPrQa+G5HWHSor6PAcxnVjx73Xg6F7C3HZJLhVgKLSbMtpGGd0nUvBvPng3
KBCzV4pMU+WQmQOHTOEnRWnaVl1L4Ux0VjlFYKfp0L6K7Jy7G5dmIe6s9ztx8kLEUGk0TQA6grOydER1Hov47JXeQ4QWmglrIc7B6CNUQPBavAxS98GJItUrkgFr0g24W6r0uV1yItEmRpc4EBH3em2mFBCr8FwC4m3rsJtGzKRuMSZg7C2sSHvTiQV50E7ohYHYMi2azg3plOXnNknAniA8RGathqowOTcKZ8U1WoZ0NwpmUCMg1/YxPQo619fC66EYMNmCKDu/kauoqVkeNKB/nPYlQ74VkMxKeW73BFA06E3Hl93UsIQKNy64WqTZLTWqoyuEAZ45LDk/k7KtYbSrnEkIfC+EdkEM8C762BHQkgKZaTDjOmu3VoGm0kNQXnGpJkKSGV0o12yyxaLEtNtY6h7by3NRHr13UeOvJUeHgsGYEdU8s+msN2ylh3E1JlcNrVXSaA2ghuG2EywVLZxHSjEHqvAGi7J4PhqqtxWXtt2Rpk6Aord6sARAMbrarQ/XpPiKsTHkfE+k1gO4a5V7mYIrHDwB+K1UDwprKIRMnK3XHgJJsAIim3TiJ+C0gNePsTtaw00zEnBzQ6pO1fKeCrjvhZyDUg8AsDFKEJ/PUrGYMbqdODGgANBuwYQTjyhoWLgsVDTUAGKPDP3j6MrZTh0WRiLBYhNFVTulzWuscVLSw3r+E+2qi0OACBrvgfrcv++CcLDbDGZI3hWOUDABN/MjjDhB4zE5mPWAzB4oaRet8DzkLbj97jJMXweOWm9OsGb2Elh4BaFtDvBltM5nUPWcjpRZ5ZnXShn7BCytJjX5ltUFkwjvjOd7eXQCQeeKu9pjXDlPWcto58N7AUOUElrqdxLWGZEfgSc+EUohc1ogJlcMrZ4qeI+3wHevRMQrnrlArNISXmMr88DaCziakVf1wPhc6Fw4M1dx8QwUpSSY6D4K3SJYPMpYLAmUbhNGiZOCaxmQhvU9uf8+1udqiw4ZkMh30AQOcQNXra/pD9yfjDgys57UPF4FiJUw2XrC3h2VCVm5B5yIem3MEloMpGYh5blGyTUiLeRKag8roE6kxBMjzX61GdC7i0U7DP4WUjoNOZWpeb5tlnK3E8Li32uN+vyuFawE1RnSiHItNmsbLCsliHz12oStFgveLR4oiW19SV/P9mLqAMvLUWuZsxFNhU4Uqw2CkGGaSzLv8WVZv5Xpc6b0Y3M29iLzpAuk1lDpFVwjB232PZXI1PRqCduy9xxIrZ2awCy7WI95ZrcBZ6FPj9hKz175yYhBIX+th3RlRFl5X0jdpau2xynE6i/BXU+H+EAlE/mC1O8gmLKHHRr2bLQqXDpCFSS03CCjGibj8te/JMAa34FL1APbR49HdGbbvrGFv9fCaUWqYZe8cqAcKxYqmkWbGuJEwKoKVVmK0RN9kMurtmeZQksrsBLetchOpF0+TAhUJDyloLCGuHEqLg7wnBItxqd64bIQJnZaa6K1wZBJTOZyXIEaa3RO6m0xOB9hZQesoX4MlvXtEsQ6LU5MNn9xChJ0bzzcbfzZVtLkYkM1hTTqPTGNAWS4bahnVZkntdU7vH63RPbLocn0z7VOKKFmBgCCiFPWQKiEpPaQSlSxSXgi0WExMuNH5dmd7TJPDsutAGp7MYyhVB6BjBOWeNMVxi0FyiELJl9T5WtS2gZLwMDjJIt3NviJ4Ec9UQMjp+xltAYDLbsRLqw3m5PB4FBju7c05bvdORQ/zWFIdwwaRkXmp4SBF05I32N453NKqjMmNXeEtf4F76z1eObsFILU5P382I9w1BySLEXuQhk8oDkbMTlEAcnZEu3WbRdZ3TjixVkLebcYYkORcIVSR0Yy4BAOUhIejgu2oP2c1eQCgTgwWS4xBHeesvP7icIeH6nA+ntb43O19Eb3N32e4hHlLlmN2NE3jhKrDTKEa4lI3kctcyh/NxtLBmUao1ABk8IAPQqVIgqwyE861FM7gAvaLx36u8hNSIcVgnvtSj/F
smPFgtcNFV2VN5uSKmG1OVOEoRc7bhDETUeUXsjFXxr4x/IwBjITnD6T+gSKY+Ux7v+y6ErFpOsva5783f+S9v/HUTu3UTu3UTu3UTu3UjtuHG8LLlnA2LKPBEgUpyOjBANEhWa8nqNMs4QkGjEslVTgxIY6MZAHqMtypmhyNNsy893jSraVYZA7nKBqS00EBaPkEsd4zXGyVd4GFMIdD+HFlF3h1SQwlJDaHaJPWz9rHvoTDxuixXTrsFo+desPTLJlc1JATs1BaS76GZdHSaOPeCZXH0/Rx/n9GMdhYzVo6dGfXfsaDIdRw3dxjM/VaB0m/hiCEwcWU6zATknG4DtU6f/n8TrzgIYI11RRN1lT2HJJTz5GrhyH3JqGuonkykpAgAyPkUgcDw54H9H0o4bWUJBMwMRU+1qUf4ShiDL6kdwsMLPwMtOEIRfWKF82NxlFeHVb4EY4S9srufmt3iWlxoCEWocGYdU5G0uKw+p0Jz8phBNFcsSNKbciwqF5Zqv2WE5eo8XLtLGil3Vcv0ky2agrl0jIzpP7frmq+zCReH6c6pzP5HAD2TRZp5lVlMbx571Wrp8kI0r6lmdBdy/v6xwR/J+G7UlBZi0hTTIcp+YDwGwpTlWH6qGGf+ux5noQc1lfoSdBNXf8do8gIZXHCAJi9wbT3WJ8Lijw83GPECmwqB8rtJDTn9ihFvZczwvhQqQPZyQ05+QCVCxNEIiU6U/rKNPyjjHLavRTHFgSKy/eVVP1jtKnJnsk1vTIXSvqF4baEzdM1dltJlnBetJ7m2dX7a2IcVetIuCPLYvHORtCmp7sVFi2j1cZ4yCWkgSsPiVXDjYE2KSN5QoyCYmT0NXYEe7HgtYc3uNSs63M/YWUXrOyCRfeeTejR9wGLIqmAgsCeJIswZ4cZRVAHwnyWXzOqR9aElkjmld0a7K5X5fmz3ld9oyDliDisC2pYNdnkNb+pGXOFVtHIDuQ5nRNJstQLANzzO3iKeBrW+Oz2PgDgze0lQjTohwWz1twKLDIaz0gDsHwxNeuhLRWV78XvNZTY9B80w62cIxl9alommhM3WnOzQZgEpV66mkyTNcLa2qHOJHgTS1QmscjgPJ3WheeWQJijfF+u38eTFWRzbvbFLDTb6lk1ZV04Z+Hlx0xUuEuZEwXHVSvNGvm51YjKOlDHiFQO4xU6TAK9j4n0oRpQTEDqGPZMeubF+xu8enaLl4Y7vKgFfHoKeGO4j+81L+Ed/dzsHFI0B2qnYbGSAOAYMXPqo8CdB8TyxUhq92wRNyrqt6uDlsmruWitWbjoJ1kC/C3DbSyme/LZaWUxR4eQ7IHuECBGVM5om5Mr4bqNFnctgnujL9kYvBjQXjbgWpRWdIDCUA/h1DPSKonWUp7/SnhuuVLRyyZrAiPzqs3CsPuI/nGPt2+kWu9+nbPZuBD9zrxkEe2Dx42Gd2LOsmCUTZWCvJaixzVkF7NGxAaNS4d17LiB3wEc8o2yYUQIK5JMsYagWeaMkkWTZzgvxtNeeUccRM/m3mqPnWpXzUl0eW7moQglEprrthvRgqJkD6AQ+JOjonvDHWOeHK7nWlOLiHE2zAiLLdlYJTsyx/BzaDkbowmNcQ6kxCoEq1lfowMiwc/UbCisRlElULuJpbbWkOUSUHlwyoUAAL8RDllbkDkOBhSgWVk5lZukLlzDJyxUgkhFzJVGC7c18HdUQoIlfNYxwpkaBVrjyu+acEJUA8CaKoYLB4SkvBeUMe6HgM4H3J6p05J1oBaWTJzcl4vyGvKBYRnooPImmfMGpHWEs4ztjY7fjUe3IREezCFVrg5LPmBYC7sKh1DHQ+vWSZi2htJpkTBELKKUFmkvhb9zIecyToxD7L898PReJBmmHobPa/J3gu0SViqv0da1LCFGA8AxrEvwSuS2KqUy+IBXzmXvHeyCx+MZnu5W2I1K9N174M7DjHUvynxNMhrCbfiXJsrcbtd8CgbX+wGP7yQLMksHXJ3t8TX
3RX/5pX6Df+hfwJ0KJJeO0IyATBiP2cAPhJidiySeuTjOOZ1KHA03EkKuaOG4dG7VCdSbtADbxuI0MvfjnLl5KCHeMnYk+21yXLPIdD6KyLB89slyhiVZ3CxD6ZMX11u8gzNsb7pSZ5Vno8+CxvjNBlQjGL3oeTXXfrYLw+0TzHyUfKD3WXjCRIf7M9T4Y0CyWrV/R0LqLMa+w67LwtJRq1RU6kB2XgEUCY+LfsZlP8I3vLcpSKb0NHlEldeguQqJFrpDqMZhVeHnotFYnosAsnIOFQkEQ80iav7f1PzLz8sGh6G652o/vXf4DvhhQKCSB4we2Eu0eDyeYYy+HE73uj0MGJ84f1KzZuYeUXVmstbPHXqEToiQeTGkCJijive0GMyTQ9cHLFnoruz6zSRKmcjZCGkuDDcx3M5g3MrhvBkGnHczHvZbeGoIcuwwJYeNusi384C7ucd+8YVrMs9O+ESTLaroFKTUhh3blG+Gv+PKvwDAnZY4sFzQArNQ0TGq1bZZ0Y3aB8kT4C3iAKwH2XU+cnGDy26P3sSysG+mFabo0NuAF8+kMCwR4yYZRLIFuWGo0GmkUvT2bt9j1c8gAynvoQPeipUCchglyyBPB8gCSA1aLQMjitW63zXeU1gc5j0Bd2oALoRxtPhsIrz24AYA8Pr5U5y7GdfT6tBoUrJoyY4LsnjthCbOrsRoV9XseSYs1uPxfo0HvfTLZTfiC7eXCI+GgrwYRQiFi/QsB+r4MMyGUeFhzQYUTCmtAKBsklkTJ3+fGwVVymVuWK9p56r0bea8sYq4KqDGiBJEs0wD6+eZCZmSmRJJ4kagWvxzbxqEV+7FTkD/xIDJlHJC/k6Q2wPJhHwRVAQFzohR1Xj17KWKffYuc8ukYc46VZFgNRMsG8BstUq9Q7OwZVN1PqLTdPbdZICNg53qmvM7LgZf5hPO52pELcV5LRwokwu/QhEwdeY61XxyLmF2CYv1iEqedVDplCOHIvO4ioFSlg8dwFJCyCbx7LTZEZi2DlNG8FczBh+QUtUDgyZtZERRvluQA2bCY9ViuugmPBy2eDhsizr99bjC2zfnmJ6swLuMrlGVWmjQjJgNqCYTjKJwix6e7UoCCyAczCVZfH57T67j17AmgfpYCzwniLBhMAdcRSbJggzrarTI/DFl705eDA87Av5GHYUsqmvxLLd0IpjMv0tUOKO5pJLbyfiIc6UfbQzKdjyJUQjYcleMSyelbL6wl4zHzzy9j83tSpyTPMSJyr7atuyIFgd70rXeaDzlJARq1lnNREN1GrPOCuprbasaUkAaDeJsS5k1a1Ipa5XP4cylyz8DwDh7PNmtsO6Wkrkdk0FIRpTJm5JqBXl9D0ehNMYR0tegs4CiSvKvoLRMgG3gbO0XivFZApMxONCKYgbT8ZsO24eOQMUh4b6Kvn3i6gleW93gJb/Ba91TAMBACz63PMCndi/hXi8Muc7GUpenZJuowi97Kmx9RAm/SO01eYkWQtp4TNddrZmng9YO3EEmSUEJArqNhd8YTFnQMtWspEljgQsbbFQSIJPDd2o4TYvHrB5GmJx4GU1KsAkEuxOP3mk5Fr9jdJsE4qqPk3oSL8lx9SacVAs3s4h9AsCyFsVtO1dtEjdKiqrfoniWT/o1NkuP3oaSmv+x86cIbLBZejxVzZzc1+yoTCaOejImSGgPYhw6F2FsRNAU3NixhJMasn4OQbSHZuyoaJpkY0lUyMVIbCUd4myAvYVXrR4zE8JCCGcWd5MYr2+aK1hKeGd7VsJjZiZAsxgpk2ETlbIQrZFnVMulIAwOYGPxdLPG5/09AMBm6nF3swISFeRF1IEFPbINClqI8IQyV1OBpFEnn4YSWpG75NXI4kYDqdkg8hhDs3LY1oQCo8aT38ZKyGyMmjLlI4GjkVIwBZqHEI8jFT0mowKSYBkfAAgWmO9pYoduoN1GNJ/8NtVSJFKg8MAgkC+FZJjlnciJKKq3EXcrTXhYC8vXLKK
9I12VkQhN626erdXHyXNsWaxkfwIw64D5RSCc2fJZtyV0G4Lb1rCj33Ex3IoTqweVaVSgZWAh4UdFSGI0CLMFjbYYWoAQ0w8zKuVAS47qXhR1jBuHmolU068xqIyse+pSrct3N2C0rYUmfUo+wTSE8QOkSv+/mXt8+tEDEarNtpdL8gafkDKAYlQ1nHBgJTNJXy1nJOgggP0rjI+++BT/1P0v4Fwna87u9CaWvfSd+QLv7M5AlkE60W2mayRTy50FA9o5UFOPMTvCJtZws8gc4BDNUc032qOgcyIEKohTUen3XNTUYz5Hstq9R0GmTVDNpp1WOYB8B9smhKvtNqzw6bsHeLyt++rF5R7T7ATlg6w3trWsCXL3plw3s1mHAdVgyoOo84Wa8chGR91i6GDspV8awyRr7s0GZgbCaLG5k3W4tZ1qldZIUOelzFXvAs5Usb93QQRao6tloCCoJ5WDFwdJW20iWEHwG2me0opyOCt/4DmWV4bPoU7V82raNe85eK0Nb/JzPnfUPnwOVCeZd4Bwgj599xCfxkNcdi8DAL5y/Qgf7x9hfT7j+/fy2mYZgF5qKL2zlxDUbuqk3o5ttpQEwKpwYM7wGhLs5Sxx5bflgLX7KntvGgveTEl0MxrhMREmQ0mpnmcnGV6oApT76HEXelyPK2xGucZ+8qKZMdtyACEoR2YmmFyXtynd0W90476LcLsItq5sRBI2gxw4ObXV6WJvsp1El4QlxTaHwYzwjuZLxsceXgMAfsKDz+HK7vE0rPHWJKm1m9CjMwGvrW7w2krQnDe6e/g0P8AWPUKjUstJeRDNxsZM8D5i0XIxWSSNHEBNGC42GX0AEAZN26eKuGUEJnmqUhMLQBsHd2eKwrXwSQhzGvAoF658YHC1GrHEWk8xpyG3/IycySLx9jrmydIBD4kX2XjG6wGfa1CRbrWAh1DCsXGywFI1knK6M0X5PB9sXNofuZYZcHAYtchoRq8ODlN9vYS+evm724t8ACBGTEbUTGOwpY7hulgOqEhGnMIcroXuG0eIXQ55trUXmWSTCisuRvz4kBB7i+7WoL/RrNRNLBpBbZZZvqdSCsOK/s60uFqENwjEb5rMnKpWzVVlvUGzSgkfyzBnCy7OxnIQ0BcHrG60HmYO4U8SEmELjA9Nua/c7wcbfBOyKs0AZhXwwoUglN5GXA8Dduse45CJWx5+K4duPhwqH7OyLQwxEAUNrM5h9bzLZQ0Qe8bZ1R6ffPAEAHCv28ES443tPfxAeEGusXewKrraCv0mBi6HEV97JWG01/prxFcN3pkv8PYk++zn767wztMLpFiPCvZc+IRpIRg1PBxIKmI0fC6KUuLne29eLnXv9rPHHBysSXj1UrLwXl3fSpWGxvBITIh7Bxpt1eJxojHWSn0UUc/ExTBNjmoZqLxGOlbjiFEytpqyRPUBIa8FQow1PE9J+ruE9tV4l/qkisL0Utalc1X/b0oOU7KShTfI/Hg8nuGNJ/cw3QxVMDZKOSERza0OT9aeOs4CbEWksxHvEhdj82CuFFCK8QwPSo2Rg+2pvW7zh2XSdZkFhDMPl1AkhvzFjAdXW1z1Y6nCQWThnew5RXuYslPVrC+lPiRLDXXDwITDc7mUXDuSLMjcp2PtvPKcgHxPSodoU9aPaltKzxfmbNqHjkCBuOixPNmv0buA+/0Ol04sioUtvm//CryJuK/4emLCJgwHJD9jxKOKZGosM1FdaNmgGA1S6EXYL29aHeB3OCgHIMaJWKVGS7nQEuG2FnZyZaJ2XcCFn2DAeDqLN/H27kK0WObKbUqpIQPGbBSI5yaFGyuC4LaM7o7hNyqFsI+wuwWuM2WDNwtEzdWiLH72DPRRytDoAg6D1GcKA6qaeeBCSs8e3z56eIp4tbvBV6/eAgDcxQGfn+7helljqyfanBycpv9miJaydkeLZkQhIjsX4VSjKVyJO8G7xrtO0sdsqUDhy7mQHCmibHaxb97bQOZ2rEVppf+
knlo4Q9lNQjK42Q/YbYeKMOwF+m6NpeRk07VT3ZzYAOS0nlzDIXF7wNxZ8Lnc4MN7d3Am4e2nF+AnnV6jcnJyGABoPKqsf5VfIxkn7vXinoEZFUUBFK3Tjm7CUvMZYbpX+9DOMqfBwHKh/bAy6G8ZVlOdATFUUp+wHpaijD8ZBywWydQdOWV9lVDHRIwxqb+WN7eMetmR0D+Vt3Wbqo3Tpp9T0rWV11dKYGcQO1OML3Kplggq9crq/RfDSesDmo7QVAgRA7Fv6A0G4GjgbMJHX5QbfLs/x/7tNbrHFt0mo3WaNHLH6NQ4ny4J8z2qBzUgejTuWWHY5BjG1qLDK7dg6aykdjcp2vIdjcGYMmdRKAj5eYnkWQ8q7uTDIqeBq7DmdjPge+eXAEhJn4thwhRUekT7Xmrk1bJShsQBGoPDD2weApA94aV+gxe7DR52ssBeXz3FP1y9iE89fgG7W4GgeDZqWOe6d4q6DYrQ9FRQlPlewjfcf4xvvveP8IITY+nMzFjY4geml/Ddm48AAB5PZ2AAzsdSZmjoFgwPApZoS9LNPFssexFzLUZoR2Jfcr2u8Cpl7dqM9k81MtGG3Yo4Zj5GNDFB1r3uH7vy7jr2B5ywilCxF9HQCz3TXuw2iDD4/P4ePvVUjNrHT8/FuXapoB4M82wNN3VSj30vtkBCXf+Fu8imCGQeF8xtmyCc9VrPoDTl++p7nEuwdkHqqcytpDCpdbGRlhEdspgMzvuKOnob4Vyt28hOuI/JcUX7rfRlyytLjpCsgQ0Nv0vRJ6mDeBz7brrP1IoXB9pQahgV1fEj8c0P2j50HSgKpsB6Z37GmZ9w4aeyYL9u9QYuzYjvn17BZ6cHAIBzO+HcTni8nBV9kiVaqa7dxlSzrsRSyw7kwqf+1sLdVUQg2exZlpkii8pKIUNA1lMWYMwLLwSDMTpsY4e7RXbWMThMi8M8OSQVBMQioTrbVOqmpLyUserPuJ0YT90mwt8pKX0KMHOE3Uc4cViEHO703jOh0itpuxGHlIwFaCafvGinLO9vsXJL6dMnyxn+xpOPFYGzh8MWrwwbvNzf4jaIt3639JgWV9E06AaqfV3KLuhCcyYVrknoHFJvRF2W62Yuhkv1wsMZsFwmpF6EAQFoWPIQUrUTAaNkltXiovJ8/VOCydpajz3SKoECobuRz/dPWRGHmtERVplLVg96tmI8iMfXeMOLZNYt6j7tZo9x3yF9YYXVI3mtzZzMSI18WF4Tsr9uCjov48ClwDPZBDb2wCMuITk6XODEKHwkQDbu8QVFoXSzHx7Js8FIuApQ3kifSpgsNyIghMqRMUY2rchUHLU0WhlurgZe6oHxpQB0CfN9GdDhHYPhMWCfipEkz0FInWTEZHJo3qiSEnEBgBeD7U7htLxu9Fntngvy5bdSEia5upZkbRyieskyjEu42/fY7GS9xmCBIWG5T4iDIssXwLQ3JTwHVIQvdtWItyMVJ6WEkNQYCaHWlct0g94HLFdyiMyJ4G4lW9Jx3YuIdT9qUC794oOTU0LEqcyh2BGW+wkffflpyXCbooNVDTqXa/CRGCaWDg+Gds0CwM084PuuX8T1dlVCgkO3FDXqcvBNtdi78Pv08CMgDYzlvMKMdH/GhZswJo9HQfg/i91hMAu+cf0ZfOP6MwCAv7b9Svz349dIUouKka58TdI5H5Qkv2LcuBX2owXrXpuRmFJGBVquqwfiihEu9L6zo6LrO3cxtGiw6TR0aMU5T4kw3lWRXDDJd2g/0M7C7a2g5KnOZfZJM9Xkfbdhhbenc3z29j4m5cO+8sINAODp3RrjncxLDnpeaaRBHl7WL+JhAepkATiU0GExeBqupWgu4pmsV/mS+jpFBi1R/jX9wgQgAUGFYMPeVUdY6/xZn+D7Bd5FXOgYPViJTmJoSNpJleeXaDF1tZRL7AXirdnwBNKi26EAAARKym/L5b9SQiaQH5icR6FIKc2QDo2mEA7RqNxl5vC8kYcOeK/
24YbwGLB3BrdflJDR7Rcv5MA0AM7lRl966QY/+aXP4OvPPl8QqM+MD4rSd858O+tnFQM04LaOnTrrufYXryP82Yx4ZZG+KBO1f2xqSYDidYjSaVoYFhWBMrOEQcKZvPbqvTvc6/fYhQ5v3sqGcHu7Qto5ITs29bvkB5TFWlI2Qw0duFFIv24XYfdLuS6FBDMFdLdcvih5IDR8p9QTcC4eT47fl9ATAYsKAi5KtgwvLvjE+WMAwEf6p3ipu8XL3S3eGO8DAD51+wK+7/GLcCbhvJcbTExYgkVcjBhOgFQSz2GlXJlcUSpnE1gLCC3rGQsxUm+LIWoHKuJ4UcsULOcM7hNopuotLlQOxANuSBRl6nyQuqmBlxV5SeskcHK0Ncty1FpsY6oQuBXvPxf7BVBDEEkNKZ1XlhjdLYG/IHNo93aPnNEz3+c65gmwmkWXuXQmAhzRltvSvoOEfnyFGVI25POoNwkCB+niPbBcAGGl1wZgd4TuTsLB+X6mS4PY1wyqsALcEHDWzQfCo21pi3IvmrgRdJtIkwFGghslO7XcEzuEs2qExoExPhRDY3gi3zs8zSrkXKBzCglsLWJb02sySPseZiIMGz3E3xEnw86A28ln3T7CTBF2MoL8AeApH6J1jUQCzs9HvHRxh8+8I/Mcn1thfU2qJK39HORAiQNhvtDPrmReSM1CRW4WUSbPJGVANniOAO8t9npAPljt8HDYYhc6fF4PkoUGzfw9zP4r6eYljEHSH1RDeKybu2Qxaqh6RaDLCV977238xItPAwBedJJR99e2X4E/H78KAPDk5gzMhGmpFRByKO/haodvvv+PAABfNXwRiQ2+sNzHm/MVAOAfbR/ijc09qbGY13qfBIHWkG8meJtR6Alui0I9WMYBf6n/JP4yfbIaI1ZqS65WM16/dw1AkofGIKrcmce0RIuUSOQ9smFfPMW658GLxAIxanZYDxEZpkq/4EkMX/YJOCKRYzJIuVhy5kUx4DZyEX8nyFNYU93LsixFW7A3AYhilPY6yCu74HWz4LXVDWaFjH5g8xCffvQA03UTwmsEJcsekO/Pci2MPkh5pqRoa2lZfb0eGTALgxsTo2TctWBT2ZMOVczZM/gsolf5j7A4xI2H2ZmSWUoLwJEwGcZWk1w/f3/Bg5dv8fqlJCrlNrgFgQ32Kri57D1gBUnMRPQsDRE1+gAAlIRo38oOJTiYRfPvy8b4HCSqoLYJnJGqmFBJdfq8zFoKptl8E0s5l/do700xP7VTO7VTO7VTO7VTO7Vn2ocbwouS7ZJLJfCQgMuI4XzCq/ckPv7yaoPrZYW/fvvJog31Sn+Lx/M53p7OcTtJHH6cfbFaD5XiFNLNT7YYLE8HUCCwkv+WS4Z5IvBfDh0kJ6ESKvVgABhCWFkhMeulEkt6qlRZzzBmBPfCLqKcdaAp8kYLb8qHNYNiQolTCz+JYeZYiKIABJrkBmHZoHBxghaaDSuDdB+ASyUrar6k4tFm79qOIhBoNg5/69HrAIC/Qx/FO5szEAH3tOjlZT/i1YsNnmrqMiCkwaTkaGoy2jKCF0uKCzB4QTZmlb8PySAlCQOVzOReuROmlm0pAGLHWB5Ui9/sDLobU1CkXHondQDv62vJkXhl6tX7pwZsxEsqIZ+9ZIW5bYVkUycFl+10iEold4j8yHgA7q56iLGXDCP30q6QsZfZIU4W4U4w9pJ9NXHlsxQOjz5Pcw0iFsFUz2WMc+FZE2rigQnCQ4orLvPS7gX1CmfApECLWQj9tWpO6ZSOK0bXL+htKN48M2mormboiJBmEgRcxevYMcKgpYO6jBTW6/uN9v9dFqStfDN2uW8NTNHqIaSVK1mDAGAmFRINdTwzImgWqRgPAHZUUc7YpnKLh5qaQtrhjOBsxMfOnuKrL0VZ7m9ffQRf/Px9uCe+FhPeS6KB21bR0ekeYXwxIV2GMkfDZBAGKuMiL8p42juLJ28JKv3k0YXAX7OB0XXjVUsp898AHGT
0FeHAwCgk3gaxrBNFx1L3pU/dvoDvv3kRgOxPaz+DmXCjkMCy81gMgwxj9pUDZWzC2+4cf8vInnATV/i61Rv4+uFz+MaVhNa+eHYPf+fsY/jrjz6OL14LNLdEAkd5CIo11Co0iBqaBGoyyIuXd1g9kMny4uoOrw4SwvrcXibrE836JYNCJH/x4g5fdfkIl25fMp6/sL/EZ27u48nOVyK40cQhDf8AgoLGrlI4AFkP3Z1kCLaE7KyjlxHxgl6ZBpluEMJWTqQgyaULquBoLvv00e4JIgy+f/8yvvvxa9Kvj68QZyPh+xwCnQxAIjPSImRFJkPXYWhqbbaRh3KPJZzHKlJ7GMITqR5UBDUq+TrVcRPJBkZ/PuHh5TbfCnYXvlA6AKG0cJTFYHVu9S4iRItt6PDiINSclZ2xjx1G77HtlWrRd4iTlXErVAQtk9ShLBITGSYSTCSQPrCFdJtpsukoSYFscPPMbbHgjK4lEpLk80q7HPPGvlxZeERkAfwNAJ9n5n+FiB4A+IMAPgHg0wB+HjM/fc/vSGIAaGY+uqsJfb9g8KFwc77i7BG+Yf1ZvLXcw/fuXgEA9GbBa/01Lt2+bPqJSbREZncAd7KBHEIa73aXM+JiYL/Yl82SnYQyxAioMWT5ngrKsSOEM4NwDuBCdrfzbsbd0uPN20tsbmWD4p0DTcJ3ysZSu7iiGm52auop5XtmPDPB2RgQx4NQTlZIt01NqvleraOWm50UtqW60O2s9+K49PPKLbgdezx95wK7NzTjxjPs5YKL8z0uVJjvJhHSnYfb1HBYLmrMphqMy2ARojkowNu5AKyA4GMVbQymLTEn3+cYq/WEVbcUzaz9rkcwHmEhuDbKrdctIRqWxddfc+H+LOeE5UKeOd+z2ye4fYAdQ81sYl/g7JZEDhweXq2eUdAs5OmFBPviiGE1Y5+5NXuLVrvoQGNsyYZQjtFDuVKm2FDOR8TBIHlXP9twvfIBayeJ+4QVlU1/vkxIFxEwDHMjfbh6i2BVrT8b2LEHeiv6P3vlvmWBuxRNNaCUbByDLYTR3A+xq3OaLRCvAswqYFKttO4di+Exob9uwnqeQGt7FDpghLVD7FAV6PcovKBqUGhm6ZRglxpel/upKsuiCcXoNrWW4DwTQrT4/O4KL2gG1Ccvn2BwAe88OC+CrGnrYPb2QNA2y0n48xnDIOtms5xjOfcHGZBZ88ssVEJIIklBgGeklSo0Dxb2zkjYp8nQFN22RsSwUWYvWYcaiiA+PBBX6xmvnz8tfMzP3NzHzX5A72t2aE57YmKp7wYRpDQu4SmvcaeZw9/3+EX88fmfQgi2pKnn4uljcAi6NtOdh91JuE7C6pW2kBwrB0336Y4xuIQHww4vr8RJfn14iq/q38I39p/HoA/6p3dfg//67W/AtLjCd7roJrw1XuAdnJdaqed+wr3ViKf24tCoBHBQL64D0iqBhwh/Jv3f6xjGWIUgjWFYF3HWz6XmW28DHCWM0eHtO9kb75SXZ20SLSMA83UPSg5Ao8zvhNZgwEUncGSPt+dLfGrzYrnlT77yCCu3YDP3Rdpgv+sRFyOVH7LR56NU3yAugptL6ACIEVgM8WxAEQ4pANkQzxwjzgYUF+ctF+amTMqGOkZGkqYyZ3laHCITOhdxobVhL/oJF16Kt9/TcN093YgXtlg3GR6GGFNnsVfB4+2qw1arjKQD1eFmw4XIEsk8q8W/ZewkrN2G0cgYIMRaVzYbtM9kHbIYUIUzFVGUyNvw3jEn6qj9YBCoXwHgewBc6u+/BsCfZebvJKJfo7//6vf6AhNFILJ/W2fbo3PECdgC+NQgHtTff+Wj+Ktf9Qn8zJf/Pl7rrwEA3735CL7/7iUYcOn8RYsScqNTQ0uumE2AEnM/+uJT3O93+NzD+3j8afF2uscWZCQDKrdcBTq5qivldqlMyEwwfDhsJQOvX2HvZfNdnNHkmEYyXyeuWahkgrWDXzbINmbdpiwTgZ2pHgbrIbI
wspEn2kYGuQglIMgAG+HI5Oyh5GVR2wdT4ZB1NuCjVzf42NU1Nrr5vvH4HuYnA26edCUlFY6LOm7ZKFXUkQg1XXQRpGlwC84VmQudwRg8NlOH0WjFcWeKgGNWT74YJhCAx5szjI/FKLVbA9foeckYAZlXmwnafptACZgvDPYvyb3sX05gn+Bu7VE2m4xt9uTY5rT3vGhxZNjmMWJJLuiA5UIPhnVE2jncPerhM1dnEWSNEokWksoJ2LkpIpxJkJoMYJa6Xp2LWLxF6hPioOnTS7M5NtpQywUwvRiBq6wtAdCNR/fUFiTIRGC6X71VAEiD8Ab2weNOM5t2ux5hskBoYJU20ygjj7q+7Eil/5MDRmeRfAJp9uX8EiOcW4wPCb2KjHbXsiDk+VWJmBnhzCJ1TdkbRW3tjML/Wz2J6G6CcKgWJdw3yEDJaNMxs7MUtZUbJFwME+71e3zPY8lUe/L2JWhr1RHQtZ73zI4RLvWXLoF8Qt+HQpDdXfQI51JcNxsJBgRWwm5GDobLScZzsZi2coN2a9A9NfA7IcQDKHo+dq5K80VSg2tZKVoSKCQ5ANWINEG4TJ9cP8Yne0HXti/0uAlrfP/uJfy18WMAgEAehZSf15MRzbzufCo8JGcS3ri5wvWTc0TVi3v0ZMAjdyUdpIKqxXgKiow2ZThyun+e78sZYefP8d1vnsMoFwmGEc8SLl/Z4Gd/4v8LAHi1k3vY7XqMk4p4bldYFgtOphj0XReKllCR/9AplTmIgBhy3CXAAMtG9+mbXhyMVYD1dTMeR0kIeay/kxExVwDYK8EbG3G22FdUyt1YEftVZxlQJHIibJcOiy5YTxEf6x/jK19+G1Yn+hfm+/hbN6/jencPkz4vMwnPLE9GAMaKOKsxCblgwjJ4pInLeiwt2x3HZ/4xqpK3u3aNG9kPqzyIvOdimHB/kMX+5uYCtzdnSDtXErfeAgoJ36uj8NrDG3z9/Tfx+vCkEOmnJFnfnlI5g1bdgqn3WBYDzpnRHiAmJK6SFLFDmWvUpL+yqSrkKI8TIFywyodiFcikrERuDY6lDLiVNMj9FYII175H+0AGFBF9FMDPAvAdAP59ffnnAPhW/fl3A/hzeB8DKiNQecHFFUsoYgDihcwEcxbwxpN7+MPLP40fc0/0SV4ZbvHZ7X28sbmHG9VyWSYnxtNkK3E7e0PNfNnOnZARGXAvyESYaQX/1MDGKtJIAQL9r6r3uqwtwkpCfnlBXU8r7JYOTzfr6uEp6iU6T+pJj1TT41uvO1YZfuDQ2yzNACLw1hhk6qlKOYF8MAM0kwApOcSwhqYSN9CllitZHvf4HgiqF0YH2jhwl+Cu5GYuL3aw53tsxw47LXtBO/HKM4EbkE2i1IHSy9itwX7XY7fqCso1J4spWhgCes2oCdaAfYS3sWTb3I09JlVpz5tHXEFkH5rxNAGl1Eo2Ds0iYYwwoIReVl80YGs0tT+Tf7kewNkb01IYJkh2E6CbSKSDzYRYaqxRRK2NdeeLSvhh9pQYfbkWXL72MfIoKf36TLFuCsYkJNN48IPMBwpNOnYghJUazZoh426sEF0tML3A5X7MLKGpYkj2kqG1nT22e+nEsPWgsSZAtPeJdISqGjEyoPfiRqB7aoCbvsglmJA98fp17DTTrKkCz0RlrVVRShRUsIZPJMFD1K+rkyGZfZqtCXHQ8v/zOjQRWPkF33zvB/CR4RoA8CfHr8Py9Az+1tTDX+c0iEp25vhqxNmDHe6v95hVD+igTllzv5JhS7BPFSW4doiTVBlYZ+RwzkZGFUC1OjfcPpXwpGmkVQoC1dQDy3PVLozN9Rp/4jNfD6t7Qqe1yvaLx34jHqLZWw3rM7iI0kpIb/ABL2iY5aPDNX7qw+9H+qTBjWbhfmr7Ij715AVcX5+VbSwNooWURWMrVUBI1f5OUMD82nJpJGsxh+KdhAo2b1ziD+5+IgDgq199G7ulQ9i5kuF1+XDEV77yJl4ZbktXvz1e4DO
b+4JQa5QhWRKhXwuYjECtJLvVuCT6bABo4+BvLczSKPNCw0GeawhvnTD3SeRa9vJZtzWF8J+Nd7vTMDOhZsgp8j+FStj/iH+K19xTLGzxl3dfDQD402/9GHz+8RVSQwUxJiGyqQK2EIR9sQnOVaXvVhOuUAIWmcPPoHIADmoEPufPMACMqSFR6P4eCVf9iG+4+jwA4BNnK3zq7IXD6Mud6nQFRk6u/QJdipDmfYfXdM1FNtjHDlNyRUpncAG+CwidretK7RrJzKs3TYkOpBeYDNyUwFTV55EYhi0IsSSWiTwBS5jXZANes/5Si0DpzymVAWbjv2w6UL8JwK8CcNG89jIzvwkAzPwmEb30vA8S0bcD+HYAGLorKQ4rqCjm12YM51J64L7ycD56fo0X+jt8YX9V9EmuuhHnfsIr55uSPXK7GySNfDaHvKOIA695P3vczCvMT4YicAgri4WadFbSpCqEukIy9yL2jNfvyyL+iovH+P7bFxGCBW+l+6TExaGRwYaxnAOSCaGdvdPsrDpGNbPgeGaXbAg9vEhh1pDq5jsJz4ZTzV7zm4putOEo4dtISQsAsC5img38Uwv7RYGQN8MKy70Ic7GgO5PdbrEOmLqyOQJyaBrV+GkRnv2Vx915VzyM+/0OZmCM0Rfu2hxtKSmx6IZgTcLDyy06G7GdxVu83Q6Y7noguZoppTouZq4bCFs5mNaPEtJTeW1ZG8xXasjm+nH7ADMuMGNAGhQBibLAzJxgNEMjJQOOLOJtZSxkD+o2DL9F+d7YE7avCU8GANgnmFF4W/7u0LBqM0jytWFVvDHrhBFLQdUuIQsZuVnViWP19MUJkVpVedMPFxHptQWui1h2MijurU4ERxmI5zqnu4TIhHHsMev7aG9gR3PI+1IDRmrQldsWp+eMEc7zJsPgdQQiwT/JMgaE/qkUBG0VwWNHiD3BZLVofS07VuW6pZP0mp5gOiNGREZnUhQDzNZ53koBFOM3iGr8Lvb4tnt/FwDw1V/3Fv7MKz8W3/f4RWzvZF6mvYPZWtgd1YoF+v8XVnci5gvgHVw8J2tO7jXzAuULNYvUM1iR7qQZam6nmZ+ooV0Rgcz3/txjTi/GRRKEIoNng/8/e3/2a9uSpfdhv4iY3Wr32s3pb595K7uqYt+ILFIUKAmwZFMvtp8MyA2gN70ZsOS/gICf9EoIsA3YDxb8QsOmCMoUq0gWVUVWsYqVWZV58+a9eZvT7nb1a7YRfhgRMec651ZlySykaONO4OCcs/Zea80ZM2bEGN/4xvdttiO6Oy8SvNG41NFNu8hZNAcZI5vTW5gYQVo2+5zfad4C4EfZA85Ge+6PNlGX736+xZ4qPuoMW69756ysucGKY8jj6zLhjnWBp1lAfd4yub/jYrqL43maHZgkVexUa63m4+19pud7Jr4D+Dtnr7jItkxNxYlffN7Jb0l0x/Vq2n/vAHmJ9ySxpFnLbFJy9kAml1GWVVWIRlFYe4xlnDVcjHc8KAS6XaR7Em3ZtEU0/73cTSmbhK7Tkf9TbjPM2nj+YZ9khGdy73kGX9bnfFrd5/e3j/lseybjZDUf3L8hM12Uvtg2GfsqY19mEaXX2lLkQUpCrm2XpTijscZhBkmPUyFRkddsIpywN5L4GED4OegkcWTQ+ab9XtZYE5G0RbrnL51/BucSEIGIhGrlKHTDNGSwwKodU9k+vGicobIiQh2OPGnJ05YqTek8quqc99Zz/XOgOkH/pewaLkSB0qJuHl4KibZzBEt41w0AuYi4eQ4juu/cC6jUEafKfnXAOTh+ZgCllPofA5fOud9WSv2Nn/X7rx/Oub8D/B2A+eSxMzVkS79h1Dmqy2k7eJXJRP3i3gMefOOaP3fxNDo5f3x3j7KRGxXqsU2d4FqNanSvXt30ZYawwz6cb7hXbPlh8YD1Txdy0VuNzRzdyB21Dnepz57Lvo7jPBoUMtB1I4KeWdZShbb53GAbhWoGMgYecgytruH8/McOuA2
+hKTgZwl5iV2Bw0WCpsOU3s5hUBJUnYPUi0sCzcTRjS3JvZJzr5Q8Shva0zVNZ7jzZNPyckJ2a1BXgrzJL9q+uhVTUKKid/je5ACqEo2v4GFYmBaLom36hyYzHXUnbclnPmg+zfe0VvPZ8oy7S4nR9SbBdB62jS7w/ThmvlU/3Vt05Wgnmt0juUeH+0LETreK/M5zPnIDXQpKYVOPJgSlac3AI4y+jTyOu2xupnL9pp4p9g8Uh7cbRudyHUo5yn1G43LSrY6IWNqBcjYiiOEeO+dLlIH/oy2JcphpQ7vzme++D8pjI0MqpcTuYcX5uaAHedKyOhRsryZkVx6V2omYYDt2dBMfOKeWpkmoywS8jo6utJfXUEecqyCJEcuozj9frRICO9CedCzubbmY7tj64Pfqdo69y0hXInoKgriYA6B0b7yqoZ560v4RYVzQueDpl+4s6U5KeLHt2OijzDqOz4DTGD5vV2X8xt37XDYyt8a65iSVhomVN0o91CnNqWjLBQBonLecjEq0ckw8B6cY1xxmGbZSRwGUWA5JaQrAzBqscnSNhrUEqvmVId06iqUj3fmAPRmgzGGYhwKHRzufYsjJ0C2ovOPDR5d0D+Rkrvdj2k68yna38hB33k7JJa7fRLTMv7pMY4lxW0252Z/zSd2fUzfrMLNG5D08kpOsdWxaiMgdg3k6IDejZQNsGsPGl4ytm1F2Kb988ox/fy4lvNoZ1u1f5Fkyj0nyZ5szfnf/hKpJIoJ9NtmLCKjqn0XVKY+YDM7BKeE3acfBI05aOcZpw4PxNnKqctOS6Zbc/wmH9Y0V0Qy3SajrRCQO4lrh3iBug4xH1Rpu62Dbco/WGkam4T98KNf7QX7Jzub8uHzIJzuhrxg9JjMdadJFvags6ZhkNanpom5bOc3oKvniIaFdKpt9abkLXnOvJ0ZIwh7LU0qsoY5EPB1gFauy4KONOILsmoxEWUZJE/lsD/M1F8mWsa44T2QtOjPy95fNOZ9XIhy6siMqa2Tt94OlceRpS5qKGDT4ddirqccmscz54G74bLjBsxAeROF2ucEDJcm6gyHnC6Cz4gwxVCwf8Kbi57X/+gjUXwX+llLqPwAKYK6U+r8Ar5RSjzz69Ai4/NkfpVCdI196CO9ausysUZGcm60Nt8v7/NfvzfnwsXzkvCjZ7E+oNrnAhXg0woYN1n+6HdTf/WDlpuVb01d8MLnmH2a/AMCrTy8wOy11ch1PTRy+k56g3I6VlBcvKqbe5+fFfs7nr86xt1mvfwIRwk/2vrSxJ2bgwwUyEIqPODFeBTU+l8rDjYON/Ej52PUlvGSnsAM9G5sJiboriJYDaNkkm2XO8+D8XRo5/1nLbCER3r137tjeyzmsCvTab8JbLWOpeoKmThDRt8HaLuei2axHPPOv3eUjjHLkSXtU1iuUY5zWFN658nI/49n1gu6qIFuHYLi/9lju9J1rNoM2+L21mmYk3Ych6DOVEq2oQabvB7YPVqEvERnVPzMegHzDJsmCTRXlmby5vHA0C8+TuPIdRB7RNJU6MhMOqtmqHXBarKNTIqoYBSOVLPgK6EYygesQyFqOSmxu0jE/OcTN5uXNCTwvKHZ9Z+nhYYebdEfQPMrRNAZbmzh/h8FSeJZM3QdPAfWRAN6rwPvJWi9SlszYzQOE66/jrKKZa+qt3whuDTnBdd0nHkaesahC3/9IhDOrvnxljYJM9wgUEDwWe/5pr98TSxxOkWhRN/+NV+8BcLOcStmyGpQtQyA96hify/PwjbMbTvM9N9UkGu62rYloRz+meJFbReKNa91OQ6sotorUV6DylQROphk4zVtHl+mIhg6PaFXRX4rMTRPmvsPtE7Z1zjszgV/fm91glOPj9T0+CcrhRtBRNOC5jcpziopxzclEEoCyTlneTFE3ae+AcGuwG4PLBjy+9nhDHqLppu4FayE4Ixjciyl1KaWHGrhN4KPFe/zX3/kuAP+jd/6Au3rE3c0sCnZOpiX
zouL+dBuRC+sUVZuQZS3t3JPCD9Il7Ko+mSbvyDM54dutJ2kvCxHE1AMkLtxH4yJ/TRkp3wHYjQRf6dJ31RoXH6dRfdwpGv5O9oq2NUx9kPbh6BXvZVd8O71m5jfs79dzfn3zIb+/esTKo/NNZ3BOUTaJzDMEqeqsijpeAF1ppCONQVLr78Ww9B2SkSPu7Vepdft10aamF5ZGnknneu/XTZWzXI/F03XYLJPYqOsF8PZiyS+fPONRtoroVfCKrQeoVGo6iqSlyJrY3NASpnzfXNOb+g1U4KM11uA8rPY6c8RgcEiMj2hTq6BzwpOKnXmDZ20YQP3rksidc/858J/LOau/AfxvnXP/C6XU/wH4j4G/7f/+uz/rs8JiV8/9JnTu6MZgJy2J75QwSSdS/duUH38hfJ3ZYs+4qLFW0SiJbpzS6EoLZywEUF0fiQfxtI9f3eN6P2GWV+Q+gs/u7akZY3a6LwVpiGJdA86CS8DkHY8nK0CUfl8WM/Zp2vMsKhWRr2heOguZiYsPdboVSQHhPQQIXzZVUUHvWX0KsEYPnL89yd30WaipHOlOXk/2oVRFhLQjBGoFMdk+Saju+dJQqSiuFKY0dN6ra3nhaM5azLTFedJ8u87IbjTJgchxSUq/SA4EKHGycDSloZ3Id6TasigOjJM6qtLWvgbeOs2Xa0Edb5cT7CGBSUtz4id0o1EHTbrRg3EmtqpHFfNCOEzFrWX6LGz0jnakcVqRreU6km2DrlpU5+iSnkQuY3WMAriveGacFgArBGkugXSpyVYmqsWH13GyWWYbj0aUFlNbsTEZIo9Kzj0SX62mSGs5Jd+qqjpBDiTL7pFWW2nWbsI6lHNqjVu0mHeqaKlAY6RMV+t+46RPQvs2SGIHz9Gl+8W454L1r4XANNn50t1tQuqDX1P3PLUQzCnrPcOMGoghyvcOVfN7IVyivEGXGcBgGkh3nidUCaF6iOC4hChBEQ5lITGWP714ynIiG+mvN++zXGfSEefRZtWEAF1TreYA/ETBd++/pLWa243AufVd4blTfaYfHAaAOFfTnSLdiETJcFDbkaYtiH6gUeZB9dl1QJ0c6ijJdmErC5ucE1Traf2AZ05QAmecyMMo1ydBe9kcu5HtOSRWoxLHpKj5cCEE9HvZlupxyrrNua3kep+v5yzvJrhdgvWZvp072mCc3fbJm/Zq/apzfQDWMeiEDTeFaLGyQ9aAv6++wzhrcFZFu5nv3nvFX118wrvZFZm/qV805/x3q2/wg+tHVJ5DqcbSXWi1iTyXtBDi/zSreO/kFoD1WcGz1Qn7bY7zG7ZOLEnWMRlVnIwEMp6kNU1nWNc5y1zmTDXKcK2S0qxH1Nuw7rvBHtQq2rFjlrbMfCfD2+kN53rHx805/+ogxP7fvHuPp5sF7aDz1TlF3SZUZSrIJeCsoszlw7uDp4xsEqGD1Kof21LWR9O4vjzcDdZnf4hytzcjt4PnMJEGktih7NeCxHR8ay5WXx/OLvlyccrL3ZxXK0Fzy22GqwxNp6J6/aftOdf7Ce/M77jIZXGsbEJthRcWBHwT1UEKdW4i0hdkgobPA06jEGJ5eJhkLVK+OhASwZ4eoE14zUkjlCPSIrS2YidlVb8e668IoKx7ffjeOPTP+Pkfdfxt4N9TSn0M/Hv+/18fXx9fH18fXx9fH18fXx//f3/89xLSdM79KtJth3PuBvib/72+LQSUob4+dthpi847jM8mFtMDi4tbVlXBq6sTQCxfSD2sGnC9QBZtiDIBoSNEWUi3vpNmmXNZJrwadDZgHKTiKRUlBpxky8oqzIDz4ZRErIdOoNx17TtbJg3WC4J2rcD1yv8NviOnkq6t0FYezFV1KyiOnLOLKEhk/xuNclYsO4LJqi8hDcs4SeVIdoLGBMHI4rYj3bU4pehynymNDW3ueSvn3twxb9k9SXDrjNTzhPJbRXGV0uVp366f+si+6lGu9OBIDg5T9+xR5Qxl6TO0UIZRjrozHNp
JhIFHSUNtTYStAU7me9xMdL1qT9BsSHGt2BUM+SzJwZGUfQkkKR3mIB5One+equaGLleS/UfRKQVa47A4X2+PYpCBDDs43AAuDuWt9ODgWl4bXQmvJym73iRWe3FDINva3tuwtlH0MULGWqGT0FTg55FTZEag8J0WVDBdabEIsn2WqxtBOLsLx/xUMrz7M3F6rzvDxzfCqajvCvRBY8cdxiNQJulEz8dzCuRa3+RxxM6yti/dyFg4mqmi8uhQfWrhYSWSKi/knhaXUrbKrIsoUjv24zPqyw5in8SRmXOUyHA9GTYQtK3tO2QFMvMekQMNHmsUxrp4T0wJd6sJ64cFf+v0XwLw7y5+n19/+0N+9+4tXni06bDLsL6sh0fN6irh6jAl0TZ2SgWEzCWuRy0DkmZ7AnE7gfJcSp5HdkI70SuLMgzedV7sYAI65McAN6R39BTJMCzOeZmUvjs0OQjfrx33mbkO3X+u1z+yucVhuVtN+C0vd3AyOXA+2jNPy6jpk5xYjHbcuBnsBuVOf0+c7g1tnRfS7DIxug5Hed/RLkQrDCArWjGVVS5KnpyODzSdYXq6jw1FF9mOj/YP+cHuMYtUXjsxBzSOskmot4FrIaiQqXpbma7T4qGXVnxv9gKA9/NLsicd9QBunpuSM7NloUsK/4BtbMrLbs5H5WN+cpDeqOtqwr6Vju5tI9+73I0o95nYeHnESFeKbiKWVre1oHi/vX+fm3rKp9vzKMNzkpX8qYvnTJKKXSvP+l094nI/406NaBJfCnaKNO1wDip/47taSwm+G0wK+rkRynAiMizr8LCs1/Oww0LNa7VAf1jpkL6upPT6/viaD89eUZzXEY0sbUrjElLVLxKrbsynhwtWzYhtm8XXwx4QuhMt4o9XJC0H33jUJprGerPi8MglLnbmvQ4Jda/56OlO+9/xiHsgzDsHYZ12ShoxnIr0Bme9p97wQTPHY/tVx89Vidx5EmR+KyedLRW6TXEqjd1c25MZl291TN7asFjI5rC0U9RSSmaxI8tLFphqAJ83oSQGVVg8Ji2PLla0VvPq+QIAc5tG88lQClK1BDAucXS+TNMVivqi4xfu3/D2SDgGv7V/h/3dCL1Oeqd0L+Zo6j6YS3dC8h6q83aZwnhV6nDTnVZyFwZcmOGm39ekXfTDihyoypLuVexkAl8iGGXYVPWK5YVcC8rFlnnnFONpxfRiHYn5t6sJ9qogv9ORixQWa5v23LDOQ/fKHeuQJAfR2NpZWTj2k5y0aKMYH4j+0KFNyUzH+WgfX3t6u6B6OZaWeKCoBUNWbW+OKwKScu8DwdiUHc4oyouE/b0QMHqi+RqyTU8id1odmbEOTUjDhUaOwGvPjm4s+dKR3/lr9S3nXW5ix5E1yneviOijrrr43qMHM9zR4F0VNk6n6KymSFv2C5nUVZrKpjDgHKAdalHzC4+v+NaJwOu7Nuf7N4+4enmCuZWHyQDdomVyvu8JqHVKV4eAP8DcgS/R87aCwKNqB3y9znmJh76U2S1a/szbT/lwdsWLb0ow8qPbB9zcTrHbNHa+Bv838fHz4+UFF91WRWNerZw806+JSYbSrW6/YqEfHGoQ+IH4Etq7jF99+s34O78y/zF/YfopF+mWn0xlg7wqp+zaDI3jJJe5Oklq7qox14dJLCvpWUOjZO2JPpC1FpmJg4oBTzuR0oHZabJYwnfka+FBaZ98dCMdg/VBH8Og1jq8OHX8t5Xvce8caP1iVNZi+OpKQ7JM4gcO1c/j0cnYVH4Dv3RTrkIDjr82Wzhc3kGrIy1COE4qBrtvmi/3MiMo8RHUo5ZiLHN6UtRMMhEA/cb4Op7Oby/fYXUoKH1X2u/dPmZ5KNjvirgkjicVk7ymqpKoyBx0qcyAk3SoDJ1TrOoRv3n7HgC/nz7iIt/xOF9y4vkIe5vzrDll2xXc+q6by2rGqi7YNAXXW3ltsx2Jcrh2cSBdadAHTTLQYxKBXE3VGla+c/PQ3WPfpsy
zkn/n3kcA/NuTH7HQNZ81C37nIOf3pZYOPaNtbFoyykXeV1Bo3zvPFVK9MG3QopLEw08PI7fRDYIMOfFQAutLVQz/H6ZAC9vliH/Wvg/Av8ze4u3Fkm/NXvG9sTBdv52/YKFlLPUgCHs5mvEH1RN+spfS8m09xiLrW9gyrJZyXqJslLQptZPprQbrZUjuFPGZE10/vHaj/7xEiXm46bUc/zDOl1Iq/h0HBd5Yo0le6/B47fj5mgkrjsxhk1I2RZtC63esbOXQjaHcnbB7LGhJMa0olaNbp30d08qfr2p7Vpa4kJmk463Zkm9OrvitQjKtj37ymOQ2ERXlov+AqKzbDD7PwTipOfEF/HujLc9nJ9SdQnuNEFN6HlTbo2v1ic8ujYtBX7aSa7YJUWvGVPLzUJs+OgaqwzFj7YEqOefW+W64fuMPWXkg4aY7qQGbUrMLUZDK6CrFzcTRnMqqk55UZE+21Bcp1VJ+L73rO6n6wCN0n6i+m9DbBuhaxU3TpgYzkcUyHIc2xWhLblqu9rI4vbo8IXmRM75TkTuBRyF04yJ3IlyP2C74TThNYrAYULh85YOYRoJMea93G+9s5JBEpevGHRGlQ5s6sY7uP6+2UcRQdY52knA4N1Sn/noTCdzSrZP7mr358MXA+bVuKvAKyU5Rtybaw5hFiTHujfhrlNcYbflsJ1IfP709Y3s5kU7IuXzg+GLPw5MNk7RmWUrEc6hS6HwGFjuyiLyUYVOF86ho2Hi18xyLWxcTBZumfHJ+wThpIkrwZ+49ZbUYsaxGXO3kHt/dTemWKebQdw6FJgddqyhqqz0qZcvexkg7IcbrdtBoYZ3vOuq1oUJ2PcyynV94N7cT/t7uFwH41ck3GWcNdWvYe8HItjUo4PRkFy1fvjV+yQ/0Y55tTjiUXuzQHo9Jf2M98r0J65jyGlyObOs5Hx45PZKzaEEb5zs0B896PH9/bQoCI2PIUwHI84a3FisAJqmsmT+5vWBbz/24aLm3SS+4iwPVaHmW/RqoS+muyzZ9W75NoZkYbN5zOZO9X/O82KKOawCx4WQoQeG0wS0LOidzcGUcywQ+mz/g+0+EYf9nHzylahO2m4Ktf+/iZMe96Q472bP2aul1m3CoU7EaGvskRgsn0CVEuyOVWIySDrzbRr738/qUpk5Q2h3xjmynsZ3CBl2/Wvv1R6FDQrzR5M1xIBr4toE/G+6d09A0CZnPRt4Z3XI/W/Pd/BnfzSQDM8D361P++f4b/GgrXN/basy+ydg3KY0nkQfJl0OVik4eYMtEgvZB00fgYQ077qIK+RuB+Gv/Dc+Rey2BdAoqQ+kdi8tqxPLFnB8UT/h/Tb8HwHRUkfnkLMjXPBht+HB6yYk5kOuBJmCbUFsTuxiVdWS6ow3SBf5+xJhn+Lf/E/lOdvD68LrUV2UeHEltxEMPXtccEeP7sfmjE7afewDV5or9QznReuEkwxl3mKKfCd0mJVkl6Fdy4w4Lgxm3uEVNGzR8Ko3dalJ7jEqFhzr6WV2P+Je8Rf3Y8O5UyITLt0a86s7Ibo0QA/25Oe1i6S0culRc7mf8wHgPo91c1HBPHdZrDXWNEauSoYinFTg5XekjBMUpvAJ1gB2dVxge3Cj1ZrYYJfoHrwdlbdW56FKf7lrxP+os+iADo+oWlyVYc0J5LxCohdReXIH7wqu7z1OqhaM76SLpuJmDrg3prg9gTC3kWHOwgq74DxT9KYf1HWR6JKrBt/tRPO0s6UhMx/V2wvZaNtfkRpSd64XribmtIlsr8rt+QYjdS34Mw1iZ2pJuHMmhh8PakXTbmIAC1R2q9pHKUEiTHm0ZDD92+GT4e2NTjcu9z99Ys7sv6uftpH+vqQRlMbUm+jb5e6Scg6hj1DufB3Sp6zSpFqHLYNvgGk0TStdRCV5TFjmt1eQ+c0tNxwffeMV701se5LIpaeW4qqfcVJPYrZckljoTfZOoyK57pPNogYqj4Qb/lkAgX3sE5YVhPT3h128
mfQLju5jUsPvPKtk16D8u6Ajpuk8KrCeC20xJ+z34wELUz2MwcvCQvBuca9gABtGmTUHNaybTip0Xlty8mLH1ordRP82Tv2/nBf9gIxvux/fuoZQTiQNfLtLbhOQg5x0yfd0JmT7QB8CX6g70HYRAPVGx1JcebD/O4eEICYp1/QY2XBZe61QESFeKw9MZP77ymiWZRWcdtjKYAfrnEkene/RakERHdl7y+EyCr85qLldTtssCFZND1d8bH/C2Cq/nhUcuw7yUdTc5DDv2pNFFWRXV13sqR8L+5hSAf/oLGZNRha0M2pOmn8zX/OXTn/IgXbH3m/gn5T1+tHrA0+WCzje6WGOxmaHNNMbPaZNajLYssoq3zpcyDjieH+ZcH6YR5ZJhP94kO6spm4SqSmh94FzOtCQeg9KSHiDDQwXvrhCB0nkqm9A3iksepkssmr+3k07wP9g/5rPtOau6iNp3hzqlaYz4jw6DOeOgU+iDJ8gfVOz0DQmnqWR9NnXf4CHiq+GZ6K8vJEbDpNwpSah6oUrkd4qO8VyuoypT2n2COxjKtcy3Kog6KmJw/unoAb93/phvnl9znvcdNgGBamxPve70QL4ACRiDWsfRXfmK8t0b3bDDZ2UQfKkhjePoh4PXnOsTymGw9W9UAIVs3IFf4x6VzKYlo6yh8NHrPJeb9XR1wt0LyaDMMsFtpZW254GoCNf3r7kBB0peyy8N3WbC72zeY37Pa+akLXrW4JYmdg45TVT0DuVEm0F33nB/vGHva7mvljPxQGp177fj9xhV6yhjkG5kwddVn6FZIxYrpnGx6yiWKiyDcl0/K4aT/I2x9DVujCgeA+hDi9lW0HZxcrg0oZvmlGeKwxPPQzgt2XWabp2R3srDmt8qJs8V9ioREVA8Queh0q/MXOLCIaWXdNfrLHWtYr9NIbEkXo65Kxqqcky7TftulpOONnGiQh4WpL2hq7Us1GGssmAlYjGHvotJ1xY0NFMvcLcwtIWKHoAy+NKZoTqHy/rAAed8GW+wcHzFYVNNMzNUPmguzxX1XIKgMIdC+QvnM9XwmSbU4b/6HoabbDuN0ZZJXrM2vka2S1DevDmW3Bw0xnE+2fMXLz4H4M9Nfsqfy58xVpLZAvzDzfd4cTiJQn0gfD5tnMRisRtOxaA9dqWGOT0QeJRxgMb02lxBA8ltk4hUJjuFCW4DgaYyFU++YcnHWc8VHMhwBBTMDjP9IEpp+mAk3LvXM+yodD54lPJRw19+/Dk7/wx/dHuP1WZMt0uxHqJPDkgnXq1or2Tsf3x4QOI38zAvTakiyhTBXB84KNeXNptZf54xAah8gLF3fcmyk+vsMh3brANvw9mBSKCi50MNoMikhPxGY0I5q5Yxb8f97VWtz+x132XlMoeeNXzj/jV/6ewzQExvS5dy2cx5Wcna++PVfZ7dnIgNSuCQhr9CGThMo0bWoWR3nIQ2M7xwsT9/r7+mWtdLxuxTjLHoomMylT0g0y0/2DzmBzxm5oORkWkoTEtdG9q1f7MVNG1I8WjKhLozLLI9f30uZbNfyp+TKsveJnSDxSxVlsZpSs/nuOmmfNmc8+nhHs/KBQDbJqfqEqo2icHX9pBTVyldaaLfoKoVrrAUaUvjI+wXzYJPyvv8ZHePLzfyeVWTkCW+jd/vfU1rKNtMzNtDN1mpRb9rsC4NzaeH0hKRwhI9Fd8MnuAPQVXCGhifOZmXSdHwiw+EQ3aSHlg1I27KSSxt7g4ZXRPQMn9+CprG8HI3I/GLYqY7ctNGqQYQLpRzvYE5gNZO3BgGjgX9eR8HRm90CdsASPQVhUifGB6vrxdwLFdwlPi9OVTD4+ceQCkr8DZAVxdUFFT0pYOnhcNeNJxfbDh9JJn0nZmR3KbordS5AXloLL2Evf/sIDYZOSOhPXtvWL+Q1ku8VYAb96WIpJT1a6i31I0c08WB785fRj2Lq/2EV4cUKk2y9UKEtXACdN0jX8F2pJ32xGLdeqXwAdphUyF
dRwX1cM4R5vTXhlxX1LQILzpZpFtPaK8uCtRZIUhXpuJ3dKkQO5O5pCz3TracjfYU7zS9EentKVsvphk2Q90ISbQZ95PN+uDAaaJqc0CETNnf37bVEiyP2+hlddgJ6RLj0Kee0K4czT5F36XRVy45hA2nz2adEtkC5RSplwjQVYfNDPuHKdsnXiphLu7d2QqSKvS4g240prF0QbRNgfL9urE8OUhS4gbkYfl6qijP/bWNHclOUdw4imVPMG4L+bykdH358OBLh8MjePNpIg9Pa8u+SclNx2QmE6lMO1xQ2vcnpI3l3cWG/+jxv+J/MhVhvjOt+bjN+fuHD/jvlt8A4Ol2QWs1Z6N99EDrOi0LfqV74qvfeMzASNeUr8lVIIlKnaojza164dDv7riY71jvBeHZX03IbmQOxQC7BifC8r3sgC8Fh3IRDAKswcJoKgnOxVPweP4H7lZ8KaCFg+pp5VGEv3Xxu3Iu9yxX7Zxn1SlP/Qb5aj9nXefUrYnt2Psyo63FN1P7BKBt+jpnmCumBlcr6HruT33a4XKLajWp9wNMt1JmTnculifl/qujAEn4kAP0AI4X/RBIaUUzhercxgQq2XnV8WGyidcGG+wRrug4mUvJ9ftrQddfFXMWyZ5Ud7Ec+8HsBq0cz81JFNwM/qOvJ1Vd4rCZnNMQyWzuN9x7sOL+RBLYeVoy8hF24Am93M3ZlDku79vcP765x/pmgtomUaNJn9bkRU2zy0iWQWzWz9+6nwvdOGF7lvMqnfOPzLcB+GH+mHeyG97LriJvp0Ox7CZctXNeticAPKtOua6n3FVjXu0lk1zvi6jNFG5JWyW4Q4Iu9ZGmWpv4sptvPPpkf49XhxnXnrIA8MHpDb80f85ZsuPaozgfb+/zeX7KXTaOSul2wFm1/nmtckOyMSR7FTdw5ekjQ5075eybSVsIOl5b345+To8sdlb8TAHeHt3xnckLxrrGDCKLsDd2fgJvu4IvyzMuq2kszYEExLkxUdKmtVK6G0oB9M0aA3TPmwkHAed4ft3x8687N/gTqBbDxaE/F6dC0OTe+NnR8SdoJvwncjgNme9KS14KJB/0XECCjeo0Z/k4w74lm0g+r6gUmLuhS72fKG2/oAQNGNUR7WLKhy3F+YFx2rL1EL5bZuhaYTNHfSYDbLeiYWLqASelEAXdq3qK8V98kpe4+yv2i5TD3tfl9+IHpAYbgTOC3KhW9WrMd6L7YxNFMw4ZoyOJcH7Aynkzu3b0NfcQQ/psMvhpgXCrukzGpveuc6QHS75UlJcyBs9WGc+two1bpguvCD45cPr+geWDEbsb0T9JbpNY5ogk8k55ZEINEAFoJlDPofFmrN3UoqcNWd5KiROwtYHMkhZtXCTau5zRK0N+C+m+36zj+FR9gCIWNUpKdIBONc1UU0974+F86ZGqaugubjGNcJh0yDoCj8YS/cWiGK0illRU68DIZ4aOymwJ2caSr3tCsNOKpJLzS3YdyUY2CV23ck/NoClA+3KAQzJMIE079lXGxuo4NpNRzTivMcpFIniqOy6KHY0z/OpeyNF/sH/MP7t8n5vlNK6Ii9mBDxY3XGQ7PtsJQbWuUlRp0OVAV6oNaMJgzrQ+q+3c0TwytaJrgNho4Xjv4o6/9fD3eJAuAfhpdZ/fXr3DF+tTlt67sj6kuNqTXgdrkq00KB3V/1VHvxlGyyJISnvk9RX0o46EAwc/C916XQZ2n/DPX7wTF/M/O/+ct9MbvpFdsp7I83B7MqVxSeRsAPxg94R/cfUOd5txfK09dTR5Aq3uUbiD3MvADQLIrw0oI6js2s+ZjSMpj4U0bSq8LjUYZzUInl5fEwBU6HpLoLyw3P/wOvIM9151e70v2F/LeeudwaXSIIP3jytmFeO85mo3ZeXtbLrGYFstG1fYhBOLTiy21dEXzux174/pOMo6Aqk3lHOs19ErkjaWylqnsSg+HF/yVnYDwCfzB/zaqw95enV6tFaog8GUvceo3Rf
s5+nRPAqJtKnon/da0bSGl5sZX9wKItvUCVpbRqO69+bsNGWd9kgSQK1Fd085TCibrTWmC0GK/Frh94rhJu40lL5WHUp4mRa18+/MX/Jnp4IY/4XiC840vOo0v12+LePvP3iWVfHfRlkaa1geiuhd2SSJNHsrM9BA8uty57BxHqmBMTX94fq5FP4v1i6DPUhJgtPtUn7/2SMAPkrv8+Bkw1vTJR9OROT6reyWs2SLwTHxBLSZPlCOU35UPeb3d08ATyJ3onkVAuTOQ97DICvMo6PDx/5f1Sj4Og/Kqdc0pLR6433xueocauh1N0xYwvEnoET+J3Y45UsAoTyUK9qxEdE9TyINhLxspWi8iVR11qJGHd1ZSzf12XijZHKvBxyoMFfMgJQ6bXhwsuG7py9jRvDbL99i++Ucc9B0hef6nIj/WbrpFw+bOfK046aacLkX9OpyNcVaaS3NC/niJO3oJlrE3Or+ITQ78UXLVv5cahc7mBIXkC93JKZ4dLxWPgljGPkdiZixKisLM0h5yykfjQ/4PzhHm884LHvmcHYHujW0I9+Fc+5oTjvMvCadei88QF0nZCsVhTQDIiAbXZ9Jhw4tG4j5eYdtNOV21F9fbjF5R1MmJJcSkRXXIjqoBxm8U7KJZjt3VK7rrzt018lCPrns4qbklKKZaGxKHJfk0KHL5qj1N5DvVSvBlXy4L7mpfo1RTgjkxaojD2KRpfC/hgGtjDWA78JrvNBf15dO4sPc+Qd48LyK9YRluy2iAnJpClbZ8SRwDj4dNXxxehq7da6XU7pDghm1vHUuE+4vXHzOO/ktr5o5VSfdZl2rPWpJzOqtU2jnBBUbSAKoVPynYlmvlXswunFkm4BQaj49v+BfjN/lVxbyPHyneMb7+SWvThd8VgrJ/Yfrh3y5XHA4ZIN2fWndt2mPjoQmCVOqKFAqEh4a3fbzzVQymMMyckg4pCQmrzUzhx617HYFv/oTMXL9J8kHnEylM9SosJhrRknDB9Nrvj2SksW3xi9ZLkZ8v37MdueTr+7N0oL8QP547UTMUnwOh0GfciLD4LRCB9XrgAYMG0E8B8qhIiqlGKDqwzVeO4qk5YOZBCKTpGJqKn60ecDv1bJ5NSpDZZY0b6Na9DivfXdXXyu1u5TEe7sNy202c5C6iBSGEl309QzTc4D6h8SqG8FB5Xy5usezIA5rZX796vwXePux8FI/PLkiNR3dPolrRbaoOL0vczkEse0uExKWVXRTn/xmCl0pklJFortNbETxjUe/G490N5usP9dQ/mvB2MEz6p+REBCbUs57KLgbjKGdPt7clROrrG+OJMj4bvGMh2bNu0nHqZHrWFnND+uM3y3f4Q/2ggBeVjPWdUHVJfHcK5fQOUXVpNQH38gQkvXBsiCyEu5YpgBZC4MV1dGhVGxKUD7geKOJSQGdol155HFr+NJM+Hx8n9+YvQ9AXtQYI8LPY+9h+PZsyS/Pn3GRbFh4AvB1NaHsEjqnIwLVWU3baUHXwhy0+nhB9ecxjNOH4/wGyBBcPaIEge4X8lC9id13g+v9w3hPP4MDpf/In359fH18fXx9fH18fXx9fH18fbxx/NxLeF2mODz0mcNJg8k78rxhnHs0x3QCP19NSO8k3E/vEtpaC/wc2k8TfF2/51kEg1s3KF+5ZcYX3Rmp6fgrF58CMHrS8I+aD2k+n0TOjU0Fdm7HRB2o5n7Dn773infHt9yUvutgm6PXCbY7joiVhaRSJL4HN916sUznYkdXV/S2K4FXYmovsKh9xgkS1n4FcuheC3cDmmWNIvPfa/YtumxRXRfLUgAuS2hHinrhx76wdLkhWypSn+mnX0D3KqE+SahPfFkxdsUNtJcqubZ0bweoD6SFIr9TaA/Bd4WOUHs7df33blOS2yRypVBCyu7G/Vjp1uuENbpv/a+clOIq21tgGIWuRXMpZB31IqMdKd/RdZy2BIhX/jOAesMRWvpfe111jnTT9Z17jY0luT7bGb6
BvlxnFGh9LEHhYWSniNlP0xgmeS16L750kOx7AnlEWi1UZxkvgZFHQWeTkkeP1vzS4jnfGz0FYGH2fF7f47P9eSR9uk5JnTLpz08jnA0hvvvTS+R37VA/Bo92Vo5sIxPU1IZuNOKfrL/NP1t8AMDJbM/FeM8oadCD1LdIW8oB6ZRao0uxRYmludyJH2XqcKon+2tP1A7lfxFJDePY30/l51ukBMwb7p1t6azi9lKI0fpZwcpOWBp6LScrJOvvz9/mvzn/FgD351syLYKPrefS6VVKuldHyKMp5RkKyIpcrIjgDsdUdZ4iULvIWRJxXY849Y9D33E7WGSCIOCwzJBsNV88P+fZ9UI+L+mYTwQ2CWVgHCgjfmWhdBW4XuOsYVZI6eXGWCo7QlkT9ZQCdcB0A1PvAfckmpczJDe7eD+biQj6Kqtj+TuKkZqcly+khf/l+3Mm40qI6h5xfet8yZ8+fco0qVh50t0Xu1N+cnvB5nqC8/m/m1jsSGH3veQKWkpapycHvrUQrbRct1xXU26rMVXnfeUCD0e52KkKUHeGfZVF5HFXJnJutr8nqhLi+hCFUx20M8s8L3mQCnr23eyGC53RoPgNrx/3/97+GX5v/YSbchK78KomobWaqkroPN/KNloQolaJjx8iEv3VXXgi/aEHVi69PlRfygsaUEedap07bnQJfye2h1n8+Kqtwd3IuTS6oEHu585TEV7OzvjowX3+1INnnGWCQAVJh6YTfS6Q8mnTGaztDZqH/+6FpT2CPxT0Dciz7TnQOsg6fEXX4VcS5zU9Emnkfc6+9rv/RnGgvOeY9c7wp+db5kVFbnqz2XFSwxxezOd88VJ4G1zl4ltX6v6m256nMWThh3ppgNKzG4O6NnyyfMLmQ6kPfe/sJe+c3fHjVU76pUzeZCubsUug9U7zyajl8WjFe8U1y7lAr7dnY9bdFL0x/SJYes2XQ9/qr6zwuZoZsc3dKWnp1W2/6NhUEVSVY9u385C9Vm/AlvI5fggSEdF0Bmpf2jSHhEQrIBVyKmCNpis0u0eK4m0hZLx9uqRzmlebKcsbLydwlQrx+oCoqiNEepTvSIwGvoHr0Au0BfKi8I4CX0HRTqA6tViv2UKtSW9l7MK4tFOHHXVSUvLfa7bal3hBBe2vAlSnyTaW5CBPkyodWEc3Ttg9Eoh790hTz2WTSkIgsjMY37llvRRBLFelr0Wm+rVBf20BCoc1mm6kI59N7qXfFDuH8oGC6BepCEWDbIYu1cIrC+fhu/DG44p14WtQB+M9ngZdflYWlfP5nl86l3LTdyYv+FbxnHO948YLmf7O/j1+6+5dvlgt2Hm+XtCAisRMf3664Vg008/RKA3iz9kmQkiOC3ftSDegugT1he+C1GM+z6HLHe3cB1qLmixrxWG99vd4pzF7kRKIpRHVD/KRaN7rz4FfQF8vkUFPFQDQk5Zvn71imtT8eidlh+3mhHSlPVl9EBQ4hV1qWh9ofTGeYU9aklEb52WyURTXUloLZUIx0nbCmTz1c3rspPRliNzIZCPSHIntI2lJirwo51A4MBzdYN75TW+4vutaoW9SjCd4m4NiOZvSnPUZmGoUVifUGtZ+w3BOMS5qfuH0kl+aiSjidTPjNxfv8ex6QVP6raHWg0AhrFnQBd6cJxuHMZTmgz6BlZLloMw3uD7hFMp7D1cFq7mU77T3crRO8dHmAdapqIx+mh04n+zZ3I3Rm36glJVrD2uyS6CtE4qk4ZenklB8I7skVS2d00elW4tGY+NrpUu5auf8pHzAT3ai6r+sRlSddPbVvgtvV2ZUh5SuTCAYy1caN+pIlGXjeSmftVO+bwt+Z/8ev3Erc/CL5YK6To660Gwn3MehAK1pJOFxnk8LUkJNDsflU5EwcNFbFY4D2Tc78SDKuQTpjyHnzpcwk3HLfOYFj+cZh3UBpfehRRpA+v3Xz4Nas1mO+XF2n28uRCg11y2Z6SjbNAZIzimsVXS+jCev+VNVrn8
ONJL0DYKZXvNteI0uBohhLeilYoYXT+/7OZQx+P/i+Pl34TmRJQBYlguW1gtQhqCgsCSLmkfnK95+cAfAi2xOcz0i2eioOh5am3XXIzNBpFN7Iir0derRS811LQ/Drz2ecXqyozipaPy5jF9okr1EurVvQd7PMv7l9dvczse0vq3n3dM7NpM92ypn4zelwzZH7aUuHTtkUgupv5t+w8huDeYgm1Nb+AAlcySJ80KNxzf9D2upD4hHlxInQ+jCKy9SIPUSB+roc9qJ43Qk2eaT8YqLfEtx0bB/RxbfT7YXfHxzj+3NGL31xpW1ZF3tpOfx2EzRlookdySec+A0VAtNtejFSbscurmYE+Mz+OQ6QTciAdAtfBCUdVAazDIhW4WAh8izCMBcm6tIIg+HbizN1LB9bNi+I681Z40EfcuE5kZ+t9kn2FRMfYeIkdMKl6hoTRCOoVKvUmAzTZf1aBgYylPD4b7G6xXijMOU0v03ulExgNWVPe6oQh78oGLufKu80mJRoVUvEtg4n/UORd6UI7lX8ufvfclfP/kRAOdmy2f1Pf6b/S/yr26F+/JyOac6COE2isQ1Gu11ZAJfJJC2hYjrF1+vJyOoQo/2tYUE63EuJHjT4N4sNt2CakU5v534oOqe4XDRiIjh0O6oFFSm192Sc8MRn/V061HbocbNcCxfC26dVnHx1dpxlu35a7Mf85aXkf97xfd4frnA7ZNo2iwB5LExbLrR2DKlnZto7+JMQF0GiJEPPG3XX0c361BFh+s0qu5NfdONI931OlDhfNWAJI49Rgdev97hEToZw+ZlamCr6AodmxNkvDVdl9IZ/1yPWs7O9/yVk0/4yyNB5me64T88+V0+fvshP61krfzx9j4f39xjdTfBleb4ywfBU/gec5A/Q/uf5sRiC0sZpS/8eQVEB+Rc9wZd66iB9MXLM9wqw+w1ndeWU6c1aSbrSUT7Ayer6jlyyiqac8PNfsI/uRPu2+9nT3icL/lm/op7iSSSRllKm7KxU3Zea+qunXDdTHl2WETZgdvNRPiDg6MpE9Q+kesddOHVhWbXZnxSCu/wp9U9Pt7c45PbC3Ye0bKdIslaJqOa1Gu5Na1ht8+P6bD+/gpq5+dgqnA1x0Kaw2azIz4PX3nEACIcRhhQw7UVDaNxxS/fkyTtLN1xsBm39ThWZJaHEYc6lcTIHymglKPpNGvfZflotGKaVFRtEnWgpOPOvCaeOYBhXzvfoVp/kDuRbvB+fXKJww7Q0jgGhuNnSssHumD50kGwUwvIpnIOzB/Ncvq5B1A4Kc0AJAdDuvUwY2zNNdTzMU/fKhi9JZjvfFKyBuokj+rfqnNRyKyfRIowY0IG2kwlktW1tBED2M8Krs9TzKyh8yrcZZNSXCnxWgvqs2vD01envLybxdbmJBHn7lHaksy8/Hze0CyOociuU7R1AuuUzMOd2UoW6nbUw4SmUoOHPix4kg0M7Szkmv0YhUU691ICg1b/LvP+eYPSUCDfplvN5bXs9svtCGsVJ9OSJzOBms/zHX/m4VOuF1NebYXpf3c3hbsMdVB0WVjgPSJwVMOUjbSZOtqFDwjG0nnT7RPM2t83C/XC4mb9CqtuMkZXmnzpopp4KNHorh+DLoM2eG2dhM/T1DNNPVdxA0qWiWSk5UAp3Un5L2hGydhrnPHq0L7ceaS7NZA26DJNdaKjPU43UpTn0nEYNk1lAwne20r4oDvxiu1iSdKvaILmEIUD87yhalLKMsX5lmUS2y8mA92xvGiobMLv7SVq/NHmAd9//lj87/aDhz53uFEXL0w1vtv0oCJKGzYeU/cbkChjh799QFxo0Ip6qqhO/TjksH+nxZzUlB610OsE41vLYzeWnzvKWGzak3+tH6+hblPoBgxJUDy3QbeplGJdRKLiEbLP8BzWhs+253w4OuFXpqIH9N1vPOPTt+7zRXXGZSnNIct6xLbO2ZQ5G7/JdessiheG8WsnjupMEOeQLBkTCNUuWiD
pNsGZBF33Xpj5nSPdO+8vOEBunZ8bTb8OvBEwvdZwEMagGzncWU3pBYabicFmDjfpUbOhW30M/5OOe6Mte5vzGwcpvY51hVEOoywPfOtgM5G28584FYVIbe1Fg/H3IGxqqcUWiuaUuPaoTpHeP/D+vRuejGWdWaR7ct3SoXlRynr08fIer65OsNueRG43KclWtPUyL1nQLguaqcN0REmbZC8BdjJYB5uJQtWa2+WEm2u5x66UDuDxyYEik8llrXThNXUiFkcgmk4OcArjPVWzlRaZOm8/BGI3lew8wXxAunfGsKny6IW3ago+X52y3+fR8/Xh+ZZfPn/OW/ldlAF4Wp7y8eoez9W8L3Mbh3KKrjQoH8A2ymKNIt0p8EmLuGDIehLl3ZqvCLwDktkdv+6UQhl6dNAjO9bq2Hw1H5X8ldFPODfb6H23szmlS7Ho2Dm47Mb8uHzIl/tTrJ8c1mkmSU2ZJrRuQCI3NiJRcj8ECVbaDRBo561XiEmB7eR8h+4JwR1DukGPA6iv0sPCuT+cPP5HvTY4/gcR0ozZoa/hhuhSfgHSjWP2meawl4fr+n5DMmrRkxYbOpJajd1Lx8hQLDFoQQWdmvZ+QzGr6DpFeScLQLo0pHeGtlUQMv3zFpskIgTpF/1m0TEa11RliruR7KQ7KFbJhLtsADEqcEpKV9HuYKeZ7DyS4jclsXCRMmFUQN710Gu0Exl0b8VMToWFlcjP6QrZwIalQ905qY87otmv9sFIeaZRN4I2dU1OfqvYqyl/MJdOqeasJT8tOZ3tmfgFpppW7PYJam2iSKipibo8ARHACYSebhVoz4GqFarSFNsemWtOfPDUKZIbmX6jS0W+9PYnwV7FB06mtPFhNwdZNGzSl3yclo16dOXwTS8o59vX0563pevgT9dGBW7dOFyHt3kZRqoq6lyFw6ZKEDZfVW6mTuwtSkXhE4LAG+nLYSHT7q06hhA5yPNgfMliXlTs65TdYdQHnF6o8HWtlm034fv5I36rkRbo5Ys52ZVhVCm63MWxVvOaYtTQeBSkW6dRbT8GKI0EKEOTZgne/aIz/G7/jEUBxLEjOan5zpOXPBrJBnnoUi4PMzZ1zqH2KvdeQM9aRdBY7KwCazB6sAkbn1m3/fi33neuq3urn8S5mBwe60DJmhc3tErzye05h/ZP8eJ8AcA3i1e8ld3wOL3jZiSJwqodU7mEfZfFje+T9QXPb05o9mlEAF1uqU+IHBT/rejCS2cEFG6t/Bx2R4GgM9AZFWUWwjMulkw+iA/B1THo2COiA7uNbmp5eH/F1MsYWKdEsNAavrg+BaBe572Okp9ro6KhbFP+8c2HvNxJgLGv0zcaDI2WDa6qUhF4BPQ2QZc9yhS9yHKHLWzkMAG4VolApnJxM+0Qo98n2Q2/ULwEYJEe+LXqm6wP0yO0tRtbXKoGhvF9d2b8ncyjh8NORp9Lu1bjAndoZVCdob5OCY5RqhMkNGuO0RzR0uq78LKVTyJGvZejSFRI4BbtiVJFfSJ2TOF6E22ZFxWLUcl3vHflX5x9wi9kr6gxfNnI+muUZdPm7JuUsvGVkaxBKce2zNkuZVOzJKhWYesepbXBNsyAGwRzr1ueKOvXodeeaxWCiYh4yhjWVcJH14KkPd0s+GJxxvemz/kgl8X2zGyZI4NU+AcizSy/VHzJj0aP+NFBOgzXfkOephW17flngYPm/IKutcNiUc4cJYyBBzU08H5jsrrBn8E6O5QtkMHyAeSQT/j63+F4Xb/vteOPFUAppRbAfwn8oj+9/zXwEfB/A94DPgP+5865uz/yg5xsatW5nFS9kIzYZg6XhrAZ9EGRrRT5rQ8yNhnNPIXcoaKXk4rco55I57kZqi8jmbzjZHLg4WQDImfBT+/OWL+YkS4Nbuej4bGlm3Z0M9ATWfG++fiaX14841m54PcKmQjlywnpSpMtdfzeuGE2rhci9BtyW6ioStyM5fqSXf9A5uuO5GBl8RxajASLl6F
vVlBZHXARmrlkKEF5PV1aklZa7PUgy+gy4duEcXadzw53MPJ+nvbzhG404+50GsnmrrCoSkiLkcBb9mjFMJNOSodbqYj0OWOkxFPIZg5gpy1UmuzaUPj7aw6OtpCFJ2wauhVPuWJJlDFQDsy+81IE/pyNIgfQvSZVO9ZUc0GXom5YfGCOYWD5B0fjLIuvi+q0ErgazwML9wjya83o0jG69XIRraMZaWyiyDcd+Z3XgTq0/fdGVEuhCuMF3eRrR2lDnrQszRTjg9V8qcR3rBnYY1jYlClXo1mUzUhWcn71RUu6kEn46HTDSV6yrXOuNxIUdHFA6MUmU68fk/TBiPMZ+HBcQkk42JTINSs2i5zPxqckfgd6f3LDL0wuGes6Ztcv6hM+3V7wdHUShS3DwmgT1WeWI/FrU61C+ySjnUkQaSpFtpS35nc6itX2J+gDEtdbJeEUVZXwyct7fPJSylJp1jIdVUcejdYpUtMxSytOc+F8vDO7JTUdX94sqHzypUvPB2pBDzz4woYbxjTdgfIfH6Q5mqkWYnnSB6/ZqrfF+VmqxzGTbo5/0TkVz/lhseZJvuSynrGp5IuvaoM2DqUtWdZzo35ycyGkfi+ZYSI5uee1OC2BUZc5ksDB2XqXBV9eD2h/l0lw4RJztDkfujE/vBrzURk1GWQdmjU8vr8E4P54I6Xr0sRgT53UnJzsydOWzcGTuVcFap2SHFScy10B7VT5EqkfykS+Jx01pN6K5DDLsLsUVQ9KblYsbY5s0Hy3h+zzfu12MgfD54Lca5dI0BQDds/36qyOYqEfji8pThrOki0LI/epdobfLt/j08M9vjxIoLttcrZ1HhXOQTwaR3ktPCl/klKC5zX+r4uVhsjDbZ0XlX6NMP46wtmFoMqhPQoX5RmA1cpLL6xOeJZe8KuTD8lHcm152oq7gSI+T+/Pb/hTsy95mKx44svmjTMcuoxUiXkwgPLEfa16b8IgYzD0/nTa+UacweLty+iC5spLuvUcsMYeJa+B29VbdzmwNv4dP2/ABZPXBj//Q44/LgL1XwB/3zn3P1VKZcAY+N8D/9A597eVUv8Z8J8B/7s/+mO8Ce2pDPR8sWec1xRJS268xYju2LUZz+5O2L2QRT+70/LQ7gZok88wgmpxOJRzONW7uzfXOa9WKbdnY37hoZiE/tL9F3yS17z86TmjFzIE2dLIgzixWM9PCt0Zb4/uyLwFyqvTOdf7MbtDzmErC5TaGSHEVmqQifg/WQ9FmlKRLRXZWjrYQDbSaCwc7lUrqYCoqvfXJrYlXT/J/QNdL1xEtMCQlJZgIApEvajyzJGcy2KSpB37Byl2m5KsQolRFsbiWpF6BKQbSRA05DQMO4ZCzbzLoJkq6hmDTi4nSM1Ji/LmuGqTkl8bkm1Pwq1OoZlaXN5Dquagye4UzugYkAEoa8i2Hcnek5MbizWaembYe5+/8lwI6oF8DZBtNJ1LONIIUb4UpAdRTCBTouLDoxuLKR3ptl/ETCUmsfnSYnxnjbKgK1FjTg72WIfLWjAGFwjrRko3ThM7bvZNyiyrSIoGtAyO8ouiHvJ/rGTi2jgm55KOq3O4P9vyaLwi9ZNw0+Zc7mfc7saUe/95VhDQLgM9RNg6sO2QECoLXHjOwj3ucilzZxt5MTk4bGKoVgv+1fgEgN9ZvMtkceBiuuP+WHa0WSJBXdMZ7E6eObPVkYTaRUFWC3kHIyclQzxSZRxNpXFeRdKU4gkWBADjuLQAvWYRecdsUnKoMg5eWFItC9btlPXg2uS7JfGyvjSfziqm40p0hPzHJTt5hsMGI/ND7lG1UDE5rM4F2Qgq9v4b4uccISoHCf510m8O/Y0J73S+KU8dnbM+aF69WHB5KWOf5C1nJztmecVq5xGLfYJNHCq1kYrQVUZ4ia4v8QWOiaIfF3FXUKSD7xXLkH7zNkH0tsQ/cy6ic91Ise9EDLO47U/caY1NDVcXDwB4/tYpylh0reh848HiZM/D2YZ
xUmMnUlK8m415enVK96qIYyjWPwKZ9IGRoFKTUcWfeyAk8nlyYNvl3FQTNrUEZIc2pXNKGo98F15rNVWbsK9SDrtgpOuDQqtQvnQo3KyQ4Mh1qU7mkHYqWrmkukUry+fVBb92ELTp4+U9rteTXmA2HMqJeKgvHVdZzmEsDTZq5z9vrUm34T7457AUzTtZd/y61bk+6B4er5eDrUO3FpyLiX+4nrxoYnNT22akS416lWCVHz/8/LGw9sn5FycP+M3H7/JnHz/lu1PhT+W6ZdflNE4f2bvU2mC0pVX9YuTc8RwPFZ4h5HTUBTpcs4ISedT1E1/YIYJL9CW1xwiUk2BziEa94SDx2vEzAyil1Bz468D/Uj7T1UCtlPqPgL/hf+3/DPwqPyOACq2HYcJUTRLhyaBSO8lrzkZ7/vSjZzyfy6Lw5eUpXOak275ENjTWDOWEMMCykckr+a0m2Wnsl1P+4EYWlPN373g0W1M+SVlXCwDGzzXZFXRrTeUd2j8tH/LlYsFkVLMYS8r9YLzhnekdWllW3uX7ppywPIwomyTWcjUSTddlAl6ILNlrsrWcez3zLbieP5HrAV+n01IddAOExy/2RxL8gbyaSBACIpWgOvOap58EE81px/0T2XDfmd8xSWoaa7itZGN5tjphfTtBL5PIAwvEWptALUg/eiTIX1cOIORMfl6d29hlqUYdad7iHDRLeeCyO42uoD5xNKd+cs7kIXVlgt6F0pXczsA5Atm8u0y4bMUydLg5mrFi87Zm/66HkBclWjuqTU7qTWDTncbkiiTpxzl2xmki4RuriOnPcKOqJVgKHJco4eAcLvH30iNcveCnD3RTI+J/RvW/a7SYEw/Wzm2Zk2pLmnYcvOyDrhTdKJQn+vldnzoena/45bPnAJxnWzSOm2bCRyvZlD6/OaW6K1C17rNXT9AeetKFAO3IR8uXlEMGC+B0QjsGGPL2pJSR3ynGLwO6ltHlGZejE77091g/LJlND1RlGrkc5iBcMTfYsV2p5RYEA2WQzsHG22XE1mbh3yjHETIXSqXGc8jSUcO3zq7Ytxk/7GRc2v2Y5KD9Nfv7GxMVFeUTujxlczqh9U0J+NMMZflQypEGEH+qXtwxXXgpAatFHBLQm8SLUPaWOSKyKvMvdIOqAU8uZOZDnmj8mZXnWu0MmZd8MWXO3XTMq3ttH+xvfTLVTwOMd2JQ5xWjsRfNbQz1TUGyMX05y/lMv1JEl40ErKYPXCPK60vlZf8MOG/dI9yhMD/kXJwiyg7YVxld4TCN8EcB1psxq9UYZxVJLjd5Oq4wSYeFSCkIiui6ES5UOHSpol0IwGm658PRK9JZF5HRvc0pbYpFxde2bc5tM+HFfs5zIzSS/T4XZMQS/RM7p3ol9oCc+/lU1wkvDvLeVVNwU054fncSaSR6a9C1InED1NfzeUypYunQZopur8QUOwSqtW+qqPqxD3SKqA4Pf3j7/kBEE0BphU300e8K31BxMiqZePuvn1pFc1WQeLAAQAUxVccgeDWUbsrvqCeM3pIBeX90LV3jr3VGtU7KeI0XBI6+eIrBxJL1OEp7+J+7kMgPKwrxOQ3Pje2Dxdjl6kQmyAzI6xFW5Ciocn8CQpofAFfA/1Ep9TtKqf9SKTUBHjjnXsj3uBfA/T/GZ319fH18fXx9fH18fXx9fH38//zxxynhJcCfBf5T59xvKqX+C6Rc98c6lFL/CfCfAOSjBaqF7JV8rftyTuNRjEC1WOXw8r4le2/Le+e3AHz4+JJnkxO2t+MIY+pG4Npk09efbYonFfdQuU3FPiHZw+xjee/67pzbdyfcO9+Qvy3koZ2eipTBTrJpAF0n2OsJOzNhky4A+Gx2n3ReU4zq3tPHZzltawRxgmiLkK/7uryppLW7OlOxfKU7yJYutmQCUfcmSvPTIyRHhHski9eG2O1UnfoukUHLp/JZs2pUNHz9glMSbVkUBx6NBB7/YHrD7mHGTTXhxUayp9u7Ce42J12
riIBYB6oQtCtwYUJWKeJm/lwbTV3mmI0h990sTjsOjzrUaY3x59zuUsxtwuhORW5YsEQJ5w/COWimQsRvpj0a00ygvG9R4951uKlNdEiX7+2zlSj8h5X58lpHCsbrNoUOEutNbzsgunwfQ73hPJ1XCRR5BF8K7qQxVu6divfTZsL9CZ05AHf7EXWV4jwRtzkRO51hqUU30M46pllvef/R9gE/vHrA+nqCuRPoMd0pRtFk1F9aNfhT+tJyLXMzKS2mDBC+i4TTOIbKm2MXgOqJ+O1YynthviV74a9lG8i8ddBhP+LuYYLK7CCzVD0aEcthCl0ZSUD9sCRb1Wfbg4xbB3L+a114wy63LOv48yef8252zW/MvgHAP8i/zfrlTGxLfKYfkFa5Yf5cOilnN4eMZuZL7h7ZNU1/LvhzTQ7EcnhD4TNdReLL4bmf38NmCRnj8Bl+bnVWzmGI8nT2uKyBIAzd1KJGLW0oK1VakL216cuOe6/75VFdEPI/T0r+/Q9/xF+fS3fiWFd8VD3iR9tHET253k+4W02o73JU1Ze+5dz9GAzQOV2riLKBjFV70uFSS+M7Z90QXQiHAVV5s3iPLqs7MaTWNbHzeHsyoZ1a0UfyR1J6XmYJqW8a6VK5n/tdwT/+VPwif819SDGqOZvsyRPvhWc1hyblUKfUnrgdGi6cA3cnC3V+E8SBnXSr4cvIW+GRhmdJt8LL3O5TXm0Fsj/UKbu7EeYuISv7dcsmfs9KetRFeE0DlEbJ7yYH4gP2VZIFQVpjKCKpAkn6tSpUEGR9k0Te/264txejHX/jQubH9l7B767e4pPbC1ZL7w+5TYWraNUA+ZJz2S9HfDwT3uE8OTBPehNpkM68UdJQtUlfPlUGpfquu+MTH1BHwvI8eO34dwPapAZaamEjCeM0GJuvgpL+JEp4wFPgqXPuN/3//+9IAPVKKfXIOfdCKfUIuPyqNzvn/g7wdwCmp285lxA1GvI7yJcDyM4f+Z3icDPnh2/LTRrdFyLheHGgHsnm0LWabpegml7QMrq4u74zpJlb2okohIfyS3GtaMoRrx6nmIXAk+qsYpelmF0/kt3YipZTq2I7c3qXoK4TKjumHpRUQklkGrpwdo7kIJtQKEPVc0W9EKHOsGjmd16duHP95Eg0LpaZ/GuhzNT1HTxdjpDS90TOl017sm8k1zWyoZVnmn0uY1q2E7Kl5jKB3/eie6OLPRezHfO85HwspT7r4LYysE6jYjkRsu27/5wCWyoyq+GuH8NocuxFM+uLjmRe45yi852N4xeG/FZavENp6PUaOPgSQdcrPIMv3yohFatrCQ5VC6mS30m3fZCQHMRXT9deqC8TcRDV2KOHH6t6UVOk3NaNDG0x6Dxqgg5ZX+IiaEqlStS9B8bBTsnnEPSTjKJLBZqfFDIHJ3nNcj+i9cE39N2ayhF5JaYCmxqerU54sZaNbvliTvEyYXLo73sQlHTD1uZaulx103OqdANJJcGTqUIn2Gtj4o927GhnHeWDUD5R2HlDOmqijIFap748NyhPJEArxP6gqdQVnqiriTIHIKUX1Ry3qWcbCZQCSfuI+/jaohrK3QDlIeO6mfIrk4/4CxdS7vxrs4/4jXe/yY82D3ixk/Hbljl1LQrQ1j9L7pCQrM0RWddmjvpUyqpxo+r6jsHi2o/LqyRuaj1PxXnl8b7EJV9EVNOXz/MJ4HAjdYNNLgThGvS04d7ZhuVI6AT7eSESA6lFVUHNWqEah7GDbthFx7ceXvFX5x/zYSadYfdMzV/IL9nP/hUbX///pLnHr62/xa+/eJ+V96Nz1ntFWvm39vdTmw6loOk0pZdQcK3m4v6aX7x4wZNiCcBZsiNVHaVLuG4kyPhke8FH1/fZvpzGS1aNosu0X2d8gFhKBqlsf99tKgGw4TiYdr4a323lWrLrhK4e8SqZHwd9PrgMe3vaesmUiYv8y9GlBBzVmaI+8QlTKwlkunUDE3QnGn+lYe+J4MHAW7U
iOwHQzTtm97Y8nq8pPP9312bSsPByTOKlSNqJxSWu936Vb8EpKfOFLmOcQmcSKMeqkw+Yh2tZDBj+sMpUkATwz5JFcS8RBOCvjD/m35n+AV/eP+fTSgpOL+s5m7agsSbKE6zrgqvdlEOdRs2nq3pKrlsMFut/L/x+oi2JJwAnIZlswdmQdBODpWGcEJ+vQH0JAT2DNSuswda9EbO/ccSxCYHWz3rDHyOAcs69VEp9qZT6lnPuI+BvAn/g//zHwN/2f//dn/ltKD8xfbR50F4rRsWAx3nBq3QL8x/Li/XLOatziyu6fgCt6HyoQeSrWhfJjaELL3+0ZzHdU9Ypy0t5WLOXCdlaMfkiofYbVbPoUJMW5o7E2x3cm+25N96RqI6NFwS73EzZrEewSaNGSHLwD2A1CBgKRTNRtCMxNAXfkps4Mb7chffKJLCJgmIQeDjnxQj9uOiATPVCku3Y0RXCT8jW8h3prq+DD1vmlYP6xMQHRHWKdK1Idw733LeV5nOuZnOenlrc3HeQJRYaUYAPvLKkdEd2DuARCM+z6Nu7weZQn1jcqXxemrc0ZYK5zphcynVkK/m8LpPMEeRzQ3t94ITktSPbyThEPaaUyIMZOo83E0U77rkmunGYskPXXR8YaOQBU6rvtgguwgPyPkbMievpQDvJq4vbVMVzDh06uoV0Z+N560aB0X6ee1QqF6V1m/QmnIFwvbsZx8ApaIepbhDw1IL+HQ5ZFPfLbkSk1enerDsEOySuF4ysTVyIQqAdFMZ1orA+udFKCxVswAHQrTQrtHNg1ougPj5b84tnL1ikkj1s2oLLcspdNeZu7zf2MiO1Cmd1dGToEtcHCUmAfcBmmmRrIp8ELcnCkN8RuGt/WAYaA567jF979U2mpuI/mP0eAN/OrnhnccdyPuJZewrA0/qMVTuisknkwlxWMz65u+D2btKbhAPdXDhZtAH1kQ4wp3vyerqWzW24aFsDdhySm/BescYZSlyozkrQ/bpFUJAx8f91GibTkr/28JPY7VTZlL3N+OJwxr98+RYA69uJbA6tjgM4vb/j0XjNDw+P+b29SGGcmANvZTcszL5X6kYxNRWzvI76WG2dyjzxxsrWPywmgSJvGOc1qUcUdnXKg+mGb4yv+OXRFwDcNxtmumamu5j4/8HslP9H8Wf5b7sPxcoIsFaJPpNVNAcfnJfChTNVHwx2oftxYOslCadjMi1xExnHrZuS3hkvfOnniUc1h5zAsH65ZNBdNwjOY5PMUG/IBfTQr1m1jlY6Sdph5zXqzPLoTND+Xzx7wfcmz7iXrGNA8bw55R8nH/IH9gGNV5Yv5hVF1mCdYrOSZ6nbprResDRqG6YIKqwgCebrIEGUPX6OX2/VD4H5sCHDm2Nwcxjz360FwXs+WvAgWZGpju+ORL3+W8ULOlS8BpAOw9tuyqeHe1xWsue21rC3GanqSIedUYBSjsR3IqReS9Fa1e/1XxH5BG7eV4mJumgWDKEj4kgGxFre0MIKp+RcL10wbAX8Q44/bhfefwr8X30H3qfA/wq5P/+VUup/A3wB/M/+OB/UFQ79RBba+i1HjWiSTL0fU2Y61mXO3e0U80oQimSryG9lA4oKo+44Kwck2vaQsvXt5tO85tFkzenpgcOFZAQfP7rH9ZcLihdJlEow+4R6oekmHS6ikznjtOHJZMmHM+ngO7u/o7QpN80kTo5AIj/UKXXIvII0vZeqB7C7hGSZkN8NtEx8CcoZLd55SICgHGhlexTDB0SqGwRVKXSLljIx6DZskJB4NKsvCcpDX89cJLeygG1RkKxM7yBfysKfLTXOyNi3BX3b9QANC9F+QATasaI69SKaYTM0jrRoGeUtjReGq+8K8kvjdXLk18pzRTt1dIUb+IaJEnR2R8zugn1P8OEDUb21iQRU5cKTfwv54xQxgJIFVdSZYyesJ+1qMxBjdWGx7B8cazTVieJwXw3KO4Zm5mhmVlTnQYL5SpEtNaMrTRGQwkK
LFpeCLg8EZUUzBlvYuE5M04ps1nE5nWGzJM4FOa/B4mYExZxOStFQQkA/qxzniy33J1KWniYV2zbn5XbGza1EVfVOBkYPUBUhrTqSXGF8h2cISFXX79g2UaQbhWoTbBpKGvDKKU7ykulcBvvd4obvTZ5hsKw6eZg+L8/5aHWfV+tZbLRwoS1ZO4yfM7n39jvsMirffNH4hgvdib4SiChlQKgiEd/hy5V9UpVsNc8/P+f/tPxL/MML8bj7YHbDW8UdZ8mO0teGNl1BZRPmSe9h9osTxy/O5vzz2Xt8fCmliHKbgVW4xMbs1jpN57uvQum1nmsROgylODwimPtysJ+XxbUIyKrO9PfDI1FDVWjlP8C9lhVbq0lVxweZFACeJEtSZfl+8YSrUu75p60hT1usU9HvbZw1/HR9zm98+R6lb/CgUXKuqUX5OZ3mLcZYmjqh9UiOWSUke+8gYaDLZQzbImM97tjkFuURBecUbWfY1t/mH7jvyL1LWmZpyTem1/zpiQRVD5Ml8+RA12mMR2R+8eFL3h3fMjZ19ML7dHvBpzfn7O5GtEEZ3Q9JVSmyW18p2IPNLG+drPgr558CcPn2jN+9eYvn14so3aBq2aidcV9ZxgmitPWJDxZTJx6sCEIqnal9QBxcMHQLeSbX8eRkxTwruch2nGfybOaqZdWO+aI6Z9PK+G/bjGU1ip2Scn8VWltyY9mnXovQ9d8XkfjkGHkNh3o9UHhdSwz64GmoUwegYbUf8Y/LDwDY776LUo68aDjxTVXTtCb1wc84kUQwSGl8OLpk7jeXZ+WC23pMrrtI6q87E/Xhjs759YDJCto7FNcNHYhDiyv1R6FMw3EIXXivSxaAJNJHY/OvX8LDOfe7wJ//ih/9zT/O+8OhnMMlcO47wd47ueUs2zPSNaep6GNcJBsK3XDbTvmnt8JZ+IOXD6nWeTQfBZm8ugbtej6R01IS0U2f2S5vp/xgn3MyLbnnN5a353dk77e8GJ3SPZc3Z2vF+KXGGo1N5eGqioLPxnN+urjHyamc8zfOrnk8WjE1FaOx3M37+YbleMS2ySNStSoLtoecwz5HL+XzijvpwtNt38LfzZCFcdVD/cZfyxFkGSFJjvSLSCxctOy0f7gKHbOzqM/iF+3mUc13Hojo03vTWzSOZTPihRcsfX53wm5ZoDcmIiDi2ycTOHQ72kSCH10TH9iu8GWY1GJGXpLCLyCHXdZzCe4MupYusvrUZ64nDXlRk2kXA622SSgnKW3RC3jaXIKfdCPjCJ6DM1KUF476odyPYlFijBXfJiXjomtNlymSUg+MiP25W30EDb9uJGwzTbVQHN5u0P7alIYkbcnoORO20bhcUZMIFyUEb0pjgs5RMLlNvdWLFgFDgH2bcZKWnC+2vDz1EhmdeOGpYOni70mzaHnnZMmHM9k4T945cJFsWJg9pZPPe1qf8burt6Q7NCgbF44GvG7ZcBEOQY1PKBoJZo8sOSYyr4obou6YM7Bbjfjo6h1+OBILmWTWsJjvuTfZRg+z4HeptcWFJKPy457auBIZbRnnNaOsYeM7r5pKPMO6RuO038SrAf8uBFDW3zt9vIqqVtNejvjiC5FF+cI9kWDnNUQbwMwaHpxLAPXtxSUP8jVvjZdcTeW9L5cF+YsEU6r4fKlOSsT7x47igawTJ5ODnLPV7HxXb7XNRWG+6ykBTSlcTtPoN7SEjq7BOnGaGGxwTivKQ8a/uH2XH2+lpHKe7/jAC7u1fiNuG3FJEKBVrnO9GdNtUpKloQgUiLpPIHqhWvljtBMPPyT4M5XP/IeJmlKg9BEq0Mxg/Y5h3c0ovGSMqeGZge/Pvsnffe+XAPh3v/Fj6WobaAJd5DveyW8Z64rGB+wX6ZZEdfzIPuDgW+nxaJgFurFHgnyAUZimR76mG35lfsYPHz7hlfdf2ra+S1fZ2AleWcO+zbg+TLlcSxBaHdK+ZOi5lW6boDqNcupoTZaOObn3AFo5EmVZNiM+3Yq
MwcvNjM12JCbVgQoSNA7XCamvULSrhNtpjh63YgSOyCcUVyp2QIOgndnOkuxtFFBWQUvwK4KK4RqnNB7d7AMK5SSoNNpS+fVNPStEWkbBbXoCwHWwC7K9lttvzS35wz3fefCSdyaCjHZOcV1O6ayOQfyhTdnVGU1r4lztOi2OHv6PDHjg8PUyFdqLiJqh91/jpBRed4OOdtuLaL6OwnX2zdfca0Fke4yWvX78fJXIHWCFbwDwuTrlM3dG02kyX/s8LQ58c3bFt8Yv+ZsX4vP17dkrPt1f8Hx7wtILqpWHjGqXotdJbIXVXVADHkgC3KWoXca2mXI3OwPA3qtZnO64uLdmWfgSw+WI4tKQ7HvUApSo6x4Mq61MmH/55Qm/nUigoL0onTKy6thG4w69sGG6Ucx3PU8IwmYvukcgC5XYavSQvTOeVD5opQ8tm24A7esG1DbBTluU53IdZj1MHx5IpRy2TEjyLi4SGsc8OfA4X/Kn5l8CUN1PWbUjbusJz/Zyvc/XczY3E8xt2hNugw9h02cENpENWS0T3EqmVaXktbQk1lm6kaN6u+H0YsMjjzpWnWFzKNhvc6xv+VatLMQucXgQg2bRoScNTWM4eHVh1Shs0ZGeljxeyK4+yyqcU1ynDctbT6jeaEGmDqpXbW8lY1GZHkDxPWIcjZKVBI/prBbHeMTNvtxncJNHorRqiCja0IQ3WpAMAuLo5aQclSevvtjNqUcJ47QhPfHfozMJCAbZpaoMatpSJA1j3z8/NSV7m/O0PuOzvSzSP1leiD7QKiUZkFd1I2XfMM+TvaB8ryuRB2PSHrETBXbdqJ672Eowk1/r3sewTGizMV+OLvhkIb/XvFVzfrERsqgeTmoB+8KmWTcJdZOI9EXlUbh1JkRV18+3fvz6cYnHAK7vcmDS4kqDlriIfOmRbNUHLbERJUu4G8ua8GuLB7iHJY/urSg86diMW1Sbkt+6Xk3cyeJdXvRWTkXSMklrWqtpPQJ96AqSu0Ta/APpux5cT1RZVoMNLZzfm+rRKLCd4vOrU9pLEfpNtpp/cNoxe7Sh8fOqvSlQgbMXSowHRVH31y33nLgpR5Q7cM6aoS3SYKgHczoEksmh5zJu3jaUmRWuVBAPXQpy4K7hsJEA5e+X32M0K6nuCiqvJ/RPeZ/fSt9GKxfH//54g3VatLkC6rM1YrWyJyL7unVUZ5rn2xP+2/V3AThJDmIe7BQTP/kT1VHZhNYadj6YKrsE6zS5aXuBx2BTVLToYEUUgoeuHzfTyNgle8XuRhauH+4ybCl2Vml4RvYw9hylqLhfiLG6rlXkm3ZexqAbGewkQNDyfdm6R+xN5QRFLntXheAk8JVSBkOBZocEGNYOysjys2lRxb35zkxISoWuvuIzB/93iaF+PuN3H0747H0JoO5Pt2zrnHWZR4TNWkXbamxncLFqJkG0cyoGjCFAFm0vf73B3qlysfHFlH3wFMjfqusFMyMCZS1RJPM1f9IjVAqg+zcpgFKyIe68JH31kznjF4pk7wjc4ee54qfn7/H/fL/kF94WcuM3Z9c8KZacpAd2Mwm+1k3B1X7CZTGnexkELUUQDQ3Wc6DsuKNrE7I1FDc+c/i84HBWUD5uyE8lKsge7jnMMtQ+iR1to4s9v3jvimla8dlagq9nL05JLjPSbRJvZtDBCDYX4XBaLGXKC/l/c2Jh3pDkLc4jAm6dwUFhTa9Y7JSOBqOBIDcUe4zWCSnoUqP2WTxnl8gYC+TpF/hGkR+guqe4PJEF69V2xu1yQpJ23DuRwOOd2R2PihW/MHnFt70A2pfzM74/ecwX2RldKKnuEUTEjzXgtV6ccLwCSlgBTsiY9kIWrIuLDY+naxLd8dQbdV5enmAuM4pljypge4J4P6BGuo0yjx4AbiTf0Wwznq/P/XuVCM9lXVSLDtB6UK8FeQhl0R88XIMFJywmWiuSXUp5XbDyWW+y0YxvFJOXlmzTwzT1THM410cmvMEVvcsGfKk
Uz/dTcUFZHwqWuxHOKbEPAdTBkwIHgYKuNJ1JuCkn/HbzDgCfXp9zuBxjdmbA+1KMovm2/7sbdN35sU5KR7K3JIdOBPX8+Efldt3f62Yu52L8PVbOUZ06upFDBb/IgyzuxZ2j9WbO2zrnRjum8wOJtxNpMi1l2dTG19rG0GwyLyYo7x1dCsdPjIuHUKHc1yFyE641dm6OLIvTHXVrONQy95O9JEoyH1x8jzOg2l5xP18p7PMRlw8K2kcyqErLtdqsL/vGUkIN1XOPcj2dELg1Ac2dLpUvPYJyYRNWvtTcK0jr2vbIwXAuRiuncC+l9FkUDTufZYyuFNk6YdvMcZ6Yny012VqMjHvUwtHMBbltpv2Gka6Fz3XUQKB8kB21q4h8R2fo9Z1ySf6GXWRdAZOTA4m2bG5O/XgJl1B1LgYK7fOMWmdM7gZE9yyj9XZfPkfmxdxRnwtyGMprphI0Jl86ijsbz3tba+42Y/6eD6Daq5FoAibuaK7oVpoWoo6R856eJx2p7whcfCkB/P5RIt9PL/YrbgQedV82tJOEw72MduSDrkaoItl64HcYkD5N7DCU7z8OFNwYcCKeGcVXvUCrcpK0gQRuwamif4ZdHxT8EZWoKCppbfQENY1DNxqjXKzc3JzPqNc52ZpoWRSbOY4SPCduGxvNupZ9c/t2LvZBh4ymDGauKpbnhkRGNxwgZP/S3h9zqDlmakdysFFUWVcduun6Eh30gVPb9YHRoCzX84QHwdMfgzwejp+7F143tWQTeYrbVSLQ3IA9n5QO8wKyZcFPv5TN4aNHj5ie7cmStjfrtZq6SbCVIfMdGtk6QMsuwonpvKJJLWWXkXmUId1KV0WyS6nOfevqohMPp2nDyJ/fN+9d82+dfcqj9I5XXtTzX0zf46PT+2w3RfSGolP9BAieU0VLPmqYFDXT3PO7dMeuybjdjmm8oFq21Ji9V2POQhlFFiGT9wiUqPtKhhAsS5rTDrQju0rIXw2CFojS/iAPo3JwM9URgbrdjMh/NCJbwaqQjeW3zh7QXLScPVzx/uIWIHaIaNNLrwWht8CBAOgWDu5VLOb7CF3XTYIxlnvjA2cjKdFqHMtqxIu7Oc0rCaTza0O6lXM/MtttlBCnwxjUCilw9l6HLiES+E0keDvasaI87wVBQ6eMqa3wkeKYOkzZxoUDT9QPKABI3JovHe7Lvu1djI/FhicOjPb8rK07CvyskXJslyuRAEDutU2QQDEaaYrnWHdISK4kgMrver5SREta2L1tWB5G1F7FvPlkxvxFP1/kF+X+DLsWdYs3zO7FH03trX9aiwrwv1f0D58DviR8WtOeOdbTPJ5Ld9ZiipaDlotzSpPuBeUaCoW60lBmae9q30lZ0rZ9Z6n8Im8cqoOk6ROK0KYe3Qj8eYpav40lHJRjMT4wyyqe+uaQu+yEZG28BVO/aVrjQPcdsulW5E+KK0UphkHY1EXEO56bD75s0neLjV5KsDRUIje17dHogU2Q/MxF7pOuu+MyAn2gNzx0B0pb8qRjM/KNOd7VQHVKIGA8etSG50veW50odk8syTs73vIJ1DwvqbqEQ5NGwcO6Naw3Yw5XBcW1/+yhhY7uG3Zs6gPGg4oIW3Vh+ebJivujDb/vSzerJ+OohB5UvV0qiVegYYT7GcjdsWu5VZgq8Qr9xO8F/0z5tdE0MvZJ2lEePH3g2jB+IdZXPROfKPAcx1hBeaoo654+kG0EiU32imbuL96jakfyH1WLzvQxx7BS0XA40CBCg5HNiebE1ojafLJR8ZlrC4ctHLrshUNFhVyCwJ5A7aL3Yly3wjUqFZbNY3TFz0HnIDjzBukV1TnMQdFYzYORZBSHJylfZqeUy4xk03cJx+cvrNONV6W3xM738nrEdmEEVfL7pqp03PtfLzMOpXok6RV3keg36jmaYV+U16zncfXI0pFdS5Q2CFn/66bDfvwGaxb6+Fl//fgK2tzXx9fH18fXx9fH18fXx9fH18cfdfz
8OVCZ5cSLJV2fphy2Gdq7rcvvqMitCYKW6SajyzL2A3KlcAYcRdcLVWYeonYG3FhC8z//9pecZXuefrDgk1sp8awuJ6S3Ceka8pvwHYnIAuRwmMqw/Ki9z7oquD/eUHiRkMI0fOfeK+oz0/OJlItEwdCiqXHR32/tieVfbE55cXWCflYwve15TNYcl/4i70kPyIkeFXFKxVLf6GLPfFzySp2SbIN4oozBUGJAt9KmbgvLSS4QyjbL6RL5mfGE7HQD9kVK9ckFv3ciY9XObCzLBWKjqQTF0G0vO4CG0bjm8XzN1GP9GhmX2hou99Kx+Ox6QXdVkN9pJnviGGDlnoYSTShxseu5F10u5aH8zh0hVc5Am6ue5J4qT4DuyyzhGGr69CKOCjUw+UWFkmn/Pt1Csu11V8QAWngwscSlBG2KXboBuUkkq26L3sndJnK9yrvVA4zzBmMs26E3Fr3oZeTmKdgaOBvtWdcyGYIcTFcQSzLNqUUtRPS19UjV4dWIyRfBqmMAnfl/mtD1F/lfvR2E3CfF+b0N4wd38fwW+YF7xZbdu3IDnm4XLA8FuzKNPn+uU5jE4azuyaEeuneeOApi9OtmijZxdNqja1NDKG0M9Xu6lqPu0GAYqlxfKknuEl6tZuRnLd+9EEpAeXpDbZMjmw+tHJnuSHTH3nNhXm2n3N3MYJO8hoL2CCj08609byICXZepILWVi2lqO1LUU2+4O+w23TrhQMZ59BUigoY3EEEcpGnHt84ueX9xA8DlBzP2TUrWGpa3Uk5sqwybyrMVMv3ynkU/2XM+30VRyUV24EG+ZmQaxr7WZ1E8rxb84PwRz68XAHSbVIRHW69vNvIlrWlDljckxkai8Czp+PbJK35p/JR/e/FjQOxT9jZj0xWsfQfay3LOD14+ouqmWK+V5FIxMhf+VpiXcv7ZUh3ZtnS58pSJfoycgjzthRurUUGXiw1RpF94FBN6lDaUJ6Wb24/XuadZjFzkcgVE7w1uGnjepJ8LqaMrFF3u6E7liyenBxbjA0XS9uT1NuHVckb1fIzd+Hl2v2F8It2J5bVMOvVMPD3boufXSsVCozpz5Kn4OpIpJ/f6yfoyXudEq86Pc3KAm+WUF37POM335A9b9ucZZesrN62JXoLxXljNvkxpd2lvzVUr3DKTytCgGUY16s3x82XjCKA5z9scWi8Nu4TDZRhptnH0tjQOC8qIFtZQ+Pj1cfHyNe6I4OdQ7o/GmH7+AZRyfO/8JQD3H39M9UsJU1NR+JFpnOGqnvHF7pRProQ8VF6PMF4XxkRYGlQlkF7YJNuRQqV+MviBmKUlvzz9kn9r/hNuL6RU9cW7Z3x/+ZiPn99Hef5UshOX9WQP3PquqhdTXqQTXqhHsU7dji1u0pGOm9hynZqOLOkw2sYFuaxTDlVKtctiF1620ox3xCAPoJ7Lg5auB+TmJpDkbNQNcn5zVqbf2LV2PJ6uUG85XnYS8HSjvoMuLghGYVNHelpxWkjUMkkrPv5TjuVqhAveZFshYupKxXbxZGui31kskXUuCs+Fcli6UuxeTfjhPot8Fu276rplFrkE2VZFKLs8l/e2J0IOT7Iudu6lpmNfZuxejTFBVO5+JcrON2kkbutONrPqokOfyaKfFzXGWGyV0nw59mOqabxjexIVuJ0/F42uA6lfvRE8oaA8U9RzIhnbGR11fnqxU9/WPFJeA6pf9Lu09/IL9yYSQH0A9WC6IdMtP1XnLEOLtjI0U9912u8FtPOWSVpxkksyUn4nRX3X8fZ8zXsTKb8+zpeMTcW2K/jpXp6lf5G/zb4+wRkdZQCCYaypTa81NVD4DvfYlA7zKuPKzRnNJJpLE2lLnmcHHhUrAN4e39FaTYem9ToX6zbnxf6EV5spu60v9aUWnEIZS+YbMh6drDnJDpRdyuVOntf1g4Kyk+aItpRnSd+kFJfiJRj5Xa0Ycw87jMavFAc348ePcpb35fxO8pJJWjHPS+apTOqRrhmZhlR
1UQMpvejYvF3wojzh0408X5+/OKercglyhvy/BCbne947k7FfPyl4tZzRNqbfoL3oZJK2MbDs7nJGzwzKaayRRSY5mDdKdrFF+zUhzarMsE7x/kQCqF+eP2PfiQ7Ub9ei71QeDCSOuhtM6qLD1gnPvzgXr0TgJ7klPymZjSsWI+/9OdqQ65aL0ZbdXMb+rpGgUrVIYBdkKTrP5zM2yms0neaL3Sm5bkn9Qt04Q+WJnOG1t0d3PJ+e8Gw6Jj+T7/7FRy+4X2wZ6ToGGeu24Mer+3z2xT2aS9+RWfrg2oqcCki5y6WOxajkWw+lC+/p+YLP7k7Z7oq+k86B0g6lHdoHvybpMEakA4IETQtkSYdyimYj87d7ldNMFWWtscYLPOcalyi6EXRn8sAuznaM85pZVpH5ln/rFNb1nXog8g7TccntOKfzDRQqsUyKmklWc+nXieowQ91o9KB7tm3ANNp3cPbyDj2R3H+Jb+cfBi2hWUGoLyHBk3W6ucv5cfVQ3moVKrHoxEbBy7B2DRtBsqRluqhQpy4aWh9uR5iNwdXHZX0F8d4dvxgvzYMBKnIK5bWQ4BHFiZ1VkGhpUomPjyF62oX3Ol/W+wpe2LATUcze/wRkDP7EDgXUOrpUP8qWzHRJoRtSnxIYHGZsqU8Mn1/Iov/9zRM+XV2w3I+o/ALa1UY6a/Y61oZBUIdk39dZf+/mMYcu5UG+4TSR4OF+tubPn7VM04ofjcVgdHc1xqwTX7sNKIhwWUylSJby+ZPSCGSks0jmbg00/uV4qRayVozlo45ODvXc0ZxZtO+aG09Kqirl8Gw8iHadKG771nuQCW1qfTTxd3cjXozmzLMK3pUFdHO/iK3AQR4/99yPeVYx8cSBJ8WedyZ3WKc4+F39rh5xV41Zl0VsvS53GaxTsltDFhKbYDDcDGworIg5qsukR0ocjDy5NgSg1cJRX3Tk5wfuz4VBOk6bHrkboHrrouCVEmIxwLceXnGW79m2edxcO6s5G+35cH7F24VsXmNds7cZv799xD8tRQSu2RbUXugwiM9FRe5aRfNZN3hoA9+mzTX7h472rCW5863YpYqkxhhAmSDgGQIefy0OyWgHfB0L8emrgwyCU5zne/SZ4wu/QDWPjLT+O3XU6vtwXHKaHZgkMtgPn6yZmoqxqckHkdZdO+Hp4ZTPtkLmbFuDzQVpPVJjbsF1w8RMeXHH40A8W0G6zlE+Na8zeHre8XRxGrWcsrxhnDfkvhMNJGAPgUkUpfTini4lyldYpzjNDqR6G1Hfm2yCc4rctOwamZcvkhPaXdFvnn7O2EShTT8vTekorhTdJmf1mbT632ZOFPtHFpf7DSDvSLKWomhYjCSoejxd8fbojm+Mr5j5QGtXZ9xeZSRb1RMg/Ph1neZeIXyiv3D6OfahorQpt7UgQa/KGZs6p+4Mpe+QWzlF7a8jfKBNXN+5Ge5G5y1gOhe7rGya0O0TPr67xxebU0BUnadZxTipyfxzX2oGWjo+qL9LydaadN1n9coZbJJSJjOeeYTtpwtLt2hJRj3/1JVGzJiDTEfgWgGl5AAAJ/RJREFUgt4arIXSgc+16MaOHzwp+HF2P3alqb2R9SOzJH4d/ODBtSD50yZuyuCRfN1y4jsenuR3pMqyLnNuESmC9mDQB+2bIvw5en7hvkkZeTTtW7NXvDO+49ClMYCzTvkKQv+drTXU1kSUMhyJtrRW88z/f7PO6HKoZwPEUPVG7qELOqhs3x7GbA/y3JTbHLc36Hqg+VRY9LSRJDEkaq9yrmrN5kQENQH2Jy3dNhMe6gCRMXXPxQpHeIaPGmNCg8hQaNm3+9ssJG4pbSF7ob6WPXfyVGNKQd2jUX1KRPvCGOzGjuXCkpyXjHwXSzuvaLoCs9NxDjrjZCtlGOD1f/fd0MQGrWHXbOgSjl2Hre277sIxsGwZmgS/gUK9rguFjMmflJDmn9ih94Yf34ko3dVhKi7YXR95pKbjvNjxsFjHgOf98Q2
L9MCyGVF2vWbOts652kzY3/oH86ClO8iq2An28sszXv70HOUUyrtKn5zsOZ/sSXXHvZkseKOsobyf0LayYQHMi5px2lB1hpuVLILb24J0LR1CR114vvOnF5Z02NzbwUw9bHty4J35hrcmy+gJtG0zPl1d8GyTUR/k2mwiZOMuO4YPBYLv0ZH0MuWlO2Vzdohq1tOiz3KGzteHOuV2MM536YjWGQrTxI3qJC05SUuSeReVZVdNwcvdnBc3J2y99YrZ+y6zuh+DZi6q16rtVdZ1A00C7dTCQr5jOj/weLJnktbxvq/rnF2VUQ6UtZWWjKZrdHwyl+WIs3zPe5MbPpiK1k2qOnLdMtZ1VJB+Wp9SWWlFNoOWY2UlKA5BaaJDoNQ/mNoFlfU+i7FTgdJPLzbcJVKK3Hcp6r60HdvCL1qJRSVOylW36ZHcRFApf91+RHVQe9Xhz29PqbqE03zPaSEbhkXFUnBANw9tSt0Znu1PIsnfoqg7w7bO2XiZkMMho9mlInsQvMOsdMdkq97mJj2IMGmyFzI5DBZaBKGT+ZfQ5VLqnbwIZHPYLw0uMVENXzcjbAK7QnE38/PvSUdycZB76TcNs5fSoc0UtZIx+NItuNuPmOY1h0aeh9u7CXaXooqOdCTzyFkV0aeon1ZJaTXZdZEMK4iiEG+Dz2IITpxWEfVxOsEmOTaDlVe5vzp5xG8+anj87g3fXoje1sPphuvzOW2ZYXziFjSR1jcjfq94BMCLyZxpWkkiUAtiESRB1DY5Co6S0ndGBqSvDpYvfSCoGyvdeQNPvC5V0CmuX5xQfOlL+Ft4fuHgg11c+83GSDOCL4PIOTuyrT0qwyvnx0UR5277StEVGTbNjgRLdUv01otI6kG66oJPIcD2kWE1zWmcYuadJfK7IOthqE5l7f74m09g1sA65XArc+G370ZgHDqxFGM58XfP7lhkB0ZpS+p1+NrU0iUJujWkno6QLcVC6+puxm+q9+T3rEYph/GUC5A1MiBBQW4iaBXlaUvpNdpWqzGu1aTjXnxJNdI1nK0c+cY/S9sOm8kaqPx13GxOMTtNupYEBGC875t8YtfhNOHwUBDL/NZfx52iu8moFynL+yHSFW0/5Xo0Pd1bkm1Hsm+PymFRZXyAQMXjSDASVNcrw+vaiabWtMUq303YQr52hO7c/nM4+r810Iw1h/sT1u97Av+iJDmpaVUW7dKOGsj8+/UQJR0EUKHSpF4nyA+O6N3Z2jeV17+qbPcVUgxH3nevSxp8xfFzD6BQcHMn6MHd0wuKa/WGr9XLDH53AtV9mWHTh1vuz7bkpu07vKyh6gzW6ghB5ze+PbmG+tR/XK3JLw35HaC8FL4Z8bw4pz7xytmIgF6WNxRZw2RgrXGv2GKUY30ii+Dy4YhtnVO2SdzQQmamlIu1/1RbycCTOnKCcv/EHLqU57sTGYNyxGo7QlWmFwlr+3pvXFQbR7qVLMF5N0tlIblNKe9SYnNP10fsRyWaGjYPHTePZOzdLmH0NJEF1Geb9YnFnrRMFgcupoIOzfOScVozmx6485lmZ1PUfpDpId2V00dbpkV11CmZJy2jIQ+hTdhWOc9uTmhv5IuzG026VYwGXUI28ec1eDjvnt3jN07OceN+I9VG0JmmTMCrCydbLeKhZzWsffZ0KW3cyknXDIjdSrLvMGWHaryml4Mg4R/Kp/V8xOxsx1959Bm7+x6t+8aYe8WWt4q7WIYA2NuMH20e8IMXj9hOPGfBC5HquheCwwmyqVoVDbL39YRPVyPyaUXtkVbWqfAEbI8eYKGbWS5PK1pvcZE/zchWg/sPTJz87uviq0npyDYuWj6Y0pLsWulGrIcQmRztqZcdWQh6qpyJQWhESsKGimzO6R7U0jK6kt/L1obtOxPa07ZHQQJHzSlCot8eRqyaMXe5jVIZo2eG0SvJjstz3w03k264xOtXgb+uVYuuOuqFR1AvJJmR9vxwRb4NvHOxZInzY3QAfKA1fgntFwl
3Tx/yjz5cyGvTCue5P8E7LduIxEKyMqw80lfeXmAq+cyAVOsGzjYiHxFym67w6O3akq9kHiWHFtX6xTxsGFEpWjqN5BgLxO1UFDYtbhyqU2ymo4iujZaK8UvpvAz8vDaXUnUz7rvYlFWRP9l3/4I6ONSmX4viKDrpfAvG3oEqoDuiro+yiHBopSN/UDrU5PfUjR+HzOCMIVsS74lNep2uLvdK5BcnVA8bRMYgRG4OEicyKqELr5ZO5m6f8GwplYzpZwnJlogGA0flrEDDVRqqhWL10EbO2/ypcKf2DzKqi3CfFOnWUSwt2cpTD7YNXZGQHEykQZhSk9850p17YwyHOlooaNeSIEddskIC9MkXitYHZO3Ir+m+mxYkgDKVRVftH4rAyD0boDDxRqr4e+G96bbFpoZsXFP5TtW2MNRTdYwEeeBgaHyPg2znMM9AvCJg/64iXZToaUPnX9MHjaqDXE9YEwb3ZrAGRYX3iLg5r1XXJ329llXfhXcUNL3RUTf4v3WhDnk8dj/j+LkGUMo6bG7jRM1WiuLWHU8iv2EW1zB5HkikC55PF9i0v0lBeE51MPaEu+JWJmmXKraDSRlMb2MGVUsWPboEZ3xb/DihHY/YTxwrT8J9NjsjHdfkeRvLYWnSkZqORNujcpPx/9evsQk3Tc7VQdCr1X7EfpuLsOF22KcJxUaR++wk2TuyrSPddCSl39h9u70zCpd4i4tFJ4jCrSFfyntDWQoGsK2HcZuJpj3x3IOVYXTlSEoG8LPCpinNJOPViQR4X5xJmUO1vZllulOYg3Ciwpg2U01dJ1Smi5C18ZD35WbKNvBelhnJWpNuFF7ZQKQrKhf1kkCIua0PIsPDmmwVPDOYpuccBL7SULZBN3Kthwd5XACLWysEe++JB8iC00jwFGUMIAZQmoBOwPlkz6/Mf8zMC6DsbE6mOmpnuGqllHDXTmicIdGWLGup/AbmGtE7cp2CcG+8+J4x4IKOTqvgoKlqTbKSgR2/lEV6SH5Hwe6xYZ+m6G34PSeb6PD5V3hiPhB871oXhQ6PCPZGedRmsCT4hTUgUM2JxZxWVG1Bugkoo6KZyDPbBmjeS0toHziCbGjZSuF0//niHalAOVzQ4GlF4NM2JrZ3hzXBlH1mnhykNTzdi5ccIEbRjaA01pP1y7drxosD+3UR7Ymy9WD8hzpZ/ntCg4Jw/xz5raJ5Ie/dj1OKG0E6Aok53TrKU01X2IFYJKJJpIbyJD35NT5zpUh1yAbon/XGRsmSIwf5cE/8Auo06MTiOn2UkQsqp7D0PEGRs1Cxo7ueibF5feKi7ZWuFelKHa0JNpU/piKiJ6YHYWgLRe1RRmeEg5Tsez5nMyUGLKGBwqmw6fafE9TXe9HhcB1y/jHwUAplU3QtwaK8V9FOlBcl7t/rNIJg+VJxtnQ+2O0bI76qjd5p4Zs2U3XkWSim7D0ia0op0+shkVn1FYKQKMSNf1ChcFrFRpIQwLZjRTMVi6LQKFSdytqdbnrl8azqk5TYsDJI/OJD94cFAEO/t3CEieHnlilbnM4YFzXWI3P1IpXx7wZl385THVzfoBAst5QTtBHAZgmlLWDURasvlylch1gIDYKvnufk/3Z+PWn7BDtI0MiY+98LpephEKSJgdHrDhPy2a7/Pfva71h1jEh9xfFzDaCcVpDZSNarFwJDxvIGRH0PU/XQfLoVTYrwQMUj8lVCoCD/twlYb3Y6O99RnmXs1xnaq4Sbg3h6mbJfDHQNeeUobvoPtkmG05k/L78gaEWV+YVlaHdg8DfBf1476BzwNz1vHUV3vHDUM0U78STtqs+kk4MlKbvo0A5hQ+wXrNmTNYnpuM1OUL60qTrxNQvnBUj50voHdYBUBY2g/v74RbmWABbwZG3t/Z36bFg3HGs0tZpqM+Wgp32200mWbg6wCNpLPiAaQqxho28mKm427UQ2flP12lbW+PtVDUT9rD3uVvSfZ1O/UA+ChH48/AvD9ySvdVt4noC
cs+NmN+Z39u9GtOlZueDpbsHT2wXlyq92jRbj3tTCNqG49qr0HhVVg41AkBcn7u4Lv6BMuiguFzp42sJbGLSDgEd5PZmgUI7fHE8F0QgdYu3E0c47zLyOpM9qWTD6Io2IBAhSaVONLr6KvNz797nc8uRiyfhRzfpbEhAfmgTXGZrWsPaomT0k0Ch0rWMAK4u/Oy6XNqEE3BNLbeZoE3CZix6Dh8eO5kTKfeH8jA+gwnwJN7RxKao1NGM558npjj/98Bmjtxo+fVuI4K/WM6oyFR5lE5jg4b6rXhuq8Yihc3RBBbrz60bVz6MuU7RTcPNG9I2AdqzpdioGFSDBRNT+SfrnYXQZrs3PF0AZfazpEwyG6cdAOVDGUYxL1t8NdSAFo45iWlN5MdayFmNz3ar4vfWTmvfevuKD2U2kE1gUhy7l0AUhJikbl13Ks/Wc22tZePQqjd1TXW5RZ/Iwzuc9lSCg87lyvBM6f7+Xx5+FDsjABav2OfYmp8t7mkF4BoB4T1DO6031z3xy6Ltjwz0JQaPJLJ1PZOqTROaZI/JcXw+cQCgU9Vz4W20IVo1oWw3X0BgUJQMXiURjfSNKl4fgAZqp78LzVjNu1KGLFpN2sYEiNR1pZ9i9mpD6BMq9c6AY1Rz2OfZaxi+/lQQ0rNcAymqUTXGJjsmg6ryo5kA40gUu0OslLo9UR40khE+cpy3vPXkOQPkwZVUVHOo0uie0rembCPw9t53ClgnqoKPyum6guExoCxMD9ngfFP1aPAie1OtBlSOudzZRdLl028Xm1E6jVdcjaq9f21ccbphxGiN/COdgcXXzFe/qj/9BSniLuUAP5pe2NJ0+QtY6Kw9X2STiPwS4fYKqZDEeZuEgC3O0cvGEYGeINitn4wPz0zuq+wmH1i8obcK+ytjtM+zOdztUwp8yh4EwGETyWuj+MxUkO3dk1aFbD40PbnCY3DZREcLvMkU3lYU0tJp3c2l91uuE1i/6uVfjVdaQDDNLE3gd8v88bbk/2WLecqxPgyGlAScGlIHUmyQdSjkuiiq2LO+blN33MqztW8iDrL5tVb+xtBp9kAc2kK9FgPBYmA/nhUxLF82Jo+ec7jOqeiYk63biaCd+gRlZyDuUOe6EUcqxL1OCV5ROBS+2w40vlLUG1jom7cjzFt0ayi8n/r5pTCkLbe4Ry3Rr5QFIjjN4pwIk7QP4nWX/8YL/6uVfJFnL4Kcr4T+M9o5pyMidLJZtIXMo2/QBcfSniu3Tkgk30wTnOVTzc+nWqZqevGrf1bQQ7Q1A1oRx1vAgr2M78eH9lDTpGJkO4y9mlDaMkoZpUhEsi7+cL3huL1A2ic0XphRRTd2aPhmxIQDuVZuxjrJNmOclDydCNtHKUXd+MfW/WHUJTScl9rDQHqqMcpuh1mmcRy4ki5a42eh7JafzfTx3gFHSkHgT0rtKODNfXp7RflGQLVX/bJZOyM2to57La3VteLGfy2f4xWMxPlD7jf51I1Oj+xb8wJVprGZfSaax2Y442Jwu1/F7VStITj6pyX2benWWULbGz2P57CTpOB+VjNMmBhhX2wnb4gSnNK3nXqV7Q1JZH1T5+eJFEsXvyzdctA7bauanJfdmwZ0ciqTBOcXLVAKeZlKjtZM1wc+Nb8zXPBmvsE5xW4+P7l+4ryDiv5nu0HPHyJPSlycjyjLFWiXX5L1N7413zNOSSVJFuoJ12vMRFU0hn192aW8o6xfH5WjETT6muUhiVFMUDflr3c1tp9kdcg77lMOjkBCLGLEgX3L+6U7m1GhcMffehoe3Uuo2Ea+1wX1/3bxWa9lWx9pGo1z5bkNZpdReBFnZhGYuyO3w3jktCWBz7st6JxVZ3jJLW7JgCeTn1uvfXXeGwywLQDWzccXZ+ACTPXeeErBZjKjvMrJbjbsL19HvDSEQV1ZQmjcsgax7k2wdEah+DqQrxeXNPD7Do0zmbZ620d7FOhnLQXMoMNjHvYhpuc0wS29jdOTBiQQ4g6R
76Bohv0hs6nndI9KpXkIhSDEc2baEYPE1OZuvVBv/isDyK2UgBscfLXLw9fH18fXx9fH18fXx9fH18fXxxvFzR6DUwcRuhzxpY7YXMozEwEg5VFHReXJiZ8WCpOn0EUG57TSHXU6zDHogHkGqiaa0L+7m3OYjEm1jtG+042QsQmbO+9TVbcK+SqmqtDcxDBmykkwPECf5Vom5YYD626BDMgjBlXCuXGpjKYLMYjKLSTrygY5G12kOaU7ly4XOaGyqsYkiz0PHgmSfQzHBzb6gSFpOipKLcZ+BgrTcBj5WZlovFNjGLsZZmkTbBzvIxvq2Xl+y7AyrquB2NWG79fW+VqEa4UQFBKrLhEgQOyv8ADoFLnXRm9DlHSq1KONiu3KiHVo7nwD4MfXfn4/6tuY8bRilbUTR4jzw8yPMpXD+VZvw9Fyut9rnmIPnSoQOLa283knPY+i90foau64tk2cKVBI713TtSYxtj7SBII6ZFkTSVP58gmP4gGipnMNpRbYxGG+LsJ/mzIqKx/N17IwcJ7Vkq7gB587SWtHSKT36E1quG2uoPCq1rTNWZQHM4vkFJCXwL+LhOO766vpsNQg8pivDqy/OeGVO+9pHMKz23VJh/FFONIECamYRNLMlenqlO9+hmELny1xtI8+1yfvsPzP9/Q6o1Gx6YDXP0LUZlDEUrnbia+bf0l6O+On1SDLc1ykQml7AVyElUePQuXxvXjSM8poibVmMhVwzyWtu8zGHfEx250tuO+F6tU3CuBBk63S2lzmpXOyGDQ0wmyqnCR1fncbmlnak+8xcC4E6kK1ByimmtnIZ/oKVBXWbcanmEW1W2pGmHaOsITG9NEgQOwzn8nIz4/PbU/brArzmWGxScMTU2qUWVXRkRYMO2k51QndIoNFYnXDpJSiuzQylLUli4zPrnBjGNnUStZei/5kijnUxEu22ZFTTeI0shQcKfPcciBZTPt/BXBAhgLJJOOxz6k0auYPuRrhATWNIffk1LSqsGxC4Bod16g0HIQWR+2qd4tAIAlb7c+4KI8iTG0hpeBL060KoTWNoGkPbCtpnW93vJWHfSCw677BlgvZ8083llN2oIM1bUq+vN5pWHJyiKzOsR3PtAM2JJTxHRJ+OyOOvoSpOgdL6Dc5UvnR0nxbsvEXT3vPK3GDtCHw26dz0a7z3RFVFhwnlyWlNk1jsNom6fsHcekhzCd2dYf2RD+RNzpzyN+grnmk6Ipcp/vio43CANA3+/ll8p686fu5Cmtm1YamEdLvUxA6lI1Fk4yC1mJGM6mhUM/ELWTisU7SdwTnFwQc3tlMkO4WpYfTUl/9epHS1R/aCD9HU0S1a0lnFydQL6aUN07yKbtAAuWnJTHdEDNdKyOLWKVrXa9e0Vh/BwnZwZ4fvr61hV2dxI6uqVAKH/097Zxsr21XW8d9/75nzcl+8Ld5Cm7aRahpjQ7QQghgSQhC1IKFiQgKJhEQS/QAJRI0W+SB88yUSP2g0KESivKQJNJAaKSVC+IK8FEtpvRQqNnBp5baUe2/vveecOTPz+GGtvWfPnJlzz0B79tyZ/y+ZzH5Ze63nWWu/PHvtZz1rODGBZz4pqwujQETlx5TZPrvB9y6s0d3c5WiOt9HNXd7llK7h7d1OPXHtYFAwGAiJ2j+mU6ahu0e6uxxbS33hR7vps9/RtR6XrsrGyG6HrZ01dra69PNNsdzss7nZq284QB6VmD9VVp8JhwXb2136Ox12L1b+ZUU9W3w13F6RfbQ2RrF6Lm0MeHo9xerp1g+HfFPrl3XQuypI4eaRnRSBlnTxdbMvUjOaeDUP3J455AoRlX9NIdbOJd+Leub0zZQwXeiNB0/9PX4UhkLDUdDOpr9edVOrHLJ7xQaP7RZcurrLkRzz5Wi3R7cc5Ej3owfiud4ml3a7bPUqP5cuu70O/a0OyqOTOheKFF8oqP0OButBZ5A+dXQvVp8oo3bkn+yyjkJUc0Ot/1CsP9Wh3Gp0r2vUFT/
2uSnf8Krzd/e42D0etQ9Kavc0kqjcaU4SvM65c13OHj2WfMkgOXcOBEVQ5oeXgFAqpxiMyi93ki5VO20+XlIMkh9ac2Th5Gzy9Q03gsghFaKzyfZROPu8IeW1ye3gp09c5OhGj+2jawwuVvWcjMLdp9b4UX5xS4Ll/yo0x05BsTOaJw5g2A3WdpLjdnVerl0MOtvDPHBgdP5Ww8er86rYDY49WjL4v006+f1JQ+gfgR9dO2TwU9WQO1FeKupJWQF6w+QEfXx79GCqJ2sdc0UoGXa7DLsbtStCp4Buw0cllEdVZb/Pyc/hxSZsDEe+jFGM8q8dqDc32NlIfk/VebTbH434qt0gjgzhxC5rG7t0KqNRwcZmjx0F/XyTH5zvUPbE1lObfPdSbs/aj6rR8M2o+BMvwGOj9XpF/cJc2fOdi2n0Y/diGvQD0H06GwxPF3RzQOb4UQddSp/8Nxox8ppFVXWxc1W6VjvZzUBPFBSDLoM12Lo66Ts4nnwlm5G5y17jmqy+pU06t0Oa8q4cNxYrx/c0wrsy4kX3Ihx5nNq/S4Oo/U33xG3aU7HJcXzrudnovmaX7tFd2OjTz0FCx+I45pcqDdkTeqm6l0yGMUjzJhajAQ/DEg07e67ttJPaWBoFFx03qpojsJOAMVaH0zjcUXgR2Tk5V2BlsSY/5VG6PmhY1hfmdnczBYObMLSGnSDWGk5k/ZExVl+ERwM2cyNUwfV6UDzRgSc7nC2Tj8xT3SC6eRLi6sLsDik6QdkZRcg+sbnNczYvcaTTY4NRDB4Y78npDTr0hiXndzY4t5Us+IvnN+BCl2J71HsF6YIpGPezKnrJd6Y6UfvroqzetCs/mq2CYreE3hoXdDTXKVBEdqBunJTZCa96kIZymINB8n0G6JVwoRs8Wca4g3UZ6Eif7kYqWAp6211iqxz1MKyLjW4KWVD1lPSHRYpNtLXO1rlUB8X5DuWO6PYZTfg6HL3FVOUOO+kk71woiK2qx6gD/XV2BlDfhxqO4VWddvupHi6e2KhnpK9HajS/o1dvTsUoEF6UqduxeZMPpbhcW9dozLG16YxaUQd/G4y+9RfZkb6zPT7kXsP0YOhXvXOdQD9c4/wTa5yvH2DVAyfG709BMmyyzp08yGKteY1ErseGs2kl02ATepXTZ0cM1qrYQ2Pq1PVT5bd7PNLIqrpCY6zdxg9uPGSLYLiWHpCVvxNFkSYR1sjxPY20FTo/mj5Fg9GUPPXk1ZtDKIPdY8NavkE3T/mhUWyjKGD7qkBXN9qofvBDw4FiVK8TfRHFthh+P/Uc/OCJzfott/ng04AUJLCa7ii3R/WGXldJVU8NQ5tIgzkqI7K/mxIWnfGujMq4qd/+c7vuXB3sPKeRn3K5Ow0/wWH2FWkYLsNu0D/aKKKqj7Gna2O5eU52Ys/Lr6ryGTs9GOao+5WhG1XiphFBNVpNe+qoOWlu93wB59YJ1qtwYkQnPwvKkXFTh3J5uoRq4tsedeiM5kivWv+Jeo0yxtopqpeFqg7Xg91jQkPVg0O6eSDR2CCZbjA4kkNWVJuHjVOuUf/DtfTSWE+uPVRtHFUjt8utXEcx8i0dbEC/L1DBsBkgs+o1akw3NRKssTxhQClSkMveCdE70biHVs+WSQMqGve+ytgZjGZPKM920viGE6Pgmr3dDjtHuvQudurYUGmiemVjfHT/LHbTM7KaLHrYAeVJ0tUt63Zr3rObeqnRo6WGMVVPqKzKUBtVigZRz1AxC8UBYh08U0h6ArgIPHlohS4OJ7Heq8aq6r6qesPq6m69V49V0f1nIuKaaTsO1YACkPTViHjxoRa6AFjv1WNVdV9VvWF1dbfeq8cq617hUXjGGGOMMXNiA8oYY4wxZk7aMKDe30KZi4D1Xj1WVfdV1RtWV3frvXqssu5ACz5QxhhjjDFXOv6EZ4wxxhgzJzagjDHGGGPm5NAMKEm3SXpY0iOS7jiscttA0qOSviHpfklfzdueI+leSd/O/1d
fLp8rAUkflHRG0oONbTN1lfSufA48LOk32pH6J2eG3u+R9P3c7vdLek1j37LofaOkz0k6JekhSe/I21ehzWfpvtTtLmlD0pclfT3r/d68fRXafJbuS93mFZJKSf8l6e68vvRtPhcR8az/SDGT/wf4WWAN+Dpwy2GU3cYPeBQ4ObHtL4E78vIdwF+0LeczpOvLgRcBD15OV+CW3PbrwE35nCjb1uEZ1Ps9wB9NSbtMel8HvCgvHwe+lfVbhTafpftStzspvvOxvNwFvgS8dEXafJbuS93mDX3+APgIcHdeX/o2n+d3WD1QLwEeiYjvREQP+Bhw+yGVvSjcDnwoL38I+K32RHnmiIgvAE9NbJ6l6+3AxyJiJyL+F3iEdG5ccczQexbLpPfjEfG1vPw0cAq4ntVo81m6z2IpdI/Ehbzazb9gNdp8lu6zWBrdJd0A/CbwT43NS9/m83BYBtT1wPca66fZ/8ZzpRPAZyTdJ+n38rbnRcTjkG7EwHNbk+7ZZ5auq3AevF3SA/kTX9W9vZR6S3o+8ELSW/lKtfmE7rDk7Z4/5dwPnAHujYiVafMZusOStznwN8AfMzYN92q0+UE5LANKU7Ytc/yEl0XEi4BXA2+T9PK2BVoQlv08+Hvg54BbgceBv87bl05vSceAjwPvjIjz+yWdsm3ZdF/6do+IQUTcCtwAvETSC/ZJvjR6w0zdl7rNJb0WOBMR9x30kCnbrji95+WwDKjTwI2N9RuAxw6p7EMnIh7L/2eAu0hdmT+QdB1A/j/TnoTPOrN0XerzICJ+kG+2Q+AfGXVhL5XekrokA+LDEfGJvHkl2nya7qvS7gARcRb4PHAbK9LmFU3dV6DNXwa8TtKjJJebV0r6V1aszS/HYRlQXwFulnSTpDXgjcCnDqnsQ0XSUUnHq2Xg14EHSfq+JSd7C/DJdiQ8FGbp+ingjZLWJd0E3Ax8uQX5nhWqG0vm9aR2hyXSW5KADwCnIuJ9jV1L3+azdF/2dpd0jaSr8vIm8Crgm6xGm0/VfdnbPCLeFRE3RMTzSc/r/4iI32EF2nweOodRSET0Jb0duIc0Iu+DEfHQYZTdAs8D7kr3WjrARyLi05K+Atwp6a3Ad4E3tCjjM4akjwKvAE5KOg38GfDnTNE1Ih6SdCfw30AfeFtEDFoR/Cdkht6vkHQrqev6UeD3Ybn0Jr2Zvhn4RvYLAfhTVqDNma37m5a83a8DPiSpJL103xkRd0v6Isvf5rN0/5clb/NZrMJ1fmA8lYsxxhhjzJw4ErkxxhhjzJzYgDLGGGOMmRMbUMYYY4wxc3IoTuRmsTipa6NHL60kZ/fxIB7as6VOx2XT7VmYsqopUUM09bCD5wmxXx4zjplVbkxNu18+1XZND36yr14T5f4YZcdl9h9k34+t88S+Pfrvd9wB0sSPK9O+aeLyxx94X+zZN/NQNRf3ninTLrPGmT0j/1E+k8c3y5i8jKbuq5Y1a9+ssuJA6faWOXHcnnJjfxmqPKaUoZnp5y93Ms/pyzFWx2qkmH3cZJrRlvse2LknIm7DLCw2oFaQHj1+ufg1VAiUOiGbyxQCCRV5XYKx5Xypq6jTTk2nxr6xdEWdx37pQkp9pJpI29zX2B51HoynU2N9bF8z7/E86+Ma6eoHuajlT/tmpxstazxtsfe4Zn7T85gsb1LGWemmLE/bx8HymNy3n7x71jmAHGN5x75lpV+MjmNauhgrq3nMSJfRuiaPq2UfrUt7l5vHVQ/kdGo28x49ZDWRrhhbj8YlMdpeZGOhaKSbXC7Yu6+YXOag+4aj5YntZaOsZrqSGF9XUORg1oWCsrmsYZ1HqSGFhmN5lI38yyl5VMeX+biCJFfKYzh2XFOOsplfPr4ui2GdX5l1rvNo1EFJNOTL+3LbloIyt3QBlBJFXi9pLEsU+agCUaqo18vrvn0Ss9D4E54xxhhjzJzYgDLGGGOMmRMbUMYYY4wxc2IDyhhjjDFmTmxAGWOMMcbMiQ0oY4w
xxpg5sQFljDHGGDMnNqCMMcYYY+bEBpQxxhhjzJzYgDLGGGOMmRMbUMYYY4wxc2IDyhhjjDFmTmxAGWOMMcbMiQ0oY4wxxpg5sQFljDHGGDMnNqCMMcYYY+ZEEdG2DOaQkfRp4GRj00ngyZbEWXRcN7Nx3eyP62c2rpvZVHXzZETc1rYwZjY2oAySvhoRL25bjkXEdTMb183+uH5m47qZjevmysGf8Iwxxhhj5sQGlDHGGGPMnNiAMgDvb1uABcZ1MxvXzf64fmbjupmN6+YKwT5QxhhjjDFz4h4oY4wxxpg5sQFlAJD0V5K+KekBSXdJuqptmRYFSW+Q9JCkoSSPjgEk3SbpYUmPSLqjbXkWBUkflHRG0oNty7JoSLpR0uckncrX0zvalmlRkLQh6cuSvp7r5r1ty2Qujw0oU3Ev8IKI+EXgW8C7WpZnkXgQ+G3gC20LsghIKoG/A14N3AK8SdIt7Uq1MPwz4Ng90+kDfxgRvwC8FHibz5uaHeCVEfFLwK3AbZJe2q5I5nLYgDIARMRnIqKfV/8TuKFNeRaJiDgVEQ+3LccC8RLgkYj4TkT0gI8Bt7cs00IQEV8AnmpbjkUkIh6PiK/l5aeBU8D17Uq1GETiQl7t5p8dlBccG1BmGr8L/HvbQpiF5Xrge4310/hBaOZA0vOBFwJfalmUhUFSKel+4Axwb0S4bhacTtsCmMND0meBa6fsendEfDKneTepq/3Dhylb2xykbkyNpmzz27I5EJKOAR8H3hkR59uWZ1GIiAFwa/Y/vUvSCyLCvnQLjA2oFSIiXrXffklvAV4L/GqsWHyLy9WNGeM0cGNj/QbgsZZkMVcQkrok4+nDEfGJtuVZRCLirKTPk3zpbEAtMP6EZ4A0qgr4E+B1EXGpbXnMQvMV4GZJN0laA94IfKplmcyCI0nAB4BTEfG+tuVZJCRdU418lrQJvAr4ZqtCmctiA8pU/C1wHLhX0v2S/qFtgRYFSa+XdBr4FeDfJN3TtkxtkgcbvB24h+QIfGdEPNSuVIuBpI8CXwR+XtJpSW9tW6YF4mXAm4FX5nvM/ZJe07ZQC8J1wOckPUB6Qbk3Iu5uWSZzGRyJ3BhjjDFmTtwDZYwxxhgzJzagjDHGGGPmxAaUMcYYY8yc2IAyxhhjjJkTG1DGGGOMMXNiA8oYY4wxZk5sQBljjDHGzIkNKGOMMcaYOfl/XlUVePq40+cAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "mel_after = tf.reshape(mel_after, [-1, 80]).numpy()\n", + "fig = plt.figure(figsize=(10, 8))\n", + "ax1 = fig.add_subplot(311)\n", + "ax1.set_title(f'Predicted Mel-after-Spectrogram')\n", + "im = ax1.imshow(np.rot90(mel_after), aspect='auto', interpolation='none')\n", + "fig.colorbar(mappable=im, shrink=0.65, orientation='horizontal', ax=ax1)\n", + "plt.show()\n", + "plt.close()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Let inference other input to check dynamic shape" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"The Commission further recommends that the Secret Service coordinate its planning as closely as possible with all of the Federal agencies from which it receives information.\"\n", + "input_ids = processor.text_to_sequence(input_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "mel_before, mel_after, duration_outputs, _, _ = fastspeech2.inference(\n", + " input_ids=tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0),\n", + " speaker_ids=tf.convert_to_tensor([0], dtype=tf.int32),\n", + " speed_ratios=tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + " f0_ratios =tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + " energy_ratios =tf.convert_to_tensor([1.0], dtype=tf.float32)\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlcAAACuCAYAAAAbOSh8AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9ebR9W3bXh33mavbe59x7f91rqlcVSEiAZBrRKjQSNtgGrAHDiQ0kJnQxOCMGO45pBoM0jk3AIzEYD5LQJARDAJsM09ixiWlkCKK1MRAaIQk1VSpV1Wt+zW3OObtZa838Mdfa59zfe6V6qioKEGeNcd/73XvO2Wfv1c75nd/5naKqnNu5ndu5ndu5ndu5ndsXp7l/2Ddwbud2bud2bud2buf2A6mdjatzO7dzO7dzO7dzO7cvYjsbV+d2bud2bud2bud2bl/Edjauzu3czu3czu3czu3cvojtbFyd27md27md27md27l9EdvZuDq3czu3czu3czu3c/sitrNxdW7n9o9wE5HfKyL/Xv33TxGRb/0Sfa+KyFf8A7z+x+p3hM/z818lIn9dRG5F5Fd9se/v3M7t3M7tC2ln4+rczu0LbCLy3SJyEJE7EXlDRP4fInL5xf4eVf3zqvpV7+F+frGIfPMX+/tPrv9nq2H0I1/6+x+rf/+Gf1DffdJ+DfBnVfVKVf+jOgY//Yv5BSLyy0Tk71UD7g0R+S9F5OqL+R0vfd8XZHCe27md2z867Wxcndu5fXHaN6rqJfC1wI8DfsPLb/gBdmh+G/A/bb+IyCvATwTe+hJ9/0eBv/PFuJBYcy/97euB/wPwC1T1CvhhwB/+YnzfF9K+mHPo3Z773M7t3L447bywzu3cvohNVb8X+BPA18AaXvtfiMi3A99e//YviMjfEJEXIvIXReRHtM+LyI8Wkf++oiX/KTCcvPYNIvLJk98/IiJ/RETeEpGnIvLbReSHAb8D+LqKpL2o7+1F5P8kIp+oKMzvEJHNybV+tYh8WkQ+JSK/9D086h8Afp6I+Pr7LwD+KDCfXNOJyK8Tke+o9/eHReTJe+lHEflyEfmm+rm3ReQPiMij+to3AT8N+O31Gf8Q8GXAf1F//zX1fT+x9u8LEfmbp4haRd9+o4j8BWAP/OCXbuHHAX9JVf86gKo+U9X/WFVv6+d/b+3DP1XH6s+JyEdPrv9D62vPRORbReRfPnltIyL/gYh8XESuReSb61j8f+tbXtTn+LqKQv4FEfmtIvIM+N+JyEMR+X113D8uIr+hGUki4uu13xaR7xKRf/0UDXu35xaRXyIi31Kf4ztF5Fec3Os3iMgnReTXiMibdY78XBH5WSLybfX5fv17GdNzO7d/opqqnn/OP+efL+AH+G7gp9d/fwRDVP7d+rsCfwp4AmwwZOtN4CcAHvhF9fM90AEfB/6XQAT+R8AC/Hv1Wt8AfLL+2wN/E/itwAVmhP3k+tovBr75pXv8D4H/vN7HFfBfAL+pvvbPA29gBuEF8AfrfX/FZ3nePwv8z4A/CfzM+re/Cnwd8EngG+rf/k3gLwMfrs/3O4E/VF/7WP2O8Fm+4yuAn1E/9xpmePyHL9/Du41B/f1DwFPgZ2FO5M+ov7928vlPAF8NBCC+9P0/BTgA/w7wk4D+pdd/L3AL/NR6j7+t9Xntw+8Bfkm99tcCbwNfXV//P9fv/1Adx/9BvcY7+qSOZQJ+Zb3WBvh9wB+v4/gxDEX8ZfX9/xrwd2ufPwb+9Ok13+25gZ8NfDkgwNdjRtfXnsy5BPxv6nv/VQyd/IP1+78aGIEf/A97HZ5/zj//KP38Q7+B88/55x/3n3qw3wEvMOPo/wJs6msK/NMn7/2/Ug2vk799az3UfirwKUBOXvuLvLtx9XX1kHuHccJLxlU9NHfAl5/87euA76r//j3Abz557St5b8bVvwL8IeCrgG+rr50aV98C/DMnn/sAZiyGdzMkPkcf/1zgr798Dy+Nwalx9WuB3//SNf5
r4BedfP5//zm+82diRuiLOr6/BfD1td8L/Ccn770EMmZc/zzgz790rd8J/G8xQ+8A/Mh3+b539Ekdy0+c/O6BCfjhJ3/7FRj/DOCbgF9x8tpP553G1ed67j8G/Bsnc+5w8txX9Xo/4eT9fw34uf+w1+H55/zzj9LPDyQOyLmd2z/M9nNV9U9/lte+5+TfHwV+kYj8ypO/dcAHsUPre1X1tJr6xz/LNT8CfFxV03u4t9eALfDXRKT9TbCDmvrdf+09fOfL7Y8A/wGGCP3+d3n9o8AfFZFy8rcMvO/0TSLyU7BQKtgzfbWIvA78RxiCdIUZJc/f43217/6XROQbT/4Wgf/m5Pd1XETk7uTvP1xVP6GqfwL4EzXk9tOA/xdmCP/Olz+vqnc1bPfB+t0/oYVkawtYH72KoYzf8f14ltP58ypHhLO1j2MoGPX7T99/+u93/ZuI/EzM8PtKrJ+3wN86ectTVc3134f6/zdOXj9gxuW5ndu51XY2rs7t3P7Bt1Nj6XuA36iqv/HlN4mRqD8kInJiYH0Z734Qfw/wZSIS3sXA0pd+fxs7AL9ajRP2cvs0Zqy19mWf/VFOvkR1LyJ/AvifY2Gld7vHX6qqf+HlF0TkYyfX+fO883D+Tdhz/AhVfSoiPxf47d/X7bzLd/9+Vf1X38tn1JIR3v1NqgX4M5Xr9TUnL619JpYd+gRDHr8H+HOq+jNevlY11Easv/7m53iGd/v72xj691Es/Ac2Xm1cP42FBN9xj+92PRHpgf8MS07446q6iMgfw4zvczu3c/s825nQfm7n9qVtvxv410TkJ4i1CxH52WIp/n8J47f8KhEJIvIvAj/+s1znr2IH6W+u1xhE5CfV194APiwiHazGwe8GfmtFhBCRD4nIP1ff/4eBXywiP1xEthiK8V7brwe+XlW/+11e+x3Ab2xEbxF5TUR+znu87hU11CoiHwJ+9ed4/xvcJ6X/P4FvFJF/rpK8h0rO/vBn+fy9JiI/R0R+vog8ruP047HQ7V8+edvPEpGfXPv53wX+iqp+D/D/Br5SRH6hiMT68+NE5IfVsfg9wG8RkQ/We/u6auS8BRTeSa5fW0WQ/jDWr1e1b/+t+rzU1/6NOr6PsPDo99U6jO/1FpAqivXPvpc+OrdzO7fP3s7G1bmd25ewqep/h5GCfzsW5vr7GK8GVZ2Bf7H+/hzj7vyRz3KdDHwjRvz+BMZ1+nn15W/CSPWfEZG3699+bf2uvywiNxjR+avqtf4ERnj/pvqeb/p+PM+nVPWzaWr9NoxE/ydF5BYzTH7Ce7z0v4MRwa+B/5LP0g8n7TcBv0EsM/DfrkbOz8GMv7cwNOlX8973vOfYOH07cIMZL/9HVf0DJ+/5g5gh+gz4McD/BEAto/CfBX4+hmR9Bvj3MSMG4N/Gwm7/bf3svw84Vd0DvxH4C/U5fuJnubdfiXHovhP45nofv6e+9ruxRIP/H/DXgf8KM9jzOy+z3uuvwoyy58D/GBuzczu3c/sCmtynd5zbuZ3buZ3b52oi8nux5IJ36Jn9o9QqEvU7VPWjn/PN53Zu5/ZFa2fk6tzO7dzO7QdIE9PQ+lk1rPwhDFn7o/+w7+vczu2ftHY2rs7t3M7t3H7gNMFCqs+xsOC3YBpV53Zu5/YlbF9QWFBE/nmMV+GB/5uq/uYv1o2d27md27md27md27n949g+b+NKrOzFt2HKx5/EyJm/QFX/7vf5wXM7t3M7t3M7t3M7tx/A7QsJC/544O+r6nfWLKf/BMvOObdzO7dzO7dzO7dz+ye2fSEioh/ivtLvJ/kcadZ+e6Hx0RNwIAn8DFKU+YFAKDhnKFpZHJIFt4AUU7zLFwr1dVS4GCY6yTgpCPaeQ45MOZBnDypIri8IaFRjI6zSePWFRXAJJINLgNrf1YM6+/4S7O0ug0tqH1WQE9RPRSjRLi5qzyXZXp8eOxBFsuAPdg0pun4uD0Lp6jWL9Y1o/a7C+t53a8ulo2wL227Gi+K
k9qEKu6WjJIckWa9t17NnQZVVO7t+xbF79Cg12P4v9n0IuAXUH/smHBTJkLZC6Y99Lb7wqB/pJOE5Eequg3aTN0zZU1RQFUpyUI5jJ8X6gnrPKpAu6msZ/EViExa2fkZQBEjqeDZfkLMz1aAi6/ul2Djf0wwXbKwUlleVB/3IpR9xKIJyKB3XacO8hPv9UeqH1cbM5ePYrd3o6k/rjwxhUutfua/T2N6jcnzmdezF5goOShRyd5yz+WFhE2cGnwhSmErg+rCx+1O7Pxy2fkStf5P1sTobR5u0pzfTntHeF0YoHkoE7ayv3CRIgdwdn03q/JIMeYBHD3dEl3Aoi3qeHi7rfR3HxXTi1dZs/ay6Y19AvXcEN9d77gsPh5EomSgJEViK51Aid+OwrtH1munYp6fzT+xryRfKKxd3BCkkddylnmmJkFr/1Q7JUtdOvcz6PXWvcjC8OrJxC52YtmvG8WzesiQPuT5UsXvwM+uatGvVNfny/uKF3Al5Y6+55Ti31rmS295z//6k6Lrd5U6sb/Xk3k/mqIaTZ/H1uV3bRE/mReu/Re5dS4rdm5zsH2091KvUX95lTxOxvb4XNLw0B9oHtV0f8lYRX9Ds7D4AN9f1nZX5SgjbxJNuR6AgoijCXAJjCYw5kma/jquc7BGnY6Iecg/D5UTnMlEyCiT17FMkZY9mud83Tu2aWY77zlLH9+TR23hoOHnOAvEAaQDZZLZxpqhwWDo0OdsbT/YxOd0n6vyen0DoMpdhIkjBScGfbOiKcJ02TClQihz3iTo33XK8fu6h9Mp2mAjOrlMQbueesri1707HRtteEXSdK6jYuT/VMzCfdITY/C7RPrue13VfWNdhVDa9nXWtw4sKWR3TFI/jeHKmr3NxnY8nE/Llpu/4B7f7T7+tqq+9y7u/z/aFGFfvpuD7jtsVkV8O/HKA8PAxX/4L/y1KhO4aLj6TCfvCx79R8I9mLrYTCty+cUl4Hrj8HiHuFBV4+qOLdbgCQfmnf8S38OHNcy69bbCLev7u3Qf5thev8anvfQKzI9x624w9LK8tSDiudvGKZsG/0dM/E7prZXim+Fnt/VtH2kDcKYfXHG6BeKtsnmYk6zsMqBId0yMPCn5W3KLEXUKd8PGfGeszC6/8baW7yfjRZGfUC3cf6hgfC3Gv+An664yblXDI9dC/PwkB2xBF+PTXDaQfdcdP/dh38Cju6V2iqLAvHX/5jY/x1rMr9GmPm4V4J/gJwh66a3vWMJmhJ20SluOG7JZii7YAqmhwvPFjB0qER38/kzaOuw8JfobH35oI+8ybX9tz9+UJvG3Km0cj3/jlf5sPd8/Zuml9jCiJguPPvfgqvvv2CWMK7KeOm7cucTtPuBXCQfCjLRA/qs0FJ7z5dRk3Obprx2tf92l+1Cuf5GsvP84gC04KbyyP+IOf+LE8u7lgmQK6C7jJEa8dfoTN29bPbtF1A497G8vrX3LLz/7o3+EnXX0bgyx4lL83fYD/z1tfw7d85n1oEXJyqAo62WEpi9C/7elewObtgl+qsSaQBiH1sh4YYac8+Pi8jn2djranRXccg6K4pLhUUCf2I/aZ3QciL74K4o0wPFX2P+OOH/nB7+WrLt/g1XDHJ+fH/JFv+VGkMSAHjyxC2WT8ZcKHzHzXEZ5Gwl7Ig5I7RYOiUZFiG5mfjptsdys8/M7M+Nixf78wvp7xO8fDv28b5e6D9hzDU6W7U/ykiCpv/cjAv/Bz/hIfG97myh14Iz3kd//tn0QaI5oFuQtm622zGWezw+0dcSfkaqDLAlIErcbN8LYwP4b5Kw787B/6t/lQ/4KPdm8zuIXvXR7zd3Yf4k9/x1fadySB2eH3js2bzub9ja5j0wy5+YHw9OsWfvGP+Ys8CTv2peOvPP8Yf/tTH2B5NiCLQ3vz8sKNp3shdLdAAb8ofrTxCpMyPnR8xS/9Vr7i4i0+3D3DibIvHX/se38U3/v2I9J1h8yOcDBD4vITtu7dorh
kh044lHoYl3VHzb3j8FrgxQ+x+dK9EJYr1sMqjHVNL3atdYeu+5Fktb3mA57SCX607wNYtkK6gLSB6ZVCOAjhVhhfL+hVwnXZzrgiaHJ2iPaZkhzDx3vc1A5MW6fbt8u9PUWSrvP8XmsGgavroDo486PA+NAxP5S6TuygTltbKP1z+9jux+95cHng+maL+9QAwOYNYfOWMjzPfPKnOb7sn/o0/8qH/wqvhDuiJLI6vmd5hW/dv5+/8+IDfPdnXrFnugu4SeieO8IB/MH2fr+YkXb9lcqP/Ynfxke3z/hw95yMcJ22/KWnP4g3bq+4fn6BLtWZ6wouFso+EJ4HuhdCvIOLN4qdHeV4wO9f9ZRQjecBDu+z15/8LeXZ1whPfuRb/PjXP84hd/yF7/lBHJ5tCC8CfoT+qZ2P4aDVYYNwsDnz8f+h8qEPP+Mb3v/tPAk7Lv3IA3dYu37UyJ98+tV85/UrXO82TLsOnR0yOtwsbN5w9M+V7lZ58RWO8YdMfP0P/TZe7e649BP70vHffOqH8PbbV8jzDjcLboLtG4I/KOlC2H9A13NXsyAHT3xh+0Z/U+iu0zq/NQj71wLjE8dyCYf3F1tzXiEJ3VOPekgfmviJX/5dXMWRKIWCcMiR26Xnr3/8I+h1hxuF7oWt981bds75ds5lW1dmgNk+tZ5xcDR+TwCNP/MX/9fvtRzYvfaFGFef5H5phQ9jgnn3mqr+LuB3AQwf+oj6uhDtYeubHDhRvCuomnd+ijK5DPHa4ZJ5odOTQu8TvaTquWaKOhxKdAWJBVUonblj6q2jtNQLtmvr0T5cPcf2nUnxo6Fnbrb79fNx0yre7gUv5l15QbIaurXYgOXOgRhK4EQNLSgFdVBC+yLBJWX7ljI8Tfip4OZqeDlBg6MEdx8NOTFrNUAImYswcemn1dDMOC66mZth4bAJFO9IxVVEzu6pxOr5FvPG2+Sz/9uBvhqQagugRNvowBZy/8IZoiL2uqEg9acaIQAZWe/Lfnd16A1ty8WRi6uePbgsq1emcvRizSu034s/doqjEMX6LUoiukKMmZIdySnqqwHhhRzFjMUT5KgEs1u7kIkuk9WRcXjJOFGGsNj1ilCKY6209i7zp/gj0NGQKpfsnv2ilOjuzW/Vurn2Zkz6XKAoJQgqDg1uRRva+JeolE4onaAKqTiW4skInoIPmRwc6mwO4sC5ig4LqFNyZ/cqDYGYxdbYAmFvHqBb7KBpDqRk6J554g66u4JblOGZOR/bNxLhkHFLYbmySVLUxr3U8e77RCnOEMqg9kjRNgLNAk4NDcXWXf/M0KqGFrlFWa6s45J6sh6ZDVEynUt0XbbvEJt/pVNKRaJXNLQ5EdkMdvG2UY8ayOrofCbGzOJ13WilyDrGkqsxtNhesa57tWeeSuC2DETJ7LNph8rJIpZs8/gUmaTO8xIrqign614g7gubt7yt2YqSDk+V/qYesPvqjFVjvAQ5zjFnazOMwKjEvfV5GqQade3ZdEX73mENNUSm2HVF7B7iDvrnrY/MGcaJHYxF1lNGT9FixxEtacvQ234T9oVBwWWHCrg6RsvW9p5wsGiH94UuZHzI616QO0N10yBoVHqfGNzCIIsZVzgGWbj0ExdxtnUClKCUXPe3BZyvRp1WRKUvXPiZh/7AlT8wa2Dxnss4cdv33HUDWdSQJafmA0vbX+2neBARe5760FLsPBxeZEoUlitHqaiNehhCYuMXijq8LxCUUp2g0kOuCHS7lmsIWt2AfEWtOkm4euDaWVnY+IXOZ5yz85IiuFns3Eu2VkqwM0aCvX/rZ7benOQhJEKXWTqDbKX2n9QIwzuglhNkvnjI3XF+awUQuxsl7kC9Y3kg5E3BzULYmfEJEFymd4m+eQfAlAM+2L0UnBniCMsFdR+tZ0pW1Bv655zeM6ZOwLB1XcoXkPD3hRhX/y3wQ0TkB2F1rX4+pu77fTaD6I9eEw7wWifk/QexA9TeEvZCd20bWR4Eh+L
vQZ1QEEQU55Xs7UAtkQq1Nhwb+0PtND0NPVT40MIgum6WLimhbkjN67WFczTW1NvB6SbFz4USnb0udY+qxoLWzxlKcfz+eFD6pyPusKDRo9FTOns6DQ5tYaSXwknqwDu9F3Ir9YCNLhNDZuyKRWGS7YgqasiE2MJ02Q5bKdUILIK4k/Fy1RgJNsFzr+ROCKMSDnbQW4hQ7GD0aj/Z7tMOm2ghLXUVoj7daWFaAsvij384mej3/rZOJOuHokJSz6KBsXmDpb//uRODTL1tFqUesu26Wt8TvBlWt2XDKJFOMvvS1bAldlhnMUPgNLxV2njUvnLVOMkgi1a0U+shJ+vG2/o4d2b0hUPBzRWtCjXUfGKMu1T7ulM7ELyQk2efOu5yz2Xu2ZfOkLUsNpblPvi5hgDrYakVxfGjIVZuhnjX0Msazqgfa6+FQ0XWsiE3YSz0zyfcmKAU0kVAvYUC97nHo6uRsQ5nXe9tWp+Or1sg7ITNW8XWYmpIoFsXbK6G26weCuxLRyoe58q6zC2sVce8OQZiDg25bg1ecEHZ5w6w8OKcT+biyfxT6hyq62gN+1YDuQTW0PxUIguBpZ0c7XlPDKZ3NKlhu2bF1T1JqxPWPzejOG9sXIcXyubNhXA3m2ETHCV6NAjgK+pTESCx+eUXM2By58id3DtU7t2X3P+bcDQK1j9niHfK1Scn1Am5c6StOxpYKy8DxOmJg9D2Rl3/Te1DtxT8WPCdGYQt7OWShaBdUuaHZlz56jDkZkR6anhJUF/McVNhVnM8sjpGjSzqzbFzSnG2X2kslF4oSSgJSg1B5wG0LzyKe678uD677WWKF0VcQSq6vA5ydeqk/rGt9+ZwmoMHrkDcmYUkSweBFam276mGmOi6ds1Bh9JBSdWowvaDZiQrkNVR1DFrIGomq1BwjCVSqPt9NZQVjI6T6n5W950SldglOpdWBxbqPBe9F1YuHtwJIk8RO7teml/rPtjO4XpWdruCH5USPJKFyVdwIkMWsxOilBVUgaOD7r2ytHO/U3KxEDOq9kyitML1Tiy0Kdr2nrYO7MxvjoDo/fn+/Wmft3GlqklE/nXgv8ZYE79HVf/O530n53Zu53Zu53Zu53ZuPwDaF4Jcoar/FVa76r2935nn6NSs4RLFLENfCCETfTErvYZwSqxE74pyhdEQJEmOUr2QfAIBNAuWao0biZf7hMiXDVF3RIJaLFxyhf1rqAY1PkE4VFjVywr3Hh/OvDg/FfyYgEDuj+EfdVqtcwtT3kOxghxDXCmjXUC9VGLziWUPKz/htE/llMh+8oDBFYLPKylVg1bE5gS+9dVJqTiuVm9zDZFWZE2wUEPeKPmikDZuJZibx2JolwaQPuNCIWugi8fQrUdrmM08Dy8W3nXVy1JlHXv1FsJTx4pyrl6G1NDHu4ALWcXmBBYGkxMPskQbW/Pk5Zic0KZGgehsUBf1hoxIYCpxJdznLIZUNWLyCax9mgSxTq+kuNnQTElKHtw9xLONYXu+sM+Eu5kSPUiw0PLJ/VFJ01rRQvVQijDlwCF3TBqYS1i9sVNielFDstZ5kyxMgbe54GYLg/saBj8NaTZSeDgYUuHS/XkpBWTKyJxgDWuy9uWoJwjO6X21cec418DI8vEOhmcZP5dKtnakgTUkVdTVUF7HosWQInUnyADvGJ8SWZG6xnGy6xnKNpbIVALlJNx4umfYNWxuFioXTAWtfVSioeo2F4+TwbtyL3+hzZWV+3UyZ7SGzESOoWutHMt4KIgKeWN/97PiDwm3X4yiAIh3K2/vNPS9vn9S3Fwo/qW9pK339oeKoInTlZMsxdaqc0rxdtNugfjWHoBy2ZG2m/t9d4JWyUtwtJ7uZ3UrN8TV5lj2QhpO1plrqOEJAtQ+rifjfBISLTj7UbciOetn2tvqnlKCRTtyJ/hgY1kCSGehKCfFyNPF0K9UHLlugnpKCm/P3lCdE1rD8WXbw0o9BySVYx+8hFo5KRUhUztLWqQgsp4
frmbCKBUl/CytUR7yKSLW0LtgIXQLLR/v/TQ6chotsvCnWog/YGh7o4bAEdk/Qe/Weebba0c02E+F7sXMNvYgjnQpawSrRNakt0YraX3jpNh+HwqaDUGWila7ZOu0eBtPLXZOOlVL2Cm6gsRtbzq9/c+3fUHG1fe7OVguwEeDf8PBNibXZfqYGIKRsV2XKZ2vGVGyboolgk5mqJyGBWPNygkuE+RoXmiFLBHW0COwbhh6klVT/2wdW3lGjUicByEfOGZxna7pdsmaSeiWYovkdGNv9t9LiwyOm8F86Rhf2xCuOpbLQO6O0OpKDm1ZP+sNU5+70DvjFqycK3Fsw0wX8jFMGtqmX+FSKhH1xJCyn2PIA72/Iaoz+DxtjftRTu9T7TMuGN8J4OFmZOtnYjWqijoLWYrd76OwZxMs7iSC8eXq4nDtAFq/+6QfvX1X9MazuXBTfX4jyl92E4clkmImRW8GYG+LvEWnysz9EI3AJiw8DHuu3IgXu89d6LmKEyHYM+VQUHVQydanXKvT+wXWQ8IfMpIK6gJ58HVe1L7jGH52s80fCUYcbgZl48LhLCOTvqDewj5ahFwcRWXlHTl3Ot9r/xWxA67OR1+NqSPxuWb7nGTa+PlIwJVivw/Xeb3/RggVBY2O4iI4R7r0trGJEiUzSGJxM8E1spPQ9kc54YGdGqhuMaPUzaUaMwUVm3fOZx6EA1s301XuZe8WohTLaAqZnI6nVFuDWg0z40vpGmYVZ6FqX7l7tlnX+zqhK2gwrlsLV51ypdraTuqYSlifHSDc88RO5rTYxr/uLY0Dosd+OIaPFT8q4JizTbriQTtH2UbUGz8v9646JrWP6/xsYWnjTHpKL0daRNvTaujcDj/FxbLO+1IslCNOcd7iRqWD5RKWV7f4MR2N7boffLbTaXUakXsG2Pp61urMOpaNJYO0zLpwODn9YDVa9V3GwokyiHGuHGa8vBbM2H+7vyQE41G2bNp2zrj5ZN14cLGw9bNdq8bJF/UM3vidPhgwUGr4U1sm7KkT0cKkbV2f9rvqfWevORl1DhURgi+WiHW6D9afe4waAfHKEBJXfuSh33PlD3RilIdFMwMLT7o9z7oLdnPHGAoZCzejjjTUjOJke473xrnq3cLWTTgpPOhHnvUb5i6aDdW5GhY8GQ9qH5xkYLa/nWa1nhrPAPE2sfGBtPGU7rh+fciEmrEZXV6zkKMUvC+r8Yk0Ckbdl+XelDn21+n/21mr3CO0f77tS2tcoaQLJQ8QvJHU3AKxS1z2M5fdRCqOEDNzVIsnV3QlbZXxicXSS6/07jjQURKLZmIj7FVOjKWb116t6I3UmPV68Jykf7vFMlsagTJtbPNKG/Aj5MEdM3neRSbBFodSNsE82KWsHgCVgL1ulDV03Tyj8RVhfhhBI2l7esBYxkN3Z9yTlqmo9zYXZaj9MUhb+IGLMK9ITIupq1OkemhukXrAHzMp1myW+oxGnK3ZFUWIdzZlcof1TwC/HJ/JLeZ5arAFaZwHZ8hFDhRkPXC8KFs/EyQzjpE0BXQx6Qi3yFHeoFBlMNqGdEQbnRw9qiiJKzeSveNBN/J83Kyezrp41kV0/Hd7XZ0QvRkCXszfvZCZ18MND+Ph/uIUzOtZjARqSQ+N4Gz8OxTj4E0ZNyVkKfiuHnxeXhKZsxMwbTy531A6Sz6w5Ag74EvnSBvLpnExg0S77+SYsmeXu7rR2IGhi8Mt1pdl4ORQZE1XPlkGlShv/3cJNAHiKueQVXqjBJtMDb1CIEdh/6Ht+u/5ygjFqfjK7SjroaSlrs9cUVynKwrRaBzGdRGWK48fna2l+rp66LrMK3HHq+F2NYR3rqd3y4oyrpt6S4evBHS/HO89d67KiRh/y6nWQ9KZlEeSY7agVOMqVdR0uT+XVOz79qnjNg1MJVRjL5tETPK40eEmwU22j2mo+0vlVLmKfp8ezHJySFkGoVsNu+XCsX+9Q7Q7zqSGBlWD2CVzzgDmR0Lq3Wp
0ucR6AFH3Bw2WTEEshJgJoVSqp0OLjZX39rflshkkA/FOVy6eJHDz0ZD4rO2lF0Wpxqs3A2ssuORI1YjTUDtEbN+Lzg7V1Yg8XU7Y/tCI3M0Zf+T37ErPk7gzdFBtjlCOsjw2X4795lxh6+bVgfN1X70IM9HntgzutypjYrzFo/HUDAxzmo1PFMaMm/L97y/tGfMRqTndv9Y5rete7ZKhlc4pm7DwOOx45PcMbl4RKyeFSOb1eMvb3SVP/fZ4z74iUNXAbvO6q4ba1s3r/n0ZJnv2tnmcnKUm4/FSp1Qn0S/HjPqV41STp8ZHnmU7rPO7f27OT9oI80Mb+kZm7+s+vain9zZQWqMKpxIY6/nRklAqUOGWNh4npPbSfv/HzLiSikiUbd3YeqFMyqZfeNCPPIgjc/H0/cLcR0rnKYuAh/QwUwbH8lBID3I9VN/ZAUkdZfEwuxrmsI0wnTo7ymqdtAnqZ9Zsm9I5lkvH+MTScksHy5UwTp5tJSe7mu3ocrmXxqnRMT22TKn+2VJJ1McwV+6MNBwXS2/OgyNthP1Hj5kPxLq5Lg43OdximZIuCWGqxl0dfMmQsoUz+np4Nc/b9E0MoWsE7KZv1LLxXNI62akZUMeMQT/V1OFqXAEMb3V0L0ymYnlghqEfIfVCywQtu8iUHKHP7JfI0+WCfek45LgS2vexY9SdQevqWXYdMnrcYtkq8bZmldYMTdfkDWI1lmtYZ0wWtrspG5yYMTTIQpDCnDzz7NHJI4vDH9yatt6MoYYQgD1Dy27bVVJ8Q688hXkJpMVTZpNgcFOVizgI/XPLcul21mdutgUbDgmZC7IUJNsGGgqVrG5fbEaNI29g//5omU7O7u/iM4sZWALzQ8fhiZAu9dgHGRgdu6nj+bjleb8lqSdNATk4wt7uM13ZARFCYRbbVEo9j0vQlegPthmly+PBEPZGLF8uhfkK0sbjZ9i8VXBVT0i9kLZH+F4UypC5ST2929K7haUEpiVQRg/J4UbLeFK1FH9ZTK/ILUK6UEoPkgNhr/Q3BT9VtMMp0Weu/MjgFvwJKrSoX8dJT5CrUwNdsiVklFAzy7ZQiuPFslkdtF3qmKeI35uBuvS2jpujlrvaPxXlbeRfl9WMelEGv6xr8HbqyJOn29l8sRCiGQxgxnlDqUx+5Jile3pAzQ8C89Vx3oyvCOMrhhK2sPExs9Ku093oikAeXnHMjyFtLBt680YzWOoam+3apVekag9KhWZbwlFL7ADIjxe0z1y/3+PuAt0Lx+XHlW6nhCpv4qpcTXNEi3fkwSgTUvX2zCjA9sgLkyfwU0V9F63JOMeQLAVydmvYbEUm1nE259dkaWwtd5JxFDOyKsKZsyMnj8w290Idn7Cv+0Q6erIt49lTaiZxqWi8ybOUbA4NwfS3bI8Swl6J+6oFeJKlLEWPhsYhIWr7cJnEaATZnISleA45MidPWbxldqYqq3Ow+wxjPY8mM45ElM4lBpnXyM6osRLZLWPyyo/0LhndYfbm2C5uRa6lGiKIhQWbUbUv/RriL8VB7Ts/id3LQVc6zXrWVkqCZCGMNWFhzKv0TImOEgO3H61Ri2Rron+hq7qAWyypqKF5p+sebD7oYg6Rm+x+/AHCXtekGCnt7D4au+sZXg09qbSG79MpeA/tS2tcVftB+0I5GNyXOyEGC2X0PpHUEX3Gd8U8og4oEB9NhJhZ5kDANtF9PR2cWPz7kCP7JcLkzEOcrYNLUMhSx7pO7KpzFdLxEHeTTZ7x/R27DzjmhzC8CcuFMj+yDBV1gf6m0L9IuDnj5rwOQrqIjE8iu/d7OxCeY95uZ1BliaapI9nRXbNa7MsDePD+W4LPLNkzTZGcPJnq5TcvolANnhp6dKYxNS+BZ8tF9ZSPcOlt6tkvkTIGWAR/qEjGVI2XquUSJgs3SLawpssVwZtS9a5qupkI/YsBBF58pZCuChoLGj3LVvCLM6PlzlEWIXvl7tDzid0ThrAwpljHq/Aibrj
uNqZRMvfmOSYzrMJO6G4MLUGriJ2H6YEjDw3hsA3m5jDw5nTJp7tH7EO3bqBz8RymjuUQcXtfESZwi9RNz7xsk8wwpGW5svDavnS8sTwEYFd6Hvkdh9Ixj4EyeWQ04yre2b3GHWzfKmumX9MosjCuZXClh/1qpLopmTGRdA3flGjaancfFpYr87C6a2H7tkA16JetMD0Wcl/QchTHDHee/W7gWch8pn9ga2wfiDtHvDV5h/H9xldoHjfAclUonaIbky+RimppdixLDUtOjnBrRtr8AMbXCgTF7Y/r6/A+08taHuaVuyF706N4MZ94xcB0iMje1/RqR85K6qrxe+sIo+nljO/LEJS09YS9sLzljeuVbS04V1ZEYld6sgrXecNtGmycxmDCg9UwWT39uuamh2YM5h4z5JLw9nhJcAWHcj0O5LvAcGMyE8sjDPlOdpDkwbxzMw7sIPEVqbnebxBgE5d6ACv7sYfJxiPubD4vVzA9NkTdTYLL9VCfHLIv+H06kWoQlsvAzcc8yxaGZ/b3w+tKHtSQtWIOrCyyijC7ZOhW2Ct+hvFVZX4l464W0i4Sd2HNCPUTpFvjVS7VgFet4UCV9f+aj1pv28cHXn9wx5I914eBu6st3XVnIfu7isZNeaValM6TLxyHV+zo6W+yIb5zpmUTl2BSCjkKYdIVETRUQVaksGTHnD0puRUNOoZ7QRbHIUXeSlcMslmzlAe38CJv2aWeZfGUyZv8wCTEmzoGB4sWtPB4KY67PKzc0baPFXUs2ZwZnR0yO7Qr0Ntc8YemRWUGBQ151+rMXhgynC474+hWA8vPluE2psBd7g0JnQNMzjJ6p3qfeyXuLfvT9oOySpCElT8aGFV4Kz1gKpGMMEhiXzoWdUxLQCczrFx1QONd1fmazLj0rpAR7vLAvnSMJXKzDIxzrJ8R/F6It21ftTm1ZumfIG1+KoTRuKVAzYyOTI+E+SsOJu+QHXkMLG9G4o3QvbDxbwb1oh6vpYY5PVMOLLV/3Gj7iB+NHxr3StzlFTRwcwUNcrl3vklWKOWoLfkFoldfWuOq3WsoIN48kQBRtGrWCHMOpiNUThw2gc1m5uFmZM6e3dRVld1oG6xa2vMud4xzROY6AZfKKTl1/VosvIZ1mnaWCuStEYhvv8yxf79BzfHWkR5k4mMLWd7lDektj0tKBCOvZ/Nc50eB3Qc8h9eV/rmQBtOR0K5YPD8L8wOPWyAPHrcUU9selIuQKWqLOM2GjsjeQiLhYJa3P2T7vlSQUkDs+abF8/Z0wZ3vqnKtcTyejhfc7gfcztfFaAZGGKF7YUZVd2taRX7KFQYvuFSQ5ZScXI2rGCoBn6P6blfQ5NBgYplmBFadHoV5inzq7gGdzyuB0otyGwfulp4xRw5LxF8uZK+UOa7hCpcsjJo7Rx5geWChVfG68mCmMfJ0vOBT3UPucs8gFiqe18VmjH3Rloav+F7QuYbugCUKh9eF5YGS1HGdNtzKQFHhYTAZhptloOyDGRt78+7MuLINrrvNxqsqal5PLmhwjK/1zFeOZWteXdwVNm/OZnT5mswQLX19eiSMH8joRYKDkfHSxq2hWTMAldLbgd3CB36EaR+463qeDheG6u6dGX63dn9gYdpQYXwpkC8Lsk0Mm4UQ8nqA5uzQvhL4O08ikHtP3ih6mZBQyD6QhwCizA8LZWPXauH2prT/YtxQVNZ5WQ6BuHcrYd0tgjrzxsOdVAQV3NVC1yemPjJNHvWBfG2bt5H4zQj2FJZKln97ueJ6HihjQEZXtdr0qDZdld9LgOlhNarqnNDJ83zc4EUJrrAbO9zO092aMOb4Pod6JewcfqyJHX3d06Qe+NcVTR0jN66QijMiOzCPAZkd3Y0JM6ZBGDPQaSXV2va0En61rskloyLky47pcWD3oWJh0RsLG6fHCRksdFeyUBZvB2U2DTQpZgTmwZCOvCnQVY5Qn1FnxpU5V7BcCmlbjbXJszilBJszRYWyODOuaii
3v0xcxJnJBbIK00UgDR2ShbQxYyHcmkOo3pEuAodXAvsPNCfH4ZLxEQFEhFIlLbSj6geyaq4Vbwe0OjP6cnGUfAyZrUZ0MeNkv0SeLxdEd5QQiJK5ThvenC7Ji7fQb5LKMTSUJIxK2Jmj6SdPWRzPl23VkjPeaO8SU5Xt0NEjkxniueoTSkVfVCrpvKvagdo4tDU5aoDpYx1pa3zQlWenwpKNv9fCylIRcz/W+5wswhDq3qNO6jkipn2ngdti0gtvLg/YZ9vP7N4DL+Yt0xJWp8F0HWWVHrJkIiG6wlLM0Hu6XDCXwItxwzKb+KrfyyrREg8WVpamuaUtTFejJXM1cMZkocPOzpXlCh4+3BN8YVwCS8yMBXIXcIurY177o2bLFDUR0V3uSLPHTc64pJPti2ZYFfyhGEKVTZhZsiJLNmOqgQf1rJPcFuMXZmB9SY0rBZouBhzjyrmYB7JPHYcU2R06yj4Qx4p2qb0HuOd5N1iwZYGkYj+rEN7L/XKSzXGqvVGChSjnB5750rH/QEFfmdF9zdbqCw+vDvQh8b0fiKgLxJ0Zh37vcZrAOfaveXYfVJZHGfC1VIwgfbXQF0fuLQtSA2tWjCThZjcYMTk7yiEgi1s38rC3hR4OGZmW6sUV1Nkhn5NbDRXrD9MveXHYMB1MidvP1RCoMOlwbZ5UOByNKslVwDQVZEnIvJhhVSebOkfaVoG+qmXVCIS2IVQCYVP79mZQ3h16YsgnmS/KnD1L9owpUFR49dEdh23kZrkijxbmyDWrMW9gfgDTBy3s6UJBnalfptlzMw681V2yCz3bKvQ35lAFaZUylBoWFXSBsqv3MRt8vWyFwwcyus1MKfBsvrB+xMQge7ewTxGZLLQYdsYHkxpicitXrRpWqpQ+sFwFXnxFYHxFyVsl3An9c4+UDj9bnzbOz/RQGF9T3JOJGDMjHXnrWDauHhi2+aSrAkPB+apHVgz1kNmxzIEXh8Eye5rntrPNBWcCqUNMhnplgVCIfaKLlrG5ZH/MwBLDeCUUSlco0ZN7xQ/Z9pwq9inZ1N/pK8m9tHC7IZF3k6HLnbPvkMmt99bdGGqowa1Goq+HUdcnhm7B+8LSe+Yspt9U+YJZTSF7cYFJA0vxvFg27JMpoLvJQhFOZSXpg5UUKZ2QrpTc1wSMWZDJQqteFOcK09iZUvnOPF83VzL13sZ+udI1/Glog6A3JlJYZs8cQt2jLKxWJm9rsKLFUiA0JLmiRt2LWjVhX4yiMC22LoHDhy64+6AjP1wgm0FeOsVtTXVfBHIKK3q16q556+OmvVYi4NXCOclQCn+wZ1z3SJv8yOIozqpOaBEb28WM5qJAEaYlcDMNlqVbHN63bLsqUJrFRBsV8kVkfBwYXxWmx2b0ljeN9+mWSprv/ZHPGSGno3PQNPnW8kitiVpJpuokaw3RShbm5HmRtsZtrU52VuF2GXhzf2VOABZulJrs4hYocyujUgWAJ8/b0yW33p41SuEqjsYprCi6m5vwsRg4ULt0uWhJNK5yaWUNXS6XRg3Zv1/Jm0zYO8JdrejwUsafyDGTetXOasT3up+UwUKuWkz/b1/6VU3+M9ODlZrRu8Qudzw9bJmnaJpvmXU+uvkYxkQsmnSXe16kLU+nC8YcuT4M5NkRhJP1e6REvCPlroGwSg3BVeL/SUZsqmLSOTtEFL/J5CKkiyquWtsqTqz2nKnYMxt3ijUbeK10MVf0tJhRJVnvgQdyCiI04yq/pBT9/Wxf8mzB0quVUyCukGsqJquwTx27pWO5M68x7I815qbZMw+eXAzy3LiZrZsrgdUWSHAWR9dNocyybogrV6IRoVu2oLbUW5v8S3HMV0J5uLDZzoxFUB+QaGJ1fUi4y4XlYJwsvzh6L+AcpfPs3y8s75+J25k5b1bPzUUL46i2WPJJer6aTP/utjuulGQHUyNKr1mIdVJQakyYSkidHXdzvxIrlyqAuJ8iOvpqXDWI1JSeu+tkBMoW1mxwaDO
slgRLsgl2Yr2nrZA2thkRCi4o2dnGhtimqp2iQ+bycmSeA84dVdhVQUVI2bF4x5IdF93Chy9fMBfP39z35LuB5cKZgnh3rG01PBrxvpCSY/LBeEKzZz9Fnk9bdouFRB/GA1MOiCjSZ9RZLN7vrU/D3lSlTezVU3pwT2Zilzgsgbemy3UznnLg0k/slt7CqrNtIqhxVUon5GwSC3DklcwPA/tXPbdfnvCPZoZ+YRo77h519M8MthatgocBDq8K8/tnnlztzTvz0YTwBiFV2Yd0oeg244eE86XWzTKCqExCniwU6pwab+RgG2Q4GMK47Wc6n21utSQEuR8qXFtDGUNGB6HESNkom2EmJc9S/yYKskn4WMiLq1lSsoYF9mOHiNKHwaRSRitB1ErRqFfypipxV+JviUIXzbiKJZOC9e2yCUylp3TWxzdp4E769cB4Pm+4m3tksnHGYehIPShLAB2MZ9VK/oAZI250jGPEezOu8uTpRjn2H7IislJs7DWaOK8sfiXFSgZNQsmOZQnk1rfJsdZHTFoREisD5mf7jv7aUOR4PePGhBzmioB6du/3HN6vSJfRqTofg9IPSyVlC0n0RMSy7Xeg0Uj6iK1L31cScpZjWGm0fm8kZjebwZt7QXMd18JKFlaxz09jx3UV8wQL1+aaAZ07sTnRObIXpseR8YkRk3Ovq7PrUkURxJCd1MrxbI1nY1y2On41OQjHKjztnMJFshBm78m9kCuHbVoiz2poOlUHfcyRu7nn7bsLi2QEpWwKGi15oYSmZm8oiIqFxz+9f7ASpzufmUpgLuaQNGL6mgEo1KoQkC7NaGmOg+0TZijOD4XpiRJ/8C2XMfHi6SXqIks1Ln0VzXRiySjucmGZBbd44zcGOVb7UNuHpgcOF4olCmngLg88nS95a7xkzHF1vm+mgWe3F+TR48pRcLjNn9VhVDtT3pgecL0MPJ+2TClw2PewOPJgzhxy5LuZkXsEUk6z0S2T8ySapIZkuRkO+/6YdCNKiAndwHLlj/uV2HnSxhPMYRdnRnZRQzxLrUayGuIKnJRzu2dYtd8bWlUNLf0CkCv3ud9ybud2bud2bud2bud2bu+1fWnDgg70InNxMbHv+9WqVLX4sMPgfqoEv2WIHT8voqv8xGWYeOj3VYYh4KTqcMREvJxZUo8fKwGzpoSK06OMiDf8sNS0UxUqbCgrfOm6vGZeHObIko1vIMJRj6c6YLn3TE8KmwcjQ7fw/KIjRxParBQKJMuarebHYpyKrPgxmmcremLhV7G4YBBx6Q0dc5OrIY4aFswKs2M3R6L3SEWIwPhOMjvzkkdW1CrsLVPDzRZzxi5XHxrw9TnbuDXr3Tty35Ckgu+t/lrpXa3DZUiT9oXucuaHvvom19OG3dIZj2XuyEXwTtnEhctuYgiBx/2ej2yfM5XAd16+wouhJ21AerteI0KmxRNCpu8SU4PMk7AsnsMSWZzn6XTBIUfupt5QBa/oweF3zhCjvRVQbRypEm0wm+DoOEeeHbb0VXOtqPDWfMX1NBjptWpBlc5CTJYIIyzbkxqLzgqyHl4T3IOFrkt4X3j4YMcuJtL2yvgWNRxdOiOKP3h1xxATt2NvyJg/ZqU1mQTxSoy1Hliby8k845LEyL0CXSXF+oaQ+rLynlo2Tav96FsdMnecBKWut65LpFDIw4AOmavNZOWM+sC0HVAHm8sJ7wu7W+sQpWbHOmGZA1Mo3M01w3eyRAo/Gt8PYHzsVq6JZOPaBJ+JrpCLhQeG3mo77m+jcTWLM0KyWkg8qWe3dNyOfS34bULFKiccFlfDgr0ea2BS94YCefFoMakSZltnxmmp2jlaS5I0XlTl6r6jVcFWspgeWvtzOEFfKsrVX5eV49PdLMhc8LcTMi8Wlq/rbn4o5ItsGZC1tt9aVUdsrMTV+R4q5aGFZqRq3nqQwdbsPIWa4XVMiy+xojRJVl7TKmFDfeZUEYl23dmx9J7sbJxEsNBVpQioWHagOOO
ZWbhPj1nB+2wZtUu2sl+txNagpK1WHpFxmUyEupbuqm1ajIvku4y4Qho6YldFkhfjZL6YNnUfs4zHKQdup45pjOtZoH1Bg7A8pO61AsXjp8qtW4Rnu62dLz4bAoyhYSk7mo7TPaSmalAVb6+tJWJqZmfJwnKhpMeJ1y/3DCGxZM9uriWMPEdx6Ap5dX1ieiSkg6uczVaDFASTKkpb41cOPhlPKvXcpJ7d0jPlsCJXu6ljnoLRJZpMUN3DSqjPUtHFMRmifzv3lqG9BPLeeITaF/KmcfyO2eVHFFUrKtmEtNuCKCZivGScq5zA2dv6D3mV0fGhsGwKkoQgSqii1AsWFmzi4eKNj5q9kO8sytBq6baIFZXzdhqNMRmI0/VbaTD/OBHa1UHczlwOE7uW/u1hTsa9cVHpfUY2mbL3lnILoBBj5rKbuaNjyVYXKkrCi1Io9zY5HwpLNDhdq/pq4w1QjteE4wblkpHfVCzTadkEYszk3jataQ7sU4/ugmU0LVqzIfSe1lXLqEGOYYLS+ApFjmmubeDaRPOKtAK2i2k858Eg/fmBEA6eNcNkTiuM7hL4g+MwdaSQLaujGlelan642fhFfjqqM68x5tac9Y+4ygvwHrw7QqQA0sjsCpvMMCxsusWIpUNv9biCcbG6LvH+4QaAu6Vjyp4pNU6PbUyDX3jQjTyMh1UDqQvZFnqsXx2VUDfi+UXHbvZsH4z2XIvgJkulXiqpdc6ezjlLW549moRw7Ym3wuZNpbuzWmxuzjU70jIll10k947YJyOoctzQnrott4feQoILq2FUOl1DRUZYNY0ndcazKD2U0TNmE168uLKwZmt5EJbLSqx+vHA1TOznyDQ1sSnbiJM2LpsRObw3NeJU11TTbqHIsfxk03SaqghnzawqKVR5jxoOc8pFNxtvRJQl+TXM5FzhapjIxfHs4gq/STwaDsbPKp7vHh4BwoNhxrvCeOhosnLrOshm8E1LME7OKNWYgLhLla9kh6oo6w8YN+Ywx7XmpGv8vmwG9fUyrMTarMK4BA5jXDOFJFdJh1DD1Q7ytirHr2KDdRxz5RU50GKHZQtxrPVEsc25JUi0vaTxS2wO2M1rFoopvB4fqB0utRnvqmaSTcXS8ZdihlXK5q3Ug0CyhX7Vn+gdYQ5HSyLQGtW4R3ip4SmqQrXRIaDMvorIas1MO4rBtn0rd7YvOF/7qh7CIFY7FKAmQZxuZqfK8y6369paCwcl3xnnNOwg3mX8boGUIbj7CTO9UqTy4lK9tlfSWENsxegFeTb+pY8nhp1AOAj7Q2C/dPhKGYlSuIjzyjHcZbFDOTnwZjimTmt1DcuILVWqYprNMKGzJ51SIKkpna8q+6chqNqP4cBaCNnWdA2bN/6xws1h4ODz2pfN0cjFsdRKBCWbMyS14gQcnTkrWVAq5cT6piBMGjiUzhKHUrR7xcSXt/3MYYrMxTLpi3OkpDjf6B/OKjUAS/IcUmRMYXV4bJHaXDAOpiUj4Gq4txHaVzXWk7O3qI15fVWcq9p61r9dZ2d7qvyrU82slhFPgSyORS3Dd9W1qwZ+iXY/q+ZWOan9uC5CORpaTiCdhAO/QCHRL7GIKIRQM50qn0gSjIfKE6mZOt2wMHfRJnUAFIZu4XFvJRZS9vcKSLaSBkvxzMnbZlxOelGprhu0bMHizI1oab1SwB8KfVHijWe58my2E8kb7wQgjYF44wm3gh+rdkzLQJgz8bZj2lnRXCZfBQvVdH1gFWxEIA0e5xsiBW6TiJ3F81PylOTITslZCLvAfBCkeDOyAF/TRf0MbjKEACA7dywwvDh8O2RrdlwTSTXuVr2fZsR6UKTaoqF6zvk4ybRyOAJsriaeXOx5NBwIPnNzcXksxj07DoeOb7t5nbd2F9zcGuchL7bzuFisUHNxPB4OBCmMMa4HPNXraePSuHndM08aHVOXLW14Aj8Ky+gZN5HFFy46Sx7oQkaTIHu/yiV0t5bVtxa5dSb02t0p/jq
QrwQZTPdlXIwQb8agkT77ZETPxgO8l+avR6+01MLMksDdhVpSQtkBJTk2s41B6czLXB4ocbDOm5ONo+syOTU17WrMDYXYGWqVkl/vwy2V1F4NTWhjroR9xu8XmDfs50gpVf8tgYbC5Wbi1c0dczFi8iiKq4Zq9IXLOHNIcc1eAjOKjcNjh5ETc4r6YbFMQxWSU9OWE5uLS/ak5I/zcVLcmHGp4JZ+TVxpxbC7YMhVSn6d295b6ReKHTTX84a7ueNu7GtpIkeag6F2lRdXHgJVkNgygoshO7FaEQ0IWmSd4pSjCKFbjjpNLdNwXcdtWThddb60HiyanTkb1aBprQk9gs2Ppvnj5owbF+NVLskOnpTMsJpaOr8wX5nzUkI1JLQWEi9i8hONF9XKHHmFyi1Tr7b/tgLKxRZZswFby51l2mnVa1qFl8WK566izAriS72maUal5GnK283gkJqd1V/n1bhcLgypaORmavZz7s0gTluFq8UI8pdH3iEKaYnI9XF/VwUmTypCbDp+yeaA7Dx3U0fwhc5nShCu4sQmLFz0M/McELEKa6qC+owGq36wVERYagRlmQOlmDG5hIxyLJfVCh6XKjvQSmS5VBMYqoRIK7Zsv4s557NjdzvgQyHP3qQNZnuGOXlSMcJ2KY48eVjsOdsetPLQkimbDxvH87qG71LPi3ljEYS6/kN18KIrdF3VuQrFRIqdoVF+FOZLR6zRnqLCYYmMc0X7pPL/lpYF2BAvM2haopYkcxIbYX4V9myk8ab5VQVvcQakPNqMeFdYsmfvI/NdhxaH82WVS3m5aTY5CVpCh5iDYIhsBTyEWqWiGoH55DqWZmmG1Utc48+nfWmlGArMc+DmMNDdOLpbM1DK6JlC4BACvhLN1Vf9lhp++8jFjo9snq9EtodhzwM/muqsCFH8mm5bTju4tbUcgdz7/Zi6awRnlwV/CMxqmT4qZkU7UeYQzfM+LZtSmxlXwrwL5K6Y1kwlA8rkq+dXkYgBpsceKZ7cWXr9g6sD2342naU5krNjlI4y+bXEgWWaVOTqMEMpFubbeQ6ToS2uog6qAqPpt4SaDeQnSy22kJPVddSaNnyvbEUoSNX8MvJ8+7euyNvQLWzjzDbM9H5YvVU/C/7Wk0vHt5fXKbtgY1HJxQhkrxwW24h3U8d0GXjS7YiSmZNpHh3VdVu1csvUAkyDqRJI/Sikg+fQ9TivPKtkx1Bhe0vNZRVUNPJslbJwDjcXwkHxB0e+sI1zSp65HuiTL6a5MnsjYo/WB34QyyjKR6+0icSeklaPArNYFuhkGVot1Gf9ahmfUwp0IdGFxJwC++SOpS4AghJiJiXPuO+4HO25pFioOR2EtLPTv4Wa/H6xunNsTDAxuzXU7i4STzZ7Hsax1tMzhfopWekWX0OlS7EQWRo919NAVsfd3BmSNwu7yUjrm24h10P9IB1pMiHP7D0zplETm6E/HxFYP1mG2Rpm9/B4ODD4xTKSsqDFHTNTsXF6cdiwGy28s5ZAWdyaTo9UFfR4RKnosynVhyNElgmQTURRi0KuAoSTIX9+MskCDXVPClC6sqIUZbAlIomqXi/o7KqnXMMtdR64hDk4sBpt67rL1ZsfJ7QUM66yGVktTBcuFpwv5CGi28ywmVmWaoA2oeDmSNY16YaEOPuSi+1EDJl5DpSNZ9nayewW1nVWHiVKNmP9VMXmtAak1CShzeXEq5c7wEJHt4f+XpLVuhZyId7MZuB2VoWidDBfefzU4cZE2UTmS8f8xHS4Hjw4EHw5StQUxzwH0k0gBENpW5knVbHsx4pammMBYe/YHXpCyIy+EFxk7Gyu3x4GUjJKQDNQNdezo4rZutkkV/IsHC5N2DqFwhxMpPjBdjQ0Sc3xopLb15JV6ShWnDZGA8gdx1B161KniCtoCvj5iDqC7elr2ahk4+Lm495xGtLyh4V4540a4BI3acPT8YJnhy37sSdnwXtlrNmsh0NnmdRSv6835yNv7D7DWM/
tJXCDASHUJAKdvTk7DaVsc0iOWZ0mhGpo8+osN2c9eCxcaJQKFYtsPbrc8/6LG4IUq3YQem6GLbkIMWa2bmbrp7U+YHFCcFWvY80SrM5FqyjR5vHL9pI20r6+05gqet/r+H62L7mIaL620M7lDfS32STo6+Y5LdFKKkwVhRkMsi9d4UPba17vbrlJG+5iX4sBJxyOTjyLZC681dKDk0MZrGONRHLs3LoBtfCJSfbLyhVwoTDO0YT1kufR5Z504ciuX3cM8xgEKQWZ1TylZOGU4ur1VM0rrlo46UJZZuMeUJGO+UnmQxd7Yi2TMdfwmWYHi/GE4h10dwV/N+PuJmR3AKghBQ+T6YTlFUIBf+ct/f8mm8DlcgxxlOgNhg/Hciwq1QCZBdcEMEuxrEFA5oW4A1Hhbt/TBbvfp7st3a2FHrkUfIcZjsXCeerMy5YGy9baW3MR0uzxouwuezZ+4TB1+IMdbK7xPmo/rcVuK+JIwdCrvSO7SHbKTTEvq4+JpgBtZWlq+GMuuNmQAQke7f2a/UUspNmbHEaqmZ4VHdDRRCzjwYzTtkn6ir4ikKv4IdjBUToomwK9cZ60iIncJa2CsFQleseh7xgvJx5uRqBmfGoN+S3tuSElR8ke3YfVaBaF5WA6Znlj4p0muVHwuxnmxXgzYuG6lsoeooWRl6qaP/jFjCoxcT6AKQf2U4efzIi92Q/MybISW/mqw96Qoy4cqwyk5ExjTGtSTrTDyx9MxT7eJtxhgVzMwZosu6fVOryMExfBtO1c9WBNrdq+M4+e67uBNEZ0qmGDUNZMSD9XflNU05nztvG2OoZNakLBqjmMFsZrYUc/Cv6ghLsFtxQ0duiQSX1BJodcJlyo6e/JOIpJox0yo7PrpNOwjYKvYFlSUrSstOmhxw/O1jDg9gG/JGSaj+eAKmkjpIvMdlhQNaRFYuH1qzv2S2Q3ddzOpummVV4AUaQvDNuZUDP6XrnYr8XS3wbmR1tKNAesuy1WqWJIxl+aN+jszLnXSm2o68JQE+X1B3d85PI5qXhezBubG+WYnVmiGDLuBJlyzZA2/hUOxifOpEn2iXQRTWT5MrG5mBm6ZQ3fQanF5e271Rl60oXE1GWW+sxSS0+tmlWjMI2B5P3KTdv5Hi3CcoiQhBwLVGPYTVVtfDYRXzeZSvhyIcyPnCWUeSEFU2TvoyE/TSTTYWdA0y5zVXNLnXGS8gbyxjhnKy8rFrZbK6NzPQcrMl+LMvcxsfEWto8xk8PxyG5ojxQg68olEoUQExu/8Hze8nw0J2Segs1TgdkFAyFGW6OsPL3qZFdjKRwKYRcYdx2zNxHlZoj5m7q+L2um6GR7jtFO3H0Aos6Z5kSYEWaGFcGhtULFa4/ueG2zY/CLhQXV4VJXs1DNyDRl+WWVYIqSOYQOF4tlCGZWK28FJhpa1UKxhfsRGap94JxJRKgZVpr/ARpXIvIR4PcB76+39LtU9beJyBPgPwU+Bnw38C+r6vPv81r1wM/UA2/SqvdjC8NKETh0H0w/J5pGkQxWrHFfOm5TzyFF7vLA6Ds7COrngzOioQtqjuJJv0iFlCvd56gPklp6tVaxSmG5VHwojLc9F3fCfh/JW8fVZuLtjaWRr9et5WFUKi9mKDzYTrzYRXKsJOeiODUrPvfmwUCzqi09uqil/Tey4DIH2AXCnaO7VoYXmf7ZjLs5IONsXq0ckRNZBKW6/jWtNhyqUXZj5G3aJNM6wZBVyLIRDbWVqlDsVEwJra6ozIuVtEjC7sXAMxWcL0x3Pa9XjR7ESgW1sgbSFHprrUeLkAi5VO6Hcxz6yPVitdjmKRJmk0qQbKUi8gBlU+uYDVVJvC5UPxu3QioMnifHTXI8erJb4zd+NGOjFUUGDJFLVDK6qc37IZN34WiMq4klTgru4BiuLRmgxErM7eQY4kq68q4QyLGmm0er1B6iISZzNpV/l6iCklS1bkd63RNcYcrePMpDMPL3bOiHTM6UoLOrelFmcAP
ErYVU8sbGMh4K4W5B9uPKbcjZkRdPP8kawj2kyD51ONF7OmSqJkUxpsC8BAuHLhZ+DlUOo69zL4+e2QXjRmB7lSnjm0ZX6QStpX7iTok3ie56tnsTwS9KXnRFnJbR8Xzakorp62zqATYtAXcwZXf1gUWlIoG2iZahKpNXIn+JmKyJl9WZaoZHjsXGOQt+5wh3jlxFZaUYWtHtqnFaHRLXZ3zMpDmsYofAinTOOyPn+EPVbqpGt1buXNnoaiSXaHIDd1u36goNV46479hEh7+bkOs7Q65COPIvp0BOjn4U5uR4ZdjxygA3/cCnMW5MC6M6b0WXh27B1/DtVRx52I10LpGL426zXR1MV7WK0uQJPcgshugFZxURtBkOUKQqrPvEZZg55MjgjYO5b1Zri+SdpN2XaEWB04XtQctWiIOYSG3n1oL1pcgagmq8m5wdaTF1f9SeL3pbW7lrMiD1i5S1X/UQakJCPWqc7UdubwXdtdaPNP6qIVZ+gnhj+8bmWcYvjsNOoHgz2L1SFse0NfrAauTICZKkx/05V+2v3Df+X+sgauIDVp6qS6TYreT9zmc23gyJoVvIG8cyWwJR2wNXXltWcFZ/NMZMUbcKSa9VC3IFBkStKsLeHIEy1ChLXSf+YHtId5PobgPjdbRC3stR2qB7Ych6vrTkGr8o8S5xWp/29LA1niKrSDVLQmIgd5HlQcf8UHlls2cbZuYqEnq3WJWRnAxRbELUtk/ZF3gpbPxM1yUOfUDFUQ4e523utcSwgsO5ugcULHFFDfioFzyJbKn9LAufb3svyFUC/leq+t+LyBXw10TkTwG/GPgzqvqbReTXAb8O+LWf62JugZJakVJTSz2yAK1JaiUcME8sC28cTADtrcMlLw4Dn5iemLctCxlnCu2pNz0QZ1BygwZpgqFt8oDxi6SJX9pmNz72puE0FCgO2Xn8BLoL7K86LoeJfJHJd+EkpFNZpF5YLsBdWCz/ha9K0rCKQLYsM7sn1rAks+PF3iyuJXvGQ2eE070zIcODEnYZv5uPwp71udxSbBOZKrFUasZjYQ0JhjsjcJfokUr2NCu99YmFUCi6LlTSSViiciI0pVro1cqfLK5D+gyTFbQOo7JszT1fq7bL0cC6l/1ZxJT4g5KTXxdTqcWGpRaxbUrm6iA/SscQR+N1ZGr9KHvmfBDmHDlcRMsCikexTXViOjhXPW4vZhR3pm3G1YzzmXKwOmRr5lql78VRCLtMvEvkjcfPbhW9jHsl3mYke6YH9hmXMX2lvYWbls4xPJhwQ6aEiJ+rAnI1IO8mT87Cbu7YT5F8G/F3ps7vZ+sDPzpSbylYbhbLBJwKFCXuXOVzmIHnJzU1/3mBbBy1ZbYN1tfaimn27OaOTVjuGVe5OKYcLAyTPKUIvva1ZReWSi6HBv8UFcocKqlakH0wozcJZWnh/ap8vU+43WSIaPBmDHUVXRwLced44/aSu74j12ys3aFnmQL9nSPsMD0k9Ra+q+hoyoC2jFy7v3Aw7qGtFzHUVEDjMVTdNMFcNRxcMkQx3hULv6tCxpDkYAKuQ7es2ZdjzVg7LTiOM08ebH9JF6xFsUVr5YFoYqRGSRBK54g7waWBLnrCvBj/yntDMA91zS2OuINxZ8LL2zDzqD8wXQQTY546UiUdCzYeDdlJNYNn8BZ+vifGWVE1dx1Jg6fby5Gkv6IOLbPKDOfraeDNeMlcAlMK9n3uJOtaWCkH2nuWS8/8QCh9qa+3bO5q2GfQ0TP7WLc4Iee6VxShzJ5QjVbnlGkJhpK2SFoF7qkovJ+EcOONe1YRL40WTQh7Q4dKcUeF9klWY7PV7POHTPQQb4OFfiPgDLmeH0Tj5La9vIIjbW9qRoYVHzbtrtIr1MxjfxDitWffb1gu/DpHXbJ+caIccuQm2b40bGZzsLxfi9qvSKIq6apjemhjfL0MPNttmfYRrZSElcLgWMOepWs3XSM5ydaDn4xCEQ5KvDVHvInxIrZG1MH0ylG
8003ZxKizVdrAqHSrodXe1/SltBOWBx3jq9FKLmGFz5M6Kxu1dIxLsHrBizCOkbeXS3qX1vqGXgpTqXqKfabUyIzOtg5LjUjJu6b2trlfjamGWuWMqqIpffbPfI72OY0rVf008On671sR+RbgQ8DPAb6hvu0/Bv4sn8u4ap186mHUCel8WTOpklJTdUGyp0yOtw4XjDmwmzumJfLmeEWUzNZb8HkskefzhlQc/bCwhH4NMRT7zwpnghlW6799zYwB0oWgQyVxNl5WskF1zngWJeq9TUmdo2w75ieFi6vRHq0teMcqzmYel3lDfqRmLZny9O5xfy+Th9mtcWt3Avm+3NxcDJmpG8VqhdewU6vbJXMyWyo41rTUem1EDLFS1nqJbk7osqDLCXJVYdS2MEuuBXfzkRdl3CNqam9VE06GpLnlWMqCmnnV7N/O3Z/EGoRSye1rPL/L9JuFZa7q63U+nXKh0mwZVdMYEafkbWG5aNPcI9nT3zg67/DXI+oM8XGhUIrVApPq9UoyNHIOWg1VKz+kXtbN189KGAvd84mwD+S+J0cLK5gX7E1IcFCmUCpvaN1Dj5mjFZK/3ZsRYbwKWeUUWp83z3MtQpwUlwvdXUa0CihuKko3J9sccra8hCZOW/ln4s0z7ipXcSyhJodYYkH0mf0c7yEC4gp9NE5YI3jXBUVJskoFuLmW6ZhBp5btKIQx4SYz+jQlpJWOKSeFVZMyz2HNFkrJM+8jjN7CwDWRQT21cLata62JBM3oVGeeuQvrLaJVbkC1Gf62LpvBaeOqhrDdpVUOQZJQZisnAiYB4J2SsvGAcnLHupV1rwljXYoeK0Ld9iBYkyBKfyTypq150fOVR5ISngXrdueId0r/3DEX1mLAfu94e3/Bk40w+IVXhh2382DXr8OSK7coZ0cpwu3crwkJuRxr8rWxtMLRgixWMqshPqfrG6qzmODF3fadqGfrb2/ZsyXaHpe23gpHPzIDQyq3yJTj3erskcwgzsFVv87CrhaWlCNfSWG371nGYI5U26/r2PrFqBrxVo6JBr72eS1Ivjw4QVlOno+6RUl1OP2h1CLC9XkC+A4OrwRkW2zbbYaDHvd8MKrAsq3lta6y8ZBqElHcg18E9ZFlcVZCKh+R5azC29Mlz6Yt4xzNqO/yPR6fnTN17jwMjI/N0X4+bdkdurU0T9jJykcqa4UHo95oKKD1vk6c4FY9IewbbYS1/8O+7ktT5XE6kFxr3+bjHFe4J3GkQdA+olnJFx3zw2AOri/cTAO9N7mLILlKXfg18pEXEzO9CBNJ/Sr2/GI2Tqk4kFDMiarRGdvPiwlxn1QhIRfLys3ZojQ5Gw2mGlbkvJ59n09zn/stxyYiHwN+NPBXgPdVw6sZYK9/3ndxbud2bud2bud2buf2A6S9Z0K7iFwC/xnwb6rqjZzK13/fn/vlwC8HiFePj56PciSUCWsZDhFlcgZPhp15peqFu7HnMs5VqE55MW8ILnMZZjqXmEvg+bRlzp5Nt3DXdFhOsgdarSatiNLqpYh5NGsK8d5bGrlrmjgWrtrv+3v3vxL2giNdRPJVpg+Zu7G3wrHpCAsfdYfEOFTPTWiteIMw9x92+NDcyGP/qRisWbyg0YNzZpVXuNJNycJSd44cOYbitFZMP1iBTFmMo4VqJfbJen2cVbo35CpZpuC8rOiCWfQKJVeCoJy4JKz3LE2qwVl/0hcDDA/u5L2ywvbqLGw3bGZe6Xek4hFfVl2i5lk18rdO/qTMzPHnXp8pyAK6D7irBXe5ML3irYzM5siDkaT423bfmGecHd3MGtpx2cbHjZad4yar9+aiw9dQl5/VuIO7GbdfGHpHiY68caRemMcq7DcIu01H2Ryrszd4viGqRYWSvHGq2rOfzAX7XEXs2jUaXWAx/qKfzUv3U+U1NK9MbLCbV+wSVi5nCYSLXEtHZeZi4cBDiizZ04VMiNk8/gibPvGgm5iTZ1/RXomWiq/OrXpIbWyakB8
nEgyyZCTVOdXQWL2PjHZdYtvPXO83pMUSNtzojmtU7/fNadjfCrNyT3dIasaqUsm0VZ9nlWPJrKWmWskbPyaYZltz2GdksS1zf9GtIPFyqKR67u8jDVkQdxxnrTyQlYycjiEVW+tWNqZ0Du2jDW+p97QX8mDor5/tfm/2AyLK+7aZuTj6kFbu5mGOVre18rRKcjztt+TKZTvM8YgkiyGLxnnStTySelbJh9PnUq9oFuYxcBv7dRvoQl73gBIFyVrXA4xPAuMrwvywmISHNiSlcmKirLqDa0aaYglbrk6VGtbFKZodyy4io0e3eZU+aGvXCtLrinJqvUbTzVopIYGqkyQr6tYEXw11M3K2lUGCnAUJdQ7uPTl6AvX3dDL+2D52eFWYHynpQYZYYO/XRBV/MBmV2Bu5Ozu/9gnOEMFn05Znhy3zGEyOxBVDnipaWLxY/xXHfOmsdFDy3M0dabaizGvx8vrcru7DpWbAymDpy0ZjqQKlUcjDUdoFWPtXnSGz6upcTBW6cw350ntZjKf6UstFoESH7z15sIx5q2VpHE+AiziZxEtITCHYWnWgxQo1n6Klu9zxfLT+WWtgVjmVNZFpyvjJECuZTFfNtOSKnXO1pByl7Zn5SPn5PNt7Mq5EJGKG1R9Q1T9S//yGiHxAVT8tIh8A3ny3z6rq7wJ+F8DmfR9RDVgtJ28bCE0CQEwh2kMVSDPieBNsBAvvPeimlROyTx1zCavmxVTJ4A83I26TkNLi9hXmXPOCOULANeTkR+h2ShNFtpp0arHbzvR18iHgDq7ytip3wzs0GhzrLydT+Z5ilWKwRW6LvRbrXIyjs3l7RpJtPLsPeMRbmj1ACt74BUGRqoieLrxlTRwGmGaTEsgWwuvuCv2zKgPgWTeIuLPsjSZKqH2oIcEWD7W0dhNalaOwaeu00zlQYWcL71holL6e/mKb0OJMabtlB/ZXE0sX0Ll/R/83A0y3iYebkQdh5CYNOK81Q/SYERhGE+Lz14E5FnxXjiG1E/JkOyQQVt2ibkhMDwulyk34iSOBHzMIJWFkT6pBl1qGITgP+WD30KQD1gKg+cQAKgYxh12PeiGMjtA5XA7kTvEbYX5UeUKLmnHmbMxsA9f1gJJQKEOhzJ7UC77yBkoHGgt4S75QLyZ2CMaXm00BXcU2E6sNWex0ahh1liqear86Mf5N7xLXZTA9HRUu4kzXJ94+XBJDZhos8eLBZuRhf+CQIre98cVcNFLxWstOoASrf3fUtbFx8WOuRl+zPOo41LFu6dpDTFzGmbKx19PBMp1yzzGLtK5h16QlqhSBX6pQ7iCkwQx4N1Hnqa7O1VpwPBi/xU+6GoDhUHBjQktpPsE9HlHLPMvZ4bpsYpcHb5p1a5LMcSmVKt+QYzDD6fS561xuB640Xa1Vj6c+08LJvdv68b7Q+8yYA68OO17Mxt1sem+HObKMwUKqB8ftdMX+Qc+DywPjFInNGGlGgYO0LWuoP/daM32PYVSlGqoetMl71MmbstqB3R8rbJTObnq+smQh7Us9+EwvMEwKxc6E3AFR8bHQxWR8b28h+1KEJN5qWhYLF67yE636xuq4VadsNrFSDbaPFgU6O2OsgHeGzupimiaTq/pmMC/NmA2mEVgJ/5Iqn3aGcOdYQjzOxabXVo3/3BuvLm+qI5QcvjreK2cKrfwn45fZPDMnZVoCo4+k7CiLZ54t67GFcEtoZ6RDREi9hSxzduynDp2brt0JHxDrj9KrrYdtJvYJLY6M5XylrWO+sEShEo7rzJIE7FncYkk8LcnB7sfhU1nX85pE0I4VgfmBQ4qjqyFjKRAPZgQXFbZx5pXesuev63yWUNDgcM4yCFPVtQQYU2S3dMdMxjoOrQ5s45/KnCyhaUmms1WNqxYKZA0FFla9qy+gvZdsQQH+78C3qOpvOXnpPwd+EfCb6///+Of8NsF0YqIVzM1DTfOlrIrOzhXoTMSt8ZpcNjn8921uSeqYayXyferoXGY
b5rVMQLN2vS9GFq1CcBQjtDVUh5P4/HHimHJ098IxxQBDMU+0K7hYyHNYSYCnBL0ilkaex8BNGFhG0yrxcz34T7WFanFamW3zlt4yVoZhYdvPJsC4eJJAvjANqvmBpWqrFyRv8YeI897QKDWhyM2zSj711IrqtdTNIa/va23tg3drhftG1ssvh7owLzOuy4QuM6uwbKNlQ1bPV4Py6PLAjRuYl+HIEar6TlIXrO8zl91UdZacab3UA7odYlqRGj9CPngzMMrx4D7m1HM0LhXjhClr4e6GToSxEcHtef2syOTqvDRPsolwgulH+dGyQmnlHRJId/zelsZrh7AdFpJPjLCaiaShxv+nqiXTC/OlkHsra9N4h6NYJslyFUjJPpe2GbdNOK+kxZEGb1mSDfVph/QCbjFi6T1wRzERz2QbTvxMx9vLQ7qQeNiPPOhGLuNEKo4xR26XgSfDHifKJzePkCGzjUZ+jz6vauQxWkZaDo4lFZY5kHpP3gi4hrTY/bmlbmorJGH3bhUPbFN2CcbkmYsnZW/OUbDCuiW6ug4MBbZ0+3rtTUNfTdTXLY4yWIaplTOpaJW0OWE8nTwIpRdkT82+qvfZynP4pjXX5ER0RdC7LrHfWWq/dravtX3BLU3x3IwRgmUwlmrMNT6hcQqtoHg4KN2uEPZ5lQwBqiF+nN9pMFJ49MZLGXziu2+eEFzh+jBwtxtIt9FQi8l0u8Je0BeOdO15/iRCge2dFXQPByXuLIVeO0WlkJNppq2Z1m2q27lfZS10dYxz1aJqvKYyCTrbnkQ9SLXqi7mDM5mY60z3IhF2iXTpjbcTCz5kgs+WQCFK0dI42+TOBFVLEqS3v8c+WUZhpJZSEnOIkhJ31UFXgQ1ohOLMcJTLRIiZZQzo4qh+s/GiKsJVOkfYm26Wy5bV2pC5cCvkzvh2tr/pkVeMyQktTzKySchtROa6F9T/Ny5iO+tWuZp6VkxLQDYjfUyG6mdfRV11NUyk1OLWzfA/dWQbityc/OaYUA3nzorbO2djTmdCscsDz3InuOSOiGLlePrZENaGZp+KKOfB03Sj3Iq0H9FpOMrVuMWQ6O4um5M2h5UDOhXPLnX0PvFgGNltOw5ZEAdzDqveZVFZye+nVQPWPszG4aQJZ59KK5xmB4oDzWZ9N87Vl0Ch/ScBvxD4WyLyN+rffj1mVP1hEfllwCeAf+m9fGG5zPhtIg9W+RuoB6GrdYKq597Zom4p7q9sdzzpdjybL+hc5m6xmMQBk/XvQ7LyBiHjpRC7xNKQDNc8RKkHcUuJtk3DqtIX+hcLfvakC/CPZvIh4GfAKc5ncvCEvayicKIYAdBVJGS2jCzNR1O9hS+OkD8r8RYvq9TAo+2BB/1ILo6iVs199EqeHfPD3gyXwaGuI+wCXeeNmIct6HiT1vvIg4UR467gD4mmfmx93TbLl5CpOplWrZSXDmagHjJGnI9XE4+uTGvrRpTpUWceVDyupVxTqMOBmknFqrHEZAbDMpkw5Sd2T0jqVimyhmAcvXq1DUmPitdGfq73X5ER2rCGWjk9WTjJTVRpCrWSG1M21CqXWsYIiKYM7aaadp7t+xuKxYmq71oxvn1f1Q0rnV/vQ7SKWWKhnhYmMw2eQtpaWvr8SMjbzEVMXA6WYuZdYYdB6K5mKOll5uJysgN0ceTejCtDaqBle5mUdh0E7zi6o25F2/ysqx7PYY7rpubElKwfd1bm5nt2jygqaLTrHZbIcyy0VALIgqWQeyO6p+BJMbETZZGO/CzW8kvgKvpB8PbjTUSwGaJ+KchSiLeO6+st0xIIIVu3x2L3EWysfAKmaozcmMG0XNq6Mq22jJs8Kh66AoOFsWxgbI+RalylbKn44c4MJ9MYcjamQ29ohlZ70CkE5XJj4zQtgYvLkZQ8h3nDWo5orvMmQ1lLZZ2S91tYyA7lcIDheSHu1YyNuypVUcVuU19r7l0WKwFzGcjbwoNhoveJtw4XCPD23QW7p1to4dl
Z6J9ZZmu8swNvuRQOamru/fPC8CLjD0b49aM3oVWvcBMsqaPtn5jR0cKdKuBjYdsbSrbU6gDaKWU+0RhqNILWihDvHN019M+rLMdhwT8wg69psjmhGm/2hSk7vFeyq/vI4oiXMyX6KqYa0GhrDa2oSoa4lBWptvlmhgFecaHgfDFCvCpaYVAFlugtS1S8GS+z4hdzWg05FOLeky5aOPJ4uFMd+VU/6i7id26lhjSkJ9fs3kY/0Xpg2N4hVgoLGEIidLlWjLD3ltAMp5pRnWv4eBSW1t81877JSzTDsCXI5BrezbXslbhC6JR0UUhbXzNza9fVKI9LukrYLJdCHmx+m7q+XzWs1tB4kwxaM02Pe5WblXi7WKIVgT4ktmGmnOjsLdmvdQbLYsltXqwOsa90oqKGoLdrawiclppaZYic7XkaPLLYXiktY3U1SMsJZem+Y/H9ae8lW/CbWY+Ld7R/5vv7hW6wMi8lmuUPQC0pkKpR4ryS+2K1jWohySe9edCP4oFn05abw0BwZdW92HZLvV85Frvsqy5H5aboZ3kK4z2Z1kqJBnOmQ8BdB+Ktmu5Wn/BDosRYQ3wn2iIlE8ZCuPYksLh6+651gpnHqUsLEVQBtk0gXSiPhkOdVMJcPHM0q2F2gTx0ayaRWyy8JSXip6OB5adjfNhPjuLFijMfXtLpKOt/1tZCgy28ZaEkNdRBBJUTY7EWt77YWD25y87QtkO8MkHNakDIbJlv865jezgaKw2ZaAswv4i8uNzSe0Owui5x8Ec7eOWDdELuakHgchJ2qUaYrxpF/gChFyYHPhTjitR0feOgKeGQLWMtZUOhih0IfkiUg78HY69Q+Ekyo5RSx/+I8pkYqyNtbHMp0VC83MkawnE1Y88tJkEiWjfYHvCmKj8E0x/qQmaKiXoG2gFXtYrqoFmoMLR6dZbVFCal3aoGj8SIegvDaAufViRp+xnBLYHn8oDr7ZaLq5Hoc4vU04XEK5s9t3M9DEbP9WE4oc/Zv7wvazHb7Ao5mB5NWjzFRyTUAzar9VP0kCwjEncaWrX5FcYMzzsO40kZqwqX2Djaxtx4hf2NIcxj1f+RbEaznwrhICy9Qy4WwFVHSen6Zd2w9wJp6Si9neg51rpkfcD15sRJAqnrQLHyTa0fDvuetA+4vbfwddUm63a6lroZXxhSH+r8K6lWqJikqokr2zcW/JgJL0ZknNC9OS/SdWuZJN2aeGnujYdyM/ZMywXBF24+c2V0BDFh3Xhz5GeFfRWWnVp40ebl8CzRP59wBzvg/DzAwaN9MZSjyLEQdN3P1m3UwdXlgScb25sbT+9FNC240gm5OgYuH/dCmR3x1spRhX3C7WeYZsK4WcNqpQhFwTtlqOK0s3impVRZBWeFx4uzaAf2GdfmeJvnxeQ9DFF2uOSqRp05uktytl60InGOatypTb1cDe06T03ewb5PneAP7qiofrqn6nFOU9G18KZf0Sqfa5/UEN9ypSY4PJhafI4WpsyjZchHb9xH5ywUnULNyswtRCj3HVKxTFZ/cGutzbgz2sDKhXQCxZF6h/YZwcrTiShLVyjeH4uD56oXWMu+oRbemx4ry6NsJXsOVo9wldsROLUcThE5l1tNzUy4GdE+UmLPJizcpZ7dYjWEX9/e4nozpA47C/09vbmw8c5WtaHr0lo03LWC2r1lpObOogMaqy6Sw9BotTCyWPFfJOeamWtI9XrbTU/p82hf8vI3zitdTEyDrmUA8FaMduXIi65ok6jpg1zFkS/rn/HNz76ct3aXjIduLTAbu8QBVqJmqHWupKZGG0yoNFHJtRZTJWC3MIkfs6VkLyaF0L2wFGiZHCUb9OjaQdNQor5K+Huz4P2D2UKSoydt3aqV1Hhk6i0unjYeF6y2YNoq2zATpDCrCUk2PaGVS8DR4yi+fr832QF1chTHpHrGYBpiVVhu5Vo57qFYa3PVwGouYw2HSNeZ3lVtTUJinCMPNyNXcWS/RPb1vizMamT
C+RBhfHelXoodWH40YcqlFpvedAv7ajA0rl1DChFwF2YsuiQ0NWKXdUWI4l7qASsmLphdlZxo99Z+1BaVVi82mhRIyUfE6kiSrn9rHo2ehvxa/zWiva7yDi+XXWjeqRTz/Jph+G6uS6p12vqKcJjOknESFNDkjgZeaeRo8wSNXFsRrBCQlI6hirrpSzb0tvRKuFrYbic+8ugFD+LIo+5AVuHTh4e8ubskF2fhkSyk5InOjKl1vtWfJfvV2elDZh8KLcGi9YHNW4+EmpxRTEpCW8HarPhDIt72pOztwBGQZOPYNKwaUmmcxiPvrokrgoVG47Udjrk3VDTEjDspni1SSwp1uq6tlRPoBI2h8vKE9rEsRwduTp40BqQaIIYQ1JqfycKdUhQ/evxB6O5Mdy5vA36MVRXdUv2769kSJnYHWKqMhvOGwkShxILv89HATsbJURVubzbWv165+HjVMqu1TcNBK9ez1L5xNcQF3c2Cv62aY5tuDTNprS9XOqWcJC+tRnDt/8fbA69vbknF0znjfhGL3Tcn/Si6FtC11H6rLmBp8UYulqVU48q+rwuZ3meGsFR1frcaxGZsW71XF62cT5o9m7lJcVDlOGQ1rmx9QnnmyNEEd+f3e/N9W4jzxIFRNWHmI+pT5UJabUjRKoZ9vHbzCNVXPmIR/NMK56+OYH3E5nR7KJuCDpm4nVlSTwluHeO7Wr7H+8LlMBkPK/bHzaI57mJjLGpGxzIH/L6WAzoYh9mMa4sCqBc0CqnScbzX1UhpetSGVplDHXfmnLrZENADVW3+cqG4QL4zZ9J35kBqW0e1yLSF4sXqaS5KuF2Mx7qf0OiNl1Zb7xOP+z2peBMzXmpEqBjKVrJpnuHUEl6w/TF2SgiZZSjk3lXRVpMCcRWNktxgaD2egW2Oi1v3cgDxHj5PHdEvbeHm6pEEn9kPBiNKBokWxos+r8J3dMU8ejXP50EY+d7pEZ+8fcTdoSdNRgIHKDkye3sU7VlLBWjiKBraskyAlqkG1My0at06C73NH54ZLmfcJ67wi9I/c4wXgYsnB6ZLpbu2mG7uBLwgSyu+ajN8O8y86DszvLTyzBy26BZYHgjp2g6sZePIlwZr7nPHIUUOi1UvnxfbMELLfkk1hDnVOoFLoVWcb4aTVqRvVUV2hmCspQZc46zoOqFUGmQKjTuk3kEttbB2m9R6fpNSinB9GHi235CSJ1TIOfcnhmQ9MFtV+WbbtQ2l8QO0wGubHV928Yy7ueftaATQFsJKg82BdGHK+csYagjSwg0qdqCJstYUk8kQFe8Lk5x4dA1uqMZmiY5lK3SXpvCbK+nzNItv5aA5Z2E25+6hoC3sq97VigOwXHnmCyFtLbTdSJa6Ht7NmLDNWZLpuYzJYPDdoSPvQ+VP1XuZHeOhFgYfjxmY7fBa71WNqyct/AaV9yUrMbmFzyQJ+XnPzV3k795sEGeOzsXWCnNf9ROfevGgcvHsnh/0I6HV+nC1REdcbN4WK9w81eLj6wRqG20QtPdo9kis8yupGSc11EJRws7msbsJtKxKKdA/V7o7W0+pr/M31EQOOT5bCze2mot5dpaYUkNOjTPV+CutBmAr7LrW3Ix+NeJp/E117Pa9ZeAt3nR1riP9U0e8q9lTy0noWC18Ge+E7tYEVEtniQ3D20tdy9kQnKWG8VVXA2XdPqOii6MA3QLhzrPfDMje4M1wcPRPrd+WS7uPkoR4p3R3GT8W698dqDO+i7+bkINZByV6iofuqaf0Zjggho6e7uHt0FUPD7sDD4LVeQsuE1JnDqHYPqBeyLGGwqByHIV4B3GXq/Nne84R5deVqB8r7yqpux8SqwXN5eApSZhquSVfNQTjoVVNsDWhNRzoZ9Mw8wGWtifUOb8KaUr1u12rSblitZUWcVz8fq7RkZOTtNES2pzNFxk3Gz3CEGczdPzUiseDbjL91cSmX7ieAnljiugUYRojiwvELjGEhBc1dDCYLQBGAwFqfVO7/7R
4+lbIOjcnrKKp0jilAqGYonstDu1cOTqWpSJsSVfDTKoz6xZbdD4USlcqn5BaX/PEsHKgaK0GwspvdnMy3vE4w9WWMhQ6l3g2XhB95nYe+NjVU6IUbueem25LBsQVqNpXLeq1ctz6hGth6FOn1cEKyZ+2xpUFoykAQmCtKfgF8K6+tMYVNmGHmHje14ywDK4zQmysrqECiFpmSTAJhbfnS6Yc+PDVCz7JI8bbHm1ZCmoTBBUmFW6mgf2uxwXMCyknhyV1wFckiZUDkbYm+Nh9qiP7juH2iBaR7GArvbI8MAVbKULaBJy3EKZ86MA/9eHvxYnyLckzPe5W46p5Nblm0owP3cqlQXRVOJ5y4LAEUvYsi6dMJubnpgrLHpSwL4T9sqJV6uL6b3FHNVojYLMaUaInKFibZ854X7hK/g7OEB3nakhQ7k/KutfM10bi/ejrz3j77uLeHLxXA1BlNShWrsO68GwxdkPiMk5EsVp3rUDuKocwG99EHy988JVrPv69r5hx29fDuKq4k5vatYVbRJRttzAGU8O20Cp0tx5JERkT6TIyPYahXwyhWY4yAisRtKJD1olyNFz1fl+6VNDkjoryvTA9BjDIfrmwBb9sHWsNy4rGkAyVWrK38LgeNwz73tr3rhjty8UVaSkeQ0HLMQtyvVfvjHa4VGP0xCALe8sond6n+IvEk0d3DMGcnN3c8cbNFZfDxNAtzJhD0sXEa/0dh1z5Mcp66CmGuDWu3b1Q0gnKZ6Wi/GrgtuKpzaiRYgd7iULeVsO0FcleWNHBNeU/HPk9bTxaP/hRqyimQ6PS9wvBFeYUSMnI8nnxRyesdV1FtfG2Llr2k5/se+ZDRHzBdRn/PcMRNaMhHKwIqDZS/whuKpByRb4NXfFjsvTwebEspjbXalgeVcJOCTeOpffrffgR8k00Xo+D7rmtk7CD7VvK8KKsqLEkQ+ZLdOTOEHW/mCyGrdlWZ7SiK5Vb2oyY4zjCWhwapaijINwsAy+qiHNDb6SczAE4Fueu60nFnFP1NU7W1lEwBfzLbqKocEiROXumJdyfV2JOVNujpBZzbyRmSWrOb+1Pl60smp8qiqSCDJl+WNYoSFqsJira5GmO86Lxm9raQrXydY24LqfnTHPMCoQbT96a8RH2FkBpmYwlmhNJsoSuoVu48Sci1Qpl9DWBoFTJlGIGTWzzqwIEg2e+dCwP6tk2+WMmaK48v1SN/rbFBEUqVyknR5oNtGjlgFyy6eiy1RmUdAx1x73W2ruGKL0recixhpVLliOyXLC5Vwo6dCyPB9ylJcu8vr3lYRx50u2YSuC7717h6W5rlJ8ur4agTSogu7WOZ668P+OXsiYmuSmbIbfk4zprMjU1inG8Z4F2/fhuD/Xemvvcbzm3czu3czu3czu3czu399q+tMiVgHOFbVxq2rtDvAkGbmKiD1at/bnfUsSvWjx5gOt54MsunvOdd68C4GI2sqUILpj3VXaR4ouhVVol+4O25I31Hu6x/6VmWogRTZcrz/yBBfGF/OZA8YZSzbMwbGZ2vaU3lyBruE9a2ACYS+AyTjinLFVZTruy6rHgjENWOtY4OSqm06GGXpRi3n+pxSpPOUsrx6YWv1TnaNl9a/Zis5nvVftuHC1DM6gGvjqL1RXvjh4emGcRo6FX7cuDFTVWAenN4/uOT76Gzp7HB626LrKGQF3MlEWMPyDg1FAAKrJVopIvCq9cHPjY9imLem7GYUVXBBsrCwlaAeSpwcGu9qG3/suxUsUquh/2Qs6OISTSg4ybPWU0HZm0cbjF4w+B5cKTrpSH/czd2K/e+cpjOtGzap42NUN0zfxq4Z+shNsJSmHoHCUExledeW8NuRyU5cJZqKU6ReGg+IOwLJ4hJrzTSq7VI6HdA6EJKxrsbcVIKx+npj2v6E0db/EexbhtxVORORvR6bFweM04E85n09LxwiYsfMWDtyjq+Jtvf5DD1Jn32RUebkZ6n3g6Xdi11Grrpd5xGWs
pqiUwTtFQvBYSrPVBTKepIhbhGPY6RQapoRK3QHh6FK90CfobK56dNg4GWbV+GqGXxq2riGzjy6lTXHfkiXUhsYgn52O4qaEOa5KC1PBqNvTbJZNhcVEQXwwZuPMsjzMobD4ZrDaea6Eh1jJN69pVe8YSra9Nb6d6zc7VTFlbIFLXNt6ZDtDkyHfeQjUTuEnAF/zOyOvLlYUer77HOEa5E8KkDG9Oa1krLjriribm1GwpDR68q/zNis4JR6SxRXe1hsvWkJFQEO6S7V2DX7grVWjZV1QkV/J4bxplQJWpMVrDuuFUXqgUy8zuQjYtwyVycxhYFm9cmyKrVIqcIrsNJqiI85ptcAq6t7nojRuUNkI3LPQxMc7RvmMXaUW+GwJnyVfHMTxOWis6Hvb2Zpcq7zQf55MlLyl+dBU9rpp947HWanQWyvfeuLaniK/92NlRskkQ9T6ZkGhs43B8TtMghJKs5I3LcBTS1iNy7S3hoIV8D3c9egi40VG6YtpYlb9GrHtGRe7aWvWT4g+OeQxWVms+ontt/7x37tZHMTkRK4HTxlwrP/VhHLlNPU+6HYNbeHO6ovOJPmSmyjt7dHHg1hfuFmfltpbaZ7nWEVVwo3EGw2jl4WTJts5eFgZd6TPOIjveUwsT19c/f/zpS25cxZjpfTrGzZU126j3Zlx1XSInO/QaDPuwG4mSuZmsdlaoG+Uq8Fyc1aITuJ26VYzSJQF3oharxi3KjWysJi0wXwm565geCa47UA6Vw+XrArh27OIF3ZORw7wh7rxJYlQRx8MTx4/56Cf4mqtP8V37V42Y/bB+aSwoDkGqiCPkwUJ9Uowwn2uh1Xc0rSGPGstuB8k9MVDahqfrQWJE7MrHau+vXCoNdfFXIn4T1VwVv0VwqqtC9Mo58q6S6UF3gbtly/BwYomlhiYtlLcu4EaCqSEbbTIMav2aO9BL01h6a76kqGM3dlarahFL82+G1KB88NVrPnz1gs984glgG4kIuNBqQ8oKWfsDHKYIV4BXSmc8lBJh2le+yT7Ypt8p+6njcOgIdcNrEHoT+lvVvrWG9OQY6pKmo1JM6I4QSBsTTRw/vOC2qYb0bJzmB5u1jp2cbI7eFzZxqZpBNUQxYOHUYCrKF1UC4PkhmuyFGMeiqbfryZyoAkQ1rGTj3YjWUpThqYXXxmUgD8rzq57nonyvwN/bvI/HD/b8sFc+w3dcv8obcomEwiYsvDle8pndlRmdGXa3g8H5V3dEV8iV62bCf2rh+5ODriWDEGwjk1TskG+3XuzAKp2wXFIJ0G1cbE9wWY8h23oIuaWGEKseGb4+78KaNFOKQ3zGO7UQbLINumVu3ssorkahYITeVmXBzSBPO9s+elu//VNHd1vDkLNJbfjxSD3wi1KmGlaqa3AtWOxPQsDy0tqufMruOjO85UhbIwa7pPhF6J6aE5q2yvbTwvCscHjFnJmwt2wpP0a657o6msULYZ+P33HyfaJWkBdOHAt9qV9q36CwWzoGv3A9bbiZ+9WYVG/JP82AkCZUDMjS5vzJvhSPqfOuZuvdzR13Y8/+rl8L91KEfieEnbBcVZJ0UJtr3sKaOVZDJ7p1DNe9LlYJgQshbe1RUnFrcWN/ZyEmoSYO+SqjcWpUZl33UFezQqUYWTx3smY8mrOr6Kuzicw+3ayaW67OiVR5x/G55+A2hJDtWdX2PlFBK3ggYqrk+6WzeRyq7po3/qFcOHYfEvL7R3TyhN1RrLQJ9Nq+2TKZ7bV8Z/pb4WD9mgfbcy2ztcY65fj5xuu1uqrg7nzVELTqCC5X3brUzqM6t+p6Pa5zM3R007FceLxfKAgf3T7jSdjxLF3QucR37V/h+c3WwIY+0fnMEBPTYNpmltXoLPtYhZIcYZJ1j11DoZ9Nv/FkHQj1jGphwXfjab3H9iU1rhTLABl8lU1o6IlaEdFcHNSYMqKUviCzs0GXwk0a+Pr3fTtvzVf8d3zEMqqKY1kCEhJ6ZchYypbB4y7
VDug1N11q1ldVlW0ISxLiXtdNvPu2jf3/xmLqJQilB5wyX/eE0bFcQBhrAc+azfZ0vOC7/KtMJZCrXIA2IKmWchA5Pnfpq/eJaXVsghVUnatQ3BgiORYTu0s2yGlj3kjYBdwJMd32NKkaWm2Ds81I9KXNunqkUg+85qEes+lOUATfiD3l/nW9cvHkwLafeX59QemhVIX4FmePXSIVoTSEQnS9RlN49p099/N5yyduHnO47Y9Cl5Vnow5kEZ7dbelDM8wVvL2neZanSu1+gry3MiDxciaNjrDzK6KQeqH0nhzN+C4rGxo4+SecHOA0g/TEm9GjJ4cqbjRC8sV3Z+Ltlulxz/TEUzZKfG1P1yWmRxvLQJqPhlxz8TbBjKsxBstiG3TNaGz6XkNM+C6TBmoZGyq6dpQ10OBWvTXBNsoSW9FmOC36urx/4erJjq957TM86g5GKp23fOL2Cd95/apJLFxmusH4WE/HC653m9Ur1YPn0EUuHpuq+93Sc7MfbKMLIJ2hjMZLquhpI4sXbAOr6K/xBe2g4hriXbXPKzetu064uYBE3KYVe7b+W4Uc03ETtddNbBJRxkNHzo4PPr5mGxf2XeRZ9uTZ2T1qc2IMXXJe1tIvjdesDspVNt7m7Nh8ygjg4ytC74T+5mQTr0rbYdTKvdJ7hgwV8QTQPtSDu66XpiQNhDHjUqh7jVvV5BFluYJ4Zxlah9cdw9vK5lmie2ZIYt4EM/qXvEq3pMuIH028Udp8VkMnGsIUd+u0rHP/uA5WpESFMUe8KzzoJnZLzeKOpaqSV5Xzary6ySIBfjIV/eNAser/qVoW5N1uYDlEZOdtbCtSGm+E/lpJF1ZWpulV5Yb0dpDUjAVoxjzr3pXrfCwRE7xdAtxEws7R3RxlXla5oDbuzUE4OXAlW9kpFWcF1oW1UD3YXsML40emrX1+eNucACvwbtpZYe9YHsmKXDWnniSIN/mQ0yxXAI1QBQXseZqO4OwgubXU17s5DC0r1s1CuHVrqbm401oqrBpXu7wifXZuOZqa/sqfS7Lqtmnj8FKdn3yiS1jnTu6q+v9lhzsk3JzxUyFNgS/bPAPgzfmK4Ap/9dMfteSeuwhJWIqVyEkne5uLBfWFkmtWZqn3k4/PryfZlwJorpUXXIVjczFCu5qTjOg/eIX2L2pzsO1NTV3S0fhIybEUhy+egBBqaE9uzIJPG8VJ4UdffoIf2n+avzF+Gd8+vMbd3KFzhGgZFMCarnoXB9wUV+K0iQ/aRntMf7PXlkugOIYXheVKmH/IgXEfkG+LlsJ8Z4ssPYTXPvSCt3jE8Ha7trBcOA7vE37yo08D8Nbdqxba29picDFTWhxusfTm3Fcx0jpBL+K86lzdzT25OPO0Q1nvP3cWUpQihIuAHwW3FPOiqp6XNkNLoSmQr60ZAuucqXB0sVCkaFO2Lmv9N/J9KLURh8PVQs6Otz7+GNHmJdjCbCnGMWZy8lWX5oR0XCq6Vg3OQ4o4KaZnM3ncLPiDKSyv2YJ94X0Pb4kuQz4x4nKDwmXNgjFUCUg1vFqMzLxcGgFzOdjcS1u/wv3TFMizI+gRQbG+1ON9uwo9tRBrSxzQY8amRiN35sveVI4fKPlxIm4XHlyMDCHx6Qsjl+a2Kc2Qt4Vtv/CwP6zOx27q2HWV8Fqz3KK3fopdWg969e2nwgmFe+hmywxb1cCTqaFPj4XD+yx8Ox46vvP6FR72I69t7vjaB5/g6x99G3/55sv5zttXoCt0XSJI4cV+w3SIlhNSwN95Ftdz/crAw24kuMLQLVb6yEPx5qC4NSxdJUSqcbWqmpemaG4bdX5YSd9VB6rp67QU73UWn4z5URpDaOrwJhugFlaazXGZag2zXExi4pTQ3gjxdpjeN7jXLNfR4XemIXT46AJFuPiugB/bfKljUZ2J06SINneKr0Zw723Tn1ttEq2hCVkzfd2U2TwrjK94c/SqU6QONm9Ymvz80OqWXn6qino+7PBTpnt2QKN
Hg7O+VpODKJ0/hkBbWE5h85bJ5OT+mIX5Dviq7iFL9jiUt/cXjEswUcdQ0OxWlM/NupL72yFsdetMh8kVO9jawZyz4zB1VrZndjUMaKiKZOivlf5amR450saRN1bbMkclDVThUyXv5Egyr+v5dCwlQbmzxIz+uek0dTesiEszrtS1OcS7oJuyIqqmvSark6piiHv3/j3T2xs7RwL8/9n771jbljy/D/tUWGHHk+65+b7Y3a+nw3QPp4dRJIeiRFAiLSqZggUZkgMIw4IgwDAkSrDgvwwQsAxb8D8WLcgQbEOmKJGiYZIjUhMoaSjOTHdP53453HfDyefsvFJV+Y9f1dr7vh6RPTNWG7LfAi5uOmefvdeq+tUvfMPmtvwc5aPQaVzvqhEpoX6M3AnLsus0QYnGlVai+dR3WGJ3TPxLYXPfMT5as3o2IYHd+/MuFc/x7+lsM7WKvweypTx79hXZxkeojBXpilzY0JJs6RdGvWkylOLRC+D2nfMWJffU7+gGumFGMzHYsua6lXZiFww/uLxLZh2TUcXVJkM1llAZ5msZE3dVJoz0nrUhcArxFoSeyJY6xJ9oGoQIp/nd+Af+/a6fbOcq5jWNs6KCLecDbaJRxlZcbjuKsmU5dZJZavip0XN+z+BD/pObn+PpZp91m9F0W10dHyTrz23HMGsZTSpWeSE0WSWdFIjd94AoTmvx9usG0pzZ3NE0R47QaczS9EG8nQAa8nPL6lZOsV/RDTKyZWzHBllkBs/AtNwpF5xl414BW5mAxuGDES0pJ0lSUJA5CHngqFgxsVWvTDtvSprIZurKPB4aghMggB0JnsUqaX/retux2l1Iqs/KdXwGKuJR4jOJnnuJ3dePvIDEogjRgkNFZftgwV0VdEPHg9cumFcF3YcHPd1aTJsdB6MNNwrWeR4VlWOgSpiU+CxqZ/nw4pC2FmFJu1bkCwnKIAeV2UiidDhciz0S5kXdqrhx0/uXgKhoOsN0suamHaOvpQKWDS42DK6QteGdEYbJJ4KER/UUY+I9CSoFXTmUvRGvOOU9vrCgxT8u2e0AlIOGzx2ck2nH0+KOPCMb8B6MkvGGNU6S67bAB4XVvk82ghc5gbq17BUVmXViLu62gU0ekhwssqGCGJWm52aALiUJMkICTb0e0I49Z7Xl3Ex4Vx3zreED7u3N+WfufZOBafjgyTEAlbOsq5yQfLxiV1HVgl0qrSSGTWdxjSF5kfvo7eZyqZD7gBZNZ8X+wm3PcCUik81ETK9fwNCoaCpbbJOvXa/IGBAkeY/wA5N5XKdRC0vXaDiE/XLDtFAs1yXNeosn9JmwS63dnhDeBHTS4vJgNlq6VYcdZmEoLrR0uuu0ThBcZJD9GWLHIGFXpDMmX6PXDb3tVC/7oeTZOR/p+l5wQsOo7dYKVrS8lHuwvqPYf9dTzByLhzk+J+KPLONck88aEguxG2foRgQck5WQdJulq1MdJsFMwQuFhA8MW5X7FE8L2+FR3B4tqZ1lVpWiO6SCjLQJdE3cZ5lgxUwjwp62EpeEFyy3FPjGUDcGfZ2JyOpGRSFUWe/5IpDPHdlC0UzBOSXK7Znb6WKrqGAemcQxLnmzTcR1B3YmRsnFtYgM54stq85l8r7boewXb+S5qUxv11rEFSV8UYo9wcrkJWSB6aiizjuqq33ymbyXboh0NDdSNGRL6bS5yLBMEgiqQ5iEWiQSRpkU4MY6Or0tWr0BCkUoPIO8ZZkYh7tJzicu3ftVRv21KpCtHF0punB2k3C9sne7+D6A3rarHQXcxEkjBPp7lH4F63t3h9DGEWYZR9P7GaY0IkXSSfHzqLyi9hn/+cnnUSowX5XUVwOyK3lO3ShQT3J8qwkbE8eiKZYEbO7wNuDzLO5j1fuwKpCuVLTfUqlxkOy40pgyWeQEz393MFeIufJVNcSspT0cNDReS6KkNUbHrpUKYgzcaPRC8yi74p3mLt+8esS6zbhZDaQ71EXbnJi1ZsajCbStxQ8
doRbjyi3eI76R5DEW/cKyZdxsG4OpDdlcTjVbB9rx1h+qXRaElaWMmyiNDV0Z+GB1xKwZ4Lxmthqgq9gx0F6CVxBwuCsMxdVO18F6buVLxkb0YjyKXDtc0NzYARfXJaE2eB8kuULRbuIcPNLSE6A3AdQDagvyjFewkW5toy6KTr/HwL+TwSeMxPabZcEl3Jfab3jp9jXzqmC9KhmELegTLW36/XJD4wzrdCjuXCoIQLjbCGg1yxzNTUE+V2QrKG6kEnS5wg0Ar3h+uceqEZNUBVu6e3qLir7dnTp3XRR/HU4r1irQbizeGnKrBCActZKkExTwWTK3ph/pJs/GPulU2+ChvIq4PqAFc7VEtR3WB/Ii43B4h8U8Z3M7472i4eXpdcQhxXviFL4IUIravY1WDiDFRrINIoBfWOZW6O7VJqfcNZjutvhEFYgq5Q7lHOx0NPtWvkvqxVC/VvHwzjV/7O7b7JkNWnneXt/l/cURv3D+JQ6KtRA8nGbRFDTrTMCyUfdJgKyaZ1dTOq85HKzZH25oWwHW6e6/IcJ7+gJHDjwNnZhyu1xR7ymKmx1dtyagKxcxhPR+cWn8sV0IEjgFgyMaRN4pwtpia4XTisvFiLqzgm+LVO7eM3TntXoXhrRXY/fK3anl65aW8kzjBrC+B+WFjAA/ud777meQ5yJGwgo3lBCsqw613GwD/gsFkjw73cnryz6TIm1zW0Zh5Zns5fkjsewqrz3FVScH48DQjTKyeYNynuKyoroVsavXcqCoGCyCETkDv+MM8MJILMXReC8GVorJ717eo4kWKjbvxAPPx/URRTNdIRmyKxTtxBCsQldOiq04UQgaQiUTC9OpbberifY9LeQLh122ZGtLvlC4gcbtaXH2GMp4eDcufPLSDqgFDpKSy/xGRE3FXzEKv1opaFQ0DBfMFts95lOSu7NmUnfTBMHSZZ7zp/uoWmNyqI4Dw2eqL7pcKTG7mUJ72PFovGK5KPsOv24VqhGig3MaTaC0LVnm6FKXrC8sgVYxWw62GKv4q2/ueHnzuhMpljRSz9Y+dogdug20Q42u/bbgSYllIubkim6k6CYeMhHs9Lna4muN6qV2+saGlbOyG0qnNXX8kujycFRz1kz51vVDWq9ZVAXt0xEqk/1kKkkkm8pAq9EbOV+lOIMwdOSFtMTWgxxXmZ3uY1q4YfsruZEk02aIps1uZ/+9OIr97Vw/8eRquSnYNBnZUsTtABa1oS4ztJY5aJEJUC1/nqFrhRsEPmpu8e7mNg9HNzxZ7XOy3hOAW2QHuCyQDRuc14yymqa2mLntR1Kpaus7E2lRBqkGvYVspaiOAu6oRS0Nw2cGLiBbKFQwNEfwxc9/zNPZHvVzAVW3YyVg+LsVv+/gQxau5PHmgNoZTiYDVKcYl20v0ObWcst9LqwRMTUNFLrDKM9ayj0K3VGYjlHecJF7cT13O+woI9l/qvpDpvvPFLQEJZMEQztitW9kE2RbQG1vGhoXU1AKE5MTHTWSCLF16iOmwcJoXHGxHLH+eAJatF50G/rOWvCi2dSzX3Y7r0EqS90EzMxyUU6weYcedeg2i5YmDuWgmUh096Xn8/fOGGc1VxcTaWS4rSZV2vwQ+ta96lQUxWupV2XPhklkCldEraxczIbdxhDFhGOFGrbYixwBTTrfkwB8Joe2b2PXUIOfDFHe0x4MaPYss9c0m7sefVxxbzRnL9sQ9LbT0rutNpp1m1F3eyzrnE2dU28yynkCoYPuDJXNqDMnXaFVFKdcR4BzE/pkUbcu6sjEILKTHABoF1jdU2xearl1uCQzjpN6iss19/Ib/smDb1DtZ/zVy59l1pQoDd4rFlUBtYldWUhq3UYpmtbQRfPeTWtpa0vRqN7uSLpIEW8VtZnQuleyD23UdYprJFiYfUahG+nQ2A19wdCMNK6ULp0woXihO6Q6j6aTzsM4RO0eRX6jCAvDZpwzLGv2i5rVIGeZZRBMP0by2ZboAfHAjL6cyoG
6zMmWYsezeq2FLFB+KArnLlfxMJDg31fxJu4z5+U1AthFg5ltXhy9J+HXzvVVta5adBfY3JXOaXmZyC0wfCqxdPVA1LgP3qkxmy4yimNRNMzwVhMyTTPOyOct3cjS4018iEr3gfxGPm+9n3A228O5x9kZUSGfZhUjW/Plo+e8vzjifDmiq2IGksbVCUqwVrJm19Hfs/IxmZSOQhoLp25SGgNmKyEKFDMBS2dLwenYKlBcgjea9SQHGygasBV9RwjiAR67VcrL//Ufxmt0JxZKtvLisei8dH2NwlhhFrdjIfMA/Sgvdav8Toez7xBrEabVhWM8qVg8nlKeC4h+fT+QXyuKazFvDzpqoC0Ni7rA10YwadGw3S81Llc0Zcaqy9FKLGp2mYAESdQOHs44Hq14+/I+aSyYYAO9F6EPEeu1xYvatcdskh1YINsYTOVQTSfCoYltSNpn6RwKZMOWtpb5WzpT/K5WV1pAWr6+nQgOL1vJHgaZALWt4elmn4Ft+fhyn/pqgDpqhHByLcWcqRSqMgLAX6u4vySEtrloG2bGsRp43EZLnM+R89FqcXpQv/UoMIQgZ52PB1b4nSdW8JO2vwlQr3LQgb2liJAFDdSGprb9Dk4jPtWJlEJ74Hh/c8zI1IxMzbPVnkjftynhCNBBV1vq0pAbx3hcUW9K6epYORhgG4SllJIf6Yae7nZHcbDm0WjNvCq4tmPqtdA56yPxZiufW34wuMdgXNNMA2W7A5TcWP76sy9R2I5JVrHYlOSXwqJw93RkmAecltFBMIGulC6NzhxLV/SMuVlb9t0LH5MTUdjeBjm/s2GAntWUKPj4gLdb36vEltn+iv8W8SFJ0kCAxxrfBXrbHHkjpJFYsLC4HqJM4Ke/+gE39YCz0/sSDGLlpq10HwXYH41De0d4CC0YBHdWDTPUdUHmJTBmy0A+6wgqySZAfq15Pp/yxq0ztI3jMvdi9yqN8YIBN4CQB1ynORxuuDNecjKeSNJyNhQAahUrUa+4dbBAHQZON7fQyYDWRNaRjfc714Rgtmyb6CCgvIxWdZdA/wqzbilCoLw0+FxT5Tn+gcaoENWaFYneH7QkgleLkQjHtprQGFQlDDS7iZ5gnQIsdTdAOUW+kIrTrrcHkytMTK5Dn1gp53uWZ1/lKsF9hOcZs6sjrrPA++O7/UYtDipePrrmH779Fm9zB1fLPXGd6deidluAfOiA84LTxjCbDpgMK/KyBS3SGmonAUr4q2QbJUr7bMeFQejv2TKQLeT+p+6cbhOYdost1I4ILI64u7hWpcMo69XNc/KZRHufBdTCchWmLEelqHPb7ejUR+HihNeQ9xgiYSSu7yhsW9+XFkT+NGNwFscsRuKNzyKu1CYLDiLlXPVJihtlKOfRmxY1X0HXieXGC4FTaOvVnphLCzPNs8kUo2fSVZ2/BuMngeG5Z3Oc0Q5zXC57bf+dCrNuZD93nm5gqQ9z8nlLMKbHncjvsHx5K9gq8SFlVLF1Gw9JssC9csbdYsZ3mwc8GN0wsC3vNpYsc6xUoF5kMjaPcir5tSzCTWPJVlEMOQRJZgKoLmDX2/ujvGCBbCXMNd0FzLojuUyUNwGfa5qZxQ18jDXRJmktYq26dpLQR3VxHfG+fac+SDfMrDvZwzHeBSMWUxa2BVc0hA6x69+v50iikCmCfJa0114+uOYk65hfHwumqxGCVDtVvQ9lN1T4gWdV5aiN6QtS3YJdKHQB9UDGrmkyI2St7R70GSyWA4qs64uQnum8c+km4kR93CtaSWLlA2bdEBpNNrCYqpNCIKqzZ+vwwphVOoTxrB46uqH5EfkFlXIUpfomUDsOmIq+EC9mnswEuhh3P7w+oDkboiYt6iJneKYZnEmyWh8odCUFl/0Eps6VmrY1MsXKPD4LBB0TPqPRJjUb1ItTmd0rJVhpDfwu8Fi/84Hip9en16fXp9en16fXp9en16fXj1w/cW/B0GgwnxAZ8wijKyaJdRAz5vpO12f
BHsWe3fD/ePdn2axyVLXN7pN9ALVmk3nWXc7eoOLjWw4bRfdeEOJMo9RYgSmnULOMRTVh6acMnhqmS2hHsdq+UzPdW7OuctTTEfW5cLZdFDHsBmAmYtfQesN1PcR7FS1cFH3XWAXMuKVbG4bPtDBycghBcVGPWbYFqy4n0w7nNfOmoG5FaVQq9J2qrA69kWg/ukidgCh0mTBWSkvFpVsfsVme0AkTTkWKbeqUyftkiyFwToybYzcgiSQ+un/F77n1MWNT84P5XZ7se2n9b6QacbVh2RS0Tgxrdau2ZrKbbdXYDeXr231PfiWGsqZFqs1M9/YuALPrET8Id/CdFsic23ZE0nNNPmLdEBh1TMYbnlztEwIMSzHVVnsNrc7IFsJmU6XjcLBm3eaEwuMGeisEaNJYUNGVBqMUPo+GoBForQL43KBzi76YEaoK03WYouCQBxTzgtWN5QcHd/D3YtcnivRpl9hTmmbPCv7EafRK7lu2jJZHVcAbTTdS2KW0yLONdK2yZde3udXIEFSIIzsvoyXvX8CFCP5FqOPZXFF9zjG9u+DB3oxZXXKzGrA+H/HOyZCTxYSXD66hUzhnUblDbxQmWjKpICMNENaR78QE9qDcoFXgshxjIzW6HxnkqvcdDFr11OxgFcHLmm1His3tQHkRDWPXEi+6oZjutuMICkZAwXoH5BTMtuPaThTtQduXkXYt4yaCofWKBhjubXoLkBdGKRGLCERNtRD1kQL5qwusFcNg/e0J2sm4x64VxVXU94nsTR+xOkKgMNjMCns5gF22It+hFGEylBEdSCepaWUsiHQkbSXaVi6DdiRdpNWDQDfx2Ll4161vadb3YndrJR3c6zdKRicZxWVNMIr8qmL22RHtsGBvVkOwJOshn4m+nC9ih8YEERze7UiEiAOL/3BoVkxtzfeu77FqctEobC3MMrKZpriJkjWV2caARrTKEkNQQYxRApeIQV9YdLE73QtUto5gbIw3AknI5gqzMeQ3IqGTLwQw760W4H7jeuxcEkxWQbBFQD8SlVF6jCM2EIKIyOra4XMhcbgI+PYm2s7kimzDFljd01eFvf7d9x7KPTtytFPF+EONWgkkoZlCNxI4yv79OcOiYW1HMj6PY3xbCfyg3WgW67K36TFJe623t1G4q4KLeO9SF083QQRB20gk2AiRQHVxGpHp/rP7Ivl9xtFoPDfsRlPctJHhqDGFph0rNpXgiIMXFnw7lPsVIs65P1ZS7NHgDltcq3HPLTqD9bGhmSoGg4bvnN5ndTLCHtZ0lWX8VDpPXZnA/6BrGcfbNS/gLbuRotlkAkXR8vzS1MHnWtixzvdWcUmwWAzkfd/tFiFMJ10r/4m232/j+rGTK6WUAb4OPA0h/Gml1CHwl4BXgA+BPxtCuP77voZHWEVZ6E2ICaAbLYDTaMQorXP5P7PSmFoxsRXfW9yneXv6Qrst0VhBHlwzsKzanNmmhIGjU0IV73E/XvUMRAHTBkIe0JVm+NhGaYbEdJGEJsxzFiZw62DB3leu+OD8CP/BCJcHVveh3Xfc2l/yp+58l7t2xm+uX+br5iXeWhTgFLcGFZ0T5l/bKNrS00yEHdKNZUNe1UPWbU7nNZfNUOjibUbTGNRaDlpTyUEm83h6Q2BiUkEAup2OZ8pWO4lMupZDOBjdg9h9VOVWESeSkt0XMFLp8j4qCyt++vAZmXL85bd+hhAQwbod5lqnLafllK42DBfR86uWzWDq7YtrC3qj4X5FPTQMzvIeUOqV6scpzV7gq689pvGW759MemBlrx4dqd6Jju/KQDZoGeUtnz284GQ15XQ2wUU3deVkXOwGAVu2nC7HrKtC1kfCXRnACJZBt/Hw77ZMNVfIz/KtJLK+tIQ7++iqo5uWtHs515/LqI6gOXS8euuG42IpY+pMiBQeud92pXAXBXQKG++jqZSMTqpoDD4W4LcbBNRaiddk5dDRHw4vmjsoLayYHrgZXsC+BC3J4vxV6I4b/sx
Xv8UfnrzNRG+4cmMu3Zjvv/SAt2a3+eDJMR/EYEWnCa0A2nTCUvmIR1Pg8wBBsZ6XfBhb/P3YcAcMHnaCbsiEVCDq1kokryJgXQWYf6EFpyjOLaaCfC5EDjmUJAEwjeo9/0ACZi89EMHFamP6MYLLZUTlhw5TdozKRoo7W8pEQG/fn6izBzCCN3SdsMC6TrO5HJCfW+ojD7drlILmMkc5E8HCOhY/MckbiU5dXpges9dOc/LWoda1JFMhjuNBEi2levPtZqJov7bAO03z9REuh/ZuQ/Y8Z/REMftMoBsFBieK8jIwOpXRenVgaKYa5XIhs2SKwaWjOjBSFHjfj2xE8FSSp2Yaerp/v7fS760kY7XPeNIc8gem7/JTo2d8Z/mQX5q/QVE26Ec1m1s5i5tcEo21FoFgH8fsvZisllGwkf/TkXGZfPvypRdj9ogHkvepJRFc+T7uSOEZ8UTN9jOpVgDKJgjYWWX6hbEjCnQ8dBMpJCRAsw9o7/EYGWG6gG4sutC40vQjbb9TvEjFIa+TFx3tMqd4btGt3NPly57yXJPPIqA9SEI5uxmyzEqymSTSXakgjsSVl4KsqS1r7XGtJuu28Q5kH+49mnFvOufN60cviOwmkot2odc6063gR0MURVVOodeNSKQg+0BiviMzCntTo7wn5BaTG7J9Q7bQeOvQg4CbRcZtgqwkYH88a4OVn6Nzhyodm7uGdqXI55JUz28GjPc3TO4vWJyNyc8sqweCFy1PE/6GqLwuiXXCLoL8e1MZWiC0WpoRu2zqXWmauIYUSBbkpAERotYVGGlC/CSSK+BfBX4ITOPf/zzwiyGEv6CU+vPx7//63+8F0gJJB4qKKsv9gRZictUJ6Lo8tdg1NNPAVTPiuyf3cLk8LLsUlpZqJRgEHVW6WwlMq1WJvs7kge6C61SQTZDMmw3YacP0wZr2FcPRaM0X90+4aoZ88+lDVidDQubp5jkny0P2PvOMV25d8faswF5bSXIqzcXFhL89/gLH5ZLaG86WY7KTHNXB6paoORvjaaPGV3PkaDuFXWlsJp2q2hmaztB0lrY1Ug3ErlXq0iT8Tbby6CZSve228gpKEkJAmB7tlgmBi2rYMXlQSbrBx2w9YnF6Boo1YnljbR/0s3WgqRV/9/krLFcle5MNo7zhRI12pBgUqgy4xkBlekrx7jx+l71iGlDvlahcnqOob0tVm8T+VICDfENhOr7PI0mO1PY1hLkmlY3qAt7A4XjDOK+puoyvHj0hu+VYuYJvX97n9HKPdlXii4D2mpf3rrEHnm/UL+GXgqFKgFxXihF0wgr5LFp65II7cEWgG4hmULeXy60uNe1Q0+xJYmVvVfyx229T6JZfVl+SNx6DHlo6BcEEoS0r0FXEqIVtcNBdEEHddUxu2ghYjriJRPPfYh5UX50JK3WLyfMWuj3HwfGCRVvy1y6/ytPVPq0zZMbxmek5v+/oQ0ZZw8lywhIgdnd84fGNhk0CHMd9rDTNVNEcwa171xgVOMlGsN4mVkmTLFhJSF1haIeRal+pXo6lGwiRJL/JJIn10rE1dYjODuoF8cLUwdU7xrq+MDQHgXzS0C2G9EKOwOBM42403dByfteCU4zX259hK7BVxCFpwX5ighTzNtBdDMgWmm4cePSFE6rOcvrkAKVENy+JQCamZLMnKurtSNNNCrqoIdUNDcEM0U2JndWiL9e00nXESiFkDO1exuaOolnnhFZTGGj2PdnznPJCUd2G5tCTzTSTx5KQqy5gG0e26mj2LN1Ikr3VHY3daPKFxw2zF6r5YGD9ZWkdhes8sr+iPmAft6V7QNDUUaPkL538HA+HNxznSx7cuiEzjqv1gNYaXO4lARk4Nrml3dO0Y022BLsW8LtWsi4Shs5Ho3W5jxpvImuzERcEnwvIG4hGzarv8tiNFwV6In7OvIiv6bFG9DW8JHZGE5Qc5sROnjhaSCJoli10HtN5VGclUS9lUtCLDEfCkbLiypEZx89
+/gO+v38P9ZsTBmeK9YPA5q7Yv9m1vIdmD/JBS70oKCuxpQkRL6ii3p92Ijvh3FYgOHWHgpY9PSoabpVLmUqkhFmlompHu20HSyRAb4/PDCpqz72goB9jj67b/vu095i6EBz1TYkdt/ixkw5RvYMxzIVNqFTsbjkRRO0qS15L96m8DrRDUCZwMNzw7GKf7NLSHsgHKE4Ng3NpKHQD6ZCbSp51wrqBdOpwitBqVG2i3FM0mt7VuOq/Qf3IL6UUQRtZA8H//ZQs/oHXj5VcKaUeAn8K+N8A/4v4z38G+Pn45/8A+BV+jORqV5coCe0FK2ye4NOpHwUoCxE0a+81vL84IgTFS19+zs16wOLdfUItoGgHPRtKCtdAUTZ0m1La3ZZ+g4sVRwAdxB/MCM01t46vHj9jP1tzWk95ttojyxx1Hrh1fyasveWIt967L9+fedzIky0MKg+YzFPaltNqIofRquyrhra1qCgvMRlVXN2UmJXG7XW4ToHTzOqS1mnqNhM7MeOpKgmkttoZB1YR3Ll26M7HsYVG1922qks2ElUrwdpHD0TvJZHCx9HJzsNJa4+4ENOa1UqUa50EK9NIlbGuco4PFvyrr/0iz9oD/p1bt2nHAhJHSbJgi442AraJGku9TELY/u4t5EvF4IxYoSIdNqtwWQKdw3/54WsYI3Yj0uaWxEw3knDqLip7IxstN47Xxpf83eev8ObpbSbDWvzzgrBKQhwLj0cVlcu4WIzwlcG6rfjklj1GP3L1NlGz5d+dU3RDjfKG4qpGbVpUVYMxZOtDlvcMm7sjfv3+K9wbzETTZiVVFUC2kI6PKzXd2KFajVmr2OmL1g1O2D26iezV5HCfnlMa2yZRWdm48suYT7CGZI0U55qbdp9fvhoTOoW5zuJ9VDwePqA7bHnjteeUtkMvhHkbok8asNWEKiSItWMZ7+pJy3xTvuCTJlo5wmb0m9RJEw2abhBFHzOFcpLYttNAO/GMPzQUN6EHw6eOoc/k+SQmpVTvoRcm9lG/zQ0902HFtR5KXFlIEl7dUjR7ATftyIYNXW1RPutHUL0AI+yIT4pPqB23TCdrcuu4N5rz7tUtlk+n2IXYr6Ruwi6Y2GcBV3raoaYdW7pBHJVtHPlNLXvOKBl3lVLM6GX6dxlDmRqRKdCBZhrwQ49uDIs3WrK9mvJ7Y4obWD6QteQz0A3sfeDFxgTpDpZXgZvPavK5objp8IWVWBjjxnBcMy5rTlZHomHmUiIr4+B0j5yDWVuynxVMs4qvnz0iN455VeCcZn02wqw1eRUB8s3WM3JwIeSEoKD36gxEJfuYoLi0x2XtE2I8UsmbcadzE+Rri7knWzh05YSAYlTfCez1rpTYe8mZFASj73wfD1Xnpbuxy6CuuuhP5ySOxsuupAsWzHatA2K/FjwuKDpv+Oztc777eoG5yhicaLqhdBmDiaDsRxt+76OP+MazR9Q3FrvSvb5eRuw6tdJ46FoRV026Wv379vDsdJ+b9UDEWePI8JPdG5lYps4cMvUwAvx3g4xeLxEQzz35+UlQOnle2rVHtwa8opvlqFZ00owVyIl8f+gV5l2nCJliPKpoS4MnF3LUI007CZTDho+fHJGdZbR3G7T1jL41IFuEWOjI+pUJTiQ4xGePikV6K919vVHYSkmhFKVcVOdFOy5qW6lPCGTL4pBCVTaLlsbC7/D6cb/z/wD8a4hTW7ruhBCey/sJz5VSt3+rb1RK/TngzwEUg32pgHZHWchBGJyCaFSMCmRZR/fyBmMd+0VL6wxfuvucV0eX/HB+l++Ve0DsdCTtEQfowNV6QG4d6wOHWQmtl6RyHTPyEBA1VxNwG8vJ/IjTd24R8oCdywih2fPka8XisODlvSu+evSEb08e8PjkkHBVEHJPddsRJh1v3D3nX7rzq6x8wV+7/CrvmVs8fyjdp7vjNas6p+sMk6JheVDRrYeolcXvddBqNk3Gpsqw1lMtC3neQTJx6VYIhblnzdQRQ2A
UKhrAKhde1KvybJlT/Q5UcSy4w3ix2zFh/JKY/EqAUUpJuxQ5mHQT+NrDj/na3of82uJ1nld7JI+rbYCSCmvbXqJnd/VJtqJPCNevtLTXlum7xIpN91gVVwbCUcNkVHF9OcFs1AtBVTSzIqag8ZGOr3BBcdMO+Bde+w3e3dzmrdltnNecziZ0lcXo0Avcvf3sDm5l0SuD3Ww96YRhF3p5i9QpTCy0NG4T/RaDXRls3RLGA7pxLiOZfUU7Fv2qWTvoEx1n4zOayGtnc0V+bft1bWpkHNj4rQJ01Dqzm3gPdwODTwd6SpB3JB+I98rLc3ZF3DuNYnKwYn+44fLWkNWyxC8yylNLNs95q3tAcbhBN1EwNRdcka4jfTxi8NAymstuNGFZsprkhMwzmimyeWRvGhllZpvQ++6lLmDSHApRCyPp1lQ/V9FWFn2dRRxciMKcAVMpuk6sOnTqHkQMCSrQlYZgZbyez2QPdQMIk4iHDKAaTddYqXZdXEN1xLOtol2MisxlNGixdfr5++9S+4wfzu6wfm9PtLnuNoRaky2yH2FoBYWwV0uhoSd5j/nLOeWeJZ91FBcbOcCbdvuNedaPN5tp4Oe/+kM2LuPXus9A7rnz1QtcUJw8PSDvoLoF1cu1xLboj9pODYMTS3kloqz5Unwl632FKzTKGbRSEkscLJ9MWA6HgnmKchIpgRUz39iZ84rGWT5YHfH5ySlfnjxl7XN++eRz7Jcb6v0Z19WATSM4mNXlkOzCRvFPOSTzhYpdo2gP5Lfr1MQx3672FLE7n/T8lN9aIPVaYil+9d2nmFTp3RioYmyM/+7Zdu+sBh2FX42WNRCTLJWsmpwTHGktna6u1H08kp8R0FmgsI5vv/2S7PPcE+5V1K6kuBFZivooJg2d5ulqn82ywDZb7TaRlIkepy3Qxi5e/JokMZHWWDlq0NrH/Uo/gg99TI+fLwRoVGwZCiYJwM7q2Bkstvcr+X9+gjmnWy+FIYAOmLUR2Yw6bAuLaIKtjRN7tCjsrHWgOfSCId1IbNnMStAB96iC2pC/N8Dl0DxQZEuRrlBui7NLmON0CaZP4YOo7ds1FAtJtu2qRTVdTLBcHx/TepA3qWPHmC1L8L9NEVGl1J8GzkII31BK/fxv9weEEP4i8BcBJvsPQ39gxUvo1Uq8gpw8aDpNXWeUgwbnNLPrEa+/fsGro0v+1uPPs65y8cdLreq4GXWtoFPMF0O6yqKGDqeBhYkLUfWCaGkDqsyjc4drc8ozI3irQdhpjUL7dMQ3Fq9w8uCK/9HLf5cfHN7nr3zzZ0XcLQ/gFeerEX979kUAKpexaTK4ydEeVge5iJp6xaqRsZG6U+OXFrzClC52U6DeZOjM4yoDtWbX+T0pM+vaoyM+wFkrh1ImFa0IX+4cqOleR7sRn0Xz5uTjZfVW0iHdziAA61AYQptvabtR5V0FaLzhF06/yFsf3EMXjuLMYCOwOVkitCuLavULSZC81jbn0p3o33T7gl1zhY3dIS3A0YF0wcZ7G/7QvQ/4TvGAJ+f3SMbPygvYM+gQwe+q744ZFVh3Gb8+e4WfmX7M75+8SxVyvrH/Mt++eMBpfQgqsFiV7E1X5IeOk2cHuCoTOnhaw1oO84QXEhmGIOM8o9BpfFgo5q8O0F1JM1E0e4rVK478eM1nbl3xZ+/+Bm9X9/i1g8/gYjDWnSJUYh4tIGKi/pGMBXsfPu9lLFzJAbdbJacuZNKSCgmkac0OQPrFpNZnED6z4vXjK/61V/4m+3rDm809nrX7nDVTvn75EqezCer5iKbKCFPXH7SuCDHRAlVDeeXjKE1THSqq48Dg3lIknZ7tkSXcjiaOU4XgkMDsfidJTbYwaMjOMrqJgYiLpAPVxgIgqVenve/iQbsrKmsV5dGG48mSj4diNG03YvkxPJF12o4M1W3pAuQzAc6bWEDsBm6c7E+com0s37p+yNOrPZrzIeGw5cG9a24NVjye7TNfHgi2yW6Ni7uRx0x
amonFrqN4bey2dANFV2Z0YyPJ5yJ2nFvf79/qUONf27BoC65r6cKN9jfcHi741vsvkZ9krD7TUO7VMC9Q1xmDCwEDNwee9T1Jbl0Bq0cw/kjuQzM1O12N+NtBgzWetpaN3Hfi3I6HXCQodEFT6pa/8u5XeHRww1cPn3B3NGdkG95c32ZV5dR1BkFhRy1d7mgrQ3dhsUtFMYuegT4KyTrREvMamrFCl9KF1B1kay/drhizRGhTnlO2jObJG8EI6brDlTbq8umeMAGxc5UCGRKTXKEJJotFq4yCky2TG4qptL2p++eRMH1pb+lkDBzPFa1lFHY0XHGhp+QnmZwlex5e2rAeFZQngqVFifL96XyCvsgF8hLPSWXoYRVJ1yoEgX4kfT+JAYJB/Mrd53Re812zL7EkQ7TZ/NaWKSS/TCtFts8F0O4yDeO8h5eICf1WFiTk0sUIhZEENXaHizNDOwlbnNVOIoeRzpW1gl/qcrG3Wy5LTDSLLq6FPIaC49tzlpsC3hygHKxf6sQWaKX7tbg1n49vLE6I0lQl6C3g3W6iE0ArGl49QD01GYjFaAK1p62Q9N9+F1IMP07n6g8B/4RS6h8HSmCqlPq/AadKqXuxa3UPOPtxfqCut/o0/fUCTkQCaLPKaa5K7MJgPNz/0ox3Fsesvn8gLBGI7WkJmCkA6ErjWo09ywhZwA1Efj9Z7fSXks6Vyjx3D+dM71VsXs/43N4Zj8prntX7/NrJS1ydT1Erg73MeD6/zV8bfJV7gxmT4yWLkwn5pQhzLoYDfunJ51gsB6ACbpUxeSxBe3kwjJWMZ11nNLMCM24pDyuapyNC2VHXog3jnajOm9LhWh3dyreieL2eitGSUGTS4QlGSytWQW8M66J6cJE+c+y++O3N19pvvZh22suqC/QeTJmN+iZxlBjgN955BTXP0Ec1g2ED9QC7BJ+EOdPNjgf+C1Vd2CYCgsODyVsZwUYQayWJo+BzJLGpqozfOH+JqsniZ6NXbk5jg16sMq6pzmsO8g3/xfuf4TfcyxwerAAY5Q3rJkPXImZprWNa1pL4hu1BL0xUuS9bjzX5fD6nT1J8F21dGrnfLupj2RWYhYg2rtucNliu2hF6La4B/T3owDpFpyDoIOa20cEgMXpSl89WgeGZvBfdenYV+kMcr0AsJBIwmm11K/8nn+fV4yv+7P2vc2xWfKt6yN+5+TxtEC2urx09Zny75hcnb3ByPaFdW/xYmFRhZWJQF60ngjAPN7fESNdNHN1MVKIHCbxs41iwjHgSI+vRR6Nd3xCTarkn7UEHTjF9x0LghTFs6krYTbzHFVEQUYoP4kHqculQb9pM9IAaKK5ltLq5ragPAt3UEYYOtZZ9rJ3qOwa6M+g6gXu33eAAfHyxT7soMAc1/8Mv/jpDU/NfX73GIG+5njq8FXVoGYMpwqRjNKrYTEuaTRxt5vJei1nqzMgzdoVBW4XxgnEJRlPvK7xXfOP7r/V76HC05tsfPURdZTR3Ol5+6YLL1ZDyo5x8Lsmi8lAvBPvXTmQsWlu4+aJn8NzQLRR6JOtEt/Lhfu7VjxiYll+Z/ZQc/FEPb5tkgfYytl+1OdNM7JjeeXKbJzd7WCNd2tWTiWBKG3qngQQFKG62jgKJwAAR72bD1nUgJcxpNJW+Pq1pvdU6Su8taIUvs60yt1Gig7wz7koG4ele9uP3WEjudny91ejabWNoAkar7ftQMfHpOzYKtA6UpuNPfPn7/OrBazRvTsmvDHWRoQ8aNoUlP7P4HF57dM694ZxfPfkcrlS0blt0eAOmi+ddq+N5pnoiV4pZPoePF/s9bhHojcwT+WTXZmgbK4hdZYcrDcn7su/wEZ9b1KZLY9Z0juhGUZ5tNR/Te0o/R8V7oY1HW8W96ZyrrGP2YUlQwnht9x2j/Q3nJ3sUTzPCINAcd2QXlsGZYnDhtxpydotx254pUfG9le636JxF+MwmjnS977F2/bOEONIVl4Lt2vrRhOu3e/0
Dk6sQwr8B/BvyXtTPA//LEMK/oJT63wL/IvAX4u9/7cf5gekGbNVeY3vXBIILW3B7oynOBZRW3XGcVFO+++QB2WZno8cqyq7pH6pdK9rDeLjNNO2eimJi268JMbHSKuABqz2vjK/4E/vf47695r9avYFVjv1BxawYkU+lJbOZlXz3/Qf8sLiL0h417HALgzvo+OkHz7g/mPO963uc3EwIpWL1QKpCW7a4Tk7nYdGyBng6oNrvBDd3k+MPGza1JR+0dPMcFzsMwYQXNy07nyHi0/ru1E5iFR/ezmZS4P22lRwp0DrST3UCfcZDXEXz5j5zDzvPSwG1gYOGf+6L3+C8mfArhz9NN4qdnRL80KMGTkCMFlQcc21lE+Kzjj+2uAm9qKzqkn2JjG9C6Qlec/ruLcxGY5oEPI4bqB8dxLdqZY0tNgXPN1P+iTe+w3duHnC6GKOA+WpKs5SuYoiGxB++dwd7Y8idsE5MZKOkEXYwoR9piumslEvKaVRQtGOF38DgWlSebeVwuUa7jPWm5OP2iL+c/SyniwnFZbJtiFicQcCuFMWlVHKp6hNmpt8+9xCfnQPbbNva/QHyQkv4xQRrd2SSKtgPzo74q+Zn+L82v5+Pzw5Qp8X2aw9aRnsbPnt0zs2mJMxH+DzgSy+4Ouhp8j6TIJ6tENHaUytJZwHleSBbCcjb5YpsLkrbdi1rK1lH6eg/1xcGA8ed4xmnoz2YZ0KEyWSMq9qtEGBQMlIwEctjarClYMyasWZQtCyrgmwmyVhXKhjILZLPoelcJAj0YqSC0bCrTmQSkqJ8ZICNhjVfPD5hYFp+z/QjfND88uXnePPsTnxh+dpepNcAKmC0703bfTRin7+q2Kws+SwwOnHYjSdbNFFuQOHz2Jk24GvD+M4SgNW8FF9VBdPXb3j98IJvvvcy5iynqOWZNPsxobyRw6Y+FPzp4FSx/krDOssYnJtoqbUFV79zdYvD4QbVRaxVitmJPNDGvdBJ4vr+7Ig/9OADNndFQfzjxT73RnOag2su1iM2TUbnNOvTEcWFkRg9UlF2QaFLgSXsKp3rToQ9deO3JIUYE3cL89Tt0g2x8FGAFdxUb1ulUDr0SbmwiySm6SYWLi6Iq4ELfUdGOU/INNmsRjWdQCNSUqVSh1z24a7cT1BgdEBrz6rLeW1wwdGrK/6T9qvUVyX5maUbGRg72n1PsIFXxlcc5isYSILTjwX1Tjcs3q9O6V7SIuhtoYqHk6cHqI2hWKsIKwhb6Z7ax2Jt+z4BTO3phvLZiosNPjd0k5ytNAEvXvHflQuU157NHY1xUNxELFQdR6h9k0hU09O31s4yKWrOj7vtm9CB9aIgO8+kuHpYw9oyfV/IW7aW9+0zqe77ZNjTx8FUbKPUlpAScakhwltiy1J+5g58Il09pKL/h09++B//+p0PFCWp+keVUu8A/2j8+6fXp9en16fXp9en16fXp9f/X1+/LSh8COFXEFYgIYRL4I//dn+gdhB22AtpvkvK0AMx9fd0A/AHjuLumh+c3cVdFYR9Lx5hsy3zZDvOEsp840Q/KluKozpBQNEv6JDENiVOc74Y8V8sX+d7V/fQKvDRh8eoSvBUZqmp78Dx0YLb0yWd11yvBqzPRlB43K2GycGaV0eXPCqvGJiGr6uXOF+MWQcVxStb1iuD2xjcRGFGHfbEki0ymn2PbrW0rRtDXRvUsCNsDGZupWrsBfR4QbuEeKuSKa/c0/Cj2baP99fLX1QaDxLETFpF/0HYdq6SiNxuGzX+f1Bw+OCGnzo647Se8uHyUIgBpYwXQyZEAZx0FUgjth4fk5557MJUsDlW+BsYXG5BpS6XroYZt9w6WHCytqjli0bc/Ti4ZxNBCAJIr+uMx9cHTPOK33f0IeaWJ1OOj6sDfvP8IefNAXjF4mqEahW+DJiZsLLsRiwaUsew91jzMq4MRWRftUF0VgdCDsg20hpqx5pmrFndV9S3Ow7vzNEqULeWdiIMJR/p+mj
R8VIOGgfKK7KFaN8Eq4XhEraft9ce6p9JqqZTJwvRcLGB0PkXRgG9xUsG7TznrXAH12nCypK1Im2iW4U+K3C24DdfGqHLDtuAXYsPYGI6BiX4h/LKky/lftRTw+ZYsbnnQEFxZVDLLcA2jVj7pZnLuNAUoh1GYnGuLdeLIZ97dErVZcyrghAUdWup1jm+MnROGIy6U/hGRf20aKcT78OdyYJFU3AxmoIS/adiFiiuO9xAntHmSDA5w/NOGFCNj8KRHcp7YT0qUIVD6cCt8Yp/+d4vYgi83dzh3/3wj3AxG/fSKTRaumseYdo5oDbitam3z0A7yOdCjhAfPzGNL2ZWsCKbTuRTvDDLXnnpnNcml5zXY763uM+yKvinv/AtAL5++RLZ0xzdKlavdKiBE+xmbRi8l5Mtpau3euDJFhr9rMQfdDRT1ceQBDm4fv+Qq9JJBzeKNibmmalDrymmW2idpuksf/udz/PS7SteGV8xzWvuDhYsu5zaWXxQYpR+f0F1mLGuLeoqx64U2SqBt3U/ntNOhFebiUE7LQBpJyMeFe2NZBQez5TGMzpzECBbdb0sTVNsxzyh3yNpDARKKcHj+J1RYIx9KjISde1QVScjsXRFC7EeZuFCP1pKl9YeYzyd1/ylD34PuXVMRxVd2TBze2TXGpZRbsDDe/NbfLe9h7nMopZhjO3RWke6VgG7SF1G6RwR4vOzMmbWpSOsjQj87jBWe6LAzrgOpfpOTX5d9/pWuvmEFdBunEkY0PjZs7Vn9FTRTtQL+zqtc2UCxjqKrENreb1lXTBbluAUutbkNxEbeKjp7jbozGOelgyfCfvQlYrixpOthGGaLJHSmLh/LLWQbVyIWpDtThfKiL5V8rGM75Lf8vpd4Kx2r5+4cbNu0sYM6OR23wqdHScO4LpRuCH4OzVZ0RECrK4GYAKD1xasVwV+E3v7MVD1StEe1NISskCzn+awQByhpYXonAhoGivBc30+pD2fgobpmQTq+iAaQ94MuJyW+HsV/+yXfhOA/+j6a6iFoI8XDPm7xassq89TVxk+KNw8Z/hYbu/iNQ2Nxqw110oIl2ESKC4U5bmmOQi4tUUVntBowiaCeLMArcJsokp3pJOqKFMAvDgTh7hetosjqR/jQUUkeTKhle/bMcvtvynEoL6zuXY0T5TgPfnBxR1m7x2InMEz8QD0uWi2uFo89UyloiJ7fOaRWh20whEwLeQzWN1TVIeKwSV9kPdGxmaZ8Qyzlv3jJTd6jH0v7zFcOgrjbd+7vD+zga6yFGXLN/6rN/j18WfJbm3I846XD64lAVuKBlc3VJT3V6Kq34zFHNTL+zDRv8sTkzcfk3QtCbrPBJCbkob1HTkk2gm0k0D22pyvHJ/zj9/+LjM34D+tv8Kzw7h2I1khu5LxsbdRU6uKCU5LFE6U8YNpPPkStEsHkYxuVWRHKufx3YvCev2+S0rXMci6Esa3VxwMNwysgDeuNkMW64JqlZOd5phKUTy3NIc6ak0JQ08nbEsWXQw6TVfKaLSdCKst+UmK/tx25CwjsS2Ww2WKkIsPmLeSiPhcgQ40Z0PeuhiADVutnEajNoZso8jmco+SCKFu6Sn7SSPobDlmvhxQrMTHc3ApoOjFw4z6QN6vLwJ2qegK3TOsRM3b7khdIHsnQOsM36pe5v3NMX/v/BVOr6bsTdbcGq55Np+yui522GtxRBLZs/2hE0c5ykN5td0XupOYSJDRvG49QUFz4HhlcsUvv/05Qm1QtSY/XPJ6ecZfevo1Pnz3DmrqCeOOlx9colSg6ixaBc6HY6qzAeW5wQ897m4FjweYuaEdxZGfC4JRUxAGkqjYlY6jpYTdoR8xJRzdus45GK9ZfzDl48cPeH//Lqp0vJcd4y9zspnEgEQk0hZyBPuVFNe1i4dml8bgmmTAvd3TWzyUjmKiaY8qH+gM4pm34/HXh66QCku2CVQaBwWRETHLml7/yQVCZkTuqukIg4jzbJITgo5xII6
mtHpBMkW3Guc01niOyhUfPj5Gzy2+9Ohxy/D+kpUdUZxazFLji8Cizrm5GVHcRNbqJiZMYWddK0l2QxTR1C2oiP10BvzEcf/WjFO1h3o62GLV+s/KzmeP99SJLI9ZNTKmHxU92SmNzFQ0Mg4Rb5WSVCk0FYNLTzFX4kCQx0Q9aY+1WnDEEQemVODBZEbVWpgZTB1jxEDgH0oH/EXBwbuydzZ3hZlc3MjP022Ur3DhxRjnQ2T6xvtUp/20A2JPayGdcelcS0fHDvbqt9TF+m1eP3H7my2NVwCUunGYRtG2Uu3p6HYdMoM3gaYqMHMDe47h7RUvH1zz3E5YlGLK7GPypLseEkE2F/ZJuyc31UZdIdhW7a42dAqKcsP96ZwT61h0U/kareNDFyZjPoPyEtqLAb908Dn+0L33Obwz5/rDA4pzQx0U82nJ5vmY7FqjcsgbGD6XANDsZ/2hH+Y2GncG2r3A9L0obOgUodbYSYs/KYVuXgZ8ITRZnyFK4BGcmQLPVrwuSjD8Vouhx1NtQZgp2PRA3ReeU9i+llbgt1Vi+hyXHx2QXWkGS0U7DpRXkgC2I9W7r/vYBRHCQdgu+DaJuioR/qyFveWzuC4aOVR6gU2neX4zpVoW/WfusQRJmqKJDJ9YleoW1NLgJpruoCM/t6iLMXUR+P7BCJxicCPPIewHNrMStTbkmy0jSpSeA7q2AvfwO4mcDYLj6FlL9OapIeEjlopqlTPbK1m6klk3pOlE7oGInfKliAXamSJvRCAvEQr6wyNVkFYCr13L31VKrowmoHsF610phn4J1PLaOnYoXQH/zKvf5feO3mOkaxZ+wIfNLa67EdftkLdeusPFesj1Y0me3cjhC9mf+Y0IK9qlNBy6Mbhuq1g/OJVEJphtxemtWLgUM08+d9hVt1Vsz7zoUkXtK2/h8N6MprNUb++RzI/FPDkqNK9VX+GbKCbYuxYocLnYc3hncLOcfCGHb7UnP6c+FAkHV0SWUy7JhWmTplOIAq0elFTYPuKuVk3GX37ys6L432m++ugJf/TwHdpg+HvFq3x9XuJyAwmz1ChCKdX7pnS4UkVRWli85jFr6cSPnsmayxZdr7rvo4UVpefN69sU75ax0+kZFzV/9fnP8OHHxxDgs194yv3RjO9d3ON6NsLf5AQTGNxao+5sqPQAs9Tkxy3FFzZcP59iNiINoZyiawT7dOv+jM5pmo8ORfcsgcvdFvitgyS2dZWxznKO3rjk4nwKlSFsDF4F1GFDU1j0RoQpi0tNcUXvzOGjhl2XmJNACLqXk8lnW50+YNsxYVuoqSin0BV6S9yJRdcnyTuQkrTt794onDEol6FqsYUJhSGZXIfMCKC9iolVTMBUkN8DcmCrNiV+0kCoG0uedzwc3vD8wRXP3BH2ysLCUD8M5AcVtSkwM4MfO944POd947nYK6Od1Lb72kspxNibsG+mJpJDlOAIref8ZoxbZH2XUSyTJDnTbvvZSVglH1B43KTAzDbomxXkGd1e2Z8Zv9XZEFDoxqE7y/rYUMwD5bUTSYp4z+0G1NrQ6oxV5ghBSBkHxZqfOu74jdMxrQfKyBJxivzDguJSUR1BfRgoL6G8FDujrbPHLlMw9CQJ3Qr5wbuUOKYzThMyJZMYHeIkILbxUuIdMcnskDfk/v93JbmCrRiapm+tqg4JRK0Ef1OBChp1mff6Tss/0nB7uuQHj+8RWk0GsR0SuylRUFDGTpIQBa1EqG0HTJ1arT43uFbBHvz0wVP+sbvXPH3pgJUreLLeZ9EUfHx+gL8s8DZqH23g5ntH/PX5kKP9JUw63FKjjmp+6vYpbxvPYjhCrQ0+VyxeEQqxK7ZMh5AH9FKRz0RIzuVKKNEPPGpj6FYZ7HeohcXOdD+SSmrgVqt+UajdSgR+dCMk/Y5dVkT6935MyPYQ9ryAwutBgDuvnaQUhGwAq5c7sIH2JOsFJRPDLxgIcRQEOwdgn1zJ6/lMU974vrulXEwaYpfFRw2
UwQd5BKsTtYhEyM5UkoyhIZSm/1lmrakuBrzxxlOWLxVcLkYU1mE6Q3Vd4gZB5AQ6xfCDfOuR10gFlCj5RuzXdsYHoLN4+JkQQafSdSkW2wrKZ4prU/Jhe8xf7n6G2WrA5mrA6GQLMve5UOW7MbBUfQfLNHLAu0ITLepwhYwbvVUiCpmYO/rFZ7Rl6shzDl7FwygKk8Zn8pnylC/mZ/xG9YjfXL/MO4tjumCwyvHK5JKfPXzMt8YPeff0Fu1VKb6Lw0DrpVgwlYoq2/J5tZP33pWK6pYkD8WNAGYFTC4K/zJ285HOjoC/TUrmZb0/ms742sFHfPPoEU8W+yw3hWjjNIZ2XuBKAWKbWsaJplbyPiKDyuXyXsqsYzVw1Ps2FkqBYh6YftQSrKIrNdW+JBWDi45s2fXA5sQwCsZIgqQNofBRxzIwGVZ8/vCU//X9vwHA31h+kdvFkv2DFXVrqesM32lco7HDjjJvUQOHK60kLE7ini8C1bHsA7sxNJOSbC2+kab2/cE6Ww2oHraynzrFsi64ng/JRw1vvH7GHz56hzeX97h4todeGoqZRnXQzid0+x2MOvQ8Z3My5stffYePso6ri9u4DBjIngK4no3QxpH7bWKVCtj0Kxj5N4LYtnz24RmvffaSm3rA+2dHvHp8xT98+y1m3YDHm0Mab/hwdsjVzVi69BuN2WiGz6OWXa56gWQhXih8lqPrQLbu+nGgxKVtPEmdmfK6E4HJ2vewhjCyL5CAku+o8qH3zUv7pRtmqNJKHIndsZCJfI1etzIejurlSYgzaW0RYqETktYeMrYuDQbPv/WZv87fPv4if/V7X4VFRpjl6ElLsVdR64Lh/obPjs+onOV0/wDmohCuW7WN0fH961ahoyNHmgAkSIZaWPTHOaO1EBlMjUiLNAJkF0ePbWKVijblhKEa9ofYyyU0LTpqpG0hB/yo5FOAfNaxumOo9xT5XG/FXpUUM3aj6LRho8VaTFnP2zfHZNpjp43I6Mxz7FwmH3YlvqGb1xuoNeXbhmwZPXQ/mefsJMppPJvElXvQfq/VFZtxDpSJOpdpTe0Wop8YCX4yOf/tXD/x5MpbtcUd2Kix4egVgFO2qWsYnAfyhVS9k/GGm/WA8q2yV5pOreo8qi5LgFe0ZSAsFOWVookK4VKBxZFB7HDoRtPcFkzAo+yKr5aPebO+x+PVAQBl2bLKM+oHjhowM4tdarqnA07XUaDvsOP4YMndckF+7FgeFLx/ecR6WVCP42Y0Ab8WPzvGLW5dMP5YHlozJWJRAmHYoZYWpi0cNISqkEM2qVDvjsCNilUTcY4c+kSrFxL9xIJJ8gq9rmeQgzf9GRD9sPg9/Wukdnkv5QDtKOAetvz8l97iyWqfjy4eSnJUgxsG0YGKP8cVcXRG6Nv5hNATN7oSgtLkK0lmeh2ugOgpdTIGs+sXGTJiVhp6XzAUmPhsTS0/3i4Mb31wj4PjBXf2FmTGMbIN72eHrGf7YrtyIViYdgzZeqdS76QCtpvY6m5jUAaUCdsor+iFRW0VRI+nCzRjE5MGWGxKqqdjEcyLlRYq4qq0jAaa/UA18NJVOxH2VrAiiqe6gM/FUqcdQqHFOmYH5rDzXOmDoooJVkpIUzdOdYr/+ORn+aXi8/za41dorkvMwsRxVcDtdwwPNvzBhx9yNR1y8UR8Y3whz9auVe+/KRW0dIKaqaI+CjRHHcopspUhKEW+8oLzGipMY/qxQrp/sF3fPoOLzYjFtORffvBLnHdTfljd57od8t3r+zxTezQmJ6QCSWlcHW+Akoo9dTAvLyawFDkHU8P4uaO4acFDO7VSDORsR6lx/4QsivPGA0h3QK3ocpiWFf/cg69z2875WnHCOij+s+UX+OXLz7HucpzXUZ0+iiM7hWsM6zqn9zZVsn9Hz5OmkerXdTcAb7R0ZDuPNxqdOx4czOj2FizrgovH+1xeTLh9e8Y/8fC7/COT7/FvP/2TfOfZfdQmxhol93Rwquj
mGdWxp9vz6I3iO8/vczheS9HWSNGeZEHyNwdiMxboYRVpNGVrGav6TGG0wuYO1xne+f4D9K2a4bCmayzPFxP+S/MZamc5W4zxQZEZx3SyphtpFlcjXEhWTAptUide9pXXgt8TvJngrrJlJx3qIKO9NDbHKHQjtjS6cX033xcm7uPQh0EVx12pUErMaFO7XlhUtRFPOc4l0Y5+elHZeWfDIfAJQq87aDaebKlRK0szyPhofcjh4ZJ/7vDXePqZfd65usXV0338PKMujOgs6sBv3jzi7dNj1FoY8jZh0UJk+jUh6qYFdNRPM038XFo60fm1obiKLOc42jaVJOja+f5zpauXcTEKu2xwo5zu1hgV72Pfte2ks/wjV/wZ4+eOal+KHUmSfSxQk0m6xrkM5cANPBezMc28QK2FOTp8rrArgeGsXnWoaQOVpTy1BC3TEO2M7Ic2Yq12pwghrRv6zlUfV9T23CFpkaXvSzpWn2QH7ogvq25HOO+3ef3Ek6tuxAumnT7TsdWpesmBbhikIjiXiqbeV7jOsjwfMY6YrZRc6UjHF4XuCLKcOnRjGJwIMFi6E9sgEXTM9L1iU4kg4A/nd7naDLm4GaMfi4BZNwhklaI9dGR7NXpa995e9ioDD27iuboZ83ea11mvCmwm8gNhbcnP5ehr932sQhSYgBs7fG4pL6Xa2RxL1cF+C3st5lmBLwPdvoBu7cpEMUIleBT0zviHF0CKIWHL2B6sAf1CsqV2FlNKxvpxYRxxKXY6VyZiDIzuRR5Hn73hi8cnbFzGoi5Efyh2w9IhH7KAb6N9TSD+jG3CljZE0IrlyzJG2/sw0LUG5ex2FOkUZuDY3A5kK+mW9F0yo9Ba/UjTTkYPim7s2P9mTlcccTqVjlr7oCE4xfBKsAvdANafk7I9fFhIl8KIn1zoohp5HGWnBPOTV9CytqsjTTuSUUu9D/anZnzx+Iy9vOJXbt7AGcNqCISI62oV5YUibCRZ6XzEXNSyZrc4lADBxCIiJsgRWLur35Oerbc62WNFXEt8rm6bqL/59C6uNpSPc4axsysJhiKc5wST84urNyiHjVifVIqwDr2RtXKCK+sGkjC2E4/PJYDn56bHjYnsgOo1anwWE+Dd5CqoXhS0GwQWVcHfevx5frh/l6FtaJxh1gz4+PwAd5P3zguqE3yKqdlqnaVubwHMMsozzfBMsGxBK9Z3CuYvadppoBvIurUrha0sdipBXOj9HruJopJhm/zt5Rv+6fE7lMrwcaf5t0/+BO/Nb4mXXmupV7nYbLW6x5D6PLC0JWFjturZWWD1EPJrRXETGFzErudG5AfMuhOw9EgznlTcGSz41R9+BhpNdmPoguJ//tqv8HPlY75dP+Cbjx/h5jnKBlzhcSPQlaY8l0R9cKpxX13gvaI6GfFsmsNQfCKVl9gStBQxycZKFjfbgiD+6mVPvOLWwYKrx8dkJ0OaYojNA/XjgnfUvnTbV/I9TSyKg4ZxJbEmga1NI0mEbsQ70niwlfyQVET1mm9RODSNKINSbO7KKCyfd9uuRRy993GPeHCmQ9eL5U8oDGbVYtZdjFuGkBnpRtUONxLhZ71JrJ/tpUL6OfLeTO2xa4GmNEPLyWrKv/X+P8X90QwfFK/uX1E1GevzEebG4kaeOst4+/SY7qMxxSLiCGex+FCJRBATJh+thNzOBEBFjGYH7TQBwGNxu6s99UKRHGOwD7jcYFqHvV7T7Q3wuYnK9EnR3G/NvXfkMoJWNPsZuvEMz+VMbaJmmqmlyMwWBhVtk4gEj72XN1xsMsqLDLuSz+EKwT+ag5quNozek/9b342YrFPB4xYzJ93FuC7795LICp+QnMGH/tmgZb2qpFAfzDa5iv6sP0Lgci8mpL+d6yeaXAUdNZBs2Nm4Ox8mJk3t1Iu1zFw0L5r9gJsNsNcWbyFpfvRswd2CwgOFpzlSwlQKEoATEzF1ehLLMLSa8+WIxcWI8Zs542jcqjtoR5L4tXNLOzI
0B547r1+wV1S89cE98ucZ+krT1QUrm1OeR32dfbHN2XtXft7sdbHh8AbcMkO1MlPGa0anjs2tqFA5z7BHFa6UYO86qfATYyIBCHW7zcRT66K3jlD0Iq1BEcd6EYyYGC679z3aPMif1bbrlbL8oF5YYEmsbm9Q8Wy1x+O374BTjM7ia3tJMpyXJpiKgbhniqXAptJ7TiM0aA7APZPRp4ikSiKMj16Ajyo2lUG/k2M2sQtqo3FqJngMn+lYjcXnPnAsHxmGp4ryDIJR6KYAtWVqtdMg8WYjNzN1oUR9Xe6D7qSLoeK90ErwFujQs2R8DsuXJMF0Qwf7Lf/869/hD4zfwRB48vI+T6/3aBorJt6bjC5A0+bkMykEissd1k3q0MVnrTvRkOkPupg4EbV80vOT5/9iReYzolq1+H75HGzW4TaCbfQjec3UKU2MsOL9ks1tizFAHlWwVyIEajYx4Y9YObtSqJkc5PkiEFSgvAkxWZHuZLb2FDctKhnwGqIGUUB5GYUGC4/2b3h8s8/3vv2yrBUrIHm70OQr1XtJ4qVST50VweHIe/dFgIOGps2xG40eQLNnejHYhNtKSuq6izZDrSS0ZtP1gTxhX1DiwPBLm/t8UN/mV69e562T2/2Z1TUGljauF7ZsQQ2uMehab5mTAXi4oX4I60VG+TQjW0k3y24C3UCMltuRYZC3fP/8LnvfzuMYDdp7HUd2yf/95vfxt55+HjeXJODgpWvuTRbs5xuu6iE/fP8+9jwjnyuKvOP33nvMf52/wmpWgvVikBzHUEFBdUcO0sGp7p+txGvVa1GJuj50tXT+9758ycXzPfQqWpBkYgwOkuApB/mNxmzoNZycka61NrLeBHulo/gnFDcOU7t+jLxrttxrHKVYpqINUmsEy7urrp++7JNjJZ2YdprmqBRl99ilkS58hx9YfG7EPiUWoi+8po17rnERkymaTHatabxoKL793j3e9vdRg45y1HBnb8Fzp2maoYhEZ06swpTgp4KjF+ndJkZEPKLaifGyZ+SNCBmmOXKoVmHXBhtjJHn021OhHwsmiEwq0rv9AfZ6jb1aEQqLH+aSfHUeMtVPOHortXjG+EyxupNRXnuKmw5dSxKjG4+tFNkiOrAM0tfDQblheL/lyfk9XAGVlufpBoFwVjK4lHVSH0F1p8MsBWYjkJOoccdOwo/cC59HdmGhcDVbHHJMDFPhmUhe0qlUQtzabczt5iS/C+bgT7hzFQ1xU6vO7VhMaAgm4L0ilB496NjcM9sNsbDoTiT+g4XiattBCDaOkVv5s1pLMlMdhf6w2EoBhNjqjK0/JTYptFrUm5MCcZzxaieV3DAI5uUkO6R49Zwvvv6U7/uHFM8yghGmQzfSqN3gbmOF0YHTEIqA3miyhczJ6yPF4EoOCg4amGe0s4LRSws2qxzztETXimwdAdw7sv/SVdoJ+rt3OfAjSWtvV5C6HOm/d53S03PwiNmm97HtTZ/hByUb9snzQ9RVxuhEmGTD09AzjuoegSk/x/QJa7y/sZWfAobuENptId+gW9+z5AgKVWlcW8iILfd4k3BXoQd+4+MhHS8B1SvU0nLrK2csNiXzWZxjNRqzNJgqEgUMlO8U0i0KsVJstnRe3cjH0bGLpDvErkiWtAiMelANvUClXWmcz/nm9SMATDwJjPG4mxxnA3pl0KlTCyLaGKS7phs5yV0hu163XoqTTFg5/b5JwS7Qj4VVrNZUb+xNr3avG/k2lwceHMy5KQZ0R5pxWROCYlkVbFY5XBRSDTegG40vPG4kgbM4taAkmbQbMNehH2F4KxXs8qG8rdRp1K0EW7uR8YQKO8K+sRIQrIwkxX/w8H3emIz4K1e/B7WRpISY6/ewgpR87nZW+mcSE5s44qzaDLtUDM8EapDPXeycK+qpJPLDU+le6Cieq2oXBUQFkxVsAB2Y1yX//pN/iI+v99ksZE1N9tc82r/hdDnhYplFL9O4AZR0rpT1wh7U8sx0p3A
nJX7gwQTqYxd9KCVBLS8D2SrQDhXrTcH6dMRUS6cVBeW44f/4+I/z7skxbp5jJiJZ8o89+AGlbln7nNdHMLQNbx7cYf3xBLWU9/s/feNX+esnX+bdd+6JkwXQRdeA/IE4GYTziTxnq2KHKN33bRcSHbi6GfO1Vz7ip45OuKpH/ODD+5Tjmj/16puMTc3SFTTe8v7iiKvNkKq1VFVGu8mwp7kkxCEKq0aBVZ9BM7LYSkgQpvGY1EWMPnepswMwOG/wmcZsxPpGOY8b53EykqAS28KTBKPwoCsnYrZDS2jFt1U3Dp8ZfGGwyxZVt5JsxC2ngvqRBrbg9HwfP1Bwa7DkI44xS42+KWhMwckriixzNPstYW25PV0yzBreV0fUFwPs3IBWPUszeW56Q6/anmJ4YkuaVgu0Jiaysv6jjVMUIk4dnHQ+JPiAbhzttCAcDsmu1qjWoTfyeUM6N6yW/bDT5VGtI1s65i8ZIXAoKK66CAiX92WrGBtjp7MdKZ7O9hiXNW6vI6wNdpEkOVTvXbn4jEMdNJiTgvJSkjRx7ZBCuh/zsd334kUrY36fS+IXjI5jxNTt2iFrffL6rfBXv4vk6rcYpH56fXp9en16fXp9en16fXp9ev1Or59s50pJVWo+MbruaY9JfymAb0VS37QKs1Y0B15A0vdqtPa09QirYtcq3/pWdQPIr6UN6gYiZZDN1Jaa60CpqMERK+f94YbNUcb63kSyfi2ViXL0Lt900sGavpnx8eYuf+D3vsndh1echEPGxytuT5asHuY0nWFd5dSrnDmix9QNI0By4KWb0kll6nNY35aquRi0+KKjmRV0nWZ/f8W10zC3FDemZ86lKmbXX6un3u8yB6OGSWqHBkU0ZORFemliaPWsQLataK2l9bY7l45UVX2RUVxKbu6z2OGJ3cAkXpfkLNLfe7xGQLRkoq1HMFBeb4HDOlpRiFYKqFYsb7KFohvRYzSyyKYSSrDgJ3QEtCeNLLvSnF9NuXWw4HC0Fg2yoLi4ntDUA0yjyBaK4kaqQnTEf2xitVw7TJORNG3wwvz0nZZOWRpNKFlb5Xm0XlhJB+dN84i3Du7Imt4IsHr0kWw700gHrplI96rd84QDaS3p8xzdiH5UAp91A011oGj2BfA6PNEoZ3Za42zfj/fgk2BswtXRt/MB/sCtD9izG94onzPRG27ciBs35MqN+MHyPs/XU95+cofQamHzFh5btjTtALPSNNNIA0d8EbuREB26/Q4zbXCVZX2T4zNDMRewK0q60ab29LiIHiJAZIwqSt3y39v/FvZnPB+sjvh4sU/dWubLAdUsF42yZCJsktUHfRxwhewT9bwkc5DfCFNw9FyYgPWBoRkruqGMLEwN7izeTy33TCdvsbjmfRZQhSMzjsv1iPVMZh1ffu0pf/joHQ7Nim+PH/ELiy/QNQbfREKLV2ADWeZoc/GSYyOvly0V9sxuaePxPuiOSKMPNEZRbWTkN//pBlt2hOclNsBb79xH1RqGjj/5uR/w2uCcp/U+39/c4+lyj9w47g3nvHRwzQdOU10O+LXnL/Env/Bd/ujxO7z35v1oJbRdP9Uyx+Se3PACTm9X7sJF5vFwXLNZ5/zG+y9zfLRgUtQoI4vwqhnSWcPGZXgUn5ueUex3+KD4YHXEs+UeZ+sjwjrqqLkIYdCq18vTrXRqbaUprxV243r1mK0RueB3deOla5UwOVFDS0XQubd6S6B2vu98qSAimmjB4qimk9ccRCPnTdsLbCrv5XkqAbH7yBhMMVi3PrLz5OsP8g13Hl5zerqHOpPzoD4bUg07TO5h2nB/NGNgWpZ7BU+qDF9pfK3orUF7bCp9V1bOytgh6qRDlM8UxbV0eNPExG5cL9GyO0KTzp0n/ZBs0dCNMrq9gYD4my5KF8S3oKJVWvKcjUQPUzuKawGd11Md/TgDpu368WPS6Ern7+p8yMqPMEtDfqMYPRd81uquZnM3sBl7Dl6+ZrUpMJcDwWUlLGX
PMo9nX6/Dtb030vlUuFxIQIn92et29UD4eK7tGDSrXYzV76JrBf9fALSndqY3KVFII43YylRymNJoBqfR76oLVPc8Zq/hz7zxHTY+5xfOv0LQumcYyhxf0U6jT9scqqNtgvUjPdz0PHTgznDBg9GM7+T3aFu5Jc4r2vMB+ZUWlXfiYdjA8Knmm88eMh1WTG8vebR/w2Gx4vBgzVA3vLs65t2rW1w3U+lDmyCH07BDZ55uMaC8ULARtqAvoG0sx4dz6kHNzeN96rKg3Ktpc0c1GxB0FOP0CtCYCLoVQVGD2l1A6fIxGIQgh6zTfXu8vw273dHdP8dx4ycFRpM2Ecj73hw7QulonmSYJs28I8bHxKBg0vNOv7av6Y0ET5E+2KHRupiUtVG0shN1bT8XNXTtBKvVM37clmatlCRtbigjmNG3B1ztl3RR98wc1LhOU65k5OoNrO8HumEgv44svTT+SInozqhVuUDoNNgIXAhxBIz4uOXzOLq1CrPWuJBj1ppyLorz5aUcUiK5ICKS62PNOouYHZfYlfGxxJ/blYp2qqiOAlkuTNtQ6x5w3YM2tdriseJC7+99Ck4OxqbmjfI5+3rN0+6AH27uA5Apx1cmH/O1qePXBq/yvfN7XJ9MhfkG2FsVnSrRlRg1N1MZe+njiqJoKXVgeTkUSZJMRFqVkxGsYHt0jxsMWoodiPdbyYH39dnLjE3FH598n/PhlB+O73PejPn2xQPO/QSnwEf8UpLQSIB4H6VLumHUqbtRDM7lAOqGmmasuPk8kgSOW2Gj3mTkC0u2MoLh6gJmI3pcgucIhDxgM89BsWaS1zSd4d50zr/56K9zbDZ8q77P3XzO/nTNus5FUNgpgtMo48nyjlaJ9lQwCjd2cLejPSsoLjXlRYiJfcS21V50mApFlnfk9yte2r+hdpb3Lu9TLwr02hAOWr786lP+yN6bPGmO+Nsfv8FyNoCliF8+PdxnMt6wN9pQr3IWVyP+w9Pfy/3BTLZ6I+tFRymG8oNC8C9x7NonVTs6VyDJ+q3xina44eLrd7h6UnI2lH3buJL/6ukXwARUFNn1045s0GIzR1NncV2G+DMSSxcpyIIEpB5XlbbhJ3SHlPOEXLOKgPbyUmJDr+X0Sa+4F4KZyF+4XGOWDXoVSS2ZwRcW5Txm1RBKiystpuq2xWpM6npxzhdwOi/+mH/plf+aXzt8jV9xb0T1fo1aWFzhyfdr1l3OR4sDzq8nhLUl2yjyeZQkSqO1qN+W8FYyHoxJsUIEhuciZwEyfuuxaW6n6O5jCv2Z6weW7GxJ1gp43xcWrZSMQj3CHoQXR2se3Cij3s8oZw4bIRZdocn6op5tMR+2TRRswF5YynNFeRUYnom+2PKBxn5mwf3pksvVkOb5iAHxPMll/YEU3yqyxIG+oPQ2ipgmzKxVO00Iv4Mj3mkWxM/Tj5n/PrIMv93rJwtoVxLwkmmtywUXw042ildRg0RRXAfsWmi/w9srfv+Dj3htcM7SlYSxw7UK3xE1rgRA2u13eGvIlpriStHsSfckGMRiRm8feiSPUHUZL42u+PmH73LVjHj35harOqfJPD7X1Lc6ETqrNeVzi63AvTnhbG+EOmh4r73FO/6YYdlwOFpTmI5NnVM+F0ZhfSQrTZnArf0lZ9cFymuh4RvF6iDgNoab5ZBXb11yMx6jbzKqRktwyogK2fL1oq0Uk9OEmfrkBoftQvLIF+wYN/faSMk8+RNaSUlkr/+3IHP2dEi7ww7zqOEfffUdTjdT3nr8ek8u6IbCKPMDj446RBBNVPtKSO6/z2I3KqheaTsBEXUnmDlAmHt6W0VvDXF3Xg/ks8b36HNPdlCj3x8xfQ9U/OHrO0N8AaPn8gFX9xTFF28YG8fNO4dkS01XRioTZrtm4qUdgqnZ2T3BQJcH3ELAvM1Usb4bCA8r0a5ygmEKJgJvuxANc2H8NFBeyzpXZ/HQsbHL2wR07XtxzFSV9h3G3coNtpi63ao3vl5
KrnxMjn/5/HN8PXuJ0/WEy/mIOmHSrGf/cMW96ZyfO/yIi8mY2bsHALS1JjusCKUju8hxUZoBwG0s61WGXhnGzzR4UU+3G2GCdV6RLz35vMUsG3wuOMrgtKgTeMHkBRN48/I2b17e5u5kQWlaKpcxq0vOLqZC/FhqbCVMznxGT/3WiVnmEaPp4wY3yEHpHnzvc3ClRGpXG+h0j/kR9fgAbXytJjLIohmzNp67gwV/ZPoW1Z2Mz+fPmeiGv1e9zK/OP0vtrAhwNgbX6tj101CCc8IeNJvIjDSB3//qB6wfZTxb7nH65AC9NAyf6yg7Ivut2YN7B3MOijXvXt1ivSrJrzVurVEvrfmTn/0Bf3r/W/yN2Vf4jfOXWD6ZimVNjIlcDbmeDrC3N+wfLlmuSr7x5qt8dyrJhG6iMOtG1oetwEepjR6jFENIL6WS1qgKfOnwOX/rzhH5iaW4kk58chmASB4C0DlB5ZIQ5EIk8SOP6sCu6O1NulKSqmIm1PukO2dX25FHz5SOnZtuoCKD3GIrj649pvU/Etfkz8TupLzXbqDZ3BuRrTp07fCFbBB7UxO0pjksRVcq4pAkpm7PLflsEZCe9qUCdOCyHnJ8sOCfOvoGP7x/h6v5kO5ygKoVujV0Q8MH14csnkzJrjWlE3bl8CROO6wSHJkLBGVf7PRCj7lNDGPdCS4z/V/PFvxEnhCM6tnBLtOYMsfMVmIYbjVumKODFeufFEc8vSB10Ip2aFndFSZ7vvDki22M6pOUsI316Tq8PedKTzBPxMmgmQiJoboVeLQ/Z91m1G/tMbyJjRctnWjVxcIjJVY7Ku1BR4X4QvCkNkqb9OeCUnIupBwDomNJ6L9oVzy0FxX9XVw/eSmGYSC5VIs1ht5u4EgndWOpuF0mLCVXwBvHZzwcXPOXPv4aw6whUf6VogfMKQ8q93Dc0SxLisvIBsokcG5BwPTdMrzi8WyfD64PaTtDXWVwVmA3irKV72/3YbS/ocharsoJepZRnmryuaG5KSU/cYoqwMf5Ae29BrWy3PpAAtFca7oy4KYaoz0h87jCoF0gnwVWD8AMO6pZwUf6gC+88oynsz1mH++h15psFcGNEYwctMKVhtDtbGYtwMNdRk1vfQMkiQXw8m+xClCxLZy6ST1b0Adp/e6K7UEPan354QVv7J+SKXF+97kw0HSjCFlMhgYOZ3XUreKFxa5iwSc0fWjvCTtz+tjjCo3PTT8i8lZo/quHGt0IiNrlinaot7poKWFO79MIqyTLO+ZfqclOhJGXLruWNdCNFPWxxwKLVSmHchZ/dZEt6FMbOsRKDukumYCzHpzoWXWjwOYutBPF5oFj7+GMf+TRW1w0Y745eciq2wcVPRhBkuRMhEIl0ZYDJhhFm8ln6te1k8QzW0gweEHfbOfqWVSfiAu7QV/kEOC9J8eEymCvLaaBYaX6ILwpC94eHnDxhRFaSTdYeZGM6Joh5B6zEfmMJKAKeR+cugGEHOwpFAsnwNpOU1y22HklYwcvazp0qqfk6y6Oi4Pi5nTC8nuH/YGiPBTVls1o19LpyRce00YwfNSoMTXYpUbfbQllxyovMWvxMcsWMDjRoIRZ6qKy9/DMYdceWwlDzSxrsT8xcfSqglh5BMVXiqe0aM7diH/35Of5YH7EvCpQKnB9OUatLHqj+25Mc6RoVMAuNfkijs7Xhh9e3ubWcM3d0YLBqy2rJufyeIxfZJRnFl2LNEVhOt67ukX7mwcULWRLWN8L/InPvMkfnLzLO81d/vpbX6Kb57ETlcbzqlfnb7oBD792SjNe8vbFQ7rlsAcEbxmBsh/7gyZ2SvpDame5uUxxtRpyPRjyh7/0Fm/ev8NsOaA9HwhoP9qZBCfvJzEJVSfMNl+G7evFuOKtwg1SvI7r3+0UEDolBjvA5BAYnbo4PvQiZdE4kSpRbAvGIMri9AdvnEjU0tF0ZYbdGHTtsWvxE2ynsjjssolaei92znZtwV7ccKBsYNUW/J8
+/qO8Mr4S4dlRxdxrunmOWRh8bViZguLCkC2SWLSMhENUDzatiIAmd5FtwrD9lcZh1aHGF0SJIkSS5ZN6UMTPbnSceATq2wMKDXq+kWmB1f3nClZLN8vL/UdLTBcmu2L5MFBeGIZn0WNUyfeI1IMUKWmMjIJB1nHnzozZ4W3cIPrPIvCZD0+OUM9LJh/LM6qPIkkmF3B9tn7xXu9OXlKHK2T0TQCfaXxhZAyckr50P1q2Z116vd0EK8ECfofXTzy5CjaxTrYLPulcie4PkHuUCfjM4nIxNT0q1nx/fo+nP7yDn3TohRWhtWivkthjoTKM7mxYT0RGP1VNvQRD8qSLKsMAy3WBOxkyfKoZJ2uBKgjGwkM7Mqy7MZv9hlvHC5oDwzLsMTjVMt6LixsgC4qg5ZDJNiEKuRm8ga7RXM5HqEbT7IMbKIbPhY12eLDgPExZXw3pDm5449YZ33OG1fUAbraHljegIs4M6OfH0nqNB10IBB9EUTf0WdX28umeBEJcPOn11c7XKCcCetvvi2y/AJlxPN/s8YMn93C1YRyrDO2gia/iW0ncVLSE0L0tQVzMSUbB00sidNEaJGRyoCXR0FAEqpdr6DTDD0QDpSu1tMGDjgw0+krNZ7Km1tcD3njtOct7BTfrAd4rNvMSe5GBV3RD6XCt39kXan5cg73ZacIMQGTdpINc5CGUDYToz+VLT3vY0XSaRw8v+SN33uV/sP/rfNztc7tY8Df5KZbzAeFKRkOqky7J5qFDV1J5V7chKOnYZTPp+ikngSyNT+xmZ02nfZU0yUKIGIv0QFMnMj73QC9TEDphTRY3qteBS4mkX0Nxo7h2t+iOW0q3FZ8dPNe0EzkAs0b2SmIouVLR7EH1UgMqUNwU6E5jVWQ7ZRKoldZi5xN4MULGQnJvUHGTjcjnKnbw4v9Z+Ry2CqLH1EbR1jYGyajDplwgWyiqpyN86bELwXeMn8jXS8UuY4R2LPcvW4lZsopsVcGcaDByAASv6CrL+4tb/D8HX+Hj6pAfzu7w/sfH0Gwrh+xGGFDJOka0+4T5aVdKxs4KTKWZ/+CImT7CD7x4+imkKwqoKPzajeDj633qDybsPRfKOR7aA8d+tuZvXH2Zb50+IDwdYKN+VsgCThP9JCUhLS40dWf52cPHvH98hDsZRhPubeUeFFS3veAgz3Sf/LzAyIwxGyU2L987ucefeu37vP7wgqUr+H/pLzEsa376+Dkj02C1wwfFRT1m2RXUnaULmlWTc/r4sHegwEvsaiZySHZD0VbLZ4K5KmZaME21j4xBIrREU1w19BaW8bAUBltqbYetWLM222CnEAmG1otJcu3Jli266mgOB3RDQ3m2wSxrYc3pmHTsdIaTKPKu4rvyoIzHaM87333Eu8M7soczz9HhklXesWEInSY4mUYkQVvf0ksubPc3vQSHyJpEzHBKjIPIHK3vB9zAMzgx+IW8ju6i4HTCXfkgHaio66RrRzuxVLeHlFqLO0ay3DIKnxlcYeTfdzpRuvHk88DyJdjcEfFnf65E9LSWglo6ndsuGQrOricUZUs7lhcTNjEisPxhyeBMYkt9FGinAV0pjCJaYMV1GnUCX5CbSXhkUtGucAMd5ZgijMal5sInRn+fHA2mdfPfts6VUmof+PeAL8lj5H8MvAX8JeAV4EPgz4YQrv/+L0SvUSGjH15YHMRAW0xqhmXD+rCgOVBUdzuumwHffXaf/ErjNhl2rXrFbrsO/WvotWY6qNjcyenOhtsWdhyVBAU6VchaiWu3CYRKMXq204ptQhxbwuQx8FjTDQZcfD4jO6gZvzpjPpgwft/iMugmAVfKBvZWKsVqX6r9pM2FU9RXA7K5phsEmr140qlAaTtevXvBx5f7vPXOfbJpw95kAwdQX0q5YvrDT8EmUnAbH6nSSb5gu1gSAHvrHu76lujOI9m2Rncqwd4ewXl5jURnjh3DD06P8FcFwyeGdhQYnAn41uWixeItUpmVvk9807i
mD9gxqQIYPVN0JbhsO3YwjWgpBQ1mqemMQQ07uqEY6qaKrDdNDYGgBGjuM0nIzI3l7Y/vsLe/5nC0BqAa1FzYCd26hADFuWX0THSZ2mm0UWm2ekc6KsST5vYRA6ZUQBlPyJIflVD/sZ7r9YCvX70EwLwrOammTMqaprGEVQEoGd10UB8E/NCj9hoO9lc0nWH+fIJqDe1Qk8Z8XalEoHQgh6a3SlSoG2lvv6BXFscUqg8a8Z7He+8HnmzY0q1NrC7j/7t432MgHLeKqspl5D4JdFNHfmMprhSb20Fse4IkVt2eQw07xtMNe3nLYlNQHRYErbGrEBPfjKzUEWgbNcKSZli/IIXCrh4GHs/uYVdKcI+xiBELH9Haoper8C8kaT6X7tzwucZUmvLSk21kHTdjzex1TTuW+x5MwCw12cqSbYyQJLqA2eSYTUcSuyWAWhk+PD3i/3zyD+GvcuxKU66jKKiVgi2fKbHsaLb3dRmxl6LNJ5RyV8pDKc8N9sSgOvMCbd3UcuCioPpowvBUUx2JHI3ZKLL9iv/kna9SPxuRzeUAcoNA+eqCe3syRryuh7z38W3UTUY2V7x3csxBueaPvfYOv5q/hvvOXrxf9JABf9xQDhv8zRRW8YaGdFCGHZ2pwP54w/nlhL/yg69y+2jOvdEc12kWq5I37W1K2zHJa6xyHBVrbpcLDJ6rdsRlPeLUHGwT/gxQSeYh0E6kAHGlwsbnn61FFsK47bN2haad2H4y0cefaITdJz7pwEz2Nkq+NyjF4HRD0kNCgxtmIuC8cZJYaU0wZtv5YVvIBKP7Dr+KGlumkeJ2mlfYpUbNNUlaZpZ3HO2tGJUNVzcjppMNi9eg7RRhY8kvDZu1WMkohyQ2TtaCK0VEOGEr0/5+oYkQFN0g9HIMuE9iU2NTwwV8JgSO/KalnVqqY+neZ/MGndZ++voeOL51Lsg2geGJFqP6sSS7xTXk8yRuHIvuLhVgCnc6oGqGFDeK/AaG55K0NVMTCzRYfqbFjlvU81L20yqdFwpfaGgFNJ+klQQ/Rw9/8Jm8TjuQlqipNSrEGBETpi1sJuqo7eKx0vW7wF39uJ2rfwf4hRDCP6uUyoEh8G8CvxhC+AtKqT8P/HngX/8HvVACoIco0pgO290jf1g2PNyb8cPPDrCZ46BoeLrco5kXFDb07uxifyDfE50vUE7RdJZb+0tOX8kwS4Ndiru7C9EWJVYxugVlPcOy5no4JGgdE5bQMxNME9Cr+Pu5dKGqoxHtzzqGxyuWjNCjluG45uWDa4a2EfzE1ZSFH8iZXEiVRekE0OgF49DpWCUqeH495dHRDZ+/c8a3L17GPxlyOSgJNiZ/mYjLvZCIAr0/Y2IHwrZRlebk6YBFv9AkkAfyicXzyYW1m82nJEuDW2Xk13GkmKqJ3fGBit0GtU2id0dTabO7QtENA8MTGXWkaiwohAWzCXRDMeotLy3NNI5oPFsx1UDf9tV4easGGHU4Y5h+s6QZlTzfk4XW7XeoVpPNk3WLYKTEPSDIqIu4Ua0kVkkThrRmPfgQxfUCooHkQZ2KZVHzuOC9Yo+3Bw/7rqwbedCB8Y3sAR9HAJOPFNWhoVIZl24sHaWVqGYHHXr9lmasqI5FKNAuDO0TTTCWbBFVi9OaiMnYCw7w6TOkCrJwPLh1w4mdsprk6NwJm62xuJUlu5TOsIiJiq1U0KAGjtVDIZtoJwe9HzlU7hhPK6z2+KA4fXKA2hgGyVMz4uMSPrBfU2m5pLcaO9qzZsBrk0uKn+64XI+Yr0pCENFKbnJ8rrFlVIDPdG9KLaNkCbTdKNAcCknB1JKY1gdCemkfVRSDljJvcUGxnA2oD0vaWpEvpbjKtJL76ryMLpzgmLraoG8yykvds4u7SaC91UKnsSvba0P59NlUQNtU6cX9Unhee/2Uq9WQ+WJAuCwwGyWg7AZ8IT5rrhRg/uaOx760wuhA8+EY31h4UlDUClcE/MsVr9694F9+6Zd5ZK8olWM
VLH/z1lf4jeuXeffsFu6m5JuPH/HHXn+b144u+WE5FZybisB2gJuMKkBm5bmnkXjfffChTwBf2buitB3Pvn6Pq49KzkbH4CFbKmb1mBlwEp9/O/Gi6WXlYavcoWp5NraS/R6sEo25RuHXKnaW6LUKU/fT1Dvr3SgWD+VgTp1ugPLKk62DaDS53bUWY5nWuFzRjDXBDMiWIgTaDQ2u0GRLh121dAdDXGlEVPYTKu1bzai4/5X4nJpKlvetfEU39phN8s4D93jE6V3L/v6K48MFhwMp+jqv2WQ53bqkGyqoVG81pJSMvWQvhd7pQLr78txMDcOnwoB1g/ic/E5SnPTBoik1LhCGlnZsKc4rslmg3csk1uzlZCYC/VNsSdZf8bN3Y0N1oCiuJNn3Requ7UylXMB4CE7FsyPgC09xaRmcB4qZdJK7gcaVgfZRgytb7u8vOb+ekF0JZCKd86KFJuM6mx5pTJx3PW3lz6o3B/dZZFESE078ttufLu9f7FTtJuS/g+sfmFwppabAHwH+Jfl5oQEapdSfAX4+ftl/APwKP0ZyFUwQawXkRqnY3lMd/Wit6Syd19w5nNN5zWJTsFgOUJs4Zopt7t6kMXV+QUCEFxMGk4rD+zPmiyHd85LiRr+QAMh4K6AUPJjOWd8rWN2fgFK9irxuJdCZOtAOVM9oGZ4G5m9NaI47sr2avOjIjKM0La+OLrk/mPEbvMTpU6Fq+8KjgqIYNfiBolsZhnMZf1W35XBqNhkfnR3y6u1LHr1+zvPLPcyTQRRxlNGnqSXIKC8L2GemF5HzmUHt4qWA3sdst1uVOlO7f4cea9VjrkAqtSgkt+1qxaRz2OFKixt43NTRDbK++kxgc/Eq2yYqLk8imbqvfHwO3VjUqLN1iNWkImRRJTkGS28D5QVkc6gP5D322D0rY8SQcBjx/evcUUwqeHOPwWlgcIqMMvYzghHvyqBhc0ex+mLFcFJTXQ6x64x8IVgFb0IfxEL019JdEGFNpwSw7MR8NxQeM7NkCzkgXA7KmygGGGhHlmY/dkEU1PvyPqcfQHmlAItd2yglwPZnxarP59KZMHsNnclwmSiBBysYpe1YOO21WDk6tcUlpoNSwT99/1u09wxDXTMxFZnqWLgBMzfk+8v7PF3v8fbHdwheYc9yGTcFGL80ZzkYMXgsBs6dN4BhdZOLDVWlGF/HTsMi9BgSlNhXZPNGMDEqju3DDuM3rq/HFwfcbAa8vHeFHXsK29F5zfViSFOIJ6Bu5Gd4K0VVSrS9pU8EykcL2ruGq7ulsNbSsr/MacipNUIaaRSmClHEMInHRmp/kEJBMI0Kk3thD6b1/vklrx1f8trkkg+Xh7y9eBlXyvtLmCufe7ROuJA09tMMbMvX7n5Mdt9x0wyYtyUfXh2yXOX4RSaEnwAcNxwdLjkarripBly6CeoqxzSKdupR9yr+jZ/5BX62/Ig93TLzGR93+wx1zZ+efos3yuf83fFn+IV3vkC7zvg7H36G6ajCDcRIXreqXx+jx4ZuFH0x45hcR+xTb5YbY8XAtPzB4/f5D+/ewp5l2IV0nmScJ2tNy4Q4ijYLQaQbKpppJl24WkbLupPDsSuFIVxchojDozdDF6LKdiyXuiMqgLNpzyBFpUqxI8qm7LxvpQXXmYQ518cGsyf2UslarWg8rrSs7+YQolBymg4kaZPeFUGhUqEbnRR8qznOF7z8Uyc8vdinOxmAijHtLOd6bdm/N+eKITfPp6hWLLeyKnY+o6xIErVODOwk4ppwYy7XfYxIBJLqKJ6xmUI7Da3fJhIvjNI0zcSgXEm2aLEr8VjshoZulBiS4rBAso1BnmUz1qzuR/P2VcSDph8RpSJstZ0Gpc7d6M6KdTsWO6Yc2oGlmSqa444vv/qU0rR88/Ej9OOBNEUKIsuYXjw4FWp90UtMqPIg0KNq+2xdTLSk4qQvDpQNhA4547TqCU//jWflb/PS/+Av4TXgHPi/KKV+Uyn17ymlRsC
dEMJzgPj77d/xu/j0+vT69Pr0+vT69Pr0+vT6/5HrxxkLWuD3AP9KCOHXlFL/DjIC/LEupdSfA/4cQD46wI08ypkI0FN9Jtp3O0xgdTMQwbtZAY3Gzg3dQYddJ2ZbQDm1xfDsZMs+D5jTnPZ5Tvfaiizv2Iw8yukI7kzVgFQywcNLw2u+9JlnfHj/CI9i2Ra0zvDe6S2aExnNoRC20UyYe6MngeIqoz60NBqaAN8c7/PNvZd4+f4ll/MRk48UeMXqkcINApNhzeFgzduLAn8hzLf8RrPZ69g/WHFzNeadp7f5Y597my8dPufbh/c5v57gPhr22C0BKEoXiyDYLjF7NSJb0H+27Zw9+HTfYufvEwwJQCqw+MeQZBi8F9+eHSyCGBrD4f6SzbDmT7z0Jhuf84tnPxNFQ+UZdBMP0xbmGa40gmOz8pztUMa0EKuNkaPZk3JEBcHE2LXpdVx8EX2n1PazJ2aRjubKQUdrClRf2bnKMpyuuf69a8JZSTZLnac4Dsyg2VOsXnbcPp6jVGBlBhFMG5mPip5J6QZW8E3EStxLVwinCIVneLhms56guwhMjYwnFLCJTvaV/F/Q4MaeMHTMEU85XwTUSu5hN5KugchthB7gajaK9rJg11OMhIeL0hw+dvLSM0u7/AVXolZzJ7vh9eycVcg56fb4qLkFQKk6/uGDH2IOPO/eusP3Fvf5dfMKoZHOzSBvmb50yfnlHbK5oriOgPg0otdQH0qXLVtI10q7NPKNnbuQBGd3x9DE+xwIneHygwMu/aGMkTzgpRuUVzJ2EiBsoJjHUb4L9Lo6XsxzN6uCYtDSjTpYZAyeGuxKKvzU4UueZaNT14+alYNs2aEbJ/c22mKFLHC4t0LtL1neKXh4MOO/f/8bTPWGj9tDrHK8eXyPemjRlXSEdK0IA4+xjnYiHfFsJTjCH37zZb4/dgwON9zbn1OYjof7NyyGBc+qI9RC1uzt4xlGBd56+wGqVQyvxei2ernmK6894Z+/9/f408NzTl3L/+7sj/Pu4hZXmyG5cfy+4w95VF7x2uCcl29f8Xw2Zf10zHk2kNDpZfRmos1TMHF8FfFe2/gs3p06asr5Ap6t9rhXzPif/dzf4dvzh3y0OOD56T5OBzaJ4r4R7Taz3uoS+ky0w6Qjq8T8O3Zn68P4HJXEWul6bs8KbRS2MBKPck03EEydt1HCpWWLxQFCpqVb0YUeK+WtphsYIdDEjohLHpkb8cNs9i2bAyN2azeCXU2M7F4DL4n0ZrI3drXwaDRLV/C//8x/xNsv3+Z/9Y1/kua6QDcxDlWa2fUIgOFHwthtJ4IpzJe+l1VJYGxv6UHdXUkvqRIMeKK4cyRsiUaWTAmcC0Q92L57BxByFcU2FTevZQyuDOVVR292TIw/edaPRRMEIxiNt4p24mlebVAXOYMzzeBM9rfLo5ZdlEvQEbuFChxPlqxeb5jNbqFbLfvdBPSo5XIz5GI2Jv/BcDsOdPQwkIQtC1r1z5W4hn0OIfdghIGt2jgtcUJ+MlEqI2mGBedRaDnjiF3QkAgL8axsf9Ss+8e9fpzk6gnwJITwa/Hv/zGSXJ0qpe6FEJ4rpe4BZ7/VN4cQ/iLwFwFGtx4FPW7pTICnGXGa0B82SVxPzy31xjD+0MhBqmB+CL4MdEcOrMeuir5lHowcdt5AGDlYCYuv7kZU+77f0KbduqmbRsY9vjacVBMGo4YvTZ5RB8sPF3eZuQEEOaRHjxbcGguy86PnR3CTMf7IoBtR5E4spm5l4Mzw0eIuOMXBlbRTq1san8kieDi64fH+Ad3AYokYsNrw+aMznhQNT8/2+dXHr3JruuLeaM5BueH71X1cmfWCibpW5EHGlbuMMWALYA70h9WLX7DbSv9RI1IApWOCFVvf/VfsqHz/sfvv8FODZ/zhwft8v7nN37zz5YjvkoONgeNgf8VMDWlHA5JxM0GJDkvQPVBY5Z716w3NVcbwuYq4B/2CLIKbOlaPsp5BlXz
2dKdQ2c56i8mQzwPUmqubEW88OKW9Y1g2OSEoruZDqosBLjfiVTlwnH50iK40+Vp0ZlQSTXRx76WkJPpRkpIrH5lkpedovOb8vqaa5ugsHqad0K3VxmDWgnkzNVHHShFahbtf0zUaXTjcK4AS+nqY5bQjYSglHJFuFMOn0vLWTsCzaUwj7LbdB/bic999/qrR/GdXX+YwX/Gd6wc8X0xYzgcoFTCZ4/7hnJcmV/zBvfd4NLjm1/2rotcEnH98wODWmvaoIygrEhe5AEjdQEDvhw9u0AqW1S3BjETz4qAtPteYOkM3oqovmhNsSQoqcHw456Q6YPiujBqTDAkRk6ebqIvUBbJVZAvGqxtGjSYD2UcFXhUMF2KMXV57GcnuCVDaZ/FQDeDOIwMLFX0kI3gDtmOYLHA0XPEn73wfQ2BiNlQ+4+8uPsO3Lh/2ODxhPgvbTddIAu6jv12CJ9hAeaGxH2t8lvFsOBXaeSFrYHAjmK76yDNfl2xOxkzfNkK1b2FzN/Cv/Nwv809NvsNDO+Bvrvf5O/PP80sffZa2seLT2mn+04s9jg6WfP7wlJ87/Ij3y1v82sXrsiY3W7ZgOqybfUkA7ZrtaC0m+X3yGsexqzbnG1cv8T959F/yxu3n3BwN+ffVH2Kc10zzCh8UPig6b5IIDJ3XOK+ZNwUnHx0Jni0WC74ItLc6VO5ZHBgxNl5FXFY02s5nMXZbhcs0zUQU3nULKo2OTEyI06LYJX2lhMiIKG9YBYYXgn3V3Xb8OH9k6Yaw94GXpCMWBOyMoojrJZgEmA7bn+EU7yxuU+1bPped8fLtK84GY5aLEr/K0JUmVAblEuZMEivxkKXHW+46mAQbRFw9Eod61fII6K5uyZpOmmU+U+hOE7rQe/oGYtJsdMS4CZGnnWiaSSa6VUuPXclY1JWGbqApXIj4K/nspg4U1xr9Sk2Td6zKkqAt2RLypdyDNNIkjoUTvng0qGkPRAcRZK/4Vcbzq9sMTjX5XMaBm7uio2caRXGl0FdRbSBhthOMRUlRrwrBtYY8RG9BegxeDyUxWwjFb31I/n/m+gcmVyGEE6XUx0qpN0IIbwF/HPhB/PUvAn8h/v7XfpwfWJQtoehAZz2QsmfyxaAj5q1i1mgrUZsdHG4Y3W/4A3c/oPaW//zmKyJtH2TBqajrUk5qqkrjZjbipUQlehcE/v9u789jLcuy807st/c+453efVNEvJhyzqyBZBXLRao4NN0W2VRJJqR2A22ogTZkW7ZhoG23bAO2BAGG9aftRsN/tNGG0JJttNUi1N2SJYhoidXsosimRBarSllVmZVTZWbM8eZ353vGvf3H2ufeF5GRVaViMCKLcT7g4d35nrvvuXuvvda3vu+By5XmvZNd3j3ZZbmIqQuDOYxkMk8cYa5E+6oH28mcKy+POVr2eM9cITrRBAs5wasU8p1aeBZLveqEU8ovfpViPE0ZbyQkUcnoSkm5MCQHRnRFlOMLO7fZShZ8+9vPc/9Ol3tbm0SdgpVmTSWZC5N7a4yc1Q5DV86bGJ8ntnvC9zkbg5WoXnO/WJVzRgAAWERJREFUr9s/ErUTOQafCXPGrE7UvWjEtfCE98ptbpfbflzVelytorJa5CBgNUmvRO2aRd6CywzRZkaV1JTTdGV5oSv5AYHDdCv4bEZRGsK3O4QzWcxFq8hh/JiDD7wiIHC4k5gPom02+wtiIy3hW4MFx1ZTTVLZ5Z1IUKe98bdZepNsL0hpA8mI6cKu6/VOPq9qgqta0YtyfubFm5TOcCUeEeuS3IZkNuSs6vDmaI/9SZ/53f6aAF9oSGtMpyJNC65sjAG4O95glhvRM/N8g9rrt+hCAmB1fsFwrLTDmu9TNRlHeNDw1c8pv/vhS5SLkOR2hM6hW7JajQ6THnd7e5z+qS4v9o7FdNhnPtObIeW4j76SUcc1i22DTiq2t2ZspQu24gVaOY6WPc76Mvs3XaZOa6xRRHOx8LERMhl6joZwnRQ/sbV
PbGrunuxJVsVrcDX8n1VGpdHPeYgbYSPIN60otI/FYkPVjmxbk2/C8oWCqFvQSQqwmvk0weTJqtHFFI4w9e3/hRW+SyTvkVUhoaopneE786v805ufZu4V6V0ksg/BXIK5YCm/0+yCxvY1wVIyJSjJXC4vFHAWEU4V0UT5DZSTcQolk2EjR3a7T2dfi3l9R86F5NqUX+l9l6kL+AfzLf6D936V0TSlyoN1VrXS6GnEyXHMfzsc8Jd++l/w5e03OHqxx439bbiXiBBsJCRyLKhXZiJ4+kYXHbLSWqsrhTESBdtAvo/LvTEfjrb523d+kZf7x1xLTsmqgFkecWy61FZhtCM0Ndvpgl6QE5uKeRWhlGM/tJJdSWRuqGMHgXThBhtilVMNDGWuCcYBwcyT/UOfpQ01ZRemz0tAEk41DacxOXJEc1iJWrKKC+QnoBVFH8qewhTC+aw1ZEND2RNJkWAJ8WmJyerVeqPqWuaBZrpbcV3XndnWyOc5Wnb5P9/487zcP2ZZhkRBze72lHGSkp2k6KXwgPIhNDtlXam1EKiBOhFCvgT3Fqz22WyfnapBaRHMnr1WoCJL/H5CsFgLdSurV9Wa8xvyOpLbo5Ej31TM92ROjsaG9ESBD9AeFGxWq81QfAaTm33soILAeRkPTedAYXGiNaaV16+T+bXc71DQIZhrgpm4ZOgc5kjCIpwiv9HLFZtXxhRVwPykAy6QTHClsbk3pa4czvpxMKBDK/Z4OlgHVueOu3GAWM0Z55p+1tY3DveQOPOPgh+2W/B/Dfwd3yn4AfA/QZbJv6eU+svALeDf/oGvosBahTEyWUkqH5RTq52cq2X3Anh7D/lSfu7qDX5+43tsBzNOqh5f2f40eRjKQNVI9D9TxGFFOSwoR4bAl2KaVvaV/oj/8UlQ51gsYtzdlK03fJDihFw5eklLRurDDrfvptzoWZ5/6YBuWDC8NmLU7WH3I2mPvbTg115+i2Ud8Xu3XyCbxUxnMTjphnMGqlnEdw8ukS0iom5BOKxZ2B4Y+M7hHs9tnvFa/4B7Lw44Pu5j9mOsjUhyyaaIGKCMTR158UMrGSzpwjp3svgga6VX0rD/VgGmn2yAh/3dVx0wpde5qsRf8HwI9p3pVb42eoGv375Gv5uR3A1XtjFVKp1BU9sD40hnEhDock1M1aUTzSIFwchQLTrY1IpPoScVNwJ0uoRyHhAnBRv9nHE/JT3CyxE0gYNEDKJ1JZNBZ2vBIkiI/0WfadRnlMoHzndkS9s9kF1bo5RedeSzBwtWpaGVL6L241nWmKLZLvpzqgI9M+xP+/zM1k20H8/chsS6ZDOYczU6JdYVbwWXeONOH5ULYd8sFeYkoexbZhsB705FHsLlRh7TiCg6yQqVQ0vZl9b0+kACOxNqtDsXaTXZSVg3DniypzVNqdNRLkPMaUi671blhJUUgM9MvOOe54OXt3FWEfZzNnoZo7Nt4hNFphPssCLolkRxyWayJNCWk6zL+/u71NOQZKx9sCKHFs4c8dQSzGuvTYUITTaH7jtpX+4c8m9svsGvJz/LnemQs3GXutIwDTEz2dQES+W7yAzhUq12yNbId+JCx/DVU+ZZxPGFnpQO+iU6qRj0MzpxQRJUZFVAtowk+3Hew8wHocp5oqxxBFPD7cNN/lb284wnHTiKic40HS+T4owv90+lE6qRIDlFE0ZSVjFL/Lnv+Inn7hE8X7OoIg5nPZZ5RHaaSNCNfAZVK6ITCUDmL5Zi2XMW88LGlL998ov8weFz7O8PUZNQylq5zxT6uU6XIqjLQcxvXvoU/+bVb/E/v/a7fKX3WX776CcgkMDRLeR8e257xEa05Fu3XpGAK/YZbAs2W2dSdA3/3a13uRBf5je+/jnera6itwrq3BAehkSjdSPFUsNpV4InG8oCV3es/1357sDAQQRqYVB5gMoUWvlkrJXfCo5Vg0lzruNEBoTAUvYC2eBWUjqui+a8cutSoWKV7UBD0becdRUml+C
1juVL79xXxGeWqmsoBwG6cCIh0sjc+EyWC6RBR53zrwPAOKxTvP+H13m3c1UU82tF9NKE7f6cqisyFp1ezmZnidGW20eb1LdTadDwHZlBJkR7GzlUUuPs2ikDDbXPiNYxqNASJSVlL/aZLbva2DwAL8/SyLt0DyzxRDG/pD1dAkBLVjh36Fw27ysZC6Mo+hKcDr6nqTqRWJ95YdgHHD5g1azWbLCTfUNy4ogm4hpSpV4+YmgpthTPvbbPbjrj7myDiW9mc0rWFl1KhjmaKR9YNdlCP+yBpVTy+7KRoq6UV/BXq3nIgKc5nMtunpNikN/PEwiunHOvA198xF2//K/yZg4olqLZlHi/vyC36NLvbH1ZUG0WGGMp73VRtSLfhE/17pPokl8//Fk0IoDYnKxNR044UywXMZuDBSeDGA7Nqm3+vOFjA+WAQCwtbC3ZIOm8k9JUkMnzzFIRTSFYGg7uXKEYWvTVBVGnoLxi6fRyXtg65deGr3PBzLgYf5HfO3qRm9NL0nHTZB+NY3mWEpwG1Dqm2C5RwxIczBcxb073ONnt8JPb9xkPznird1HMYfdD6SQxksVrWk1RXu9m5tXZvez6inNVrYOrVSr0oR3+g9kPv9g5B5WVenNdryJ657yIaA2/f/c5lvs9wlPN6eWIjVPx1atDKIZe/HIUUA+kZh002aBS+Ayqkh1rlUpQPXxbUSeBmLVWjnBmQYMuJZ0ZjAKqowGnmzX0auooIDmTVL4pxBpjxROINLoO2ewuuTCYsX/jCuFUygnKQjSRslrnyFKmiuISVK8sSJKS2WGXzs1AlMM1Xt+LtehtJWrJqlark0lZKaGc7Q/4e/kXqGtFXRoxdzaOIKqI44qNVKQKdCFZm7Inz0+ORVAyr0I6+5I9yC4434Iv3T/KSVdluLuk18kYjbvUHyYi12DW39/Kr68pE/rAuCmfrzKHxqHDGuVC6UQt1hnQRjTVaYW9qZnSJ14osisl8XDG8DMnnHywSf99Q30cUgwDSlLeD/vSyVtCMpUFMJq4VabVKegcVUSn2Ur5PFhGq3LZatGuFDezbT6d3OV/c+W32K82+MPZC9zPNnjz6BKTsw52HFKnvuxWKt+e7zw9wJct5ppBknGlP+ZmWDGbJ9hJBCcx2a2EzGewnQFtEW+/3G8ACkd8VhGOMj8WAcqKT2FuYuY3ErqnUqaSAEqOXayNpGQZTWqRDABcoIjCimUs71GjULnm/eNttnsLtpIFzw9PKWzAWT9lWYSMJx3cNMRMNdlexe61M/7MpQ8pbMA/eeszHE57/MMPfprgNCDJFMpvJGV+EMcE5flUTdB474Md/m7xRf6Xr/wur3YP+Gr0WcxMCz2vkIXr3mhA3gtWEhCiiO/8n2Rn6kQWpt1gwrXhCf9V/zNwHGNHEYR2XZFQSKbOZx2FKyt8yWKoWV4tV8KwTcbJhRYyQ+euWm3ApcrhKHs+g1k4H5jJvNK5FVClIh1hlp5DtnSrQEhZv1kEzitvmyWoWlMMLWXs0Ll0cYcz6O7XQkm5LlmQ7n1vylzWq7lGgXB38loyWlqvz2XtiE1NOFEES0OwkHNsNtvg7tUOgwsz9nbHXOpOiLwg2nwj4mgSYU+DdXbaNeVy/ztPahn/c9td6ZiF+P2EqhOvymJiHeQN6Jclq65vT/GoI8VyVxFkiuSsJppYbKiYXRa/WpNZgkUtjXa1RVdW3BUCTT5ULK5XcDsgWMhvXbKK641xs2FrlM+Vg+TSnLzsSQe/gTKSNYPNnD/10g36Qc5WNOfmYot7N3YIR2alzK4896rZoOvC+lKlT8jUCmsVBFayv4FUOKqisV8TikWNRhdmFQ+o2icRqmqdePgjdArCk1ZoV4iJ6VTeVvldf0OGVZ7gc2lnzHa64Dvj6yxLBYOS/XyDf3b8Ku987XmqQY2ZaeITLZoomQQUQeZYHCZsXzqiuGpYToYrz8FmErWmmWjW5Lg4Lpl1rQ+qRDXc+fbPYC7p0qqjCGe
WzXeEJzLd75HtOOyFkmwZ8eHpFv958rO83DnkanTKdnKR+2feu6ojisloh8s0JlN07zqquxGTVyv0oGRrY87R/gb337nAyV6XYW/J5eGEvLfgjtmknAeil1RJQBkshX8TZJwj4bq1o7n16etGMM020eW5Lcy5mvMDpaTa+f8+sLK+HOa8KJ+FbBkRH4llQzkIRHgzl4mwKryRb6HQSyM7Rf+jEPXeWsT2akfVCbA7JeqtiM6BpJQbbRacLNR17LBNABYbJi/JMTckZpNZzLyUz6sUtg5QFRS14ZXhEe4XFPdONqiOJBjRnhBddhSLPYX+7IT/4Uvfpkbz1fgVjostwrlBF1KeAVZtz05LGl1XknGRMUO+l6WBO32SGd78F09Kj7Gh4mQXskuVtORH4HYKrHHMVQIWqkFNORW/varrsInFqWBFQLWxxRhLGlZUvQwbJOtW7UK0rmrdZAuc//7rlQbYSq3BAsaRdAqWcUwx0CLuF0uGySyFJK5LJ/yJsaKz79BVyF2zxXPXj8muTanvDYnPHOG8IaY3pRohJcvriSirtLj7x9VuRdI1OZ4IzEpWpU4c3z65zLIO+Vz/NtYJMXhWxVS1BNuqVIQTteJeycbIf8ZQsny6ghv3tzFhTXUsJuyDu2ttM1GQ9tnPUBaXYNmIL3rl7mkGgY9eKyn11YkiPlF0joTr1cgM5H35/OHCEi6snJdZJQtZYLBORCBt0JB9FdVbAw7tgPsJ1Kn19Aj5vqNjQzSVReG1L93m04N9JlXCtExw84D5tE98Iqrv+F191bN0XxqzN5iw15kwLhK+ffsq7igWNfZcM7o95D92v8RzwzNcIM0+0VITzGRuXL434G7cF86hz8KFS0uwsIRTv/goMUr+xvwFfqpzmz/9yrt8d+ci40XKchFhhpmUrpwiKwKcbzyymfAPVa1EYLhbYRXYaZONcXR2FlRDw7zoSrl06jP2Pvhe6bdZGfeyq0iOvbl0JZs3XZ3TW/IZZ+198pyXFBB1d4hGwp21oehKRVNLkDmKvma5qyj7TugfDk+Mt2uF74bb5TeihIEPQhzUiiQovS0Yq7mid9uRHgZk25scf27Kogg5u7chpO2BmCWnR2trs2hSE2Q1WCNJhbjGdiw2CFZBVx3J63fvy/PmV9dBqMlrEQTNqjXPVoPrRCs9uNGrivQgpH+nwhSW9ET5jaso11eJIZyWUvosa/AEe7NRUG5mLE9SkvuG/q31RgPWfCvw4q+Z4vrWGadpzlm+g8mlsmMjS9rN2YoWRLriq/de4ejekO4NmcuDTJYqVTvikSOa1YTTcl2dObeuVaX/vTaUlPMZNC3njGqCf5Cgqq5xRSHZKlv/kbNW8KSDKwcmrnETnxlQPsWLT10Xvt5ragZhxu61MwCyMuD3Dl7k4PYmg7uK6ixAWcTY2e8cG/HAzj3D/edFDbt6bcJyHqMPYuLROpVqjaQFhY+k2OtPGb9UclTv+Mm59jv8mvROSHZJdhX50JAeyvv17lo6B4riToQNI+q4w3+bDPmd0BF/dkRZBmy+JcHI5AVDPnTUQ+stFCQISY8sugoYfQYuX5ug9xwHtzcJvtlnqvscXqpxvVo6bZYSlAUzSW1GY9ldNyeOLiy6qGRXVfqTo1GeBTmBrAOzDrLOc61W3Kgmc1Xbc+KhbrXTk0CO1fvmm0IIN3kgisJOnNld7Kj6NWZs/I51vQtbWeAoIWEOt2YsL2yLtYtfiJudlXKOuudw3QpnIoIlJEeaaNrsovH//efWGqUtTsPZuMv7wQ6/eOF9yh3D5KWEyhpuzTe5dbzJNO1SbohI7TdOrzMpYs7G3RV/r5msYZ25giZQ9BuFWoJ3G8oxqqNoZaAqA4bvTvRlxlLOXRdAPQ2pOxVcX+KsYmtjTvfFkqI29IDxPKXqifZawxNcnqXcu9vDxZbdDFnE8xqTVehlKQFX7bxIoEU1Jd0m/e0zfdSKT184YL+7ZHQtJQoqOmFFbTVlZThZxJS
LkPBAiwH2XPgm4XHIrfwi0e6C/OWCbBQAUrJwsYXAYdKKy9tjtHLcevui2GeMPRfGhdhAEc5CdFbK+JZ6RXxVVgIEB/yzd17hd48+K8FO5bstC0WspHTbvSfBenxaYnIxWbeBwu4ElD2w/RoKg9pP2HwPwoWjimWnPnu1xHTkS6oLDblBl2ZF4haCfIQuOpLFraR5Ipw7sh21ygRVqWb8MhQbDhd7Ucw3JPvxALcjcDincKGj2IB4BLZXU25azGFEOFZ0bxtvVIwnV4uv4Ow50MrxW3deZfrdLWziSI+16MN1JENV9S2vfOou13tnvJAeE+qKRFVoZXmlf8R7011ujLaYL2Oq0jB9b8i3Q9l8BkvJ9sQjmU+yHaSc67P54UICq2BZr3S/XKjRZcBv3voUr/eu8qWdDxnsLplUKa8fXeFKf8wr/UNyG2B92SA1BdYpSmeonCGvA755cJWx7eBCvxQp2O4t2ElnnG11mGQx40mXOjOohUHnivhEU3X1KiOyvCjdxMFMkx7Jd+KU/EbDpXvgt7vK3GtNHSuWezVlTzN8B8KT2mttKbKhZvRpR92p6dwORCizaLKQGoX1ZUEtWR2tZJPg5zaTO/TcEJkaPj1leZqS3g6k4mAlwO/dcZz1e4wTx/Ybcn5PnwuwoaNzWK86+aJJKV2rQSwJikpLyTFGfBud+JXaeJ08iM8U4cLPkaX/s3LMCsB6W5tIOLzZxZpiE5YXzaqZIRo5io2AKtGUHUUaKbHK8pWRYAHL05iN584ok5JZt0PVEf7gxgfn5kuf3GgaUcZ5wk5nztHFAVVuJAOZabJFxFdvvEJ2lNL7MGC4lPUA/PzpuVXxuCZYVuispHEhabpJg7DGOSVVA5/xbDwqdb0WvwVW9Jf1GuegoVc0/43hR8UTDa6Ukxb+U4D9lCoR4rK0m0omRjnpJsiqgJmfCMpxzKxTEZ4F0iabyWAFmbeTqBqxObFLmX6wwWRY0d+a0x8smRQa9UG44v00gw2gMs1msuBLOx+S7YUkuiTzHULfPL3G+9Vlrr9ywFYy595sg8PvbRMsNMmhJlg6wplbiRgKgRWm1SZVR6JrVTt0YeQ9oxoVV1TTLnUqk1c8soQTw7IK2Ygz7NURo7MdwrFi8J7BRoY6gmjisyGrkk0TnJ4r7TV2A976BFgLgDaorZAYausnhGayOXfZkzMfJTbatMMmnYLsZbh+8RSA0etXhHjpeVAusFy7fsyd/U3sgSgON/IJQScgOOe4fKk/5e1PdykHMcmxBDbFQNLS1ihcVDPYXDC9HhEs1MpSpEq13xlpAmPW5VDfOWMrze17W/yBqdlNZ6RG1Lg3oiVbg4jjsItZaE7vDpmdbGMykYuIlxCPLaaAYGllB6zcKnjVRY3JAJ+F1IVIbUT9guyCqDuLbMV6sjO5ZFnqxNG7jRdaNNjAsNyrcf0KreCVjSMsiqOsR14GFE03bcMbrBTpvsaGWjz1ch9gr7gC3kOr+f58CaTxdzS5fCY9N/TDnGRwyiRNKGpDVoWooCJOKzqbJwDcvLDJZNohn3SoEylVdm4blkUXvZvDXk3aydlIs5XStMYRmYqsDnGRZLBsKOKKDQndBXKeKgs6a4je/vx2ml5Y4EpN555/TOE3Q55MrSsnJr2+7KFLXxK3ypeWHJ2tBZvdJYfdHhPbBRT1xYKNzTm/uneTntcDOS273JtvcOv+dZF38ObgplCU/UiC10JKkNLJ6pi+4JhdU1QbNeFmRhrWVJWhzAKKQYLJNUGmwQXoWrhFaVSyLITEX0cKnVT80ivf4861IXfPNhgfd1ClEsPpUgLKquuodkveuHGZ8FZM774i35LFL7tg2X7xjIu9KS/2jvmJ7l1yG/LG/DIHywGjPKUTFnx6sM9PbtzjU/0D3p5e5MOzbWb3Epmv5mqV/Q8yRx0qqp4XUL7v1ouazxo0Gy6d15gcRtOU0d0Bp8sOu90ZW/GCvDLcnW5QOU1Zy+JktGUYLUlNSWp
KIl2hcSjVUDz8dBAo5kVILwrZTWf0o5wkrMjKgPkyplhElGXkO4Ul+1Snjv7LI4rKML7bW4nLxscaDiUQdJXBNZwov7G0gYJBgd1ynIYJ0cSgKij7TgL8xKJnhu49J4bEIErfzfzq5HfmzstFekqFKSzROGCSJ3z5pbe4szfkm/VL1IkmmnjfSc8rVecEXE0u550pRHhaGOUyD1e9mjAtKacRVOvObZG/kblleUGt/UedqJnrUouX4MqORl5PbINk7Y0PDcXQku8KH1VnWp5bG6+Krig7GpMZzFxL9tlBfGw41UN0X8pA5YWSqhswuLGuVkgzlg9MFezf3uKoV6IWAcFEi+hyDjMiiipm44YmnEoGOdvyTS/OB3xTRzQ5t9k970IBaCPZ/bqOz1ntNXOxBIbOgKuQcT13TuMsH/ES1D96cKV/8ENatGjRokWLFi1a/LB4smVB4OXhMXrziD+4/WkRVPStvk1XjiqhvNNlv5OS3g1IJ9DLHWdfcJiFWnEcgmxtBNx0dokLNvRuadSNiMXlkHK3FHsLu05xCyGuxmlFdBpyc7LJpWTCS8kRoaq4Xw6pnWYjWuI6Fdf7p3xhcItwp+bW3jbHeY/fvfEi1UGHzj0hzVV9RzBXuLEiHrHa5TaidPVcscgCLl8csb9nGPVDzNSQ7muq1PLh0RY4xUZ/Sf+zJyyymPFRillowpmm6uC1XaSk1HQgKs8D6d036NyIWJtzIt8AcnklzdD8b25n1WnhtFp3XTzgSef/6hqMQZeWYAlXtk/ZiudcjKccFz2+tnFFiN1LVm27P7Nzk0Gc8b2bz/v0s3DETCECobpyqFJRWsOvfeY73Hthg2+8+SLJvYCyqwiWwidAO17aOqbzy/e5O9/gxnsXsSYgHxrh2yyMr+dLVgnABY5OL2d+0uH+V69yJ5HSAU5RD8WEbnDXWxv1AsKZb2YIpKwZzhsyeY2yWtqJlyXUFl0INyecGIKZfOaqA5eGU1567kNKaxiGS2Itu/TcBozKDncXG4zzhLPsEoA/J2HjXUO2Yzi2it8+6+KcIoikZBX7BgIpU7vVdkh0cLzeTentf8paOiX1Q7tUz5Vr2vyjcUXnfsLrB1fQ2nJ6NEB5D86mHd5tF6TdnEsbU7pRye3nA1RgCeMKd9gj3dcUeULVs2TKYa1inkeUtaHIA9ldW0WyH3gjVxnP5KwmHBeYRYEqKoKsQzAVXRuztEK+nQfspjNuDzPqOFp1/DZino3u1YqM6ksxSltApB50AWUR8MqVI14ZHjG9EmOdSJ5o5bi/3FhpMAXaktcBTeOJZMO83ElWCT+sFPKzNWATS+fKDK1FuWk6SSlHCcHYoBsNK/9aurbovIYiZJBkjDPp/EWDzQLyOuBqd8R2Mqe6KF9uVocsq5D7owEuD2FpiG8mRBNWdlFOK7ZfPOOvvPJbAEzrhD8Yv8gH021uvH+RYGIIp7Jzf+vyVfp7U17cPOVzwzu80D3hN/LPkp+kRGMpU6NkZ1+liguvHXI27VCf9leZ/qZpQJfrln5dOTrdnMVxTP7f7HAz2eHtyzVYKE8My2xnVcpxBj7sSHaljkSHSA0LOt0cPH9OuIKK08MBZyf9c3ISwrHTuUYFznNNZR4vBlIyz8sAY0SU12L88XohVFhlKVYToDFUsXxR2lQEL0ypKk3t+TouN6Q3IpITWTOke9b74irhpq64PsrTKBp+ay38x2gMd442eW14wIVkRnJxTrVlWFQaOw8JR0Y65LoVpz8lS7EzlnCiWW4Hq7Jy3QlwhYXYsrUx57AwuDpYcbmcVtKhfbHg0uVTSqs5fHuXzl2NKQLqVBONNYH2jSOF8OZspCk2hc+687qsn4uL8rpl364suKRa5DwFwa44vWUXqsQxfDOg6gZY0/CLATwZPhO/xpUIdQjBaUBwJySaQHpsiWY1ZaqZPSffv8kdiz3F4mrFlReP0coxzWLGow7B3RiTG8KllsaiSkj2WOGVVmWAMQUuFK5
YHXntOqfQEdgCQGOMw+Qy5npuhJ96vlLz48a5UhYsihc7x/zznkVXwsdp9DqkUwOChaLsC+k7Pa0pehqdVjgj5bo6Ys21OqfQWg002Y4jORaC4/BtmI8j8h3rO6hYcYWa5wUL2L+5zT+8twm5RjmFXnptkOtzWBpeP7jCrIy5nE54MT3ienxC9+Wc1zevcn93g+3hjBc3TlhUEfdmA44ONlAzw8Rzw5quqeJeTL4dkCQlUW9JdLXm+FKfQFuqIiC8GTMOO0SvTkiikt71HKUck3nCfBlKrb2WyUSVGlXJRO0CtW4dX5UG/WRyzjX8o9+HW93elNRWxMDzj38o9Wpyx1mWUtaGN48vsdNZoKwYL4uKsULPDe/PdumFUnoR4r3cr2vniZVCLL95uMXFdMp2PKd/aUp+PEQ5n7YH1MKwqCL+e9vvkg0C/tN5h+J4iLJixFvHEGQBQWYI5vJjVpViq7sgjUryt3eIR16AtYaiH+IC6N+VcyvbheVFKSUFc0j3hUQrCuJ6xROTMptMULqAYCpdReHCUUeKk1mHX750Qt9k9EyGwTI0C4yy1E5TDg33yk3+owu/KoTbjkXnms5dz0cqNb1vRwSZY/JSTLVdktRSPnBGYXoVmxtzljshy3lEepBgG9V9C0apVcnNGfWA+KuQQf13UFnCqWPxrU3CmWLvlvXdN02tHKokpOzEfPDzHZ5/8VBK7EnOc/0zvu6uYW936d5R6NpQ9DsSAC0grhyJDwbrWIK5ZGSl+9M54sMlZjyXYF0pgqUlmhrxRMtqzKIimiTsxDN+9YW3+c7wMlkVUFQyVU3mCcVpQnI/ENmDEnQZEiy17+xUlD0RvKxOEl7vXKGf5HTCgnGesH9vEzMKSPd96c1C2ZNFoXfPkYwaL0dHfFoQHE0hDNBV30sGQDjWzDupcCGnhuH3FNFUnlP2RFIgXDrCSSmNFmWNmXVFYDSAeCzlNzM2/P43XsXFQuzudHNCUzNIcoy25MsQdRiTjKWbrXgl59NX99lNZvz2m6/xWn/MQbnBB8tdXj+5wv3vXiBYiq+jlLQdqoLuXUPR3+StC0P4RfjM4D5ffukt3tq+xO0712Vj6mVd6gRe2jgh3drnd975Ke/tJkKjykqZqFkodQ3PbZ5xEFYUN3aIxuCUoeo7TCak7FVwpdbzr3KimD2/nFL9pGwijPeKrWNQmUEvNF2/+dGF7zgEFhdkA2BykRioYh/Fvj7A1tCrxOPOFPK9roSKrRPSuXPSoKBFeNRlBnsSkhv5/etMEU4k2I9PpaR9/DnZQKeHSsyGTw0UUsdbebE+sDCzEhu1ZxFf+RefI7iwBKDMArobGenmnOKSITA1z22c8UL3hEBb3hzv8dadS0wWqRDKS3A69GVJy1a6IB8GjOqelLuazXbqSPs5P7V1D4DfutqhmPWIx1BahaoNuvBke+slCKw8L9qbs7jfJz1yDL9XoxzM9kRfLxmJZIpTinBeofMalct3Vgwd7tqSfNohXAgPOP7QUfTX/qbC9fIbeKewxlFdKNH3w5XhdNHTZJsatbWk28sYX4n5zJV99tIxsa44znscmD6BsRzXmuVhRLl0KBtilpZgXokrRuBwpSYnQsW1SDDk0k3ccPBsoESSJVAoGxBEGp0lsmfNcukWBNZWFg+VCf8V8ESDK6fh5mRTJplOjVOGeLL+MrRX3646onVVJVo4ALGSjp+uZXFJC4nzQDrxGuKwrmQXU18oWKoIVSs6B5ZorHzHl2Ql6lj5jI8sSmVPfnzRvZDBB/44vaDhSHWJMsgmQ74dbPB6CPHlOVv9Oc/1zxgmS/SO40JnyoVkSkcXXO+ecqO3zQcn2xT3NuQYvdmyyeHkuI8rNHMHyVbGpe0xoalZliGH422SQ0P55oCs47C7kj1I4xJba8pai/ZRqYjGYiHTBG91orGxkR1lo8Oi3YobsNoGNYGWdQ+08D/SoPK8kJrV4C0PbAD37myhJwHBUjF5oUNaCn/MBoq
yKwv5G3cus7s1wUay6ze58wTDNbHQBVBOI37v26+ieyWdbo7T3gQU2anrUvHh0Tb/Wf5FttMFgyTnIIb00HOaijXJ3gUKfHajdoqXNo/52ud6MAoJx9pLF4iExXJLM78K+jMTvrh3j0BZvnVwmVkyQFfat7Fr6Up0EG+kqMpSx8ZnTxurDEdyAqNbff6++RxRUGO0RSlHElSkQUknKHi1d0jHFLhuRV1oepdmpFHJ8abs0rsbGcvTAcFSUQ0qUWyPWeu6xCV7/QlXOyNuzLa4M3yeeuk5RkZhEsmw5QNNNInkHCgM1PYBUq+NjZhfV74xIm+ywOtzIJzL5+q9H3Iz3sFVimpouNIb8ysvvcsbm3vsV5cIJ2vitnJeXDISEcAqdaKqXOnVohoMImmPL6rVb00X68YBGwgZ+YPZDnvpmC9s3SbWFUZZSmu4sdjmnc4up2qILgMv9hoQLLXYpDgoBuC2CtwyYPrekFklPD2TKYYnIrXgtFvJswRzmYCbbHgdKgilMzXQet1QoMSINjkypIcRwVyCKF3KQm8KhykVeV8CPYwSEUVEEma0SKkvFGSnsbSsp47OLUM4NzgdYsOU0sB+V3S10rFsnsoevPL523xh8zbPJ8csbMQ/773ArfGQv3nwC+THKeHY0DmVjF008d2sDQ+lkvM0Him+tfkcN69s8guXP+SlwTHvXb5CeOabFXxz0fvjbS52ZhS7NfVRsNbRU5xTZxehy05Q8IULt/nqlxLKhW9ZKzTltmP+wroRQzm1sgMSg2IohxWRdzmQE0jWCL2Z4zYUC5eIYnm2ViKv+hJsZZsypsVQUQ9L1ElEPGo08vxr+YapqmtQdYgqKhoLGxca6kRh+iVMEwYfyqIryvxyboxfhWKv5NqVE5ZlyNk7W6A08TQiSI3IbPg5Ui8D4TkCNg2xoZbzMK0ZfCcim3VQNQwPFNPnIxYXc3qDJRd7My6lUzbDBRrH9e4ZB8M+c5OKNqKBsqOEN1XCrIgZdpbM5glORSJWGsnvKF+G/M7dFwm0JQorFtE5CZzm9+0ArXGNMCiw2V8w/QXL4X6Xzp0AkwmX1+QS+FSxrJvpqSaaep2vylJ3LNd3Rpz+TM7ksEd4GlDf8z8qK4E2NIFwI4mhePH6IWc7KaPBULpG+xVht+C1i8dc6YypnKZrCiZVzD+/+wKzUQq5QSXS3GVDsKXy9mcKG8n72I7FhBZb+kxKbKkTvaqkiFo9oBV1KXON6SiUTQljQ1DV0ATg9Tnm+4+IJxtcGTiddDmddFHaUadgZ3hZBMlamaVjseeoBzX5tsaUmrKr2N6Y4wYLrg/OCJTl919/leXJulvLlLLr2d6ZMo5T5iYVDye19uGzDym2KodI8PdK7NhIt1uzebeQHmo50SIpv0VTh/1uj3nU5w+vXqTq16hOzZ1wk28FNXFcYpSjn+RobTETvA+XtG/bwKHOQtld3oZi0Gf/c4bdzSmf3jpgK11w42QL3uoTnWr0YUIdJ0w2LcFC0ZlL9qVpqwfR8HAKylRhlobA+UlVnyvpnW9hPh9ENST2j1NobxRtz5UMxZ1doWcB3bsSgEw2Ixq5BVM4yq7Cpg5zJ2F/EhL4MQ8yhynBZFLCAyiGhmgjJ/yXPXQRMH01JKpZBTS1E1kHmwVMXr/IacehXpuJKn8FyRiC3JGcSEdNswiaLOZk0mU7XfDzL38gLeG1nO7zMubOaIPTwy66X/L8cELXFJRO040LZsOKYhCtLCi0zxiXgwhVOXQtmZ4maxVkMjbJocbe26TMoXFjWTRkygi+8cIL7F4dSWkvEILz5d6Ez27vU1pDakrOdlOyOmReRowWKdOtmKoji5SrNIfzHkVtKK2RILZqyJ1ulZUVWQWD6oRov7M2hdcsqiQQz7ctZim/ncabbNUx6s9/gPjEke9HJEeKYiPk67OYL7x0k5c2jik/r5llMUUR4BxkVmOCmjiquD6YAPDOjT3yw5D41GCWUHZigp2IIBMh0Tr
SmGz9u6vTgKrreOPmZd5wl9kYLugnOYNYrFTOsnSVxQrnErA33WxNgKZLiDslve0Zx/sDzEiIswCz6466Z9FbOcaTX61VFMsQZxLyuV8QCoeyAbrsr2RbpHwmXV3R1BHN7ep8Ax8YlI5wIZ/HBhoVGRoXickkpTPImL+oCY8D9GbOoq9hFGIyUasWvT7kRZ0EqdnVkpf7x4yqDr9z9iqxriizgNFBSnJo2JjIeRAuGmuv9fE00hvGW34NvxtQ3tziN17pcfHyCDUoKF0kVYNMyvZHb+5ykO5AKKWhcCGZx3BWibSEc1T9CBvAjfEWWS/ki9duYxFXhg9HW1wZyCYApMEhtwG5NVinRXXdaSqrefP+HsqKGKWyIqGxtTHncm9C8kKJdYpJkeD8hDcrI+7vbzIpY2meiaCzuUT9zJLRNIZZY1fk0Est3q3bAXWsiY2SEu8qmw9Xd8+Y9GOmbotgJhWLsueotip+8rXb7KVj7i83KGpD3bHkQ8X8oiHoG5GnWPqO5WjdUCOq5FBuWMJugS4j+a3V0igTvqGo3o/JNxPe/cmY2+mQ+TjFObh4cUxtFeGEVTk8PRUrGr007J8OGA4WhFElXqRVs7ly2GlI9W5K5WB5vST0VJhgKVpVOq9WpVGFbMTMUjHLYn7h6gcUlwMOPt1nkiecTLvMj1KWF40v9dVkhwHRSNMZGPnNzTWni5TP7B5w1Jsz2kuYvxiTz2LS9yMROo6UZHFnovlmMgh1zae2D7mXiFp/N8wprJTI7y8HzMuIu8dDylFMcj+gmzdJl4A6lq7WYOkI5p5GkFU+aHQkaUFhpAkOJBtZe13IldNAIs1zNpD1NFxK8G3SWLrpnVt1WX/s2vhD4Ilnrupa4w4SqV5VTSeUFwItZMcVPjdnI8k5rYfkWxq3WfBvXX6LvWjE9fCEGsV7z+1y0u/J6y6NpJJL+NTgjGuDM94OLzKr+2sJAKWoOuCMdPnpQgTIGJRESUUZOqrEq+L6tarq+C8jgrAR0itEZTo9BpTGBoba+981k+y9F6Hu11w+qqUDJ5IfZDm0uMAJN6pAtIPKDvuvRAyinI14yU9cus+tTsayCJlOUlGRrTSlEa0tFzpc4DCDgiCoqStDNYlITg26Mt7oV4IhHfodtFfLdV5YUp3vBHxYO+bh0mCT5fLaOyarJShO63XJrF4v0it+TKciOIwJG02zWkpb1otygpSn2Mm5OJxyZnsEC0d0ZAgW3qQV+R4YlARRTXosAcQ46hHNFLr2QUMl2YOVmJwStd9qHvHdO3tc3hkxiDM6QYFWjspVDDoZiyTBzgM+uL3Lh9M9dK6pe7V4DI7XvJt4Iiu/CJVasI5oGuKMXrU7Z0MJCjr3FNHs3C4RVn5yqgo5zreIR7LqHedbHHU2eP76EZuxdNptx3MKGzAvt6hXwlQyDuU45mCUcDI2VBs1vaXXIJpLoBIsaukijJWkypfV6ruOpg6ULMA48WrEGaqOJkNKLeJXyDndOddY6xEsnFeNjvnW/iu4axnXLpyyvbkgUBatLPoho8usDsU2p1nkfWkSWFlomMISZFqESy3UHU25XaEmIZ07BrdIGBs4jfC7T58R81m3IHdEo4pgueZMBMuIvFbs9Sfs9SfkVcCyCgnPdY02KvoAuQ04WvZ4//AakVG+U1CC1qobCH9mKvNTmSqKIb5jVa++G5Rs7qxB7DxqMY8Vyw6L3S7RGpbzCJ1WlENFqC2ff+HOimO1KENqq6mtoqwNeR5ijGUzKvmtG6+SH3YITzXFhYroKMAsFfEZK25i09U1uyrlPRtKkBeNldehk2MNljB4M2J09wKhcl7BXTqSAcKZJroNxaZZc9t841oDnUtWdXQ04PjDLTqXZ8RhyW53jtGO/Vn/I/O/dYpAWQLf7lvYgGIRekNnX7auFJN5Qics+UxnTKQrLsQzP1UpjvMuJ50udSIBoaplTfm56zcoreH+YkDtNf3unQ4o646YQluxQlOVaIzpWrK
NSVByffeU08GEvA7QOPqRzBXWae4uhrz54WX0OCTwmTec884RolreeHs21AEH6MJSD2DQyaniniiXA/lAzvUgg/Cuo+qm5EHKxi3ZBB29toPtVWwvZPMIEE4lMHGhplyEHC03MHFNwEOd7z4okwxsuNp4iT7f2ty+mddtKBnd2X6Pr+nnuLIx5vneKbrnOOz1eDu8wDTp+i9P+FUmaazJFOFYMbs14C2gl+REQU3UXzCPKsobEY2QqPJ/zVz43t0LbG/NCE3NJOvy4WKLchGhtCQfkiNNuoCO32QLv082U07jnQ+Ez6UL4b4pB9SKOCzpJTkH80j8PCO7ymI25u92Kd2pqpY7rJFMrDMKpSW4wq93rtG4+xHwZIMrBUFQU3niYqMp8oA6r4H/zpXb7CVj8r2AzXDBhXDCS9EhcxvznewqoaqJA19WKDXKa1ooq7BO87ObN6R0srvNKEs5GPVZ3ungFORzRTiTqD2aWXqDCcM04/ZWxPxqLFyNrhMLnn6JvR3LpBmJOafJGrG69e6+0T2SKxCNFFVl0LVdCxWGkO7JJLFwXYoNSb+Gc0d0Ynj3xiV0XLMxWLDdXTBMllzqT7Eo8iqgdvLDiIOKQFmMttRWsz/tM8kC6iigjhBuRK3Xir7uHJ+qOcRGCA9WtXEJZM4tjkqx8so651Gnypo6AhXblYO93s5xN/wkFkrAHCYV+YWAYKxJTmQhrVJptQ6XitpqDNDtLbncG7O/dcmf4JIpqtL1oQRxhTHOO5yLoF8wl0nFhlJysN4hvRHJqzZqgrjGHiQcvbPHfrIOFOq+zEbxQUA49eWqhZQWi4EQSaOZRZfiIBAshMdlZsIfc4HGFBZTaJEByC3FQFFcLAlnoW+pbmxmHuSaqFqRHCt04Xzrt2H/1hVuXqhRgwJbGOHVAYRWSkML+azlwGByReeeolgE4r25XAeYyjp0URPOlOgReV4TWhNNasAQzmvJbnRqku0F440O0SAniUvRhrOavAyYL2LKLECPQ2yvIptFkoHR0LulKMYpN/cu4bo1YUdOfucUdaVxhUbNA5SDcO4FNw+tL5+WEgTbhuMVYXIRbAXJuulOic1FpgJA5yLm25yvDUl1xeVYbSjW53A5jrmZbHJtOGIjXnK1OwJgWYec5F3uTwfklaGqjMgBOOU3TlJWi7zCerCU3X58JrIPy21NdkFqVFEIVQKLKxa3XdAfLCkrw+K7feITQzzW3gPUceHCCUlQcfu7l6T8pOUYb/U3CY0oh8emFikMU4nBeNBhuog5OxiQ3A3pzkUywZmAaKKoQ6hTqFMpxWcv5MTdgr3NCf0opxMUVFZzfz7gZNIlHyWEx4G0+nsV82iqVtp1JvOUDCOl6OhMgnFrZOGxgUaFejVh2FChQ0u0b1A3NshCePfSFmhHfGSYqt313G9EsoaVhIyI5CqrCOeKzoEEK/mmZnKny63DDje72yjj0EZ+PApI0oKyCEhmwousI5iOEr6d7hEFNfM8IstCbG2oFwFRrlYlbp3bVSDklJTFbp8NmRYxV3pjBmGGRbGoIvbHA+7e3UJPAzoHws9Dy7kYzWRDE85kcUezsoZpzlFTWnRqubox5o3nh7jUopaarDAS6BXyWso6VL7OjgRzRV0H6/XE+blYAf0SBQR3Y6qBWfnMrrxOq3X51HiOax3JuqisRueGxgbIKRFRLnuS/Sp+f4t3Njb57qXL6Khm0FuSZSHmLJCyeeFNsyfCoTS5xYaGYK6pvrbJ4YbYQwHYxNGbQXLqSI/rlWi0C4XSY24nTD5IcYEjnCrSJXQczC87wpkiPVzTPOpEAp9mPWicJGygqCPPN/XJBFUpsiIk6mTE/ZyyCISn3MgSWQXVukyo3PpPJhotfDznxEe14S//iHji3YKfv3KXo60eH3xwkWwpq53xNjPaZ0CG4ZLnk2OuhGeEqqJ2mq+MP8vvHz3PvfubmMhSZ+JfZZZrR3GAN+7usZvMeC494VpyyqKOea9/gd8
7exVVKRqje136yQK42hvxyqePCD5bE+tq9ffu7AKvx1f4zN4hgaqZlgn3RgOyRYQ6iAmn2hPKZUIynltlY5lIal/jFXdyuLQx5bWNQ+7sDLm5t8nMexrayKHmhvBOxEIljDY2Pcm1JIwrumlOVRuqWlMWAXWtsbMQlYvHWlwoookvjyxF7K9RwAZE++hR3Q/nSoarXenqumSAmuCweYwqZSfwwtUjFhdDvrS1T6wr/sn9nyYfes+7xLHRX/JLr36HN8d7fO+b1wjm8ga6hCoJCBdeCVnPebFzTPdfL7i/HPDO3YtkJhG+QmPpEFdsdpbs/2sKazX1LMTdD0FJKcNkjmARYAK1tlzoVgz6C07nIem7a4sk8dMSsmawFG5FHUvaWbpKWJWAzjdAPDxmqnIrToeqNfmW49r1Y0a7CdNFTO1te5Sx6yDXOKKwJj/rE2TK86nE/iacGYpBSnIspaViQ5HtuBVfRnwlZSNSdZASs9+QrERZvZms+DbKpgMrNhAmt4S+2xavF/VnnnuL8rphM1wQqprwnMJe6QyZDbmfbTCtYt7b3WW+kI67JR0A4hODOjSoWsRdTbbWRNKFZFJsJJneaOaFKKe5WIV4hWydmXOZxxpTGLRxbFyYMsqGgBehtWr1espKIwr4HanyulkWQHRxgnFAdjzkXT2UsmcgGZpopAjnckxJM4HHUHUVnZnz+mZWzqlZSTArhMy7iGXx31IMXjij+6mCvArYSZc83ztl6McwtwG/YT/L7F6X4kz0u8KJYyMs+fTwgFvmIsmJ6CnVqSJ7Z5cMCcLrSM75OpUOrmAhXXSduc+IaDETrvrCKC/3Cl69fkA/ytgIMy7GE2o0yzqkcobaKYxyXEhm1Fuy8fxgus3htMfsqIvKNM4YtF+Uw6mU9Hl1xuhKRHwnRFnlOaNynisrAa+qRLx0azjjdBjTu6kJZw5daupUbF5Cb5qs/LGf70yWxoOA+RXhSYlmoaPKZYE1S018pr3tjZ+aQkW22yEIHemhIxlb5pcMZhxQf2eHzAetA19mLjZkPQl8+a7h/jSLZx1DdrPP2ekGp8El6kh+b2YpG5rdsc/yxm7VqW3ypiRVo/NafPaUWjeD+Nd2QYCrFcNowZ/7udexKJZ1iMZxkPW5NxkwmXao57IElxtGDOh3curcUKXRqoGiThROGTr9BVFQsbgZE3qRV+nk9HNRp2b8hRIdWuw0JDkQo+PGTxRCnwiQCb1KFQxznFWkR5reXeDNyAuzdogUK+V7ZZuSsyM+kw1SOTDYyNG9I52pqlYkI0vZkYDG5OsObmUdVWQo+2IxFE1kDQ6nbrU+Nj6UdayYX3FUG5bu5SnduCANS0aLlLP9Pu47oVjYxDKfmVK8D13gWIxTFuMUE/uuce/RSS3ZS2q8aK4/r2tWWT2nJIHRuHIAKx7dj4InLiIK8HzvlA/0hfUN6lx3gYPfvv0y30iviohiFZBnIdU4IjoxdGfyS1O1TLC6EqVY43dfddzhn934KaqBJd2bMewuUUB0YohGaqUWHiyEjHq83+c7TjFIM3phQWhqylp2s70w5/LWhGUVkgawES3p7hRkdcDhRo9FFrEsA7S2aGNRxqK1I9WW2Txhtt9BHMYdaDiadXltA/bSMXvpmMIGHOz1OctSDo42KJwEC/GptDybTLIoy7Qvu+olpMtGMFXGq47xvAgvqthYbnjj5sYGRzU+c+ehH4rKzwdgTXnQC4oq66N53xof6pq97pJ5FTEnWtnd2FCClPEsYVR2uJBOeacnfnogi0ix4RdDp5nNE7433+Xl7hE78YzjRZejSUgwM6sFsSwNWjk+5YPctw8vUowDTO7JjZFiWRsRbvT+Z65yaCXmzZOX+uIXNlWriVqXcixVB/JtR7lbrjIw1XECzgifx9vgmMLiomCVzdOllXOuWmdQrFO8un1EtSmisBZFbKpV2QykVPbuawF5pYg7JU5bRmepmNVWslOSQEICIOcFD5V1Yig7LFC
v5rjCUEx6PrjDd5fL41ZWPUahnOdcZZWU4TIpHdbjiN87eBGlHEVlyMuQspTz3hhLJy6JgorNZEliSl7aOibbkM3QwaDPeJpSHSeYhTRWON8sAjwgamgyWZS0L8fIb9x5XoNDZ5Xng7mVs0Bdai71p3ReLhlfSbBWrMWjoCYOK2ZZzPS4gw1CTCZZLFPolQhp0Ye6a2Gq6exLx1mwaHb164C6sT9pgppw6su3Nd7xQKykFGAysWuyISjlSMMSrRxlbXjr7CKV1czziNDUVGXg/SMlMx0u4GDcZyPK0Ns5eZWQHOrVxkVZoITkZG3h4nzQ7LQ0/Jz9VE24mXNpa4LRlnunGzy/Neb5/gmFDVjWId8eX+EsS7l/MMQtA3Smxa+1V5H0Cja6Sy51p/S3cuaDKdM85iDdXHUgm7m4R7yye0JyqeJbXEPVESiFjUTRvDE3lmwubCZLspfHjHt9sYAqZeGaX23mFPnuAbn/XLbfGQh9x23jF2dyOXes98Zb0zp82TUWSR5TSuBjfZcf/jfivJxE8x1rJebEuskGO7eyibIhwt2sRe1fl76cG0rGZHZFUfUd5YUSZSzqNCI+1UQztQoyyf0cWSKUi7JGBQ4bGfRRzOsbV3h+84xBmLETz+iZnKvpGZ/dCJlXMd+b7vip2BHpGovig5NtlruxZMAysIEhXDicg15ccHapIhgFq8y4qxRoh+mVXNs9oxsWHM57HLFJNBZVeBvoleF1kMlv0RrQoaXTyTn5ollZyjXjbaTBcSUD1LgXOKNwtaLuWaLNjLFJUZ6zl29JqTz0PEAbNdId1ncxO9y1nOkgRGfyXnXPQmiJ+zkYS20su0lOJyzZTWYEuiZUlmka8z3lWBxsicBuqvwcLQE0sXczGRtQgXR/egNsKec2Gz+1piQFkOcSvMZnRqgzlTh+cK6b/keB/sEPadGiRYsWLVq0aPHD4onrXH3z1jX6vSU6rsFFBAvZMcoDZAczfXfAaT0gOVYElRxk2ZMugWhiV6WHRj+lEZTThWXgtbCcVhSDAWdbGxRDR/dQkZytTT0lTVzT+15CtT/k1CK2PI0IoHYUV6XtRk0D8KaiKq2kvBNXaO2IkwKtRZiwacEPtMU6RdGV8kk4kTLJZL/Pf529ShjWDDoZvahgI1oSdWriSzXHvS7LeUSWGyi1iBLWov9lA3AdKSVY42vRkdiRSA1dY0OIAiVkzcpboTSlv/OCoE00fk5E9CP3wTpyb7JXgKpqorHjnRt74FOu4TDDzDXxqdTlXaBYBF2+mr3GcGsGRjIY4VzGIR7L9xDOLSf3U76eP88bvT3SuJCuIAXxiSehZjCNu9ycxUTdgjguCYKaZbeG04Bw5o2Gxz5rV3jn9kVEXhm6SUHw8piq1uR5iNKWujLYRSAt4MphehW7wxlJUFHUhlMgP+1KVsRA5MU1m0aExrOx4VuZ0hLMI+4dDjlJu364GhYlKC3ZoG5S0Itzrl04pawNW+mCXpCT7Qac5eKjZq10US3mMXYRYGeaqouUvuKaOC3Z6CwpooDJsCvdOFNFkCuU06uU/0rzrJbapi6t8Ag9Lyu9mzC5c2HVoJEU0MvWZZuyB5Ou4mDPUm9UdDaXREFNJy7Y7szpxTlHcU+yyk4JbwmkS9Y4kqgUgdLDAVUnFKuNrgI6kgUqLGZZrvgkLvDGrrlFnQgXKQoqoqDCaIfRFuMzRpGpqSpNOQiwoXB2hOci2Qw0wpccOKZd4Yo1mVObSPlAJTVohw4sWlus1eS3UqKxJj1WRBNN2DHEoUHVftdtRb/pbH/AqBiic+UzVJ4EX0LRgaAjWnvdfbvqkjq61+Vtq7i4NeEsqlhGXVxkKbabmheYhWjX6VpKnMIvg2qjZufaiF6c0w1lTro4nFLUhn9242WKRYRbGsKRIZgrekvPA829tEAVUUcR836fbz+/jRoUDIdzoqBmeGEq3Xu1ZjFKodDcGQ3pxAVxL6fqhsRnviM7F66RLuSc1wVMipi
tzpL+8zkKWJYBk2mHKCkxfh60XmrBWu2nFEVVGmxuSG5GkkFusk0RlDslYa8giNcOwFo5Ym3JipDlaYqyIfHISFZlo2T8eXkPHdWYoMYYR1kE2NMIlKGaqFVGMpxV2MjgDAQXliz6IctLYkSNAxtbXFrT3VrSCyoCYykrw6jSFHVAtiWcXWcUbmG9FIfzXELPZa0d6YEmK4e8pYbUHQsbJTpw7G5N2IgzBlG2agJZViG5Cgh1TRqVTEKRLdDaSxQVjuU04Uw7wn5BWSniUy+C6Q2Ry0nErXqLIKrppjlEwhs12ZpDKNw6oY3oiym21ISm5rkXD8mqgNEsxTlFmQW4eYAuNC4QPlU4MoQTjTUR8VSIXklcsvnCgrLW5GWItfLdlrc7lH0FhEQzQzS1q8z27taEeqhFLNnUJEFFoC0aR2ikunKWpYyWKfuTPqUXdjXGUuQhYSFcs/jM68nNRYtLGUvQqShVhBkFqEr5Y2+4uf5kSpvqj5QGde7534FGKYWua1RW/JGyVvCkg6sa4n/ZZbLVwV4sULVvp27IrFpqzJ17UksNFr4ro4IFUn5oHqeawKCpbPmW43AhNV9TQnIm7fL1IT7YaNLHVmrBec3gZi2122odrElpS1Hsx54PsuYdWSP6KmVP+A/FwHfSOcUskJKD2sqlu20kJ3Yytiin6dwMCN7pgYNZuMGoA++/mJF0C17cOeHSRWnXsU5R1AHzKsI6xbyIpFyjHIHXT4p0jVKO0hpO5x0W2SYSGYpnnSksqjYr7pSIR5kHeFbKOUmDAivzyodPqHP3q6rGBYbk1DH4VkQ4k7r38pK418cjWXwkSFZUJzHT3QjV9TpAS+EPJafS2WbymsEHIfZWjK5iihTmz9cEU018Jp2ZpvDlSBehy4g6hdlP5qKb4zvGorklPcyFxO05EOE4YZp0WXZEeDONHHVc+vGFqic/2CwPcQ5Ox9IVE3lldCH7CoEzWMiP1yxLVsbWTs4pU4oCd2ffoWxCfBavzUE1Ky0lgGxDcXbRoq8siCLfMRUZLqRTemGO7Xq5gDLmA7XF3CVgQ6/d41CFIh8l3F9ERJ1CyipziCeWaOwF/qzDBlpKWmUNgRYftchQpQZtFNG4oH9LPsOKo+Y7ewCccuhKeHzpkcJGIVUaUXRh3nccPL9ko7/g+uYZ1ikiUxNpmSDlYztmVUxeBcz7MXmucWeBlDB0Iw6rEQ6IWxmIN9yV/g1NebTBMpR2dhsARn5nui88xKoyREtxJ0hPfRCz4hqGREnJZn9BZ68k8D9epRyJqYhMhfWBb7O4ZXXId90euUkIlg1HT8mYVZqyqzGBiBOjQuLT8/PC+jcTjaWM0viANoKk6b6hyHrMXq3Y7C1YRiXOKTbSbEWoL63IFDivHF9bhVbiy1dUhvtnA+rKYHyXcH0Wk94x9D15WRfeNLiElfCtnxtNLp2l8anGhgnZZsrZlZpgW7Rd6sqgp0JeVh9uMIuhfHUp39LUrc6xcFII3UBrgoVj//YWpleytTGnE5YM04zQ2PVnqjViGCDXVTPeZcBSx7ggRNXSXGQymU9UZOl1ci71pyRGgulAWWJdibr+zoDvLp9Dl0KQHmwueG7zjEhXbIQZqSkJdM2dxZB3Oheo9oeYpZx3suB6WgKwO5wRbdcPnLvKz7N5HTDNY47P+tS5GEebTIm+WWNmPSvXDULW4kLjaS6O7n0rPoKZf2UT4wzMBynTQJoRskuVuGyMZO6uXshIOwWhd34Ilo7OoXTDmv2YxSjEdmTu0KU0ekQzR3KkiU8CorHBRorRa6kvZzviqSOc1sQnmZwT/nduyhQmIWeqR3phxEacsdOZr2gM0zLheNalspqqMmQqBcRr0AZSAl4sYtJIvnfSjFCLcfJ7hUHVsaylpqELKEymGc06dJKcQZKLgLYVD8ppHrHIxE/YHiZEI+E0R826HIGJ13y+zlG1WsdREMSOFy+c4HYVd8cb1LXGGNk8OR/kWyu8ZVtq7DT
AKL3ygHWhXnkw/lG6BBs88eAKvMjbUeQta6Sea31Xlc1FVt8FvkvKyq6p2JCARtm1Qnaj+C2WKo4g1uR9zXLXc6vcmqSsHJSIpUqw0OjCYPxAZpt6vQj6OnZzvM54UUy3Pn7Aq4uLmnyjmdII7BXThMqLkzbdTQ0XqUpZ7ZBMDt3vJNgo4c1rHYgsw50Z/9qV9+mZnJ1wRkfnhKrGKEuoaqZ1QuZC7uabjMoOH0y3qZ20CJtizdlQlVsdpzIKx0OdD01gdZ7QvgocmmzV+vbVcwJNNKuZRppi6Anvns9Q9qSDSVlHlQrnIZgpVCXj22Rgqo5eBcYmFxJt0w0SH0p2qEolyK0jGXcRlpXHBPeiNecsgaqWydJaIwEkkBwpTBahqohp1BFiaybPqRJPGo6EvKpriAr5TsqewyasjFXNOaPs1Tj488SGGuvHOpo7yoXy5NOPnkuNQnpyqHHHPaoEDsMBRw7eD6R7yoVC9lRWiQVNUksAOZEAPTkw2FiyG1UnkqymNy5faVxpRZ0YXKjRWb0i0xcbIdmWwRSG6CQjWFpmVw3npR5Wx+nW113AuuvRSjeT+k6HRdjh3Y0t0drZKFAa6kJDLura4VR+UFXPEmWK9EAW6OQoQ+W1aAEFGmdE/doaWZDUsiQ9stSRN2w+0Q8cl1OB6NREwpXRNetWbw0ukDbr5SjhqAx44dIx/SijH+bUTjEqOpxkXe6cDGV33jQehBZnFclcEY0lQxNNa5HfcI5sGKMrtRLuLQZ+3BSrLNPqp+W5fE7LxB3O/A65gOVbQ6Zdi+vV6LBmkUWEYU0UVHQi0ckLjGTAsypgNE9ZnKWEh6FkyGq/CXXy3SjrFwblrV7OcVfxc6Xz9zcb0EZ8uXPboD/o4ppVwHNtGuHj8IPUE37d2uECEHVzQzR3pLdCVB0y6qeM9Pq8MUu1Oo+a7izdiB5zbq/ntQeDWY3JaoKuxuzHjE8ipuWWnHuhW3HHtnam7Hbnnvcl5/t8nvBusUsYStbH+cDUaLFlCrKmw89391m/0Odw7+4WJqm5duGUbijdleM8YbJIyG73MUvlOVbrzX80E92vYFlh8vqB+dLFck47LWtQ2ZVNeLOBaXQWlZPNZnwQIKbw8t3ZuwmLbkS3GTvPcbKBZGjrEtQ4lKpFLONsvQROlYqoqlOQ7BtptAocdQQm8hZZfmPYNMLEJwaODScfXOJYy2Nt4jCXFoRhzeIsRc8CdK5IfJd257giWNTERzF2nDIuO5wF8rp1AvW2cNR0ocTuai5j5XRAdGawsz5z02dmzh1LLVkms1RE5+SQYFUAkDm5lPNc1hi9nme1oi4lIN6MF3z+0l2yOmBcpFRWs/QyJ0VlyJchrtAES0nYmIxz2nAKG4ciyaBZNYX9KFDuj5j6+ld6M6WmwDtP7A1/fLADHD/tg/iEoR2TR6Mdl4+iHZOPoh2TR6Mdl4+iHZNHoxmX55xzuz/owQ/jSUsxvOOc++ITfs9PPJRSX2/H5UG0Y/JotOPyUbRj8lG0Y/JotOPyUbRj8mj8Ucel7RZs0aJFixYtWrR4jGiDqxYtWrRo0aJFi8eIJx1c/c0n/H4/LmjH5aNox+TRaMflo2jH5KNox+TRaMflo2jH5NH4I43LEyW0t2jRokWLFi1a/ElHWxZs0aJFixYtWrR4jGiDqxYtWrRo0aJFi8eIJxZcKaW+rJR6Ryn1PaXUX31S7/u0oZT620qpQ6XUG+du21JKfUUp9Z7/v3nuvr/mx+gdpdSfeTpH/ccLpdQ1pdRXlVJvKaXeVEr9+/72Z31cEqXU15RS3/Lj8jf87c/0uAAopYxS6l8qpf6xv96OiVI3lFLfUUq9rpT6ur/tmR4XpdRQKfVfKKXe9vPLzz3LY6KUes2fH83fRCn1V57lMWmglPrf+nn2DaXU3/Xz7+MbF+eVuv84/wADvA+8CETAt4DPPIn3ftp/wC8BXwDeOHfb/xX4q/7yXwX+L/7yZ/zYxMALfszM0/4
Mfwxjsgd8wV/uA+/6z/6sj4sCev5yCPwB8KVnfVz8Z/3fAf8Z8I/99XZM4Aaw89Btz/S4AP8f4H/mL0fA8Fkfk3NjY4B94LlnfUyAK8CHQOqv/z3gf/w4x+VJZa5+Fviec+4D51wB/DrwF57Qez9VOOd+B+8JfQ5/AZkE8P//zXO3/7pzLnfOfQh8Dxm7P1Fwzt13zn3TX54CbyEn+7M+Ls45N/NXQ//neMbHRSl1FfjvA//JuZuf6TH5Pnhmx0UpNUA2s38LwDlXOOdGPMNj8hB+GXjfOXeTdkxARNRTpVQAdIB7PMZxeVLB1RXg9rnrd/xtzyouOufugwQawAV/+zM3Tkqp54GfRrI0z/y4+PLX68Ah8BXnXDsu8H8H/g886O74rI8JSOD9m0qpbyil/hf+tmd5XF4EjoD/ly8h/ydKqS7P9picx18E/q6//EyPiXPuLvAfALeA+8DYOfebPMZxeVLBlXrEba0GxEfxTI2TUqoH/JfAX3HOTb7fQx9x25/IcXHO1c65zwNXgZ9VSv3E93n4n/hxUUr9GnDonPvGD/uUR9z2J2pMzuEXnHNfAP4s8O8ppX7p+zz2WRiXAKFg/MfOuZ8G5khp5+PwLIwJAEqpCPjzwH/+gx76iNv+xI2J51L9BaTEdxnoKqX+3e/3lEfc9n3H5UkFV3eAa+euX0VScM8qDpRSewD+/6G//ZkZJ6VUiARWf8c59/f9zc/8uDTw5YzfBr7Msz0uvwD8eaXUDYRO8KeVUv9fnu0xAcA5d8//PwT+AVKmeJbH5Q5wx2d7Af4LJNh6lsekwZ8FvumcO/DXn/Ux+RXgQ+fckXOuBP4+8PM8xnF5UsHVHwKvKKVe8BH0XwT+0RN6708i/hHwl/zlvwT8w3O3/0WlVKyUegF4BfjaUzi+P1YopRTCi3jLOfcfnrvrWR+XXaXU0F9OkQngbZ7hcXHO/TXn3FXn3PPIvPHfOOf+XZ7hMQFQSnWVUv3mMvCrwBs8w+PinNsHbiulXvM3/TLwXZ7hMTmHf4d1SRDaMbkFfEkp1fHr0S8j3N/HNy5PkJ3/55CusPeBv/6k3vdp/yEn9H2gRKLfvwxsA78FvOf/b517/F/3Y/QO8Gef9vH/MY3JLyIp1W8Dr/u/P9eOCz8F/Es/Lm8A/yd/+zM9Luc+67/OulvwmR4ThF/0Lf/3ZjOntuPC54Gv+9/Q/w/YbMeEDnACbJy77ZkeE/85/wayeX0D+E+RTsDHNi6t/U2LFi1atGjRosVjRKvQ3qJFixYtWrRo8RjRBlctWrRo0aJFixaPEW1w1aJFixYtWrRo8RgRPO0DaPHksaMuuYJCriiR73hAxEN95JbV4/iBj/vIhUdcVY9QDVGPfNoP/5rgvt9rfMxzPu593SMf+/1ep7ldPVr85Pt+rofe90d4b/cD7v9h7vuRP/ND933k83+/5/0Qj3E/6jF938e4H/z8H/o+95H7Pvap6vzFj54pj/qZnTuzP+b116/z8PPPv8fDP6NH3tdcVh9338e9l/uhHvfR93zoeR95X/f9j6F5jUe8h/rYx/+rv+/Dr/noy+6BMVbnHvHxz3v4MetbvvHt/J86575Mix9LtMHVM4iCgj+l/w2UVqAkeXn+MlqBUijtrysFD1z204DSq8c+8nHq3H0PPE6vXuP7Pc4pJblV9dBjz9937na3eg0efJw6d/2B+86/9oOvuXreucetFnnF6vjlvo9/3PqyevCx+qPPO/96j36Nh9/v4WP8uMc94vKj7uOHe42H7/t+x/uR6/wQx/HAa7vv+17y59bP41GPcw+81/nnrD/L+rp6+HmrY19fV+qjl88/r1ms5dQ8/9rrBVg99Dj9wHV37iexvl37QEKfe9zDlzUfvU8/fJkf9j67vvzQ7ebce51/nME9eF05tBfX18phzl9WdvUaRlm0sg+8hjn3+uYRr9E83/jnaeS45DXsA887fxzm/Ov556/eC7t6PeM/8+o1zo2BwZ07Pn+
f/26NAuO/aQ0YpdD+uuHcZaXQ/lkahVF6dd3svbdDix9btGXBFi1atGjRokWLx4g2uGrRokWLFi1atHiMaIOrFi1atGjRokWLx4g2uGrRokWLFi1atHiMaIOrFi1atGjRokWLx4g2uGrRokWLFi1atHiMaIOrFi1atGjRokWLx4g2uGrRokWLFi1atHiMaIOrFi1atGjRokWLx4g2uGrRokWLFi1atHiMaIOrFi1atGjRokWLx4g2uGrRokWLFi1atHiMaIOrFi1atGjRokWLx4g2uGrRokWLFi1atHiMaIOrFi1atGjRokWLx4g2uGrRokWLFi1atHiMUM65p30MLZ4wlFL/BNj5mLt3gOMneDg/LmjH5ePRjs3Hox2bR6Mdl49HMzbHzrkvP+2DafGjoQ2uWjwApdTXnXNffNrH8UlDOy4fj3ZsPh7t2Dwa7bh8PNqx+ZOBtizYokWLFi1atGjxGNEGVy1atGjRokWLFo8RbXDV4mH8zad9AJ9QtOPy8WjH5uPRjs2j0Y7Lx6Mdmz8BaDlXLVq0aNGiRYsWjxFt5qpFixYtWrRo0eIxog2uWnwESqn/m1LqbaXUt5VS/0ApNXzax/RJgFLq31ZKvamUskqptpsHUEp9WSn1jlLqe0qpv/q0j+eTAqXU31ZKHSql3njax/JJglLqmlLqq0qpt/xv6d9/2sf0SYFSKlFKfU0p9S0/Nn/jaR9Tix8dbXDV4lH4CvATzrmfAt4F/tpTPp5PCt4A/i3gd572gXwSoJQywP8D+LPAZ4B/Ryn1mad7VJ8Y/L+BVqPoo6iA/71z7tPAl4B/rz1nVsiBP+2c+xzweeDLSqkvPd1DavGjog2uWnwEzrnfdM5V/urvA1ef5vF8UuCce8s5987TPo5PEH4W+J5z7gPnXAH8OvAXnvIxfSLgnPsd4PRpH8cnDc65+865b/rLU+At4MrTPapPBpxg5q+G/q8lRf+Yog2uWvwg/E+B/+ppH0SLTySuALfPXb9Du1C2+CGhlHoe+GngD57yoXxioJQySqnXgUPgK865dmx+TBE87QNo8XSglPqvgUuPuOuvO+f+oX/MX0fS+H/nSR7b08QPMy4tVlCPuK3dabf4gVBK9YD/EvgrzrnJ0z6eTwqcczXwec9z/QdKqZ9wzrW8vR9DtMHVMwrn3K98v/uVUn8J+DXgl90zpNfxg8alxQO4A1w7d/0qcO8pHUuLHxMopUIksPo7zrm//7SP55MI59xIKfXbCG+vDa5+DNGWBVt8BEqpLwP/R+DPO+cWT/t4Wnxi8YfAK0qpF5RSEfAXgX/0lI+pxScYSikF/C3gLefcf/i0j+eTBKXUbtOZrZRKgV8B3n6qB9XiR0YbXLV4FP4joA98RSn1ulLq//m0D+iTAKXU/0ApdQf4OeA3lFL/9Gkf09OEb3r4XwH/FCEm/z3n3JtP96g+GVBK/V3gXwCvKaXuKKX+8tM+pk8IfgH4HwF/2s8tryul/tzTPqhPCPaAryqlvo1sXL7inPvHT/mYWvyIaBXaW7Ro0aJFixYtHiPazFWLFi1atGjRosVjRBtctWjRokWLFi1aPEa0wVWLFi1atGjRosVjRBtctWjRokWLFi1aPEa0wVWLFi1atGjRosVjRBtctWjRokWLFi1aPEa0wVWLFi1atGjRosVjRBtctWjRokWLFi1aPEb8/wGIRPYOQPLlwgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "mel_after = tf.reshape(mel_after, [-1, 80]).numpy()\n", + "fig = plt.figure(figsize=(10, 8))\n", + "ax1 = fig.add_subplot(311)\n", + "ax1.set_title(f'Predicted Mel-after-Spectrogram')\n", + "im = ax1.imshow(np.rot90(mel_after), aspect='auto', interpolation='none')\n", + "fig.colorbar(mappable=im, shrink=0.65, orientation='horizontal', ax=ax1)\n", + "plt.show()\n", + "plt.close()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/TensorFlowTTS/notebooks/fastspeech_inference.ipynb b/TensorFlowTTS/notebooks/fastspeech_inference.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..c705f43502e0ef0ac2a05ec207c2c54d537d71ae --- /dev/null +++ b/TensorFlowTTS/notebooks/fastspeech_inference.ipynb @@ -0,0 +1,349 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import tensorflow as tf\n", + "\n", + "from tensorflow_tts.inference import AutoConfig\n", + "from tensorflow_tts.inference import TFAutoModel\n", + "from tensorflow_tts.inference import AutoProcessor" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "0d74a729d9aa44f18d39abd22706f60e", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, 
description='Downloading', max=3568.0, style=ProgressStyle(description…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "processor = AutoProcessor.from_pretrained(\"tensorspeech/tts-fastspeech-ljspeech-en\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"i love you so much.\"\n", + "input_ids = processor.text_to_sequence(input_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "06bb2cbad64c4f3a901439a6494da363", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=120784120.0, style=ProgressStyle(descri…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "ca8e1c3d613f42f880f1aa4f097f67e1", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=3710.0, style=ProgressStyle(description…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "fastspeech = TFAutoModel.from_pretrained(\"tensorspeech/tts-fastspeech-ljspeech-en\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Save to Pb" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:From 
/home/lap13548/anaconda3/envs/tensorflow-tts/lib/python3.7/site-packages/tensorflow/python/training/tracking/tracking.py:111: Model.state_updates (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "This property should not be used in TensorFlow 2.0, as updates are applied automatically.\n", + "WARNING:tensorflow:From /home/lap13548/anaconda3/envs/tensorflow-tts/lib/python3.7/site-packages/tensorflow/python/training/tracking/tracking.py:111: Layer.updates (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "This property should not be used in TensorFlow 2.0, as updates are applied automatically.\n", + "INFO:tensorflow:Assets written to: ./test_saved/assets\n" + ] + } + ], + "source": [ + "# save model into pb and do inference. Note that signatures should be a tf.function with input_signatures.\n", + "tf.saved_model.save(fastspeech, \"./test_saved\", signatures=fastspeech.inference)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Load and Inference" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "fastspeech = tf.saved_model.load(\"./test_saved\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"There’s a way to measure the acute emotional intelligence that has never gone out of style.\"\n", + "input_ids = processor.text_to_sequence(input_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "mel_before, mel_after, duration_outputs = fastspeech.inference(\n", + " input_ids=tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0),\n", + " speaker_ids=tf.convert_to_tensor([0], dtype=tf.int32),\n", + " speed_ratios=tf.convert_to_tensor([1.0], 
dtype=tf.float32),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlAAAACuCAYAAAD55TMFAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9ebBt3XYXhv3GbNbazTn33u++73tPr5HeQwjRiLIDKRAKBuQYjOkC5bJpEjtgEwunYsBx6ExRSRxbQS7HNC6SgLAJjQFDyjR2DGUDCoTG2FgGQoFCI/Gk137dbc45e++11mxG/hhjNmuf8zVP7/EJ5D2rzj337GatuWYz5mh+4zeImXFpl3Zpl3Zpl3Zpl3Zp77+ZH+gOXNqlXdqlXdqlXdql/aPWLgrUpV3apV3apV3apV3al9guCtSlXdqlXdqlXdqlXdqX2C4K1KVd2qVd2qVd2qVd2pfYLgrUpV3apV3apV3apV3al9guCtSlXdqlXdqlXdqlXdqX2C4K1KVd2j/EjYh+NxH9O/r/n0REf/sDui8T0df9A7z+p/Qe7vv5/R9ORH+ViG6J6Fd8pft3aZd2aZf2Xu2iQF3apX2ZjYg+TUQnIrojoteJ6P9BRFdf6fsw859n5h/+PvrzS4joL3yl799d/8+q8vOPn73+x/T1b/4Hde+u/RoAf5aZr5n5P9A5+KlfyRsQ0S8lov+fKmmvE9F/QUTXX8l7nN3vy1IqL+3SLu2DbRcF6tIu7SvTfg4zXwH4sQB+HIDfcP6BH2QH498B8L8sfxDRhwD8BABvfkD3/ySAv/mVuBBJM2ev/RQA/2cAv4iZrwH8SAB/+Ctxvy+nfSXX0EPPfWmXdmnvv102z6Vd2lewMfPnAPxJAD8aqKGw/w0R/V0Af1df+9lE9NeI6AUR/SUi+sfK94noxxDRf69ejz8EYNO9981E9Nnu768moj9CRG8S0dtE9NuI6EcC+O0Avkk9Yi/0syMR/V+I6PvUm/LbiWjbXetXE9EXiOjzRPQvv49H/f0AfgERWf37FwH4owCW7pqGiH4dEX239u8PE9HT9zOORPRDieg79HtvEdHvJ6In+t53APgnAfw2fcY/COBrAPzn+vev0c/9BB3fF0T013vPmHrRvpWI/iKAI4CvPevCjwPwXzPzXwUAZn7GzL+HmW/1+79bx/BP6Vz9OSL6ZHf9H6HvPSOiv01EP797b0tE/z4RfS8RvSSiv6Bz8f/Rj7zQ5/gm9Sb+RSL6zUT0DMD/kYgeE9Hv1Xn/XiL6DUURIiKr136LiP4+Ef1rvVfroecmon+JiL5Ln+N7iOiXdX39ZiL6LBH9GiJ6Q9fIzyOin0lEf0ef79e/nzm9tEv7QdeY+fJz+bn8fBk/AD4N4Kfq/78a4hn5t/VvBvCnADwFsIV4qN4A8I0ALIBfrN8fAQwAvhfA/xaAB/DPAQgA/h291jcD+Kz+3wL46wB+M4A9RNH6J/S9XwLgL5z18bcA+M+0H9cA/nMAv1Hf+2cAvA5R+vYA/oD2++ve4Xn/LID/FYD/CsDP0Nf+WwDfBOCzAL5ZX/vXAfxlAJ/Q5/sdAP6gvvcpvYd7h3t8HYCfpt97DaJc/JbzPjw0B/r3xwG8DeBnQgzFn6Z/v9Z9//sAfAMAB8Cf3f8nATgB+LcA/EQA49n7vxvALYCfrH38rWXMdQw/A+Bf0mv/WABvAfgGff//qvf/uM7j/0SvcW9MdC4jgF+u19oC+L0A/rjO46cg3sBfqp//VwH8LR3zVwD86f6aDz03gJ8F4IcCIAA/BaJY/dhuzUUA/3v97L8C8TL+Ab3/NwCYAHztD/Q+vPxcfj7onx/wDl
x+Lj//qP/o4X0H4AVEAfq/Adjqewzgf9p99v8OVa661/62Hlw/GcDnAVD33l/CwwrUN+lBdk8BwZkCpQfjAcAP7V77JgB/X///uwB8W/fe1+P9KVD/AoA/COCHA/g7+l6vQH0XgH+q+95HIQqhe0hZeI8x/nkA/up5H87moFegfi2A33d2jf8SwC/uvv9/eo97/gyIovlC5/c3AbD63u8G8J90n70CkCAK9C8A8OfPrvU7APwfIMrcCcA//sD97o2JzuX3dX9bADOAH9W99ssgeDAA+A4Av6x776fivgL1Xs/9xwD8ym7NnbrnvtbrfWP3+e8E8PN+oPfh5efy80H//GDCZFzapf1Atp/HzH/6Hd77TPf/TwL4xUT0y7vXBgAfgxxMn2PmvsL3977DNb8awPcyc3wffXsNwA7AdxJReY0ghzH03t/5Pu553v4IgH8f4tn5fQ+8/0kAf5SIcvdaAvCR/kNE9JMgYU9AnukbiOjDAP4DiCfoGqJ4PH+f/Sr3/ueJ6Od0r3kA/+/u7zovRHTXvf6jmPn7mPlPAviTGh77JwH8PyHK7u84/z4z32mI7WN6728s4VNtDjJGr0K8hd/9JTxLv35eRfNUlva9EG8W9P795/v/P/gaEf0MiHL39ZBx3gH4G91H3mbmpP8/6e/Xu/dPEAXy0i7tf1DtokBd2qX9g2+9QvQZAN/KzN96/iES4PLHiYg6Jepr8PBh+xkAX0NE7gElis/+fgtyyH0DC0brvH0BopCV9jXv/CjdTZiPRPQnAfyvISGgh/r4LzPzXzx/g4g+1V3nz+P+AfwbIc/xjzHz20T08wD8tnfrzgP3/n3M/K+8n++wJAA8/CHmDODPKPbqR3dv1TEjybp8CvEgfgbAn2Pmn3Z+LVXGJsh4/fX3eIaHXn8L4sX7JCRUB8h8lXn9AiR8d6+PD12PiEYA/ykkIeCPM3Mgoj8GUbAv7dIu7V3aBUR+aZf2wbbfCeBfJaJvJGl7IvpZJOnx/zUEb/IriMgR0T8L4Me/w3X+W8hh+W16jQ0R/UR973UAnyCiAagKwO8E8JvVswMi+jgR/XT9/B8G8EuI6EcR0Q7ijXi/7dcD+CnM/OkH3vvtAL61gKuJ6DUi+rnv87rX0LAoEX0cwK9+j8+/jjUQ/D8G8HOI6KcrsHqjgOhPvMP3V42Ifi4R/UIiekXn6cdDwqx/ufvYzySif0LH+d8G8N8w82cA/L8AfD0R/YtE5PXnxxHRj9S5+F0AfhMRfUz79k2qyLwJIOM+oL029QT9Yci4XuvY/hv6vND3fqXO7xNIKPPd2gDBX70JIKo36p9+P2N0aZf2P/R2UaAu7dI+wMbM/x0EiPvbICGpvwfBuYCZFwD/rP79HIKl+SPvcJ0E4OdAwNbfB8Ee/QJ9+zsgQPYvEtFb+tqv1Xv9ZSK6gYCLf7he609CQObfoZ/5ji/heT7PzO/EOfVbIcD1/4qIbiHKxze+z0v/WxDw9UsA/wXeYRy69hsB/AaSjLtfpYrMz4UoeG9CvEK/Gu9f5j2HzNPfBXADUVD+PWb+/d1n/gBE2XwG4H8M4H8BACyZev80gF8I8Uh9EcC/C1FUAOBXQUJkf0W/++8CMMx8BPCtAP6iPsdPeIe+/XIIpu17APwF7cfv0vd+JwTc//8F8FcB/AmIUp7uX6b29VdAFK/nAP7nkDm7tEu7tPdotIZbXNqlXdqlXdp7NSL63RBA/z2+r3+YmnqUfjszf/I9P3xpl3ZpX1K7eKAu7dIu7dJ+kDQSjqmfqSHgj0M8ZH/0B7pfl3ZpPxjbRYG6tEu7tEv7wdMIEv58DgnhfReEw+nSLu3SvsLtywrhEdE/A8E5WAD/ITN/21eqY5d2aZd2aZd2aZd2af+wtu+3AkVSwuHvQBh+PwsBRP4iZv5b7/rFS7u0S7u0S7u0S7u0f8TblxPC+/EA/h4zf49mD/0nkKyXS7u0S7u0S7u0S7u0H9TtyyHS/DjWjLafxXukKA
9+z+PuFQBAGgjsAO5VOAYoA5QA4vbavUZAtkDeyJvDJmLvFowUAACWMhiEyAbHJJnDx+CRk5Hr1WsTkOWegN5X/6buvkyotHJMAIz0u/S9vk9Yf7G+Kdc2ATARoCyfyZ5ArwQ89hMAwFNEhsGcHU7JAwCW6MCJtN+lE3IbKonJ3J4he33JATC86g+dUeMxA8YwvE0YTKpjRyTvAUBigyVbLMmCoz5wpjpPKGPHZ7+zdDkP8gPc7w+Yuvloz1bnp/S3fCfLCybKWFIGdJiQNwBshjHyWWMYRr9XniUzIWezviYAzlSvjdSerYyvDg3S0PpEUV4vc7nqd9fooZcNkLxciF0bitWXqGc7RJ1jVu5wGjIGFzFa4dD0lGD0G6ydZMj8JTaYk9xoiRZI7XmJ27XrXObu9b7zzPeWd72R9pOJkMszWdkjda1A1z/zg2OVNtKnzdMJV3aGAWPWAboLo/a9rMFuuPr9m3Bv/9YxPO9y9xqltk75OmF0EQRGyDLgITggUr1++Y7JAHQN0ANrIHtCepxxNczY2xkAMFBcrYsEg4UdpuQx6+DFZMBMsjbr86q8Kl/UfrAFYLsbJ4KJ3Zjkh/tWnj9bIG2BcbNg75bax7KektrZxzTgGAek1AnDKpf04gYgw3XPrT6ja6D0p8jS8jHKuN/v/PBaebeWBkIe2p4y3boAurHor1vWwrvei++fS++2zt7h7AL07LgnkB/YG9xdkwA2hDSi7jGwjJldOlmE9jkAyNuM7RBw5WZsSebXUYYBw1Rpcb/7AJBAmNniJknd8UMckNnAW5lMg4w5OaRkmgwt/eYmxFZypj4rVvNQP3M+ftz+eFe9oG9fJrlAvxzubj/3FjO/9tDnvhwF6iGm2nvdJqJvAfAtADCOj/FjfvKvAACcPuQQ9kB2VAfVTgx/YriJYYIeBkY+k53cLjsRStNTwu2PEIXpG77+s/hxr3wvfsj4BgDgiT0CAN6Mj/Cdd58CAPyV178Gz17skWcLRD08ogHNBHeQv/0dwR8Ae2JYrSlvEt/baGkghB1B15Qs6AFIAzeF0KhQUKHhJsLmTWD3ZoZd5IHvPmrx2j/3Gfz0j0jU82P+OW7yFt89fRh/8+VHAQDf89aHcLodgdmCgioQC8EsBHeUv+0JcBOADCxP5H7hESONjDxmwOsDWAbZXCULM7B/POFTT5/hU/tnMnb+iI0JCHpSv4xbfN/hFXz6xVO8eLGX7x0c7NHAHrs+LIBZADfJvdxJft9+tcH0mjxv3mXAZznAAVAwoIVg5m4NLFQPwiIk8iC7ysx6wD4DxhcZ7sS4+ZT08/aHRfgnE/Zbmbj9uGB0EYYYIclnDsuAm8MGKZm6eHMm5KODvZXPuDsDO8l4+htV0A/tWUqfxueMzXOGm6XjZb3KALXf9aDoGjvC6UNyv+WaYCLDRDnIAFnfeWjP3ysh06tycf91t/gRH34dP/RKaJ4+OrzElRVFPOsNAzvcpg3eDnt8953s/+95/hR3L3bgSW5Gs4EJgJ3aPnBHmT8T24FmIp8ZNnKY1EOuvEyEuNX77wlpJJjA8DqG/sigxPcOGsrA86+XB/7Gn//X8RMefTc8RXz3LFVf/vJbPwR//82nCLdyKtBiRJlIVNeFvyX4W8AfuB3EGav9S5lFETVAGlsnhjvG3cdl85pvfoYf/uobcJTxucNjAMBn3ngKfnOEu21r3t8B/k5kFiBrwCSu4wAAxw8bzD/9Bj/jU38LP+7qewAAX+VeYkDCopV0bvMGn15ew3cdPobvufsQAODNwxWOs8c8i4WQTg5YDMzJwE665yYCDDB/KAGPQ3vIG4/hbbn28FL6aGfA6holFQFFpk5PCTffEPAjf9jn8I1PPw0A+MTwDBsTYJFxyDLmf+P4Cfzl1z+Ft18IaXtaLHgxIk+dXnuTYF1G1j2eTw50srK+TqXfYgClAYhXakxawB0Jm7d1Pm7lHKDUxpQS3vFgpKKxMXD4iMPh41SNjc3bwP
g8w83t+R9s3F2nm8N7a7VTcmRPcP0sWzSDpyiM3NZE6RPbzmDUz/QG8eqZtGVPWPYGt59sMpUSsH3dYP+FXPcYIGv7xdfJeo4/+g7/o098Dj/l6d/BN4xCWP+aPWBDCZ5aGMoCsETd34SJM743bvGn774BAPCX3v5avJw3+OrrFwCAazfjb7/4MF5/cY24yMPlaIDFiGyZ5Wp2lvVqdJnaRY3g2PaqOBi4KbpZzt9+D5exr2P1QHsnA+09Wy8nynIi4M//iV/7jqWtvhwF6rNYlwn4BIQ0bt0n5m8H8O0AcP3oE5yL5U0inJ0qTADg7zLcKcHEtsKTN8ijQRrLtMoiJEa1ugYTsbMzHukB8sjI72AdnvoDAOBqnHE3jlgAsJVrsWUAtlpUxdPAhpBV6TCRVMNvm9iWjVg3ESFHOeBXHjXqPAZJhbZHPVHTQNj7GY9V4dsZsVCv7YQrL/8ffcTsPXImsC7tDACsHhEAFAmpWG9FV5rkYMlDe5biEajds4x8TXCUsXft3mOnQAHA42GLq3HBYRRBukSDYn+0i+kzdV4ykFr1RQ4tBMwOVg88MwN2JvXKtXEq3hDuFAo2bX4oqtB1zcrCkDGOEaOX3Th23pliSQdnYG1GXCxS0IvPBvbOwt815aF4uErLKhSzbx6+NJZ766HbeQB67wflJlxLY9uUI2IRLr3wy56RPNV7iWdHha5eyruEnQu4Uq/GlZ3wxB5gwUhoCtSoD3KzEW3/2XaHEBwW63QMLVI0yN40b5glsCPYuSmGFKl6Z+W5SIS+KjGr565rUOaqH4/kCeSaDCjfo8x1PY8mYmMCPEXsjCrEfsYwJMRBFgEzxOI11QGEuCt9aYK6HL7VQRKbd6YeZkb2UllLuyHg2s0wlHE9qNdoDDiNHnmxSL2nLlF9PmPLuLRnixvC6GWe9rq/NxQwINc+eSRsKGBrl+oJHlzEHC2MUePDcvPYlOVkgOwY7BnetxMlDLbKyzQSTCwnOtV+A72BAtCQsbEBGx04TxGeIiy4vrYzCzYuwjq5V05UFQEa1JBwWYySWQbXHC3cgWCPsp5kDtozNIVi/WxiOANkqFM2HlaAqHrBmjK/WqvdoSxjCYBIDNyVt4dBnbVT//ugp0jfsgBALThg23VbX6jKQ+6e7zyKIPKZun2x3idsSbxrHuAi0w2JIjq2dQ2WfVb2s7WMjQ3Ym7meMUV5Om+J2zopCpWnVOXIxgbMzmGvHoa9m7F1Ad4nifAA4KyOhPPrU3uNSc8iSyg47KzP03sKczqbSyaZpweU4CZ/etfy/c/V9m7KeHnv3b6PL0+B+isAfhgR/RBIHaZfCGGxfefWWT3Zy3PahTHcqNB4vsDezqAQASc7y2490taDdmoxb40qQARyMoo7F7AzC3akSoAqUAtbPHainFwPM14MAcxAMnKtZMRNnnURx1TcRoDR1dUUJj1gJ5aDfGaw6RZ48WJ3CwTUvJpsAFbvWWl5kL4XwVoUv51ZsLWyYEcfYW0GO+ocGwYZ7eDKqd2jHrqQA8zOTQCtQo0A0oYRo4GhDK87eWcW7My8VqD8CTu/YBhkh8ZgkRlIjOo2Obf0waL0pK16wSCHsJ0Jtliis1oh5yHb3tULEVJs1oJEbstIGsb124D9uGDrZdy2LmBQiRJ1vpdsYa1eOBYlzkg/iscxQpS3cliV+zkZrxLmSTMhntA6Vc6oc+OHsRLArEp18X6kQcZBhGm3a5lhdH5LuDiNrEo/sB0Ctt2BtzczHpkJnlpZvMAOGwrITHjudwCAR+OEu3GoIZZogJwy2BlEa7WPRgS5bV4/E9beWBFq4i0sYdP6vF0zqQjL4kHmlXVXGuUm8Ld2wYaCHuIqG2yCtwlGPR0pMZD0dp0ikLYiZKvyXRScck/D9b5paHOXhmaQXA0LnvgjPKUaSt+OC07jBikUyS997pV/S03JLIZKHoDBJXhKsN3gLDBN0YVFgqmeQwBIWWRTGyBZXLw6hFmUIJ/hOg
UqDlm8tgDySEixKbxAO7TrGDnA+IyNjTDv6J4BDDGsyW05V0WOqyw2xIjRAosaFpMoT/54ZpQ4NSSKAuVZDBTX1ny2BCIGFTuNVTk1a8WnX3KU1dBxnRfBEdjyanyzxb1GfVi73fL+55jq/iEDUex0ULJFpxzp9ahb83UftP6We3NbWq0f3ZmSra7xkcFD0YIJaSM/sfO6yhiog8ElkRUUVmswnO9V/V0+s4CRgdVZYIjhKFfjdDRR9qXJoKpVaucNBLYBmYPcQT7g5XNk2phQOpMxaiSZPvyaxTq5F6Z/wHsEPDx/55+51/g93u/a91uBYuZIRP8agP8Soqz+Lmb+m9/f613apV3apV3apV3apf2j0r4cDxSY+U9Aai29v0ZiVQBN0xZ8hGiz7tkB9PIWHIL4wwHY/RZ0tQNFxT4kJ1YKA6Ta7WgiNtS7n0VdHShVC3ZjA0YfKx4GEKU1JQJXQK9YdBSb2koklgRTsd5ayNGoy5ISkJK464sVDdOsLHleXrlnSzOUV1afVW9QCTuRWp695wiGxXpTN26O0seM9hoAWCbYCSuvQXEVA0C4JizJIndmT3Xda58kDBSxsRFOwwnGiCeELTdXslP3dfXalFAV135TJgFfn+NT6Mxjymu3rYxfWzPl/9x5GqzLsCbXPjpq/y9WjyEBllNvGem89J5DKrkGHcCVjT7TmYdx9b021K317n/1HgimT7vmxd2eLaMgs4r7e5WkACiGoq2L3nO4oVC9NtWCpIQEccGXzznKsIYb2N4mEBlkahZ6ygBl00IP3bNUvFP3WBmd5X4+ALz+7D1wfPeZ8rzS34iBUg05bG2AdwlGwavZEpjNKtTLjmqYdeVI6b2jnffkfD0VE7zswdHEKku8zYIftBaslr3cq/PwOpaqc2ehGkOMDMKiHZ1YvFpTLr8HzNljzh6LukaKB6rME+l+E1mg97NyT/IZzjUPFNlc92X2LOvNAqbkT6S1lwwk62C0ceWhAAREXMLyZX1Qb56fLfrMBE5UZWjdx7wekyIby1iK7D1LQEhloor7RubywX2G8j2qc1m/Vp9bx5LV49N5tR9MmsB66b6TN+OhL/Qe7H7Rl+dLg8xJj/epn67hKq54tfNnq0kD1GRtfc4y1uXZuocq85tw/2HKCiqeKQ9GAGGBRdCOlzVQzieLDEcqAcoZaVjlJVePIjJAjmtSBJd9y+vnPY8+AFjPUwlDd7Kmx7+t3jt/xG5++V0WEYHA7xNE9WUpUN+f1mdrUZIQnjnJiUpLAM8LeFnaF3KCSRmUkn5vizwamGBXLm5LGVZPSo8MS4wNhYqh2NhQwwClMYvylDW7LA8MEwkcZYIBOUyJ26HvTgx/F2FChh/ke6M3iFuDuDEtNOMlTBOu9TF8y+Dp8T5LdgiqdfWLumSQWWJYy0iG26GvC68Kckuib3aLxgSCvwOGG66YrQLQK2HE42sGp6PDlFzN/gnsEDjVzJsEuic4ibQvroWUauy6U9bYiDJqTnKt4aURUOuhhRIo33enUxbAa8WqGAhof1PCSayZNbySau9HvskH70vJlXAs51QXi6+ZeWV8a2bP+lr3XL89EJRIhL/hFi5QoKRJMj/l/ny2w5kIxq1j/5a4YhMKvmHThYomtghksTEBow7mYCMGF7FoiJyZkCkLvk7DMOwFN5f9Gi8kj1Kkubro23kuy4/bmJS+rvAQ3An4c4HXDVVRnq4V13jtJlyPM46zxNk4G0hyalP+6yHsuYZPTLfXyjhWQV/uV37rZxMbBLYwzFU5zJoRtxLuZd2cLTzK7TXKQEgWU/a4zYJD640TQEDkxzwgcAvjZZZ9VnHMncK+MlKMfq5/lF6xLwfrmQFyD24AIGSLo8aoj3lEYAdLGQsX2WBlHHqcIwBkQlYAMRLBHCxcwRQeCG4SGEQPyqYMMaY0MSZTkwdlLsR27NZ82YeMtq/O8Ull/M8UEkodiD6p8msBmE5+PHRmdvuXuLvf2ev1wGW6L4
d0nfTA/biVrNMi+0zJ0g7cwQlkrbasS1nXFAGEtu7LmVIxolnlS7Efs0FmszbUFSuZVmco1/fkb3nOAak6JwabMKVm9HuTMNgIZxNswVVmFiWqGP9lEM5bbnNTn2NlsKHpz2eX4V4Z0tDuKiO6/413UJDfgQOTHrjfO7UPVoHiDpRa8DuOkHZiidHTK5jBwywBiEXiMWAbUpRCkkUWWDJAAJySx8KuHvoBpmlAXctMKgDapR9UNPtBywBFrpvPhAyKGRQSrKJJ7QlwdwT2BmnUTARPiFuDkypncU8CmJ9bhqFdgCXZGmMuKecAVoudGeDc+n3eZ+IijFDjyZJZxfDHDFs8ZpFBmasC5a4INBscw1AVqIkdDOfap2MacUoDpuSq9y4lAw4GCAZmkWvZmVQAaJ8iw6b2OgCMz4Hd6xnDrSrDmsGVXfMAZU9VUNZMTALsYhB25QQQxZsyg1ITJIa4Wkb9+BUPW2bBkZUsEQCwkxFcVgEdF0xWboJM7gWYYOrYm6V9FlDhVUDV95Sos/90+4CdeuUC1+xMY8QSX2WakMxtOXBisjDg6lnyFLGhhM1Zekp5vxzaxVqsMjkTUjIyJgWzkUXar6086Lw05UgwC50ALBbkA+tzRZEg2sE9EHlR1sras8gV0+UpqfdM8XQmgwwBttmKzFyVqOJtqUK4eKlYlZNePPD68I65KVCF/iFECwQDCp13pRhExbMR5ZCmhLqe7Uw4LR7Pwh5v+uv6fIlN9UTdpQ3eWK7xfNnhdhFP+7R4hMUharKDZLvp+iuGVAEZ52bksHqAVjLCiLJAHb5IrtPGKAWLZ/MObwytjyNFeBPr/rmJGxyWAcusB+XkhPYjEVjHwJwMhhcGmruj2VfitS8KXKrJGJLkUsbOHQjuKNdxEyvlBepBV42a1fritbxmWb8rj04qnpziKWxe4OohKV8/V4ZXqfhclZV7h35nWJXx7TNWZTCahz6NhOUaFcNJGTVT0R3a84ph1fqTraypkpEN1qzs0M4VeXauERKgycNipE9sEdhg6ROBWJSn4nXdqE8qgdr3oq/UHqXFbBBTS8bipGswne2VFZapjVvB7da5KHOggmoVjcjdPJTsPcVW3ZPFK2VpvecfzLbsX/+HUYGizHAnfYpXDOKOkEaD+XEJz42wC8vPymvSHjY7QtwZ2XhqxU/J4ZgHHDXddoCELBY05eQYBxwXj3lxyPq9FK1YTh09AAU5HEvGiDuxUBtoFl4eDObtUEORgKZaRvUgcFG0GA4ZXlOe2aowmRm2pL4vBlGFKQBkGGRd0MUaYAC5KCwltMiyMFuKKOm1m4uYHbA8IqTR3vOSlJDDcg2wAryjjlPIDkf9DQB3acQhDbhbRsxVcFrQbEGzAEQByajrAbXl/3ZCFZJsJMxpYum3zLVZcpfRaMRSGwlBPXySbdesNbuIQltS0gGxwq3JjZ+k2z252wU5y1iaSa7tjiQK8FGvPTUhWUK1okCpolPmPKKl2KIJaXOOzOxaUTBMbEplHlRZjAyj66J472r2EangZQN71DBQEK9hWd/iOTSrEMzE4vkI3PbBki3m6LAsyjcUdA9EAqlVSyUzsleOdH3X7DZ9f5V2XA67DlBbhGLZP27OoAgNR2lHVYEpCuucHQ55hKdY9/QpD1iyXQGBjWHwkGpYIEOF90Jrbws1L2faAOD2fPVZOgEcksUpeWQWLjlADgnhV6KmWE+kiRrQZ+PqHS3NnRh3pwFfPF1jq1987vbCzaV77CZu8HzZ4YuHR3h+J2D/6W4ETxake5wAURY9Iw2dEXoe7mCstABJLGHxEvn2GneSPw+MHCyenXb4on8k450GbO1SFXAAeL7scJgG5IN82ZwatUoJGZpAKwqSMresh3wZ76TeleKxN0E8VW5qY3lOA9AU3Qf22GotFZA/6u9CfwNI+CxuNJuty8S8d61yvSKbssy90O1oPyeGWbrD2LGA3x8wznPn9sseSDtG3HZyyuue7zIKJd
2f699sjJ5T+hlGNVyrkR8ZiU1VMHImZDaYsscL9YICwCGPVYkvTbIuNcOOFhjKeJF2eEtDKS/mLYJ6tADxWk7JI0SLFNV5EFSeRGpA+ox76zINUChKeX4WT2k3DcKfSE1ZUq+lWQBbM2HkmYuMaZQr3QSczUcvq7g7K1beTQAPeTj79sEqUInhNFyXBofpVWB51HA0MAAiwSym44uQwes1fjZA3DFoo3gIJcy8yRu9TIYl4S95GUUg3YURc3BYJt8IIRMBgWBPxRuhFsAJ1QpwJ1mYy14+szwizE9k8VeitkCVO6hYUHaWPifpkm4MFbDF0xCxPhDQrIRK4JckzRyLEe4bqCUWurTgWfqJDMS93D88SRIn7w4qqhgKvSEDu6tZ+JJ0dQW2QEYlMDzmAbdhxCm4lvpf+JtCp5iMwDJilc3mTjJfcafXvgKOXwU45SCyJ8DfqhAqnivlVIk7IFypwNP08nLt8UWXDVksfSuYp5J5N5gk2BMmOGpKVfE69haNCahCULhnZJgKx5M9ZaSNAWXTyABTUbR00yad2zm3a6ssPA8V5cHUfqcBoD1hXgy8KhvuqF5WpfMgVvycoaroTacBt3HE8yjcXC/sDhsKmChWT+yUPZ6lKzyLV3h7Ee6et6c97k4jlkmkFs9iQFAwnULRlP6iMRZBXfF/ytliUnfQKelh2qjSsTGIG1Wki+K1tPVfU+BZxtOqUneII14k2bfl900Q70fQNZiTpPhbm6tBBAZyMGDHDZek+Ili6acNwIZ1z8pn/F17PgA4Lh63YYPFRtwsmzrewsGEmkXqTsJr5YvX5JTlsOsOXbclpIPHG8frisnb2CAHWmokobfLiGe3e0w3akzeOfhj62PaMsKjDNpFWK9yIhrwycLYRhpbMp9qeH9ggBt2DpAsrrzh5hVMBETCzXGDN5wclMdhEFqDLjPv7WmP02FsnGkHkdN5YCw6vvFxRrzu1k0g2KOBvyUMN2XcuOLSaoiY25qSPoniQNz1M7Maqe+gRAGy56wTHFwqz0tqeMvfcSfroVea+sy+Rl+wvkfJIva3XdhrZpUFavBmAnyLBADNCWBCo5ShrFnVXeaaeMbW+8IEhp2KYSXreu3JaTQ7xTlBZYxUkUxZDIHbvMUXdXwPecRd2lQcXn0eyhUWsDMLNrTgNm/xhUkU62fHLQhA0ME75gGn6CUq0ZO+cufFrgPYZa57UTbjFkh7nagxC9avXEeNOpqpUt/YU3v+piOIh7M6JiIrfcpagVr9nYEHw8Bnnqt+Hh9qH2wIL2XNfZdFPb+SYV6bsN+JJuBtAjMhJFsFZYgWnEi0WkBZlAE4xv5KvudMQmCLW2W2LPHe27TFnZ6+hhpwtnpyqrLWhaHUwihEmtkByyOD+Yn8Pb+awa8EuE2oOy1Eg3lyoJOBO6oydmqWKqCbYVlbfkALFwCo3qfAFnPunj+LJl/CZTV8srRwHSXZXPGRLKLth494vD/BFxCIjq9XxQIAjmFAzAZjJyQzCBnUvGIPcWpYoSbggSrwsAhoe9AQZDQaLgTiYw0jXQcYlyQcAmA6Opg75YmZiytfLbQRCFfap4FBsRE92oWQ74SAkgvIVsN3ht5BsHaNHCOr5ZeCHqZzO3DPvUl2ziK4GMid0F3hDhKUFI5BhceMSCyqCnwU5VCwEPKReAWEayBcGTgNewy3Bv7YBGL10FE7cFIwOMYBt6qhv0i7yp1U5m5ij5u8xW3a4EWQvXEzjVgmBz4p/USUsFTvychOgfaMSmMAPeCKZ1g8h0WB6ixkktA1AMSNhK4F64h6HRuy3K+MpSqiZbwPacBb8RqBLd5YVHDPOxznAaF4QYMFNj0CqoW00IVwqjevvOXFYGOHup7rPtKxXaLDMQ6IbHCnIbW0WFg1Gvo5RxfCs7N6U7vD3c1CKHg7jXjpZQ6OVvZdMZIOy4C7acR8GEB38nz+TrmTihzyMl7GceVhYgDJGllmvRXdUTWwY+QMediqwGsafD
mogozZsjjcTPK8MRuMztd0dQB4cdqA7xz8rdE+ikxbnhD4Sj63ezRh9BGD9nGJFsdpwOl2xPJcnq2G+HLnAVJjox6wlgWQn1G5vEzIoJBlf7byAuhbHqyk9O9zxRkCkmwS9ypPNqpgMqqHRN7A2uIhrL1RmQFjQKmRglZlpxCoWlHWisEM9N7aBLtpMttE8cyVe5lAsEszjMUob4oBMSMPZu3NUa+cXVA92CIX2x5gFqyT4OxkwF/GHW7iBnP21UNfjOiiQBWalGMa8PpR9uHxsIEfIhYVYJ4ymEkMGcVQJlZF0nKdXyh0piZjseLQBgaUksFuEoi4GkSZLTgyDHcEnDPgDoKjraTNs8BU7KzQkH6NdOvjQfzTOQ4qrxUttoR3ax8wBio39/4g4aNxDLjeKLmXk00YkkXQk2qJVuKr+neJEBiT6/cGk5BB1R0ZkvCq9Avmys94vJ0QgsVcNk0Sb0uzxNpiLwfc8phw+jAjvqoguqsFu80CZzOSTnRIFsuQEDcWYauhkXntpTFBwkVxNDB6CFXwJhf8kcchjzimEZPyz2SmKhBXAHxe/13hNWp57jcLXtmchJdFx26wCTvXOKZu3Yi7MGIwscPSSAZgwcwYYvHsuIRxI99bjFi1RE2YD0MEM+HuuZh58XaAmQlpwzB7+d6j6yO2Q6hzedgNOO1GLKeeN8bo5mLwVifGZXA0iMrcnG4FvyBeGflICXnGLj5fPFBls4dkpZSLYbB6L2MGasaZrgE3EbBwHVOKIulz7252BKBZnqQeKIrdAUqriJa8ZMQDWMIJcctIu4yFBY8FAPNB2K4buaeFm7lySJVrZ6Y6t4GdWJMGnfIroOTAtno7lugkZFf2QKSqzNSMqAJy77IcC/C3sgYvgtmicwWqqxqQNoSw1++pIrBxLTxRlTbm6r0CBGfxLO5xmzb4wvwYAPDstMM0ecHdAIAK6ZwJWbF5XD2j7X52KWGstg+TU6xULXtEK1B7zrQK+0qHGXlkRG5hyawkiq4o30d5LuqIgCmJFyZEi1OUG0adl0X7fVo8ltmBF1OvZSclMq2lTcSIyomQFeBVLXWgJngY6HorEAADQQN3JLuUafVdsPQxzRbT0DwSMRsE2/bTtHjYU1P03UE8x/OHCFdPJKb11U9eYOeWanhNyePlsMFzwzhBlP2FhQTQLquoDvpQa7YE8qpjx7a+TBQMajHEzxt5i3AFpMepPm8arSS8FO4kYlEeY8PoFFZ7Kspm16dipJXPoTOAzhNg2AoTf7YNV4lJ1oQNDY9qZ66ezNJMKHAC+Yw/ZthTglm6DMszUtDyOJL114UW0MKSxjAsMebscccyB8/DDrdxgzm51Vo3YDi1ELZWuOZOyeOmGBInC2MFjwiI88LbhNHHasDkTJrd3DzBRVmuPVSDkLit4xwJIGoRIoVauEPz+g03jOGG4U8dtnfJMEsbJ0pZE4z4ncNxVQHHypADAMrdAJveRLvfPnAQeSMnhLj0mOqBWn6HbKTuFVQZSo1UzhjG6AMGl6rCZYgxZV/DdRl6aGSPk7LjXbsZH97dYkkWz/XaMRjgfHx0YuNO7nf6MANffcJHntwBEEHV97k0azMwdKW6Bsnuy4orMSeDuFUm2YIJstL3ouS9SDu8SDs8j7saOkjZaCioHTAFMLcCMmaIoOkMs5SFMbxlEqlSpB/aWLEknMnVgi2KlOkWngFj1E1SxgAAnM31tZ0PiNngdJLxzs4DhYyxMMa7BG9yVXYwyik3W4eo4ZvsbRVSZqP3swz2rTxE8katvGbhOCtEbkVRMNlWBaoAgacooFwilpIygHjRBgOjodYYqEqkSvCm6zYPQB5VOLmCS9I5SCzZoquyJqheKPmblOW+0V3kgcHbBPIZ8VqVqmsLM5kuVCQKlVkkeweQMXWU35fHrSRPAJIAIPX+zp8tt5I/xIK5o077Uxd4JWNUpb0PX4i117A2aZRwEQA4xbpkR2v3uV4bubnOl2xxUNqS57
Ps6cM8IE2uhrEBCN7JGBG8gAjcoF6iGjKUg8QWxXeRPrBva6dWNtBGxHAkntqCqXNDQtgn5E3Dn9jJgB3VkKc7GZg5w6bmfZASHYQYLKao2bY6H3NoAPUYLCiYFgJfGsFqP06cCNm2ZIrzVlPWC47moSzVYnBR90YmIJgKWl8MI2WDYNt4z7OHmanilOwsHtg0Mj7x6BYA8NHtDQIb3AU9cLO5TwpqWA77cx21BwujeKTOlKyioPYehJ6o1hDCNWN8PFVPRvDrMJVAEAzM1AxcMZ4Va9MZpr1iUmkC4tl66fZ8GghhLwoUDt0jJwYtuYYoBbO09nCIh7fD1E2idFWCXUIzNCoNS8N0xV3Bz3byCajkwbdpU/fVi7CV+pJnGmAhygSAxTksWpf1Tj2TNFnkrYU3zeCW+qmd3EsERCMYqL62YUekSQwg6ViG5hyRQdSzvihPd4ShlNW6YYw3CfaUO4WJYUJqhkvOzZO0CtP1Mqf//5lilToD6B0y9ep4veu7l3Zpl3Zpl3Zpl3Zpl3avfcA8UI0LJDsAlkHECBXsNiBlwhJdxUClaMXzUBRJy4iDAW8WjGodRjY4xLGGM4SUziFywxkYtdatyaAOF4TO6ikWEFMDLqerjOvdjEHvtSSLJVoNBxWXpakKbSH6MxbAICnichuP7I2WjGgWTci24lg8JdylDU7JV69JjEb5ZzpcRwJ6ksNVFfrU+hTZrFL7AXHLn9BCCTI2nQXFBpbyiv8mspFU1eIpTEbCHLld2Zq8qh5fM4QYK+uk94aQhgeTy2B1m6asKbBAtVCNyTCOMQ+KFdNCu7mzsogk86THbBVLqtAvzMEJ8DZRM1oL0LE4Wup1eWUhl/qILcSgcAmdExNZYu+ZV54rBlesCRmxHld8WQlicTkSkD8AbBPykJHVCxqDgT0auLsGQgXxvTBTgsHCjRh1Yo+JJQuvhDFzWTc1fKahO8fVqhXwJjWLvDxfAu55PTqcAaWMbGwjy7UynpJVqeOmIFlZv+1iNUUdsicyCFuzYOfEJTO4qIWwi9sISu1BK/CqhLr6PS1uhIahkDWTuTM8WUNvheSPScOfnSVq1GtZ8C0QBAByK+4bb0m8UGkdZjFRPFCnRSEGpnnXZX4tWC324s0qmYG13IrilkhlpnyIGqFi37r4PktURPmv9HFLckkN2ZKsQcMS4obInWwK7YvKgsXC9Xg5EkqCuGN8ZCseqGs/4dmyqxCEU/Q4aogSmlFYMvXOs6JKNiqAlmWbUD1MTCW7jc9CeN3/jYTZd0OsWMtoWMJCNctU5swsgqcEmgztM4nrc5aogu1C6LXjOn7ax7ghhCsJCZfwa3ZUx79Cl4qIK13ndv7U+o0FXlK+o4kkfS1NGEYapXC3Lc+X1v3MmXAbRhgwDhqROcYBc7wfvksAcqHCSRmZST6rSSfmZJD2pp5PgxHMZcymntk8WwmlL/c9fL0HSiFlDcyva486TLKdSTK8S0g+CEDcdJ4503sk36mdv3+Gj3rHz75DqLi0D1aBIoOoNe3inuF2EZsh1NDtFERxisEhFVd94acpgDgC5sEiJwOnrskpehzsUOtWzdnVlOdyoEa2OAQBa5ZwUU1LLpNcUrf7GHMCptmvQiBcAHFnpCHGNOFmDMOYLHWhAEzRIA9WaxmVA4ZWi3g0Aa9QxtEPtYjp64bB2s/zCtSrzVcOxRLm0r55kzp27oSY22G6JCEjHUzs0uFtDX8Cks5cKSB0E8XFgoNBADArZiLuLJxLVfkpPB9sgHEUSbIfFglZFjb4bOBdWgPVASlSmqm6nr1PUgR4o0r16BG3kn1SiTwVD1TmxJfaacRVUbRar4nZKMsxapHcjgevY1HXeXL0oOCU5yzKQ8O+dLpZI0AEarydYqPpsCcCWzUSXHPVg7gejoyMtAHM0nJ8OdFqfQe2OOQRFr5m4c3Z4y5tBCyaegWqG28CahHXMneTVT4ayUgFNLQQuGVJZT3gevliSIyEoix5xb
JZIG61nzsDv7WwprGam5CRXXu2OTo4SvjwcFvxeoWH7LbEyGcD0v1GlcwVoviehYdKGE3uBalZx+0QKvu+gNhjlAy5jIZTisECs1WDSy+eqB7YgBR0TaORkEJHimpmAi/tgEHBXj0gm/sDli1a5th1hn8kBZVLgeGUDGbyMCZXWWhNxjBETJoJydFImBMNVM1jhtnFWj+PWXigQKjXrv3pePN0MFtIy6pSMWY88hLXu7IzXtK2rsslWcRokYOR9QuAQjMiVjIsccUN2akdkO8jSt2Nnxg/1jQCUA6m0q4AkjRgF/ndcweVKgmrrFK9JgCQYiC5qzCRBpKsU523cKXZfgxkDcFLyJ7AVkK+8lozwoC25VfULOX+NXPNtILitcYcCw3FQEi+zYt8X/sULO7CKFAXPSOXZO+Bx4FzY9cgKxdacQTYIAkshYJjm60Y1zrHMihKYdCdWeX86osnG8Wb1bqRaJ8FzhwDvZOj2pBdGPccFmDEwFqfLGefeS+l6320D1aBMoSgdADxKuPxfsKT7dQ8BFEUoxypgVwVPFky5wroLFpuFgaLRlwOymOULBfXCYM5Obw4bXA6juCj4mQmScNdcUwkIXBTAmT4W4NlHCs43FhRjIzN9YAnzfBzZ4IMAE7l/jYL90dX9DE7wXsV0O9AEY/MCWGweG2UrIfP+Cc48WatPBkIaWDH7HtO3kaQgrqP/FQzaQwYc3YVJzRHUTSj/gByEFvKnTfPYk4OS3SIS29hiGLLmpI+E4N2zWtTsAvsGNtBpOIr43G1iUsm0uxcSxoYLZYoG7bgqzZDgFcPFwAcdiPizgqBnx6eKQsurWK5TMLGhtUamAaHw5BQitMDolCkTSN1rOR6TPCa9ZdGe49wr1qe1ZPEDTOx+lx7gVVZkYwzec0dm3VfCsAKd1HnAWNo0eN2v2WReZn19C4M0l7Lt8jceRzTIGz3BWhdsHtnlnDxOgGCubJH5TiqtAUlw1APtajg8cwgxQywNUijqfjBsGekfQYMC7YMYinHvRwEJuTalzWIXDiuEpuKyXs8iKyY93oAkIc9L6Lrsnp5GaYQNKrHrRSBrqzo/V7Jaw9YTrbh5kLjPrN3ius48xrU88eo8m0at4yJDDuJUhpVhhDhLNuIqif8nJuqFDjmfcTjqwn7YamH3CkoFQVTlTfeCC4xFm+tY2SXwcY27/AmYRgjBt1fDGAmIKculx+o/Ts3FFvWoXgQaDa4iwVrOmHQ0k8ABF9plJm6ZMze2yT3PVCmwwut7nt26FEvB/VvsxBOc6O8QFwn9FBu62JF0Fh/uH5OOtNZV/qrlWQRr1OlJRnF2Do3dkUWUos+FKOseF/UI7dSFoq37XysMppXuwO/9xGKkjULiKIdssWSHGbF4c2pydx6XaCWuwKAYCWh6Bh8ZZofFkJIdA87db5G1tT4+rvHuFUl6ExB7u07NT7vl/Gih8FHPTAcnYJV3uu9TuXv8/Elwr3Mn3dpHziIvGTDsM/YjQuu/YzoNDTEpLTzVDPckIW7p+qS6tZcUTowYUoOBwUu3iwjUjawJtcwHyAZSOlka2kROxFMz0KtbN52aWnA7kCIe1srW8MmOC+A6l5ZsibXBVjv12UP5mDhsmy8YimAJLPlRRAz85m7ws7MUjJBn5cKeDw3UjKQUvqfLzwLZCXGvN7MeG1zh6f+UMt4GBIF6lBcBABeLtt79fhKKQtAstoKrQQXCyNSddGWjImk3qFWkVsyMNhK+AVA5ZQpobXIBkty8CbVcZqtw+yEzmI/iN926wIyE24VyFif1beNJcpYU6CCKoV9CNMbyfochlg/t2wc5o1HVG4qmg3y0YCYEFQR8EeD5KkpNpAsob5ECWUGos6aK+hN3aPFxWqLK59WAqSUbKgC2qIpxWV6u8NVOkCaYafhuuxXpId1LmEqaBkQQUcdQ3BdW+gyRgur/BlYtm/EJcTSgPPsgLg1CFd67yvhLQIAvlOBawRga7q6WNkSiFqI+j
AN+NzxCWJu++ALx0c4Bd88vD7DDxGjj1hUeZCMPJJszWIMK+i+KiKe16VlIHIgjlRlU0luYSbE4i1dNOwTzsq5cKNoKGSjdsmV6woQklZ7ZxAH9YhkgAxqUkTxkBRCXEC4kvpSJ2CCswkbF+raTV1IvQDS2SXxPpdx0lqHiVD3qvFi/JWpDcEhTE4sds06KnVGyTRZy5OFnaiWYto8S6DMGN/2+MLxMQDgkZthiXHl5UFO0ePkHZbBITjtYwl/d8NYge29UtsnKACy1jQDr/c+9GBhWjL8LeF4M66uLcStvcHJoC6LVy+lJXQ6Y73bh1Xm9LqDHvItYxfVw7biqqISftM1X2oBFuVslRVZXlMPb27CgpR3rfJnZeUgPErVCUBpVzoaA7CslUMccDsrMe3i12V5ajdbFMUZqZs5BQdohrCZAQpUjeDiiTImtwmls5+HWlGMeoXJdOMB3c9ejJ/Cpxii8tWxrcoLGTXGcnvgmknX010wd59p87PieioZwXjgvQfaB6tA5VxDFxSkxMEyWmxUg3llc4IlxkuzwcnIBCUvrsEysJwIsAzjm7cnZYPbvMGLk0jLwzQgJQNrM/YbZVUdFsmoIdTDw5ynPCsztl0ktg+oq3WXMFxpTb0xYDtIXb1i9ZVFWA5xQOgX5uBxOorkppNtcffCVk2SifPGLCdOYANPGafk8dnDEwCC2xGFgVeLkbvQQdiLYpod4J+I4PrE9Qu8OtxhNK0wsAHD2pa5NXuZ/o2NNVSyMQGJzUoRjAXj1WX2FAdJdWg4FcodBqqwyJb6ZS+WbSW6BCSseooeIVnMJVSSbFWeB02hdpRxih6Ho2x+ezBi/XalVMrQ1EzOZHGClzp/6k24nQfEaDH6iJ1mAE42IWdCKNa2erX6jUMFh9DH6+scqED0RjJeMzpvlkEeLZIyqherM44G4ZF6ZK4Ew7AqzGxZFADXPDQ5GBBbUEmNdqKI2gc0HFPnm2Cx/gwzNPulWbCr5wSq8ptGNEs3ASaZFSbq/BBkQ1oDsihUUsuNY8e1Nin7fFjjhIgbg/cye7x13Ne1AQDPDjtMp6Fm3JFhOJewGxd4V5R3YDKM5BlhLHgQmc+iyFmnpWyYasghhEFrk8n9rc01BFxLx3DHq9P0rBWhn5sZ9pRhT7F51+DgJoabCEm5tzJD5reETBch83V3VEuguKMYciXcf7qzeHG3Q8qmZgYe5wHTaQBHUxO+JpsRFsH6AZoB2xk1gBy6y2KRVYCko4O5c6Iw1PqafO8AtAehMBju5NmGlwGUMvzB426RPX5KHlsbakjPUCu/87zgWk+ijIpx0a4vdQzLfqKuRIfOQZRDknJeH4xA/dtMEcMtML104LoOM3jLiK54t8QDKjUTdWhSCzH1W4q7MaiFfBmw9XtaqUD/didG3JdCxcVAKEV1DaLOp6w37sJtDKgRU3jU3GQlIlL4jc5D5mjrz02NN87OCcm3sCJIZPgUHW6OsshDV5FjdT1qRrCx4j1M0VSyaTfJuVk8WafkK8di+V45Ktg0ZTMzwxAhVa0HWm8TSBs9RwcW7qiyzwaD7KUKRfHcpY0Yaf5IjZB5ZthTgq1/JyAkWSsVw5RXeCYqCtWZp6m+3o3vu7X3VKCI6KsB/F4AXyW9wLcz828loqcA/hCATwH4NICfz8zP3/ViOcMfFI9z53B7t8XoI17byWtXfhaeIhfwcpCJPi2+pvnKJQyIGOMmtDBZ9DgFj1s9YJfJC27I5Uqe+Wgz4WpccLNJYF/AD0BhcgVQKfELnw0gB9z4ZMKHHomI2rgIpyDrij/JpoLLl9jA32FxyDqpRq1Lf2T4O52gnWArSqr2y3mLyAbH4OtCn6dB3N++1bCTvnMF5rL8iewZryndwtNBwmUvwq56KbyyzBaMjDMJGxuxd3Mturwzi9TB0kEZbYS3Cc5lxFExE8RImj7OO3ltfzXDmVyt3IInsBNwe5Bned1mjC7WsFrMBnN0mKOtpUVKOQDqsF
J3dpDEgjsR0ts7gr9jZXtXZcsljLbhqcraCNniTq2uw2nEcvKIg63XXma5rlEPiT8S7Ingb4FB58kdkhxkmVZnEVtUhnqwk1p1SwJ7DVPvLMLOVIFYShfEHeH4VXKh+FqA20Q4l+paLUVkTaegz7ND4E0jKnW5kocCwiBsKAuPV7frvTmrIafOr5W7X0MSjb0aoFHmrz/QSjkZeX7xjBJDhJV04syaE+8KlkbJ4I+5Mq2XVriTirDLiQR43O2n4+0IPrlq6fNGApWDTVVBTFrXD9wRvDJgxoTdXrnmfJSEg0yYFNR9PFmk0VXslnMJWxdw5WbcbZROYRcRZl9B6dAxlFpvnbLNZ54H/ZwQ9OohAFu9yIBiciatgFA4gJTVfFAuMH9rMO1HhGArdCAGi3znAQaCrp1IjDw3nq/kGWQzWOkrALSSN4Vi5c7B3YmCWCME6qVj057XHcX75I5NoREAbzMiMwiGMrYqP67dhCu3YGND/czzxSAGD3fo6iFmSTQoXl+jcsAuuRoNBgAtQA2zlAnuQ3ghwd8x/K1BLIkpTxh+3+6fghE4Quq8+gkdD5Re+ky/IPXWmohKvips6o0EttSfY9MSJ9JISEG8o8U7G64Z4XFq1CEMKVE2mq7kjMW4IQw3po7FuWICoOI1Cz1OWgzyYFafmaPDafGYT8ovuNjm9epxH9S8j1KknpFT551exPgqIbxFvfzbIVSM6pwJbKwYkBVP1s0Z1GAcGHAZRs8VP4gMrLpxJqRgESaHeK3GzklkSdkvgJbWOVgMRbc4JLgpCW3EolQ4hT8sljPsXYDnXwI26v14oCKA/x0z//dEdA3gO4noTwH4JQD+DDN/GxH9OgC/DsCvfbcLMWe4gxYHvfU43nncjJvK5zSYiMEkjC5ik+S1zCTcFIV9l6lijcpheTcPOE4DFvX28GQBBrIxmBUjE/YWo4sYNwuOo2IHbMfsBbQDAfc3T+/ZmNkhde7PkpUXgq2g8RzMKvPDBNKSK83VCpKNXNyqIZlaRDScinuJBDfDWLs6LXeuZa5equI5ugmSzXdQPBggIbyNjZXDI1SgvamhEqnQ3cxCRxmjFbB/FUDe6EEFbLYi3a42s2QZledN4oI3CyEp5uw4eqRMneeQWtHUuQE+AQjbeQ3rJeRkQepGLlwpdmaY1BSmnjW5lKdZsq3KQ84EnizCbBEOWsD6ZDHcmErU5rTuoTsC4wtdqzcz0sbAzi00ZNRNXzwEs7VIA8GdbBWAYUeIuxYKTBsRduGqEbO+8uotHm8Fp1bGt+AT+uxB6SyaNZxlnlZ1/lisztzNn7CzP8AXtXKbA3mTO6JByOESqHrPnJPMuVYLz6hXKjc3pIYtTMkGWsRzaRaz9vLOufPQQNzmMbespWgkrAS0YrqTrfULAbHqYzKrMHkpe5SDqfUtSxmTmg3rhCcH6M5dw6vMJm8zNjZg7xY8GUVKv/lowok07NaRkLKxsCrIC4jcjrZixUrpDbM0vFvJLKsZnEsfhuE6Tqtkg6MB3TmkSEgFThBMhSNk9cIxsZR9qvglZV1P1A7MbER2dDVAC6C6eBib94Kq0uqOYgD24ckCam4cfhZzdnW97WjB3s740GhxuxdD6nA9IhyF96rKWybEDWD2zd3DBOTZ1LViZynijpxWYnvlMUhJirafqE5w2hnQNWNULGYeCSFY5GSrR5M1OxfnXple8Gb13BJVA7uG77pzIzsxQorykL3iKm3jF0y7DNpH+E3nkd9aLIMXLjwUgHqTJ8NNbsZO562OO4PlEQFKNDw4mbMKzg4Gh2nAMnvkk+LwFtNQ252nSggwy9waYRdPBNeVTTGp4e+OXs6XwSZs1avPDETHYmAXhVLHsiR8GJfhXIL3qUI8vC1JPjpG2WCOFpP3NfwdB4u0NTAzIXblXdIWVVaNminruqKUBlHkzMrlmbuQXfda6pSrjgftofaeChQzfwHAF/T/t0T0XQA+DuDnAvhm/djvAfBn8R4KlA
DbtKzHDNBksMy+lg8oJF5zcjioS3iJdkVaScRwVizqipspNe600KqZhO6eHarb9rB4WAU01gyG3jWrv5kkO6BgEfwdYXq5wTNdCM6nVaYCoGn9yQh+q5CClWrUxU2vcevzIqa8WBzVEl4WhzALUzTVUhtyAJjZtLi3BWQn6ZhUICvhxUHCmF9wj+CNYImqS5YYS3bVa5EhdeKcSTXrbiTJyCvKaVaFdVTLHRAvYMny2ygQ1ZqMmLpNWYyaDMmkBBCjxaD4pnKdnEUZ44LJKZQVqfMiDDKefWHKmo1RgL8s7NHl2QYTlRG5URucFi9erGBqkVZ3kNBJYVf2dxJicjPDHZWS4hRglrHiGwD1KCxc8WxpBNiYlWIbN+J6LqSZ5ZBmC5CG55zW8BN8mipMyeI4D1hUcVpm8WSak6mWbwrivas1E7NFIsGu1bI8SknhKVel2ZisRX7V28WQumj7CDdoqIDEbZ8XWw9rYou4NKsvTZJinJ2BrRlnIlyrkhUAZikVU1/TbEUKTbgRA4gdaDgKoWPuKS2yrPe69SJhmT2OBfMD2T8xWPBiKjmfHCTiDZbnlySQ3KVdF0FQKCKckfCoN4I5AoBHuwlECsitoSiHHEznpZM5t7NdeQFLXcgiz4vSXRWoIFb0cNuHYTKQuRXeDqLoZGfABe8TtLwToZGJkmY/Ve9Cm4Oyf1iZ2PswLiWtV1hCxOdgZgjz+HCba8kME3Ol8ihhd2HHt1jOZGRmwlbHchwDFr8ROVYVDwZxq1enrhDYgSvVAFuCmR1szPcVndJigpsy3KEpkWm0CLuGnyswxDV/Ceoaq43KAJZxoGrEnIOaq7LiisLElc4ja7YiqGHx8pBhfYbvkiCIGAsTGnbeqgLb1rJJcq6xEgHDMdKWsVw37y8bIzU9y7PMFpMdxAipZ2TnbesVKDXEAcV7eVqVrpFyRVQpOe7ciJBNTV4ARPGxNle6m9KM4eo9dVaqWzibarmxQnycurOHynx1xKFsNRpTxkmzbKtsnBjuvATLQ6E41gSYFW2B/v0OpYLO27urV2eNiD4F4McA+G8AfESVq6JkffhLudalXdqlXdqlXdqlXdo/qu19g8iJ6ArAfwrgX2fmG3qfaX5E9C0AvgUANtg1UJcizUrxYECKapYSB8X6jsGuMgWMprMXtx+g3Ewdz4gJVIm6So21o3q5Qmj4gAoa7EqimCQhBvUGYnwu6cdBrbUwZhifYCw3Qs4+m8H0z4eq3VPv5u2zOBIhlFTpUrJGPTCAYKdAGv47dlYlddY4Gt/G4UPiJr8ZA7Y+YHQRvqtrB6DGrzMTIgndw23c1C5lploCZ84O8awcQyXCRMM+FA6v6iXSZxfgpYYhrIRenSIw31G5L9ZhZzGiL5BKxfJrc3ech1U6tzUZO7dg72ZoCS4cw4C7zUaJWbs1wJ1XMJXwIyp4Vdy8uj4KGHphrcQuH4kbmTdSGgz5ngCLc6FDGMSVHyfCYSPje7PdIGWCNVxDdXPxRM4lM9BKCY1DwyKEIOGr6rXKks1oOReaIXkerW1V0sqtywiW685nAnibsL2asdEQR8pG+gDUTLHsSlZlsXJ5Hf4uj9wB7Qsv04qNhbupLbxZzOKBKrIhSY07ztTuYbUAbKUzkc9UTjGgelvfC8HA+t17UIeSEEFCUnpKvvJAWZISUtF0BLouC1dX97wlbFQ5wVjDlr5l6xWvUSMCRMVH9gWk0e9x7vd795p6n2uJG1vqGOpnlHTXdB6oXLwMnfMFwCqEVwDc0j/16t4y/F2EmdRNlQGMFtkBO/U+XPsJnnLFXR7iiDkLD9EhaJmnbFrfy73Rwl9AAfRT9dhKP6gInVZuI+d7IRgJEzfh606EeLRYoO6f4s2ILdRLQTBi92gpzNmgl2zW1RpHK82kBXL7fgvRphYSN+3aBKyoIlK04mnvvDYl8w+QkB4nreGoHijjMrJ3Ujpsad8BOk/wZJDhQLOUrwFkLdZ1spKrrX4dJVSC10
Lr4yYpvj6rR/fWKn3KuFQsorOpDaFpz2dMwySXs4g7j1NioaIpxNoh2epVrrJwaZQUPceUSW2dSjY9a408xUCFBEqpYqAo5Rauq4OtnqeU6j5+LzqD96VAEZGHKE+/n5n/iL78OhF9lJm/QEQfBfDGQ99l5m8H8O0A8IiechoLGSLAmySpyK4cqFJ0cwmuutxrHLXIVqMhmQ1qNgozaWFI1AGEyrXi2g7B4sAjlsnDzAVLI1lxFQAYhevGzLlyGvpbA39H4EK1AJEbZFIlAKUO7EukITwroMGqdHgjLv6RUCRudnKx1FUENy6DiRsp2WwVrEpwR/2MdqKSGibZENkDhx+i45tFmdi6UNmcDeRgOCpnS9S0/5gNboLW3ivhLk1DPEaPu2WU8FcB8quiR8QIvoSGuHEMoWxGQhoZRjOgHu2mSk1Qxssqnxb34FUGYBhGw1zGJjCZGk5idZNLAoD29yhsuWUuSujRj3cVEL/zC4YxYo6mhhVrGm1xwVuALEAOYFuy5wQzco+zBG0O/EkOxOG2S2E/23usIOvlkQAsAeC4H+s4FJxODFZAnoV4sLD6xuZ2p0yCmSuMz3kAIpCsWdEZBLY4JV850ohYlZHyAIDdROw3C0Y1SuboEKIaLirMW8ZqE1LCy9OFwr1B2BLSTkMAmww4lsjI0OE6xpY1Jy/mNU2DHmKSBKJ4OWJkuIqxY8saBmihZTGuBENVCV0V11KmzWph7GQJi9FadC6vyBEB4ZI7xgE3s+J25qHiHivz+XkKuGLA+vR7ihluzgCZxjOlyksD8jNsKBl8LeOKHTXmewclJe0gCMrybjry35oaX7pYSEYt1bBEWe+Fq4q9Js0wWkJNVBoFzTIDIDXIDqEqhzw4pI1D3EkGNQC8NtzBU6q8ZLdRsJhLdrUWYDG0VoBts1aosgMwSmfNOS4roz7LedYUUlaW6s4oWlSJKOKpZCZG0wy+QvrYXUp0C2pKlBpysp7ltbSR4sFF6Yl7QtxlqQFa9MyBatUEduvrV2U8GcmcVAWh9ntp41IKqGfX9g5zZ56UbqaSwKOG6yQTXghEAawUkPp1ArAKR4rBYgLV5Aa7ZJhgMOn5fDIM75IY2SUbXevXCkSjUwa5hZ9zpnr+FKNXps9UaE6MBjGoIRk6WRjWeD2K7QcooH6BCVRDJqU1tumdeKAAaAbP/dcfaO8nC48A/EcAvouZf1P31n8G4BcD+Db9/cffx7UQrrWcwSOGvYq42s7YellpIRucsoCKk2avIRis2H9JFI7FZPit4qlsfjAY2cdzwYQYBR9hV/Tyaw9UYT5dabMRLVuDAZDgKEo8t6RwMhO4A7tnJfwEgDQbpK1FnLHKpitV7wHxDhTCyKmUtwkEWzg4ikAIXPmqAFGe7JwRdm0QBpew9wse+ammFNf3VCIt2VWQfu4UJ0tciTUX9UAVfBagllsGYJo3ahgijM2IFdwITVEF9ldy/1d3BwwmVgWu8Gf1ZIiZM6DKmdWx9D4hRouoClV2VrBHzHXT5DuP3Gk3zztOqKJAArJWjM9IthOK6JQAK4c8CEhb9T5MUoan/1xJry3CnRJLgsDNAnNq3lHquEcoiRXqXtni+GHBqh0XKdUjcAc9FMoBWKxcJ6m9OQJGO8y63lomqIUjC5MZuYBnIYzBfWkgeSBuQGjTEg9KssGiWZF8srC3mp14RxhuUUHObhbMEjFXVHbcWGFi3uvz7iOsz4jeImq2Ytgb2MUiD6biKkqaduPSUc+BgkwBCHGuQaUCgGVstgsebeb6WIdlwExe1k+hKpnFa5a0mvCk+ElnM1jp6MnxihU6a3ZouSYAnObm12ucWjJ+uWBGnHBcZS8gV5lLglkYDhlm7oQ1dc+rSpedc+e5YrCzq/TtkrXUyzr2Kne6tVLnuPudDSpRKnsGhtzwgxtCWlgMyoKB4iYHi5wxs5YqUkBvuB4wv+IQHmW8utHsX3eAoVwN1wNGRDZS0kXpTGKQDLCV/nnmkX
qQR4h1rB6kcG8H4zul+1cMmFHr2nTeSi5KXHfgGxYutyor9HsMxLLvrqlGPgBgfsLI1wkwjFjKcZ06rJoqXujoNABZUwXEbnpwf0fVIn3S3x3RdGH57rPJ7ZIl6QWihGVbvGtlkTfvZR0jglAIlEQZJsBAoh96hJhFPOyFWDM64UQcXax7Bvpc96unUEfDRJLg0M1B2RKtRBpJf7j1s2Cg0GHTznnd5MV6U7xr6z1MfUZn6ai197/TtffjgfqJAP5FAH+DiP6avvbrIYrTHyaiXwrg+wD88+95JWsxv6Kg11cSnj464LX9HQbVDA5xwBwcZpMl1VcbMa1BYBqC6cumrMk70FyTnVuemdCXhQHQuD2Amg7KjloZDwWW90A7sgznUw152I4D47yVsMzNbBF3pd6VXkr5eIrnQRQzOeStAnrj1iABcMc+b1V+qFOo7MwIOwCaEvpkc8Jrmztcu6lyPFl98K2mRD1UMzBmu1JEHJXMiG7TxuaZKJ4UGhneJyzqWi4EaHnM2Gt2RuH7WmX5abmVoowWgULENUw7uCS8Whvlrdk4pA2tFj/NZhWKi0x4ZoQ352qUQzYkC2bA+YispS7S1qh3p1xINmZegOVaBVDwSBsjYFAVgGEvh3RNNjgBxHIAUu4XGK/WWyGg7AWXsZKR0oeqeaCGxydCNsVa1i8WK7ETwAUwP3dmbsxSG68oR9Ui7OY4JVMZtwFgOg1INwPcS4vhpSrIL4HxZa4cQO4onjZKzeWfB8mGyWrYDJuIcQyYrUfcqeF0ZWAXAdsXl48D0EMCzCLpy9FydfmLbGOQZgo6n/BoM+PROFXBfZNHRE3AsEf1MitYtmTsLEn222YT6v24N47QvLA9qeBDBwIpd1dhOY87YJkJgK0HnSsepYSaTdbA2eUE6NZF6RMJYHy51n4/yaBHC5xvdBc5GQQMwK1dK0yGa5KC1f2YnGlEmoN4/it/3WgRrAfIVu4iNgSnwO7qBRsN4tVQZePy2GF+ZJDHXMtDPYt77MxSvaB7N2PODkc/4IVpMAEY8XqdM1PXsS2JIrmFyctvJgIV7zAgYbwSiimvU/P25KEAonX8hyzUDrl5E9lKCI9X5JfSzxKOIJdrFllRmpfFrwDt4XGGv55BJGsNAMJcaj8Ccatn1ph0LptwYCf7vCpspSRRuXw5JjKqRwbojfzyPq8UUlEKudLd1PFVD2OvwfQh+BJOdRNqNmjxTNYsV0iliK0L1XN0L+O3XF7pWcr/rSbQ9ITUKZsaCjTGopi+uZ7RQkFCiaqnjlgcDWUfipfbwCy2iTljhOg4qYPDZFDsXLel9R4q6eiDz1La+8nC+wu4r9+V9k+91/dXzTSBYK4CHm2E96lawGywHYLQ8Pex5/P5MKiofEAPhbMsLQmocxUsfojCK+Fsi00XL8m9TLmOi2MD5LGzHIYMP0rIo4SjrGbtPKRETV5DYccRafQwG9T0VruIVVi8bewMcrIV5wUANCQJy46mZnUYYBVOknIGsnDGvfTpo7sbPPYnOVB1J1hItlfhgSqZQnNq5V0AyUIqSu3GBYyFB6qEAxJjVSAXqApQbWXFKKYNAJ7PotDMHW7F2wzYDH8WDjHEld5itBGzdbjTlN88jOjI1OXzxe1dhOxkEI4Dbggr5SBng3GItb9HBgJ8TUu2kyjYNjTL0iSHsJMNWjfpKO76gk8AAXYxNVSlD7EqM1E+F65cKwZKcjinZFoIL0px2eolKgZEv7yyuLtjrWFo4NjAsK37ySLDmYTRxFbO56wsBRKQJ4uTGepr6c6J8vSCMLyUj40vM4ZDhi0YnVLMkztrvzlTa6uKR2/o6efq97K8XzFBCyFNBil4TGOPcWryzCjZpQFX79pxGpAPHu7OVLygO0Kz4PTQDxYhjjjsbQ0R02QrMWsZ/wzCaFItIj6o/CACsirI0RhEiBIOAHFvQZk0HKh72lInb4p8etiLwoTqzSMWYy6ovKSnE56+cljxqIVs8CZdIc
RNd8hKqrjRQ99pHck+I2ocQzVqADHy7vyIgE1VakHiuesrdhCLglUUk7iV9U+R8Pa0BwBsbcC1m2rYHBAussFE2euQNRhLtmo5UJM8fxXr3P3UDohix942LorEoJTARRHJuRFYlhqFWwZvE2ijEQsnBeVzbqHeXLCnXXZzuWfto2aRkeG6NfPOInbKDG8ks44ICKrs54FrLci8zToHWge2cOLZjGAzZrSMxhJ+7juzwhD344TVxxQu0D5DxaOo0+60TFNPHFocBX0xcJBQ77hS0mkRfrKS5Zo1A+/KzytPtrOSlZ06gWA7g8QaMZB9Rz9TnBwFoxuSxTw4TMGteAJzJPBiUaEwQQssd9mweSDkoSlQRABZAoqzImZRth/AQJHJzQP1HqG8D7iYMKr1UgR5zLbyNWxswCsbjYPqCo0E8GJWC4nGtLLYY7CgQvEOVG2/166dE+6QtLU1NJMmI+5P/UxUtyCTqa7z+RXC8iQjP9JyJNcznl4f8Nr20LBFxPAmKeeOWigsZTZuFgnVvDnucdpsVjWLcCi7UBfsQsLzYmzdxOQY8FlSrAtA9fwwBQAj7LavXAtQ6sPjLTwl3MRNK9ZLBugqzGc2iGxwF0ecYgtRjDbCaCmGwURcDTNe+g0WDYOkAgwn1DCbJQVBdxXBoditOyXSTNlI2qoK0q0PVVErGJ1ixRAx9l7JPd2CmA1ebuQ6LzdbZK+boznv1sKEIVQI0aJGTphAhnG1mTsrh3EwjGVo9REpiWephGFMsogbII+8sg77sg7EhLAQ3N7BuoaDWykURv7ui49SIIRZ6gxWzpRItX4XoHgF9XbV1POzYsLiZbKwxBio1SHbKu/ClZex8y5hMo3tl4Ly7SxdrbRSTPiIWrbDTQwzr4vkFuWpXqtYtCV9XMc+Lk7q+EHDzwlCZ1CKxZbCxAU3xHp4dBjC0goeJY/Ck3WKjQZlOQ4wR2G5biVRxHquWyASKFuEhSpTtbs18HdcOXpiNJXnpjRvk5TH6HhqjJG4X9C1k0bWWnkNj2kXqgdYSTPPvnjVy/PreFI7IEtgo6S9b3YLPrQTmVMUqCU5HGf17FRlVNZYZXOuBwjDqcK48RFXHRbREgtRrc+VTyo7PTsM1jisoVvTrAbnTJXL7k17hTBYPB2EF2Q0EYOJeOwnvFRP8M2wQbRewo0d/pMdNzoT7kIz5SUrniL2BqwbX0oomRYmj518K+DrfYbdRzi/NiJioCp3SiF2wUIVLxyvvI5sDLJl2A7RX3iTerC/rI/OwHeiRLFBZcS/3s3YD0sj/E22rrkK30ikrpc2RiYIg3rVNEmunTaoeNB4kpBeX1TYBFQyZwCwJyEi7qiSanJOdU6UvIQJsFNnOOlYAUAOTUlyegB726hFej+It7kaJN6mis8tkQmvBNXFIFqyxZQ8DmGoofTjLFjXmO4rNuskI40k1fMEoGQaq70lVS54HSHIWXBasSgTX6YH6h9UY8iiiWywUdNva8PKIwUAt17Yd+tAABjGgN0YqqaekvKcnCsV3YHqbcL1KNd+MRcel0F4NUrM16EejEWYTq9m2I+c8KFHqpjs7/CRzS2eDocVWLcoUKVlUOVWAoDdGHDcJETYVT/z0G0G9aSh38RIoCFLZkep4m4BTp1g0xprcUf42Fb6eW0nHNOgWXQPx3Hn7HCMHi+mbd281Hl+AODJoNls41KZm5mBTAZkGeOoh7XNOBxtPfSrApiAMJVCzBlumyrm7fEwYaN17mr1dsVcGeKqoD72EzIT3lK8y4sxI22MWvdNkKFakajZf8yo7OacCW6IuPILHitB4ugiXifG0amSN1ogkhSPnZoCk0cgDWf4E2pCGjv5nD/alql2zkWi/cq+I1GcDdLRyfmZ2v0K/qH8TcqvVBUBVS7auOnBB1pbfUbYyYuiak1GvwCFS0vxFyqA7SxZn6W0EVAI9HituFvJCqqKTxL3fs32SgRAvGmrMMwDHn6mjsemGJddNXcmzS5DM5
pOQQhtT3Mj0LUF7F7HTg6KHuwi5I+EpGvV3RL8MWPRv+docTuNmKNbZYo2Atj2WikEXMcSzZJf/b9T9hlU9KXap37t1nFC+461GRsbsXNhxTFFxRVdge3oDnHxzpefYkwbYljK65CxroPm1W5dW/0/c0uMDFSL9xYv8yl6DCZiW7KzKMFTwrWf8MoosunZuMXJbbQmXqdEdrgWE+XPlKgmLiRPyIMBuMlQzlIipLFqE0r2YsULDRmb7bICK2cmxGhWiieV0FBqrwFY4Y44ETJMMzYyrTIWkfS6QFUGq1fccg2pXo0zXhmPVaGI2eAujLAmV0/hzEAiJ4avPptZoJUC2qJLOwLIVNlgAjUFFGLY1DJic/mMhv16DZFk/msUS0WFDWdkk51YYy3iDgA75fnKPGEwHkteZ9D3ZMeDcqztrWZKA9VrWeqwnpKvpWLKPWo0oavnWRIGyji/QwRR+l0yv7gkBnULnLkJ2LJZvlwiza9o0w0IADlKGnZIFiXvemsDtmbB1oaqzT73OxyWAaEr8bEdArzJVSslkph0K0PAbQHVcWBcDTOsyThO8r2w08O64FKVVdf4RrmfnkR87OkNvub6OQDg49sX+Ii/wc408CqAqkyFGlJxOOYBJw3h7YcFz/ZR0sFLxgYseJMqlX0uYZvYhHIVDB1TchpKtXddQCwLKA8t/lyUJwDVw1c0/NApVJEtlmQrMRrQMV9DvIIbG/FonCrhZ86EbAQH1ocCci+QvRiFvUUzDhGv7Q/46O5GxsQuAjjtGLXn5Gq/C/j92okCtfOP5XnHjLRpCiUgiowJzUv3TrvI+4TH4wmvDCLMp+S1YnwBqBOYDDhnwVlBMm3ygAbQ7VvRF53G3n0DgdasrnIolSQFg5r56U6om3VFurrQfWGXcC90WQTLMQ6YiCttAdBqHGZumZe12x2YtIVLuvsrrqIKpczAA6EnUk9U37iGdbkd8E1uyVhlghnKc0vWTMVSeQY7pUW17dqMVv6EE2GJThisCyFmbqGKqrBpgd8yB1ZZv7NrB6WbJExRwhs5GJwmj2Mea1bnMEhtPNKsVUA8VTmYJsgV32iXjrIgyvOehzYrQyAAGFEEqLyO+8p3CW0co68eqCl5mf/+4r0XFgA0mUUyZ1URyQapw8UtWkUBnfJQDiTxDGo3u8wnQGSNrBVTQzPeJDjTqkSc8tCR28rFtz7CDAnsbDN4VF5XXc2KUk0JdR/GYBC3UkS2ei9jRkZT0MvYlTAxAMBxnbvy/KJUmur1rYZrZwAQVAwXBUqxiEAWWQ316kzN4xj3BilamdpS1qrIchIjEhAv/+Nhwl4XStD6o1P0NXFCPIJNDoEB44G0y3BjAdIyIhOS55roYqJVr3UxfgAO8nuVRatg+zpOnWdRL41a1Ll4RulsLWeSpJNkEVXRixrZSNmszgQZ5qLFp1rloq/DCmB1PhVsXc3wi1JtAJFqUoQJVCtfAGhZsOfyijrvKbEGZPosRknyAXWvvYcH6t3Vq0u7tEu7tEu7tEu7tEu7137AQniAhNX2fsZT9QY89Qdc2wkfxi0+pGXJX2x3uItD1UQByQybkkPMjwAAxmQkz0gdpoBycZ+r5qzp+t601OjFCqalAIhzbhZWuR0NGVfDjFdHSdP9iL/BR/xL7M0Mo26irHrolD0OWkFyYq8lURqmx9oMdi3zI48E2iZsdoveX7BfcXHgY1f/rGaKaN+8vFG9eUlLmDBqRlLhYRlNXIGKU6cz97ij4nZO0QrAslArmIzXtnfYuZYyXshPRx9qOK5wvJTkyTSIhyyPXOs9ffTRDb7u+i088TLfc27epuJxNLZ5UPbqgrm2E27TpmXvkRSi5FzCOmWeuzBuZ/lUp5RyB+3dgiu9tqOk6bXqdi/laJqDr2ViagqzvChWXMX/aMp3j+VpRWe5foe0z7ZwkU0N7FkwDKTWYiGvs3PzAtTwIGGVwZO5kUpmJQy8RaPXuAslHX84C6lJOjFyl/qv3i
+7oGGeuqyo8vdDPCpsISnyQA2bnIxH0vqTaTQ1PGOqhS4eqlIMNQ8M2iZwbNlkRCz8ToVQz3bUIWWcuREh9sSoPe6QI6t3jer8lrI8NYU/GAQehIeryI/rANIMqlLvMgYHXmwtNSKFhQWcW0C3dsmC23nAkO0t/3eyc2tWabQI2WIwCVnHPD5g4ZfM4+L1NTZrgdaW0dgXoQbEG8jZ1DmWzrWfls21DuWY2GRSKePxaJiwsWFVhwyQhJXiadj60Ahdy+W037l9QUhY+32cADcZUG71+KyGPismqnc49GG1jtZEnKbqSumeV9bJusSJidQywAwBUce8EuoSbFemxy6EEATe0GAMZwSdaOWCxkoW1dzpNexVx0b77SREz5ZXSVQwDPK5AeKdeFJsqV93klBV8WQDikEskeNKaLgOPxd51fOM1bEtIi2J3Fmyg6OG5yrep356AawSrTJTrZ0I3E9yitlKRCK6GrqLoUACek48rNZuy7xrsABCCUlyfa+A17jIQtPt9xLC+4cLA9VtPpex8wGPhwlPlCHyqTvgsT3CU8RH/EsAwG3a4mXa4qhoygSZmGfLHs+dFE6yLiMMuRHD6cYgppqFlzJVzqM6iVpEtAygENDRSmhwFFdkoQDwlLChgB3NsPqhiT2mPOAmb/E8SjbKy7jFMQ94EQRE3jMm94B443KlQzAkC2wODnea8stJpWsX084DROiWDWOVKZtRAXgFnL+1obpGAaEQOKFk8VgYCJHZOY1AicMvSeri7dyCR+Ok1xb3/8bFWt9qSVa6WdJ8RxaBuE0V2P7Jq2d4bbitG+ZF2OEQBxjiGv92JsESwyl2ApCw6G3c4E5DtpyM4MYMGpOwKgA9mWoKBrkj5OxPqSJMDTESU2XcRl/D8Dxi1wmXQvjZiuvKQdzjIYr4aKnDUiCWU8MWuZNIrZ6CQQgrW+qwXWTjs6GKbTxvpRZg7IoQF+LHkEzFCS2TAP37OmjCDNi5xBdoKGMN7H7nU74o+JINU6gGduOCnQ+wJuP5TjFmG1Gg2HThokKxULPVhBIDQ8QwFKLWjBilphegmVT63FXoUatDWNo9xYWLsttesovMSwmrFpoOc2wV7dMmIfqiOKmiqkWLK29PUNzY3EK0vTJessOIuB1aQAvnxdxp+6iyCEAlHsygldJExG0vAIAXnjM/aAbrEGFNRkgtO3Pj5DWnE9p3hbv1LaScjUiyB48DGjLRF3dqSL0yHLG1YbV3e4oNQMJ81mUEx50Co3hG1SOyAcgqzrccgokQTk3xBqDUTBmlcGaGZitTF36eDebJ1/BZzXqdGkGjmRudSdkbIFbSSr1/IZnMqFloZhZyyp7UUSh28r3QGJumQBjiFZattKwhV0Dl3BkzeaUv6Yr0cjBqNHQhrcA1jAxwC8eVc41Izr+eHf0sPFf9D/3et7SqqoHcVQSokfSyrtbKUlGY6qOcPX+2a07CQrET2TQDt2AFqZ2HBaqzWrtGONkqf2N5oP4Bs1ZJKOs703sqTOftA/dAlc3gfcJrmzt8fCOYIgB4bA94Yo/YUDvwX5gZniJuVaEI2Qm7sh0qIG07LlL8dFRBnKhW1y6aeswGhyCHdTpH8Hcx3/J3ORjpZPF82uJFEGVtGkWxGyih1AQP7PAi7fBWuMYXF/GK3cYN5uQqw/eSBAjPwbTYeBBQYlFarE3YuAhvU80yKAD6ZLlRKfSLvvS7j+8CGE3AIzdhZ2fsFNzhKcEi46hesmeq7E3JVYDlEh1iMtW7MVgpA/LEn1pJGDCWbLGxoW6Su2UUAVVSd6P0h8aMx6p4PfEnWMq4UyDP82VbsTklE2Njo6bep7YBM/DWvMdhLoQ7VLnB6jwlUq9Ns97YGGRG5aqCYUze4/m8q9bwlLzQBnSlAqow6swnyTxBFSTlEG7kdSKweqUDwPqg1GuJ9aN/KmZmTYTH9XWgAbjZAKEyCYtXomRPFsHTF+NM2SAmgyU6LEoEmScnbL4dANNEFV7Fqg
4CHO2zWIvBXpUAzh2fVVGyeGWpeitA0TgYPNd1kQbADGIR1LRjRzC2eQ8pNmOnrENvZbdF31LRnU2Iya4AvY0/qM1TT4wLEPIsxpJRb547ASbkLkPJIHsW74M+W8+ofD6f54K7x3zV9cJn37HnF2GQoZVHj6k9B7/D/QmQA6B44R74GJFkB5cSSqOLdb8BwsVGRhIkqpd7ELA8xeaNZ0urBAi2sm7SyHhtKx76r948x2hC9by/jFvcxC3m7Gp5F2fEaFy2rnJTFRLJKsNMWWud0R0IcVs8tRWRr97YJsSF+bt91ZwMwsE3rixADKXF1KoURRES7FAbXWNQqUtyMshB9mHdK9PaQ2wCNaWwZIw68fCy7fpZuqHPIQk9Aw6Lr3uVJyuKXSFlLR7BQEhLS4zBbEAsCj9QMk8b0XI5G3rFh1j2LoE7AD5WRlJTujpPYcFxFiUzSxLXnFw13qfolFH8/mIsiRg7H1SJZDhVqgy5Ff3FuzUpOaOKj67D3jtf9mDN+tfqJFQ+AwhAnBm0SuUuNzi3nh9uPwAKlPwefMST4YRrO+HaSkbU3izYUMAjM1UL5pGZ8GF7i4k1xZM9XqQdMghveEF6D24H5xNS4ZExhGS0HEoRbmqNp07DR6bKPA6ggkA7hw3sweDmsMHbV+pZ2uwweY8DD5h0Z70Zr/FWvMYX58d4Y5Y+3YVR+JU0tHV3GpGODnQyjbMoEsLWYdrKsxVQJ9AEoXMZKSl3aJEZVg7LstaE80MWTFEqX/FHvOpusTFhFWpMbOBVQmxMkDRjm2paP6DWgnoxTsHjLox44k81WyKDqudo6bwdALqq2SzWistVOZuzw218gs8fH8u4nfY4LVIlvVAbbFzExglw/daNdVzeOF7jqHQIpNYiRarAXwmLtbCXSAzlDSkZKxZYjMeLeYsPjRIidiQklivhykDPft/CQrQCNaOT71RT89G4WoA1SLPrWn1fDykB6Or61SyymgGnWTBsujCfEkL2lcvLXQuwPKllGKOpzNAClO29P93z9plyZyGHpgh0HimWv2teliq0rLxmp8Vj4ySEvEqNPpernVIKSAgxnRxSd9ikwgR/Npid/lbHljtFoBADtoxVHX995nIR4qZkURC+p1VWHPUhUz28uH23tgKS922ezseuPOu5t2z1XGeHWYEgDCZWMHZmI3vLAKt0QW6Zp7HjZ7M6nr7jt5K/R1jLCIYlRV77R0xIua25tGBVTigr8XAeGdcqG66syO6STDMaj61d4EyCKeWhTIR3yk9Vum1UgSoHXAbgCSlRI+vNmk0We2NXZXzZO8hIA9VwMNC80ehC6UgEM5lKr9HKenX7gOUZW2hIDB+2ncfpHfZJ///mgW6KePHKlQzpAqcoUYj6xW59kT63CYRU2M8zwcxGx0ZeciehHbGhKHC4v557L1JfqqYnJy5KVl6v3VWfquLOK36yUOq6ds2d0RhIGTFb4TmecjXSyxidt+px7XXoIlNWhowYM83jVkKW2l/RHkV2lUxQrA0YveG9Pqye6V3f/QfQSqj3ejPjiTviyk71gE8gBFgsaNkZCywOPDRsUfa41ZBeUTiIuMb5gRLTbxMLyMItmnHUeCopv07vgm/pnXJ/OwHL5KulP2eHQx4R2OLNeA0A+Mz0FF+cHuHtaV/5UKbgkJKpeIlw8qCjhTtQx4kDxK3FstdMgySFOgbXGGqtkRTmpS/+abDOytOaUdmj1jMrggxAVY42NMOCq3ADgGduj51bMLsOY2ZTPYQHm+CM/BTc0GgitnZEyBZvp52OuWmLG5DFrUzSxZP0MmzxfN7hiwcZt5vjphaLLtlO3mtpAB8rnULKBs8OO6Q73WgHUzlNirIrVqFkUwGq7Kg3gFQ4sZF6as+OWzwaRdFd0jrVtuI+uFesUTOJq5vasXhOOmW7CaR2Pe5qSxUiyOxNY7ovU5G7kMNDAivKWu7xGdZK6R9A3kt5TVxXjIasxbYBXfM9904vfMr6UuFUQj
jyRV7HerQPJWulNLMApOVW7o5j7Vtl+y6eu9DCXCawZuLp39HIgUeM5Er27X3rXRw2XW06DWOv5o7PfvceqiI4lYi2/2zFeOj8GJ8rzqesF2ZCCi3MJ2WA1EKvRbVNPYRrKKrnf9L7E0MO+O4sq/MDwTI5yvWQutfOlK3KI5XMqm6ajNsaAwWonDxbA9kyyNGKPNZ0yjdbpRZwXI2rp/YOjzqZ/sLu8Cxd4WXcwWpV74Md5QC1uYZmKulxCQd3a4rVAErRIG6b97S+b0wNdzMBcWMQd1iTgAZqnFMZwqvWUYWQZrmuePq0O61moSpX/WtlnZztHesycq3KYMRJaHl1HheuQECgF6fo5dxYmjfcLu28qISzEa2odhTKk6JcAsrxtLT9RCNVUszeo3y+N+S1M7lzztGW8plnWp4pddCBKTjxDANd6E1KRJVs74MdqozflYkqtZ61Q0HDvmu+O2oQi34OHlAOe3b/xmrfDMCWqVcmJHfXbOvp3doHrEB1hQpZGLLv0qYe6Nc8wYLhkRBIXjvkEbd5WxWoOXvcpg3m7KqbuLRWQ8dUXAd3obDZSe23ao13Ln+gt2qahWEnAh8dnk+CZXoRdnju9whs8fn5CQDg88fHeDbtcDcPlSspLE5CiSVWfbLwB4K/69LTFyBcEYIqUHXDdV6yzRBgCDj4vLJY2Ta8Rzmk08g19f+pvcPeLJjYN48TBQyUsOh4JxhcWyn1clf4SHTRGtt4g8oCLliqjRGMw10aMViZF2dyrVkHQLwHKi2K8vnmdIW3TzvcHDWsOXvkoGzAOvbRMWbFcBRsWGbCdBpgDlr645aUSbd5DdIggr2MLVhc+WLRlokVhfnuxQ5fUAJQZ5OEdIvQrBZL+xoxKpt2dRt7KIs96hekoHPPiH9uzQDC8NwMiTSgKSuqVJEXZarHl7pJ+lO/NzKuh4CdbzQSIVvErhRCUCBkjKZhZIw8UMPjNCWqLn+rnpoHvEX1YKMH6o2xhDP8rXxm2Yy4yUbwdYXVPQJm4VWIwc4i7MvYmgXCJ2YIWfFGUrswt5lULGM9+Pu+u0ZAaQaAMnX4qsIoT9UAiYsccissiAHgGKyhx2EMeLydYIhx0LVzZxgnRvUGxNkIpqRngdZ1ZEOHz3oQUa44lSK49aHKd6zW8BOOpYKZZIw+glyuobDzE73U5DSmFW3duQVPh2PFIka2OCwe88Yj1xIdBsILxJXUuNZm0z2SBqEY4F3EYydRhEd2WkEwMgxexh0+Pz/GIYqsuAmbSkFR+mssKuAdQKNeAKryneARF8EErRQs08bJkUHYA2Hf1kV2AHrOKVBd4+UEpCxer4y106Hfq9kLJoo9V1xU8kAa23ynDcNtIjbbpT5bSITkDGC5yrSdW+o8lrl0JCDqViyakR2DSh/Tek2UeT7fh/qQ64obrmh4+ja3P3oQObo1uLpuRxtR8LbyGak0MLq4osWICavqClkTFYqhzL6F84pRMGjFhFKrddQyVFsXMOieOz7wqMVbu1JiAVWUi5GWgU6hghosfTHqUlmhJ9ekB7j8+vbBZ+Fpf1I2eH1+hGdhX4FlhjKu3IK9nWssVMJMYUVamSHKVyEPnIPDPHnhh4BuuBJT78MACh4swsYWrpveClF+jBV2JWPlpTjmAW8u1/j03VMAwJuHKxxnKYJcAKY82xUhpj0Z+DuCv2tekqJApZ0W6fWMJRjE0VaPh7UZGx/BjBqusovEtFvl9PYMZdz2ZoGljJQJhyzhx8C2/gCCJ3srXOH5vMPbJ/EkHeZhZbV6K1lqmzO+jsAWpzRgUuWIIRuhWA99xYGjknTeziNe3G4RTiqRggEFs6oMnh2Qxow0yjjUuZssvFpY/gD4W8ZwaBb7ctUOcLmxMoUbtEwelgoQ6cbhZitK3KP9BOcSjJZ5yMFI6K4P7VYyRwCltthoYEZaCaQ0AvmEiq0pFn0VXuVa1IRgYTMn216zQE8YX+8tIQUVoqPUgitFXAFJHIhsKq
5sTg5WPVOFLTvPRlzbDUIiDNCWVy7xopTzmXTuLbi+b/IfUYyGG+2jc4iLQfIMdyNz6e+A8YbhThnuoEr6lKSmntFsnImklh1VbLD8HlvdCWszNi5KXawaNi6d7MbXEdLQsjXZiNckjY1TizIhbTuvoNExyVRDu84JGenouqxWI+Wb7pZS2YBEGfatfAtAquQTbMFj3DvxSA74YhVDDzGCJGMAeLSVuX5tuKt73JKQ3hrLUoQb0OxEtMLUnaJZFKiWQNEOrq2PuBsSFq+yKLGeIWdui/o8yns2ArCMl1EMzE8vryKwrfL6mEbcpRGZTS0XdQiDyMu5HT/WiQE2dCS+MZuqAALABCApBKINYVmn7dQsBa17rNaKT8xwLZzbkmZYwmFYr6PsUMOaonhLBnARcCYYpNApUNuM/XbBbgitNJOTxBFYrl7Mx37CE39cAaZLcfUeBiF9rz1vRlzpo+OGUSuYwkHWfTnD4qgldxir0D0pFvMcT1s9lcUFh7W3ijJ3Cr5EGfpwW0wG8+IEM1ZCfJCqFaMmN2yHgMEKt1rJwB5NWp39Wfs/JVfXRVHAVtmhZXlWiAsaB2R3jPdYrur1TYxauzQpb1SfXfxucXb8AGKggELV7nAXRJIVLM4Td8Rjzcx7bE/YK5AckLj/CwqYBo9bBWi/HDa4M2MnNVUdJdQMLKsbKEaLnrWXzzT1kipaXdkWgONa9+6RO4n3JQ54OatLehqaN6WEShYDmpu7WZid1dpWRcifGP6OpLQH1MIZCHm0ddMuPmI3BJBphwBFiMCth4Q8Rx5QMyC+GB/jrXCNz06v1Pj6YCIscVVY52zxbN6vvEJBFcBS9sH4jI0LeDKc8NQJbujKTpitMMTOXQx/6Cj84ywWJgGtCnsyiJMHCmBbydD6Yr7GiuWSuavyYBhQlzsgCuhwxxjuUhUSabAy31X+soCFLa0UIcpAGgyWray5o0twNsMVaovRghcjFmbZbFkOQBPagc6epXRHsY6zgKPT2ClLZ0zGxAAZ8VLVlP1R5p0SGqh6EGuzKMV2IimlEgHF/YO2EU/GEx51uLQ5NaBume/RSu20kpRw1JItPb6TPQNOlFbpEyFtgBQa/sWoy9wUJYOpeUs6Y8NNGcNLU1+nYMTaVK/UcMsYbhPcIcEe5QFJi+xaJda0E+Br5pXiuSIhbanWMxuuhdxViHE1bO68eM50H5exBBrlBxt5trAH4k5fs4TlhakKFRMqPrJ6P5JkA3kmeI217rx60tUACpNIbbblIooJW6RESyP6xaqRRQ2L1PAqq/Kn9Xdf3R3w0c0NXvW3q+9undRTy1wUH6wA7ykaGMswNtX5HUysIbfSMhOWZPGsjDd7wBhdJ1VDBpsGeaiK6mTwPXevAhAl/ok/4oeMbwIAvnZ4AwMlvEg7/L35IwCAv/byE3jT7lV7L55QqY12Nbb6orVUl3o2njHhtPHI85ptX5JqylwaxD2Q9wnUZevB5wYvYFKWcNPtA8E35i5RRDw3LXmHBwbGBONy9YzGEk7SU5w3YvA6k7vQqT4nN8V2NBGP3XHlGDjEAS+HDQ5FFnkLHriWZSvAbdbyXvL/Rk5Z5E3YC56rKBZxpwpUbgamEGnqdTtjrYa2gIrnlN8F+JZXn8eQcbWd4SjjRs/xmnFOvDIWiFDl0GnxyJ4wulgZzJ/4I15xR+w0jJDZwJuEwKbqCC+HrSROUjdPpii68ncaxCto+sgCA8Y0jxQXDxTeo70HmPzd1atLu7RLu7RLu7RLu7RLu9c+cB6oglF5cbPDdyud/qK4mdFHzPs1EeRzs1/VmSshqBJPBwBvNHZerJIs1jIcw6o2P/gIZxOCcV2MWT1OHf6F+AGtUjlYyv2fhx2ezfsKiIvRKN6JmtskKeCyKypqZwnD2QqWFbBfBf9NWgF94IrhCHsH+yjDbyLStmjDJCGG4tlQUGQaGzh0zh5zdjilRtDmlHHsoCGeYxxwt4yYFgk/AmKxCvC4hUqYSb
wZ6hKxkPpqj+0JsXgNiOFoV8vrHGwGk4zLpGm5KRnx7hQMWi3eiS4tlmFAnfULwLJk3nUFYv0xwx1TV77AqAeqjZFdxN1dvSiBtRwKIWlI6eg2GHahFV11WblVOi+kFnl1R0LeFmIgmafi8GJbMFjNphG8SHODM7iC/8v8poHBTrwQLftGs8DKpUhfywyNlMANCYNSPgBS8kA4ghphoIRUA7InXG/VS7I4BEZF2FIPyC1WtBFPwzlUp488UY8TQAlnMPwhV0+LlBwS61e5UyV0dxTvk5l19JJsumIdlrp/4rHVa50IYU+Ir8hXrOFaN/OF4kqmYUQejOLSWufZdJ4dI6G7uGPER4V4yyLs2lfK/U2UdQgIXq+EomsIjzKuhgXHjZaG2nrkM96eirHkLvypYOAVHqWMbRdiAMQDArQyKHP2dY+XFPjMJAXXAfWcdc9uGckwkjc4jNLPuzDi5H1X+FzWjLcteaVmQPZ4l0RaCFqvrWuUIlWcY+GpeqnJJYGFf+oubXBb3GkQDyZ3dDLMUqi58MptXai8ZiU93rlUvV5ZIwrkCDQAXRwZccswu4i8lIwPyQZujrTCf8RgrXXGnla1FwFIONYxWL09GDLsqNmDhV9wMUhzkxVwjJgMUvaYJ5W9sxXKGrQMWWcSrs3USoINwE3c4nV3vaJkKHsRQMsUNqhnmHEZPGQYy4j6/jI7mNi8+gXK0EM9CgbRdBnDOnwtGzYKP5qdUy3STIlXJZ5gGE+3R3zt9dt4sYhw+oJ7hLt5xBxth4GS8jlcqCys8EB+ZHuLT+3eBgB8dHiBa3NqGE629cwpmXp3y4hldgiRkEt5t6TRmyJ3RkJUeE7lxip0QRWHCMAaCVmWjErSRLQSygPe0wP1vhUoIrIA/jsAn2Pmn01ETwH8IQCfAvBpAD+fmZ+/60W4ha/CyePkk5Bw6eF9Ooy4udviM8OTCrbbDgFXfsHey0LbaJr+km11603RKUitub9BggfYbuQ6r14dJG0yWcxKxsdeXbY1VEPCG1QyFgCEK4a5CvjwTlznH/IHPI873C1jrakXZ+Uz6bmDHgKKaqZV3aS9AIWE5mgGeCYY1Q9PB4/jo2F1ScqopIflOqXi9tuz0i1st7oAY1Wi3p53UjOtZBQmizk4HI4jwo3WFZzsimH4bufweQDXw1Rj056SKmdDvfYhDojcUlcL028OtikZyTTlFmj11kKfzaahoa64LIyk6vY8SSYwKOQK5ixFofvNbwJgO/e+VYXGnQj2qOEy77AAsEOLrfdjWq8VAXekGgYBRLgW9z4ByDMpsJzqfPXZV1BFrAcZswXSJgNMMKVY9Ay4TCgDVw7c7AlpI/fzQ6zMvwDqoSr10nRdKoZkTq4Jbi+hnBw6EJRcoNb6SwNgNlKtvo15K+ra3XT9Z8rADLiT3usoz5oD4A7a70OGmZKE7Sqeitc8R+qitzPX1GyziGKQtgUvKanzg0nYadjndkxIsxHGcr1UtoDZtH2ZHSPtGOlxhL/SOmRuRHjk2xokCc3aqR2oy8HjZtxgcn6FLzIEzCq/CkHpQ7X4bEdLUUk2S2M13EKWMQREVkReYThiNjhiqBjG1+drvJw3SCcn/GVAI5Mt+9AWsmDC6STr4q3T1SoBZ0oOU/S4m0ZEfRaaLcxJIAim4x4rqf6A7DfWsGMhuf388TG+O75alZ7MBG9TpVcB5BCcTgMwm2bMjgRruGYRvzIe4SgjsmkGX/C4HZMUFO44AtgQGtaXkLYZwxirkpMzwdpWXYEV4nGefUt6blTDSZWnUrjXjQIOH13j6btlEgYFnVK7iSBiYeMu2XSz0CXkyBXO8DJscRzHqkDtzIIn/ogPbQ447Fvdytl6ZFOMHQlJsWu0IGQZ1iSMY8SxrPHRSei6nmsiZ+3EFX/rJhb+ql6BUqW+Lw5ukrK+959j7kLNQrL8keEG106JlvUcIPJIHQjb24zrUZ73Q5sDng5HXLupQkM2FJBhMKmyNGePYx
6Q2FRg+aNxwsvNRgrUK4cXnyfmbERhCgk1Xl5lecEKJmjyDlUeKM5ZlG3kxk6Od29figfqVwL4LgCP9O9fB+DPMPO3EdGv079/7btegbl5CFgsCku8Sk8efcT1ZsarSsz2se0Nvmp8iVd0kAHgLm3wufkJvkjSlWMYYAyvwaS6OQp53LWfYSjjhduuTOkeA5U96yHXlBW2UjT3lUGyTHZ2xtthXw8nQDdkFO9TZagNbeECKkjT/QWbbbP0rbJZo1cEJoPjNEiJlS6ey92BJ5gLATjeqlL5N28+irenvaT/r/il2v2L8hpOHlRKi8zlsCm7kRCjXZXSubYTdkYAgoVpvXizijexkRIRcqOoVQGvuAfFP9m5xy6VWD43On3D1SMBlLIb+X6WG5rljqyHPzULzi4ssfBFlChAsq/iYJE7fASiKmzdwWEXwatVELcVPFLe5fKYMCMhDVStluzlcFlhhp0Ab2MpVr3PYMX1lCSIvCHEPVW+MOG3UjzItdzv6XbG1oaqwD5fdng5b3EIjd5DyimI9VdwCcx6mBQMBRvACXC1nqlGPbNDA1rHYMT7ULxEkTX9v2GhSpqwnRXTtBjkSQgrvUp3O2WYkAS4WUntSBTmUlLJi5eOckucIGbAUB0HIsbTQYyiZ7N4O95yV0jGITugZvsn/emMFbaA3aZqXKVgEbe+Fl4vXkthZNf7nQym0wDeBPGEoAnXwrlUPKq9kmFi4wYzNeswV46d+myAAlqbfBRDQe5yG0a8CDs4k+qcvzVd4XYahRCyjEvSdV+e38jhkrJB3qsXPZuabQsAUxTv2ry4yuFlJ92by1qB6nnOTACwAP7G4O0XsqBLFnIBS7+6O+Br9s/xyc3btRTT3z1+GM8OO8RbX5VwY4QjyK1cQGIYnONo2DbgNHLBH3Vet13Ck6sTQsnAThbO5JUcTFkyxELHKp+jqcY3AJDPcK5jdVd6FckaU0VTs0ArFnJyuCOhZ4HSrtiTspUnql6p5/MOb41XK8b2U/KYkq/9TiWqUZ6LlGwVqIlQGQB5VCofQJXmjgOpFAReZb4WGcqdrf8AYLxmq51xmNVMY8MYbMIxD3ir40AsivNGzygp27bgqtBdDEc8cie84g+4UivJUkZiU6NPxzTimAfMuRFsPh2PuNuNWBaHpRuDVeokFZnbeQZJsIklwYcyq2FMzZDLRjxtmZpij3dv70uBIqJPAPhZAL4VwL+hL/9cAN+s//89AP4s3ocC5dUSNS8dbrGXByzu5yEj7ReMPtYD+5AGvLFc47kygScYHOKIl2GD5yo4D4uySRerWrluSp2e0gYrWR6mWNpWFmRNTWdhHqZ+MqTbFZz7POzxxekRTqENHWlNvTW5Bq0Gvy7Ge2GRVu9JQkBqdapFbmZqblDNyImqLPHYNgwA0Dbi2jdwaMpGPZWyk0YvLOdFkMzKVUUugzUUl5Dl0GgeceQsANO5U6J2dkZgi2dBPF5zcjgGj0XHhWMB8HUPnCEEdhWYrJZAZ8FV4rZI1b3MTLU8AYBac663liRzoiOIBCv3CTV3f1FKUzuY/YEAY1Ejf5a1j2smcAm1MnwBeluxdKoS6xlpEXB4nc+MtcJc5mUj4Vb5XkYpSVO8ihRolSpcrCs2XO+XmfAybOo+efu0w+00IgRXPWB9Knt1NGmWT6kxx1Hd2h35kToPUXmeIPfOtmUBmi4Tp6xVAisTO+o82UU8gDXTtXgXnWlCj8qYmjq2JZOmMTwrv40qCsdZPDFXZq6s2sMQEbZOZL6SL7LrmaWBvMnAo4Anjw7Yq5d7XhzSZp2pJ/9pir2dDOLRYeqJMfXwzCc9KA8G/kBwR8BpcqSW9FRPqyZmlJTquuZZNYNOqUpZPFK6nW+mDT5nHkslBd2/hzBgWZyGwam7T+cJJ0gGGAhBkzemxePkW2i/eKJjtJVfyARRVu3SGS4nhpuaB42yzJM7Eo6aUUf7Ca/tD/jUtYRlvm73Br52eBOvuZt6v52d8e
nrpzjejU3OWcngLSntxbval66pHiNCVbxYKQP7OqFmG/Fkc6oKW6kPelDlbokOMQpHX66Z2+odZzSweaZVrcV3I3asmXIkGWl8ZjhVqpACjGeDBFMzIR+bGWkk3MQtbjQ5aVq81EUttzUAZxY5qmcmRwHUx8HeC360vaMGaFhXCuCShVeVDKz/k7lCCQrBaa2J2em5e7vgsT3hGe3rOCWWs3cq80uipN9YebZjHPCJHfCqv8NeF7lFxg1vcVSr7WXa4pQ8IttVpv7OLxjHgKBrmKNW9CjOW2ZhQc9osimJglQsCxOAXEv0lPkuiie3kOV7aFDv1wP1WwD8GgDX3WsfYeYvAAAzf4GIPvzQF4noWwB8CwBszFUTSDOBJytadbHyjg7xpcMbbovX/VP9IAsGpmSFaUZJ77WKwSHducoTVARI2ucaT300nDAaKZXSx/nZtRXEGZJDTs0az1vG4/2E1waRiIYYd2HE4TSKGxEAL7aRe3Up8wDuR/K4mxTufoB2aBb3PUQgBmVUr7oZBJ9RrJOS1pqNrR6oD20OeLo54mqY6+KL2eAUPGZVchIr1qarg+aORg6pshgHwoINPjc8rpbgZ90TAFIGpWz2l6cN5sVhqXF/AzMZdZP2VnXbfCaU8glYHXI1hb9YW0ZCOdUSLtqO6Q6zYimdFTuVcaX6vZJhVT1eswoS9ZKlkTVbhbpadIWYTtzggFgycSHUSKrPyKNRHqiuT671sYT0ylyXF8klTWPXfaDkrgX/U5TMNKJeKyaD22VTMyxDstWiLtgPDqrEcrOo8ibBDVGKnUIUP+MVW1KUszGLYp17DJ+OUz24OuVHhQ1nAnGuwscuph4cBfcn2WVSeoLPDZWO7oHVvX5eh61yEGWDU/LYmaWG9QcXcSReh2b0uaqiexXx6PEJT3enGvY0hpE6YtrCiUXceZIWgBYtCtzXzkoEe6fZbS8M/EFCl5pEDH/KQrgau8PrHdiO2bRDl2F0v6iXKLi6j0t1g+M8ICxOyVH1Ur1RouNITOIxVC/z7WGDJdpKcpii1BpLRwd7KpmQyuw/t8wtO0vopzcKMmh1KDubkUH4zOEVAMD33j3FnzNfj51b8Kqy/ycmHJZBcFtFOVFW/aKkjF2m4EG55l6OWzwb9wjB1PVHhqQeZu0SwXlhWi/UDlsXsHNLrZpQvOWn4DCHHseq4R6dX+cSNkPAVr1pe79I1iNlHFTOLtHK+OmeM1qj8R6GUQarhs7vlhG3cYOXVpwA2QoH4tPhgI91IbznNmPSQtwxWOTZSqi4q93JAJbBVWXQJNm3RdEVGcsrgmhR+NZGvrxR/wGscDyxNRUrVrO/y3Wi1OD85PgWPjY8lznfvIrXl0d4sWwr1VBhXt85EeJPhyNeG25XrPUTe7zU2rcA8GzZ4zaOtaxTaUXRplrb0Kqy1D8DRFb1irbpDEKCnPN2bWw0hdGsLvVO7T0VKCL62QDeYObvJKJvfq/Pnzdm/nYA3w4Aj/1rXAUpCLxJsJumynJGxUIUBck6STH3HfEhsA5LxKjeh95lmQFeqLqUj3HA1eYOe7/UlPVgBKRXRiEDAmCW7kmfDGPjG/eLEHgSXKkmDghXDpNwB1VBpoDLrkBrZTmvLkNeE3lmXoWyAHH/LgcHGMCVsgPHs4PHCqg8ZosvvlSW72mD0+KxzB03VfEKFfC5FeyNkHwqbuWwJsvLgZC9xWG3wetOXLT7IUj6dnQVSL8sFjnZCmaVsjhq4XRp7hIOK941Ecpm7kJDatXmpaPhNwQ7AU4FOcV1qEPGWxZ/w5dJfXZKqIewiYqXWIDc8UVZ0yxYSnIYuQMq/sYuEn5xU++lYMQtCeEjAPZQTBTeddcJlxMQt81rOG41lDQq8/qVRQimegwKezhbBu206LLNWLLFqXBsHTdYJod8bHiYyhrdeUZTJIRt404iw1WZum/CotIBpIGUSLB4oIpXtXlSKK1DeG6SxSYs8d2gGALjPki9jpFauBImXH
etCm6W0g87u+BayWO3PuKlFY8Aem9eUk8bgOQtTpPHC7ettbqm4wC3qmtIlci0lsc4iNcsRRLah9K6xAF3AvwdiwJVlQ7xKpmzwsyrZ1cP1MrarWFn+XOeHaZt4/UCRGnmaOTArPw+sufo7DAx1IwizuLVzjVUZCqGsxqzqVjtnXzi4p1rnScW2cZHWavPXlzh7XwlBz0gdT8tw+0DPvREjNDrccbdSUKP9YBjEjiHDsIhDVXW3mrC0F0YlCS5GUWFHmDVIazLYkn5m4YZdEYY3bfe4RRk0metAQqgqwIh4amCyyolrEKyuFUs0/E4yp5TxSjPRgohL8plhqLACO4xqBJ7DB4vli2eqKYd2OLtsMfnT4/xhYNAU57f7TCffCNjjgREA1r6eSLwQkKvUjgHJ9yHPCiOc8WCz6upfLjpHq9NlfwKIg8Gb05XeD08xlN1u35yfAuv+lvMW999TRKPCsmqUBOJ8tSXaZuzryHqU/KYokfsEmMGIyHU7RBwp+d49g4cuXpPS6IBE6ojpCdbfaf2foHjfXs/HqifCOB/RkQ/E8AGwCMi+o8BvE5EH1Xv00cBvPGeV2JU8jx/Y5EHh+xcOygHBvYRw27B9U5jpdsjXtkcsVdJkkG4DSPenvZ4cRRNddIS3j0wF5DFtahr+a3TFQYjMfYSnz+VAS170bJ4P84sWAJwULfiW/Mebx72CMG2mPOgmVtkmvcTcp3z6hfn7M4rwKmGfKgTgHYmuFsruIzikciKE1EAs/BHMXjMwvoM4Dh7HG82oINrAsax/FDzGCCaFVNz2nDF7pT+spNDtrDBPt0c4UzCy6UdQstiheE9NgWqZh91wtwuVMMSRXmyC6+8NjIHDFs8UNRAj0BTtlbTlFnmqi/km4FVgVuW8bULA4cibBgxtMyppCzg/o7hK+Ep15h5f8iFI9VipGmfGnNwByLvQanF25g2jLzX7FAFnS6zRyqHDiAe111Jo1H2/M4SO84DpuBw1HIp6WaAORr4pR2ULQOsZ8Y2SAZgVxaYHlrE6GtsE4BVPTwjBJS51niDZqt0n+nnAoCZMxxDubh0rQ5mbfUBFQ9US51w540tylku4Sn5e5k9buKIDw22AqKdyRiGiHlLyFY9iqW2mxpq+0cTnu6PGF3Ei5PIDyLJhEyF78dy9WqUteYmSWqwp9b30r9yUPk7Aemer9W+dtq7tftZjy1rORwGPLN7wa+VtZqMev06hSJjpfQYncOsYXF5Xgl3Fk/dsjhEVku+zEsZ74U7Al818Dqm6rI3C1FqygqJUOD1+PSEJ1cnfOL6BT62fQlAmM9fThucNpt6Pz9EXA0zXhlEobhSiMAhjo3hOrrqneZqAOi66b28NbtQvVu6Poo35OW8xct5g8MsIVBAPGCFGb1msarhXjKNB63fFrJBLJhCYO1l7+ew+3/xPhduqjkIEXSBRTx2J3zVcKP91KK8QUONZWF0h8mK206zMMv82oXEa95BHnrladXO926vPJAaXpaaB0q/0zsKDmHAX7v9BCZVfHqPUcke3dqAp8OhFra/thNGE7ChUD1QxzzgmAacNEV5SjJGIdvqLS4QAq+VLwAg+qyhzHIuUHVqtAQtWhV/B0R3zxA4AgBkGDEsjVmD5t+lvacCxcz/JoB/U8aTvhnAr2Lmf4GI/j0AvxjAt+nvP/6edyPALjLz/q48QhOKAJC9RdwPeLaTeOpb2yfAmCXMAFQG5Z60EoFguyK9RSnJjAq2C1kYmpdsGztqoT5I7XsVoN3hij68u60ZBp8/PsbhNGK5G5rGW6y+zoIzkdYZd+jciH19sf6A1THqsRfuKNYvqJWAKQdxj7UBC+NyCVn6IcA8YaRrU5Uqp8zJi4YAQrAIk5P4cQlxdNQCdSwjIc22km2mbOBtwmnxtd5ZOHlgsjAlA+tEMPO6VE4J4RWrWrw64p2ongaS8RHB0NwPq+rietgS0IqPlt99HTpL1RoBilXNcHNu9bSoCBw9lAY57P
1BUu4B8R5Ua4aLUFJSVB2nRFhjIUofeuGKpggUL5HUAgRyoppVYibN5iqPqwBvtqiUG5PilpKGkavCmpvXkxJqFfIaJnJAHklCRABgMowVxTtv1AMWCdEYsGmhErkHEJdyUjWFhouwMVgJeVE4s4bi9DCzJGFN7qxh9XTU6Ldai9kSTPXy6j3LeC9CAltC66XFaITk8KxeWgm7DS5idFHwGRrKzlGZ94sBRqhOuxUGK3TkrigKBiquc7gVxclNuaNkYJm4bp+LYt154Mr89BZyloOgysbZVGOwyDTOBEymerbLHNmlKWxl/6VRsEoAsLz0uJ07ZSlK8os7mvoZd9JQ5NSF8MI6RM6GVmFXQIzJ4fGMj74iysDXXD/DJzYv8NHhZcVBvZ2u8NmrJ3hxs6v4ztFLkeQiv0YTMSKuiCafjzvcDBspVBu6EEte94FIDvES8gSA10yqa+WrNje4jRvchhG3i8i0U/QIqqiV+pKji9i7BYOGiI3im45xqP3MTDhmqjg4AeCp3ClrV8l8S/FpQGTolDxedNjenVlwZWd8ai/4sY0NeHNzhRslij1O4/+/vT+N2S3L7vuw3977TM/0TneqW7equnpik2yKIQmKkixDECwpoR1bio0okYEYSixA+SAZNpIgpuwPcb4pCWLEQIIEii1HcGQrRCxBggJrSizLhsmmOHSTTXZXd1d1zXd6x2c84975sPbe5zzvvbe6S2jVLfY9C6i67zOdc/a8hv/6L6pNhsNE+EYo/B1q4oU5oNt9wzz2zeDvp4lTKgb2JMHKQ0WGJU2unVmhLQ+85+xqV0jSwmBckqRjUVQcF5KMdatYcztfxQw8EAVq3eV7NDvbJqOxMS5E7lpS3WG0jaVglLE4Ndh4lZN9aOB1cgm+yHffD86v7xgO9kkxuLhd7q/Jp8j3+Pgj5S8Cf0wp9W3gj/nXo4wyyiijjDLKKD/08rGINJ1z/xDJtsM5dwb8kY91twG3j02hPnKR1wa81tyI5R1xMrUBDCgfpvO8JlLr0nsDQsZIOdDAO+GzaXxa7q5OqfOeQya2SQ8qZA/SOGNdstRymJUYetU9y1raIolFRKmMx870ZQ6EM2XAo9MOwxK9q7FLVSRVNKmE0rTrCc7StScIU/Shr8Y9ERqzGZQnCZcep5QfVCSJeBaClVdVCV1jYhkCOiUp+7uB5bmRQr3BbQ8SRqxcym7nXa0DAH+f/RMy5fxvtj09wdDdHPiZQh9FMrdwSTXwQAwsKCmBMxg3b7EHK96GjLu0t3rsAMANck1jXSTUlGs7VKejZ0NCcAL+Tbadb5v1wGfic+rGCbYhEGd2Knqghl6oYdHn/s3+OkLnYUnTisanS3eNkXBewJOFmo4ayEIyhY//ByBlqrCdES6x+IxEPFucz6X0kZ30jyK10wbP5+9lU9eHA/3c7kIh7IFnVXnL3Tp8ySE/v4fewAH4XEj9XBy8sJ73PED+vf0wbe9ZcZ3wW627nIl3ad6crDkvJrRljxvRlYTVrbdQ15mEhJomodrKnqK2iczTIf7He/MiiN4DRlRLxIXFjNFr8zv8GzuY4H32jfEZgn24ZACoD14q7z2NeMlOcGta277OZCWgb1P2YP+A2wpeo7hHqGFZKY0b0ImEfcqUCl8TuPc+DckWvec34OBs4tdpqrA+ZGfmDUXeUPkQ17cvb/GmuklmOu5OxQN1M19TtYJvCpjXEBXo8S+HbNqMbZtFks5lmYvHttIxFBZLQQW6i07RdeJ9Ctl3ndZcNUUswTPRNSfphsN0xy7v8TYhdBaTCwKeyk/ebZuxqnPWVc7WkwOXu0wwUN57HDCUIWQEfl49xfNjnYrZ3aum4FG3YNtm0QNzVRaSKBDKxjS+zFQzjHT4e3VED1SyC+v+yTDUE3uReuIrfTJL52kSzMA9S++hlUZIUsNxtuV2Lh6+xmlaH54cZi4muuMolQl2I91wmGwxOEoXkpo0rTN9jdsuoeoMdWuiY9sBSWpJdc
e8kM2obhLKxkT3sPPA+uFzBg9+7wnH42P7NeIMsokNQsLfSz5hJnLVbyiJYEHctIOAx8gsynR7m7lWTipz24Hb2rtJIxfG1ki4KGZ3EYkVQ0hpvSloZltuFWuauQzQZpsLqC+Ef5SL4bgYdjYWjeOyFVfrlXf5FpMa54l1m8bQVgm2NjjPjG1rhTZEbIJNfAjGDDLAlD8UB4N8XbTn73C6zwpLquvgc3A7wEF1w5OQdYq69fX4gjLqu7pPufX/tEReJFP5xTcoGqtrCccxCZ3iqRcCQNlLCLOEZxqyMMt7gROnb9uzRCZ/fz9nBiGOYejj2m/6F/TYmj2OEB96CGE9K88V0+w907KuHDqksra+IOvQFe4GPCLhfqE+no1veUXAP6jHq3S5Q9+QHf/4cEOetHSDdGmlJBTbDugmms7sucS1dtS+rh0gRZn9QTjEk6nBeEhbRNm1IXympUJ8nrbx+rtScHMxLAiCgfLkmkGMgaQabEBKyxiH5noF1iZqv6CyI+LcwqC46+MU+vQanis+Tytp0q01pCEVPC25u1jxANh51u1QC+5oIdiau4sVibJcVhNOjcAEtkDbZuiQTJJbVKelUsEwMtA6kqFB4BXhnqIhzC3XhwU6x17ojsH0vd7ewZx3Phwax7IRZSPJbKzrWVmFLY1ECIf0ScN+6/obBlZzO7GoSRtB5LbSmI1kdrkAE/CHTwilgjdYlYrZilKw1mcvBjuiTFhuE1aVhHNUJYalzRzvnpzIGNy6Ylt7zJ8/9Fpfa3CPHNaHzEKyUNWk2NqI8hSUXT/fo/JrwXaGpjMx684hysraKyZrMmqbRIAy9KSzaoCB0oO/w3cAX0Tad5N2kDhcxJUS+QBd4HELaG3lYlhPeyB7qGV5w4cX120eqWGMOtjLKLWdxnrKnH3IBXv7am+MhocMm0C81BMKe/8BfW6eD9UNMYzXf6NaxaYUQuUvLd4H4EvFhyx0SUYXaRoM7gnAeOlSLrsp+Ky7SjckqotUFqnuSLXBGRUTJzqrsSgmpmWeyh5aT4S6pcETToeswb1QY2hMaNu1pA3C/qKEzmC4xj9CPlkFSvVasdlBfqbhrGeY7gpHO3N0ExtBiDrrMImNmREoySBxXb+Iwvzci3f6DSAsrGaXcr6dcJxvY9aOSSyN6zOp4sajBJANMJ+WnGSbCPZrOkNVpT15Hn5MFGD6BUKCFPb0MVebysZjhlpxUDgG3ojrB0Wc6IOBtEbhsmtp1zoUjRx+0StX/r1mgiz2SBrZH7yRA8gI0aMegD26HJqFxR3ISZHPPINznWDLPsNPVbpXKJ4i4XB52qJ92t9xPFGoZJABZmSSDy/zBI3BAIQ8rK4um8xAQ3YDb4F/RhWuc91qHF7LBetv8NxamIuDxw8tfT/EQAGQWmYzmYO3ZmsSbTnfTWNmT9tp7xUaePqUwxgXr13XorTjvYJ6p73yu+/1sUmvFIL8q9reWu2sIk9bbs/WsWD2A+XYmZxOJ7gyaJow5NkSLidv0fndxgzmcejLsG8NMW57cz42sP9Td+J9HGal9sST/eJprGTnhALaqzaP3grjjTKlHVnWcjSR/j7JN7TWcFUXcU9R8VAKD0BcU8O5Y1Npc2+oOW+sBS+RJxwcWujPWA4RbzF8TY8/wXN3RdqXWtFsk8irEzszcVjj0EH5dn6P/SjjQjnhwvPJNJ0S5dy2DlsGg8+DwZvrSrjrM5WdX5Oun08sk4h9lOsgNBJ5r/g5iJUKniUHyY6JqZmYJgKRd03KNslxWg+M7L7dIGPTVYZNnUbFSysp+/Pa5AKg93x4pmuQ7OrGCT424K4SbfciD5UVj+dFPeUsEyXnIpmw1j2Du2u195xaLL0HWeP6pJL+HzY+/KCbKTfSDa8XZ3xuIoWYTycLHi4O+HB7CMCH6wPOmdF1acQdusZn59rBRUPXXssmuz7f4mBckz3vvE9s2kv6sIM10CqqMuMbZy/xcCtK82/PXuZOvuQ43bDQsu
4WpuTI9Hinjc1ZdRO2No9jUNpUygr5myXakpruCf6tpjPkpu35DZOWLOuwnSckdtJ0awcu7Lg3Da1vRTLoC9UQxygSlj7ZPXvyiXugbNZvyGFjD6nC2VJc2V2ug1JKO3HY3EUSyaB57zFaD2r+ALHuj4QC/PeNpekMy7qvxxQPqLj4RCu1iYNcBuPWbMPtbMmFtwpmac1V1rJrTZ811Sh/uPR8Srr2dAADy+ipo/GUPWSPgM3IZja0JJ3nrBlWd7cptFNwC8+aO5cQnlJCwwAwSRuMtpFHpqxT1puCdpNivafO7jwodXAQd7mQdkbuIKfQ2gnwOGRndNf6MoCl9wCM1xf0M8+XPevBac8jEkIHRsj3haNExWsJMLe3loeAyPhsQdu9dsANqSSGmWfyM+VDF0SrLihaMZTVqR7MGp/bK0+hlpZ2kqXYqUg4uq5zpmkdrW3wYYGBdU4qNbhC1gmAs96ICPdKHR2SDDDkBJISLIi30PfBkElYPDmyaR16tv1uoTk3ll2a0/o57jaJ9zANFLRBaRqQtXaNSHqPewX8Af+90oQHnsvhd51WcY2TWp/2bmMqOMBZOaPtNLUH11ufKPG+P+Aer2c0TUK5ziDQRFQSwg7P3hX0lCPh9iExJQVf5g2cT+zw3+npP1w/TxJhr1fDuWplTu31E2pPqQrA+0gQaZCDsdZ7oOkh4aq/kP8g9BmRVX7I3t2tkz55xhtRapDMM/RADe8l1/R7qlORCDieU8bRHlrSQ/Gs3Dpac1zsOMp2zDwHUKo7vr28xWaXRUO0yBoSZfdKQy2bgmVVxP1qU2a4RkJYIfs1hNFjyZ8WqLUny/SeK8/dt8vk2jfTjlvJikI1dL6jtjZnY3Os0/E9EDbswEt02Ux5XM1ZVkUEdq+3Oc0281kdfl1ZFddeeCbVETkGQbxYGheB7o+qBR/uDilMG0ONuy5lWResaw8irzLhmxqcffi1PAxvX993nfL3dgNDBvXMddgbid5rptiLCgkXWj/e2nRsyozzCzkjv+1uR/qhwNo/yRpemq+46zMxb2VrCt3s8UCF/S9mQSpLoiydtnTXlCitXCzv0qWaXSoZi0CsSyv8iP3eJHVm5fedlb1rWC9PdaDwBMFR4froveoT90CFQ7CZQflShytsj7OoNMZPxJCiHy346NmxMXvOhknUaNyuX+kykeSQDN6XbNKwKCq0ctFKdU5BYrFF+J3n29Gg/aFXmJZtl/PQu6Q3TSa8UDNHlXqvVAjfDVJ5rAOsCrVI6UNHPSYmWLhhk4xstarfbLtMNsA9/I3PflLbfpXYRFE5hU8cY1rUTLNm72AGiVdvvKdDPGk6LkK4dugGcYEDxxdN9a5SCeX55tnelRyuo3zq7BAL4y/n2zsIdwxcq5It0WPDAqeHCfgm7Z0eXmkKv4vYmfBa+4NquPitYIBiynw8FK55pAZ4PYXDJWo/XTrE0wfKd/RERqVMQUuf8Ra+p1xkC7dOkWjLzcmak0IUgd1ByrrJIseWcyqWkAhhiV2dUgJdYNyeyHroak9giscROhmHZxr7VlE1KZfVJIYotnU6mBtBERCqDBuwJq1C+8yWcDDrVjHMhO9yXxx7EAYLmal6yMQdO+fa36ofF6d8Fo0vLaOzTjJBu5QmkRu8nF/CoWyuD41gAasqxRgba2sC1F6/cREsJgPXh70GocuBV814MsK4Xs3+nLNJgA4ovwGENnjd3a/ppyVCCXqgP6nCXtAF5vuiI5k16AGm0dZGWJhNbzyCzDs32NltCs1ciIUB8e4/BQOlbD+/rVe6WgalpjxeJIyn9ZhNp4XGAyC/ueP24ZovHQmrzcvFJYdmh1aWwJW9tRmX9YRHxZzK4xpTj+UbUg08Ws/3lCzbaNTOYHzmIfSEnwG7hQXVeJ6rQPfgFOflLNaIfGN1J/IJRZyTFdyOHUzC1moaa2IR6V2dCjt4Y3puvdJIRm33rAWGN1oGOEYgSzrmacXdQhSKVHU8rhecVV
NWjXD5raqc1S6PtWLbSkrtDItFyzrx8/e611H3c0m5a54kx5PemeE1/QdB8bfXPo9E06njYFpxe77mfOcrg5QZbWvoOkXnz9qmEbb7cPa2M8Nrk3PmpiRHBlP7MF/wyiW6Q6lkL6wKveMj8zhQrcq9Oqwga9zZHqqAVais5/ELxpAd1PsM5aOEO6//3UfJJ+yB6ge1K4BFy+ygjK64xHSkxmJ0zyAeeDciaeaArCak42/XOa5JwbufdSDPS4iTI1gj06SOMdZTX937ujcC1QPLLYrH9YL3N0cAnK1mdK32YUW/+H3hStupCNCWGKwm1AbVlepBsfE+TxkcfwiHdEubKdpCNi/jNwlhle15PvowkkF5Qrv1puDqavoEwHFohWCFr0O1iiT0nQd+R5yWlY3UmWtsx9YrQCF7V7F/7aBEuME69eUq4gFkg0Ldl7Nx2itPuZQ8Ce+hHLbqr6PC4h6Sn10D//XKUf86gGB7TE5/QPffEcUvYj+0eBGGIR1nFF3aKw8ucejEYrXeuxadetLLNrHc9qSCP3XjA17Kr9A4piaUNOgPGoDzdsauS9l1GQ9L2Vwfb2c4p6gD67jVdFp4gWL/Du859EhceyCjLblpaf39Ak2H2pm9kKyyA2Up8RvQQNG9niLd5Yo2V3sYHVM7sYWuPYNTA66qvff755bafN6CT+SZrdO8tzsG4KFekCrLjWITrfjOamZpxUuTlTw3jg93BzzaLrja+RT2bU6rM/QuWKtEQ2qIgeoymV89YFsOpOsYKJsM4HpRkVJPtPmJdg6pDdS19ZVZJtOKWV7HvXFZ5qyZ0m1MVAad95L2EAB5r5053FwefH60I0tadt6QKreZsJBv9MCQkJJEQxoOYVQfHJ6ZjHF9COmxzN3Xb55zlO+ovCL0W1cvU3sw8dyXmTrOtmzbTLBJ/oCqmoSySyJlwCIrUQvHKs+l3h+wWRcyp5/mFIiGnIyJlGCR9xJtOcp38f6t09Q+GtEMmLLdtYVx/aBOC8s0a6g7w8YXki91Jpxj4aD1EAXXKlwodlvqaDgrb5hP0oZZUnHoNb+byYofnXxI4xJW3qL/oDrmvd0xH64PAThdzdi5HKcG9CJKWP2FxHZf+R4a6likRNmAw+tZ8oRn6jrFBoM9RIt36ffdeJvXMqFfMMpSWsE4Be9SZVNWXRFDonNTcTNdUajeUl/okmLIaRRuMVCgOqvj+R0k01Jnr8lN/I7tNI1V0Y4JnTLEBopx56JBIHuUP8gGtf4+Sj5xBSoerB2oy5TdZRpNMjuxqFlLknY9U3IAkXe9FRIrt4dZUBmSnRa+JCApidwnoTRBu8w4S2YssoqjXCbtJGtYK3rm5tqDfBNHlstA3ilWHKVbJonXkrWlqlOadbZvSipEyw0hO88tZAYAbuWVnmEtKYaHSbC6B5a3YAgkAhPcjzawX/vvhNp+bgBabzYpepWQrXuSSJSLhVpj/13f1L1F3Q0Pjhzaue0tWOVQrbjSYx8oPymHBJwG7DDsAh6Y3FvnIc4eJFj01itR4VpDMsgA4FZd79tR7tq+6vrvhtUeyDat0dGy7xG98o/uBEDohiBDRwRHD7P+uqx3CZO4yFG2tzkN6tfhWbLzecVriwsAfnL+HjfMmsftgtPWW55d4TN0vNFgjWS2DArAdlb3cX7oedGG3hOIHr69xAEFw+y6xHS8MruMm1SetDxMF2zTfMAorXw2TnzZb+IDPrLhvYV4kzg3pH+DB9VFriT5oB+MHi+l4gtJOlG0fg7OJzUHWcnNfM0jr1S+tz6mbBOc60Gn1sEyyWPpDYCLcsJqW8TizV3tQ1htPxlC2GWI5ekKCZMHMLZuJGM1XfnXrdoDjftL+bnpBnPSf/ZEltSgWKy7psApGeu6NZhguJkOk3V0A8vaAmrAhi8ku9AedGRT2cPmRcVhXtJ4XNiyKLhMprRkqOjtAeuLEkdlzMMIhngjwV46JhM5CKsu4Vtnt1iuPL/ROkG1GmccZi
H3f+nGFa3VbDdFhEFsgKush1cUpo3GblAY29awS9K9+RtKeETyVo/v6zqFC4lHTjFNaj4/E2zR3fSSIyPe3o2VeRFYsMvBZmVRe8VtV23Bus04r2acad++TlPVA94xpWSdDz0XSrzwNnMkeVAQq0gWCnC/OeLQ7DhJ1ryUiFdqoUsOkx0TX+vROsWDOpF569dTCLM6QJv+PX9b/wYx+/sJz+o12StyHcLoti/f8zRPdtkkbLuMe6nsaT+enXGiE3KVRrxcS8eVrXngD5bH3YyNzdnanC7u4i25bpj6rNqJaai6hFZ3e+G9SKp5LStj6IgxiaVLeohJCOfF0lAhPD6Yz6H91zkIP0o+WQXK9RuC6iBdKpKtInBpKaexJokNg16LZrhX+0kTKBCc8XHwkGI9SN0euuWrTcbDfBEtillWczlpIr2+88V3XWE5mcsC++zklMNkG4k9N0cZ59mEq9WUbikLS1U+xd0O4t4eRzTM7roOEN8D1vo2OuUjCoOQVohhB48MOIas5+HfdgrWb1LptKHLO+ojTeLxXHnRMMmaONHq1rAtM+ptRrfxKbyl2lPqnAGbW8xBw4EHPs+Lis5q1mVO6Wvf2UZ7QLkMsEn6w3OoQGnNgPrA7SmNIFausKwPvDtaPB57HgonkzxajY69yR7wJMMwW/iOcvsZgPup6K4vuxGVKgtK+1p6w42x7yecxN4Z4OBiUucQ2O+kduNlLbiK+/URp2rBO+UJb69uAHC+m1I2SWxblrRMsobMdLF0y3qXC3t5oNIIytPAYxQLrXbghkzYbtDeRrGrMhqn+exELMiTbMP92QGPdovoll9uCupthg2pamHvahUmZGWZfn3ufccOQOxtTyMxzPQM5azC9yUJoO83a6AtwM7lQiezLYfpjuNky3wmC3+Rlry9usGD1YLdzmfheY/1o6BQ1OKl1VWvVJoOkk0fFupy2U9C+aXQZ6b0GCh/zreZQ87fsOY9Jq5hf+MNOtX1wyd4e4YlXgbGxdBoQEsCwaouomJgkk7miO49uKEO3l5CTeYg78gCTYbVnG1nkUi0rhM6X19tqBwNk1ogKL+uz1BGxbGsfFr//fqA6mxCspLOTf11bNrD8M7zKfYaDqs1Ai0IDN91a2g7LYkqwWiojCSqDDCaphaDOZZdahxmp2gbEw2apjPc3x7E8Ny6yHm9OONWsmTh3fqpbTFYtLJ7XpNlW0T4xuPdnKuqYF3mVH7fa9cpemMinYlLXNzvhjQGCpnjIblhmtQcp9vIzH3azPmt3T02bRZD6a0zbJqMVSVK3nJT0O0koqAGZLk9gW4/Ttf3w6fhTb8nFjFcY7AXPqE/KcHS/tKjz/Lm+iYAn52d8Xpxxp30koMAItc7FronsZ6pmlKlaGVj0efGJVQ27QsH40h0n8kXpHUa1RmSAdhf03upjMfnKn3tgR0D7ZLojf+IXIbvKU9xmo8yyiijjDLKKKOM8lHyCYfweo3XGYnLd0XvWUm28h8NUbWzxrvvA6DYc7MMwZsx7DP4TgA7BjA6s5a0aOic4rIU6986RZq1dHPv7k+E/0ZP2lhAsnIJj5oDzmuxxDWOO/M1R5OS0+CVCq7otge2S2ZLbxnhFCZTtHnfDYGras+7FtozIPyC3qMG0l/XwedOeVf6odzwtZMLTvItuWljGZqDRGLMV21f7fqD7SGP1nOWmbzXbXurUG4MKrOkWRtr4RnlSJMWNXExFbyuE2qnsMGbZ/q2DUHEOHq+nZRoZfegRAmNdRneupe2dbb3ZNlEoROFVTpima7XGAwuWgnFDUKmweIYjMEwjChYgr7tcv+Blyv2S3ju4NpQPvtmQKDX4cH1feYpDuwm4Z3iGPD123QrWULtsPim63EtTqrUd1bTBACxVaEGrXwnteKivuaZtEjGZJ/d5T+KYSSpK/f28kasZVWYhtomFKaN1AZtpwUYGmgrQrbjNQ+FcgOQcZgDQ4/IwIMcw1XXQ1kujJMaYHsUNgft6w
MeZiVzU1HZJLr8f3r+Lp8pzvnm9A4feMxiAAAHb966zNllmZTACTiHVmGb3u3qksE+M/CAq05KUIWUfZsS8SXhO0/DmO21y/fBfphXxY97b/RgXgMqEb6upjM012EiavCcCVjVUzLY1IkHOe9iNq7RltWuYLuWBeZKg2p8kdphFqXe74MYLrpGRpjsFLttGr+kak1XSKe4wqIyi846Cp9FmiYduzL17nZ/q0S4z4Z17MpdJl7tOE6ecmWYeeiC19h3hU+Csa1GByoL5dg1KW9fnQDw5sVNfjl5ncx0kSLBImtsiIVqrWClgqeuaRKaxmBb3RdNr7XH2/rnzpzUdFUO68vNaJA2GBez0jLdMjclr2enAHwmO+UNc5dvbu5wvzoEhEhzU2bRu9dVRjzNMKBGUX19wGEIb3g+xjWo4lrTwav0Ud6X4E3Wiiey0cK+k1iypONqV/DhoyMAvmpfQ2fi8Ux9e+dFxauLS77gw6g/Utyn0A2Z6iLms1GGjh660DgttAbKRdyT68SV5pyi9d9LvOcwcIilRkq9aG3pwv4c11UIRXrv7vVz1Km9EKz6CK5CufcnKn1mUzd1qLsli/muB4NvMuzGY6JCEdCsw6Q2YpJCtkbb6X5ibcSN2gUmcocAPTV03uW/ONpyMt2hlIuA9FUpYZAYBtJ9x4WN983NLZZNwTsXcuDVTcIkrzkoKg49hiBLpC5c05i9jJGuNIQ0PGUVna9RFTFJjWNIpBk3YNPjf7oJ2NztZcYNKQ2AqIDYVEVg+81iw518SYdm46uZb9qcyvbho3Wds6kzyloI6uSZPNNtUKIcuFRTthIChX4txVqCILi0WmOGddgaec5hKA4GiqKm5+wKClQgHB0Ud8WH/brMz51MWJWVYq8GnlMDZUe7/r2ArTH9tWO2Ef1BDj4FWMuzR2+vV7okvBju52KoUSaBlfkT5h4+C66iv5l3ibczFzNr6s4wTypenV1yKxOgc+41sCH2onOKVVPw4UY216YzgpMxg0xMJzULnd8RlE+mGIZmbNhcQ5O1HDCbOuU71c29aw/H2lotpH17zPM9ngmIOJSg1HaFx4jZ/kKm6nlr4jMYxRCoGtjcnTeeAJ9U4Eh9GKpIGo7TLdsu452dhD63WcbtbMmX5/e5WywBSQVPVcfcExZuu4z75SGn5SyGJ6/WE2pTxOzFmAGXq0FYT9FN/JkfAPEDyIA0nPh7NzB84th7eSJbStErTEPbxbm4dkzWcXMu4PigaO+alKvNhDJJe0PRKxeBD86lDnJLnjccFLJfzTxtRkhAaGotimqg6gBULtr50JhzGlw9VPKkXbomHuyq6FAnFYeHAoF47fCSW8Wag2THzs/nR+WcN89vcrkNiwfyomaW18w8OeJxsWM9kUzUrQdsV+scV/vMzmEyQyJgYPBGiwMGGYVF2vLSbNXzhdU5uyblsurvrwbKWyxVqhxGWw4843Uy3fkae5qVT0DYrnO6zPTh6qwjzSS0GihAOiOZek73YabUhwq1nzQvJVfcmi/5QvGA9xcyn9+tTnh/e8R9X2PufDWjUhluGKZvtdQjdKD8JppsJTtSmX7cAr/h0Cjco3gZitr/exiWj7CToNjnljsL2bfe8rUam/MCu0woXUEZEh5Sx8PDAz68IfvX8kbBl6YPeCm5wvhFUqiGqa6jQRSKCg8pCxJlI/npdSxUgKYY5UhMhzZWirIDLtESXo3KOAODZzC/AxM5A6P7I+QTpjHos5+63DGfVrx8sOxTFG+IZn6YCnklCFq/0E2MgzY2oXIJF82UdzdiUbxzeczl2RynpDmmUr3V6xeWUY55VjFPq8gFtSxz2jJBBTLCRkBmVknKJcBlPeGynIjFhFRFL23BZWoj1YHWVrwEneoVkVqjK72HiQolZobsynuxao/PcapPX25nDps6zFaR7OSLSeme5JVSoDvNlbcq316e8Dv1HWFg33nrsO29JOF+KKAj0kfo2oNhB8VIbQrtpPdsyM
EZKAj8pfwii+nFVV9OIvI3pf7A7fprE3SwQR/ITQZWdbCE99LH1VOTGPfE7e8FQxBvVOIMPn0c/6x+rIb8Q0p5AOLgmfAlUfwcUJ5ewzkVSUhV++QCdEYUgVuHMr9/5vg9Pl884tX0jHu+2OrUuwFWXnv4sD3kUbvgzeoOZec5ceqMpkliVqpz3vtZ7Rd0NqXC7PpxsYEPZmCxm6TjZLqL+KqzVUZdJTEhwDdXOHgGc1c4b3hi7Po09552IBgNbhu8Yfub99CzMbzn8Fo2dWRJ36G5bpibkre3cuC8cfl5JknDcbGNm2rAk4QMLOs021ZwFkFJ7Dq9xyuHcX1B1nBgJNAsHM2h7ddBqTBbRbbq17gNpTwG/RTSyK/LdW8T8AQuMsx5baR8xc1iHZnXz6oZTacpdTFQRv2WEnB3qXh/sqSLPF8vT5a8Nr3g3YkYhfdXB1ytC5p1RuePBBXTagc4LK9M7Xlr8e3zOMu7ty/5zOKCnz58F4Av5g9Z6B2p6jjvhFri67tXWNUFq03Re2m0IzVd9PxPk5pbkzWt0zzeye/ed0c0GwHI7u2h/tliH7ZAq3Bpf7DemSz5THEOwEmyJlUdW5sPvB37/4JQCwyLGTfOsLUZj+sFb3m84gfqkI2d9MWNO00X6AKuFzn2Hi4Qj9e2y3mjugvAW+o2J2bNgSn5Qv4AgJvJknv5Id/KXgLgG9zxIHKzf+1gFA2z7r7X3vj9yveASanEcm96xY/O7/PlQ1GOvrF8ifPdlF2dxv0pJLusvdL61vqmnO2qiYD+VHUs9C7SkpQ2ZdcJW3yfeWdJBv0YxA4SbCRrT+ZU5C4MZ9U1TPV+Y57x/kfIJ88DFTaERrE8n/E7V5P40EnWcrTYscgrHiaSWRPcckHrbDoT3a2h1lHdSvgs1MIzpViONnO0fmKv1hM+AG7P1ywyD2wrKjZpgVv3FjuJuEennjfmJN9QmCbOo8t0wm6d4zYJ1h841mv2qlMksYYfkUwTRKFIN4506zB1r91K2RTfPV6Z0l0fmwlhSJcODm//vSH5o7JSv8ptpU8enh/QneckS00WFRZ/iIWsPd1bJ2bXHwJ7RGzhXjlx8gXSNm3VnifD6b4tIZwzDFcMFZd4nWuZiJHvaqD8xL3CDX/nUK2N3iPdOVGEBtdGueh9Cv3knIoAWXkTFG7fqr5mbSnrIABo98KRqr+OD6kN2cljfb2g+CViONhZx82pKFA/MnnA6+kpqWp5rz0CYGkLz5LcZwitu4Kzeh6zyTZlJpXZA0WFD8EktRzq4JXYGqGm8K7obiKpvXEuNGA7za1izc1DoVZ4MD8QwOyuiIZE0xhalwg9BwOrdE+pBFwPaA7KcfA0ghgGOgPQGGv7/nW9ByvW8BsSWYa1MUhh3nY5n8lP+eJcOIfubw/44OqQd86Oew6g1leuDx61TomXdWDF652muOxDr12u4noNXl9rZF9pFkSyWjuDbmZwxu8fXWCxV3HsnRnwXQ3m/nWvU8iSIn7Vl7cJYfvWcLqdsWtTFlkVv5cayzDLMYSpbSQ3lRIwEp7qB+og2fGaJwQVMO4x543BDUK0AWAbl4Yl1voDeeZ2omgOHMcnMnd+4uQ+rxYXMSzzte1rbK0Ao4OhXNlEeHsG5biqKmGTZTE68NjN6KyWSEOYg2WCrhVmQJxpdrLvDctcJaWsB+frRlZNwsPdQVQ8T5I1n88esdDlHpHm0hY+9d7fzxlW3YTTVhS4x/WC02rGWTnjfOM5j1YFrJJogLrEYfN+TYI/BzqBoQTw+arJWaYFZ56g+cPdIesmpzAthc/4TpSl7BIuKrnXxWZCt07QG7OXHBXERBofb6j6LFcXjKa9BKZnaEbuGX9fC7f3mZmKyiZ8MX/Iv7L4GgDlDcPjbsZZN5dSLcB5N+OD6jhCYYJsbR6pCwzC3h6U1qmuY9WQUDMwKEr2KZpO4r+Tmk4y8YztaQiCzXZtP3mivU94hz9ao/rEaQwi/44Fc5aSrv
vsOWtgmc+48jHbKEPN0HmvQeqwEz9BlCNZGbKroEDJALeTnrG2WydclnPKOuXl4ysA5mnNdF6x9l4jq2XDSycNNydywL1SXArOwrPobmcZp4s59y8OqC/lMAtZEaobeG6upZOrMIG7ocLkGCpCIWVYdf1hEtrfZY52tu+S3eNz6nprH/qMMPQgFKYkJBDK1IQLqNLsUfPv4auUYKu6qcMWfYNUo9BlX4Q3PHsoKeEUkWhvyBDPAM+mOq+TWQaYDQn7uGSgePhUbGeGJxB7WUsxw3GQfqo6CLws4b2ADYhRW58Ntmf5X8M8hTi4aRxPkNMFRaQVL5WQsfZ9cj3UCqBaHdmF369PeNgc8tbuJm+tJIR2uZtQDUoF5YlsCEAMHeyWBWqV9DUMa2LWZzj0TQ1qULw5PoehJ5G0cjCt25wvzR8C8GpxzsVsxsPqIPJOnW5nXK4nkrKNPyiMeCR6z9GzzdUwll0ulrgzREVL1quLIbBAyKo7t8euPGTHz3RLZRM6p/nyRGpw6duOrySv8875MU0tkywyugfSyFJjqn6dyrUV6aZflxA8qH140iaSNaw6Q7P1e0rhYMATE6oGOO32DIUnvKvxfT93/Vwe0hrI+htwejlYbQuu1pPISD8vqqhc9KFV+sMCOXCcg6pIYh1PrQ44NT2x5KrOqVrB9uiBAWhqFT1xYVy0pzaAYBBAu7CcTMW7VduEX7l4nUcbUTouVlOaMgGnSCcyEY8XW9ZlLhx1XsloVMY6sREjVJYpzSZDlX16puoUZqf8s4Vxcl5pcvG1bpAxDx5Gp/hwfcCHawmF/Xb2EnemK06ybWS43rQ56yantiZmgTWdYVNnkfOprtKIdQ24LLOVYs7hGbtCoULx9xDKH5RYan2Y66qesCuyGJqyTvFwtWC9KWKYCSUKSlAyqTVmbUh2/XgHbjaXuD6kXIvyFA244EwM2cX0c33P+Am3/R5hq6E4qzgtZ7xV3eZLqRgyX0gTvpBWrOyK0s/xysGD6ZQPWvF6Pm4PqGxKh9qjjljZgrVPc61sgsEyMU1UjoIC1Qwe3ChHp9VeDcOAGQ38j11qpEbeoAZgOOee8NwN9+yhwfwU+b4UKKXUEfAfAj+BLM1/HXgD+H8BrwNvA/8j59zF97pWBIOnQixmE4nZAmRbSB71EwAGmm5spMS/24mmFo8hNpNJGugQgofHJv3ERgGNprwoeM9PyMmkxlqFmYSaUL5TEhtxBu+XRyzrSayQPUkaXp5dcXOy5sGhLMjL7UQAj42m8xsCrYApg1VgN0q8FErSbkE27uFmdx3QHMQljm7iKG8Q+2SoBAQL1BmY3BR36PF8S3kkwzvz3rRJ0kjdIL9oa5twWU4430zZXAouql0m4okb8O/YSUd6WHEy9xiKrKa1mmWZs9uKItCWCWprSNbe6mo8p1a2r9ipFvDrpQukj67HEoWwT1f0CpuUJ9F7pWtEEdq3DsQr5f/2fDyCtVHx8y7VNNPerNa1L4jrD0pt8eRqSrAF0POODTwCYQEGTiCC0u96z5P2dcSiAlPK/NWN4YPjIwD+a/MFlnXOw8eHsBqEWpULjBCsEguZRSdWyrcAamswO7HGQzt0t+/RHHL4BO9hl0l5pOjKdkCj+c7jmywr2bhemi3JtJBUhrkySRs2aUY1ZASOntDhwa/wOQqC34u1IP17uUx4axTKu/VVZ9H1gNXeCSmg6sBExmnxkoZQwDwRrMR3q1sxFfxLxX1eeemc3zp4hQ93sjmsvccubK6bJmNZ5mzLPGL62CTYREfONpuKotzl/dxpp4qukP7Mz8I8FMqNaBB55cUONuW9ftqrmcfeRn1duVLOSXTUhyCStKPIGja7jHK3j0UUw6B3c6mWyL+jrEALmmkSQ7TOzVhVGeutjHdbG6F32JnYByaUohoYfCAeRDdQhrtC1mdItf+11SusHiwwqwCLgMyzt7dTuf+j4xRnFXpt4mN3RigLjMf0ZVmHta1UiGrDPOk9vn
ZocCXD9hONKwZ8WYu8irjWx6s5H5wdineyDaE3FSEOfag/jHNY464/YMOWYEK1CH/mTDuMJyzt1kG5TWI/BvzSupZSNa9PhTrkxw8eME8r3lsf9wSvlQ+lDylIVCid1I+3cH253sBQwaAM801F6pZgEESlNP6v77vBrZBC63ZvjevW9X1kHGWb8g8e/yi/evUZAF6fnnE7W+6FP0OoLvPv3TBrHruDiPMEYlmXoFCJ51LmUeDCAuHFS5yOnwk3lMLq/n4x6SbzynCW0CWDfe+aohTaG6MfHd+XfL8eqP8A+DvOuf+hUioDpsC/A/x/nXN/USn1C8AvAP/2R15FDRbfxJIc1fCyZe0zONQ6IV2K0jHE6ewpFN6wCFiS8J5uBnWjwkGbgPPMxcm8wSQdbZPQeit6WU5lcUYPkPxut8p5x4qm/P7lIeU26wkFjSOf1RzNdzEuOy8qtLaCSfFM5F1txAsTNOfWWyeDzBNTXbMAXD/xnQrZFl6J0APeq4T9IrUB/G4c9w4F0Pcjh4+4kW7IdRsncuoPxZCFt2wnTJMao20MeewooB70iXGooiNJbDSQW6tJTcfRpIx9sE0yKptjAx9KKHMxUDKUVWjbW2bDMNowQ0nKg/TtU7YvsQM+S6/QuNrte+oGh3ksIaP693QrxU/baX8t7YlFI+Fp4KZy0OV9uEp1vnBqmJbBkxVIFdMO2/pjy/edqXtCxtheJT8ufaj1qirY1ik6segbnpMm7Ug8Iz8QSSHbTlN5a7hLhKAzNN8Z2ZydjnoXDDbeCNLPJUsokntqGd+qTHnH4zrecTfRaUeSdfFA09pR7rxHADxPkhLP1wAv51RQkqCdSPhZxjbMC7zHyg3wTaovheMleGzDhm9KwXQFD1jnFIdmx2kz56uXrwDwzfwOrxSXHCY7zMSHb/KURHeR8Rl6IPl3l4KhfHR+QOPymIQSvJTNVmF8PzULqE6seL39IRg8WWHugDc4BvNEWU2smRjDAe6JzfuJ117hCnvaJG+4e7CkmRuuyp5wsmySqMzDtf0EojLVtnoPbOucivugDZm3XR+Stwlg+/MGhFxX9l153RaiCKpGcb6U0EyzyUgvTI+58wZRyAaUN5VgRGs1ONAExB32k6PpjvyopeqSqFBcXs5obSae56Y3pmQ++fCNPyxU228vqbG8Nr/gZraObXlYLXi0W0RPcBcO3NbsYWnSpGOaSmOKRLjY6s7ETO6L1ZR6k/VGcN6RZi3OQefrvna594pZopVeNQmPd/OI7T1Kd7xSXMaIB0jtvVWbRwzYw6sFOzUF1YegVReSWVxk6RfPf++RkY56unc4Kg1P+V7wirrrLPqDM9kkwvL+cDvnjQ/uAPDL7efQqdRhVX4807TjeLbj5fkVAK9OLqJSFJSoQyNYuWAQWadYOxmfwGCuPbAfm2L9GVlZQ2tNLANUdim7NqXqTAwJW1+mbchEHipuRGdN4AAcRoCe7VQHvg8FSil1APwh4H8q/etqoFZK/QngD/uv/RXgH/K9FCgGLrHMcvfGFa8fnFH7CbOsC7aNdGZu+pTbIXV73RnKNmVVZWw9xX+5zmgvJBwI3l3qPHbIx8GLSc3txZpUdzze+Era53PY6Rjmk5APdK3UlQNQicV5AjcA1Sqay5RH2TRemwBsbbUUFkZCBUnVh3NivamBCzxUm4+gY+990oMsO5c6CXeWfagzMHVHqEkih5TLXayVVtuEh9UBlU3Yem9abRPWdc7KA/maztB1mqYxwqwOqE0SY/Yg88fmht02YYePXytpcyhLAIhnpB2a3b7dISsmvG37EFMkHoToOXLaF17VvQUY2IajApVD2yqM6l351+8bSmeEor+xMXilLnhCDdhM0YVQUd0vrKAI4PSTlP7h0IogX9lVbOKi50oU+QFDufcSthNIfDr+K4tLDtOSRVryWr4Pcg0syafNgqtuwge7I968EiXnkTugs2kPwm3BNYDt54kwCIsXp7u+AQbG+MQymV
UUWcNqLYdCc5XjdoZau74GZWolhOAB6qZSPU5pgL8JXmXwm/rERUUafJixRazqMLwGyPWe1zmEsmPGm6cECVXvV03Bwuz44uQh727E2PmNB6/w6+5VJoO6d20nikOg4FBKcHBVa6hDjbFtIkSadT8nwnodht+cAXVQk/tQlLWKpkpo1rK+kqWRcE2qImZEWZ+tV7tB+SDNPgfFtb+hxwv6t/O05d70ilvZKh44p9Wct5cnrNSsH9qhZ5HBcjCORe7LrczPuTfT3J+Jl+50PWOzy2i2GZ3Hczmj0Jkf4xCm1k48yrPeM5ysDGan+0QVB+3MSio/oOcNWd4yLarIoN60hqvLKW6t97K5JpOaPO3JPjFyDoT3krSjTTyz/iAEFTNrw3O3LrKRh2t1TnE7kySN31O8T6EaLu00YnRC6ZFtl+8ByQMbOUhIqXGGq2YSP9/WKfU2hbo/qMsQhQjGRh0SC3pKk9Zqdm3KuytR4t+ymknasEhLpqHqhbKxTAkQw5u66Qs/S6klP38G+5uQUe9r5e5aQoC82XuT9j/w9p5VuCGuEdlTo9GkLa9ML5kmNWdXMg/bsxw83UTQx2oFH07n3D+SOffezSO+ePSYO/kqGvgrCgrdxJJWqW7Jfag+yLC2a8BBVV55CjQsVZdQtgllnfalrirxrvZnz+C/AWZVcJfXlMiPEP3RHwPwOeAx8B8rpX5DKfUfKqVmwB3n3H0A/+/t7+Nao4wyyiijjDLKKL/r5fsJ4SXAzwD/hnPuK0qp/wAJ131fopT6s8CfBSiywx5w2moudwXf6nq9a5IKvuj16RmvF0Iwdi8954beRC21dAmPuwPeKO/ytaW47r99cYszPafOfCiwGQB5vXeoaaSq9r35lvmRL9qqHY/tAc57aLQPI7jEkc1EVT2c70hNX4tnW2WslhO4TEnOQ8ovMW4aQZiDcjLhO7oWkGPwmuyFd8CTPzpoBhaVxwEpB+mqD2vGwsQEL4tQHlzcEYvqm+42j84OsMu0zzhywQum4jVDtnI+wNI8UdMsV3SF2qNkkPv2HsVYwmAPl8Ge2NTRTYZVwxWhiF3EG4XwXeKEHDI8g1VYD5juUoUxxBpZDC7XF0PtwyRDno8QntvLWmIIdFe45hpPUQjHuP69UOct4COUthilaAep++4JgIH0V3ViueFpDL58cJ/P5o85MtvICQNS1qBR0rkLU2JRzJKqb7MLoU3/bN4qDVYVyL+mEU/fXjaJVVKyB3C1ojvQfPHklNktmZjvb464KCdsyozGh6RtJ9XVQ3q8TUD5sjux4K3pQ5vhtc0tJI7OZ+9JVqzas2JN5aT0ziDLc9jv8p7gogJm5ayc8WF9zM9O32J2W9bzf1L/ft5+cIPNoxkqrMNKiA53/poBjO40MQkl6yC7UHFddoX0Xbrpn8lUkF1papezO/bhoklDknXUvoipTX1NvZBZCvtg8iDDAtiD9/aA5CFZIcw3q7hqCg6SXQxHpqrjsp7wgXGx6PN1apRQTkhpF736N/M1r2QXNAt57g+OjnhzfYs3z2+w7sSLILVHZd7shck1sYyI2Wkpb5NBWsi1XzpZcnOy5scPJBX/Jybv83J6wQ2949J7VL+y/QL/xcMv8ya34njODkpuLwYhttWcDx4dCS1MwCB2imQnfRw8Ccmuz26W15YuNXsJB0Zb7m8POS0lFPY7k5e5m1+R6zZ68zZtzqbLpPaa671EmybnymMDV7s8lpYJFB92nZJcDUKWGXRTizMOHcg2OxlId82LnSdtpNk4X0/ZLQuJYIRHT1w/gAC1Jr00pMt+rqoOmrmiVYNQetyD/d7kaR/2PSvXPU8DGUSanRJc6JBQUrkea2qt5ka25qfn78T59ZXkM+wuJqjdMNSoMDuN9ZvjA59xnB7bSFkUkhrm3gMlnFAVU1NFKEznNI0zdGgyj1HItaHVXcREtcqSeiJNNdjDQ1Yp9H8PvbxRBuvnqet3IN+PAvU+8L5z7iv+9f8bUa
AeKqXuOufuK6XuAo+e9mPn3F8C/hLAwfxe4PhDl5rVhwu2m8Hkyx1vH9/iNxavsJh6qoG8YpFWMQsuuPDWbR4zPa7WBW6XREBt5KdRxMlXlykPy0OW24Ibvs7dLKspj7esjQ9deFyKWTScHMigvrq45DjbxkFNtOW0mvPNy9s8OhcQebNJhZG2Uf1Es2HS+vbWeGCsim86o7CZ6rEg10jh5GEcJu9ot4bUKxmmBlqZ2NBjRZRV7HxYokxT7DYhXfZZLFGGUAwlbksTge3sHwADbIXNe9CSZJb017KJZ40eTFibspeFZxOwmaX1zPMSBhLXswsg4xD2yS3ag/uVcXTG0XmsnDOBk2lYHJKnynWAc+TkuZbxFUUHV/c1bJXfTOxQ0TPDvuwZjAN2yyU+zHrt2XSrIrvxRTOlc3f4oDzi3bWEolZVvofFCFiopjNsN549+jIj2ejele8z8JIdmJCRFLJxhvQLTgnuKWQRlYrqfMLjkzn3jiWb7ZWbF1y1Ex6VC85KOVBPt1OWakrrDzxbSSq17XrOMuNDKT1w3HngbY9nszm0XrsOsCQB2g82dD//hhteP6/kS7s25eurlzlMtnw5/wCA//G9X+W/mX2B33r0MuuNB0hXXqsLa6DV6J2OnG9yafUk47ZXtIdhYxAlyq1y35aMLnUk/rBOtiqGGvfHgFhHTsb/Gr5iuB79IecsuFT1c8kpzsoZy7qIe+E0EWZyNegnFTCG8Q0JkzeNjsSh72QntFZHwtadzSi7RDKWmn5eBCMwHIIhpBnr7lnpl/qk40fuiMH7L730m/xU8Q5fSmVwj/UEozQwYWvluQ3f5t2jE07Xs0iSOc1rJklD4XEx6zpjfTURjr4AcRgkSaiBQqwGZK6SjOD3IT++k7RhkjQ8WEtG6ZuPbtI2Zo9GgUZJLckhNQnAXmUB/9kAjJxWytdVDPPb4aYdKrU9bhYTz4KQ/aqAw2zHoU/TnyQN7+pjtssiUklQKnSt9zK7TaW8EeDf8/vX3lrxY9PbWh6rNsT2DD9/mgJBcAw4X50hhJYdqjN9hl+jKW3K7528zT8zeQuA//bwc/za+nXeXp9EjFnZJmKQ+fPJKEfbGVZtHqkKUtWx6zLOrew5a50zNxULU+4B0i2KVHW0AfemO3LTRgxU4jRGC0N6yFhtUit0I4PzLBD1RvLYDpQh4i+hNwafJd9TgXLOPVBKvaeU+pJz7g3gjwC/4//708Bf9P/+ze91LdTAOjcO1WjStcKHpgFF9yjD6YyNksm+Ae7r3sq1xoPm8h4Iq1pFtlYxmy8UU2wn0B752WEUrjTsrlLem4jClC1qkqQj9RWyW+VwnUabLqYHP9ouWDc5B5476l5xyY/MHvLa5Jz3j+XAe29zxPluStUkfRV4KwUtAzZArYXFyyaqByw3Yu1GPE7tM3iG2WWdlJupDjWVT802tR/sdrCxtJKNEuLkh5OS+oZhm0z2NmplLMZ7dhxIWYJtgl0GcLCPe4eN20iaMgcNSZiMnabaCgFp8Lhd53zqcrFebeYiK3IA54bsOlv0nEyRSdmASy0q7zD+flo7bKL34/cqLILeUt8DRF7/F7GirFeiIh+ZZY/bRjiLJAMreLz69LB+QVkPqGVwwHWt7gkm/fjuJUE4f5BWSvhjgO+sbrGsCj744ARznsbfDb2XfVmcHosgBVR7UGTwPO17ON2+chgeI3HYIhzmCrPWvH3/BpcerHsy3UVrMrAzF2nLNu2GuHSup5JFZvksKJAyKVSlo0dIUuGv7djXNvc9x+LQi6jBeMLGeVaxbTP+q/Mf4e2p0D/81Oxd/ujx7/D56Skf+nIYq6Yg0R0HSc+ddNlM+GBzyKOlGGDbywmoNHo10DJGplRxbdbHjnbRySHmM01FWeqVWBOAwgOrVgXPkj+MpC37bRt25R5vmeuNlnlek+qOq6rg0Vqee+JLuzjXK+1R2YjKqJI1vU049xiVXZ3yZnKTzhuXVZNIFvEqjUWAk8
1AgQpzrO73VpC9q8sd6qjmC4sQMbhgZSf8t97bc+65gLZdzsJbablu2HWpL5si1yobyQi+7bGBnzm4YJo2kiHssw6bdUbXpeDc/h7pFLoNY+LXuCKSKM7Smh87eMAr00sAvpHd4YOzQ5pVHhVGMbpV5NGK42RcLEtDZlHGoY2L2XTNLkHVvWdWzVom07ovVwM0XYEupeBwoG2om4Rtm8VnupmveXV2wVk1i5mj6zqXjNFgDKwTUIk3wv2YNNBNnHi9PE7XJux7IX1bIiO5f/NpDqi9tRfm7vXv2J6mw1nFo2rBeTflD/vEjR/LHvAvzd/kvTbl0spZW7qUD5oT7tdHALy9u8FZNaPuklgp42WfALLyWQq7LuWymdI4E7miYN8LNRR97WG16kuNocWYu14MXs4Cv19rv4epviN+UDxQ/wbwV30G3lvA/wzR5X5RKfVngHeBP/k9r+IGoal5S1q0VIcJ9YVMtJCWbaqBR6TmCZ4e5zOpmnnvfUg3inTlfxMJxPqJVswr7FRRXRVon17arhIa01sTeJR+myRcpDKIFyF1NaQTFy1HB1vuLpbRWspMxyyTbLba807Urc/Iuz4BVd8HwcIdWr5PkE3uNO3MkE4amjveynF+cxg6STrhe/k9d6TW0O8//i6Le2WcjACFbsh1Q6FC9kPOG9uX+M7qJu9fHgGw22SSTTZInz6e77i7WHF3IpruQbJj3eU8Luc82oqie7GZsF0WKO/FkyxC4W+K1ckbGePrddlQLgJVbWLBOHTiYtX5utK4TbJfysaxpwiF/owZd53rOZ0GruuuEMW6Dz3KfIoWnSR44LIejB3K03QhZRrxfqFhjweoU5im/12ykQ19j1jTAUc9Y/26zrncTlDbPqXbZj509JQMzT2vxUBhHaZXx/qJRkkmjO0rAEgCgMMW0uC68F/epFycC6D10oryM/QCJlknFe4HYVyzU1K/MnhbGkeXq0FNSvF06V1Pcpuue2b0qLB463nIURO8DPvEfypmBd6bXnGUbvnVs9f47YfC1PyrB6/x+uKcRVJGl3+iO3LdceDdXVNTc5Ru0crGDJ2qSukmpu9wBTiHzRWtXz7dxJIeVcxnZQy7lLuMZpeitt742GnPK0YkM3VKka0dqb126A8k8JntJVs4B12vGN2ervjs7IwPdke8sxLDrW6FiV4p+jR6Kw0ItBwhU1A1is5TYDStYbPN6Tz4XW/lcDfdIAxke2/WkDjTJr0RXB852rkly1ouPbD6r5/+DN88v8NFABTvkljmxRzIgL9844q6MyzPZ5LxixCSWl97DuD2bM3r83PuTpc9E3l6yLbVoDQ2JJ14vqzw3Eml47oYJn7cSZf8gfl3APhDB2/wxu27vF8ec9V45cRqapvQdKYvFaIcqe7iPl+YVkrgWBM9s4/XM7abItZANYlkn3VWx7JeqlUkpSLZEDPEq23K+1eHMWlqmtbMfaTlRi7RD6Mcuy6N5ZveOTumLmdCehw8JNrTKGQO6xeLTY0Hke/Psb39w++Vw7DxHmktYc2xx9F2HXSulBDY/uL5z/GVQigZ7qYXzPTBnpKTqpbbyZIbRsK0N9MVb2xf4u3NDe7vJJKT6o7PFGfczS4BWHcFp82cVVuwoj/Humsus9Zzd0VwuWcqbzoTeaBcK17nPtu6997prm+/cvt9oVrHR8n3pUA5574K/OxTPvoj38/v967l77g43vKTt+9zb3LJo0oO4Q83h5xup2zLnNK7+qSAotor/BlS2iMhpAWX9DMmhPLaCfEAuH2w5pX5Jes7Od85E4t182gm8etgsYesjcyJ1wVwU59t5vux3aScLo841Yd9hhJIqLBT/QHjw1PpwD2vPYPukJgOBSoQSw4meFA6lFW0m1RKJYRB1Y4k7WLdu+B1SrTltekFAJ/LH3mG675YY6osHYqVP+HOujlbm1HZhMqXCHnQHUQ3K4A2Fq0ks+HC19DTynKQlExmTSyR8TA94EPt2GiZ6LbNpO
6ZImYrWgQzkcbDJcwJFb0WjdK4SYdJusi/5FrJlIxKtA8VKav2vDuwT3QZ49tRYVM0c0Vz6OIeELKMeiLRXpFtJ/18clrRFipm5rnEh6iCXqIcOpEsvGDR2EzurYL31C/UdkrkHrs5WXNzsmZ1uGTis29C3bawkS/rgqpLWJY5y5WEYeplhln3pYJCOr1N+9AYytdQbHvKAKeR9HTvhTSThtlEFsByJePrLjJ0paA2sTh0PZN0yiGmAPaVnKjUBo9jbnFApzTKP0BPQNrji7rMc0JdS8UP35M35dBoSunMXZfyU4tTLg+mPLyS/ePtt2/ztr6FMq4vQ+NADcougSi6rtNR+XWlIVtpEp/Fa1PxLiWbPnPObDXNVc4aOFx4ZexwQzPXkU+pXmd0lczVMLeNz2iV2oH+WpY9pT60dY+Gw/U0HADH2Y7fO3+L3zuHb8zvAfDtzW3ubw9YbQo6X/7Cei+pCQ9Qyli51DGbyby6Od9Qd4aLTObSLs2xpdRrCx5lY3wtyRDy9v3STkVpAjGCqTV1mfKdS9lTV7uc8r0F6ZU/OIM31UDjvdzvB2/ReRK9ta1x2EnDaifa2dVmwtvpMYm2UWFtvEE6LNgth+IgpNU5TyCrewoO5Xhrd4vTRhSxualIVcfLxSVHqa+zZ5OIgwpKnHWa1mmu/L73qE3ivhGU76ZJ6DZ9ObC26CIRpvW8btmVJruCdN3vT+1hytopVuc+g7IUxY/UogvZjPKiIUm6qARUq1wcBWuikQayT3W1ikYosF8ey/8tOMJgpLgeF/UROoIo8gO6GMceHkpnHVo5vnb2Mn/v/EcBObN1YuVs82vMGMt8WkbC1TuTVQwhX3hKiGVV8GFxyMvTKwAmuva1Bw27LmSSmzgfhtn51qmYzT8s82IH+LlhPVlTiuGX7ISIFbzHvvX9ZPt++ij5xEu5uIGRd5Jt+LHJh/zkVCbM5cHUH+gpW8+4ue5ydl0WO3Dd5EKDXxbR21NWKXWa47SPr3rvVTt1mAAitxqtLD999B5fOhDG5a8evsKbD2/SnHmyPU/Hb6eW6V3RlH/01kM+Pz+NTfju5gbfOb/J5cUslk1RtY7g1KiMVcofwt4yzBAApmNwWDvcEPDu+8cZISEEH+7qFCxTjGdAxgonS6g0XaXeizZvOb8pm+Ib5V1+Y/sZVm0RmXWtU2y6jItSvlN1Ca3VtFZz6VPYq8sCvTXRa9KYnNPJhNP0oF9oPpRiBjFmo8XySnw4tEkTXGJwqUPN5D19aKknKd1VqFnoQ4+po536ix82LI62HBRVtDSWWcFWFzT0nsoQXtDDEJVSDEht9/oUxGvULKA5aVHe8mXjU8qv4ZRU2ycEJDs5SAJxIHgqhYlD+U3aJBZtLNUsofHWaDtn31rzh2Q7sxzOZSP5/PyUzxePODLbWBOqUA21M1xaGaeHzRGrruBBfcC3ZpJ08U56QqkKqVaPf1blgdaDzfU6LYZNxcVpN95TOFEcniz5yZMPogv83c0xD7cLLjcTal+wWwV233Dg+TTlUFRYLiaYvm7iecemNZNCyiCFMMQuz2nm4pEKip67Cgqv9uPUj9uwPp6pHfpU9oWvLu4xMQ2fmZyxe1me8Vfa12jOC8y6n7/Ro+iNHV333rBm4RWoxJGuFNmVvO4KCXtl6579H8CsNW455ayQtrjMCb1DoJLYaW849UbckAohetNaK1bvEx6na4z4Sf/bTZtR2pTPZ494aSEHzCvZOV/LXuXxesZqE/CBDpTGVSHGI8aGyywHvvj55xZn3MzX2NvynctmysNywf3VAWfnomTYqxSz0ZhB8WCbe4C0P+BpNWaZYGvN7iCQdHoC1BBBCN5iK/AAgHaToKwiXfZGUTcThafwlAVXqwnl+VxCXkEX1k6YuDcqkhGbEtKVI1tL5yZbC8rvx77w9J1ixabL+KUHn/HXnoqX3VNagBhpgWE8chDasKf3RkrsBx+uUo0i2/T1L+sjYv+EcF2yUZid24
usqEphCxXnTro0JKteMQSwyYR2kKiTK8E4ZksXr+MM1IcqGhhBxJsSLiQYMfkvuK68crBHvvYUD6nHQOm2v/gQGqC15WYhZ+WHnkdu8n76RDKSU1Cmc971+/x3bnUsXlpx4gs0A1xuJjy4WPBmLtc5mJQc5iWZ7vZKudXW0FkdsaHXQ3eBRHOvSYEWxZ/P6daRrRzJTkh8Y59ZB9btcUN9lHzixYTDYbYrU3799FV+8/zenkY5TWsOsjKSbM28y/2WL+jWTbS4NXeHsUp122ma1MZJHTZim/XVr8+WM85XM949POGLhxLm+uLBYyZJw3sLcZFuyxxrFYfTkp+8dR+Af/bw2/ye4j0WIex1lPDenRO+vnuF727F6npYLrgoJ+zqNGKn2tb0DNYI1qh6nON0zzdjarWftRRjsCoqXpObW9pW055OyC78gb6VjWnobQEob2o+fFnaUlvD1957Bd6fDA449jA6Tkv83GYubhLFdr/4rDPQzjRt0XM26cBvpaHxGI3dzOGOm1j9WsZBDqfEb2SLWYmbK6ob0kdVmdE0Gp1aDrxCcXO+4UaxiQzYANt5xoPZAfcTaVu7LUhKCc0NAa267b0a/UMMFrEWMlIza2PRVLay2ca9xsghpAbKmE18SMn1fd15Qsq4uTuxskh7zMQQqxBEtT60NVj0V92Ub+/ucN7M9r4bvIK1NdQ2YdemXO5E0a3LBL01Efdndh7A7GuDAaQ7qbuoOsArAnbiILEYHzZXFwnvqWNemi356YP3APji5CHrruBRveBhJWvs8W7O482MpROlrmu0lEoYHDDgRMH0Hscs65jmNXpwUK0XhiaT39mlVzzqEPYMlqA/rBxxg1ee3yow3W/fn/MPyy/y5XvziCP5udfe5f7NAy62E2pfJ7PrNFnSxYO57TS7KqVtknh22MrQLrPYjsiang1CeHcrkqyl2WTopVw7Odfots/4ipi3oZFU92GwaP3b8L3wAH5uXNurh5GKx+Wcf3Dx4/xK8nlm3kO5MGV/eATcXSlKXFL2h752oDeGh2eyfhRgjxSvTyTkci+/4LOTnG/nt/lqJ96ty3IBO72Ha9QNsNYoP27pWkK4uzs6cm8dT3e8eyOn1F6hCoS/iUP5Pdyklu4yi2sKiCHGeyeiHL56cMH7B0dcLqeRnBhfckp1gz4LxkE4KYPn2fVvnWQbDs2OtwsJUZ/fPyQ9k6SjYVHtPfB06P/B/hxKprgBfEI5MY5bHw1xJzWLA1EKYomwTIuXfQA+Vw6UcSQz0YS6uaHZJOghrtQb5XE+JK4nyfTGs02gmTk4bLAhHLrS8qzhXrEywTVvk4brBY/3DMkhXupaGa1Qe7BrDLnuuDtZ8q25EGk6k0YOtyGJcLIlllvLrhJ2l0e8+/KE2aKMl+5aw9pXCFirKQ89MWmIsjiPm3NOxT1Ua9cTbIbbOSXOgTB3PAA/3chvspUjXVtM2e3DBOJcD7oEHymfqALlGGRyOcXjqzn1RUFyZeIXbOGwkw7tvRbFpGZW1Mwy0TpmaU2mW7Ztxq72IL06wdW6J3wLoTUDk6IHn60up7xzcYv7vgTLzcM1qbZSkBPI0hbrFFrBw52EBX5ZfZ7vVrc4TMQ7cGh23DBrfnr6Nj89fRuAjc257GZsbcbWAwTWXc5lO+WRB1O+uzzmcZnQDNzP1me6PEkp3+Ml7h4tSXXHt5vb2FM5PK3/zTDUEcp2BPDvIq2wnSJfDvA9oesHmCvwtaXK4G0Jm75foKmimXtFyy+GBDmwVQtJzJBRNCqNbMNmK4piV6iIBdjscqm67heDmZUoBAz70kwAbMf5NpYR0apfEAd5yWkuCkaX+kPO9inWpgreloHlEDZAgpIluChtOvC6SldrWPc1/UzrcQrDMen1gyi6U6haxU2rNQltA5SaZONDFOW1wsXIptIVsLopY/nB7ohvLF/imx/ewV54cEko5RKUs8g4P7jOVpNd6ejF0ZV4aEwpihNAUlrJTrKgPf
7F5R3pvKb1uJ383NA8yvmN7BXeOzySMSh2TJOabODeS00nXihvMYdwz5A2IYTwVNI3eFNlWKtj+RG1M6jgHRkav4OxSyqp2SgZhOFzbxV640g3Cvuw4GvVK3xwSxSDzx+f8vr8nC8dttF72VpDojtupIIrmXqE/VUrxKQA3zy7zeX2KF4bpyKZZnMoz3T75pIfOX5Eqixvr+Ugvn95IDUJN9KXZqPjVAneimSjIpmmzcKnOipacj8XcZ5hPjulcImOMIHWat5e3uB802dC3jjYME0bNuuCxIfHkq3yWYD+TrH/NI0PNX54nvHhwRG/dig0MDdmW2ZpzbrOY1Yc3rM4TFQIzxuxgaWEOG3meGVxCcCPLh5y/+A0kh+eZBs53AbukfvlIb+evEKzmvc0Irml63QkR/yJgwf8zNF7nDZzHvl6jN++uMUph6ATukg5oWgnakBDIhUgdAuN76dVW/DZ/DF/7M43AfjVfMub5zfZ7bJYGsgNM5WDLqYdJunIvAFYpC2p6TCqJyxebgvqOol72mxSMUlbqjZhE4z+LBHi48H6VY3CtYrEG5yH8xKOIhwJ6ImOQwivbQztaS70CLpX9rt5Rz6raX3Zkm6SCPVMFhR70SaV0xFC8wRthh/fPfxtnJ8DZR9RCENlFbsV2oe7+RUv3RDl94NlSpqbSCAKwcAdRl8gv1BU5KxvSsdMjkpm85I688Wcq4SuNHSlieH2HkLgYnhQGfeEF8o5hbUqZkLqUvtzTj43lewnw585pVB+U4qe7+sG+TX5Hh+PMsooo4wyyiijjHJdPnEMVKjndjgrcU5R63wvy4NaAT2iflsadmnOhQeBplnLJK8p0pZkgD9pXG9tBbekVZZDH/c/yEs+UI7LBwuaBxKGuP940tcEA+/FUayN49SHi77h7klh4BArzzuKw4rXTi54fSEu8FBzrnFmr47RWTXlopJ7basMVelIQQC923hYn+c6ieUkafjC4jGd03ynERep2vrsvuBGbhW6VXRTy8LTLZykGw4Odixv5oMyKYLbiBgG4yTmXxm49Ja9x+90HkDdFlC91HBwZ828ENOz9pweu23W48B8YeTgBZQ6aYrW9oDeaplDrdkFtT0VgH496ekfLqsJuRFLL1gVuzZlVeW0ofL80Fr0Miz/Ia8HIFzF3u+Ugql3ne+Uo02yWPw02bFPoomkuWrj9qxI4c5S2ElIHZZ2mp2OhKfZUqz067XidrcVTahv5zT3VwvUuxMmVwN3/ZA/K3UxvTrSL7Q+Q3VYhsBnUUW+Ie99goErXcONww2X3rNRJhNUp+guCu4/FK/Yw0oA1t3Momc+A2lai/vcj2VSKZKNIt1AsutDUxLaDdai1M9rqwTnQbbZpY4ZajEkHCgkYjkT3x7btwWE0DXilk5q3DYhvZ9x9VAwE185PqQ4KTmc7aI7XyHQgMID9KeJZDsdJBW3C/F6ns+nXEwX2JX3HA7WaADEh/3olckFn50KJvLieMrjes79rewV968OKMtUqEFCOOUyQVlv/dZ9+5Tt4SdCWaEi7w7Q/+37JNMdyzZhczFBewzhh8UEvWhwlxnpsg/vJ1vXY4TqkHKu49pOtgZ3aqgS8Xi+nx0JLYzqcX/FRsJzptwPgdu0XxvVkaI+dKSvbfhnjt8C4Gcmb3M2nVN6YrfOKVZ2QmVTcu+qvJFueHiy4K3LIuKEyC1da3jrTMay7gyfW5xxlGy5N7kEoOwSlpuCulMRr+oSCS+FfceUxBqUAUD8/vaIqksiLOSV6SW3irVkbw3cQvZa/F8r+wRZr/ab7sZjdB8kBzy8WtB5b1fVpHKutUnPMUU/zmow59UmYec89rRI0cbt4UpT0zHNmhiq2lQZq3W6R+kSIQWdinXnumR/r3JawXUsj1LCDzXAGDrphP519PaA0zq+ZxMV16VZGz7cHPLa5Jw/dlc8fG8e3uTh9oBN0wPy205T1mkMqTVXOclSErjMpU8MMTmzw5LF1J8zWcuuTO
kagx0WfQ7PP8jatfuuJIHPdKrnNbtWt1MKIvtIzyALMdKIDDLlP0o+2RCeVpED6OWDJa/PzlnezuMGVLVSMbxuDW1Iua0Tuk7HCdo1GVWZMp1VERR5ONtRlwmsvSvdh3NspqMr/0uLh/z+k+/y9Zsv8/WHdwHYPJ6SXCURRxL4XGwmQF+Qg8yUclgA6MagXMZ76YK3p6/KMxVOQpOKeGDpWoET0KW84bMxLobEbMTaaLCvBCSeg+jdyyNuFWtemV3Svur7wGqmaR1rNLU+/bfpDLc9oO+Lk4fc/tyK+68cRvd5rlumpubQBCbjlnerG3xrfZtvPBLlbPNwhi515DVxs45X753x+269ze+ZCkYmVNV+0B7yTiU4sHd3J7y9PuH+pYRHy9aDURctJyfyTGWdsn08i8oKCGtZOU0oA23EtCXLGw6mfaHiyitsIXyUDEKWEUPh5AAIacI4HQvXBgxDwEso5Tjyc2eW16wnOduZHCbVJiVk+nTe/Wu30KJop71Ss0chgGS1CQeSiWn87cS7hQeucJtCdeKYeQVukVTcma+5em3Kzs/xJO0k09KHBVLtaFtD05j4TGqTgNLR1WxTMGkI/QUwtorZNlEqTaot/52XhXxyeVMy/FZVzuljGTv9QDL80pWmncj9treESmOfHZz9eoZWYgAB83Y4KdklLWsthx6ATQxG7SupoeBs5zc767MsdcNe2KPLFHYmN/vM3XOMtrz13i2yD6XDZ2+luLdTVmrRb3xq/xrCeg/1kSV7RcJ6adqirxKKU7/GW8nAGxSA53I55ZeXn+Ur+vWYAHBc7JgkzX7WT6Nxjd6rCxlA8T1nV58NBfvzqL+Q38r9dSZJw53pkro1XBgJaVEarCcQDmHUkJGWDMK49VxTHUJ1y+9pqUOXQgAJ9HU2nYohjhDKN42L1B3NXFEdO9pDnyV2suNoWvHqwUV87Dfr2/zy8vORFPZ0O2VXCYHi3M/5uwdLNnUmhm6Ym1o4ezYXshl+68M5b5h7qGlLMetrwdWbjGTZY/90LUpjwLakWxuJJkN4rGoTfuX8NdZnvpancaSThizronJirWCwAsZGOkreC5QjtEqIjScteS4dXlUp7lERjYLtoaU8rnBWxcLbyU5hSoep+r4UYDeYRx5Pt04jf1v4zm4Cq8LFM8SljuTKkK4hv/QYpAySlaZJ+jT/tFT7fHABQF7ZpxqfQ1HXskNDIeGgrOCpgSJPa6N4//KQr6jXOcr7gt2LrCRP2j1qAegB37tbKafrGcvzWcQUmrOU7Tphu/DUKUVDmopCGcKYXacli3bPwlV7eOP99uwbLRHi0UqfDHFPIZEDpSKE5np/XJdPWIEi7haZbvnC9CGfyx5h/KiWNuOsm3PVTXhUy2Z+Ws+5qgs2nlxsWeeUdUpibKySrXHsFinL4A1pklgk8HIjC/Lx4ZwvTR/wp26/z7cPRVn4r298kTce3Kby2CK9lTS5bmbJb8hkOJp7QKBPr91uMlilJOsBgZ7nuXHXDtUuI2IYVG7pMs+Eumd5MsC6PAn8W53N+Jp+mYOiovTA2GH2CPiCm6bde29lC6a65rX8PGKJCtXQofcqW99M13RzHQsxfisUFg4TPu0o24T3dscx7fS1/Ix76TkvJVcstGyKn80f8SOzY36teA2Arzav0Cwzpsc7vnxTyjpMTMNbRzf50CtZdZXStZoksRivLORZyyRrOMzLmNZvUWyKjNNEgEvLrSEJYz1YIHrAgq0zUWBDCm/ot66wFGnv3Uq15Xi6i57K7qZkcGyrjC2iBOpWsrraqYvKEXjvhAdM50VNlnSstKOcBGpbf+8wB4zE7Wfzkh+7JZmgr03O+fz0MT919H5kuz9J1nvEcRubs+4KPqiOeHsjFvrbF8es0jkmbD4VWE+34CLAtE/xjrQYreJqV/DSbAnAjxw84na6wqJ488YtAL5557ZUmS/T2Hdp3mKMo/WHks2EJ6krevZ95UTJnHsL8pXFJZluua
ynPN7K2J3NZux2qViH3kuTXRihCwhDqcTbJAbGPn5Nec/Dtkn50ZOHHH5+xxsHkpm4/XBOstYkW7UHXh0qfboBVQkmaFd463/Ska175QH67LFwmGVpx25ZkJymXHim5LPMYSeD7KRKCzB54B3WzZPeNF1bn8nUY9ueVvFeFGDvCXaau8WSg5cqHh3LvPxwfSj8a92Mbj1Q2JTqlT+lqY415csdi3sy5iezLVq5uJ/s6pSyTql2KfW57HPZlRKco1Mx87S83TK9s+FzR4J1OS62XJRTNk3Or1y9DsBFOeVbb90lOfPzspY5l1pYz6W/37g3QRvnKzf48U0st45XlHP53fnDA8xFiloZyqkHpOcWvTJkV+Idk76UjKp04yMUPhtPtToaIDeKDY/Wc9LHnlbgUqFsvj8vrKeXuCZ73Ek+E7c+TNkeekyfEwLnYBQ75TDG0jnT46qeoszbHLpFh809prA16FbWccCYCWxP0XqutnYmHud0I9ljcr9+3ANZrSjUrmdn99UIdOdQTd/IpyruMHCNqj1PfHivVT2vXDe1NHXCdx7colv6zdFKXwXyytB3SdGSe0zy0XTHq0eX7OYbHq1kPq9PZ+i1AU8o3EwM7bQlyboe7+SvtZ89uA/eUh4/6qyKzO82E0xYoJiJYP5rHFeCRbQQkqh+QESaPxBxmliN+sP1Id+avETjTDyEG5dw3s44a2aRG+qymrCuc3Y+fLOrMuo6wVnF1VpW9lBDBUAJyNdVnmUY+Jq5x2k556eO3ue1XEJvf/jGt3hlesnbtwUUuqwKtHIc5CWvz84BeLU459BsaTz6bmszrtoJj+tF5EVaNzlVl6CVI/HKSp60HKRlVF4e7RZ8S99hp7I+a8eDcHvOJ/Yy8gDUznDxeMGFW0ioDVCV4rEP6YDf5I2UEGhf86GwesLjcs7ZZho5S5xT4sXw3j2FcI1MsobK91+zyTBXSbROUQlnZzmPi2O+kn3Ov2UppjXzSRUrvN8oNrw8ueI4k53tpZMlZ9mUxaSKLNA/MXufn1t8l+au3Ouqm3LezmIWJkBlUzZdRmMNB57if2YqtHK8M5Fx+vUypSqnJLmi8566NJXU8whw1Xjek97qcFrhJh2zvBYLGFjvcrS2LCaeI2e64SAtWbc5v+OBz/WuwNSesG7A/eUyJxxhQJZ0HE5KpnkN8pgUScskaeKcyEwbSd5m3jysbEKRNBynm8jX1aFjdXiAi3ZGaVNaZyKp3zRrWIUMJ2CYghzDBIGdvHbUBwPlejnhq6VkW729OOHLNx7w+uSML06lGtO94pLqTsKyLVh5osHLesLjzTxyhHWZRWh1ei+Y6iRJJIRjNY6TbMtJtuXIz4sbk220Rk+9UvU4OwTSQfi3NybCRh3q+hWPZO48To7prOJnbn/AP/uqhI+uXppwUU7ZtWn0PHdW0tXTUA7HatrO0FlF7r+z2WU0i75mpLJgc/EaBK65104u4ATeOzxie+VdxqWv9RWsX0vPDeaHxSmFzmTuRFBvKyHSqEANlbzhYZv0CsZFOeFRsSDTbVxjs6Oa+9kB395lNN77HhUxFdqiqA7BJTb2e25aXp1dxmw+gNomnNdT3rnpPUcXC6pVimp1nGPJQU2etix9eY77qwWrsxk4mJ2IwVlXCdmjJGYM7wGRfT/tphlt4kgHSl+zlTB+MLbK4zMebA5YV1mcT2WVUncF9aGKXntTC6myTcKGqWLyQV0HIPuWP/jyd/klPwfOTxeotexxajB2UQZJNs74bFvAZVIhIclbpj7MZq2mO9Fo//qO95xf7QrWniqknWnqVgiEG+88bBctkxs7JrnsA/Y1CXGVjel5pGoDte5pFRBvU3elaEPyyhTauSU9rGgrf7+tFqoQz+GWlNYf9AZl+nFRPitvL1V/EDYO/Qn0yQ04dG37hKR5x3xWslxPKD6UuxRn/ffjZZXC5lnMar1/a8Gjl7d89tY5X74tY355OOHRes7V0nsKK4OrjSQDXK
sjOBQVyK5jE7zxPOTFMhJZ6mlotCfJ1L1y1vrz1w6e/6P1p08eA4Xf3M8u5/z9yx+lXf5kjGfHNPtmQJTWqj2cEEBwAnR+YlcFoBxZyCTbSmaZKhStx16s3jvg29884huHr3D0slhi9w6v4uEGkuEX5IHP/LhsJpxkm+h9yXXL3FTcni/3mmadJtfNXs0ekMMPJBtIJz7MMwisSn0j+Vt4kWTw2pn/zkGDMQ57mZGdeYv9at9aDoNc3jDcPxDvzuWu4OKdY4r7Zq/vEiAdbGrdBC6LfgLmledZavtrt6XycW+ZLkLDULBK4cpnVX537khu7yLJYNkkVGVG12l+Q8thfVrPeKlY8louyumh2XKcbCL3F8D9+pCuVVzWE7a+yHNhJsySOhab7FrzxLxQrWRW9KzYsjEM+XaaqWJ6suW1gwu+eymenPJSCnhuM0/mdlRw92hJbtpYBqDz+Cbl2DtkO4ix+apJKFM5BAI/2dIVe95CpZxsuJ3myNdjfDgRCowHp4fYdQA9+TYF3J1y4sVKHdpn9tjSkJylPfbFZ16Z0pFt+vCNhIuIB4ybdWKwvSvz8qqZ8Q9Pjpnd2XDoSe4WWUVh2qjwgSgibaclIwbJajGVV1oDt411mK1i7cvUfDu5xYfZIdO07rleygl1a8jTNtYDpNUSRorcPpIdk5QD4j/rYh00gOJRwmV1wn+1nvDSiazFl2ZLjostd3RLOpj0wkYuEzpVXQwNB/bs7yxv8Y7VlLkvlbTxpLy2dynP04ovH9znZ090pHa4agrJBvbztPJEi53Vkc5kuZxQP8oxlUb7ELRN1ROhzx4POVCqNDGUvi5zfvX+q2w3eZ+ef7TmuNiRpF1fg9jz79jBzq5byE4Tdit57jcOZrx1cCMSqC6KinkmRkqRSD9NpxXrTmE3KcofxN3jgqtHRTSuko1ivpOQdO2xcm1tyFoVlZDYhQNvHkq8dclmSCOQ8JDjqCz92I0H/MxNgQyEufOgPOA7k5usDiYxw66uNXqd0HksYpcr8ksJVQVl/7ye8uOL+/zx174ufXkvF/Jgm0Tck3UqZgDGfsM9MXdiCMpjoHZdSuN0nG+N05yWczH4vXFlU2Ho10VvIIc96cDjSo9z2Q+06jPKAjlkmF9nmykXDw+otmn0jNhU5shsWtH58VyVhnqbDOhFJLKi074GplBIuD1loeeNuqasaPXEe8GbmhQtN2ZbWqspvUe3napIcjw8DnUDud+f07WmOZ3z7bsT8pek7a+dXPCFk1PqQxm3wPe4q9NY1NxZMUCdVU/gk2IEJoT0LH0o3Qb8lrzsMgXOYGrb77cDZvZodH+aPFBA76YHuk1K9thEbog9AHXoC0NfuBXRIl3i9sJlpgJd670K1Sg/uXyIRTXi2s+uEranYmV9c3qETfuaeiG8poaD0ynvivTPrR0UHcW8jgWPp2kjYbSk5y6qO6GXDwraRTmhW2YUl31xSFNL+nmI1gRmVN1Be+AtmptLqSbeHdH5QrJNJ+DumEPp+82mDu0nUd2KhTXEcQB71aedb29ws8e+bAf4FM+4Lczfg2H0qdkBGGtKRd1OOJ1713atMTtNkzk+vPDp0/oGaEfiWbjns5JZXpPo3jquO0PdGuo2iSDQrhOSvfDaXQhoNhTPBWH5TXduH0DN/gJGwY35li/MHkdl7CyfQ52CVwx2ZxM+sIqDWRk9dbHfoB87XyKjLvxGnqdcWiVhr6Un/Nxoz2nUz2+ceGke3JI+OVvMaa5yig8T9LWxGnoYbSbhwy73tmQD6aonFAwMunFcEfB74MNpPA/U7GiHUo7tubQ/u9ToKqG+OuCxkwP2kQ4hOgses2iKVpRIT3+QrpWQTy5dDKeAWHg7z8B82i447RQqs9HKc1eCr1omfT2tfC1lmNK135QrF0NecRyto5lpqhs+dJH4Nn9nyv1ULNb3Z7dxhZSfCSFhY4RzK7BSJ9pSZA0HWcWtiWDzjvMtD4sFGy19orveexc85u
9cnVB2KUfZjtRrPoWR4rdl0pP8Vp0o0bEQdGLpfN3E4GlurUIbNaDcUHHzjh4R75VynpQ0T1vOLuaY94sIHTg9ybi6uaPepuTBE7v1+0nAW3XCjSa1Mv2he6lxJmXnPUubTEhhXWr70Ear0DtNviMqTMO6i/HfwX4SpJ1bPARS8DsK8bj5vTgpGprLApTur1kr0rOEs0r25l9aTrl5vOK42MU6pNYpsqQjSbt+e+4ULnHRmA5tVJYIOH7r6gYPd4ue6T+tmBjxDA/xaxqHRfXeSxRVm7ByvrqCb2hQrAC2bcZZOWPXeNC81ZRNwm6XCU4RX/Jo573j/nbJyrBTU95eyrXfyzu06SJuC0BrmbsBp9V4pXxY0043ss8sVxPSLHBnWCkHFcJVilgLLzxA+FswQPvjGyXo8tfKwgwLf6dZy93pFVWb8IEnyWxaP6e7/f1Xzvfey5yuQb9nqK9ksnzrdsH0xpZbC8EmzrOKybyh6pLYv63VNJ14kZtA79BqoaMIySih0F+nBvVixQAOfdLm+DI4plcgA6bW9QriD6SUyw9KhmU9jLF0sMeYGkjJbNJn63UZwg0VyAkLcaEmadcj+rcp+iohXat4I5s6mqOOk7tXgCjRZ7MF5kEWSebMhYr8FEBURPY4lrrgFeq/AwnonFqLl6oyXsG7RgphjRQhBWjnHemlIV1d56LoJ5lphPgQwCxksf/srfd4Kb/iN+f3+O2Z1PyqAleL7zjXKWxjUNpxeyYahXWK9VHLViV7yp8b4HZMIWzibWNwF3LN9FJHgk/wpRtuNWTzOgL7q10irmXPwB7HNnHRWk1XGrNTdBMiD1QA42tvvVXZlF0oDB0O/UwUWilE65vZKAlJeyU6vdKkGw8g9WDZdCcgzWHcP0qIlztNajpupmt+7/E78j3leOvsBjtP3ua2CdUm46I1Pcu365V2M1CGdKPQwTqvNbZNJTPMZ3MlG7WnjMbHSYju5a6TQrsBsxdFCd8VyFrowhqIxYvNXh1FpUL4pi8/EniZAvcUwKuHV7w2v+DrhcylB9MT9Ebq8A09C2op5VXioT9PsVMbSxWpzvOHVcRSCDiHrk3Px1ZrmQ87E58zWWrvNeuzhIwvghwPhdZFDMew9IQ9NOh73ks237FaT+DNaTTA9KkBDMr1dPRR8fXXtha2wHIC33pVOtwcNrjHOdOHfu5uILtymMax8orJ6QeHXKxOJPMyOAozPyZRufYb9tAz2ihREJcB0xKyxNwA0DoIp4Q5bx1dpiMx7TRtuNAO1RAB1ChNrQtU23uxdSWGWF/b0dFOlWfRD/eT70eSxnCwuJ4PLSZpDA5Am4vhFmvhHTrqmx1HLy/5qTuSlHAzW6OVi4kqC1PSoWicofHm/6Nmwa+fvsp76Y1YAiWsd+M51PRbEy7bKReqT+jpZhZVSwH61M/VvJE5GEDk+ZUlW3aUJwad9N67R2/eoHho4lzoC3T37YtzJXpiiISQ8XfaF6n35WyEILInk22njm4qe0VIPErXinTpBJjvx6WZKyFh9WG3pOwV3yHvluAB/fPMITOObNkbGygxgtqqINh7WSdRivAdAZS7J7wte20FSVzwCkSQJ0DlPjElJOs0dcI8qfkDt7/Lt71Bcn9zwMZDbcLe75yS0GTYKnaJMN37+rcAk/dSuvuHvD+Tc7U76EgXNXnRRK+cgz2PPoiRpHWHDcTSxmGtfLkr/AAAGn5JREFUwpm+pJJrBRg+HGM52wfrMHiBvWcOevjHs+STVaBMrxjcOVqR37zAfla0fBDt0mhLZno3u8aRmi4yU2emY2IaZqaOKagPdwe8c3nM5ZlosmprJMth0XBnLoN6b3pFe1Pz7ivHnK4lfLHd5rS7pC8GWurIsN0Fwk/TbzrgO71WsZzIE20MykpCrJItP/TvDxZsGMDejS0ba5epCECsbEKqOn7y4AM+M5XQV6rErRzChY0zkUKh8T7i2iasTx5S+qrnQMRoRbJNX3T1vJ7y3R
sC3Hl8fkC9M72lNGl59cYVL8+v4hjUNpSAEVr9cO1Nk/HYAwI3FxMBCeeWfC67u7WK7S6NBYdFcVB71a/li/7fUBLBb1bWK8x2o2kLmbw2gP28MhFZg/FW/dBr5uB8M+U7u9vcyiSF/UuLhxxlW84rDwzeTdlUGbttHt2/0e1Nv7nJvXrvZeJL27iipTv0GaTIIk9CuRsj5W4K03HbMzdPkobljZzu85rMdLEvtXJxnGaphNQS3UXP2bvLYx7fP8T4lHapsyiA1niYNsRaYaFfWqc5SHb8kbvfAuD+8WFk0l+VslPvtjltZbxlMOi/rq/nptqBxziyhcvGHIyk4rhEaxtJ7QDsiabydAiR5G6ZkE21x1RBfqlJ184rAv7axtHMFEcH4u56/fAce6T4RnaHnQ8ZcpXuh58HEg+l4NrXIVQH7DT5uSbzUXlTijdzv1C1r225HtZfVDit96ZusPQDYaFTAb/V19xKdnavrIYAV/fDKViHmia4kAnqFPduXnI2qaNnVDtF2kqNvhjydwLsD4YFKHa3FdVnK2aHfZZU05j+cENC0a4ycS8MSoFyPVSiOemY3d5weyF76t3pknuTS25ny6gcGWXjfgTwsDmQemaup6bZdSk3J2ualyVZAwRDaLSNtfC251PMlZAxBuVEN0bYpFc91lG1km3WM0x3pJuWemFiCD5LWjapi2tAt/Q1SQfGexivIE8QDxsx6FXXGxvhR6GWpzP4wti98qmDkrcb1Oo0Ur5KeaXW5qJ8D0Hr4RrRuExc78UOmb5FT+1hvLKf7KSQeSTUrSzJ1qLrAYA81LcbKkgOVGf3lSqh/Y6GjDMKtKadeu/PKuWNq9vcm11yw4ch52lFa6Uo8zALbxieDMWbl1XB2dpT/VxO0KskkjrrswR3kbBNXQxlo30USDufjo14N6/hoLA+mzKcu8b5aFP/JdM4TNn3S0jsCAkrcE3BfIroj/54lFFGGWWUUUYZZZTr8oljoIqit7yPiy0zU0eCM4tUXg5Fb0FwBU1n2DQ+I6qTz4uk5dDHxhPdcXu+juDdrZ2gKoNtDA/Xc/8dy418w+cWZ7w+F0/OpstYNzlrn1Wy9SRoiekiRUKquz2MTmOFb2nb9HXvgltxWN8sNVJ/KxCxrcqcy2QBOqGNloKEtEJdIUmBtlgj4E2Arz6+x3vTIw6yklkiIK9Mt+S67VPxB3721pvNIY6fKEvr9WStHLU10YtxVRfxGoe59OV2lrFReU9c5iTtHSQmHa6TKEuiu4gFSLRlmtTRa7Ka7qjbhMR08drTpMY6zSbcvyzYVlkECMr9IADcQjr8PK9JTceylOc45QBcit2pPgPMgwRNDDE5lFURRB0G6vJszn/jPsvUe4CKpI3PLG2sSb3XZL3pQ0FCB9AXiw6AxOBtmc9KTmZbJknD1I/TYVqS6t57OvU/1spFwlUQQKpWdg88OgSsBuu9dSaCVW/P1myOM7b48j5bgzMSk+hD4go658uSyHsfXhyS6o7P+DVwkm04yTZU8yQCY1dtzrbNWNV5xB6sdjnbZYH1gFYpqKpo64GHzgo4U+XS4bcO1tyervaqpsvc7dh1KeeeZPbtsxN255PID+aM9lxQA5yQlbBHiGbUXSLA7jsP2N6Q515WBesq28vqVIDRtifd1UKO6CCC2De7nN1hTnPep7nHeniek+bopRXtHU1VJX35j07AqoGDRnlP6tAadq2GqxSne/LDzGjSnY0p6LqxMlcZhFiCV8+HiMs24WSy5dWjyz3v/K5N+SA/ZKOlL9uZidm9ILir7qWSz997zC3PEZcMSvQEaa1h3eYsK1ljqyqjrFOs1eQeT/bqYs3nFme8XFwCsu9su4z3yhPe3x4BcLqbc76ZUpYeE1QZ8eRaYvZKOmmYTHyNRH//zipmWcvsUCbqdlpS3pZsyhCyaRpDVaaUOxOxaapRmK0i90TA+bliciah54C7e+3wkhuzLZeveJBzp6UobauxXb/POb/vBB
+F8mMZ8XNJR246jO4TQzor+74JGD+naDvNZpdRX/qi08ZI+CiRRBaA+qgjvbOLfFJGSbFu5/8O9x9KZzXrdUE7y0mXIQvPYV5fk6YdO581XF1lZOdGSgEhJVOAPdydbiw0DoWKAPEnOI+89wmLxL4Bhcap3iOVnqe8ZW7z3fQm2oPmtbZSn073+C2tXSwHBVIWp0ha5lnF0U3xjG4OM1ZVzsqXHKqrBLtLoNnPllSBqyu4ibSL1C39s+/jJlzqaCcu9r/ZebqHuv+ecs4ndPTpBMNi5k+TT1SBUg42nlbg7cbwfnKIUkTWYKMtrc9Suk6cFQ50KiM4DKt6gPi0Rac2Ep6pncFsNaw0l1cSmjrPj1DTjsPjDbd9WG+RlczTKh54QzfjEFw4S+rIQKuVE6VqcMANeZVi1hJ9dgfAps35TfMyp90R1gRmx1ArLRxADtuIGzc7le+cNcecWZ8Xf22SPFEjLbURoK2Uo9mlUOmIW5EwRB+OBMEf2WmHCsUway1FakMx0gZKVbAz+3PSpU6KNw9AxotZycJnlczTGps2Us9pwGQ4TWoOMk9EmG8pu5RlVcR+KtuEziqazsQCpXnScpjtIgh0W6Xs1gZd670sCWHj3o/tO9OHb3TnyN/L2D1KCUmMNhHeL+XBusW8YlbUTLKGre9Lt5IN25REELlcX8Uirp1TJMpSmCYqurluI/syEMMauzZj5wFOFsWmzSi7tA9le9d3oJ/YNQlNZ7BWMFxByl3Wh1w8niDZCgkkBFyFuP0zn623emfGG6cT3siFTLZYVNxYbDgqdjHhIfHze5bW8bBuOk2VZQMwvc90HWBEcBKucDuvmNQpZZ5ykmyYD5R/g+Ug2UWaiqpLeHuX4jYDRVorcbuHOoZaQmrLjWyuuzolNR0vLVbMU5lzR9mOdq73QL65z6Ka+AyToJimqovXLm3KB7sjfudU+OEu3zvCKSNKs6/J+WM3H3K7WJGqLo4j+OwtFwwUi3U6GoIAmzbjG6d3uNKHRBCfA6c1Jg14Pe3DzQNAayfZesan+p+eLbhMp2R5K1QZiBJ9Z7pEKcdbHkvTuAyXatpwCE87jo/XLDw1R5DCNLHWYa5b8kSgETdyAWrtujQal4E1v+kMb69PYi3Ay91Eiv2u08gorj3OMfV7TObDUAFbCmDTjN1kik3pQ19Fx/qw4taR7M13pmsOjnbkuoug/c4pNm2+ZwSu65zHqxnrxz7beZIAmsEQcatY89PFBVMf90tVN6AM8cqZM7H+pol7vY3fB/bq+W1tHn83DFleNFMeVQs+XB9yP1QbqAqMhysE+gU37bh5uObOVKAEx9mO3LRMdN1n/V1TdBtreL884mvzeywfeD6ExPK5Gxe8PLuKRvOj3YI3H9xiq+RmYuAYTD5Isun0HvYO6LPQBvLUUJZ12MwrZ2eKZJuBpU9Y8mDsPY5DDWXSQwmu5o7uqGV6suWlQ+mDO5MVXzh4HI1Ei2LdipMj8EDW3oHhIO6PnVWemsT3d6sFA2V1VNCdVViT0ARmcg9tAB3xXF3rIi9UT2PwKVKgdA3Fu96TdJTQ4otVDlLmrwNutX9PD4ONYYACAHGrIxdS/Erm+krdSNqs2mqW50cs9ZH8TjswA0VEOSIBWNB4O7V33YCfUFmHCRxAeUOetpiBxt20hs6pWAV+kVcCwnYDrqKdHMgBL9FlRK9JwJo448A4KSAZFCGPs+gBp9KHqjV0nsTRZVZi6o3qY8M+JhzjyQFItzHg+zKA6+I4pCET8lqs3ILZ9SUr3DLhSufEutDBCg99eq3/AOFQCpkm4TseOWk7xcbzfOHApB3Gj0NTJ9JP7aBQctfjT8JvnBbuFRU2aS3fb2eO4KTQrZJCrN77UV2m7GYdyazpywakjm7iJPMr4BoCi7OvS7O+nPLmphCjbciKO3zdieKPVeDBwSq1uEYLx9dwvxwSwwVchILt4MAZVpwXnhNRwLui3/V0ovZwFJIdo1AeO9VcJHyYzP
hgkIEVnlvqoHjjIO9AEZM5ukLhNvuLVTmP9fGT4Dw94OJsgTKWAd8fzgkerPBp184pbJmQ7gLWxXtOsh50q5wYFtXKM8ZrB6XhgsN+fQYPUGrj2kyzljTpSAaKp1Ywy2pemV8CQmZ6km1iSvlFLgSHplZYj9d7Z3XMts24kW9YeMUv4BDD4VnZhF2XsOtSWhc8R6lQWaQ9CatNlZDsBoUioSeEHfS/M0RDplulNCahsTkbLcrC+WLOydFavOVhM+gUulQo71mxtea8PWC1KSLmyTY6enmB6DFTymH8eCdpJyVJqqRn4g5bwJA7yQp2J+wP3cyJJ3TgzVD+e8EAjBihtgcQ287QVpOYsftgfsjBwY5ZXkfP90FWihe961P7t41Pcd/D7ch8r31iyNdOX+bb6S1OClEOX5lecpxumeo6KkeiULuoBMt4ppQ2jd7iyiZ06Ghog3juHlVzzkoZk6pN6JyiavosYtWJIaob4acDwf09eHzIaSYREmMkO/RwUkaPeKLFKx243xZJxTypOZltWWahGrridDtFKRezFQvTkmYt0a/u55JNiVlodpBp3nN1PUOBUtcKIXf9xqM7qCYuJijIm+F3Hw0gUqVm92DOW6fiPX13ccLRwZbPH0uppM9NT/ny7IO9NbbuCrY2o7QpG28QLNucZT2JkY1dk7JrEuo2idGNtklA9bx51khihbJqwMPYe9JjFt6Tztr9NrjvQVX+gxSl1GNgA5x+Yjf9dMlNXty2w9j+sf1j+8f2v5jyIrcdfne3/zPOuVtP++ATVaAAlFK/6pz72U/0pp8SeZHbDmP7x/aP7R/b/2K2/0VuO/zwtn/MwhtllFFGGWWUUUb5mDIqUKOMMsooo4wyyigfU56HAvWXnsM9Py3yIrcdxvaP7X+xZWz/iysvctvhh7T9nzgGapRRRhlllFFGGeV3u4whvFFGGWWUUUYZZZSPKaMCNcooo4wyyiijjPIx5RNToJRSP6+UekMp9R2l1C98Uvd9nqKUelsp9VtKqa8qpX7Vv3eilPr7Sqlv+3+Pn/dz/qBEKfWXlVKPlFJfH7z3zPYqpf6Cnw9vKKX+e8/nqX9w8oz2/3tKqQ/8HPiqUupfGHz2Q9N+pdSrSqn/Uin1DaXUbyul/k3//gsx/h/R/hdl/Aul1K8opb7m2/+/9e+/KOP/rPa/EOMPoJQySqnfUEr9bf/6h3/snXP/1P9Dahi8CXwOyICvAT/+Sdz7ef4HvA3cvPbe/x74Bf/3LwD/u+f9nD/A9v4h4GeAr3+v9gI/7udBDnzWzw/zvNvwT6H9/x7wv3rKd3+o2g/cBX7G/70AvuXb+EKM/0e0/0UZfwXM/d8p8BXg979A4/+s9r8Q4+/b9L8A/lPgb/vXP/Rj/0l5oH4O+I5z7i3nXA38NeBPfEL3/rTJnwD+iv/7rwD/g+f3KD9Ycc79I+D82tvPau+fAP6ac65yzn0X+A4yT37XyjPa/yz5oWq/c+6+c+7X/d8r4BvAPV6Q8f+I9j9Lftja75xza/8y9f85Xpzxf1b7nyU/VO1XSr0C/PeB/3Dw9g/92H9SCtQ94L3B6/f56M3lh0Uc8PeUUr+mlPqz/r07zrn7IJsucPu5Pd0nI89q74s0J/68Uuo3fYgvuLF/aNuvlHod+GnECn/hxv9a++EFGX8fwvkq8Aj4+865F2r8n9F+eDHG//8E/K9hUHH5BRj7T0qBelpJ4xeBP+EPOud+BvjngT+nlPpDz/uBPkXyosyJ/yvweeCngPvA/9G//0PZfqXUHPjPgX/LObf8qK8+5b0fxva/MOPvnOuccz8FvAL8nFLqJz7i6y9K+3/ox18p9S8Cj5xzv/b9/uQp7/2ubPsnpUC9D7w6eP0K8OEndO/nJs65D/2/j4C/gbgpHyql7gL4fx89vyf8RORZ7X0h5oRz7qHfWC3wf6d3Vf/QtV8plSLKw191zv11//YLM/5Pa/+LNP5BnHOXwD8Efp4XaPyDDNv/go
z/HwT+uFLqbQSe888ppf6fvABj/0kpUP8Y+KJS6rNKqQz4U8Df+oTu/VxEKTVTSi3C38B/F/g60u4/7b/2p4G/+Xye8BOTZ7X3bwF/SimVK6U+C3wR+JXn8Hz/VCVsIF7+ZWQOwA9Z+5VSCviPgG845/79wUcvxPg/q/0v0PjfUkod+b8nwB8FvsmLM/5Pbf+LMP7Oub/gnHvFOfc6crb//5xz/xNegLFPPombOOdapdSfB/4ukpH3l51zv/1J3Ps5yh3gb8i+SgL8p865v6OU+sfALyql/gzwLvAnn+Mz/kBFKfWfAX8YuKmUeh/43wB/kae01zn320qpXwR+B2iBP+ec657Lg/+A5Bnt/8NKqZ9CXNRvA/9z+KFs/x8E/jXgtzwOBODf4cUZ/2e1/199Qcb/LvBXlFIGMcx/0Tn3t5VSv8SLMf7Pav9/8oKM/9Pkh37tj6VcRhlllFFGGWWUUT6mjEzko4wyyiijjDLKKB9TRgVqlFFGGWWUUUYZ5WPKqECNMsooo4wyyiijfEz5REDko3y65KZ6ydXU8kJA7vvEHOqJd+L3+J7fe+KPp7xUT2ECUU/92fd/TXAfdY1n/OZZ93VP/e5HXSe8r55OaPKR7bp233+Ce7vv8fn389k/cZuvffZE+z/qd9/Hd9w/6TN95Hfc9/799/2Ze+KzZ/5UDf98cqY8bZkNZvYzrt9f5/rvh/e4voye+ln4Wz3rs2fdy31f33vyntd+98R93Uc/Q7jGU+6hnvn9j3/f69d8+t9ur4/V4BvP/t317/Tv/NpvVn/XOffzjPKplVGBegGlpub36T+G0gqUOCGHf6MVKIXS/rVSsPe3X+pKx+8+9Xtq8Nne93S8xkd9zyklPlJ17bvDzwbvu3gN9r+nBq/3Phtee/+a8XeD78WDXBGfXz579vf6v9X+d/WTvxte7+nXuH6/68/4rO895e+nfcb3d43rn33U8z7xmu/jOfau7T7yXvKf63/H077n9u41/E3flv61uv67+Oz9a6We/Hv4u3Agy9QcXrs/ZNW17+m9126wJPr3tVcW9OB71//WPPmZvv433+9ntv/72vtmcK/h9wxu/7VyaE9QrZXDDP9WNl7DKItWdu8aZnB985RrhN8b/zuNPJdcw+79bvgcZng9//t4L2y8nvFtjtcY9IHBDZ7Pf+bH1igwfqQ1YJRC+9eGwd9Kof2vNAqjdHxt7n77JqN8qmUM4Y0yyiijjDLKKKN8TBkVqFFGGWWUUUYZZZSPKaMCNcooo4wyyiijjPIxZVSgRhlllFFGGWWUUT6mjArUKKOMMsooo4wyyseUUYEaZZRRRhlllFFG+ZgyKlCjjDLKKKOMMsooH1NGBWqUUUYZZZRRRhnlY8qoQI0yyiijjDLKKKN8TBkVqFFGGWWUUUYZZZSPKaMCNcooo4wyyiijjPIxZVSgRhlllFFGGWWUUT6mjArUKKOMMsooo4wyyseUUYEaZZRRRhlllFFG+ZgyKlCjjDLKKKOMMsooH1NGBWqUUUYZZZRRRhnlY4pyzj3vZxjlExal1N8Bbv5TuPRN4PSfwnV/GGTsm2fL2DcfLWP/PFt+mPvm1Dn388/7IUZ5towK1Cg/MFFK/apz7mef93N8GmXsm2fL2DcfLWP/PFvGvhnlecoYwhtllFFGGWWUUUb5mDIqUKOMMsooo4wyyigfU0YFapQfpPyl5/0An2IZ++bZMvbNR8vYP8+WsW9GeW4yYqBGGWWUUUYZZZRRPqaMHqhRRhlllFFGGWWUjymjAjXKD1SUUv8HpdQ3lVK/qZT6G0qpo+f9TJ8WUUr9SaXUbyulrFJqzBwClFI/r5R6Qyn1HaXULzzv5/m0iFLqLyulHimlvv68n+XTJkqpV5VS/6VS6ht+Pf2bz/uZRnkxZVSgRvlBy98HfsI595PAt4C/8Jyf59MkXwf+FeAfPe8H+TSIUsoA/xfgnwd+HPhXlVI//nyf6lMj/w9g5AB6urTA/9I592
PA7wf+3DhvRnkeMipQo/xAxTn395xzrX/5y8Arz/N5Pk3inPuGc+6N5/0cnyL5OeA7zrm3nHM18NeAP/Gcn+lTIc65fwScP+/n+DSKc+6+c+7X/d8r4BvAvef7VKO8iDIqUKP805R/HfgvnvdDjPKplXvAe4PX7zMehKN8DFFKvQ78NPCV5/woo7yAkjzvBxjld58opf4B8NJTPvp3nXN/03/n30Vc7X/1k3y25y3fT9+MEkU95b0xLXiU70uUUnPgPwf+Lefc8nk/zygvnowK1CgfW5xzf/SjPldK/WngXwT+iHvBeDK+V9+MsifvA68OXr8CfPicnmWU30WilEoR5emvOuf++vN+nlFeTBlDeKP8QEUp9fPAvw38cefc9nk/zyifavnHwBeVUp9VSmXAnwL+1nN+plE+5aKUUsB/BHzDOffvP+/nGeXFlVGBGuUHLf9nYAH8faXUV5VS/7fn/UCfFlFK/ctKqfeBPwD8f5RSf/d5P9PzFJ9s8OeBv4sAgX/ROffbz/epPh2ilPrPgF8CvqSUel8p9Wee9zN9iuQPAv8a8M/5PearSql/4Xk/1CgvnoxM5KOMMsooo4wyyigfU0YP1CijjDLKKKOMMsrHlFGBGmWUUUYZZZRRRvmYMipQo4wyyiijjDLKKB9TRgVqlFFGGWWUUUYZ5WPKqECNMsooo4wyyiijfEwZFahRRhlllFFGGWWUjymjAjXKKKOMMsooo4zyMWVUoEYZZZRRRhlllFE+pvz/AXm70Fj/6FOYAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "mel_after = tf.reshape(mel_after, [-1, 80]).numpy()\n", + "fig = plt.figure(figsize=(10, 8))\n", + "ax1 = fig.add_subplot(311)\n", + "ax1.set_title(f'Predicted Mel-after-Spectrogram')\n", + "im = ax1.imshow(np.rot90(mel_after), aspect='auto', interpolation='none')\n", + "fig.colorbar(mappable=im, shrink=0.65, orientation='horizontal', ax=ax1)\n", + "plt.show()\n", + "plt.close()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Let inference other input to check dynamic shape" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"The Commission further recommends that the Secret Service coordinate its planning as closely as possible with all of the Federal agencies from which it receives information.\"\n", + "input_ids = processor.text_to_sequence(input_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "mel_before, mel_after, duration_outputs = fastspeech.inference(\n", + " input_ids=tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0),\n", + " speaker_ids=tf.convert_to_tensor([0], dtype=tf.int32),\n", + " speed_ratios=tf.convert_to_tensor([1.0], dtype=tf.float32),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlAAAACuCAYAAAD55TMFAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9ebB925bXBX7GnHOttfc+5/x+v3vffS97MiWRLhFFRaA0FUqxwSIgjFKkqhTs0IqyK8suDKOqLEUxqmyDqkKsorDBhgpBtJRSlEIFxQawh0wFs30v33v33l9zzt57NXPOUX+MMdda53fve/clmd6U9MyIE6fbe+3VzDnmGN/xHd8hqsrTeBpP42k8jafxNJ7G0/jaR/ixPoGn8TSextN4Gk/jaTyNP9bGkwP1NJ7G03gaT+NpPI2n8cMcTw7U03gaT+NpPI2n8TSexg9zPDlQT+NpPI2n8TSextN4Gj/M8eRAPY2n8TSextN4Gk/jafwwx5MD9TSextN4Gk/jaTyNp/HDHE8O1NN4Gv8DHiLyG0Xk7/Ofv1NEvutT+lwVkZ/03+Pxv80/I/1Rvv+niMgfEJF7Efkbf7TP72k8jafxND5pPDlQT+Np/AiHiHyPiFxF5EFEvigi/y8Ruf3R/hxV/fdU9ad8DefzK0Xkd/9of/7u+L/LnZ8/8a2//8v+95//39dn78bfDvwuVb1T1X/cn8Gf86P5ASLyV4nIH3In7Ysi8q+JyN2P5me89Xk/IqfyaTyNp/HpjicH6mk8jR+d8YtV9Rb4k4GfDfzdb7/gx9nG+N3AX95+EZHPAD8X+PKn9PnfCvxXPxoHEhvhrb/9WcDfD/xyVb0Dfhrwm380Pu9HMn4059DHXffTeBpP42sfT4vnaTyNH8Whqj8I/HbgZ8CaCvvfiMh/A/w3/rf/mYj8pyLySkT+fRH5me39IvKzROT3O+rxLwKH3f9+voj8wO73bxGR3yIiXxaRD0Tk14rITwN+HfDzHBF75a8dROT/IiLf52jKrxOR4+5Yf5uIfEFEPi8if+XXcKm/CfhlIhL9918O/FZg3h0ziMjfKSJ/2M/vN4vIu1/LfRSRbxeR3+nve19EfpOIvPD//U7gFwC/1q/xnwd+AvCv+u9/u7/u5/r9fSUi/9keGXMU7VeLyO8BLsBPfOsUfjbwH6jqHwBQ1Q9V9Z9S1Xt//2/0e/g7/Fn9OyLyrbvj/1T/34ci8l0i8pfs/ncUkX9IRL5XRF6LyO/2Z/Hv+kte+XX8PEcTf4+I/CMi8iHwfxSR5yLyT/tz/14R+bubIyQi0Y/9voj8dyLy1+9RrY+7bhH5K0TkD/p1/BER+Wt35/rzReQHRORvF5Ev+Rz5pSLyi0Tku/36/q6v5Zk+jafx426o6tPX09fT14/gC/ge4M/xn78FQ0b+Xv9dgd8BvAscMYTqS8DPASLwK/z9A9AD3wv8b4EO+J8DC/D3+bF+PvAD/nME/jPgHwFuMEfrz/D//Urgd791jv8o8K/4edwB/yrwD/j//nzgi5jTdwP8c37eP+krXO/vAv5q4N8E/gL/238E/DzgB4Cf73/7m4HfC3yzX98/Afzz/r9v889IX+EzfhLwC/19n8Wci3/07XP4uGfgv38T8AHwi7BA8Rf675/dvf/7gO8AEtC99fnfCVyBvwf404Hhrf//RuAe+DP9HP+xds/9Hn4/8Ff4sf9k4H3gO/z//1f//G/y5/g/8WN85J74s8zA3+DHOgL/NPDb/Dl+G4YG/lX++r8O+K/9nr8D/Fv7Y37cdQN/IfDtgAB/FuZY/cm7OZeB/72/9q/BUMZ/zj//O4AR+Ik/1uvw6evp69P++jE/gaevp68/1r98834AXmEO0P8NOPr/FPif7l77f8edq93fvss3rj8T+Dwgu//9+3y8A/XzfCP7iAPCWw6Ub4xn4Nt3f/t5wH/nP/8G4Nfs/veT+docqP8V8M8DPwX4bv/f3oH6g8CfvXvfN2AOYfo4Z+E
T7vEvBf7A2+fw1jPYO1B/B/DPvHWMfwP4Fbv3/58+4TP/AszRfOXP9x8Gov/vNwL/wu61t0DBHOhfBvx7bx3rnwD+D5gzdwX+xI/5vI/cE3+W37f7PQIT8NN3f/trMT4YwO8E/trd//4cPupAfdJ1/8vA37Sbc9fddd/58X7O7vW/D/ilP9br8Onr6evT/vrxxMl4Gk/jx3L8UlX9t77C/75/9/O3Ar9CRP6G3d964BuxjekHVXXf4ft7v8IxvwX4XlXNX8O5fRY4Ab9PRNrfBNuM8c/+fV/DZ749fgvwD2HIzj/zMf//VuC3ikjd/a0AX7d/kYh8J5b2BLum7xCRzwH/OIYE3WGOx8uv8bzaZ//FIvKLd3/rgP/f7vf1uYjIw+7vP11Vv09Vfzvw2z099guA/zfm7P4Tb79fVR88xfaN/tk/p6VPfSTsHr2HoYV/+IdxLfv58x4bUtnG92JoFv75+9fvf/7Yv4nIX4A5dz8Zu88n4L/YveQDVS3+89W/f3H3/yvmQD6Np/E/qvHkQD2Np/Hf/9g7RN8P/GpV/dVvv0iMuPxNIiI7J+on8PGb7fcDP0FE0sc4UfrW7+9jm9x3qHG03h5fwByyNn7CV76U3YeoXkTktwP/aywF9HHn+Feq6u95+x8i8m274/x7fHQD/gew6/iZqvqBiPxS4Nd+tdP5mM/+Z1T1r/la3qNWAPDxL1KtwL/t3KufsfvXes/Eqi7fxRDE7wf+HVX9hW8fy52xEbtf/9knXMPH/f19DMX7VixVB/a82nP9Apa++8g5ftzxRGQA/iWsIOC3qeoiIv8y5mA/jafxNL7KeCKRP42n8emOfxL460Tk54iNGxH5C8XK4/8DjG/yN4pIEpG/CPjTvsJx/iNss/w1foyDiPzp/r8vAt8sIj2sDsA/CfwjjuwgIt8kIn+ev/43A79SRH66iJwwNOJrHX8X8Gep6vd8zP9+HfCrG7laRD4rIr/kazzuHZ4WFZFvAv62T3j9F3lMBP9ngV8sIn+eE6sPToj+5q/w/kdDRH6JiPylIvKOP6c/DUuz/t7dy36RiPwZfp//XuA/VNXvB/4/wE8Wkb9MRDr/+tki8tP8WfwG4B8WkW/0c/t57sh8Gah8lNC+DkeCfjN2X+/83v4tfr34//4mf74vsFTmVxs9xr/6MpAdjfpzv5Z79DSexv/Yx5MD9TSexqc4VPU/wYi4vxZLSf23GM8FVZ2Bv8h/f4lxaX7LVzhOAX4xRrb+Pox79Mv8378TI7L/kIi873/7O/yzfq+IvMHIxT/Fj/XbMZL57/TX/M4fxvV8XlW/kubUP4YR1/9NEbnHnI+f8zUe+u/ByNevgX+Nr3AfduMfAP5usYq7v9UdmV+COXhfxlChv42v3ea9xJ7TfwO8wRyU/7Oq/qbda/45zNn8EPhTgP8lgFql3p8L/KUYIvVDwD+IOSoAfyuWIvuP/b3/IBBU9QL8auD3+HX83K9wbn8Dxmn7I8Dv9vP4Df6/fxIj9//nwB8A/nXMKS8fPcx6rn8j5ni9BP4X2DN7Gk/jaXzCkMd0i6fxNJ7G03ganzRE5DdihP6P6H39D2k4ovTrVPVbP/HFT+NpPI0f1nhCoJ7G03gaT+PHyRDTmPpFngL+Jgwh+60/1uf1NJ7Gj8fx5EA9jafxNJ7Gj58hWPrzJZbC+4OYhtPTeBpP40d5/IhSeCLy52M8hwj8P1T11/xondjTeBpP42k8jafxNJ7G/1DHH7UDJdbC4bsxhd8fwAiRv1xV/+uv+san8TSextN4Gk/jaTyNP8bHjySF96cB/62q/hGvHvoXsKqXp/E0nsbTeBpP42k8jR/X40cipPlNPFa0/QE+oUQ5nW40vXh3lWhLFwi5IkUZ34sQFQmKKlCEMAtSQQpIhTJA7dXeryBJ6VOmD4UuWJVu0UDWwFwiy5RABaq9X9R+1Yi7jh9F32QRQoZQIMx1e4mAiqBJkKqgUAZBCoSiyKKAIvtDqoIIGmQ
9Zxzxa68rQ6QMdo1xVsJSH783+vv99dIQQz+W6PZagJoCyzOBQ+G2mxFRqgbOc48WgWL3FLVrpEKoINmPVZXlNqDB7hkC9aC8OF7oQyFS148XFBFQFV7lI3OJ1BrW02cJfkwImfVz7VnYddRo9yaOldoH9L3MMS4MIdNJ9uPDfTky5kTO0ZRy1K/Dn2tY/Lh1uz+ioAGWG+H22ZXOz18RZo28uR7XY/ll2dT0+RJnkLK7x+tzhUcyg7p9rkp7Trt/x+05qth7pWLPur0vhm2eRXtdWGyuLzfC6flIJ4WK2HmX7foJoN123WG0/2lkdxJ+nrvf46xItveVQ6AmP7diz8vmk91DaPcX6ovCTTdzitY3eNHIh9cTVLHnUny6xu1mSd6eyUeG/60OSjdk7rqJSCX4ixXhPg9MOVFLgCLbsdp11e3Y6vc/ZLcZx8qLw5UganN1OqJLQMruWQrbXJpZn/nb51ujEN5bOMaFTgriJ39fDkxL2taY+qGrrTPJ27y3daDrubc5QgANm32pndsXt5EfuW27Obheu3z874hsNqwTaoDqogpS2J6929sagfTWnF8Pbr+HeXumodhaoa3t+jG2sJ3Hx1zHdr5CHWwu1o51vazngD3XOMPyXAnRFm6d43o/42L3OyxKPgr1ZPYrSd3ZLLjUnikncptTfjJ7uyLF1lG8WzjETBfsmWeNzDUyLp1dWt1dV7E9REqzS7rZpXYpQdAAmoQabY3F2Z5D/2ImSqXUwDh1Np/Kbo63/aza3EDtOMuNoEPldpjopBL9q83RNsbacS0dc05oO+91LtpabedOgHyE02mkC3a8ooGlRsacqFNc12E7J8l2njXZ+ts/PymQRt9vitpc2c0NTWGbL2J2UMNu/mLzog5K6CpDym9dn9h5LQFp9620/Wc3P9tb3tqPH81VH2/GH3pfVT/Lx4wfiQP1cUq1H1nlIvKrgF8FkJ6/w7f/5X8LYbITfue7F/pXE2HMfNdfeYfeZY53E8ucyG96br4n0b9W+jdKd1HefFtk/Iy6IQD59jPf/rn3+bbbD/lsfw/ApfS8P9/yfQ/v8Ee+53PIFJFJ6B6EeBVqD8utUoe64W8CKopU4fCFyPF95fBSOby/EEpFm3PSB6bnkbgoYVHe/IRE9wCHl4Xhw8knSH20WDQI2kUzjGVzyEQVyZXzT7jhzbdEhlfK7RcW+g9He0FRCFBue/Ih0r1ZVqdDwzbpQ65Q62qYxs8d+YFfkLj541/xnd/0RxjCwpt85Pd+/ls5vznAQ0e8D6RROLwP6aKkqzK8KoSixEvmh37uieUOjl+yxfPwU2f+op/1+/n64TWnYJtm1UCQapu6Cv/fL/8MfvD+OW/OB0oJlCmSvtgTZwizcPyyEifbtOOkxNkm8fhOpCZ48UdGHr5poPsVX+Rnvvt5fuLxy3xz/wERpSD8jpc/g+969Tk+//4LyhRhCsSHSJwhXoTTl5TubMcOvtnEUcmnwA/93MCf9wt+P99y+JDbOLJo5Aend/ht3/UzWS4dLIFwac6e0J2FdIHbH6h050qc7Fk1B02yO5Ep+HNUZCkQZJ0rbR5oEMpNRxmiGcwo1CSErPQvZ3t+wPTOYI54Uebnifk2cHw/o0H40p+a+Fm/8A/yTcdXTDXxr3/Xz6C+6on3gTgJ+aToN4xIUOocOX73QLraJhgWu+fdGUL2zRvb7IaXmXTOaBTO33wgD4JGGN5UuvuCBmF6EclHccNna+/6y1/xp3799/Mn3X0fAC/zDf/sH/rZTOcepkh8MAe83hRbYxW6DxNxMke1O/vGkn0dFAtGXv+MhW/9477ML/i67+YujjyPF6JUxtrx7778yfyhDz7H6zcn9HUPBeIYCDPESejeYM/eHbW4QHepfPhTI+VPeOCX/uT/nFOYWTTyW//wz+Ty5RvSq0ichDrYOcRRCIs99zgpadTVmWmOwvQicvyrP89Pe/5Fvn54TScWuP2HL7+N737/c1zeHJBXHXG
0eZDuhcOHZk/CooSspGtFsgdL7qTVPlC7QE3u/Apcvi4xvKkcPpiJl/zYsH+MI7LanEcOiTnnGm1zqkm4frZjvhPuv81sX/dGmN41m9i9DgwvhfGzyvLZxQ5SBVkCFJAq5qwXuPvDkXRV4mxzJk52XXGqhLn4huWOVDZ7uwYKbsP266X9fXnWMz2PPHxjpPZtA/VrSdC9gdvPF37ol8y8++JMLoFXX3hGGANUOH0hcHhfuflS5v0/oWP6mRd+2U//fbybznRSCFKpGvjPH76Z73r1Ob746o75vocckCykN2GdC2mE+Tl883d+P9/x4gt8rrsnSuX95Zbvv77Df/nFb2AcO8q5891diA+B/lVgeAWnL1XSpZKu5VGglY/RnJND4Ppe4PJ1wt33KdfPCd/xS/4Qd2ni5Xzk9/2330p43dG9sf0rThAn6M7NbmfiVJne7fjSnxLQn3jhO/+4P8w3HF7zPF14N55Xp6eNPzx+jv/yzTfyPS/f4eHNEc0BZrt3UoTulc2B45eVfIIP/6TCd/5Jf4hvOLzhLo7clwNfmu74L9//Bj74nneQWYijkK5CvMLpSzbXx3cDD9+qVA/uwmz78Lv/daF/nUnXQph8XnsguDw/EKeCLAWNgTpEampOtTlRr78t8eanZt75xtf81M98iSFmkhu2rIH/6oOv58tfekZ41dG/DnZOX7T7laZKvFa3hYosdXNw21ytdQ2oCfA7/pO/5yu2tvqROFA/wOM2Ad+MicY9Gqr664FfD3D4pm9RdG/odHMwkhJSJcZKDls4qSIWjXg0NHxok6h2MP0k5RAX+pBXQ9ZJsd9DQZKiWaGD2pthqp2inaL76ApscaNowh5YUovKa9iMXDIkoapFoqUXUlA0gnaOvBDM+WlDhNoFj+4sulQR875jsEjPvewahXKwRyLVPPAyxPX9FEVTsHvmnrqm7fxUIB8Ddagc+4VjnOmk2PdYCF2lRLv2Gu3zNFqEpUmoAtJH8/A73Yy0QCFQNTDVjoKsDtRBDKpIjgDWEqg5mDGiXbMbP7HPlMS6kZfBnmU+JkovnGIhhc3IRSlE1BCpWAih2nPIgkb1jQFKJ4hH1JobglPsWfbKMc6cwsxNmCgaeJ6uDIeFWgMlKFoE9eipdna85ShICWhQwuLPrCohNuNvN0ciSL8ZKKmK+rzRFMgng4LWqKuaI6FdoIohD+UY0EUJc7XPPkG6jaCbAWrzO8RizzHi90AJQZFQ0SSUg3+GgrjB7e8rYXFD0dBQWA0UCsO9GZN4rcS5UvpgqGhh3fTzKRBCJfgDjJgTnVJlSZVafL4Hhb5atF9sDZvzKYTZnLE42t9Cgeu7AlEZYuYQFk5h4hAWQzwDHOPCsV+49JmpT1CEWu06dXHEeDHHqTkq64bbGfIY/ZyHLnPpDfEUdUcTR2Eq5INv6mobiqhtCOpR9RAz4a1NyZa6eVnaAGNHT1QEFX3kQIsA0qJtf38xdKR2AY22NvJByEc30w25crvyaLwVNTd7gM+vGt2pdSfNbJk5h20e4ci8nScQdfXGtCiiYjYyKIKt6zhBdzWnCXBbZWhrKHU9Z2nIQtitnbgPOHbrJyvdQ+X4vlC7DbXVaPcjOFoeUzEEQix7ocmupxwg3wjLjaH7XW9zagiGGkYqRQLP0pVnw8jr4cB87tdn0ZCeFUU5KHfdyG2cuIsjnWRDXeMNfcosMVJSNRRKmj11uzaI28CwIo/iiJEGSJdKmIPZ5A5KBzdx5lm6kjUQh0LpI7WPSJNCdaQQFdLB9oM8CLWDlCopFDopHMRQ/PgWjHobJ551I4cu86AYwrXYV1jMGQrZPqcmga7yLNm1twD6mjpuh4n3e/N8NAulV6QIpbeTrJ3bpzbNw/ZVu0CpO2fffYEyWFAa2hzBkbapIllWJIqopFi5SfO6z9keJZy6hTQU8iGSJ5tjy01Ds+xkqiNSIYqvc0fD6s6Z/yjo+5HxI3Gg/mPgjxeRPw7rw/SXYiq2X3X
U9onaYEw3uEkJUQlim8EacUTf0HyC9K+Uw8tKPgQmIIjapoISpdKFwhAyfSxIrBZ9VXfABvv82leDp/ewdLTPrJ1SOzMwtbPFiooh7sHhVn+ItXeYOQg17QxAeBuuNcMpBaRNADDDlnyiJdYo0d60Tagy2H0SMPSj843VnSlRXZ91TYJ2lRS2FMij4YacwOZ4ZHtf8LRBO5/2XVJlrolL7emksGikaDD0KYTVebXTFk/l+O/rc3SHx42SVLtv5SDb802s57xo5FwHDmF5PH/a8WG7j75RaoJSBfX7X/pA6e1578+xEAjYXEMc0hfbIFZnL9k5lWWL5iV6BB4ep/UsVeaLW9XnjJ9vJ+RjMORhMbSqBN/UumAp6yjkgyCdkALko5BvhHxhTcVlDSwaqSqP907d3QfwDdi+DNq36wmOeIRlh44mc6DLIaIRZFTCbE6erulj3KkqlkY/BrpYiaJ2Pvh5VbF0gO7SV4LvRALBAwfMwYkTpLGheqxGsYvl0UYXpBK0Pro+Qrvv6g65B2IRtOw3PzPqh7il9yuCiFMFYluban9PShUohx3C66gLVEPKeqEPheDXv3ietK67hKzBweqIrBuHoI5Kgge57WHu0OXai28yQul8TS7hqxp0ecuB0t3cMPskqzPVzkujbvMgmSPeUmc16fr8VO2e2zNtz9LucZyV7qEQp0oZArXfUmGInbOoP5BgtlPU59ee2rC+TdxBUNKklM2Hs3Q/W/DXdeZABVGkq5Y+VSiDmuN5EMqgHJPtD1UDiz1JswGi9CGT2l4huw2azQ7UTjnEzCnMDGGhl8wQFoaYiUGJsRJSRUtwW6fUpO6osu4lba4rZltaml6j7SWlswzJMVqK+KbOxGhBb40gu6DW0lNK8fvdrvWQCrHtiWLrtKgwa6KqrPY1SCWGzVGQbM5TmDe6hfoeJ31dA9C21xpQUSCZc61RLQh3W65BPTh2x3x11Nte4PdEg2XrVKkxUI62jlY3wWkPGu3ZqEDpQfrCIWWOcXa6h63xRaM9l1TISam9IkA++f1Xc2ZDMmevpe5D2RAoKS2A0g2J+grjj9qBUtUsIn898G9gt+g3qOp/9Ud7vKfxNJ7G03gaT+NpPI0/VsaPBIFCVf91rNfS1/b6htr0FklY5BMsPx6NEJhiIcdgkKyjTzUbslATSDFuQ/PEm0cdpa6cnCFkDnFBolq02qLT6lFXi2JVNug7WBi0eskNJgVWQjKs6bYW8a7Riqd1qLrC1O2aCQ4T1sd/MxRu+9ojCQb7O9FQIMyFcDEelHYR7SKc0hqxrUO24+RqKaKs0ZCh9v/2me3nNRIHreJwshoCc4DYVdLOw19qpDiB7MCGEImopTGCrlGqtvsf9tcqu7RlIyp71K1CrpGlJkoMa4TfnvV6qcFTkQXEo/SSxUnZjoJEiwLxSLsgjLVj1I7X5Uj2dKMWQfLuy5/Lo/u0Ppj2fUub+sXbe9TSNY/e4pFmvBbCUoDOovXYUjntvRvKaSgAhMWONJfE7PCttvRcbSlSdeTPorn24Y0Ma+ktJY1lzfmrCPUQV0QgZE+tz3Wbi35uUjxFowZpJY9cF40s/r1WQUvwlM3uPu3uQTunkDc+1lpQIB/NSoEhO4smFg2UGqgN3WzHl936jjhC7O/1+5hitXlUPRVdA1qFFSjeIQ74nIx+6JXwzTZ/g2xhaUsLJkel7E2sNqbN6+1+Pr637XMFWefUNse274/I1LtL35Beeet/ur1+tV/7CfvWdTe08CsNf42tZ2ydqaViu/uMlErt+g0NLts6UATShi49skX+XZE169Ds5Z5gr2z2oqHZIXjGYmdzqLKi560AIog66mRGp7rtWjSS1Qtfdvel2cIGWtfOUsttFDU6w1Kj3YY9c39vU5tdXW3etjj3BSWr/X+cETb01e/3assw5KQhW7qjYdAyOLvFV/zcDL02RHDRSK6RUneFK3tkMnimJjUgcbOfwc+/ENaU9Zq2jFsGB93mvfp
rpK3T3V5nqLRuz7/dg5ZW8zVU4uP7JX6tdp+2nzsKMbT75j5EsnMqvWWU6mIXHTzL40vP5mrVDRRGPpJNenv8iByoH/YQyEcljkKUxiESJAqSKl1XOHY2UadDTzl6+syNUBl8oqinFWJdb16Q6lyZhU56ktj/SrsBsnOKlC0NtOa+tk2pGRyp6pU6uhrTzalyGLoZHycHPjKA/i/Dob3qKkAlrGTCtbLCN+1HULxXxIhiTtNBvQoheqXWW9fQfnZnb6odQSpT2T1md2z2jlQ775ZWqwnqUFnuhHxUbk/TCpNuKYvNcRvCwl03Wdo0VEIUys4ofMT47+2NtvttG2qplmYBOMjMTbCKlJs0cZNm+mFhBooENAdDWNWg47AAy/as1wqOZByqgyyWEqxwCjMpVkJUavVNoRkSv7R2jL1DYufZKkjafWyOuGx2uF1TaWmOTHp5RaYZWY6EU085RkuXRdZqreBVce2zjaQuzNXue8Arj8Sf9W7Ts9SUzZkwQ3cP/b1yeFXoXs/mvDk/T4KdtyahlkD3UOhfTchUKLe9zdMSiHMljkq6nywNXXsznD46KQySSakyx4qW+HgzcELQun6y8WbSjjezn8MtJd/JnteY6cTS0jHWlZj+KAWvrNU2aVTiXMlDRKOS4pa+jVRStEW3zo82GYNs3CX1KsXCyoEC+4xL7jnnwY4VLGg7xEyfykcMrnzM2ty+/Jhuwe3ZK6ETauNJtqAryqNjNp7Vo+PXzQFrjrG9Xo3fZ5Nkc1xbqmKfptg5Ne3gsp/X7QREHm38suh6Pevz3p3b6jzJx9yTjxmSlf6+UOZA6cWCuW7bZDXYfA/olpL1c1432uZIqwVOQ1iovkdEKs/Tlds0Mfie0xxEc8QVaVzHpCSpdCHT+7zsxCq/Y1BCqJtTvL+0t85j5bo10nIQI93Pti4kG6cvePVcEPX5rptDtnem98f28297Ybezd1WDcQnF5v8pTtx1I4eUCUmpWtFeqG4PrAp9c3ZE7B4uNRGD0oXMbZw4pZnQVQtCPf0r/r42TWxP9cnqAd3HBVjrvKlvzQ8vrIm409nWgb+mPYtG9Vg0rjQgiWpAjFpqtM5bsL46u/CoEnC/H9nk+fj52can60DhXmoC2VVXgKEcQ7dw208EUS59Jnf2OlFDoTQp+UaYn0VKJ8RYVwJ552Rj/AYCFlm0kudiG5SKkfpWAdHVAFv0IlmssmtWwuRs/WI3OHSB5aaRJB8/6Fb5JVUfk8iBLW9tRMNQyupANSmBdYNZZQzsbVIiNQXO3zggOmwRLUbsTddKnGxjFPXIfhHGJXEuPQG1EvDFSjuZg8lDeOlycIe0VdCY4TNWbTko9VgZOiNNXkrPpfaGEGlYS7mHsJCk2hop0T7HS0jDYtUsYdG1pFSKk3KDcwBEDKGZ4bJ0POSehzQwas+BhYNknscrh7TYPWmTX1lLVVfnt25OyCqdABycVxOonMLE83Rh6DLXUM1wOKEWBRm3cw6tfHit1GB1nhrRvznP7EvBvVovVuOvyFyRaicji1efNEcggJS0VXw1vm0jRo/CZem5lo5jXKg+T0PGiJUoIRabZo6mxdGcp+G+kC4FTYHSh5XbZPNRV4fQqkQVKcXmaYGQKjVFJFer4OqicbpK5Fo6ptrRxUIXsj0Tl4MQt0cNKSM7QdXPOU1KHOvmQAlrmX5qa1l0RXeibEjDGvUiW5myE9Mbobk7F5vLzyJS2vn21LAYGqF2n8Jic6S06Nlh5to5ihEtSrVgawt4HuaB1/2BqkIKhWNcyBqMf1eCzfmMFQi0OeXOcZMqEWWVkNCg7kBXmmRJm2Ma8AKYzRGtzqksg2z8PJ87bX09dlLc4RF3xHYBgc0D1nnXuCwafQNS0Lc3EffeygHGdwIaB/rXGU07W7tUrxDePn91qovyCG5c6WMC0TZyQUkPC+lsHNn5eYJTeMRPA7MFDRFto22CwW1+LoGHMhhH1rlBiAVRN2mmbw62guw9khY
g8Rh1PMjC83jhLo1e2LLxxfbXtQ+6W9XXXh4BzNFPVyVdg1f7bkFEpK4SCZvtZJVIkKLObdyqT2FzKjrJRJQFQ3GLozwHcamYtKFqhujbjzXJo8AazDGZNLGUaMUdHtCsz6E0DpWvxUXJpUWxvrcWswFxqdt5t0pUt6VhicaLTXYyca62XrLS5ArCAqpCDJVjXIybtequsFbkad1JvdTds3CnzGyHFwYtmy2U3AwYnzg+VQfKSoUBMSIYzcAAqSscuswhZooGUirMSQlhF70EGD+jlD6AQCe6VhhUFy4qCFNNjCVRs1g12LIR5GrCnKq3PFn7AN+wJiuHT1crp1wr4trm445KM7all/XGy1I2Fv9uaBeN0B6EMGVLpagi5fDYEfMJRVEzJr0wviNML8KjlKEUOH5JOLwUhqyk62zEy7EjTIlx7vhgugHgvPSMlx69JLo3EVlsw1iemfMSr1blEKeKlEo695SD3TfJwnns+aHxGX3IjKWz1KAGDmnh2nVMNXHOPZepp9x3lgYrVtaaLpCutpkHJ2SHxUq5y2AbjQb7/HStvLwOvD/erpvmpet5Nz4QpVJVmMaOMkd0CsSzOYNxEtLVZAuSl5+vTqFrvIy1474e6KRwEyY6KRaZVEOyWvquldrGEfoH9WoxM3TrYtuVaOOFEMBaoi25bjIGKSCHyPxOz/KsWzcyqUo6Z0LeyWREgYORyDW2tFulOwdeXw98MNxwmybylAjXQDrb/VvuNmK5urFFIJ9AY2S6M6mIfUpBFA4fVvr7ika4fiaRjyfSWG0ujMUImwchnzrG9zpqgvlOKHPHy+nEy+5E0UCUyrJEdIzIZCXgNUFJwTb+bHMhzDZvV9TCnUxRc340+31wxAAsYm7k+aUG0wHb6ZmZnhar7ECczQmph83BuM4dH84n2zRiZlo6ZIyki9kFI9JXK5jIVkW03Iof22H/FFay+3nqed0dGUtHH0yH7mEZGOcOHSPpCmES9M5tU2f3vjlNafSAp26OjXiFbR1sDm32BeabQIpb6tMq8wLz7d4Z2qVE8y64023TaJW+Ni9YtbnCAmEKlFbF1ivEnXlu/qOjCeoBw/SuMj+Hyxg4frlbZURaKjgsZQ0+95p4moIfc9ukpFVkR5dz6JpdWuxeuo7achLozTGqNZjuX0tFKaCtytNT0lNgmRMfzifmmkhSGFPHISzrvCoq6ByQtlfMEGe3TYIjwIlLGTiHmVOY6CQ7uiNeQGFrT3WbkyaD4PNyci0vRzcMSBDSw0J3k4hTsNctTmFQo0nkHCGbjYuTIAtrsJBGR3LnSpzt3Nd7gWU6zrXnvh75MN9QEQK6nneuJjfDIsgcLLhfnTNWqRFVuJZ+Pe4QMlNNvJkP1DEho9mieHW7+cYcu8WJ28pOW9ED0zgVmyNzMQfKgzQNPZfPRpoOYXc2x/LwwUIcMzIV4nS0c/UgqwVaVcNaKFLd7oTZnLZ2r9J1u2dSMMmErKu9lqJGKWKHQn2V8ak6UCFDuppuTSvxrH2AGUKovmlWrzTystSWoxTPy37DzOJl5+/FSlFh0chYO6JULrXnTT5wvxzQOa6bYpjcgFeovVp0uB8RmuDjipi486RByHc904vE+RsC6awMb8y4Te+AhsDwMtLdL4RxMU2guhkNVMmfvWN+YVohw5QJ0wK5gNzZZtObUSRXRwGUeuxYbgLXzwnT54pHqh7lFyHkQFgC6RLoXlZkKcRrJV2E68PAl463thktCe470n1g+NAqe5Y7pZzMwahvhDhV4mVGlkJ3fwMaPAoJXO8PfP72OTGYuFtViwX6WLj0hox8MN5wnTrC1Z2RakKp3b3SPyiHl2UVpWvcgnzYZBziYpHY+DDwxeGWpUamknjVn/hsf2LRyMMykB86ZAprtBNnRxJm1/kYlXSpW5ojAlV4mU9MNRFEVz2g89hTzgmZA+nBnbERjl/eaWN5tBTHbJv9TndLQ3gE/2oI5pgXNbRJFUhoEl79xG7duMI
M3YPy7HuF7n5BqrqDI8y9MD83LoGViSvdg/L6fODL/Q3XvkOvie5B6O9tnk7vsGVPXMQxH2F61x0mryRaNxhHHGrXsHbh/lsFlUicIzc/qBw/MH2o8Z3A9Nx0ggwpAh073r/ecEoz195QzuncEx9Mlyo9WCpg8bkaCnQPrNVGl88F5hvh+GEgXasZ0gLkQNbIpQ4EqVxc6XHRyOvlwGXqWcaEjHEtu24Cg2DHz0NgeiaUwZwgFWUaO7483jLEzCnNjGNHfAimrTMbklIONk/CbPZmubHnYbZAiK53pAIP5wMhVA5dpguVIWbOS8802vzv7k1qpRwN0ck3tvHHSUlViefFUxONZAN6SMx3HeO7iTRW4sVQheKVvlK9ek1hvg3M/kwapWE/Wok7NKfNuXQtZak4582eT7pC/8aQOgDt/LQcvdcqK4JAkdUmL1+3EA+ZJQeW24HjlwPDh7Z2UCVcl229ANon6u3A9I5xpbqHTJhsjclsO7V2keVZovQm0BgvsopFtjR/Q3bmOXJZOrNJszkaUmwjTxe3BSOMY+L98ZaHuJBC5dr1DI5YvJyPvLkekGv0zRZzrGdHOqLpjb2czAG/1N63C9MnG+eOZU6bkGcVwhTsHM5Kd/YswXWni+XagiGAzNlszGTIbJjg1XIkSOU+H8hzJExhPac4Y68d7T53ZwvG4xwJS6CUsFaHnuvApfa8zie+PN9SNdAFk/n50nTHa79umS2Q2ASExR00pRygzpEPphNwWuVr5pr40sMt4RyJo9C/EtLZnJPDK6vYzacWoBgNQRzxjWMlXjNhzOtzpyoMiXwMPHyzVfGFvAWzoSjDB0oaM2lUdA5MOfGQB0oMq9bVVBPX3DHP7thdPCh+Y/tQd1H6Nxa4SrZgmFod+FCbq7khkm8hpR8zPt0Uni90Sw0ZkrQSzjwSGEvHZelYlk1FfCMTKncvLtwdJq6LnfpcE7kGppAIqjzkgXMeuCzdljpYbEKEBQoQ8o7Y2Y6NOScNztYI9Wgk7TIEzt/QMX5GuH5OGV6aYV1ulXJU8kk4fOiQ45sJckFyWQ0HMTJ+tufyngs/Tj3hshDyvJaulgPMN4J2wR0w24BLZ+WprTR/5Tg1h7LDOQgFmc1LT5eB6Zx4fTqiCnlJtmE8COkBuIUMawokZCVMmXBdYMnESTeoX4T6quODuxMxVtSfFUAMlevScV56Xl0P5Gx6XgiQ7T6beKYtGsCI4Tf2NT23ja6hryEreoncHw7UapHlQx44l56bOHNZenumeVPaLsOGs4fS0MNK7YT5eWK5FQjK/XLgGjqqBm7SRK6R8WEgPJiYYv9aHMkwUcB0qQwfjJ4aUWQqSK1oCOTPHFlOCQKkSyGeF+J5hk6oIaB9NCQxV2oXmZ5Hzt+slNtq5NdLoH8TOLyKxuWbqyOZZnTy0Z61VAgO8edL4s1wYM4mVJnOFunFWblMgaUpwGcjimqC+d1qz6MzHgXFENk6GZqy3Aj5YI7y9JmKduqieIG42GY6vRCmzyjLOxaOh2tAp8T9OPDl7pZL7glSkftEdx/WCNT0e2TdwOMV8q2tmeXO0pLlEOkeLPIuLruQa+BS+1UqA2CqiQ/GGy6XAT0n0kNY0yBSDUHKN7JKNiy3siogi0KZI6+uR4aUmbpEuSaOZ7H7N8H83NJETUxTigVZ+WQSH4ZE2MGkQr4mLt3AnI1Q36XCde6o10Q3bsedXrjxbaky9ZTEZX5sI0plfuezXN9LjO8Ebn7IUhtp3EjotmG6M38TCHmXHmnpt11hCOialpayIb0qFsTaWrFnk65eDq+yOtsyi+m5tRRS3hzWSoCgpOcLtzcjuQbuL4llTMSrSZNoF8x5WrJdY99RbnrG93rOX2/Oxs0XYXilMGXkOttG1nfUTnxuJtJNtFSvsHKgWoqzLJHr3Jk9mgNh9mc44Yh0JV0ickl8eD3RRyMYN+QQ4P3rLdfLQLr6Br8Ymm0OlBG300X48tUQnNtk6PVdHJl
qYs6RvERwmQmpTgHZnUOcLGUvigXItRK7SDlE5s+cyDfRA3dD6S+5YwgD98tAnSPROY1xdLR1sXUfZyWMhTBn4tQjGUrxIKQMTLXj/eWW++XAB9OGQKVQ+GC8sUBg9D2ybOupBaTdtZIPEebA+9fbFa0D46rePxwJo4tonl2A96KWzg1CHKOjnGYHQmmCvMWcp+uMzIvv7QEwLcB8W116xfafMsBlNDHqvuntFWEukTf5uPKeqgrX0vMw95RLoruYIxtn6C7QXUxyI52XNRiWOUOpSJunquZA7fburzY+dRK56ZuwVsNZWsE25TknHmTgYRyYLqbmG8dtE24VSWBBUC62ybaKsKbvUffE0rpB1Y1DsI5HP2/pRKtCEJaToQfzrSnFTu8o+XkhzJH+AOW2wrOF5RS5fF2HaGJ4P5pqLwU8laeHgftvTIzvmQpzuibSuUem2TR/Ti5AdrBUn3lnBmumyfLj1SeO7FsG+DVZ5GCTIEzZFu7VoGutUJfIMNkCjLNSPBqINAVZc6BknFnLEcQjHYFyL4yXnpjcgfL7FUIll8CcI+PcmW27zRaxLoEaw4oetlu8nALju4HrZ4V8Yzn/7uw6WqrES2Q5dpyxVM516ZhL5L3D2eDppNTgomq7liONRxNng2JrF7l+JjC9ALrKtZhKXUVMU6lG9BrpLnYPuod2HzdHNUx5VR0HezblduDVtx+Yn9vG1r9KnN5PnD4P5ZRYbhK1F0snPizkm8T1vUD+zEwYitUbSGIG5lshTmYYytBQSFsfxt9QQrGNVKbIeO2p1YxC59FtnBQ0rG1K2pyvPeixrJWouvIRGkLBmtKrCerByapiSGg+WGopnyzg2adxdIqMU8fr7sDiKFZ6CHQP5igNr5U8mB4MwqqGvgR75noqLItQ+2jQvzsteFXVpfQWSdae4lWZD9NAHhPxElYkyM7fuBsz4kJ7mDJ7sjSaIWaGNs4pMhdXsb9uKdpzjoSJFc0Uo4CZI+F8x7AYDzPMwGxrKy/RKodTYZ46ZA6WtrmawxMW3zhm1hRzmArMixnsXMBRqOndjvGdwHLr03mxZ9sKX47vz4Sm0CxHQklocHVmHGVcRXHtvtTeyd/tWfu6bimmlqqJ08YPIUOoQkgbArU6UNkDLicHlRJYilf4pkrpzXHOgwkA70c99czvWHrm+jlzTPv7YA500XUD0xgpfWC5sRRsmNXSW5NpftW02fXWhqTZe/XK6rUAqLqu2SScp545FlI0FL2LhaVEPjyfKA+JmLcNvs3X1oYlTMLry9GeU5d40V9XSkGtlvoy29Hul6fAnGuzpmtXjqG6ox+5fDZS+h0hX1g1xapa8FfThrqtXDf/OeQNPWlIbMCqDqfS8/50y8My8Go6rjpQuRolYLl0pCJbMFLEyewtrWtOmkyBh6mn1OC1D0LOkXxNJL/voWyBcphNRVy8IvojXKKG9JRiDrbrw6nbJcmyVU73poE1vWsBV3oeyMctdV3VqsyD2jVXdSQ1h619UuMFlh3NZimrgy9l5+jXiuZiJ6MK6au7SJ+qA6Vii5ukkBxCAJs0VZizlYVOo6Up4mi5y3j1CT2b09T6F7WF08S92s8p2EbVyknX6ogWpbH7WxutpMqdvL3YV+mFcoRyqtDbItcg6FBNzborzM87ugeh9okQvbTfIcB6O3D9OphfmLDn9CxwPCZiiiaAdqjEa+NX6QolynWme6h091aJskfMHlWGVTVnrdaVoB0WYZk95JzD6oiG3BajQFY3GDapWDIEWQ1VuijMxl+Z57hu0M0jsntrz7CUQOoKMRm3qJRA7dMq6lgORmBebgLTO8L42YoeiqVj1EQvbbMRyhTJUbliz3sp5iAUFeRQrEQag9xNAK5tUiYVAEa8nd6B+bmRYV/NZkBUhbmbyBqJlx00PloUXjthOQavSAyIFnuGh0Q5WQR9/20wPzeHo/8wUrtIHAfm54npmSEi3Vk5BDOS43tKd7Ns6F1fqVXIp0S
+WCqwDIZAVe/3KNMuWMiKzIE6RWbgcDWnN11MMZyQSF1hmdNK3gc2GQ/YeEO7PnKt/5kmDKVSoIn69fZ889HbHrVNQkGmwDIlSyeqUKpB7emilrY9V6QGZleEb721NJhT191NlByZo7I8C4Qx0L8RcMX1h2Io8rU5vDVwnbu1ACIsYuXSzqWqQU1cr2XEeqUR9CVDGAPz1JFzIJdAvAbS1eRQ4rUiGgl52zxWWyW4IW+l4rYAZRHKHBCBEgIlR8podAHU0+GT3YPo6drDq0J/vxAvCzItkDNaq6V0DgPji0i2zPm68Yaszt2odO9fzPEqlUOuxgOhp3SyBqU1YUr0vX03Z3wLVFf0xjfIVnARstmp5qyFbHNg7Q9Xt2colbXX3zJGptit9mh1ADpzfjQFpFrRTTl2zLeR5dYQVnuG+6oqXYsxSmc8wOXO0bMF+jd27o+I80GJwT2WZGkxreKq60KrOAxZjKOnQq4W9MWgXOeOy8OAjJFWgSc7aYlWpRkKXMeO4BSGD+cTfchUF4HUspH8W3XzR8ZO6VqjOQPjO5E3P9FSk/1rWYU3g1fgpVCJfaUcCzUGo+22/an97M+syRmkWBm8Iu1Se17NR85Lz3nuKe5wLiVyvQywWPXcvodl2H8tlbhEwgLXqTdeEea45jkio2UcitieIcXS8ZLrygttQ/Fgx5/L9g/1r4AsmTg7YND6IGL30wI5mJ959ija/Ulh2/ubOPLaEaDt9036ogm3AsjHnMd6PnWbjy3N/hXGxz3qp/E0nsbTeBpP42k8jafxVcanm8ILxlnRoRAGExQELGopHh3uSlIb/BZayaE7icbFE1I0OflWtgl4CWiliwVc3r/1Har+WW/rH7Vz0N3f0cbUF7rUSOhCWUmMFk2UHKlFiPvCux2xmBTJzwZLK/SVOhhHoPQBQljTKK2SKEx5g/jVKgHjZBGyRuNpqUdLTWhUk6BdWr3lVvq+TIYOiZdVm2SAR6v9LoUanRDtVWRNaqIMsvaqI7jWSivpVkECxKj0qdCnQtfQpyosJbIMJ4sYKyyzE1R3UW4JoEM1uN9TcNZzTtAcKGIid6UE3veI0criAzIFaxQ5Wnrk8LrQnTNxzNYayHlAmhTNgfcvN1Z1p3DuegNbvGqkIXMtylkyiAo6RLSUNYLON4nl5L2eks3j5bkwjYH5w8T1M8L0jnMEeouc51thfq70sZKXRC2CRJsL+WQ8pBr9Pqe2Piq1BkcCDbmT7GnR4KKX89a0tY1arJomjjtw1yf4qpET8B5kSjlZu4UyCGEwZI9kCMHaYuemwKFADo4uuJDfEpjntKKNnaeIG1enVauKr+O4OHHPA78QFD0WGCrlEMhLArWK0fcn41Zds9mHrIF59gq/yZEi13JpKTwNdQtZo/dry9vry+RNSWsgTJZijV4d1ZCGZgfW7+vPnj6KEIOlGDQHFwgUSlFYWpWep46alMZiaGT/ZiG+mY3rk/OaJpCU0OPgoqmOpF4qYc6E3BOyE4UfLis3I9RKp8pwiIb0RO+F5j08rZ+jnW/jiBXZtOyi83s2eYXNFm4VZFvqRRyO8Sm0pgXJgbzETRdsl6o37lbw1i9mE0NRR3ubhISXjDsPrNnMhsSUg/1uUh2ejk1GLdDAup5z9aq+ZHZ3TWd6JZdks9MAwbWEclVDpZZAqE7UjwDqbVZkRc9rhDImxmAUhjfzgWNcNsHKRqVgu377LuxFlU1PUNEQqUMwNO652+w5ujCmr2W1dFQIlToUNHU0Qc7Wz3I9fhSWU6D2rhsFXIpplZ2XnuvSMeVNTLkUF9yNSj2YdlxY4OMI01biH5jHtD5jzcEqbrPdIzqlDjs08SMH2d2XtxG6xjVaMiJCXIxITxXfp12vLarREqRxox7nBSvbZ5uwqs0Vr0NZq5BrEtt3gx1DQkDVkcePq7r7hEq8T10HKt9U0imTuuI3yPvbLIFFEjeHmcNxJi9xS5W1jT6wbtZgPx/jslZUtBzoejP
b/G4PrRHS6+7+775vmipm8NODQaGhdPSve8pRKC/auQhUIV+sKqi7N5JayDtjUCsEK8sNBeocVpi3QZyNh5BGM7Th4sS6Yjliq1Kh+SzmEDgsWTslTsL0EOmfHwhdhFY5NLFyQtjxpvJJmO+UfOcpm2rl0OX5wcjoU7ZjDzB1TvgflNhXg7AJqxMVQqXrMjfDzJCsI/Z56Vei4dRt5NTGLYmL0r+y9NhYrdFvI0vHpVql5CKow/sSLL2bi2/YZ+PBdA+B45eU7mypmMMHC+nB0iNBE+lSSJdIOQp1DpzHfk2hNd7Gxq/TtUKwdBB989HYnFzf9VWJC/SvhZyDyQd4aioPQj4Jy+1m3NIozHeC3i5bX2axno+aKsttZ5V/nTlvRlxW5FAMPe6jpT7jW/aibXSlrnomeYlGOJ2ENBoXRWIl9QURZRGoIRhsL5Z+zBHmO2sefTjOLIsRZfJdYrmJ5picCt1xoSxx7fXFaLyPWqy3WCmB3snW0RXXaydIE9VUvAQfKGLnujh3pO3KAEvgYRr4INxwzR1jTtsmOadVYyZdrQiCU4PorUH4uogDENQyUBJ2QZGlW1pJ9Vq2jK+rJLgPtHGGPPWlyVObuQUQbOe/T6vrLsWkmGzBVYlvZsJ5RKbZUndNXClGSNEI4/6cWzf6JpcSzws6TmuAJKqEEOjuD0iOzo8M1NzUqm2Trb03PjczZLZsMXmAOHrglndyH26PWrXyoyF7tWgPoDzQUYAsqyOxcZA2Um66nxh6s+cEi2a7s1VjyZR3pN3gqUhWJXHxNKQUC4Q7zEEELNCowT7fN8U1vRVsfcZZmIsgQVYduS4W4sECwqUItcmY+HHFJTWsX6kFyy2Ym0piKlbRW2vL9eG7u67PPjTOTdWPlMSra0TILGi/NdyVCllNZy/XaM5O/miiqBU1qFcD71Nek0uAnHPPXCJLsXskLg1zOFiPvSlW8hypU6RUW+9xsoC3dlCHsN7DmsOjPpRERbvq19o4nFD74DIV8hGbtX5vfCQv0FHfKyUE47zNeLC2e3tLxQXQYve67TNfbWjw9HSE6mk8jeJBQYAm+xBNTuGTqu7eHp++kGavpK6YeutqaBS9JrJvcEMqzIeF2g9eFeIchKTcDDO3/cQl9ARRjnHZGs46mRYsz9s0oN5yVv21/tnNUIisDlZrfRGueZ343bljmsQajsaNP8ISnEDr+kPNaBQjo8mSiZdMerDyVyNvNlG16khCEwJUJ7Nt+dimhq4RtGtEUd8wAixTZL4T5mcdnWBVeJOSvLqiGcmW355vIN8qPFsQUbL0zM8j84uOLgnp1bg6a/WklN5LpbNQsYgNL2te3OiuQodJXZnZfn9wjoomW1xhgf6qW7PKYJo0puNUCHOhO3tFlURba7VCb87yNSjpTaQ7y1qF1j9UuvtCejURZiPmSlG6N5HhVWck/VvnZFVBa6AU+35oithz066JxI5VDiHMxatmICyFdLFzPnwg5KsLOWoj4Roy0wjcdVCrJDpCGApdl+3zFbrOicPDca0WK4M5qnos1phU4lrM0HgrUsT5Fs5dmYpVB2ZzoFaR1Gr3fTgu3Byse/qDwBIiNRgiJz6H8o2pdd/0y1pluQwddYiGZnSVvi8sAlormQQeCJTs9zQH4y5MSryaHIZGIV2MA7cXNEUx52mKm4MPqyjouCReypFxSUzTZkF1tArWNBo3zxpvKzqYMypDRRoMLCBBKSGaFpx/biMd2+a/aRUFd4IbabxVeGpU8E1bj/6ZjVfZHKhmSxwNayKOba3HyYm1lwm5TuhsCFRb4zovyDjTXRoHDy++cGPfVEn37yFbwci4kIDqTmCIjgiItb4qg61jEXZOhRcnOEIWG/K9sFb2pnYNe+ewmcnmKDgqILG6YGpY0caVK+XOk4wzcp0YckXqDXkwWCCOdv8beZcQrFDjaOsmPyu2YQtklyiwkzAZDPHKK8A5fkCQTfxW1RENMZkBAa26SuYc+mUlgpf
ZAsNSTVpn5Qo6cR5HXXMIjDkx10gfCupize311L0+2aYZt2+iTDVxyHRR0oNlJOJkwUfIMJfIXO0z6mRco00I0pHDncq81LoWWZmMgTlfD3ngOnfMObE4UphSJYaFPmVqL4RYKV0lJ6XMgTwbh0yyWHDlpH12iOs6PDjC148pxgcPPFnvQ5tHIct2Ddk4t7r43h0i5LwGhOqOa6sybxkXDTvnFiPcV+fHgqHV9o82F+25tGIZy0Y5cufk9UcZo1WBXC3IqR91XvfjU5cxkCzMk8H1zy6bIrHMVlVymTqD99VQgNLhxDvIp8rX37zhnf7Ky/nIwzJYV2x3oKqa4uwKrbbFvFNpBbfZbztVa+TEytYPrlERRAylUEFSg6/EiLe+qBvC8piYVtF5Jt1PdOcTmrwsevHWELlYyuMceKRu7QZTSwWHkvNJqYe6yQS0snRPy1glhRLOE/2bI/OdISwq5jjFq208y41tAjFVYipMQ6L2kTIYuVayVX3l0YiRku1Sl2CbWVsEliILLFPg9ZSYbhPPTiN3w8QQMymkPd98TduFYvtOnE0nSsXF25aCzJnuXslH1zLSSO2FUoTyPBBCXdMcVs7rZcJjJozzWlGhpRDvhTieVlmKlj7TIohEtJrIZ3dRF49UupOxM7uz0j8UZFoMDYzBBNyKbaJxSrbRXFn1rlqRQ5g91boTetQc1ojX5qkhaqG052ZOpAYM2Rkj8RzorrY+QhTSJZrAIG1TVsLDbFU9zXB7R/UalXyEFzdXbvvZBEg9hZHB2lSIpbVsntpDiqKW5m4VLGKOshWjFJOpcK0kDYJOcUUf4mSpp+4hEx4mUoX+oTeR2Zaa6UCOhdDV1YkFi2BbWijnyLmKVRw6uRcBGb0QYjSJgHwD5VjhUJFULShzB2pNVWgyA1oxxx9QUde4qVbOPGdDFxzxaHSBcqw8EnsUKIuQ1p6XFqDYojBnDq8KXKkHa8l5RebFnKfraGlhWNe5eLVqGQwhXu4icexZbm0jj9NAHyKUeQuuakWmQhCD52OQNXUdkrjsiaxp/nyrqx5Pdx9WtegWpIVsCFwe1Np4tJ1B7J41T3dNT0VFjpn+aMr4yxK24pyWMYjR51BFL1dEhHTsiEu/bmZ1SMjQ2zoLgXI7MD2H+d1C/+5I8mxDzpFaghHB54F8Eyz1JEpLYjci9z49GGcj4svZUM8alZIDS1fI1VDtMjd4Th1tdMTLC1TiKObYVWHJgVfdkUPKvDNctm1EtyDB5GHMeWsIpzkbNsklVxe/VEIJqKc2w2JIl/Xas2wKS0Cmzebuq2ibnINk05qSYk5SrpH7PPBqPPJwGYxmshiCtKRKKYGS7SumapWkh2xp3yGQa0ecTLvMUryCXBtE5nPfP187Zd8GrVU/NkFR8T1qTXOuX77PtbVQClpN/qV2mDzQUb06GFZkGag5QqqriHZRWSsXS7tvKqsTBawFDhvS56igF1+1jI+WlkGyAg++uorBp69E3r0KLNqDwvCy0L+aCXNG8h2azXBqFctNJ6UOeNWCwrPM5w4P3MSJqVpDxNajLVIpEjjFeZOpj7oa5keQdPv97QhrnQT+zL1Eux6Tq/8a5Jsmq4KSqMhQKJct1fiI3e8PQy6TOSI+8VurAyl11QwB0znRweQASBEB5mcd8wuh3OWtUsoRoHgxBdj+TaV7M5NeX5GHK939Ld05ks5hVRzuzkp/rtZ/8CIsl2SVb5NVJPWvM93rETlfXTnczrVF3WCpkK1tim0aZbEo/6ogotZfCWUpcVUID7MJmMXZy3dPpvNSvdKrRY6SK8O9eqWNQ8edpSjunw3GnSk7/sQeCm5aHsWqqlAXgRts8ZZLWiNFFeOSdWfrt5XOVtLa3RpkZrIOPmG6ZNWEp47aB5bbxPxcWG6MLxVHiwLXBrnFpSFcRFSqcLk2/oUd83odmMeO4xuhuzdnyKq3BMFa9/SvA8Ormf7lRO0j3UNnG1wNxNF4MeHhYg524zu5wdc
olKPS+eeVXRTV+uWhAqM9ew1wGQ0VUIV4H+nOhhpOl8jU9YRYjEfi4nSlBru3/rlN1C/dT8g4EYDucrPJboit4f4003WFUXpKZ1G/VtMNAliWSM0deknIFFbDGabgTpoJDk4qkBRJ1bh5ainFqrIeU86RdG+BQPb1ruICrw+V8Pri5wb1VNGuUqpYWuV2V47XeGRTpC42r6u3bAGPbo8W3BiXyeZkTS21G6l3R9tDq8L9vRlq50ARI9OzwPKscQ7tfsw34qmqjuH2hN5XdJo9Ui/rfUVkQ8eSBZ2lF5bbJpir6LuzIZtFmK4HpJgQq0X7slYflpMy6+YobbZx40Ah1mz85tnI86PpQH1pTmgyO1i9DQ47TozmjCwmIFp6IR9AJZm2j0BXCtpF5ucdyzOFu8ztaSSI7WNzsHRZKYElDB4EGu9SRDnHLQ22prg9UA2TybrU6py5LCwpkqdkyOY5rJxQKsRLcM6oyZvE0eyfZKEcAtc08KpfuOnmRw7cWhndbPzcKu8eR+vqtI4yCPmg9tmt3D9AlEofirWYSdVa5LTj1v3nuBOyZOOSCQxdpiK8no68vBxZLj0sYkgqoCEyXpOpj2dhuSkGCsDaDHwNeotzGi+GLK2ZHK9sRGF57ppNC5tI5WWhHpKdm6dUVx9c/LxrfYSqAl4BHJifqx33UJCuBTLbddc+EIfCTTdzjLNLM0TvARg5+N6/OkyNKhS2Y9gFqyFRK5K8R1kCvNUi6OPGJzpQIvItwD8NfL3dOn69qv5jIvIu8C8C3wZ8D/CXqOrLr3osNcVbs+DQ3U+k11cvn78zQzZbKkJm8ZsF2hvZLfbFenCVxJv5yDV3XKr1Zzu4BQhYs8hjWpBjMdRhjujI6jWbkyR727jm9c1pgDIElncO1C6QT4HxM0K+q9aq4UGsBcYc6I4L812m9p0ZsUcPqK4TpAxmnGTByc2CJtvo19YbRyG/c7LUkZhOzsM3Jsb3FAZfprNDxllID4H+tZVIp5cX5P4C3svMykE9qs7GE0qXSt8J3X2kDMmaZPoxhi/cIy/foMvi6Q1DWBBLGy63sjpQq1ef2swOlBRZju5wIkwlGjnbc9rpasYsnwLzM2F610rkNQAPQu0iIQW6+8IhYS0NLk5kHoTLi4F0zMSwEzwVDJUZIqHvXEtG0RRNtO8zQj6ZRRBXSDcRVXvW3UXpXi+mDB0gzj017fq0iVBPHfm2Y35mIm/zbWD8jHH56kFN1HKUbbG6MUlnOL4szEsgXgJzTqaur8J8NQehpSHTWBEN5MUlJAZDDLtXE+HVmdB39G+OTswWU6o+L+h1RLrO5lPxdg+zOXJUuB8HrrEzDlGOnkK0Y9QiVs5/tvl+HZP3PjOnN10MrYjXQOnNqdMsBEeCgJWTEoojOg8LcplgXqBL1rJhVcpW0IQIHFrD8JC23okVwhgoZxPAjZfgquCOorqQYHfxyH0x3ouOsQWbtpjrZhC7c6A7OxLou6sGJV2subOcr8bHEdBUCSc7Ly3C6XZ6JBpbcmApjvZMli5rRGuNSiaYExZx4q0VCZTB1nFNzxlenUivbgnfX+FyRUtB+h49HSygcCFQKyYxZDGfhCuB2697TuwS4Xy1e3JzZHnvRHEieT4GE+XtrDChHGH8bKXeFsIxc3M7kYJ1bnh41pPOkTSCOiE7NO5TYZWEWFNmbc37V0uLPDuOvHc824Z9PjIN3XoNe97gym8aesqznum5pe7zCZAI2hOvC7VPzsmzjXdxZfRaA/OUVue49RpMqXDqFqYQkaNB21pl5U7ZHPXU2ALgyGlxkdE5kM7BxT913aPSQ1OgtxZUccJ78JndWYbEdeqZcvLG3ro65/CW89l6aO5H3KRiau+ZBUcha7IOD0PIHOJCd7OYRmeMH+PUsmoqqR/v1C9MJfF6OjBee5i2NlXNQbM5Zr/nXtd+e01tPl3NBvRnbyR+Nr2xPW+4peXLSSi96UClayXdz4RpQbuwAxXYJAXW51L
RUjcC9/GAnI5cP5OY3yuEm4UQ2+Zs6VPcXxSBGCt3/chnujNg3QpaIdkPHe/4weMLyiGaIy3NqTdKhHaB1j+e9vlLC5gWVuL/J6Tv4GtDoDLwv1PV3y8id8DvE5HfAfxK4N9W1V8jIn8n8HcCf8cnHcxSMHaCYbb8OEtehTXNCLL2sJHF/a1ZyNfE99y/C7ASTH9ofAZYY8hVjbT2VBWG48JYhHrZXM+Vo1BXP277u4/SmV5RGTpqEuYbq6Sqh2qIjVcbyRjQZ8Zxycfe+ULBSnW8oo0uUW+PzO8oy/NCGJsSd6I7DV5VYQu9Jphf9Ca0Fl0z6V2hHL0Kqlh03FI1pmVjLRHk4YpOM5Kiw9hKOqunDdU1bwqpDx6BW9SaHsR0e16+oXz4EumNq6WxQfyWnm6tIFYBvopzLAzRqXOgLJExuzL8ktYKH9Oe8ihecCOvlHcWR4US5RgJS0e6WnFBGm1DKL0Z2/GziRIVDrh6uek11WhR4aF6L99xQg8D87sHljsXVFxAskeVWdb2MelSSQ8zch7RY28b/mK9ypqIZh1YFfOrb1BrPn6oFGC5NzG86hyWJljavSloEMIcmCZfamoOvcymwN1dzUhV10DKsxiPY8HaAl1GyIXhtamgmwOlhHGBaWJtWDaHlUvXenldp46uKy58F9AaHKlxhHex9iI1QV0iwWNpqbKmdcT7SaoqtJ6SvhmF7OnAKsSxIldLeWrbMNUcsoa4pqs5atfoaQQnvrMYqqXJUF9patDLVkDStLpWpOsSiee4sxuPwCKbt5OlFluhBlgk2p2dp3W5Qt+tqK7mAMG4fHvV/YZ8sAQv0BBzuCdWlFYqLK0RcAXJnoLoYLmDcoj0rwPHDxN3r+/sZK4jdAmCc8gutsb6Nwvx9ZXufGJ+JpSjML53oO8i8XKg9ol813H+us5Q1sRaNVmjpT9KD/VFpjsuREcYVsHJqGu1V7tXq4L32TYbKVCX5ihvHLXWBFfU+Gpjb/O66zJjp2vfv1WrqNnXoae+uOX6Xs/0jiHD+mBcJ1MujyuKFrLZ+mvXUFGhjN4qRYXk6S7r423PKHXFCOVOJq/J7qUU3a5rsLVbVx6MV+JOFqjb63lst5wPG6/uyGdLc003HefbnhB8J947T6sGkaAxrJXOpnNnvMpQvC/bRVg6WR2L2tk1nUvPJfd0fUaCUvtuzWy0Y69oSrA9pQ7W4PfVfOT+OpCviTB64OiZg1ZQFZYW8Ol2nIoFL6NXJ49KHAvpGlf6ifXxsz3Frsf5wLCmE5uA9KNK1sZjWu9JsKxDSrYGn9+xfOaW8T1BTtmCOSyY0RLQKawAh8wb2tycpiimfxVRbruJrs9MQ2c0A29e3e5bjaYDtqvsWX8WsQrMdWF8pJP24/GJDpSqfgH4gv98LyJ/EPgm4JcAP99f9k8Bv4tPcKB079CtcKDlINXFNWNfKYuli8Lsxso37nIw5dgY6lpJ9cFwQ1Uxb92hkfs8cFl6+i5bifSQtgneJrs8XuAaQPAH7L3pmsfaiGxmaNpNt7RC8ZYXtfcUXBSLakOAlJDjkelzR+Z3C/H5TJHeUZhAfzespbZx9tvRWxRgMLen4IqslU9hdpLk4rym0cjuLCbORwyEbCrG/b2uvI50LsYVOsUdV6fxBUBzQZdsE1rapmDRU/ENXRTijgS+koBFKCerLmscKIDR0SNEWY5hJa62cu94ymgR6iWSjwHJHXEqdG+U5EKmZQgsk6F+820gPytWeZQsimvNXqHnmCtJFR0iy21YI+k4bc2kpUB0VCOOxdoJXCcIwSoBo7jic0WuE0GEdExrqw+A4ZW10pjc6SkHZXHia+11je5Ca1BZDQGyCMrg9PUeOtl1321dXKZAlmIcgRy8PUICAulckXFBF0vrSsVbFvnzLFiAsJ/f7jypc7JowpFlF0jsXy9upNtC9fTnuplWYPZUDV7J1dS1W8S9OwHrvg51ioyhNyfukqwPl6eRa4Jw2HgnVlqNl+P
7tS1WuZXG3pCwtqkktmjXHanWDNWKBOwYKtZyIjxMllaKJj8g88bhKV1lSp03hzUHql4TofVgdF5gvLIKkqpAGDaSrFRzUsqpGk1Sm2p45ObZiTD7egVkXhheVeJkdi+9npD7C/3DC8bJRCmX24DGjniKLHeR8bn1yGwOS/VKLk2WEtJOSQerdhZRK6JwBFSaI9WesSMDUtyhaPfh5OvVeWSrcrkKWuDhcliD9aZGrq6Gbhwnt4MxIscD42dPnL8uWnommG1v6xh8L1d7ZrIIZQlrepZG0m4Vb1j153XprI1ODevc3neeiLNxOvt7oUys9r2ptsd5mxeNY9POv/1d1ANmNR5cOQjTueP8rF8RynV4ZRxitjzfdVZlfjD7ly6B9GYiXguHV4HhpdmWNmpirfJ7mK3zQEqFHNvzwh1N/5wo6KGzat/e1NEf5oFpNPRpbUC8eHGEF0uEYsgRffXUbkAJtnYb0dufSVwUvbjcS1Ffj+rBoSCHzalTESRFWmP1ZldUzM7Y5wcYerMXXYceesq7N8zv9taDsgi1RkPGiiNjc9jszywsY+L1fOSD5cYKmETXfrhj8fY+jeSuGx9Vqnr1cvW+s7q1bmkVgftU3tvo4VvjkzGq/dwQ+TbgZwH/IfB17lw1J+tzP5xjPY2n8TSextN4Gk/jafyxOr5mErmI3AL/EvA3q+obEfmkt7T3/SrgVwF0t+9sOVBlqzgDCIokNbJqF9BorTq6ywY3lkNYCbGttceUE5fYM5dksu6ivJkPjDnRp0LuM0va+gR9pJoBYBfciKf3wmJpLwTyGFieBWqKlrNOhjaJ7iuZ2vEbQc5Ce707cflcR3w2MgwLF+lcb8gbmK7wuZ1jd29l+DEJcY7Mz4y0XFwQTBq6UA2JCrlVPahVESxWWZSumf5sEV7IahVH04Lo4OTDXdT3MSil6fdsmirrfWkIRDERzeKIXD1Unt2MHJMJzA0pr+0lQJCjRUFW6q50D4FxikbEf+szpWxYcyOKxzHAIsQXMwu9aZTEsIr+DW/kcSscGppj3dnDrseTTSBI12K6PNOM9J1VInnpcZgzer4QlkwCwmTpuHJIoB7xxUA+2v1pSMAmvtjgYkP6pMHZVdEV4jeEU4qskfueuwGYTlCw9iDpqpTBUowyL9SckVpWZPRRz6cqSDBIv6r9bFVvG4K6/4pDIXXZoPFoOlJG8ndYAFsgDdkBv97a0B6rYqEUqMbDMySmlW9viK96VVwYTdSyVVY2CZE1zd7WadjmafBeVuvrvCKoBH1cMbNb47LqPtm/0rlYqndZ1l5XYXGUzqgWpkPnvBCtjoDUx+tlf56PztmUfrdzcXRjK6X2FH9VqBl9uHD4MLPcWhpLLpORxVsVkTp/o7MwfnwemF8Iy7NW2q2baGzUrVUWkLOlPkrZFRIsu3Y/LbUe7RwbF8rSeFt1pOzK81Xsni7XjnvRVVuptc6qSSgd1D6iXUL6jvzeHZev7xjfg3JT/V6GFa17VPHk90u8Ks4yA/vcrEl8lCJc547x2lslnVetNW23JjKbrpHuQQhetFJ66/VXeruelvZEdF1HazpxFsrS5p4jjZNxAS9jv6IkUhoCKi422sRN0yoPEWerDOyyVQ73r6B/Ze1tVl0yhcvSUWrgMncscyKmsq2NyiaSWz01NXQsNwJBGXNiyZEyRcIcvLpPVm0vI1UbglUHpTtkYqpWlRcD1UU+iws+S7EFsfYHzEryOVO6DZ1s9osU0EVcXmG3JvbodvSWZ4fB9pi+s7RegzNn15vb8ZLBbU021Isp8GY88MXpGUG2ijyAHzo/I8876YeWhm1yNVeTAJHFKq2pFaYZzdnlhzJf6/iaHCgR6TDn6Tep6m/xP39RRL5BVb8gIt8AfOnj3quqvx749QDHr/sWNUXQlpMUtO+QGI01H0ynovamTYHaBhmvvpAy9CnTe+XFZTKu01QSE5YHDaKMubOu9UFJsZpqtEPUbbMJslVtNCMBdsP
jAv195fClq1XT9JHldKL0wvSeO1C9k5Fda2pdtEs1QlrOSIosz4+M74o5hiUgy67Joeraiyoupozcf/FsJb0i6LFnvn3G9G7YIPrmryV1xV4jqcbDYBU+OSNzJp4XhqLWjypXwnmCXFaCZ9O5aUQ/GXrC8YDEuG4QZRBPS9KkXzYZhsKqUVST0r0Y+ZYXr/jm0yuKCinc8P1DU6jdOQSjkq5C9wam+4QeK3GxpqHGuakuZChUke3+zJYy7bpCPWZq6eDeDrnqds1lU7SdPJUG3uRyS+e0zvTxPMM4wTJDOexy+O4ETxM6L6bLeLGlkoYO0Tu6ayKfEvKiLRKlcUma7kh2gm/plWFwqY0qzNVKbZebZPO7dzHTo5BvbUPMR6HeHrxMXQlLIc6RdBXiNcM0byXA4gbeS+fTZCmrpQRIxRT7+0DOShUnTbv4XvWKreNpok+ZaemYeqswLK5nJYMr+jt8byRShQWC836MM1aM/1TK6shKNgXqdewrYXZOx95xlGIbSqtOXTUWF9NukmJroQzY52Bz8JEDW7BCjTX955WSRUkPsxlMf++amm9kYFj5z9rO0c+9JtfWbIRq3XUHaKKbUWi5LXEZiNAI/gs2v1xuo92rOBbj2vX24RID5Wg9ME1p3Tb0Glk7BJTBnRbBbFxUs6OpIgJ5TLQeiGHcHKj04ByXuaVFt64Gpds5yOLzuoki6rYJi5oTnLNVCzcytWn3tOKPQDp2VGB678D4bmC58/Mbo0urWIGJLBXt4ionoV2lcxFYVaFJ/0hQatcZxaIGlgXjPXkgZ/xQtyVq8yPMlf7eOYwJlpPxrvIBSvLm03fZ5mEWlmipWsliUgud0D3Y+o6L0l2Echbmc0/oy0asb3wotXnXCm8aTQJcqibbrh6iFbKkq6xV2lJhXMzWlBIoU0Qr9FloDYMbLQDcTqZgituK6T7NaesbObeUOGu6eblTykEpd4Wbo3GH5xCREClRyTeJfBbmG5szNW3ac2mstH5+eZBtLnhqsXaB6JIhxqO01HGrcLRzsN6itt6L0SemTDpH4pyM0O5zWUQ9lW7339vmIdWkWV7Nx7VBMljF8Zff3FLPHcEJ9O36u6tTWc6zdfvIZU3fNefJAsAdohC2dfNx42upwhPg/wn8QVX9h3f/+leAXwH8Gv/+2z7pWMDaqFRUqUMinIaVeyBi5E1DoRJNPTy6EjfArQtpNsHMFCq5WlfwFCqHtBBDJUVvyBiBZNoSQTf05JGEQWDlNplyrRNV37+HJROGjuEbDoyfjR7l4R3szemjhm2R6KawSmftP/LJlJRRCNctQpKlblVtavn68PKNkVu1Eg4H+m+5RXJYhUTVK6VqsWqb6U7oXvSE5YWpmL9+QKsiY6a7zKzkuHH2+x+sVHrYXUcHeneytT/N64Y2vQjkwxb17JtNmm6VcykiDEMmSeHzl+dkNbXehpathExfQHZ/IZ0DS6eOwFXjHrm6NlUgtcopWbli20SC4GXtTY7Aoops2lKzqZprhxuxpv9jIpkaQKbFFs4j4VKs/No5bGsV0bzQWjHEayb2gXTxlicuNtfQxFZROT+PzLdCPRWencaV8HqJlamrLM8Sombk5meQb5TlhaGeUiLTZw6kUzL0S1xbxrlRK1E7xHUuN35S91Dp3wQezkZyPfQLQ7fQRVsnpQRKEvIhkY+R2ltFVRSl1sDlYA6cJtCh0g3ZxAaxqpvaxXUd7Y362oCz8SdKa4dkv4eMca8GIKhV1wposmoz8YAzOFIYJz+OzzNDEZ0zKVBuq1dU8lEyglrknZ0QmyZHyrI52qpqgZs7OluVkMJQ6IeFHOLKranBNLLKaFVKq+1oTkVyZzPjJeqRlZRcZRUA7c9KaM2Ec7aq2WDRd21ChMceWY7WNqiRxPs2v8QbTjeHbRelOxqCWquOcImr45a8GhGxzaR/bbIm6WwSKKhV0JWjr2tlcyib89KQtmY7FUPnxDb7VWCzIVFdoBwSEcgnQ2u
1s6AzXgLDK+Xw2qVspoV6TF6FWpGhrIKzVnml3gKoMnYHCwKLCRvHzqoLVdnZZyEfI3Gx+Zcu1Rstbxt/OXkwfKrEg8FLNbs8i2sblVMgH+3aLEB2cvVFkGt03S9HJxFwPb/aweLyA+lsxQ82f5tX7ss3m8zCpoxvf2+Vqhc9mrzAW3PbHghoDIb0ueOWc7RGv8umHdXQs00GAOpRCUf7jCZCGWNFRJlOhXxjUjOi5tSHeffZYg7ocmvVi42blQehHJM1ExZZ+Zyt7VJDMVfZjS6YA5VNaLYDwjxYNupQiKnaNKumqK/BAnqqFY8YoC8riALmQC2LX3/eUNOQsSC9BdnNefpq7Vv2jtRXGF8LAvWnA38Z8F+IyH/qf/u7MMfpN4vIXwV8H/AXfw3HIt8o5bZCEZbbBOGwsv9rMKl8EVZCol2Qf0vw4nDlNk1UhC+dbzmXaDohOVm64mCTIZdA8koacNSrRYUKqrL2gNI1ctjgvjCVDUkKQiieznIya1y2iE92qbRHzN0YvCM56IORsw0KhTRVI3vPFcmusVEVHUf0ekVVvczSjfOhGizvxkursGRhmgWpiXw6kS4Hjp+Pa+gsl9mvT9bv811gfgbLXduogymZf90d8e5AfHleMzbTC3te6WJ6KK2fniFQSnUV9XQVLueBLxyeUaqJRrbn1iDURkCUAhE7ZpyE7OTQUOx+iH9RBRmSOyYb+rbMCb0mK1F/sE73/VlJ9wsyLqseVBqLVQp2Rrbs3nhl2eLGNPB4AVVvw9NIyVEIrphMl9i355GlWEpthDwCw3adqKW+ssL0TMg3Aqmapgu24FugcD0dLUL0FMKqEB+UZRwY34mkY6C7z8SxVciprRcACWvVZauQMoRloX8TkTcdo0dxL26u6+fnYqKAL297lrtoG47Kmh7XQ7HKqAgkpettoSwhsQjko/VmjKvzBL67rWsMzNmxlEgFNQci3keKsu7KGhQaUuTaO2m0Tae/tw289Ia8pGsljFYdKRXrqXkolurx6kLAWuVUWABqtGrD2SakFEFTsPTBNK0pvJaitutQq65aWc3iat66qpCL+nzOQKtWBFYRSUceUKtqGl4qw6vK4cOMXMZNgdkaSpJP0YokjkK+G0i5Ounc72+rLHOkqHYevPkao4iVYztSH7LQv3RJiqvSv6krMThk6M6FePXCktcX4nQDOKonukpVrPfmrS8NNjdCUCRUaonbxg6obA6h1LjaIMmmM9a/Ek7vZ4b3Z9KrEblOyO1g969TYlJiqGtldQuIRZSrVxHWKaJ9IXXFlTKsvUc+JpNheZbQc/GgrZrYou5SYe40m03184uKhLI5G10gh0R5E4024cUQwYnutQnmtvnjEGZDC1thRxpdFLdYNa2mYNIgcUP3cKdDVTikTBTlpbdLacheE1xuFIFWjbd2KlCspdPc7MGGMrb3SPFzXyLj1K3OaVMqnw+FMkRD6XKjGWyVhbUXpmeB63vC8szOr8yGuE2OnoW5WoalYK1X2GgUTYtAYzDHc7E1EZot86AtxmrVw4RNxkcNxW8p6qVGQ/NLJIa6tqupAQvSgmzBUfT13b5iWIM+SQklg1ajTez7H32V8bVU4f1u1un0kfFnf+In7IZg+jncLWgJ3kaic2Etg2Fbzl6CensLoY72/jIoL/qLKY4vA/eXw6poXLJVbCxNjTVHTofJNgWPqNaCCd1PeFy00Y1R2aUcknlw2iXjLCUTZOvfKP19tVYpB7UKQraNV6M30RQTh0wXF3Lr22ZtE9ochd1i3jdjbPpRB2G5UeSUCckICOoOVDkFllvbzMoQ6C6BdDkQx2woxW4za33dpmfC8rwi75hzNceOdE5cP9cTp4QX3pjzcfDyejUIO8zQP9Tt3JshJTB9eeBLPGM4LNweJw4pb8aJ5mCyOplNaFCTOa81BkKUtSpCqmx6WNcmEKkWdU6uSj21vLZtrGsbnWw9BBGbM9J4WJ7milOxyp+Vf2c/y1II1a8
N1hJzPfTrcde5XHRFtVajVKEJx1kFoUsmZKsWAjNSZd8CZEWuzDM2octdv0PnzIiWNUIVVSQExM+vITZNYiJOhe5aSZfIMiTGoCyHmT4Wa7uTMkuJhK5QeosM31wOG5fl7TJ3sFR4n83ROnTU0XkVbUONpmsmKaHRhUPXSNSaHvcPSv8qMG/LZb0P1pPN7mG8uijfg1dr5WCNfK9lbdeTpmrp85BdqgGKdYMlpoIIzFgLkHyN1Htokgq1j4Shh2FAUlp5UuIl8EuxQE73tsP5UC21GPImVyFqiOPyzF7bdMQa6tO/Vm6+UOhfL6TXV/Q6orM5UBID0ncst5H5WeunmIjX9EjaII6sQVxLzcRr2JXdbyiUIdwwfGhoa+cK8Wtl1WwyDqgiLpWRxkpwtfqVY9NQ+d08XYPZCN1p5nBYEGCalerV0+JBT3UUSkfjdRoKIKSzcPhAGd6f6V5eTY9rychSV4RjXWdi7aGaoGbTMUNBrpEldYS+uA/UjBeslYVqqE+cCiFXE6l9sBfkk+tm1UA+xVWUNXjQUXef9YhP589+VaJ3e7nSHFZb4MHyqKRLsWB5qXZuyfTr5htLx8aJFeWflsSYnTLgwUvtrOG0JNkCSt8z9ik9wKrvRiFeTNA3XZTurH4cSwHnG2EKkdxFYlPyD+60pmroc9zPhZZJsB6LpbfgugnO5pyYnwXPxkS6h21tr3tt4yNlNZuQTaFflmyZj2WwyvC+bN1IfA2GoIRYCUEZL5bmm+bEh5cjuVhHhC4WOk8la6rG5VoMUbVefUI5RtKhM+cpF0OiViKcOU8iAdR0qj5pfOqtXLSv9IdsAqrHG2oTywrFJlb1/lolrF78Srg9VoZQOOfI5P196hIM4nSBxFEM69YqnA6NRGEGpvXY0Y9xLNvkb6WYYI6TmGqXcRE66O6F4XWlf51J54HlGB6lD9ZmmdGQIItYfFNNjeyJk3Q37ZPSCeEYkOPRDjQvMAxMd4H8vDAcNg6NVhMCUyyCr0kI3ondxDw9Gu388bYUVTBiYD1ljkc73liFfGOpptgHhpNtKMZJEnRuys26NoptXCXAohoxozjfJOagTKlsKBSsZOlQLCKwruMuD5EqGqIr3SZzhFYBP3Nq0ij098ql8QzahtciP7BnEII5vdlSXvloJd1hEke/PkbULsQ1IpHGfarm2FpvroT2ye6pQ7qtRHfPC2nXuaYadWfAFyO7ikfQ6lD7WlrbosRZjPwIdJO4M4872pbyDe7skYyca0666T6tTuqcrdXLgmmmTJFpseanydV1N2TWNrtliaRUGbqC9HV1fiWZ0QpiaEMIvgFEn+utGW9DoDprzWENYxU8wpbrTH9f6V/bTVr7Wvnj6B5szpXDliqRbJGxRttVw+75pEsl3neW6sf6C7ZWLiHYfY6psnQWBNRkc8Y2BT/Pvl8RsyZPoALLGEzGoBGTFeMRXU10MV10FadtjaiH3lIaAP3rmXBd6B6OpKty+nLh+IWzpdgvo8knuF0hRuRwYHpmArOmnm/IQnf256AWuKwNrwex+zHLlh7P20aVRgssDi+LoR5TIV4d8apYCnFeYOh9DXtByrQ5sc0Zb/y6fTpI1JDdF3dXbnoLxN7EwQoQun7liOZDMKR+DNvaUOjOLlA5ZjsPb6chb6VMjIJRicEzCTVQ2oYsECahxEiZAzIU2wfGYM2SJ0Ms47WswRE1EIoVsEh1dOnofK2ghGQN04MT1jVHSzn7fW7p6uiBU1iw/SvugnO3ibbujb/bXash4ouh6xojtTPUcXpXTK/OJVs0wDx1vPE53PdmS9/0p427itn6VeuqOroUzbbENk8dye0uyvDGiOi1E0pnaNpyJ0i03rQpmRMaQyXGyrJTdg9OHYhzNakXD2pQtw9JKUdLdYbZnntYdMt82HTdhvo5ezreUCALlsth6xVaq6vfh2qtx2Kli4VRDpCFeewoORp4Iub4xlSMLoPtt+XoQMQgzkcN1GNnAWhDmKo
FeZJa4YgHGrFsPNOvMD51BwrsBsVQKQdWCJKutWVwbxAox0o+RUKxSF5cKfjNcuDleKQ8mGoxsGoSlda5usJDd7Cf5424vZ6K4C0tYK8LtYrARduMNVqDS1toyvBGTNX7vNDdH1heWM581cEIYpu4o1cWLQj1JhNOmayw3CTmm0B43rtgJ4BxS24+8ww594Qlo6cD8wsh3Fqj1+JdtbUY/BpmsY3T+SLWENc1LvadpRva4mJia8dwRztac2QpJjIGHm1Mtvl3Z10rMFZH5FFEZscOx8w3fuY1Lw5XkhR+6PA5lhshDHbspnW1HG3x5ueZMBQ0JJbbQB2EMPdWjFPr6pjKUuke1AQW+8zc9ytCUzoh9FbVIX1nhliEckrWwqJTgldyrmk2sI0ztBSYP2sxBMJIjM0ZNpG/MkRTEE7BG2aKc1RkayxtIJJV/s2GTlQVZBHmOboD4gu66dqwe18FGgGz9VbUdq4OO7f3ROdoiT3/2m3rC905uK6hk0tgztGqAtUImDUHOkdetAZizJyGmft4XNdDc/pW7lSOJDwICdt3u0/J7r+q/dzun8+/MLuycbchcO0a+ze6bgRSGorhl+QGt1VrSbVjdff23AvQ95mUPkb80pGBmgTp1Dku9kylSysK2VLrYGhx6XzzbH2+5p3AYNMTW9TadVQvjDg78naZkXFxLbbK8MFEfHmGebHUXXOe1FIHDL2hIb2l5pouUndffM4q/evFDHsQpPTEyTb/lqZZK0+rFRGEWeneZMJiDafD7CrrIqvQoXZGAG98ujQq3UND0SAcZat+0+2ZGOII7x4v3HbTmkKZhsxlqNQ+mpN1gLwEwtSKKexZtGbdsrzFQdENSQmhchoMNQ2inOfeetGVHUqmpiHG5GmdCvEc7fjXumrfqZgNsWrlYOlwVbphE7BMQ15RmK2vrKz3dF+x3XT74iRk8QrBPajs6zlkJ+o3Zflc1+vVaHys+YUanYLAcjQHKU+Rqw4gyvE4M3QmNN2I/gjU7JXcUSym7DDkeKfU3gpK0mTpQ6lKnYV0F1hcsf3mONPFQqmmg6aO9qzFHr5GV225qqRrobtE4hiYS4Aur6h1s8saZQU/moimNPsKvo59r/Ker3roWO6UIVnBlao1O46x0nkxTBercaQ0oDmQJ1unijmzOSRDpousFamGpm3pThWxvT+0OaeP52GDO4FHatsfMz5dBwpAheiL43ISYjIHIx4yIVSD46uLvvXWx2nx1h0iyuvlwHmxxWSTWTwl5t5u66Jdsd5d8EiJdSszV5SGuepWYu0ExNo5f6ko2idrjzBURMXzwNGjNEGPhTIkaqeUQyQeO0IuyJKpKZBv4PDuyHGYeROPzPeR68U24XxghZI1wPgNt3RvesKUKceO5RZS7yq7JVCzOVAsuwqLSZ2gWi3SVENpKPURJwXcEOTAMltrkbpEYt3g2VZyjmKtHsIWlRpMLS5ut90vq4hSIxurcF4ssg15IwbXAUNLKszPheldpXs+IUHJAeY7iwrD1NGDEX2F9fy7qxIW4TgsTH1FU/BIXIFAvkkGCVdLG43vJspdK/FnbWT8CIHabWSPhd9k+97KsaKQD5F8Y33LaoTpHWuZAawq0Box8o1/bivBj9ELJEJ17VFl6g0O12D3p3Zq/Q49GKgdDj8Ha13iqRFjCAeks7RgnNUFWB+fv5WmKwQjh+dqkRr45lCblwSH48yz48hdP/HFth48qhtSYcq4mrmspFFz2oSAUvtAGCKinVXMeF80i0KDOXqNP+KOSqvmpBpagNh6aNVTjyLXFWlszlghXSFfhTwEYqwruparVYdpDrRefOu9LGrtf/qIDJ0bb58jXuodZqGOVl27ooxFVi6VpSD8/Je6PoN0cSdmysbr8IIUE7rNq3Cm8Z7smMaTjFv7kd11hlINTVkq8X40WyTCoErIFnyFlXrAuuHZfa5WGblUI+pWu7+y3kOzb2YTkqFD2dDmVrU4u41dhyNczZk4xLw
W8QwpM3QL56FSDrb75iaKOgdvL2Mb89o4d12DjaDi8zsYKnLbzea812jVZUuyALm2YFf9es1Jl9bU2gtFGpWBLppd8z3E0om6UifKgKUixQRHc9s3xNaj9fjbz0X1yl6XuGD3v5VrY06toVXGm5Q9Wiu2qeejoqdCGQO58Q6XsMrD6GGxeb0P8Nc9yqqsVSv5pJAqeYn0TRpkUQ+sLV3GFpOaM3ssq/N0HTtby4033BynZXPQW2pWFr+/EzAblwuVVSZmlUtogqK7vnStC0S7jytfNyXKqaccncPkfKxDv9AnA1xsOzC5I81OdZiCVdutMhLmC2i0PrqtQvIRf3cp9jxyc+C2algzFru5+Qk0qE9mST2Np/E0nsbTeBpP42k8jUfj00egfHTBpAXAvO6UCjGax1mdFN7g+4Yc1WxR9DEtcICXfTE0pjry5K9viNMyt3TURvJtujCPJPh1E/sD1mhD+2T0h5uOfGIVDqwRi7gXS3HpqSEQskW3B0MHymDaJ5+5O3PqFlSFV8975nNar70ctoh+ehHR0BPHZKmGgBHGXTtoPwTWEs00KumhIGM2MbOGouwRF4/+ZLY2CcDWQ20xqDdMxdJ0KsTRIt6w6Cq4t1ZyVFxnx3PLt4Xnx4lcA9frgVysZUzr8o6nItr15lvlOGQrOS2bUGg+BaLzgGi5/WC91rqHyNBl4ik74ufzR5R8G4lTQuZsXd1vA8QCk+uhLHWNQIyMvYsy9pWTlS1aLxUlWxuBwTrH54Nw/azB3/nWCh1CllWvp0HfLcXZuFAmJZCJLmw5zh3XzpC7ELeu7KEv1BDs+EMTMrR+eiG3nBZWqJAsTdb63zUUcTet1/OxAsJAhlUCZH1dVG6GmdvOpS5yIBYs/eZVg4Lz73J4zPmKmDBm63GVApKN2/fovgaxqtNFqY0s3iozq6NojmjtuTaKR70F1+dqqSpdydK5mpxJcLJxcM6MrmXTDYFqvBXvveYprH20La2yri2waij32k+vTZvKqnGljq5FFwPdp8/N7mwpdQFrv6ECsa4pgjQqZRRCNA4YVaEokUq8ZOQyGU9DmkihIrV/vL7bfV6q8ceqr9tH1IVWrefoYBSTURC3I9etlH4/kVauXnG6Soa5RlItZK+EisFTJs4JasKheTByfE2GOMblY9Zfu18ViMrQZYaUmbIRqs+XgTobRyXt2ulowKu8ds+lpb6rPRuN1s5ku0eG4JReyEehHOwGzXNimRL1mqy6ujNSuXbtmgziW23uBZji1qi3zaW2Z61zZdOXE0ezpdmjOazVow0J3TfELiVQVNi3EGv3Vp2iV1yaBhX0Giz9Ore0tNk8Wj84F/gsA0hfmXNiGjvym97Ot6vEY7E9dDLkH20ioD6RqqWI4wQytTUvq/1pCC2u3dSyEPuxL+JCBFKidiZPEENdpYjeOVyJoXLNJi5aVJBY4WgioHN2OQvPdkixVLuu/flco7Fx0a6mk2htsraUKiE85jsFgRoeI7AfMz5dB0qAqBz7hWNa+GDXeLLxQ/pUqP1igoOL8U2AVdwthUpo5bhJKUOBKlRxSLzuJuEYV0hvHXW3yPajpfccJm2OkAZhOSVvROmTIQhliM4TMu2QJkpYB6EOkbCYHkY+mgN110/0sXAzzLw+ZvJN3NILvc0wTcYPCjl4ZZdNxJLDCs+3akXLi7F+rfwk1a2auKXvdrwHU+h1B0pwvQzbyPbVdds91/WaQbdKQecIlc6dn75y0y+mEF975iVZZRCs2k9xbvcW6qHSp8w8Ryd0Opl+sObAKJYeCawaSOmCk4MLebCyXiuUMeJiOkbClJyzBGTjSISZVWxTshmTlspamz6/razvC0tygaUQZhOylBrJJ3N88rOKdtUaKR+ti/ymJ+SHcf2um2FmSJkklblGcongpckavH9ZUmJUJBbqQe0+BHNmaxfQ4BWia69FK7eNM9TGUXOdmbd8JOcG2aMNoZLaevTS/GNnC+xhHtAxrgauZHP
e+1SYY2KRrUJQo21eGnaBQ3VuSwisnJbm9GTTw2nrzwjYLpQ6WuonDV7p5qK0ZnhN5iPMZSWdPnJ0kldN1YCK2RDVzCVWmmr1pnnWxBEDoYurEW9co+AVba2DfUsxrvpxLaXXnHAFQdd03SZw2LoM6COHyp7F5ggCyJK9b6XZkTBvkgPiyuuy5JUrIrb8tiDJ1/qq11SsyknGbM5b6/Ul5iw1btq6ielGjm6l+k1CYV3A7NKZak7lNXuQWANziSw5vv2WtYCkObBGbrYqxY9o8DTbHK1Y4X4eeJgGHq4D5b4zuZdqDXjT1QJlPVTTxOp1TYut87Mzna+aApIsjVxToHTBeqQeobq+1vnNAb0mwiXQX8UbNFt67ZED0K6/WHpMprCjN2zzYw10Vm5sQKjm7NUWAFS6+0g5hdXpC03zMNkHtWBglUZo67bZmSamPJijLFPTJNQ1jSfNeYpbQ3QC6BI4vzoiF2t0TYU6RPILoRtN5LO7VDR4EL3bM9eK4AKadx0F1h6fZrfaXFHRx84tGDdXxPmdgSYlFIPSp0znKfmlRl5fDysPs+ZA6Cqnw2xyPlEpwfZTyZZabQKaRuQXa9p+rchOl84uRB59iT8z8T6DnzQ+dQdKusqxW+hiMdXdttCqoBqZfREG526s5aitjFiUZ93IKc28vjtwnXpKEZbQrS0XqnfApq/gZLK9RMBqaPYTYndPW6VYHYx0m0/BVH+xhZmPQigetbmzlo8mPmgVCHElpi03gerk96pCFwuhq9ReV9RFOyNtquBCoxbxtca9dY6meNtONDSIo23QXl3RG+HdXqaPIrut2sgdTV/t4jom+9FQgTWX/Zade8TFwQ15ET48nzi/PCKjCZndvjbROSledbOoIUN+vKY7hLASsZvBeeT5ixmsdFXGufPnYIiFuDFZjkI6BuI1bQTIXWSyPWddOTb2h330azeicU3sg9RkES4zKQrdKQLBRBOfz6SukCcjwecba4+gYtybOJlzqX3l1h3oJvpWqjcEdWHUFj0phsbOnfr62Jrqbvf/8abcXa0825xjdy6KccbwTV+8ii6FSp8KpVrRRumxxrMe5b2+HpCGTlYzskuJ3AwzXZeZU3In2LWPWtPVCNoZsbPxxlrEKlWhGCcnXTojgMpm3K1Vj83vOO8dj7ZZb8jhOp8dzbIDKXNO5BxIqXLsF0595aEvLO18veCsFHPS4xQcxdtQvXaPpVhgFFxbpzkaq9q7o9mNE9KEW7vzrpKzHac4AhSDRbRe5KDs+IlLZnhdLIDoxSu2CiEFc7T21WlN2DU7MXzP12tj//q93s0elVa11k5FYV5W0dIo5tDmg83hVb/pI6iKrd+pJJYSGZdktlsfBxBtTeOOT/dgnM22qe95hlLraotyscbx12vPcu2Ib9JaGdi/huFVNWVzcYegt43eZF9sQ45jMkcNtoKQaDpgzcaEGdK9UHLvFZZCd3bk7ChM7+1u5Wo7WBHLeBVW5MgD8BU9Tb7Gk1hWoII0j13xOQPLdWPS2JoQtKsQLRjoY7E9rAMaEtoedSdGPk8K2flAu6Bg49Q1R873lhHia3M60kXoX9txywAjiXQvdA+V9FCoffAOEW7vd/fPkFjjB67n1e5P1q1huWyili3bI285L61VSwx1LR645o7r0nH/cKR4xb1cIvVYmI+Nq2b7QRPC1SCE4s7Ujg/W5BP2a0NjsGWf/EBvV4N+nNDmbnyqDpSCwW9gek0+6VQ2wvcYurWKhilad/Drpqr6Xn8mSOWcB2JoSEJDZNxTF0Xewt60hW2wLeq3/0+LmEx/CUnWvuW5pVRaFKpiaYAV4s8mcaBpi3JLZ2mf5QRyKCw1oiqbLlU7j4jJD7ihql69UKNaywh30MQ3Wm2LYneM6hUdsQ+UY0do1S1epim7SfA4CpB1k1ojpdSgk80Q7gVNVymAtoF4VZRMgeu5J7xO1qG7mvOUrjZ5h1fFO3nL2iYhN62duj0TSwU1/07NYGA
+UbrCpQb6PjMPtmvvq4KsrU1wiNvRp4VNvG0/dvCxeoWbhrA6huvsaSTy3KqZ7I01QeoKw+AKxmkXpYat3U/pQYa6Ok+5Bi5Lxzh3yBwM7ViAdk8OiXBb13PcnNmtCm0972hKvlubEn+WbW7uJnZznlL0vlHB0xOOzFyXjjfjwPn+QLi2lJUgU2S89pz6hShq87AdeleJV5NXBLWqO3ZByYrmVUMV3fCG2YQFxdXBax+2U25fPufC3IifXpZeyla6H1j7oS2pMnQLh856fO3Lsdfz3puGWk3sb037+HzqvMhEPchwxwn32ZqTJ1nNigZWNK09n4ZSPQpmQtgcHI++dVnoXy0uvSHEs4nCtjY+MuUt3dDmY63I5DtWI+yy+75eq7i2ze7vS7aNZBJTQl9MIyp4ak6KIj2PCOMr8qZ+A9QCwpYOziUa+XqncbbfwEMGcTmUMPs82MuGNBKvKprFHKcxIZdEHIXu3gj8kmF4aRIN48uAaCAfleUOiK4tFz1NmwRmXzuKdWjwdJpUR7G8b2QZXHT0ovQPurbMEbX0WJOLaDQATU3OwJBha+UCTV/GZGuaDXB0tmmnOnnZECglfAaXE/GqvWugxG2tBzEgoUYlsGv7op6K9GKaVv3b6Cr2THf2WnRNfXf3UGND9GB4rSvyWDshnU06I12Lpf3bXF9FMHeOsq/TTd5kC7xDhtr8fJ8TKu7EVkWTzZkVcfXzVBXGEnn5cGKeOsrrzmkM5ojnG+ESDlZxN4dVyNbS9J7irV4NPbW9exc8+3pq9uRROs9tln6C8wQ/BghUN2RuutkcqLoZI2AtozRF1MJyKJbCcw8/HArP0pVX+cTL+ch57FnmZDnkLOvibamxdawiT/6re6pr6alPdssnW1Uc2OZbephfCPVQVq6OVRo4uuBVBQ2BqtF1jlKgHKzibDgt5jgFMzq6d2LEUnhN4Xg9vz18GO1+iEBpPKsUKBfrlRUHyN74MZ0SjK5r0fq6wVa6vJaWNkPPmgazVFJ0R4S1D956/zMkF25be3CJrFpHCNS7Ql2EePHKjGKltP1raxETb+Om49JUjHcOWt1VQTZUojlYUpRpSgxDJt5k5ufJDM7a8ykgNWH9C3fOIs0x0zUK0yQrAiB11xcxyrbZNGeqKpSFoEr3JhGnHlF4fjvy7vHC+djz+S8f0KFCp5QYkBKtsrIDSZUpJ6ZifI6H68A0diZDMcraysGc78QSLc2aro3n0+Byh9GL2sbnJcB7VeX2TDTsFpYYB+vULwwpE1yWIKZCSQpRmUtkHDv0asKsK29wMW2qqsLQZcaumHJI05HygGPt/yYmHqsp7H381TiFooSriYKupd3F2puELhByt6Geuw1bnBNkB/F0TNeEUivlnJApUJPy0B+oh9k1q9wBXbaNxeaUbmtkhyq0EvX0sKV+2/tWuZAdh0dbEMWGZLX5ZLo9b9miVfdGWUukqzewzuoO9U5/pqGNxdr32NxMyN7gl2aHZHs9PErfyU4EVtOmDN4cO+sTZxWVrUJOW9/SZj/3w3/PNbBUa/JeveWIqaTLI3u2oj2jo4melpRcVttEsMpkipCnhDwk4jkQZ9OOildbC4fXhf7lTP8m2eursFTMgYnqGQhzBh5pETUpkIbCTN5jM5vdMdFRpX+dQaAcArVLzHeyol+P0PrmILi0gLoALr6f1A7yIiw3gTgnb+5bCFKofTSn28971UVrPLNsKdkuFYaYad0ydIckqhjyPj8z27Z3JNbH1BwoR8+kuISEB9CWPofhjXXF0GAtcNIV+jcL8TxTu4Pz5ew9TbOqHEAHR9H9fNZ53+xsC8D9vtQE+RRp3NZwTabbt5NOKDVwdlTzej/AGElvonO6TBwU9X56xez/PioSD6i1ARP74LwF5k1CodYVkW1/1/a3r2F8+hwo4Jo77qdh5TUhMDtHo1Zx+6jEvlD6hGbzulNX6EKhqFA1bIJn+5Cykf6SMNxN5mBNgZo2xKt2BodqWwz
BF71A7a2UuhFDaydGIO8symjIQnDSa2vKS6rkkyueLkLshHKILHfKqV/INVBVmEs0GLKaxkz18629evmlb6TZ+FRlgO648M7dxZo+5sSULdq7XhNlEnJrfBkgjZ4Cnc0Qt7x7i4JrZw6b9O5QJbvu1qU8H6OTDO26S2+IGtUmb3exFS/ZJ6dH6N29MIeBw8tg+lEVjh+a4Gj3kEmvri4LMTgvxvhsJVVm7ycl7FALwXLQq4FxB81L8VOXmU8VjdYKxTSxnL+idt9q78+sZ3tug23stQt0MSIs64ajnfFDqn+m8diKRUm+wOJ1IV4tEj52C585nBlS5gd7RYZK6AuFRMlCvRgvSoIylcR57hmXxHjp0WtkuLI2VI2zG/wYGLtEugjDG13TcnEq2wbdnDrfjE2UFerkKYoQ/FgYP6iztNazwVLfAHNNfDCceBgMEbrOHcu1I1w38r/1/RKWc+L6rOPYmx5ZxjcT31tJO+etRXNpSye3oS4FMT+zv6epqdor3X1ift4zPQ+rblPqZEuNYBwoXZIpF4dgrSZOJrJXpo54sY1zOXaM3gNw7AsaukeO0Kph5k6Q6S/ZZ4o6ihEs7V/BJAc82IsTq05TGYIJm4JxUXpBJsw4Eyg9jj4k4qXdBI9sm/GpAWrZoVL+rapxd3bo6erYu/TBR5yztwnjIUAfkGrtQ/wg60axpvZoa98FEMXSQmXwnpGq1EUICdOgcxvybBjpQyGFnmlJ5BTIKVGPlTLtNlCfH3EnHdDkKNavYPcpD2K9P4uQzoF0tSKJ7sHQ7DiZgxPfTHSXw6rJRhWkK+hgdtccG0N/4lxXmRLd7bWWAsaLW8TlYArpsnhaNtC/iZtT0OzkYA5nPlqjcE3+XIPtE6XaB5Wj2bV8FsrFswFF0VCtkKA1iO7aA/NH5OivJivpH1LmkVhze+xi/VDnF8ChbALE7RrFz7sVU2Bzv7tYA3fUEPc0Kt25GOcwCOkaLc06V78uQ/YbSlT6wHKy/YGbTOwqeTJZGSlCHswOGzpmc6X2/txVmJ/Za9MYiNfFebiV0hk1oo+F65IM0Rwj8RJIoyH1hp75vjzaM2k0jXbNTTi3+vxVEZZTIE4RqZ31TK26FqgQZXPiH62najI3X2V86lV4eUl84dUzprHjxZtN6+XS7EkVarG2LGUJpIdA/8oJxqKcwsxtnHidTLdDYY0g///s/UmsbVma34f9Vrf3Pufc7jURLyIzIyurIamSWTZN0bANNyRMyRBswZrYGhmQG4AzwYBhWJTnBgh4YgEeEQIMAfZAhiaaSYAFEzA4kESyKJKlarIqK5uIeBHx3rvtaXazGg++b619XmRWZhYMJ2wjNnDx2nvuOXuv9a2v+TftsvKXm14C/nEf1i5XHTvQEvmWOWPETTwNFcUvI5jlOqv3jm6CvFYeNfvG18CrI7Ve3KyXG9G8sqaI5kYx771GNEDIZOcgq5K4q2NA2YS77cSFKv46IyyF2WbGTSQdrSRrKsa5KCDRBplbW5MwpFZpLltRIh82CtQfREE2buR92ygWB3EL83URjIyXsVvxhXSQEUQD8iep3LoHcTDffLWCe4d3C927E2Z/EjPj7bDOxRfxt4LI5FiFTouOMTtDnmTTFiMdqRQMzmWuhok5RN48SgswQ8M+1NdIvQazIuBkaXODKRanI0+8XQ8To8wsb9ZYpXL/7XBzTnzUkmzaMXoyhsEtkKFEQyoOZllgrSIFHk4DpykQF085euzJ4g+G/r7I4TApcLdYinP4Iwy3iyROWkUWZ8lBXzAlEWbMWVWflW3SGUovKsfSAcuELvJsOHEVRnoXOcRO/MVUVdxEw+nYw8lhR+2Kqbq1nWQUOs+eIUSCS0ysYNFSoT0VEFqQKs6J+Kgp6umo4yup6s/AskWCerwITM8c0zN5TrK3VjwHRcyn24HrrRQ2vfwHfxD15WINiw+MyeCeZWnyGOnoVXPTqpNklgRLbIXQCswVhmXrOiTBUpBYD7y
Kmyul4STnncV1hU0QgVNheBkd7WQxEK7dp9pJsAZwovm2E3HMmuzk4LAV+K2Jl9Hum4xRvhbcdZRXlIVrqg6cdrjqZeYIEWEhGgMxCdllJ+OP7GUvpWEdt+bZkKPuz04A1q82T+zczO285bQEluSY1QA29Va82/JKsJGEQbWLKnmj6vBYC16hEidJoroHUdM2ap3ix6Lq4hEzTmpKLt0U9+RIWjxVWyO7rGbWYisEGGWzmtKYihVcXRMoO0YdfRqGOw84uTca11IvcXS+VvA2yP73onpvknzOeCkbxI2SLFRogi+FpPu4eEhDxo22YevsIl1psBznwFPoRctIoRGN4QukDSwXGdcn0uQa3lfA5bIvJcHIAocwqkuVDF7Hhv4kTE8bJdF0UyWkrF1OiWOSsKRemgxpU7Ah40Nk6oLAP8xarOZKGFLvxqI4qPGZ4DX9yRD2HWbO2CWRNk58OIvhOPZMh046kKPR7uM6Yo0bESg2to5Xqz6dZFLFF9JWsKHFw3Kv+Mrk8MFp5/trXd7WhaqZ7Ndm/z/j+pUrkefFMhtPPvjmzwOQRw+miIVLNrhewda66OKmEHwmFctn4w1/cveS/Vc7YUEk0xhfVB8xC6nIgUvI73WhG8airO+rCT4G1k2BZs/bJOJdrrRqVYCp0jlqgG6vzAP1JkoDFO30bMPM1s9YM2C71NqLRemyAFX1uDLV4mCJ28LLiwOXYWTOatSoSdRxszBtPWmWD5SDdIkwVoLNwUoDpxSMBqy4A3e58OLywJwc99kQd15E+TpJMOJgSEEMh0tXMMnReUOpOoBFRzFRcE12VkPia9h/IovZHyB/JrN/59y6WM/Gap2PTLFv83xYP3sqFtutwOG4tSwXUk2lYni5PTB8EtlPHaepY/pyqzR6qVjTULC7hdxbliyHk2xkQ1D19RLc2jEp0rXMzmKMAiZzbfUWSaSsxbYAA51LPO+OkhSfF/j6LE00uATLY8fDYiknj5kN4SislfAklfVwn3CnTNpIkpg7sQsJ9xN2UgXpIOaetaIyKYuqtXZMapctKzsyDkZc5neR64uRjzePbNxMLI6HeWju5fWZpEnkJJracQFbq9YoY7y0kTHewzaTjw5/0D1U40wpKjmQWzJCMW2smL1UvN1eu2qjYGFMqkwftyZVef2SNVfaz8CIkO18XXCXC84nUCkHWUOGWDzTJkj8r0rxZ3HovI2f3dqFquQBOwkJoCWS9VA+czSoyazEDqOMStOwhxUb9p4padbukzEiQ6F7owTZd3Ew5I3HHkWioo6eSoyyRuv3AXnwTS6ieLO+91mZY1j1GxMZhDbyc64lWvJnq4fdegjlTpNKnbcXpwrpQfZW7gvPw4Fn4cjGzexjzxQ9J1fAJVnLg22j/tSB66qkgUhd+OrY0HA1pjH1auIVEeyjyESIlYg9iX+ajdI9sRG6O0ucJcj7o4ziwj5JspUL5rRgFHdDAXuG0xMGqRW/vEkp7s5QcLgx0T0JFqriiqK6D6Rekh+TDXZaF1hRDKQIZCqwfWsoRgoru7iG91sujDD99rZ1W+0EPhvyAsfDIJ35WQqkFVMmhWjqIO0yncskPfuqbVGusiog6yAVypywk3btSpB7Hs/wlUa73g2fBjZlMU1Ogg012a7Y0zpR7DKpKzhl+eUF3QtnDQrFcU038nzDwdA/BDW11g5oL2OpZXEiknkOJ6gJpuI9bQKUdefm+nNqd1juZdkkorEsOxlL+kkL5SVhkls7T38GhvAXXb84xfrm+ub65vrm+ub65vrm+ub65nrv+pV2oEyBMsrIxURlSNVqMEkXpXY5sst8+METXy2W0XeYBDsf+XR6xu+/ecXj2x3+3ivavr6OafPf4mFZPEO34DeRuA24yTTWTnHSLRIdF30NtALfiL6PmW0DJtqQSX1mubBNrCt7wS9RQXRewdxapcaNoVKsh82BwUWOUQT/YsWNODBOcVCFBmyu3bAS5LXfni5Ysm1Mvp/CuNm
14k2dZOvFnVFL6/x6EH+lm+HEkhwxWe4uOm25ayu0VuwKNjdFTCn9CcJjxB+jjARSoQSL6z32lYwTskdBtGuFB0AXKINUPNnLPd6GhaexbyzAynAsRnRDZNQqVUzsjdjahMQ4Bx6MzMo3nZoib3rSSYC/tYs3bGdKMZyyIW261tZeu3wWZy2QwFlS78hBwOjVd6x417oEFQNQjX+n6LmfN8RisaPV1nnBqM1Oq5iOlox0n+wsrNIKqA2ngjtl8eyyXv3VdBx3mDGnSe7l0GPVQsict5tBwbLrUmhMRlcIXeRqGMnFcEodpxR4WgaOOnKRiljwWu6o44ZT/Yy6PrNheuh4tGL5ImD8wgrYeX8pFid4FsFNSPULUDrLdOk4vtJOgTI07Qz9U2a6sSwX0ObrZe342EWIE87qWBUZCxhTiIsX+woVV7SLdiFnnVFoZW7M2VvOpb1OcRAHYTGZBHErmlzMem8VS2GjsnPRtWplbG0yLDsxAzYJyhBkbzi0s6Pv26lwJ5ZSiri9awWcevGLixvDchmwU0/cOJw3mKXDhXDWQRKsXpMxOGPcyudZRU2zAVvcahyMdvOqYXbVjCoabzp53rmTvV+fb9NW0k59cYXeRoJJXLiJF/2Bp7nnwQ3CrHUiMlsBO+cd/tgbXK8YuejXyYCTDo0/rsKdfmS1qRpXWxS0a+lm0WMKhyq7AN0h0+0T/mluHVx7OMlI2Dms3pvinLYQwqp7dWYmL3s9449pBWPrgom9TgtCkft43oHykLtM2CzEzglJ5uvxWq/UAVbp9jGTvVmNihfDfPScbCejdLUayiodYit5QElGs1qmVCC6dE+VoHBamqxIUXxi7p3ooVVLoizrwE1nbMV5wZ4CYc5tDGoKzBcd/mhY9oFJLX9KHYXzMz6vkfVUnCFeShdQAOsC03CLGGeXYng8DqR9EDkc7Tydd4LdnOV+TbLfwlEkf87Xf+4MaWco20zpM2njiBtYJkvfO0wKuv7TSshQosYKjkZkYH7O9UsnUMYYB/wD4LNSyr9ijHkO/HvA94AfAv9aKeXu575IBnuylCgsLT9VKnJBdjpNwE6l/pS+L23dOXo+P14zTgGyHFYlKfi46f0ohqKsfm3GrN5eqHdeE92s/0fJOBgNnhpA0Zc0Rv4ubzJpFgBr2mXsxYJxhXQINHxALwtquQQ7JHbdzAfDHmsyt9O2BRX051lbms5UdqJGXEczxcvf358GoWon2xZnXpwIp80081nBDKEHifphTSurp1ga9Vj8q6KKttH+vQq6GQUahifDcJ/xx0x4nAWEtwiuqiiFLpwK/a2lvxOWEhT6tyfcF3eU/R52O4pzAij1iOKtEYNkoyQBGwtuEXCnjQU75TMRO0fcFr777IHeRU4x8Ha/k2RSTok2/xc2W2kGzPVqAXyrSUZQLSDFP8WNVd85DSgVb2INpe/JwZG2HXEjgWJcJIE6xdDGDtWXsW560ATlfETpy5mPYZYAt2TVKZH2ua1jwyXK+DVnUS+uYpJJ/g3nVgaeMk1y54iDgZDZ9DOXYSJjuJs23E1bbg9bluiYZ68ATgN9aabYFevQKOFB/q0aC1eZhqzPsoI2m/edtRKcOxHgy2rMnXrHciHYuipua73BBsRj8BlML7IkIUHuvVXvOT8JJqXODFLvSLvMpouM+76xqYoeICZBmS3Ygq2GwLo/2q/KwikW8lAEHL9AvMpwuTAdPZUpCygLVwooP9J8v3KA+cIwX8rBlXadAN6tjMVy7yjeCfi9BqVqDGwM9B3HDzynDw1xW1heO/whkIZ11GOfNk1MswRP6Txps+JOzv3VUi8kgiapUOTQqWGne5ixx1kwgFkSEdE9ogleyg/lPVB6A/OrAfUxd2zLzFIc3mR2YSaEREqF1FvipWtafzICNeSlEnHkEDfRt3tSjOx9P9ZR5KojlzoLJavq91o4uDljp4ydPVbz5f4+ER5n3P1R/m/wMC9yr72jBI8pjqr3Zq2VRHX
O6/jOWokPViAZ4WlRPznxISwusFxosVsZ3eeFhBc/v8UL2UiwbYCRUV1WjNByXfBDxMRuPWsKjQGKsmC7WXBgNYY0mQj9HucyKE7KTWiyKQmnGyPmNLe9YxCM3irpUOeCUgTWPVJ1kURCZJZ4g7y/7trjRocdreB3z2LsOeOt3puiBXmxkLY64jdioGyyaCemThj4MTrMIphGf9Bi8yiMZHHLKIqJ0yT7KDjS+t6yg7gzLKMldcL+WP0DpVgxS8YttpFxyrKIV2VKbW/+MtefpwP1vwJ+H7jSP/9t4D8upfwdY8zf1j//mz/vBYRGKQ/mHNApD1GClYkVHCaZaD54+r08gZwN1mSeXx14ColD2UL20gCqD6msAeA9ll6NJRVE3nAb8k9FmTdkAz5jQqZsAVvwfSJ0UexlnG2Yk9Jn+iESQmS/WDC6kBr+A0nwTGn4k1SkiyTQgiIMH5sFgGjOPocXdXVsaQ7kPiScXyPabGiUZFHaFpZCpVnbWQCRVLowGvCLuJpXvQ2gsarq75tZpqn6KBk3plWI7FwATe+jjVL9NXmKMVKmmbJEjHfkwQvGx4PtE/u553To6UalqdfNV4OIrg2UEZkGw7P+yHUYCTbxYtixX3qOS8dnsyf1njQLKyb3mWkKoi822/cOgqzvIQVLUGXm0gcB0/cymwfpoljFSJkpUpwcaGkQhtK3L/d8vHnk9emqBYiap9skAc8UyHsDxjU6vT9p8qQMNMFYCaW+ao1JZ/XsTcckukBeOjClAoS9b+SFhh/r5T1ubka+dfXIi/4gy9IUET+cheGSZ8E9mYJ0gLWIyU7wT1V52yTZk2mxLL4Ktepjr5iptskNJXg5/L12ZCzkTZDkNMLVnypwtSp3Gzi9qGAJ6e5ZPQRWILdW+sZQho7l0uGuZ662I9MYmhArpa5fQxwtOLDaVRN1c0lazZRYARzSMSheEuCyjYQuihXULN3FinGxZxXpmozpfeiLYPQG3/AfoifkyBcbbD3IK3Xf2Xa/sq/PDqYriz8FwQrlAsUT+k7wTzFpEuBYLgNpEHBy7Kt0hXQ2TRJSQhUhJZem/FycdiCCxeIxi9y/tCnErXwmO8u9WAtOmiiidEMMb6cLnG6sYBODE7uixQpLONeuegGjWmmN8RgMufeq7SV7UPa5/JwcYL4Gu9X4M72vhC1SHZI42lQai1WSDy00KrbKW4yvsXkF4cszzGtSa6WQquzMqnNHEUC1YK9SA54vW9FyKyELOL4mCfqZs+IgG5NUJwS5Mw1kHS8z22FpUhSpkzXjqgDlYimzU22jsiYnbt3z+Czs5MmKvcq53ILRWO0sVcledO2kIDPW6T42rFWe/mI0wc9ZzpBZuv3mtOCPGX8Q0knujVj41POrkgXqlnbSfChKtCl9FrmJqGzvqBqGnZzVOQmbNm0KS5HunMmmFXQtwV4UL3ZaLWeERWtwoxPGrLoq1O6dfK56D1VYNMGfKVvgf36K9EslUMaY7wD/Q+B/D/yv9a//VeBv6O//XeDv8YsSqAwmG1Inm6ZaQZg2tjoTwDSiyGz6zHJRKH3mphNn6otORjPHfpDRVzJN4E9Utuv7LuSWeNHGROdXvW1t8Ruh+PuQSCocGLqIc7l1sqrGCEBKBucsvo8sG+liYASkmANYX5T51HNIHfu5I0W7dlYK0vLuKnvljFmjC/Jp6tg/bCjzCrKVFgH4J2FzhSc5IPrHrAewaK00oToFRZtoWBbHlLzIKkQH1Ucor/fH6MItpi4+1Q5RNpX4vqmjuzUqOArLhbRd/QG2r3t8usGeNpTtQNp4lq1luSrsLkfujxvK3suoyIuMhShTG8pyFsRiwo/SYRpT4Bg7LsLEh/2e6zDysAx81V0QXSfrweiGLij48aya0PsnnnxajS6R1DsF8Qp435MpwZKttHrttGBSwo2pKYNfdyesKQ2Q3TSRbMEfjVSMGaWia2JYO3z6jJwyUExK2NlKxThbVSReWTCUIkFsMg1
0jXMYv3ag6ufLvSFu4GIzcRVGrCm8m7YctcrdDqLDdoiDPHMFYvqTvGd3ovmi2cWQR8BYltQxzQ4zOklyIo2eLGBpfY6DkBKWjYoMZtEXmy9sY0rGrap2a8GTOjkQ7WLwe1GDruBhGVfoffDChEydoesiS7Lkk2/sy7ZvaixQ0KlU4zUpL6uqcmWDWShOMn/XJ+LscXdBElldL8VJwVFlOHKQDWyTJD7LhXzWuHPYToDZlVH7Htq0Hmhno9g0iKZVDoW4tUxXTu+VaMsN207sSJYkIxhv8WPCn3Scqb6UTSNIf66NIlzplkxJRkUMJVE4B8zW0VDeCh3eRE2uVQvMzQanB5YzkLaGYwx8qfX0xi3veXWWbCSuJKgWG421GTRR6Cy286Lv5VeGqR8LOUoS7EZh2g23sXn8tfuHxqUlC8AaSZDD04w9TJhxpgxdi1nteTu7fvb3ikAVPtXuOqWgjT1JNmYpRq0x+L2je3K4kyU1TS390oTBVm0jSyuMa4emgfaHKHYtWaYPlQ0qcU8A8qnabcV1nzfdPi/wkhgl0T8Xp8bQ/FlNFVN975nrqK6eDQVKrgDyNaEQwdjUYpGI92adFOhUySmYW3XrbFzdP4pCZip0Bl90rF4nBkIsyEEaDnH2ovs0QTjIa3b3wlR2c5HuWq9dqJPIMvjjGiutN7jRrp54hfe7cnPtqmlD4EycGCNF12qD9vNh4r9sB+r/CPxvgcuzv3tVSnkt6668NsZ8+LO+0Rjzt4C/BdBvbnCjbJxqISIebFnLXj1kdIx2sxlxpnAYOlAhv2Ps+PLpgsN+wNwH/F7ZMvPa4TJZxminU4e1hXz0uIR6XCGLPPysNwtViRjtzpjCqoyum6OKpJmQ8doRyqowXnwRAUojFd2wmflw8yQaTslxmDrK5FSvCEirsa/xYvFST4B6KNQOlJw0MsIz2WDVesCrwq+fCuHpLEM8FwSz0o+3C0yL52nqcIrPMottlb6b9WCxKC2c1ilo2CZnMNhVME2rQKxhviwYZWDEi4CJAzY48rYjbsVHLl4lnm9PvL67ws527VhpctPUt89opRI44SqM7Jeed+OOHz0+I2WLrZ00U84+uyEnJx3AvK4Nq92MnGVziXSB07GXYFxSJ6ybylLKG0e87hGrCKuVUOFpGTgsPV88XK74ulLHFawHOuvvm3JvZvXmq631rL5wius4m0GvgS/LvTVejK7PK6R6C1IwpAF23YzXlsnDvOHtfsfh2JOOXjoL0TSl/2LXN1u7QnEw5B69LzQ8nhs10RppjL02vsjIqLOX9yDKwLaNFccXhflVlPt0Um22LIlI7golZNzoSIshznKImgTuTJSyeEscLLtBZuxfr7xzkE68U7ySPxjCIbfA3sag9fNW3R1Nbrsu4jaZ/WxJBtyQ2O5Gcha1/Xne4AMygqt4xx3kiwRRhROnmqBpcliZP9H+tOZMFzh+DPOHEdNl0qd9+74qI5C2XnUMjTIyPfOV57zrVpSBOV9UPCM6Etauiq6hcEgt/JUk4yoRHyyYTaJMInRoFulsm4X2vP0oSW/cGsYUiMVxioGNX5iTI2VLcAkfEvOQ1JbKQLZkjTG1c1TUp855WR8YdO2r7t+UhfGaijC0gjDG/KOTsZwmCbVbnSvms8Y9W4s90c1qcbAWfpow/ZR+VnAyEq1n57noaHV3iHJo+6NIn8hfogXDWtBXWv98KcWBm8Bk27pNbqNjsaqqXzt2qaxWQs2ol1V02NSkHpH8ie5rrFXejz8WCnYdy1reiylY04Rqa4Fhzjq01fKkYlvtnKXDnmvhrditZmqOyG5U+rtCcSRBlMZEUS/Q1MvPSYOhimFnr2P+QiuCGhMvSpHpTwY/SkJVHSJAHqeb5EvYhoLh7faF7inh94uwMudFYBIxrvfp67ZHv2CU9wsTKGPMvwJ8VUr5h8aYv/GL/v/Xr1LK3wX+LsDlzXeKGyXLFpCtCASaJWNS1zYDyOYdo+KgDCyT+OT
djxv2Dxt4CoS9JBB1dFXF8aqWxmEfyKa0QL0+AKNYDn2P+nNL/UMxlCxaVNYlvE+STNkiCZTibYzL9CGy7WditNKxyu93c2K0PCrQeD/3LItvLXFR6i7kWcUv+9IOpKLgbbLheOwlqAGo5EHJhjJre3KRFqY/ZsLTQrUmqYrG52M3N0OaLdMSMKYwz07UXGsCNdWKQQJm8fpvqth8nqPIm65BubD5sir7SmXjnxbcfhKtnevNqovTZQa/NEBqq9zKulFk7LD+DPmchp2f+Hh44H7Z8v2HD8jq3L0MlmkIYjR5AjKSKJwBulcrBhqOQBepVOX+rDVuERxOkcQx9pa0sdJFuYB0kRsWK0bX8ofWxdM1YBfpRtloWrCxk3RErDqctyCudgkVCyWGtG7tkgCmKuiCaALB6s2mCXcNStfdyMYtzNnjjYhpxmQ5niSBqslrPaRl3ckzRztk5HWsUn312phsKopzOttbOlpMXbW3Ma1jW7QFb0aLO9kGFi4Opme5aTrxtXtpqwWDXrnzxC1sfOS0eIqVDo5RenMNuHaSjW0XOfgFb5bx+3nFhKBrbjbrWvaJb18/8LgZ2xLJxRDVzHS5qGNlsa6R7o1U1XiYdyKkC1DFEtPgpdBwDjvO7yVReRNU0VxlOgYdyen9Sot2plORTnpwpMExPnOi69ap7pkm55UAIEmcFqrRtL1WMSdiQ1OwKthZ42JR9XbZLxWDgirjy7NYTobj0skeWAJT9A1b2fnE6Oq85Gyf1TGzN+RQRORy1MTY6nlwyrhgKINhvjCYjSRxQaEBpnLnS1lfO69yBlX2pAQviZR37yU+VAC52jYV8tdeT1XSSxE9rgC0ToqVg1+LHTdluidJLnIv40pzZsJsrcSwc0xZG7U7wUE5l1kWT1c0OTLruq+g6ezrHlMrl7Te02IRUtLkCbHGnXW03H620QBbl13WuGf0XKv/rSANjTrmi0mSybISg0iCq+r2YoYsZsGaZE8yMnZLoSTETmvQOt7ID7Be9NmSFdxdMaJjGDdI93MSckbFYRrVkmqgeQTT508i7OxP6T2ds5It4VRYDmvM7R8L3WOie1yw+3GFtdTpTHk/9sjNsC3G/lnXL9OB+m8B/yNjzP8AGIArY8z/BfjSGPOxdp8+Br76JV6rARCbcOJ7gbvoIS3A6Hd3F4LT2Et1Ml0GpiWIa7ZahzR2iOpFuYbbKMKMMoKBsMk0LSijiUC9Z6YlUgXjwDgZp4nXmfhqLdExEVT5Un5unhynKTB0Cy+vDnwRHfmtl2pWW+JDF+lc5PZwxX7qiIu+UStvxFQbGivO21URvdnN+EzJBntw6iG3HnZuNHRPEI4KFtTWZBXSkxt+9vsihwmTE52NAnHyYkAZtRsYlT2EZPfCSjvrCAHNC8lZnSXLyGm4L2w/H6VTFSz+/oh5Ogq7wd/I+G4HrpeENEXXRobFvv9soFYP2n0p8h5en66JveN5d+Cvv/o+S3Ycc8c/fvcdHkMm+kzubat4agXXAksS7FFK+vreQvCkzhKHOl6QhG3ZepwTrSI/JsGaDYhq/G7hg2HPm/ECawtRQdi1cneKmTHJrF5rrImHW2piUBrWwJTSwOQ2FQWI68jBnlVG9UCw8t7rvqqMNUlUC8/6I7lY3owXvH66pKgu2sXzI/PsmW8HUeiPMq6VNjdtLXSP8jCyk0JlvnIslxY3m1XZW7FzPwVi30gX2M5iT5I2dSRVuP5Dhz+qiKGOdIwKuAL0t4awL/SPq4mwneXeFCOg7LgVX8GHpy3h0b2XPMniQUH8mgCW0g6UouKUxZ91MjQprEXVfu55+7hjmTx59IJ1scKw7B9tG3U6TfRNXvd07jjbw7Ke0nA2mndGCpIoxqXV/NSeLGU5EyvcaFKUBRhOlvFl9speSrL3yyjdwmq75EbVoeuM6nLxnutD0dGOi7VLZloSXirgXtfEynwq7UswUnBcglhTAdYUxuiZZo93SUZKR9dYim5c8XDSmTS
iFTW5NpI2RcYrXuN1TJVllRnezmshmCWurUlCTY4FVG6WJM9Cx73FW0mk9LPKnjIU6kRCkje7JBUZTVq0rUK473Vk1EzXpow7FZzee6Od5dJJkhBc1ftTfUDdK1VgMvUQQmIcA11BvVRXRq0UWwZnadpzGe3qoM9xKPQ+s9yvHqStyNECtXqcmp+qfs9iet02RvdAQj6nd6z4F9O6eqBsuFG7lUkM4/1RJiF2KSqkerYnbRHMb7UH0h9ddRXjRhoUZjGEvXS5+1vR9Bpuoz5bAb1HPD6IibetQPh6Tiy5JVh1jXRP0lywx2VNnurzdGsBDEjnrJ6Z/+9ioEop/xbwbwFoB+p/U0r5nxpj/g/Avw78Hf31P/hFr1XHG8D7gECQm2tL6zaQDWkfsKPF7w3LZWFZnCQgyCJN0TTMTPsRRqsoY4SZZrUrMq0/t2jLEVcaM6X40nAQoVu7Tl47HKccZIxXKZsWiJZp3/M2Ol7c7HEuC5i1k5+fLhKvLvd8a/PAm9OFYLIUkCvYExl95dmCL8IYU1wYSNWYZiubsBeQq1kE01MPy2ZPUYG2ChKVF7BtUQFQXe0nS5xl4ZfJyWGwFG2bFxkdULB7WdhBKaLF1DGKgayKwsEKy6p2g3vXZuvnHldxo+O7rXT1DktHeegIyrJonQZN4hpOxRllR8qGe/10xacP12y6hVfbJ/1YYtBbn48ksJotV4uYioXRn0XRSthbjBcF6LQRHErcyvf6QYQ5KU6qMrQzhsH5zMM88NXhgukYmk91McgztNqeHuR2pV7z5iBdPmZURFLvrRXLDbskwpPFTqmJDNaAv2546UxJ58yd4SLk1zhIh2zjFqbkidlyOvbEfcCc1qQ1zJIYrVIXtLF6diIbUX0IVcNV1YMhPIoEQxpqkrsmKKmXZCh1soZtgiXb9vznKzEDbTY9rXOih616lDV8xtm9FbakjFudzaTJ0T2Jons96G2S8cj4TEZT4akQHtNaWYOCapMQA5zEE8kEwLvEu/2W+fVOElMDeZAOrPgXSqJpU2n3rF1G8WBFC5FOEtq40ZFRzKBj8vrZ0jYwfnvBbaPYVr1WSRCtvE1Bvh/IqZAGUT1PgyQy4VAY7nKLS9OV3P/s5Z6iptFyE1EAt4zWZJxUO61QFoupEINU14OOZpZ1nG9jYVw8S3TEbNn1M6dZurFDiMRh4XGTKZ1RKriyhc8K0BSU1Zh9ix8micCqG2H7Oq7ClqWI7IAz8hX8WjhXILB6XeZNaK+VvaV0FjN5mqXNmU+jALoKFvVlzEj3yq7vB+16FS1emoVOVrLNrPIWlc3sM84n8WJ00h3NoTT5k9qVjBvoXeI4Dw0fVu9Nncb4kyTnVR28WsqI/Zgh90k8RWtxXTvttZN91oFqyVErSAtWiUHyJfHFhPXzmuAl+aoSCM1xQFjSbpL3ZGfpUPpRk+xYGqdK7kkR83IvMj4U04Q/W3wxwCJnSVIbLs7uRcM2x6zmxqzkkrq8lTVpZ+1KptrZTCvu6RwwXhMlt2ICW6L8teTyZ10/vz/186+/A/xLxpjvA/+S/vmb65vrm+ub65vrm+ub65vr/++vP5eQZinl7yFsO0op74C/+ef6adpeDgehKVcNCpOkepLWoVmzcQWapV6oj3ERUDB9kqw1Bspc6ay814aDOuqSSqtq8lC0ogStJtYsWDAMIkq27Rfm6HBWqd9jaFilOssHeb/x5Lk1O+Lk8QpWr+yd0xL4/HTNm6cL5tmRjx5/Eo0LN0vb1nogSpvfnbVg7Qz+0VFOFqNedOLDVciYNno4tzlo8/yaQZ8DkS0N4JwUU2Vmq+BGqRz8MVOMtKjDUc0lD2llLVWXdQWQZ7VPkVEFHD8MouVyzPhN17ooMv4xpE2m95kpesK9bRi2OvN3C2LzMecGaqzdFzsLa3CcAg+PW74011iXGIYFZwolyn2yk4GsgqeztsFVOySchKUo1gqsnTMvY6+keBKTlD1ldATTSWdNfA6h7yO
34477xy3lICywWgXWTkrtegkrsTQgezgUwilLl6n57BnFQKkv1Sx+XEbX29kmbM+zMoqEGVqrcUgbg9lGNnZmHztSsXS9eB9mwD54xbfwHitVBCXBKfUoqrVGMaKdlQZEnPNRGFniTVZUBHD1lytetLJyV9ToWjq+OUC+jizfEvB3HIN2CgvMVtbiJAwaMfdVckPWqrYETC6kjVgcGWR/F9WkqgwkGcfIGNBk6PYZf1QhwVSwx+k9EDnQ9NYwQl7x2yOfGelyXW9GPtw+8TQPfPF0yVN/hTuIwWntQMVdxiihZLkQqnUFyVbNrNxZUnKiJ2SyvAfrWC49fjdjXaYUt8YWA7kvRNQKxFpMkni4XMD4QnFSxjQsW6ndplLwB4MZYKmyKdplCntZVBXIXUUi7WIhWoE8TNqtVQajHwt+1I6Dkdg0noTVGWfH1EnXp8bIzifoE2Wx4IWtjFBP9I3qPenFP1IY0updV+rYyZCMh0GxsGrD4mo3xAqDC2Pwh9jiXvbSsTWz4PEqIFxdGXW/2KY/aHKS5xj1mZwxznBO9ljFwrSRnvz/cCzCwNxVaRowvhBCYgixdU3q/ipVAseKqHHMlnLUqcrZa1fIgeAn0W5KIXE2AdGfl5Iy8LRb6GaZJLjpbLxV33uFAFTGYRQGcMVh1lGlmClHkd0gvCcuSimYMeIPjs2tJx7k3ypLzmgswNT9Kx/KOJmieJ8pxRD7TLwoZwQIsCdLd2+bDlb1ls1OmPRVWsIsAq2w5wKn7f2BGzOdMvxMLoKTmqJoqeX800SO88/3Nbbiz7t+5WbCFaRcW4x2ijJ31llpm5laRIvJFJKXNl0eHW6TKLmQo8WpCvn5VVt7ttG/aWMI0IO6skESGGcoQaiixRmMS02ywLtELhCjZzmFVU/oDIxeVDPK2NLGGBXEaZLhOAd+/PiMw91GhM6O4nZfv5w1DTieFk0AzuiqIBs+jILlcbN8HptUQOxQlG6uG6WCIc93btWAsVYSisWQonwWO6n5ZhSAqD9FUh8E+zSX9qyqxkZroyoCXEZ1NNG+uNGgViBtO+wiJ/SyNSw7yH1u+lzvPbcoYxt/Er0pOyVVvjUtSfQnwVpcX5wYF8946nCu4LRPXQrYmiwXGR25k2mMDD8pY0OFGWtARoHMqZP3lztHXlQNXW9l6qR9HQdDfLbw8e4owpK2jn71vtiauNNA5DVZqQxDP2nQsHWjnkfZsuIMbFqVcM9wbHD+vRVXuCZDcQDfRRKWY+y4P22YpoDzieHFTLx2xNmRv6ompejorCZ8pWEa6lhueTIsF/L5a3FQ2+TNfFhHt6kXQchiwQXFQi1C0XdDou8jy+LAShJknOroFEOJAoqOyP5xTlv5RggZdvYsGzHZfhp7zNEJKcBKwhIxamxM+6rYLHN+jyvzsQgezSymPcdXmyf+2vWPSK8MjkIwiWPu+Gq+5Nlww3++eKZTIB59wzCWXkybjREjYsGCQYqS0EaVdGgg2CU3Vl0OhvSuX8f7Z6Sg1Ml9m6+sJokKuN0KUSD3MnYum0RVDrePnrCX5Cp2sj4raUXiyflhYbHJSUJ8AnsUYVu7SHypBB0/ZhmtzasCeIoWq3iW5RDk9RfD3X4rGkh7L2rRVcagaJIu8n2KUbGNAerG3JK64mB83ilYu9DdjitQuAJ/kXVbTD1UhZAEUHoVynQGY01LEOSm1iRFR6qVjbfoAbuowruKbv7UYXo26gmHTAqO5VLig1HRXO8yl92E2STywRHUJcHp2VBxP9PkcSctgjTWnROe7KIjsrM9XovNuqCXRX3eRi0+J/my04oZakxDW34qlry3J+rvjVkLuHNWWsMzQcU+SXGznhPtNuUqH2BYlvU5e5+Eta6epKYmhGe/+hHcqbB9kwn7RPfmJAndEuW5WIML9v0zr8pOJBUn7lYSUMPPOSvP1J/JxKjW309dZ3IZf9b1K0+ggLZIrGprmCliZ0MKK2WzJIhH0SKRqtQQLyxskui
+jBZ3VAmDr83q3bJSWitLrWbrTTCyFkIK6IuDoRgLWwT8V4w8ZCBlK52xswBnI5TJkJzF9InrixN717P4fu0GDJltt2BNwXaJvLyv8VSTLTfKxokKAqz0TTdBMYU8wHwtnRN3MiqqJgmcneWQynUx2YrGNg1XA+vmEB0NS4xaPVeqcjrP7AumRzpaViwGGsVVbR8AyAkLwlA8FnqdP9fXsnMUIc/63xUgH6PlWMLKOtHPGk5ZjSUTrgodWkPB6hwbnvYbLi9ObLtlfd1imBcvYpCaENnzmbliNqrCuABVbWONGWWYFF8onRoN1w1tZD0N94XYS3Jot5Gb4cTT3Esy6KoKugZGI/T7kqR7lQOkG3kvdjHke0k8wt5SlI1KneVn1sOhUawVC3AW4CoAm5T1gJPK0yapcJ0rPCwbjrEjZUM6BPJsq3sJ1QD13CC3BuxWxDTsQdG1SKMqh1O9j4IJsousHVTAUeQ4aMzG1Ev3sWQ4/eAKdzJ02mmW7kwR3Z+ZBmavwNkVrF4aKLv4wjgF3N7iDwpgrRovi7yHZadgV33u9bB8T4MpZylmRhHvoxMCxZgDd3HLYxx4WDZ8vr9mjJ7j1DE+9LBY7Mk24G4qlrQ4Ebqt3XTkV+lQyKZvpuc1FDqjMgeZKu1SmaxgZD3aoqKakoimXvBl/mSwD+hrrjhEsnQ6lgut2JVJV87iXRVyzApcFuyR2KjU2FOtdqqsRu1y1wQqzw67idgukbKHyWAnyzJ7rMvtkK17EPR5B/lscVM7UQa3yBprmLcscguVNWnvDwI2r8D/UpoMiEmF3DsIGifGqHil8h6Bpu2rqFWBMl/rczDZUoqTrnntOJ3jYoByPueoTLyDYT5IFy13hrzYpok1bGdOF45YpFgto8Fn1Iy4SBJd5HmVs+Sp6sTZpRbYKnGylCYwK4mxIY2e4Whwp9J0kdwpiWBsm/KsSVSxrHiws8/YrirhcAbEr8/EFNV3spKc1L1dGc5GdaRMAWLGj4U4CbOuAFWmoBS0+3e2v/Xf0lCYi8F1Bn8SPTUTB2XsJ+xxkc9UJWAKa4JkjcT0bFdg+dc/m3eC4bV2ZeK1DVne/7+/4PqVJ1BSPUFzGq8LtLI/Eo1thnHCDlJhv2OAODr8vW8S71WBu1o+uEVB41YShTpGAQkgVQeodgiyqg+7GWVoFC77mVwMd8tG1bqhZg11RGOjoSzA3pEKpBujfkQapDyYXk6rXAxOVcSXyVJGt35Wc9adqHR7PdS6ezlU5ijjpdwVlmcCZjVL1W9Rmq8zWAU4nosDyql3tlH0c5uo2j81vmiVLs8C2bCpgAK669WSp/MxoSbDoUB4e8SUIirehxEzzRJo5mdS8SfDfJTWv6dWjzWxWyn87fWtfJki4OD8ruP+IWC2ibBZVnx8cjJ+mCXZrslxG4dWCYO4SgXIPVJxQYMcoK5QFLybghwylcVhkgQ9Y0Wg9d1hy/jUYyar4NjSfq4oE2vws5CqRYaRDhFFlaPPdUYsOq5kDfi1pV69y/Q5NiArGmi1cJART+Gin5mSJ2XLpls4XE3Mjz3u0dHd29Y2rwKKxQMb2tizODidjfByQOxcsmHzhdptaGCu1jsyCsqNZdTYeU7Gf+kqMQyRfBpEe8ohlWCmWdvUX8+r7coOrZ0ZoewXVSxeRyLvHapmXcsmAsp60w25JqEpryy1ZIi+MMbAP7j/Ln/w1Svm2ZMXS0nSsTWTIzxYFXlc9+58g8hodJm+dsZrAp+ku5B90eTPUlIRvayNZ3xmuPzoifHUsRwC5W2n34OSW0oTnyzGNDXvOpJ3o2jcVDuX6doQe8he4kcdoxTdrufMZVFAXzWwqmzBSkc/T0BrEqJrTzXsjC3YkIQMA4Qusulnnl5AWhw5WdLBY1TBvGp7FS+Hb/XbFAaaaQBv/7Rgx4hJCfqO8jVwb2WrmpQb7R2nSvDOtL+rzDtTO1fnYpI6rioVRBycTkJ0zBNTY8EWZb82lrMBEyWJ8id
hR/pgWK5FN3DJ4vZg+kQyMuVIkwVjyb6Qd4lgxYHiPQeGhSYFYLWosUkSP7sYLZbU37GeA5p0iZdmtYU6n0qcFV9JR5bnSeR5ohmzMP2ynhln97zeA1nXVcBSkvcWA+qX0YbGiAiO1nE94FzGdYncl7M1x6rVqGfS9Nyw7BzFGfzocGOhz2sxX5OnJkwNLU6e6bq2Pd/ug+J4ivpKtk5bPvv3X2KU9ytNoIoRtkK8qEHVYWOQYq1IEmX09yA3000iEilt3YI5OLp70yriRrFtOlC00Ywf1wO63cyy/r82rsiobYn4YW3DjLeZN48XxMVJO3pymNm0KrEGd5fAzY7b/grbJRHsVMZKOTmhQs+e/BSEohnf1yTKqtMC2hVQH7XGqNDkMOyrinD9v5zZXdRIzfvteWjMkTr/bt5mRv+/pani5iQLqia2NUicP7+v61W+dxkNPCBz9UVm6CiVuhggGcoonUV3WmmubfPFvLZW9bM0pqSRxK+7c4BnfukpPkMnI1Qzia5MONASlTo+Oz/k2/uxNSDQMCS4QuoKtlMGHdW7yykbSqQApuTZP2ywjzKmsBHtUrI+47rGMvg9jRVTZQLaVZ8NIuhZgprNVq2as2SpMpBa27msY7aKu8pdoQ+RjOEUA8epYzvMbIeZ9IFh/7gRMdcnJziZIklI1azJsyYfSXM5J+y53BeKKaLtkqDbS+BsOCpz9quTwqM4eY65A79b+NazBz7/SwhF3UggNcVIInpw5IMwR22PHoIig+BPWvwUy3IpnZm0iIhqGmS8mINl2akCuHZWxP7C0t8F7GzlcEjy8OuItt67uicuwsQxhsb47bcLNxdH5uh4OgykZasJh2nGrsIiKm0cXJMTKNqlWrtn86XFB4M3huVSsIHTGFiOARbb5AhMkfsoTK6iRU8dNReWq8z0gcQ0FGfS3BSSwT/ZZvOTK0Os5vlZkpnswFS/SFO7E7pvtNhsyZzKiogptxG1/yjeplU2hALLrPIftpBdkbUcBJeRjSxba4wmhAi7OCkOsbdiT2Mh9z0m9bJF5tw6YG4/yaF/dmiaJSmMIas4rnSjsre8h/OocQ2oQr16S3TNFxkTNWyiW7+nxou6xnUEbKJ0lKUQUX01A9YUltmLXt9km5k4aMHSZdGQi3XyIHFKut4SJxojr4oLJ01sp5pAWYyOrIt+kDXO6YeqiVCNqVrolNYm1Wd7protsSetHXFj3h//QSvUbTwrxJ0hG4PLSc/gQjqKmK2JmswWYco7n5iH2pioya5qjh3k81Ybn/52EWjHrFITxvwUA85gKF9j2NX1fP537VL9MVKmeM0bXP38tQD7sw47uX7FHagV8C0HuW008iaKVqv0rcz3zWLwCtzN3apOmjo9MOY1mWmQlWZkSPu1arycW15UUcUcDekEFEMsMrLzNpOiFZzDKFVnPXRrllw/h0ngbgN5cKtK8wRmsswn0a1yBwmMMnYzLUDVLknVfbKLVraahS87mF5Ke9+N0N2b98QmqwN9GrT6U6qrOV/00CqoFDRw9VnGkfWeGU2kOgGFyyFktGtnWheqjnQabgDEhqFzzNee7C71IC90U2yZfT1UAK3k5VCsuCDpHGYZ3VV9HA8G0S/JapqaryOL9XT3luG1k4C7LaTdOk7yR+0EWdPuk5tykw1oQMSvjb2L01Fr7wW70ssmiluYbvR5XBU+vN6z8zMl2feSSZNoNPfzw8gopq/a/wgGTA4LO7sWCIwR/aw4OJwx2OMsI4NGtZXAJhW7VkutcqyfSYDLzghAdcmWw6ljOXTYPjFsZp4927Mkx1O6FJ+t0eCOq8SDjZK09w+5BeHUCY4n9/r80tln1mJA9q4m+k7WbH1vxYB1me9d3PIvf/R7OAoPacOYA6cU+IdvP+HL2ytiH0hbJTbsZETvZsiPEny9N8QdmE2knDxhb/HHVfpANkF9npCsUWKAkiacoWTfDhGJF2dfrvDPXXzBb/Vf8t9+/oxgEq/CPS/
cni/iDX80fsR/dPXbPJ16KYyyJUdDWSzhQuZkceNxDvKQpfB10hkNh3KWKIBdbOuAlx/t2DxWzSTaHsdIdycNXgsNwX/FTaHsEn6IbLYTV8OEt5lcDF/eXzLf92Lpc5kpXYZw1kU+uta9zuorVhXGMaV1pN4TYqwIXgX05mDEBNcUltGDFkU2GuJjx34UEVszq+tEWe9v3NViQjuIWnRWB4DYy+vL2FMSi+0bTSK8ahppUmSKvLc0eElm5ow7LaxdKsjGtq4L9qzIi9pl8k6KmNr5dlZkEoyovqMA97UbXFrn2MSCI+NPKxmp+ibu/CyGwslI13JcJVuKNdLB2wf6aW0ErAbkupcdKitTgxQqxyIx1SYoi2nFj03SgTIpN+hFc3SoI8zaTVKNLKM+nMUYeb6d7BU7G7C5FXD1HhRjJOb3jvG5k6JyUg9GxawJFKfgp0w+WtF1mkTbDAT4vkweexRcrj/IJGW6KCyXhfBk8XsZR7ZOY8ty1+5QK+ijtHyNpUEystfxuH4mwccp0ySvCXTV1SvvBfP6+j8DG3V2/eoxUGUNEPXAFG0G/WcjY5LlOlO2CYmGMsKqhrvLlW6OBT1AJPWu7I2s96ip807SKTp/D40hlaVNnY7yw0sxHJaOx7knTh4zOdxR8FatS1Hqz6Y5S5sqO4/8fXFQQiF0Sdq1WwPZUB49ebSqQURjH1TNF5tUNXkUY97lykkl+3IiGZhGJx5Ti6W/s4RH6J5kQ60nMeRzDagMkEU80CFjgT5RsmsdgnZrNDjWL0DYUCn9zM6TaHYV0uA4PXfEb3tMKoQD3IxbwUAV1ZbSVn3VIznXsLKzbvw5rngrI0rBRjsPFOguZvzNidOznv5PB8F/nOSQamv9rNt4jgt5D0yof7aq/VJxK84nYihN0K5WhqmTpH55Efne1S3WZFyfSDsN6E9WE/bSNHSANnpZrks9nwiPYr/TP1pVA9dkqFY/1bakYQ5Yg4YzsMB5K7qNZOpndYVUDLkYrrqJeOF4+9jDm555HsSE2hZCFN+5qs1WGZoVe3QOCK1mrlkPgHDU0a56TZm03tvGai3Q/NQslGzIGNHtyoE38yX72DGmwBy9MGwLkiiedXKaQKcv2g0s7z1XN4rSsD/llugXbxifyd6R/SSaQrqw5M1VsKyOHHNXwBde+if+e9tPeWbfElTM5sdxz87MBBP5k5sPuN1sWbJjSY7jErh/2rDbSAZ3v+mlkxsKmEKyMF+v+9GP+tY1wMvel4JRCqjaVdaDS3FiRseocVdjI8THjqf7jidzKf+3WjxlQ7xKmE3EeXkf1hSsy8y2I8VAmvUAymuCY7Jp9/zrOji1I12cIQVJiPt+wfnMmHvKIqOptm40YcKw7jEjmyAbwbpU/8M6YWj3RDt7dobuIB0Iu6Q2NhbjbS3isuLuerFg6e6NYr/SOiKvB2bthJ99vorpKQH59bzjW/GAddSlmkvAmlxFWXu1g+NGK5qFReyGxqEXIdQCFCPNOi+dRRbbOkf1XGpdJL1nTetK8UVOY2WxClfIKzZRkhft4ue8YsraOcAKDlfGorFGR6FJMEpBNAIFl5nBqRWQwggE1iKJWXaCZUtKhPITmmiieLSCdwV/0MS81lTFkBfHoIKZ3YP6/gXD+HFmGhLTM8NyJazc7t41NujljyfsFM+wWWe4TX2euZfELvUCtSnKzCSdrYVSGuOyjTrPr/+vw0DVIkbnonaSdpwZl9ZZKk6eYdkmhquJMRmyryrchjJkliALmWSIe0cYTFO69WrjURwsV5kqfFnl5jWWyPtQzAim4EZHMYWSDHNyPB0HePINl4Suf7fU7g80k9g+waX8rHi07dCgz1gn7JxtL6fUOy5JU0c+8t68v3gJ3iCv78dCfxvVOsSynHrSLmOuFugMJYkFgJ0sZa/JSKyHmJXD+KxLVMGPslllfFL6TE6miZi1jl390k3bAsp5JZOhWhyQV1Bz3EjHykYFdsLaftbxZAkCAk6
djFlbIhXPgL7IJiMbiUs65pv3HfZq4sWLPQ9dIs5OqNKzdHOKpxnTpr4mQEYDJk0Qrop2fn3TmBpca0vcyKi0X+S+Lc+NCFRmJ8rxfZLYpx3GshEMlTsKVkFGERC3WQ6qxTQq85rolbUqVJyaBNM6ynRrJ/Hrm1z3VVZZCVmQEJNjTIHgEt++fKD/buSr+wuW+4H+K4dJVhIGI8nDclGrUwhPsi5zJ5YbRUHeMhqC3WcqUHkQU+li+ZnBxiTTkjLR/TR8erjhn7z5mIN2cEoVPZ0c9mgJFdi+rBgjO0vCJgbL+mwWC11muaySBfLA3CzrNvUa1OtVWANnHeNp90HAvyJWa7rM23jJP55ucCazFM992vK7x7/CUxx4XAZ+/92HjHMgJUuKjrQIrvEpW6zNjcWWetq6ky6xPCwbIU+CQZwvDdPzQv/dPeOxoxw9/Z0/O/j0vnpZV6XIvvWbSE6G8uTpby3dA62AWC5gepGxFwsUSCcxgE6+YLaRauRa31eVWWgkm0oqKLSuZt3j5izplFBgGLoFrmA6dJRFJDT67UIXYsul5tkTFy/yKZN0v1JXJC6dLek6McDAcFcI+7RS0Ct2rZr6zhmrIpD+GCmTlUTFGAoFW5Bu0Z9xEAp2SLrS5SyZMqm0Ljhn4GQJmqZ10svZ/nVTrdDFl3FS27E5OozLpCsptuwk5u/LZabfzUzz0Cx7qhdspeZLIsSqpB+z4LqtEWcEY7BTgLzK0MhUQpOnivnTeP2e6njOEDPWmRX3AyqjMoMxmGlZP3/FhRkj90ptp7p9lk6iZf35S24sajdKLA5HSwpylizJCka5TwLLKWrHNkM4Aq81jvei6l+cjMEx6urglKw0n3dFaFCI+hyrUbHEUNYilPdj6E8lT38OIPnXx4jfXN9c31zfXN9c31zfXN9c31y/4PrVdqCK4GzWqkwZBKbaBhitjpCKLVoq1dGNAl40Lxf6YaEPC6ep49QP5F7GbFbxBdXKIT1fdDbvGrI/OynoG4ajFhhKGSUbrCnkbJo4WRU9Ex+1FVfjTwaMJSYDH0wYn8ldkTGHMbAYpoMgJachEEISsb3q2J0KVjNc0QURAGHWLkoabLPQGN4ZeOvafFqwQ2JT4U9QMT/1apWiuo6vLBJtq4PobAWxipGWueGciSeYHQMlN3HFdmnqLe1w8STqH0WXqsoGmKjaLDGrj5Z2NryhBLE4aAKnoGOfdbRSwe9V5wUEazY/et7edHz00T0pW5bouH93QTm4taoOtNfPE++B62t7uWG4tKKpvqLNn1E8d3GT0OSzN8x7y2fHa6bkRfsmqm5OxVv6akMAnHStzNDfutWypopYVnyUvp/sLbmzzaD3z7QTUADz1xl8Dd+nf52y5ZQd1hS+d3XL965uGb/t+cG3XnAcO+bHHvsoIqDpJsqHzbLu6mvkLkOfGa4mQogsi2ecLwCDXWR/iOil4EVKlbnIa2fRxkK0BucTU/Tc/fAZ/sni61in7sFF9Ye08+TGuudWk+oaN6pkRe4KyxWMs2FRcdj5Urp+xUinOz0YuocOfxJLHneY9WUk/ohxdIY+4brEQ9zwHz38Dv/o9hMxyl084xxkihMd6a4X3NhocFFp6V1hiZbks1jxLIZ0kcXLMpiGB8seAbyryOZ8ZUgXkZyNjDBrLLBnHQ7k/WUdY5s+rUy3kBiHnrjzjUmaryP95YQx0hUykxMMpi/kFChdFrFfZftWHGUdBVd4QnZmBddXDFRtjFnB8uVsiFhCiMzen41o4FJxWQDLYDnNgVPXMQdPSZY8OhW0RHGXst7b57eQeiuadBunbNNMmBLGO75OmnFjxJ0k5mDMOn4qvDfKkh5E7UprvFFMnIlZQOQ1BlUz4vN9aBEAtalaSYVVb0h+XwrMybHfD+RDaF3lxmjtCt5npvI1Kn9Z92+9z/J9tXuinfolY4xK/rw36lbIhQLQa9cIVkmHhp+E1m2TNeZkjInuX2MED1axtBUiofci9U7
kf6zicZstlQEnnWWjU5Aqy5EWIRmELmJdEiygWuEUV1i2EjO6O80Hsuz//j7JKP6UcMdFupDQzolmMaPPSkSe5V6DEEzqqNaQVxkFL2Pfrwvrttf+WfH37PqVj/DqwSYjAosNKmwFVDA1BYiG5aEn3DvCg4xD0sY0DaDeR+boz19afi3rX/g+EecV6Ft8EWB0MkIjdkAxbXMJ2LewCQvTMPNkt8oSkRc0RhIwW9bAE/biJzbSkzYZrzR6u4B7chQnLAusZ+oKpcuExTSdldSLX09VXm+Gkx6ma8f40nD6OGJnwWH1t+/74ImIICwbUQX3o1uxCt42PBGA0OlF9M+oSTHuzCeLtfUpuKw63rOqDyML01Q+tDGNBooBf8x0D4uC9rQ9HgXXJDgUPazUKL0qNVflZKhtdcluG2VWR50C6i707yy87fni9AL0szBL69wfUaNa8bY7F1FFD4LC2irn7NmbbCjZrpgQJ2OY1Muzrpi6149XHMcOfxtaIu6OBuugNBNhWGU55CD3Jwmu1WSzjggrrqQ4AWrPF7Y9C1M00J/RqJu6poJZK5NSAqloCu26mWfDkS+Pl7zd73iaep5vjrwcDvwLrz7lKfb8bvkOy+gwk1EFartKacyC0ypOVMHTJjD3oncVKoHCmvfGApW92fwpy5okFgebfuGim1azZaPJcapJLU12oI5961qXw1NBvAqkt09egbl1veo48rLoWKRIMpoMcSdJoZ0Ndlbz0KWsiacv2CAeZr2NvB6v+dHnLygnjTF9Ei+vxQrA9SCegFXQcL42xMVAtisg1hbCZoHNQnza4Y9GxH+NJAxR37NZDOkHF2ye5J60tXM+tXWVyQY+ZPoQ+bXrO4bnET5BzL9NprfCvvzydMkffvpKGa80DTaiALUrZKIe3G3M2OtBF0zTaxJMjhZXGqNzEEVpYwrz7EnJihNANJR9YDp5voxO1dVFl6wKFA/bmVIkZpaC6CYFJQp5wdQUD/vdylCWQlGMZd1RGJU5WKyc9CyXcvL7MRHeLi1+GLuO51rMOqfma5FZegeTHCDGWYpzlODIqoLe5BQy61inYv1qvD0raKwtdC6RJ4d7co0lXu979pbTocMdBFxeVf0rFq2RN3q5L5V0VSzrOFGxP8KSFYB6rjIZRUZjBVVcP4+x2chn7CWBtQ3fJfc0d4p58hZyliR3yWsCBeAtces4fEs8Gbt7Q/ckiVIOrj0LwbbVJFFJFMk0Z49uprHhszcsV4V4meTcfzIM71SfbFSMbNZEr4adliDWICTPVNaTfGVXSL3FBYdLRfY9aFJYC3X73ve31/xZidX5bfi5//r/gSt3sFxmxVfUh5XXQGs0kYkGv5fkqb9TAKsvdD7x7mnHsjjSY4e/d825uapzmyQL6qQMOqe4ilJMqwQAvVFyqNmEKJPbgreZ57sj9xeXmGUF/9XFWgG7WRMPN8L2M0u8sNhJrBLcXFguVZFZOz52NuS8diIqfboJfVoBmIaDGk9qkmcvFz548UQuhvvHbcP9+HeB8Ci0/bBHA6Bbs+azeW8DUHooPuNdZplCq+RFoby8X/0YSbiqiGHFjr2nUZSgGGGO5U6CUgWWt9l6LriTOHfnwJnWz/o8RL8nr9WRVk8lQwVnph7SRaIcPMMb2LxxpN4TdzBfFcJemE79YyGNctAK87EeyOv7kQ0t1iMmqXLvKArtVVhUqkWxakgDpE0m30ROU2B+6uhHmjmqm+T+13tZq3kUAJy3NJPi2pVKnZqpJicqy0YOqbg1uMWuGzkjgR3kMMiwqjGvchOi4lzAFZzNvOwO5GI4zB1vPr/hTbnhD10hbBcwheV+oLu3uMmQ9xZ/ouGO3Fjon0rrhNSubtxYUic2Kf2jkj+KxR8TbhTj1yryapKBs33tXeaqG3HfOjI+Cwp2BKIl3DtJQguKe6hVM7BIEKwQL2n1SeLUAPkParZqDP2t3PP5Wg4cSapFSdsquJZq2+EUMOuK6Htlw6Ubyb1
h2M0sIeND4sXlAWsK+6nj/nQDWMUIynuKm0LuBOfWDKsB7zObfubuQ89y14Ex0rlG1ovovVmGtwIez0FEKl21wEgizEis2WbBWNm/3mYuw8iLcOBl2GNNZimO3334Lq8fryi3PTZrInkhtHJTrVqWupelc9K0t7KCu33tVnCGx9Ffs9jNBJfIxajOU2hx1M6iOp2mASUSEoMUjyZkuu0sBrguU0ztJK1dlKx2SvO1SsxoYVTvWT0zJEgBarC87MSY+CJKDFljSb3fpjHNWgJlpXOaBo8J9SxS2ZyzhKgETUTIkArWZNGEDLbtQ4mHWnwVQ3AJE7JMCvYrHlCKKMvcdWLmHVVI+Jipnb+6VnOohVtN2urBLlinepY0QpIxDdd1jic9B5Gf34vcSaCypyiYKBAwvq9dHS2QLSvmVV9PGL8VPymK801wdzbkxbTi2mRwetaVbCizwz06Nl8Z/LEwPGSyM5xeWcp3FsrNzGl2jLcBOxu6By/6V2Ph4nXC75MWVamJhbbPqESE1EuBYqmkKCkcTNVgzrQ1wFkx/ee5fuUJVK0y5abWTFBVoaN0eQTwJoeBKSv7rXSF+/2G5bMd/iCt87A36xirKFsoFdxkcXdhpfyrW4y4ZxfVaiqreqp64pVs2M8d1shBhLbqq2t9bbE2ccBevMCGd4DqDwFNjTntMmYXGbazgGb3gTR60gCLtspbq77ukQW6fRI/n0fHeNvz1hUudyOffHDHkhxzcnxpbjAlNLE8N30tSVjSmkG39qsE0VyTyTpCyqV1XdqzKqz00SyvSU1ytBNQhUZzZ5pI4PqsJUEhJfyYcGNon88kkTFYmW7SlibK/5fZmWt0ZZBAay8WRlvIwXP5p9LVMUm9kvQQqO1iN5VmK2M00FY2iV1WWQNyVgCkFUBwTXr0K20z8YNE2M48vxQbmcn25A4spem4FKtJ/EmewXwlDKu4K8wfxEYltw+e8GRIb1XKIGqwszr6rEyhr1dWpYjW1HmCXNvy5yDRYliS45QC12Hku1d33N5fkG87/N7iTp3cSyd+aTaKXEYF8FZdsXlXRRtX+RGsBLEKGK2MIVcJIUkpx9GeJdzy/qbo6GzkX/zNP2JKnrtZlNL3c89nnz8nvwu4UTt0ChZ1ThKAejBVoc802XbQ2wjhIKrHQgqB1FncbEmdAFPDIWJPUejdo1pCgCRQ9bYlQ5wDqVj+hd0P4TfBmsLL8MRH/oHHvOGz+Rn//vJXOO17lmhbAkgGNklEcvU+ko3s+WJwIRMvIrN12MmpVUUFyRumZ+vzvvgxbbxskkgkyHOXbq21BWMKP3x4zh8uH0oNUgwpWWK0LA89ZpSNnDcZMyT6rTAEU3SkN4M0JLQbXEG5NoGdhPFcmcaivVRwVUjzjDBijCgJhS6SZukK5GBB7WPOx1YiXSKaUVPppDOkIxqznCWdtStbRBXdjRAOsHmbJAme1+TAznUPJ/whNfLAchkaS88uGfKZIrlFO5hgmgCcxmFvKF7o7pKcJEx1UtDxosRFEbaV7uOa0JhUMKqHVbL4oHbDwrzpMAvvjU1zOHMv6Nc4KMmssEbPL3k+4iEnMVTY6/5YWke7diyrVlIDkWus+LpGoI2Zki0lWFisxOrTgs9ZzuSTdvKCa1OB8zGiPyY2X/kmXCz3W8/XSQsaTbzqerZpLQTIBn8qdIdCeJICsr+zxM8H4kWCPlOeiYdn8Z44GvzBsH2jseDcS/fso1bNsuLO5Izql7NKFIC1zZfbOVmT51/2+tUnUBnNumurXw+0mpzocyohi4bLo1L+g/zd9DAw3Em1nDpWT7n5DHuTK25CbkTFhqxCZDTdoWoFYINpOj3HqeM0BszJaaveNHZf3dyiMl1IF5l4VTDJr8nhRrs3SovuNwvfe3FLypYf+WfM85b06LD9mTSCXnUMkb3BJ6Ft9ncW++WW42bDw/NrYUMVg99baZ0+qifZvOJ6zHkCdd6SBMwsVa3tEznKzan4g8oM/Ho
rGbQLVTdSRmbJzmGKtEiXjSQE8p8rmyXpKE/eX/FrRRKexJbCJtbsPysF1xhJpMqakIEc+hev9qQPLLc3F9hxFR61SX3UBkPspZMDWvWcJSSNcv+ep1Zpo4zihf2RCs2SxHaJi+3Ey+2Bx2lg3AbizmG6Oo6Vwy15lIVVNX/kkMIXGTXqfa0Ch+vNXSvemgiXrz23er032nsvydIvU1iy5fXpioswcRkm/uK3vuT1xRWPjxvK66Ex4/IiFf/0QdJ7I12ditVIm0LZRoYb4d7HxWH/ZCOt/yT3LfWCT8RaOVh0vNzej3YmxjFwO+248HKYz9kzxkAqBjdEcu+wi12pxsosrZppdS2aaLAnS7qOknhkiz9aGTkV8V1Mg6yFmuBVvbn3hPGU/SgFgtCqzcnxxXzFX93+kP/+1T9jayd2JvKUO+7zFkfmZnciRidjq6QpfjIYxTCW+tgKpMUSVROJIti/uCutWs9BvPOWDxe1e7JsP+/W91gLnKYYKaMwZzP3+y3j7YB7lITcJAhZhF/itjB/ZyZsFryaHDuX6bvIY99Rju4MM0PrqpsEbpIDqo5NrVr0oIeVSWsCVYqhDxFzWZinQJwc9Akbckv0nFdNvdmBJoNF/Q1rgdzssbSAMxk2X0L/mOn2me4+tmTIKq6y2vPIYR7xpySWVkXNcPX/v3dlJHlDR0saD+yUGobJzOrPmrJYfYDATHR0VvXpICsWCuBMU0/39WHqRC0/ZJabGnuEiZsuE+5qJk0D1YczdZpIxpX5WO9F7bSZ+hmqMXasRaQWprVbGLUY1diCs3J01S5S/acla+LpsM5gpiRfJcI0Sxwa1vVY7V9KLx3Ybi+jRzHtRb0+NdGNWXTX0npv6qTFDpl0aVl2YmzuN7bdu+0XBpO9dP5V7zHspXDzoyRb/rA0fK2MH6FKUBQv2OGGgdJJSg6WnAomOTBZ7+U6AjR5PQtWiYufz7P7pRIoY8wN8O8Af1lvwf8C+EPg3wO+B/wQ+NdKKXe/6LVykADopvO/1IBb2/0ewtUkILvpAhD3dbuNlIeudYHq2K8d9kgiYGvrv/BekFjn1HWl07AW9eEbJ3o9y/3A8M5hqyfUrFVA3eSFNqoou8z4UcQdLeFxveFisuoZ44ZPwzVDiISQmOw5vmNNKmQ2DvOFIQVHHCynl4bpRaJ/4+geDNsvXEuSRPVWALbL1jSxwOr79p4ZpF4tE8+GfrswFam6BLhpmgM2SCKSA3DSBXr+vIpa3CQJktOl4fShYXomz8cfC24acIPHjlEqUz1MU3+24TRYlGDJg8eObsUs1DZ9bYsbiPvAyWa+9fyR57/zFYel53Hu+eLT55jkG8ZguTBMLwomQjgYrS41aM61ui80Ne+a/KIJVBaphTxkqZzf9dxnUdDddQtdl1h2SfSKRoffO+nW9IX5WoThuge1MTgazOehWVjUjlizltEAsIIydT3Ue97A4Zo0VR2omGiimudZuI6Mb09bvtxfYA18cnXHb7/8kvzS8ObjC56mnnd3F8TbTtrazySpKckwbc5EpbqMHyIvLg/swsycHT986LCL19G44OTixmNPEZtz62Y2wLx+5pwcP76/4b/40ceUyYnI4rLiwcLByNde7k3Yi7+dmI7XzoehOCeg/cWSh8z8DEy2kkA7GF9l8iAnhD1aofk/BcLB4cZE8OJjZ0YVXFTIANFgj5Z/evctYnHs3IQ1hSl7Pj3eELNjzo7XX93AU9BRmCHMkrCkSs7Q2NCKjwJu79rIuhZ2cRDJgXgpCy9P0g3L3VnHz9Q1KeMd0wn+KdjM0C2MoRcsByot4GWkkp4vvPzgiTk6TmNgeejBi5kzXZbRzXEtMOuoKHf63s871hpjmw9mkTU5hMiSHCkb+lCI0WmnWgDEYVjkvfpEcIk5ek5zYJ4dJVtStJTFtvddE7laXDiNbXJP7EpfT3n9vcaKuBUyhJ3VhNzZ98Z4TRIkSxcXV8UzRUDS5IKZ0mpY6yxlCOR
OlB+NdnxIAt4WDwkrkgFAVrxVLYitk+Ryue9xBwHOZq/dkU0WwoLLJI07bXzpZYxdnICdK6A+B7PKwuQi2nMVcK+K/Wv8skJucWXtrii2R/T0CsUpISnLGCxvPCl42Hi5F3PGP0KZqjqt3CujhKTUeabnnqdPJDj5g56fWQKXnUu7NxU+0TpgShYzm8hy5bV4dKQOTq/keQ7vDP4A2y+K4sNy685LZ8sIyL0K4uoEwBRD2aoYq0IvcoJlZ7DRUidfxlmMz9hS9HVq52ZNnuq9/nnXL9uB+reB/7CU8j82xnTAFvjfAf9xKeXvGGP+NvC3gX/z575KEZ8umWvrTLJTA8ezdn+xcLmd+NbVI3+cDeNlj3HCWmjSFrUgy/V7VDWbQjZ15rkeitUvq3Z4RDBOZ7Rq/WLOMFAU6B40i+2qppDgLeqDdKN00tLJsFwnqUySipot2hHx4E6ew3TNU59hSLiDuGe7qbSkqXhJ3mQUJG30NBiml5ntt/fYTzLHY0/5alCHc+28VXyXrUHbiXP6XOgGVQM+0wPJHZRtwnUK7qzMwWDAa0WoayluVAW3QHYWY7XjZPL74+KcyR3MzwppkyEbugfDcBvwJ98c3GsgTj3kITHunXYPIW6lArJxwEzLyqSpSYM+Q3/v4Z3nx19u+fTlhHVJlb3hXLsJxSAVJ3CX1Bls71qFVtQDz+j8W6o2s+JygDxkqRIfO4bPPfHUc3vw3G2jJACjbYlvZTdmX8j67NxkW8s+7AVkKYBeHVMVHRsrYyX1hvnCEreGskeAja4mkYX3mDD6PKVjtxYTNhaMgWAzqRgeDhvGx553by8Jm4XtMHOzPbENC/cuE4us4XzfYU8Wp/iA6vUmyXTHHVvebAUP1ulYRT6bBsaKJaiM2lRHFtV2wuCVxdf/YMCd63/lFajvj4X+XpTp/UHtO+Lqw1aCpTgv2jL3sjDOk1+QMZRJlrTLlFB95ARMLv6auXUaKGpYrGQTk+DN0wWf314zP/Zy2GYplOp4qX/r8HsprNwsSfrhW5ZxkDhmJ/37JIeq20RiNNiDEwPg+SwmuYKZDMPrTseXsHqQnT1rtXSxXaIPkY1f+PD5E8erRx6/NTBFj3eJYDMfbp8YXOSHj895PAwsTz3+TkJ97j1sz7rIZwVo7iBeiHYPquRcvAgRfv1QKRYGL4ncYe7Yjx0pOrEWmYSdOfaBkwpGut2C92LS7lyh2CQ6VqAsZ1rhkDvIrnD6QFiKJlv8aR0Pbb8wBGRcaIskE9PzwLKRGNM/yTivOhC4U8Id5vUz5IIxuRFsSu+Yr7vGFjOpCBs22FYs+1PC7xfRRkKeT1v3Ffu0gDUam11m0y2MeyfEH03u5RnAPBvmk2Nza9/zuwMdmXsJZmmgadvFzVmBPDpskTgzX4LrDf5QCEdD7r0AwxcwVoDgtVBshVfn5fWswR9UvFg7UXFjpQD3Frf3+sx1rFfvoTXEwXD6KIsLxKNreC6xHdP7NiY5lxc53E2xlNlSTk7JJDQh3+IN6SKx+WjP6ZNAegp0bzxulA5ztccKT4X+yRP2iXA/rVARjYlFiQhxkFGpcWrB1fa4k/s9oy4o5b2t9ue5fmECZYy5Av67wP9M3l+ZgdkY868Cf0P/278L/D1+QQJl6kjEyNy/OEjBrhs5oSm62FB4k9ltJnKyLIfAUjy2ujnXjL3SN/Mab4qFFAQM6U8V/K3z2Pe4wWt27GYBgJZopSW9i+TgiYNgWIqT7Do8GrpHQ1D8RzhAf2+YRi/mnZN2X+aVQZE20hGzk6UoyLRWqBUnkkPBdbII3FLWcWEWY8qPr/dsnt0xvfJCoc+Wr26vmO47woMjPK7VpKg2Qw6yyeQfiryWB7eNbHcjx6NwvatUggAYVbW9VCkFVClYA9zX7yFQlXDdyUBWA+dFZ9A61jDqzC0CkoWySRy/ZegehD1XR3+pd7ivg/msLPKqcty/M+x+YuCPNg3gHS4Ez+OP0u4VG4Q1ia4drOw
s1mpLWdu+UHE8cpCiApBpW9htJ0aX4fWFHH7RwVsRdq3dyNTJZiQLcDrlQg6F+brgRiESCF5ubWVXaY4qBSAjMxk7VjxMuxepYIoGCKtf2bT7UvFI1e8PU+h95Lo7ibXHocN/3kHp2Lsdj/1NIzZ091YO7oOje5LDH+Sg8qfS8Ej9UyJ1lnknwSkcCt1BqsIURNjPnFWc1a6oWjwUBzcXJ4wpPM6XhAOrPVCW4qQxFhWb+J4txSKVdFHcUerlZ3SPYjYe9msFvnsta3V87iXpPkI4ZalilyKijJUGncSGw51qt1bA0cd9z/CTICOVTPPXKwb6O5G16J5ygwxMzz1mlgSg2sqYZCjJYruF7csj00XHfPAU45tkg1OM5ear0sQ3JTaU1rGtiZuxoiYebGYXJn7n6nNehid2duLGHQkmEkjc5y2/d/oO/+DTT5ifOsypVt7SGU1JiTV6v62KA4PE56p8XeNJjZX1qlT93kW8MgMfnjZC+ECLuhnsrW3vPW48sRND9LJL0umPIhVjF7m/eFnbWZPe44uoP8/Q3VncSbCo4eBbF0Jikvgfxo3Q4dNQrUigf5IC2XvFkWmxJV2nDN6SBs9y6dYiPNapxOozmjqLGbyYBZzDIrSDaZaE07PELsIuvOgn3u4S3Hr8npZ8ZG8w0WqRVe/p2f09i3+5Kw2L2QQ8K1YMuW+pnlHWkB6rqrvi8jS4VHNt+QEy5kqDE1/GnaN7jJL8LxkzGHJniDvfQPUsqDOEfv6ssJGjJfeGtM3SBZ7kzIsbeQ1zLKoBLIvDRkmozUkmKv077TBNBZMsbm+J0XG5G2E3Mr3w5Gx42vciljxatp9KEDS54E5eFer1o2XpKBZtShTLCuw3MqEqzkA1k8/6pVYuDT5SGZq/AA/18wd8cv0G8Ab4PxtjftcY8+8YY3bAq1LKawD99cNf4rW+ub65vrm+ub65vrm+ub65/n/++mVGeB74q8C/UUr5T4wx/zYyrvulLmPM3wL+FkC/uRFH922iGK9GmquHWTFgkErs6d2OP4qOcd/Dk2d455hvMnmXGF1pEvDV/NadJHOvjIDl0lCuZlIJ8neKjLaKtfp6ReWmBFk6RL1LfPjykS+/56Vtvo2UZNXryWGy6J2kAak4RxjeCk1dmGxiU7BcwvQqsnl55Ls3jzyOA7d3O+LUC9ZFxx1ZPa76YUHNx/FaxfZ3ltN2x5+83lE2id3zU3vfOYnztzsZGXvo6ODPvBT/hClcDRPz7Fmm2qKl4R2AM4aOfnn19jpp9Vb1MjQFd1NheCe4H4DsC2Gfhf00JrAGN3vBv42GNFvyVWRJnr6zCr5VFt/PAk8XHcddJeLBN8kK6XKYNl4Nx6J0YEv3uI5uV+kK7UZ9zbVbQKkIJkeFMVMSgOzz3ZEfvhpwR3nu3UO1DEGr7pUWLmMoGX8s11Jxx4uEv5kpqomzPPbYgxMfvAQYi/MCfM9eOpZxUgNMxZ4wsd7zdk/kM1glRtTRcq1OL8PEr13d0bvEj5YPsHuHOxk2X8iekw6ZjlB3RXACvbDDijcsO0Pc6si7iJltcXV0RSNd1HForWybMGCuOBYZ/T7fHPl488j/46MPSBt1pi/SlncTTVMpDgYTdJzjBAzsptWaws2GWArzy4TJ8pnCqaz+YUhlbrIlDjRZBmnb59XRvVqD1FGiynx8cnMPwGiGBqZfLgslyKime5RxTBxk3ZksYNf67+seEpxgnJ0AuIeZ3EXGuME/ucbsxcDpQ9O84S5/iGqpQekzdhNFyUO97JzNdC7xGAf2GjC2biYVy5Q9v/fwMZ8+XDPdbuSZhUK8WGAx2KMQYyQmrs3khiddTAOU1/2CavlIACktPlSRzItu4vLipMLGgUUhEtWAHRTacFKzYSMTAcVhN62v2gFvvoRdlhHqot0nHZmKdpy+QScyBDaJH2I6rd1mIRbVlq/gbs6HNQULTjpWuWKqlL3rxiTMQwVZN7Hds5hxfgnkIGMVjpKTI2X
L5sMj0/ECMYNcv6caQmftuoUD5EMV9K2dNbNioM6sds5/fl23QjyS/fJTV42rtUvjiuq7STd52Rqy99KJHRPuJBgzN61d5fd89KxVgWbYfQ7prSNudQqh7PZmvTRnBepbKjMzbBcWW0hTJ3tfY1c4FcLekr6/4+B2pKGQd0k6VgeHG62axUM4ic+enaJIwLz3WdVfsqsDLVkQtjKE9ctGlbE503qq9mW/yMKlXr9MAvUp8Gkp5T/RP//7SAL1pTHm41LKa2PMx8BXP+ubSyl/F/i7AJc33ykYcJtIiiu+RQCCErxQFoh/FxiPojDePRi6RwFcvvz4gV03sw0zXzxdcheuSJ3XUZ0cohRYruDy2ZEntsSxw49GAL8VI2NQwKIKU8YiWJpoGaPnuh85fuuJaQrkZMXjbqljJmmbLpeZ5VLe//BGWsa5l1YyimOiyzy7OPLffPmnnFLg/2l+kzeHICMruy5+gJwNaVuE/u5lTCD/AJvPHW5yFN+1ZGDwotrsj6X58qXOtPazqQBZBds1XJCqrX94veerhwtm05F9ZZOseKKmCFzv01ITm/fHgnjbMAfbN5FijWqyFLmvcwRrmySCmwzh3pGGVdBP3Nnre14Py6KBr7a1w7ORMXSMiyQz1V9PGCiGNMGSLPOlYb6Wt9o0mmoMK+W9Nrk407OO15TVZyZLKYaPt4+k37TcHTecjj1j3wvOpiacWZKmurb8sR7GhrSRF/UhMXQLuRjiLG3pirHIDoyyQE1RbF2mib6CFc2cohu8vnEdP1bj4/ZMgFwMnx+uRQ9qs+fqL448TgN3xw2PtzuY6/c6kTH4zizY9GwwT14OG/VOG3Yznzy7JxbLYe746o9fkL0TXFGUkXw4eGW41hPzPAjI8zkuHR8/e+C/8df+kHfjjrtxwxIdpylwfL2jvxVNquol2T0pRiLKOFFGcOoPNhrMBwuTSlq0A8LAfCUHcB0l+wN0e1F4l/FSEKzKkjAptUMMTaS+u73j480Df++f78jAdpj5izd3ANxPG34SPlLWm21rb/wwY57N5IMn9WF9TSBPjvGxoxjZg+4k4omShEpyNn9XGHMUSK8vKFaSb9MnuiE2xptzmc4mvMn83sPHfP54xeFpIB9lvmiiEdVxC7xY6HYzF9uRm83IkhwPp4H9D68xyoKrLNHKVHWzYr7K+gyLQWn8qMmzrLfORsYUsKbw6mLPnd8wHjp8H3GuMF97WU9W3nsdv8pN0eS7E7HTUt0Umu8idK+DChVD96hK15ocFWdUVkH+HAfRzxoeEnYuLdkwMeN0/HvuB1eLwGJs2z9+KtgpE46KCXKGuBX2nZsSDUOUNSg37J9+nqaKLffp7rgRoPhFZrTreqBA3mTsbhHWYhRQdrFK/DFnIGkd40qcEIJUxckKKFzOADebNc4BTX294b7ORoNGirOsRuAlwOmFVYyZIxxlNF3GryUTWZlrxhC3jvGZZdmJyPVwW5QJLQlUlRQ51+IqGi+NLWwuJ0ZbOH01EA9go5PnuBVZoP4rFHeoo7qxElKEFRoOGTemtRjSOI63Df8sav9QIkrMMFowlVXXLGdhilfG4jnkBVb2659x/cIEqpTyhTHmJ8aYv1RK+UPgbwL/hX7968Df0V//g1/4WgZIRgwlz95Yc5N3yAIYK7DM4k9GAaeFtM381179mFfdI85k/u/LP8d9n8iDI9ZzXUHQaSi8GCbmnWfeecLeYbMeNKb+zKq6vXZSyIanU88cZeAcF0d+CnSKe8nqsJ47I5vgcmH+EGCDP+jBaZBD9WAozvNFuOafbb7FRZgYfBRA6FlwEvqwqLM6B8ulsAbixjC9yIQPTxwHAYIOb860RISNKZYQz+VzuQkF/1qyKq8WpZMC5L7gvAjgfe/qllwMn90NpF5ozdYZPJLoNKq9BrYmBfB1MDOiI5QGAe/X5DRuLHa22MUJNTauyaI/GPo7xSjppqvJSBOKBGW8KKMmwfPrA9uX9+Qips9z9MR
kebjfUlxPdga/kUN0fp6xo3QMUtBEJHGGETgzSq7r02h+nQTHcZwCx9jx28++YL72HGLHD2+eMy6ecQyi83P02CgYuBrowgG6e8gng0mO5bgTyx/EM7pAU7OvcgAoANUfV2DlT4nfOSuJk5GgBELRFfNcVeVOhpQtt6ctx7Gj6yJ/4cUbvnNxz3cu7okfWObk+P67D9jbHWa2DBdTO6SnEETeQjEjORtisWz8gjeZL4dM7oV1WLwkjgLSFCHCxvQ0NJyKP8Jnb2/44cUL/sbzP+SYO344vuR23vI4b/j98orJbvAHxUTqc6q2LoJdUGydkSR8OnpMn1ie09SPcwDzfMIFASxP+47wztM9SgHgBgO5IwSLOyxyePWG7DMuyf85pI6/evljfu23b7l0I6/CPS/cnrEEPlue8386/nX2DxtJCBbBZZRnM9eXJ57MhuUirM9PL3sSUoGJtWCTQixuC/ONPOTlFNQOao2LLogQp7OFOTqczfQ+snEL+7ln/7iBu47hbsWSFgfzTeZ7n7zhNy7f8bw7cEryokux/L3pLzDbDW6U70mKxWp7UztjFfdYrKGJDkIrQC+DAHj2S08pUpRx1jwOfRQ3FJ/YdAvWFJZkGecgQsiLI+OJyWAns+ojhQIJhltLfyfJc9UUyt60PeYXBUZbw/hcfnB4Mgz3irdZCtbYZjEie1w7N1XLzsvzny8NOTj8KZM2HakzIssyyFrcvHN0Twm/B3dcRJTYIGrdQAny85tauIFpDCyPHf7BaTxV0khfwGeMBTOJE0DrwlHjU32fGgaC6Oyx1D9bjCYdQibSTmoqTf/IaPyoRWmNIfJ+RRDXLYLtna/EuWE2MGaHPxY2wYjwbCpiOjwhMjY1gXphOPx6xMyG4atVbT3sJa45Z6Qw159X8XzzMeC6jA2Z5UoJTLqulucL5XJhuu0JD5bhjXQeK9Eh2RrL5f+fF0NSIIuUTurlrCuukK1pjhJuNqvUwZmVWlMgr7+vv/4Ccc1floX3bwD/V2Xg/QD4nyMpx//NGPO/BH4M/E9+8ctod+QhqGVEkirCqO6Ml4M7bSSwYKCc6pjL4K4XbvyRPzm+5MvjFZ++vcHcdnQPtnVg/CjBqTjDYeqk8lF9mOb5dPYlfzZQgZO2MJ46jm92wkqaDP2ToX+QxRk3hrjRTDoarC08uz7w9jcMy5c9u88qSFy99bAspuc/P30X02X8sGAmK4KeI21RFVcgiVBY7UrFLaTLxAdXB168esNh6XjzdMG4iAZNeuya/UZRAbfuXhaqqLEL68zWqqlI2/jD6wMX3cScHVf9yOtdpPjQFmXdtMKY1MrzvAqJa4cIBefPN6ZRas8e9/rblNb7ty34vWH7ZWG6MUp/pb2uSUmYIlrRGQArTJZpke5gUYXtXMDajAuZtMmkSaxYqlCdMCilo1eFCIuzaycnozTY0saYZAXXJjjte34/vuJP++c825646U/81s1bYrG8G3cc5o6Hw4bl6YLcZ8o2sQDjwXP5A4s/SFewfyd6XZVxVq0XzhlkovT9tR2jejBmjmBEfYaYYVYKvnMq2lo7UQayIWYROZ0fetLjlt99fYHZJsJm4eX1nmAz8+xlLR4s4+sd7iRVqC+0kUkF4X9lLgWk2xc2J1H7DntNtJ0KWVaxyis5rM/93OxSiI8d//Srj3k77piS5+E0MM6BuDiW0Qtrb9YOXJLXlwRK5QwqoHerHYqvvDDyoHkq2gVS6YlOiArEc1Zg1csRt/jK9CxaKAizFv7RF99hyY7vbd+xFMdD2vDVfMmUPacUhA179GLDMRncyXBSILLzSdTvZ4kPJRlwhfQ8kkaLO8r3tM6Cl45U/3mPP9IObBkdFq4uTnywO3CKAWdlfHcVRjZu5qPdI6dnnqewYdxq1mULF8+P/PrNPf/iB3+ANZnX8w1/+PghwSZuuhPPrw58se/gaBuZIge5r8t1ws5SyWanyva6DttazBJjPu4f+PZQ+OPDB/zw4Tm
L2rksTxvMYrCjFKaLK+xvIn4jnbTcqkaNH11WLbHaEROJAJHHqF110wpfu4AfrBB/FOibeunkjS/htFcrkRP0d4XNO0O4s2tRdg7SDtJJnZ4ZRgtGkdq1CKjML5BOVW+QkVHt6NcYpzGl6kL5kOj7BfODrSQBR9Eiw0qxuVxacghcfGka+9Qu0rFJnRR3oOvgTCeqXiLLYtuInSKilCbLRMRO0lI1lYF3LoSpli0ygobuIAlqJT1Nz9ROx1nsErBzIRyjxh61itEazgyJ8GzhNPTYo3jO1jPIzRbXS5JXPVmzB3PwcCud5rA3jbgSt4AvfPfDW+yrIlIr9xci0np02pk29G8t/a2jfzwrhvZzKzZTMEqGKMoql/wi9YYlgl1Ead1Zgx2VsVg7UECTu6i//znXL5VAlVL+MfDXfsY//c1f5vvPXonSFaxSeYvTzPJ8bOSgbArlxSzt7KmnOEPsYdjM/OP77/AHP/4I7jtsFIxQ96AZeJFs3CZpA97d7tZRpnaFzhVb9S0ptkR+TxWAO1h2P16pzVVywGk3xk4S+NJdz9vF0m8XxmeW9K5rB2PcwvQyYZ7PcPKUo2OJOgqrP1uDO6EQhogbZXRXO2TjB477/ZbH48DQLTzfiadBKobbsGN86DH3nu5ODr+wl0SvUmIppfnRSccFXm33vNo88k/efQtndHSp7VU3Sru0qHyDKDtrsLOmtT/rgjM6O5qvC/Ei8/TgNCkobL8qikcQnALQqNLF2CbcJ4eJzqfPkr2zBdiESx8etzw8bqUNfmbSaaIIi/qjITzVB67so6SbvRloZvn7JWPmBRMTtg/y3OLZaA4oi2V53GDfXfDZ5oYfXSb6lyesLaRksYpzQatvXKHfzeTdwrjf4g/q77RdWXhFO3Q1maqFxXIp2lUF6B6qn5NTfB6YImMDs0TMvEhg7ILowbSKtbQE6oPdgceLDebOc/knwv4qvue2uxD24EVmuLf4I8S9o78Hp5pfouMiCa8UJqLdldVjrttLm76OF7qHBfc0Chtp7LFLOBNFlOBMlzmdOv74+98VbTWV4+gT+E6ekz/puGaBbi/Yp6oobXIhBUsaLGmbcQfRePJHoTbbWCtVo2NktQ6qyv7KVuweZuzjCTMvlC40/I84DsDjj675+59f8vcNVENTK6erjK7uLRf30D0IbsNNibfGc3Bb7BAJUYujs6J/uBwpl6JMPj8G7GRlzx4FN+LVvsmNsk6KNaRt4sXuyKvtIz/ZPwPF1eRiCCbx159/n//6M88x9SzF4UwmmMR3u7fs7Mw/OX3C7z99xJ/cveD2sxuwBbuJdEOExeJHozZHuha7gr+emWNPf+dap0dip+qVVdZZhmAT3+7umLLnhw/PV5XsUZLK/rZivAzzY0caZE2UqsK9S7pNlfkHMjbeJCCx/63cRn22jT0N/TvRDKvMXUlA5FnFzRkkomKDuorNy+tnqKGlk0TCxrXjXjF5biyEg75WZXtr4lJZsefODwJlkFFWKfB8e+LT4ZripJNaoQNSJEuyUsdu56K6TUm7Ynk2WXBaXt5bLjruD1a6RjeZ5VI6+m5mZZqBxOkqTlyZZYqVSsOKcdzcZsKpJssymmvSILUTaWSsWIyM7rv7wvLjjrgLmK5IF3FZu6Bt/2vHHfR1QsI+ebp7w/Cmar3BeGM4fcvxwy9e4HwihMSwmSmD4eQ6slqYUf396ouedenbWQ7r+K3i64J0yk2WZDhYgxu9fM8S13v0NbHdn3f9SpXITYGySSQjY53sV9XrarlSQiH3hY8+eOCqm/iT8JJDN5CHzMtu4Y8+f8Xw/YHuEREsPEn27kfF12iQ9KNh/2Un5phWQHsVqwKsoMkzQLkpBWPhYjdyfxlwky5i7WLU0RT6c8iiReO+GBg/CNAV5hs5zGw0HD+JvPreLb/z4jWvT1d89nDN49OGPFvioJt/RpWqMxfbkePugnCQz1WMbLbxqWf7/Y45wnFL66LlvrA5GPo76O9
y67BFfa9uzkLZnhbMaZIORoRYLB/1j/yj9AnH6MhpTejcJFgAsTCRezfPrrXzBUuQ15awamjEy8zNd+85vhI9mPjQYf+hA+NxG4c/eQlkBrhayM8K+/2mYV2KM60tbYLDLG6tmlRQMw2FvFjcu4DJRiSS6tjCQniS5MmPhXixPucV+7SuQzkQdPadpBvh5tI6GXo7MF6+afeZajh5x3x9KQmQgeVKqvbhyZJPhXxSqrov8CKx3BjM1czNzaFJju2PA/OhI/+koxipHIuxq6jixcIUOpada50/G87uRylrVVl862Kej0D3Y0/nErvtxNNHlnkc8Ec5oPtb2Sv770jnthiYn2WKt80+o2yl67tcFeJG1r1V+Y2wX7sB9d6v412rNhqQ7Lq/4gZuXuzZ9TO3v7eje0BxEto16mg6auEgkgzhKQp4/EwBm95hUofZRnYfn3j67Irhc0d4oiWnqZP3W7WY5KFXnFhNAnQNp0Q1T+4eJZEJe/Heuvg0N5B3MUVlV2Qh9U+Z7jHhjhE3JfxvXmJGu44qCuRtprsU0afpFMQDLMnIz+u+tYsklw9/schoJxo2X8hByCbxrD9y5SdStkyLJ2bL/byhd5FPhluu3Ynn7sCgM8OlOL6M1/xkfM5/+P1/nuWuxz86Nk+V4BKYnmXCaPA6aqlFJ0jnpLyYiK+39He0YqIp9zd8Jfz49JwlO3IxvNxKl2y/GYg+C0kk6CjTiFdgtYoqOsM2PmNcIavXZcU3dcPCpl/oX0bBM0bP/m5LOTrSbLCTJT4a/KSHYbAULwnt5k0tmJRgMhfcqJ3tJa14F2MofSD1juVSCvruXjwIa+IuKviFHMRnT/A3K37m60UeyWGMYJGWY0d+Zuh+bc8+7Igb14SXbdK4cVWYXsj92Xxp8ZMkiDXWps6SN5myScSdmHrnqNOaTjZgDiL2W24Spy7gTpbtORo5n/nEnb3ftLHEHUzPCvMVxI0jHAT8HQ6yL7t9oXsUTKudEu6k+nyboO4g0oke3sgZWZPOop6udaRYi5+iBfjwfGQaAsX19HfymWRB6Bn0k4HhrexbnTxzcVzdRioeMhwz3f2MO0yYqVruWHk2dXIS5ABIwxoLUgQpWg1dsE2UFMVDtfjwS1y/WiuXAmE7wxZi2ZCCwQWrujESxHJfwBYGH/nO7p7Ti8BXPrHMnuMUyPcdYS+BDlYl7qp2LMBRWfjdndODWVhG2Rfyg1aUUUXWzpkGxlCi43ozYj4qPH3vObnP5G2SuUY2mNkS7gT8HK+jBMO9YfepY7nQkdG2iFL+JuGUqfLbV19w0534I/8Bb+OVqMAqWNgk4OQY5yAHmbVNYCxtM2E7U2xHf1+4/IlWgsB0I7QGNwqIsoK9BRT6M6xAjHS/fnT3jKsw8mJ74PXjFXn0UgmdJaCy+AREPMH7ehjnGij1gB/l109e3BOz5e1mx/TsBrtYgl3ZMG4C6wvf+eCOr4aF49st+XPP5o2MG0NYOylmWlo1YFIm99LdmZ+8dAO2qSUPolbu8aNhsYbxeSF+sIh2yElwDlJlnlVn1or3lf48N8kaBFo3o9/NpD4yX+3EuywWuofSEgeTDcXIAV60WncnwTwcPyosl/L+n29P7PxMLJa7EHnsBsY3gXLQ5NHLyKZaLKRtJldAZKdu8Yvce+MddAGyuKnXNVRZJmTLOAZ+cvsC4wvddib/pcgpOvLoMJMoRpchYw9S+t/81i0xOZbo2D8MspdCZnMx8eHFkY92j+RiOMaOP/zsFfOnA5svbete9DdilGsnHTWqVUx9TznAq8snPtw88c5+RLG0EYJV/bRSK0V/Vj2iyZmz0lIw0vKPJ89f+I033F0c+OyDa959ZyPrwGe2z050PrE/9oIr2jv4I4edC360mLwhdE7EFXV87I+G/i7jFlUv3huG2yT6UwjmJPVyaBRTWX0KRF2S4MFCkTH8ST63GRIfXO+xpvCTP/0A/+iERXQwbXw/3YgY4V/4y59y3Z+
wpvCf/ad/UfTBjOCLAG4PW8ZTR06G1/aKx2ngj+4/4Dh1jHPQ81H88PLbHr+39PeGXkNb7cq4EbZf2DOxT2GuCtDfMB1DKxT9WPDHjD8p06nqfC2SeP/B3Yf858u3cLbw6zfvGNzCl/0lHzx7onOJ+TvSMetc4uVmTy6WjOGwdByXwLR4USc/KhtrMq17ADQRWPvk2X4l67zZzmghKwKghv0nmfBk2byBy0+jQDpquKqYyvOxjHYaspcujOg/0bqqy86xbAQbhJFDu3sS5tdPXaWobRYqBiyssS9ur7i5OjK/CJy6oBMH/YCXC9c3R4JPLNHxlG/o70xjDBdvIErDwXaJ1HtJFBUgn53BKC7VjlaaMF463ecWae/F7HpZsd2Kg9zz1BUOn2TBfS5CtHBn2MN21QTDdIzPLE/fK6SriHtyDG9tmyqtP1O/ipCJjBXNOucyrz584HDd8RRvhIDjZOxmX8wsW0d46ujuYfNGvsePWQtYERuu51EzTl7iChIvWleGLCNik0mdrPkG49EJT9UFPH+WrVD9+n37GdevPIG6UIXxnww3LLsbYQEpO0uckuXTvb67wtvMaQliq/HQcZydCPTNQn00e2l7+2qiW1YwcnF6WGuFLTL6Om+eVARsFCzEeZA0Y4ezmd96/pbv/7bQ2K/7kYzhtAQeTgMHLnEnR3g2krNlZGD7mSU8mua9A5DeBT5fXvDl7RVXFyecLcKcjkYr0NKqb//kOPYbLDLiWq4lKcm7xM1u5P63HadvB8K9zoKzWZlGsyFupEIW/ypxfF+uHCZ3+FKwZ23J07Hn9958xLevH+hDhCRO8LWD56YsB1iNN+7cjFO5x/XPTjL47t5y9/qKu+6iZf9XcxVkzHQPi4yjRkc6SdX6lz96zR93L3k4PmPZ6m5rStZZRCTbyEDezOV24u22F08tK5vSK1g1Hh1RXc9zVxiuJuLiiMETh+5MHV5Gs3nw2JwhOshZFNzVhNUplu5yK2a8/2j6Ndh73NE2nBpIV6yOorPiAvyxYnYM4wvLyXb8Sf5A2s/1vtpCN8so2M6aBeoeyPuwUuvP1JMrHbl0Yn9QA6RV7I6b1LesWKwp2DfSAViuAu7Via6P0EdCiASXWJLjye2k6xEiySeiF3xdXHTkmi33p4FUDBdhFgDzdubU9YBpVaa8EQTYXqQgqnYclTkH8EG3Z/qNiXGyYq5sCiyW/gvfEgs7GBXT9OQg1auNBWt1bGPAHSw/fnzGs+HEr7244243Elxi8JHvXtyxcQupGN6MF3z/3QfMr2+EiesKy4UF1oTPJOk69Q/a6dzBMhumGwcIRqYyY02G3RdZxkFxtauI24K9WMiTk3FchKJK28GJ71nMHeCoHolpkC6E/7AKjA7c9CdKkBF8mYQIcHvacrjdiEFwMtwvlsc+Ue47ultLeFrZxyYXgv5+fK5WMbtCvM6yvibD5kurfoucJd+612enchfgx4w/JcGLqTccgEnSTTlOHY9fXWAmS/4e7LqF8dRhbWYIkV0303cTnUt8tHliY2eW4riddxxj4KvjJXP0tNPqbQAAPMtJREFU5MUSJsXCFCHTzGMg/KTn8lawg91TbjZX0zOzqlsXPeQ/mMgfZ+4/7pgvQ5Or8UcY7jPdbcA4NcX1YmGCE2Zm6mH8KDK+ErkEkw2pk9FZ2ckB4r8KbL4SwU5/CGL2WxPKM2xVKVWZH5b7njen0Cj4WMHu5UFgItZmTgqob3COc8sTwG8i1mViJVppgtUuK8/UH1wbPzYGXpZ4XcgqE6Gv6x1xI4zv4Y3BG8P4oazHdFGYOymsirO4JWAX6cIFZSMulx3TM0P+cOQ3vv2WKXo+++w59kn8YN3RMLyTKYyv9jNRGdkJDvcbcjYM3UK8zNpBlA8VQuTViwfeXl5wPHSYg1O8tGLTdGIVHiybNwYY8NtAuJPOt+CszCqDocXiuVSSPKuv3UdjWjFdBYp/mcv+4v/yzfXN9c31zfXN9c31zfX
N9c11fv2KMVAFZwvf3d1xFUZ+7+KZjC20ZWMXCE8Cho7HC/74sYdo8feO3VvD8Tva4rVi1YIFM1cBvdIqsOKkBVttEcJeqvfpZsVknHtnocwvW4RZc3vY8sFmz+98+Dlz9szJ8eXxknHxXAwTp5ue+Tjw6x/c8Z3dPV9+fMkf+E/o3zn6d2sXzI2W/MZiciDmLVMPx+9E/N7R3xW2bwR/ETeWx2Iwo2P4SnLaNKghsinkbNnuJszFSP5I2FVWZfXHMTDuO0xlKahhbTGwbKxqzjgZgRQFpxd4fLvjeOzpetGeCSpKGfYZPyYW66UpcvS4J7dqilRWxxkGCmsFbPmTwO7zOl409A+F/iERniL2uLTs3z14Putv4EO46GfuLhPF+gb0boyf2n0qBWLGToZfv3nH1TDy+v6K07sNBViyaT6CoNpYB8P4IFY1onHBiuOqVy4rYF0tPcQGRrpIcSusv1/b3vI7/9XP2aee+2XLD55eMEbPuHhichyOPdNpI6MfLywaNxp2r7OSCSz+9cDmq6ISGwjtmtqhEXB97iAPBXewbS033NYiRp61Tb1SygVX4E/SObRTwqSAD4nFQH8nmJrpzY64K6ShMF5FbJ+EZn50+L3ls/wS/+iar2J/EBxOCjI5e5rhbkt7jc1XluFttWAo9HezjMRihm0gqJ1R2GfcKdI9waf3N3x7+8C//F/6PbxJXPlRxoK54z97811ef/EM+6bDH1VkU2VMRCxVPl/cWJZnGRK8/cFz3nQZs0nS3dP98qP+OQbYbCecKRwPPRfjCtT2JxlLmUXwDtXUOxyiVP+dIf/azOuXAXzG94muX0jJssye9E82DG8MW2dkfy2Z5aJwdXni4DswAkxntrx73GFt4dXzR/IzWU+P+w1p9JiDE1bWQ88Pfvhd3GT444tMdzSEx4I9OB6PA3FxuHsxbyZDTJ7SO8KDpbsXWx0BUa/jnbiDp7+Q8C9Gri9PvNgesKZwioEf/f5HbD93Ta/MJBEmLL6Az3T3ge5J8T5phQy0OK6QixDE/qO7s9zl59w9m+E+cPx84FTg1tAEM//pLmE3EVe9HU1hGBbm2cHkGj4vRxiPguvr7qX7VNll8uykqyavIbG8OEOeHS4khmcjRxABTiDcO4q3XHzmcSXKxD8Vahc99oqP2UbMVVn5KwUB8VXczDazXDgWHblTVEPorDNfjGmA5dJJbBl+rKOot1lZjTA984wfOG6fAt2ddFa6kxH9paW0cSu6hnufeBfEtsrO0p0typCs54R/pwzaYwWRW2E81esMblGcesVdJvK9o3souJ+IFEru4PQtGUfHDUxXRoWKwZQgwqO9sPDK5Lg9bNn2M88/fORpuyFFy7wP2CjSIW3NVBNoA+7WM99eMHaF/tZhJ4FCpB7mlzLWfXZ5pLt5YuMXDXN6riTPF/dXnN5sMMXTPdWJQcCOEXImddJpbiiTRSA3VaxVWJ2FlNTr0dn3nqM88Dr7/vlYqF/tCM8YHvYDP9w95yJMCghThoMv2Cj6Of5QSI+GAwLkHt7JDPr4LWl/pt4qiwC6RwkAXqmQ2cicdHxmGD/MpAfLkA3DO/HqySpXkH1Zb54mYkUDw/3tjt8dvyMqwmOgHB1B2WXzR4vMxDNM0fNh/8TvXHyG/y9n/uD1h8z/7ILwpGDtrYDX7WLYfl7oHqAYSRT8SRzmZeQC6cLA5UL36BRjIwyohz5wby/oXgehsl4oc8UV/G4hHj3hq8DFj2k6HNVLKpwkGbJjhHlpY7BhMzOWDvuDDceXHXayMtKcCt2jAAVT7zBDpoxVMVmc2GubtB3gmkwtFzLG6J6yaou4pgxdnCEPXmm3RgQF3/Z8/tUr+GiCZOgO8n+7x0R4nHFPE1QncGPACcsuV2HLbPnR260kjAfx9gpPpoGT/cmQbxV3UKo/mahiu1GE8kxKmHHSpNtiF/VFm+WQjTvH/ZcX/P3uN/jvfPQnPPNHvtvf8lcufwzAMXfs08Dv3n/
CP338Lu5y4frySMqWefHc/cEFbjTMzyPFOcJeWE9pgBIgu6LYrCKeizeR7csjx7sN9tGTAopLKC2JNBq8mavrr2tAX6ukAXcUAdDyyciRge0Xhssfa6FRIDtPDoHb3yn0j5awBzd6dp8KzmPZyc/MHuJOBAs373TM7WWc1T1l+geRITGp4O9PmMNJApAVIC5Adz/hHkeGd1ve/OiS/9R8l//Kq8/ZuKVhYpbs+PWrW05z4P7pBnfrmkebaGWpx5/+/PBSmKjx8y3DlwF/CiIRoVo6NZGO2w1xa9g62H2e8VMWmvXdjL8/yT20FjtfCeN0ybiY8W92pFeFi5cHgkt0PnGa5VQzfWS+KoJ5qyD5JOttSY6L7cRycQEUzGyZ7gfMyfHFi8B2NxFcknJxtiqOK68RDpKwxEGCuvgMGubZkyZHN63q4WKOXJg/jCzXKihbR3AWyvOJ3dXIb149oQMc9nNP7yO7MNN9dGScd2y+0mK1yOGSh8zlsyOnLzv9OdXPUjGPzlJNad1UeDaciB9b7so14d6xBI8/SVLnT2jiIwdTdp5igwChe4Et7H9rEiB5lBGkmwopGxnN94nDdw3Hb4sMTfEZMwuOpbsTvOeytbilkDoIbwLuJx2pK3QJbJRkpSrcmyhSIGaOsChAqu9YtiJ2677oBR+TaSKn/iRnUbGwXAg0IByKiDcuqwxGO94UP2NSgZDpLmbK2x0mq3bfInHdJsE2muzo7qvECu+PlICqSfdie+Btf0WxwowsCkvBQL6M9JcT4zDQvfHC3u1EXd1ODjPqs6vZBPIZi4UyJA6/nlm+8lz+uNA/yF4LT464g/AE/YN8TxvFqyuAP4mMyPj58/9Xe+8aq1mW3nf9nrX22nu/t3OrU11VfZnpy1ywCWRsLGNjiMAxYWxQDEhIjgSKRBBfghQLJOTIElI+woeILwgpSgIIQizI1RgJY4Ijf8CyYzu2M5Oenpmenpnuruqqcz/vdd/W4sOz9n5PVVd3dVmZqpqu9ZOOznvd73rXuy/Pei7/h1WmXQHs0uCWV+a9Q/Pn1u2wINZrQSA/sbgPVGYiX2qO0+qapTrMOb9T6n45Dvi9BpN5jFWNOt8YWGa4C5WIKS473EWDWdRI0wtq6mf72oDzseBKQ7O9qntXCFkO+dJi6j40HR0DjRpihADO8XE8UQMqAG2V8Z3TffJMhRWLCxXe2+ybYZXRN1ftNYlMrHgLWcAdbJibUoXInGd9klMeGfI5uvNX6oFa3RImb5wzPx/TjXJso+0XQt8qoNOEUGkNkquKcq+HQic0H4wZ3TUUreapuHiBXzS5GgrnwrvvXeNiXXI4XfLq9JT2huHNeUF9YbXVy4sVu7srfBBOb86wS02+tBuJSuZ64Q9WCLlntrNm/UKBdyos53PwzoMENZA2AHbI8Vq87Mg7KM4D5bl6KzTRVY0WVeLdJhMGq56NMm/Yn6y5e6ccPFa9xoh2Adcyz3Ja0ZaGdjEeqi5CnqlC7pWkTOkCmxc6wrjDO22zol6w2B6hC4Rct1nvBfx+A2vL9J2MlSmwXnWEpNWS8A9Vy1hDsGrAfv3kOpnxZNaT7dR0tSZG29rSK7B3efSUjPTkPDRObaNie69AC/HEoitJzcOLeWCxobO9zLj39jX+1u093KRhOtnw6t4ppW3JTMdBvuJ6scBMG1zeMs4bDkda+/xuWbFcF1yfRN2qz+eazxdUlbc5L8FYulJopgG3W/HK/jmr6Yr3j/aodkd0RaDLBbcsMFWnhp4Isq6RtlP1XeJJo1fZ7aCpMw72FiyKhssXSqr3HNmqb4UQhvd0MXeuOuyAjM0mdo03qvnUjj3BBRYLE5vObhOgCXaY12xZYkWGVXlfzCFVB3VDMFqdtDie8P+9+0X9/Hx7Qrd7Nb4xZAtDeaJJ5ePjFlP5qBOmv4d3QpZ17E3WXLiOVTalvGeH6qa+Aq73Tpg6pmUZraATC12ZYfNsOOH2In39vjz+QKhXBZtJziY
ap9kiGgKjvlGy5lDadYtd1bjLMatFweigYfGCLn7CqNU2UI3BvleykZK1VYmVyaU2EA4Gmh3h/AsQu65S3lMPXDfxZMbjjRkqHSWodyCUHYc3L8lsh5XAxNXDKn2c1RgJvHN+wHwxollpHk7IAow68nFDN/bayD3u87r9wDhvmN+oqY8L8rkZctz6JGmJ4pPeqTfg1b1TxnnDneNdzd8EuonmsmaLvnuB/ha2jvmohS4eQifxx+nzdvR3M6OWvb0lr7x6wTir2cvX5KbltB5zbz3j62++jG10J802UM+EdtrhLizFiWou2SYMua+mZWvwtJ3+7iL4Uj0xfqchu+MoTgS3CGr8h63HWkKIYrfxOxgZdI2k1f39at5Sb8Df2Jtz53OGi+s5yxf1/GRavZBX+54wa6lvoU2ijx1uboZWXDY24247Q2Y8puwGvbJeeLrLDaZouLk3p9lZctsdkC1z2pOokN86/d6tJ0SjQLxe50IGJu/4zGdOOb0x5my2S7ZQj2p/HOmCU3OKNEcrxGo6tsKUayguBbvOcCuN9jQTGXI4h3npr68ZlDeXrEcl/sghQeUUpBPqPSGMO9w9y/Q9Nb69zekLS3RytfjENFCeefKzGruoMKvN4GXzfSK7FyT3kHU0e2jni3pbuNWVorlpweHOcljXen3ovVCfgCcbwgOCF9bzklVt2Is7g+o2oUreXn+AzWGg3e3wzpCtjB68BzV/6o232HMrbrhL7tS7/NbJq3z33gGryxzphOwi9vl6qeI//txv8/XlTf7g+oucumtILME2rcQfxOKijH9/8elGHsk99kTijxiGCZcAxWn0dMwDvsip3nfc7vZ55/VDykmNm1Wq2eGFvb0lN2dzxlnNfDZn0eScXE6oj8bYyuJd7CZfB2RtCUGQH1iwbvUH9N5QljXjouH0Bw5wlzKoBKuGUCBk6um6fIN4gOnRazcGW1m6oqAoLe7Sgdd+b+sq5/pkyfnnLlkvc/xZzvpa9Bh1OdlKD1aRwLXdJfdaSz0rQSxuVy/kg5hmn0C5XzMa1WwOdrFrrUY0rYZStV2OntCaaWCyu2HpVbm9PDKxoiZ6OKzBOEtw2f3ib5nBF4H5nRnuwtJOtHVGVjZk04p2z7C6N6Id6dFTv9AwubaibQ3VMqe6W8SqtkA+z7VHUwhRMkFPrqb26r3aeLK1Hzp625Vh9k8NweS0xYQ3d66p+zyH7mbN/rU54Tyn8jnvn4y4PdrDOs+Ng0sOxmuc6TgoVhSmxSNUXcaiLfgaL9B0o9iXSgUKN63j5uSSTZux3BvRbTSh1a0cptawqq0sWWYwC/XQmSYMshqq+QLd3HFuRxzsrHj98IT1ZxybNmNdO+YrDW2Oy4amsRgT+IFrp6w/79R71lk1Uo1n7GrGWc2q1XpiH4Rv3bvG+oMxxZEZVupQku049XhW3WA4YwWs1YvcRC/C+181Q/FEr9PVTMe0I3Ax6dfWenLsk7z7ysmsMKznqon2pZvvc7o/5ng1oWoyjO2wZruM3zQZq1VBd5kzei8muFYwGhvKiSU/L3T7Pl65rba4sJvA9EIXd30iv4ZWhGZs2OzFi3TtMU2HbBo1QhYZF3lJs9fRlQY3q7m+P2ezm3H+9oEaFCszyEGsbgr1bqC7teFLr71LadWg+80336A+dioEGkTboVyNJASgE+argizziARWtRuU5BergmaZU76bUyxhstaqQQ1fZCw+k+N6T11gECq1K8PFckQ+bqj3c9oTwXSxm0CdMWjmeZ2LD85nzEc5znpev3VM01nulVNe2FngrHY76LzBGo8RFUasW6timt4glaNeO/CxI0HvWUHtqt4QXHeOdedw4vns9JT3X9xlvd7RhZ7TkN6tzx1xdG3G+qSkvKuCjr1nNltBs1fob1t1yLpRQ7DMwIAtOuwXN8zPRpiF1cVroX3Y7LRR6ZS7hcqkLIRgStxCz5PZolbDr40h9lioJEvLosp5/cYx9XVLbjraYOi8tgo
buYbrowUvFAtt8fXt11mtJ2Qr9cZla11wVJXjZD1W3bk8kNmtlpzPwK8yzlYjZmXFaGdDdZjRfKBGmmkzpM610KHNkK7TVjxONaWCFyau5sb1Of7wHnWXsekyjpYTzk6nVB8UrM9jn8FTIZ8b3DKjmRo2h4HsswvMG57lMiec5TSbvvehCiWLFzaHahi6ywbTemwVGBU1By+tWB7mzF9R5eC8aDTUXVYclTPqvULFfFf37/shOjncErqV0Ozk2rGgT/XwfrsgkkDYxAofE+UeXGz/Ffe1diLUtaGbFlgRaLoohfBApOUjeMJVeFo1FTYWe5lh6ujSEzWc+pVQdS3g31izP11zcTlmZQr8tOVf+uy7vD46wkmHET0oM/FkrqMtNA8iZAapGWLXL5dnuBc6frPVXJXmu1qzasbR1ZkLuZNYvdYRJirgNaxARCAq5CJRKbg3qBrIaq2mK85KmklJ2AuajyNwtsw4G00xzjMa11jjGRUNVdmpRyWGJmwVcHPDcl6yu7siG3mmRcWm1Z9n7BrsF45V6TcI1qirO2MbG54VejG93BQs1wX1yrG6LOhyNWDEByRWG26WOd8JB1zfWTC3HeebjGCtVg3FEKd0sJ6Xgzhel2vM/T5BNStIbDA6mW44GK/5zr8wHs6Co/czusJiGhU7zKqAn3aMi5qVK7blw40KnJlOBr0QMgM2LiWMakQ104BdWMojoV1YNr6gKTzdpGE62zDfcTRdDHXEvCdrPa5saSaF9lYK0IwzbOa1maQ1iLOYRY2pO7KNVxHJxmslzrgDsdprsRf6vBcNWCcsLwtOX7aM39fKJL0gZQQL9w5HNNdaslmj+jrbtCWyrKOtMm3jUIM0Qn2R8936GkezCZn19JU5plEvbD833hm6UQYxV6xv3tmHaLsSLal+a8rRzpiTG1NuXbugzFrGrmF3tInTqhe1ttPQ6MTpBatqMw2tdZZVk7NqcnwQiqzFimdUNlyWGhJX+ZG+7xWDiGVfRu6dRUpHvadGdmi2BkTf7kS8CnN27ornqmWr3dNf7KOX0N7LOV3v8Y5ruTZa8eruKYCeC4x6ZLog+KCaSbenOyzODmKFVb96F7pobNtqK0TaFZbNoZCfQ3FBFHiUIX9FvOrjaIjeQtAq1+ogDPlM7lIrf7pOL5izooY3TrXizAtta6liq6iirNmbrNl0jk3n2Mk3FDsV9cpCbWhNBvW2BQzEsO7aEu5NYSmYNYRmGwHaifIEpg1blf8rv49Kb8jQW65Xe7eVofpgTHABZ4iVkDH021ecRWOhK6Fa5jTvTcBA9do5e2MVmJ1XOaVrtWrTtWqMS8ckq1m3bjAkqiYjNKp/JLGtUTDgl47jdsbJO/vYlRmuC12p3uud/RXd1CN3ogROK1wbrXht55TzWyNuf2aH1usidHE+JruTU5xnuJXFbjzZyg1aTqYOdJXlR15/G14CHwyFUe9yESd83pb83u7LXJ6NqU8cpompIBlArsdg3W2Pv1xzlc5u79Lc0MWIMR4X879EAnVnuahGXNYlTXSZmCaGDjdhG4nphKPTHbq1ZVTHLhe9bA9g55Z5vcNcIDiPDbEqt9FzmLmyyO3D68Fqq5PQGr769ku4Sc0Xbh4xzmp28jVf2LnHt3eu8fb0GqvTMVKZQS/LzaMs0KzjcLbisztn2nfz+i51ZzESOL0cs7k7RrwhW/eCyUErOtvA6fGM6d6andFmuH7txl6NmYnG+AtaqW2NJwQZfs8AHJ1P2ZyWNNMM7zQPqiztcA33mYaj7azBnxaay2ai5y32o8SoZ7QXNe4KDU0ZK9sIS6dCyx/HE5cxyEfaidnXqijaOdTtFj1BPheqw5Z/7bVvcau85N3VPucvjfjCzj3eKI94c3WL91Z7LJucplOtjfakxC0M0qiHyVYAjl8/+iKlbchtxxsHx8ynJV8/KzCtjeJdvYvRkK0t1gq2VPHBi2nJ+gU7lBu3k6iW6zzZXP3p7SzAQo0wt9AE5NE
9hri4zywh05VbOxrR5rB+scM0glvA+NiTLTWmDDlhkXF5sgcBjkcebMBOWkbjCu+1kaiAdpcGMuvxAarGcTkfa1hy4ZDaYNs+HHqlRQkxOd0L1e0J714W5NMaTMAtei+g5k21pYV5xmaRxfBIGMpsdd7QrtyiVvr16ZwfP3yHf+X6tyhMy0kz4f/I/zjVQaY5HpeaG4FreGGyYFZUvOOvE2pDdpaxOYjhxd79KqKhqnjw+8ISdho40TZARQXZSg3mdpRxecshlWpy5ZdCtspYb2a6ErGBIp6Ysit5Z0OHcWsGpfRtLomeKMykxRtif7OgIcEakJjsGcCuNIm+D7H0WjHju4Fq17E5dDq3K32+y1XAzpQM+SKIfke/MVQnjtXEszfkcwWyjeYb9VpT0ngdc6cNdjOIOQaWbr/BFB3F1zNGRwb/3TH39jUJ1efaDSDYQL6/oV7kmHnGudmL7u2ox1Jv82r60J3mDWrYr7gUxh+o4ZZtPOVxHZNENV+uz1lSzRVDMwvs7qxYrAq6MqcL29BZMDGU1KmmmebeQDd2UfRUL3ZBVFxQOsiPLffmN7gz8TBthjBR/5l40fwaAd8YxheCu9T5HB13uGWLNB5Td+TzGHZuA+3MsrnZAZbmYuvK73Jic+3ofYr7SIgJu82eVxmDhWN0Vw2C+bjg7sZiio7r1+ZMctV56j2Bi3nJ6rJkdTrm6EiPk2YWCIWnOLN4F2gnJjZoZRD17Jsp994KNXK2p9lelLhXqA+xDUifZ6OhpO17bB2G38LUgjuxUTPnSoEODB0cxOviwmSe4sTgVjBv9rm4MSasMppjy0LgbhG2pfc2DKXqDBcx9bz3YWW31jFl5yofUZ6Kaq81fe6eod4tuPycRGVyNf6Cga/dvsF0smGcN1ijcgJd0H1AFcavLACtgJio7A9sDG+e3Bya1OemY9NlrJqc+aag6SzrVQ6V1dL8ONb+OO9buISod9eNBJ97sktLdbZLE69LG9tfrNVDfzcL2LWeZ7qRZ3LcF/MEskpPTlneUa9cLBK6sn/H66l0grvQcKnPY+5g6+87V/f9S0OUiQmZoRsHJPOU3ygRn/O198faO855Dl+6ILMdbauyGXTx2hzHb1rILi0ffLDH2XzMbFwxyWuqNmNZO4KPEjMxDGg6hv58wQj5eznVBzl3JjPdn1vh3EQDcE8jGUYCme2YFRqadraLvRStXl5zrwa11XH53KjcjlVbgrhIractoTbDgrovQqMPhUbniM8N4q2OJ3qgPiSW+hCeuAG1P1vxyovnnGwmHL3zMn3VSLBRwC4DRh3TrOaN8h5vlCqruvI5d+pdfv3bn2dzNEIaQyi0ncPoRBNhTaOGgFt5ijPD18vP6ASNO268fMYkryH2jLt6QvKZJqcaK/hOmJUVk88dcf7iiNx2FK6lzFqs8TSd5e75jMVsxPjWghDg/HxE+Z0ct9SkuF5jpY9XE6A8i0bP2mqoYh4ozlrspgMfqA8slJ7pWxmjE49ptRfU8qajmY2G6rY+VyMYYbmv284vYXrmB0OpyzVEmW00JFWct7jjFeI9wZSI6AE9/nrO+obD5do+JtuosqtpOoIUSGvJY7KeW6jyq10229DdFddq02kriRtugZOW0jS4cU27tgSvOUmmEagsl1XJi9MLbnx+zslmwrfuHlKdjoeLrXoHDMWZILWuooIIJvPkry64dFOKM0N5BGYZyM8hv8ji/Abc0tOWQnksMdRpyJbq5XBLT3622Xo3YhUhbafu/NqBD0Ni4c5sza2XLnln7xrVyhHWGWalKzlfdrqyCVDvahiuyxlyqabf7fWMYnPhC/VgqUdQqHfUFa19smJo1kBxaugWhuLCq2bZypPNVfQRK9AFTB37RXQeaQrNjVo1SN3gppa92ZrLA1X0dZdq3BO2ORgAF29MGW80hy4YQ3mmobO2UE+gd6KJ7F5V7fsLaLWjRmR57oeKtuxspXMYK1qy3BKcwa6aoVv6OG94efe
Cr/8b17FWV5Yu68iiJtX5yYT8tqM4VyOpGRncWg3XbK05cvWOpR2rbtPkPc1hEF+o5lv0qAwNsCWulkuJY9VKvvLeGrOsdO5iSyK7cdhlTbieM3vpks2h496Lpf6+uWeyuyHPWgQ4e+tA8yMvBLuxuFVGcWPFywfnfHA5g6/tkW0C7sIgZzluDhfTkYaFMqLXR9i7E7YCtrHPXzMR6llGcR5YX5dt1XC39djZKhq5Q3UiQ+4MqGe0mcLqpqEdB7pJh9uvNAm3E8ztkuJMF3F9QrEKiAb8yDN5z2hLpzoM+j2aYBtTHVrdF3Z3V8xnJW4ljI6ETVdgGpi83xcA6H42tEYKWxFbb+Hyda08NY2QVX7bLisa8u4ybAtjMsh6jb939Fyr+6uq+GdfmeDnE+bRYJMWjIdpprdHJw3ZssOuGhV7jYsN78ZIY9j8xiF1o5WDOnYtKCo2gTJAMVMvpG0Ck7stbtFiV1e0oHr9JzQ3KMxaOpMxel8rDMf3/LZdi+jv3OVGq30nwuaajftnr+2nx99kVGkk6W7O0ODZ6mLNttFAjYu6PhHfVnr+MhsNV/bjG1q5GBXPdGWrHtVzGN/ROZdgqHYP2YxgtIGdRZ+T5bV/a+Xj+DPai4LiLKdjytG+jiFfBMxIDRg3D5SnHdmqw27aWJxUYmshPwJp9Tu72Cy6LQ3zV8ZkF2PyhRpHFyMZ8pb7RZJzGqBwC5jeaXGLjmxeD4nqEnKkigUcRYeUsUq3E/Cxb6zahTRTzb/ybpvbOaQffAI18icuY3B6OWGa10xczb24c9subEvMA5gzxz/41hf4vdnLlFlL6w0fnOzS1YbynYLds1ixUWTYWiu/egXZbNXhFi1FaYEc2wR8lnHx2gvcO/C4pVAeaQ6Ti00UbR1wi7giPSp53+xRjmq8N3gvbGrHSRQWHI8rDb/sakhuXNTMRhWnkzHVxrE6yjXc02rvNh/zPHRFrweYdh3vQwCo+3fS4sqWzaHD52Y4sXq3XVH2J8w+L6OX/Te1GoA+9q/TCzSxlFwPJmnaWCEikHnqw5b83MU4M8Py0mxaTN1iRg4/6ahyLcuf3CFa9jJ4BGjawUJ/953r/M93DmCREUzA7DSY2yXTDyS22gG38hAy3l/f4N3JIQe3LpjkeoDnF+q2LuZ6sbSVVnxo/F49BeGioNxdwUsLNvs51X4eE0S1DYeNq9FmLDQzvYj0F59stTU+pYsVOZt6mwzvPQZwzsY8Bk+2gflixLioubE7p50ZTT+JeR0C1J1lXTuWoxKbecbjimmprTdOPz+hWTuQwGqjic5AjK2rB6Ftoh9Z1DvU7basdgVZqdgd0lcUxebHIWA2tY49/qbGe8js8H3azR7M1oQvLrm4VWAWdpAG6MNngOZxjWATVYCrvVjIcaWiy1ui10OueIz05GUrXc5Ja3CxykeqmDC+aQmt0WT3usHNhbunO3AAL+wusMZT2HZIxs9Nx3fKfW531zCdw1Sa2+KdhnB8prmCzURgv1I1YuvI5jK0jaAXI+2/X7zQYPpFWogrXq/JxHUDdYO9yPQYXG4IZoY1nt3pGqZrAD3OXYMzHaVt+Se3xmy6EgmCderBvBqKeP8LWgkJ2j7C1FpJbGuGUJVttGQ9WKGeCPNXTGwZFShOGTw32kA1GtprPcf5XNWzF29ccTvZ6NITkEw73WdZR2601+W0rIauCKdly/KiIDt2ZL0nayH4UUt5bc38tTGjOxa3NGQWJGSa69V6iJ5F08Iob1i/seDyhQJZqOBhQFi+BOKjKGof1o4hyG3bH4aG6m4Rz1VrrTwM1lLvdWxe6RDnsZnH5a2GRDuD+e5Ij51Sz3/tROjKAHONBPThrz5vDfS6YFcNdlkjKw1h+8lI2/4UHrzBxgpcW+l39JkmmevOxFCpLG2IfTRbZLMN8Uivju3BOE95a8EqmyCNsHzZDIak3URvmFUZgm6kCfDNVEv5i4sweJnWVY5zHc1ghMZWJhv1dvkcup2
O5jqYhcrg5HPNXbO1xWyieOi6Gc7VUsaogOtY/XMbVssMd27vKxKxUfYjX+jnZGuvBR1toB3ZrSRAGdvgXEbDI7YGGlpLxfNtn6/ZOWFzo6Md98LTQjvZVsdpAZTKp5gORid+2z/U6G9S7enkFBcBt4jtlJb1EHaDHQ3XLzOQDJ8HfKktZvoCk2ACkmlOLgjNxGArEysNHyg4+Bg+ebp5IpFIJBKJRAJ4Cknk/tsTvnlWku9vcBJbsnRhiPMHC4ihWUy4aKfMYy7PbKnelfI0ag1tvCYxh21LFtD4r1k3mMoyyUQ9GV3ALXPW1wztSChPNcznFt0gPmhXGiIZvz+mPR0RzGirr+Mhi1UdqxdGdCPVWZhf5FzmHsk7XNmSj2vqG4GqVXeh5B1Z3mEzj/eCD9C1lnChLR1sZclL0fwt07Iz2bD6wY6q0aWzDxJznoT1S05d6I3avOoN8DEpUMMMElehwWuobPR+Rn5hGB0ZZq3Hrpu4CgyMrq9YZCOojObwXOrKpZi4uE3BTlpc3lLvZixPR2rxLwuyuVEvFQxuzunbGabNKM7U9VrvZdg1TO52uGUULqw9bTFmdCwQMlY3Djm72YEXynMNrxUnlYZ9Gi1/HwTgMkNxbDkbz3SFnXk4jO5tYLVw2KUZcnfasSccNIj1+NqC5KqR4oR8XuhrY+hOPRFePXQxuVS6jtGRp/7GiLu3S7pJBy4geYdxWsAA4HIVByzHNcZo3L7MWjLxHNxcsW4dTWfpggwx/FWVU1cZ7UkJYgbJCZ8HJPeMpxWbiWN9fUy20pi+m+dxhRc0QZzolrdGQ5B9ubAI5thxLDNmu2smNyq4gTaiba3q7IAWc/ioyWUCznVDdVTTah+2rpPYHzj0nWQwxrNZFlSnubZ3WATyicO0Ew2PxJ5pPuaUYQ3BGm3+/d0Rt08KQqYlyiGLOSgC+Y7+lrIx5OeaRzY+0rCOhlO0yimbGYwNuFFFmFXUTca6Nlq4YcL9ratES+V9ZWm+k5OtLNnSgJ/g5jnZvELqlpBn+hvE8uWz93fp++r1bn9pBCyEooNWW4/kFyE2YPUc357wTp0xHlfsvXSpv/OiQExgdWjZXGQammqjVynmmvXabvnBRgseTGB+b4Kd22F+7EaQ022ne02ED5hJo8UQ1pNl23CD94L3Qr1xGrpoDKtuqucJG3Czimzc0s2sauEZLbyQVuU1yheXrGVCtrK4XL2S2doi1mCkD3nC6XzCqKhxrqXdsdSVo+2ELG9BoGrt0N8wdNpIechTC2Au9fzUy4/0IeKuDLDXcP2a9tRztqOIFYohCG9zyLIsY4W2hsL9G2suXtwm3Gs/P9XicpeCdDnlucUVGZmIVqQ5qx7+acPin++0PVRsQ9X//pIFddysLdnC4ubaJLkYW/LLDHep4rHS6wbF7xa8MClrxi83WOMH79/VptAA01JDq3dv77Gpc+xGovaZeqw25yV23GrbI1SfTT1hPuYKqmZWXja0E8vaFuRzOxQCmU2n24pe1/58aiuhri0vHF7CIdQvad9CH4SqymiWObJWkV1phOLMxvQYlVVZvhhor7WYstXzSN1Xu6HXqWVGcapJRmVpGMVQZ1fC6NYCf8Ow2WS6T0jAOj94cS9rS2h1X+n7dkoXG9FnnuA8ZmVp7hps7cgvjHpIG2055J3QjbWC1S2u7NtZn5OnLq1+f/NO9de6PLby6T3UbffIMN6TNaCA2bdVh2VzOImhqTAYUVnMiA8XetLIoppxXw3S5dHlNxI9QQfdqbpc3W/S6Y7VhwKDUUEx0N82X2zziLpChcxAPzsYg4SO6ften+s04bhPfOt1KPILTUAEos6RATKWL5aaALrTauNhu3XDdp2Q53qBGs3WXOYlVTWhPBb8WoXzfGWxxvPF6/cwEihtiw+Cjxvx0aCq40B80HJ4K57StkNV4qIpWNQF6ybj1O7SHmeIN4yOMjBaRbGaqzjY3uEC0ItrfbqDeGgnmVZ7WcG3kE9
a9qYrTq6V2qfNiWo6Sba9aIeg85bLkJxqjuMPLqrEHWxGtuzIF16VwwNM3wO3jIrxTRSgC3BfV2yg7000+iDg5lpO30yg3vf4wkPpGV9f0uxZusbi1xlkAVc2ONeRzTzzlcW0lmyD6t+UWaxICtAVmE2tORG5NjWmVbHI8R39jrayUR8pG/oqhgzqHVjPQlTUh02AszwQSs/4YEUZQ5S9AHoXL25A7DHIEM7IlobWOJqiZTKu2IxHGg7uiwCCGrZ+lEGm1YPaFDjTBNLCId4zvmNo5iXzgxy5VrG3u+SFncVQtQoMZeb9vtQFgzMdnd+Gozato4kneh8EGx8vXMulDXQnI20AHpM4pcs0F6v12rMvE7quwFQqDjvVhEcNgwTwWR9ThXrX0UwDZRPVpzcxR7DSHEGJysDFhcUfF2xmlp2DJbNxhUhgVlQ4o4mmRsJwvDTe0nSWd9vr5Md6YWmmBvFR+b5zeKeGRBYNqOnbWRRM3J47+pzGLjdsrmnY0C1VM8jNW2bfKqlPxyx3S+ytFWXZUIwayrwh2/N011X/qx8XMOiZ9RdXgDJr2R2vOTqb0Z6W0Mqgnj00jK5F95W7Ba0DbKAyYehfaVd6IZidxYqtdqvSD7B4xWnvPnRxajca+iqOLRVj9j5zTnu4IbwzGYRM6YKGukPA59rvb/mtKecTTyg7JgdrdmYrlWKJ+0kvKSES1IiI3ROaztJ2hrOwQ7eR7cW+8YBesENlOb2YDAtIYzSMNxtVvHHjmNujHZqLPV10O3j1xgnOdEPFH8B+vuaiKfnGyXUWi328E4rcUBqQVvu7BQuZ63jtxXvDvtMvjnrqzjKvC+abgsXFCNOUBGO0e0Jc1AydE0Lso3macxxmHB7OGbmGUdbct2+u261AY+cNe9cXzM/29WLv+7zDQPm+ox3rOV+6GAaOFZYIuMuMrjLa+WFSU+81BGMZujlA1N5ySJ7p71dkZAthfTzi3AT2pmtemC60WlAC06zCI2xax9F6wrp2nB3PMOcZecxP7F7ccHiw4GC0YuKqIRTvxDNvC24vdrn9wT5uUSDB4C71O4S41puOKmS8YewaCqu9Oftz09RVw7XPRIu7HsSdtHr4rBrzrWuHZKsRnRNMV6pERe2pZ5Bd31COatarAt/JUE0OaszTGkInGK8JkyYKAotXnTNBz2W0Hx+ke+IeqC7X3JQgsRppV1WwpVNxST1JaYy/mUZNjS62lShU88hWsWFslBYwNWRrMyjFZpVWWa2u2yFu6h1aQRT1iUyrJ41sLbiVJc/VAPMO1oea63NV0FPHD33DRoC21P8m6stkGyEcuyH3ImRxFeSF1mie1+KFFik7sAwnJ+kC7thxtzngrtnX+HHuMXnHaFSzP16zU2yYuorcVLR9s9hYcn5ejTiaT2gaS1tlqhtjw5DkrSW3Bp8H8ktojzP8Wcb5Xk5xsGZc1mwsgyhif7E29wrmC8dipyaMAz5Xb0jnVMyxV3EPItQzYXNN83n6k7VpoZnaWOZrcGuVVFjdkOF36HM9qh1NHkdystIOWlNE2QREK26qA922eChOtJwiZLDezwjOa/LpQktnO5fRZoEwVu9RV8QE+yJ6fUo7iDT6kR4wzTQbpAGasaHaF9oRw+q/F/3r94tgIFvpHe/0pJYtDBIs4bu7LDNd4Vzdb/BABtZpoqpEL6tda8JOtx5zuddS+N7rEGL+lhrbXWG07DzXRUQ7zqj2M/XMVl1cIATyc0O4LFlQMu/PP4ahms7vaCNpqUzURlExSu/CkLfS7wtXL8DNjoprSszJM+324hqs0FlLN7K0Y4PNDbZymE7FJbuSKwfU/cdVX6q+uqHNsAmObJMNCs4qQWFwC4GFY3myxyILBBc4isfZUF0ZVJyW3OPG0YhtZEhK3kouWKprjmbUNyeFZkcTe4fv3M9B72DI1IBfX1ePdl5G0dss4OZCWE5YFeDHno3zkAUOrl8yzpvh4twF4WI1UmmVtcOeaPsX7wL
hoFGpl5U2WfV5oB0bxIeYd8bQ9qrXO9rmdhDL63WMMtrOdy+/ovlUavj23SCki/lIF0Y9cGVH7vq8FMEXdri4YLT1S7YyuIVWzNVHjtU4QBYwccU/eHIkfi8Xhjy0Pl+rGweameYzSafHZbYWOMkwd7NoTGgFaGPg2MHZZ5bqLXV6TpIOvnu8z2RUcW2y4lpZD0bqOKu5OZvznclerKQU2pEe4+1IvRz1POf9fBdnO0Z5g5XAunGsa8dmnRMC5IV6mk0WF9Xxew35QFewlR573arg7IOCM4G+vdb9x6Aaw8EChxW21euId7rA78vsJZ4nuxLasdDUmuxNQKV0OsFXOfU4I+R6DfNOHQtd/K5DU3KgHWnVqV0a/NtTTuyUI7d1LmQ3VhzsrCizNuZ7BtyooWmEttacuVBZTs8nnF+OmU42vLZ/gvGWVZtT2oa9cs3ZbESXF7HKMOb7NrD+zox10Hk4Kv32hJp58lnNy9fOmbiaTDr28zWZ6ah9RhcdCCPbcLO8xBB4d+czZJvYui03Q16YzTzXp0teuvE+rbec16MoyeJYrAuaOqOtLKEy9AUnGF0IMsmHRHKzqvk4JFyZ2O81IjIH3npiH/j9ySFw/MhXPd+kOXo0aY4+njQ/jybN0aNJc/Rovt/n6LMhhOsPe+JJh/DeCiH8yBP+zO8rROR30hx9PGmOHk2ao48nzc+jSXP0aNIcPZpP8xylKrxEIpFIJBKJxyQZUIlEIpFIJBKPyZM2oP7KE/6870fSHD2aNEePJs3Rx5Pm59GkOXo0aY4ezad2jp5oEnkikUgkEonEp4EUwkskEolEIpF4TJIBlUgkEolEIvGYPDEDSkS+LCJvicg3ReQXntTnPmuIyF8XkXsi8pUrjx2IyK+JyDfi//0rz/3FOGdvici/9XRG/eQQkVdE5NdF5E0R+aqI/IX4eJqjiIiUIvLbIvIHcY7+Unw8zdEVRMSKyD8WkV+J99P8XEFEvi0i/0REfl9Efic+luboCiKyJyJ/S0S+Fs9JP57maIuIfDHuP/3fpYj8/HMzRyGE7/kfYIG3gdeBHPgD4AefxGc/a3/AnwB+GPjKlcf+G+AX4u1fAP7rePsH41wVwGtxDu3T/g7f4/m5BfxwvD0Dvh7nIc3Rdo4EmMbbDvgt4MfSHH1onv5z4H8FfiXeT/Nz//x8Gzh84LE0R/fPx/8E/Cfxdg7spTn6yLmywAfAZ5+XOXpSHqgfBb4ZQvhWCKEGfgn42Sf02c8UIYTfAE4fePhn0QOV+P/fvfL4L4UQqhDCO8A30bn81BJCuBNC+L14ew68CbxEmqOBoCziXRf/AmmOBkTkZeDfBv7qlYfT/DyaNEcREdlBF7x/DSCEUIcQzklz9FH8SeDtEMJ3eE7m6EkZUC8B7165/158LKHcCCHcATUggBfi48/1vInIq8APoR6WNEdXiOGp3wfuAb8WQkhzdD//LfBfMnSEBNL8PEgA/m8R+V0R+U/jY2mOtrwOHAH/QwwF/1URmZDm6KP4OeBvxtvPxRw9KQNKHvJY0k94NM/tvInIFPjbwM+HEC4/7qUPeexTP0chhC6E8CXgZeBHReSPfczLn6s5EpF/B7gXQvjdT/qWhzz2qZ2fK/xECOGHgZ8G/ryI/ImPee3zOEcZmm7x34cQfghYouGoj+J5nCMARCQH/jTwvz/qpQ957Pt2jp6UAfUe8MqV+y8Dt5/QZ38/cFdEbgHE//fi48/lvImIQ42nvxFC+Dvx4TRHDyGGFP4h8GXSHPX8BPCnReTbaLrAT4rI/0Kan/sIIdyO/+8BfxcNpaQ52vIe8F707gL8LdSgSnP0YX4a+L0Qwt14/7mYoydlQP0j4PMi8lq0VH8O+OUn9NnfD/wy8Gfj7T8L/P0rj/+ciBQi8hrweeC3n8L4nhgiImjOwZshhL985ak0RxERuS4ie/H2CPgp4GukOQIghPAXQwgvhxBeRc81/28I4T8kzc+AiExEZNbfBv4U8BX
SHA2EED4A3hWRL8aH/iTwT0lz9DD+DNvwHTwvc/SkstWBn0Erqt4GfvFpZ88/rT90J7sDNKg1/ueAa8A/AL4R/x9cef0vxjl7C/jppz3+JzA//yrq0v1D4Pfj38+kObpvjv5F4B/HOfoK8F/Fx9McfXiu/nW2VXhpfrbf93W0GuoPgK/25+Q0Rx+apy8BvxOPtb8H7Kc5+tAcjYETYPfKY8/FHKVWLolEIpFIJBKPSVIiTyQSiUQikXhMkgGVSCQSiUQi8ZgkAyqRSCQSiUTiMcme9gAST55DuRlqar0jKstxnziHfOiR4XU88nUfuvGQu/IQNRB56Ns++TYhfNw2PuI9H/W54aGv/bjt9I/Lw0VNPvZ7PfC5f4TPDo94/pM890f+zg8896Hv/3Hv+wSvCX/UMX3sa8Kj3/+Jnwsfeu4j3ypXb354T3nYYXZlz/6I7W+38+D7r37Gg4fRQ5/rb8tHPfdRnxU+0es+/JkPvO9Dnxs+fgz9Nh7yGfKRr3/8z31wmw+/He6bY7nyio9+34Ov2T7yu39Y/WoI4csknlmSAfUcUlPzL5t/EzECok7Iq7cxAiKIifdF4L7b8VAXM7z2oa+TK8/d9zozbOPjXhdE1EcqD7z26nNXHg/DNrj/dXLl/n3PXd32/dsc3nfldcOFXBjGr8999Ou2t+X+15oPv+/q9h6+jQc/78ExftTrHnL7Yc/xybbx4HMfN94P3ecTjOO+bYeP/Sz9C9v38bDXhfs+6+p7tt9le18efN8w9u19kQ/fvvq+/oKsu+bVbW8vsvLA68x998OVQ2L7uInGgrnyugdvGz78nHnwNp/0Ob+9/cDj9spnXX2dJdx/XwImCsIbCdirt8UP27DiMeLv24a9sn37kG3077fxfQYdl27D3/e+q+OwV7cX3z98Fn7Yno3fedjGlTmwhCvji8/F39YK2PhLG8CKYOJ9y5XbIpj4LoNgxQz37a1vHJJ4pkkhvEQikUgkEonHJBlQiUQikUgkEo9JMqASiUQikUgkHpNkQCUSiUQikUg8JsmASiQSiUQikXhMkgGVSCQSiUQi8ZgkAyqRSCQSiUTiMUkGVCKRSCQSicRjkgyoRCKRSCQSicckGVCJRCKRSCQSj0kyoBKJRCKRSCQek2RAJRKJRCKRSDwmyYBKJBKJRCKReEySAZVIJBKJRCLxmCQDKpFIJBKJROIxSQZUIpFIJBKJxGMiIYSnPYbEE0ZE/i/g8GmP4ylxCBw/7UE8o6S5eThpXh5OmpeP5p/F3ByHEL78z2Iwie8NyYBKPFeIyO+EEH7kaY/jWSTNzcNJ8/Jw0rx8NGlung9SCC+RSCQSiUTiMUkGVCKRSCQSicRjkgyoxPPGX3naA3iGSXPzcNK8PJw0Lx9NmpvngJQDlUgkEolEIvGYJA9UIpFIJBKJxGOSDKjEc4eI/Aci8lUR8SLy3FfKiMiXReQtEfmmiPzC0x7Ps4KI/HURuSciX3naY3mWEJFXROTXReTNeBz9hac9pmcBESlF5LdF5A/ivPylpz2mxPeWZEAlnke+Avz7wG887YE8bUTEAv8d8NPADwJ/RkR+8OmO6pnhfwSSDs+HaYH/IoTwA8CPAX8+7TMAVMBPhhD+OPAl4Msi8mNPd0iJ7yXJgEo8d4QQ3gwhvPW0x/GM8KPAN0MI3woh1MAvAT/7lMf0TBBC+A3g9GmP41kjhHAnhPB78fYceBN46emO6ukTlEW86+JfSjL+FJMMqETi+eYl4N0r998jXQwTnxAReRX4IeC3nvJQnglExIrI7wP3gF8LIaR5+RSTPe0BJBLfC0Tk/wFuPuSpXwwh/P0nPZ5nGHnIY2nVnHgkIjIF/jbw8yGEy6c9nmeBEEIHfElE9oC/KyJ/LISQcug+pSQDKvGpJITwU097DN8nvAe8cuX+y8DtpzSWxPcJIuJQ4+lvhBD+ztMez7NGCOFcRP4hmkOXDKhPKSmEl0g83/wj4PMi8pqI5MDPAb/8lMe
UeIYREQH+GvBmCOEvP+3xPCuIyPXoeUJERsBPAV97qoNKfE9JBlTiuUNE/j0ReQ/4ceD/FJFffdpjelqEEFrgPwN+FU0G/t9CCF99uqN6NhCRvwn8JvBFEXlPRP7c0x7TM8JPAP8R8JMi8vvx72ee9qCeAW4Bvy4if4guTH4thPArT3lMie8hSYk8kUgkEolE4jFJHqhEIpFIJBKJxyQZUIlEIpFIJBKPSTKgEolEIpFIJB6TZEAlEolEIpFIPCbJgEokEolEIpF4TJIBlUgkEolEIvGYJAMqkUgkEolE4jFJBlQikUgkEonEY/L/A4Q8+3PB2QyvAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "mel_after = tf.reshape(mel_after, [-1, 80]).numpy()\n", + "fig = plt.figure(figsize=(10, 8))\n", + "ax1 = fig.add_subplot(311)\n", + "ax1.set_title(f'Predicted Mel-after-Spectrogram')\n", + "im = ax1.imshow(np.rot90(mel_after), aspect='auto', interpolation='none')\n", + "fig.colorbar(mappable=im, shrink=0.65, orientation='horizontal', ax=ax1)\n", + "plt.show()\n", + "plt.close()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Let check speed control" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "mel_before, mel_after, duration_outputs = fastspeech.inference(\n", + " input_ids=tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0),\n", + " speaker_ids=tf.convert_to_tensor([0], dtype=tf.int32),\n", + " speed_ratios=tf.convert_to_tensor([1.5], dtype=tf.float32),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlAAAACuCAYAAAD55TMFAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9e7RvW7bXhX36GGPO+fv91lr7dU6dqrp1694CLlzh8hAIjwvyEEEFpEFMFNQQEBRMVDRGgUajJTGKYEsEsZGER0IQgihGQIzSfN2AcBUkvEKUh8Ct56065+yz916P32POOcbo+aP3Medv7TpVdS5VFnBdo7XV9tprrd/vN+eYY/TR+7d/+7eLqvIwHsbDeBgP42E8jIfxMD74CH+jL+BhPIyH8TAexsN4GA/jb7Xx4EA9jIfxMB7Gw3gYD+NhfA/HgwP1MB7Gw3gYD+NhPIyH8T0cDw7Uw3gYD+NhPIyH8TAexvdwPDhQD+NhPIyH8TAexsN4GN/D8eBAPYyH8TAexsN4GA/jYXwPx4MD9TAext/EQ0R+h4j8y/79TxCRv/R1+lwVkW/57/H9P+Gfkf46X/+tIvJnRORWRH7Z1/r6HsbDeBgP4yuNBwfqYTyMr3KIyCdF5CgidyLytoj830Xk8mv9Oar6R1X1Wz/A9fxCEfljX+vPP3v/P+zOzw977ed/wH/+k//7+uyz8cuBP6yqV6r6b/gz+Klfyw8QkV8sIn/RnbS3ReQ/FJGrr+VnvPZ5X5VT+TAexsP4+o4HB+phPIyvzfhZqnoJ/AjgRwG/+vU/+F52MP5l4H/e/iMibwA/Fnj36/T53wz8N1+LNxIb4bWf/STgXwH+IVW9An4g8Hu/Fp/31Yyv5Rp6v/t+GA/jYXzw8bB5HsbD+BoOVf0c8IeAHwxLKuyfFJH/Dvjv/Gd/n4j8WRF5JSL/pYj80PZ6EfnhIvKnHfX4d4DN2e9+soh89uz/HxeR3yci74rIeyLym0TkBwK/Gfh2R8Re+d8OIvJ/FJFPO5rym0Vke/Ze/4KIfF5EvltEftEHuNXfDfxcEYn+/38I+P3AdPaeQUR+pYj8Vb++3ysizz7IPIrI9xOR7/DXPReR3y0iT/x33wH8ncBv8nv8PcA3Af+B//+X+9/9WJ/fVyLy586RMUfRfo2IfCdwAL7va5fwo4D/SlX/DICqvlDVf1NVb/31v8Pn8D/1Z/VHROSbz97/b/PfvRCRvyQi/+DZ77Yi8q+JyKdE5FpE/pg/i//C/+SV38e3O5r4nSLyG0TkBfC/E5HHIvI7/bl/SkR+dXOERCT6ez8Xke8SkX/qHNV6v/sWkX9URP6C38dfE5FfenatP1lEPisiv1xE3vE18nNE5GeIyF/2+/tVH+SZPoyH8b1uqOrD18PXw9dX8QV8Evip/v3HMWTkX/L/K/CfAs+ALYZQvQP8GCACv8BfPwA98CngfwV0wP8UmIF/2d/rJwOf9e8j8OeA3wBcYI7W3+G/+4XAH3vtGv914A/6dVwB/wHwa/13fy/wNub0XQD/ll/3t3yJ+/3DwD8G/CfAT/ef/dfAtwOfBX6y/+yfBf448I1+f78F+D3+u0/4Z6Qv8RnfAvw0f92HMOfiX3/9Gt7vGfj/Pwa8B/wMLFD8af7/D529/tPAtwEJ6F77/J8AHIF/EfjxwPDa738HcAv8RL/G39jm3OfwM8A/6u/9I4DnwLf57/9P/vkf8+f44/w9vmhO/Flm4J/299oCvxP49/05fgJDA3+x//0/Afy3PudPgf/s/D3f776Bnwl8P0CAn4Q5Vj/ibM1l4H/jf/uPYyjjv+Wf/23ACfi+f6P34cPXw9fX++tv+AU8fD18/a3+5Yf3HfAKc4D+z8DWf6fATzn72/8L7lyd/ewv+cH1E4HvBuTsd/8l7+9AfbsfZF/kgPCaA+UH4x74fmc/+3bgu/z73w78urPf/QA+mAP1PwN+D/CtwF/23507UH8B+LvOXvdRzCFM7+csfIU
5/jnAn3n9Gl57BucO1K8Aftdr7/EfA7/g7PX/+6/wmT8dczRf+fP99UD03/0O4N8++9tLoGAO9M8F/uhr7/VbgP8t5swdgR/2Pp/3RXPiz/LTZ/+PwAj8oLOf/VKMDwbwHcAvPfvdT+WLHaivdN9/APhnztbc8ey+r/z9fszZ3/8p4Of8jd6HD18PX1/vr+9NnIyH8TD+Ro6fo6r/2Zf43WfOvv9m4BeIyD999rMe+AbsYPqcqp53+P7Ul3jPjwOfUtX8Aa7tQ8AO+FMi0n4m2GGMf/af+gCf+fr4fcC/hiE7v+t9fv/NwO8XkXr2swJ8+PyPROQnYGlPsHv6NhF5C/g3MCToCnM8Xn7A62qf/Q+IyM86+1kH/L/P/r88FxG5O/v5D1LVT6vqHwL+kKfH/k7g38Wc3d/y+utV9c5TbN/gn/1jWvrUR8Lm6E0MLfyr34N7OV8/b7IilW18CkOz8M8///vz79/3ZyLy0zHn7gdg87wD/vzZn7ynqsW/P/q/b5/9/og5kA/jYfwPajw4UA/jYfz3P84dos8Av0ZVf83rfyRGXP6YiMiZE/VNvP9h+xngm0QkvY8Tpa/9/zl2yH2bGkfr9fF5zCFr45u+9K2cfYjqQUT+EPC/wFJA73eNv0hVv/P1X4jIJ87e54/yxQfwr8Xu44eq6nsi8nOA3/TlLud9Pvt3qeo//kFeo1YA8P5/pFqB/9y5Vz/47FfLnIlVXT7DEMTPAH9EVX/a6+/lztgJm68/9xXu4f1+/hxD8b4ZS9WBPa/2XD+Ppe++6Brf7/1EZAD+Pawg4N9X1VlE/gDmYD+Mh/Ewvsx4IJE/jIfx9R2/DfgnROTHiI0LEfmZYuXx/xXGN/llIpJE5O8HfvSXeJ//Gjssf52/x0ZEfrz/7m3gG0Wkh8UB+G3Ab3BkBxH5mIj8Pf73vxf4hSLyg0Rkh6ERH3T8KuAnqeon3+d3vxn4NY1cLSIfEpGf/QHf9wpPi4rIx4B/4Sv8/dvcJ4L/P4CfJSJ/jxOrN06I/sYv8fp7Q0R+toj8PBF56s/pR2Np1j9+9mc/Q0T+Dp/nfwn4E6r6GeD/BfwAEfn5ItL5148SkR/oz+K3A79eRL7Br+3b3ZF5F6h8MaF9GY4E/V5sXq98bv85v1/8d/+MP98nWCrzy40e41+9C2RHo/7uDzJHD+Nh/A99PDhQD+NhfB2Hqv5/MCLub8JSUn8F47mgqhPw9/v/X2Jcmt/3Jd6nAD8LI1t/GuMe/Vz/9XdgRPYviMhz/9mv8M/64yJyg5GLv9Xf6w9hJPPv8L/5ju/B/Xy3qn4pzanfiBHX/xMRucWcjx/zAd/6X8TI19fAf8iXmIez8WuBXy1WcffPuyPzszEH710MFfoX+OA27yX2nP474AZzUP4Pqvq7z/7m38KczRfAjwT+EQC1Sr2/G/h5GCL1BeBfxRwVgH8eS5H9SX/tvwoEVT0Avwb4Tr+PH/slru2fxjhtfw34Y34dv91/99swcv//F/gzwH+EOeXli99mudZfhjleL4F/GHtmD+NhPIyvMOQ+3eJhPIyH8TAexlcaIvI7MEL/F+l9/c00HFH6zar6zV/xjx/Gw3gY36PxgEA9jIfxMB7G95IhpjH1MzwF/DEMIfv9f6Ov62E8jO+N48GBehgP42E8jO89Q7D050sshfcXMA2nh/EwHsbXeHxVKTwR+XsxnkME/q+q+uu+Vhf2MB7Gw3gYD+NhPIyH8Tfr+Ot2oMRaOPxlTOH3sxgh8h9S1f/2y77wYTyMh/EwHsbDeBgP42/x8dWk8H408FdU9a959dC/jVW9PIyH8TAexsN4GA/jYXyvHl+NkObHuK9o+1m+QolyvLjQ9OwZUiEdFBDmp5UYKyDUKmgRKELIECYIWSmDULZAVJOAE9j2E7s4EVAqwlQT+7mnThGpIAXCjH1fFQ1C3oE2ebj2r78fYp8XR/tMzpE5EWq0F4Vi10OFMCui+qXl74JQk1ATaLTPlgpxgjC
bOPP4OBBniCdF6np/ALUL9hoFVP1fvyQF6msfLDC+IVxsR7ZhJkrlUHrupsHmtdp1S/sqEGdFyvq54+NAKHat/dXE4+7IIJmAfbgiiF/G8/mKqcTl9gWoKtQpEkYIxa5T/XqXzzmbfykKQcgfrjwb9vSSiVQUQRFmjbycdkw5+vWLPdts14/6XGDPWarNlUahfKjypD8QpSLAsXbcjhu0CDILUn09tGupINmvu6op8gAE0CDnN4kU7j/78/UElCHYMw/tPiFOStkIpYd0hDDZ+tEo1GhvEGdFBaY3lDe3e/qQEZRXecd+7CEHpIAmIFYkKJoDYbTXa7Qvm5iz62rXVuy+w2TXUztbn4jtlzDp8nyX8Kqy/EyjIG/NvNHtSVIYNfH8dEmttm+pYs88AMGvoQphsvlt79v2wzLvBWoH24uRXZzopBClUlUYtSNr4JB78hztc3Rdx8ua9v3R9nwb01N4dmFrK6C8zDuOUw8FpIhdq782uN1Y1pX6c27v9Qze2t2ykQnxhz1p4vl0yfzaGgWzJ8v1vP5+7W2DMO+EurEfyCzLhKfDaivujbYe23vJ2f9F0LM/XZ5n+5MAtRPKxp6RTOvnhRn7eecXq7Je572NLjZPvj5CdnvYruH9bOKXk+ZUu6cy2P7QTte/f/32VYijvaR7MjHlhE7B91izy/an41PhycWebZiIogQqx9rzctpRqi1wLQJZFpsiCjVC7YGuMnSZIErRQC4B9YWravNwbrNlErqDnq0ffe3Sba5rsjNNoxImu5+8ge2jE30oZI3spx5Odl9Ny79s7ZlIhnTytWHHJ/kiwKPCRTeyCTOdFBLVLKkoquJ2FUbteDVvmUu0vYus81zt/aPv2byBzSPblwAvxx11iuteK+u8mf2FMkDdnhlngBK++Iw9+3XtA+q2oTbb2WyQrmd67SBeZjZxJord0T73zGOya2/nf/4Ae+98yPv8XJXbw+efq+qH3ucVX5UD9X7b4YsuS0R+CfBLANKTp3yfX/jPEUd447+dmS8C7/yPR54+3gNwe9hwuh6I14ntFwKPPlXprzP7j3bcfF/IO6V2wJOJH/f9/xo/4tGn2YWRWROfPL3Bn3j3E3zuk28S94F0F9i+DcN1JY1K3gjv/WCh9ra5FsekCpoU7ZTt5yJP/kqhv61+gPrtBGG+jOZUXGduvk9PPMHunZmQdTXWrxnb0gWmJ4njG4G8tYMqjnD5+cLw0sSjP/3Teq4+CY+/a14dMn+v0xsdon6olWYo/Z8KYbLrFFVUzFn79C8q/H3f+v/jb7+wuflvjt/IH/zkD+H6eoceEukm0t0K8QjDK2X3vJDuCqHYe3zqp/ds3hNqB5/4KZ/kZ374z/Px7j16PxEmjUTfzb/r7R/HJ6+fMc6J4kYl58j46Use/ZXAcF0XZywU6G7L4sxqNEPS38zMu8T4y17wj33iO/l+/Tv+TCOFwBfyY/7tL/xo/vLztzjue+opEa8jm+eB7gbSUYmTErI5BPFUCXNlepJIv+Rtft43/kmeJevS8ReOH+MPfPKHcv1qR3i3p7sN1KRoZxsu3Qnbd5TNdSWelDhVVMyRLYMszlDIEE+VONX7h4r6ZhW4+4aeMtg8qpjDtHs38963dey/qfDszwUuP5eJYyXvItOVGY/d2zO1C3zu58/8Yz/kO/nWzefpJPMHX/xw/sgnv4XxxZb0KpKfZS7ePLDtZ15eX5D+6oZ4EvJWKTs1pygpGv0CK0gW0j7QvxIuPq9s380c30xLYHHxTqW7sXWpUdDkjkrxPROE8XHkzf/lJ/m5H/mTfCS94jPzG/zmv/YTeXGzY973hNtEGIV8VQhXMxKg3HZsPp/or22eyxbyDvLWDFscheEV3H6fwo/9kX+ZH/7403xD94oncc+p9nxqepPn8yV/+uXH+a5332C6HpBToLsJ9DdC2kN3p6RRPUCphFmXg+Uz/3Dmn/jb/ws+3r3gKh75fz7/UXznd31f5kNHeq+jXFR
kMsM7PA9cfF5JJyXMSsj2b3vOn/lHMv/k3/5H+JbhC1yEkYjyufyU3/nZb+e73nmD+dAh+0h3HQhFuPoupTtU4qzLNYW5usNntqPGwMtv7Xn1bRVNyua7E2Vja/pDf6ayfWc0B96dIwLUGFaHSFgcKgsI1rXaDnIVd36LUgfh7qOR6x+g1KvM9lM9pVdCFq4+pbz4IQofGQmhUkqgzmFxDCkCqSKnyMWn4hII9HfK8Kp4ELLe33IqyFkQ0q6zbR3/WxXh+FbH7TcFDh+t5kR5ML3eG4RZuPy0kLfwbT/nL/KXnr/F9ace010HLr4bLr5QSUezUZ/8n8Av/jF/lB+8/Swbmekk8xfHb+Df/eyP5OVhy2nsmG4GuueJzXOhu7Pg6Pgh4fCJmScfueVHfPizpFA4lo7PHx5znDuKCrfHDYfbAR2jBQtF2H62480/nxeb3QLd5oyXIUCA0gdeff/I/uOF3ecij76r8vyHCX/nT/2zfPPmBV+YHvGHP/stHP6q3Ve3N+fh+ttmZAoMzyNP/1Jl9/aEzBWi8PaP3LL7qe/wEz7yV/nbtp/nY91LrsKRiFIQTtpx0o6igc/Mb/Afv/ttfOrlU/aHgTJGOFpnpzAFNu8Edp9XhtvKuz8s8CN/yl/k25/8NWaN/J5P/o94/rnHyBiIh8DwUhheKWHG9k2Gm28O3P2gEYnrecjLnovPBK4+UxleZXeiPPDNleNHBmoS8iBMj4R84Y5YZ2uzu4XhhXL7CeGjP+5z/LBnn+PN7o4hzPyJl9+HP/1XvxnuEuEY6O6E4SV0t0p3vL+Xpei9c1tb8LGcrbqe5UX5jj/6q79ka6uvxoH6LPfbBHwjJhp3b6jqbwV+K8Dw8Y9rdA8UoPRCTIU+FhQMiVpC3RYFBFAYXgjDe8J8BcfLwBAyQ5j9sC08SiceDSe+e5upuaPMSr4Q4iRosGir9uYoaW2WBwQ1rzcomjwyGwQpshyGAKXzyGEIlN4NUR/QZEZR2uF55nTVZC+2KNKQhRbl1O4seyqgSShB7kUttRPi6M6TGyBtT0xBJdyLtGsX2GxOvNXd8qF0w4VMvNvdcLUZ2R97chV0HyyC8WijJqEOAS32ubVXahRzFkMhUqkECpWigVkTpssHyd8k10Apdj85h9V4ij1Dmu3dhNWBCvbzvInUQehCZRNmNjKzkUJPpSA8Cid2aWboZqYuUnOg9oEyQBygFpvA9n4Q0CTM28Bbmz0f6a55Eg4EqbzoLnm8PXE49Uy7hGRH0wSLdKRFP0JISm2RZusY16Y+Qh0EQliexbKe/P5KbxF6HO3/UpXam9HXbWG+iORd8GfvqEASah+ovRDiGnZ3FLZxtv0RbA3TVfqUGVKm6zN5WFEYmYVQQQtIO7SqoS1xFDv0sqLJHLwwO1rraxpsrdVO7O/O0LcyiEV+VCJqzytlYlRyVAtGCpCUmCohVkofbV11QjcqHO39JNu/hnwBEYaY2UjmIoxsZIYAl/HEqXZcdiPDMDMPCa1C7c8i1bMDuSZZ5lxF6Ad7z10YuZCJizgRUyWnag50VHNAxKLbMtiiiMEdyeiHYYXUWSAxa+KkNgdFhRgMDSS06MxtiqM9YIZaqtq68ci4OTfdHtJNoGz8ZwlqMBtZhogG7gV+yz3qGYoqK8Jhz3BF/dRR1NDsSBA0VojmbNfe0JDamZ3sUiEE2wMS3f60CY7qcwLxqIZUFKi9/VGIYbGJzZadO09tLa0bap2LOCndjdJvA7VTt8MsNhqxdRxmRS+EqzSyGyZe9UrZwHwh5G3b0CDDzGU8Lc7TRmauwpHLfuTmNFBLgGyIC8rieNbBUI63Lu/4pu0Lopj9u0wTL6Ydr8YtU46MXUeu683UTil9ACqyIKWrA14HQcWuf3ihTI/Cen8JLuPIVTxxFweGVLjbKGU0B0JlnfualNILeRMJ0QLS2kEKlV2ceBIPXIUjGzFbXfT+Ub+RmU2cLWMwBzhF0l1zoBwhdxRVIzzpjzx
Ld8waebQ58d720oLL7FmWzg7uUgUVW1MSFQmKuO+vQRf0ufbB9p1CKIo4Ci8VuqPvtZOt/9oZopuOFtggZieepgNP056NzLwx7EmbmbkIJSgQ3b4IGiFOYoF2Z85Tzfefy72lebZuQ34/qGodX40D9SeB7y8i3wfrw/TzMBXbDzY8IkqpkoIdFjHYAQFYlOXOTE3mSW6uK4cPBY4fF4JDg1EUyHb4xpnQVUpXqRsh74R4MouTt4IO1R7aghj5wRuBpNTOnYfQvDhZD9UOPxgDtYdSWB6suPNhUeKawqidbaDhxiIzS1u0Q4n1vZMtQiks6S4V88TD7J6yiKeEZDGimrgHb9deGLqZq3jiQiY3GoUumjFEdE0ptWuM9tmh3a8jMhqUFAqFwKSRooGCuAPln7dA2eIwMGgNy7MtnUO6Yv/WpEj1ze5OSeyCOSxAUWEiEtrBhFBYP+M8bNVoz6pGM/CV5sTaPJUNXHYjOxnp3IhswrzORcCc3yxLdGjvuz6Tc3TJIsmzufMof5lPh65buq/2dhikky6QdE1C2ShxWygbc0ZEw4IO2OEllE4QUU6141Q7QrBUlrZ0igMRpYYl8Jh6pZTViWroyz3nTle43ZA1S5csqRi/jjYPltq7n6IoA2ziTCFw0o5JozvQgjp8jq8FCUoIisRqjk7EESdFdE0BSW6OptKHTCf2FVFagrihnsuItl81iTufFuSEYmuspfdqEvrO3q/6Wrb15O/TUo3BAqvaua0ISp2cSpAhZHvvYbA91fbCSQO3dWvPQoEcDDGRdS0158UcALFHU3yt+NyGWelu16DK9ordlyZZM2ki1gneEVz152wOEasT1caSQ/dr0dXp1KSEVM3u9eYQ5U1AOyXGiogSJKCiC7K1vq+jtqP682RJQy/7oa4L59yxe/3QauiTOtqZThBPtpdbetbSOW77yvqzbZzoY4FUPbAyu7mkUJOtm4LQAcUdqyBKrmHdU2e22Na5krrMVX/icToQUcLZGjzk3nzuUAn+mkrwdWjXLuIBVVwD1uLOtFTo9srwMpzZHiWFyhBmulCIwc4rdQcXBUmKFrPRZXCnVYKl4jvoYmGQzCZMiw2tHvg29KntpyBqtJkcLOjylF08yUIpacHlRRy5CCOn2rFNMyFVSgiL414jlj6LNpl1gBAtddjShznpYgftzLF7qgJEO+vjZBmFPitlEsoApffrmnQ5RzcxLwH3RRjZxplhyNQSjUmgYsG8erBwtPN52cuz2wltKNjZutQ1CHqf5Pm98dftQKlqFpF/CviPMRfkt6vqf/PX+34P42E8jIfxMB7Gw3gYf6uMrwaBQlX/I6zX0gcb96IyixxjqMRg3nCKhZDqWcTuUHWyiKR/lZl3PajciwYAI8yFanl7TyW0tEHN7TM9DA8WdSiKiEGOFsWxeNQLJP56naKsMHpL8Wnzvtv/1VGIZGjT8GJGckX7wLxLxi1pkWK4nyKBFd0AgzfTqTjyFJF4hhIEVlTGrzUGi5QKhuaMtaPUsCJEcU1LNsLe+n9xGN9Qk5bCa5HMrM51khURaBFGCPYvwOzP7h7a5VFyi2qWtFiLYFQ4ac+pdsRQaaDIrImpxoW4KQK0tdGtEb5FpEKNumRRqgZm4hJ53ZYtuc2F2twtRORzFKqBXbL+XGojyK7zBWskvKyTupKS06ikU6UMYUm5EGzOWqpQz0mWr6EHs0YmjXSayBpXFE4N7Sk1rMhHe/2CoDnKFtb3l9L+aL3Phg62579w7c7H2T4onTCEwqzRORU9RYVaW1rqNUTvfH36vgnZrksclWjFAPgzKwRDPEWY1NbwqXZMNdnnnBGbX9+zCyF8QUGFocsLkhroGGu6j2ieI3WN4B6NG1nbohNDj6I/L7tOXRDSdULXt23rQpfXn60fASvIUE/tWcqiDmfIp9oLFvRHz7739zLuYntO56lsn88ga0q9PRdfY9L21LLGzxBeRzyX53eGfrYPlGrIYTqeFSPg9kn
ub5R7aXC/v2Xd+TwsROHsSLWTiM/RQlvP9+1lELUCo6SW1u2hTu2RKbNGZk0LmlkJbgfC+r5uU0JcrzfGumQ62jOPUhlLYsyJXOw9VMXtIAsCrf5/S7Pq2TpdYbjuUOluLVPS5qWqUFUoGsyeBk/ZRSwd384qR300Clr938i96y2+ISbikkVoGYSiYckgEBQiltEozcTIvQIbMP7rdP4gz9eEz2N7Xvba9qD9bGj2qWVflrPVEbzomfTS+EqGrrWsS9tH5/y5NpbzPxa089Rib6hWyFBnQZZ15QiuF0kY7eLszYqvR86f1/uPr8qB+h4PsYUQwjqRfSpskx2XfSrErpA7y2fnrXGAmuFNx2LkQLXUnUGTkeBAW5Jq9yv4JLEYGilYdUt0XG7ZxO5MtQo/zv7eq7o0NAjWN3lbIO1vXzuA7VYtTRFHJV0fkdOMbntq3JFjXA3p+XmlnBEvZeE6zBfJUzzOq1JdYOwGf7fXAwtka19x2SgSdHEO1uq+9XWtcsocE2Ub7bmYA+IOFIFeC5swkUIhiNJFW7wpVnIJjEOlJoN310qmZQmY0Y/NCNtkFjccURq3xtJuexnZxJkuFWKq1Oop2i6cVZy9Nv9+kHShOEfFdkcnmSFm4xK5k83sfLezyj5ZHoE40ffsuTqv5oueHes6kGI8v7RfieY1RefIrYdsu3ZLO/lm9nVTq3AsHRUz2n3IKwdq4Rb4gVADki0l1uZEFLRiR5O0azLCde+E65DVApO7ugYEZ3MZihmzMgSyE+LLFrIG9nXgKpzYyMQQiwUuwdZNM5AG3bNwDpfKz1Gtiqo58G19VAhSzWn3ddBLYfCK0iT2ORIr2spz2j6sLDyldnbXaGmOLtTFCetjIUj1NO4Zyb4to7D4LfcdTZ+XXAPXZUvnnKpeypImlyVAO1sPcu5I+by09eJ7XVRJY2V4YRvTeGm2/9tXc56WYETc+TrnFjVbtBg3P7M9nRVWr9ZSGKNXzM5CaOl2lj/xf81Z10YiP7MZUq1qNE7V3j3et0Ovj7aflrk5n4d2r1kttfXKHmLesgR0bV0F5221IOn8IS3rry0LFe7KhpuyYQqRizDSSaEPxdbmedB0FghptDTmJlrK1t5rTf+JOEdSbV5UVsfh/PPFbfXCCHE+qxToX01Az/4j0ZzGEhir2e2WYlvOr3adrcqzzd/5mVXdAUOcchEowKl2HHSwIEQjUZRCIInZ7DQUchXKbNXJVKsW1sP9ebQ0YE8SBypalS1n69LoX3ZuVnFKSrx/rT7fwnqGtfcI2ewSFeLsRVIlLOs8FOOD5bNJDlLNgYq2r2tzppOvh2avz32hr6R/uczrl/+7r68DBSwRr9pk77qZR92JrIGbsCE0j7vThXwJlgfN22g/E2Wu0SP0RHTCcVYnM2dB5mAl6XPjeChhNNJ3MyjAyplJYoZktkOv8ZqWkuzFaKg7XEYGjHNdFu/rJHKohjzFCBvQ5lRUXXlQDQUpXp2zPC9FJXL4UECjVQBKNa5E9FLZONp1ikepIUCpwqH2vJOvqARe5AtOOVHmaBVsXrLcqsNCsSqaUGzRNmMAMGvguuw41MEQqGpFsXOyDTHVxJTtC1jQKJvXxh3R1RFRJ9N7lNYMeRRlP3Xc1g23dQvBNsVGihu8ukbCcDbfLBwVq3KyDWYGR0iOxBWiOWVhdpTSD8+kILI6PEdfL/5+8hrRcCEXNgehNrsti7O9kg9Xgn+rDGvOVeNr2d+1ihC/r2wHXZ4SN3nLq7Kjk7Lk9K3c2oiSXXTDXsWeo0sZSI8bXou2xGUE0gmGl0p/V61KqKodgO1eVRfyrzapBHsb5kuYHptj+nLc8Xy+MiJqmO2ZqyxEdXEjb0ifwBy8zN3mtjsq6WDVkrULzg2zPRtFTcKgcU4UInU5xBZU5LVDJM6tbFmXwKNJiIxzcqen0Esx9CgHdA5GzlWMhODOsUkZ6FLGjdq1x1kZTx3vTFfMGrmMJ67CiVmT8cByNA7
UaLakpjPkwZ95k8qQ4nsOQyLToXJxrISSmK6E+bJ5S+ZUtEOmIdt1EHIX7qF9zb4utuKs6si4VxaEaRDiZMTaOgVktmuy9z9bnJgTrr7u2jMGlgKA0oelerhVy7WqxQVhcodhRaTPHHXc8W2VnmLcIKvMipzeFGZHs22/2mHenLWKIbEsUgSG+sfZ9mKdIi/nHZ8PT9mFiat4XO5tnqPJ3ozBbcfqbErx9/cgfXYk9DpvV+TGt7uW9XQWdVvktq859ecwmkb7Xbo5IUWZLrdL9VrWaDarIUSzFX6E0dalSWTIcq61ilPJguQVqZ81GYdSOw514KZuOdSeqoHByYcVC1pDrEj04hRl4VPZ+WRE/oIFTYfaM9VILRGmQBxtLYXJ7V7bh7nNy1lcm1fOcJwUyc0+rgUEiDBdWtFEOllVdecB3pKVmeKC0rV7HWtiypGcrdBolaXwf7MuUhsh68KnuidXs+zRter9fRH5s/F1daCWRblAvbDrJq66E1NNFsUJHhmeRdJJOD6FMgzMV0Cfyb6wx9oRpHIoA3fzYBsi+4M9GhkunZRa7EFrkfsOVGkLxrQ4umMl7cu9ibOUW7TKqmwHdd5C3nhF01wJuS4LYiV5W5XE3fe7NEK4RyL9bSGOxb3qtBDOu329V1U3X/bcfbNStgXJRohPd0J3sPfp7iCOZVmhpQamnHg+X1qKpXZ87vSE67st9a4jXUfbpDulbM153L5XSaeCzJUoQrwb6PZWtfj8dMln0jNSKOQamX3B3nYbruOOtw9X3B4GpkOPKpwGQ6TiIZCOSnewhdgCkHSsxLE6cd03zbEgWdkfe74wPmYjmat45Fm841E42XpBOE4d85QoJzN46WhzH4+WQmhl4mH2CK+aAbotWwqBTrJB2CUyz3bQSV71Tro76G9traRDdcNUlw12/lw0CTWGe+if6GoQDMbvOL3hjqUTIONoVaahEXs94sIPfMKKougx8q7P/zxEbvOG6dCT7kyGon5USV6pp647ZBo4oEdzIFvKLsxqlaDtvR39ipMhGPM2EGdleFncEWyVhMGQvgDzlTA+VdJReH684Lu3j4lSeZr23I0D87FDjpF4MOeh7IQym0GXKRAPskbiCmmfSTcnEKFc9EyPO8IcF5SkIG74N+ZYlw2H3DOOCR29wuYM2YqTrppmeHFEZ8/qNPa8O10xe+XCPver3MCtMO5AUzXoXq0KqTt4xY961OsGd77r+cLpEfs8cJFGLr2k+HrcUI6ReBfor825K48MRZeiXuW4HnhhsuAKEUIU4mGmbhNoMmf+wJJmWNYh5kCVIXF8GpkeyYJ2rek5FocvHZXNq0q3L1AqOkTGR0Z0xu9TpmBV0UGWajxDD80x12qOCUWWqrKG0E2PrbK2uxP6G3PMpSphrMvhtBDElyrH4JQHX49FCbl6VkDRZL/vb7KlYL3qubouVHttO0znakUMtArTg2kMpoPvjTHwYroAYBtnHscNUSr73JPHZGvgTuju7LVxtM8JM0xT4tW05Z3pEWNN3JWB69loAHONlsKb3bl0wx9PQncoRsLOdUWXfRR1h/luhhAou7RqaGVhLIlDGRhrYpyT2dI7IZ3OnnM1hDKNlXQohMkI9CF35sirFXicSsdtsf1znXeMNVFVuEwjg2QOued07ClzQLMYKj8oiFUCn6P7VYXrsuX5fMWL445y09G9iouEQTraPbaCgjhHc7zP0NuQm+NutjBMZg/FNaFUEqcnwnzlWnknGF4ENteF7tbuU6MQZqOlNMeWCndl4DR2lFMyAGUMdl4ejP6TTmeSN7MST2V1ls71FCtIqWfO/ZenkX99HagMZePl08kqqFKodFLJ6AKNcsZfUoH5Ag6fmDn0Vio8bM2DLu5EFU28zDuuxw16iiZMdrLS4P7Ocs15CMRjQBNLFdIiApbNgbHNV4mH2SZOzJKUTSJvvDzWUaD8pnB6GhiuC/GYCWM2g3jvhoXDhx/x/IcI8yM7fIbnwuP
vUrpbCHMxtK0X5gth81wJc7FIOAamx1A/eiJEpZwS2kdUAgS7VhTCqdhrAMmJ8dTxueMTnsdLxpL47v1jppcb+vciwwthegz5zQyihLEzAz4WwjEjqvQvt3R7Qxvevrs0bprUJVqpKrzsdlykibevrxjvBuQYEYVSzDHqfOF2t4UwV8rGdmK3z0hV6q45Fko8ZcII837gU4dnHtmPPO32PIt7nsQD+9ybBtS+QybTMkp30N0ow61vxtwcqGpOjZrj9W5+xG3ZsAkzp9rx/LBjuhlIr6Jt5qM7znfK5mUhjpU4FmSqhCkjsznTmgLaJ4+mxcFx7qcl/EDUKMwXcHojoJ1t4M27yvaFHQBNoqCtv7wLTJf2PqGYIxZvO94+XJJCYawd740XcJPoXwr9DZyqVaK2ZxLVD5fckMqGcnj571E9HWwSC+mopH2xQOCpObzbd6uvA6jJqiN1Y8FA8fJ2DsKLux2f3z4G4NR3XN9uCa+SHaTXNvdlG8h9cmfJdGxq7yhWDEDPRpW4n32O0xJgzRp5L186gnrJ56fHvJgueHHcMd8OhH20qqHc0JzVKLe9HZIu3Ilx3/PZwxNuug0Ar6Yt4SaR7oThhTB+SCDZARZH8Sh1Fbe19WXrKtz2fGH/iOs0c9GNVgEGvLzdEW4T/cvA5j0rMR+fwnxl6yWdIORq+mGnstoL15oRVfYf33H4cKC/Ubo7d2YwIx7GbChSFymbwP4bhNNbdSlrt/Xk3BsV2ycHIX8ucvGOpZPzNnB8yypgh5cu4Hi0tR8nS5cV/8xSjNemORjq4SiiCua0RTh8Y4YA8TZy8d2B9CmryItzJYwehOYK0a67DpFyKeRNII2VcFCzPVPxFG4lXw3kbVx0ehoKqMHWnyYveU+rMzzmhMxm89Md9PtKt8+2N08d7x4vOZVEHwq3/YY+ZF4ctnDX2dq8cSfg4EK2AdJd4HAz8N2bR+zSxN08cDcN3I29cepqYDz1MEYTIvX5725geG9e7IGl8O4f0BGI+5n9Jy7ZvxUXxC3MsC89d2VgXwaOY7/snXRUq+BzR7ZVrqY72z/UDqmWYh5r4lXZcVc2vMgX7PPATR6YHLm4iBNDzLx7uCDvu9UB7A2Jqn1lmjv6a6G/Mcd3nwc+c3rGO+MVL64v6N+LbN4TNu+Z3WwObXdbTP5kjDCfkWDFnL54hG5fSXez2+o1OC29cPiIML5R0U1B5sD4NDC/nbj4grB5byZMhZBhqpG7MrAJhtC/nHbMxw5GC4zjIZD2ph3V7ZV+v2r7hbkSTgUpdVl3C9epKFJ9X1aQcoa2vM/4+qbw1JRUS13LRXMNHEvHbR44zJ2lKTiDf70kfPvGkaeXB08Z2eapiDtQ9h6n2TZSmMWdqBVRAEur1Aaj45DtAju75lJeVWxXPaZA3llE20rB804Znwnzu4HhhRJOGaZGffbXDj37jwTyJ45cXp3Y3244sWH7PNBf26GlvXGO8tYOXjl62i85X6kE6mRpEM50iyz9psSjbyAAEeZTz7unS6JU5hp5fndBuom2GW6V8Q1gKBa9i2/cqRAOI6hxU7q9Oa+vri1y61NZDmtV4W4aiKFyOvSQnQgdQJKVrLfoMp0MXdIUlo1SUyBfuJbWCTs8a0UOO54fLwErUX2Vdxz6gdI74fNoAmkNibNNoXR35vQYUmCfLcGQg2PpeHt+xIv5gj5kppq4vt0RXyU2L5wzdM5XyEraZ8LJD/XjhJSKbnrykw3j0w4wJzvdzoRTRqI52QuqnytCYHoinD5c0KSkO3Pq+7vGlZBF+DPMlXkbyReQDrJs7u5u4OawoXMJgxfHHd1NYHil9DemPj6X+/wPk0ZgQSDCbKinBiN/a3JEpKm4BwsK5os1JR3GbKnVGuGq89Jfc8TiaM7mYd/z3uWOJIWpJubrge17ge7OxFkbYlWG6NxFIR2Uw0eF+UqZHgnTo8h0tWX7vCMdC5qc8+fI023dMNaO5/Ml75yuuJk33B42hH0
k3Ynfo6cPHMVbkEJpXAnMudgnXhx3TCUyhMzNuKG7FfpXwvBSkSmY4ZxtbmovGGgRzCbUNeCKJ+H6uOGYOvZzTxcLqsK47+n3wnANm5fKdMm9tGWYzRGLx0w4ZsJpgtkCCkph+qY3uflE5PSmEk8257R04lQIRz+U+4TUgTooepmRqMTOJAeWY1pN22faRsKYiGNgW/AgEBofJE6gR6G/rZTBggG2QBZKjpaCmZ1fl7H5ESgRtKuERzOpK0xDx3Hu2b5jtley28Nc7QByvbR8kchb49NBIJ7c9pzmxZnUJ5tFG+l1svlCvm8Oo5qcwJijPzuxtM/R5llFSIeel6ctY7EMx6kkUqjc7jeEoywoRXenCy1Cg9DfBMZXievNjs90TziMPaexYx4T/Wam7zNlXNN/4I7yQYmHCY1hQS+WwFp1EZDPTwbe+0GJ+VJ59F2y0FWmEjnUnmPpmOfIcMLnCSPQV+MpheKilYcJppnWfMC4SpGX+YKX844X8wW388Ah98zVUl83MZNC5dXdDjkGU+PvnGOXKhKhbiO1i8t593Lacj1veO90wXzXc/XSnacXhf56Zr5KZiumgmjws1WWlDJe6BNHNdRs9ODUDvKFFlE2ivYVOkW7wiQYaKDR5A1eGro5l8ix9BxiTyeFQ+5RX6stuDLagnoq0FAnC7Lqcm5aAUdFfY1KMbS2EeQk/83kQIkRApcFp8opW3S9zz13p4F86iy3erKIwiJyz4lhFKPqpOOiYWnlcv4Z6mVYDaILc7WN3UAlJ3e2FgQaW8Ru11RT8BSILaraCXnDmjMVy8fPlzBfNCw5I9NsFxgESqVebDi+pWwvJjZdZhoy064y7yJlE4lzRbsK6hynYrCvTBmZDWkYX3bm7Llxjyc7xEx80P6W2aJTmSOMkdvRHJwpJ44HM+zdrTlHKM4XCM75UlvI42SLRsypTAeotx23acMwZCNNesVX8uoUncNSwUHAnCcsEmvQqBSD9TUKNQWmR4nTY0MCLe2gyJiJh8DNaUDEtFDu5oGiwm5RXfVDsxhXqbvzSOZQCGNZUhcEa1cgCq/GLe92l7ycdvTBjGe56dne+MGZYXpkmiV5a0hQd+tO3WlGThOkSHm04e5jA4e3TLNl81K5qEo/mbNR+2gGM1dCMY98eqToxgnkvS56ZgC1WNokTpbKaVwdwFKpx5l0gOOp59rndD/2pL3Q31SGa0s31WqVOup8sLx1XleV5fCW7IKY7iyq2PJsn1c23KuMlbksgUNzekNbD51FkHpI3J0G+mgOVLqODK+gv1G2LzK1E8anibK1fRlPlhIrGws8pCrzpVUflT6xeWkEZMVSMofac1s27PPAi2nHe6cL7uae8djR7S3dosH2azq0lhMrFG/7iMXOxH3g9uTrKU0cps4cqGtlc10Ic0BOYTksSs+iadQFQIIHWJU4CeNkFVhjjqTg/LxjNMf+TulvChoi8WBoVH9r6a14NLQl+J6VcYacISVuvs+Gw0ccTTorColjJRwmW4u1wpzobrakfWI6RTQqpQiSVvRbm2K4mr01RXxZ0KXmQIcJQrRDRoMsra/CKVC3lhaTbMFoO5TsDUxbqE6RbP8lX1i3h/7OjLShIhVyMRuhyZ10VmI8hq7JabaDSg0lnS4tfZSOdp2GXtopXDtPEZ0c0azReK8tvdee/1iQYPZyf+opVehiZS72R/O+J50hOd1R6W4z8WQH5vA4cLoJnC47XvY75ilRR+P9zKJ0XXGEggU5bVxK45zpyq05QzMkRrQL3HzTwOFbJqgwv9szX0TrsoE7QTWCugOd7RzMwflPHviHWWGcbB11yQvM1akLG17MF7wcd4bgzx1Tics8AJzueuJknMWqoCFQ/WxcCz4qYUq8OF0gomajD5HuVhluCt2dZV/0SWdUmP68Ss+fhwpqKQHjKs4GEshccEE7UHVUPBhamd2pi0q+VManwngd6O7C8tazWiq1Cb62Ao73K1ySqks6Vaojo3O
2SmBVZBHqKk7bqHZd8xko8j7j6+5AaVI0twNWOOXEtWyMm5Kj9TSaAvFoaYZ2EMwlcBh7pmwbJtd4z3HqpNJFc0iqhkXd+bws/F7549kE20HeTuDXLlkNgaqdb+hGfHPYszr5VPK55yqQM2XXMz+uhBI4Tp2RTFXWA0ss7xxy468UW1TZjE9/o/TX1m4kZFn7CE1WIh/GAtOabtRakSwcpo4oan2ODskirL3B043Uu1aeWQqhOWGL/ICCTIEyRybBHah1AhsR2yr7fFGr3OOYaRBEvM9bcCRvK8yP7HMk26aVUogjHMd+ef+7ridr4DKOZI20Rx1PQv/KYOPuLlu6da6QwurMJLueF8cdm5jZ535B5OKdISX9jRni+dKuJe+E8VEgHTqLTsYMMVAvtpze3HD4UOD0oYZICumY6O4iMhfKNlH6QPJ0qkYhX/g6mcNCoG2VJHXpNVXt78+qf8QPnzBBmQKn1BFEOY4dw9HSE/3tTGN4qwo1exSZKnVQTwEboiRJAVnKs606Zi0iaEa7FXW0tSeevm7FDfGkxMEj/ENgnBJ3aWAujUOiDLeF/pW1okmHaKR20SUSrB0mZgtoL4wx+H5wNeZoFX6H0nObN7yatryatlyPGw5Th47ReY12eIbR+S6ntVJSg6zcGidPp4NwGu1Gr7stp6ljOEC/V/rrDDos/e9CWR3KVgLdEKgWxOQcqSWQgzL5vohHe87pZGmV+SKQTnDxdjFawFhWtHjOyJyhVjQXePqIu28UyoUFEgvPCOeUHEfbn6Ugc6Z7eWLzXs98aSni0ge0v5/Ku0foDyySMLLYmnYwuw0QJ52fDGlc2reUs0O7rNXIVnwTqZOdWLKUhrf1ZOvw9SqmJpTY0Fcp7bDylHgyx9qEI5XO2/SoyBIMEAwlzztZUHHt1FC5ts5VwZ3EaezMySuVUq2fHVO4T/CvlnqMpwxFGW56un1gvguM3WA20zMbJSXqblrOh0ZpXCpA/f7P23pJMbsuqpQ+cveNwrMP3XB32DBf9Mw7MfFOF5W2+4LGbVvaXxVZHbfG3Zlns92d2elC4Jh7Xo47rqcNJ8/aWN+7QC6GajPaeSQFYhEjZw82Lxaoq/FWJ7g+Wvr7NHXEoxUxdbdlobvUzoWfd3FJzTW7YvN0X97EbxK02tlZq8k63FkniVwCdeOdQ4IhU/OFkHfRzg5YJEWARcrCqjBlqfCtCReOtQsyNMxpFy11d+85tbR69f2Z+XIjfNnfPoyH8TAexsN4GA/jYTyMLxpfVwRKBRiqIVDujY5zcm0WXVAN+2OW8s+Weqsqi3BZ07qojjAFMR4AnaKYeGETF2u9n+6Ld72GSLXy+6xGHva8tYaw6ElJNZ6NVEh7g7y7QzUERI3LsHRDVyVfdRCMxDrFSp0iaZR7Gk6tq3k6Nd5AcTSrWFXRLAQEmRuvxSPi4LkYRwo0BowQLhwPA6krlBy8GsGqCzUI2hAHRx9qJ2jn4lzea68M1nOQpEhwtIk1sxBjtZTkLiGi5DlZxKpYjt7RjBoFOosobG7rgnRYzzGBKE4mF07juhynbGKHfXiDdw8XXrprpMDti8rwclry6GBIDKW1H7A5fnl9QRBlypEYlFwC6SCkvdLfFdf3cvQsGj8oXwTKtrPIOEXK442laT1lVDtrhDtdBmoKhKrMV9E4fQJhMqFU3ZSFr4BlMCzthKdYZEVHNbUUGktEJAWYA7UEK8+dE9vZ+BDxkIG0CliO0XrJqaevA4Y6YJUvtfe0snOjqti9lq33XssWveaLCF2C0aLr2nklzmvtbsIs1jQ6WyVSQ0TjSQmnGWpaUuLWwJaF04Pz08CEKsvWiOXGRVRu54Ft3PFi2nEzbbgZN9ydBuY5LhVj8eSo2sIxcmHSrlX92POqrZ1QhjxFpqDcjgPT2LGbnPB8zBB6lpY1zsdovI2aWBplN8HWkoPpcFWzG3WKpEYkLucl0Cb+G0/O95iLpS4aWl0KaCU/2ZK3Spg8VTj
r0uoozNW4laUs0XC4O7J9ccl8FclbkC3U2Z/zme2UeoYGt73b+pxlv9d6jrDZczyvcFyNZXvPlrJSwuhSFWKEdF57Cd4sWbH7juMq0yJZrYI4GycFR9FbCX3e4iXo6tdsqFft7Tl0h8pYImNJlBLQ4P3hurAg0Ki9tkyRnMp9ocmg1F0lz8ZvnbfWgD6kgKhVfMWj8RJrHxf7EyZbZ/PUhKhs3SxZV8GRDe+b00jkjsZptPfLl7rIv1j/0TUFCcYFXJIn4iiKsJwdywgBYqRc9JTO7ODtvDFe8Tx42m4V/Lw3Wl9Nl0qQAnoy6ZMweQZoqkZuP5qAdSmB1M6i0YnYZ1SX6SrQHbiH7n2RzIbzRtszwtG5MK/Vqi2d3my58aNM4qBabYrReAiL1qEEawfVzvkaWQReW8anife+Pu5V4rXr+gDj65vCC5C2mTwHKwOvMGW7+SFldsPEcdNTs22CBjsjkFJh6LI7Uabxswnel0rDujHMtq4TmGQpxT4fq0Dd/VxpmIsR8xqvqO+Wyh7jRphD1e2hv7YO5MG5MCbE5BbLS8HjPlBI1FStkmVkLbeu2OFUrcR/4Q04hNg2So2m2nwv5YhY6fdhuzoRnYnizWNc/6yuB9h0KeTLiuwyOgev/goMlz3hsIFaLZ11CdNjRYdCcJFE23x+SAB9LGwGJ7YKzGI7vLiqq4p4w+TqPQOtAqK/rUx3sjzXtoHCBHWKlORSFlja83racnvcEA5eLv1KGV5m4u1pOahWUc4KI4QuGmfgmLjpN5QSCMG4Ip1D0/FkGkR24MnCi6tRbO31EemMqyYKaW/PM29tYs2pNsc1b7zUumnTdIIMBT1FGjejDKaSrEBIlbwzzlXZJvJWFt2z2gc0BE+tLnq45pc7n49sJ940JUsHjsFTTZYSJqo7aYHqWmPaDnxdHdzxKjJfOjdOYHwc2V4NRF9LeRsc/lYvJXenF0WLGdOCF2NU5+TNhRD8elpqo23NtgerVYmF2ecrgbhzcsj9ynuaek5Tx3jq7D7nNY299H3zPV76lTeURluDeevFGcmc1lKEsTn7Xk3bjL86j69h8s3Jb0UmqJXU2wFp6RXFrlubwCTNwXAuYMGqTI9Okm68iuobWwRJiToYf0o7lpSNKP7sMOepqT6KIKeJ/lWmexZMIuC8o8Giw9YUyFc7aGk6bwA8qz83WYKads2LMWy8ktdPnCbw2L6dwsJXsp+tKRBKsZ+fZrpXFqzVZARrmaulMtthFV2xP5pTMV+azUkHFw4uq6PWHMBSg8l4zGFJV2oETcZJjCfQOdxTVb/ajNxebYzDVRPTGAlzIJSO2gXiVL2owTXUZnOUjHckhDGQpwhzS22uz94WNIjUtcqyOQshQLCAMsxw93KHxMpQZXkeU7EjObBq6rX1YJpxwtpgWSAGSNG4R2JUl2PpjDTuzlOpsoopi65dP7bZ5H2OcUntMgHIwls0+wh5Suv1rMtsOfPacrG097oX29+34GTx4aqtDynVnMtSzjjGTXl8PactjQfzbnWAqlqBUWy9QktYUpz3BGxbCs+DImPZCBIMdKDW1amr9T5xXL58ku7rjkD1w0ymp6mxzlNi2kS6aGq+w2ZmVCibSNkI884M2a6febY9cBsHTnNyDZYTnVhbiS7v7EOK54mxyL6c6VlIdUPdjFRl6Ta/EJ/nYqS8aYbozVon2xllC3kXyRt7//5O6e6cxN1yqoDWCvNEdzOR9h1lu86B8RNWAxROa9WhRkFSRFWRak6fdla5iJg6bJj8Ngfh9DSSDgNpP9uhCsSDOVDlrFLFKu2MoMnlzPZyZJ4SeYzMF4H5USLeDYTDuEajGWSM5N7aZ2g1fpNWIc+R0Q/vYXCFbGxzTpKoTQU2CaqGTIVsnK0+K8NlcGKzkeaZZtIRGAOli3YohUoOpuUkok4eNhHGeHKuWFWIxj3BSYlSZrQLoANUE5+sJVKAWoTB+ThhLF4mnZz4ufIzJFcIsqCbcap0Bzf
uZ5H8UlDgSIXl2aPpl6XZSJmbRqwPlC7QXCKNJg1QhkjZGKdHl/cwkmw8BMomMoe6aj21e1fbO5qDHSpqTlq4yEaqLEIVg700++YTjIOogkRD0nRTyJ05u+PjwPyotyrQYJyG0nsRxc4I4OD8giqmp6XCbrI5iqNVLiLe+Xxujq3v/96cO2pznlksapvXUgOvxi3v7Xccx548R/LJ5RBGWQi/ZTD+moo1HM3bhgAaZwaxSsC8s3lBoNZgPMssRBchlCkvTkb7d7Edg12YoVtqDmnv53054yq1tiNtj5/ZFkOc6sItW+Sv/UtdPyyOMD1V/9zghRf2PWlFBSkVciaOhXQygrgm0NnaGNGaFvv0ll6RnRCKVVu2A0m8OOecS1MGI/lqwhzJs2bA5zZcgweTW3tdcGe4HbbNWVyQpZyRXJDTiG4GZNv77x19cN6Jpsi8DXZQXpkhyhfGYQujOeqAOU69LEKv7Rqas2vOoiG56aiIOzw1WtFFUSGlwlyEui3MjwQp5j33G3FRS11lblwVvvHhlkBkCU4b+uhFT6owOZc1BHRjcjGN9ybZ0Dtms49has78qrC9SPrI2X2fnVuLOGextdXm3qZHOJXEVOLCfco5Uop5JaUrpGQk+yYZpJ2YncCd7NGDfA9sdPLgoWUjGtofbUG0/WsOi+9vRyfX4ELuF3qcaSyp6iIkXRPGZ+t1EQJWMZ5qvjgrxjlD1FTFwAhHcUVtDWtiRaCic3QF60jiWRurqvlbAIESN/rxLiwtWvIUOU5GlJ0dKYhdsa7oGzOQ0yPlWy5veGPY81684IXseJRMBbiTzEl7hpCNVKaYh65nHqisCqjiqZhlczRI1P8fThnGCR1HkIDkQjwWfxBe0dLb4dOqyIAVovVFoaUSb0akXKBDNW+/kfTcSMbeBD/BDELd9RatlgohkDfCfFUpl2VZkOJVExqtLLlVTsnRDGx//Yj5kaFeYGTAdMKq1eYIwVoUzD5XKiySEjLnpaQ3HQSVwFw6Sn82r072HUNnm+kxDEOm77JV5qlY5NyiehcuEz9MwlTo7jqmK48UVNF5Ni2aO+ukTVBKVEoJHLcdKVSqE1+l+FyflZpiH7eQbWU3uNK5RU7a0ovFS5b3RhbVHEinfun2behGdWE/hbT2YGo9GVsqT8rZZzeD5heyqIr3dYnk9WDCm/EknPYdF3tzBsNUV4XdpbJyNiHHu0DtE7OTeeMIcT+ZIcY+Q6ewEp83he12IgRlniNTc3qDGY12+FmgIMtBQFIqqyo4shq6srXUUNlYBBiyf+4Y7brUNaXuCvFuRA6m+Gelw/a3TZlaN4W4zaYxJRGVuAAcUqwP41gSh7nj5m5LcS0ZGS01F8cmiFfRZ4l8aUENdSXtp6MsEb8hULpcb5kDo5fmp6Np0bR9U4cKAYprmtVu1ZGSwYx/a3G0SIo0RxC8SMK/11btc2YfwKNtCxg0F6vAw55HGaA8MV2lsukJk9mZ+TJYWnV/sBRereZQ1VVZuQnCLsr4YrbKHGbcARTKTr3C1iQ1QnES+VSJszI9CV423mybLAjVOZqgCeqmIo8nQyKzVYgue17WZ7DYxNkqlEUEUjTkrIuIScWbIxVNHHR6UuHxjAQl50A+RsIpLJXUYYbpcq0qDLFSXf/qHOGQ2ZzMuA/k1FGjUmbrVjHeDbQquYYQ562savn7SrcX5r0stqSplZcNFFjbALW5qW4XclmQNd0mQ8OoMOOyLeaYhIu8TtOZAx5E6YLlsTSYI169lVf7GbBoGGnORjsx/5lD7nl12nJ7tNR3LV7VVgx5zV0kdJVySFDxs9YcuIZGxiRL4YsURQ5eANVVI5HfGYG8buz+2rkoGc4dveUGzwj2C4G7oXMxWMZGXGpjp9TLgvSWjdAcDOmfZUHrxeepfdlnnJ3z7dE0Mnm0DEFoKFQIhkBxtkfrWXq9ocRfYXydlchhvh7YvRfor0ekRnQOpi6sDS1wjlMH85UyP4L5WeZDmzu
GkNmliWPqeJyOXISRIJVeC4/TgUfDic8lBa+oWb315imfOU7nRuF8OP+IUs1BKMalKhs7/NKxEuZo17ZbI52Fsd8WhVfM5J0yPDmx20yMc+IwBvIuMV0EwhyX9OB0FeieDKQUiMlSS6dnQnk820GseG+hCKN4O5dKupuQu6M5D7go3lEWHkA62t/Fw0x36NBD4pgGyhSJ+2Ao2m0h3B3heCIdrUxfqlo6KQbjVjQHTkGXdAHkbSJGpfP0qiquG+OKr1NdWhpoXPlk1QVNAVAvI94H93B90U+B62HLdpjMgE3WMyxMLhnR5rqlMaYZnQyBqglkcni/RYhZ6O+U/nYmHGdqn0gndV0a5+kANQU3CoH5MjJdBcYnxgsrvYkOtpYYTfa/9dHr9vasag6EzoRf67SKu6YDhNvI5kVl+4UT4ZiJo1WBpb2SbkfkOJLGSjwFQ6GwuU8nRfYnu+9w9ky8yo+onqpoG46F5+B0HUeyZNH7ivtA2Vr1ZjpaalOOM1IKabzkFAUdxFOQNt9xhDK6ejFWCddfT8j+hI4jkiLxVAmTpRUaT0lSZbcbURXmbWQ6dcxDssPxZNd7cxq4OwyU287SMrNp9Wg0lCGdqgVfcXWOEEv5tP2dN75OoyOLJzF+V1RqthRM1xy+aTbnc1eQvjIVj5ojyOjpCHeQuztPnxyjcxFl4VxQPWoO0ORAyiCUbef2IBEOrHIh84TOGYmRvI2Mb1aGR6PxCZNz1aLZBe07S+OdRrQU4mYwhDRC7cUb6OqClC0VeX5tJQtlXNHQMKdF8NTQL5vT2tnc4XpDi7aWsjpSuCMelcvLk+vCRcrRbHZNrv/Vd+ZEtAh/zOg0IY+vqNuOsu0oQ6C7S6TjiE6Kdon5EuplXsWSowVVtTNUVbLxPIsHokPKDH1m2jpS4YhJq2Y2/S2hnownpUUY50C4SSb54RVn6WA6ecON0TL6FyeomyU7UdMaPMSTMKnvLVmdH2mVtp5FAEx81zmmTWi39oF8YfMHMPVbl5qATZwZQvbel2c9/zqXHAmtPdCZs9gqGAMmNTBueHW3ZR6TtTVpWZmWri1CmQNhH5c50KQWSHqgXAaWMzNMkA7W8qd20aqYX46mk9dF44RdCNOVVeMutof3P2ONv2voj31v62V+lBifWSDRXU6krlCrmITE7MGKO0DbbuZROvI4Ha2lUn+CrqKdwCRWaZwMKQ29Odvq4t0iQugCWoPxZv0MOq++W8Z5r8n3GV/RgRKRjwO/E/iIbSd+q6r+RhF5Bvw7wCeATwL/oKq+/LLvVSC9ivTXkO4mNA3m1dZAzmrlwXNAS0Cw1JX2imwK1/PGGkPOA8fcLZ3qW+H7RjJP+qNxrBR0H9aHeIayrE+RBaU6/ztigK6zSCkE6Dumx4kyKJvnQv9qortNlE1lfGqLZ0nfNWJojOg0waZjflL42KM9l/3IKXd8ej+QN8k0cTZrBDs9EtLRyMipt55/01MlbIqnzzzqncScoltMOfv2ZBoxpUByLZAiCz8oTM6vKkbijPtIjh3MbjTuCt3LE7y8tkixQjpV15wKS++8dkhYmkNM+0lMvC/GSgqVGNTKhM/KSy19Z++rfWC+SByfWaTZ7XFeQLR2I3uc+OEbehMZtz19ly0qmkzEMpzyutBDsGdmAlXI0DM/Gawh68nFSbMdCuYkVOLeUrQiq7YPgvfPsvRxvoxMF4H50gxDvrRS2uod3+cLI4PGUzaI3a+tfzUZPJyTbegq6Nx6RlVzpEZhuCmk53dQK2F+vAq5ujaQyQewdhKvzp2bZjtQxQxh4xK1PlmqQgiFlKAkF0jEjIe0dhxi0aahRME1rIxcnO4m5DQ638qkHnK/oiumHySEU1h6k6VRzema8wpULDpgLhp6rGi2fb3pZ/pkqcaTQtVEOBkP6OZuSzkkwj4a38MRkzIYwdT4awUp3eI82Tpv6IT9oDUpNhFPmI+GIFZ1p2yshj6VikZF+sr2YuRYhdB5yv4UqX7
wtNYQ8WTvKbOlbhr5X6oZ6xax1yRMV7D/2MYKTbLS3Xaku4lwE1ZjHSP5IlK3xjMsp7gorIfiKPOmQ4KAViQIstkwPek4PQtMT2C+ULOVQ4VUka4Sky7cxTIHakgwFNJQGLOQv2DFJXGCMGbibFzPunMrX/FntxawLE2JJ6MeqAqbLnO8mMnHYOnoXiibSDojcts/ivQ99cklp7e2jI/Nxu3eCcT3EjJaoFo7kGQFRTmHVTolWVpdo2lQGeKnDDGz7WduL2cKnfUdhSWwMl6r3YMET/u4GG8dWEr208FaOW1eZPp3j4TbA30KjlYk5p0sKfs6ukMS3Xl2e6fBnchdj0zBBZFN14iOVZU9WSK/1MC2nzl15iiVQdmlichKiWi8w7xdU1fn51eb4zokysbQmMNsgp91jIvjJFUcNhNDe5xPVpJpx9VudZBDtt81WkKcjRoSJ3NKuoNa8DGbWGrAfp63vifu7jd0Xq61BTtdMKdydAAiRbRLHN4MTE8LcZfph7xwm/KcHA1lARwu0sTT7sDTZN0qPrK5pbuYmENHJiG5pXjNNpaTB1a18R+T8fC0wBnlyYpt/OyvdUWpvsT4IAhUBv7XqvqnReQK+FMi8p8CvxD4z1X114nIrwR+JfArvtKbtWg0jJkwdTSS1gJl5rA0KdRgmlHcdXz65ikxVI6T9fv57tMTBslswkyQyl3ZkDXQ9dl6OKW0QPDv240ZmydpcLWPuuuNR5qLechDYnxsBInuTon7ieFmgxRvi3IRGRoKpdUMRSloKeSnW8LVzLabl5y2nFUJNL6LqHMKFpJuXHoR1TlYyqBY5ByP4hL1Sv9qRO6OlgqIVkHVUlEyN1SFpe9WO2CsgajD1ZMSThN6t4euW6oVQvUmlbMQWr+3lppyJ6Ke6b6oCilmhg6m3vg9eRv8wK0EKqULHN9IHN8yReo4mUEMKRrB/Maeu1RLOcyXUHaJ8sirxYpFzI0rxnw/vNHNQHlsxtmI39482g86I+srcjJOBqkQxkqcTEQxjrrw3RoxuTneYbRDumzWZ1U7O1BtXSvdPpNuR7RPkDszYOB5edaGt+2ZHKwXnGR1J+j+/ZhejxA6WHo0Nd5AFdNFGo1DIQUoxktqCNRSdZMMjTADb1o29jwbOdc2g/E96kqCrrqSqNve9cazccTTg+Ygy2xOraRkAUSrfPX3CVOBuTcOEs5HmhI6RcIxOLoD021npOTDenhbHzBbr/FUSfuZ7jCQDuu+bvdhDVbt+bTvTSvKtccaGXgqK4qpoLNVKkmsxGRBS0MZKWItIoq/98k5Odmi9KabtiC12XRw8oVy97FAt7e+jWUTGLpAB4Q5w2iocZiUcIxMYSAcojk2oy7Vq9pFJMZ1bp9csf9wYnwDpseVuq3QmeMUUiV4w+zWz6607g5B6fpMHjpqjCRPM4o3RI8nPwRnFv2yhTjdhEnVnoVUYX+zoVxMBFHKUCke+JVttAMSHM0vVgTz9DGHj12w/3AygrjCcOOHVRsLoG+BtTphXwRzolTQLAvxP1erUk19YU7RtKm8slim7AiUE6RdD61GQ/DroAvK1gQr49G6Msj+SLwZ6NXt4WN38KIHE0Wgew1eaXY9ilH8olCHxHzprauKtRGJY2V4KexfbMmPwroHo1I1cJO3XM9bpinRqXN/Niycr8UZccI1IZJ3idpbVftcotke1/vDqReSV2RWg3hQa2tHRf3sgerBgnUvsGxCOgppb9fQ7dWCdvW2J1Nd9dICX6S3eP69IXaWIZHgBPhdT+0j85UYeurPPwRWRL3aWly6KLw2hjCz3RhqOXvQqtEcpjKJ9a0dhJCDV+3K4mS7sTQHuWmXeYpRVb/4w87GV3SgVPXzwOf9+1sR+QvAx4CfDfxk/7N/E/jDfCUHSu57zaIGH4ZYSanYdadgvI4ZUFM+BuHVh7b0feZ06qgl8pn9E5+4zBBmjrXnvdMFqkJMhfI6svQlb5ClWgWgbJJNapPfT8FLau3
QAENo4slQqfnCOQpgk18tqpQYOXy4Z9jemUhoiZyykX6Xpqp+XeKbd+Fm+cEWT8J8shJa8VLT1rstjs7Xmmc0u6IqLIvd8uWOKhy8fLp1nm5f7RrOxUY9xWbIkR3656R7I/mZ8auXlX4z8+ziwNZ7g51Kx8vdU/JFZFTj1PQ3wAiaArNX+JXBUhStDDfMpthcvelt6S1yyrvANEXKpZHo80UkzFsSIIdxrZhQRbc907MNeWvrpzVkDdmMpiikox+cucCcTTzPG8XG0fqUmSqzVSrpyMJvshY+wvSorSvbgDX685tdGd5F/5rAKGdkXRN4XBHC19dhq9TBHbowriKYwZturn2bZKmAw1MKMVa6rpizLlCiUEtEK3YY5YCOsvACoKGwhproEA3hglWU0iv3zpsvx6MsKEAYdYW9U0JTZKmGcg5iG7WK7eEaKHedtWbZGwIgxZ3N0RCC5gDFk11ba8YrR+eIHRr85PevLArvKjZvTZE8HiE64dyCDCf5gln1KTAeTKwz44b7LP3R5EPMyTSnLox2oFSvnsuLg8FyUI1PrdelNfANxCkSTh2hs9JDzZnhvRPD8ysmb7aajoa2afS2Ui2A6XvYbpjeuuD4lnGF6qZCXwldIabqtrS2LUEp1pJFZkF74/9YKl69ygykujTLQV3xW8jNuWr3fGaflnHXcQJrJTNUam/BxbwL9EMkupwLqrDdkD/6lLtvSIxPbc/EcQ0il20Q1nNNgiFuttXUY22l5JYuhMPcc3cYDFlSWQ5p170xh/tu3f+W9rQgU8WQ7tZ3tfRQezvUUSXsj8g0+zwNZ42NsXTYUh2xynxIgXg7Qq3Ui4HpUcf4JLpdNpHJeMjs3u4Z3+gYA2wncZFjU1Z/Pl3w3umCckoMo91n7biXLVlEOkUMdb+ypvTjnMizKaaHqTlnZ9ygylLpiIBsiknepOZkQ4mJchB/nmFxrntPz/U3xuOjS+5snAVX7+PctC1mH9DuQ6BL1F1PvuyW5t9STMQ1NyQRs1tLVWm1DMsxd7ycdxQNjLXjrgykWOi6QBmKccJLXORIFnvXeNJn/Nnz829BoMAyMvrluVBfHp96bYjIJ4AfDvwJ4MPuXDUn663vyXs9jIfxMB7Gw3gYD+Nh/K06PjCJXEQugX8P+GdV9UZEvtJL2ut+CfBLALqrp2dVEv77FjVHI4xJ9NLFYqTNMFl57ayygCVa4XYaeDdat/o+FLIGbk4b8hyRUJdooLV1kMBSIXBegRcy1AVb9GvzNJwhNXVp42ARi2mvxKNQtl4p2MUV7mulyYP1Ttv0M6ecqCqc5oROYWmlsKTEqpKOMFyXe73d4jEtBOhzQuf50OJVPapICKST0t1aBK4R64F3nE1qAdwb94qLMzi18b5a5WLtz0qzfc7O21oAlG3l8XZk102kUJfO9No7IXFrcxbHsJS9SnUo2qtmmg6UZGvJUaMu2iqlh3QnHA89XBROzzqkGES/rUoqasltr55QEcrGrjuOSn/juiTeB04U4jEvrQMkF2SuS5VYmKoJQYrQXwtxjEu01p7VdBUMBp6tgq6Vv0qDgEu1uU8KzqWx1ilhTWm1KC0a2lR7fxbi8H+wMvZ4MqTDoGhLEy46Y2f6PAuCma1cW718WOtarhMiEJXcV2pcNdasHN32S94m8q5DxgECSw81V6KgpcvCpAtvTIpxu1ovs/M+bvfWaQoQlZQqp1NnjVhPrWWTIzzKwte4h36cRc8hV1q39LZOrDruzOZ4kUirSAuFJdXa5iqMGZ1mizidE6Z+TbWZRSfck2UtjYa1arcq54KToax8gIbWamxpCxZkd61Cqug0kd69YXh55VWORpQ3BNjfu/o6ixHZbpgeJ6ZHauhTV5FYCVEJsS6SItUlPOZDj9wmulthVphDt4i7Asset1SsFTKUXpa0bltqS9qofY/Ngc7BGgt3lby11+aN8Qg1rQ9FLnYcP7zh9IYsacJWRbikS7pI7ZXQWUYiRmEWQyD
WN9KFOK9ROc6d9U8Nq55bq06jWPPY/q6lbMymNKHUuqmQlDIL806Il0J6kgjjln5/NBtxHIm3iW6IJnS6iUaSn+y+23pa1kAGudmjuw3z44HpscnedAfMBs6FOGU2LwfiMZGnsDQWl2wcX4DjbD1hk6dzF1Fb2nNZ0UndbZgvbI6myXTOQqNpnO+fLIsdK9Fa33Te57TNbYzq9sr0umrXeGQu8VKMqkDO6Ka3s6+e3ft5hfsZeNPWf6vwVIHaJ8omUbvgbX18fQU9y0oJIVXKIo1j624/97w9PuJYe27ChndPl0ufQxHbuyz33bIp6rarEo4uwlzhnrSIa1JpLlC/BJx2Nj6QAyUiHeY8/W5V/X3+47dF5KOq+nkR+Sjwzvu9VlV/K/BbAbYf+bhq8sN58IUYTZOi98nKXWF2wbp0MFj59KbQ95nLzWh+RAlMOXE7D0vJZx+KiXIWQVgbg7YF0zrKV7in0xIyi2GzRVAJ+3GpatO0llqXASPrDS2l4VyYIZHEFX6a2unlBeMz2Lk8g6qs8LmXD6+Ly8Ti+pcT6fq49LeL48Xi8CBQi1CKGe2ygbLt6IYenaflwceTsnlViaNVvG1fFLufaXbJAoXOy9b7sMgnpMsLtFY0wrwN1CsXIQxnG6LpoDiBXC4zH7rY89b2liiKqWZ5KUcw6JlB3IkJpGMhHS01qa6NJbmitckbrF3J0WBk+TsYbxK8MTK+kSydVAPdTSTu4xdlZ5uTErJpR4XJHDPbvEI8nDmTXgWicS3bNyVq04kKU3GxOlk4PbVzYT9PA4k6tyhCHSK6GyiXPbItdNvZtLG6jnwZrd+Vp3PyZaReXaBDZHrkzVi39izkNICYFEDZY4uvGj9jycl7tZdih5Hx8wLlUpaDNATrup5r4xkqBxXKLJRjNDXyy0p6NFFy8CqYBLJxZ9F4aHmrS5VX6YW+WmrYSqBNm2rROXI+yyJc5wFCm9suFuYYTSPMxSubg6HcN76LQV5S2yy9KNtBHTKLYbX1bc+19CykW/W5lGwOTZwwAnl1Rqo7Wss464agXhjaCM5Lw90WfLmnVnvbWxrWe6LZmmZ/3OmWsZVKKzpnq349qTtgzq0KZ05hCCZdEOywnC6CacMNxQ6bznhbKVVisEbfWYxjJreJzbuB4SVM+8jYnCc5S+Em5/e000B80s6c9PN7UdwxdNFWu8RK2VqaXapxQ3XbE449pER5esHxWbQS9c767aWjEo9lkeWwyjzjaW267BQf6zXYhDBFFLQ4YR9K0xry59UcZ+2cE1qUbl9RCZ5q9YKQCyU8mo2srjDPTZYiUNOGMD0mvjo4UbrYgTsFoxdshHQMzH3wNcnqHAiQIvOHLtl/pCNvzImJTfNOFYqthcVmiTtXOXA3DWzT2sDWeH/cW88mZWB2S7uEbjvmCwtWaw7oGO/x1hbHspk9f3Z1a2dvzmeOrigxVXLHUpVZkxexTJYSjUd/o6Gn7HqahMf9FCNrEOXz0sRpNRk3jN4qvanWP1LKBk1Kv8lcbEdfAyacfdxEysakfTQpY07czMZ77oP1/Ztn0yas2dPuZwU2ofVfzL4HvWAK17Fa8satQpCCEea+vBP1QarwBPi/AX9BVX/92a/+IPALgF/n//77X+m9zAmxpoBl15N3AaSQgrVhCcH4G6WEpYomZCMkP92eeLo50sfCC6BPNrmlBgLKpj+x7WcOcTjX5zNl6Sge9fk8VV+wbqyJzTi4kObdET0eQQKysTKmmpSyMYXqvG1VQSZeV7fJKuDciVJVwmagbE2uv4tl9fJhRXnOSvpDUeJhQvZHK1fO2RZdX5G+oJPXZohVF0yTML45EE5PiF1alNNDVuKrQn9jh313k43wN2dbuFslbDI1RvJWmXeB6clAOD61MvSA8ZQeuXEVCO3wOvvCI4O5Rj5z95RSg6GIaot3QUVcmbx2AsdWRWUO4DKcZCwKeWuVjaK6tHGIJ5vnfFnIh+TCaHKPW2MXa2XFLQq
No5JO0HnbFo0sVW4Eqz7Tpn1E4wm4UmIUxMUhUWtSXHsju8epISSNmG33d3rWuTCm0O+OPLk8UlXYx8rhysikeWfVpafHkeHJhrKJTE8rZVuZrxL5oiMce6t+mayZaiNJx1NetINMYmPVxenvlP5l4LjrGB3JjanS9VbtNnQzXazUGjgUYT4FpqsAl5ndbmScEvPVwOlpABJSYHxsGmxl52iHKGXnxPFJl6ocI4g7Cnq216Wa7TTEpxD2piy+tLLpqiF8nVVENV5eI4sar0IXntfSEsObl86PdZFwaAe6FFkQvZqs9L0M9n793tZHnLya0StmTTZDl6rJsClGkJcAxcHfpCt3BLvW5ui0IpCycWHLLq7q+Gfk9u5YScdCOE02X97KpZWk12Q2ojX0boK9ZYjEnW2YsuvtEOnrUi24ksbt9XOJTGMi7zu2zwO7LygXbxemy8B+Ckw+b4tJCqY431TbqwdIXzT8UFycu86ctyWpsFHnmJnMQN12yKZHSiVfDSaC6HIT8WQNvbubyXT3VK1R7NYkUVIslBqsua5XzzVR3eqK4+IOZxqyVXI7SlF6MSS1GPKUjqY0ndUc67xRylXl4mI0kv0cKBeBGW9kuxVC2bGLwdB7FwWV2eY7HSLpLprg6VkXA9yuzx95wu03bzh+KLiTaMiHLLpHbr9caqIJ6IYCp5zYppkuFujqIp/QlP2bTW7ZA4IX2gy+4ZSVNF7PnKe2h9SWXHNqco6U7IUn5wfn2Vw2x8gI+S6A2nXUbcd8lai9kbRb897Su36aC2CeryWTuRDmq7Q4vOmYCXcnRK+gq1xsR55sTwwxe/PnyDxH5p05UTVBLoFj7khSPHA31LUWr+LPLnnhtiROzimdjcjfnCcNrtEl4mCJQnVkmrP5+BLjgyBQPx74+cCfF5E/6z/7VZjj9HtF5BcDnwb+ga/0RhqgXhbmvTA/SuSNVVm0SQKI7kQ1p1gF6qA83Rx5NuzpQ+bmNJzJ1AfmFLjqT5ZGGmbmOdpmcsHLeQpWmq/qKQJdIshQTBtkJQqrib5NMyxkWjMOi85Jb8J9wwt7XRks2llIdQDBCHzzHNn2s6coA4fkZEs3NFa948Q+18YQ1zXKW0gXM6kr5K5S+kDpIyULSOTuoxGNFwyXvXV6n6zcP42VMGY78Ef3oGtlvojIo5Hd5cg8R8ZTYHqUEI1IvWB4x6PHrTA/UtKdrJvXYWYzoFZ9Nt52PN9dEF3CAMzQSV1RoOgk7FadEWdd0inWsiGSUlyqv6L3+FMnSLc1sMD4tRkUvUekXHrI+Y/yxua2O6qR6NX6ZDFndJ6Rzp+tO2GmKxTQaM6DRqv4Uo9Wau/pvLCmP9pnm1EwaYPxkSEwfZ/ZdrO1mhiEw6aQd8mNTCPEJ+tHt6vItlCGZAZia9cWZiWNtjZEWdXXwdbxZNUyRlCt9NeB6WkgbwVJldQVNv1M8hS5AEM3U7aB0y4xXwViX0ixkGPgdKGMjwPUQJxhvoL8uKLbQuhMuXgeA2UbSYem1mzzvujetHRMYEmNxtn6PA7vBY6XG9M1gyUqbbB+KA2ZsKq1dGxRI8zTuh6ao5wvqvXyaiOaRS7D+gylKvNNoL9x1LDYe943TICT7tUr1WoVSoxGup+DVQclgzdaNJ5O7rAl0KdK3TahXSP0UnFCvJV+dzcuO3Ic0dnFIyWgF1tmb7OkQZkuzXZU12jJF4lut3HH0Rw+iu+JKuQq1FoXIdsyB/QUibeR4QVcvF3YfuHI0EfiPHD3DZHuUBcH0GycIzMda7/M1+boXJyw9haI9UP27IchA+bIOmIyRGLfobksFWxG9Bf6a2XzMhOvjyb5EhyVTJUoShRFRemS9Um1YNlQqOxCuyFbz8y+L5SibiOt3cf8KCLaLbIScbLUdcs4aLCS/4oFG3WoZKDGQBngcAikY088JpMqmQpSrfdbmOpSIbtU0DkCpQKHj204fMTep9uzpL4aXUJTYL6w38umUDbJ0qYZ9qe
euLTOYpUPqQ1a83Ojpcf8rDB6CUaBUVanyeUwllSjO8BhND2qkg09EMS6F9RAiAWNuhZI+H3ZNZjNDpue2lkni7wNlO2KkJWNvaYBFvbuujia5mAFswujmu0/TdQEcVPY9TNDzEbNEWXbzUwbqzQs2whqZPnb0YoHwnAgiFGBqncIQB0Ma+v2DBEzra7gjqRXBGJnt7p0gZxRcr7c+CBVeH+Mcxz3/vi7vtLr7w2B/vFIvotMV94SZXGg7EJTLCgwpwZTmrPxqLNT91Q6jmN/Buti0LUoh6mj915qc2c8nPnS+2ctbSV0LSXPei/NB36gilhkGiPE4Atf14NToV4U6q1toBrFKpdScu6UQq2WqhLYdbYgAG42xVpP7Fp+w4xN3kK56AjT1kp+c2F6pFxdHq0Rbm/lunOfLN0icHojgURKH+huE90h+wFvizKAcbiSCZWNV8KjR0cuholjSky7nvkyIdU0qNJdt0b04pVGkzmVCxzsKRmpwngXOT3p6PuMSF4UYTXqEqmH2Q9aBZrwpDtUNUHZdaStRddN3r/xzWKEMEUTB2wyDhN+qFbOe4otZblic1mTqd2n0auNshIxZ6htDCmVUIwDVbFIq/RW5mraK/Z9PDqXKbiGSxQouiIH0flCnv0KM5QSmEr0yJlFCdwqycRhf1krQ861ZFKgpZ11NgcEsMrQFhnN1mdxqQ6r6ulEi2olGiKxpD586QYx3S5cI6oWYZzNStehGky+E3R0baFdJg2FrreWPXdTIG+itaLAeV8iVmJ/Vvp7zl2y1FWmv4HpJlI3q7xEExgNk/MUWpXpSU1vZrTPiNN9x9XU15VweZbucBRmFtAihihh4r2xNUltTkNLi4mXeue1z10INm80+9mieq/ElWyl3P1NMY5IDYxvuFCjR/ZSlHQM9DcwXCvDdaG7GZHDCCdTzFdVJEbq4x3TU9Bnpoo+PdqQjrjsAp7ujC4Hkc0Zuw1MKSGzeUA1KcUbNZveWaC7FYbrSn8zWxp/DGwFatzQ7evKq6nWQiNvMLXyNqWtGqkhHuLnYrK07qNHR+N4Th1TjkxRF3S99ELpAikF56z5Vs3Q3cLuuYlVyuF0ny53dtK0JvNggVlrjCtOEZAMx0NvmkGxIqkuDlxTnl64g+6QaYTuLpiYpVMrQrC+nypOaQtW/Zu3nvquVj0HZ1tVHbVsPERP64rCeBVMwDF7EDCqaw42GkDk9EQoF4XtbmIaemtFBIynnmtPW8kxrohfaHIZjefVRDyNX2qIrhqlL1vFdnD5hsWJanYYIUVr3xSCBVo5B2pOq7+Q6gq+aAui7ZdliMShN4HpQZguhfnSECvTbDtDan3OmlN3XiUp5QwRwsCOfjDJn1ID3ryIIRl953jqyJuOMAamseNlCRyGzvyHGkjJIqxaTKpnVSG3NVt7o9FYwBaREpZqPE3BAh7VheIhpXxxluO18XVVIge42I683G3Im2i9ppo+0hnMl0I1zoV7wOWicpGmpU9QzoEyxwWVKFl5EXZMY+Ly4sTQzew7e48yGzpQOvPUxXVf1nSUnhkKg0SJ0ZyhZJIGLd0UR4tgu6MdUmXjmymAdonQdWh1jNQlA7bDxJvbO3Zp4mIeuL0cOF5sFqukTuabL4X5UWeHRRQkV/LTzOOtqf3ONZBiYoyVaUqMk/VQy1shHyHkgGhaDVBygUTFynJLpQzCxTCx7cxCvorVHZnmGKyaJCZAqAthNno6xYY40d8g01qFXCJjabCh/9M2vlhqpxm1soGyq5RjZL5MdFdbjwjq8vowmzPQHe3NQleXxsvLCAHt1iWsXWTeBU7PjJPSvxK6vRnySF2d4+DOsZgjJEVNDM75V+ZstUNAbKNlXWBvdSO7OKst4jk7CYrz9M7XdUtrhVFcOmH9nTpxemmE2tqB+MFtr/WQDhadnjbPUowz2F8HxtQZyrQJpj00GBcriJLLmTihKDVbaXvXZUsZdBbJ1iqUjQkvps4aeXepcBx6T1n
anjJZi2Bp5CYOeWZ0xA8AVE2PaR8o2dTRZRa6WxNUTUffl63k2vdoU7Ff1Nbd+IZZCWPwx1mI0SPQKuSY0KCkwVLnpe8XBCS0Z5eioZDtxMjOnZyt12NVMfvSZAxcD0mcjBpbYFChi4aqiheISK7uDMLmRWW4qfSvmhMzW+FHcKczBPJlz/is8OTJ3vqW7TZLwUbT+BIvtgiHic2LwuZ5BzUu5HZL+6kTfs0JTXcmtpuundO52xCPM5v3InGuTFcdZUlVrErmMcsSvrc0UXOMzHGAuqt89NENfSjs+567qee4H6idqUaXwYnkXTTuD46QVFOr7m+sybLt4/ZggKgMXeayH5eilDEnjrkzUcXiUgyOxNQpMgddDs97/NbS1pTt3RhtrXa3FlgvAsCxMoZk8hUV6/15Vp/e9AR5TVSxnVGtQW2zXUhzngx5DPMaaFij8sD4VIhPJt56dMenNxfmrIlS58BERy2muN4KYExt3vfUgj4BLvZZe0zTCTwwaRIg3OvrBxCSiwYHZbN1p31KZoPCWnyyFDm181JtHdZoCA6YDSgbFm4bWIBYIwtia5ytMzOoLaiqxNla7iDmhD0eTFdsqpFBlD4avzmg3AyZuTcRUM1CLYniottVhSC2dnI28dLaB+podr0MljmyhtNh4bBSAdcss5Svc+ew5/2ViuW+vg6UwtBlZJfNgdqa6m8KleTVI51vmpdDJW+9Z9K2EKRyzAN308B86GHyJxKsbcFRe+oYmYbMbpiQBicv3rujInV9oMvvPGqxrwB9ZxOYrJIkb6Hp8oinS8iBfFW8WsFI8UZabJbeUo9vXd7x1uaObZgYQuHtzcjdRUWwdJ31mRPyBYxPItDTNdj50mDMsdgDjaEC0Q7AvPKMGkFVcjVnCTMU1jyRle+TsN5B2GF6Xt3SHEFwJMAj7aU7vSy3tXj2ZVN58/Gety7ulve5m4Yl922pLU/zTArbwPgoMD5VuJrR68B8GchPNp7uyOhZtdqabhG224lj2JgD1lu7iJgC0g5uEcqjntMbwviGOYbdXfBDIazphxRNhNOdL3UDRIOo/V/FnMrpUhifuCEqSunEUM1ZvekrCw9GptVYqSNPXbBomaBLw9o2h0t/q1QIqRoPZQiEXVyVgd1pW4ajoudVXjXKWbsHpV5mU/PtM9thIgb1tcNCylzCS7WDZNNlbj0FtkS7AatoBSOXOkF50XiKjoz3AR16I9yGAF1ae7Aptg6bzowXUKiaIGx/a1yYONnc2h+t660mMb58I2fHADEs6b48W8qh3edx6lx4zx3N1m4jrnZgCSqSw/aODFItcs+T7SGdA2TnUzQEajpDrdt6KboolMdpTSPHCTavCt1NJu4nby4rSAyopuWx5k1ErzKXw8T1ceOpMPu8dFJLw0+zcRlPE8Pzge27jYFv81U7Fq2j5ng10q+M870oOx3Lcu0NjbNDrTmwFvQsxN8E4lwcxRwH2WY+ur0hhcI+Dwxxy4vhgjoM1OKB3dbU+mXK1pB6Y9fW7dWIyNUdyb5b0GSJyuPhxJubvQkkzwO3deA4dxynzugZc6Qr9iwB6rlieT1HPauJkS4iiSycQQ1wtTEn7Th3LjganRi/7rmGuizCoLAE4NoppffGwO1XASctOwfzUNd+lwGUsIisvvnkjo9c3PCp7kPG/3J/fkE9qyx9LsvSrmclkNfeUKTSBZvbrhqlTtdrXDMrLHauoTJlV9n1M/uxt5YvoxVhaVctcDi739bmaqU8iKmqd5YhKhcV+komkfbOpXO+24KuN5sRcFS5IlM14dmho1xZMVlV2/N9LDzqTgSppFDYdDvu+orO7uiVQJ0iR+mpNbC7ONGlYindTbF1MUWK6iKmKTWgooRxjcSt4EWAikTztBWMaP7VpvC+1qMLlW7IlM2GeQddV+hSWfK+gjlRMhTyhamJS1fJdeVILWJYVSyPWS1/K2Mg5wAN2XJYNZQVPVgRBDc4RReIT91Aq3ugzZnKV55
umAE1SDacIvpsMgXYzngPIQYkmqQBwXhEH9tdc5VO7IJ51o+HE9+9LeTWl2hQy3yIcWhCjgsy1vUj9ez0rJ6O0Rpsc2WsysyrWeJolXUtnWNwr95bBFOJ1ri5BhP1bFF+XRc5rjaNp6cWoXU9O6gHQS8KjzcnjrnjxX63iLjFfTC0zhGrdiDPPZzeFOYPTVxcjYxpYLoQ0pOOeKp0LgEALeJuxL/AGxcHPt1fGbl2a3ylNKSlskW7yPFDPccPK+Vpdgf7zNFo0xg9dZOifQmLw7ZUbM6KaF3I5XkrlA6v+DJnN0xCvoxLZRe1KZnbOoqxcuHct1IDMVXrlSagzehsrAoybAoxFePsbU0ttyYhuYibXdf957ho+DVUJqwcBHKgjJGxWCl73xd2w0Tn6fFFoRyQVLnanrjsJ57Lo4WXt8yFp8Ua37DW0Aqv1gM2Bgs2zlKqC1Qf10Ma/HCbDHkN2VJ1cTQ0p/SCukBi66JugpL+ecriLEs1BEpPETazp+4VkWTOUzFHUQmEJW1w9j4iLC2Aoq6k0wkT0FRMyXkKS5oxjtwrjljuVZzv5w4U2djz1oamEMbsP5MldShgqeRqDpdEXRCW5vDZIQzhVEwmImdrHHt9YPNya6i6O+9NlkLdSY9HT4Ge3FkK4cyJNYR8QZ+9kKJVPQKOOlnqvFXjLT53sOtNoVgT904JUhmGJ9xtK1ICsfOK0z4S+sR8Zf1Dm2SFfYbbWm+erkEIfeHp5sCj7sTNvOF23vDqsOU0dcxTokzBO1UY10bSmh5sKO7iOKiiEhB0QVLa9ddt5UPbPVkD+6m3fmtT9NYnLGjbfdt4hgx7LYchxst2QpNJQQTv9JCOGYqiXVjS/i3wuRpGHndHI2y7LMNC5xSWea++F9RRHY26oPkag7eeUiRW6hyJ586krmnl80q4sgHZWSHW8TDAbUc82lrQ3Vp51qph2zMzOgQLB2stKKiEoVgrti5QmyOK0wrU0M3ayVL9aR0YrApTuwSpLs2BN2nmjWHPs27PWDuqBmLwNG0X6PrCPAo6BWZNMAWmLpGCKfHHWKnJGqRLteKB0ntKs2KIZ+tQ0u61NYgPAtUzMl/BRQpf9rcP42E8jIfxMB7Gw3gYD+OLxtcXgRJDl/o+ewSubLrM1WCaS3OJ1l07ZqILswHmvSJs48zj/kS3ydbvJsv991fjbszZqmfCbBHl0ivstWu599ImeBcFHVxzJUbTq9oYX2PJPasSDwIfycy7Fvl5dBmDRXddoj6eedbv2YSZjbvwuzQZ1NqZd6x9RWcrTyldMC2ZaFBpKbKIg7UKm0aebymFdLIoNx2L5ZKvzsMhtagnW/WIVJhLIAVDE87lBiwF6Dl9WdNDmoyov8Cw4mXCWwh9WZ7bOCcTcZsi/cn4U83bF4+y80YYnynbJ8ZTG5WFrG38n7CmFbSVwlrLjkfDCd1UtDMZiXkXTNxuDEt6YnwUyI8qoS/oMdqzn8+QmxZ1RE9pyiqFsEL8anIBCnEslH5gvhLyI0M8a8Ii6YNwemwtGrJr3wRP8ZRBuNhMfOTihiSVQ+55p79kHiwK007JF5FyY8hW11u6beytDLyMlj6M89ryxZoF61KheW8dK56aMeHRvAtUscgvpeoSBoU+Foam++LPsxsyb2wPhm569N2It61nXmi6SO+zd+5V+QZ/FvEssmuoRVznqAYwHpPzNGZDi6tXKJqulizIcCPutnY4ba+Hye/b9Y/aPjlHmrSsAph2PWJcsjZi9FSQX98khl62eQVLv3ayCnYWXKhWlhSqtLTNVJeo30jfVv4u5+hcbDnwCjUTx4KOieOcmKdkHCSfuzgZcZxajTsFyJxJh0K/j4ZOxJW0a+l8S/2lk/cgPL9fL7gQWIsiurCgfa1Fj5XY271/EZG2ARwayBoJKEOwtXXbrfSIZhc1BeatFQ5E5/9RPU15ZjNrMr2/x92RosKLccc7t5ccDoNVFk4RZiFM1gjZGq9naolWGKHrvKm
jMzUJ0dnvLS2cd6AXmctu5O3jFa9ut8zXA+HoHNB6Jk3AiuJIkNWUzKyCpJ5GXT6+rLbSnr2avFy2KsqQK/Eo5BropC40FMuMKKnL1BAorm0msGRrOaObOIxJ2Qhla826GcM9Yc+F1+uXWZMVC+TLShoy+1NPuenobsLStHsOYeFTNQHK1OQLfD7w9aNJvCLTuJJTSEuGRxtU7SmAcySsoepy1jLN1pQwxMyz4cDHNq/YhYkvTI84lcTk5zoKu83EKSijdks1asmRXAM5R0PZz5etn2kLQnnGP8V50SZYy4oUt/n7MuPr6kCpwOP+yMu45dRbKqNPZdF7KBIYYuZJfyR1hbFX6mRkzrlGOq9jjqlSN8U6pZ+nEwwhpahAXlMRLY239iNbjfJybW4wtAvWDLYCKXhJslXULNVkWLVBBvKlHaC195TQ7Jh63zF4o81D6RkkE8UhymRluJowjafRKoFWPouVfdYzaQec2Lc0V2zXryzVa81JOi9rWXr+eCVaS98UX3TAwhugWjrznOtUl/SJpU1aCq929hxadWFKhWns0DF4CTpWAedaSahxhvJV4cl2tObKsGiHqGujNN0uW9C2sNPeLiFsMzUl71vlKtkpLCXCTR+HYv3U0mlVz7X79AUCvklYD0r1A68RPqvCbOum9ta/T4M5/bot1D4y3gZCtpReHFnmTgNcDiNvDXc+vV7pkxQdrOInb6KlR89J8efrMbjxfM0oLxi/lzKfOxVlA9MThTdHttuZ7TBxNUzEYM9plyYrc6/CqTcy+HaY2aWJd4+XtnbBy/RZuB19Wi1y23PiTpuIrzGvwDs3Pudq4gQrW47+nnrGE4knm/PQyMG6GmlZnounUzxl24whAl0qTDlZpkqU0BUqEQncsxHrAdc2hx2yCw6/pH5wI9/K1FsvQ1Z+zbyWpoM7H6Ol1G0emtNXnRd59nBbz0xrUEjcz4TbgePTnjxFUl6vI2RMeLN6agHLqYex0O3LIq1RZnPGWwDS5k0F46SF++R+m9v7hQztHmpvn63iztNZ+ud8HsfFOJx9n6oFJ41bKnbQlh4kmzBuOtZ7n7tekKW+qwbePV3y+ZtH3N1tqKdoXLRZCKewaEghQhULQHb9zL4ra4pq8HRUFG8+LItjlS+swOCQO965vWR+taF7GYnH1XFaGyevw561zXeclXgMS9qzicK2NaupBYJun8WLIaIFtd0d7CdjhUvQNf2OSaDUGjh0ztttAph88TUB5MF5S1MgHs4qc5tj5zYJsWdbtmrSKTVweNXTXUf6a3s21kM1MD2zoCaNuqx3WxNCazbeqBka7bnFWH2tNHvrFx24vwfPzy8wsMJtWhDlohv52PYVH+5uuC0bXnhvwMOpt6bes5Bi5XI7oupaVrOpQRUvMqh+vp13M2CxR+s5rtJ+Vq1f7OvSBa8VDrw+vu4cqMvO0KbS2+YuVRhLskqL2VqePOmPVoIdvFpH4Fm/52l34HF35MVpx93Ycxx7StOuEKWIsttMhFDXhRPvRwZN/6lxf+Ks1NlbNtRVq0Ncr6Nu3Ik5rQ6EqFU8nU4doeV1OxflcsKQ9ontMN+7904KV2kkdZnSRXRSI8PBarTbwVFNUn+ImS4akTy5anuO0WQauqa3EqinQCx2wIhX+QCL8wR2KI5njhiwOhHtv35QLBJL7d+G0DSjoJDHxKdfPeH2vQvii870boptxP6u0u3N8JdNdG6GORFdLAuyZg6Uo0Dq7V3OG+0q9LfKzbghdWVxtmqPoWNhRWkAyILO1qA2jiupexmNrFqMvBimSugMObHo3ZSipVhlyPAi0T+zJsh5p+hQCdtMqcL0yAxo3tlBE7feuLiDi27iIo3s82ByBk1U7myIGlIwHXpyjnSTLMKRWnRx+tu12QMKUKsLw61qw+1v4wjlpuc4Rk59z92QeXRx4sOXt6RgelDJW39ohG0/k2vg+rhBpobGuc7RwaQz+mQ8rvY87kXbldXhbOutVhPaHO0
e41ShVrq9kjfGDykDZ9V2a6Vd0xBr790KJBZDPpkzEsdKOq3o7N1xoOTAsJmJyfZ/CNUaKTc70JxSteclzRkFM7bLWhfj3x3tMArjWtUWnAQf5moactGeUzop5eDNqnN1Z491XuRsP56PGAnHme7OOxWc3/sE6VAXpW5xeRVNVtkWTxVJtv7TwXR98sb2RHeopvKtuM4Na7TfRS8h9wCjoVYHe7hzMKeRc+6Zr9dm/7QIp9Ix1URAyRoYc/OY3EkOKxKkCZIj05Kr7VvXvVvvWSnFnKcv7K+43W+odx0yrarS0TXChuuC1MDtoWN7NbLt5oVnmDcWTKVNWK79HmoazbF+53DF7c2W7mVkeCmkvZ0Rmto+bMiEBzBeYCGq5vwf12q32rmPO6/BznnGwnhnFgiLKv21cntw+RaXf5Ai1GNk3lr1rrbqNcXEZWdzQu9VkbtYKBXCKRIPsrRsWYOr9Xmc/5v3HeEu0l0Lm/fU9qdXq82XxjNLh7o0MV/OpqJIKYaknQypnnIgxLpwo6h+rS1+WfZWs1VnjoqjwjIF5hLpY+FpOtBJ5r35gnePl7w6bJmOHeFkwfFh7JbuJUEMkACsSXkJhqZ7E+0wicvfmC0KLoPTnPglkGhn5fs5919ifN1TeFW9vNsXwZRNECt72XcMyiF35GzkL8mCzoGxJu7ysJQrlhrMeSpWOUCqSHCj6SRrWDf8gkTNLJUypm90JgiI6wBNcdnkZYi2eRv5zDd8PIGezDEonUGZpODGDeom0aUj+2y9jZ6mA0WDCbcJSKwQIiGqcQvreng1Fe46RbIaKTw7ide867B49xZtBWJv0HBw71oX8aGzRVpgnhOjR+yN6LtEKKGl9Oy150J7wbVEFpG2DFx33I6XiFdE1sEOfqr1R4vHsqTvrB2AVwiqpVkbWbN6ytIeGPcQCKj0t8rLw9aiU58jQ8JkLTFWP2yPhgp1t+KH2Zng5mJM/bMctQuz9zvMlXiyHkkyTshpYsiV4a2nHEowlLKvbC8mjkDeRTOePaBK9rLjRpLc54EX046X445p7Ey5HlAJHu0r6VQJ73WUXWS3t7LndLDWE5J9LXhqbWkzUBthGTOcrvGyfc/SwhCYHwn18UzfZ55sj/QhE/C2R7EQYqF0SgqV62nL7d2W0BTWsy4q6Md9x3zlTrcXHsC6LprMBeG+43suEyIuRZAOhbTzdJMK3d7vP5vRMpFVT7u7U9gOsKaC3NZzGCvxaPt2ypHTzQDZ1KrBonpbeC4P0ciutOtllaFwwm8ztOk2LIUBGqAOhv6EGfTg6/VMCV+KqYzXFK2PpetgLWTzdqgsGnN+Ma3H3TjR3Qn7U0JPcRWunZ0E7kNVF6fPSPTFENBtYr60dJUUNZHMyec1e5l4qV7yHtaD0ANI68+m3q/N9lVbqyHLktKS4reQBZ0i1+OWGKwbQZS6ygs0dPAMZZBq+k/Dtamx3zuwfIRZOR16Pnf7mOvbLeWmN62zce2XGF0aoL8pNj/Pe05B0UdemdUQ9CUg9P1fz9ZkFuq+4+34iPBuz/BSGF6qtUOaLJWIOJqIrRVpdIgYgGop0oOsFcsVK15SVvStOZAalkNZ1PbCcKu8mhLz0sPI7eshcuoGJFbi0ZwYK8ixZxFcwLPRLoI7xeEUSHuTbbG5XIPHeq5X1fTZpkDYQ9oHur21ghpuCn0QNESmx9YGK57KgsoZgCDL3MpcvVDB5CRqV8DpJYSzYOjMAW9Zk2Vvt31RFJmsX+wh99yVgUPt+a79G7xzd8ntfoPuE93eqncPr7achmLEf1ELnKtRXOrs7WxOYZFyiEcPwiYPkvPqKN1z5u4hxd7j88uMrzsCBXA69nTVvPxahXFOKDBNVsZ/2nWGKp3Wio1H6cQuTnz6+Izr44ZptgSxVouG6hkf/p72Thu6Lp5wnso7+wI8729RWx1MaTVMFvWcl/rHWZExoJ1SBz/MowsWVs91i/J8ugDgIhnydjsPhka09IC0CiD
WBeWLrDmBKVSKep+rYC1vSoOqt94I82QNJPHPFxFE6xp1qkVUpQhjjkxTXBsVO7Rdu7BEN/AacucbVqM4KmDRYN6ADpUSDdJO+0B/Z+KT8ejl0wJSm8aUCaeWL4Jj/J+m09W4PwrdvnI4DPTDbGmwJrDaW2udNZIwDZJ4smiyO1TiuDpINjdnhrsUwlyXA5KKVUvZIlourQmuAkiqbPuZUgLj1rXKBoUaDB7v7bDZzz2fPTzh7cMl1/st5bZjc1jTLd2dyWF0NzPD8468E2ttcVdJh0LtzzXLHHJuqZ9SaO1BziuFpqvA/hshf9OJ7cXEs4sDzzYHADbRuIVNmDAEJSelqPDu3QX5pqc/uQNxsjRLdxuIN5HxjW4JTOQUCeO6Ru7vsRbB2fer4r8iUzaneoZ6AknqKvGVdDsTsrWyaVU/bU/GyYX2AsYnyhVyIZ7Koqx83A+k9zrk/8/en4Xs1m35fdhvzDlX8zzP2+zm6853Wp1SNZJKkqXIMkLECBfOhSPHBBKDIeA0oDsTCCGWcx8Q5Ca+FYZg0oATX8QXJkFBWAkiRrZkNVVW1ak6dfqv3c3bPs1aaza5GGPO9bxfnTqnRMyJE74Fm/3tb+/3adaac8wx/uM//v9opuNjajYnJUlTjF6Tc1pSo9C9CQ/O6z0tThPXmljV+10LDNUZskxMCiWhyeAUYYmaNGb0YMh59QrMWZMZW3+IQ44T3X1RM+PFUC/75Rd93tUdoQASO016bI26KREenVpTiCJRK0KY27pRvZ2sYoGAnzudUkq52Sa5pbDYdKkr6/du90b0v4nCzWnTVMKdFOap05hVViSbXHTKz+t0miJqCZmjPsv6+dD35rbjDRdw19HfaVLQVOmnWnQU+puJfAqMrwOHvufhRb8Wdw19sThSp98M6QxHyLeeOI1sXjuGt0UFRx/Uuil7IZtEiaKtFSFHC+uiPK5wtP23aIKSva6N8yQu994SyrQiWaXQPSbS5Dmmjjx7FdFP2uko3lM6hz+YX+BSNJGN67qoRUkzu581eeoeNcH3hgBTQCq30KQ8/EkTnLAXugfYvMmMbyNhH8FkYg57bx0a3Sipc01HsXUjoqLK/V3hdBNYQHlkEcSxeu+Z4DBWbPn2+XOLGRITbhGWJRCz482y42be8sO759zdbSmPYW01PhROd4E8VuqLFlIlV3qLte/svGluGudJWz1rQPdGsrhV90qyffdz0KhfuA7UJ4cr8tsefwQQTlNHXDSLXI4dy7Hj7bglL45u0uCHL1yHI0vx2u5bAsviybNXrZaqW+LAu0LnIze+tB7wOa/HGeok7fcaEHWDpN4ho8eLkEYVZvSGQBVBN6MtIH8U0q6SJmXtl6askvOobEBvHnFL8cTsVW4+KbqWi0Gu54mcHZoUbU/E7FqQCi4TQ2YJavuQOyEO1f9LVsjZTBDlNOtCSNkOHCOQ5jXhrPY0aVj1krK37l5ekye/KMKUvfa+uwfBxUB/K/QPGlBczKq6fHfC3+xVa8k7spGGZVIOTs5uJQfXanXJhoKtqFFJmhzGx04tNkysrXghWfvUn2g8mQarT5YgRzUqrj9TzUHBqsElIYsHa+NJ1paJahoJLJHhbWS4UY++aQgcdqY15FbCdG0r5l6r5Df7LXfHkfvHDWnfEe493aM+37gRxptMf7cQ3u7ZfbJh2QqbN4nh7YQ7RnK/1cTPjIydHaQlRuS0ejxVcjWoDQcF8kPHfvIcHgc+9s8YxpmvPbvDu8z9NHKYO7VwyMLjaWB/t8E/aNszmKilW4r669065imw2cxamFjbrXIbyllRAbR2k5+37QBzUV3t3WlDt9fWy7Jxa+Kb1mxsNRy197BqtdRgvkQNtjG3QqPsdQ26GXCe5RkUn7UFVdW5Kx+jfs4aJJfIH7C8WDTuhKMhQXZw94+ltf8lGrImpVXmLqHt39PcyOSSUkueNChnkyMwVIhMKVmTkzNeW+NszqrjVJaFMk1IydB1SBwoPeT
gcHMkHBdK50kb9YDzcfVuA7SSrvc5daolVtH3KemeM4/CpnPlaIdPdQAAdLihOjuItt1icSQj+FaFbi04TVTRWTJg1kgS12dZE0s/Jfqbntk0wrpHtXzpHqHfWzE0qfiif5yQXBjfjqSN42E/khbPMK/cR54UY1lV+CdHf2dt5F7YvCps3mb626hK8aeo93HXqTm4hdUcHA7jySxZi4GTLnwlbRecSUGkXprZcQk6nCFJk4/qguCnDCfP58dL5Gj8qxb7ldTf7bUVK0lbhrKpsZ1WPGhcw3TRLO55mvfbOfpanKhkSBLSsRZxMNwk+ptJzdODo+8c3d7rEMJkWoedw0VD9I0r5ObEcLNQXMfpXc9pY8KutjWl8g8FNEPU2FE7HopGFV2XSZGubJSejw7P+OThktvbHRjJfbg1pHCvSvxpXkVkcYVcwPtC8pmCb9zFJ21c67Q8adU5h3pROJiLxrCz5O5nXb9wBOonb54xvPFKAgT2syOL00A3adJxt99QZt/gfGbH7+7f4/XpgmPsmKeOdAiw1ORj1UM5LWreixSza/lCtWxBdyWVlXaApx7mC2eVg0r5z1dWmabVnkSVjHXRpkLzyKsGogDzlVdF9eLo3cRSPA/LyMOiXI0qVOZ9pnFajXzatIlcUWVicY2zpIbLkanvmlZOGoVl4/A7fZz+5E3LybgGswbt7JVwWSeW6nulAZaN4Ced6kmDkDaYynFphMR6r8KkG2DzWu9bOK52LUW0veIe1a6CoTP/J/v8URo9q6GCbYGviFhrB1WNpkUtHMqQiVvjQ0xCGh1+8saNsZd1rP6CwYG1VYq4tfKvH8FaIk84MU602t/vQRx93zF82Jnnkye9YyRpc/xODpuSs48cC3f3G9XZ2ntF5e6E/lZh5PlCGG4j4e0e3t6x/fwZ86VnfDUT3uyRecFfD0gpJLH1GJS8WaoRcm1JVWSsFC4+jfSPnuPLwHythPL4POJ3qtUFEM0KQ0Sf1+nYw12n0PhB+Wb9fcIfE/2D0N86joeOcaMEdCqnsNDMsFPn6Dg7pHtRdNCEMdWaprRqvvKQ3KL6ZasOEK16z8Fa4zZQIEXXhMSkopKp+q4V/KOne9ADowS1wkm9x4eMC1kVkUWeDCxUrpaiQGt4qK2mOuWUeyXnu0V/vn/QdeRnRT5rC7l5vYU1m6z8PIlJ90JNnIomUiVGxDvwHZVoK4Z6NfTDa7Av86KecTnDMKzIbsrMLzdMz9Rk20+F/i6u+6cUZJopx5Mqnwev39nEGOuzUH0efd825WWfR1JNGO0eJcCr0GFw2VwIak9XEdls6yQ3wUfw95XrVnQyuK4X4yW6KbF5BZK0daV+eboewzHhZkWMZUnIcUKcY7hTztP9mw3Fa7I13BWGG5tutNa8digy/pQYbxzxoGth+yYxvFkI9yfkMCExqSZR60I4shPECXiNETpNd5ZsJkV5arKwbMX8TrUo1U3AGV+pTtzB7WmDf3SEAy1+SdH71T3AcKuxa9k50kb9Cs9J2MU7LTjOgILzaTOXtOgpsQZcTaIqTy6cMsPNpEKv84KI0Dmh2/eGdGXoPHkQ8iIgjiJB0cyYCQ8To0B375mfrZO2qSZJyeJ5kXbO5oDe16DghE5hqnPIEJQf+3a/5e52C48d4cGSpzeF7euEnwrd3rVkrepaZYGui0rt6b2KSVeie2vZ0wYbpJLXS0FQgU0JXvdqQ9N/GtS+Xr/QBEoKLKfA7lFJ2C5p0lTsL2XWRZmMZyFV2M1O2GPs+OTmivL5QLB/2wh1VjUd9qNNK8hT5r09vEaYllq9l5YYpNFsRgZtN8WdcHph6EInzSAz9W71sgNz+BbSGHBjpz52Oz1UALx9/sVE28rskKhcHTEYRoo+TFWcFSgO6RJT8sTkCabQrj6IWcd3u95k6tWaALzxYTr85JGUlVhuGX4ahGe7Iy83Bz5xhTezJ24dLgh+Fuak3I806K/OgSxrFVp97fxJk5fcCceXjse
XGuhr0Lv4OCOzupgX79s9qwGnJXCsz6VVa04DTOVvlc4Rd17VY10hXM0s86D3J2vSBwpHZ1O/LRuYstDfO6T4FnRrMlYFHyUESqdJkao4rxWJlp0FNh1l1G1SEYoYvZFyDS2YRQme6aw6vOtwSeFyf9LkZLDkxC2e/m5GHo+UaaLbRxDwhxk5TRCTcmkABq8Th8HrxJhNdH3Rpb04lXZ4/Krj8ZsZns88e7bn3d2ejHAwvztVFC/4kLWFZy0jihHHj5lw0vdwS6F/KMjeIy9hO8wcdtHappWDtlp2qPBk0WTIEgqdUNLEdLnsOD33ZqRt+7AU3GFWhDRtNBgmbH2VNnwg1XvQCZLUnHa5EOgz/o1Wy/4E+UFFVtOlwAZ8p96TqcfEHSGObn3Gdi+LWfmlAaYXRaVLagtvEUM+V6Q4DTpOr4eY2NSXCUeaG0C1sNFWQK1oz36HVgXnALJJ9kfXAn0afSssSi7G20sU50jbTg1hB4eftZ0SDsovirugCer9tFbRjbOVKd1ZwVZqHJM2QVanwpwpX7e4WfOyPvNsPBIkM4aFu2lk70Ydxw/ZvOSE5dIrQm52Wmkwf7ymGFkacu9OkeE2qwflopO8/X2iv511b8yGHpeih/12pNsnxrcwvVK5me5eE8juftICslMldEdQ+ZcjjG8twe+F7j4RHiZkf1LFdvua7rjgOw+oPVb2agklwZGA6coTt2I8vPV8yx6mFzbpt1eJFpXcyM0dQgraOt1EpiU08+xzvmnxqtje3c0g0F2pf2yNmRXJyRs1ItZuhD5DnRA967bEtfhtlA60PSyx4O9Pa9IAuNOiZP+yrpk6HZ284JwoV+90QpZIlwq7T3umlyvy1roL2FI33mPuNMF0i8efVETV2ZrMncb4h9PA/cOGsg8GoqyG3P29tsf9sWvtzBxgLmqqDSrOnceoRu0nQYoQoxBPmpy3CcVp9bprHLekrXnNIUrrVvxh18+e0fvy+vL68vry+vL68vry+vL68voD1y+2hZehJB39Dmbf0Mic2ak1g40jSp9ADNruCjF77o4j093IeOto2hvFSHWGRE0nz9yHte95flklUT2UmoCW/V3c6Eh6GpWwl3v9M2jFSRbiVnlR8zWkwdoSoZi9iCcMHSVmlgth6zLPB5VemHPglNQ5muiQWd+jFCFvCilWHShrXTjBhcgcFYFKdaqwiBKwS9XgUPg9dYKYuXE4eOsvO+1XzwuyqHjptlt41h+Yt579Rc8y9hRXrC1mjymg1cCsWX+dDPNTUmE4q6AO7/fEnd73cIL+tjDeZtwxagtsXsCLehpW9Edg0y3cH8aVrO60RZW9tnHEibWYCmnwnJ452Mz0ITJeRd4mIaahEdndolWFWqGY1pI4To8rfB5OCueGsVeUB9R/adex7Ly2MZbaMnJwuYXtSN6NLNcDqVOEqwg6IuvXdqtWe9LIyZLA751ySCYxn7RCt09aUZbOqj674ZXvErPet5hwi/G0YjCtHww1cRCCtklSnfqDZRe4/WVP/nMP/MrLt4w+4kQn7D67v2TTL/RBSeQJbQUDlMl6LXKGntXK03SxKtej84kwLsRtrwiv7R11X/fqS2i2DGnU1oLkM8uJjVNj7qyI0Mm8H0GR4OnSs1yq6bX1+4ijIwxVJfIslAye+RrckJCsaxipfEahjt+GkDltMvngST24AeJiUiVmq1L6TNzo/slHYXmRVGvmoFyccNJ74qeK5KxtgCKKWE9XjjRAvOzxD71ZpEAeOtwStaIV4x+a4bJkQzy6juVSGC8mptCxXAZ40DgVd54ydEjf4dKgz8V79SELiqT4Y6K7n5XDuQ1MzzuzZim4ZaOIz2OgVNFKZyKyrv5SZGS+1PUfN5CHrIhBrHFGybrN5sQXnvVHrroTAB/7a24etkyzx3VqwzVfi/JtBkPWO3QNbJQED1b5NxNxs/Ux0ro3TbaGDpggsC6ArO15Q079CchC/5jp72bcw0nj+3ZATovuMUN
CZUn4TWDZBW3915H21rbRFr6/B1k6RbGCWpTEjdfn/UxYLjCUsqwinRthvi7kvpB7lVPQbojHOyOyL+rR50LhNHf6XU8qwFuc/rcUm8i9P1G80B16JOkartYuuVcT9zSotlM+6j52ySY4J+VaORvEUL6n3T+7F8qxNJmLXOUUlGpAod0b9wVpGYkZmRQ5liXSP+xwU6cdAFnRtPZToghZ3NaWrvJnKeC9wwVH6ZVacJqVWC9RuXb+ZJZPJvZc0fGSlFdXz49TFrwUvMkbHEdP2jqz2lHaRxGbIvbqXalnUkVmrfPhQJagf56fShF98fojJ1Ai4oG/D3xUSvmrIvIC+A+AbwE/AP71UsrNz3yNggphpUpws2Qpr0RqHEow9qUJupGE77x9l5uPr5HF+E7RJkOMANradFG0BRilyRa4VHUeaO9ZSXX6P+0QH2C5MoHLbDyMzUoWzD0sO8e8E+brTN7oBE/uzDxz60iDh9yxXMLVcOJFt2fjFz5aNjwuwyoLU4maQBmSHt5B23Eq4aD/cI6q8F09yfRZqCCmti4rWVM/39oawaaE2gPUCToKj8ugo8c+M9vBWRd99cDDNvBwl83PSzkIqmcCLjj80jG+KmxfJfqHRQODd9oqS0k5H8E1vgAoP2ITFmJ0qrhc23deW3jnkxI1wM/XQr9d6Mxw2ofMMmTSRvTX3oL0KOQxQZ+VNz9q+yttNJkpTujGHj/0yr0YeuJWlc1rIMxjh5tUyoDgkay8CTV1NZJoUXG+sNhEC+uUUrCN7ienpquFdZJpyrgl4We/EnrrvrCrVIFFEVRPKeFSIHbGFXACfWfmsZXTY0MEBaZPt/zup1uM8oGbhDwWlnePDMNCMM/JnF3bW5UV3oRmW9AUvBHzl+jJRQghM3sNNJpoYx52ThW8naOMgWWjz0O1vwS6wLLV6TAfrV3emznzVUfu9TnnUYNA5c0k01iTROOrEQLLNhB3xp1p7dPVg0yiSpykyr8wLoaS1GVtBwYPToUF9e8d7uTo3zrCSf/9sjWOG5WfpL591fu4OG2hxy3MV4HublCT117IY8BNQSUI0N6MJlHos/Secrllvi6MpvGWRiUBp1GYjp7t5RY/rYFc+p647UmjJv7zdeDxq70mPnavhvuMn4XlSv0iw6Qk89IHTSLm2KZZCY44CNPzM7X1GguM9tCm+upazZCLEn4zwkU3sR1nYvT0feS4CyyTJsLFm3jjWLlxJlZsyZPMllDGbOrpNN2zNDri0uGCw82dEtCnhFs0thS3DlR4IEwZV1vhQBm1JywW9CvX0bmEjH6VTDknDDuNv2VelJo2JZwX8qYjDWr0nUYt1vxRD/gaw+JWWK4TJej3jSPA2up1lsDMF45xMzNPQb0l632uxP3FBEcPJ0QEf9whuZC3mTg50l4LEy/Ktypjsmdfp0mt+JkSMmvcEfuFSPNFLJtOeXvJYlJNqqSuTy2k/FGJ83USr5G/bQAg97KexZVnJK0D2HhI2dwWshcVp07eQAiHjAkvhZPFpBJqflDpCusz8jMU0Uleip59RAUXBHCu4PtMGs1QOjvjMa5FTzi5dtZItqSRYIWv6bnxs69/FgTqfwr8NnBlf/7rwN8upfwNEfnr9ud/+2e+QqGREqvRLAJNAdgp8kFUTY84Fh3VdUWf5eVCjsKy1FF1WUm0WV9TFsdyCriTMxJ5VQ0+S1icBn9JmMBh7fOqynR0DrKpu3Za6eeg+k3LVjSLvoj43UI6BPKkhrPLzhEulTRdhXlfzRd4KexjTy7CaTZhuGgEZPveJZTGs0CUf6HxRYxGIaahqCszRYfPq4WEWrHYjyfdOG5O2tevJM2lsF/6ZnBbr6q4DCvpPhxEx7Ln3HhiT66cCYfMiN7j1DnSZUfcOjavZoLYQT8E4ujapspD5hg74qmjqwLXFRkUS3ajjZWapUfcwNXuxPWg1e5+7FmGTpXNN8Ky0yoqboBQs6+Vt6GbWQ9ignFfgidve+ZLz7K
zEeHoCL0DAnKKuMMJZkGGVbeMoryQvLiWxOu9PT/IdSKmBNoG1SmZrJV0QQOB9+D903FeQ0Vy53BHRf3CUXWhWoALSvYvHXCyx+FhfFsIB9UmW64gjsU4YYkUPaeiticiRSdfs3K36unYpkAtiFbH9HBwzIsu6C4knfWwxDcNioCWzquh9hJIm045QXXIode/n66F0wtp+3C40feqU0Z1zzQLpmU9SJsNhHPQBS1WrhPelzUJd9VgWxPHZLIe3oQwVVjVPnu1ETE0uvQFfCGJHkan3inK2JVmWFuOgeGz0CpXFz1+LpZAKVodN0Lard+/mCCvlKCHj1f9J4lJEzhRI/I0FttWDgkrqXu5gHTV4w6jShIkTexLcHZAK/o1X+u96/aaPIVjac/URSPp9/pscOCL6h3lIKSNIlbLVSENRYm95azIMz5L/X/hCDJ5Xp0u6FxiG2Yuu4nn2yNL8nQ+MV90LEmRyRYyOh1WCVuPmz3epmBbMXHWEWiHpiUB7rQoQltlIbxT3p2AP6moag7gJutqVCsO7/U5n1+GsqyWKzZAYlZE9bPUzyY1TC0DYVDeaE3I1ZVBY03xRp7eRfu+3pD1Kr2ihW90nmUrvNgd+PikD67yzorxBv2iNkBV3b9KmRRTLU+9oYJOeXuqu2TnXRXhjbl1eLRgcBofa9cn5qdJ1dhrkh1cI2fn3vb9/YKbI7lXm7MvTqf5U8EfzSanzhPYvmzJUDSOlNfzbtlZEmXyOKUUpiW0CeFzvnKN4VTRUDvbVdUe4uiR2THPwR6fqD4k6H3vNKGtThrLhbR4UBPW5oBB0KEY06f6WdcfKYESka8B/23gfwX8z+x//2vAX7H//veBv8PPSaDaNJxgxFCQ7NbyBlBtJzEESj+86xMXw4Q8K+xPPYdDsCkYmtedr0nSrAeMN9VbFesrphxs2h5PWncrOlICYDdbscWivnVON6cYwTr34MbIOC4ck1B8MF0irZjEMuKLMOGlcEwdcwqt8pczz7GcVMKhDJnU+yYq6SKU6Dgde/X6affH7tHJW2tIRdD6h9J82fwpa8CZEnKadCwzZ9wM96eBVFSLKUa/kuvt+dTktkMUSq6SAhW9y1axWcJ1fOFYvumbYq6fYHytwUhASdq9BoDcA2PmuHSUo28HH9h6KKxj71MCr17YxcOzzZFdNxGLZ9MvHPpE6b1C5YNoMWRWFkSnaFHdIC0BlEZoVq0ux2IJmJ8K6WT2D04UNZgcZTcyvTtyei7MlxAvC+M4s7hAGnrVWsm6doq1NyTrc6mojkoDqEaQJEXzKskeEdwp4nY2cmBK43q/M0yJsO/0vsekB1IlFZtPmVZUwvEd4fiNhf7ZxPPLA5fDhJfM7WnDzcOWkoUYdb+lxZnXnZhtS7VtsBFz08Vyk44670+Bh+PIdOpwhsSdTz2WINZuD8RdIG6sMp7X9mPc6iGtCabokEaGIkEtcwZ9fm62dd32bgFrfdRiIPWC30ZyPJfDsNc25eq0OFIWhqOS+MOhtPVQ934x53W997oPv/q1twBMMfB4HJgOHa7LZInEC4cUR5qEZWsHXlZ/xHiRmS8cy0Vo6vp6Xxxtw5zHw6os3nnSJhNM4DQPkA86WZRGJS+HISDTGq6rK0IcBJcKu49puloA86UKa4ZDNlQ86IHem+CmieViSIOUQtpA3iTcEpAolsha8nksqzwADv/oeJgGdv3M6BdFoiyOx+y0wCjahiyepjmnch9Gtq+JXWfeo05tmfxikiSTqa1X2YMzeZN6KdKioqouQPcYtbUUU/te+saGdjmnWm9S2lkgpbSprNoqJNuUZE3EsiZS/rEjjJ7u0bFcSNMUXKcx9baqfYx2NfQlhZrluFmLZe8y6RTWOFgT1KkQjqUNcxTvbJrTit12FljS3Wt89icjkJ81Hn7aGL6UAtHuRzJVftMnk5RhCIRTJpW1E+IOMzJFZNAYJJP5M04T7Pf0t8/oH4JSQaLNAdUJPJt+ayBJX8D
p2o4bFVhWOk/idOgpJw9R8AcTB33UboifTJqjaAertikphXBSWkxcgh7v0SnR/WjDS1HjWaXc6FkhYALZ5/6NudfY4VPRhO1nXH9UBOp/A/wvgMuz//d+KeUTfUblExF576f9oIj8NeCvAfS75+qvVTgTAjurDgq6sAV8SMShqC5bET56c81yOyJRGN46unvlJri5PNUombUV5k/aH+33eYUzZ4P5oyUEX0BW1iq3gCtIn/EhkyRocmUTRZpMlSaoeBo7G1nVXyrtX9j4hYxqPZ1SYL/0dlNov+fK1whZOSO1d+xQjStYTZPPbpXYhFf3aHo9D8mmC5zCtlGndc41dlxUJfK3kxowxsXTzWdKt4vCouEEUdbpROX15BZs6pUGYXqhVYVbaKJs4bBQp5x0pBxc0s3SbRZS1gm69bvUX4VzT6JS0OqogJdMcFpReZfxXdI2Xq9cG7cIOehzw/RHqtq3s8SgjtUXr8ldHrz15PXhR5vAdEshjYHcXTK9M7B/z6uh8EWhPFu42p64SVuSVzjaRWnJU5ssm8FVPZa5Kj5nuy/QPONEA3qFjEmJUkxh2A4MNyWCQBVILd6t7VurOtMgnH7lxL/xZ/4+73TqwffJfM3vPbzHce7Up8rDMgf1jVqctaUsabb/LkEovWtKvS6pVx2Tiq/mJIS0om1p1Gq0yUF0Kj4bd7Q2o+4tfSbdvR7I2WyI3CicnD6TZacHhLPvdW4To9/TlIHFrDq6yHQY1wIgWQU+6wGM7ZvqPhCm0uLEuf2PBtfatijMyfP2bkd+O+BO2qpFwCdMFVuTsTWhUPSrbLJyJHe+6ZXlTsVenaGLxUQ1z6fQ8hCQXVRrqsXr/3aQxuqzaahpvcfBt/Z0OFnSaDyc40vP6aWY9AJAoAShN26HHhBOX9PuGaahBkAoRrGwGHrSRLZ/NIXzpSbAqk23JM/NtOW6P1GKEFzmenNingNTFDJec8di+kjD2WeYlfsiVkhIUrmCcNBpYkQLUtAW3rnnpzO9Md1r2TTqaIgNXu9ZaznV+3d+2T4swSHR66lfpzOX2BJcbS/GhkqFU6LbB7pHaZO4de9rsVDAa6JQrXXscGtofxqF3iVte9ePo3kdbjFkZcmcWyT5hdataYg4mGr+uWAk654ofzB2t3tS/zs4E2hNTf/OLYUcVpSudgTOhZlxTu9P0gnDik62946o7En9HPW4d8VkHrTlScG+m6jTQdbJuRrfyLTiWr9TMaHQbFIVKmzqFqdc4iw6ETy55udZjcsl25kb6r61wrsm+8b30+Sbn5qAnl8/N4ESkb8KfF5K+Qci8ld+3r//4lVK+ZvA3wTYvfP1Uj3l6gOhQnysSJD+oFCCnUpZWI4d/sHrQX1vInfNP6suHFXDLrhmZusmk6MvZwKa1j5s46d1xLOABOvzCriQcb6qfhccmjxp4uPIRU106XVcf/Xb0i9yt4w8zCMZYYqBw9wxn4IG5YMeJMX0U1a9GjTw2+9FxBICuzl2cPq9W9Gn+6Sj8BmKC2vFVpWrAZyaIWdLXHJVa80aBKp7exVbK25ttXxxEdXv52JheKsWIsPtoqaokykMW0sq9U7JkakQtzCMC4epQxZpQaA6fj9Riq1kzs6E2ez7X3QTF/3MfTeySGlLpgmzJTHvLCNSLtqGcjGTZ0eVSChdMP6THvJS+TyDtpTyoOJxaRDCVEiPStSfF8dh6pn2Pd3JNMjkCwhU0QO7eDHfJWuxlmKtLk/pu1VosVgiv0QN1iGsqvaghPIqnuV94y8AK5G7hzBEfnB4yf/z4Y+zn3rubrfIm16f44cndttJfaKK6HivtWtVasGSzcVQrU7wh0WRW+MUlWzJX6G12PxIs8soTsASqCYk6PXgzNtOkdukAweToU1tWfeoonBFDudV6LbaGxVLHgjKE+r7yFR9v2q7vgpfntR4tvRl5aBZxapE3tTWdUWESUIBXr+5xH8yMByENKqkgZuliRW6+fz9VjFVGRK579T
c1Q7UKm2gNLOkLVM5O4SA3Dtclznte8oxEM7oBnlQxEaJ3k4R++CVG7Q1HacEp436NS6XhTRkukeHm7UF6BfVSsteiDtH6gQ/uRb7wJ7hUknZtr8nFYUNR1VZr6r+OSg6tUTPEj0ipZmKV42xrkvMfVYtNNNLcxGTatGE309uTWqMU6NFrd3bSVEYf4wr6TydxbS6B2LWNreXZm1SworUrvY5pf2/mtBrm85e8wkPSiCcHY/WitShHLUw6R7s3i26dpWDgFrKmJ1J9V+scS17gUFbXUOIDX2q8gbNmilbTHBCa3PHonHzLEGqQcIdne6b5dyejDW2nNuV1CsrEsRkhtd9R9701AGS5r6wpBabniAywSOls6GWOrhknydaUUtphX8B7e7UP1cyfEWDDCVSZw55IuBaQRC36HOq6KSbs51ZGT854knbutiaC0dL0GboHvX+ki3O2yOrwEn9ffXmPGuB/iHXHwWB+svAf0dE/hVgBK5E5H8PfCYiXzH06SvA5z/3lQpmzGleb4E2Edf6pUUos2MpwQiugCtcXJ44Don4aqR7FCIG1duhW5ElP+nDCHu1APFT5dPY+5+by5oYXd1MFDSTDxnxRZMj+9yqi6K8gSJQZsdk2jr1FGsQYCfkoXDdnZhz4GEecKa/U5JbuTKJ9oCc1/shood5RRiKnK8+rIJZCXsuoRXYXjkCJUgLNnXz6X/rd4+LVzFNn9WxviaO1r6rwas76Ou21ppt7iI16RXCIXH1o8Lw8aOSNrtA3qn3nwBND2hQ7lbcFa66yM39tiGRwBOrlIbOYIdmXpOqXISYPbsw04fEwUw4awIkWZDZ4U9OOVzHvCZl9ZnXe+KdJXf6nrW1EAehaYXkQveQ8Cdte+RBmPaex3FEHgJ+sknKOp1nBOUixoFyCrv7ueCmDDFTetvg3tDFUmCacXOnn6vvWuWMQ6H2upkralVRLOr72vTU6w1/75NfZXyl04xbq0bnZ4VlccxLICenuixF2npSRPi8olVujJsVwlf0TEizp8xuNTE+85DUqlYRl2WnfKCqAaWkW8/0vBCvEv6gRVE46j0PJ0V0/IUp3D8ql6d/yHT7qEEyrIdh6bVVINjBYQhTDbR+Vp0yPysPwk9P//4Jn89aNFKXmStsdhOnDwvRF8bNrEhzFh4PI6efbOkeKsldieYuKt3Ah9z4KXUtp8HhzJBcl56olYohaWXoiBuPc1HFHu0AdanGRFP+7xwMPcU58tgTR03up2d6r/OgQoSSVPy0v7Uk14Aa1bFzLFtn3LT6/PVztniUzhLrip5OBX9ap3B9p/y/w0ntU7ou8TgPHM2G47h0HA8D5eibQXUJSryXBEvSPeZPjtB5m/Sy9W3ogouF7jHiDzPOLKFaLDMRTkk1NtXir1jrX3lyjTNzlky1QQTqQblOKT/hYqXU1oYKk3n9c7JBkGOm269cwbo+W7JkHEmsuNJJaUWhctJEF0Dm1f2h8lidGTw3RCxn3Bzt3FQydN1/yuctxvNbgQDsvtQ9o4r8FcVU66HmxFDthcAEJVUjSp0bdEqRbOh55YadTU6WaaK7m+ieBdLgmvK8Gw3NqTYp2Hlf43HWuKttPCtkkuAmaSbAzjok4agDNeeJtDOETuwM8SdBJo2N59Y22t5UEVyXCnNU+kClW1QOsT87lxvI83Oun5tAlVL+HeDf0ecgfwX4n5dS/gci8r8G/k3gb9jv/9HPe606kaTjiBlxvh0EFQ1xsZBPTnlQi8KCeXQsi45b15Hjzto09fXUuqOYRYz1TI+qXFsrRP1C/FRYrk4IVFKpsyoKYIb1588/rxRNjPpsZEJINsWSNpln3YF96pliYM4e7zJhiErkhJV/kQVx2SBTa8uYC3xypd2HtjFyza4L3T6vnI6cm9R+DTglmFdclcqfPdPitEo6+bZA1XerEJNYW6J6fZ1dFfiwxEiWjCsQX24osiWNSprcfKr+awRr56jpOMtlZghR/clKTdzODra
zceJyRvyUAnP2rRX69rQlF2lTFsqnsH981v5pLbwlryRMUXi2OHWujzsTjCsCI/hNhXyFbq++WPFKRRvnK8gXsd2G3BV8XYe1v+4x42RFLdo48aKHRPGuSTpQyatLRE61wvPGjTrbNzkrGuldQ2Ia/8gm8XJPG73XaSAbpR4KZZuQIkx7PfA0uJ/JLliwVRFV16pdjERZq8k4GSxu5G4/Y+0LaRyQ3HudSNupdVCatW2TRke6yIzvHImLJ971OBvBXnarhIZO0ZVGolVz3vONB3kILBfQoaPI50KvfsqEya0oUXmKPtWioN5/7NHjDAEbEn/xqz+itwX1sIy8nbY4KWy6yEePPcUFLZKCkE4qClw6Q6uH2tY0gd1BkJ2imX7yeBNIdRUVcY64dQxDJJ661lqoBUUbCXfWaiqePCrHLA12/y2Kh72juxP6O/0+uYfuoK+hhHHaGHwaFW2qBUulOKTZtUES3T+KPoVTwh0jkjSBcgniopO8MXoeTgPzEvA+82Jz4OE4cDh5Si4qT+MgbTXhVr+3omtiDPiY1TnB0JJwzG3iqnhHHjrdA0UPYvHliRWVTomllojkba/TwKD37HS2b2qLBtbWXvCN49SuKq9ggx614KkJRF1nqaol273MXVEaRqd+qGkwZNbWcF50ujVeZmI2HmKL65Y8L7lN69Xv0fZkLXYtgYo2YSzLmnQ3BKWiWGVNYNq69+h3ynqPcE75stZedtsO70TPlNquE1kTrVqE5Awx4u6PdPsNadAiNI2ltSubLZPj7Aw5S6pNFLjaA2E8r3P0WZ8XjcPajMAr1SMWk4MwFPUkhEft0CjKDv2jnQPmy1onldfzorROiDRPvD/IXTy/3M/82599/Q3gXxaR3wP+Zfvzl9eX15fXl9eX15fXl9eX1//fX/9MQpqllL+DTttRSnkD/MY/6xv6aYXI9H+sLRrNBDULzV5hOLcALrA8KHfImXilM95O5W2IjW92e9WC6R/XKkZisept7a83nYtc+RuQN4Vhs5Cz4L2Slp3LPFYjxLTWKLI4pikwjos61Zv0QPPeE/j+/iU/vHtOKUIpoibI+45+klY56yi5kKI393drOZw0iy7O2klLJQyDRIUmq+mrLAp/FzN9bBYJsLZ+GtcAWJQH445OYXpDssIhtym+1rLzWrXl4IzDwMrB8cLpZceyc81Hqzso+RnvjBgsLJfWztklhhDJk6ebVgJmbem20eJaWRhaJBFOMfDgRjZhaQ7w2SbJaqti1VuqkxuZcIz4k/Ezqj+FQftpEOJFPnuuSnRvPBAJzJeOw7uO4weF5YOZP/6NzzkuHR/nZ8Tc6+h00rVaCY+VJ9SIy1NpU0+l19ZhCcplkaHX6s/0VFqr4vxXg9qtErR2dPHFWmSw7GD4+iPXuyOdy1z0k5J8s+f2OHLz5lLJlVIaTO7NlFjXGo1nUNuCcI56gvpZrdIZ4aitg9YucEoOjjsoF5G0OOKish/L1kGC09sRmRx+0TH9tNHpr8oV0QGMKryoBGxJKpPhMyo/0attR89apdb1o8/Z9vkiUD/nrFzIaizdLEG8ktZKB3QZFwpBMjfzllfHC24PGx72I9h6k73XZx3XWJAN0XCuNFmHKvSbBpUsaURjWMUj7fdl4xi7yP4h4M4mdMnKgYqj7qWGmAVnunP6HDafWtyY9R6kQdHSYjE0e5BiiOtWEcplq8MumE6XZNM0GlybLPXGi/Qn5f2IoahiLfcSHdkViI4TkJIjJeEYO5wN4lR0Radi6+fHxGGlxRdd05UsT4sFTafpfALPOei7tk4lZcLB9nhKFLdKJxTTMioVcahcMuO36OBGbkRrKmKT0hpHz3laWUf//SkSjkpyquPvRVCLHCmK8HfKjSXa+g6QRSgRyiax5MqNs5cX2tBFM731HlkiJedGP6nWLGRDu6PKzvhTFc400+ZzEnq9H36NdVIt05bU2oWlkyf/nozynyoaVxH8rPIBhADDQBm7xieqPOB
KDdHpxKIc0a728nR4p8rAnE9qrjpYde/W/V2nBmkdhXq5lHXS9lFok7w1riVdxyp5ULlhrvFVm7lxPlt3s06Cy3+dvPAAqlGmttYybcSxMbr038lSSWQGuzrMI8gCUoY6jVBbWCupXDeuklptogiDPU2VtQor1qs4KH3mxeUebwc06Cjzk6sYVLoIafHkPumknsGUrZ3WZR6WUXWfbB/Oc8A9qnN8f6+f9fFb+pqlclEqqbpN90DpC2WbmzKrJgwaDONRuQRYa0A3qo501iSqOEHOwEa3jeTFU+Y1KdNAmQC/TjwU291tYmO9B1IKKTjmS9cOBr8UukeDPZ0GKTWO1OTSjwlXn7NdbXJqOQ9iaNKgb4WLMEfPQTpyEcaw6IFjff3GB1v0vtT75ycT6WtTPrXvpUExd1DGTEmWwC5GaHSwbB3TFczXohyi68TFsyO/evU5n54u+Xy4YBmCchCiqLCvcfaKq2ry9h0zOt3mpQWWlhzVMW4RC+q5EVYbF6rJFpzdt/oorD0TLwp/+Ws/5C9d/z65CA955Hf3H/Dd+3d42I8Uu/8lYyabdsALelia+XLlVaSgflUUmllpW/9G+g9T1oMxrZyN7JUk7Iak3emgXJ1lq3h5uNP1v1zqPc0bTRhKV5BZ/SGrF2MaVWXeWQGUM+RNx3LZEXcZ79bWeSXiJqettaYhY5+3Jej57JC0A1Py6k7gfOK79+/ww09ewkNHVTiXWXCT02SzcjNOFs+KJqXeZ5ZOEygHTW9GeYwZKY6ctKUlU92Mql9Wso5tVwJ8LS7iZVZSvgn0Su9ZLoMJdxbyFYxvNBlS7zmYXmSKA3/Um6C+gso3Wy70uXePdZintkNoLdkqZBlssCCcEm5eidxPlKat+IiLJ8+eHLI6Rpy61irOQ6b01oKOxhN0GsfD0eMP2jYuqORCtPaiP+rhLrP5tFWdpkpPqG21+t+laMJiSVIjlDuHlLNWjJOW+BSRp627tsHkScLQWlj1r6MKDJP1vrqN0lGyh81m5khP8sVa7EXvg2o06j3uk/pTWoLRtLbOr9Z+y8hpWTlSS6VdZLL3GluN59eSpyocXWONr+H8LJ7UZKrGyC60e3xONaiaZe3eGQ+zGBVTvCNeDORB1vZo1u/sRPdLLMY5GhNVqiibyXwJUBbAJC/qmqoivXEEv3H4g9P7Z8kkZzylUluWxQYrnoAyK2gjUf32ukNuUiOVEyhGnPdztvVeeSF/+PWLTaBqUAMlDsasn96d9UtrNmgLBXSBxZdme/AYkPtV+6aN3J5pOxUzPqw99RJso2QzCsw69Sec96dBhsS2W/Bias0Ic/INFdPD3j5fghQdKTklZPc6yqmVVUH6zLvjI/laNZcOi1ZljxeBNPakSvqzxSJu3T01iFE0qZMkptQurWJZ3dKrSJs0t/Qq3kdGScmlUBpBQBAHrjur1Orr1eqhusgXrPI7e4amrVIXbjgWxpuofftTwu8ntU4wHRbEuGWlEHzShNTQEzD+ivX72/vUYYI67bfAae4oRTgtgWnpSKlyHGxisI7WVzsaf/6Zn04cVaHI1AsyJq2kF9X+CGebKm308/ujEB48+4uR79y/x+vHHcvtqHYtdV1DG4nNQZoOkgoqFvwp4Pe23WolXVEIb8/LOE51zStBlYb4ifdnP0tblzhIfWHJnr93921+8PiCm8OGmzcX+Leq28SzhNvNZHEqmWGSG0pyVWQpHNxKrEyl8WRSL1SLpVZp1ymmegBUno4XI1QnSla0o7qlcxWJF6qblq4S3cVMnD08dusey+s6qAamJa32EHkIzFeesklMS1hjhpHVJdNEWYsAQaUmiquFmB0g3q18jqQPsSRVWnfGKC99pruYudidmJbA8XGAz4d2EyTb926pvnJgdCxaRUbjVpTnyNmBlDvlltna1H/jTA1dk7Q6uFG8WoKovpyHokKOyVwaii8cvqJoT9kkut1CHzKn25EyB+LGiozRXA52hVyJu1ZoNKP
viuIan8TZBKucWW7hoHRKRMeVNh0rFFh0z16NEyKF2+jI6DBQCZl4USxJ1GRi2Qph5wj7YIcb5N41Zf200VjhTDxR9YuyHWxrUFK3A+M5DWq6XQQl5XtR8VxDEopNmKkBtMNtQgs5sqiceXGichlnsaOtG+eUn2m2LG7RZ1a1hPKYeediz32I3KYdaXIoKUrjXZ3U3uxm5vPi3LZyRUGexN2z6UCVObBkKZV1GKCGNkP8Gmm7JhPnEg5nKHcxYeE2lWrfW2zAqn33s+TqyTRj0C9UTPS4JjGVQ6druO4xEK9TmqXQkGQAXztPvd1IqbIC+jlddIRjWO+5FfH6QytQoFPyT3miDaGuhXpRHpSbsUJakzQ1iy+rxFJFqH/G9QtNoIpoq0E3HzTNpRVV1AXmVdOhEuzKDvxGRfOqs70GmWI/oyhJbcXV91rfd0UD2uXODgEj3w0bTZ4OS88pqvDlHM2Tp45UThWiF4hObWOgJYBqOQEihftlJBclm/c+sTiP3yTmayUTu0kaka2haQtWRdiEkyWdbqpJlH3es1Zh7jTYOO+aEGQjChY7dJNl3K6QJhXvbCP4rh46Kxl4lfO3/QIrodE2l4uZ8c3C8PqokxrUZyIQo463BqGMCpf3fWLJTpO4M6StTVmdV5fy9BnNk7I1g89Mp061jI6BMNVJDQ2K3jwGnenbSMr62Wxz64fUQJg76Edt2S5xIEXVF+o8uKzPIewtuUQ4jj3f9++Qb3v6t9UawIijZU1CK6k797VSF5ajp78LLbk/bwuUPpC2vQq3WQZYA9uTX1Xoz/ZKvXKA0hf+6ev3ufnsiv5TVcu+Ouh9iVshXgreF0KIzKD3zz7vk1/Qhj2Ks+CyURSBPqswZli/ayXPllbVasK8GSIlO1JfyJ22ld57745vX7/h7bSlFOFuGnl7t2PxQQ9I08lR2YQvHArYmuy0fSV9Ypm15VX3XLUmUQVwmhVEVdzWJEqeHihO1lFzV+hC4jfe+w43L37MdTjyPOx56R+5zxt+a/9V/tbm15juB5gd/uhMAkK0XVNoaHm9D9kq7NxDitI8H13nKKbuvlzALiTKRVTvv0PXVNxxNSbaAVWqvY1+/OHGcfzawtX7j1yME6clcHu3Qw6+IfiNkO5VWyp3hbjxhj5p0rKSmKW1Odp9F8C0gvBmx9Jr0de6XHXKyw7Cy2HmuOmZgHwIOgizKOrUBBWDtdG3Xm0zxIj5HYraFf3e/qieni4aknx20D+J82cILUYYz97VOs6eN6ZHpHqBefDaDenC0/gTQFJeUa461OJdS8Tb+xXrPBShbBLX/YneJY5Tz3HfkaPG2jwUStSW6leuHtjPPXk4S0bOwsKT5MneP1uh46yQl1zMbxLaMEj1DrTzsL1Ozk+J5KyxsBVoS7L15g0dcpp8dkGtd8Amgs+mvK2tXoIKJte1WdvateCq8THYYFYpsIyZZI4cbQo7FLKzwa/irCC18+lsWEVqu7V+Di8td9C1wJmskOVkVdKhdsDqRKJX0VdgJd3XNfRzrl94AhUv1e4gj90XWmgK/2fTichB77w3fsty39O/8fR32v7yE1RZgnP+Q73hyj+oKn52wNVF/4VkqnKjLjYT74x7frT0HOeOaQqkRTkPFbZ3sx2KBUhqqZKLKK/EWhsVxvz9V++wLJ5xXOh84nDqSYdAOPeAk6JPoSZgNokh2Q4pV2ATyReoyJjB4qVznBZBsmewxMeLBoVz7sx59ZKDJqKpGciWMwsBaZIAOO3VS+/bvVnnvOs9o3E11B5C+S+pd/R3M2GJdgCDGy35dVbdswa+2ttu1c0Zf0vfSH+Jebg5KVxdHrh/2BL3rHpEC+RYVkj+7ACosgv19UqL+mpt4lxW3zRgKdDtHf19afo3y1aTjdJl0uwIe02eGjxcYeMzDlTu9V66rId56tDk9ixRqVB43nTkweMPZ4lSKyrWg7+OUZ/zrOqaL2Pi8TDQvQ4MN2uLO27V3LT0Gj27kJS
rYqJxmvDpNIxyZWyfbrR1Fge10iljJoyROLvVg8/VwG+fN1um7SC4rGK4IZueFnzt8pa/9Ox7HHLPdw/v8d38DsO4kCuk/9BpMTCYSfJ0dq8sQcpeW17Opt5ytZKxrZ5NdypuFAFyVVPNfl59v2ydVbHFTj+3dJlNv/DfvfqHfODhwg3c5RMfJ89DVium3375AZ92l8xzYDl2pIOnu/PQFYLPxksDFm1dR5s8o+ikW+oF31nLyevhNV8X3h8mXpcrEE1wauIlYyINwUxpjQ/Wa/suXmhb0O099zdb7tki+0D/RtvRxWnRV/d+Ggtpk8EX0sY35E15WiYi6Fa0r8lydKp5J3aI5c6RxoL3+gzqlYzP+vpxB6inJ0C4XDROHgKlM8/KvT6UZRHCyas23ylqwr4V2782LZULHkheYxLBqUVVXRvWZchedA/VdXgWR9pU7xmvqfGwRL8bwdP83VgLRt2rZ4eptQhzVwtKWuz2m4STzJSeHq1FWrOFEgovxz2HpdP25tn7nCeE7Wpr1d7nDJ3KVfKmPob8NE4/aWednQUtUXRik65ayCma54kXXdMwczG3+yfVTbppL2CyIjZ9u5F2PjaaTUYTyKCT7c5ieezV9gyjkmRb72Sh4OFU1+Cqyh+OluDbZLV9s3bWNEFTE+rNvf69XzTxB0vuauxyZ8XVOd8Nfi76BP9faOHlUMdXz3ostYrtCmUoFoQKaSMN2ncHx/BGH07qBYk2/m69zeKF5I0XtIHlIHSHmgWroKHqcThc+QIxzDbAplv4xuYtsTiWrNIJy+xxtYdaNLnJWT+f6mk4xCfNnE0PCAHxhZSEEBKbfsE7HVud80Aw4UJnOiF0ubXwzjdQNmIrAt0Qycn0Z6IjlkB3Z55odT2LZd2WJLSXqtVYJ2y3E+Eic5o7jmlD9l7h8r6QB2dtJ104LhZIVl1mWQl1tnDT4FguA+ldrZjjqEF/96lju6hOSxoh9KmKQrMkD76spqXnC/ac7F71tgxV8F7RgWkJ5g8oa3XeqgxNwIuXZlzqg8LtRazyz+jBZciDd5k+JOKwMOnXZblQZMHNhWXnOLwvHL++8OyDB0QKN9M1xbkmDghQBUkBa5HQBBjLwQ6i2pIybpN4RyleRTt7bQ1ITZRa9S+raWpwCEEPNK/dCdUeg2630HWJ06ZwfF/fu/QZ2Ub6zUJ3JruhBF/M0mBNQJa94FL1ptJAE0chbUCGRNdHYhfWtlxfqz57Tub91ZCss0ebenBS+M7hA17PO25OWwCuNicuxgknhU/ic+LG2i9GxPazo/oYikE887Wu42UJ5LB6uuWwkurTWMibQjbOTTIdJLdkxKv/nBhRuCJVXZ+4HCZ6yXyaHG8X4cfL1/jt04fcLFs+Pl5zdxpZFk+KvnE1igd8YehUNyGHgnOKvC7GbVzRXbcikIJaHV0mgsvIo2/t+nr5PhG3Ra1ZOi1m5mshXifCixPuK5n8ZkP3WW+DARqj0qAJnJQ1uVyuCnIRcV6V9t0k5GjPUaz4rF5l1hrNfj14VAS06noVnl0ctfh3WUWCjSDc+aSmruJxvhC6xMXuxLzzTFZExuJposAnZ8KeQREpay+mqRgBf01UKqLhS23vaCsobvS+pimsHFc4K1bOYgxowtVpMpiHsB7GRn1YUZa1sKu6Umq940m9e8I1kgxdH/lg88DNtEWkgC+UvpBRqkkJ2kINkpmjxcKzGK5t4bULQFAD5NKHtQVsSVSxgvmJ5IUXy5Gc1ll1KAdVwT93kijBkbadvWZuPne5czbIod+1dNmCjcrLFFOF1xcpxIue6Zl6XeaO1VS4dRnsz72ule0wU4pw8ENDiqToHgpdIkVNUNM2W2GvBYifvGpk1aGpiiZj63SodBGloGhSDWKxrAQhnbXSde/WmKzJbXvWXa3I/utEIhe0IqyXbQgJmpDkEbhYtEiIjrSxjLEv5MvE8T2tFsOjo3sQM7wtNg2hL5kGneypEvo1cWrqp2fQa/V
4qwlURUeCZMYQGfrI3HUromHts5b4+LIG0cphEn1YzmU+fHHPs+HIVX/kdt6y73tFtd5q8GoozKBE9PPkqYrouS6T94H5YG2OqFYX4SD092qz0IjrWLuiV72ZkvMaEIpu1OAyz7dHTv3CdOwovqO63seNTeqMdnh5weViiUclu6AbvMByGXj4mmd6TvMt9Cehf3CkbY/ETOrB+0yKnpQd+1O/Qsl1wWJQe1PbNt5BSx7AGXoVo2N5q8JS1d+oCUA6rO2nSUF75nVyJIhNoq2vK6CJhdPJGZwqTy8XYlY1hfitI3/665/wJ64+ZR8H/l98i9ubHfm+a9olfoaQpJmLpqEQdxkZBCmOZStmS7FW9ZywSliRO23nltbeVvK/rVlr6akwaVDhxNnhsgaRzWbmm89vOD675yvbO97tH9n4hUt/4vVywd979S2W7Noan7tADkFNdBPktFpt1KBXK7g0FBWXlaJK/Ua6TL3+jNo+CM54EMWmV+VsMKR0cEod/+mn3+Tm5oJ+XHjnSgc2ALwUxGdKV8hZK864FZaToxqouqi8iOWicBUSp2PfeIFqqiot2apFGL6o6GRvLRtrDYWutmQEeoUrh3FhF2b+9uFX+Lu3f5wfPbzgFIOi0bOanMbJw0mNS72pk9cY4uTs2XnUbNwV0tzZ6VjXISjZxlD4PvP6cdc0bOqgTUMYg8Y1yXogzFfA5cIwLmz6hfkUyA+KgqUB4rOEv5pxUjjeDqu+zvXCxdURAR6HgRCUpxcHHYJoxYhX2EJjZWkxU2ryb62STRcb2CE1aAHPtkeu+xOvjjsOU8/+MDCJHjU5qdihJEvCB0MXBv1/cdA95KPGJFdtmsR88mbb7JXDY3sj9fW/dY9LqkmPIklPLLu8FlPZUPdiCIQAdcq1oDGzeNHisdRna4mEKLKsZty0ASbvM+/399xuN+znnuMw6NkgXv3xHKSLTHBJi8l6dpwj5PadSoHiPSKi2l+jnVsW82oR+GSSz4l2Nex5UUGnXPT51aDo9L6kwVFcryKVZw/zvDBt9zsVbeUKmlg4wHniLrBspRXFxbiBOvilZHI/KzeyAJ0zuocvZK8xt1IAsk2sI4UyJlJwSPLN+iWNOpmLQMlehYilJn1WzNhzq3zeOghyrgO2OhtY10Uq+GExNuiZJ19sV33h+sVP4VUl8uo0XZu4Dkou+C4TQmJ6GCBbNbnNvPjgDj6AaenYv92Q3nR090I+GJKwnI0K14VQaBMyjesUV+ivZueqWgxLdnw2XfGTx2e8PWzU2dkc69UiQoW2itOx4cp/ShJwJ9fcneOImgZLIbjExi/cAimrYN5hU8zIFQ1uAqUIPq7Ze52G8j7DLpJnD4tuZozzUVGAxiswFVm9B0+VVBuXKAbuT5qAlFIXGQ1xqAvJ1XtU72GDfrF7p95yy6UiGZKVbD3c6siopKwmuYtOH6bZsQSt3PXwodmtuMVUlPPZe5SCWKKjHn6eUoSuSyybhOwD/lgVx/U900jrp1d5iyr9r8/MndnM2LNKDl/hfWvFUjSoT88L+asnfvnDz/n1q4/56nDDjd/xfHvk/nFDMusd7bvrhJHP6z2l6OGejQicO2k2QkBTVH7S1quWE1bV0lzjtbXQJsms3epE75d3mX/pnd/hnxt/xNfDPfsS+Dhe8/vze3z3oDaV18OpkX4f/Uhy6/0+951S2F2jckV2nMu6FmUN+K1tWT9rqe10Nc1OyTVLBoqazO6PA9x35D7SeT1E7g4b9dlbTMojmhqx5ewrT09fpwSdytQ9dpZs2N/Vir5d5/cXaArUxYZJvMoX9CESXOI7hw/4z3/yTU63I4SMH5K2vZMgR68t3NnuWQQcLItjP/U6SVjXoM+UIZFHjyRFdyvik7oVBfFD5HDqiZcqGhps2ECKtsG6tBYJasJaGnfv1UfPNCZcRfIm8ezZnj/73sdchhOfnq74nc177B9H8uLY7GZ2g/b0HkJFGWl8tjqtpPcIG7phNeP
OINRJZ7WnygViUpHgEh1E4c1+S3CZzhLpfojaNrZ4WYZMRq1HVJB0jVkVUXXenqUVJM7QmYqsy5KQzhn5PCvq5ljFV+0w/KIMgv68/lwTpoylrd/KMZRSVOC2+ZCKQRkq5eBnbU1XCR0EwsFzjI5D7rnsTnS+3kysCDAqyTay8eqjqq1dW0e1mK/TZcX4RjbsUNwZF9cm7erAUT0z2v83QeU28JNze01NhgDU27DUxDBllcgYPamrPFUTAM7o72W9p+VsU1U/zVK5WPVXk5YB3wnLEph7tf8BLYL9SacISZb8ugKhqBeshb7G8zIF8tZy+8LVYkaNwVLvP21P6X2lFTUS1pShnptVMuGnvcf59fObfF9eX15fXl9eX15fXl9eX15fXk+uXywCZVlplSeQVNSi4ky8S1BvpSlpP78muWMXeT4eWZLnI2A/61jjOaFVR0rtdQzWzN2KPoFVU3Wao05pifFdkmefet7st+zvNhSzrvCTrP51WV/DzaJaSw4Ka9VZ4cM0eX7y6jmvxguebY/MyXOYek7H/izdpXG8cha6MxNcRSoUzry4ODF0kZicinHOgeVuwEUlPLlW5at9hquZc1krGYwMnrNw9zjiXNGK8azC16pMCdmVN6XTdlXUrjT+k46TFrpH2H6qz7Q7JsJRTR79YUEOE/39NYd9h0yOxUZYFe2o+kFAVm2ucwuaejmvfLecPEm0qr96fuDBbchz3xaImJ5PXWfruD1tLFUW81OyiR43wWTTfSk5clKEr/KaSiiULLw57Pgt/yE/CC+5X0Y+vbsk3fcqY2C8g0pWrn1/oFmgQIXDRd/bGQkyplXuAVpbQtdvfWbqvVWKoWdLxE0d5GG9RwmmJXBIAz9aXvBfHL/F947v8p379/jo5prTvsd3ma++c4uv8LmrEHxpLZmmCyO0lkgdyPCusBtmpqGj+OHJFJ6Ku+o99lPBHT3HqSPO6oXmFt2D2zDzztWeV0X48MUdL8c9nx8umWfP8jCATeT4g9DtTUTPzFFhnQ4rwBK9toO60jS36ih5qYhGyFrV1orU06b7VEBxFWd0LuNdYc4BJ4WxX+AZXG5PfHhxzz723B43vH51ScwdzouN+4uJAwvTFHBHaa7vIoWuj0w7r7z1YojcYrEpKDer6yNdFynPbCjljbe2Eroek97o7NfvN/RR+fp7T+kKF1974I+/eM2vXX7GN4Y3/Gh6Scwq0Dn3kaUEnFMO3JPWERYfK/pUQcm6FL5oCwIam7JKijgpLNHTj0ktVmbHw5sd+8eRfojkLFxsT/ihKH9xFOSqcDgMpHvlba3WKrT2UdroPXWzIrhSRSjPLFXqXm9Iu+i+cdGmuHJGemef3563F0rEUKvy9LVEkGIk8mrlkavUjbUBnbX0lrzq19m5E46FGD3f37/kcRk4zh1ldopcJuOiZRBXiNkQ1CTNGqlZl1T0KRfkpIihLEnPHUOaqgioIu0WSyt6ZeiTq9IEhh41jT1rc0mmWQvVuK6cMENGJ2utnU0MS/XFA+VVYfehrp16lhiiVs9LPwt50vU9R+0m5MXhTo7wqF0EAB8S3hei88TZNdP5eikSpdiXxNz4Yk+ENZ3Fs/MJ2y9c57p2zcql3sMqrFr9GX/G9QvnQMVNUaf2oKebJNGDHFAn69zgvWpaGreOz95e8Vm5ZtzMnI696okcTVDMEpeq/lvq6GNY2wdQGtlVanKAboa6gZ0UNt7GyBeHO3jCQZrKdZ3syF7h+NKdtZxa39gC0iEQj544BqZTpwTc6HQKbzofLVaT4VyFNKtMgS2InITT1NGHpC1BnyldYgmZIn4dhjh34Y5rQFD9q6wwsOgUVlx8m5B5+nzWjRJHZ5IAX1hAdXoBhZE3r4XhJj3hYVUfIUohHApy0o2Seo9sI7nPNvUlf+B92/WFRKo6vE+njmVWtn7pi/bG+9pqwjyYIJ7ENGVMddjep0H1JsaWJs9ieLPIOoHkZvUWS3HgzWPH6+FKn/+
k4+uDud0rOV+zteZhGItyeYKtm9oiaZMv9dk746Yp36oEt/bpqxRF5XmcidpJrN+HFiBi9PytT/8En99fcHyzwR09/a3ukY2D4weZ/VXPxaAKjs4ZR6iO+tvUShpEE+hi7R3T+/E+M/jEMCwcrfVTOQdSR6YN9nYLLHPQycsKvWd4b3zkly5e8/BypHeRT09XJvOh/8adnBHHaROvbjGOnyVrUjSxlToVKjRZhRykyRjkTrXYyNImIrMX7cK0NWcHQhGcV47dwzzwS+PnbL81c+2PvN/d8Uvd58x4fvP0df6P4Z/n1XhJio7lGBC7z8VG+muyWEQnPPsQVQ5FOqIEijgkq+5MOOpBFUJi7CKHw6BDKTX5b/wQI9W7YsTnzG6Y2XYLxw96nM/8yXc/45vbtwD83dtf5jtv3+Pmfkuxtkg9MXIRUjWAPm9r1PVkQyh1DTYtLlOxf3JIGVfFuaKt8D4RkyAhkyfPFB3iM0ffc707cjFOzNb26/vIaaMTX9V8Vo3YaRINX+yPVEmQ3Mk6em9roI7Qp8HjpvQkCcGSpzWG0eQYQD+r5IIrJpwZBckRnKPIF2JSRnmllWMjep4Ub4K+wM205fOHC6Y5QFQOKuks5mfhs9Nla2kCbUhJW5hKnC+1+DURzydcT1nBiOoQkXobnCGbBI61uazI1/ijRPBzMrk7Laisi7U2p4LrrJ062BSeaMJyPuVcOWE19rYpuAJyNogANFmMynGK2VEmbwKgqjqOK4RgVAEg+uocsRYOqYp11laukcWl2L4fVfPQ4bQArvIGtbAqNAHNNuleKlerJq/1+7nGzfrDrl+sjAHKZ0qDp40UZ2m8E0S5Q8sSwOtBGPZKTI5vB4bPPdN2JMzQ3Yu6K1cn9mXVgap99IqqtDeva6YU6wGv1zqVWbgYJ47bgTw7ynR24Mn5hi06IRe1ijh/HbeAzEK5ToyXEyEkdsOMSOFV8vA2UCfIapCoXI62SSrHxBfm+4HXd8oJq5Mr3d4xvhG6h3L2M7Y4zsjT4qV91yIQfOLZ1YFpCeyr9oV99zZFSA2klnA4nupu2M9UrhEOch0H9UJ3b/+uC80k0kXIk6MMouR7tx68P1V3Q9bEtjhBnB5GJzr4eEQqebLQgoFOU+jQQDJl7XM5gPbs6/9KwORJISPnQpEm/NYfQIoQ77ySORN0e9q0XeUhpdHQpzpggG1YI0H6R7cSGivSYwaeuNIOj5bsGvGaspIrz8evm2yArO8lUvjRxy/pftxzeSPNokeFOGG5EqbouRrPv79N7JxVYY1jauPyqmmliXvMjpzXg6txDOp1NiVUou3tmlgmCJL4E5uPOeWO3zl+hVfHC6YY6LrE3GfcXTCbFEN5oyFQkwa1OnUFihiW5Jq8iBTaMETxmmQ5I5Fnkylovxw6SQSUqJE9BN1Ej9PAnx5/zL9x9V2u3YbHfOJQkvH7P+L97a/xeBqIyXESo4w9rp+roQgZW7OJKSTKBpYspKK8u3gQ0kGfe4yeh+gpn424hGlLKZ8vRbfe67I+j85lXo57vv3tN9zMG25OW35w94Kbhy3z25HuzhMmnUpN20IeM2nnSNWEN8vKeytP1+1PvcoXEi1X2lTndnPi/jBaQgXbywnvMtOsWnqnfU9cPP2gRWRcwgogeT0AUwducE20VSYlgit6YTYc0QzOjSDeUDGR9bnbxK3USeuK6Nh+LU1FW99TknETjZPaYmZF4b8oq2Lx6g/swfo70LlEyvo9z8UcvQ0OZeD1YcdyCuu5U9dNvcVn+k8VdXNLWffp2evW4r5NLZ9LLtTLaeImZ6/dkD/bA64oouamhFsCy2Vo360VntCKj/rnUiemrZgRG7Kq30nfw34kafKUkkOWVQ5GuWTCMgeSz8oPqy4Ahm7V862Yu8i5PIO+d6GE3B5G4/c6sU6VDUE04r1x4irnNCl62Qru8+fwh1y/0ARKoEFyDRpOQLQkyhVydMxmiZGbjgNQsKkzUeRpXuHMKj5
ZD6j6ZnUMt5Ig1aLABNkMjhWDeyXBkpxWBtkZ9O6IOeCPvj3IqtQrBU2ezJ3en1bisoz6kJ69fOSXXrzmeX8kFkcyHaNPblRh0S2syU07xOrihtIXNuPCBMRDWIOe+WVpixJSFuJWN1m1SaiTd1K/qyVDUwy8c7Hn+Xjkx9FTZLMmr2E9rJuk/ZJbe6aNdNZRYDn7OcFGxQU3e0rnWwB0JkQqUcgnb20J1rbokhDv/iBkmqUhMjk7TnOnKFRf8CcloXZ76B9Umn++rO0TURuKWduCYjomUoMvQLbx2smRg8cNqpyt91DbgbWF7GyyRJL5D84rgb8mvtUTr7WN0cO71ElPu19NIbesKJ2iNkUh93kBEfwpri3NLJS6LoLpF5gYaa1cQ8hMiyMcxWweSjt4qyhjjJ5TDJyWQIwedxSSV6KmW6o3I0bWlpbklz4TfGJOnunUtXHr5gq/rOtWCjotZ+3Q8wL+09MVf7/8MT49XfKj++fcPW5wrhBCotssFHpNhs4OdfX3SkTxTTLBRccyhVYU1cMfWAsTb2dddPizSblKvK2HoBi6IlKYo2daAr8/vwd8zlIcvzt/m+8cPuCYe97OW75/85L7tztFKSa1UfJHRWpTcgRrO7kEc9KEJefaCy9gCFkarIU3wDwF8n3H9nPVG/Mnna5dDkLc+0YSrirPOK3kH5eB6+7EnAPff/VC1fEfHeOj0+kwR2sdlU6TMdVSo7Wa26+lPE3+UiUl26Fqre9i6wWLZQCDTwSfOZw8zI55ClxeHKHXwRn1yHOcTp3JaGRyduSs5PB68MMqhNzQmpoMmAK1W1Lb08Bq01VUFb0qTTfttJaI2OvmTMG14rAiI+vARlrbVOeHZx00yRlqC3Bx7cB1UdGbkoTgMpebE/MU1Cqp/tIwTBiSShgcfUtM27mSIZySerGV6rKQ7f3qGaaxrbX5UiV8V0us0lqQTcH9XP/JrioemTcd7rggp6iCwxLUyuegnn/VP1JS0TUMqtpeRZpz3fd2LNn+VakhrCi14ZLZ63AWrIAGtPM1noJO4yZdZ+o0cfbPaju52n5JTYpkHdwxUv45YleHpLx1djQZrRISFUjQ7o2b09N79zOuXzgC5U/SDHA1mRGSp/nc4YpOnCUhjrq7cgdll5heWLA6z2yrQaohByqkaTyk3jbfDF4skVpy83QinWedekh/vr/g9mFDTipOUbryJFDXKSNZpC0cVU8W2wDFxCi1ag8u883NGz6drjmmjotuVl0N25Ct8qkHov1SXR1FXd5995GUHXPyHOeOJXqODwO4nu5OGG70wJekmXbLqo0zc84XmE4d+75nt3skhEQdCqutoNo+aP6Ccd2M9UBXvylUP6qrMD8sO5NAKDC8DWbGiE2OCNEmIiWdPcMvTD3oZ7ANH4pC2baB5jnQ95Ht1x7Z32wIr7s1iTirwioyBWvlVFVoKxdAihnMTkLsXdMCwhfTH3HgIG5hviqkjVZIaRCFm1lfv8LXNRi3/no09eX62RrqVr/3GZ8gQ62qKbpGm72CiOpDNTS1nE2l6P3tu4VDp1C7Fh5m1jtCvCikFzq19XAaiFF9y7whmlW4VoXzYPGrNEDuQYbM2EU6l7mVdS2RV6QIC9p6yLFOvhr6VAReHS/47Vfv8/j5Dln0nsuYWPpESTowXNvlko0LaFNV0p/BwBnTX1NhyNypHpq392luBqIt8HOOSFUhfsK3K5r8LIsnRs9/+Nlf4O1xy3EJHE4Dp4cBJtfiTX+z8iIVKYLppeqzKdKg75WSow+RzSAc6bV1nZIqkvcqAxFHpTB0d36dJKtJaaS5McjZMy+uEFzm5rThh2+fq17dY68Cux6WC51qLUPGP9jId6VFuMxih1Ot7NuU3dlhX5OqFqsXXattQsz4VNFsqi7HiXt2yOxYbgbengJh0Dh7dXHEu8IUPdt+YdfP3B5H7u53T1DMSluoibJKZZSmS5d71Yvyp4SfT3pYnsW2bDp/KlJqe7D
aehkStY7l86RwTIMmA24WyFFpHvWqxQ7YnrVD1jiNmkDpGVKymilvu4UbrzZcTZzUa3F2sZ0IPmsCU797Kc1KxE0JmZIlwFmndWe1y1LgwM4smz78ohZV5Xi2ovcPTQQ0zqYx6PcgkTcd87OBuHF0+4Q7qQdiM5D/wj0RHC5VZwpWVCjXJN24t7VAS0JOZ61Loz1g8eYPXF4Nur1NMf8BTlKNo4Y2netqtOnd6pdpbcUnP248z2ZJBVTDaM6QqD/s+iMlUCLyDPj3gF+3j/g/Br4D/AfAt4AfAP96KeXmZ7/QemMROxDOPmPxhd1uYvIdy0NP6QpLKKTLxIcfvuXhxcDDp5f0rz3do1ba+aS7oI4E6+GnYp3LTpoMvKJD9r4VlrUDnKxISfCZmBzLrTrGgxKqqxr6OUJUv0/uM0UUDalto+LB7x2vP7nm8Thw+3LDKXacYuDhOOCOnvCoaEkx7572en7VrUCUHDz3nufjkVyEqQ9MMfAqOVLXUXdgDT5ydj8lW9BLNfGBUoT7/UjnEzm7FsCad6Cha8m8xeomrv5Muh60ilt2jscPHfHCVI6HYoiEsN0E3BTbqHuFdREUUanoRtU4gvWz1mAVHc6p75aTgrjCdOy4uDzR7WaWxTHvgxqjFqdKuJeGuBXHsnX0Qds81XtRFiNue69t36joQTHuhgwJf5GZjupRNn+w8M4H97zc7jksPa8fdjzcjbiHQNjruH0arW04KkzvT5CH3BLt4lWJvPHjLFDWA1y9zYS0DbjDADGhfl5ylvDZ/VkikgYqB0IPbNh0ke69Oz6XK+ZQ6IeF3TjzYnOg84k3xy2Pp0HJmxVpC8X4IIW8yWp8654WKGksiM94KbwY9+wve27C9klVva63tRihcvosESg9HJeOx08u2P44kDaF+bkig3mv5OvaGpWze1RbNBogMFIviPEkcq9q392DiefZIYUrOJ9gXKvYJ2TSSiy2153nQF70H/3Tjz5AfrTBG1dxawlNJdT3d2q+7RboDnp43P8y67PN+u+8z1z2ExfdzGO/6BBJ33HyhXjs1ZGhA5zunf039bALJ7/aAY3K81MOlL4uobDtZqa04fjxBX7vcGNRVeshM1yf+ObLG56PB37z069wfBgRKYybmTFE5ji0hL5+/3VgwJA5Iyw3cm6Nl2isLJ0K2xLhMPVcX9/TbReWWTlectORhkBxhWOXePfqkV0/M/jIECJL8hz6yNT1LYnQM0APu7jBOCnCfAFuNEQ7esLk2dSYhD7XZN6O86WSXXQN6bPxluiQMuKEElUEMwchAlXcVInTHh8c7sCaZMe0GsvWgqZKYaRVNsAvBfGF2+OGlHWfuZNREbKu1TzkRrwvvlCFvVXot+DOb0Y9n859G2tLsu6NUp9fHfygxYrzovenXXnsqT6qadcBHcs2cHrhVQ1eoL/xWoi3M9P2TFPxNzqOU1S1Xs72QDmLU7XIFXN+oBbXR11rpSuEMeJDUt3A2aufZqf7I42ienqD1/x4yRrXDanLQzF6SCEFQaJX/bQGcqwxoHamqhaYS3oGliAGbpjEzH9FXnj/LvB/K6X890SkB7bA/xL426WUvyEifx3468C//fNeqDq7506jQbuxVg1vehWHe3VQE9TitALuXObF9sh+N5LvfFMYdVXsrR7GQ9KA1Kv2Tn39pq7bsswV6ZBkVbPLDCFzgxI8lcgqjczasnihEciltikMhq72CcOtIAROacPvHt9XPgaQjp7uoCiGP8FyaYfB4htptwqmkYVl8Xz8yXM+Li+01WkHk3/0bF4pB6r1z5NW3a4mIZVEWQrqhQfb7URMjvujKiqLKIlZqklyPTgH4MG+bt2Q2D2LWu7EUTh+kEkXGXyBKHS3+kxz57SNl609VMUBRV+wqsTmQcibbg1w54iZZJhtYqgI4zAzHzoef3CtkDB6OKZBSDWBHZRAmYeytiSrf9MiazAMFpAXS3KiKHwdMpcXR/Zf02m5v/zVH/Pnr35
EJ4nPlis+ub7m9y7f5aNXz1hkwHcqNKm4MPBoHo5d0bZb9KqOfsbRoN7PUsB74kaIgwWGPjT9q8Y7S8USQEv+Ym4t0YoUBJf5V7/6m1x868Qzf7CvLRzywI+ml/wTvoqTwmnRLb+EvB5clkTWZ18RkGwDoyU5jkvHoesJPq0TLkbArJo7gELgEWTWSUs/mcaLKPoh1p5frgpcLfqje0/3UHssrEl3RWKNJHrOwSvGE9MWulgbdSWMY6Tu0EXSWJFpOyx6pxy0pShCuziS80r49YW8OMYHbQMXb+vKDILXVr1xL2dDHuogilXhdQlPKRCcCvMCiBRVMq/xKak/WH5/YrudmE4d8fOd8p82RXWkTBlazkj92zAz7hZevbwkx1EP5ouFF8/3/KUPvs+f3f0YL5nLMPHbN+/zcBp452JP5xI3eaP3sKI/NT81sm1tC5/dbOpEKFZk4dTfcwyRm8OG2+OG0CWWMRHGyHIKMOkhOX2+5aNTYNyqAnU1Aq9IRENnjIz8BM2wFjQFvCG8OYgpqatuVulUAFiTL2nefvo8bP+0mCi4OSrvatB168+12QRycGrcndG4YfGTuPaKZUmIE7MUUR+/cNQNtD/1nPY9FCHMsqIhQbsXD3vV5Vrvb0VKrI3pHeISjcjcuIXaZnXzOhFZ249VT6u1LWuiFdOa9LQPbwXUGEjbgKRCDq7Fy+6gr12RuTCvyeM6wKJARB5VPT51wnKx3sfhdhVdlTkrXcJQ6ZKdTuDN0my4XNT42Q+LOkN0iUMaKCevGovV7sX2sT/a5ynFujc6FCN9VvpAYiWRQxNtrdp8LXDJ2VoXaC4R9X/9HBTq5yZQInIF/IvA/1DvYZmBWUT+NeCv2D/794G/wx8hgUpjaZlexqshazu4hSV6XuwOvO6y+t5EOFx5fvzZczWlNEPh7qCZq7L4Ffmg0wy0Il1KPJOWqVcE6ckoZtK/8EthSo7r4UT//MScR8KDb4kF1CrNWm6VvT8pAbEKOlaDXElCvMj0z0+k6OmHBecKh2W0TQrOKtrgc0uwirVdEAGvHKh46uA+4I/riL0/KtJRRFsBsgHJ7sk0XP3MUgS8klHfudjzctzz6f6Kt2XLbO0nF60NVWjSEBKx1sxa7dVgIjb1KFHMe0sXdfcA3WG9t90hE46OcNJ/iysr2bgekN5puy+mFqyfjM4myFnFGfvtQvmsb5uxe9AksjtkwsHj9zoqHh6F7pCUM5E08RARfY8Kwy+6fnKvxMT2nIvwlef3vLd94Jd3n7MUz0fTMz6brnhcBpbstAW21E1SoAglZEV1Kiq40WQj74NWyZ3YoVlWTgWYByRQjUAd6xrNWT+/g8oRk5wb0lW9AB+mnve7O94N93y6POOz5ZrvH9/hR4/PeX3YMsfA9eZEHxJOVIk8WQtHsiJp4VCNmS2gWbApSU213xx2HOdOE5b6s2dToxXNra0nF6UlHCA8H4+8evfIoR+4ev8R7zKP+5HYq8hgff/WKq8JU9bETBNm+0yzJjth0mKkWTcEmrddydLE/tbWHmvCYyK5MrknRXp/sTA/71myEC8S3YsTIjA/9gw/6XFJFG08ATgdwRZwJigrEcIM91PHm/0WQYc3YvLMi04nOotPCPiQGcLC6dgTJ69C/JZnN2Vz6/bUAnD0kV/aveLXLj/je998h95F3h8e+LXNJ/za8DEPecOPlpd8c/OGhzjww/KcXTcTJCma24oue45lbW/VtotOsFnsrKhHm2xTrs9FrzY8n95eEqNSL0KX6IfI8dDrMMFjoNz3HGZtJ+oofYFkRrK1fWLPqA3i2D1yEcKkYrnh8BRRkVSsbUeL+5IVRcud0SqKxZyK6CzREmNpbcpwSLi5ovU0ztSTpL0mIilZAuVwc2xniT+pmGgXEsfkbJrP7q/QCuC07/S/XUGy+0LXgCdXadZO2trzU8FPuUnx6L+xJBNrT593SajP8WniQymU4Fh2Xr97neDOme5hoR8881VQviU
oCpWy+gVCm+Qr3hFHM/0dyrq/nLXN02q5phxLISfRKfc6XGQT8zXrbVP4XSK7QBULbbFpzrhp5YgKNM6cflZ5QiFoA2T2Z5dWgdiVf7fuAzlPOH+OlcvPxqf0+jbwCvjfisg/FJF/T0R2wPullE8A7Pf3/giv9eX15fXl9eX15fXl9eX15fX/89cfpYUXgD8P/FullL8nIv8u2q77I10i8teAvwbQXT6nbBI5dFpR1YrBKlkHPOxHxn6hG5Q/050gPHrS4th+6pifl9anrxMkjVBe0BZXvc4qibVKpml3tH9UtAo5Tj03rnC1O7H3mdOuJz10uI/UcDeb51eRNeNt7Ya0Qvpp8MzXhfe+/Ya/8O6POaaOZ92BKXf89t37fG96n9x5lllJkoPxNSr5uYjaZ/ht5J2LPe9c7Ll9OTItHcviyckxP3YsNwE/Qfe4ih76WejvPb5C0LWKtMm6IJn/xvWP+HxzyT+Qb/CRuzrT0THEaqzIVW2lGRHzXNq+aEW4/cyx+TwTTrm16IqAPynxMewT/hiUWJvQygBo8vyFFYr1Dpw0NKN4p4R1e0ynY8/V5YHDtwvT7Uj31r7/UStTt3gl987rJFolN9fPfP75m3hdUsQkeaHYtN92mLmZtvwnh1/h1cOOw90GsakZMnSPaiOjSKShV7NfR86jIGNBhkQxaw7lQokWlMGB91R+U7PMMOL4kxbKupnW9dzad7ruHw4j/5fP/hwfP17x6vMrZB8IDypSJxmWy8LNrwpjv1CKNEFTBMqYyNkrEmmtOyVs2voOGe/U/PTwONCdc5XqGiu1pWDoRVJ0x9kvKPxzz37Cf/Pld3kbd2zdzD+8/Trfzy94uPTEuad7lIY+VTJ5JXzXSi851YpicUhIpE0m9R43asWphNGCmEH3MgU6m+yrKEcT0z1rWeaqb+YLf/HrP4Svw1WYeNYd+PXNT+gk8VvHr/G/2/xF4sejtmcOwvwgjG8KxatOW/b6Xn7WYZilC9q2i6q9lhZvFhXWkuhhPgVO+w3DZwEPhAOEQyGchBjPeS00ukDnEs/Dnn9p9zv86nuegCeSeMgzvzVf8ncff4Xv7t8lZsfDPDItyp0ch0UJzFGo1leVg7OuM9b2nklHVC9G2mSYjqMD/LGrN+znnjdvLpCj5xi3MCRcyOAK3btHW76q5TZ2kePcsb8bW8w7l1KpBtd1QKLp8Fm89bMhYUknqHPvUGkZa9/Wr5HreLuD6FckpvrgnemHpd5Z69AEmZ3TnqFNkxI8kgOltvFS5om5uk3blsVxvTnh38nc3uwoPrRYJ0mg17WJcd/Ut48W4+pnrp58NUZUf9Av8nCrblT92cpnE19pAOoX1067qi9XVo2k3Dkg4ydt66dtUI6Qxc/iHIIiXqV6q7YJ4tw4RG4+W6vYs7PpwnIG5NRp59o+ToPdG1SINGdHXDzOZ/wuEqMgxRsnsUqzODAifzUDrjIJiiy7J8NKblH0KkwruioJi6dn97PuAxNQ/a/CC+8nwE9KKX/P/vwfognUZyLylVLKJyLyFeDzn/bDpZS/CfxNgM0HXy+YB5OOH666TVjgSfc9b/2OYVjYP8tUJ+7SqfdQvEqmERVM5bm0oKJfHJsQ01/YyHl1JG/aQ6CHa9EfllSY58DbqdPDxWV8l4nbRHH+yWvoyKUeZnmTiU77uWlYxynjReFymNiFib9w+X22buKz5RlTDvzw8oXuy0WntHLWg7v6BvlJIcYchZvDhufbI1+/uiM4hd9PqeOj+yvexme42Tc5gNpDrwKVbZrtbFLlfh74ZL5mcOpFJvWe2d9X+Yg2jl+d40GDjym34xx+KfS3heFepQ6q31v1HNSpltREytp9s/c7V7P+qUTH+r61jZHh8TDw1Zd3vOkjD1wwP3Z0D4KLjriDuCvIoJnzcuHpNwE3LTxpC9Zg1Dyw0Gk5cZSuEBfPq9sLlmOH3Hf0t46LR23ZSNENX1u46+RnFUY0GDsL+RRACr4m+V/
4ilXkU9vK1rOpZNUqP3GWxJ5PoDwxIC0QZ88//t7XGH408OyVraHZAkUuxI3w5sWO9OJIil4JroKSrYdEdoV5MUHA6s0FlKD+lJt+ofeJ47FvewDO2kBV9LNqh9V9kq0lZP2hX9/8mNu047un95mzeqhdXh15dIV0u8FP66Gur6/E9Bpz3ZlmnPhM3iTma7eSRB1KcO4jfR81EMc1TjSxvKpCLmvwlaySBn/68iN+efiMTiK3aYezCu2d7oGriyO3mwFZMK82acKPpUjz4nNJg/nYLwxdZEmOyXXMRd+j6YEVKG8HNq8cm1fKK/Sz+Uma92UTJyxrIpGK8Pl8xf+DX2W8+Kc8c3Cb4R9NH/J37v4Ev/X2K7zdb5mngDMz7+Bz82cT9DP+gS1nSVU24+BafDRj67M13PlELqpH/asvPuc/exyJdLijw9168qA8tPJ+4sX1njFExrBw0U18frhkOnWk0LW1VJwazuYxM19DJ5oYzVngQgs6tzi6fWF8G+mnpIKYZkCcBp0EDpNbY47TtSlnsiFVzFZ1heD0zOF31ay40O0z3T34Yuukjsy3m+TqQ9Dfz4nGSZiT52KYeRwGYKCaE2vSUvB9oh+0ZcsZ97C0z2p/rjc8a+usTRPWieKY172QrfA25fUVVdBYXd9//Q5r4rHsHC4prSaOjuM7yofafp7o75TeQHFa3GX0TDBeWU1oJGkLXidza0w4W1hnbTR9/1LVMExTSwUwU3LEReUOsMEhhkzc6lBP3Apxp0r9brZJ5XPQxP59EeDk25puWpFNsV3jUuVFrpyy8lR09f9TGYNSyqci8mMR+dVSyneA3wD+qf36N4G/Yb//Rz/vtQBY1gkscDYxV7WcBHdyzA89w7CQny+czDH+4v1Hjs963rk6MEfPw3SFJE86COUeuoM0M1x80Y0YlFPRyLvnN6UmUpVwl4GiFizuNph2ipJe1arCftZ6pnqAFdV1IdsUiFYDksEfhO9/+g4P08D0flAS8nTJZ4cr0imoOrepFkezpaiHsY57QjkG7tyWm1eX+DHhXKbrYxOnG157hjdinCueLNhz7Y96UOdejT///qtv0PnE4zS0pK32l1Ob/zdujndqp3L+DK0vXzkTcVTCcDZLjXCyxMQUdPXzrAhArXCbQazQqqIn/Xr7HtmDmF7QdDvyo+WFCl92mXhRiFthWYTlslCeLeQoQGDZVG6TQ8QqqOrMfpZ84OxAWUQ5+kWINyP9W0/3IIQ9dI+FYIKOcSMsO+XBtIDgdI34VNd3UcmGxa3k3PO9WIrewy40K5LcO+UYnAu5GZ+jnN1LnX6pZGl7uSy4u47hLTpYYCgOWMWewT94lgubNkuqwUMW8uTbd6goVCVsljoBCby7eWR/ZVN49d5h+ysIJbm1Gqwf1dC1TOEf3X6Nj07P+Mn+GZ/eX3I8DOQkDJtFVbZ7TSoqmbjdp2xV0TlKYlxHkjRYpgpoEhTp6HxSgnYt2qxKVzE+r/yyskqVSAKZHP/w7hv8jv8KD3Hg88MlN4cNKTu8y9y/2TF+7glHQ4r2he6gaF6a3To2nQTxqnmkiYPa4YgU1UCqmlYmCtztdV2lQVWZ62QTXSYPdl9rwiOFTjI/OT3j//7Rr/F/7v48z0a1i3pz2HH3sGkkbnd0LGOGMan8Sew4LeGMu2aJWUUQm5xKWSU3qLdYWtwsvnDdn9jHnt+7e5c/8+JjLncn3j72uvZvOkODIcYNr5Lj+urAg/R8li+bnYwKyZ7Fg4AayVZ0yMN8dZZcJFguBT/r3qxcpzhiaIZKqfjJQJzF1lBMjRBcoAkO5yDQQ+5Vry6brZifvB7QlQN0Nh3crJccFFYESewseX1ziQ+JOAXGvRVVYnvVKZ9zWTx5H3RCuG12WmKn6JBrCVDd90/4TTGvk3gtrp4Xu2cF2PllSWTuXOtc6D30xEE4vdR16CdHf+/p4sq5UucEXbwC5N5kMqwY6A48mVhtX60i/os0Zfw6/FQ7Acb
pXs+xyWuCWQzdEli2wrLVjLR2SRoSKOBCVu7U4EmzkA/WFfDShhFWHaizYq3epkxLts+R9T/s+qNO4f1bwP/BJvC+B/yP0OXwfxKR/wnwI+C//3NfRWjyAAq3G4y6TaRFV1juMyyOx7sNREfZJrbPj/y3vvE7XISJ7zy+z29++hWKL8Sdrhy3SFNNrlNebTz0zJn9iUprJRTmoodrKfiglcF8c0F/4yC71oLwc2ktmNRB3migZnHIbLowsTRC/OaV8LgZ+Hxx/K3Dr5kbuSZK4VVHf6uJ2XylMG4p0iDouNEJIDpdDOXtgP+0w816wOdQGGahv8faM6Kiko+WMAWDbxfRA7k4itMx9a9d3vPDm+fs9yOhi2AHnDeSJrJ+jtqO0kqUFQXJBZx+ztzbZndmqVJJhEEnNJTgqSO6VZQRoMy6iVTvSkf2XbNdMBK6JEpQAUXnMle7E2+jh09GyIKXQngQvI2UN0X4JPij6PepG6IuwTObmadZIY1QWzJ0N57+Rs7QPYiyJqt+VsStXrkvLNcQ9pYwbiJl9low+Np21sBS0RiAPHTEnd635cLT7watqoJbJ2pqsr9EMNmH3K2TjMWB77IWh3MxXR9aoph64fiuLkxniFM8nakknzyyCP2tCjA2BWH7++XY8bAZeOUviIYIVwuJuq9bq7RZJ1jGXGqlL3z383f4L+++Rvcq4Cch+ELuYB56TfDq5Gus38EOh1qhh7MiyWl1KsuqDafj7wW3iXiv3nYqvFrXqAksClpVLyAkSleaZ58s8J/+9i/Rve7aFG44QjdBHGHbwfZTRV3DMdPto8WU6j+i96O2PpfkuTuOq0TK4sl2KNRpsxJg/7VMeW8iLx6+05MftO26fXbkMO+aLpkOn2QGH8mp4+3NDvfxyCf2rLQVDAPr4eAWT+wywWe1cqkihYKOm7uKXmhsqUkU0KYLnw6SACHz/uaeV6cLfvzmA/4JH2q3r1dE8Tj2LLcD/tHjj0J8NXAze8SVNgCAWxO0dchCC4/uzhEOrMMtcW0HlxZ79ITOXqVE/EljcOrWYZ/h7il6VJzoRLIlXV60Xeon0x2rU5ReX19Ry6wCt9npVFs5S6bqOYLodKJAeujIk870dw/Y5Kci+y5AuulJUegmMf01O7JaN0YoRZObsulbDOCslasPW/dVsUEgbR/XIhRNLGwK71weRltxnrhxLNtVhLN4ldbZvFLEOgeIW0/YR+QQNf4Erz/f2/RerzIzcQNxW5NQcFNFMa04MaRaohLIidKmZ6v2F74wjAveZUXCHge47/B7FQgOJ1YS+JI1FnqvCX3QOFtzTeeyaoiNhWaDU0xoOa2JTz27dAjIXuec4vNF3akvXH+kBKqU8o+Av/BT/uo3/ig/316nZsn+7HfQzZQ1KXFXC/mxg4eO/s7piPhzSDg+na743u1LDp/t6G518suZ0ilgwXVFUEoHS6dvnDshL3ZYdx4pZiDUDijVbfnj777m96Xoe9x75FGavg+sI81lTPgxKdo3r9MWubPxfA+8O/HO80fe3u1Ik4liRCEche7RzCc3QrbKVLkG+pEWB5vrE+9fP/B5lzjcbpCTb+OZbnKkjR0GKA+qCgbGUZo4Wgt8Ticl/tyzH7MNM/94+aoiOjXBLCv0et6WqPeS4NSE06o5sjBfCtMzIW4sgRr084+vob8PBLdWdNpWVU5QPy5MaUPugiKQncM7qN5wVNHIiqA5Vdp2Uvhj77/h++Udkm0sXw/8okG29sB1c1qydjaW2qYIY1LNojP4XNA1KUKriJZL07ga9fn4o6j+U21J1omzGeJFQrLHH+yQD5nSSSsa9J5aFemcQv/BsVzowbhshbjrcJMmT07OqlDQAOhsCqa1pPX3ro8cN4nUd8yXa8VVA9vyTJ+ZZGHczMbX6ewz6gtWXqKOd9v2EJCjZ38Y2O9H0uQZTAzXzxoc3WT6T0tWLa0a4M+Rxg5KEcLbwPhabNpTg39FeSkmQlmDbW2fYMW
PcTCKyRRsNjOP20DeO9JGD/+8S2w3M0NIeJfX7nVFjj06eXcmqoorSFS+WPEQftJz8SP9QRetnWbWNstWGO4zw23EHyN+P+tnCiocee4vJwLBZUU0k1MrnORsn1tBNsDy/sI7793z/sUD99PIJ68/UERrk3m2O3JwK+InBUoWgiT+5MUrPvnqFd95/Cr9a/PtNAXyZQfxIlOcJkSYkbezNkldQ3UdFaft77zNZCtaasHZtHDOJ5JsavjDzT0/3Dzn07dXio5Pulacy8g2kvpMcoUwJIY+EkKiFGGeA3FWiY+GVojGCKQQDmtB4486Wt/t1aw6DbofctB4V79DnfBOg/7uoonDDmEdv8968MZR90XuVhFk1UfTJHu58KoruARCcARDKyVnRVCCN2SokutoE3yyizD1xv1TlFESZJtOK0GnXnMH6SKTHt1qFC20lhYO0qajmdvWfeANCOg8cdTvkkYIjyZqHOQJ9eJ8Qlafnf7dshXma9EEcoHuQXlQbs7Enef0zNClcBaDqnxBcMgpknrXwIu4M8HhBOMrZ+LY0NTUbf0SBX90a8Fq8YFQ6ENk2y+kIgSf2XeZ6Huwif1Qu0VADg4HlFyMbpLtETs9B0zeKI21itPP75ZCSitSrv64RR09Rk8Ygz7nc/rLH3L9Ys2EHUYiV/EtqeTAZGiEL7z34oH7ceT4emvtKeH2ast/4n+Z/e9f40/C9lEhbyWHFfwJwsnsOoIK/+WLRKxq4UlYNmcK0hVRMU2ggmbTMTqe9Qd+45tv+O6Ld/nRzXMODwPypid3TrWbZkgbYbic2I4zcec4HAamONI9uFYx7b+e+au/9lt8a3zDf/zpr/N2r0HwYT9SRO0wKgG+65ICIolmpKptoMwpBi43E5ebqd1G7zJ3hw37z3f4R0d37+xzKTei29sGqsmTaStJhgt/4i9e/4A5eR6Wke93V6teEprdp0FIG4iD0NcKrJRVDmCJEBPztXD6k0ecV85YyY54DEDP+Lae8JiauaJP/RC52p14MwfiLpDvVtNfghEei8FAVfdI4MXFgZgdnUv8ma//hE/2V7y6ueSUN4rMBViuC5vrEyk5prTh9Jmjewz4Y1hh3lJUz8WJmY62+Kf9+wguJOavLswfwvN31eX+j+3e0Eni1XzBP3nzIZ999gz/qRKfNaEF2Wgl7GYsIRZtAx+dHZamV+JQ487Or9Vpp5XzE1TMIPvSOHtKmlQi7zoy7KKaRL/3x17x0eUzfEiEkBm6hefDrK2n08jrj64pjx2pj5pw9YpGdUNkGQIzPe5kn6Hqm1lxkyYPs0NOvqlkq2xHoXuMagURM7gqW8A6km4tjovtSdWqCywXihZJ0RaWWOWfg8Lyuv6NtGooJVgc7Ap+SLzcHRi7yBt3Sb5RLk13NXO1PTXdpfvD2CpgVTbXMXD3cKJZBxUtkIYbiDv9rONNVruMujRqa36xgYUp4Y7xTC0eNTofNxSnwyFdH/n28zf0LnE7b7ifRh6kMPnAdPKU4EnAcKHecb/32bua2HZFuV7mXUZeJSskQzl59nHgw+6Wf/WDf8KSPd8b3iPeq3Al1wvvvPPAN65uuJs3fHqvEgNOCoOPjMPCwQqnihhLgbTNuN1CGgL5wIp8VJpDlTSJGVLgk+M17w6P/NLzN/zm6UNNzBYhvx4sydDkdPfBnm+9eMtFN9G7yOASHx2u+eT+irsipNGvXYIx019PTC+C0Qo01leEqsnTeFsfRQuP4vUAn2bR9ZT1fqnWnBU1WQun0gVDsBQZnl4Ic1pRinCqrdmKYhf8ISAHwKlGVK4DIFHjlETB9QFZHC9fPHLXjyx3A9Nzp8+ukuCdOh2kDbirheCKeW1qrMxBGt9MlmxAg+0B83Rr5P5stJFBk4R4IaSNqbWfycDwxUTA/juOijpG08jwk6LqaQzEQf+fWywp3XZND6pUdfezAlcPDxXkJQvxoEMW1aWiWMKShwJdIUfVaqyOIVLAdZpc3x42jbL
a95HpSlDfZY+bxXQSPX7OlKQefoqKFkIX1dbq1FUrTvvMpaFN2QvSFTucrG1s3RAXtS3pjkKVj/hZ1y82gQKGq4nc91TBNrcoKiOLfplNt/DsxZHvLR4k0D0U/L3n3u947x9bpRC0MgQNKOGY6e8jRTpcnxRZWXQKLRw1WapCeDJnZFpU0XmadfGDZt1T4Hdv3uPDizveGR9xLwr3u5Efn95hMXQpHDTjv9qd6L2SuoPL3DwLLG96+nttZfl3j2z8wmfLFdf9kSCZWBzTEkiuKEoSFXau7uiSlagc9nqwvL7d8DjvcHtP3madbOmyogSzY/w0sPmsEA6qVjxdKzemVIE3g55lWiBn/AT/+e232mcGkEUIB+1dd/uEZD3s47YYvFtUc+O0IPOyWsMkjQi7yxOg6MJhPyAH3yDmCrOGE22BX2z134srKnAYIA2rYaakrDotsQoP6r15sTnwMA9897N32G1mupDYbicennUsD6rOGp8vfPv5LRnhI585fXyl/mLBaVvwyYFQENNVcVHzNTfbRvaFX//jP+Rr21v+hcvv8Uvd52zdwiF3nErHr26/xf+1/1N8d/qA4oOauz4KwxuvNhqdavskACfki9QmCqtAZO69Ik3Nm0mraQrqiSeiyWMVAO0suaxeXR5VLl6Uc3Y3dfz6V7/Hn3nxEVMOxOxZiuOUOu6mDYepw20j5aZnehzotnPbk8scKNE1CNufDFHb1RaaRiJ/uVB2kbgfzUE9r/5kJy233Um5dHXKS7W2Cr4TvnJ1x299bcfDrmf74SPvbo/c7jccPrlgeOUJezRox5WXkcaAS3mdjrEgPParMGUYIolOlYw73ZOzcWxaQrKg5OCHRSvhkxUk3kNWVHHzJrPvHfMVnJ47JKmbwfRc0Ww3wcVHhbDP2iKvCZjT3krOmkxrhS5shpn3xkd2fmLw+nnn5JnmYBpIui+mtxve/OCC8bWw7KBPymMLe+Ht45bw4OnvS5sk9I+ejw7X/B35NXIRfvX6c779p96w8TPPuwO/NHzGy/CIp/B70wf8o93X+c7te82L8/n2yL67VuK96db5SXlmvktGgtf/Fw4Jv19wp7nxiNycIPb83ufv8p38Hh++uOMbL2749OGSqc+UAfzrDrfX5HQvOz4OkQ8uH5qZ8Zw9QxfxIbdiEkDGxIcv7vg8KOoe3ujeXrZivqiaTI5vC5tXepgvO3WgwBfiptDda+KjGkMWWxcr/JYIfUce9FANR+1yVAS6aYRZu6eIIeImSlliRJzTYnIpGhPttJfYIwlSFq4vTtwB8UEnuJWLJcRdIVzPbLcTz7dHPru7ZPFnrWVDbMXsYqpTg5wSEtzZBJlN7IoirxR1O4iDMJRiQyhn4rst7lV6hJLv3aTxSDsJOiw1X2tbcXyjxbifdB/64JAptkm+cw86l9R9g4osQdMTa96x6D13m0j2nnxyahVV27O+MC2B481GtZzCGn/EBElzr4MCFBgKajWTkhY7lnCNNiYcjzrEU23O2iRuUDCufs6moVW5bLkgx+kp4vqHXL/wBOqbL2/4/cud6vzr9gAAQYFJREFU+hsVnZSQRXCTLo5XDxd8++UbXlzvefvOVsnJfcb1iWXbMdypUKOLOm0mRQXM/H7B7QIhJLbjzPRmc2asurYE/GFGDtOTg7qZ7uL5/NUVn330nO5i1n59FtxeM98qTQ8whsgpBh6PA/MckKNvli+g5r//8ff/lJqvSsEHbSfMp44hSSPOdfvCycwVc6/eaworA5NHJpVv0GnE0IiW2iordI/md7YxxCBWyJan2bN5In337TvcvLokbFQyv78TxreF4S7R38z6IcQblK4LVZakyeYSKfNi6FCkvy3cfHqJOyghfnhQXlY46ut1j4pK9Jdeq4wu8872wOPStxtZqxdXffvmRd/DUKI6MROz493NIz+cX3L32VaJpq4Q7jz9HXrAzI7Hpec4d0ynju2sa8MdF/38oK8foyXNkW5vvCVrx+U
oTIvn3fGR592Bj5dnfH96l5tly9tlx8YvvJ23vD1saH3zjVoPhEfBe235jkNkKp12iC4nFgbAN+Iu0GDx4mlJZ1uLMTekTwCWoMmUc+aNpYmOnwphysw3I997fEnvEh89XLd7EPcdctQpUfnwRB4yzI7UKZKUolOy8cHRHUwU1hLebI9JkpCSMAwLY79wNwyK1M0ZlyoioYdrKcWmyBxu1rUQDhro3x0f+Y1f/h1u5i1/4vJTtm7me8d3+C93X+Hjq2fkHw+KFFhxCHaQTAmHJqFVXkGkcH8auH/Ykh4DYRbioByvw9S3EXtnVbEUb8lcIg2e0gWNASnhJkf3CP1dYv++Y/pjE6cPtZ8iY+Ly2QEnhf1h4MHtcEudAuo1mShFLSSM86VG1YVUhN+7f5fj0jUfy+NhIO0D4d5ropmhf+UZboXusdqo0GLJad8zPArd3logudA9eD66u+YHr18w7Xu+9bXX/IvvfZdfGj6jl0TC8XvTB3wyP+N22XC3jMTsOM7aly9FD5Q68eungpsymPGxM3QxHM1M9jC1YlMFHSMyC/MpwJuBHxw6fuUbn9GHhN8k3n95x+ebK+bbXjlqB8fNR9fcDJdaDBRBNpEwROIhMBzE/AM1bi1Jk6tjl9Ue6gJrxSjCKYvgZ2d8oELcQnm2ILcd/mTyEB7StfIjWyysRPklKnKzKXCv+79O41aT83pmuMTa+koJjictUIMdncuyJtHGU7p5dYnrE+JL+zyVF1iCrpOhi8SsnDhXJSXCyj90U8SdIiU4ku/xt4+4eaS/6vCHiBwVBNC2dln3S1Hkyk1W7MYEy0JJWXuI5rVJ1qGiKrmxXAjztSY4cauTkMvkiaNjuEuNV0XOmkDZvcy9NN9Nt6wOHiVUXm3WGDxnXArNNst1idzrAI0vUBI6LVp9GqOii252KlZtfnn1l18sLsz2HbN++eAy+1PPMofWHtQbc/br7FkDlFjPIPN+nBIcjpSlKiX/4Zf7uf/iy+vL68vry+vL68vry+vL68vryfULR6C+dfmG33v3PeadMlXFWifeJl4OH1/wQ5f55vMbbr695f6dgf5q4lvvvOV3//kPCK86tp85hrem55BUDj8fAmkUvFfNmhsz6My9kuvOnZ4bqe6MbS8pI95RoqP/LMCnQfvCUegmrQjnaxsPX6S1COapI5100kT9uPRX/0lH/mHP5qCokvqlKfewv4P+Ue1HcmcSBlmQoTzRmxpeKGnrcRzxjx5n5qbZODPLTnvlOSgPbHgruFzJuSZCab9kmsHB/jgw/rAH6ZmfZzYPMNxn+rtIuD2SxtAgTeVGlIaUAA19AhhvClffCWw/Vb6ITiFWU05tNckclZ/mPRJUh+banXjb79p4+ROj12yVUlFEEK+w8qv9BePVwofv3fJReo48BsKjZ3wrjG8z4Vjwj47P3ppMxKNX0ulsBsKg3KGYFIWy9+ofM/HW2odZ4eHDXc/f/fG3idEzP/QqSnk04qeZ1+JLs6Fwm0gKhbTvVAuqLwzdwjwFhZE3VsE20mL9XciboK+XYdmihPrOK4nRnZHfc9E2pJH7/aR2RmFSFLd/7fnO8RsMbx2bzwtjhItTaRIGy1b4/LJjfHHi9GajKEQRujFShkSUDo764OPGiPN9sZYapEU4Pg7MXad8j1QUfVr0/jaLnC4oArXXcfz+oQr0Ca9OF/zZZz9hcJGbZcujDFz4iX/h3R/ww90L/kH8FsNPOkWdbeTYTQn3qGu39IFy1bUqPmVny6QQrxMyKnH8NKvgrKA+c+eEeH0OAl2gSNLWyKJE2nBMuNjRbRaGZwsxekoRvMuk7OiHyOG9zPYzIZyMpFtNiZ2iXaB7JhwLx+j59P6S/f1oUF6h1Am4OlG3gEtC3MD0THlIm0+VVJx7KMmZX5hVx7kQ9ipomh87+reeHxzf59XDBS92B/Zzx+3tjvJ2IByUk5evYkNETpOiot7QeT9pDNX2sZBOwdrCihr8NO6MPm+hHyP
LMjL8aOD33HtsLialFoTI19694f5SJ9GW5DmdOpZ91RBTZD8n5dX5I3RH1e4ps+fV3QXz7YA7+FVbaBb8UafiqixARerTCOPFxHFxcKNDGYgQN4XxrXGsOn8mnqwTjnI9My/Dk4m/ik6kAeZLJUtDoHvscQ89nE6UlJFlAf9FkUrVkQuvO1zsWJ5ldq/0b9NY47oj+p7X8RLxhTx5fEWljd+l+x1FlzpPHkxy4zjR7SMya9sK78iDom2ls1aj3t52plGKok8xQkqNT+lEWC51DQ5vFXVORdHm/k6FRXU6Tgna/qjoGn1HGr3uaeMvLTvln2qr3Lo+Ew3lkaSoZUWW8iFQOhPHFRpC55y2wf0ukgehGyIisJwC8eSRRXUgamzQfS1qEB1UNiRlnd4rx9qXUzQsm3GwmN5k7RRlv35O0H+rS72smnw/4/qFJ1BzDmwvJ2BowVk9s7S3LZ94HvIVr4aZ64sj/mrPu9s9/8q7v8nrdy/5nccP+M9+9A2OP9hpr/soxHtNLE7PHMFrsNu8PHJ0G5BC3Aa6RxunHwJyCtoe6WQdywwO5/7f7b1rrKVZWt/3e9Z6r/tyzqlTp659HXpmYJgJMA4mONi5AEnGjgVOIitYsoMSS5EinODIUgLxhyjfIiWykii2I8vGdhQEQhjLyJbNPZkPIdiAB+baPT0909V1r3Pdt/e+Vj48a+9zqrqq6QLmVEGvn1Td+77Xede71/us5/J/PHtXjrlbXyQ9tpoL0rCp8NL+XnpBWtQ526OK3Z0FVZuyYES7zFRLKFVF8CFTg8J0p+Xt6xJPzTsIlUwC2uRSNtoULvNcmGpTWD+pNHfKa+n+KG8R4HhVqhjdIkX65FQcLoOhtCQzLTclCVVgFrYnFYdbI7Jj7SlogriY6dymx92Qq67NJm6dWshS7SUnoj/GNNVEzLWezeA3CXkuVF4MZYrkSVDK1fyUbrBMs5qdyYr9bLJZMLw1auzlmR6OfkCs0Qud8VRtypfuX+HDl/aZvNJwfzHhZDaipsBWhjTzDGNHlgxQhN6fpQ1aJyY0Dw5xsjRB8hxnLbZ2pAvZuMCHTBsiu7vb5AsYVWykC1QWALqJ5sloKNXTdJZyq6Z9bWB4p9iE6PomQRYJbpGozpE9vWi61CB5QjdJ6Lf04l8vcvq7FtslKrUV8h28Nfgi1eORpfgiDYsRp/PXCuVcmNx0ZEutBjKtHlzbOvJjmN/MSa8saMcdSTbQjhO2J5WGVXcz7k+3aFcJkjpM4vGd0WaenVr0Zj/DrDQvQpyKp2o4p9cDbtfHWTWS0qUnXQ6YzuES4bPvXOezN67jFmqE+dyRTDou784YpR1p2WHrTMOqdUj2bnvNM0kT/OBxuSCjHgm/g8QOIdLrN8ZO0yb4oMPUVCl2EeRIQhm+hF6IeK/nwrDOx3Rkc8/yq2OaAWwtJA0MHXpBG0E6Vt2nYr8lOVohqxqfWJARo7ylWScjCwyDYVzUmG2P89o7Ttcn4bgc0zU5yVJoX6t4+eohH9+5y3LI+PSvfhxvDN2uHhPxaniY0PRWc/YEM+3oa8PoZoJ5c5tZv41pPRcrPX5JM9CNDMtrGaurnmG7x5c9bpUwmZ+e15uu9KmDIEK51tnxRtdIP9hTQyHRi+uV7Tk3ryf4twrytwpWrxiS/ZS3mquYSUc5ahnlLa9s72PEc2uxjRFPahzNYKnblONVEnI/w0Lg9IKZ39Xa/nWFloZYVXJgCM3Wu5EhqbVCVrywc3nO8bCFCXk4tlFDfCjCOmgMGIPPU/rpwM60osp62t2Upran/dOGs70hhSE3pLMUm2dIWeqmclRqCooxZwwXTUofxo7kvsUu1jlLQGhMrdWxBudSbXxeDLppXlerhtw/rRY3dFsZ3TQhPZ5qLusa5/FFQjfSa4XP3UZGZt3InVZlScQavNeqXwEQA2VBN/XwYsXifsHolqE4CAVZja5VQyZkS8eQG2RkMV2G9FqhhxHsyoTGztDtDCRbLc0
lQ71IyPdDwncjJLnRfM+wgTSVQRZ2E6q2QSJk6A1iPHmh6RZZMrA7XpGIoxkSjlYls3RCukxJl4ahTDSE3gl9aUhCXqRJ/MYYZG0EZqJpsMGnIIPKmmzCj71eg10i5EcZ9rjUZvBnJdQfw7kaUB746uwifW+C5hCABONELywI5PuWe1s7+F6QzLH9Ys1hPwHgj+98md1syc/Jx2hujNUC7fUADrmQ2IF5lZOnPU3Z4zqDKx1DeN6VCdLlWm0Ep8raok0OX9k6YvThjgeLMctFgast9iTZCHkNR1r50HeWRehwPko7hsHQ7aehBBWqlzuuvnjI4AwHRxOVZggHIakT+pOgX5WK9g0uOrzJNuJzLhVmq0K1MLxgs4G86MizbhPGrVYZ7OfkMz0G7QUfdF/0R++zREs9e7cp0b88XtB/1OgutbH0d1NdiEqLmRb0hdXk7kzlEFyihidFqsZmYtVYy1LqXaHZ8+Qnasgi+qMbgj6ST4QhDRWXTmUimiGhWk0ZnGoZrbWbfGZU82R9EQ7Wv16cYJS3PLi1wxvuEtcuzLg0XlJmHXecUDc5QynITsvFrSUAR9mIbpqpOnxukVbVeKXI1cuVnc6H7R6JlXvIjjWvyvScdmsPOSgEafR1pVhfpvRlx6XdGQ8A9yCnG7Rlh1jVGHKZ18rG4EXsR9pOohsZzKRla1ox387oSoOtLCYx2OTUbTqMMtKi0AuAtWuHhhrg6I5avVIeE6rgbOM2Cal22TG+mXHw2ohs1JJlPW2R0Q2WWZuTGseF7SUntsQNBu8FkzqG2qowYjDOTX9GpVzY5G5hDWvhVNOHBqKdGuamHRh2M/xxwugdS37kw8Jr6cuUB+NSS+5Tz2gBxaEjPxlI5i2m6h5K5nRWNEF/0IKMxDrGWUsbLvDtYPXnbIIzoVLvsHrq/EZ1fK2N4xN9n+6UHabzTL8q5MfrdiFBk6rXC/HyqiVdDZpLWTXQhaRigXHWcpBpc9tmR8/rIum5OplrvkvoM1J1KSfGb/LMPvbSXb7vym9xOZnxtXaPT1/6MN2iUMmPvN9czLTIQS9uSTbwwt4xi52c/WKH0Y2EZOW1yXpQXTadesKymafdEoaJ7uqb3kCowPN5uGCLgdxBqJYacoOtDXiL+AxJDPSazO9G2cZL/cqVA24mO7S3tcp4ndPFvqUa5axyz8leye7WEiMeKx5rnOpUpaLyCuvflQCZo5w0rK4kkDowHrGefpXQ7phNQ/VkBe3EMGSefjpAZ0lHNfluRXNYksw0J1HXJINLDSZN1MjJU8icyjoYz3Srwu6o3MQQdLJWxyXeJNhaq65dGjZzk5FuuItUPdtnVcqdtsgZXV+wzEbgYcmpgQ7qiRq2e+ykoyg6UjtwvEpwqd3oug2FwZXaAqbdTlSzapJhrZbZ29QivVadu9yr4SsenwYtvrUOoLWQJmo4pWG9CxIxflQwjB2vXDpivxyzlC2VJgiGszhIZ5DUwlDqdcr0Hlv1WtwxGFyugtMu8ZDp+rFd1CzajLvZBZJVFnLtgucwQXUeBezKwHCqOehyzYHyXmgbHWu1zKmalK1xTRoKtqQYwnobDMWwRg4pWKsbldG4ZgUMywTQRuESIh0S6nKGVLDduliFzbk7lEJ+kpEcleohc047AT+B8/VACTyYj2lOio33yVaQWJUYUFn5cLLNE8r7FlvBG+4qd2ZbVFXGv/3aG+ykFaOiZeHGG1l2GTS8liUDVZNxcjSGhbZ78SZUKBTCkFtMbvFOBZfOhvBclXN7sc1OUTG90LCcZjgv3Li/S98bcEK9yhlKz7homc1KlqscAfo6Cfo40G7Dq6/e5zv23uawHfNWfpH9xZgsGWh6S3WyQ7IIYSPvcb2wu1Px4FJBXxvsSr0/7eGI5CChOAjJqQZWFhaZhgSLuZb4DgVUlzxcr3GdoWlzmm2DrbKNq1V7OUEzJFwYVUzyluOqoJteoK2EpNGdoE+CmN12Rz/O6UtLsgyVdQ4kSA2
skxC77YHmgp6ozkI/0uNdnCQbg0ONDhULdV7Yn49Pk/ysVso4G3ZNnPEQBDVY0wtXJ3NmOwXt3RE3bo1xhYPUafJ+rRd1t0qYVQXDYKiXGWXHqY7NWssmTViXr8swkNQDG0ViQsJ0qSWva9G2TdL3umxXwvkWjH5nhVVSsu+FremKuXhSO5BtaaWXd0JqHe1isulA3o0M6bq/VG82yszrMuZNu6N0LRRn8KNic87YRt3qptV+YMNWjz9MVSNn3Woi/G8QUUG8xpO+k+E+crqTXcxK5qupWmODFnPYWsBCv9NrdawHrMdeX9FdNfQ3S8oHhqyw2GbQ3f16I+LVUEkq1U8yjfZE7AtRgcQFZPMgLDo/LcxwmVa8pXNPNndkxx120SB1q4mr9kwVUqggnc1HDLVlP9HymnJahwVYPVBaB+1wuVYnuVS0HDxbezb1AOlFWc+5tfFvO/9Q2B8gWTlG98A0Hp+GRPQgSuidVrj5iy3VrNBQuxPuHm7xIJmosN+gOlB9Z+Eow9ZCP/Us2pxfPvwm7i63wnmmhionKZXxlJu2OqdVjl2dsAhijTLu6bZt0D7TRPQhFRibsPaFbgqtwVrH9MKKai/beMebbauGuPGko45uK6VdGWy7Dm17jDOb+VUFaLi5v8PezoLXLu9zsl1oRaWeoMgywa6MVk8djzmQsXpKNAqDGw+Ystf+ko6NkjXAizsnbF+5u6lcbocEh7aM6b3hoBpx92sX8TbRBPBJS5Z37O9P8Z3RopYGhlIT+ftC1FsxyZEm0e9yQpYMHB9OqEPLEB9ivMb6zVg2oZ5ccGWqukNWk8Z9iqZHnE1Qb4XtUUV5vcN5aC9rL8SmSXGDIUkG9iYV07yh6RM1+MueIQ8pI43fVJjD6XFZi5hqsrvfVKV6AcoBOrPR4luHLH2RqsHUn6YwqAbdug+efubl6YLVRxtmLxbkWcel8ZJVl/HOVy/R3FI5iewE0pU99Uxaj8us9szrBVlaThalepKNI5u2uDRjKLQ9TFL5jQfKbrf4qTDUln5P1xZ6w27Z0HQpy1UKvcEsLEOTc5CN8Zkm4Et7Gnp+uM2VeqH73uq1BbTQyAX1cwGfq/E2eJWUMZvKvlCF2WtlYTcWXJEiTb/ZyD+JczegqmVOcpSEsJGW4IvXaonqktC9VuFWiZYwOkt+4qnvZ8znCeVty8/PP4GddLiDnPJEDS9bqzaL9IbcDuxNl7xzXGh+UK6LrctPe9UBejKtpe67HmlUR+bmm5d5J3WkWw1F0ZEYp+XJncHkA0PIV5kWDas6oz0sEHcaghwK6LYcyzbjF29+lMWqoA+y/VIMJFmQTVgL9DnNc8jswPTqnNQO9IPFeSED6nHGYic7VUruQ7+o0tFd1FycfKthd6wXj5N5ST/KtC1EaTcW+voke/vBBbp5TjppyfJetXpCKMi0AybEvk02nGpoGMF4r9IPbaeqvIOKBuYXK5ZlxsJDkg8UZcvypMTbHNNBfuzJgkK6tU51qpKBtkn1IhF+VOK1LFeqVkvM1+E263FWy9I/fHmfL3VXGU5URFMGS7LUyj9bedLDhIUfayPJVluwbFofPKqLEuQBbKWx+dOeSBY37Wl2040qrTe6GK/PnfWFNZ2rQeAtZMcGtyo5fsGyfWHJ7qhip6y1V5h4jquC2TDZKHirflmouFskrBYTihMhWw6kqx7pnBoQTQ/WYLpE/warnoDiAEYHA8lqUONo2tNNE5bX1JAdcg29rtV43ciR7eui0TQJ+aSmHrUURcfSqKfVrNSt7hIYxgOkTg2R2iCp4+L2ku285vXldV3wg6gfidHwYpBdSOqBdGWwlXqfpNfwAqLSFUMmGxFH3QmqEnQ2dyGELad9wNYE/SHbOpLEMS5ajmcadvHOYkfa4qjv7KaPVlr0lFsts97QTVM9Tiuru/gy3Zxv68pRGRzNTlB2PpLNRWvIDKQaSpWgkO5Si00T3UwE4cP9xZjpVsV8LyU50ZOkq1L6hdV
KKXuaCydGOyn0peftNy9z5+ga2bG2I7KpitGa1tJ0ubaNqZ1qWNUDNoSGD2a7JEshIeysC2iDtk0/hm6ybkW19kzDalZw4eKC/lKL9BnZTA0MbRwrZLlWqG2W7FAVqjIAWhJvRfCmQO4UPLhX8OBSw0uXj9gZVxjjeWH7hEWbc1IVmwvasEhUUHYtOtkanE9IFtpTU41GD42ldZbMDBvj6bgpEfFMs4attGYrrVlczWkPtjXMljhe2zvgC/U1hsrgRo527LGTnuEkpbtr6UurTc17p1VWg7BbrphNCupFjqtUzgJgSFUSwRWOobP0BTRTSzbNyFZtUBwPFZNraYDBQare8nsH25SjhkmhBkFqB5Kyoe2TINhouHOyRbXKsNbhe7PxJGezta5aHzzM6h6xc92MJUvVXGNwKm5sweYDPnEMtdGc2jQIXYZuFOtq09Omxx7pNCz89p2LFKOWly4cs52rxMxWVrOTVxxfLZinY9L7mtu58YI6v6maNIMnqQR/aOmbETf2SxgAA8Vi3RTbYesBrULW9bCYNKpZZR15MrCsM4q0xwi0pcUNFrvVqNhqa8N1wkOTauiv09+dSwzGDaFJuyaw1bNUIxtBBmHdVHgtFaEeQdmouksniFk7Yjjd+Hf9Rq7nSZyzARXK+cNFEwnJW0GLp5t6vuu1r1APKV85usjJwUXMILjEaaJnDcXtFEhJFpqkqg0gNX9BPDSD5WK54sLlOVWjJ2jbJAzz0cNKr6Ekc61KLd2AZAPZnQRbWVya0qSeRjRmmjVCu+M2eVHLNuXahRn76UDXWdplRuNSkpWqPR99bo/8SCgqNoqs3Tiln3iyY6E41sTttYFz/3iCMZ6kcIzyFiuecdrCFrSXVAQPNCHTGkeZBN2d8Pi9xZT9O9vYmSVdaFsBFUEL+Ta97lC6k5zxWykuTVldGRivoDhxZLOOJPxIbZ3QzdNTzREXjJu2g7rZlHc2u44/9Q1fYiupseJIZaDzlv/n3kd45/gqthLW/ats5/Ee5nXOKOtwXlhl+cYokd5rcmTTqoF2Bp/A2we7vLh7zCtXDqgvqqepqVPq4xyXW9KZ0G31pFstzguuN3QHBUNuVHMpMZrofKYh8kZPZfDaNSX8gLJpi92pcaKexmnecLHQ0KDzwkE95sF8wvLBCKktPncqaDo3JHczjnvBGIdzhrY/LTZIVkJ24snmnmymhkZfWEydkKzWmwFNzDaVam+t286YOvyYjSBtR3HgyI57TDdgesPFnQXVNzcMzjDKW8ZZyzRrKGzHdlqzmy359N3XuPfWHr62DE4oi47L0wV2a8aizTleldpk2KgHbVVntJ3mk/heeHC4xVE6QlZagm+rQXVYWtXXWbfJMa2WLifVwLofl0sFP+2o91S5uN3xKqrnVO9Ij43q9/S5YJsU2xRIapHhdMeNU92qfLLi4s6CdmKxxjPJG+o+4eBkvEkwFeNJ7KA77nUCjzndza/JTjzpQg299oKGJJKVLlD9SIIujvY0yw8s219Vj544j1mpSGuSD8yPRyR5j080vJKlA9uXZ5wEz2GWaZI7QFVldIm2Osn2E4p99dppg3S9kA65kM5MkIJwwes5qBHeaOud/Fi99s0FT7c74POBdNwxnVTsjiqaPuHe8ZTuJEcqizlJOXLTTY+xtcyL6YHesJoVlL1qfEnvT/Pc1rvxrg+5RJoTWuwLw4OSG4cZO68es1rmnBQFo1TPrdQMFLbHITxYjWm6JJTvJ6o/dpIEMViVmEiOc96+tcfX2stq0DgN90iv5fouV++4GE9e6+9pWKRkpufD1+5zs9wB2DRNPl4kISTlT5vPBsN80eZ8+NI+8+2cuk82GlWDE5wzzJcFfZpi+oQhC9eNtTFiwqYhtEgR5/GDelaTrxZUecHiYoc9TKjDGqYaU7C0nmRpIPN0W/2pJhiaTqDtp1Apk402kZ43m4KYoA9lOqFbJdhy0I31ugjJhmvces76tRsznPtdD8aT3M4
Zhpw3rhTqsa4NtjLqlckcONlIr/ggG6Sb7bBOhfQba0GcgbmGV73R1mK21s2JrVVqwFSG7iinW6WI1TzLudo92mbK6AbJGc9LF48pk46qDyG9LuVuvkU3L+nGQjYL15fwex8GQ5m3LIsBliqZoH8zrBPY19IUm4bpXvek6xZsttUcsE1D4d+PVi6/n3zy5Xf4jLxI+8YIXIiv5iog2Y8dHx3f5+Vsn69Mr/D/jr6B+/MJ28axqjPm6ShUYwS3ofPvCrOcLEseHE3ZmlTqOq1ThtaShoO1Vjdeh0Y0p8RAYrCpY8ggPwRZEgS4QtjGQLIwGn4wcPjODsu9ir2Qc3Oclqwqi8wtxQPVdFm7Yr2Bdrw2FHT33Y61IaYMUG5V1HfHZIeGlReWoT9S8qEFXau7lrzoKLOOLOmpmpxZnbOqc+pFhsxSin3DzlEwEE2w0L0Hf0YxVvQ/qtehYRPTBKt7UAPGtAnSQ34vCYafhsBcmariqzVIr7uXYex4tTggNx0rl3G/3eKwHTNvsuCRO+11BKg0/7JgOS/UkE41XwTUSPOJ0STydUOjdagw9fT3R3ztqxN4seLaxRMuTxc0ZcK+dbTDCLwhvdBw+cKcwQt1mzLfyXXhSzRx9KF8nbAbc4nmR8haAR24tL3g2y/dAKAPeSu56RgwpDLw4uiY/fGEm5MdmsFyoai4t5hwdOMC+X2LuZ1y2O+QHdhNnzbZ9qGnonoTknrQ8Fauu6uhgKFWXRVvNYHUF6kaD6Evl81SzW0QTe5ce3e8wE5R8Q3bBziE0nZkpicVh0OohpR5X/DvXv8SvyTfyK23L3JyMkIM3Ootk6ByX2YdTag8WywL+lWCWai+mZslpDcysnlIq+g0dKjnt+Zb+FTzK1xmQmNpASvBEwaffO0G2UcH2sFyIas2YwNY9RlfuHWV9EsjkhrViClGJCunu+4ueLIKQ3+ScWu1SzZtmYxqsqRn2Waq/9QmeuHtDG1nOLpfkp0YzQ3rdGG0Z/u6pQnp0msoV4Tm0sCHvvEOwEa5+0oxp7QdW0nFL9/5KIfFZdI5NFsl+TwnWQ5c2D7hwc0d/N0RRdhkVYucrVHNCxdPNuGh3mlLl7rRlj3lA/VwdmOhugT9xFM8CMr605AAa4GghyXOn1G11orJdsfTX+qY7K6YFA25HXBe6J0hT3r2thc8cEI35JilIb2nHgVbaUFLEsJG0grZvZx0waYnnFYxmU0ujbYw0YTz/npHVxckSxjdshzlW5Q3U058yf5EKyNJPMWOFo30g9XcR3cmNhpazUhojGs6MDczJjdON55eCA2u1ajtpgXNjvZsS1cOs7D8xpdf5eq1I6xxVFXGclViD1OKpWwqUVmHwq2Gzd65eZHDCxUv7hxzIdeCnXpImTUFszqoqYfKOBOKSHyRwhAKc3q3Wau893quZ7pBFgd1n1Ls63Fe9zFECKkO0G0NJKOevrGaGC/Qd9BNQnpJ8DD50Htu3cpomOSacpJpWkZyGCrjWtmEHL2IGkRpEhwG+jtbe+R9kWMnHcPCMrpjyA8yNXRCgVOfr9dF9c6vPWTryNE6t1Hbl6nw8lCur3eqvu4StKCn0HUAwvpXWUynRU/ZTOe2ncJSRsGw1x56by4zRls1edqThPwoa11oUSWnFXNB7dw5YVnljLZqKpPjaz15vPFaOdyBRzQv3J/mpXnYdAVZRyNcmSBDcdoC6AmcrwHlhHpIKcoWLyNMiO2u1UyTSvgntz7Oh7YOuVac8C07txh2DL2zHLYjbu9sc/twi/a4wHQa/07q08oF0J2+ebtgbsuwwxKSQQ2GpHFBhmDQMtBNiwzd5btBkJcrZlsZptZcEPFaQbfu5u5Wqoibzizm9pR7k4mGwRyUx0JxoJ9Z7wnV1QE/UXcxIU+D1iB3E23+mAgkMCkbqrLAdKrGvG6yWK+mjGaaMyJDCR5a9MfsU5g42GnUoBsyT7OjFREahgljn/fYqlPvjh+TXaiZf7TArFR5Oju
xGyl9CItEot6YtTEGuoi6XI0oH15X3kr4G7/1b+IOMuzKqPct7FYufs2RVgO21p1lP7LcXxS4RUp+Xy+oWSNkM092Mmi4qmo3AqenTcx8qJhwlPcE7o04kpEev0wdmdMTVW6e+xF3JmUoCRbGt4XxnZbs/kI9JP1wWpoa8jlsmW2qaPBaYXXrxkXuHW5pUmlltX1JZTCDLn7DVk+201DknfYWyxo+vneXe6MVb25dQu7nutvpT+Uz1o2th5TQlVzDIqZ1uNwj2w2dK+hzQ5oaxFmtAql7pHeYukNWdaiCHMiOJ+qlcnpOv/72VWgNZp0TFnZZptHFYSgg//ZDXtk54vDiiGp/hF0aur5g1kxPQytBSLBs9OK8VokeCi0fX/c9dKGdg75HQ3fSaWjUVj2msOoVbtWTli0891ZTMjswOMNhPaZzBiue3WJFYTt2t5fMEm3No2KGnqTqsYt2s/P2VhWP84MEl2Us7Zgl+rf2pccmanQQGuKu+xbaOghDNuqFlGW9yQnJ5gN21gbFfqj7hFHaYcWx7DJery8D2iVhcGYjgJtWXkV8W0fVpkg5YO5r4rGtILmVc3jz8qnS9RA60Pe6LpXH+ls5/ld6plfnfOTCEU2f8Mab12i3E9prLVJbynsaipdBW8eYzuNKT3O5pgs97qwXVoucxdEIWVrSY/09DqWn21LRWTMIw8ThGkM617XTDCr1oRIJRvOXhjNe4dDRQNahjFBwI53h5Q894N5oymKeIwuLnSV4gaTW8La5mSC9x7YTOjPRvBPAOk83FfwY0iWU+z3ZQehQ4LSgYPmibOZ7LQGz9oaYwZMs1hXLunaN3sw4unlZK6ArIa/ZXMTXPenWeZxYVOTyXop/M+Oroy3t4AAbbwYOslY2c1kc9yQnzalXOOQsbrrXwsZrsXrRYWrB5Z7lC+vejrIJowJ0ez3jPTV4793d0U1v6C0pm3/B6A8bSROajEvbh7ZJ+t2mRQtP+hAyWzqSZaehvvW6F9a3TaN278mynvy1Y44nU9L9RHMfISi+o5GSQ21/lNTBaz5vsbXFLlukarHtWI34yz3Fbs32uKIbDIcPtvBfyUhqsLUhLUKOp9Xjrx0X2PSWdSkwSPASCYKQ30qQIadFr3tDpn9ycQSje478oMMuguvOa2GXP8hpc7e53sogGlIMazDWM4T0AcnZVLyD5h2aFuqFUN5PSI6rd0VDHsW857ORSCQSiUQikXdxrh4o08Pnb1wjK3rSjVtWrUHTerIj4f4XL3Ev3yPdqxiXDZO8ZZS21H3KrM5VW6c5tfs2LsWgXZIXLavdlPTQhp5jQXQth2RlyOYuNGfU3ZYKNwYP1DJldGkJV7pNxYwXT1WnG4dIs5uqTMCVFrmZkZ2c6r5o/zCh3vX0r1Zcv3TCTlHRO8NhNaIfDHWb0s6nqj+RaILn4ITrLxxyN9+maix0JsTBhX5k6LbUDZxUepx6Iyqglqinqdv2dNuDVjcMglumeJsgQ0pxbMgMmNCIclxqE+S2twyDoaomqqO0zNTFbQ39xMGkp5KM/MRim5Sk6jHO4zurbQKcY+dNx2pWUhy6zW5+3RJhdK/FtAN21emueafEzTJIdUeMV3fuOs/qUcE+H1qvSKKVH1vX5syYkh1Y8iMhOzl1R6uWiKc4MPShgtElbGLcmwbI3kMf+uyJQbJUvTutJsyL0xY0W19ISapk0z7ADGvNHP2+ZiuhupxSbal43PHuhP5lbTXzsZfucmtrG+eFai+4zQWseJpbY9KFoT8x2CoJ+Sxa+izW4xKtAvMGcOvk3dDzzfvTappw3ks3aEVe3TN6Y0J535Mt133NQgLwwKZ897DZ5Qt/JGN3a4XbFRop1DsqFtP4kL+nc6JJ6OCMh0ro9zqycYukA/Uyw3Ql2TLB1hlG1LPgu157TXb6d52dz/x44ObnLqtMRxO8CyFUfasITUgTx3gp5Eee0f2e4v4KM6u0Es+IVr05GHZ6GhL1LgcPk9O2V5r
HFnrS9duO/qLuRG2Tks8tttZEVtIkeCPXiaaaIF3ctzyYXwkuffXgJRrd4Wik4bKtu56ttzuKeytk1UBiWc4mXNhdcGw87SIlPbIqqHss5AehWILwdSaEPcZQ7zl2rs14YfuESaI5a1tXFsy8Ci36zOGN3WgI6byCF085ajHGU1cZ7iAjO7JkM809SarTvNB2InRTQzuFejJQvLBgNSloD1JsJXhjKQ5D+469DpcVmN4ACRnhc/qHPcPJzHC0KtmdLmG6pO4Sjg4nNFegcQKLBLs0Gw08zVcN633IizStHltba0sOL8KQe9KXl+xOlxu5FhuqWtfMm5z9/Sn9zZzxTWHYbVmNLNm+hsx94ml2Qi9HExL+G0tSZeqxENGquxcr+qYkm51pFSKhki0k0jsLFNCVGlY/20uTRJuBS+iTR6bXpfGrJzin5fPTsiGzA51TaZC1nt/UDrh1e50QBktWGt5P5x1m1Wlu3VIv0dpOq0PydJPM721ON/EMUw2lSaeSAfmJIZtZpA1pKuu8rXXfvvCb7NqEVy8esjdZsn9lTO8MWdKzU9YMznD7YJvFSY6dW8p7JrRNS0PagApYah4p4AXnhHHWbvJzZ/sX8VZIS00X6Ueilb2jXqtwvRYWreULJllP31uGXo9VlWckCxt0E2XTPN0nmqKiFYWoV1FUNLfNHcnMampG6fASKu29rjfeqjfTjcLJlDgkc9qb1Ql9ZUmWKUOp+oFmOLOOPYbzNaA6KL5YUr0wMG0gWzgtu4aN6rWtjXaFTif4YcJJCg+29f22hdKEKqZGxeLSZei7dtKQLVP6ZOBD3/QOR3VJN1iKpCe1Aw8WY5rqApM762RUt+ntJM7jHRS3E+p6ipv2JKV2rDfGUxSqSiziqVPHcJxx/eoR/WXD4WykP5ZBcI2F3mC3Wna3VjR9wp35lH6wLJeFLu4GWDcTDo1WT2ZjPvTyO7y2vU8eEhxy0zPrc3pnN9UordMcgjLpMOLovaWwHeOkJTM9R+2IG7MLzMqC9nhKNwZvLS4R0rnBdnB8NCbJe/K8J7E9s+0BTLJZ4Ndls9OdFcvU0d4pKfe19lO64Mp3DuqGyY0KKEmXbhOrV+FHwYXFxDQ90oGpO7LDku7lhu0rM0Q8R4cT+pv5Jny4+XFbNW4ASDXBeruseeljxyzanFWXar+jzjL0FlclyMpi9irSrFc1+rTjcH9KNsuw9QQ7a7SKEE77WCU25Ac4FQntHdlRy/Sm2SS2rlXDgU0icz5zpFXIacmFdjvjxv4LfO16zUeu3+fjl+5ixJGK2zT0nbUFrx+WKhPQaFhTq2k8xT0VBE0qIa0c6azFnlSnFSDWqsGfJloJ1g+q05OqQrGpOkZ3g/ZSryGZDT4kR1aOi5/1HPgJD77F8vLVQ8pLByTGbTSKTupCF5HBktmBVZ3THBWkhwkX9ua8unPIdlZzr5ry5s1XNoaeymLJZozr0IJPNHdDqpb8sGH7y2PyE612U+2h02G61NLsaD5SttCiBjOrVGvJe0hS/VwPxXbD7ouH2luuyRg8lHmHcYblvMAvE8gdOxcXvLRzjPPCl2avMLpryI4NZBrqxBjoeobS4EYZtukZ39Jk7rQ6PYZrbTc8Kg+yzhmqghZUYknuXITdBR9/6Q7OC7dOthHxdINluQxyA0ZVq43Vnn3DIGSJY74s+PzhGHOS4gqHGXfaKy+EutOlJ531anDXvTZPP7TU/VjDIb2QLiQYIzrOIdPNVVJ58pkWLiTbQjdNKK+2XHtlxu2tbeqDEtMnJBUgnpcuH3E72aJ7MMEdn1kr3WlvRtKE0R2h7i4wu9oxubzk8nTB3osrrHEUtqMe0k1Sdu8NTa/J421v6QZL2yaq8v+V7FT4sdd1ZHtccW080wpor2kfazLTcyFfcXm84PPDC+SHGfm45Vs+cpsH1YTD5Ygy65jmDRfyFbO24A3zIsWh0erLpYbuh2XKxz58i9Vexv5iTN3ZjS6UtY5UPMt5ges
sslTDe3Qvwy4arfxMk9OLd9iAi/dkB5bhktkYTlt5zW6+pLQd1ZDivGG/HnPzeIflwQipDOJVCb84cRQHHcnRSpvAdz1Z7xjGmQq21g2Sr/XrwlppPOlOzfakJk96buUXyY4T8pMgMjl4sLrRIkkeMqD8nYLX3RWu7Z3wkYsPtPLbDCRmwHnD1fGMdrDcXmxzb7qLuIR0kZDNO60ATC2m9xQHHnEJ7WzMWyc5ZtRTjtpNmC6pIVkNmF6rY/OyY2eyYpR2XCyWjBPdLDdDQmIGVn3Gqs/YeqWmHSzzrqDqUgYvzKuC+XEJPsM2KdmhFthIr8bY1VcOOFmWqjW5Pn2HU4eLB7yTkN8miHXYRI24obf0abhudf70WvcenLMBpVn5xV2Vgi/vNyxeKjYqz9KfJh3b1pMuNH5cHAntRBcDH5Su4TQ/RwaPWTaM7qXceXubK5+Y82de/C1WQ07jEox4fpUPcXe4QLoYSI4rZLHaxDe9c4gRbYFxaMBnqmGR61jaoE01ZJ4EyGbC7ekOf/yjb/Ly9SOsOG7XO3z55BK3D7fo65SjL+1S7GuT0nzhGbXqCWt2tBpufG9g8vYSlyeYGyW/Mf8GxpeXvLB9wqVywVZS8Ue37nI9PWJsGgrp2LUrdk2PBY6d4Z1+m8Nhwuv1NX7t8FW++NXrmFmCt57yRCv91gq3tnOMbzm6caHKr4nu0FIP+ZGjvFthbx/gt8ZM37zE3G3DhVZzGqqB9KhCZkt83eC7FressJcv4JKS+oJ6iUyv1ZTeCHbdEBlNCJbBk82Ebp6S7Q1cHi9o+wTIyQ5rzLw63eGuT1prwXvGtzw3v3yZC68c8Seuv8U3lXeY2prBC3e6C3x+cY0b813+6N7bXM5mjEyLxfGLOx/jjc99FHfDkgQZBnpNFvaVGiiSJmCy0x1ekyJXCxVdTWDdzPi0Dcg6/0d3jUOmyvP5geCPS968+xJvXq+5sLUiS3rqLqFuU/pelYlHDwby/Yb09iH0A6bMmdzaIpvpeVHsdyT3jnWxTNNNwrv3XhdVwK8qXHYFVyaYqscsKmwbKt1CsucmiRv1WrpUcLlQPvC4L5S8Yy/wiRfu8Int23y4uMeOXWFwrFzO0uXc6Xb4zeOX+K3mRdhPmM1HfHZR4oGi6MBDunAkJ5V6+EKzWd92yLREfLZpKSRthz1a4WWselA+SBd0hJYc4RxtTnNA1AOnGxzfB7FK7ykOW5r7I0a7x3zy4i120hVTW7NyGV9ZXuK35Rrzfgxoxeq8zSmTjmHssJ3BrjpcZkNunVaWthNDXiYkdxqKY0efq3bNpitAUNb3VlviDLlq4mRlpn+30/y8k2SX4ytjdi9oYcnL28d8685NJlbze1LRC1PnLe/Uu3zx5ApvvX2Z4p2MyYFuCJ21DHmyUU5vt4R0pcdZGq1SzQ9KyvsjigdW8yEl5Mz0IQl7XYrt1+er/j9deqZvG2buIicvVfhBsEujraXmjux+wjujCxjrsRVkJz3pUY09muOXFb5p8G2Lazu2v3aZdGEZ30oYih3ubO1Qf6QmH3VMRzXXJzNenj4gNz3XshNeyfY1p8zlHPYTvlZf5DcOXubm0TWGIuhNDQPFgfDgjT0Omkunnv0m5CK1IY1029Ne70j2U9K5Z3F/hHnB8x+/8Osc9hNWLqNxCfeaKUd+hBc1IPPDBjOrwBqKm9t8ZWuP/+ijn+HVl/b1N+OFAcN+N+VGtctn7HUWq4K2y/HWqkHSD0jV6J4qyKH4sAGjLBjdEupum/2x6hbdaTUXypWnEyW9obxluXCk17d2Kkzu9prTU/fIfKXrbK+VrUYEd3iMW65IymJj+NujFeXdCcu0ZJUO7JQVdtoxFKmeC6sWWVbQtPgzhoCIIHXL+IaBd0qORiV39i6rF2clZEEsur42INut6mIR8obCRtosG6RqyA8ydhpHu51QXTR4mzBkKfVeQVqrN7k4HMgOKiZ3LPVeSjtLuDsukdHA1+xFAIpRy9a
oZjuv6Zyl7hM+vn2HbxvfYPDCyuXcbHc57Mb89ug6d2aX2XrbUM4bzLJicrvk4O6Yyx9d8Cde/goP6gk35zv0QRTYhdZPTa2t16S2mFpzpJyBITRktp1GN9LjBllUp9WLT0DWvXHOAxGZA6+f2xdGnsQesP+sBxGJ8/CcEOfh+SDOw/NBnIeHecV7f+lxT5y3jMHr3vtvP+fvjDyCiPx6nIdnT5yH54M4D88HcR6eD+I8vH9iFV4kEolEIpHIUxINqEgkEolEIpGn5LwNqL91zt8XeTxxHp4P4jw8H8R5eD6I8/B8EOfhfXKuSeSRSCQSiUQifxiIIbxIJBKJRCKRpyQaUJFIJBKJRCJPybkZUCLyKRF5XUTeFJEfOa/v/aAhIi+JyK+IyBdF5PMi8sPh8V0R+QUR+XL4/4Uz7/nRMC+vi8i/9+xG/4cPEbEi8i9F5B+H+3EezhkR2RGRnxaRL4XfxR+L83D+iMh/Hdakz4nIT4hIEefh64+I/JiI3BeRz5157KmPu4j8qyLy2fDc/yYi8uh3fdA4FwNKRCzw14E/CXwz8OdE5JvP47s/gPTAX/Hefwz4TuCHwrH+EeCXvPcfAX4p3Cc89wPAx4FPAX8jzFfk94cfBr545n6ch/PnfwX+mff+m4BvRecjzsM5IiIvAP8V8O3e+0+gTWh+gDgP58HfQ4/hWX43x/1vAv858JHw79HP/MBxXh6o7wDe9N6/5b1vgZ8Evv+cvvsDhff+jvf+N8PtOXqxeAE93n8/vOzvA38m3P5+4Ce99433/qvAm+h8RX6PiMiLwL8P/O0zD8d5OEdEZAv4N4C/A+C9b733x8R5eBYkQCkiCTACbhPn4euO9/7TwOEjDz/VcReRa8CW9/5XvVae/Z9n3vOB5bwMqBeAd87cvxkei3wdEZFXgU8CvwZc8d7fATWygMvhZXFuvn78L8B/g7YcXRPn4Xz5BuAB8HdDKPVvi8iYOA/nivf+FvA/AzeAO8CJ9/7nifPwrHja4/5CuP3o4x9ozsuAelysNOonfB0RkQnwD4C/7L2fvddLH/NYnJvfIyLyp4H73vvfeL9vecxjcR5+7yTAHwH+pvf+k8CSEK54AnEevg6EHJvvBz4EXAfGIvLn3+stj3kszsPXnycd9zgfj+G8DKibwEtn7r+Ium8jXwdEJEWNpx/33v9MePhecMMS/n8/PB7n5uvDdwHfJyJfQ0PW3y0i/xdxHs6bm8BN7/2vhfs/jRpUcR7Ol+8Fvuq9f+C974CfAf514jw8K572uN8Mtx99/APNeRlQ/wL4iIh8SEQyNEntZ8/puz9QhMqIvwN80Xv/18489bPAD4bbPwj8ozOP/4CI5CLyITQ58J+f13j/sOK9/1Hv/Yve+1fR8/2Xvfd/njgP54r3/i7wjoh8Y3joe4AvEOfhvLkBfKeIjMIa9T1ofmach2fDUx33EOabi8h3hvn7T8685wNLch5f4r3vReQvAT+HVl/8mPf+8+fx3R9Avgv4C8BnReQz4bH/DvgfgZ8Skb+ILmZ/FsB7/3kR+Sn0otIDP+S9H8591B8c4jycP/8l8ONh8/YW8J+im8c4D+eE9/7XROSngd9Ej+u/RFuGTIjz8HVFRH4C+LeAPRG5Cfz3/O7Wof8CregrgX8a/n2gia1cIpFIJBKJRJ6SqEQeiUQikUgk8pREAyoSiUQikUjkKYkGVCQSiUQikchTci5J5JHniz256ltavRPaGT0k8iHvemTzOn7H173rxmPuymNUReSxb3v/nwn+vT7jCe950vf6p/6c9ePyeHGU9/y7Hvne38V3+9/h+ffz3O/6b37kuXf9/e+nY9Z7jet3O6b3fI3/nd//vp/z73ruiW+VszfffaY8vruYf/jz3vX5p5/z6PvPfsejP6PHPre+LU967knf5d/X6979nY+8713f6997DOvPeMx3yBNf//Tf++hnPv62f+gYy5lXPPl9j77m9JHf+O3m57z3H/h2Kc8z0YD
6ANLS8q+ZfwcxAqJOyLO3MQIiiAn3ReCh2+GnLmbz2se+Ts4899DrzOYz3ut1XkR9pPLIa88+d+Zxv/kMHn6dnLn/0HNnP/vhz9y878zrNhdyYTN+fe7Jrzu9LQ+/1rz7fWc/7/Gf8ej3PTrGJ73uMbcf9xzv7zMefe69xvuu+7yPcTz02f49v0v/+dP38bjX+Ye+6+x7Tv+W0/vy6Ps2Yz+9L/Lu22fft74g66l59rNPL7LyyOvMQ/f9mZ/E6eMmGAvmzOsevW1493Pm0du83+fc6e1HHrdnvuvs6yz+4fviMUGM34jHnr0tbvMZVhxG3EOfYc98vn3MZ6zfb8P7DDou/Qz30PvOjsOe/bzw/s134TafZ8PfvPmMM8fA4s+MLzwX5tYK2DDTBrAimHDfcua2CCa8yyBYMZv79tqX94g818QQXiQSiUQikchTEg2oSCQSiUQikackGlCRSCQSiUQiT0k0oCKRSCQSiUSekmhARSKRSCQSiTwl0YCKRCKRSCQSeUqiARWJRCKRSCTylEQDKhKJRCKRSOQpiQZUJBKJRCKRyFMSDahIJBKJRCKRpyQaUJFIJBKJRCJPSTSgIpFIJBKJRJ6SaEBFIpFIJBKJPCXRgIpEIpFIJBJ5SqIBFYlEIpFIJPKURAMqEolEIpFI5CkR7/2zHkPknBGRfwbsPeHpPWD/HIfzB4V4XJ5MPDZPJh6bJxOPzZPZA77kvf/Usx5I5MlEAyryECLy6977b3/W43jeiMflycRj82TisXky8dg8mXhs/mAQQ3iRSCQSiUQiT0k0oCKRSCQSiUSekmhARR7lbz3rATynxOPyZOKxeTLx2DyZeGyeTDw2fwCIOVCRSCQSiUQiT0n0QEUikUgkEok8JdGAijyEiPxPIvIlEfltEfmHIrLzrMf0vCAif1ZEPi8iTkRihQwgIp8SkddF5E0R+ZFnPZ7nBRH5MRG5LyKfe9ZjeZ4QkZdE5FdE5Ivht/TDz3pMzwsiUojIPxeR3wrH5n941mOKvDfRgIo8yi8An/DefwvwBvCjz3g8zxOfA/5D4NPPeiDPAyJigb8O/Engm4E/JyLf/GxH9dzw94Co4fNueuCveO8/Bnwn8EPxnNnQAN/tvf9W4NuAT4nIdz7bIUXei2hARR7Ce//z3vs+3P3/gBef5XieJ7z3X/Tev/6sx/Ec8R3Am977t7z3LfCTwPc/4zE9F3jvPw0cPutxPG947+94738z3J4DXwReeLajej7wyiLcTcO/mKT8HBMNqMh78Z8B//RZDyLy3PIC8M6Z+zeJF8PI+0REXgU+CfzaMx7Kc4OIWBH5DHAf+AXvfTw2zzHJsx5A5PwRkV8Erj7mqb/qvf9H4TV/FXW3//h5ju1Z836OTWSDPOaxuGOO/I6IyAT4B8Bf9t7PnvV4nhe89wPwbSH39B+KyCe89zGP7jklGlAfQLz33/tez4vIDwJ/Gvge/wHTufidjk3kIW4CL525/yJw+xmNJfIHBBFJUePpx733P/Osx/M84r0/FpH/G82jiwbUc0oM4UUeQkQ+Bfy3wPd571fPejyR55p/AXxERD4kIhnwA8DPPuMxRZ5jRESAvwN80Xv/1571eJ4nROTSuupZRErge4EvPdNBRd6TaEBFHuV/B6bAL4jIZ0Tk/3jWA3peEJH/QERuAn8M+Cci8nPPekzPklBs8JeAn0OTgX/Ke//5Zzuq5wMR+QngV4FvFJGbIvIXn/WYnhO+C/gLwHeH9eUzIvKnnvWgnhOuAb8iIr+Nbk5+wXv/j5/xmCLvQVQij0QikUgkEnlKogcqEolEIpFI5CmJBlQkEolEIpHIUxINqEgkEolEIpGnJBpQkUgkEolEIk9JNKAikUgkEolEnpJoQEUikUgkEok8JdGAikQikUgkEnlKogEViUQikUgk8pT8/3c9MKg5Coe1AAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "mel_after = tf.reshape(mel_after, [-1, 80]).numpy()\n", + "fig = plt.figure(figsize=(10, 8))\n", + "ax1 = fig.add_subplot(311)\n", + "ax1.set_title(f'Predicted Mel-after-Spectrogram')\n", + "im = ax1.imshow(np.rot90(mel_after), aspect='auto', interpolation='none')\n", + "fig.colorbar(mappable=im, shrink=0.65, orientation='horizontal', ax=ax1)\n", + "plt.show()\n", + "plt.close()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/TensorFlowTTS/notebooks/griffin_lim_tensorflow.ipynb b/TensorFlowTTS/notebooks/griffin_lim_tensorflow.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..149fcb58249b27acb1a94850ead29d23193fec26 --- /dev/null +++ b/TensorFlowTTS/notebooks/griffin_lim_tensorflow.ipynb @@ -0,0 +1,291 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Griffin-Lim algorithm comparison between LibROSA and TF" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "import glob\n", + "import tempfile\n", + "import time\n", + "\n", + "import librosa.display\n", + "import yaml\n", + "\n", + "import tensorflow as tf\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "from ipywidgets import GridBox, Label, Layout, Audio\n", + "from tensorflow_tts.utils import TFGriffinLim, griffin_lim_lb\n", + "\n", 
+ "# %config InlineBackend.figure_format = 'svg'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Get mel spectrogram example and corresponding ground truth audio." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "mel_spec = np.load(\"../dump/train/norm-feats/LJ001-0007-norm-feats.npy\")\n", + "gt_wav = np.load(\"../dump/train/wavs/LJ001-0007-wave.npy\")\n", + "\n", + "stats_path = \"../dump/stats.npy\"\n", + "dataset_config_path = \"../preprocess/ljspeech_preprocess.yaml\"\n", + "config = yaml.load(open(dataset_config_path), Loader=yaml.Loader)\n", + "\n", + "griffin_lim_tf = TFGriffinLim(stats_path, config)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "TF version has GPU compatibility and supports batch dimension." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "inv_wav_tf = griffin_lim_tf(mel_spec[tf.newaxis, :], n_iter=32) # [1, mel_len] -> [1, audio_len]\n", + "inv_wav_lb = griffin_lim_lb(mel_spec, stats_path, config) # [mel_len] -> [audio_len]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Time comparison between both implementations." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "4.29 s ± 219 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)\n" + ] + } + ], + "source": [ + "%timeit griffin_lim_tf(mel_spec[tf.newaxis, :])" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "717 ms ± 8.95 ms per loop (mean ± std. dev. 
of 7 runs, 1 loop each)\n" + ] + } + ], + "source": [ + "%timeit griffin_lim_lb(mel_spec, stats_path, config)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "tf_wav = tf.audio.encode_wav(inv_wav_tf[0, :, tf.newaxis], config[\"sampling_rate\"])\n", + "lb_wav = tf.audio.encode_wav(inv_wav_lb[:, tf.newaxis], config[\"sampling_rate\"])\n", + "gt_wav_ = tf.audio.encode_wav(gt_wav[:, tf.newaxis], config[\"sampling_rate\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "090262d2d93e4c87abb9038087d0b99c", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "GridBox(children=(Label(value='Ground Truth'), Label(value='Librosa'), Label(value='TensorFlow'), Audio(value=…" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "items = [\n", + " Audio(value=x.numpy(), autoplay=False, loop=False)\n", + " for x in [gt_wav_, lb_wav, tf_wav]\n", + "]\n", + "labels = [Label(\"Ground Truth\"), Label(\"Librosa\"), Label(\"TensorFlow\")]\n", + "GridBox(\n", + " children=[*labels, *items],\n", + " layout=Layout(grid_template_columns=\"25% 25% 25%\", grid_template_rows=\"30px 30px\"),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlsAAAHwCAYAAACR9qrBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy86wFpkAAAACXBIWXMAAAsTAAALEwEAmpwYAAC14UlEQVR4nOyddbgU1RvHv+/l0l3SYQCKBQoidqAYhF2oGPwQEQvsQmyxW7DFbinFwEBQEBBERBSQDumOC/f9/XF27s7OTsfO3Hvfz/PssxNnzjk7O/Ge97xBzAxBEARBEAQhGvLi7oAgCIIgCEJJRoQtQRAEQRCECBFhSxAEQRAEIUJE2BIEQRAEQYgQEbYEQRAEQRAiRIQtQRAEQRCECBFhSxCEUgsRNSciJqL8HLf7PRH1ymWbgiDEhwhbgiBEChGdR0QTiWgzEf2XWu5LRBR335wgovlE1ClgHXcT0Vth9UkQhOKHCFuCIEQGEQ0A8BSARwDUB1APQB8AhwMoZ3FMmZx1MCC51ogJglA8EWFLEIRIIKLqAO4B0JeZP2Lmjaz4jZl7MPP2VLnXiegFIhpNRJsBHEtE+6Sm2tYR0Uwi6qarN2MKjoguIaKfdOtMRH2I6J/U8c9pWjQiKkNEjxLRKiKaB+BUm/4PA9AUwAgi2kREN+mmHS8nooUAxhLRMUS02HDsfCLqREQnAbgNwLmpOqbrijUjovFEtJGIviKiOv7PtiAISUaELUEQoqIjgPIAPndR9gIA9wOoCmAigBEAvgKwG4CrAbxNRK08tN0FQHsABwA4B0Dn1Pb/pfa1BdAOwFlWFTDzRQAWAujKzFWYebBu99EA9tHVa1XHlwAeAPB+qo4DdbsvAHAp1G8sB+AG179OEIRihQhbgiBERR0Aq5h5p7aBiCaktE1biegoXdnPmXk8MxcCaAOgCoCHmHkHM48FMBLA+R7afoiZ1zHzQgDfpeoElOD1JDMvYuY1AB70+dvuZubNzLzV5/EA8Boz/52q4wNdHwVBKGGIsCUIQlSsBlBHb9fEzIcxc43UPv3zZ5FuuSGARSnBS2MBgEYe2l6uW94CJbwV1W2o1w+LnIs4YtVHQRBKGCJsCYIQFT8D2A6gu4uyrFteCqAJEemfT00BLEktbwZQSbevvoc+LQPQxFCv235Zbc/oT8rAv66LOgRBKCWIsCUIQiQw8zoAgwA8T0RnEVFVIsojojYAKtscOhFK03MTEZUlomMAdAXwXmr/NABnEFElItoLwOUeuvUBgGuIqDER1QRwi0P5FQD2cCjzN4AKRHQqEZUFcAeUrZq+juYG4VEQhFKE3PyCIERGyqi8P4CboISOFQCGALgZwASLY3ZACVcnA1gF4HkAFzPzX6kiTwDYkarrDQBve+jSSwDGAJgOYCqATxzKPwjgjpSdmakBOzOvB9AXwMtQ2rfNAPTeiR+mvlcT0VQPfRUEoYRAzKLhFgRBEARBiArRbAmCIAiCIESICFuCIAiCIAgRIsKWIAiCIAhChIiwJQiCIAiCECGhCFtE9CoR/UdEf1jsJyJ6mojmENHvRHRQGO0KgiAIgiAknbAy1r8O4FkAb1rsPxlAi9SnA4AXUt+W1KlTh5s3bx5S9wRBEARBEKJjypQpq5i5rtm+UIQtZv6RiJrbFOkO4E1WcSZ+IaIaRNSAmZdZHdC8eXNMnjw5jO4JgiAIgiBEChFZpv/Klc1WI2TmElsMkzxnRNSbiCYT0eSVK1fmqGuCIAiCIAjRkSgDeWYeysztmLld3bqmmjhBEARBEIRiRa6ErSXITP7aGOmksoIgCIIgCCWWXAlbwwFcnPJKPBTAejt7LUEQBEEQhJJCKAbyRPQugGMA1CGixQAGAigLAMz8IoDRAE4BMAfAFgCXhtGuIAiCIAhC0gnLG/F8h/0M4Kow2hIEQRAEQShOJMpAXhAEQRAEoaQhwpYgCIIgCEKEiLAlCIIgCIIQISJsCYIgCIJH5s8HduyIuxdCcUGELUE
QBEHwyO67A48+GncvhOKCCFuCIAiC4IPbb4+7B0JxQYQtQRAEQfBJQUHcPRCKAyJsCYIgCIJPli71Vr6gALjmmmj6IiQXEbYEQRAEwSdly3orv2QJ8Mwz0fRFSC4ibAmCIAiCT7wKW4WF0fRDSDYibAmCIAiCT4i8lWeOph9CshFhSxAEQRB84lV40sq/8w6wbl3o3RESighbgiB4gsj7aF4QSipehS1tGrFHD6BmTeCjj8Lvk5A8RNgSBEEQBJ/s3Jm9bdcua9ss4/azzw6/T0LyEGFLEARBEHxi5lmYnw+ceGLu+yIkFxG2BEEQBMEnf/5pvv3bb823yxR86USELUEQXKO3Txk/Pr5+CEJSME4L/v57PP0Qko0IW4IguObjj9PLN90UXz8EISkYha0NG+zLL18eXV+E5CLCliAIrtEb83pNUyIIJRGvQUr79YumH0KyEWFLEARfzJ8fdw8EIX4WLMhc79/fvvy2bdH1RUguImwJgiDYsHkz0KtX3L0QksqsWZnrU6fG0w8h2YQibBHRSUQ0m4jmENEtJvsvIaKVRDQt9ZFHlyAIxYLZs4FXXgG+/jrunghJwS6QaZkyueuHUHwILGwRURkAzwE4GUBrAOcTUWuTou8zc5vU5+Wg7QqCkFtWroy7B/GwZo36fv/9ePshJIc33rDel+fwVpXQD6WTMDRbhwCYw8zzmHkHgPcAdA+hXkEQEoSTl1VJ5dRT1fcrr8TbDyE5zJtnvc9Js2WmFfvwQyWEHXaYs82XUDwJQ9hqBGCRbn1xapuRM4nodyL6iIiahNCuIMTKu++WLm1HadVs7diRXh42LJo2CguBWrVK7zkubthpp/Lzvdd3zjnq++efgSee8NcnIdnkykB+BIDmzHwAgK8BmCphiag3EU0moskrI3rqEAHr10dSdSD69hX1cnHjgguA88+Puxe545JLsrdt3gxs357zrsTGxRdHU++4ccDatcC0adHUL4SL3VRhhQq564dQfAhD2FoCQK+papzaVgQzr2Zm7ZH8MoCDzSpi5qHM3I6Z29WtWzeErmXy33/qe9Wq0KsOzAsvxN0DwQ92hrIlDTOhqkoV4NBDc9+XXDF7dm7akTAaxYsvv7Tep9dsbdkSfV+E4kEYwtavAFoQ0e5EVA7AeQCG6wsQUQPdajcABmfZ3FCvnvp+7LFw6925M5gHSml6YQvFF6vrtCRrY3IlBPXunZt2/LB2LbBokXO5IHz8MTB8eOa2AQOAo46Ktl2//PKL9b4lOlWDRItPNoWFwJQpuWkrsLDFzDsB9AMwBkqI+oCZZxLRPUTULVXsGiKaSUTTAVwD4JKg7XrlnXfSy2FrkbZu9R5FWI9+WvOLL4L3RxCioDQFYzzjDCVglC+fm/Y0u7CkDby2bQOOOw5o2lT1bezYaNo56yyge3dg8uT0tscfV9OrSWPtWvdlnTwToyTK/6ukMHw40K5dbtoK5VJg5tHM3JKZ92Tm+1Pb7mLm4anlW5l5X2Y+kJmPZea/wmjXCz16RFd3EJuV7duBmjXT63bq6dLKH38k70W/a1fcPcg9K1bE3YPcMG4c8OmnwD77AE8/ndu2k5bE+PDD05rLhx8Gjj8+83kVNu3b2+8nAlavjq59N9ySFUnSmjiFrWnT1P8lWFNQoL6rVwfeey/atiSCfAg8/LD/YwcPzlz/999gfSlJDBumHq777w9UrBh3bzLRj241W0ChZKBNXW3erISuqDnhhPSylxd5LtBHQ9e07uvWqfsyrFAgRx6ZuT59eua6UbiK28HJy0DLbLoxV45QWny4JLBjhzeNYK7Q/osNG6LXopZIYeurr9InccwY4IYbom1Ps+vwMwXw/feZ6yNGlGwbGC9E5fkVBvppY7HLEPxSWAh88016Pcka0x9/zFwPS9P500+Z6+++m7luHIB+9VU47frFi7Zq7tzsbf/8E15f7Lj00ty044by5VVok6jx6tCiF3yffz7cvhgpkcLWH3+kl086KXy
DeCM7d6pvP8KW2Zz6YYcF609J4Lff4u6BPXphK2l2NoI/Zs0Cnn02t20++GBu2ysOPPxw5v11xx3AsmXpF+O338bTLw239rlEwNFHR9sXO+KebtUwKhSiZO+9gTlz3Jc3Cs7btwMzZ0Yzw1Qiha2/UhZh+++fm/aCCFtmbN0aTj1RQKQ+QRwC3KBF7dajnee42bkTGDUqvS7u3cH49FPgrrvcl3/++bStRVjMmgW0bg1cfbVz2TCv/TvuCK+uXBP2f6DnuuvSy2PGAA0bptc/+ii6dt2wbp27ctWq+X8n6LWdfknK81LvnLZ0afTtebGhNgpbhYXAfvsBLVqE2yeghApbL72kvvUarqAMHpzpKbNxI/Dnn2rZr7CV5CkDM/RCxa+/RtvWsmXZ27T/1UjnzsBDD0XTjx07Mm1TpkwBypYFeulSqZ91VjRtlxZuuAG49153ZWfMAK66Sj0Qw2TTJvdl9RHlg7BxYzj1xMW++0ZX9zPP2O+PM1ail6Clfp/xN93k7zg92nUat9Clf26/+mp07SxcqL6/+85d+VmzgNNOy9ym/V9RvJtLnLDldtThlZtvzvSUGTAg/bDxK2y9HHM67r/+8jbSeO219LImaOaSvn2VO7ielSuVDcett0bT5lVXKU+VtWtV/ByzuEu5GK2VZDQh3nj/3HabCpg6ZEh64HTAAer7779z1z8jYWmwx4wJp544ies3bN4cT7uAN5stv1rQMM0oorZFSgpaLDi3HsQXXJC9zfh+CZMSJ2y51WaZGS5aYeY9oveI0dTpXh7ChYVAnz7uy4fNggXKtd2LfZjeU+qKK8LvkxsGDMhc79w5vWznHbXPPv5S62gJZ2vVAho3ttZizZxZMl6ecVC2rPo2jsAfeQSYOFHdJ2aay7hsUsIa9eoHL8UVTZuQayZNyt7WpUumxjkOjJ6SSZi9uPbauHuQJiplCJAWgonUe8HJ69PMEW3gwNC7VUSJE7aMbsRW2EUAtkP7g/QjFk1d62UU4xRYNeqYRpob7oIF7o/RT7W4tdeI2nNIPwI0ektpbNmitHjvveddK+E2qex++ylnjJJKlE4A2rWu1xrOmJEpfL39NvDBB5nHhRnV3OzlbUVYL9DRo8OpJ04++yyeds1CcowaFV9ieG1K2Bi2Iu4pvKQRpbOaJmxt22b/3vnmm3jyEJc4Ycst+ptg1Sp18o0ux0C2sd2dd6pvvWClBdz08kLq189+/wMPuK/LD23bRls/oM5H587eBLog3HOP+fb//S+9rDdsd0OQNEwliShH6Npg5cor1ffatenpQj3nnpu5HqbNk9P9qCdqbcUBB0TvgBIWcQmMZs9qIH0tEUXnaGT2nNeuCf3gjDk+55mVK+NpN040YUuvbTUTqvRx7ewIe6q61Apb+thb2lTaBReoG3T8eLU+Zky2MeTIkepbf8Np2hW3wpabl0QuI6aH6UigR9NUNG+em+jjZkb1l1+e6Q3Ttau3OnMRAbqwMBnTDXbkon/ffqvSwlxzjbvycbnVR+mFByitntgB+mPHjvSUc6VKuZtC0579+ph7zPF5lr/4Yjj1uDU2TwJWz2q997wXhUjY2RwSL2z16BFNnA7Nm4U5M8hcixbAEUcoqdZuWsjsT3P7Rz7yiHOZoUNzF7/Jb4gMp/7tsUd62ej1EQbGl97ixdllrLxf3EYzzsU0QNeuyhA8yRx8sP3+sISxRYuAt95yVzau+GZWXrFhEpbHo1+ef959qhd9sMqVK+OP76R3lslVuiXtWtTbhRYWZs+MeLlmZ83y3x9jKBU/19OmTSovZpKTpOuxm4XQFCj6ECK5JtHC1o4dSisRhhusFcZUK1rG9uOOsz/O7KZxq/p36+buRijLFWa/7eef3R/v10bODqeRh9WD7fPP3Ucz9iroL1iQ1n46MX++siX76afMsCJRc8wxarTnxZ5u5kz7/X4NX4NOl8Vhe/HGG9G3YXzRMqvfWqVK9G0DygvXbRLjtWvTL/O99wY
OOii6fvkh7Okgu4G2XqgpLMwWcrwIW61be++bFWZafyeefFJ9v/RSPAObRx7xNg1r9yzQUnB5yfbh55zZkWhhSxsVRBnTqX598+12BrP6m1c/+gv7grz55nDrC4LZy9ZuxBMk5ZCTvYGmbXIytrTStmhaNjfaGK9TRs2bK02Vm5Hk3nsrL0lN/R32zW3FDz+obzceO24xmy4hctZo5iL3YNiEMY2oDeqsMEay186TH8FhyhSl4XGrpfUjAD/3nPpesyZ3HopuNWhVqoQ7Df7229nbrAQwY7u5EFrMnp/9+zsft2NHZv/0WkG/5hRBfu9NNwGVK7sv79RHrzk1zzzTW3knEi1s6V8Ebk7UhAn+6/eC9mABMkd/bm7osObSg2AWo8jpATt0aPY2s5hTGkHSHTgZsWtCuJWRrIZTWg83Afb8XiNuRmTa79C0QnGFjtCyAgR5MBpf5FoYjs8/V9o7M4N3oHhG3/cSodqKr7+23298lngJuqqnoABo107ZLpUtay4oGJkxw3s7/fsDb76ZXner3Q2CF5ua/PxoBR23wlZhYfTa2L32yt72ySf2g7k5c1T+Qn3sR6PQRqQyLHz3HTB8uLu+eJn90GP3brHC6bxaPYNyRaKFLf2F+tlnKs6T3Q3jJXZWEG6+2fzCdTMi1Dyu4sQsYF6ZMvaG+2YXst0o+5JLPHcLgHppO3kAun1oOrmBuxHg/Y6Iv/jC+zFxTInpCeIMYNT2rFmTXt5nH/UCN3tAuw2tYYdmj5Erwkg87vSsMgqvfoP2liuXuX7hhUCTJiodzuuvmzsj+E2U3LNnetmrI4ofHn7YW/monF0qVLAWtozvhMLC6LVbVvEGzQbMGlp6Gif7rGefVSY23bu7e9/5HUzpk5O7DaDt9P/GFRNOI9HClv7hfMkl6uYaMsRaHR7Gg9stZtOMSfco07C6Scwi6mo4pc8wYnbD201jHHecEjbKlgUuvti+brepOpymTbzEVvKK3bkEgCeeyN4Wd843wNoL1smG0XiuzaYVu3fPvkfCCK2hF+yKC/fdZ7/fOBDQeycGfVkvXgw89RRw6aXqvv7770wtxtlnB6vfC0F+i5/nfRQRwq1yxZpptrzOvoTJ3Xe7L+vGG94uiLSG2f3txtNW/w5wGzsz7sGqE4kWtsxU7VdeqR78eoPcLVvUw93pBRc1TpJ+HCluzLB6wY0cqS5YvQp34cLwcv/ZaSC8uBjvvru7ck4334cfZm/78svceCCa2VDkYurFCSuNnNNL0fhSsXpYG1+QYUwjXnZZ8Dq8UK5c9HGMpk613jdsmLs63AoyrVoBu+0GfPxx7oNwBhG29Bp6t/UYM1B4xcoW0+00YtRhQ8LSmrmxOa1Z09lZxOxd42SvCGT+jjVr3GU6ics72S2JFrasbCPGjVN/tHZy27dPhuu8ltrFCj/5rtyMHrziJITobTZ+/FE9hL0wcaL5dqv/088D3s1Dy2ussk8+AU4+2VzrFCb33x9t/UE44wzz7U4DCeNLZc4c67L6a/rSS931y45cJyXesUMJJ3HRs6f5C6uwMFMjZnUfWnHWWenUSWGwaZO6nuyeN0FekHoNidOzV08Q70SrZxhztq3prl3Zz6CoBYIwBvRE7p/Jl1xi/yw2S9rtxwHDje2XCFsBcJqW0y78P/8MPwCZH5wipfuxGfjkE399scNJ2NLnPfSS4V7DSvC10mL4Ge3Zjfw1jOldnNAi0EcZUHLmTOCOO6Kr3wm/9kaaB6MVxnvVzoi8enV/fUgauQjUa4WZBvKZZ4AaNdIvHa/eV2FTtWrai9IqcHJYkfK91BPEwcEqd966ddnpo5izPSajNjUJ633hZQB87LHW+8y82KOM7F+tWjR1h0GxFraGDMlMxRInFStGY4AZR9qOZcvSUxVhRpe3GlH6eQBNmRKsL2ZoL6nZs8OvW8POBTsXqYGiMCavXj37PyxfPtkPvjBo2jS+tv/3v2zt4XXXqe9Bg9R3klL+WAVODks
b4eUZEuS8mGm9t24F9t3X3GPUuC3q/8QssLMb6tXLXPcyALZ7ppgFldUHfg2TYcOimQkKi0QLW8aAo0auuy7TVTVOmJ2nM/zEzYoikrSbG14zUNQe3G6xc1G3mqLwM4141VXm24MYScatHfWSn88vlSo5l/HzAjT+hwUF9vUUF2cSO+KO8m6l3R00SA1svE4jRo1Z2JugwtYdd6hrycszRO81GSY33pi9beTIzOtE72XnBq9aIDuPQzuMWlqvWlurd5+ZKYeddn3LFqWdNcMpzZ3fMBO5ItHClpPnximn5KYfbti50zkWjlHN7IYoUl+EObetH13feae1AWq1atYB6nbuDNeTNOhN16hRelmfcigM7CKtP/VU9HYHVas6l9G/FAsKgFdesS+/fn22AauTsJWfH7+wkmRatFDXQ/ny1i8nu2upShXvA6WoufLKbJsi5mAa3fvvV9dSQYF7TWouk2evWKH+QyL1LPcariJsT1uzkCNm9+m4cd7qrVvXfLtXTd6991pPfzsFSI5iZiDUOIDMHPgD4CQAswHMAXCLyf7yAN5P7Z8IoLlTnW3aHMzqMig+n7vu4gx27Mhc91tvGDz4IPPMmWr5tNPct+u23OrV9mWqVWP+6KN0f0aOZN61Sy3/+Wf4/8WDD7orV1ho/v9odOsW7v/WurX9Md9+G87/bUWPHt76PmKE+9+7eLE6Xw89xPzSS8yVKtmX//DDcP7r/HzmG29k3rjR3zkpLGRu3Nhf2/rrxwtO9TpdJ2bXWLVq4d9HUXz0BLm/wupDmP9bFJ8NG8Lr35Qp2cds3mxetlw5b/3Unud69trL+vxv2sT833/MV13lvv92HHdc+Of+66/dn3vVf0xmtpCTrHa4/QAoA2AugD0AlAMwHUBrQ5m+AF5MLZ8H4H2neqtW9SZsEeXu4rf6lC9vPPHpzyWX+K9Xz5AhzHff7e0C0Ppy0UVq2a0gsmqVu3LPP+9cpkoV5oMPZt65k7l/f7XtiSdUf6ZOje9FsW2b+f+lfbw+cPSfv/7K/A927XI+5rPPvP+3Xq8DN5+tW1X5MWP8/fbnn2euWDG3/2Xz5v7OyZAhwdrVzpUZ48apMmefrb7/+cfb/+D0+fffdFsdOuT2fPv9DBmSfjHH1Yc77vB3rcTR19dfZ54wQQ1ox49X5271atWf2bOZf/tNDXQKCpzrmjw5+zdZPee9PpP1z1JmNQBye+zHH6tjrrnGvtyIEcx5eeb/jdv3mtfPzp1ero9oha2OAMbo1m8FcKuhzBgAHVPL+QBWASC7eitVKn6aLYD5llvU9113hV/3xInp5b328jaSB5hPPlktDxzoTjh99NHoz9euXUqbU716fP/ZhAnqvLjVJnj9TJqkfudHH9mXy8tjbtXK283tFS/93rmT+fvv/f/uMmXi+T+vuEK9eBYuZF62zPmcnHtuuO3Pnavqvece8/0PPBBue2vXqvbatYvnfPv9vPlmvO1Hff/k+jN/vnOZQYOYt29X9/aaNeo3LVtmPjDyM9CsXFkNpm+4wdtxgwervhxzjLfj9Nqsvn1V+1Gc2/33Z377beY//nC6PqyFLVL7/UNEZwE4iZl7pdYvAtCBmfvpyvyRKrM4tT43VcbSpLxChXa8ffvkQH0rLRx4oAq0eOmlKvxEq1bKQ2zLFqBWLXW5dOkSdy+tadNG2X75zf9WkqlcWXm6nnEG0Lix+j9320153jRpAnTqlM77tmuXOoeaXdauXcDatcrgtLBQ2TSEFaC2OHHhhSpf3Lp1ygavZUtlB1WlinLa6N497h4KcXH88cqT8NhjlV1VQYEKWLttm0oqv3GjejbVqwfUrh2dJ11pJmheVj3lyuXGFrRJE+VENnq08kq+6CKVuJqIpjBzO7NjEiVsEVFvAL0BoHLlpgdv3uwQuEooMYR5wwmCIAhCLtmwAahWzVrYCsMbcQmAJrr1xqltpmWIKB9AdQBZfnbMPJSZ2zFzu4ICC/cGIYPbb1fuxat
Xq2B9WqJT4yfJPPigOy+54o4Xb5natVU0+zvvBL75RnnorFun/t+CAvVfb96stFdOSvDCQlUu6lQhSeWff9S5W7lSudIbz8/hh4ffZvny4ddpx8EH57a94ooxNMzw4cCsWel7RNMQa8tbtyrt1vbtJff+Of303F+verT/5KmngM6dg9VVsWLw/jixzz4qleCiRcrb9L//lDbN8R1mNb/o9gNlgzUPwO5IG8jvayhzFTIN5D9wqrdixeJps/Xrr8o4WG9fFfRz5ZXMffpkGlh/8IE3TyiAuXt3tXzffco+yKndr74K7zfk55tvZ2b+6af4DORPO03Z9zAzN2oUfv0336xsJJjVtWFXtmxZZXcQJW77fdllqvzYsf5/exDnAr+fe+5h/uUXb+fkoouCtVm5MvPVVzPfdhvz44+n6122zLz833+H+5u150Bxstnq10/dF3H2Icr7JxefK6/MXF+71vkYzRBdz/LlzBUqZJd18iY2fvr3V3apS5c6e6cbP/fd5/0afu455YDy3HPML7/M/MIL0dlsjRih3hNO71xEaSCv6scpAP6G8kq8PbXtHgDdUssVAHwIFfphEoA9nOqsW7f4CVvHHZd9Yw4YwHzdddYPXjcfPTNmeH+ZaH3RXuQ33eSuXbcPwzlzmI8+2r5MlSrKOJOZ+eef1TbNM2batNx7rmkfvTG6fnubNsqD9Igj/Ne9fn3mf1BY6HzMF194/2+9XgderrkvvvD321980fk/DVsYe/RRf+fku+/8t1mvXlpYN2PlSuaePZWTxMsvpz0Xw/rNW7ak2zr00OjukzA/y5d7vx7D/owf7+9aiaOvCxdaX2P6l78bb+fffsuuw0ow8joA1gaVGuPGWQ9gjYLi1KnqmF69nK+dTz81PxfXXhv+uW/c2Ov1EbGwFcXn4IOLl7BFlBYmrP8If58wmDQp7b3Yt6/7dp3KNGigyjm5HVerxvzJJ+n+rFiRXp4zJ/z/47ffvJ9bs+3du4f7vzVoYH+MWRycMHnnHfNRrFXff/zR/e/dulUJFN99x/zKK86jzGnTwvu/f/7Zf8wrZuZmzcL7j93gVO/++we7fpP6OfTQzD4/9lh4dXvRxET1v9l9mjb1d5wxpEKQ/mme13rC0jCa3X8tWjif/99/d9d/7V1jRRRxtrw+j+2ErURHkHdizz3j7kGaMmWimS9+6qlw6mnfXnlfAcDRR4dTJ5COSJyfD0yebJ0+aevWTLuA3XZLL9eoYZ3Kxw+rVikPR7/cfXd6+fPPg/Ymk9at7fcfdFC47Rlp1Mg8hYYefeaGI490TkMFqH5XqABcfjlwzDHqenBKnWSVL88Phx4aLFWTUxL5XLNsmcpLOnSodSqa99+3Pr5jx2j6FRTj/dS3b/Ccsl27AtOnh5uFImweeUTZDnpl2bJw7an22Sd7W7ly2du+/dZ73Wb3n5v/Vv8cGD7culyPHvb1RJF3sm3b8OpKtLDVtav9frPUA3GRn2+d00nDT/iFChV8dceWIC8lI3oB8+CDrfOOFRRYp57Q0m2EwXnnKeNyP2jJWD/7LJy+mGFnJH/55dG1q+EmUavxRe10PqtXB55/PnNb2bL21xlzNInbSwqrVqmQBP/7n/U1Y3d+J0wA3normr75Zf78zEEWoH5DkJfkzJnqBV2mjPskxO+95789r5x8srrWb7hBCTWvvurteL/PMiuc3lEa9et7q9fqP/R6j59yCvDoo+b7nFIdReHAEOa7MtGPOydtx8SJ/iTwKGAG6tSxL+MkmZsRxWjNzQWk5Tz0mujbrr9W+/zktPrmG/Pt777rvS4N7QETpsbFSK1a1vucchCGQVQeVcb/0Gy0XNII6jkVVfsjRqjvsF/UQWnWLHtbUIFb0xR7eU526hSsTSueey57W5MmmetGYdMJrxr/O+7wVt6KmjW9lbd6p5gJd3ax/sqUsc6v63St9Oplvz9uEi1sOZ3ctm2B447LTV+c2L7dOWm0nxFcFCERmO33t2+
fnqINs32rRNR+BEqvDy03aAKDVVLVMBgyJLq63XDqqeHXuX59trC1Zo1ymTejQYPw+xAHX3wRX9vvvJOdeHn4cBW8VdOgJ0lzuGuX+fawNAe5+q1mg/tKlYAlS8wHjcb/KOp+hmXK4uWZ/Nhj1vvuuy97W//+3vvjhvbt3Scjj4ME3Y7ZOGk8NKn/ssuAfv3sy+aCY4+13+8k5Jhx5pn++hIE/c3jJxrvf/+Zb7d6EPjRgrjRPl1yibc6P/lEfUdp71KtGvDTT9HV74TTuT7gAPPtxxxjf5zxXt19d+uyS5eml8O01cs1YU4xeMXsudC1a6ZdUPPmOeuOKYWFwLRpSpNhJWSEdQ6tBnJmBBlAmsUzY1a/45BDMrfn5WVriKIWtsJ4D3bp4k3YshOezMxgojwHbqeS4yDRwpaVvdK992YKLq+8AjzzTG76ZIdmgG7FSSd5rzOKC9NOw3bUUcB++6XXzzpLpSTwgpVmyGpaw+tv7NvXXTmr0bQVzZopY+SoU9pEEUQzLJ54wny700vR+B+61Q4GmfbViGNA4mfgFBY//uhugNKypbd6V64M93cRqVRiH35oX8Yv+kFRw4bujwsyxW0lqBFlG1Pn5WW3FbWAHsZMxIgR7kw7unYFFi60L2NmtuDHbOSRR5zLxDn4cUOiha0rr0wvn3aa+v7hB+t5aTvvnFzgJDQkxYbCSth64AF1fvWjsQoVlJFnGNh5drz4ovIoA5xVwWaqaT/cfHP2Nj8PAj9MnJi9beDA3LRth5V2dssW++OM581qoGR8mXsViM249NLgdSSNo46y3nfkkeG2VVCQaXOaS41YkBeknf2jFdOm+W8PMH/Ga5ots7LG+yLqd0BYAkelSs5l3nor2ybNiNn93aqVc90nnphe7tpVORgUdxItbOndVHv2BKZMsX8IOb0QwqJZM3PPwiTZSNhhpiLee2/gllusj7nmGm9t7Luvt/JXXAH8/LN6cD37rH1Zt1NPTqpwJ4eGIPz2m/1+45QDYD/1lgvswieYCYd6jCN4/ZSxJqyb2TRahTXwQpJCwLjFzJhaj/Ha9ZtI3szDddw4YNIk5c3966/ZbeXSWy+IcOBVizNggNK0hc22bea/gyj7nWA1TZ8L3nzTfVk304hu7KPMBvZu/reLLkovv/22c3kgXm2zGxItHugv4G7dnGMQOdmVGPHqhqsxeLC5BsSNVuTff/21GSZm3jizZtk/+MxU73YeK04vEzucbnS3Qu2dd9rvdyMQ+hWg99jD+zG50qqZsddeStj1i1EA1sJoAGqq8IUXzDURYQhbe+8dvA4veJmyssLpRWW8FvxOlb70Uub6H38ARxyhjIn32ANoZ5Iy1214ACP6KaWZM/3V4YXbb3dftk0bd1NRfrF6dhr/R6Lop7uszCDOPdf6mG3bVAiXH36wLtO4sYpntnWrygnohkaN3JUzor/e3QrVTsLWAw/460toWEU7jftz8MEH87p15hFn7SO4uv94La99CgrSkcVHjEhvN6ZnCauPUeC1HbO8W2ecYV1+3jz/v8UpGr0Wqbh/f/s2du4Mfm7d5JA0++jTAFlx7LEqKnL16uqYHTvcn6Mg6Pt5993ejzH7LFmSfUydOsyjRtnXO26cv/Mb9j3ipb1mzYK35xS123hvTZjg//fu2sW8apW3Y7z+ByNGZB4XZVvaZ9Mm9/Xs2uWtT3YccEB2/StXZvehYkWVJ1eP0zMpjGvbLG3PDTd4r+fss8O5z/z+JoC5Wzf35adPtz+XXiPl77abn99aTCPIaxLt2WdH14aVzYjdqEmvedGr98OeRgxqXxAm1atnb3voIevyQabEnDRb2sjQyVDeSlPkJZKz1zAQzEp17kZLNXq06oumas+VZ9477yit3tq14dmJmdl4rFypghTaccQRwdpt3z7Y8X4I439yMtK+667M9Y4dVZy+F1/03lZeXvS2Qtr/fOyxahYiF7j1QGQO99ls5gntRbPlFr+R481+6223ea9
HPwPC7K8vQfj9d28Bpp366NUx4qOPvJV3ItHCVl6eeiGFlbLGqg09miGekxG21Ry9G0aOdFcuCvsCM9zEKjP7bS1a2B+jT01z/fXe+uQGv7Y6e+3l/uHh52Xu9jqoUEG9MG67DbjpJu/t+OX889V0kpfpIidvX79TT0GZNCn3bVp5bIaJ2eDmrbeUbWMu+Osv92YWN92Ufo6OHRt+iqvigHbP642/8/KyBSYvwpZVxg0/mF1PTmhBQt2+r8Jm//29nS87L/vZs9X34MHu6ws7oHiihS1AGddGEQTxhBPSy/rIs4MHqxE/oALVWQkJQYQtp9E+APTp466uMPBiOOmFyZPTy14ucr+YxcAZNy5YnbkY0d1yi3Mqirjp3Ts37dx3n/vBVYcO0fbFCs1rNkqiyLPqhVat3Ht5Ju3a/e479X3AAdEIfnbPfn3Q4qDClhuPQCteeCFz3Y9mLy8PWLcumkDIUWAlbD37bDoMyo03uq8vbBvaxAtbUXHhhenlIUOUd9j06epm0EbpDRsCjz+eeZym3tdfvNpL3u2N5KZc1FG29d6Ffo0YndDc/7dsyU2SWDPvwiOOyBQmly/3VmcYoQlKArnwtL3oIjV9f9VV7hKJB3HCMOLlxRa1I8OoUZkOBoJ7KlRQjlIXXKC813M1pamhn6oiCiYwBSGs1DV+NGJxYSZsXXutep74IWwThVIrbOlf/nl5yrXdjVuuFvtL//LRRi9eXkhffWW//9Zb3dflB6/R1f1ApNz9czVKv/tu8+0XXJBe9voSC8NbriQQpYDRtKn6fvrpdFvff59dbuvWzPUwBXi7wJtGoha23Gi+k4KTx29UnH+++XZN2Hn77egGeGaDZbNnHFF8GspcDG6ThjYw1k/lPvlkdrk1a+IJklrihK0//nBXrmtX93XqVcFasmK9YKXd4F7+HKdkqFEbS2tJYbVgsW7QvwTcasP8BB70gt7WySx2FaBejs8848++x61ma/16Ne1cUonSXV0bkeozMFSvnnnt/O9/2YFSwwz54MURIixhy2pwUJxwCmoZFWYpYp56Ki2w5xrt2jRqQ+IM55JEwgpIbYb2HGFWU8lWg5aaNeMJklrihC23KSq8BMTbti17mz6ljTaK8PJCIspNLBoratVS03teou7rA829/nroXXLFrFmZ64MGpZftNIv9+vlTC3frBhx2WNoh2MoGpFq1cOIvlUa2b1ffxhfTm2+q62zWLODRR7OP8+utFZSwplTNYlwVN3Jhv2aGWSL6a65Rwa/jxKhRSoKw9eWXcfcgN2jCFpGaSh41yr68PkONRpTJ5UucsBWVRmjx4sykzLfdlhbCtDa9jv71EfLjoGJFb+6w+qB4cQgWn3+erc2oUEEJjGbTTmFw3XXA+PHpdTMP0c6do2m7tKC9OI33z6mnqpfn3nung4BqglmcqY3C0vKFlQYrLo44wl1C+CiwC6gcNV5MC/wK5nbZPNyiCX5Jej5FacOmXYtubTkHD84OHxR2Kiw9JU7YAtLq+TC97Bo1yhTk9J4mfoWtpCfONKL194wzMsM6RIGZB4yVses55wBHHx1tfzSaNVMarp9+Sm+zizcmODNqlHM6II1y5YB588KfgvPikGKV99ErxSW9lxWvvBJd3U5aiTASLvvFi7DlV7NlZZPmhSRo1QA1YNXwa6zuBs2YXzP1caJKlcyoBED6nrQySQlCMb/dzdFsP/TTXlGijSDCengmecTLDHz8cfTtvPZa9G0EYa+90svFTWhOGs2aeXu4RZFDsnFjdW3PmBF+3XZEpZEt7ujtbV58Ma3RBKINcu0Gt3ao69f7z9cbRg7FpBjJ6+2jvAYW9YMXO2FjaJ/8fGDDhnT4kDApkcLWKacA3bur5VWroreN0kYQfl66xx6bvW3o0GD9KQl4jdyea/SCtQhbJQe9LWYuyJVGNgqiuu6NQWMPPli9pLUX4//+F027bvESDsYszl+uwnrEkV3BjEaNvDliBYHZm5baGC6
ibFmlNY1iurNEClv77JMO81+7dvRTXm4isFuhj3cFKKO9xo2D9aeksGBBOrBiFNqMIOhV9Hotl1D8ybXANWxYejnqtDpB0IeL6do1vHvSqMU2zkgYtTxx3292kcqNGKepAOck5GGRi0DSbvn003hS/jih/y+jnOIEAgpbRFSLiL4mon9S36Zmi0S0i4impT7Dg7SZRM47z/+xXbpk2oAkRfWbBJo2VSlDdu4E5s6NuzeZ6FXVcQUuFKJBG6ideWZuUvPoAyz7yX0YJVdfnV7W8sDeey8wfHh4zypjzD+jwKmfejrooPgDvmpx4fyWzZXQEVcKreKE9l/06+c+Y4Jfgmq2bgHwLTO3APBtat2MrczcJvXJcUxfxcKF6eWDDgq37iAekPn5wH//pdeNmi5BaZFkqi5+tNhsJZ099wR++EEl7M5VflKNXGk93PL008Bll6nlM89UqczuuCO69tatSy+bmRJMmRL/4Ob2292XNdOC5UrYatEinXpOMEfzqn/mGfN0b2ESVNjqDuCN1PIbAE4LWF9kNGmSjix7+unh1l2pEnDPPf6P13vWxK0iFwQrkjgNEBVHHaU0KmYx9qKgefPctOOHl19O2ylFpS2ZP18NiPXpYZ57Lpmevl40enGn+xLtlj1HHJG7ezyosFWPmZellpcDsFLwViCiyUT0CxGdFrBN32iG8tpILSyI4ktbIcRLade4tWxZsq/9XBkZh5nnMWyIog9T0axZdjT6s88Gbr452nb9MmCA9T59DEIzu7bS/sxIGrkKjuwooxPRNwDMIldkKFOZmYnIauzbjJmXENEeAMYS0QxmzrLCIaLeAHoDQFMvE+MuKVNGeSrEGRDPis8/z0w9IySfcuVKl43d3ntnTscDwOzZ8fQlVxiTmx92WDTtRJUMXogGuzhf+uDXxT2WmhAejq8KZrbM4kdEK4ioATMvI6IGAP4zK8fMS1Lf84joewBtAWQJW8w8FMBQAGjXrl0kkxZLl0ZRa3C6dct9hnohGLNmla6H6dNPh5uPsDjy2GPR1HvggcAHHwTzbBZyh92Uuj4mmCBoBH1VDAegZaPqCSArcxwR1SSi8qnlOgAOB/BnwHYFIXb22CPZtjZhU5q0eHr0oSCizAV49tml9xwXN+zCP/ix01q+XKWmGjIEGDPGf7+E5BJU2HoIwAlE9A+ATql1EFE7Ino5VWYfAJOJaDqA7wA8xMwibAlCMSNul/u40JKu65OeC6Wb44+33uclDpdGvXrqOuvdGzjxRN/dEhJMoHEUM68GkHXZMfNkAL1SyxMAxJSuVBCEsNDSYJU2NG2TPhG7ULqxi/wftweikExKkcWJIAiCdw44QKVd0ULHCIIdfjRbQslHhC1BEAQbiFQ8HkEwwzilePnl9uUl9EPpRIQtQRB80aZN3D0QhPjRpxMCgIsvti+/xx7R9UVILiJsCYLgmsmT08tm6VQEobRh1FQ5TSM+8kh0fRGSiwhbgiC4Rp8/7J134uuHICQFY6w9p3jcRk2YUDoQYUsQBF8Yo6sLQmnEKGw5JWwvTTlGhTQibAmCIAiCT6wyfwwenNt+CMlG4hULgiAIgk+6dMneZqe9Em/E0okIW4IgeOLaa4GNG+PuhSAkA6/Ck7H8/PmhdUVIMCJsCYLgiSefjLsHgpAcvApbmo3XypVi91iaEJstQRAEQfCJX82WCFqlCxG2BEEQBMEnXnMhGr0XhdKB/O2CIAiC4JOdO72VL1s2mn4IyUaELUEQBEHwide4WY0bA7/8Ek1fhOQiwpYgCIIg+KRxY+/HdOgQfj+EZCPCliAIgiAIQoSIsCUIgiAIPhg2LO4eCMUFibMlCIIgCB6RHIeCF0SzJQiCIAiCECEibAmCIAiCIESICFuCIAiCIAgRIsKWIAiCIAhChAQStojobCKaSUSFRNTOptxJRDSbiOYQ0S1B2hQEQRAEQShOBNVs/QHgDAA/WhUgojIAngNwMoDWAM4notYB2xUEQRAEQSg
WBAr9wMyzAIDs054fAmAOM89LlX0PQHcAfwZpWxAEQRAEoTiQC5utRgAW6dYXp7ZlQUS9iWgyEU1euXJlDromCIIgCIIQLY6aLSL6BkB9k123M/PnYXaGmYcCGAoA7dq1k5BxgiAIgiAUexyFLWbuFLCNJQCa6NYbp7YJgiAIgiCUeHKRrudXAC2IaHcoIes8ABc4HTRlypRNRDQ76s4VE+oAWBV3JxKAnAeFnAeFnAeFnIc0ci4Uch4UuT4Pzax2BBK2iOh0AM8AqAtgFBFNY+bORNQQwMvMfAoz7ySifgDGACgD4FVmnumi+tnMbBlOojRBRJPlXMh50JDzoJDzoJDzkEbOhULOgyJJ5yGoN+KnAD412b4UwCm69dEARgdpSxAEQRAEoTgiEeQFQRAEQRAiJMnC1tC4O5Ag5Fwo5Dwo5Dwo5Dwo5DykkXOhkPOgSMx5IGaJsCAIgiAIghAVSdZsCYIgCIIgFHtE2BIEQRAEQYgQEbYEQRAEQRAiRIQtQRAEQRCECBFhSxASBhG9SER36tavJKIVRLSJiGoT0eFE9E9q/TQi+oKIekbQj0jqFQAiOjLKDBlENJ6I2qaW7yait1LLTVPXTZnU+vdE1Cuqfrjs6wFENCHOPghC1IiwJQgRQkTnEdFEItpMRP+llvsSEVkdw8x9mPne1PFlATwO4ERmrsLMqwHcA+DZ1PpnzHwyM7/hs3+XENFPFv3wXW9xg4jmE1HQPLB29TMR7aWtM/M4Zm4VUVtdAWxk5t+M+5h5Yeq62RVF235g5t8BrEv1WxBKJCJsCUJEENEAAE8BeARAfQD1APQBcDiAchbHlDFsqgegAgB9iqtmhvViBSmK1bOHiHKRRzYs+gAYFnalEZ+DtwFcEWH9ghArxeqBJwjFBSKqDqWB6svMHzHzRlb8xsw9mHl7qtzrRPQCEY0mos0Ajk1tu4+IWgLQpprWEdFYIpoLYA8AI1LTQeX1U0GapoqIHiWitUT0LxGd7PM3GOsdT0RPENE6IppHRIelti9Kae0spxxTdd1PROMBbAGwBxHtTURfE9EaIppNROfoylckoseIaAERrU/9poqpfd2IaGaqH98T0T664+YT0Q1E9HvquPeJqEJqXx0iGpk6bg0RjSOiPCIaBqCp7pzeRETNU9qoy4loIYCxRHQMES02/K4ijRgRlSGi24hoLhFtJKIpRNSEiH5MFZ+eqv9cY11EtE/qt6xL/bZuun2vE9FzRDQqVe9EItrT4jyXA3AcgB8s9mu/Sy847UlEk4hoAxF9TkS1DGX15yCPiO5I/S//EdGbqWsdRFSBiN4iotWp3/ErEdVL7buUiGal+j+PiIyC1fcAjiei8mb9FoTijghbghANHQGUB/C5i7IXALgfQFUARVN6zPw3gH1TqzWY+Thm3hPAQgBdU9NB203q6wAlpNUBMBjAK0TW05Ye6ADgdwC1AbwD4D0A7QHsBeBCAM8SURWb4y8C0Bvqd64E8HWqnt0AnAfgeSJqnSr7KICDARwGoBaAmwAUkhJA3wVwHYC6UDlXR6SEDI1zAJwEYHcABwC4JLV9AIDFqePqAbgNADPzRcg8p4N1dR0NYB8AnV2cn/4AzofKC1sNwGUAtjDzUan9B6bqf19/EKmp4hEAvkqdi6sBvE1E+mnG8wAMAlATwByo68WMFgAKmXmxxX4zLk71tQGAnQCeNuzXn4NLUp9joYT+KgCeTZXrCaA6gCZQ10gfAFtT+/4D0AXqvFwK4AkiOkhrgJmXACgAEMnUqiDEjQhbghANdQCsYuad2gYimpAa8W8loqN0ZT9n5vHMXMjM20JoewEzv5Syy3kD6iVaL4R6/2Xm11L1vg/1Ur2Hmbcz81cAdkAJXla8zswzU+fkJADzU/XtTNkXfQzgbFJTjJcBuJaZlzDzLmaekBIszwUwipm/ZuYCKKGsIpRQpvE0My9l5jVQQkyb1PaC1LloxswFKbsppxQadzPzZmbe6lAOAHoBuIO
ZZ6e0mNNTNnZOHAoltDzEzDuYeSyAkVCCm8anzDwpde7e1v0mIzUAbHTRpp5hzPwHM28GcCeAcyhzOlt/DnoAeJyZ5zHzJgC3AjgvpSkrgBKy9kr9Z1OYeQMAMPMoZp6bOi8/QAmWRxr6sTHVf0EocYiwJQjRsBpAHf10DTMfxsw1Uvv0996ikNtermtzS2qxCikPuE2pjx+brxW65a2p+o3b7DRb+t/ZDECHlPC5jojWQb3I60MJqhUAzDWpoyGABdoKMxem6m2kK7Nct7xF16dHoLRCX6Wmsm6x6atZn51oYtFnJxoCWJT6LRoL4O43GVkLpTn0gv43LgBQFuo/MNufcf5Ty/lQwvwwAGMAvEdES4locEprByI6mYh+SU3froPS/unbQKrf6zz2XRCKBSJsCUI0/AxgO4DuLsrmJEFpSpNTJfXZ1/mI8LugW14E4AdmrqH7VGHmKwGsArANgJld0lIoQQ2AMraHEnKWODau7OYGMPMeALoB6E9Ex5v0zarPmwFU0rVdBmpKUv+bTG2pHFgKoAllOg00hYvfZMIc1TVq5FgyTRNDuwVQ/4GG/hxknP9U+Z0AVqS0hYOYuTWUprELgItTdlgfQ2kh66UGHKMBFE1tp/pbDmkbRUEoUYiwJQgRwMzroGxsnieis4ioasq4uA2AyrF2LhtKGTcXfXLQ5kgALYnoIiIqm/q0J6J9UhqeVwE8TkQNU4bnHVMv7Q8AnEpEx6e0JgOghFrHOE1E1IWI9koJaOsB7AKgaZNWQNkg2fE3gApEdGqq7Tug7PI0XgZwLxG1IMUBRFTbRf0TobRVN6XOwzEAukLZxHmCmXcA+AbKzsotFxJRayKqBOXU8ZFNaIh3AVxPRLun7PMeAPA+M+8komOJaP+UELoBSmgrhBKiykPZ6e0k5bBxoqHeowGMtbBBFIRijwhbghARKUPr/lDG3StSnyEAboYL4SCHHAY1BVj0oYhDHTDzRqgX7nlQ2pLlAB5GWni5AcAMAL8CWJPal8fMs6GM8Z+B0r50hTJs3+Gi2RZQgsgmKM3j88z8XWrfgwDuSE1p3mDR5/UA+kIJVUugNF16Q/THoYTBr6CEjVeg7MkA4G4Ab6TqP0d3jCYgdQVwcuo3PQ/gYmb+y8VvMmMIlDOCW4YBeB3qP6gA4Bqbsq+myv8I4F8oDeTVqX31AXwE9dtnQXlEDkv919dAnZu1UA4hww319gDwooc+C0KxgpztQwVBEITiBKkQG/3YJLBp0iCiAwAMYeaOcfdFEKJChC1BEARBEIQIkWlEQRAEQRCECBFhSxAEQRAEIUJCEbaI6NVU6oY/LPYTET1NRHNIpdE4yKycIAiCIAhCSSMsj6PXoVI2vGmx/2QoT6AWUCk/Xkh9W1KnTh1u3rx5SN0TBEEQBEGIjilTpqxi5rpm+0IRtpj5RyJqblOkO4A3U6kxfiGiGkTUgJmXWR3QvHlzTJ48OYzuCYIgCIIgRAoRLbDalyubrUbITPmwGJmpKARBEARBEEokiTKQJ6LeRDSZiCavXLky7u4IgiAIgiAEJlfC1hJk5t9qDJO8X8w8lJnbMXO7unVNpz0FQRAEQRCKFbkStoZDJSQlIjoUwHo7ey1BEARBEISSQigG8kT0LoBjANQhosUABgIoCwDM/CJUhvdToDLSbwFwaRjtCoIgCIIgJJ2wvBHPd9jPAK4Koy1BEARBEITiRKIM5AVBEARBEEoaImwJgiAIgiBEiAhbgiAIgiAIESLCliAIgiAIQoSIsCUIgiAIPlC+X4LgjAhbgiAIguCRb+d9i7x75BUquEOuFEEQBEHwyJw1c+LuglCMEGFLEARBEDzCkClEwT0ibAmCIAiCRwq5MO4uCMUIEbYEQRAEwSOasCVG8oIbRNgSBEEQBI/8vfpvAKLhEtwhwpYgCIIgeOSZSc8AEGFLcIcIW4IgCILgEzGUF9wgwpYgCIIg+MRMs/XOjHcwfuH4GHojJBURtgRBEATBJ0Z
ha8euHejxSQ9cMfKKmHokJBERtgRB8Mx/m/+LuwuCkAgmLZmUsb5803IAwMyVM+PojpBQRNgSBMET/679F/UerRd3NwQhEbz3x3sZ6xu2b4ipJ0KSEWFLEARPLN6wOO4uCEJi2LFrR8b6DV/dEFNPhCQjwpYgCJ446vWjAADTlk+LtyOCkAC27dyWsb5xx0bb8os3LMaYOWOi7JKQQETYEgTBFzNWzIi7C4IQOwWFBRnrThHl+4/pj5PePinKLgkJRIQtQRB8sblgc9xdEITY2b5ze8a6U5DTXbwryu4ICUWELUEQfHHlqCvj7oIgxI7RZmvikolFy2ZarnJlygEA3pz+JmgQRds5ITGEImwR0UlENJuI5hDRLSb7LyGilUQ0LfXpFUa7giDEy9aCrZKIVyjVbN+13XKfWXR5TRPW87OekfVJSB6BhS0iKgPgOQAnA2gN4Hwiam1S9H1mbpP6vBy0XT/8vuJ38RQRhAA0faJpxnqlByrh/Znvx9QbQYifgl0FlvvMphTXbVsXYW+EpBKGZusQAHOYeR4z7wDwHoDuIdQbOge+eCAe+/kxbCnYEndXBKFYsmjDoqxt89fNz31HBCEhGKcR9dhNI2p8O+/b0PskJI8whK1GAPRP4MWpbUbOJKLfiegjImoSQru+mbpsapzNC0KxZFehuWHvm9PfzHFPBCE52E0jmmm2ypYpm7H++vTXi5Y3bN+A7/79LrS+CckhVwbyIwA0Z+YDAHwN4A2zQkTUm4gmE9HklStX5qhrgiC4YevOrabbZ62aleOe5J4bvrpBNHiCKeu3rbfcZyZstazVMmNdr/16bMJjOO7N48LrnJAYwhC2lgDQa6oap7YVwcyrmVkT/18GcLBZRcw8lJnbMXO7unXrhtA1c8SgVxC8Y3RxLy0U7CrAYz8/hhGzR8TdFSEh6N8hK7dYKwbMDOTrVq7rWEYoeYQhbP0KoAUR7U5E5QCcB2C4vgARNdCtdgOQ86Hw69NeL1pO0sXNzCL8CcWCpyY+FXcXYmHk3yMBAMP/Hu5Q0j+FXIh+o/thZ+HOyNoojnR9tysWrc+2E4wb/f9kZwNsptm69dtbM9bfmfEO/ln9DwCAKLxQEA//9DD2fnbv0OoTghFY2GLmnQD6ARgDJUR9wMwziegeIuqWKnYNEc0koukArgFwSdB2vbB+23pc+vml+j7nsnlbeg3vhbx78rBx+0bQIMLXc7+Ou0uJY8TsERi/cHzc3Sj1zF07N+4uxIKmufhm3jeOASv98szEZ/Dcr89h5n8zI6m/uDLy75E4eKjpREisuBWK3b5r+o7uCwDIo/Ase76a9xVmr54dWn1CMEL5Z5l5NDO3ZOY9mfn+1La7mHl4avlWZt6XmQ9k5mOZ+a8w2nVLjYdrZKxv2rEpl83b8uq0VwEAf678EwBwzkfnYNnGZXF2KXF0e68bjnjtiLi7UeqxMpAvKbw7412s3bo2a/sVI68oWj781cNDbXNn4U70GdkHD41/CEC2p5pgP00XF26FLbfC+TfzvgnSHVOMORuFeCnxEeRfnPxi1rYu73aJoSfZ/L3676xt67atQ8PHG8bQG8Erb//+Nr6f/33c3cgZJT2e1gWfXICXp2aGAPzsr88y1n9Z/Euoba7ZugZDpgzB8k3LAbh/OeeaacunYcWmFTlrb+B3A3HtF9fmrD2vuE2542Va+NLPL8XA7wf67VIWExZNCK0uITglXthKckqRVs+2Kloe8XfxML6lQYSfF/2cs/aS+vIBgAs/vTDR15fgHW0aZ9Tfo7Bqyyqc/v7pkbZHyLTRSVrevF2FuzB+4Xi0HdI2Q8MXNsOmD8MP838AoLIS3PPjPXh60tNF+6cvn160PwnUHly7aLlxtcaW5R7/+XHXdertiv9d+6+vfgnJpUQLW1sLzF3Vk8j94+6PuwuuOezVw3LSzvVfXo9nJz2bk7a8otkALt6wOOaelD6YGdd9eV0k0yREhN+W/YYu73ZB3Uei84j
W+GLOFxnrSTOQH/n3yKIp/M9nfx5ZOxd/djGuGn0VAGDsv2Oz9rcZ0gbHvHEMzv/4/Mj64AX9IHD3GrtblvNrM5XEqVMhGCVO2HpswmNFyT2Hz47Oe0iInicnPolrvwxnKmHTjk2Yvny6r2MLdhWABhGGTR9WtO3DmR8W1Svkji0FW5B3Tx6emvhUkQdXGGhC85qta/D8r8+HVq8TG7ZvyFhPmrC1ccfGyNuYu0Y5Xmhe4nPWzLEs+94f76HvqL6R98kOo/dhQaF1uh6/mkqjxlMo/pQ4YeuGr9O5D2/5NisnthCAcQvGFS0vXL8wxp54p+qDVdFmSBtfx2oZBy7+7GIASrOStJdicWbxhsVFNktO6O1QwnSTX7JBhQa8f9z9ePm33KVurVe5Xsa60UYsbi757JKM9SUblmDdtnW4+NOLQ2tjr2f2ApAWMK4bc51t+RcmvxBa234wCoNecyO6IcxrW0gGJU7Y0pi9anZiIz4XciHemfGObZmx/46NPETF/HXzMWPFDNfl9VNmExdPjKJLtsRljH7oK4cWLX/373fIuycvcbY1xZlmTzbDYa+4m5q+7dvbIulDXLaBxmv6tWmvxdIPK4zXeeMnGuP090/HsN+HWRzhn/y8fNfPbLMQGRMXT8SD4x4MuVfZGMMz2D0L/HrwhqnZKo3e7V6Tfbcb2g40iDB+4fjIHEFKrLC193PJDeZ24IsHoscnPWzLHP/m8ViwfkGk/Thh2Ak44MUDXJfXv5C0uDC55Ng3jg1ch90o1A2Tl04GIGr+MCnkwqzpNCv0UzZ3jL0jtD7EJTyPnZ9pn7Rt57ZEGYKb8cd/fwBAqNO4ADB9xXTc/M3NrsqazVoMnjAYt42NRhjXc9+P92Ws2wlUbeq38dVGu5fa+TrOjKUbl4ZWV9JZtnEZNmzfgJoP1ywyJ3LDlGVTAABHvHYE6j9W3za5uF9KrLAVJjsLd4IGUWiaJu1h5USUmq03p79paxtheszv6YTDq7asCrtLGVj99qDn5IRhJwQ6/qZvbgKQ+dJv+kTTQHUKwOqtq115YE1bPq1oOUyD7bg0W3+tygw5uG7bOhzzxjGxB17etnMbOr/V2XSfdu+3fLYlVm9ZHWq7H8z8wFU5Laq/nlyFppjx3wzbdY1q5avh4AbxB2RNghb+qlFXuR5QBaHh4w1xyEuHFK0f8apzfEYzRxu3Zg1eKFHCVhhBF3cV7sqqR5Nyv5zzZeD6k0LPz3p6PuaruV9F0BNzpq8wN2Y3xk3bUrAFNIhcj2J+WPBD6ME5F21IXjqRsLng4wsib+O9P96LvA0rNE84N4QV+mTWSuusZa/+9moobfhl7pq5ru73XLxA3TJ+UW6yTNhptTVjf40khK6JOxjxxu0b8fzk5/HFP184Fw6ANl2q9wAdv2i8rX3tOR+eg4r3V8zaHoXgXqKELS8XtlFN2OjxRrj080uRf28+8u/NL9p+34/34ccFPwIA/tv8n2ldr0x9BScOO9FHj+1JUg7HXNN2SFvT7fp4ZMyMyg9ULlr/ZNYnrur28oJwa9M2YdEE7Pv8vq7rLW64PbdBME4Bbdu5zVSDEQVutc2AvbecF7SpCzOWbFwSSht+0dsp2vHpX59G3BN/LN+03DQbQBgYbbby89Lvi4yBF2e/k7xoLIPYqOqfW6u3hqt99IrmvelkOhOUeWvnmW4ve29ZXP/l9ab7PvzzQ9Pth7x8iOn2ICRe2NqwfYPreFlehBPN9gYAVm9ZjaUbl2YEldMMMO/87k6c/PbJAKwfgMN+H4av54Wf0zAXxp5A8YpHpo9LtHVnZr/NAsNePfrqjP8aADq83AF9RvZx1d4xbxzjqtzhrx6OP1f+mYiRrBMzVszAyL9HYuN2927923dtj7BH5lS8vyK6vtsV7/3xHsreWzYxHrB2rv5euOjTi0Kpx46Z/83EVaOuwnkfnefJDsVtSJMBXw3AXd/d5bd7gVi/bb3lvgaPNchZppAK+RWKlvXT3IV
cmBUmwss7yq+N6vad2/HkL08WrXd9t2sgW9UVm1YEeq5p9mdRT2fa5ZV8cuKTAIC1W9fG5kmeeGGr+kPVXSci9XJB6L0B6zxSJ2v/fi/sl7Xt9rG3m9YVlZvuHyvdj7aDMOqfUY7ehb+v+N1zvQ+OexA9PumB8QvHo9L9lfx2zxLjTaMXlrX9z/76LNq/1D5j+z9r/sGQKUOyHoRmeH1I5TK6vl+6vtsVXd/timoPVfMde8yImeBGgwi/LvnVUz1th7TFtOXTisJtAMBln1+GnYU70ezJZlnlwzbSdoP+5eoXp2mKsDypH//5cTw/+Xm8P/N9PDrh0VDqNHLvj/fGMsiwyrox6u9RAJS2OYqgw0YbH/1z6PoxaQ3KLt6VNSDMxXl67OfHinLualjZlTmxestq1H+sPu7/0X/Qbf1/4GWAd+fYO7H3s+4d3dwk8a41uBbK3lsWb05/M+dKhsQLWwAwa5W1bYMeLyraN6crY28r1aMVNIiybLfCzNSeC4wX2dkfno1DXznUdm7fLFRF7xG9bdu5bexteGfGOzjitSOyHjph4DRSH/3PaNv9bm58rxqdj2d9HLnzQFD0Xq7LNi1DvUfrYeXmYBGrjXZr2gvo2V+9ZQCYtnwa2g5pmzHAsrt2Wj7b0lP9YdCoaqPAdTgZ4BpDQOwq3IXnJj3nuR39S9frs84LT/3ylCfvrzCwEqT0Gq0mTzQJvd1/1mQK+FbPTQZn7cuF/ZRZzt1ew3t5drrYvGNz0ZT5Xd/fhSNfOzJw3y789ELXZe8bd5+nCPxOmjP9wK/nZz1R6YHwFQB2FBspwc2N7GXUsHHHRvy08Cfs+fSentvUphU19HP2YRJ20lsNKwHCq43Yu3+8a7lPG1164eavb/Y02nhj2hu2+53qcqOR9OoC/MQvT6DuI3WLxXQioEbi/23+L7AmpXyZ8hnrmtbwzelv4p0Z70SazSHXcYTcTi3b4VUb/spvr6DfF/0CeSm+8tsr+G3Zb0VCyjVfXIOHf3rYd316+n/VHwBymk7n1m9vdV128YbFkQmbVtNShVyYtS8Xz4U3pmc/F39b/luW56sRLbCvRqdhnTJs935a+JOl3bJbonwO1KxQ03a/Vzus35b9FqQ7WSRa2DJOQTg9aLxmOe8/pr/nPpkRlbAVFVY3vN2o6+Hx2Q9lO7uOXiN6ZW1zmpIbPGEw7vvxPscHkrbfaYrASXj0er144ZWpr0RWd5hoD2C3LvdWlC1TNmN9+860QN/jkx7o/l5305dSGCEOvBi3JwWv2vBv//0WgPdgjUYOGnoQmjzRBFsLtuKZSc+YxqtyM71uRZwepVYMmTwETZ5oYjuwdovZM4zBptexWaaJOAdhQ6YMsd3f+InGGQKX2WC/3qP18Nbvb4XeNyObd2wuWqZBhLd/f9vxmLDNeQ4aelCo9SVa2DIGYxv771hb91GvnjG/LvVmT2KFlwCXUary3WI1Ent56suhefA0qZatvrfzsNK0iA/89ECW+7SRpyc+DcBZ6+S0/8avb7TdHwQvmqK1W9fim3nfRNYXNzz686OO0652GAV1s6m//mP6Z8VuCsPYvExemcB15BpjYEwnNGG41uBaobSvn0KZvHRyUTyiQi7MCq/ilY6vdAx0vFv0AqvdQLHPqLQzTNC4YHPXmj+bCrkwS+BKmrD11MSnLPdp09puBO2LPr0IJ711kqs2NU9+rxgHwq/85jx4LeRCVC1X1Vd7uSDRwpbxwuw0rBNOeecUfDvv24yHlaZhiStnlpcXRhijq6DotQ56JiyegFqDa2Wkwli1ZRUGfT/IcxtmgqzbKTknOxzNCPX5yfYJg+tWqmu73+o86Dm62dGOZcx44KcHHMvMXjUbyzctxxO/PBE42Kpb9CNGI6e+c6rvKTmjvcSarWuyyjwz6Rl8NferjJeSm//AiSrlqgSuo/Oe5gE8o+L9me/7PtatNtBtXLz2L7XH+EXjccs3t6D7e90x4KsBvvsGKI3Iph2
bsGzjMqzeshqTlkyy/J+DaDb7tktnsXBr2xNUM2imkcyjPOws3Jn1n5pNI0YRmTwMGjzWAID7QMFj5o7Bp7OclRvGqUnAXbL1E9/KDKX03fzvHI8p5MJE55RMtLBlFtkVUELXnd/dWbRe9cGqRdqOXPH13K9x8NCDsXLzypwG+wyD9dvNXaY1I3gt4TKgIjXf/cPdnuq3MjzPdYqb+8fZe9C4cUX+YUE0qVO+nvs19n5ub3R+q3NOvWKcHBXO/vBsX/XqXd4B+5faz4vTHptbd24N7N3n5qHvRFjhHHKB23hFThpiIw+Pfzi0uGZDJg9Bw8cbos4jddDh5Q6WwmWQkCLNazQvWnbrNTxm7hjf7VlRyIUYM3dMVgaEQhRmXVdRh1AJGmT2xq9vxPpt612dzzM+OAM0iDwLkEGmqe2IO+uCE4kWtpz+RGYumq659strc9CjNCe+dSKmLpuK3R7drWibk6eJn7AAUVxATrZOU5dNLfJo8eNpec5H53gq70e17sZ40SmitFnOsN9X/J7xPx5Qz33uSC9oI7ffV/yOR3+OxiXfDKdRpR8D2DJUJus62bZzG6qXr25aXq/lWLZxmeWgyi0PjX8o0PGA0jJ6IU4Nxbt/vIvxC82vbf00WZzR3W/4+oaM9ed+Nfek9PvfEwibC9JaWrfOPV4yBZhh9Tzu/l53U+3LvT/em7Hu9brx+vwPw2au1uBa+Oyvz1yXH/jdQMt9Zv+LG0Fuj5p7uG6/qN7CgsC5b6Mk0cKWU2C9Ryc8mqHhihunSL1+IvlGETJh285tqFa+mm2ZVs+2AuBP2LJKa2SlCvYTKsHKeDHoi/vAFw/Ex7M+LlqvUjb4FJWenxf9jGu/yO3AQI+TsGV0a3dD5bKVs7Zt37nd8gV41odnFcWZunLUlZ7biwKvEdvL31feuZAN++2WHcfPCrMX7tCpQ7O2dX+vO+o8Ugfrtq3DrsJdOKRR+FGw/TJpySTTIKTbdm7z5WDE4AzN3UtTXnJ9bJCpRLuBoVlga2P5Hbt2eJr29hqA84qRV3gqb0YhF3p6Vz00/iFPA2Y727HFGxaDBhGaVveeb/adGe9E8r4Mi0QLW/2+6Ge7f/LSyTnxjHBL0FhFZuz2yG7OhTyyY9cO1yOmMmRvfKwfSTgZn2qxzYxs2L4hFLsbQEUd9zstp2l19Odm3fZ1YXSriJ6f9cTTk8ynvIPak7jBT/Rku0jdgBoQGFPY2LWzZusa1H+sPoDMKcXiRpC4Ul48KM2m/d+c/mZWKinNrb7mwzUx4KsBGdkWkkCNh2tkvZS37dzm+Iyx4s3f3ywyfZi5cqZD6TROIRDsaP18a8/H6O0k566Z6zo6P2BtkO8Xt8/9lrW9xa+zCnpt5nA19t+xlvVogzCzVEVOz652DduF9h6JgkQLW07kKvGoW5y0Kk4vLTP0qvKw2LFrh2u1u5MAoEUmXr5pOeo8UsfWwNrKRmrt1rW+H7hmPDDuAV8aLs3monx+Wmvx58o/Q+sXANSrUs9yX82H7ePEhMFrv73mWMZot1Xj4Rq25QsKC3DzNzcDUNfW36v/RkFhgeOD/aeFPzn2xS1BgkUm1dbjxwU/4uWpL1tO7R7wovUU9x///YFHJjwSVdd8c9RrR2WsP/HzE4HsmHp80gM0iHxluMgVVR6sgvL3lceLk1/EKe+c4unY8z46L9S+uNVYebVDtspla6YwsXtvG0PI6HHKgBBFwFizALF+CUXYIqKTiGg2Ec0hoqzALURUnojeT+2fSETNw2g37mStRrQcUBpP/PxExhy6l+i5YXPlqCuLjF/HLxzvShjZvGMz+o7ua1vm4KEHg5lx27cqiXDDxxtalj1gt/TL4ZfFvxT1IWyB8r5x95lmcjdDr43TNG8FuwqK7Iqs7I7cYPYS796qu+0xUdsc3PPjPY5lPvrzo6LlResX2ZTMhAYRyt9XHq2ebYVtO7c5Ti0MHj/Ydd1OXDb8Mt92m0FytkVl7AsAR79+NP4
34n9o9WwrS5d2K0HRjfdWHIxfND6jzxMWhxPrbu029yFrnvzlSU/apTDYsWuHrylzLyYWbgYNVoK70UnFLK6iE0YtllOWBDPstFe3fnsrdhbutHxGuhngeSVMx7vAwhYRlQHwHICTAbQGcD4RGXWtlwNYy8x7AXgCgOM/ubNwp2+3+zjRjM+ZGf2/6o/zPz4fNIgCTTn8uuTXIoPY1357DZ/99ZltvDEzXpz8YlG4jBenvOhqOsmt5vDy4ZdnpRcxkp+Xj49nfYyrR1+NAWMGoOMrHXHM68cACG5nFYShU9K2L1ooiXM+OgcV7q8AGkSWnptuyLsnL+s8O8VZiyuprxFNiDAL4eCGf9f+66ixsMpt54c3p7+Jpyc+7St0xbszrDMhOFH5gcp4dMKjeHTCo1izdY3rkbDek84NG3eYe/je/f3dRcv777Z/0XKSMxjk3ZNXlNMyjnv//Znv45S3vWmY4mLJxiVF74+v534NGkToN7pfxv9byIVYsWmFKw2hmYf2ys0rQ/kfjnvzuIx1LZyEGa2fa130u2gQFZl+OMV5LHtvWZS7r5ypUPXjgh9Dd1x57tfn8NBPD4USooaCSoJE1BHA3czcObV+KwAw84O6MmNSZX4monwAywHUZZvG8xrlMfdOpnrfDZ326BR5oMrmNZrjh0t+cGVMqAl7Q7sMRe+R9jkNzchDHgoR/gN8au+peHP6m3hxyouxCl09D+xpmuYiLA5vcjjO3OfMorQmdvx11V9FNhNhxo3ZWrDVUz6wO468A8s3LcfLv70cWh9yRbdW3TB89nBUK18N62+xF5gbPtYQyzaFm/Lnm4u+wd+r/8bctXPx/K/Po0aFGkVt3Hz4zb40B1Y8ePyDOG7349Dh5Q6h1Vka6NayGz459xPXQXFnrJhhO3UbB9XKV/PldXpoY5WGp06lOli0fhGmrwgnIX0QeCDj6i+uxrOT3OdTveXwW9CgaoOcRiNYeeNKVClXxTRkDRFNYeZ2JoeFImydBeAkZu6VWr8IQAdm7qcr80eqzOLU+txUmVWGunoDUJJAdRyM6yEIgiAIpR4Cec5fK+SG6X2m44B6B9gKW4lK6sfMQwEMBQBqSHJV+eTQxodiV+Eu5Oflo2yZsthVuCtxzgSCNyrmV8xwa65ctjI2F2xGy9otsX3ndtSoUAPl88ujDJVBmbwySotVthI2bN+AbTu3oXal2pHmgiwu5FEe2jdsjzzKQx7lYd22dZ482YSSyV619gKg7FSXbVqGymUrg8HYUrAFFfMrYrfKu2HB+gWx9jGooGUnrIkg55+WtVuiQRXrKVONMIStJQD0ifAap7aZlVmcmkasDsDWLaJmxZpYi3Dy9OWSw5scjh7798BBDQ7KyJgeBmfuc2ZGDKgZV85wHa9Hm0bccccOlLuvXKj9CgIPZHz050e4fPjlsQVhvKbDNRjcaTCaPtEU/20JltXeiiX9l2De2nk48rUjHcvywGgeesyMvHvcmWnuW3dfjLxgJKYsnYKzPjwrkv5EyZL+S/Dzop9RtXxVnLjnibZlO73ZqSjRcxg8duJjuLbDtcijPGzcsREL1i1AzYo18dKUl9B5r87Yu87eqD24dmjt/d3vb7So3QI1H6oZeqiSKLnxsBtj9Zj8+fKfi6bT3LB5x2ZUeTA5oQXa1m+Lp056Cke9rjw8f7zkx6JlMxiMS9pcggEdB2Dfuvti5ZaVyM/LxxvT3nBl3uCGc/Y9Bz3274FWtVth7+f2dn3cjYfdiMEnDMaw6cMyMpjY8egJj6J/x/4oKCzAuAXj0GlYJ7/dduTiAy9Gwa4CTFoyCXOumeN8gAlhCFu/AmhBRLtDCVXnAbjAUGY4gJ4AfgZwFoCxdvZagIogu7rGak8JfZPAuEvHZdnZ7FlzT9SvUt+3dmnLbVtQJq8MypUph7Vb16JCfgVs36W0GV7ovGdnW9daI/9e+y92f2p3x3KTek3CfePuK4rzY0bZvLK47cjbcEmbS5Cfl48
mTzTBDR1VlOnq5avnPJWPxnc9v8MxzY8BgCJB64qDr8CQKUNQMb+i8n7xmcrl5sNvxkOd0tHNG1ZtiHuPvdc2EO+I88MzGjfixf7rj74qDpRfr60nOj+BW765xdZot06lOr4C2tqx/Y7tKFdGDSbObH2mq2MePP5BHPKy/wCg866ZhzzKQ7MazbL2VStfDfvXU4brg45N5xjdo+YeoSSln95nOlrUbgEAaFStEdatXBe4zqj56dKf0LFJR+RRHkb9Myr08CpuuO/Y+zwJWgBQuVx28N5ccPJeJ+PB4x/E/vX2x6ezPsV+u+2HVnVaFe3XBmduPJlrV6xdNEDfrbKK4Xhl+ytDEbZu6HgDHjkxLTzvVWuvrPh7GjOunIHnJj2HF6e8iNoVa+PhTsqG0clx5IQ9TsA+dffBUyelA6OWK1MOx+9xPB478THcMfaO0AObrr5pNWpVDJ4APrA3IjPvBNAPwBgAswB8wMwziegeIuqWKvYKgNpENAdAfwBZ4SHMKG6CVr/2/TJeaPOvnY9tt2/DnGvm4KfLfsIRTY/wVW/FshWLXiA1K9ZExbIVPQta3/f8Hq90U5nTHznhEVf56BpUaeAYaR4A2jdqjzb12tiWKSgswNRlU9G8RnM0rtYYPJCLbsygufHM0AQ5J/QP3M/O/QyAEkp5IGPL7Vt8C1oEyhC0NJx+a5eWXXy15xZtusQtjas1dl32ic5PoEMjZaRdt1JdR8HeSePkhZHnj8S4S8cV3SdeOKiBeTYCNxTeVYjda+5uKmjZ4UXQuvqQqy336dNJ6adD29Y3j3uUBA5venhRZoo6lerkvP1PzvkEtx15W87b9cMhjQ7B6B6jcWD9A5FHeTiz9ZkZgpYeNwPp6w/NNoSukF8BZfMyj23fsL3nvj7Y6cGM9X+uts5Gsd9u++GFLi+ABzJW3bSq6L3p9L756qKvMgQtPXUq1fGV8cSOwZ0GhyJoASHF2WLm0czckpn3ZOb7U9vuYubhqeVtzHw2M+/FzIcwc/AhHYDjdz8+jGpC49K2l2asN6vRLCNApt1DM2qObn40GlVrBEBpWIw3lxnl88sXCWhW/HeD0gZpD6+CO62FEystRrXy1UK1Fxh/2fiMEZYdeuGnfpX6ofXB6vc8MO4By2Nyod177MTHHMt8ft7nRcs1KtRwlaesdsXauO7Q6/BLr19QeFchypYp6/h7nj4pvBg2p7Y81fdgxq03mhlheosaKbizALvu2mX6gnSiVsVaob94wkAb0Gicv9/5ges8pNEhnoSDRtUaRfq/Geneqjuuan8VJlw2wXMKJU37FBZVy5vHazMOKs/d91zPdZulXapdMXu63C52od17YGKviY7th/2/9jvEPouNF5J3N+pwelg0qd7Edn+usQo8qFGprHu3e42eB/b02x1LypUp5/rF7hS3pHYldTOVzy8PHsi2ec6s1PDVylfzlUbGCk274pW2DZQ2QB+C4uAGB4fSJ42OTTpa7iscGH1spAPrHehYplurbhnrc6+xTxlSuWxlPHnSk0XrROT44KtSrkrRtVNc6bF/D9/Hmr2EjOTn5SOP8kwHAQTCltvMA6ruWXNPXHTARXi126u++xcFLWu1RPe9M4P6ntriVF/PRY1/r/0XE3tN9DTFFyQG2b519/V8zCfnfoJnT3kWHZt0xIunvujp2LdODzcdnZuZCgCW2jMr1t5sbl9tpoWy0yTbvW+cBFV9MOqwqFjWXXBsNyRa2HK60M7a5yw8ckJyUlJE8fJw0iz5oVyZcnCStbTkwk4pELyMnm894lbT7bUq1gotEjcPZN+aCm0KqmHVdBT8ivnh3WyAuqb/7Jt7GxUNP0l/nSiTVyYrGXXZPGvN1uVtL8fGW1WQzkvbXGpaJumcsfcZeOsM/y9CqyClZpg98C9re1nW9q8uVClW/rn6H/Rs0zNxwuysfrOytpXPL+/73u/brm+Rjc/pe5/u+jg/U2QaH53zkXMhA/pnZNXyVT09U+L
K9bd041JP5a20VWYDrivbWUfS36fOPujVtheObX6sp/YBFVDcr9lHLki0sOV04k5teWqoaj4vmL20nOZ2967j3jtDI8gUhxUV8is4ev5pgSD9TO89eoJ5Dqsm1cw1kW5HW3rC8Ngze0AU3lWIo5unMxdMWTYlcDt6alasiX3q7hNqnV5wsuvwmoAWUInEjUap5fPLW6bB0WuEBh49MHSB1g9es1V8fO7HzoVsCBrpWjMo1nPCnieAB3LRC+63Zb8FaiNMrj/0etOBmV97TQJl2BN6+f+CPFPtBpdm071GIbBcmXKeDLi9TgV/3/N7T+XNuOPIOzxp8D4+52NLLbbZgKvTHtZeg1XLV8VL3V7ylW6qS8suvt4luSLRwpabG1Ez7lvSP7d5EhdctwDfXPSNp5e+V+PkqHDqR6+2vYoeSOXLlLcta8b1Hc1tTCxjvERkPzG4k33uPbOpB2NfwvZs0Vh14yrUq1wPFx1wEe448o5I2jDDafrYb4osvTYQUPellSejflqsSrkqgVPLfHrup4GOB6LR+EXFK91ecaW10mw042DCZRMypqw779nZtJxfQZvBGfawbvnp0vCSn+tZ2n8p6laqm7W9Tf02GeteHTi8PhuPbOYcWsaJO466w9bcwchpe5/mqf6obAmTaKOoJ9G9s3rRr7lpDeZfO79ofcedO7Ie9lHTsGpDHL+HMtC3epAkFavzOqDjAADAY53TRtRntT4L066Y5qn+XF30resaU3BmsntN+7AVbpwEalao6alPbqldqTYWXLcAr3Z/NVTDfCecvFifOfkZX/UatZZmLx4NvWZPC2MSBLu23FKGwtcgR8VlbS9zVc6rMfbS/kvxevfXffQomw6NO2Ban2nggYwdd+yw9Dz1EorGyKQlk4qW3XpnN6jqHHzSK3mUh9qVahe9D/TbjUK8n8Gr1774QbOjGnvxWJTPL++qnhU3rAAPZM9tVq9gbSAfBBG2AmDUhCwfsBzLBixDzYo1TV2t7eaCo8TLA2PHHeEmyvSD1ejq+N2PBw/kDFVsmbwyOLC+s1G1ETNvFqNdjxWbbrWP7fR3P5Xs97jmx9mWc4o74+bBu3abv8C69x+XnfDVSPn88sjPy0efdn2w8saVvtrxit21ygP9aQuA7KkZM3X+5+d9jlEXjMrY5rc9PWHk1Pxq3leB6/DC22e87am8NrBwG8TYS9nVN63GO2e8gwZVG6Bnm55ZHoNeGXPhmIwXX9kyZSPRXr8/8/2iZbvQG/pBWdCBjZkGtJALkZ+XnyXcmglbQYTLoNgJI1N6K3MJt6FeJvaa6MpT8uCG/hyMZvebnbHu5lomItME1Ukh0cKW0WuhXpV6tjeLlQG2FWG4HQPORuR64rzZNKymTIwjMz1ebSu+nPNl1ja7aQ1tOvbri7529E7SpkGdXtROD/iXur5kuz8IXjwiy+SViSXekJ73z3rfuZANxge52fVyaotTcUqLUzK2haFRsrINSzL777a/p/LalNxvV4Rjh6U3f6hVsRbO3z/9LDR6o3olzPhpYTDs9GEA1HMjiOcjAMswKGaCjJmwFaf2xY2zldNz6JbDb8G8a+a51pr6sQEFMk1d7jnmHgw/zzpgtoaWsSEsWtRqEVpdQMKFrabVm3oq79X75vHOj9vuP2mvk1zVE5UHRFTGflY3vN2D4LoO12Vts4uX4hQuwIypvafi+N2PdxSStP0dG9vbFTi9yOtVruetgx6wE1yTxC2Hq/jCTpGbnTBql/SC8KLrF2HljStNDZPD0Hj48VzyS9m8suh9UO/A9Xh1PNEcgYLalhXeVYjCu5SN3E+X/mRqIhDkP3nztDd9H+sVt4L6QQ0OQsGdBZF5AVv1g0CJEraO291+JoAHMmpWTJtN3Hz4zVllbjvyNkfzjDDQn6c7j77TVZthaLj1jL8s3HzCiRa29LgxRPcSFLJS2UqoX6U+dt5pHd/pix5fZG3rc3D2lI8XzZYXnGyS/GI1urN7EJi9KId
0GWJZXi/4unW1btugracH/bG7279knQyE3WhE3E59anzR44vIchtGwXG7H4cXTn0hy5DXK8Y4adp569W2FxpXa+xJc1chv4KrQKoaudQWFxQWZGiB/OJ1uuOwJodhztX+crJpXH/o9SCionvs8KaH+zIRMGPcpeNwaotTcdGBF4VSnxvsnj9G8vPyI7tOrLwbtXhzenIhbN12RHZ0/Gs7XOtZefFQp4ew4Lp08u0xF46xDIrqloFHDwx0vB1OMbaW9s8MZ3FZG3vbx7qVg9uC6ikWwtaNh93oqpyXC/n5U54HoG4UM48YqzAFL3R5IevFEdSbyoqo5p+ND50ZV87Ajjt22J4/s1HOufvZRxn+5fJf8M1F3+Cz8z7D7Ufe7q+zNjgZuB/W5DDb/W4Mq72O8r2kt4mLri27AlAvoH3q7oM+7fr4SnOjxxjWhIjQqnYrPHC8dcR8M4afNxzLByzH1N5Ti7advNfJAMwDgVoF9owSq3xvXnCaXjF7Ye5Za0/P7dx11F1Fy+0atvN8vFuOaHoERl4wMrL6zbCayVg2YBkAZYwehY2s0X7ITrNl3JcLYctskH7nUdb5WO3Q7rkxF44JZXrYi7A1tfdUTwK1k7ZYs9GtV7keVtywAi93e9l13WGQeH9nL1oCLy9G/Uh+822bkXdP5k0w66rsAHxDuwx1XX8YBIlQ7YV96+7reO78jGg6NE7bLd133H2ejzdDbwtkFBrNHii/XP4L9q+3Pyo/kNZQvXPGO2havamreDut67bO8Hqy4rDGh2HC4gmeDJjj4u0z3samHZtC9cwye4n81e8vz/V0bdW1aLnngT3xxvQ38ODxD2LPmnviyZOeRP69mY+sMCM8uyWMF6ZTv8NwHACAmw6/Cae0OAVly5QNlAPSir1q7YUPz/4w9Hrd0L1Vd9Pt9avUxyEND8FBDQ7KidZTr736sseXOOntk4q2G4OS5kLY6nFADyzesBi3fJtOQayfHvRC5XKVA2vqBx0zCAO/V0KWl3d02wZti7J6uMGN0iPOWYdiodlyi5cLWa8+JyJ8ft7nmNp7Kq5qfxUub3t5UQym6X2mF6lSrUb/T5/8dJERZphEnZRYI5d5wtxyduuzTbfr7Q6MRthmUaQ7NO6QNW3apWUXHN70cFf9+Oaib1yV+/CcDz2n4oiLquWreha0wkrG6oXXur+GFTeswIH1D8QzpzwTSYBfP4QVhPXdM98NpR47KperjA6NO3gWtNwayb946ouBp6D9Yvfcmvi/iXihyws56YfeKFsvRBMoS2j28o4K4qHct33fouXxl42P1VasT7s+AMyDvoaJnbAVZ9YOjVIrbBnp1qob2jZoi2dPeTZDvXhAvQOKppuspOwD6h2ACw+40HfbQjZmEbKBbO1V4V2FeLnry+h5YE/XoyAvKTDcavQaVm2IK9pd4bre4obV/xEmRm8pIgo9Ea8VVxzs/r/zEvDRDjsHnKOaHRVKG355svOTrsp5sa0rKdiZd2RotinYOymIh7L+uRV3DDnN0cvJQD8oVtPkPJAts3ZYeWjqbdXCQoQtF5TPL48uLbvggHoHRFJ/HMy7Zp5nN+h7jrknot5kY+V9cmjjQzPWiQiXH3Q5Xj/tdVf1PnrCo6Fr8qIMIZEUeh3UK/I23EybR/Vy9zJlH9RzU8MuEOcxzY8JpQ2/NK/RvCjXoh31qkTn0euVM/Y5IyftGN8z+hkPvQZ4Z+FOT05bURF3sM8K+RXwz9X/4NQWp0beDqCc2DTGXTrO9pjL2l5mOrUYhYBaooStqMijPIw4f0Ro9ZllQjcjaEwYO3avuTuObna0p+kh/YvOa4qGpDDgsAGBjn+t+2sAMo3zcyGIlHRqVazlyk5JH27EKR2TF+J6IZ3T+pyM9bqV6ibCm5WIcMKeJ5ju05whtty2JdRn1J4198TH57jLOWk28MtVjs1z9s38z6xy3m4p2ILPZ3/uq40wBZO4hS1A2fblwlyFB3LR9PFLXV/CEU2
P8FzHkU2PjCTVVfz/QkQU3Jnc7N/XdLjGMZfjhls2RD5q/OTcTzypS/XGpmEFhPWCU2T5XHB4k8MjH6GVNto3bI/Dm7izodMPDk5ucXJofYjrhWQMvZD0HI1Vy1XFCXsoISxs54TK5Sq7jpl2dYers7Y9e8qzmHHljFD7ZIbRNtROC7J662pfbdx9zN2+jjPDShgsyfBA9jQIHnPhGJy818lYc9MafHvxt5H0qcQKW/l5+Ym2o3LK5Rg0nokbKuRX8GS/pDeEjUOzZZY42i36pLhe0QTjcmXKoUXtFhh5wcjEGGuXBH7p9Qs+O+8zV2Vf7f5q0XKYoVHiErZa1W6Vse5W650rerXNfGFtuHUDnj75aUy4bEJobWhhdnYV7nLtNWc2BVujQo1YvIHtngVx20sBwZ6bpYUT9zwRo3uMRs2KNSPzYC1xwtaMK2cUqbZzFTqhtKCPDRQ0LlOu0ZLi+qFh1YY4u/XZmHfNvKJtSddAFCfyKM+1sNOgSoOil3OY/4EWm+jxEx8PdXrSCWPMrrDswcLCynYsLCcBAJh3rbqvtGvgo7M/si2vj8EWB8Y4Vnbx/vwOypKc40/wR4kTtvbbbT9svm0zAKDznp1j7o0QhAmXTcDvfX6PuxsAgA/O/iBjHl8L81CSnCaKA2XLlMXC6xdiSu8poU6PaJpkBuOSNpcAUPHnosboQZU0IV6foy4qtN+sea3ZRe7+8ZIfPcVeigKj7ZGtsOVTs+U1nZOQfEqcsKUnifGjrPj03E/j7oIrurbsGij+ixc6NumI/et5S9ibK3ocoLSmUToxCNYc1OCgSO7vQi5E3cp1UXBnAf7o+0fo9RsxCutJE7Y6NO6ArbdvBQBcc8g1kbXz77X/Fk0lH9n0yKz9PJDBAxlHNsveFwf6mHq/LPnFspyWrcErYSdBFuKnRAtbADKmfjSc7KVyxa670rn59H2KK0igG4afPzxQ/JeSxPxr54fqpSrET80KymZIE3qWD1geaXvG6dMk2gJWyK+ATbduwmOdH4usjeY1mhc9V4gIU3tPjcxQOQzO2++8ouUdu6xTAukzIjgxtffUIm2q34jvQnIJJGwRUS0i+pqI/kl9m14hRLSLiKalPsODtOkVs3hN75zxTi67YIn+Qaupm3se2BO//u/XuLqUWPbfLXkarmY1mpUqwfPcfe1zYRZ3Fly3AJe2vTRjm9Ej+KHjHwq1zdqVauPX//2KPWt6z3uYSyqXq5xTrVvbBm2LgmAaM0UkAbdCsZc4W20btM0KKxEEt56dQm4Iqtm6BcC3zNwCwLepdTO2MnOb1MddHogQ0WuQgGRNLw4/bzi6tOyCgxochD1r7onbj7w9cVMJcbOk/xJ8f8n3cXej1FPcnCK80rR6U1NDfX3sp5uPyE7IHpR2DdvhjdPeAJC8acQkcEijQ+LuQhZu/ye3jh9aZoyS4GErmBP03+gO4I3U8hsATgtYXyTkUR7+vfbfuLthStdWXTHi/BEgIsy5Zg5a1Ja5eiMNqzaMJTefkIkWX6m0oWkvbz/y9sjaOLzp4Zhz9ZwMj19BJZIfe/HYuLuRhd4o3k6ocSPw7FlzT9x6xK0AwjWMv+3I23Ka9UOwJ6iwVY+Zl6WWlwOwisJZgYgmE9EvRHRawDZ9oXepTkIKBUEobuQqMXrS0PIUNq7WONJ29qyV7KnEOOjQuEMi7dj0QlTVctYxEc1mUZ45+ZmM9Q6NOxQFiLVLpuyV43Y/DncefadzQSEnOOpCiegbAPVNdmUM85iZichKLG/GzEuIaA8AY4loBjPPNWmrN4DeANC0aVPHzvslSdOIglBccJNOp6QyqdckCfMhFKF/h9hl+jDTbE1ZNiWzLt3gPynOW0L4OApbzNzJah8RrSCiBsy8jIgaAPjPoo4lqe95RPQ9gLYAsoQtZh4KYCgAtGvXLrJAI+XLlN6XhiD4xcpQ+bI2l+W4J7mnfaP2cXdBSCh24V/MZlFWbs4MnaMX3Hof3DtUI3k
hOQSdRhwOoGdquSeArKybRFSTiMqnlusAOBzAnwHbDYQxkKAgCM5Y2Z+InZFQmrEbvJvdM8ZQEY+dmA6pkUd5Yp9aQgkqbD0E4AQi+gdAp9Q6iKgdEb2cKrMPgMlENB3AdwAeYuZYhK3Cuwox48oZMo0oCD65/7j7M9YPrHcgjt1dXMyF0oudl64bYWu3yruF3icheQTyM2bm1QCON9k+GUCv1PIEAIkIkkREsSQqFYSSwm1H3obbx6bNNaf1mRZfZwQhAdgJW2YD+wZVG0TZHSGhSCAOQRB8IdMdguAgbJnYbGkeh3/3+zsrqbVQchFhSxAEXzx10lNxd0EQYscobHVo1KFo2UyztbNwJwCgRe0WmNl3ZrSdExKDCFuCIPgizGjXglBcMYZEcQpk2rRadGGNhOQiwpYgCJ7Q0qd03qtzzD0RhPgxaracHLAe6vQQlg1YZltGKHmIsCUIgieGdBkCQLyoBAHIDv1wwG72wW/L55dH/SpmccKFkowIW4IgeCLqtDWCUJwwCltRJCsXij8ibAmC4Ik6leqAB4q9liAAwO1HZSYo15JUW2VcEEonImwJgiAIgk9qVqiZsd6oWiMAQPuGkuJJSBMoqKkgCIIglGbMvA/nXzsfVctXjaE3QlIRYUsQBEEQfGLmfdisRrMYeiIkGZlGFARBEASfOMXVEgRAhC1BEARB8MxDxz8EQIQtwR1ylQiCIAiCR6qUqwLAPP+hIBgRYUsQBEEQPKJptJwixgsCIMKWIAiCIHhGpg8FL8jVIgiCIAgeEY2W4AURtgRBEATBI8c0PwZHNj0y7m4IxQQRtgRBEATBIy1rt8SPl/4YdzeEYoIIW4IgCIIgCBEiwpYgCIIgCEKEiLAlCIIgCIIQISJsCYIgCIIgREggYYuIziaimURUSETtbMqdRESziWgOEd0SpE1BEARBEITiRFDN1h8AzgBg6ZJBRGUAPAfgZACtAZxPRK0DtisIgiAIglAsyA9yMDPPAhyDux0CYA4zz0uVfQ9AdwB/BmlbEARBEAShOJALm61GABbp1hentgmCIAiCIJR4HDVbRPQNgPomu25n5s/D7AwR9QbQGwCaNm0aZtWCIAiCIAix4ChsMXOngG0sAdBEt944tc2sraEAhgJAu3btOGC7giAIgiAIsRPIZsslvwJoQUS7QwlZ5wG4wOmgKVOmbCKi2VF3rphQB8CquDuRAOQ8KOQ8KOQ8KOQ8pJFzoZDzoMj1eWhmtSOQsEVEpwN4BkBdAKOIaBozdyaihgBeZuZTmHknEfUDMAZAGQCvMvNMF9XPZmbLcBKlCSKaLOdCzoOGnAeFnAeFnIc0ci4Uch4USToPQb0RPwXwqcn2pQBO0a2PBjA6SFuCIAiCIAjFEYkgLwiCIAiCECFJFraGxt2BBCHnQiHnQSHnQSHnQSHnIY2cC4WcB0VizgMxi9OfIAiCIAhCVCRZsyUIgiAIglDsEWFLEARBEAQhQkTYEgRBEARBiBARtgRBEARBECJEhC1BKAEQ0YtEdKdu/UoiWkFEm4ioNhEdTkT/pNZPI6IviKhnBP2IpF4BIKIjo8yqQUTjiahtwDq6EtH7YfVJEEoK4o0oCAmDiM4DcD2A/QBsBvAvgDcAvMAublgiKgtgA4BDmXl6atu3AIYz81Mh9O8SAL2Y+YigdRVniGg+1Hn4JqL6GUALZp4TRf2GtroCuIqZTyKiFwFcmNpVDgAB2J5aHwfgSqhrcrOuirnMfGCqrj8AXMDMv0fdb0EoLohmSxASBBENAPAUgEcA1AdQD0AfAIdDvfjMjilj2FQPQAUA+rRYzQzrxQpSFKvnFRHlIvdsWPQBMAwAmLkPM1dh5ioAHgDwvrbOzCfrjqmh236gbvu7AHrnruuCkHyK1cNLEEoyRFQdwD0A+jLzR8y8kRW/MXMPZt6eKvc6Eb1ARKOJaDOAY1Pb7iOilgC0qaZ1RDSWiOYC2APAiNQ0Ynki+p6IeqXqu4SIfiKiR4l
oLRH9S0Qnm3TRzW8w1jueiJ4gonVENI+IDkttX0RE/9lNOabqup+IxgPYAmAPItqbiL4mojVENJuIztGVr0hEjxHRAiJan/pNFVP7uhHRzFQ/vieifXTHzSeiG4jo99Rx7xNRhdS+OkQ0MnXcGiIaR0R5RDQMQFPdOb2JiJoTERPR5US0EMBYIjqGiBYbftd8IuqUWi5DRLcR0Vwi2khEU4ioCRH9mCo+PVX/uca6iGif1G9Zl/pt3XT7Xiei54hoVKreiUS0p8V5LgfgOAA/uPmPXfA9gFNDqksQSgQibAlCcugIoDyAz12UvQDA/QCqAvhJ28jMfwPYN7Vag5mPY+Y9ASwE0DWlhdieVRvQAUpIqwNgMIBXiIh8/5LMen8HUBvAOwDeA9AewF5QU1XPElEVm+MvgtKSVAWwEsDXqXp2A3AegOeJqHWq7KMADgZwGIBaAG4CUJgSQN8FcB2AulB5WkekhAyNcwCcBGB3AAcAuCS1fQCAxanj6gG4DQAz80XIPKeDdXUdDWAfAJ1dnJ/+AM6HyiVbDcBlALYw81Gp/Qem6s+wg0pNFY8A8FXqXFwN4G0iaqUrdh6AQQBqApgDdb2Y0QJAITMvttjvlVkAmhNRtZDqE4RijwhbgpAc6gBYxcw7tQ1ENCGludhKREfpyn7OzOOZuZCZt4XQ9gJmfomZd0HZhzWAEi6C8i8zv5aq930ATQDcw8zbmfkrADugBC8rXmfmmalzchKA+an6djLzbwA+BnB2aorxMgDXMvMSZt7FzBNSguW5AEYx89fMXAAllFWEEso0nmbmpcy8BkqIaZPaXpA6F82YuYCZx7mwm7ubmTcz81YX56cXgDuYeXZKizmdmVe7OO5QAFUAPMTMO5h5LICRUIKbxqfMPCl17t7W/SYjNQBsdNGmkVWpa3MdEd2g267VVcNHnYJQIhFhSxCSw2oAdfS2Psx8GDPXSO3T36+LQm57ua7NLanFKqQ84DalPn5svlbolrem6jdus9Ns6X9nMwAddC/4dQB6QNm21YGyU5trUkdDAAu0FWYuTNXbSFdmuW55i65Pj0Bphb5KTYPeYtNXsz470cSiz040BLAo9Vs0FsDdbzKyFkpz6JU6zFwj9XlUt12ra52POgWhRCLCliAkh5+hvL66uyibEzfilCZHM4Le1/mI8LugW14E4AfdC14z0L4SwCoA2wCY2SUthRLUAChjeyghZ4lj48pubgAz7wGgG4D+RHS8Sd+s+rwZQCVd22WgpiT1v8nUlsqBpQCaUKbTQFO4+E0mzFFdo0aOJd2xD5QGckNI9QlCsUeELUFICMy8DsrG5nkiOouIqqaMsdsAqBxr57IhIqqg/+SgzZEAWhLRRURUNvVpT0T7pDQ8rwJ4nIgapgzPOxJReQAfADiViI5P2ToNgBJqJzg1SERdiGivlIC2HsAuAJo2aQWU44EdfwOoQESnptq+A8ouT+NlAPcSUQtSHEBEtV3UPxFKW3VT6jwcA6ArlE2cJ5h5B4BvoGzNwuBoAF+EVJcglAhE2BKEBJEytO4PZdy9IvUZAuBmuBAOcshhUFOARR+KONQBM28EcCKU4fdSqGmyh5EWXm4AMAPArwDWpPblMfNsKGP8Z6A0YF2hDNt3uGi2BZQgsglK8/g8M3+X2vcggDtMbJb0fV4PoC+UULUEStOlN0R/HEoY/AoqNtorUPZkAHA3gDdS9Z+jO0YTkLoCODn1m54HcDEz/+XiN5kxBMoZIQzOT9UnCEIKCWoqCIIggFSIjX4pxwO/dXQFcBEzn+NYWBBKESJsCYIgCIIgRIhMIwqCIAiCIESICFuCIAiCIAgRIsKWIAiCIAhChIQibBHRq6TynP1hsZ+I6GkimkMq/9hBYbQrCIIgCIKQdMJy1X4dwLMA3rTYfzKUC3ULqFxpL6S+LalTpw43b948pO4JgiAIgiBEx5QpU1Yxc12zfaEIW8z8IxE1tynSHcCbqZxivxBRDSJqwMzLrA5
o3rw5Jk+eHEb3BEEQBEEQIoWIFljty5XNViNk5gtbjMwcXgAAIupNRJOJaPLKlStz1DVBEARBEIToSJSBPDMPZeZ2zNyubl1TTZwgCIIgCEKxIlfC1hKoxK8ajeEvYaogCIIgCEKxIlfC1nAAF6e8Eg8FsN7OXksQBEEQigV//QVs3Bh3L4SEE4qBPBG9C+AYAHWIaDGAgQDKAgAzvwhgNIBTAMyBylR/aRjtCoIgCEKs7LOP+pbUd4INYXkjnu+wnwFcFUZbgiAIgiAIxYlEGcgLgiAIgiCUNETYEgRBEARBiBARtgRBEARBECJEhC1BEARBEIQIEWFLEARBEAQhQkTYEgRBEARBiBARtgRBEARBECJEhC1BEARBEIQIEWFLEARBEAQhQkTYEgRBEARBiBARtgRBEARBECJEhC1BEARB8MPOnf6PrVIFGD48vL4IiUaELUEQvFNYCNx7b9y9EIR4YfZ/7ObNQPfu4fVFSDQibAmC4J3Nm4G77gKI4u6JIMRHEGFL45dfgtchJB4RtgRB8E4YLxlBKO6EcR907Bi8DiHxiLAlCIJ3RNgShMz7YNeu+PohJB4RtgRB8I4IW4KQeR9s3RpfP4TEI8KWIAjeWbUqvSyCl1Ba0V/7hYXZ+3ftkvtDACDCliAIfrj77vSy2UumJFJYCKxdG3cvhCRhF/rhk0+A/HzgkUdy1x8hsYiwJQiCd/QCVkFBfP3IJa+8AtSqFXcvhCTx5JPpZaMG68wz1ffPPzvXs3lzaF0SkknpFLY++gj48su4eyEIxRe9sFVaNFuLF8fdAyFp6K8Jq+nChQud6/njj3D6IySWUIQtIjqJiGYT0RwiusVk/yVEtJKIpqU+vcJo1zdnnw2cfHKsXRCEYo1ewProo/j6kUvuuSfuHghJQy9gWQlbU6c613PoocCWLcCKFcBtt4XTNyFRBBa2iKgMgOcAnAygNYDziai1SdH3mblN6vNy0HYFQYiJN98EPvwwvT55sgQ3FUonboQtt2zfDrRsCTz4YLB6hEQShmbrEABzmHkeM+8A8B4AyUEgCCWVnj0z1595Jp5+xIV4lwkaYV4LH30EbNiglr/4Qmm7hBJDGMJWIwCLdOuLU9uMnElEvxPRR0TUJIR2BUEQcs/s2XH3QEgKVpqt9eu919W7d3r5qaeAiRP990tIHLkykB8BoDkzHwDgawBvmBUiot5ENJmIJq9cuTJHXRMEQfBAlJqtm24C+vSJrn4hXKyErXXr7I+zCxkByLR8CSQMYWsJAL2mqnFqWxHMvJqZt6dWXwZwsFlFzDyUmdsxc7u6desG79m6dcqepDixejXw999x90IQhGXLlB2NkU6dohO4nnkGGDIkmrqF8LEStvLz7Y9zijZfpoz/PgmJJAxh61cALYhodyIqB+A8AMP1BYiogW61G4BZIbTrzM03A+3bZ25zGlHETZ06QKtWcfdCcMu6dcC//8bdCyEKGjYEbrxRLeunhZYuBX75JZo2S0sYjZKCXsAaNSq97CQs7dhhv180WyWOwMIWM+8E0A/AGCgh6gNmnklE9xBRt1Sxa4hoJhFNB3ANgEuCtusKs1FpkvNXbdsWdw8Er1x4IbDHHnH3QoiKefPUd40amdsffjjcdv7+G7j1VueXsJAs9MKxPsSDk7DkpBkVJ4wSh4Ou0x3MPBrAaMO2u3TLtwK4NYy2PGF2wSZ1xNCtG3D88XH3QvDKmjVx9yC3lDZhYNQo80HQrl3htvPGG8BDD4VbZ0lmxw41VZcXc1xuK02kk7DkpMHUa8mEEkHJjiBfXEYHxxwDjBih4hcJ1mhaBiE+hg2z3nf55bnrRy4ZPjx7W9jC1o8/hltfSad8eaBNm7h7YS00OQlTuXw3XXABcOWVuWtPMKV0CVs7dypVvYYf99wo+OEH9a2f4nz8ceCss+LpT1LZc890HJokUFiYfBvAsLHTbL36au76kUvOPTd7W9jC1k8/hVtf2AwenJl8PFecdRbQyyL
hyIwZue2LkVmzgPfeM9/n5E2fK2Fr507g3XeBF1/MTXuCJaVH2Jo7FyhbFnj22fS2SZNy3yc7Zun8BgYMAD7+OL6+JJW4hZsuXYB771XLdesCv/4ab39yTdznP1c4CVNhClvFYWr25puBQYOib2fjRmVrq2mxP/5YJQBPYkoou/RNTlHgcyVslcYQSsyJjMJfsoWtv/5KLz/9dPZ+J/fcXFAc5+Y//DAzXUsuWbo0nnY1Ro0C7kqZI5Y2ey2g5E/lHnkk8PLLztNA//wTXpu77x5eXcWZL78EqlVTtmt77pm57+yzgf/+i6dfVti9PzZvtj82V8JW2BrY4sCWLSq/JLMyAUiIh2/JFramTEkvmwlbSTCWj8qF3Au//urtQXbOOerz1FPR9cmI9nLLxehasObJJ+PuQbT89BPwv/85a5sWLgyvzbgHEEnhiy/U99q15vtPOkl9m3mZx4HROF+/7qQBFmErOl56SX3Pmwd07w5MmxZrdzRKtrClUa5c3D2w5r774u4BcMghwLXXej/uuutC74opa9akb5g4p7H0miyjoJ4Ewb20sXMn8MQT0dRdUBBNvUaKwxSinnPOAZ57Lpq6tQHxa6+pb6N2aP164JJLrIWxXGO8RvTPgKQYyCfFLjmXbNyovq++Wn07RfPPEaVD2MrVg9MrcXtLjh6dfkD4nVKNWkW7aBFQu7Z6yAPxjtS++iq+toVMLrwQuP12oH//aOqvWTOaeo0kRXBwy4cfAv36qeVPPonGJkhzgjn22Mzt8+apEBlOA8NcPSOMIUGSKGxdemlu2nHD7NnRCep6tPe9pik1m9Xavt38Ookw40zpELasiFPYYY4/Rsz06enlt95yd4xRsxR1Ul6jJ9j27Sp4oFGAXr7cOWDt+vXAH3/478s11/g/VrCHWcWac8vbbysPueKOVWquhNiZWHLvvcCZZ0anWQSsnU9++y29PGFC9v78/NxoM1atylzXP8+/+cb+2C1bwu+PGUmyc+vbNy2oR4nxHWU2G1K7NnDiiWmBDFBCfvv2QLNmSoMaMiVT2Fq/HujRw7mcUdjasSN33mVJmEv3o80yhl6I2t7k558z19esAQ4+OPumbdAAqFRJxSszMnu2GnXWqAHsv7//vjiN4pcvBxYv9l9/aaagQP13pe38WXlNJd3rU3MSCWv63MvzUO+cYJUqy8lAPQyMbRhTw9mhTZU6ceKJypPeLy1a+D82bHJ1b7vJLbp5MzB2LHDKKelt1aur74ULlQY1ZIqHsLVzpzfbhjFjgHfecS43dSqwRJczu1EjZb+0fTtQtar3fnrBrTr15JOj64Mxf5cbTZ/xoZhrg3VNzbtgQXqb/qFnph0xCtBGAc4NTiNVQAWnbZLKyf7II8kx5C0OrF6tvps0UZ5oWngNQAnYdiN0M+1Gcef+++PugaJXL3uBKixha/lyf8dZmYh8/bX/vrhhw4bM9DyAt/AUboXpr79W5h5+OfNM/8eGjZUWN2yMXuJ+Pf4POCB4X3QUD2HrjDOA5s3dly9b1l25G28EWrdWy8xptfDq1cCmTebHELkfldjh1v7nyy+Dt+WW+fOdy1idFzv++cffQ9nuQaEX+pxs8owPtsMO896XE05wLqNNqc6eDdx0EzBzpvd2ck2YU+kXXuj/2NtuSy/Pmwd8/316/aijMkfoxj6PHOm/3aRyzz3AaafF3QsV48qOsIRCv0KbVSiSFStUtgPt+R42mhG2niiELSDYlGMupu28Eseszvr1Suiyus70SheNGTPSg8AQKB7C1ogRwLJl7suXL+++7IYN6uGtj0bcqJH6vuUWZTuxYYP6k7Q/6rLLgM8+c9+GGVHbOrnB+NJyY0N2++2Z68bRnRl+7aQ++cR6n17T4dRvM4GiUyegYkV//XJCi7SddA/FHj3U9GtYfPqp+bYBA5yPNU5Pjx0LdO2qBK1Zs9T+5cuV3ZzRC9aooS1O2Nlmff55uG3de6967ui1wknB772i14ACwLf
fpuu7+GJ17URh/2ZngjFnTua62SyJF8eIpD9H3KB/BscRiqFGDRWQ2gqj/Z1GiMJ68RC2NMwSwprh1RZp4EDgwAOztz/8sDIc1+Zy9Zx+eqa6csMGb1NqQebhw8L4EHLz0jLaLbmxjbjoIvUdplfo77+nl43TSMaRk9k007ffquspxJFLEZr2L24HCCfeeUdpADZtCkfDZXYtXHyxSj3lhNmU68iRwLhx6eu0QQPgmWeyvYvi9uoNQq4M4WvUUHZWe++tZgnctOtW+5ILDzO3dOqkvg89NL0t197oRjtWs+tTMzeIEuMMjF/PUc1044cflOD377/queGFcePSy+3auT9uwwaVqSNMhwfj+6FPH+s8myE6GCT8bWDA7ZTaHXc4l9ELZI89Zl3O7gIdPz69/OmnSqPhxlbMK2Yq6zB45pnM9a+/dvbo82OHpL2Ey5VLjzzDZMyYzHV9MFtARQS34n//C78/2rTWww+HX3cUVK2qAgFa5XnT8PPg0QTPHTvsNSpBXoj33x+Oy3YcQpsX26JNm9SzymtOwF27suMtrVqVflb1729uM9W1q7v6+/VTz6gHHlAvYy2+kRfCDoGhN4n47Tfn55pX7KbCjOYDZtdVixZA5cru2vJzXW7YoGZg9Oy2m/d6mJXhf48eyiYVAPbYA6hf35vGzSj4uf0/7rlHXat6YS0oxvPpxqA+nHY5kZ+DDz6Yi1CnR32GDmVH9OWDfjp3tt7XsWO6zd6909udmDzZWx/WrnWu0w927a1Z4/4YO9atc1/+jjvU/v/9j/nVV53Pi0bDhpnbP/jA3e8EmO+91/k8FRb6v36SjFl/v/jCuvyECd5/r7Z92DD1PWqUed3HHRfsPj3//ODnY9eu3P+3Xtr46CN/bT//fHa9Dz6ovseNU9/XXcc8aBDztGnqmE2bgv0fPXowL1gQ3nlwe46sylx0kbdz5sTChdZ9MdteWOj/9z7wgPf+Pf64eV3LltkfN2AA81dfMW/dqtZ37AjnHjAe98gj3o/z25bxs2RJZPc5gMnM5jKN6cYkfCyFLTc/PsiN6/Xz5JPMZ5/N/OKLav3OO537d+aZ3tqYNMm5Tj/4uci8XIxeL16v576wkPmvv7K3v/WW+3rLlXM+T1u3+r8+koqVYHHdddbHfPCB99/r9pwcemiw+7BGjeDn5L//vLVpFOr94NTGhg3psq+95u+6atkyu95991XfDz+svvPz1fcVV6hj3nor2P/htZ9B2tAEA6d6wuTff63bMNu+dKn/33v//d77d/vt5nU995z9cVq5a69V6599Zt83q8GTVb3a5/DDvR+3erW/tnJ1DTOznbBVvKYRNYiUG+no0dkedLmOyPz66yqqsjYtWa+e8zFevUtef91rr3JLXCEOBg9WNihGvIQJcVM2V79v7Vp/3p5+sPpNdrkPa9WKpCsAgucIDcOmw+v0nN30dFjo7VT1zw19MEYnzFzutamum29W35p91pAhKszJs89662ecuJ2CDtPQ3GuoiiBt+7Hrs/ISveoqd8dr3nlOHrF+41LqTXDcUru2u3JhOxSEZFdZPIUtAGjVCjj1VGD33dPbli6N9oVghvbHasbQboyivQpbixZ5Kx8VEyeab/dqLBkWt9xivj3s6MxB8te58aLV7D+aNVOJU3OBnb1Is2bm253Og9lx+qCB+m233mpfVxx4tRvzKpz5QW+krreZMzuvYXHCCcGFX43HHlPeyJo3dxRu//pz5GRntmZNOF6eHTsGr8MtzOHWF2bA3PXr1f8aJDNH2IR9vkIyki8ewtZRRzmXWbIkHbIhl2ipIypUUN9uHiZeDd7NoqLnAv0IoXr1TA8fPWG5qIcVV0gfWyaMUUlBQfr/9YrVCJhZ9XPnTqUVnTJFXRf66NhRYvdAWrjQfL+TMFKpUvY2swf7F18ADz0U/os36EPW67Wydm30gRr158/o7Vwcou3fcENm1garwXDLlv7b0F+XTtqP2rXVcyYqDfL115t
vD+IAEvZ98v77zmXc3gtauqYgmTm89CGONFYhaXmLh7DlJrxAFF6AXtDSA7nxxInKuzAKNK8RYxwkPWaqZD9Tb1GEYXDzkNOHkQDUSOb00zPr8BvLyUr7tmGDcpnXAvBqQVOTosU0+/+czuWsWdkCj90xWlR+q5QrXgkaSd7rCHbbNqVhj/IFYPeitQofMGmS+zA5ucbqOeI3nlGlSuoZ9eGHat2tYBLEu81OqLeahu/b1397YWfpsIqNqP9dfp5DXk14nP4rs8j5XqbPwyKkeyn5whZztiu/GV4CmUaBl1F1ruxywsAoiJgxbFj2NqvfWKWKdT1haia1CP1upkOMbvGTJ2cGrd261b8dgNU0onGqM9e2hk7ok/1qbN/uLHQahSu7l5pmPxKWhsZuQOCGPn38Hec3wLGb6WmnZ4XZAKVDBxXbLqnposx+k99zSKRewOeco9bdCr5BpmH9CNe5zHLgFLTWKu6dPiwPkffsJV5NeJymM82Cn950k3O9buymvRCSWUryhS03gsmDDwLXXht9X9zi9BD18+eFPQ/tFrexdow89ZT59vr1rY8JU0PQubP6totCr2GMfP7oo5nrgwb5F5CtjmvY0PqYJGg+DzssM2jvnDlKe+s0Gt2+Xf2PmrBr90CdOzdb0A2C3/x6Gn4F6oED/R13443OZe68U03FWWGVIumjj8wHQUkgzLyzmzcDV16pljduzE0qGL/PqcsuU/3z+iw3y/dqh9/gpb16pZcnTYo2Ly+QncNQz9NPZwuFRMB55znXG/YMSVh24FZuil4+AE4CMBvAHAC3mOwvD+D91P6JAJo71VkU+mH58vBdOaP+6N18CwuZZ80K7pq6aZMnF1RHvMSOctPnRYtUuf797cvVqWPdpy5dwv0fypTJjH/m1r3XuK1WLf99uP56899qd0yPHv7/VzesX++u73o38auvdndMv37MI0emz1/9+s7HjBnjru68PG//o1eCXGt+6NTJud6OHdPLlSqZl9mxI13nvHmZ+yZNCveeCuszY0Z4597vZ9Uqf//bzz8Ha1eLOef206mTt/6dfba/6/XAA4OfU2PYHWbrd80ll1j/Brs2tm2z//1RXCsuQZRxtgCUATAXwB4AygGYDqC1oUxfAC+mls8D8L5TvUXC1tNPR3PyovwMGpQ++w88oLY9/TTzs88yf/65vzpHjsz+Z7ds8RbwtKCA+aqr1MXvFKxO/3nnHXfldu50V27dOtWff//NjCEWNNZSGDfTmjXpbW4eWl5+r8bmze77EwWHHeau382apY/x87uXLHEnqJoF3fT7ueQS83vFDUHaNQatjLo9q/aN2zt0CLedsD4HHaSeR8zqORZXPzZv9v6/XXNN7vt55ZXM48erjxPXX+9cX5TXo5Ft26yvAWbm1q3V+i+/uOtLtWr2vz+K8z91qvN5Z+aoha2OAMbo1m8FcKuhzBgAHVPL+QBWASC7eouErbhuwqCflSvVjRF2vQ8/nA6gCrgLyqmhRaDesoX5nnviP0faRwvc2KZNfH14+GHmjRvDC+Zo/HTsyPzrr+p3uhnZLl/u/n/1ipWGxOzzxhvqmLivEa+fgw5SAW+Z1UPcjTAURrudOjEffzzze++pwYeRadNUX4JkJTD7XHCBqt+NJjFJn0MPZX755fjav/RS7/dPlSrxnzft07SpGtRs3qyi5K9apaLAOx3HrAIbh339A8yffqruvcJC5rZtvR27fr23vtx+u3pua7zySnTnuk4d9U7XfpvpI8Ra2CK13z9EdBaAk5i5V2r9IgAdmLmfrswfqTKLU+tzU2UsUm0D7Q46iCefd1466J4deXnxuIQmjXPOUXmsVq9WSXtr1lQ2M+XKqf1W9h1C7snPdx/v5sorVeiNQw5Rdg47dqg4WWvXqsfAwoUqWWuTJspRZNcuVXdenmpn0yZVtmxZtb9PH2X/EPDeL1Y0bAj07KkM/LdsUa7q2mO0QgXlUWsX0FUo2fTrpzwiidS9RqR
sCuvUUffb1q1AtWqqrGYjJghnnqmcvhYsALp2Ra0BA+asYW5hVjRRwhYR9QbQGwCa1q598IIoQgEIgiAIgiCETDsAk5lNPW3C8EZcAkAf8KVxaptpGSLKB1AdQJYkxcxDmbkdM7er27x5+JnaSyqNG6sYQwsXqlG7lSL0zDPj7qk1b78ddw/ShJ3uQU+nTu7KlS8PfPwx8OOPKjPCxo3p/3HnTqXJ3bVLfTOnv+0+2nGVK3v7jR06+PutSeLDD9U53LjR/FyFFCU6C7N0UkKyuPNOFerE7h4qLFTxlkqTNjguHnss7h64Y9AglT1l2zYlq+zahSmAdZwqq/lFtx8oG6x5AHZH2kB+X0OZq5BpIP+BU73F3mYr7HlwgJlIeRutX898wgnMt9zC3LOnuV2BGZoN2Y4dzD/+GP5vrlPH33GabctBB8X3f2k2Ul98EX7de+/NvHhx2ovmn3+cj9GMh6PAS9814/64/he/n19+SScodmu8HqS9ww5Thslz5zL//TfzihXRtmf8fPhh8fyfpk5lnjAhvvbfecf7/dO4cbznTLPZ7dFDPUs0tETNTzzhXEeU16Nmd8WsPDfPP9/9sVaOHmafxYuz7SKnTYvuvH/9teOlgSgN5FX9OAXA31Beibentt0DoFtquQKAD6FCP0wCsIdTnUXC1u+/5+4iDuvz/vvps//TT2rbP/+oC8NvKIs//3T8o10xcaL6dus5CDDPmeNcpksX7Wqz/+gN+gsKMoWKI44I/79w642lsWtXettvvzHn5zNXreq//fbtsw1R3XiCRsm997rre//+6WP8/HZm5tq1ncu59XZ185k2TT2E/RCk3Vy3p//UqJGuc7/9MvfdeGN47YT56ds3mnPh5dOnj7//7ZFHctvPOnWYZ85UgzWjZ7MZPXr4u1732iuc/hopKDAvd+aZav/Ikcynn54pONnVf9999r8/iv9g5Urn887MkQtbUXyKhK2VK3NzQYf50UYYYV4M2ig9TLzcPE5ltDhg06bZe8M0bGjdnzPOCPd/+PBD5TLt9jcaz4uGG4HB6nPGGd7P/Vdf+f9P3eDWE+7dd9PHvP22u2O++EKF9NDOX926zsf88IO7uom8/Y9eCXKt+eGss5zrbd9efZ98svXLUI8xjMJ//4V7T4X1McZKCqPOChW8lTcOgtyyZIn/Pj71lDvNtv7TrZu3/rkJTWGG35kJ/efff7PrtXre3H+/9W/w2ne3x/r9uMRO2Ep+BHm79C4aScknp1Gzpv1+P0mN/SZCDoqWBsOJypXV94EHqgjsWgRxI5pHjxlansAwOPxw4KyzzJMj6ylXLjuRtjGf4d13++/HjBnm2884w/qYI47w354b3NhrtWkDnHtuel2LJu107FFHAc2bp/OZOkWzZlZek25gtt8/Zoy7eqzwe49ddpm/49xE6D7hBGDmTJUnbs6c7P1GD8qKFdPL33yjvFSTxv77R5NebcsW4Pzz3Zd3e90Z2W03f8etXAlccw2w117p56Ubli711s4113grr3Hwwf6O09O8efY2q2eGXdaXv/4Cbrste7uW4ssOu+wcfnDTpguSL2y5eQA2bpwsozqnF5LfmzUO7rrL33EHHWS+/a+/rI8JM3nue++pb7tUJ4By687Pz9x27LGZ6yed5E7oN8PqofrCC9bH6F+YcfH555nXcc2awLvvOgs8msCsCblt2liX1f6bXKRYcYNVYmcnrrjC33FOQlqVKqqMXZLm//3PfPvbbwPHH++vX1HjJt+qWypXTg+WiPwnjPeCHyGtfn0VRkJDG4y4YfJkb21Vr+6tvMazz6aXzz47OqcRDbvBdatW5s9cq8GrnqTlmU2RfGELyBxhW+H3ZRgW2oujY0fnsk7aliSx777OZXr2zN7mJ59UmAm6GzdW37VrO5c99NDM9eOPB77/Pr1evrz/OG5WWirtem3QQH1rI/KkXBtav/Ro8drsMD5A7a6DCy5Q31aCuVeaNg12/DPPeD+mSRM
V/ywKNm3KHggYMbteduxIn9viQt++/o7bvBlo1055hQHuBaEgyc/9CFvXX++/Pa9aQL1QZ8Zzz5lv32uv9PJ//3nXijoNxIw4zWScdlr2ttdfd6437CgGIT2Ti4ewZTUlpefSS6Pvhx0HHqi+e/RwLms3lZYk9AlQ33vPWui9+OLsbVGGT3CiVav0stPLCsgWCMqUAY4+Or1etqz/zO9XX22+vVIl5W6+dKmaetM0s04Pylxh9iB0ejjut5+3Y9q2Vd9hJSYOGmbBj4Zt3rxgbTphp6mxGgCEOR0fNsYpeo2hQ/3XWbZserbArSDkZhDmh6+/Nt8eJKD0rbf6P9YMO02phtfnkNuQNnqc3hFm97PZNGXUBB3EpSgewpadWvCww9R32bJKsnZrYxQW2gujoEB9u7nZvczXA9ZTBVGjt2U691xg8GDzci1bhtPet9+GU8+NN6aXwxD6gry8zDREGtoU2w8/qHJnnQVcdJH/trxgJQQCwH33mW930mz98Uf2NjMh9dNPge++s68rDtwI5no6dvR+jFf09T/1VOa+OAc0bpk9Wz2Xb7lFaXcefNC8nNtsCmbo70+ngblm8hzUBtZoaqBhJXQEmd70a1tmxTHHOJdxe20NHKjewZ99FqRH7vsQxzV/+eWhVFM8hC0jTz2lVOXLlwNjx2buGzYst33RVKfaw8LNQ8OrsOVkcJ8rrCR8bcou18yYYT6FE7bdhpvpMyu8aG0+/NBa0AkbOxvH22833+7nPIwbl73ttNPcPfBzjVdbuVzYXuqFLf35d3I80NOokfuyzZurgWP37u6PsUMbiD34IPD44+HUaUR/v9sZk5uZO/jlpZe8lfc6xaYnDgFDE/Cc7vm8PGD8eO/vtCgJ2/kiJOe04ilsXXONGs3Uq5d9YoO8GP3w+uvAiBFK+APMXy5GvD6kg8z3+8Vq5OYFJ+N0O/RTgVbst1+mbZWGl5vNzQM4Lk/QKLHSyBi1J3oWLgzertlUIxBc+OraNdjxQLbtnhOPPBK8TSf0wrp+2cs0j1lZTVv+wQfq+9RT1ffpp6trY8AAb/2MEzdCcosW7ux93OJVoxlEAPCj2bLK8/nLL+6O1zR0TgbpXrw/9WgzUl5waze7fbv9fjfTqHpCEnaLn7Bl580WBXYCw7XXKlutLl2AadPUNjf2ZV4T3ubSO22ffdQozKgx9MMjj2SP6OrVsy7/669qnv6nn4CRI921UbGi8hbU40Xg3n135zJRTxXFgfEBoo1MTzjB+hg3zh9W/Pyz+raabgh6juvXD3a8nz60MM03Gy76l7TdvWPHa69lbzv8cPWtufsfcYSaBr7nnvS6F84+O3N96lRvTiVBtNFu/reQ3PeLsOuvcTBdpkz27ISXa83Py95KGN9nH/vjpk1TBuaaML7nnvbl/d4Dbgcqmn1zo0bhafh++y2cerxiFYAr7k9RUFN9kDK3UdRvu805SFnlyunlY46xLvfYY9b7xoxJt6mleZk1y10fvQRU27LFXZ1e6dUrsx196gcrjjrKf5C5Qw5hXrrUff/cBprr3z9z+/z57usxRrL22xf958AD1fcrr7j/rXGg9bdCBZXexinFzMaN7v8TYxtOnHSSt3Os/5x+OvOGDf7OgVV/Qwx0GKg9PYWF7u5RN+1s3pz+vxcsyE57wsz82Wfuz8X27elsH1Oneu/f4sX+/3+z3/ntt+llLeBymCxdat2X0aMzt1Wpkn38668zly3r7vc99JD3/pkFEq1Xz99vBZgnTVLZNADmPfdU99yVV7qvY+jQ7OvFDc8+q8prmU/c9tfuYxXRPoT7HMU6qKkeJ6lcQ+9JZoU+zskNN1iP2g85xHp6Qa8F6NwZ+PvvaBLPRqXZMvbVjarbz5SaFodo4kR7g3G/GN3GjbZldvZJftz9ndC0ZWEECcwFW7eqZNNO09t+wqtoXrpOuHFC2GMP8+377hueR2OusdMkGiHKdM/3gtEWsFKl9P/dtKm5psat3dbcuUq
bvP/+6vWkOQ15ISwjcO3Zv3FjelsU3pl2/TXaL5lpZCZMSDtVOeFHo0OUbb/mNTiqBjPQvj2wapWyu/vnH+Djj4Hnn3dfx1FHpZfPO8/97EPfvsCXX6rwHmFh/O+GDw+vbrtmc9JKULxebJr9lFtOPRV4//3s7XPmKHX6+PHZHgmrV2f2i8ibSrVzZ299jALjRefG9d0olNoFrdRwEynbK3qbMqPK3Hi9mDkYzJihvOLC9vQB0sEx/cbmyhUjRihvorAwExxGj3YXlNFMiJ8yRXlqakEa585V0dSN0wBJP892RHH9mXH77cD8+epcMYdbt5UQ7IWw+vTRR+r777/T26IQtuz6a7RLNHt/5SKQr3FgG/Raq15d2Q8TeX8n621wvQxwidS7Msz7xFhX165KgDQjxEgAxUPYevJJbwabTgZyerRYUvro0ZqnjzZfnZcHvPyyusFefBFYssRf0E49SbQDcqNBu/LKzHU32o5jjwUGDfLeHy0KvBn68+f0sjW7UffbzzxoXhg8/LD6jiIlSZh06RIsFZERs+B/DRu60/Dtv3/m+nHHqWCnRx0FnHkmcMopanvr1tnaspIqbIVhN6mnWbPwPdvCilflV9gyale0gdeuXcqh47vvovHmsxOWjO+GDRuyy+Qq1mIS0zVFFePMDmYVQspMqQJY9ylI/DcDxUPYuuYalW/PLW7dUIcPT4+EAOCOO9R37drWISSuuCKc3EtmeZ/M+Oab4G1ZYXzAuTHANU43uDkX1ar5S/tjlzlA3w8ngc8o2K5f770v69Y5lznnHOVlU7GiCnbp1euluGPMMekFbTB19NFqykt/f7zyCjBqVHrd+PK86Sb/7SaVr78OxyM4KFOnZjug6OnTJ5x2/HqRWz3r27dXA+ioQowYp9zLlweeftr98V4G20FC/1gFkY2TXIey0ATOGjWs43CaneN33gm1G8VD2PLKiSeaJ201UqVKpor53ntVnCyiYBF/3dChg7tyUeY38xNI0PiQ0ATUXHHkker79NPT27T/sFw581QN3burfmoBQ/2MKqtXd34hvP++8qQE3Hk5Cmm0czt2rJry8nLdJyUOnR+sNORu7E5zQdu2wBdfWO8PS6voVwNjJbS4te/1S9mymXZE27d7E6Dcxib88kvzDB1uscvBmmvCCn7txBlnZK63b+/uOP3/V7eu/7AWFpRMYSsvz9llFTCXsHORyFRrJ6qcam7Rq8Ldvtxq1Mhc95v01C3GkVmVKkoj17t35nZm9cAzs/2pUkUJ0m+8ESxStZvpsOIQ1TuJ5Oer8BC5smFKCvffb749V88hvyxbpr5zEdjVyHHHpZe1EBZ6WrYMbubhBuOzb+5c98e6NWHo3DmYOcK2bf6PDZtcBTJ246TWqxcwZEhmcGDNYeGuu+zjDfqklD3ZEsbEifG2f/bZQL9+atlvDJ+Q8kZZYrT1CmLsShTsJZar6O6lFS9BRX/9VRnPR0WupvCs7CSTLnTWr68yeNilfQrKnXeab9cHxGzWLHv/7Nm5CURs1KjqzTKcvExzNSjz4u0aNU89ZW2IHibGZ7zeHlvjpZfUgN3oXMWs3jkha7WA0iJsjR8fdw+siXO6YK+90p4hZg+tJFCunLoBPvxQrcfpWKAfmenzLwLhGpoLzrRrp4zn//03mvo//TSaeo3EoRkKwq+/ppNv16sXjQZOc4bQAqxqtG6tXtZW6aRyjVGg00+pOgnLuRK2cm3mYUeFCv5Dl3hB0wRq14kWeihmSoewZZUaIGwXaD94jdQcBZs3q2k2r2gP3VyghY+IKw8jkPkANSblDjOEguCe5s2jqTfpnqRx0a5ddPaImoG5VQidQw5RL+ukTLEaBaokCltJOVe5RAszoTnd+IkPGAElW9jSqwLNYnskQdi69NK4e6Bc9r3clEuWqE8ujcA1ryO3XpxCNESVTDgpvPEG8PvvzhpULXdcGDz4YHh1FWeOP14JWh06ZD+PunRR4XeAaOJm+cEu/IO
TM02uhK0khhiKGs1AvmZNFXYjF2m1XFCyhS39DXv55dkariRciHpDfrO55STSsGE44S/8kOtE40Z++SVtCPvEE/H2JQ6S8qKLiosvVjG/cmkzlUT3/Dho3VoFIz3//GxHlqeeSp6WxugspMfJQStXwlZp1NDm5aXNCxKUWaJkC1v6C7piRWW7pSXEBZIxhadHHzNmyhTrpL2lmVwm5TajQ4d0xOzrrvNm1F0SSLrhdlg4/c6kvfijZsYMlaQ414werbIGhBGlPmwGD84cwOtnSq67zv7YXN1HVgmpSzpRmRcEoGQ/Oc1GD/pI1Ul5cXz/vfrWuxIfdJD73GSlBebceBl5obSFerB76YUd8TwpLF6cvS1sYcuYmSFp7Lef+zyXYXLyyebBgXv08JabLwoqVbJ+qTvNmuTyufHRR8DIkblrTzAl0DwaEdUC8D6A5gDmAziHmdealNsFYEZqdSEzdwvSbiCSYKdl5OijVfTtBg3ij70lCHbYRRNPQsTzKKhfP3tb2MJWnI4fxZG33oq7BwqrAbuTMJVLYevMM3PXlmBJUNXOLQC+ZeYWAL5NrZuxlZnbpD7xCVpJpls3pc0SihdHHSUvypJMt25KsDImjg87mG///sD06eHWKURPVMJWly7++iMklqDCVncAb6SW3wBwWsD6wsUsZUNC3EBNKVMmO1O7kGweeghYtCjuXghR0KoV0LWrWv7yy8x9XvLguaFCBeCAA0qnQXNxRi806b3fnWZQkmLCIuSMoP94PWZO5W3AcgBWYcgrENFkIvqFiE4L2KZ7brrJPFdeklmyxN6lWBCE3PDXXyqth5GhQ6PLxygv4eKFXtjSB+x0SgvmJFSHlXNSSAyONltE9A0AE6MFZITxZWYmIitxvhkzLyGiPQCMJaIZzJyVSIqIegPoDQBNw0gDk5eXPINqJ4hKn9G1IBQnovRi/uILleNTKH7on9tOA2Ynr2otT59QYnAUtpjZMnofEa0gogbMvIyIGgD4z6KOJanveUT0PYC2ALKELWYeCmAoALRr1y6BluyCIJR6oozdE2f6LsE7egFLv+wUh9BJg5mEGJBCqATVWQ8H0DO13BPA58YCRFSTiMqnlusAOBzAnwHbFQRBiAdxiBA0rIQtP8F/J05ML7/1lvJQF0oMQYWthwCcQET/AOiUWgcRtSOiVG4F7ANgMhFNB/AdgIeYWYQtQSiuLFwI1KqVXr/11nQeMkEoTVgJW35o0QK46iq1XKuW8oQVSgyBdJXMvBrA8SbbJwPolVqeAGB/YxlBEIopTZqoPHYffqjWu3YFOnaMt0+CEAdh2dcSKU/5p54C+vQJp04hUZRO15dly4A1a+LuhSAUX/Q2J23axNaNnHLXXXH3QEga+vvASvA68UTneqZMUVOPZcqoaP1CiaN0Clv160fnui0IpQH9S6a05Ak87jgVe0sQNDrp/MeshC39lLsVpWXAUoopncKWIAjB0IdmKS2eU0cfrWJvCYLGqaeml43C1tyUw/3llzvXI+F+SjwibAmC4J0bb0wvSyBOobRiZyC/xx4qknwny+hJQilCnpKCIHintGizBMEOvYAl94RggwhbgiB4R6Y9BCHzPihu2UqEnCLCliAIgiD4Icw4W0KJRoQtQRC8o3kgXn99vP0QhDgJQ8Baty54HULikUlmQRC8U7my8rbaY4+4eyIIxZejjwaqV4+7F0IOEGFLEAR/iKAllHaCeOIOHAj06BFeX4REI8KWIAiCIPghSEDfu+8OrRtC8hGbLUEQBEEQhAgRYUsQBEEQBCFCRNgSBEEQBEGIEBG2BEEQBEEQIkSELUEQBEEQhAgRYUsQBEEQBCFCRNgSBEEQBEGIEBG2BEEQBEEQIkSELUEQBEEQhAgRYUsQBEEQBCFCRNgSBEEQBEGIkEDCFhGdTUQziaiQiNrZlDuJiGYT0RwiuiVIm4IgCIKQKHbfPe4eCAknqGbrDwBnAPjRqgARlQHwHICTAbQGcD4RtQ7
YriAIgiDEDzMwb17cvRASTn6Qg5l5FgAQkV2xQwDMYeZ5qbLvAegO4M8gbQuCIAiCIBQHcmGz1QjAIt364tS2LIioNxFNJqLJK1euzEHXBEEQBEEQosVRs0VE3wCob7Lrdmb+PMzOMPNQAEMBoF27dhxm3YIgCIIgCHHgKGwxc6eAbSwB0ES33ji1TRAEQRAEocQTyGbLJb8CaEFEu0MJWecBuMDpoClTpmwiotlRd66YUAfAqrg7kQDkPCjkPCjkPCjkPKSRc6GQ86DI9XloZrUjkLBFRKcDeAZAXQCjiGgaM3cmooYAXmbmU5h5JxH1AzAGQBkArzLzTBfVz2Zmy3ASpQkimiznQs6DhpwHhZwHhZyHNHIuFHIeFEk6D0G9ET8F8KnJ9qUATtGtjwYwOkhbgiAIgiAIxRGJIC8IgiAIghAhSRa2hsbdgQQh50Ih50Eh50Eh50Eh5yGNnAuFnAdFYs4DMUuEBUEQBEEQhKhIsmZLEARBEASh2JNIYUsSVyuI6FUi+o+I/oi7L3FBRE2I6Dsi+jOV9PzauPsUF0RUgYgmEdH01LkYFHef4oSIyhDRb0Q0Mu6+xAURzSeiGUQ0jYgmx92fuCCiGkT0ERH9RUSziKhj3H2KAyJqlboWtM8GIrou7n7FARFdn3pO/kFE7xJRhVj7k7RpxFTi6r8BnACV2udXAOczc6nLpUhERwHYBOBNZt4v7v7EARE1ANCAmacSUVUAUwCcVkqvBwJQmZk3EVFZAD8BuJaZf4m5a7FARP0BtANQjZm7xN2fOCCi+QDaMXOpjqlERG8AGMfMLxNROQCVmHldzN2KldS7dAmADsy8IO7+5BIiagT1fGzNzFuJ6AMAo5n59bj6lETNVlHiambeAUBLXF3qYOYfAayJux9xwszLmHlqankjgFmwyK1Z0mHFptRq2dQnWaOlHEFEjQGcCuDluPsixAsRVQdwFIBXAICZd5R2QSvF8QDmljZBS0c+gIpElA+gEoClcXYmicKW68TVQumCiJoDaAtgYsxdiY3U1Nk0AP/h/+3dP6hWdRzH8fenbCgbipIwGpQGhwpKQVIbRCuSwiEcDGpoqSGCpqCWXBoKiSCoJQtBM0oNIsgUkmhq0IxLVEuRKZpOhRF2634bzs/+jnL6PT3P+wUXDnf63Ds8z+ec3+/8vnCoqmb1f/Ei8CSw0DlHbwUcTHIkySO9w3SyHDgLvN6WlV9Nsrh3qAmwFdjTO0QPVXUS2A4cB04BP1TVwZ6ZJrFsSf+S5EpgH/BEVf3YO08vVfVbVd3KMGN0dZKZW15Och9wpqqO9M4yAe6oqpXAJuCxtvVg1iwCVgKvVNVtwE/AzO71BWhLqZuBt3tn6SHJ1QwrYsuB64HFSR7smWkSy5aDq/U3bX/SPmB3Ve3vnWcStGWSw8A9naP0sA7Y3PYrvQlsSLKrb6Q+2h08VXWGYZrH6r6JujgBnPjLU969DOVrlm0CjlbV972DdHIn8E1Vna2qeWA/sLZnoEksW38Mrm7tfCvwbudM6qRtCt8BfFFVL/TO01OSJUmuateXM7xE8mXXUB1U1VNVdUNVLWP4fPiwqrretfaQZHF7aYS2bHY3MHNvLlfVaeC7JCvarzYCM/cCzT88wIwuITbHgduTXNG+QzYy7Pft5qJmI47hIgZXT50ke4D1wLVJTgDPVNWOvqn+c+uAh4C5tlcJ4Ok2b3PWLAV2treMLgHeqqqZPfZAXAe8M3yXsAh4o6oO9I3UzePA7naD/jXwcOc83bTifRfwaO8svVTVJ0n2AkeBX4FP6Xya/MQd/SBJkjRNJnEZUZIkaWpYtiRJkkZk2ZIkSRqRZUuSJGlEli1JkqQRWbYk/a8luSbJsfZzOsnJdn0uycu980mSRz9ImhpJtgHnqmp77yySdIFPtiRNpSTrk7zXrrcl2Znk4yTfJrk/yfNJ5pIcaCOhSLIqyUdtsPMHSZb2/SskTQPLlqRZcSOwgWFA7y7gcFXdAvw
M3NsK10vAlqpaBbwGPNsrrKTpMXHjeiRpJO9X1XySOYZRYBdG28wBy4AVwM3AoTYC51LgVIeckqaMZUvSrDgPUFULSebrzw2rCwyfhQE+r6o1vQJKmk4uI0rS4CtgSZI1AEkuS3JT50ySpoBlS5KAqvoF2AI8l+Qz4BiwtmsoSVPBox8kSZJG5JMtSZKkEVm2JEmSRmTZkiRJGpFlS5IkaUSWLUmSpBFZtiRJkkZk2ZIkSRqRZUuSJGlEvwOEzYNVNfVvYwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "_, (ax1, ax2, ax3) = plt.subplots(3, 1, figsize=(10, 8), sharey=True, sharex=True)\n", + "librosa.display.waveplot(gt_wav, sr=config[\"sampling_rate\"], color=\"b\", ax=ax1)\n", + "ax1.set_title(\"Ground truth\")\n", + "ax1.set_xlabel(\"\")\n", + "librosa.display.waveplot(inv_wav_lb, sr=config[\"sampling_rate\"], color=\"g\", ax=ax2)\n", + "ax2.set_title(\"Griffin-Lim reconstruction (librosa)\")\n", + "ax2.set_xlabel(\"\")\n", + "librosa.display.waveplot(\n", + " inv_wav_tf[0].numpy(), sr=config[\"sampling_rate\"], color=\"r\", ax=ax3\n", + ")\n", + "ax3.set_title(\"Griffin-Lim reconstruction (TF)\");" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Iteration time: 46.1579s, output shape: (10, 206336)\n", + "Iteration time: 49.9080s, output shape: (10, 218624)\n", + "Iteration time: 49.4804s, output shape: (10, 219648)\n", + "Iteration time: 51.8307s, output shape: (10, 221952)\n", + "Iteration time: 48.4572s, output shape: (10, 210944)\n" + ] + } + ], + "source": [ + "def gen():\n", + " file_list = glob.glob(\"../dump/train/norm-feats/*-norm-feats.npy\")\n", + " for file in file_list:\n", + " yield np.load(file)\n", + "\n", + "\n", + "mel_ds = tf.data.Dataset.from_generator(\n", + " gen, (tf.float32), tf.TensorShape([None, config[\"num_mels\"]])\n", + ").padded_batch(10)\n", + "\n", + "for mel_batch in mel_ds.take(5):\n", + " start_batch = time.perf_counter()\n", + " inv_wav_tf_batch = griffin_lim_tf(mel_batch)\n", + " print(\n", + " f\"Iteration time: {time.perf_counter() - start_batch:.4f}s, output shape: {inv_wav_tf_batch.shape}\"\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Saving outputs with both implementations." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[0m\u001b[00;36m0.wav\u001b[0m\r\n", + "\u001b[00;36m1.wav\u001b[0m\r\n", + "\u001b[00;36m2.wav\u001b[0m\r\n", + "\u001b[00;36m3.wav\u001b[0m\r\n", + "\u001b[00;36m4.wav\u001b[0m\r\n", + "\u001b[00;36m5.wav\u001b[0m\r\n", + "\u001b[00;36m6.wav\u001b[0m\r\n", + "\u001b[00;36m7.wav\u001b[0m\r\n", + "\u001b[00;36m8.wav\u001b[0m\r\n", + "\u001b[00;36m9.wav\u001b[0m\r\n", + "\u001b[00;36mlb.wav\u001b[0m\r\n", + "\u001b[00;36mtf.wav\u001b[0m\r\n" + ] + } + ], + "source": [ + "# Single file\n", + "griffin_lim_lb(mel_spec, stats_path, config, output_dir=tempfile.gettempdir(), wav_name=\"lb\")\n", + "griffin_lim_tf.save_wav(inv_wav_tf, output_dir=tempfile.gettempdir(), wav_name=\"tf\")\n", + "\n", + "# Batch files\n", + "griffin_lim_tf.save_wav(inv_wav_tf_batch, tempfile.gettempdir(), [x for x in range(10)])\n", + "\n", + "%ls {tempfile.gettempdir()} | grep '.wav'" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/TensorFlowTTS/notebooks/multiband_melgan_inference.ipynb b/TensorFlowTTS/notebooks/multiband_melgan_inference.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..d9eabe1ffc85b9887743674a37d3b38418935ee1 --- /dev/null +++ b/TensorFlowTTS/notebooks/multiband_melgan_inference.ipynb @@ -0,0 +1,116 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", 
+ "\n", + "import tensorflow as tf\n", + "\n", + "from tensorflow_tts.inference import AutoConfig\n", + "from tensorflow_tts.inference import TFAutoModel" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mb_melgan = TFAutoModel.from_pretrained(\"tensorspeech/tts-mb_melgan-ljspeech-en\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Save to Pb" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "tf.saved_model.save(mb_melgan, \"./mb_melgan\", signatures=mb_melgan.inference)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Load and Inference" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mb_melgan = tf.saved_model.load(\"./mb_melgan\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mels = np.load(\"../dump/valid/norm-feats/LJ001-0009-norm-feats.npy\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "audios = mb_melgan.inference(mels[None, ...])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.plot(audios[0, :, 0])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/TensorFlowTTS/notebooks/prepare_synpaflex.ipynb 
b/TensorFlowTTS/notebooks/prepare_synpaflex.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..494ac4271ac24ad3c1b38711119c78b8e8112714 --- /dev/null +++ b/TensorFlowTTS/notebooks/prepare_synpaflex.ipynb @@ -0,0 +1,111 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "import numpy as np\n", + "import soundfile as sf\n", + "from pathlib import Path\n", + "from shutil import copyfile\n", + "from tqdm import tqdm\n", + "\n", + "input_dataset_path = \"[your_local_path]/synpaflex-corpus/v0.1/\"\n", + "reorganized_dataset_path = \"../synpaflex/\"\n", + "\n", + "maximal_duration = 12 # maximal audio file duration in seconds\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "wav_dir = os.path.join(reorganized_dataset_path, \"wavs/\")\n", + "os.makedirs(wav_dir, exist_ok=True)\n", + "data = []\n", + "total_duration = 0\n", + "\n", + "# Precomputing walk_count for tqdm\n", + "walk_count = 0\n", + "for subdir, dirs, files in os.walk(input_dataset_path):\n", + " walk_count += 1\n", + "\n", + "# walk through dataset\n", + "for subdir, dirs, files in tqdm(os.walk(input_dataset_path), total=walk_count, bar_format='Data Reorganization : {l_bar}{bar}|'):\n", + " for filename in files:\n", + " filepath = os.path.join(subdir, filename)\n", + "\n", + " # read wav files\n", + " if filepath.endswith(\".wav\"):\n", + " try:\n", + " wav, sr = sf.read(filepath)\n", + " duration = len(wav) / sr\n", + " \n", + " # Only keep files with shorter durations than maximal_duration\n", + " if duration <= maximal_duration:\n", + " total_duration += duration\n", + " path = Path(filepath)\n", + " current_path = Path(path.parent.absolute())\n", + " \n", + " # find corresponding text file\n", + " txt_file_path = os.path.join(current_path, \"txt\", filename.replace('.wav','.txt'))\n", + " if not 
os.path.exists(txt_file_path):\n", + " parent_path = Path(current_path.parent.absolute())\n", + " txt_file_path = os.path.join(parent_path, \"txt\", filename.replace('.wav', '.txt'))\n", + " if not os.path.exists(txt_file_path):\n", + " break\n", + " norm_text_file_path = txt_file_path.replace(\".txt\", \"_norm.txt\")\n", + " text = open(txt_file_path, \"r\").read()\n", + " if os.path.exists(norm_text_file_path):\n", + " norm_text = open(norm_text_file_path, 'r').read()\n", + " else : \n", + " norm_text = text\n", + " \n", + " # ignore file if text contains digits, otherwise copy wav file and keep metadata to memory \n", + " if not any(chr.isdigit() for chr in text):\n", + " data_line = filename.replace(\".wav\", \"\") + '|' + text + '|' + norm_text\n", + " data.append(data_line)\n", + " copyfile(filepath, os.path.join(wav_dir, filename))\n", + "\n", + " except RuntimeError:\n", + " print(filepath + \" not recognized and ignored.\") \n", + "\n", + "# save metadata\n", + "with open(os.path.join(reorganized_dataset_path, \"synpaflex.txt\"), 'w') as f:\n", + " for item in data:\n", + " f.write(\"%s\\n\" % item)\n", + "\n", + "# display reorganized dataset total duration\n", + "duration_hours = total_duration / 3600\n", + "print(\"total duration = \" + str(f\"{duration_hours:.2f}\") + \" hours\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/TensorFlowTTS/notebooks/tacotron2_inference.ipynb b/TensorFlowTTS/notebooks/tacotron2_inference.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..966d2c3cfec94199a9286ffceac6484f114789f8 --- /dev/null +++ 
b/TensorFlowTTS/notebooks/tacotron2_inference.ipynb @@ -0,0 +1,405 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import tensorflow as tf\n", + "\n", + "from tensorflow_tts.inference import AutoConfig\n", + "from tensorflow_tts.inference import TFAutoModel\n", + "from tensorflow_tts.inference import AutoProcessor\n", + "\n", + "import IPython.display as ipd" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "6561ecce84784cdba2af430c4cbfe8d9", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=3568.0, style=ProgressStyle(description…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "processor = AutoProcessor.from_pretrained(\"tensorspeech/tts-tacotron2-ljspeech-en\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"i love you so much.\"\n", + "input_ids = processor.text_to_sequence(input_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "fa2e575f610a408c8c124fd360b29183", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=127975304.0, style=ProgressStyle(descri…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": 
"d37329f88e8442258f58d9ba57d09eaf", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=4027.0, style=ProgressStyle(description…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "tacotron2 = TFAutoModel.from_pretrained(\"tensorspeech/tts-tacotron2-ljspeech-en\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "tacotron2.setup_window(win_front=6, win_back=6)\n", + "tacotron2.setup_maximum_iterations(3000)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Save to Pb" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:Skipping full serialization of Keras layer , because it is not built.\n", + "WARNING:tensorflow:From /home/lap13548/anaconda3/envs/tensorflow-tts/lib/python3.7/site-packages/tensorflow/python/training/tracking/tracking.py:111: Model.state_updates (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "This property should not be used in TensorFlow 2.0, as updates are applied automatically.\n", + "WARNING:tensorflow:From /home/lap13548/anaconda3/envs/tensorflow-tts/lib/python3.7/site-packages/tensorflow/python/training/tracking/tracking.py:111: Layer.updates (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "This property should not be used in TensorFlow 2.0, as updates are applied automatically.\n", + "INFO:tensorflow:Assets written to: ./test_saved/assets\n" + ] + } + ], + "source": [ + "# save model into pb and do inference. 
Note that signatures should be a tf.function with input_signatures.\n", + "tf.saved_model.save(tacotron2, \"./test_saved\", signatures=tacotron2.inference)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Load and Inference" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._2_layer_call_and_return_conditional_losses_10690) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._1_layer_call_and_return_conditional_losses_31114) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._3_layer_call_and_return_conditional_losses_31344) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._3_layer_call_and_return_conditional_losses_17482) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._2_layer_call_and_return_conditional_losses_31229) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._3_layer_call_and_return_conditional_losses_10896) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._1_layer_call_and_return_conditional_losses_17070) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._2_layer_call_and_return_conditional_losses_30654) with ops with custom gradients. 
Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._1_layer_call_and_return_conditional_losses_30539) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._0_layer_call_and_return_conditional_losses_30999) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._4_layer_call_and_return_conditional_losses_17688) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._3_layer_call_and_return_conditional_losses_30769) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._2_layer_call_and_return_conditional_losses_17276) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._4_layer_call_and_return_conditional_losses_30884) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._4_layer_call_and_return_conditional_losses_31459) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._4_layer_call_and_return_conditional_losses_11102) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._0_layer_call_and_return_conditional_losses_10278) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._0_layer_call_and_return_conditional_losses_16864) with ops with custom gradients. 
Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._0_layer_call_and_return_conditional_losses_30424) with ops with custom gradients. Will likely fail if a gradient is requested.\n", + "WARNING:tensorflow:Importing a function (__inference_batch_norm_._1_layer_call_and_return_conditional_losses_10484) with ops with custom gradients. Will likely fail if a gradient is requested.\n" + ] + } + ], + "source": [ + "tacotron2 = tf.saved_model.load(\"./test_saved\")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"Unless you work on a ship, it's unlikely that you use the word boatswain in everyday conversation, so it's understandably a tricky one. The word - which refers to a petty officer in charge of hull maintenance is not pronounced boats-wain Rather, it's bo-sun to reflect the salty pronunciation of sailors, as The Free Dictionary explains.\"\n", + "input_ids = processor.text_to_sequence(input_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "decoder_output, mel_outputs, stop_token_prediction, alignment_history = tacotron2.inference(\n", + " tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0),\n", + " tf.convert_to_tensor([len(input_ids)], tf.int32),\n", + " tf.convert_to_tensor([0], dtype=tf.int32)\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAhQAAAGoCAYAAAAemnx2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAABJvklEQVR4nO3dd5xcddn//9c1M1uyyW6ym94TQgqhQwhNiiCC/BRERbGCDVGwF7CLt95y337tghgVQQQRbwtRkapgoSQBEiA9pPee3c3WmXP9/pizZBK2zGZnd+bMvp+Px3nszOeUuU5mM3vNp5q7IyIiItITsXwHICIiItGnhEJERER6TAmFiIiI9JgSChEREekxJRQiIiLSY0ooREREpMeUUIjkmJndbmbfCB+fZWbL8x2TiEhvU0IhcpjM7DEz22NmZR0d4+7/cvfpfRlXLpjZWjN7TV+fKyLRpYRC5DCY2STgLMCBS/IbjYhI/imhEDk87wGeAm4HruzoIDM718w2Zjw/ycyeM7M6M/udmf02o3nkXDPbaGafNrPtZrbFzN6bce7tZnaLmf3NzOrN7D9mNsrMvh/WlCwzsxMzjh9jZr83sx1mtsbMPpax72tmdq+Z/SqMZbGZzQr33QlMAP4cvs7n2rmvYWb2FzPba2a7zexfZhbr6FwzO83MngiPX2Rm52Zc6zEz+5aZzTOzfWZ2n5nVhPvKzezXZrYrPHe+mY3s3lslIn1BCYXI4XkPcFe4XZjNHzkzKwX+SDoJqQF+A1x2yGGjgMHAWOD9wM1mVp2x/63Al4BhQDPwJPBs+Pz/gO+GrxUD/gwsCq91PvAJM7sw41qXAPcAQ4C5wI8B3P3dwHrgDe4+yN3/t53b+TSwERgOjAS+kD71leea2Vjgr8A3wvv+DPB7Mxuecb33AO8DxgBJ4Idh+ZXhv8d4YChwDdDYTjwikmdKKES6ycxeBUwE7nX3Z4CXgHdkceppQAL4obu3uvsfgHmHHNMKfD3cfz9QD2T2wfijuz/j7k2kk5Mmd/+Vu6eA3wJtNRSnAMPd/evu3uLuq4GfAVdkXOvf7n5/eO6dwPHZ/yvQCowGJoax/ss7XhjoXcD94WsF7v4wsAC4OOOYO939RXffD3wZeKuZxcPXGQoc6e6p8N5ruxGniPQRJRQi3Xcl8JC77wyf300nzR4ZxgCbDvnDu+GQY3a5ezLjeQMwKOP5tozHje08bzt2IjAmbCbYa2Z7SdciZNakbD3kdcrNLJHFfQB8G1gFPGRmq83shk6OnQhcfkgsryKdkLTJ/HdYB5SQrnW5E3gQuMfMNpvZ/5pZSZYxikgfyvbDQ0QAMxtAutkhbmZtf5DLgCFmdry7L+rk9C3AWDOzjKRiPOkajlzbAKxx96mHeX6nyxC7ex3pZo9Pm9nRwD/MbL67P9rOuRtI10B8sJNLjs94PIF0zcTOsPbkRuDGsCPs/cBy4BfduRkR6X2qoRDpnjcCKWAmcEK4HQX8i3Q/gM48GZ57nZklzOxSYHYvxTkPqDWz681sgJnFzewYMzsly/O3AUd0tNPMXm9mR5qZAbWk7yvVwbm/Bt5gZheGcZSHHVDHZRzzLjObaWYVwNeB/3P3lJm92syODZs/akknGilEpOAooRDpniuBX7r7enff2raR7tD4zs6aDNy9BXgT6c6We0n3LfgL6c6VORV+s38D6YRnDbAT+DnpDo7Z+BbwpbCJ4jPt7J8KPEK6j8eTwC3u/lh757r7BuBS0k0uO0jXWHyWgz9/7iTdWXUrUA60jUgZRbqzaS2wFHicdIIiIgXGOu5HJSK9zcyeBm5191/mO5Z8MbPHgF+7+8/zHYuIHD7VUIj0ITM7J5w7ImFmVwLHAQ/kOy4RkZ5Sp0yRvjUduJf0aIyXgLe4+5b8hiQi0nNq8hAREZEeU5OHiIiI9FikmzxKrczLGZjvMEREpMhYWRnJQQmSlc7EQbtY+WLTTncf3vWZh+/CVw/0XbtzMyr6mee
bH3T3i3JysSxFOqEoZyCn2vn5DkN6wgzU7CYieRQrL4dYDG9NT1JrpSXsuexYWiqNqrVJBi7cwEp+vK6349i1O8W8Byfk5Frx0SuH5eRC3RDphEKKgJIJEckzdyc2cCAtx05g9TuN8g2lpEqd0U+kqJi/mlRdfd/EAQQEffJavUEJhYiI9Eux8nKIx2k6ayZ1YxNMumolAx6YRtOIgBm37MSamvHqwbB3X75DjQQlFCIi0v+YsfX9J4HBvmkpEvuh4Q2tjN/7BJZIkArStaex8jI8meziYrnipFw1FCIiItFgRsNls9l7dBIbmGTKz53S1dtIhjURmQlE0NDQZ2Glmzyi2wyshEJERPqNWGUlNreS0SWrqLq8hNTuPVgiQaD+XD2mhEJERKRAqFOmiIhILvTCUPJYRQU2dhSr3z2SlgktjP9OnNRf5r2835ub06+bZ46TinBNiRIKEREpHLn+g2qGlZWx70fQuswZPL+Min8semU9QIH8IVcfChERkQJiJaXYzClM+flqhpZu59+fOoLpzyzFW5MEjU3pg2JxCHIzM6UooRARkWJiRmLCOHadNZY33/AwD370HEo37SOxaiGpQ5OHAksmHEiphkJERCT/4lMmMeO363h6Rwl/P3UUJaklpJqa8h1W1tTkISIikmdWUkrNr/aw7PIJDFyzRkNB+5gSChERiT4zxv6rlC3vG0GwYW3BdLLsDgeN8hAREckXKytj5U0n0vLVJGUrn8dbW/Id0mGL7iwUEMt3ACIiIocrPmQwVY8OoqTOKHt0YaSTiahTDYWIiERSfGgNaz88gxHfamXyP18k6LNFvHqH45Ee5aEaChERiZxYRQVXPfkMyQqn7G/PEtTX5zuknnNI5WjLB9VQiIhIJMSrq2F4DV5aQsnNe7nj7NM40l965fwSEZVebTS6VEMhIiIFLz5sKOs+fBRbLhjJW373GPu+PYHktu2ktm3Pd2gSUg2FiIhIQTBS5H+RssOlGgoRESlo8WFDOemR7bhB/QTnd8dNoPz+ZyI510RnHAg8N1s+qIZCREQKxyHLl9dffiqp9+7imatGMrl2M15bR0pDQwuSEgoRESkcbcmEGXbCTBrfvZfKn9QQLJxH0A9WB41yk4cSChERKRyxOImxo9lx3ngGvmszo69qJajdnB79UOTJRHq1USUUIiIiPRYfPpQV144nNnk/NTcMIrl5SdH1lehM4EooREREeiQ2cCArPnsEEx5oYcDi7SS3bM13SNINSihERCT/zFj2vZkMeT5GvDnol8mEmjxERER6KFZWxpBFJYy6fRHe3BzhFS0On2OkIjybQ3QjFxGRotF89jGMvnclnkziEV/kq79SQiEiInkVq6xk3cUJUjt34s3N+Q4nrwK3nGz5oCYPERHpe+EEVlZSyqovHcPYf6T61WiO9kS9D4VqKEREJC8skWDfW04i3mQMmDs/Y0d0/6j2Z6qhEBGRPpcYM5pdr55A8+V7mfy2tQT9vHYizUh5dL/nK6EQEREpAA4EEW44UEIhIiJ9xhIJsBhLvzCeitF1jH/HBoJDO2L249qKKPehUEIhIiJ9IlZZSayqEmIxhj8dY+ifNpBqbOrXCUQx6bW6FTMrN7N5ZrbIzBab2Y1heY2ZPWxmK8Of1RnnfN7MVpnZcjO7sLdiExGRvtd0xnSaZoxmyVdHMezv60nV7wcPXnlgP+2U6Z7uQ5GLLR96s4aiGTjP3evNrAT4t5n9DXgT8Ki732RmNwA3ANeb2UzgCuBoYAzwiJlNc/fiXl5ORKQfiB89nZ3XNDCovJmhfxhBctPm9A7VThwkUJPHK7m7A/Xh05Jwc+BS4Nyw/A7gMeD6sPwed28G1pjZKmA28GRvxSgiIr3MjOaLZ3HGN57GLptEsGMXQcPqzhMJJRmR1Kt9KMwsDjwDHAnc7O5Pm9lId98C4O5bzGxEePhY4KmM0zeGZYde82rgaoByKnozfBEROVyxOPGqQSz99jSOn7aehRe
NIti1WdNqdyI9sVV0R3n0auTunnL3E4BxwGwzO6aTw9ur53lFmuruc9x9lrvPKqEsR5GKiEjOmJE89wTGP9RC+cYSWt7QSGr7DiUTXVIfii65+14zewy4CNhmZqPD2onRwPbwsI3A+IzTxgGb+yI+ERHpOTvlWNZf7ySeqOK4tyxh9WdnMOGfT5JSE0ZWoj4PRW+O8hhuZkPCxwOA1wDLgLnAleFhVwL3hY/nAleYWZmZTQamAvN6Kz4REckdKyvDb9pD6eNVNMxqYNdrWog9/pz6Q/QjvVlDMRq4I+xHEQPudfe/mNmTwL1m9n5gPXA5gLsvNrN7gSVAErhWIzxERHogXICrV1+ipJTYkMGc8egGnrhiIiOXPsnIm2MEgT6+D0cqTyuF5kJvjvJ4HjixnfJdwPkdnPNN4Ju9FZP0kT74EBORLPTF/0MP2HDlVP7zjqH4ilXp19R3wcPimDplihxEyYRIvxEbNJD6I5Owaq06XfZzmnpbREQOS6y8nPEPtZA67RmC9ma8lG4LtNqoiIiI9ITmoRARkX4nMW4sYx5LsP7qye2vxyH9jhIKERHpltbXzuIdjz7Fxo9NJnh+edgRU32nesoxUp6bLR/U5CEiIp2yRAJPpbB4HIAdx5Vy16nHYPuX4xoemlNRnthKCYWIiHQqPnY0DUeNYveMEsou2EH8QUjtq8USJfkOrai4k7dps3NBCYWIiLQvFsdPPYa15w6k6uxt7N9TSenvhzH67mcJ3PGUaifkACUUIiLSrhVzTuStJy9g/dwzqP5owOC1L+KpFEFbfwk1d+SYEbS7TmY0KKEQEZGDxKurWXvdUQxcBc9/spIj4otJ7d2X77CKnqMmDxERKQZmNF80i4br9tK4tYXpH3mBoLk531FJRCihEBERABITxlH9pZdY9+NpTPvtfI3gyIMoT2ylhEJERLCTj+Yjv/09Pz7mBKqansp3OP2SYwQRXm00uqmQiIjkxLqvn84b73yMHx11nBb4ksOmGgoRkX6s4bJTaR6R4o9HjwBvyXc4/Z6aPEREJHLi1dU0f2A3096wWlNnFwBHq42KiIhIjxmpCM9DEd1USEREDk8sTqy8nPc8tZDh1zRqgirJCSUUIiL9TN1bT+EHyx/lzvPPILlhY77DkVBbk0cutnxQk4eISD9hJaU03z8aT+7g40ddQNCwKd8hySHU5CEiIgXvooXbabxjNEPeup2gsTHf4UiRUUIhIlLErKSUdTeewX+vmccD7zmTwXfPJ6ir06iOAuRuavIQEZHCE6+uZsedw2lZm+IrZ1yCb1mc75CkC1FeHCy6keeaRbfdSkTkUJZIMPJvSYb8z0Cmf2YRyS1b8x2SdMGBIFzCvKdbV8zsIjNbbmarzOyGdvYPNrM/m9kiM1tsZu/t6ppKKNqo+k9EikisooJNnzqCxMJVBE1N+Q5HCoiZxYGbgdcBM4G3m9nMQw67Flji7scD5wLfMbPSzq6rJg8RkSIUTJ+IPfUiHo/nOxTJmvVVk8dsYJW7rwYws3uAS4ElGcc4UGlmBgwCdgOdLvSiGgoRkSITr65m1dsGggd4SpNWRUV6HgrLyQYMM7MFGdvVGS81FtiQ8XxjWJbpx8BRwGbgBeDj7h50Fr9qKEREikkszsobZjDu70lwx+JG538GpEjtdPdZHexrr5PFoe3+FwILgfOAKcDDZvYvd6/t6AVVQyEiUiSspJTW806gpN6oeGYdAB6of1iUpIjlZOvCRmB8xvNxpGsiMr0X+IOnrQLWADM6u6hqKEREisRL3zyZ6bPXMumNDaT27gMgNqCcYP/+PEcm2XBebq7obfOBqWY2GdgEXAG845Bj1gPnA/8ys5HAdGB1ZxdVQiEiItKPuHvSzK4DHgTiwG3uvtjMrgn33wr8F3C7mb1Auonkenff2dl1lVCIiERdLM5Ldx5HbAMEb2l5uXYCIGhoyGNg0l1BH/VEcPf7gfsPKbs14/Fm4LXduaYSChGRCEuMH8e
WmwdS/p8BjP/RQlKHrtGhOXYiwx1SfdPk0SuUUIiIRJAlEgSnHI19aztlc6oZ/psnCZQ8RF4f9aHoFRrlISISQVs/Mpszb51P85dGMvj3z6kmQvJONRQiIhETO/4oamc18eTsSuKpF7GyMry5Od9hSQ+lR3lE93u+EgoRkQiJlZdTefN2pl/YRNDcDGa4hoUWjVQWC3sVquimQiIi/U0szm0rHqH+/dUHRm+oqUMKhGooREQiIFZZybIfTuO9RxieXJXvcKQXtK3lEVVKKERECo3ZyzUPlkgQHzuaPaePZdDiGJ7sdMFHiTT1oRARkVxyh1gcixl7r5jF/jfXsr+2hekfXobW+SpugfpQiIhIriTGj2PjDady0aKdpK7YzZA7Kznqa7s066UUNNVQiIgUCjPiI4az/odVNG5K8cC7zmToiyvBV5NMpfIdnfQyzZQpIiI9YokEselTWP22Gt572SP8+WuTGf/3FaT27tUojn4myn0oei1yMxtvZv8ws6VmttjMPh6Wf83MNpnZwnC7OOOcz5vZKjNbbmYX9lZsIiIiklu9WUORBD7t7s+aWSXwjJk9HO77nrv/v8yDzWwm6TXZjwbGAI+Y2TR3Vz2fiBQtKynlmyv+zcKmjXzjsUt4/M3HMXDlPFKqmeh30jNlqsnjFdx9C7AlfFxnZkuBsZ2ccilwj7s3A2vMbBUwG3iyt2IUEcmn+MgR3Dr/D1xz4iUE+2qZzrOkNCy0X9Mojy6Y2STgRODpsOg6M3vezG4zs+qwbCywIeO0jbSTgJjZ1Wa2wMwWtKK560UkmjZ/7gze+vhCrp56Pqk9+/BkUnNMSKT1ekJhZoOA3wOfcPda4CfAFOAE0jUY32k7tJ3TX1Hn5+5z3H2Wu88qoax3ghYR6Snr/JvmwE0B977q2PSiXoFaduXATJm52PKhV0d5mFkJ6WTiLnf/A4C7b8vY/zPgL+HTjcD4jNPHAZt7Mz4RkV7jftCMl20JhsXjtJ5zPIkmJ9i7L48BSiGK8iiPXksozMyAXwBL3f27GeWjw/4VAJcBL4aP5wJ3m9l3SXfKnArM6634RERyri2BMCM2aBAE6XktY5WD0vvKy9h+7hhSl+1m1JXbSQXqeCkZ8li7kAu9WUNxJvBu4AUzWxiWfQF4u5mdQLp2Zy3wIQB3X2xm9wJLSI8QuVYjPEQkUtyJV1Vhg6tonjKChlGlNA6L4QZNw53WQU5Q1cqMLxmpPXvyHa1ITvXmKI9/036/iPs7OeebwDd7KyYRkd4UP2oqy64ZyqQ/t9IyOP3xOmhTiv2jYgxbFBBvcQau2EtqxUt5jlQKkRPtUR6aKVNEJBdicTZdNJwJD7SSGhBnwM4WGkaUYoEzZGULAKV7m0kt19Lj0jE1eYiI9HPxKRMBiLUG1I4ppaUyRvWiPdDSSuqldRCkXjlsTaSIKKEQEekpM6748z+55/VnkXppLSOGDCGoqyOVSmktDsla27DRqFJCISLSA/GpR7D5daO4+9gSvHUNAN7crEmq5LBEOaGI7oBXEZE8i1VUsPQL1dSf1oC3trxc7i0tnZwlUpxUQyEiIlIAor44mGooRES6y4yGy07lG4sfY/hjpUz9wIqXywFswIAup94WaU+A5WTLB9VQiIh0U+Olp7D5zS18ccZZVLfOI2hbiyPsgBnU1eUxOoksj3YfCiUUIiJZildXw+jhlH10C9M+NYigpUWjOERCSihERLJgJx/Nso+VYbtKmX7ZEoKGLVii5KDOmAcONiUa0m0aNioiUuRiAwfy0mfjDH2wjORAI1Vbm97RUT8JJRNymJRQiIgUKUskuG7RM/zvJ49m4BPLCfbVHpjxUomDyMuUUIiIdCBWWcm5T2zllledQ/nWeWj5Y+lNUR82qoRCRKQ9Zuy9dwT/eO9EfPvSfEcj/YQroRARKR7x6mq2XT6DQbek8GfmH7xTHS6lF0V5+XJNbCUiEooPGUzt36bwxieWs/esJgY+viy9IzOBUDI
h0i7VUIiIAInJE6n8dR07fjGC+x6bzfS6jaTq6pRASJ9xTWwlIhJhZqz63qnc/Ppf8u0PvYuavz9F0l1NG5IX6kMhIhJFZuy45jQS++F7044h4c8eSCKUTIh0ixIKERGRgqBhoyIikROvrmbnJTOoP2c/U65acWCBL5E8UpOHiEiEWFkZaz5+FC1TGpl21QqCpqZ2DlIfCpHuUEIhIv1OfMRwWqoCZnx5F8mW1vYPUjIhfUyLg4mIRIQlEtS+eRZ7L69n6hULSAYOHuQ7LJE0j3Yeq4RCRPqF+NAafPwo6q6opfruSjyZzHdIIq8Q5ZkylVCISL+w7kMzaJjUysyPbia5SWtziOSaEgoRKXqJyRNpHhow+u9xkps2R7teWYqWo1EeIiIFKzFpAh986FFuPeZovDWpZEIKWLTnodDiYCJStBITx/PRRx7kp8ceA6mUOmCK9CLVUIhIUUqMHcMHH/kH359xLJ5sZ54JkQIU5Qo0JRQiUnTiM6cx5peb+Onxx+HJhnyHI5I19aEQESkEZsQHV7HmxjKC647AG17Id0QiWXOPdkKhPhQiUjTiQ4YQ+1MFI28vxxe8mO9wRPoV1VCIiIgUCI3yEBHJMz/jeK6Z9zT13x5H+SPPR7t3m/Rb7rnZ8kE1FCISeS0XzmLwFzZw68mzKKtbgCuZEOlzSihEJLrMaL3gZI76xousu3wEqdpt+Y5IpEei3ClTCYWIRFbT609h09tbKDm1FYL1YKamDoksxyKdUKgPhYhEUnzkCCo/vYEjv5eCIJUuVDIhkjeqoRCRyLGSUl764SgmX7gcb92umgkpGlH+Lc4qoTCzUcBs0vc639239mpUIiLtsEQCLIafOJ2RvymHVAqLGZ6M8sewSKjYJ7Yysw8A84A3AW8BnjKz9/V2YCIiL7P0h2zyrOOov+REam9sYNC/X8KTSTyVynNwIjnkOdryIJsais8CJ7r7LgAzGwo8AdzWm4GJiLSxeBzicdZdVEYwpolp15eQ2rU7vVNNHSIFIZtOmRuBuozndcCG3glHRKQd8TitZx5DzYsw/Vv7CV5cCaY+5VJ83C0nW1fM7CIzW25mq8zshg6OOdfMFprZYjN7vKtrZlNDsQl42szuI12Rcikwz8w+lb55/24W1xAROSzxoTWs/Ox0TnzVChJn7yXlgWolpGj1xa+2mcWBm4ELSFcazDezue6+JOOYIcAtwEXuvt7MRnR13WxS/JeAP3GgVeY+YAtQGW4dBTzezP5hZkvD7ObjYXmNmT1sZivDn9UZ53w+zJaWm9mFWcQmIkUsXlXF+59cgAVQf3FreniokgmRnpoNrHL31e7eAtxDurIg0zuAP7j7egB3397VRbusoXD3GwHMbKC77+9GwEng0+7+rJlVAs+Y2cPAVcCj7n5TWM1yA3C9mc0ErgCOBsYAj5jZNHdXjyuR/iYWx089hit++VduO/t0jtj1DKnWlnxHJdKrnJyO8hhmZgsyns9x9znh47Ec3HVhI3DqIedPA0rM7DHSlQc/cPdfdfaCXSYUZnY68AtgEDDBzI4HPuTuH+nsPHffQromA3evM7Ol4U1cCpwbHnYH8BhwfVh+j7s3A2vMbBXpLOrJrmIUERGJPAdyl1DsdPdZHexr70UOrfpLACcD5wMDgCfN7Cl3X9HRC2bT5PF94EJgF4C7LwLOzuK8l5nZJOBE4GlgZJhstCUdbe0y7WVMY9u51tVmtsDMFrTS3J0wRCQKzKi94hRWvb2c3154Oslt23HVTojk0kZgfMbzccDmdo55wN33u/tO4J/A8Z1dNKtu0u5+6KiOrJshzGwQ8HvgE+5e29mh7b10O7HMcfdZ7j6rhLJswxCRiGi+eBY7TjTGPeokN2xWnwnpV/po+fL5wFQzm2xmpaS7G8w95Jj7gLPMLGFmFaSbRJZ2dtFsRnlsMLMzAA9f+GNdXbSNmZWQTibucvc/hMXbzGy0u28xs9FAW0ePbDImESliNusYJn95GbG
vTGfA8xtIBupCJf1MH+TP7p40s+uAB4E4cJu7Lzaza8L9t7r7UjN7AHgeCICfu/uLnV03m4TiGuAHpJsfNgIPAZ32nwAwMyPd92LpIUNL5wJXAjeFP+/LKL/bzL5LulPmVNIzdIpIsYvF2X3VbM68dj5LT3FKggUk8x2TSJ/ru9VG3f1+4P5Dym495Pm3gW9ne81sEorp7v7OzAIzOxP4TxfnnQm8G3jBzBaGZV8gnUjca2bvB9YDl4eBLzaze4ElpEeIXKsRHiLFLzF6FNteP5n9r61nxcU1EHQ5Ok1EClA2CcWPgJOyKDuIu/+b9vtFQLrXaHvnfBP4ZhYxiUgRsESCNR84glSpM+XDm0nt3JXvkETyK8JdhjpMKMLhomcAw9tmxQxVkW5zERE5PGbEKipY9uMZVD0PY+c3HlibQ6S/ivhqo53VUJSSnnsiwcEzYtaSXnVUROSwxMrKWHnjsQx5Jsboh7YRrNmAazSHSKR1mFC4++PA42Z2u7uvAzCzGDCoi+GfIiKdan7V0YxYAEMeWExqnz5ORF4W4bw6m3kovmVmVWY2kHSHyeVm9tlejktEipSVldHwqX1Uz9tKqrY+64HzIv2D5Wjre9kkFDPDGok3kh5iMoH06A0RkW6rf/0J1Fy5j2DbjvRiXyJygOdoy4NsEoqScIKqNwL3uXsrka6UEZF8iQ+twT+4g2DPHiyuvt0ixSSbhOKnwFpgIPBPM5tIumOmiEiXLBF21TJj0gMNVL15G55KETQ25TcwkUIU4RqKbJYv/yHww4yidWb26t4LSUREpB/K7Wqjfa7LGgozG2lmvzCzv4XPZ5KeMltEpGvxOLHycva85zReunZqumbC/cAKohbdD1AROSCbJo/bSS8gMiZ8vgL4RC/FIyJFJj5sKMtuOYYdp6ZgwZJXdsTUCA+Rl/XRaqO9IpuEYpi730t6tTHcPUk3li8XkX7KjPjQGrZcMpFBy0qZ/stGjeoQ6UqE+1Bkk1DsN7OhhCGa2WnAvl6NSkQib+03TuPYR3az9+iAMd9+Ep//Qr5DEpFelM3iYJ8ivbT4FDP7DzAcTb0tIp1ofe0sUuXOovcdzdSF89SsIZKtCHfKzGaUx7Nmdg4wnfT0W8vDuShERF4hVl5Oy6d2M/XyHQT7G5RMiHSDRfi/S5cJhZnFgYuBSeHxrzUz3P27vRybiESMlZWx4r9PYMZHtpKsr1cyIdIdeez/kAvZNHn8GWgCXiDsmCki8gpmbPvgyQzYDsk165RMiPQz2SQU49z9uF6PREQiy0pK4fhp1J3eyPRPbyapZELkMFhx96EA/mZmr3X3h3o9GhGJpD1XnMzED61g6msbSTY05DsckeiKcC6eTULxFPBHM4sBraQ7Zrq7V/VqZCJS0KykFDwgNmgguy5qYti7BhI07Mp3WCKSJ9nMQ/Ed4HSgwt2r3L1SyYSIxAaUs+33R3Lr839l+hd2ktq4Od8hiURfkU9stRJ40V2NoiKSlpg0gbEPp0jcV80HJ55Fct0GPJnMd1gi0RfhhCKbJo8twGPh4mDNbYUaNioiIiJtsqmhWAM8CpQClRmbiPRDdsqxfPbRv7Dp3aMYeuczGh4qkitty5fnYsuDbGbKvLEvAhGRwmYlpSTPPIZjvvM83zryeOKVO/CkJs0VyaWinCnTzL7v7p8wsz/TTouMu1/Sq5GJSP6ZgcWI1wxhyy+GAw2suHAw+C5SdXWqnRDJtQj/l+qshuLO8Of/64tARKTwxIcMYc2ccdwx6zY+8t8fY9hzdaR2hkNDlUyISIYO+1C4+zPhwxPc/fHMDTihT6ITkbyJjxzBV555hPLHKvnqUWcx9GdP4gtezHdYIlKgsumUeWU7ZVflOA4RKSRmvObRVdx4zFmM+MnTBE1N+Y5IpF8wz82WD531oXg78A5gspnNzdhVCWg6PJEiFR8ymFU/mcSjF+4gaNBkVSKSnc76UDxBeg6KYaRny2xTBzzfm0GJSH7Eq6qovWc
og/5UcaCvhIj0nWJcHMzd1wHrSE+7LSJFLl5dzbefu59rPvUJRv19Canm5q5PEpHcyeMsl7mQzUyZIlLkmv+/U/jo937Lp6efy8DUM6Q0jbZIfkQ4ocimU6aIFLHEqJHw8R3cds4ZeHOz1uQQkcPSaUJhZnEz+3VfBSMifW/DO6dQ8e4Gklu25jsUkX6vKEd5ALh7ysyGm1mpu7f0VVAi0vusrIx4TTXJCgh27c53OCICkW7yyKYPxVrgP+HQ0f1thVptVERERNpk04diM/CX8FitNipSBKyklD1vPYmvPzGXSb/dpn4TIoXCc7TlQdarjZrZQHff39XxIlLY4kNr2P6m6fglu/jCEaeCv5TvkESE/PZ/yIUuayjM7HQzWwIsDZ8fb2a39HpkIpJ7sThrPzKD5ov3Mfwt67TAl4jkTDZ9KL4PXAjMBXD3RWZ2dm8GJSI5ZkZw5vE0jCnHDca+dRXeqn7WIgWnGGfKzOTuG8wOuslU74QjIjlhBu7Ehw2FllaSxx5B61f2UploZciVRlLJhEhhinClYTYJxQYzOwNwMysFPkbY/CEihW3Vp6dRftRejqzZRONHhuLLNpJUB0yRglXUfSiAa4BrgbHARuCE8LmIFKj4sGGctqiV8h1G5V1VNL+7nODFZRrNISK9JptRHjuBd/ZBLCKSC7E4u+8YzBMfG8f4FavxxkaSe/flOyoRyUaEayg6TCjM7Ed0cmvu/rFeiUhEDlusspLVNxzD6JuTxP65gFSiBE+25jssEclGEQ8bXQA8A5QDJwErw+0EsuiUaWa3mdl2M3sxo+xrZrbJzBaG28UZ+z5vZqvMbLmZXXiY9yPSr6395LG0VgUMePQFcE+P5NDQUBHpAx3WULj7HQBmdhXwandvDZ/fCjyUxbVvB34M/OqQ8u+5+//LLDCzmcAVwNHAGOARM5vm7hpNIpKlWEUFrYMDpn58AUGg/zoikRTh/D+bTpljOHiq7UFhWafc/Z9AtisOXQrc4+7N7r4GWAXMzvJcEQE+vGghU7+4EJRMiERXMU+9DdwEPGdm/wifnwN8rQeveZ2ZvYd0k8qn3X0P6REkT2UcszEsewUzuxq4GqCcih6EIRIxsTgWj+OpFFaSIFZRATHDSkpY/aEp/PS8cQTNm/IdpYj0QLH2oQDA3X8JnAr8EfgDcHpbc8hh+AkwhXQ/jC3Ad8Ly9qYGa/ef1d3nuPssd59VQtlhhiEiIiK5lNVMmaSbH84KHzvw58N5MXff1vbYzH5GehVTSNdIjM84dBzpVU5FJBQfPhQbWAGtSVIjh9BUU07j0AT7x8SwAJIbNuY7RBHpx7pMKMzsJuAU4K6w6GNmdoa7f767L2Zmo919S/j0MqBtBMhc4G4z+y7p/hlTgXndvb5IMbOSEjxmpMbWQOCU7m4iSAxgwK4kA1buQFNWiRSBCDd5ZFNDcTFwgrsHAGZ2B/Ac0GlCYWa/Ac4FhpnZRuCrwLlmdgLpf7K1wIcA3H2xmd0LLAGSwLUa4SGSIRZn4+UTGfPobmKNrdj+JiyZIlFZSuOwEsp37cl3hCLSz2Xb5DGEAyM2Bmdzgru/vZ3iX3Ry/DeBb2YZj0jxCxf4wozgrOMY84+97J9cxYBtTfjAUoLSOKXb91P6wk5StbX5jlZEeiriE1tlk1B8iwOjPAw4my5qJ0SkB8yIDRgA8Tje1ExscCVbTxjAuLm7KakfwL4pFcSSULY3idXuJ7l7b74jFpFcKeaEwt1/Y2aPke5HYcD17r61twOT/sESCS1YlcFKSomPGEZQXUVy6ADWva6cwSsgKIPUsCrKttZTvqwOb2mFmJHatVvzTohIQcimU+ZlwN/dfW74fIiZvdHd/9TbwUnx85T+GAJghsXjbPrkLMp3OrvPa+LsI1ex+qmj2fuaJo784EqsJAFlZXgqhTe34I2NeBDhrzMi8koR/i+dTZPHV939j21P3H2vmX0V+FOvRZVvbW3XIn3BjJW/PIkp47dTvzJ
J/WSYeFeCLQ83cmTyqXQfiszfx1g8/VM1EyJFxSj+PhTtTX6VbWfOaFIy0Xf0b822605n1N8CGlNjOOqRZVBSSmrXbrwtYTj030iJhEjxivBHYjaJwYJwfoibSd/qR0mvQioihyMWx2KWbu6xGPtPb2D0fzcRLFlJSsmCiERUNgnFR4EvA78lXSPzEHBtbwYlEnlh0kA8TqysDEpLIB7HYjEYUA7NLXhDIz5hFEd+YAVBUzOkp3oRkf6q2IeNuvt+4IY+iEWkOMTiJEYMw6sGEVQNIHDH4zG8JM7umQNoHWikymHoi0k2XGRMva4h3xGLSKHoo4TCzC4CfgDEgZ+7+00dHHcK6cU73+bu/9fZNbMZ5TEN+AwwKfN4dz8v68hFipkZsbIygpZW4jVDwGI0Hjee5IAYLYNiJMuNoBQsgHgTxONOc7Wx5wN11Px1yEHXebm/REePRUR6yMzipLsxXEB6La35ZjbX3Ze0c9z/AA9mc91smjx+B9wK/BxQA6+IiEhv6ZvvDrOBVe6+GsDM7gEuJb38RaaPAr8nPQ9Vl7JJKJLu/pNuBCrSf5hhpaUkTz2K0vW7aZlQQ2JPI5Z0yna1UFobI4gbFkDL4ATlu1so2dkAW3eAGamdSw9cK7NGIpNqJ0T6jRz2oRhmZgsyns9x9znh47HAhox9G4FTD4rDbCzpRTzPI4cJxZ/N7CPAH4HmtkJ3393xKSLFzxIJYtXV2KAKbFcjW187hmS5kRw4gIotTqLZGbC9ldK9zVgyoGxbCqtvwPfsI1VXh8Xj7V9YCYSI9NxOd5/VwT5rp+zQD57vk54ZO2WHfsnpQDYJxZXhz88e8sJHZPUKIkXISkrZ/c6TAdgzE6pWwZ5jAqpfiDHqyXriu+uhJAGtyfTojv2NBLv3kGpsfDlh0JTjIvIKffN9YiMwPuP5OGDzIcfMAu4Jk4lhwMVmluxsluxsRnlM7naoIsUoFid+xASCIQNp/lYdOze14I1xahbGKdnvTL/+BYjHSZ50JIn6BoL9DXhTc3q+CQ9U8yAinXP6KqGYD0w1s8nAJuAK4B0HhZLxt9/Mbgf+0tWSGx0mFGb2OXf/3/Dx5e7+u4x9/+3uXziMmxCJplictf81m6qXIHnpHirmjGLGc9ux5la8fj9B/X6C1hYwI/6fF0gRrlOiJEJEuqEv5qFw96SZXUd69EYcuM3dF5vZNeH+Ww/nup3VUFwB/G/4+POkR3u0uQhQQiH9QmL8OHacN56KTUbLYBj99Tg8/xyp1pYDB7W1MbofWPBMyYSIFCh3vx+4/5CydhMJd78qm2t2llBYB4/bey5SlFbfdDqT72tgz4WNTL2xHrbvwpNJgmTrwQcemjwomRCRwxHhj47OEgrv4HF7z0WKTstFp1C6z9hxQzPTPrQHb23Fx48keGF55wmDkgkROUzFOvX28WZWS7o2YkD4mPB5ea9HJpIvZsSH1pD47FYmfiiB3d5IcvvO9L49+5QwiIi0o72lyQFw97i7V7l7pbsnwsdtz0v6MkiRvuRnHM8X5z1M6XtSpF5aS3LL1vSS4W2biEhv8RxteZDNPBQi/UZ8yGD2fbmWbxx7FsH+Q4dli4j0ojwmA7nQYQ2FSH8TGziQTyx4gqrPlBLs35+xo4MZLUVEcshyuOWDEgoRwE48mtfPX8/3Tz+XYMnKg3d6kJ+gREQiRE0eIiIihSLCTR5KKKTfi1dXs+GCwfz1zaeR2rHylQdoVIeI9JEoDxtVk4f0b2bsubuG1oGQWrYq39GIiESWEgrpt2IVFdy1/t/EfjmciTc+rZoIEck/DRsViZ4V3ziOd08NGNSsZEJECkSEP4qUUEi/tOVTZ1CxGYKmpnyHIiJSFJRQSP9ixrqvnY7PqGfKRzaieS9FpGB4tDtlKqGQ/sGM+JAhrPn4UTSPTDLtihfwsrJ8RyUicjAlFCKFy0pKabrgeDaflQB3pn98Ee5O0Nyc79BERA4S5RoKjfKQ4hWLEx8+nF+
v/gdTv7qEIAFTf7oRb0sk1BFTRCRnVEMhRSleXc3K62fw5Tf+jndNPgdPNTDFnySZ78BERDoT4e85Siik6MSrqlh5/QziLfDbC07DkxvzHZKISFbU5CFSQG5Y+E/GP9LCET9aRXKDkgkRkb6gGgopKvGpR3DTydWUx9aQ2rU73+GIiGQvj7Nc5oISChERkUIR4YRCTR5SNOzEo7nkvqdJ7d2LlZfnOxwRkW4x0n0ocrHlg2ooJPrMWPHTWcQGJJl75jTwPRAE+Y5KRKRfUUIhkZaYNIGlnxzDwJdi1CwzUnv2AJDctiPPkYmIHIYIN3kooZDIsrIyqu+upfz+cYz/wbMHz3zpqqEQkeixCE+4p4RCIis+djS7P1DKxNXP4qkAi8fxZHrqKkuU4K0teY5QRKT/UKdMiaTUuSex95YYvno9QUsr3trycjIB4MnWPEYnInIYPIdbHqiGQqLHjD2frqfmG1UETas7OCYGrsXJRSRaNFNmO8zsNjPbbmYvZpTVmNnDZrYy/Fmdse/zZrbKzJab2YW9FZdE35gnBzHiS3FiT7zQ4TEWsz6MSEREerPJ43bgokPKbgAedfepwKPhc8xsJnAFcHR4zi1mFu/F2CSiEuPGsua/ZhA8v+xAx8vYK39VMps/REQiI8JNHr2WULj7P4FD5z6+FLgjfHwH8MaM8nvcvdnd1wCrgNm9FZsUEDtQk2CJBMTixCoriQ+tIV5VRay8HMzwM47nwytX8dnH/0b5g8+FTRrh/xqN6BCRIqGJrbI30t23ALj7FjMbEZaPBZ7KOG5jWPYKZnY1cDVAORW9GKr0GTMsUUJs6iRsTy3ByBpqj6wkVWLEks6gdQ3Muvk55lzyOoJV6/DkIaM32htmZdZ+uYhIIYvwx1ahdMpsr8G73X9Wd58DzAGospoI/9MLgJWWEps0nvqjaijb00p8YBm4EySM+nEx6o9q4U3HL+a50ysImlZmf2ElEyIifaqvE4ptZjY6rJ0YDWwPyzcC4zOOGwds7uPYpK+ZEascxNbzhhNrhQGbG4nVNkLMKN9ZxqANKRJz61m6ZRBBc12+oxUR6V15bK7Ihb6eh2IucGX4+ErgvozyK8yszMwmA1OBeX0cm4iISH5FuFNmr9VQmNlvgHOBYWa2EfgqcBNwr5m9H1gPXA7g7ovN7F5gCZAErnXXJALFzMrK2PbBk2kdCCX7IWXg8Rgk4hAEDFi+DYDU9h14S4uaMEREClyvJRTu/vYOdp3fwfHfBL7ZW/FIYVnzlZOwqXWMmVNK6d5mYivWs+/Co2gYPpCyfQE1j6wmtW171xcSESkSbcuXR1WhdMqU/sCMWFkZfvQUcJj0zuUAeGsLKTMGP7KCwaOHY/sbCfbVaqSGiPQ/Ef7MU0IhvS8WJzawgp2XH8NVn/kLm5obiL1mGKnWFqys7OW5KFL7aom1tODuBC2tkf6PJSLS32hxMOldZuy+ajaz/72b4LJd/OWtZ7LwvKGkdu8BID5mVDpxcAcPCBqbCJqaXz5XRKQ/0cRWIu2IVVSw7AdHUza4nqc/cBLDn1tKcMiU2Ml1Gw80bbhrQS8R6b/yOEIjF1RDITlnZWUQi/Ou55ZTsaaEIz60EZ//Qvvra3igpg0RkZAFudnyQTUUkluxOLHKQdSecyR3/X8TGL9uAanWlo6PVzIhIlIUlFBITsWOmcr+iVUM+9haGs/Zlu9wRESiJcLfsZRQSM4kxo1lzVcSxOclKD9Xc0iIiHSX5qEQAU78y3qaPj0KvFlNGSIi/YwSCumZcLKq4K/DePYtJZSsewHvrM+EiIi0z4n0lzElFHL4zIgPGcKyb0xj9E+NyrUL8JSGfYqIHC41eYiIiEjPRTih0DwUctjilZUsvelIhs2LUXnfc+l5JiJcXSciIodPNRRyWKyklJVfPJrKFcbQhXsImpvzHZKISKRFfbVR1VBIt8WOmcGIf5YTS8KYH8wjeGFFvkMSEYm+l5cgyMGWB6qhkOyZETt
uBiff8SL//uJpTH5wfvvTaYuISL+jhEKyY0bDZbPZcXwcO384Zbvm41oNVEQkp6Lc5KGEQrpmRmLyRLafFKN0HwT7atPl6oApIpJbEf5YVUIhXYoPGcKW75cx+qdJyh9apGYOERF5BXXKlM6ZcfW8+Qy+pSqdTGgWTBGRXmOemy0fVEMhnWq4bDZzzhhG6a5nsdISvDXfEYmIFCkHgui2eaiGQl4hPmwolkjAacdRf+U+Ujt3AhA0NeU5MhGRIuc52vJANRRyQDhqwwYNpOWESYz5+ksk3jqQpDu41ugQEZGOKaEQAGIVFQDYuNFs/nYJ+/bAgLcNIrlpU54jExHpPzRsVCItPv1I1r1pBKPmNbP5VWWM/V4TI+uaSG7cpKGhIiJ9KcKfuepDISIiIj2mhKKfi089gt2nDGPQJmftewMSjRB//DliKzdEOlMWEYmivho2amYXmdlyM1tlZje0s/+dZvZ8uD1hZsd3dU01efRTlkhgZWXs+xFs29LKEb+G6h8kia1eRsqdVF1dvkMUEelf+miEhpnFgZuBC4CNwHwzm+vuSzIOWwOc4+57zOx1wBzg1M6uq4SivzFj0/WnM/aC9XzziD/wufd/mOn/egFihrcmSQXhaA7VToiI9Kn08uV98tk7G1jl7qsBzOwe4FLg5YTC3Z/IOP4pYFxXF1VC0c80v24W+6c3UzdnHF9+8AIS+57F3SEWh0BDQ0VEisQwM1uQ8XyOu88JH48FNmTs20jntQ/vB/7W1QsqoegnYuXl+P1DGVe+kgHnNRE0N5PKyIQtZniQxwBFRARy9zm8091ndbCvvaWi260aMbNXk04oXtXVC6pTZn9gxmXPrWfvL8az85yG9IyXh1SracEvEZH8M/ecbF3YCIzPeD4O2PyKWMyOA34OXOruu7q6qBKKIpeYPJGfrfsXf3rLWQy+6ykt7iUiIvOBqWY22cxKgSuAuZkHmNkE4A/Au919RTYXVZNHEUtMHM/Zc5dwzTEXk6pdnu9wRESkM300ysPdk2Z2HfAgEAduc/fFZnZNuP9W4CvAUOAWSy/LkOykCQVQQlGULJHATzqK82/7D4++biap2o35DklERLrkfTbCzt3vB+4/pOzWjMcfAD7QnWsqoShCGz43mxGv3sSjFx1FcqOSCRER6X1KKIqEJRLEBlex7MapeGkr5Z8dSHLT0nyHJSIi3RDlxcHUKbMIWCLBiu+fzNVPzSNRF2P6tYsIFi7R5FQiIlHjnpstD1RDUQTWfWk2A9fDz846k8lbn+yLPj0iIpJrDhbh+YBUQyEiIiI9phqKiIsPG0rL4IDJP1pBctfufIcjIiI9EeGmaiUUEbfxyulM/ew8UprpUkQk+qKbT6jJI9LMaK5xTZstIiJ5pxqKKDv1WI68ZR1KJ0REikMfLV/eK/KSUJjZWqAOSBFO52lmNcBvgUnAWuCt7r4nH/FFQWL8OIZ+by3bz2rIdygiIpIrEU4o8tnk8Wp3PyFjbvAbgEfdfSrwaPhc2pi9/DMxdgwz/rSZXW8ZhKdS+Y1LRESEwmryuBQ4N3x8B/AYcH2+gikYZsQqKogNGczO8ydSO9n4/Nvv5Z43nUdqU1YLwImISBQ4EOF5KPKVUDjwkJk58FN3nwOMdPctAO6+xcxG5Cm2ghKfcSTNY6pY827wZCvWGOeec04iteOlfIcmIiI5ZLj6UByGM919c5g0PGxmy7I90cyuBq4GKKeit+IrDGZsO3sYu09pZfQDCYY8uwPfsp1UXV2+IxMRkd6ghKJ73H1z+HO7mf0RmA1sM7PRYe3EaGB7B+fOAeYAVFlNdP/ls1D3tlPZPxam/qKVxMo1BHX1eHNzvsMSERF5hT7vlGlmA82ssu0x8FrgRWAucGV42JXAfX0dWyFJTJ7I9kuamDJnPbH5i0nt3KVkQkSk2GlxsG4ZCfzR0qMWEsDd7v6Amc0H7jWz9wPrgcvzEFtBsESC1e8ey/QvbiK5cVO+wxERkb6gTpn
d4+6rgePbKd8FnN/X8RSaWGUlNmYk8WYItmzLdzgiIiJZKaRhoyIiIv2aRnlIzmx+37H4q/cw4f3LSDU1pQvNIt3zV0REshThz3otDlZAYhUV1J7QTM2cQQT7ag/siPAvmIiIZCtHHTL7UadMaY8ZW686gekfnIcnk1FewVZERPoh1VAUiOR5J7Hv1CYtRS4i0l85qqGQwxcrL2f1F0+ktSZg+kdejPKIIRER6akI/xFQDUU+mfGtpY+TrHTGPeQE+/fnOyIREZHDohqKPLGyMvb9aRxfOL6SaakXCBqbNJpDRKSf07BR6ZZYZSWrfjqFmrsHkKpbrSRCRETSIvz3QE0efSw+fDjLfjCdwY8OoPqeZyL9yyMiItJGCUUfCs46kf+a91cqF5cy/K7n8NaWfIckIiKFwoHAc7PlgZo8+kD8yMk035riqMFL+OLUMxidepLAlMuJiEim/A35zAUlFL0pFmfX+2azbxoM/aXz0tw9eLIuvc9T+Y1NREQKjxIKOUgsTnz4UJb9z3jKBtZTPr+Swb+ZRxAoiRARkeKkhEJERKRQqIZC2lgiwfIfn8R7Tv8PG391JGN/shRvacFVOyEiIp1p65QZUUoocixWUUHNc3HmfaKS0S1PE3jQfsapSaxERKSIaKhBOyyRkWeZpbcsxCorWfr9aYyauwZiMegomRAREXkFD/9u5GDLA9VQtMNTB5onrLQUMyNoacXicSweg1gsnXSUJCBwgoYGzIzlP57K8L+XEOyrPZCExOKg5g4REclGhL+EKqFoj8VeHtZp8Tg2fgwMHkD9+Ar2HhlnwHanudoorXUsgJol+2mqLqPyuRKG3beYVEMDscrK8DodZIoR/qUREZFeoD4UxcficTxIkRg3ltrZ49hxQoySOmPISymq1gU0jIhRus/ZP9ZoHpHigo8uZH1jDeVn1+OlpQAEdXV5vgsREZG+o4SiHbGaIQS799IyaTi1E+IMWR4wZEU9QUmMWHOSim1lpEpj1CxLUrJlL89+shxP7gN3PJnMd/giIhJVEa69VkLRniFVbH3bkdSf1sDwvwTU/GMtXjOYWHML7KunNEjh+xsImptJRvjNFxGRAhPhvylKKEJWUkp87CiWfG4UA0ftp6Wlnmlf2U+wZglBPIbtb4DSEoK9+4Cw42aE33gREZFcUkIR2vP2kxn/wZVU3J9g3MdX4q0tpMK5IjwVJz64CqoGEQOCfXVASnNJiIhIDmlxsMjb/5ZTqZ1k1H9qNBNXLyMVLituiZKwJiLAGxrxvfvw1qSGgYqISO45EORnDolc6PcJRXxoDbuv2M+4H5TAwuUvJxMAnvE4qK+PdOYoIiLSm/pvQhGLgwfsO28qkz+3jdT6jZ2P0FAyISIivS3Cf2v6b0IhIiJSaCKcUPTvtTxOOYYtb2gluXaD5o8QEZE88/RMmbnY8qDf1lBs+cSpVLxmO0d9eC9JdbIUERHpkX6ZUMSrq6k7poUxXx1Acv1L+Q5HREQkHDWqUR6Fzyy9WBfwnqcWcvvRdZqcSkRECkuEFwfrN30oYmVlrPrOKZzxXCN3nn+GkgkREZEc6h8JhRn77xtNvNH497WzSW7arGRCREQKj3tutjwo+iaP+MxpLP1MFeNviXHE/c8cNFmViIhIwXDXTJkFKxZn02uHUbkYKh5QMiEiItJbijqh2PGnIxn064DK+57Dm5vzHY6IiEjnItwcX7QJRayigurvD6Js626CCPeaFRGR/sMj3ORRlJ0yLZFg9eePp2xLLWzcqqYOERGJgBx1yMxTLUfRJRSWSBAbWkPlWghWroGY5TskERGRold0CcXWj8zmNY+uonpFE55MktpXm++QREREuuZoLQ8RERHJAU29XRisrIxUGTz6upnENj2fLoxwj1kREZGoKJomj/jR0znpqUZGzWskuWkLaAVRERGJEAc88Jxs+VBwCYWZXWRmy81slZndkM05seNm8M4/PMKC604i8ewqJRMiIhI97ukmj1x
seVBQCYWZxYGbgdcBM4G3m9nMjk+A7R85gwvunsevjpqEPbGIoK6uj6IVERHJrb6qoejqy7ul/TDc/7yZndTVNQsqoQBmA6vcfbW7twD3AJd2dPCRx+6HC3fz8Olj07US6i8hIiLSqSy/vL8OmBpuVwM/6eq6hdYpcyywIeP5RuDUzAPM7GrSN0c5FbzqjcsJlEiIiEgx6Jvmipe/vAOYWduX9yUZx1wK/MrdHXjKzIaY2Wh339LRRQstoWhvFqqDsgV3nwPMATCzukeC3y3vi8D60DBgZ76DyDHdUzQU2z0V2/2A7imfJvb2C9Sx58FH/P+G5ehy5Wa2IOP5nPDvJ2Tx5b2DY8YCkUkoNgLjM56PAzZ3cvxyd5/VuyH1LTNboHsqfLqnwlds9wO6p2Ln7hf10Ut1+eU9y2MOUmh9KOYDU81sspmVAlcAc/Mck4iISDHJ5st7d7/gF1ZC4e5J4DrgQWApcK+7L85vVCIiIkUlmy/vc4H3hKM9TgP2ddZ/AgqvyQN3vx+4P8vD53R9SOTonqJB91T4iu1+QPckOeDuSTNr+/IeB25z98Vmdk24/1bSf4cvBlYBDcB7u7quuUZIiIiISA8VVJOHiIiIRJMSChEREemxyCYUh7PmR76Z2Xgz+4eZLTWzxWb28bD8a2a2ycwWhtvFGed8PrzH5WZ2Yf6i75iZrTWzF8LYF4RlNWb2sJmtDH9WZxxf0PdkZtMz3ouFZlZrZp+I2vtkZreZ2XYzezGjrNvvi5mdHL6/q8KpeNsbTtYnOrinb5vZsnB64D+a2ZCwfJKZNWa8X7dmnFMQ99TB/XT796xQ7ieMpb17+m3G/aw1s4VhecG/R9IN7h65jXQnkpeAI4BSYBEwM99xZRH3aOCk8HElsIL0tKdfAz7TzvEzw3srAyaH9xzP9320E+daYNghZf8L3BA+vgH4nyjd0yG/a1tJT2oTqfcJOBs4CXixJ+8LMA84nfS49L8Bryuwe3otkAgf/0/GPU3KPO6Q6xTEPXVwP93+PSuU++nong7Z/x3gK1F5j7Rlv0W1hqJba34UCnff4u7Pho/rSA+NHdvJKZcC97h7s7uvId3bdnbvR5oTlwJ3hI/vAN6YUR6lezofeMnd13VyTEHek7v/E9h9SHG33hczGw1UufuTnv6U/1XGOX2uvXty94c8PeQc4CnS4+U7VEj31MF71JHIvkdtwlqGtwK/6ewahXZPkp2oJhQdTQkaGWY2CTgReDosui6ssr0toxo6KvfpwENm9oyl11oBGOnhmOXw54iwPCr31OYKDv7wi/L7BN1/X8aGjw8tL1TvI/1tts1kM3vOzB43s7PCsijcU3d+z6JwP23OAra5+8qMsqi+R3KIqCYU3Z4StJCY2SDg98An3L2W9CpuU4ATSM+T/p22Q9s5vRDv80x3P4n06nTXmtnZnRwblXvC0hO+XAL8LiyK+vvUmY7uITL3ZmZfBJLAXWHRFmCCu58IfAq428yqKPx76u7vWaHfT6a3c3CCHtX3SNoR1YSi21OCFgozKyGdTNzl7n8AcPdt7p5y9wD4GQeqyyNxn+6+Ofy5Hfgj6fi3hdWWbdWX28PDI3FPodcBz7r7Noj++xTq7vuykYObEAry3szsSuD1wDvDKnLCpoFd4eNnSPc5mEaB39Nh/J4V9P20MbME8Cbgt21lUX2PpH1RTSgiueZH2H74C2Cpu383o3x0xmGXAW29o+cCV5hZmZlNJr0u/by+ijcbZjbQzCrbHpPuIPci6divDA+7ErgvfFzw95ThoG9TUX6fMnTrfQmbRerM7LTw9/c9GecUBDO7CLgeuMTdGzLKh5tZPHx8BOl7Wl3o99Td37NCv58MrwGWufvLTRlRfY+kA/nuFXq4G+kpQVeQzmi/mO94soz5VaSr7Z4HFobbxcCdwAth+VxgdMY5XwzvcTkF2MuZ9EibReG2uO29AIYCjwIrw581UbmnMMYKYBcwOKMsUu8T6WRoC9BK+hv
f+w/nfQFmkf6j9hLwY8IZdgvonlaR7lvQ9n/q1vDYN4e/k4uAZ4E3FNo9dXA/3f49K5T76eiewvLbgWsOObbg3yNt2W+aeltERER6LKpNHiIiIlJAlFCIiIhIjymhEBERkR5TQiEiIiI9poRCREREekwJhUgPmVkqXClxsZktMrNPmVmv/d8KV2h8sesjO73GFw55/kTPour0tSaZ2Tt66/oiUhiUUIj0XKO7n+DuRwMXkJ5b5Kt5jullbRMHHeKghMLdz+jFECYBSihEipwSCpEc8vT041eTXtzJzCxuZt82s/nhYk8fajvWzD5nZi+EtRo3hWUnmNlT4bF/bFsYysxODo97Erg24xrtXt/MzjWzf5jZ3aQnSSLjnJuAAWGtyl1hWX3GeY+b2b1mtsLMbjKzd5rZvDDWKeFxw83s9+HrzjezM8Pyc8LrLgwXfKoEbgLOCss+2UXM/wzve4mZ3dqbNT0ikmP5nllLm7aob0B9O2V7gJGkk4svhWVlwAJgMul1Qp4AKsJ9NeHP54FzwsdfB77fTvm3gRfDxx1d/1xgPzA5m5jbnofn7QVGh9fbBNwY7vt4Rjx3A68KH08gPZ08wJ9JLxYHMAhIhNf8S8ZrdRZzE+nZV+PAw8Bb8v3+atOmLbst0b30Q0Sy1LZa4muB48zsLeHzwaTXK3gN8EsP155w991mNhgY4u6Ph8feAfyunfI7SScknV2/hfQ6D2sOI/b5Hi5xbmYvAQ+F5S8Arw4fvwaYmV5mAYCqsDbiP8B3w5qPP7j7xoxj2nQV8+rwtX9Derr6/zuMexCRPqaEQiTHwkWOUqRX8jTgo+7+4CHHXET2yzFbJ8d2dP1zSddQHI7mjMdBxvOAA58ZMeB0d2885NybzOyvpPuRPGVmr+lmzIfep9YGEIkItU+K5JCZDQduBX7s7g48CHzY0svWY2bTLL0q60PA+8ysIiyvcfd9wB4zOyu83LuBx919L7DPzF4Vlr8z4yU7un5XWtvOOUwPAde1PTGzE8KfU9z9BXf/H9JNGTOAOqAyy5hnW3oV4RjwNuDfPYhRRPqQaihEem6AmS0ESoAk6SaJtuXpf056lMOz4TLMO4A3uvsD4R/hBWbWAtxPeuTFlcCtYaKxGnhveJ33AreZWQPpP8h0dv0sYp4DPG9mz7r7O7s8+pU+BtxsZs+T/hz5J3AN8AkzezXpGpolwN9I12wkzWwR6RUnf9BJzE+S7sR5bHjNPx5GbCKSB1ptVEQKQtjk8Rl3f32eQxGRw6AmDxEREekx1VCIiIhIj6mGQkRERHpMCYWIiIj0mBIKERER6TElFCIiItJjSihERESkx/5/jo6aPUTSfncAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "fig = plt.figure(figsize=(8, 6))\n", + "ax = fig.add_subplot(111)\n", + "ax.set_title(f'Alignment steps')\n", + "im = ax.imshow(\n", + " alignment_history[0].numpy(),\n", + " aspect='auto',\n", + " origin='lower',\n", + " interpolation='none')\n", + "fig.colorbar(im, ax=ax)\n", + "xlabel = 'Decoder timestep'\n", + "plt.xlabel(xlabel)\n", + "plt.ylabel('Encoder timestep')\n", + "plt.tight_layout()\n", + "plt.show()\n", + "plt.close()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlAAAACuCAYAAAD55TMFAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9ebR9W3bXh33mWmvvfZrb/brX1ntVVKkrJNQhEDLYCGwZg5FR3ASD4+AGdwHcJLgZHh5J3BDjkbgNScDEBJtgHGJjMDbEYBrbCNMIkFSSStWqXv9+/e3OObtZa838Mdfa5/xevWqEigLJd41xx23OuWd3a80153d+53eKqnIzbsbNuBk342bcjJtxM7784f56n8DNuBk342bcjJtxM27GT7dx40DdjJtxM27GzbgZN+Nm/CTHjQN1M27GzbgZN+Nm3Iyb8ZMcNw7UzbgZN+Nm3IybcTNuxk9y3DhQN+Nm3IybcTNuxs24GT/JceNA3YybcTNuxs24GTfjZvwkx40DdTNuxt/AQ0R+t4j8G+Xnv1lEPvFVOq6KyNf8Nfz8D5VjhL/K//96EfkrInIlIv/0V/r8bsbNuBk340uNGwfqZtyMn+IQkc+JyE5ErkXkvoj8v0Tk6Ct9HFX9H1X167+M8/mHROTPfKWPf/D5f7o4P9/ynr//wfL37/5rdeyD8S8Af1pVj1X1PyjP4G/7Sh5ARP5REfnx4qTdF5H/RkSOv5LHeM/xfkpO5c24GTfjqztuHKibcTO+MuN7VfUI+Hbg5wH/ynvf8DNsY/wk8L+uv4jIHeAXAA+/Ssf/IPCjX4kPEhvuPX/7xcD/Cfg1qnoMfBT4/V+J4/1UxldyDr3fdd+Mm3Ezvvxxs3huxs34Cg5VfQv4o8A3wZwK+w0i8ingU+Vvv1JEflBEzkXkz4rIN9f/F5FvE5G/XFCP/w+wOHjtu0XkzYPfXxGRPyAiD0XksYj8NhH5KPDbge8qiNh5eW8nIv8XEXm9oCm/XUSWB5/1z4vIOyLytoj8I1/Gpf5e4FeLiC+//xrgvwTGg890IvIvichnyvn9fhG5/eXcRxH5iIj8yfJ/j0Tk94rIWXntTwK/BPht5Rp/H/Aq8IfL7/9Ced8vKPf3XER+6BAZKyjabxGR7we2wIffcwo/D/ifVPWvAKjqE1X9j1X1qvz/7y738I+XZ/Xfi8gHDz7/G8prT0TkEyLyvzx4bSki/7aIvCYiFyLyZ8qz+B/KW87LdXxXQRO/X0T+XRF5AvwfReRURP6T8txfE5F/pTpCIuLLZz8SkZ8Qkd94iGq933WLyD8sIh8v
1/FZEfknDs71u0XkTRH5F0TkQZkj3yciv0JEPlmu71/+cp7pzbgZP+OGqt583XzdfP0UvoDPAX9b+fkVDBn518vvCvxx4DawxBCqB8B3Ah74deX/O6AFXgP+OaAB/l5gAv6N8lnfDbxZfvbADwH/LrDGHK1fVF77h4A/855z/PeA/6qcxzHwh4F/s7z2dwD3MadvDfyn5by/5gtc758Gfj3wx4BfXv72F4DvAt4Evrv87Z8F/hzwgXJ9vwP4feW1D5VjhC9wjK8Bvqf83z3Mufj33nsO7/cMyu8vA4+BX4EFit9Tfr938P+vA98IBKB5z/H/ZmAH/KvALwS697z+u4Er4G8p5/jv13te7uEbwD9cPvvbgUfAN5bX/2/l+C+X5/g3lc/4vHtSnmUEflP5rCXwnwB/qDzHD2Fo4D9a3v9PAj9W7vkt4L87/Mz3u27g7wQ+AgjwizHH6tsP5lwE/vflvf8YhjL+p+X43wj0wIf/eq/Dm6+br6/211/3E7j5uvn66f5VNu9r4BxzgP7vwLK8psAvPXjv/4PiXB387RNl4/pbgLcBOXjtz/L+DtR3lY3s8xwQ3uNAlY1xA3zk4G/fBfxE+fl3Ab/14LWv48tzoP5XwO8Dvh74ZHnt0IH6OPC3Hvzfi5hDGN7PWfgS9/j7gL/y3nN4zzM4dKD+ReD3vOcz/lvg1x38/7/2JY75yzFH87w8338H8OW13w38ZwfvPQIS5kD/auB/fM9n/Q7g/4A5czvgW97neJ93T8qzfP3gdw8MwM8++Ns/gfHBAP4k8E8cvPa38fkO1Je67j8I/DMHc253cN3H5fO+8+D9fwn4vr/e6/Dm6+brq/31M4mTcTNuxl/P8X2q+t99gdfeOPj5g8CvE5HfdPC3FngJ25jeUtXDDt+vfYHPfAV4TVXjl3Fu94AV8JdEpP5NsM2Ycuy/9GUc873jDwD/Nobs/J73ef2DwH8pIvngbwl4/vBNIvI3Y2lPsGv6RhF5DvgPMCToGHM8nn6Z51WP/feJyPce/K0B/tTB7/NzEZHrg7//bFV9XVX/KPBHS3rslwD/X8zZ/R3v/X9VvS4ptpfKsb+zpk/LCNg9uouhhZ/5SVzL4fy5yx6prOM1DM2iHP/w/Yc/v+/fROSXY87d12H3eQV87OAtj1U1lZ935fv9g9d3mAN5M27G/6zGjQN1M27GX/tx6BC9AfwWVf0t732TGHH5ZRGRAyfqVd5/s30DeFVEwvs4Ufqe3x9hm9w3qnG03jvewRyyOl79wpdycBDVrYj8UeCfwlJA73eO/4iqfv97XxCRDx18zv/I52/A/yZ2Hd+sqo9F5PuA3/bFTud9jv17VPUf+3L+R60A4P3fpJqBP1G4V9908NJ8z8SqLm9jCOIbwH+vqt/z3s8qzliP3a8f+hLX8H5/f4SheB/EUnVgz6s+13ew9N3nneP7fZ6IdMB/gRUE/CFVnUTkD2IO9s24GTfji4wbEvnNuBlf3fE7gX9SRL5TbKxF5O8UK4//nzC+yT8tIkFE/m7g53+Bz/kL2Gb5W8tnLETkF5bX7gMfEJEWZgfgdwL/bkF2EJGXReSXlff/fuAfEpGfLSIrDI34cse/DPxiVf3c+7z224HfUsnVInJPRH7Vl/m5x5S0qIi8DPzzX+L993mWCP7/Br5XRH5ZIVYvCiH6A1/g/58ZIvKrROTvF5Fb5Tn9fCzN+ucO3vYrROQXlfv8rwN/XlXfAP5r4OtE5B8UkaZ8/TwR+Wh5Fr8L+HdE5KVybt9VHJmHQObzCe3zKEjQ78fu63G5t//bcr2U1/6Z8nzPsFTmFxstxr96CMSCRv3tX849uhk343/u48aBuhk346s4VPUHMCLub8NSUp/GeC6o6gj83eX3pxiX5g98gc9JwPdiZOvXMe7Rry4v/0mMyP6uiDwqf/sXy7H+nIhcYuTiry+f9UcxkvmfLO/5kz+J63lbVb+Q5tS/jxHX/5iIXGHOx3d+mR/9r2Lk6wvg
v+EL3IeD8W8C/4pYxd1vLo7Mr8IcvIcYKvTP8+XbvKfYc/oUcIk5KP9nVf29B+/5TzFn8wnwc4F/AECtUu9vB/5+DJF6F/i3MEcF4DdjKbK/WP733wKcqm6B3wJ8f7mOX/AFzu03YZy2zwJ/ppzH7yqv/U6M3P/DwF8B/gjmlKfP/5j5XP9pzPF6Cvxa7JndjJtxM77EkGfpFjfjZtyMm3EzvtQQkd+NEfo/T+/rb6RREKXfrqof/JJvvhk342b8pMYNAnUzbsbNuBk/Q4aYxtSvKCnglzGE7L/8631eN+Nm/EwcNw7UzbgZN+Nm/MwZgqU/n2IpvI9jGk4342bcjK/w+Cml8ETk78B4Dh74f6rqb/1KndjNuBk342bcjJtxM27G36jjr9qBEmvh8ElM4fdNjBD5a1T1x77oP96Mm3EzbsbNuBk342b8NB8/lRTezwc+raqfLdVD/xlW9XIzbsbNuBk342bcjJvxM3r8VIQ0X+ZZRds3+RIlyk271ub0Nq4U1E5HgFdkEs5ONzQSGXPgOnak5NDJ4UZwRSaw6hlLVtQJCKiAOvCjIWnxbmYRJrwoCmzGDk2CjIIf7TPUlf8ZCvpWvgnAISKnh9+V3HnirYxsHO4kctL0PNmuIQqSQFsFp/iNQyLkFnJjH+x3do7TWnAjiEJa2HmQ7Rpzaz+HAbgVQSEmB8lBtnOXDJL29yOuoekmpinYeSiEbTlnObiGIosnSee/T0eCi+DKvZMM01pYngw0LrGNDVPfIAn8YK9nb+euAi4qcni7nMzPRrISO0E9aANusPN2yZ5fDrK/71lJnZAbaDb7Z6De3qMe3NnEMkwIcNEv7ATKwZftBEAfA+y8zZlk1yiqny9PeHhfFMYz4dbxhqxin13uN1Ku+yTiRBm3DS7ZM5MIfoJpBX7cn18riafjElUhRwdZ5mcXeshhP59TC25inseidj7qINweGaaAjAdrQO1+j8c235rts/O3ronUlHsWy7p47/Uf/P4F53y9/8J8L9LCrsXurYIIOdgcV2/XEnaf/1mpk/1a+xLnMK0duYGwiCzDxGZqyaMJpktjBkAViLbG/GD3tL010pSFcT12NCEx7hrcZPdBEkynioxS7hXQZETs89zW4af9/VdXrnmdQQXnM3cWGxzKderY9J0911jWM3afREHuTWQV0ibMx69zcTy1B+16QUP5e7Y5tJ+T+0WrHtJpRkTJWXA7h3rwq0jcBVu/5bxdKuvmucjCR67HDo3CyXpHVsf1ZoEkO169J/WY062MjHZPQ6/EhZg9opzfDpp7Ays/8qRfm02NMh+7fm59uKIQXhhpXOJis4Rs71XHfN1+rOduNiO3e/tS17Dk95mbBxKfKjIfj/KVFsVuBLh354JHw5o0BhBFosxrzY1wfHdDWzakqI4nuzUkcKPsr0mBO3a//WD3TgXisYID5zJ58lSjG67tvEVhPBFwyr2jK5IaXrGNLX3fQBKEsm7ORoaxQSYhbCGu7OPCVpEMqRVyx2yLuTtxFEaGHNj2rZ0QiqT9/F6sRgRliIE8+NmGq4f21sCRHxBRphy4GBeoik29JLjR5jVS74Hu7f5zE0kd6TrMz3xxNrAdWlsDScxG2tKx4yaIR7b3hGuxvWuxt2HpXiJNHjfY82mPR5woMTumvqzjaX8uiBxuAfMecnxng5fMpJ7LastTmTNB583L9VLux35+jafQLSam5NGdn9fu5umbj1T1Hu8zfioO1Psp1X5ePlBE/nHgHwfolmd83d/3z7F8oqiDd77bLE77MPArf/mf57n2igfjMd9//8M8enqM3u9Yv+nonip+tEXmJ/s5+/2GHZeOZpMRhenXP+ajt+9zt71myIE/9cbXcn3/iPaRZ/W2sDjPjMdCboST16LdwLyf8GQ1Q6QgMZeFWZyzo4af+NVw+kMty19+n1/0/Gf5z3/o2wkPWhaPhf6uEu9M3PqLDe2Vcv0Bx3iq+EE4+1Sm2WQe/FzP+i1oNuZM
7e6aY+d72L2guEE4ekPpfrV1Srj/5IS4DcjO4zeO0AvthTkkzVZ5+F2Rr//at/n0O8+Rz1vChePOj+iB02nOyrQSmq0SepvxkuDhtwSWD5XVwzw7lO/+fM+3/dJP8PLinB8+f5nP/NhLNJeOk0+Dn5TsywL1sHi679AhqqTGHLLUCc115uLDgfEEdi9Flm8F1m8rYVDCTtk87wi9bbiSlatXPZsPZJ7/c+aYqYPxyOEnpb/tOPuVb/Otd97kyA/8F5/6VvrrFnFKaBPf/qr58R9/+DzbT5zZc35ix3KT4qLOG5jKsxuXGzOf+3uEf/K7/jQPxmP+0I9/C+miwW9s0z56Q7j1vW/R+cin//IrtE/d7Ayt38k8/Dbh6DXh+Fe9w7feeZMPLR7zB9/6FnZTw+NHx3AV8DtHuBbOPp3pbzvW7yRSJ1y/7Fg+0Hnj86M5Vf0tx4f+3s/wiQfPET9zxPKBcPRWxiW7lrf+FsfioePOx+N8HWD3PTXCdGTf22ulu0jz866Oo2Sdv9c5L3UNpP0Snk4CuRHcYIbv4kMNq4d7OaHUCf1tZ/N+Cat3hdOfSHvHuDiv1y95jt5JSNw/g3oc0WePe/41LZc/C178tnf5ptvv8D+9/SEuXj+FLPh7PeIymh3xvGVxP3DyWaW/I7zyfT/Bq+unBEn8sc9+A6/cOefTn3qRxbuB1TtKc608+J6Jxac6EJhOFHllS86OtPPc+oGGxdPigDiYVkJaClfftSMn4fhkxz/1df8DZ37Ln7r4Bv7UZ76OadsQHjUcvWEOohttjdz6x17ncljw7seeZ/WusLqfizFW3voeRRaJxac6plOleyz4Htbv7m2NSyUw9ELshId/60joItOm4eiTLbvnMy99033e/PHnWb1tc6jZKn6y++h/432+487r/LHXv4GriyW/8hs/xvm05M/8wEcJ147V20KzMTtaHdv73zcibyw4elM4eitx/aJn+7KSOqU9d5z8RObD/5tP8PPPfoLf+eO/kH7bwqOO5X1Hc2m2qNrgOidf+c2f5OXlOf/5D3wH7tqzetfhBti9qPidsHpH6S5s/sdO2Lzs2D2XaS4dy4f2WrMtTrOTffBXfs8eqgdsjqTZks0LAfWwfV74Db/2D/N7Pved3H/tNgSlvR9w0QKB5UPll/z6P88riycMueEiLvl9P/jzkOvA+g1Pc6WEbQl+/oH7vPtjz3HyGUd3nlEnPPjuCQmZo9MdV4/WFnA9Ddz7S/uA/u1fDLrM/Kbv+hNcpCVJHT988TIf+/irhEuPJFi+K7z8fZ/jx19/gebNjjsfUx5+qzlX9/6y4ofM7o5n+7zgEhy/lul+/Tv8nFtv8+b2jL/ymVdh55EoLB6a3ervJb7j2z9N6xKfePIcj3/iFief8oStMtwSfs73fZxfeOvTNJJ4MJ3wx975KO8+OTHH/7xl/bqtG4BmZ/NKSnBx9htfZxcb3vgLL9Nc2QP5uX/Xj/BnP/sR0nXAX3nywoAEBNZvCt258uBvSiDKc98f8KPy5KOOez+YkKy43/CA1z/1PEef9Qy3lW/77k+w9iP3+2N+9JMfoH0QWL0jdBd5dmDVl8Cj7HOXr3p++T/4Zzn2PY+mI/7IJ7+R6WlHuLJ7Es8iNAqTcPZDDesHaQYjEHjzeyM/58Nv8dblCdc/dIfVu8LyUebP/77f/AVbW/1UHKg3ebZNwAcw0bhnhqr+h8B/CHB06xXt75jnByCrCFfNvCgm9XQu4l0mF++8PjR1Fu3aArXIOHvbyGZEA1iEyGmz47n2kqyOdTdy3SWmE2HcelwU8+YbiAvBRSkGXIrBF3Ix6q4gIDPilRXZBXP2gSEHXMhzBBa2QupCOWchdebBrt+2jXw+7tKijOWjTFwaYpI7aC/MKGiAxic6H3lXBaIjbBzNpd07Nx6gZ23mrNuxWvdc9Z68E6alOWXmCJqzGJcloioRiiuRYGrtmPX+aYClnzgKA+swoo05q6kDDWZ46sjN
oQ8tJNO9ZlwLfrTfNQAOcmdOUQ7299SJGRmxv6nDjhXEkA1viJyo3bOjduA07LgVNhwtB8YhkCfHYjlyt92QEY4WA1fLTFx70rYGGxbFW+RSjuNljqjAIV3kVtjwaDoyX9nVeaeoF9bNyCqM5FbJrSF8KoZ8aFByJyzDxJEf6JwhUU6Ux644CckWujpzkNQL2dt9yC1IL+bkYc8jruCk3bFoJy4XSlwKqRN0KoajVcIOUmORXI3CKgLlB2bHMbWGSorfo1wzgpmkRGDyrANVkaPWkRtBUi73rSCnjczPMmyV5kronpgzPG9q7B27uDSHzrn9M3CRvUNXzkOyBRIAq2Zk7QeakNBGkQmczxZ8umzPKO2RIoDWRRpJhJBpXIKQSZ3NqdyUOEgg7Ay5dT4zbRsYnSHAYY+uipbfRREH3mV6bXCSzUaFTAyZ3FrkSxYkWhBx1AzsYkNe2PpOjcwGn5AIi4l41JIbRbLMqLjddyG5ep/tfHXrmRQYHZIgL5RlmNAmo96RW9ABEoIEgexY+ZHgE76gdlkNDVWxzd2Q+2JHD3YBiTqv6+bK3h929l4nikdpQ2JwkF0BPkoWIDcHqLLCOoychh00GW2c2esJ/FZmVEI95DJfwtZsaHtl6zYHm4NUk1/mVs06qJ/PekYNJds5pMYCpSE3ZS4KqhbMVoQ+ByG4TCOJSTxOFBcy2Zd5jB1DHZbRCEpu7Dr1gPwSXMZ1iTx6mmuHury3acEW6KSepA4vmVAXYMkm5NY+QyeHRCEHLcc1JNAuGI7fyMSlEBfCsiz61pcAKQrh2tFc27XtXlRalzhpepbNhDZKWlA+32x8I4khNzgU7zKqQurDjNLlpqBtZW5qscsLbwir3Qu7R0s/4UMiNZ7cKtpk0sJsT2rt89zWkZc2H2MnZk8b25eWPiJlz0gtPNddceQH22OXkbj2TMeCH+QA0cXuldg+gYAn232uG1VZSxLBX3vSOuN2jrCz/6PYRXVCs4gs/MRxN3LZ2J4Xuy/e0ein4kD9ReBrReRnYX2Y/n5MxfYLDnUGe8blfsNQ0dkhAVj5kUWIiFO0UeIC3GDIkzlNtvmp3ztU2ZfUUfmcRhILiTiXCS7ju0SKjunIEbYl9ReqA1CMWL3R1SPNtrAlK5oBJ7aYhZk55kTxIZuD0UJu9shPbiCtFEmG1ORgkyiVDTGuwE8yp/hygO6JsrtnC6TzkaNmICfBbR3NhXnxbva4MRi8zdxut5wte7bLjjQ54iqYMyjVmOyhyhzMYGeBuFT8WsoGY9GNelj7kZUbcZIhZDQ4cuswK76H61NTEYTiHBwgUKkR0tLuCb5a1b3hS0vIO1tcdW6oNzSxprWyF8Sb47XwEys3spCJ4BPiFHHKqp14obtgUs9RM6JdJq4d6cKcPS+gcZ92Ui9lnhSnIYBvMnf8NZ+Tu7b3h0xuHeqU1NmxF96MUFypwduxGAZvc7QLkaMSGYgUgzQ5XLRF7/u9Uz2nvRr7DD8U55KyobRw1pgDdd7ZuaTGztlSognJfn6WFbmoTs3hhqheitNRfmf2b5DqyABISb26vXNZn6OLthnElQUd6kqKxJmz1lwbWlDXX91ULf0qTCfVQa/zQGY/vAZI5lDZ6y7Z2nIH+WFtlBCSpUazgFeD9HW/FmsKz7nMwk8278pGUDfEuFJW70JcCeILGhvLffWCQgkoynn5jPPQhkRWx6SBzkU7B5XZCZjntoPjMDC2YZ4vdY2rEyQobZvoG0VbndP4dR3Mjmy5d6k1B1OjK44maJM5bnvwavZkZfPLiaFXfTQaxBgDOQtXccH11FnaKjKnZmy+lOt0eXYcq+NmARdl7oKTbE6a2O4lJZ1brzuHPUqkQOcinZtsDXsld/Ys/GCfKcWJqehwc20Id3tp15T9PqVXbdd+XjOj4fYHs3cu6myj1SlphqzYp9Oj3Ye4glO/49iVFKdks+dtJi2VtJXZbnYhgj+gZZS0tjgl+BopmHOmXsiYrbG0
kYEDQw7PzOkaYOSAOVXRFZqEoI05bKkTXNknzj7VM9xp2d71pOxwogTJZgtzcS7Ks8WZU3MWtizDBE0mLu3Zq7e5ss0t12lBnxvG5EmTQ64O0s7VdxOg0gNaYeEjMftyjuWeqMMVm4wDgqKdBTypc0ZL2ApTcc5za2sxNfvtwQ+WxtWgnISeY9+zDgM+ZKZlZjoSc+qdzqnj1Nr8ccnsX+MS29yScGgxSJb2tPuYjhNuqlkUmVNm2cPRuue06WEFrx0/xzB6c7a/yPirdqBUNYrIbwT+W0zG4Hep6o/+1X7ezbgZN+Nm3IybcTNuxk+X8VNBoFDVP4L1WvryhkDYFDRJQXd+hrCvU0fnIitv5DFxSg77qCUHZh6HOrEoqUQ9M0FaIRevMyGsZGLZTHifSV7JBc6veXpDruwz7Z9r2FxOtmL+JTLTYIgZCmP0NJIMym+V3Arj7QSuvJHyUcVrT63gJiUXRCEuBYkFjfEWFTVbpc8y//uYAxodfjICpiskeCps7qFpI8ehZ9WMNE0itZm03JNSK0IRthCGmmKxS8utRXxaIr2KIISZVOnn4+TGIvVKSq4RMkntdHJFAy0a8KPxJyrM7oY970VdOZ63aLcWFUA5ToG2RXWO7gH63NA5g+S1IABjtAM0kixKbNSiar+H32eSNrJHOw/QsKaNHLsdR34gNJGchNxmi7hK97Kln8ApeZHR4PeR3Gjzo3WRhZtmqD6rwOSo6V0o11yida2ATE1jZePPjGJoTyMJLwecjwA5YwhRGfO9yXZBNbURFxY5t1cFIs8z9lMexh75MRSrpvF0vk/54BnVtNwhslXPq/4tByncNVtf2QGuoK6dzut2/78yc7Hm63GCS4pEx5QtYp/K88Ur3luaQUQMGS1zNgdDCWt070UJrob3zBG5eCWulWYLvhdbq24f6b+X6D6jW05pfbJnUrAz5+oi2KetKjJ93PR2Ll0iLVyJwNWKWHw2tKciJIuSFioFJC7uP1ayoSq6KEhwU56BwMIbESSXlMchmpGycJ06YvRodOxSwy42FqmPQuhzWXs19WUk+egNyQBLx8a12DVVtLHcEJmZu2W9H6xRW2uFDgFMOdj7VUjFTooaUf1wLqkYR9NFLcUxe9ts94IvPipSgqUo48J+8dhc0PK8cqfkydD2HGDhJhZu4ipnHIrzGSmpujlLkaHz0VC0UK65ksvr+kwOGd2MWKgraT6viFdWbuSCpd1bV56n98+mImsqVwyF0aAzAicJwnmPKGzvLZmypak6Fw3hE0tnVfoDojjJrPxI4xPilbRU8lZmNHFSb+m7cnM1C6Hfp8gqTHyYJgf7XyeGWOaSean7LmV/fKZIpE6XQk6vqJa2+3sQ1eEGMVuh4CXPNtCHzDSvlYL8ZrX9UwoKKmaHj3xPnxs8GR8SsZy/JAgbGCbLcFS7NiP4AqfLnpOwo3EJWSWmLMRzzxcbX1UlcnUG3brJLshtPf7a0hyb2HGdOhpJBw+jTAr2aYHDdF+F8dD9Iku6XzhejAvhvd2lauwPN6/585TPe/CH+XxJiu+V9rEvDlRgyGEumDHo3QxOdQCaS4McLz/oGG7JvmKtXItLsHpXaS/MuOZgjpJESOrM6InB3/OTOkStBZqm8D6cGWarrrM0YXXEJJWKuXR4ccUhqlU0B9cRs2dST8xuv5idGXQ3WfWZVfHsq1/2FZIlrbNJ+J2Y4zS6wslidihmB+/wHkeZXzfCNHOlUD2n67QwB1tAB8cwBR5NR1ylBWPy8z06XM/zcWbe07OvAWQcx77n7GhHaCN02RymVulTM28eYPM49LbQq2M75kBCmNTjJdsGWh2gamhlnzJzyZ6zG4346lJ1Gu37k2nNdmyQKHMqpaZLpVSgzZuK7j/TNiCbT6mTz5/TwjP34Jnxnr/PVZ/l3uEolazPGklJJe1zcJ/rsep1fd66Oqj8k4OfQ29p6il5NrFjSkaOJUmpEtovgDn9iKUk8sFrYwpIvda8
nw/amvPixuIIJIE2F/6eOcySS3WQQkqOlGT+7EaSpU5CKsHFs/OiXuc6DLaxdcab26eOMScwg0zFwWz36e/DZyrJzl12tvhlGe08Z26dzJuDO1jbOTti9izaCVc5UOzXVnVm58BFIUZv/MiSlmuvMt1T3RcfAFO2oLHxNr+1BrjVzpR0l6j9Xy7/6EoKL60sZekmsxGzHT7439QahaFyxipV4TDwme/Pwbqag8Vc5lCyirtJPctmMvsp4HdC2BSOYLn+Pjdsc8tQLkSzBbdSuTZ5P7eqs/Neh06z4LeO5roSrnU+95oqrjY1SIZyT7Tsb2MOzIH8/Pz3e0YYFJkifjOCQj/ZuS79RGiSzevG5qCbgCyzzQQ7h9zozGNsJDPlQEZI6ow2IzY/uielYv3APs9DzBYHl40usVTSMjOpK/uPVWdalazg+sLLKtcmhQ5gtuygIlL298KNwlTOfcq+7N/M+9l8f6p9Oqik82VCdS7StdHoGI39rb1Qjj/tWb0j+/3qMIBT48Q5FAl53ve+2PgpIVA/2ZEDjGdKc1UcVa+GCjglqmOXWjo3ESSjdTGXUtlqXATbJFJXy0x1fs0lky5wonRumh2LGgHVz5kXfNL9ppNKJVKt9MjgpjxXCaEQxsjqXSO992Pgybhm7Bv8zkiR4dKRQ3EWJqU7t5Lay58z0jxoWN0HN7jZcw690p0nhlPPeAzbFwxZ63pD0lI24mclRrrDUmGlIDTmcE7Jk7M97biyaj4rRd1zFUQPnJiy+fu+3LtYqsGykIsjYMdn3ixrdFwdp/dO4ByEZgtTsOq27sJ4BuOZPSs32YR0qSBiO7sUlwqJf5LC07DnYjwJZUDmiGlKnn4yArnbeabJczUtyGGgj0YIrhwHK33V+fNQUN0TaOuxUnI8Tkc0EosjqrZJlevfTC271CDR4UYpSGYhPKttBrvYzIRVwIyk7p2fWoJby6BRJfSlAnNiX5VWnMvHw5qr6yX+2hF2pRS4VBsRDe1xk0WcFTmq19iW57x5wbF+N8+ba+WqVf5hXT9z+f17qvBC70hKOc5+zUgSBJ2jbLsmq3qEfTBTHcPm2uFifmbTMcdMZ+fKRbsGDzRb4/FsYsvQN1a63ygxelISNDuYHGFjaIPvhT42bFLH0pn8wya2ZkPKvPUjhjB7ZXe3SAGIwuj2jl5xtNRB2Ni9zpMzxFuFq7Rg41srrZ4M3XG5zjUtgQrsUkNTiYZl86+2Jk+elCxoPAyGcgtyXQjeNQhTO+/1657Nq8BJnBGhqG5GCUSN7OonO+cxepuvZXe4njquxxY/2MYxnBiPba5ockraBppqJwqCvHiSGU49LlqF3/XUMeSGKTlSclaGX9Za/V6fvy98yYWbcN4Ql9wBOLonVl03rZzZ7BJYTEth87KUgpz9XLXgWWDmdRbZg9mDKs8g2fzNvpzXBFdpwZS83aexVPddWtVfXMA2t9yPp1zEFedxxTRaxXPY2X7hi/2csp+DmZkjlA/QR/bX74tcAyjSezQo96cT3t6d0rrENrYmFVAI45KwYNVrsRlljQ17+ZjQK/moA+cIvbKJgevUGUJTHIxqa8JOcYMjI2yTzVdN+0yGijm329yS1dDK7dSgvV13e67PBntx72jkwp86DT2yiqg3jyzrs5wjsADBD0IoFZ9udMSVwYpuAhkMcXaT4jDUL2yF7I2f1eTEkD0x7udaBQPqmpszHMn2vIu0ZMjGU3QzlLu3OSevpRmlN7+hOG0IV4M50efTktyXqve/Vhyov5qhAcbnJ8KmtYl3Eolnijyyhzxkz7Hr6UI0SHTWgSolumUDCIMyHhshFxHaK3tAospYnIhWIknFNDCygyj4vsL0BoWHQeeNqG4m80aWFSnRvJ38XqvE90qKnkf9Gt0GmmsxnaMIy8dC2KWyOQv9ESxPevqNN2g7GuFzZvifecYjW5jXP8tmbLMxsqHzqZy3ORuzw5j20CUlutnFhlQ2jJoSqCkeP5l8gUSQ
Zh/xu+jxg86pPd/rvMkldWakJ4ebbLJV2NPn6hDs7xtiulKL88x44piOA363T+lUZ8WOa8cIg84RXl0Is3MzZkKFtGFOoexSwzA16OTww8EOBAzJ43pnz7m36/Kjzot0Rh2dkb9RCLtMTo770ymdmxhiIE6mJ+W3rmzOppPiekGSMN2ODJcNq3eMRKneEJOkbo7oHDojBbPW12SG0Y8Kzd6YVdRoOPb2e3HI4uBpJ2ZHAWGev5UQLHnvmOQgxIWz9bEWds8rR2+V+V0cGq1E23kO6BwgmOHOVJZ5aJyRU/s8E8dhvzlWw+RHnQ2bltRdlVxwk+J7T+gP1hJ2P95POiR1pYgAcxLS6AkTkMScliTkyeO2VpLd7DLjIGxiy3VscUGJk2eIweZulNkh0CxIm9k9Z05XEDPqvheaay0G2TGeAOKQpGgybyYmz9O44tj3hiRMAQabg24om2oJEjaxYx2GuYggFNTdj+awxWjHTG0h4QbTh5IsdE+xtVpTj5gkx3jLMbaWOmZ0jMk25qqno3eE1X3wY2YaA4/7NZtdR9p5nvZLrnYLmkubN9sXbT0vn+wdNlTm9J5kiEtn0ifXlgp2EZ70K+5PJ0ZOHz2hF5praK4qWqzzebtJZ8QqJ3NSJRv6024yflBE87zm/aRcfyCwezkik7B605MbYfmkbt5l/tQUkSgp7zfHGXkdlPHYzUHak2nN1dDit0bC3yPAih/MwbpKC4YceDKuZs1AK4AojkslqyeZNdn8pIaKJmfngb1/85Jw+tm9dIrbOdJCeGt3xoPtMY1P7KYG6U3zqNpDS8ky7z0VPPCD4sdMah0XX39cjm3vu5oWtk6is0KjK6OJhEFLFkPYpZYhBXSwKu6wMRs0p8jV82Rcsxla/LUFn2kp+F2RCwr13tpzlcZQr+e7S1yw1GiebK+Ik7dKwlyh1vKostkt30NeGLle4kEQP2Tb8wWrts3CmAN9bgxFGwIymsNp0ht7mYhZrifZs386rTifltxut2x2rQWbowV62xeFuPI0G5P9qLIX9dlNyZNVuJwW8/N5RtfufcZX1YFCigDcuiF7aJYTt0823L+6y+N+zTa0eMm0LqKjaR650aIw034yXZTuPIFYWix1lqo4fsPuZMqW89/kjqzOhPiylPy06Z+A5VLDNs/ok4vZ+DwVhs6K1L9VHajTjs1LyslnIG0C5/0SmYTmqojPHSlhK8YNyqYlM9zONNnQFRcNtWgvld1zwnAmbFZmBFbvKMt7W7zPxNfPyCpWYTE5wqZ48UXnpULPKjAlx6Se892COARk8Pid0GyYtV4kGfdrcTEV56FGCiaI5PuMNELYJlyBhisC5nrLTUuG4VZx5rZm5E37Js+VkOMZ8IYJku3ueEv5DBYZ+J3O3Dc3KXHpGdfC+r5dTC5yB1XewF/pzANxscDeWFnyNHncIqIuQKlGqRGQ78u1b4sDVTYEN9WEPmiQufrH7xI5C/enE1ZuJCZHumoIlx4XzXG97juehpUhZMDy9o7drpQsizIdMUf7fW7YTQ0nXU931jPkJXES/CSsHpUUUVKkSENIFPTK7t/unm1YflSLmqPDJUMWmq3xVnyv4A0V8WPGDXnmzEx3Gvrbhob2d4TU5XINeY8sKbiUC7Jo832e80mRKRVxQvCNQ8XjhxLh5obUCu115fCYwRZVE2MtAEGtdjQHytLKYZtmB66uJymOXD0HiZnULagcwz416GhOSm7MUGssTv0ghN7S6i7CbmrK/AhkNafX7Rx+Z0Y29LbhuUVkup3IV55QUr57YVRl9xzkTunvQvfUUofSKGP0nE8rToNVbKXB2+f3ZmTNUbfNpk/BKlijCfaFraW+Q2+iwWnyNFWYtrXUxXAnEZfGjTx5TfBTnjljmxdlTkO4aCh2nxpkFZFHnulESRFzoPrM1AcebtaMlx3SO55er+g3Lcc7GG4rw72E25msQNvnPeJehFfdpFx8xDOeKIvHAhemRXTVd7y1OyNG
b9IPI2W+mj2oTnJ10p+OS0OyJ4f0RtdIHVx+0NM9VbpLsyFGXVC2L2b8yUjqA+o8/b3M6Wfts81uZHLj9vM1uj00UgI03yeGk8Z0sQbl0bCmHxurADtSrl+x1FJ7Yf/2ZFxboIghda7Jc/owdWZHYmcIUUUyw2CfLUnQwRHLPIrrTDyG9VuOxS7PyH8OyuW0YDO2LJuJ3RQsGMvlvo+Fd1r4wH6iIGB1jSsXP8uxfd7m2MmnYZo85+OSrEIcAt25m2U9LJgRricjcNpa8LSXBdUc9hmfIQe2sWEcA+kkMagw3Ib2Umg/Zfc29MVeOEiNZ+lNwDP1wdKTo+NyXDBdt7iNn5+HlP9BoNlkcuvheCKHbkZmDUHOVgV4nJCnATfA5bQgSCKqMxHQUSz9ulPCLuMnZVq64ojrjAxexQXn48pSlNsWV4Kc8VRJtya20bF8M3DnxzK+zzMCqt5s+1VcsJlaW9uDzLqJX2h8VR0oSZCjs7JFZ0Fwyg4SnG+XbELL/enUoDdXjY498NQK/ZmpFB+9bcY6riEWYtnqgUWMWSl8qkXZzEIhfGKGvLcFG5cYUpQVkuJGQ42IdsMkZ2RKkMoNdEL2S9KdCT7TIDvPZmhxQ4leE+STSE+ge2qb2HQM2ijjtiUMQrNNtFeB9irT35UZvajQP5j+04A5MFlkzgvD3rjVCEW9sCuw43azQHemQdWdC8sHyuLcNsPdXTPMYZNprkr44g31ikuTMPB9xg8JN1rkUjklUsrwUwv93YzfCW0rhN4RdtE2V4BQImoHaalsXrSS3vZSCbsi3LfL88Y/HZsjsy5Rc9WBSktIA7hRC0k7z4jEpJ7L2KEKJyc7zjcNleI3qWNKDpmYVd/9YItEVJ/ZvDMOKciP347kfslVXPAkr3EuI4NDg8kTLB4Ku13LucuEvqQXQkKPI+OJaflMR3YO29zyZFxzsVtw1A48d3rN29Ez5RYVE1CNa4vuzak1pHBaWpnvcKsgI70y1LRDKrD5aMiQHxLqiq5X2TAklZJpmjllmpZ23/2Y8Ls0I04V8ZGU5/k+c5BiRqZoQb13uKnBta44Qbbm+juO9irjRjUdo4LMurJu4tIRNomMww/JnCQBv4slqq9FHLoXqo3Z1lvMbJ8/tmgwBktrlBR+DhgalGTmhriEHSOZGWsks0sNmoUp+lluwtKstgnk6KBL5CQMfWMyFEeZ3HhL1Zxm8sLeG3Zh/p+UHU/HJS8tbAOZz6NSCyrCqtCnxqgDk8w8GBeVsEnIFMh9KBy4sr6TEUHSKjMlZ8FEQQQRDJXJ9h6L2IXLYYHzRaV6nWHjCgKTYfDshhbZ2j0Y+gbdBXyvjKcKXSYrxJXHv1vsXQyGPjRC2CQ2ryS0y0DA90J7Dbuh4VG/NgmHzIzo5wbCeQmmDtLFj3ZHPDleo7tAe27O5vYVK/ToHwaOP+dYPbTjp9aR7ky0PpNKR4V0mohLP6ODbno2DWzSG8zIsu8TbkjElaWBXYKnw4p+17K6EoYXI7vjhIwOxBN28LA/ok+B4LJtnKLEIp2SG7NDzmP8ylxTgxVhEiSWghFROI7oZDQON+oeLPP22UP0eGdopmRL39UgM2ZfghSzh2ETbN4WpOX61YzeHolZGB9agcDFYAiUJpM/GM9sL/SjITmb4kBthxbfm5M6rS1jM6TAlD1j9mxjS06Oo3sbdusOzbBbNxx/zs7JDXbftRScDDnwuc0d3EUgLzMyCk/7Jf48mD5dW+ZvPiCMeyGudebkVRDATbb3Dimwurchv3NKGM2Z7VxkTAVJHkvw3hdxVYV4W2i2uSBcGT95rqaOq7Fj4ad5/kuEdJxYnA5MYyAeFaR/ynNWJAfb8x72R1wN3QFQ8DcQAiWJUnlnv2t2RoarHB4V3hlPjQDaZkt3lFTM5jnH7vm90F5N7bhkEeO0Mgg8Z8ekjm1u6XODE6x6pxB6DW2yhyGxbCR1Q0kltZEUSQliQqI9aQ1WLeC7BNIY12AM5qUO
e7JlXiXSsjFHojNBMapGx6Q0lzrD5m6CJtUIOTOO5uzNAoTZQZuJa6uI0csCVRanAIWpcLHSlbUB0EXG7zztdaa5SuQgXH/AE9eZ9jKwAtrLCQosO61hOPUcXSdzGDFNj0qWNb0XIa4hHZlwnygMZ8LiiRhKJ4ID3GToVlplBi/wyNFdFFg27dVsJSvTcSZ3wnjsWD5Mdq/aTGq9tWKYJ82efBuz43Jcggqny56r46UZPHUEmHPwNX0k0RZn5QnNOkNkRM0BkSkhW4vmrmNHznZ96STh1xF9fUnsAxvpaAZrNbPdtbi2nHOnpNY+99FwxP3+mN2uJZ04TrqeB+GI1GXiCaTOM51k+jNh+XjPy6rVfnFtBtBdYVGtlgrJplS4SXGenbVBqurMlnZz87yKa4Pwm0tzjN0Q91WTlI25Ok6HDtQUbb6rpbLIZdMqQYQo7O7B+h2hvTIjNot5FicudeZA4cr6irmkdMwJy3g7Xllzdj5pDlYuP2wpsZzMaZEsM6di5uPVn8tQbzo9wSU2yegBMVsqMDdF8ylh93PwSJdM6C+WKsnJjImbFNebMGXlu5AEBuMtPe7XPF2uuNUUYoQoLpV2KxQ0DhhSoHVx1puZ00yT2R4tgpgugSbTg3OjEFfmQD/DU8vm8LD1llYYjWR/2Xfl3pkTNnMSk6Fc4xDMge6FfvCWAkmgqzTfu+qouZShcCTHE6G7EDidjFt15BnOHOv7wjQErsaOOJkD0BQkenvsZ+et2gNUebRdcn84OUgvwer5DcFnLvWI4bIpgVi2jEC3L8etjudw4lg+ZA6CXKppP7M92rq5GMSNCRczlYooCR5vVuRNIGxA2kL8z6DONtEn/Yrd1BB8oh8b4rh3vNNy7wzGXDcc9sUnScBZix2AsJiYxg4cs7MnmAN+2S8YBjuxlNw8b+s+sIsNuIKklJS4PSOzrdwdCKE4m4uOPHqu+660+hKmEyWeRuKylh6bk9T4REyltZgXhttKewV9CmxjS1LH1WCt09bdiKqlykdvau4zX1jVWtwkI7y/c3VCuJb5WjZDS9gKYceMlla/EldaNHksRVpSensepCHut9Y7HnQnhJ0wZuPxRXVz9mae3wmmI8fuOaG9ts9dX1mAdj11jMkbmrhzM08XsJY70TQfq6aY1KyTmCN8PXZMsSBePXuA4AuMr2oVHmAk5WDQtTir09C2LAosr3s1dZZfDTpzbdLSlEvrTUwLM3B+Z2m+3EB/225KU+5YIwnvKqJ0YMz0QMm7Rnr1PqkiOe8NfLYvSdnSEj7NG5GqOQ2SC59nqPXpe/7RrBJWUib+QMROiqidHwqvqfA8JEPrTDQwLCLTidLfMWMSV85SUF7IrcOHRJ9MPVai0J32HHCZzSjeTqQ7E5sXhf62R4ORSHOw/kS7e0JauLm0vULac7my2qaMK+J/nTleViVT0jClak4FtMsl111uhzO+V1oYr0FFyEtTie7vmEhnWpiB02BtQSzVyAyngyFQm9jaxgcslqMRVOu1ih4IpMKsGFlfTzVVpLPzbBC6LdhaxaeN0p313DrdGM8uCnG0XlLtpVqq1KlVvSwTurBihavYsZlaS+8csEtdl2BhlV7pNDIdy9znb14WQciLTLKgkuBtR5vWdr9zU4yuM6M9Hek8D+fjxHJOxQk3tef9PLZ5rvazlu+5oEsxzXO9zndJGZmeTf8Nz0eGEzenl0zQVpiOvc1N2RssSRk3xll9fU6H15RdWXeS1JBeVfrnIuOpmgJ4rWKEvaWqRFWd9wlSB8fNgBedCbNOrPqtv6Psnttz7IhiyE21OcGclsoR654KzYWnuXS0l2qOdjKH5GK34N3+hD43MwLld8wio1ZCbueX1c22wReu5cHp76u5JiFshPa8pJ+vypwXmRHI+TyrCjMWvDlf2bpK5eyAzec8lp6Qo12z2QeQLkFRvLYUUzaUsNzL4czWnQ/7+5OL4GuePLuxIW8NmWuuDQ0wKQU3n8rcB24K
PB2X+FHm0vFVZ3CyW0amI2U8MQQ8NTI7IngLANzWH/TjK3M31QB3X1H7TCV1zHtZFof1kxyMeK25ONBD5WTBlB1jtGzCZtcWpK44UGUtqsO6Y7Q6i3vmtnoHVvV4WB5+WEQBwOAZoycnR983jENj9z6CesVPcD215owVSYoqw1JVz9tFtKyEFlQnCjE5c8oyxJOEP57Iq0z/vAmBiui+YtmZePF4N5ndV8/VaMUFm6ElT47d2BCjM67+YM/TJGH24rmSjfA9RF9U3Y2bFKPfS+1EW68aSvVlIwyntn/XqsmZ/783LVaJ56yidxXGfRakdM9IXXVelc0Lnv5uZjyydlI1CNRyj3axMSSuKqnX5yR2z2d7UOxRPYft1JTK30LGr3zPLzC++g7UzbgZN+Nm3IybcTNuxk/z8dUlkQNUQaxFZtkmuiaa7LsW7xFhM7ZoNkGwyivwO6V7YjL/kovAYeUejIKKMtxWOhWCS3Pn6zF6xtHPMCAwe8H7qLZExvoFvM1s6EtuHG1rHrwK+Cpg5ou8/OAshdeCBIPXjYRa0oBzCwxDz3LphcbGrlUcRSDy4Hb5TFwYZ6i/40AOon8B73dcjYs55eB9Jq5KxFgIl24UslPGMyU+qCGwztc/HcFw4lm8a5jz1dRx3ZoGjyufmxtFJhMEVEdpQbFvdaCHffFCxhUSNlgkFZfCtDauz6o2NM4wHhu8q0H36N38XJgr/LKK8Yv6BXnybMYW7/KsFeKckrObRTprSfohCmUoooXahxGim4Q+NazCaJ24G5MyOETgau5eClIYQmJcZfwikjYNXjJjCoXTJwY9Z8W5UmbcQG5a65kV9ujJLJOBGFK3Kj+WY6d1Iq4DcWUaM6pY+bQr8PfBnB1OXWkntO/V9QxKlTM0Hqb6+37+Gxp1sAbyYY5MUTGEUpaR6SjMejKSlPHIMZ5YBLp8nObqzIruWksfT+gjVL5WypaGqQiY1lQkxNNEBVHnaLFGrGDpeCrKYAhG6yNTLsTzKHiXSctskXBj76lhr/Npfra5y+TBev7lILQXhjqpO0Cpgdx7hmXg7WsrRZexIEeX9p7htlXLVc2gXWwMXe6F9roiRRiKe9BrrTYSr+XqhkbvjysZJGTcMqLbxSwd4QSWi4nd4mAdrwzZdJOQklEazE7a30QNWcq9K9xGZmRcC58qrg1N0oPy77kfWhKGKeC2HnVq5PjCj8mNHTuLM75eVqYxFNtkz29aQ+pbnFOcN7QldftUT94E9NTSbC5Ce+7mKqgZMaZcvO6rRy1PVtNtebbtM82jIGBMJnbpBjdrEWrhe9YUnZRqbdNS2s85Aes9GIqsRRGrBAwRHx2syvN1WN/QWNJZTaYNiZ0oOXny5GhLe6fU2h531XfWmqcre0oRFI4L44+KWBGFuIK8xfdgH20u+lVCXhWeF+z73AUYT2z9qjN+4XZo53XgQiZlR4oecRlf0l8ugdEoFLwhhVexY5qCya9ke20aA4uRkpqu6LTRPzRAbIS8jjQh2X4pukfq1FLftRAEZxmYWcqnoMz2eYY+7p4zjb7UGr2kFr6IKCk7rocON8heyylkmiaSoiN2SiwZF/sqa3hoSMkkShZDaXQf/wZCoNSBW0drKNgljhYDtxY78MowNAx9w5CCwaW93+sOZeguldV9ZfHU8uD5oA+bJEuBpIWR0quAWC7Ceyl64yNohcdtgtab/kyqJ5ffDxSf64KNS2HVjTOsbELlByX4XvFHE9NRqcxSkN5bfyCxRVU3HT/aZjmcKVNxItpuMujcmU5HVksdSeFZxLVJBQwn9rV5wfoPXY4dtVHwODTzU60QcHPh0F0gtwYXVwgcLXCrmPGtKYMxBy7jgqEQS60k2CqKcKamHteZ8diRO496R2qcNW5tBNdkwrVV6dQKMbvPMK2llLFaZSKCTebE3Exz7rBdysLJllZ8MqxMmVqUfgrsdkZ+dIfMUt3D3xTy4rxBV67boaOAGdjN1LIKkxmULPSb
lvPLlW0yxWHS6vhMjqZJaKuW0x8sZbSLjQm5ZpPPCC4TgvVhlLJhaJa9QKijaGOZQ2T8G3suqaQFJBshvDakBUv7uGGf3qE45sOZm6twwOD3tHCod8VxpKSm07NBwzPyAjqvB1Gg3j/K2tmFZxzcHITtC6bfs33eKjklWkFC/fy4PEg3llSipVOzpWQOgpdw6U1kUE0OYq9BVi9e5xS46TeVSi41w349dsY5rD25FJu3Wi6gSzRNYrUaEJdZnAzorWlOJTVbaC6V7ona5h1lz58U2E2By2mBG0wjpr3ONNtMXBRtmWTO/pjN5jTXxSkqFagy2RoCc9x9L/O9NL2oYs9aIZdGpqGLHK970jobF9JD20SOFgOpU6RL5EYZblu/TpkAp8SFMp3kfapETXXfjUJzXSRd3Hvsn5YejxnrvxcPArpknJc6v3zpReimktpqHNM6zAFejsJQCP4W9Cr9tiUlR7eYrAKxHLrZZvy1Jw4BJks3NleweFIqpafimB/OWd2ngtTLbKdT+571UptmV625DHFZ7FFyRUNPaBpz3iRatWtzvV8Hz/Swqz9mSy9Z9bMr+nFmc1NbqBYLxS8tkjQVfQuCrIckNJuSLkzOaCtia7ZqfGmxj7mkH+cAu1DN2jYii2RCmUORRigk/5kTKjoXS+houl5T9rOD0vhE00VSsuO4eX6akzynnZ111Hg6rKwSuogwa4BUFL5NHwsqibzO69ArskzEMexTeGCcuaw0Ps1dJci2/0V1jCXSUK+FbqFz5ZzfFScomRPkkl3XFD1DLP5DNt0+vzSaRdtFtM2MJ0Jc+hKQ2AWOW0uvpsnj+yKm+yXGV1cHysHJ8Y6LLPiQub3cctbuEKekPpCwqojWJyhaKVatY2WjYWeGzKJYiiJ0QRuC5V+nybNJLevU0LlonIgshRdiVtA0Z6RUAekB7+NgY535T+VvYpOn9YmtM+M3jYE2G5FXnaCryO3TDU9Wi1LFA0hpONxYo1wTRcuAt9LKtUVVzbWwaA0amDDSYh8DeTABNzdZ1YY1n4VaJrrbtUyTNx5HFMboWF6aQXKTklo3R5puEppNnmUbXIkggDKpjYsVs+NiWtJPwYxPgvapMNyF5ECXmdQKw1lguGhorhNx7UkLZTh1tF2cq5NMJqKUn/aZaVmUcZOVjao3NMuNQopmTJrr2tJBS6WEKUvvYmM5cWcbrPOZaQhWweJs4/p8NefKlyiEZi+o28vzi5o+yaPrNcFldtvONrmxYToW1oMt1GkMBFccs0J2lUnI22CCeCo83KyNczE605PKzhSeHaU025zEsNtrKVlTYtPykQM+3S7ac/fFEfWTzuRViWLk5eLISNLCi2PWV5HyTFNXnCdhz2tK5YNynuf5zPeLEUq7HFRJjbNKx4LahgvTdLK5BcOpEVO1UUJjqKer1XdgVr4ivvVcOVhvlXtVjoeCjGbIK/+hapBRG/gW/Z/DMvBtbJmy5/FmBQK7scEVErXfSXFgBN9mgs8MYyCOAb+caJYTadlZWfSos0GVwo/LOHSRGQczl0EStaG2S4oMdn3DbWFxYRFwNrVCqyat5yolos+mDRXLMWpzaXOmSnHHUlBnKGbXRVbdyOasZ1ofURHZMdr6dCGTFjq3Har6aHMrIzND+MkIz1LuWRisMrU6mpJtU0+d6RvNG2F17gZnmjx5L4zrYt43sK2tr8q60l3g6XZZBH0h9EIcLdiqzYtdKb7xQ0G4owUONQhqL1Op7rP5kluPNnZuORRUu6i8uzHjryCtrGWMGwunNB3YBLHNuHtq57kbWsahQYtzWYnSvreCkSpqmdVUtU0wtNim6IzT23uTHgiZ2CVrlLs0bhDJgqaLqyVxY0TxirD5UW1uRuMpNV0kY0FmbZTd7NS4r1Mw3alY0LPJMfR7squrRRHZdKzc4IjZHJCYXQnCFX9p3Ljt1NAPVrHadpE4BVbrfm7YXUAk/GRVgZIU1YxLnifbpYnUUos0dHbsKile
xj35uwqthjYy7RqrJo+Km1zZHzK7qWGKfiZ9b6NpQ47JU5tot5cyB/7Lh8Luns5q9io2J/sYGKZgkjYlGNGFIVOny57LvmO7SIxnDcOpo9lUm6KwCySn6OiLNEne89i+wPjqpvAc3F5v2Ww7xCmrMHK3u7bXSqTzeLPiZDEgi2SnJ5RNVEppu+zL7RfMC18ytOfCdjIG/nEYyOoQUUKTDHLV0nIC9umnqsRcjfmsUZNgimbcVU3BuigSVwJg7K3U3E9Wkt4dD+XD95esy8xiObJrVsSuVk2VcvbbEbeIpLEjtcJZN3LVdxbpF/SsdrbOpezZOsvL7FCm85YUlOXOkJvozSGwah0TYEsLEC2oxWHapiBQuTkwtMnQj/vb49n4uMmUmYc7IE0uaST73GllJbvTypGOMnFtxjEXZ6MqazdbpblKxIWVxWtTyoVxxIU5hTEVQzjVc7HF5aL1ddpNjVWUOKUNkRgNCqy9+xTmxVbTQxV1wonB78GRloHcOPwu4WOivYKLqwVvJYfzCXflmJ6zjVVdZ87ydamg6Qzu3m07mgvHdMqshH11vSRFh984rrYdZ8sd/dYUsV2bLDqb3Ny+JXszlrUCz8rUrfx/jIWou7HNp7mMe4JuUTCeUZ2YIbhCetw7LLXfmf1ejM9MFFdIaT/XC5Fcc7bfq+JzcFRVftukyvMphmX3vJCOi0TBtpQHVymQmK3KLxa0tyKBOHutpg7r8dVgeRktCs4qhhrkvbNUkRSFuY+fS6YUfb5bsr22VMg0WfpZS++4GYURpQ2JYQzoNuDWAykGEwQsaZNpZeuuqlDbgxcTClQxpLJsFipilWGyL99Ohchq+mOUQKAioNj1TSCtfUY8MscjrpW4KWthRZH8MGkTL0oIlopRjEQ+joHmSuj7AE3GD6bZ5QdM5LO3uTmdmKZNdxEZdw2zMkoJMCwdXAjv/b7lFaWcv2p++R4Tdx2t6tCPCZnyLFgbNpG4DjNC5K88V6slqxHaa3Nq+pctAKooR6VAVMkGVyok4zqQ2lI8obHMR9MSGs/sBKelo7+1r45zKdA9sJJ19c5S4kOgnfYyG1UjafeCsnhgEg958JCEMQl49tcspTDDK7vk98UGpRKa7OdqQYDjZc84+j3xWiBcO6bQkLuMFEV9mnyA0jBv0uvlwHm3nh0osJZY/a1ArkhgtvnhBzGHrEhgqDOkKy882ijSm/TCkIIRvAdw0ZVOEJYynIYAWUjRkbeB2Nn8XrQTm2PrkLF8UgLQWKriByUmXwRmi/PU7DMAdQ3NjlMy57jZWSAQh7BHlUezKxIzm6Fl0U4Ub8CEYgtCJmq6WGHLnHJbPMlsXxDGUxO3jmtvYqxDy7BrEK+EvN9LusVE5yNtsAKgtDAwQsUCeYmlAjdYkUGz0z3F4YuMr7KQpnKr2/K63jZoP3uu4sK811LV0o8NwWe69Yh6E9zKjduXHWYgZsIWxlu2YUiyPjdugk0ySYRdQaBW7UTOjm1rKZS4MNmBmYXvZI9CpX0Zbf1dU8bwbDOA26Et1QYCk7Vt8YOyu+1YdpN5vx4rk06OsJ5YNJGtNyPpR0tzDbeM1yDOoGU/WkVIFWwbSkVYsxqZtDUOSnbkTsilX1foobnwVsZ9jk1Kbzoo6gsnoRGm44yuIjxpLcVTELU5+m2tNU69v41PPLg+IidHE4sOV2fCjBIyujXI2vdFzX2yppB4K7EekyMvTVRUDiaxFsdPnaBLg50TTWkHYBtLbmsUk/eOqJpS73XfMQwNmhzLdsI75Wn0LP0092canOl8zWlVZwskBwfBkVvPdOxJnaPLSpMS7YWiO88uLmhW41y+DAUZ2FmVogqMp8JyPRYI2zbatDAORdwV7ZbB5CWm5FmsRnZXHXlrOjw17QUWAJgBsvRGXmSaK4vKUioWvDgPfrDUroyGyFW1ZymSG5Lc3FZlPN0jtKmt6Svdoz6HqFOtwFNFY7Q1UFM6mdJWJNsels3RS605o6kVhjtW
/ZO2waI9b0aJ4khVTkpuHbVa0xpyH0QZ1aGDoq4tOJdn41mjzBl9KoZNkuL6ieyXOJTt0JB35kwAc1PV6dh4PZIgbQPbrmF4ukBGod+1iEAY7SBxKQx3oD0vnQLEPkNGh+byzNQ94xzFlSMulcUjmdGhmvKZNaKm4swXPlLl1+QOprNM9g7BeEJxKXMFmO+FO93IMpSm6PU5FFX29RZkZ5yk5gp8H5HcIVFMjHE0nqXxsSyFkzsllrXvpuJsVEkNbNNbHA3mXBxFpm0zoyZMDr8raMYuF8faWlDlzs+BqWQL5FJviEezyezuecJ64gN3znm6Xc5Qae15l1tlfdRby55uwXRUWnYpNieTklvHtoj0TmthOt6fd3vh5vtqPUUVHfyM4NJk0pGSHBx/3AKi3JtgraXLPKwicWUizfGo2pB5+5rRrMPKVMTsRMqO2DcsBxOv9WOmvRA0eFI2VIhJSSelsnEhxnlUUxM/XibyKqESZsfdDZn2Sg119zpXdUssKM8ohVPnWZwO9KtkEiuTZUucKCkJLls6eTy1NdjvWkP7MmgyiYxFO3FrtaOPgavTkd3zKw4UO1B0z3kdCzdpnY3721hAP62r7ajSIXa/hlNh0USmZRHSFMrebWn8cWy4dbZjU54flMDB7QO3as+1VLvG0wzOkVplPPKEIdOPxSH2pSo8Gw/NlWr8VDJSUsR1K6dKVPGjkKP1b20vLZ1bbfUXGl+SAyUir4jInxKRj4vIj4rIP1P+fltE/riIfKp8v/WlPgtn/ZTabjIyHYboUAiPFHG7YQqWiy0TPy0c/Zlj87xnWtkpN9dKWmTikSmAi5ZIppB/Oxfp3MS6HVl1I7pMpSS8lBoLe26IKXruOTJV+0l1dp5IyaQGYOYASK7S/sp0JHRNNKMfxRyodSY0tQ5Z5nY0cWGv59GTBm/6GVvl6dXKVH7Vym/H6Fkupv3tGw/7ARmqs3hkDYuXDy1ll7OJjZqSronyuVFg8PtIfC5lh+lE96RmmHsdjdEbH6sYje2LSj4trPBSFt1cK+1FLGKOQDKRtjgFppNnydJxKRZJBXOIfZcIbUFAco32rNQ1Ls0Ia+FnVGdoHAJp8LZ4y2U4UZZ+Yh0GjrrRmpu2hcfhLaWk3pCn3JnjlFpTtNdgKZJml01Bd3C8fOcCicL69o4Xbl0VMb1y7zNMa+VstSv9FUE7mxRjsv+vJeJ5NNG8ftPSrUcrHy8rzpxEixzjkQlSaqPIKs48supfiBrqlVtnUVJBdMJGTBQ1lmjWCdOxsH05Md1OxLsT07GW5rMyG6VnRj3IYQBR0daq0VSdFS2b3e3J1lAoa6hV01ZpjJc4rR1pFRhPW0OvVE2heeXAuT3v0IzI/ueSPnSjWDqwyFMcyiLUyBWVmUOROxOPFVGG3nqMATRNMqmMdZ75UjKVoMdn/JHN5cVyJJVm1360lipxrVx9JLOtCuBiPKpwHtDo2MbW2htN5jDu7jjirTgHB11BjOrzo2h3AajTedNNLYwnGY4m4lliujcx3Mv097LJRTwXmY6Vo9aQ7WEI8waeZ+0qkMEa2YZt0T0rtgktLaDyfgMCQ2imY7OF6uQZDlzqhGZnhQ8v3L2gWU1zKbgbS0eFqsU72HwZj4XdPcfmxYbhVo1crJiBvA/Mti9mXn3uCVmFo240tKdIleRGyGvjqUyjORDpzmT6QY2b5VByEOJaGI+F6YiSuiyiu5VXV5A0N2G2KpvDcXRna5mNDNffPJgAbrRUkySBKCyPB8bbmf6eMp7mEowJixALslS/BAqh25AZMU3DQq6vdtEXKQlXSOPNhYNoDvJw29BHdUYH6ccG1GzozPNzwuJxxHmlORkJi7gP/nN1QsoeKoZC+Sahi8xJ13Pc9DinM5l6OipC0qOpyVOV/SdrZfLO+Qm7saFtI+NZZndP5j2jasmNU7AOGTub134QmIpYcm/2rfL8qt0ZzoSjbmC9HIzf50tRV5l7Obk5ZZ+DyRictTuO
mtHOXQyVNVkHob8j+FsDcankk2hzWQwlpqYyEzPneZqCtbRRWxduEpqdoV8uFl7mVJFY43Iav+uLu0hfDok8Av87Vf0o8AuA3yAiPxv4l4A/oapfC/yJ8vuXGMJJ25tYoShHwbRbpEsm5JbMMKTsGK/b0kPKJtN0LAy3hfFILF3RWzQ33YlMp5m4EMZTcF5Z+Mhx03Or2e7Jf4WIbSS/8rmubmTyrDGvCI0IyLO3KLi9Qabm1tUq2cbo6fuGdJSQpRky57KJnckeDs9BEJV5AvvaS8vpzLMYo1V0DWNANh6/8XTnB72nJjPAzZXSXMLisaUV0nVTKnv2EfDqXWH5RiAt39P/RyAtDdHwQ0mhNHDeL43zE2r1IEy3s8HrsYgbqnnwfhdxYzLncDJHMEdB1nHWZBKF8VjYvGiQe0W7gMLvKueazDhNxyY6WQng6oXr2DH1wRzBZPyPoVRtmOCaOSxaKiO1OovlUKZ7JfMzF60S/tYOoLk2p3A3NaS7I5tHK95+fErtHO9LPzMNJhrnfSbsIKwienukn4K1vdmVXm6TsJ0aTs+2jNu2kDeZDYsKc4scvxP8xvSLTGtGrH8YMB0rcaVMpU8emKPZPdUikFk+zzv6e4ouM+vnNqzOdmhTIt3OeCPaeLTx4N2z830upnD7eQ+mVn9AKq/6M7UJpyQlnHsjlidzfOJSGE8C/R1PWjbmAB8l4/Q0fl81KPLsV0poTDSXDg2Z4PdCrhWVrGm4Wjwyre1/x2MjnKfBqm3nTTsKzVNPcyX4XW0MLuy2HenS5k6/bWkWcQ4UhttKfHFAS9WqBoVsUenioSAbz2ZqjcQ6WAXidCyEo2le295lpiq8qFj6eP6yP+fG0La8ymh1htpEPo7ICz2yjLaZtMqj7ZouRAumxJAjM03FuSsFGSbMqrPDIGrBnRtl3xQ1gz+a0HVkWgvxqHnGuY4rQ1fW3UjKRvYe7yWmoxLQ9oaQ57Bvyj2ewvYFa9I8rQsCVRx9mWS2e+k0sZ0aOl9I1bJ3JnMj+CND8OPgrZvAIpqjdBwsCHD7e6qhfBUEa1YkT9b+ag5yU3XglOAyR6c7nn/lKe1rpfBmsmcrk32tFyOcTKSFBTW5NbuxakYLdBzErhDEixCpOWyWVtXBl5ZdcXYOpDg6YScsnti8HO7F4qBZwJQHTz82+I2juyxaVg1MR57QJ7rFxPHRDufzs2vhIM2UC0cwTVZoc9QMLHy0ghdfHJCTaBXUozXLdb310ZNSIHDraIuUqjrEuiNYECposMBz7AMyCN254jel9+jG02ytb2JaPKsQr2KO4uPNin5sZttsBRPGQ1bgbLGbkcjWJ5Z+Yhkms+lN5Yra8xxuK23RFNsHJ1YRWW1A1aMSter0MdkeQa0w1RrgGCLvB3OiTcw0zWT1Lza+pAOlqu+o6l8uP18BHwdeBn4V8B+Xt/3HwPd9qc9CTSm0bSLxsuV8XPJid8HqyCIsyewrPXZ+Tl/UUvZaMQalAiRCWE9oZ6X746nStJFb3ZZbYUvnJq7HlmGyyg4/yL61xDBf4BwVV/Jo/ft+YzFHKgdYtlPhDOlMwFZvXveUPMvliBscurPGtm1ItKF0gO6klPuyj6bdvufbOAbaLhrk7jJtiExjsIqfXZGW3yrtxnLKiyfJuEXXSns+WMoiWyl2uJ4Iu0SzyRy/kegubKNuL6JVtEx2Tm6wXnvttc5/e7JZWfWfVOHSYgijm8mDcxn+gaL0bIhVaJeTRQclSZwW0N+1SiEEcjLV+GeqfIoxiiulv+VnAbcczKlj522xjw7vsqV5kmPMgevYshnb2ajU/PtcucMedhc1XlYtF3djNqXkUdiODe7cNtfT4605tlMl+NqzDj4To3EduoV1BB6GZhaWM5K/0LjM5dWSs9vXkC26n9MLWsiqO/uf5kosFVM2g1xW7nRmUPK0clYRh12X77EKtgMCdjxJfM2H3+Xukak9V/5QbsRQqsINe6bq7jCV5t5jLUTm
HoIzcjSZnIIvvc8Wj4XufsCfB0M62QcJ1QGWYJF8PozmxBDAZ4IXZ+RqVHBS1J+dPc+0UHM+S2YT7H4++uYl01Epge4dvreIeugba/8wCO2Vqe8bgqDGh7izQxeZj77yrgkUFmd/vJMQr7z6DfdJLwxmIZ3B+8uHSvfYW9VWqwy3rJ+lZCMgmxRCJXj7WaZkrsryxsnKrZIW7Fu2bIJ1rg8Zv7CwWZOlSCQKHzp9wlEYaEMyDmRn9zQUSRXYB3KIcR3DVmZhw+YK2itDHgjKYjHRHI1Ma2E4LaTsMrd3LySmleO063np6ILnjq/xV57UlsqxS9uY7DqsAnc6VuKxbfqWOquRPaWUvCBYG29V1xhdQcqDjEuhP3XcOtla6no0e4cKwxls7/kZRZZ00Py9IPGW4tMS1KZ5jYvWAM3WeNdEgss8Oj/Cf9MF8dgI4kZytxSbiVKXgo26tJypy2tbWk11wu5OgGIfKyo69s2MxOYSAFr6uTqf0J1ns3ltJi/z7FySDHV2g9nDHCy9u3neWsOsupFhaph2zd7GDVZxqd7WRYyW0ch9sKbVhUS+rMVJR8ryrLe0/iS0547usaO93Duc7z465WqzICezGdNpJi4d2jjiwhv63JcOHL2R4CUZwdvvMs0u750n9oFs9vDq2TkfOLuwtPVyjyIDaBbeOD8r3GZ9RohYG0MZ08oKsXIjTKe58ByB0c18RUvZStnvtaT/hN225dZiZw6UKNlroTdUGoTOnEFbO6W12YGkyPuNLweBmoeIfAj4NuDPA8+r6jtgThbw3E/ms27GzbgZN+Nm3IybcTN+uo4v24ESkSPgvwD+WVW9/En83z8uIj8gIj+QrjactTsu3z3GbZ01xBSlayaLNKNF6Fq9/+LJulFprpTuXK25X8yljNQqCGTnjPOzypZakUxGuIgrtkNbegGV6oUi726loFWTpujiVMmCbJwojQlyMtKtZnLNhR+0SjmMAGu60A1i3a8vLC992vVoa/3sppWJkZExQmCR789B8D5bBUGyVOGt1W4PM8IcedQobPNCsF5JEdK6MRG9naO7VPxuQqZMcznix8zJa5Y7931BoEpVSthBewXNRmG0SKXfWcopZyPL+hHapw4dLCrWqmvl9shEDqUxakUIfZ6r+w51oMaTSl6piM6zLUl8acsznJUWDwtPbuB6aHG9wcVStHamyW6+wwoStkNr0V7pLeim0ookZlyp5qMgmnNLl2TcscoryNmhd0bCk8CTt09nIb+9dhgM0bPoJptLooTFRIp+1p6qOlZTdtw+2/D0wTE05dhNtlL5CiGnfVWn1PJzV3Bvp/iTaeacUHrY1fkmKZXqmIQbInSJB1dHlm5QmSdOFVSdhTeraGXh/s06THXeV8RujLgh7Zv9ZmieeitDHpPpxGxsDjVX1u+s2ZpchTUc3hdlSOFpHVbjzQhaTPtjRpDB7UVMMc0ebdgjnEWscPe8pQx3L+R9uXNJG6VoRQ6LhybC68Zk661UkQ5XRmR9/fyMWydbcmt94JrTgVeee8ob92/BVWPVwZg9ikuzG1N2xJUeEL2L0OGs/mAVtOqNGxkX1q5IVC31tcjEVUHVR6uQozQLroKJ4WFD99DTXAuX44J1GDlZ9IY2KPiQWHRTaUhe1qMHxJCBsLE2M1b+XfpvBpk1ibouGmp/ZAhHTQ1qa2vhcuyI6rnsF+R7o7XRKGi09R/NJT1f0oiDEDZKd1H64eVsKG/R3/Ojzf9PvPU8n3t0m1UzQSGypyX0d4R1O87aRaih1MO9zOZl0+yBUjgxoz7sexaq7Am/JUtT7aUfrHH8+fUSVeH26YbhsyeWuqtyK8mWy5Q8efQzIlPX/vXQzdVmKhWB36PnuVGev3dh83UhTKsAXvap2rJPTKvC8VtFZBX3B/Fqmk5q5HLrQ6f0d8SaPpeMhIQ8i3xWO6NdturowiMmGpr2pF/zuF9bO5dk3DeweSKT2XI/YPNEzZbdu31F18WZuaKL
zLRyxJVnPPGMx2JzdldStTWFueOgB2lFBcu9CWbfPvfkNm9dnO5fL88KLZy74ysqIR/gYlqwia39rTVNs7g0nSw5G0nRl1Sd7Bs8F8kf15f2Pc5synN3LtnFhtYnfJPRwtfMB1QRq24s92OMZe/4CiBQItJgztPvVdU/UP58X0ReLK+/CDx4v/9V1f9QVb9DVb8jnK742IMXcTvTPXrr7dtM6rm72lL7dyGWy51TO2IOw+JpZnGeTR30wNjqUIW2KN3DYRNb3h1OrJGlFCVolVnoq2qPVALZM+WKqiXffpC+cyWF18KdxWbO8WpxflJrjt+qG0nJWWVCaUi5G1pOu93c462WrWswY157/qUOQkjcunVNXEHXRJ5fXpVrqxuhLa5cStP7O+aQtZtMDq5Ax0W7wjn8bjIocpcZTpxBk7vJNs0iRupGKWTE4kA6q5xhEsbC2wmDVfigggQ1WYWFaUGlZSAvGmvmvEikIqSZCwfEVHlt42muCpwvgvhMaKKRkBuoHLEqSzwd1wpCK2cepmDCg6M5aut2pGkiPiQ6H4nqiEWdt3ZMf8b5LI5nreqa07WzUwG1GqZ5oyMdZ46fv7aNKVBKZa3MftFEuiaye970qKaL7qDMmHnR3VrsOL9c8dIHntg1nYBr8lwd56JV36WOWZlaSuWblk1C897JMg5Pnp3XOf0VEzKYcfzgradzFZiGOm8sbfcMgfswRf2M6ng2wqh35uCA3Z9SZu5LU9paxl/5dL4vjtQm0T4dWTye5kbO82cAzwg3VusTY1lqRXU5wRR9ESUtBOh88FWGi6ZC7CZIhTRtmjNi0hGTVWm5aZ9KcG3C+8zZ3WukS/yil3+CmJz17OpsHr79+JSzs83eJuV9IYPvjQOni0RaKO2FGV7VA35TcQJUoL+tjMeO8cgI9BpA2jwLnrrRNOq0yzRNIu4Ci890LB5Z6lES3O62NEWqo24uTZO4vd6agOXCNujhVEidCRxWAc/c2HMJfWY48bRtpPHJ7NTSHNDax04dyCoynArPr655sltxttwhT5o5OJhTW77Mn2xpnLBxNBtoL2OpwrRG8NUOhl1m9bZjtR748L3HXA3WdHc6sb6O/V1lM5owLqGkaidHXmSmdZHTKHOwVtnOZGrMRsaFzS3jLtm6NRX2Pc3g6nrJw3dOSafRnm+ZJ763dFvORvKu15tacy6vd53xWtUKm6aj4vAqMye0H4u6s7NNPnvbM+JKZy6O2UMl9gG9DjTXQnNta/pkMdhzPLaKWvUwnmXGI8cixFmSp36WlPN060hYTPiQSprb7NlmbLnsO5uvArJIJrQse4qD39mcTp2tz4ePjxn6huVqsOfdJbPznWM8dkzHmAr/DtQLqVUTqqzOnJiNnvlPjjlA/rq7D/jwnccmHL0y3bq8atA20LSRty5ObQ15pU+Bi2HJRb8Ar+SFwnE0IelTT9MV6ZQStM/CnIWv6HsrhtBg6ex337nFq0dPWTcj3WIidfYMxxPbvzRY9bMvcgl8idTdbIO+1BvEGKX/EfBxVf13Dl76r4BfV37+dcAf+lKfpQpD3xiaIcDo+MTl87x5fjqjC4zOSHAHxDB1QugzvrdIyv5eIvdrj++dcSS8RXBRHU/HFVexY92N1mW6bFw1MqnVSerFuCXe7Qnl3hthsThO1Ymq/IZZidmVCqlGyF45aQeOlgN+W8TOMvTnCzOobZ4F8wBYZJpFxM8ieKZ9M0wNEm0xrsNI08WZKDkdF/XyE+MM5MAsSmZSD5R2K1KIw+Zdx7VncV6q4hqPNgGCn6PWqpVjJaIwK+yOobRhKK05CtrilqZ+mzoYTwPTcUNcCs1yIrdwcrydG/7avbFN0O/qpALfJNo2WnTWQhXPAzunXHhicSlow4yGkUGXmeAy9442OKdcx5asQlc+b76W+pyqWKDbO0+pwdotQNHpsbd6nxnvJNpHnuu3TixK9mZg4hK0zbx68pQXjq5QZ3IDsigVhWWTFsXKaEV5
8c4F7zw4Q7zdQ1eKEFJnDn1aWePPtNR5zkumcGCEvGmK419Qm7Im5kanIkhMhkZtAx/7+Kvcvzo2xfayUc+NrQ/vx+GinH8258m8tkK093sTUe9Dau3eqRemkwPF51ohOib81m6oNhYl5kJC1cabHldT11hxesumm1pb10mtbYhki+KtYmffWLSiIeaICDFZOVd93ZVmuItHWlAA2xjFK7dXO5pgBRH//esf4e7RhrSwVhcpGo/x/DO3CRd+lkmZEaNsCC3emvt2lwWVKDyqw+GicTWGU2E6subfgBHxS1UnTklHGdpM//ox7dsN7Tms3840l0p7CeswcjktOG6GopNmApG3ui1ppXA8kdaJ/q4ynTS4VOykWOcCMBR/ODMbE3yezzc3WGPcOgWuAzh4+/qEV4+fcv/qCP/izirdDncLV1BbJ3ue4KSGWHrjt7nS2sO0gBLTMWwuF3zy7ef5wPG5VSovbF2lVaafggE6TmdNrdquq4oe51CkTmrAoVAnYG4EXbbQ5lJtzcyTIivjrqFbjPb+JIVkX5z/bUGiXDb9q1IZOxxXx8qc8iogGjYluEi2YbtBOF32NItojkGwoCEulXwcZ+d/FsMt8zNsTEiT0bFsTJ09VikANb5bf8cKq642C6YhzLwqUXt+PiS8N52lUGyzBi2FGMaNyq3SrUd8SHOj5SrLAJCOMh+8/ZRvfPUdzk625GxOyNzyqJDa43LvZMSFBQRpMT+CPd9S93bBQBDhY2++zI++/iLdE+PkmZK5odCLduJnP/fuvM7GHNhMlj0iZLTNhC4ynsD2OSlCyloyGDpfE7HwzQbBDyVQydAsJz5zcZeTpud0tSOvE+MxDKee6cicKHP0iwJ58Tvet3r5cBl88ZcB+IXAPwj8UhH5wfL1K4DfCnyPiHwK+J7y+xcfyZHfXFFLH9vHntcvzthdd3bBI7ids01jlP1G6EskGou+jYh1tm8xJGRpzglZaEqH8m1suZ461s3ISWtkUC0TG2Eu4a3Rkm2y5ixVR0pELBL3HmTfS89VtKEswuptX44dTy7WhK3MiqzuyvPu5gTxhVQ5QbMF8ZluMVkZ9XEiLeHl0wu+4d594pGy2XZsYjsrB9umqYwnynhm7TO2r9gJPdMx+mxkXFvqC4qjGZXN84H2skxYB6YDVD63LQRnvxeGk1Sj+D36RWOLtFtOdOuRtBR2t60PWg7QdRPjsXCyGKgEyzlKfM9EtEyOmLEs4mvIfkGAQcK5PPv9P0JzNPLi6oJ3L46JU+CtzRlX44KuMQfKWsjs74lV5T17AnOHcZGidF+QHoH2kWd4ZeTFjzxkPJOCkBXj55QfffdFPvaZDxiaIKY1kwtRtEpNIMqr66e8+/SYb3jlXV567pz+jkFicWUoZNgaShTX5kTlLs/SD7VNkNu6/TOu6YKgBvO3YY/oZGXxjjlN5w+P6LctYRVNa6YTIwrXKqZn0FW3/1nqXC/f2TtKVX4hbGzu56IDlQNsPzSZU1Uc1FydpEIS16IcTtLZaZpfD8WRqkELNmdSMhKsREsxS5HOMCetOFERjl+zeziMRmy1bvBiKQiF7YuyXwdqG+Rl3xmisPOm4i/Z0tKAJuHy0Zo7X/uYeG/a3/NF3msAOaUKGlrwYp+bW6w8W0yNXBIz8TgHQ05MatoVW2BtJgjZKnKB5lo4eT2WNLSlCd/annI9dTzcreey+OnpgjGbLMhyPSBHkeksM57UKMRSksNtkxkwNEXo+4ascLzqjezfFISgAoSDYzyBrzl7xGcv7vCB0wvio8Vs81wtvjhIX9WRfSGWl3Xlq+iq2vrrnkBoE9/8yptcTQuks43eRVP7j9GzWEyGYLcgQfFbS3GaLTEnvBYUuXH/3U3miGjjZyI3UlJzGdQ7FuuR7cM1zfHA0WeDVb/uxEj2F0rY2f7humTzIWTS0uZP20aLratWcnHc3Wgij26C1964C5Rin505k7lVpM3miGdm/Sa9DqxfDywfWsGP
DI7d1BgiX4sL1OzJcGqB+yt3z3npuXOGW9j5lSKrNHlSdKy7keVqoF1O6CKzaibaEG1udsrxqicnZwEzxXkqCHs4Gfnc49v8+NvP48Raq4WNybFUdMeNgGPuDDCcCmld9t/G5ve0fNbpqD+qV16+e87Xf+A+w23TO3MJ4iqg3jNMDZ+7uG1vLi2ArGpOCV2y/qo+E48y46ky7RpcySzR5rkXo5tMH9H34IoD5QfgJ1acLXYs/cSqMRAgLZXxyFKkuXWQrao97LToGh44gV9gfEkHSlX/jKqKqn6zqn5r+fojqvpYVf9WVf3a8v3Jl/oskuUyuyd2Z7M3pEVHv89FDyZFX/VbrCmuFA0Ki6QIrkS0WjSXdJ6ojc/E7LgerS9WRliGCVnFIhrJ/L16mForgrxDi0FX76BpIASLjr1VBb6zOSFsKcroxjMwFEg4v16h9xcmKra1ahjfCxc7c9FNUE5pr+3Bq5rIo1tFUmNikT9xfhu/E6Ztw5vXZ6yXo6WQglVRTGeZ4bbSP59xvXG/JGrh+YBvMuOZMJ4GcmPGNHfC8kkqDRkd2gZDAhzEtTUqHtcOXRSoPltJ7zwHvOmWuKppBSy7kfEExjOZlZtTMsj6YrdgsbDy0zkq1wphF4csOcZSKns4C7XwMGAfHUmqaV37ebkcGXPgf/GRHyYn4Xy34Gm/ZIpFhTfX/nLlGqpYKgVtSntnRJtgaYjB4PjnT64Y7yaa+w1vv3Gn5PZLdcZGkOj46PPvsjrd4QahCYnj566tx11pXeBL41KAX/DBz/Fkt+Kt1+/QXFtfxupshN4aUHM6kY6S8XNK9Cy1l1u9L+V6DL2haOAEe5aLFoJn8UhZfapFth7dBkKT5g0yzVVBMs9vDUXSIASb5+X3+v0Zp9NZyNq/kBiPCydFbF62DwLjrVp9JLPmlpbzpaQuoDqzBQmtr7eNOVLBM55AXlq7FSn/EAvaNiNQxclvr8yGtE8d47Yt1WcFcRBD9cKWeRO0lKjjdNnz4tklKHzN84/m9FvlEr30gSc8fP0W7jLs52Cp5vKDodwyujnqjSuxqsdckG0w3skcjJSqyYpQFl5QbbLrLwLd/cDtHxZuf8KComZjlYuWnnRcTR0fOX1kKKiHcOV5vFsBhjD4EqVPSzP6kgyxHm9n4pFpwvlBSZctTqzVzazwfuAI1eq9z17c4Rtvv8uj7ZrVy9emh8U+9VPnZE1LWsm9sLvXzAhj6IsqurPNdfuSGYHPPLnLq+un1qC7IARhZ6ja0WIwMc2V0iwnS6/tzJHPbSgBITOf1RVO1qyQL8XRO9hPataiCYmXP/SI9PaK8TuuGW9pea9xa8O2IN1OLY1Y4ww1UUcdLJ3nJ6W7tGq60GN2fgDxytSHws+t9kasIrvKSiS7xzLV4CBD0UQak0fbgmgWfSxJhsAftQP3L495fLUun2MOfNiakwNw2vV0IXHreItbRfoYGGPA+4weRWJyHB/tSsunEmxkm6ehSXzvR36En//B13hufU2OhvjLtkJ9pmNYsyGpNQqJriKcTKXiVOb5OXtdamt0OlYeXB5x//rI1tFo962+rd+1fMdzb5A7S2dPyRNTqbYumnBjb/uTi1axmEtfRgmZVJqBS7T5Erbm6KuzPSieZD798C5DDnTedLXqteRggY+foLsyDic1wH5vZfJ7xpfFgfpKDVFYPHBMa8vRu2Q6LO7KWwfnsqDdJMYx6AwmjMuS3ik8lhz2OWI9isgoxSGBZTMRnDlRU3Zzh+f1cW8GvjFEIbXMfZUONXK0a2djTt1gRJBggn23Fru9saUiJzYZVIvx35hekyuSCSlZ+b8fbKE2l9Hy+9k0Y0JjjWqvpo7nj67nhXu+W/Dc0TV6NqEv9ugiIWcj02kiH0UWD63cNzeVfGFl9dORlb2nzq6tuUps7xgx8hAZyEUnSH1NTwVy0Nnoi9c5rRdX1kG9DZFFO80k2UOSeCql1/3YGBL4Hve9NvBEQVwhPYa813E5
GLUDfCUqpuiRXKJaUVoX+W9e+0ZQI37uxoZ+bAy+djZfam9DlQO0kXL8ilB5Z1okRTXXidLe6nnh29/l537DT5jY3bLM3wQyCH/lEx9i+3CNiyZrcfVkbZFaMdp+NMf+fFzyg/dfRkT5lq9/nelES19G23zCTmkvHM4rbhXpTgZYmOZOKE0vq0RGDhUNcuRgvJG4DjaHW/NSF0/tmnxvui7TGArScaBnUu9zQXy0CWhXHJiuRUrQYKjQXgeq3rtwXcqI12a8uqclDbTMBuW7Il4qe4RCQnGu2oJMlXS5dbZ3aNvMKO94pjSnA6GoECPGr6hp0eqEmCig2RB1WFqsiDxKBM0yo81xUeZ9Y87PZd/xuXfv4HaOT7z5PB89e9ec+2D9ut55cMby3pZ8lEqqUPaaXQOz2C3Y9U5Hxlmc0+Fga8QZolP7zlXORtXnqr32mgvH8oE5YrGz9RqL0zcew5g9x83Axx68ZBt1SQcNkz3fqQ9Wdl6RObVNvvJtauFEs7U5frlZWPBSHA0XdXZkUmcp9Q+fPuazV3f48Nljtm8eWcA4z28g7KP+uDbtn+nIREXVO3Lj53YeVTy3eyK8dPuSX/uRH+DNzZnpzFVi+gAnRzvW7ThzgZom7cv8KQiUp/RR0xlBr/UShqhICWaY07yhNw7UiyeXbMeGr/3WN5juL40OUOQrqrM6Rk/qA+7am/5eeW4pO2gs0Mpe9p0bSorQj6A7P6fy5bDYowRTdQ36QVjed6zfzaZ4jV3HMAXTRBTMYXXmGKrAw82al08vOF3vZlTfFfQNZRZsPlvuTFtRYDu0Ju4JEPfdG6rcRKUtuMEQ1P/q0z+H169uMcSAFnHN5tzNmm8uWkqxFhIMdzLr05K2bDAnpjXtstzq7ARWztbRcpjPu9qiKi2TBs/b29OZ+5wLwJC1NDdWk/kIpdWRlIbjOLv2uDwIXuI+eFFvaTl/7bh9ZGJoqzDOMUPlluJkry5fUPT8lUCgvpJDMsRjy+tbTze7eO32IZDlzIXjOxvi2nSDhhNhWrkS3cpMSsvLjFwFI4+LsjrpcWI9tGJ29FNgNwXG7Fk00R5s8TYNvhZyVSOvfIxi1C3NcMDTCFZllVVmBKGKIuYCHY6luWP3ZM8X0ILMUNMYJeJhsonRNdFUYYFb3ZY3zs9maHWaAi+tLzi7fc1iOSLLZA0vS3QU19Uo7+/xNBXjLlCrnvo7DcsnJYVYK6PEHMkKE1eVWW3sHAWeqYRKC+XurSvGGGhD4nTZG4F+wZwKkLK5DduG26ud3e+qil5QhCpGV0nmtQN5jXBrJ/m5AWh1zkY3R/K7oaFPDd/6/Fv4JuOlkLlHuxEaKtG0OE5t2bArx63MxYqEVM0YFXhxdcH0aMkbb9/m3c3JrKUjWqp5NoJfm+XJAdbtyIdffYDUasoasCXhldVTfukrn+Ln3Xudj73xUhGidLMD4JISriFHMw53TjZ065Hx2DYPmowuE6m1XoN5dn7tfuVgqbm8MESxvUw0tZkUkDYBN0F7bW0lJOY5fTYHCE0AX3R2mlDQqL3ujs1hWxcA/iPX5qCvbTMLvaU+ZBKmo6oez75CS0sbirLeakrejlfWWxfmtZbWieVyJGc366cd9mqsFVC1MGH9zsR4lufdSXSvz5Yb5fpDpro9HQdDrJ2y2XXWzkHhxXsXfPLyOfyuGO7kCW2k37RI72ahQl8a5koyY18DqNTadXtnaEv2wpSN9DtXdnY1GMAQhkUiLbU4WEJ7CYunmZPXI+t3B9LCdG1Cr4QeLvuODx095vnjK+KRziKPUzJUXCdH3gX8zu1pBnXelupjKOjZ6Er9wB7ltWbUtugtrQU/fP8ljtuB1y5vIbfHmTtq/6O44kDVNEdaKNPaUK/cuhn1rfQDHGxfNSHN//y1b+PrT+8br6gQvgG8U46bgau+21fB6X4+5dajsudczdWpNS6qu1nh36RmHzDhhOBMC2pINheMf6nWl7TweKbRNAPn516a
1U/RCmus+KNUG+qhU6SEK098tDCx4+vJhG4n9mKihdspxan0pYtF9gYIBJ9ouliKjJidx+YaXjy+4sH1ERebZbnWEugqSJM5Wg6sm4GYjUsFpZvFaL3wpE08ujian3kVw62k+Bg9P+vuY5IK62aYEfSwK507qA6SljmiaKvGJyptpGphTmoxDll57rmB5qUNX3frIR+984Dd86ZwHpcQl568DJDhqBkspV1QOCl2fd8wwZ6r31EaylOamCdSJ7PI5txqpzhvuSQ6LnaLfTFGLTg6yEbMoJkcoP1/IyFQAKgRoZtLY9aLs95YWlMW5YxePL5iOs6MxxTmvWNa1koWt+fMLDPhqVVoxegZYuDpsGIztGx2HWMMXA4LxuhnIyb5IKqdz8t+mUU1veydqJL2yB5eWT9lOsmFPG4OWepKZZIo/toZKVMpIoY6G6vKw6ikOu+yVQkGq8L4sfsvmOiZgPhMzsLLi3NOFgN3ikJsCNlKq0sZ6kymLJ70eN2WBVIJl47mOtHfcualr5t5I1ZvDkF7WWQMyqhKvjla6bUflXxrMl6HKFe7jidbW8jWysYMQ9NYZYQL5sRq5fSY5hxpCdpZiiqEZE6hK8/xgNhYZfhzUza1AAx+VljPWbieOr5h/S4nx1tz5rLMbV5q5EnWGYrNjakHzxFtdTJLWf2hs/aBr3kAKrx9/2yOZE2QzVJ5q9VAczrQnluzy/uXx1CclVnVPcMPPX2ZHz1/kT/8sW/G+0x7KXPEXb/aSyW0ibaLTMkbnamDNiRLJXglnmbGU7EefoswE4mlNLieF7mD1YO8NwiTCQ+GrTV9ned5ReVESrBQHEsRS90dBhRg5djFgeofLwk9s4PrRjh6XQlX1s5kWrn5XudgnyFe95FeOc5eJNXtzyslFvd2LNtp3ghqheqsvly83ZoK3t0LhK3MLTvqM8iTQzule1TXfUlBOeWFsyvu3rkiHie+/uwBl8NiXkdpdNaA9nMLuod+riKTbBunS2pq+M7UlXMwAm5MexmL7dDSujSrdbthX3Iui8TyeCCeWGNg31tLpLArDnG9N95S49Mavv72Q17f3OK1x7ct7Qekdabxye5pEmrllYs6r6NakRi2FpA0W3sIKXqG80URiC3pphLIjM9F1MHX3HlkfTlFzemnBH4lDVUpGLkUGWhjDsXc99IVBLIiQ9nQjL/vg3+ZX/OhH+CTl8+ZLVNmYc47qw33t0eGPGJp+9zaMc353u8hc4l8QWhTC3FtG6xrrR9oKvIX1WF/+/KEj5w+5vX7t/m2b/oJ0lks/E/mymZVS9NX9NKPdj+HXTO3/0mt0J/5OQUEdt3x3oi7MzCcCdNJSzzuCspC4Xbuz3vxBCM5yz4YXzSRtjWl8NwYF1RKevL18zMW7YRzlYu3F6LsVhOtT7ywvKIrXTicT4xjYBrMidKddZbIug+aZ3ufrLfiK+un/D0f+EEuxiVuPTGe2fye1lJa6pjD0myMZySTWJPrJs320/hm5rwYhUPnoPL7f/Dr+P4f+VrWbziaK7t3zXW0ghMHt9uNcQ0LwX5Knu3QmAPos+2vg0ml3Ll1TdNGtEvWQm3BjNrOdBGRGS2bzsxxWvuRMQVrBybM+7E6mZtaS9Z9oHYgsfN+46vvQN2Mm3EzbsbNuBk342b8NB9fXQeqRBsVXQjXpW3L5GYotLY5mcsnW0OmUmfwKUAl8snoCI+tx5sk01HaTYHN2BqhOTmD/TAItkZllQh5WGY5V/t5sXSFczP/6bDs+5XFU9JRNogymM7EXC5bOEGptaoOg45LOqbknnMonu5kZa+3FjvrrO6U7/7gpzld9Iyn1qbBOeU6dSzCxFXfAdZQV5L1TlvdN2+8IhDqLPryQ+00bce7fjkQV1KQIjejMupNp6XZKM1GZ97KTBSNJW02KkSLRm+vdnMrkxlCLtHH8WKYNZ06X8ppG8hejXB+kqFRpqXQNImzZU/q/V5Atd2jYM12n49WD653pm0zQhwD
X3P8kN//uW+jDYkX1peWX1cMncuUEnxL0WYvM8/BOHAyRxfq989Xg/LO9pQ33rzD7XuXfNOH3mY6qZGI3U8/Qf/xM/jcitxAH8OcNt2XVtu5f93JA7ZTw3d/9JO8cPuS4VaeV9zcsX6w6M+5zLodLeouqKU4ZXnaI6eW1huPHGm5h9alIAKU9GvYJjYvuj0xOFeehAnGzmhVTck5bC4U/hEHaGtFiXJN5TlrsilLK8muHJvuMhNXQncu9mxXEFdubv+gvlZzgsTyTMrfDaly++M7R/94ScqOlGy+USQwqqbVzIHyxk0bj4Tp2DhnFYGqc9f1NsetoMPK09smMmXH4ydHIPCnPvl1HLWD2Y+NIFeB8HbH8MpojV4P+p3VIaPg1pPZr1asxNqnGX2ZUu3LaMjMdFIEFL0VeZyudzO/rfZazMH4nmnhZ87a+n4k7OBTT+/xkaNHTGPA96V45XjiqBv3qIYUVCIzp1z8iDVBHrSglWaP8uQIR9MeMTgo13Ybb8d8dJd3Lk+sYulRY6TcQjp2Ued0vRaejhscfmdIdkX7avrOiN6Gyv5HP/Y38Qff+ha+887nrHebt5RkbpTnllcctSObXYeoabLlo5KCXRZk07Mn/pa2NqnTOYWo3tF2sbRcwXSKPGgQbq12/IXXPsgv+/qP80OvfcBQ/IPmzuoNgSJZNaTfOEJvyFreNPiSUhY1Xa2KKlVNr+VnOvKjjuVjEzB2Q5r1x/aIht3nuCxIdU1pB+XOcksbIqnV+Xxya7b8G+4+4Gq3MD6tmE3Nhcy9aCeGGNilhlfW58bjbFPhmAmpN2rD9nzJ1fVytqPztlxs0h//8Y/yB9781r2OWfl7FRDOjSGoFbX0O6GPgf66Lft6ESeuxRNl/wM4Wfd88GsecPLctWVCltaYPa4Dad0iPvPZ67tGmo9GHhdRpsna06DC2cnW9JtWwnE3sFqMSJNZdeOeE32A8iHFBk7K6rXA3aMNZ82WJ/3KGg5XdJR9uvQZLp2XZyQ+3m981R2oeGw9Z5pr60S/Xg3ocZwbQ0rpO/Zkt5oNA1qqjjqYiiCdi+C3jrTKNJdmVHbXHTH5mXgmzlTOhT3BmYMNv5bFPsO2lz2MXn+vKQ4/wI9cvWQNY3vbUNwEzU5naD2fmtGrMDdqi9K11tAxFU0abZSkwi42rI4G1MP/8MZHeLJd0p5bGkEVjvzAeb8k+EzehnlyV/XVCgPPG8tFS9har7J9WtKg+uGWEteeVEjziFUrWArBdohadgxYWkQNakah9Ymn2yWXT1f027ZUuRT9lxEal8mNPdMhBSMPFzhXEgZnOyUthWVrjSIZrJmyCnPKr1Y8VWdOvRZxtJL3L+mbX/nqj5Ky4zgMvHR0iV/GWTcotab+rME26kp4zY0pwZsInMz3p3I8bnVb/Hng6kfu8LGPv2ql11tKXzXTLglfe8V0OxJ64xncPtkUJ6Y4/WXuRPX8gnuf4zMXd3nzx5+nuRbr0VTSnuqsssUH07W6HjriaKmglA1idk45Od6ZIvNLjt0dK2Zww6EDiKWYG8fxGwm/K6TnqsxdexVWR7uqgX/e+tQ5pTmrhgsz6VsicN4SdkamrbIi7aUJSjKZwUmNMB6ZerF6Rx5M3NGlvF/T9b4frDP1DrpETOZA5VKoMJ4eNCdl/10SrB9k3N3BUhJaK2KBYFVSaVE2C2ebdRsSAibE12R8yHz6nedKIAHNhWN6fmLxWktzIcbJO7ChxsFwiCuNh5di5e7ZEVf23iroagc6SAeIkLP1SKxpyNqT0SU4enuguRyZ1saB2j5nMgX/7Nf+Cf7yk1e4fXZtemFi2j/Pra4s1VTUyWdStdtzbeZ0tkJzXUjbTkmPO+uNWR2eEuC1r2wYT+GF0yu+66XP8fqjW7gPbI0o3u3XS04lRees32Bz6WZbYh0e8j7AKk68m4S/8yM/ynfe+xx/9tGH6dpoUjDReKXn42qmBkjplWk2ad8IHPaplTkdL3NmF1Gl
aeKc4q3PQUV4/cFtbp9u+P63fhby7gJ/bc2n/bgPqFXt2GlRKrvLvZNJWC8HSw02Qn/r4BljNnD3gYgeJePsnrSkla1Va/Qse8ci233KjcwE+DpydvNcn9N7Cp949BzHy55Qlcgpaaqg9GPDxfUCh/K569vE7EnJsV4ONEuT4qhyKFnLOq6Oc7mfIvBrv/kv8h33Xuf51ZURuIsjZxW/liKTp80sdut3wnZoCV2aBaqNe1ckR+ocAT5wfM4b929xdbk02ydYetXZz+1y4u96/ocId3b4ZaTzkXU34r1ailqUq62lRENvAfqyndDB23qCZ3TY6hxx5Rx2LybeeHAbL5mL3cL2mLgvNqj3swa2lICjph+/0Piqp/CaSyEe2cn6nTBOAdekWf8mbK2CLWVbMDpHScziaBoEP2baC2HxwM+OjCbHMAa2Q8v2qmO8bunHhqRWPj6jBOWhVlSCSmr1pa1B5T9VFMrZBtJulD4aUbxyD6qwmh+UNDkYrGphOpJ5klUyXM2rqzPn73K3sFLTwSpiPnznMb/05U+xezmiScjJsUsNp13PGM3hSVeNITZYu5O4sI3WSjjtnJqtOT0uWjfxuBIef0cCheHEWXuU1plT0Bs3wk22uVlzSPMoZNwjg+FJ4MHFEV1jeihcNta+YWccqe7cqps0GNq3nRoTnivl/blV8jqhsZAHk2MztfiNN6JmEeqkEGsrl6NGh3tuhX3fxI5tskbRF9OC4EyY83AcVujs/8jMgwNmUrVLhmJcDEvkxZ54YohF6pi5HVIMxLBrkC7jRri8XPLu5+4Qrqwps/EDMi4KP/LkRf7QJ76ZNz57zzb50rKjfo4JeNp5jDGQspAn/4zEzsunF6y7EZlkFhd1Q1HLPXAoVIBszyLsLAipDigHjpBdc/1b+XLMaKvO3/0+AChRsi8cMKtG09JGSFm/M5oT9TBYUUGJyisfSno/IxCz81PbQFTnpKwzuQ4kFeJoTXuhOtHmWMtUq+3smmInpAubi7OY44QVJ3jl6HXZk43V2vB89NZ9fvGHPs3R7S13b12Rz1srYijl9KtPt/gB2suC4hQeVD335kpou4n23JTQKdW0pvOjxKkEOoI5ctW5CaDnLX0MyM4XBWwKcqbs7rbEVaC9soa+63dtPv9fP/NLeHi95vEn77C872jPhWnXcNwMtsklcw5VjJ/jy5xyk9Jcld/VKuHcJHDVoKvCW6ktfCZDd/qrrtynwP/vx342J+sefW1N2NharzIRuvMzN7C5tmrMxdNMd1G6O4hVcLpxL35874cj//Wnv4nORe4sNkbMbjLp2HawozDMpGF1Zuua43Gv1l84kabzY4F42Aj/f/b+LMaybM3vw35rrT2eIU7MERmRc1VlDfdW3bkvb09sUmS7KTbZkmgOMEhTfhFhwDBBQxAkPwjwkwU/GB5gGTJJyTQEURRFU6Q5k7e7KfZwu+9U460hK4fKOeaIM+5preWHb+19sqjue1uQcGETtYFAVlZmRpyzz9prfd//+w/RRBGPFfFY3st8nsr+5ZbrFQXNJMY6zezjEfrKDJcHsnIg0isHrjDEE0Xvsab3RNypo1L2xjSWz6Pl2phqGRKsHGK8GtvQnMnaj2byd3VJd1jrCtILT3JeC4nZyTTldNGjagxR8KdqrQ+8hiur5zx7ssbsuBcmKa2vlKKYJtQXKd95doVRsuDBxapwtrQnS2tRrEWe6CjGjpOAEvIch1KWQU9XfLH/gDtnm0Jmz5cNT1uo6krk/u2eWNeGZhp3pPR40SK+alm8I8aY8b2M+JOU3jNPeuaD4WVAx73i2+MbNFWEncXUzixTRAIqtb9+IXy0VcVqsmBaiJfbPCjt23OiQ5WU3Pd47smODDsbF0yblMUiAbvcL9rPviOUe8Raoo0N+iFX9MP/+H/8q8kln6rJZTE1tcEtIgJvjPRc3sTZyZBooknGivTMU6yrDg52RoqFaAblOp2DaPIkptzUlD4s5gYWsZMU
80ahIkGxotnyQG5hR5xIXcUYUypQ5ZzEniBdfHLRcO9sXQqNMhAWg0t0VHj8IiIem66rVl6g4Nkkw1vZUDvbgxqm5zmzcYaJHdoqVuKCbx9fkz8fR9S15l88e4GijmTUWWraDLl4LIeoiwLahbi7tinZyoqRm0001QjSQ8kKs8GvwxWKeBJQrMJ1iycZS4GqK3DPdZxRoejlJUdPVjHnkWwYc0imnmhm6VnPw2crZAv5N5NFJp1uKiotXSrM2OAyMQcsZxmzRRpiJjxRoWScZyEuNFFhO5d1XUeocJ9143Fzgav/1jtfAq/4gG0UYonRusDrut28w+fXOHFojnVHDI1mFpxU5/HcE801Hz/dQj3N8CsN8bCkHrYRMjK+88bTezsXVLQPbhJD5LsOWNeyKcUTxUujIypruH7tlA+Ptzm/GglBNTgpN7kWM7lZTDOLsasFVKKkms8yeZ9NzKxMsANLUxmaHJILRXrhuwYDwMcGUzpmewnJOCBJEA7PcPL75Xilu9o/c7JzdGiV0aLYDEWOcopoBn7bY6pgb+AV0dTSDAzxwpGcR+IHlCtsrlBOE08NZqq7rk7XXp4F79GezmG6/ZnZoaHcjnGzmPkoDhB8GGsGm4I2L83FML4uifStB1criXeVIZtqOWgX4f2Vhvks46gYcF7mVFXEwZMhPrfo2oREdxm1zS/JQRDNwvNayuevG0927BlXEaYnSJ2aG8bzjLrv6T0FexFzvpYLelULotD0wM0U6ZHhbL2PmWniMaQXjnjmusLeRxrVOKLC0eSa7MRzfXTK7dNN3GpNc5GKCuk05qLKaLPxqGV0W60oslO/LPrDSE95iGYNZh7hUk98KGO5eB4Kv6A4NFkDKuHx0zVevf6Uu0cb2NzR9AxNrkim8llFF1GXLxlPvMQ9LUL+pPWAw5SOZBrUhrWjWDXUi5j/4u2vMVhZUMwSzLkYWqLge48vA1CfZmRWxom1T0ksS6+eypMfW0E6kxDtpGUMmkyDMu4wC8WVKLZ0LagYsebinQ24WtAc55i5Jiokwy+eyYbdvR6kUPMKornDzGIuZjk+oUNQ4vEyekjXnpU7immV0TtyJOcVGEVyHuMiRXIh5ppRGZCNKGTchf0pnigOzwfU04R+KYVK48X5W1vFh092WN2cUtYR9nHcrckEsE8TlIdyLeK7H10X0cZpzIkaSPFUGJILGdnH50JliWchgqilylSGv/ydn0FFjrW1qUTqxB5nPCrYwDQZpGcKvCi6kzHMx6mowkPhkZ5Z0jONslqEQ4lMbsZlRn2zwI1jmtyQTDzxWHfTk+os492VXXkOKhHmeC8Fmmo03nruPd5ExfKMvvl0n6qMieaKk7MBuZMawEUSbdZYaQ7Tc0Gkh/c9h6+u8GG2Q1NGnYhn2YT65Xlj22bKfgp9/p2uH28BpSTQsOlHZEdQbTbsrE45rEZLaXIAEXwl0S7xRA7p+Y4YtlVDhSlFHdZCt6YMHUsuKIJehGDTGVSbEXZFBSm176BgXXs6FVZ4bf+dq93YlQLniOaWyYMVsiMtniqIwiWe2JCHJwXDYkuRHQf5ZFvBlmZ52CmxyU8eJxJKGnuSAn79/RcxeUNybDCloukrLtbyJR/EKbJjLXb6pSiuZAOUgikdO/qPDPGsCRuZyNfXPnBML2sWO575ruQaZXGMKZZS2vZ+JhMpAHTgq4gcVBGP4fRoBax0zemZIpl4krHcl2jWkD3pEc1hNk1JehVuGmNCx+USsAOLqjT1wGMPc5SH3lQRz0J2YIhEiKcIehI2l9Yg0VQ+KNwUbz3bI84amic9Jouh/L1C8v5kXciDoGrXAltSJIdiGa2IFlaClY0mmlnimUFlNW6e03sa0+QxyVgKPGfk59tUMb3REI8NybmEVrqeC9wU2Zh07YnHcPtii/V8zpuP97H3B5h21ND66ZQygtUT4bYUZJipJpoqmqOUqFI8GYykC4sdzsh4Lzv1S+7bc5l+PtKsvT9lfLPf
PXCCLKqueGr9aWiEj6WaMOp1YexirXCgrOuKTnExl8Nr9JEKKJeljQkSVZAnP/KMb8oIVWThimQiTtLdSMX7zm9FVVZen2tfgzjtx8FleVEmKCc+Q5JLJ5C8I3yg7fO/VcBF3nHbTOlRc4NZiIKofxBQoFpjC8Pds3VmkwxOUnpPNeWGJpnIfa0GMN9TrH4oe0Qb+aRrKcZ16ekfwNksRhuR76tasZhkZFO5N+lRxNHKkLQSw0RTyB5V9zTJBZSnKb1zeX7iuQ/jLY8pGrxWEsdSS4ZePVBoPGdnIkH3WhqSZKyZ1Bm6Vti+J8obmkpTTw3JWMkh0HbkYSxhcwnmNjMJQ1ZeEHij6HLVer2S2WrGy9eeMS4z8V9bq2jKjKYv6LbyElbc7m1RKebAUUCyOzR00ZBMDE3g7eUnltX1KddXT3nn0T79lYLpPMJlsmeP8pLziz6qZ3GpKCD9ZoM/NeKRlQcvvbnFOpkQmFLeazx3RFPhHCWnkssXzcUioTPSPIip1xsYxww+MV3zGc8gmTm8FoTOlNB/5jqKhK4syUVCWUWonuzvLe+pNVc1lWNyQ1OvN4yvx+gmlcKrlobDVJ5kFigXWtN7Vsv+5DzYwCc6zsM4lM4AV1eKuifn/PT9tRAfRLfOpciT5qGwA9i2RAeGaK6oq0xk/iFDNJ5IXJEpXYe+dFep2b9xzJODVdZ7C87P+6hS9tNkEqYslSI99QG1CUXY2BBNdaf8jic1+Yn8fBDAQtfw+GiV5OOc5FzWQXZag0q6exCfGbZenXGW9ygrw+wsF7CgDmhcrXGjGlUpmh5ExmEnMcaCP0mDxYzweKO5k2iexsmzbz2z3Qh3mPEhO+LXVajuTNGNfH7xTHWImHIeVXlU8sOHdD/eEZ6C5MhQDzzlhkKVQhZb25jgUie5QYEgGA+F5daG3CoPxaajXFMiq8y1jFcszPeCZftcBQnlsgPDIxb3/Sokl7cExCW68ikpOCwJo84vN3fv0bXFZ67bOFwcIjUiJZtE5PFrwYE7xKPYDOHspMEYtKfCWE06kXgiH4Hy8DOvfcSNnRPqq6V46iio5jFaO5rTjPRMk5zLmEGCT8PmaPnU5WKFqi3mfI6uHNVAMfl8iQ0p8E1fNqMuG66tIYN8s63MO+m4Fwh9f+8UvZDOwsV0vC8fKXTjiMdyz6O0YWNlhsrFCbv1vFGl7vxBzEJh5qqLPRAn29abhU7yLjwaOnRBOcB4irmYx5hSEV8Y4nODWYSDOoye6EwgWZI5IyHqNj3d2fd/6t45RbVhKbY8xSWRQ9d9hc0UVV+8RtSgoV5rOpO49NCEUM7g3bOQVPUnJyPe//Ay1UGPZrMmWsjr6gzsak+TC8yfniriYSCDKQLHTg60uowwFxHJhSIZ+1D4urC5h5GJB1WHUOmo/f5hjBkt13dr06FcQAqskzDixso6b+xyzYOMs8PltRQXLRrQhGDreCIBslHhaXoyvq5XPOUaVEMd5MztIlPd89Y9c44O6Y3mCttodLXkmEQzhcvbzKsl8uaMJzvxpFm95Fu0I/9gVhnPl9EYPpaRxHpvwUv7h/jYM7vREI9VZ8KHgv6jYJ1gPr1fOLNsvvK1BfGFolqzuJ4TWTSyIcczcGXwaIoci13H+AYUa8HgtVLP8aLknijvqVYT6mFMPGuoB4beYQUKvv3rr4CH4bspg4dyuEdzmFYJulbko0L89LLWeT6MUIeKYtNTrupun3Opp1mxxNNlPIrcGyFiT54NSc41B5Mhjx+vs7E6xTzMiObL6JouNaJaHsAqTAa69RWeXxdJ7qRNBSEdf7TGm/euECcN04MBZq6JzzV2vWYyy1gZLmAqqG/r8dMMxKS1yVQgVmtsprvft3uWqR1tNp8JIpq6rzq/NAAzaMgfRSx2XcjgpPNDQsleVQ9gdkkz3xYLDB8Lf9YWkTigl/LvXEiI8FrQ5Hgiz64LJr7RosHGKpDZ24mFvC6bmS50vB1lm7lGl4rs3HVr
sdyQUevm6pSNN45Iv3DGYktEOfVAmpVo7rs9ND0QWw+xppH0BG1lr65GwfAzrOvOTiXsqev5nKuXTnlyvoK/SIjPdShEBMUptiRGTDikUhy1HmCy5hSEpttUUmi1HmRuHhFPZMIUzR3RpCI/brp7EE0VHzzZoTrqoRZChdEzQzQxEiME8CyV6JjUs5inwl8r5T1G82C4HDy02rGcrj3x1JKdeMzuHFsZETy09JMyeA6G6UYLRtCON6Pl/vc7XT/WAsqrYKR5HoIenWJRxTTWdLwe+YtBmVQK+qTrsKHtSphmk6lu8SwuNeRPxaOi2mlAL80b5R16Yu0YZKLSEqKl8HDw/9ILdEgn3h4uPow0Qlq9qi06bzofnHY23+Tirq5C8WYKRbElf2ZTz2hl3vGlmiwYkqXSxbWJ4MoKifLjB9vow0Rm9xbMSUw9S4KaKCBnAe6f7mmqoZENItKUQ81iy1OMDM0gwa3k4MWMVJ/FYGT8EwWUpMmeu0/Wo2op8mwmhV498MG9XXhWz05XcLmQQfMj6ah0LYe4aqSrUR7qi5QbK6co47rPo9xpGF4dg/adCZ5wxGSB22Dh35puLtY0NlWMrxkxSgxog4tAZw2DYYG/0+/ysFp1Z9OT+yOoWnhvRtEMYppeRLUSUawZ5puGci3uPnofySFanmeYmXSSoysXEp/TD4qvUXCd/jgjexxLfl3fUt8oJJ9poCiHmiaPsCn8zM073HzxGX/ip36bK/snlHs1zVpDNSI0AkJub4ai9tLaL0cyWuB7AG0k8VzI8YpqqChXTOB3PXeIOc/xF3oU65pqpKhWoMm0xLi0fktGdQ3Bp5CnlkDuXDfOBnCJ6ZArmyqiqazfpi+jgGixHO9oK3l+xZanulZSblqKDWkWmly+l/DvDC4N/x3LaKH1gQKoFzGqVuIrBp9SynTFU+Kp1x3FumJ21JORfiHPtLag8qZDCbxWmNJC5lDas57NSIwNBZWQhVtDWJsoyjXFxS3PYlsaDZuJI3jr7m9TifCxX5pgViv0oO7Uay2nzeQNPgbdbzq1bj0MCQi9ZaC0ZHJCPYhEvdjXNJkhWjhOXs1oenD5S08wsaPY8J05pK7EbwovESVaO1QsvLx2r62GUhDYRPzgypHBpfJMVpckv7AJP99nDpt49q8fUw8dL6wf8x/85D+gagz1Tk21binXED++vsam0qh4FZTFkXz+XQC19/io9bKS13z6SsLnv3aPJK9ZHPcgdiSnujMxvbF1KkaO+yGou++4tn2KHzbyHA7EULkcGRkTJ0tvKBXQZRUI+fFc+DhNj84HzF1bEH2cs7hRwaWCpr98puQL3KChDkIn5UJIe6xpBp5sWAYnbM/0RclS69ThPghyjiPh+JzXRGcLidHJg1lnLk2DjcXXsBrFwS9Litc2hSOZOFwaCpPcUa3C8dmQg/vrTO+OSE+lQNKh0TWBh5U/k4JevKMExW1FNS6WvXF2taHuCQm+yWT91bkiGZX84DvXefDhDr20Ri9017DpRvYrm/ru+QIo1hVure54WiKS0tQ9HVz15WfXK4rB1oz5bjtJCmu4Xk5P0OKonz0V5JF4SeB3AenNb47RCzmLXrv8VIr+dAmWuEhG5XVPvBiV81JkB6DB1kbSL1pNTfN8nUDnBYWX/c7FWjjGP+T6sSNQfr2i6YeA0K2SVzYOmVwIdPkpJKWN7QgVu3IwGBTUQ0kVL0eilEiPImbXmy6tXRy//XOzTclKs04HUzm67kFXvht1qfDwq8ZJ6Kn7l6AJpYSvcZCSnHsxdIwJRpoamynJdjpPwocZUJVY3MZ1aruqvckUdlWciuOZ2DkoB+++cw19HtN/JJL9aB6CbLUnOjdEM0jHvuuCvJZAx7ovBNC6H0YcCSE7Sg6rwWNHPNHsvXBEteop1xTFug4HWwgeDnJ+mwoxtulDs2KDUhGqVc/m6lTQlkIeSAgKvfb+BNKg7td8fL6JazTJWeAkzbWQRrXvEKzWydqU
TsiwTjaxJpPN2SaKxY7HjoIxXiKbdZrXzOdpt/jbg1V5cHk7KgroWiQ5cPVAUw8jqoGW978hQcitw7aLFS6F/uYcU0L+MObi4YjsVNZh0wsFce4pNy22J8rAfE3QjHo1ZAqOFE1POlaN589f/g1Oqz4PH2+IdDaW9yrRA4pyRXfuz3la4zeq4PDu8JklDfw9XT5nCjcUrosIGYJZY20p11OyM0+5DuWqp15x1AMp8CWQ24dsujDGfK6Iep4nRdjUvVG41MjzYH2H0tQDFYKEBYGa7kcdQuMjT7Pa0BuW+J4NIzA6BKo1d7WppulLIeVDcLGyjvJq2RXe/bTCG0+xbUUyHgczQi2fs14rSX/6mGhYU607qhVYbGrqntgF6Abmu+GQ9Z4okwild59c4p2PrhCfRqy+E3fvK5kKKjF5sSF/JlllNpfiudx0VEMpCMoVWTNae5GKh1uXjIPprJG8Rhecz7NLM/JXzyk3HU3fo0KQqc0Ih4wcwPmzkuy4plyNukOl2LJU1tCcJ4IahM/JlBKg7GWpMT/P8ZUJKJG8Hpt53KhhsQ3FqmGxoentzFCpw5zEnWO/8oDxUtghxqB/YOND7hebeK/ory0EGY59eA5V5x4uz85SkCNrQO5PkxnJTewF2sCp43LvnDf2nkDsoJLCxBtIV0oWTcxGfy7jVa/Qg5pJmbK6LqkUdV8oHHU/oJehIW4LCR/QctWE0VoVXNuNAutxjcYlnpvXDzF3c6Lpcw7hEbKHZxYfeyY3HOMXPdXIYFOhHawP5XXMdxWDnWnX7MnnAuPPV2SvnTPbU8y3E+wwo1rx1Ku2c+934d7FC088azorEqGfIA34wkqjnUAUUg/+zOe+w/WXDnj9K/eY3myYXXEstr00KJmgXMWWJzsIz1J7/jnwJuxZfUc0EVSt3BBrjTqX+7nSL4Qbd6H58vZDXOpCYa9YbClRwq5XTG5aqr6mHBqKbcfm1gR/ddHlqdYrEdWKNHk2lQak7oFSnuxECRK0sHLWJLrbg2zmuXnlSAragZOg6edMot3AMp+lJDcm1JsNj8cr3YhR12LVYTNFM/DdmQihmEo1p68rVldn0LSolRTZpnDdme8NMuZOTDijnlPj/y7Xj7eA+uz67Prs+uz67Prs+uz67PpX4Pqxj/DUaUK9IhEtrtZ8YeVR5+uDki7AJYo4r7usNhcJcpTGDdVe3ZmkuQiavhOiaQ/MsKbNwWplicoqqipiXsaBcC6d3fMdFEj369vxhqaD+TruSDAZbBUabRCozcUYsu7BcLCQuftAFAbOSDX9wugEV4ssvJs/B3mzhMIKL2j75gnR3pxqNSBkdeBtWEU8FdIpLLkt6YX8vkUDfCTGcdKRiJrHa7i4oVn56hGxsdKVpuH+ZS1K9hwfJcy0m56H1AVVlbS5x2dDqlHglgRCpE2DsVsY8zW5KNNe33gqIZNJULztz7m6fga1iAOE4Pp8pAfCY9GeZtVSrklnXm82xIMKGxPGBPIym3GC2yu6SINO2v6c10prEth9GYIXFCEuQThBwv0SqPcb+/epVh31wKNG1aey/Jq+IEdf/OJdRp8/oRl6sqTm46fbmLmQhXUl6j5TwbceX+M/ffDT/PL3X0NNIvKHEThR6EVzOnm5TX2XZ9jds8xB4vjy9kN8LerL1qfEZuLN0nK8AGwWUQ+0SOBXHM3QYYdW0NoVg83E+6uLTtEqKO+eez7Nc9tBsDRoMoktUVb4NdMXxLOtGmiqvmKxbkgvJGBVRBOKZLXk1uYhUa8RxKUNlE1059/TojZeBd5EY6FuoNL4YK4aa9f9Pd2vxXyy5cwbj1aeVzcO+caNu9j1mnLDMd+REZxniSwU64LGmsiiFpp6mhCdRURTIYdnp8KdVFY+3+TEEE/okFKbeuzQyr4Ty0jv2toZ5SMhdrtKJP02UZ2RXxI1uEiQKK09kbG4XLrq3rAMsvzW7FF4Iy6RljuZWqaXIgZPLNFMc/ydHdDQe6LIT4RD
Ygokjd5DUcSyPipNMpHxvChxIcoE7XKB/1mWMcPVOebqLHCTBEGXh8jz5Mk60djw1x98jb/10Rf54s5j5k8GRBeBnB6QHZf4jjdiE+E5KbscebXWF/UgjEBX5B7949uv8p23XkSnlmgsZON4JmbDu/0xW/mU164+pRlIyPqttUN2hhNYrajWhAPbZCoQ4+niZVysupB02bfp9mm8oN5+FrH75WfcfbDdhdJ6E/bPwFXK8go3qkkuNNFUU+etB5UXr6++IHGzJ8PuTAM6v6CtwQybtQKAiGbgGe5OaPpO7FvCyG+2o7txEVrhAo9I2XYULvf4+s4J3sDjYpXPrT7j5ZWDzlus3ceUkz02OVMUl+zS26jd0q2Q0Xt7UwavnDHfVdRDh81VxxlsrOa1zz/AvzDn6WIUxmMEBaUg8L4y3fnqDbjVmq3+lF6vlDMjU1R9TdMPyFwUxEM9z2yakZ59OkPQBW5Ze/8en44wBZIB2ugwgpQ/N/2aVy8/oypjdN7w0vrx0oQ1ktF23Rfkvu4LUtyeR+31c3sfY/pN4MIGT8aAmPoo0Ckyhc2WvOQuYPh3uX7PBZRSyiilvq+U+nvh9+tKqX+qlLodfl370d8EcKKSsLmHi5jDaih8lkBYbiHRehEvjeCcQMexsfRGi0DsC69+ryA+l4T69dUphJyg1tiLRtHUEWUZi+nlfHlTdO2CTF5msXRFV1ClOYeqGyHZWoeqGqqN5Zwxnqku6NSlnl5aoXcKql0ZKboEiByxtiFsVj5wm4pxmK6C79VEHuZRWlCdiw+L8jLXTo8NybNYUtIDYbEddZoymGAWDl3JQ1VvSRhlsWYoNzK09RR7lqNnIx4crJMdabJjyE7CPPs5jxxV285QzqZeMonC5tSsNgLDHoccIyV8hGhuO2KwrsUPysw1v/30Kijf5UVVZxmJsahGdTlP7aElH4ZHlYEInMjPtWG0oY17zoTOiw9Tr4GjFBxBvSd+NGYunk7tZ6obhykt0cJhiqDWmQcidgG6alBVE1QYiHlj5kheGvOnP/9d5pecFI2JFJU+9tw53aRqDM1uJf5NFzGmkM/QBLWWctBLaz55uMnu9RP++E9/h8XNirhfUY08xaZnsaFpciUwdAXnh8PA/VNi8phaYiUFtMskf7FJlWwYKUsjTej4SEdfjLE9J2KHEFpb9+WQ7MKUn3fXf/5X/dx2ELgsLg3eUFrGxOlhJOOF4I/lNSTjNvRXRo1NbTgre0IGr1XwdpPPxJQOU4RfS/l/QDdGNFNDdCZk3aIR1/14LMaVrYUHTtYLyvMbH73AejJHjZdeUCCEXxdBfth6eXmcFe+zfLXAbot78cWLInf3Glzg1GQnivM3Gmb7rgvubUUVbWGu8Vx65VDy3GzwTkMMXJUTN3IfC9l1PhGDX9rA7FCsiHI4HN6ZZnw9YXwtFVVr6Tl5TRSO1a6QC+PZ0vDWVF7ub6moJynmPBIvskCmNaWo9ZrzRNa5lbHNi7tHjI/71ItY9qJMxh1tE3vz6iG275iVCX/p9W8SKSd5hEMnRrw9GaN546Uo1ypk0SmqFROaU92NkLocQ+D8Jc2/9eqbIvQ4SmmGFlMIb6WqIr4yesC4yhiXGS71bI6mvHWwz5PxCr40naeTcsJzkmfNd2kILW9VtyT3OgTfVh5dWcyo4mKRkd1PKDcd9VCoAs+fgklkoTAsrlfC41tXVEONmWtGaREIywo1qpZJFsiYPH2ccPfuDoOH0DuypKclLhGT3Na3STz7xMuo2IwlDDzWtLl3nUApAiLP6TynWnV8dL7F33/zDf7mr32d3r2Y3lNNfqjIj0Q5nUyksR3eMaI8bPmAtaiYXeIpiphhVsq9qhW68iRT+beTWcYnZ3KE3z7cAtOavAp9w6417F4+xezOpSGJhfPrvKIo4yBICI1pKJxa4Y6LpDCtB8K9EvFN2LcC/1J5aB72iRaBS1y2PoVylttFxFY2ZX11iisN33t4GVULgdwUinge
nP/bAHstogYXiuPsUHFvtoELOa8tX7fzBFRyv0wdsiGdQzVB9f1Drv8+CNRfBN5/7vf/PvBN7/1LwDfD73/4pSC9OkWcWBXR9oL3x7sigLAt4ROZ2y5MhzZ4o2hy6QCqMpYQ31gONf8so//6KSgYz7IOhWiJxQDeCdol7qMtWY/OubaL8zDLIM82TLiNtUAp6ZBDoec1qFoWpg2ITqwdn9t/iqq0LLrcoxJHbmqUcc/N28UpvR6EDTW4it9+9zL5o5jsWDb1eOaDm7InPZfDoe0slQtxAKEj1LWgRapV++Xi2t70DKP3DBvfitlYm3ZEu6anui6jQweCO7n40Xh03JJdAS/mgdWKF4QsFg8Z186xoUMW7MB1pnPxLLhh5w2PLkZCtpxJRxTNQ6FbS8HUqi16o0UgsiPhq6mciq3qAyDvlwwe6OWB1gajVnRE2m7ZNctuWeTiEE9Efi2kadClIx57Hs1WSQ4jikXCSd0XL6AgyfWJFH3TeyMmj1eI0gbnFFvXzgJqR0gFF7n4qxvP+PnX3+PffeGfEivLcGOGMYI4Nn3h8tXDdvOHla2pWBYoud95XvFgviaz/qnw4uJF2Gx6Ye3Y5zhLWlFsCiKrUkveL7GZHNKCjgTn/ST6lElsVzxpBVG0XO+BVOkSg49NZzNis0CADjETTU8vET0N3imuDM5QShoGvAq+V897pIXX74IdgzEQGfx2iQ77QN0YsIp65LDFv2QJHDt21yYwjfj1pzdJTzTZiZBrozmohcHF4aCtZE+xVra7/bUL9nbP5PsOLeWqPNN1Lh5d49dq4tUC23MdkZ/g46atGJUeLfq8vHpIGtfC3ctFKWZj+TyrKsIlnihuuLZ3wo21U0kj0AGdigX9bQIPqu6LijGeO2a7Cem5FO7VpiVdKUmOIuGiDKRT1g0S9u1AVbrjrETFp4nmZMEiJKQFrCQF168dSeDuwFOsw2JDo2OHShyvjg6INhZ8Y+8+ma5582iftb0LfOS68PTWEFgFawwXC0Ha1J66b0SZ3AWzymGqnDx3Q1Nw7YVD2V8qTbElopLrm6d8+/waf27/W6xnc3zsKOqIP3DlNj93+WP6m3PqVbknNgsE5Wip8mu5dT4KQegB0TVdg+jZWR9TvLNKuWMZ3Tql2gmfQ7xsTJXymFEFjXBaXYjqiuaKq4MzbALzS47PXXkq5sAJAT33VDcK4mFFsaGY7hmK7QwSx6JMgjmw7I8ugeSCzjNOXp+g6NIkq6BIbsVLij9z5bv8z3/iN/jJr3zI/GrD/JJjtueZX/KUo6UicXbZEU/oLHR0I4afPvEM+wUPn6zL2giRZq3hMl7hf2uV6L0+WytTWe9eeFjZkSI6inn2bJWmjEJBCr40WK+5vHEe1JGC4Ahf1HcImIs9w7wUztamcG7FsJfOuNdFYC7PA8dRjEVbcriuFcmg4rceXePqyhmv3nzC+socn4TpjQlNfr48hyRGJpDYe5rpC5bCxvimJZXTqbPlSwrKeGyFowVo+5xH3e9y/Z4KKKXUZeCPAn/luf/9S8BfC//914B/40d/I1GMzK9JCvbG6pT93gWTk37nqtqiL+34qB1PxVPFtEhxVnUHrcuk6jw/71OvOa5tnHVdYhuX0f5c/1x6NX4pRfVqOb5ysRBa2y9YfsDhRpA9ExNJgYzbqltUdUY7EtOg55roxpRmaNlYn5LrCh8yzlq/q35WdSTxph8QsQYW+wL9t/5WTd/jcjGry87tc/5BsNiSQ6w14PMRnUNwG2tR98TE8PSLjq9uP2Sx31BsCNEY1UL4MvKzeYwzUsi0ad3tGCRaqSgeDkXhMoXs3IXRW7g34WDUDWRPItayBc6K8SMa1tan/OErH4KhU8bYrO3CAzTsZSyqlIw2W3WLDi7PbS6R0jA/6kvB+BwaJ862IfvLtSgiUuCFord9vaYMLrOtEzfysO71L4QUO475J++9JmajVbjHmdhYRHtz4s0FzUlGXUUS1VPJPW/Xr4vh
zsUmu+mYv/r4p/mvv/NV5ndGVGUw5nxONNHC3dN7I+pJIt+rVmRxw1Y6xceSWajDmtaVwvbdsthXCptFzLc0qx8qfCQRJcNcnKpt2OTbcanEKBi81vg4wicxPo1l3WcJnSO5ax2L5XU646lencsI2UDTl8Ls7CUhpjZZ8GwxnsLGuCISn56gkOv8n1pPqtY2w/nuORuN5thE7kfjtKg21ytBQ0PWpDeQ9GpeXj1kcHnMyfEwKKJCB5zIoeM1jF+QDbLciHGVFLYHkwFPj0cA9D6JqNZc17w1PU+2VqA/HJCciYVGK3FvR8Om8mzlM37lw1v0EzmhdblUhOkaqkWMjzx5WnP/zg4/eLKLKyTSJoubYLPQ2p3Iv+sd1PQOK8qRYr5lWLkLutCoHwylG28929q4CResBeaa5MyQnopCrB0j6gbU1HTj4uQcfnC4CwgS5UPMkjcKrTxxXvPbh1epzzPeOtnjf//WL3BtdCqImZc1KyM8BHUL+Wc2jLnLFQPeYxNJdVCh2PRa7DqyY89/+fFX+OTxBj51xBcioIinMKsT/uzut/gbz77Gl1YfojLL5zafMWtS3jy5LIaKIa6ntWlwgQ7RjoVdJM9Cm/soizb8kkacTvrYnue1zz3g9OGqyOUd3djfVLCSlWjtQqPsO/WfKeGoGNBs1/jU8f7j3W6fBUFqXrv6lF946QcUry04f9lz+oqEyFmrOrRDW7o9MSpcWPvig9UiRt5IwaNyyxe3nqAb+Pb4Gpfic/LOBjwodsP5hoL0FOKxph7yqWgV1chY+tLKmJ979SOKLSfPVyD320TSMmwi92BvcIGyYkXiNUyvO/JXznn1+lN8YeTve1ClZl7HfHK4HoQwwc4gavfa8BnFnvNpTnqqyA+DUlPJGYqWEZpLHTurE+oBKL0ECUwpn0+9iPkjN37ADw52+fhgk1mZSFbhrPXfk/ekC4kGez65oskU/b0Jf+rSd4RWMBA0tVzT1AOxxPDRkiKBDvujWtpy/G7X7xWB+j8B/95yOQKw471/ChB+3f6d/qFS6t9RSn1HKfUdO5syPhGTv2LLcTru87nBYyBwcpLnuCB5HSSyhHGCdJC9QRmUBUDiWL1xhj4UQlPjA8pUy4cpGW3tgxRUTEHubkqWhyxLZAP93FfbiUdGDp4sfs4QEBlDhdfoMsdGNuPJdITLHHkqHi6r+YJ7sw1xhw0PktcQGYvNpFtOz+QB3Xz1mHR9Qb0SOFDBfyM9MEFGKp0XEAw8g4R1YWkdps2wFo7KSPgp2sLZlxu2b5zwg7NdorEhuRCzNjrUT3VSXyAYV0oILISiKK+ILs2JpmJBUfWlgzClk+JNy0LOT4Tn8dHdS2yuTaTwMXD6dMQ/f/oieiH8H23l0GndizvVpJJRbXouhY3SYvPfbpoAtjDEZ4bpS3UX9mpKcfk1FZ25KDbI8UNe3PMjrw7dDLwJH0nH843RHZrtir2bx6T9inLVdxlnOrWYfs1/+MW/z5+49SZ6vURpx8n9NVElts2klvX05MEG/69vf4Pbv3UNMzbkBxpXiyoxHiuiBZRrYuthCrFEaO+BKiSQNjd1Nx7qMptsWHtdEKl4WrngIJ+tF5jIMkoLvAkIQboM8fVtXFH3JfdAtVYG3Zd0yYRCBwTJtbnvsivnOyqoMoXr0gwcdhpxUeZdsDMszQx9yNjrNqZW5aI1PjKcnQy6bLKqimQkdJIwHC3A+DC6E87MWZXzF279Gjs759S7FYtdKx45q+2pBvWKpVr1sl4LKcgmx33MowyzENfueCyjMED86O4MZATigl9ZocQ7pmi7ds9OPkbHTvIcAx8yngZ0uwS/iCDklG1dOePLVx+iU0vT88zLBNVrnot2kuda9hJN/8BRrYj6KQoIbrlpyY+CiWEoalXkgo8aXXOm7DJ7zxSeaKqXjYaWaKCDiyEfP90GI2uqQ2uMY3cwQZWaJ/c3ubQ25t/d/8dUTUSyVlDt1JSbjsWWFMXVUGNzE4pp
goefNHVthFI88d3mOrkOOysT4oMEMzG4xJOdSEOVxzW3yx1eXXnG3/nkdXytuXuxyVmV88b6E7QWlWBbcLaHtFch3kvzKc+etkhvx3k2NdRVxJe/fps7R5ukR5FkGrYFmZHPeyUtaKqI69cPeePyY5qBpxrJ/WuC5DE+M7jD7LnnXdRsHx9u8uHFDuowJT2TcGW10GjdxpaEJIXEM9vzLDYimlwa9Xiiu3MJJ8WRiRy11zR9x/efXeb/8Nu/wK/+6hv078bkTw39x5rBAzpD1nI9FFHTJeLXmm3qWvHRkx0iJaa/bR5eu185qxj8xDHTWxXvHe6SbM9pep5yU8agk2dDHpytEY9KORs96IVmWqShuOZTtgAdeIHsVeV5RnLhScdt0ac61SQIePLw7lbXiLa8LVMGMOQk5vZ0mz968z1u7R4xPRiI0j3suy3/0MwVUUGXxtEicdPDPhObh2Y3mDaH3Mw2yudT+1O3t/JDrx9ZQCmlfhE49N5/90f93d/p8t7/P7z3X/Xef9Ws9FCxI38sVuq2MVw0PdJhKQ7lPS9mmhHYRncESxcrim3L6mDOjfVT6hUx09KxZTLNeeFLj8DD3U+2BbJfCByuG6lk/fPcCe8xtRQgXis5ZIO8WzyfwvsO8/TWH0fVDaq2VBvCRZFFJF2ZqYDY8cWVR5xM+vjYc3HRAweLOkYrjw9we9tBnU16na9RNJMP/HyaUz/pS/xAJYdOeroMu6xzLcRjLX+29nFNelp/6mB9Ze9AxmwjkduWKxpVaI5/sMnBxTD4QIXQzxAW3KITurKdM69ZKFxlusKmLCOq87SDTKPCEY8b6mEU7B/kYKsHWgKFN+aywCrZSHevnHJyPljy00rJVPLdLJwOwQGY7wlpuTcI+VihKPFaQa2ptyRNvoOq69B1hVHe8jPkU99fN77jTujGd+hiizQkqmH/0hlf2XzIz1y7S7NVU4/kBnurybKa2htS3aCVxzkJlm5/RjsiBsQz7DTG7pV8+ffdZnbVYlLb2V+0kS4u2DPgFF1G00JR1hH3Z+vhDbSmbz6E4/pOOu4jsdhIJp7TNzxJ3NDLKqrAbG2R0s6+oCPahvGrdfJVN8L5C+vea7W09/CgnCJ+mAqiFTbKpufpHbmO0KprRb6xYCObLTeBwEdB8anoFmU9qnquJzMGSiO2Ho2iDiny8SQ48ddqGTQN3D3boK9LfmHvfeKsER+x2HdBurbnyJ9EwXWbIFIAXCCZB95Wfui7GAiMxBaVn19QXaqliA8+ba593wo+Hm/x5z7328TGQm6DzD5s4qWX8NbIsTqQ5+BoMcBV4nHTOI2JA68vEKFdDCefSzl/McFUTiKtXp9QbViaz09l5NXIeK5FUX3YI3UpPMrs+DlOGfI8NH05SAHSM8dZkfPnX/4t3rjyCLVSUexaFjvh81CeX9p+E7Vesnn5nD935Vu8XV7BuZAl6tSy2FqojmTrAqnZZmKr0oZpK+fDPiz8vGrD8m/tfZ/slfOA3AsXMJrDq6Nn/IOnn2doCn52/w4q8kTa8fW1e2wkU4a9QsjFxn8K+YHnmqFQ9Mt45rnMyXA4ZnnFdjqletgXYn0ixV3b1CrvSXTD3vY5D56t897TS6QnwvmMFrCVTVEzcTDfunXM81e5rqgWMR99uMfwjmbw0DN4aommmqaOlpl5YU9yMcy3FfVA/Nza4rx7X8ajtONwMQSv+P2X7/DSlQOSF8fUQzkn2/OxtXDID2WkZ4olD05X4rVnM4+dxPzyR7eIx4IUx/OwBwKuNkTG0l9fMD3tsb9+seRjxZCuL5gd9uFuX/ZaK6NXD6yNZp3x7JJSIYIfb+S9YKUgsTHLoHuF7EXBKDu6MJhKXkuLnpnKE83kWX7v4SU+ma+zEhes7E6CtQadFxlOJhCqCfmL7fQ9Bazi/3n369ipGI2m55CfSIqGKaz4h4VnR3nf3dP/jlfkv3T9XhConwL+uFLqPvBfAn9QKfWfAwdKqUvy4KlL
wOGP+kZKwc7WhdjtjzW39g74rdPr9LJK4Png8YICf5506hEXCWlxqzcj0Q12rcH1LFFisccpjdfotYq4V3dkdAl0VJ2/kNyY8DpC6KqLxaHWRVr8aCKNy6IOkeiChOOINt7Cx65TbLRE72gunfFecsbqYE66vsDNYtCwmi1k9uraubZs8L2sEq6Glw4MD/qdIdmhFvOydtzRSOr74IkjP2nk4QjeJ/G0kUiN1mlawcvDg86oz1Qwu6RYf1sTTzWX188p1z3VGpQbgZfS92KamStsT3xlTBlMKMMcGg/rK3Pik0hcs89kE1tsx+IxFKr19mDrP1IkUcNkkXYjwJ3elC9ceYRNZdO0Pd8dxE225LfoWgoH5aSzjI2VUU5Yrd4AsWO4OSN9Gi2DZsOmKu7zHmc0Pgmf63NRJuL91fLGPLaXiDotlvv3nckNDs+GnFZ9Ym2hFNWgLhS+UeRJzX985/fzdx98nnoR4xqNvjzvgj87/ykNr199ws4bB/x7X/3HfHHlEfmlKVqLD1Q9FCNTUTPJQ7vyQSQk6hkdEfnh+Spe+64bUz5wW1pULvhceSPeK/0bF1R1xCiXU9NHwkdoeuLq7tI2TNrgskj4UHmCTyOII3wUxnhxy4WS0bZLDF57qnVR+QjKKfEOTSaO1zb1uMyzGGe89WxP3kuL2rQ5U0E15XVQUlkno/NUXke6HkQiWjK8cFCtWWaLBNWE9ejCWFd5/sr9n2bcZNRjQTXiiQ6RDArfb+SePZcK740Xr6+bU5q+Z/yi60YFdV/hE0d9a86tvQN6a4vA5VgicPLa4OHRGuMmY9HEwh/y4hnV5s6ZSuKjrNMcPVzj8ekItRB/Ge9lZNISmMWZWnVmi9PdiHTiKC5SelsznNNkB1Hnb+MC+qemQraPZ5JLpmtE2QRB1AHpqSjzlJeop6NHq5w2fV4fPcFE4jVmM482jsg4Pip28U5xY/WEf3j8ef7K3Z9iazjDVho9FSJ322C1pr4uRfhUG0tj0NYLytTyeSkHvUcRb06uspIJuTo5C2kSjee06vOn97/DX7/9FR7NV/GFITaWv/PoC/zte29w9HRE8iyW93MhiIuphMfY7lfKi5KxnS6IQ3lAIhLNrc1D/undl0HD9hsHpFem2NRT9wSxt6liFBdU1rC+NiNJxMDThRHrejyDlQa/W3B5eL5sgiIpMm5dORDUcVOQ2cWGOF63/L02vFfGbZKjhxflYrsGo1DsYjxae15flRHeeZ3zh3be50uXHktRPZTxYiuQ8Fqes5U7spZEoCH3PSrk5yVrBRvrUxFwhfXcciPxcP6bO5QfrTBYn3M668k6UqCvzLi2ccbWlTPq3brzsDOVKNyPj4bdaFtXy2a1FSMRe1SvYXLdM72ssYkWY+NUUQ8iXB5hc4e/uviUQpew1+naE00M37h5j9snW/z2J9cYn/SJZpr0XN5fPHdSjz3nJekjIe43mWLv5jFFFYMRpW09FFPYcjWiGkU0fRMCoGU/tXnUjV1/2PUjCyjv/X/gvb/svb8O/Bngl733fxb4u8CfD3/tzwN/50d9L60d11bOKK5XNLnn5uCExxcjZosUVchBFU1VCP9U3cHoFURjw9G8z/FigIqFD5EkDTsvHXP3o12UCpEOjk91KMoGLDmM3jpoMfx5a/zWwXbJcqTjjZEiquVBGU1yGAn5zwgnRAiaMmqa2Jw/tv8u1zdPIXKQWnayCfdO1rtN2MXLxdtkkFz4rpASroaToMywETW9ALMbhSnkz5KJGH+aeYOubDBjk83s9nQbM9NdUjgIV2rrG0/547tvk94cU+zXzC5bfCpy/WooyppyNe4gWFMKRNsaZuZxTX2pEkXdSNQUzqiQ7SQPYTJzxFNx0b34eI0bG6dyn1LH2/f3eevh5VCAKVQtXV3HQfJiwojyrPQKohCLMEgrQfD0ckSgjMhiu0OtHYUY2YxbC36bGlp+gA4uxV3MQBGCbY2SAzwQGZ8sVmiOMr735DL/
8J3Pkx1GxBdyIFNp8rimqCOm80xGVB7yrA5oRosohc/Oa768+ZDfuHiB//Sdn2TxWMJCvRZys26AVSmkdQXTq88RjoCmkagjXegur8ssgnJ0rsPYRxCJemiY3HQsPlhFKc9ObyKhu6rdyFrit6h+XGKkwEwMticcKJdF+CyVpkHrzmC2delFwa2XnwQDRjk8orkcFk3fyWgvt2xuj1npFdi+pcnlntR9TZPrpRoQAt8g8CCMFGlXNs7FXDGWPQAF2dZCXNoTccu2mRTWN1ZPeXK4yn/79EX5fo2oMVt+43BtTrkuB4lNFaqWz2t/dCF8yZ6YJlYrotSqh2B6DTvrY97/aJ/5Ub9TpfrYdw2CcoJm/L+/9xVOZz3UQUp2oOk/Do2Pl1ECCibzjK987i7/01tvEm0U1CNLVUaYyNEEknqL6gweefpPHYsdMXnt305YPBmQvdUjOSdknvmOy2YWingiBURLhG4bUBdJcZEFoKTN1svWC/7Jg1f4b+69QVMbzFlEcqFJkoY0bvi7H7+OPk749u3rfO/tF7i5esJXNh6wtjGFrZJmraEcyWdfrmiqUdQpVFtbgM4uxKgO4ah7MqL6Zz94hScfbaFL+X12rCjXFalueHd2mT98/UPee3YJIsfdp5ts5jN+7vLHYDz10HVjO+VC8VS5btIg1I6gumvo7hNA09NULqI+zvnCl+/wJy6/ya2tI5qdisV+w2JHTGeHccG8THh5/ZArq+fYoZPQ8Ajeu7hEPixwi4jv3bvaRbB4I8XrNzbu8eILz7CvTZndrJleDeeMbZF+1e0TNvdLsYuWfx8thOsq3EwpoDZjEf78xkcv8Jff+Wl+87deIX8SkZ5o+k8U+fGykCw25b3mx072yaD607UnPfVU5yl7g/Hy/PPL8S1eRn+9p4pra2dcXPQk9WO7wj7p8fE7lzn9wSZUultzykJTRxKA3iZLVL7jMruglNexxTeBuvAcMN1xMrWCTFznbRLuaSCCt+snOVd8eLrNn7jxJj9z845MO9Ys8x3fqc5t6rvnsz0rUHKGfmXzIT95+T6m12AHlsW2o9hQwfRT3NNtprG57FMuaQPQ+aHX75UD9Ttd/xHwh5VSt4E/HH7/2fXZ9dn12fXZ9dn12fXZ9a/89d+rgPLe/6r3/hfDf5947/817/1L4dfTH/XvjfI8nKySrxQ0WzV3pxtsDmbUx7n4UtQiL5fZ7JLXgpJRx8lFn0mZiMkWkCc1r6wdsn/zGFtrpie9LhOtnWPGk9CCe6mK2/+vwhe6nZs/TyKjM/hrZd4+MrieZNTJ3wMfyJnKQdqrubA5f3r0XaZVSr5aoAMJMIksNLrjYNjMM570JElbt1Au6C9coC4VlCNC5y78p/5DqZTLtUgktQtBUZp+LOMbTUeOvn24FSpwgZLjGfDVC/613Q95c3KFqoxRpe5CjFFt96qWNhJexpKmVN0c2DqNPo8lnsLJZ5ROLNVQd3JiZWGxacTkcaukceLPpRrFjf1j3EG25ClVKpiFLoNmXSLI3ryKqS5LpMluf0xjdcdtAPCFwTzKhE9VBdJueJ1CGvXdR94iUgS+Rmcu5wMi1aIgSvgwibH4xPPV/QfCd2iVk9qjnGI1W3B97Yy1oeQb+sowebASjDwDkTJ447x3+zJ/7wev85v/7ecw9zOG9wxqbvCRFxPH0IG60HW1HjPa+o4juJKVRAshmevGExVBYlurbuwoAaYSuLtyV2T6u9m4G33q1l/md+im2nXfRRdpJFA4iCjaDrBNhp/VCc3QBk6OpLsXn1vIt0g9FJr1fM75tCdGiSFAOVq4wINSgVPlA2Igaj+CiOHZeNgZxSorqGRdReyvX2B6jRCJe47VvOCV4QFKe44froJf3v/2mVRA+soFLmlRJlE7PTof8fGTLVSl6T0QewZTCIcnzSqefLIhoazj1ntIib9Ym17vYTUvoFHMi4R4LMqf9ELUfCKugChpuLx+zknR583zy9STBDPXNJVh
2Cvwicf2HDZTS/+cWPbA8U0JSY/Cc1pseXrHjni2NFFUni4QNz9y9A/scm8KY5kmD3YoYbQRxw1/4aV/wc/s36U/LLAjSz2U8YfRDufE08qcxvQuTfnf7P1j3h/vEhlHlNjOW8wbERI0uZJ8PaDc8NQrdCi2jKZkv2h6MLvRSNyVEt5WvbIk+b7QO+KXP3mJxhl+9trHoGG0Mmcrm3Je5+JTFbhCLTLQjoOFj9OSxmWcasKUoA25rfqaRRPzwitP+L9c/9t85+IaR4s+USocNheEEQNT8rNX7vCdR1f48NEO8ZkWbyILB5MhiycD4tMIvzC0UTKtNcyd+SaN09TzJIziA62hUYH/JJ+Z7TnKyzWLbbGlaMetumr5cIKyZ0nNr5++gPLQHy2oJ7LWWsQ6uVj6FEULR/+xoxqpjsbyvELQZor0IOKtj6+IkW+x5Gu2fk1NT+7XvZN1RqO5qN1LQ//mBYPrF7jckT2NOjS6tX2IV6pu3bVCrTabDg8mtpjTmPRM0TsIlJOwP5vKdSjd/acbHQ+sC7VHvlc99Jx8ssbb431Zx3mNaqc/QQzVviapGWQq0go13ju/xHoyw84jCbCeSs5ftPDdedpaqohJtutMln/Y9T8EgfrvfRntqJqIxZnYg16UGS+PDkWOqcMoJoxqVC1uqRCIwjsl26tTKUa8/L1+UvGdp1f4pctvkQ/kFG19jVr7A10jJPJayMsqcFV07bsDtuNkBFgTrcJcOYz2THvAR7J5PPfhgizE1cGcWFk0cDrtsb92AcC8SeinssB0UE/ZRMaZ0VwWbJtr5r3CHmVy4AVCcnoqB30yDkS7hQ8bvuPiRkqxnXabpbKipGmGLhywIUzTK/7G7S/zy2+/SvRRj/59w8odZAFWy3GKaQ/ntnitw+HrRZ0SzZQETF54mlQx2YuWM3SlQh4aZEee/rDgwemaeBJZxetrT0guz7pDoh3FdD5AIAVRqZgcDdAXUqxVNpKir5W0AmYm8H95uQoQNcsCK2ysnxoTwZIIbZfESdUJCFQ3Dnx5cMD+9WNe6R+g1ipx7A0uwVhFohv+t1f+PjdHJ2SbC1QqM56m70IuouQ/+cijCk38SYopoVq3oTgNB1Qla1hp3ylYhg/oVFe6UdjKMKsSIewGXkH3flribCh447kjOTYUm4pLvQv20zMhXqs2KZ2u2GrVjK0STldWeCq1lVyolvunFbpZCit0DY8frQcTPnlN5XaDmwtnzRtPslHw+GJEcZwLF2khayiaWXQlpqZLPlLgpnkfOIae6WmPaKaW8nLjcecJa+kcF1R9qlbkUc0PxrskaSNy5nMTuE/yjOtSMZ1m8txuVcIzBHSlWDwekHycYyaa7NSTnskG6o0niSx6bqjWHc2K7YqxeLpU8QIcnA95/bUHNJXp1ECLTd1xUZSDLKu5OTzm2fkKd4420bPgs2ZEWaoLLftdJGT8ui9+UPmxI1ooLr9yQL1mWby+eM7qxH/KjqMJbtJiZKm7XL1WrNKqUr2G7NQyuz/ipfQZ/4vNf8FGf85wa4rbrNHK01jD5y49xfYd8dUZf/KF7/Mr09e4d7zB0eEK9VEOZwnJuagDszNHPHOQONyoEbftXutK346PgyiihP72jJf3D9AbpYy9Kih2HPEY7i02+akr9/jHt1/lreN9zElMWUd884OX+Rc/uEX8MKX/wNA79GRn4pela999tfeGVnkVmpDOgiOCjWzGX7/1X/JbxR4HiyGTIiVOGkxqhQuWenq6ItcV++sX6MDLbe0x+mkl7uQrlq+8dg+XO+q+7GPFruX+eINPPtwlv5MwuK8ZPnTBdkTLeC6YoPrYM1yfMb/SUK0KXeP55xJkX0hj8c4zC8XLm4f81Odv0/vSCYst8T9qcwHbZzkqPat3G1pVcvveW5FCuWU7Xmu7t3eUFqvgi2MmtxqKh0N+au9e4C85Lo8uMNoRbRQU+3UnelEOlHbUZ2knehI7Ij6VyRgFPyvZp1V3fj9f
5KlCE32SScNvl7mqygdFnYPVK+e8++wS//yDW/iHPdITQ+9AbDDSsRU6TXd2tYCIPAensx7/6JNXiU5joqkmO1Jkp45kKmtYOGOhIK99xyv7H8yB+h/72h9eCIepEmfXhY2JVircoMFmQuizuSIZKxjVYfEqtHZ8fes+G/kcncgmPExKmsbwz49vkSU1ptd0yosWieg+pM7GYDknbSvpljPTxbYEKWMrsfaxmPyhECfyFsUIJD1Te3pxzRfyB3y73CdLavKoRhnPRjrjfJ6LEsEjLtvh31dDWeitQaT9cMjgge5k/u33Xmwr0oklntoQnaADMTKQAEPxpxt4ceMYH0txlp0oyjWP+t4K1f0B8UlEckGQPkvR6tJA6M4ULtWd+ZrIseUAbAnN1U5DPYTFtmK2p2gGoetu1RoG0okUJLP7IzaHM0HDeo6/9+HnKeex+DRN5e+bQrgRppSusTU+Nf1aCgknPCJnw8HZSk0VVHuVIEBtAdV2pGrZUQH4KBQBLDueNrqlVesQuALKeqZNyi9dfotr6TEvXjrCD6TjVqFALWxM4UVZaYwQmYmeK2o8XVeX7Mxphp61rx3ySz/xPSYvNfiBbHAubMz9YdHxbM5vBU+wsGZ9YTh+ttIZWLaqrfZnNHlwGI8EPTMFZN84pnIRQ11QNFFAp2QTadPrn+ceuGBB0aGt3guxvA1ZNq1HSniPifj3KLd8rb17sTRBxtPLKr586SHZ5kKCVAfy8+thhEt116y0vLnWxb6LUFKhwbEIl8KBzyyH8yE+dI9mIUq2Ty7W+APXb+O2KlwmpNqmF9SAjVguLO6sMBgtwnMdNuSZFvRPyZpLWmm1V/TSimhvzp/8qd/ic688pNqyVGtSGLRWInVP0Tzp0YsqXBGhrRRuTU9166gtcC/qnD9163v86zffw69XEiJuFRfTTHhSDsRuwXPxRs3Za57Flia5CAreYU2SNqQnwnmznYEkHWrR5CGwNw7Fdzh4lIP0LCTSawm5dqnjP7z9S/zHB3+Qg4shk2dD9EmM84rpIqVyEXi4un7Gbxzf5C+//dNUZYSaRsTnmuRcRC5RAVE4eJSR1IJoJgpc/1xsD0r2G117Zkc9nFekaY3rSZOnKzGTPSl7lM7wtWufcHw+wBvP/LBPPijZ3Bl3aIYgVr4Lsm73b5sGxCExHZouC1gQKlN5/uD6B4x0xj84fQPnFbGxovANIiMXgUPx9vk+X9+4z6WNCxFg5NL0DpKS5lLFcG/C11Y/keI+FRVitLHg+sqJFCJ6WbgBnYlmh/ADk2dDzFzivsQUdLn223+bGstGX6xj3n16iXkjRaWCzuurXW82VUz2DeXQkI5dpypui4B44iF1DC9NOqSo82VzoGqNMU5sChxsJRN8v2FzZ8wHP7jC9L11zAd94tNI0iZCsWQbQ3xhuv2pswoK55GLxX5IWVmrXon4qWt0Q6Gtq2AZ4BAlcvPcngdEU0VRxXx1/wHX9o/FW6wn/F0XQZPpTymthSvYnvPwwvox42dDEafoYMPSGgXHywimtvBysaZ1S/9h14+1gHJe8a9vv8P29gXxqOTr6/e5P1kXj6FeIyTigECZEoF7kRtQjxN20wv2e+ekuegne1HFv/niWzw4X6WsY8m68u0HGW5EePBaKbJ8QzozsLaTU1bImZ3zqF52x6q2Ivlu/3mL+LglalVbw0vxCd+d3WAlK5lUKb1eycu9A4yWLt4riHs1LvbU06TrOuIZoSNT3eJuYcxyReNSTzk0iD8ISHq16dLfxUxR3vfV/ikqcVRrvoPSlQ1d+F5JtSJqoXqgZCy1WtEMJKepXDHdYQ6ibjCVbB7jKoVGSc5aLhtofuCX9gLBDbZcUVy8LLlfWi3N3txJSnI/C52JR9ulSKAtOJQTQ9I4ttRXSjkklMdb3VkVKOdxuWO4Nid/GEsXZZcbT+uqLuaiUiBgQ9ZdtFQotcn2PgooQPCI+u7JVd4aX+GjYpc0alBzyetqUZhF
E/N/fPTzfO/JZeZjQTiynZkQmMOmpGt5QP/gjdv80Z/+Ln/phX9GT1dkGwuiXEwUbSYFUFVFlKtiStl6NnUFeqmh0V1H+eniJxi4BpVqsaqZX2n46s5DnFf0dCkIlAeUdOWtegZYFkXPHTqdKq/NFNPhHppQyGj4w6+8Tz0KnffAYqZakNXMQeywXjGtU4pp8qnPpV2HHUHUtS7WYrrYRseY4KTfPrNYxfWrR6IKavez8GtsHLd6z3jt2lP8WkW95rroEB/eQjRXNI0Rr6qo3Ry9ENW1oL9tQeKNkO//0uvf5E+sfps3Ro/pbc3w6xX1mqVck1FgPZDP6rfeeRFqKVzjmSc5b7veMFoArvVOmduEo2og1INK42cRTSWGqrqQz9f1rSTQJ46L1xqqIZy9vwGHKfb2gPzQc/ESTC+ZDnVUVuxafCSNTDpx3TMvSJigFK1CrRxqVvfGGOV582iPYpoQnxlRwzlNUxve++gy0VRz59kWd968jJ3E/NnXf5ve/pRqS1CmaqRoMihXDU1PS66n8oLCpq6TlXd0gIUc8Hpm+PD2HvOjfljrnvyZ5Ji+MjzgN+7dZDub8Puu38cNLPFawUubx6xkBc2K7dRu3ecVEC4Xq66RlPX63KGzZCrwtfwe/7ujL/JotsrTsxWM9pSHPdTTjPgwlr0Jz6ujZ/za4QscjQdyboQ96ul4hVtXn1HXEb9ydAu10ETzQOaPLV8f3SPbn7LYayg2xeG9NTp2oclxRkHkMFNNehL2nrh94Jf7vneK2mku9S7wMZQnOW999wX4zoj0WH5mtAiO4AHxi2diXTPfNEtfQ7V8HqLDmPksW1rGqPbc8Hjlad5ahU96JFdmfDjdAa8oGwP9hnqrlkSM0xbxWr5Od7UIliwy8m8LJdlnPN4pmrWGxQ4stnRYGy1lpDU/Vfiri85zD5bnQt1XVKue5uMhx8WA68NT3HYllhah2WrP9XYS0f4MU0E1cvyhjffJNhbYjZpmII1dNQwNSciCdXGYPAUCemeP8EOuH2sB5b3iVvKMV9cPyPOKS8k54yIliRqa0oTZpDxwLgJvVTd/xiseFuvspmOJZPCKSDl+fuVd3th+KnY2jZFioCc3vclCgeNUd8DhxXDRpkrmnKHKNHOBP1TtZIzhPKqUT0bNy3BjNfGpoeXaeL3Mp5sUKRbJ26md5nTWI4sbChfz8qY4PPjIE8UWl8lpHM3D+K6RjqrctExvNsJ/Cou+WhUJ6vySYrEdY1ONqTzViu6UZa1tg2o8x+UAnVjqyyXTmw3RTDF7qeIXXvsBt64cUFytKNdEcoyW+bSPRUFVjlQwnWs7BR+6PkUvrkkPI5JzTX4kVgbaesk2ihRNJvLqVgly84UDHh2tyRhhorn2yjPK3Xo5b8881YrvuAqimpH/LiYpfiGu0edFjg/mkyaMFckss2nWzb9bVKY103OxCodzWyzo7n7a1HQPFyDeX1bei4skf+237l/nv/74i7xz5zLpkRiPtnwK6zR3jjdYnOao8xjvFSu9oiv85ZvKIX1e5byUH/L3Tr7Af/X+lymf9WjK59ZP5KnmSefZsva+Ih4vR0XRXBGtVLKx2yVi2o4j21GNKaXIXb9yzjc/fpkr+RmJsvSTitbHCAJPKqwTCAit9bTO495oXB6H9e8+hfopK0jjarwQZLjvUI1w6XSFqE6BVzYPeffxHuYkFlO7oLoxlUNXDpsEXlbl0KV9zs5ACqqt9TF1QLakAFN8bvWZNCGp7Qw8vRc+2q8evyz5hZVBlbJfRIswEl2dU40kO7Ed83oDXCqpL4snxPjlRjh74aB5aXjEz/Q+5i8f/hzfPr3GYiprEeWJChVigcDnjt4nUWeR4CKJ2WlHEtHC44EXskP+mw++wK99+BLmNCY5NWLKGQX7hJDJiIfso4yVjyJ6WzOKlwtMBb2nElFTrktERosoaCtjP5sIimYT4dPYgPK50Jw0gzCSsJBMHZNpzp++8h3+zWtvk/Yr
6q1aImPKCGcV8WmEmSvUwwyc4mufv8PPD9/hxY1jeptz7JokGdRDT7GuKYeaXr9ke3NMtFlA5rC5SMfFfkZQ9CZXsC0zeDPTRGNDE6Ks0jMoXcTu+ph/dOdVTsuexBdFjrc/2efeB5ckE/Q5xbIcnJ9GnL2SdQUEU9iAvmopsN4r9/gbP/gKdw82+erlh1wfnXafV6uwrL3h68O7HJwPKc4zWuNWgPFpn6NZn+I848OP9ommWigNYygWCS8lz/hjL7zL6t6YctNSrhGMP1vPpvAs9BrsakM9EsS0yZbNn20NbwvD+SznwWSdcs3R257JHhdSKrpGV9GZK/efWfJDudfP+yDaWFFsKKKFIrqThWSFsHaea3DiCeTPFJfXz3nvaBcaxWKecnXvhMHGHHdtwWzfdWee8qC0p9cPlinPjyBb/lKpJMs0lQBsXQfbhgBUtN5LXkkySTtqb+PNQJ7ZZmSpdyrev73Pr779CuokITnTZEeeaOZJpnbJv7LLvT+agVtpuJkc8pNX7tFbKXDDJkRvLYujlkqyLOh8Z7L6w64fOwfqn09fIVKOXlrx7fENnNOSFXQeE00V8VgCDl0CrjKdp066vuB7x1f4cLpDNUtobQn+q9Of4N/c/B69tBYlf27xxgceCsFxNdyooBI3RZifV5aosGjrMIsa1Th02QT40XWmgqpuoG6E1F0oWofbJSEZrFdMXEzjNJMiZTZPaazm9nwb51Xnc+WcQvUsOpfQX2UDGqTghdeeMNidhvDa5YMSTz3RlOA0HaDTJDwk1TK2JCrg3aNdjHHioRP4NV+69QmX0zOejldEctoso0ec059eBe2GYpfkU209W/mUcqtBV/Kgz/YU032NqZZmpy4WmLb3yHA8Fcd5XUsRO0xKBtszXCKblUtEFt6EDkCHmAbVyA3t3Y/F/2SRCQwfYiO09dDo4CRNF+C7NNoMb6Mljj9HDISltLUd4+lKXNzbkNYsamimMYtpSvI4ligKJa+3XcNlKd4O6ZFsdIf3Nrp0dzm8BXn71sc3+L++/XP85r/4HMl7PYZ3DGoeoStpFEDIofFE3ISLjWV3ZwpxqN9YnS6l9EaKIGURy4+W/NiE4E8F6bs9vj68g0VTWxMWUTvWDkTbOjQIzbJwaZFWABonzYOm20jk38GvH9zElwZVK8xUbna5BpSy04zigv3Nc/xuKXyYpB1XyuHejnZk829HXuHQq60YzDrZQM1CQaN482RfshUXQuqOZjJ++cLaYz483ObO4SZqoTuBh/BO4NraGaPr59LND2RtuMRzeeuM1bVQ2aWO+W5ABjSsRzP+k+Of5Zsfvsydh9uok4ToLEIFJ3Lx6vHorKHp+Y4L5mLx4YrKluzvsVZzp9hmdWVONigFYffgUoetdXBZDsh47LqR3OLRkKu7p11wed2H2c2a3lNNeu6Jw+du+05yDjMoNhR1D0HytDy/zsgIrx1RxDOHepjx62cvkupa/FRDx17PYlxpqNeaYMXhWX/tmF/cfIv/5ODneDgeUcwTqHUwD17miq71FqxlMiZVOvi7xa1xogrPO7x+5QnErYGl3P75JU889fzqkxf5hb0fsDpY8P69PXSpKJ70UacJ2BARFA7HFm3vOHztyFQJn0+aJRX4jst94V9cvExTRPz0zTv8qa3f5kb/hPzSFPYKym0xQy1dxD87e42vXH7I6vYkbPDtl2f8/oaMmBYhRqQU415Xa/7G8deZNBkX5z0prsby3PlWXm/lOUrTmls3nkkY+YrrDnJp+uXeqUKzmGQ8eLaOsvDTV+7ylZ+4zdpPPaNcC+kCvSXnV3mY7huiwpOfuA6BlAmF2CYUO83SVNa3e0m74UPLo314sipnVma5un3KJ3e3KT8YoR7mHfLVjeu0Z3ZvhC6XFApUEDqVwh10TqHOYvpPPL1DR9PXnf1K+/NNAQcP16QpLnUXQdNOlOIzw+7uOcPtKWZiyA4Dud8HIYWjs+7piPEB/aTR/K3Tr3I1
P6UsZA3LSDGc496zjACS4tuUYY/8/yUOlMLza8cv8K0n1yiqmG89uUY/rVhcZFKYhApb/IWAWnehsi9uHzOvYt58vC9EuMgxrjN+5f5L/MbkRYo6GFpUOpi8tSMf2YxVW/AEBUc8l87bLBoZT0RaDtPSIo7N4c4pJd44YZzn4tYszHcPtSk8Rnnu15tc7p2zmhcY4yiqmJOyzwdHAof62NM0RkIqCyN+NrNApvZwPO0zezIU48ZakIX8SCrl/CTwDbzkSgmp2hFPW95XMLecp/TzkuxJTPYkplr1nBZ9/vqdrzD7eET/k4j0FAaPHarSaO0wffExMhUd8bS9okLu18F8SLxWUm44FrueclN4S9mZlUPcCnk0P5KHe/7RKr1+IXCuhnd+cJXFxyN5GGZewqQXKjjHu8C9Etg/6dXioFy2OPCnuTtqoUlPlUQsNL7jMC2J41IomZCJ9ztd7fxdBX4UAQre618AYGLXPR02k/uiLETKMRrOUantOEBmqjuHZPHzCoXBWYK+3SM7FPPB5MKjQwGgSxlHJFFDG9A6u1mLP0yzLALmVSzPQBbm9ak8/G1YdrRwgecFx49HAPR1ycylLOoYFYw0u2fQt+qoJcyuywbVOMy0RM8DalVWSwFFeE+6gWcnI5KjiHii8ZGXg+eluXiPJY53Ti7J7XSBSO8IjvMI4hn4bi4x2MxIgVbbjg9VPe0TLeR+RTNB4Z7c3qJuDPFYXMrjqYx2b2XPKKYJ1VkmhYyXbh4va2zRxPzRa+9hYsdiV0wjXW6ZVQkXFz2UVcQHMdWqCzwQmLuEf/Txq/h5hLchaaBUErUROInKIUa5t2afGvE0WQjtDWihc5q3zvb5xu49rm+cdigoCvwi6opoNOjIdR3v6EPNJw836W/PaPoQf/kMnTdd0UHIEvORoGFNLkWIRHqETLAQVJte+IDABB+uvuM3332Rv/qDn6Q4zsk+SciOFHoaQaO5+cIBTd+x9coxf+Lqm/z6+CW+9cl1To9WcLMYVQniYhZS9KdjS2wsR7OBIA2R60ZRbTHX8htfGBzxtVv3yPemNCMpDl3mKFcVZydDvnnwMn/88jvs7Z2KIrdnGd08Y3DtgmpNXNirgerQwiYP4xe13B/a+ykoNJ1KTddwd7LBrWvP+Is7/4xfn96SiK1QGBFyAc+bHr/9+CqRtsSREJNVQLp7K4UEIu9OWH3xlGboafqCGpnU8isfvMw/fOfz9N/JGN6D0b1G1Hf1c0HwHprGcHVwxtW9E9xKE8K5ZT+QyYY0nswifKNJTzXfP7pMZmpMO5bXdB59phJz5nJVcXFTlI/tGdHyiJILhRo0ZDcnct6ExrjlCBE76qGoapuHfdZ6C/qjgn5ckT2N0JWi90wxvC8/rx3Pei8u+G1z165v3QjvSldQX6QSlxSQbxsrXKJCeoEJ+7GifzcmmRCMd/kUFSc9URzc3eTF9WNGt04pQyJI3RfRjkuWquZ2zXkje5yqFd+8/Qr/5OkruONUHM+LIORpo9xMW1AtG0tCE/7Drh+R9PI/7mW9ZlolzD9ZwQ0aUPDCiyc8rjY7NKd1HU9PPNMX5FC2OazEBXrkefdsD5VZvFNMqpRRf8E/vPca5SLGRA49Fjv41kytK6BCWO3ztgXeKHxAYFwaocsGIuFjtMaZAD6NUEWNanxHUEVDEhoUUzlqa/j+/Bo/vfIRh+WQaZkwW6QMoiAdC4RYV8mCiQ9jUeEp1SVzL95aY3iqQsyM/L/s3DG7ZGRjNPL3vZKNoR4Ykekrlpb8Tgi2yVgekovXLA8+2CE7MuSljA1NGcwkS82gV1JbwyLPaHotfBPgcCf3cbGp+eTJBkxjlA6b55kmP/DBPC08hWFjW1ypUY1mepGTe6RQqgz9x1IMi7pQU61APBUUUDWCqjSZp57HqFVH9sxQlrEEqradgBdbhHrgqW8WJJOcNvepe2jCvVNNGM/hOrdzZdtuI4y0EkMredW156XBIfOXE24fbzLf
SILxI7TE7Wmd8Bde/DX++qOv8SheQzWaZlXy+aK5oskD6talwEuXON9zDO/J2upy1xLP6dMRWU8O3/xhLFyFwlEP5e+MDwZkAVEwYRQjBoKCmLQu1i6C7EnM/GrDh8WerEvtBKnVvouNEXNWja4dPtaAg1oaCKxDKYVLIlQsET1NakTFE0xJRyszxibDWI8dWrK1gmKaEDWKrFditOP+3W3iMyEj67LtvKXYNfMG24s6hCJaWNCSBqC878JBdRk6z4msndlmq/SRr7KJ+Obpq5jTGJxs4soKAtgip/cPNtjKpmR5xbQXyx4TO44PVoiOYhTQe6YoNluisWZuE5pSst+ytGZ2NgrIRvtxKnws4ax+pDrFUYcGBpoAwW18PZ3z609vcH7eF5uKmQJtqFctuhLShq5FlVf3pQBMzyC/lzDzEKce4zT6SSYIWCjQLKAq2SuihXTPkkPXkn7k7xarWvIptRgG9vbHGO2YzVPQkozgo2AymgmX0xvY7M34laNbfPRgFz83RGMjVgeFIjn3NH1FPHPEU4t1mtV8QWQs0yKlIe/sAzrla+357slVXlg5ZntlyqMyxi0yoommWvWM1mbcv7vNt5KSF0bHPLPbDDdnKOWZz2SUFi0EbRDrAGkovJLXb4P5sEsMLe1ANW5p3zB3zOuYv3jzl/nl2av886cvyn24yFCTCFNLhuq0Sbixccq37t+gnibEHhEJVKAjy/nlmjc2DxlXGWf9NSFfF8KBKoqI+DAmmchrNYWo8KJp+Pe1x2pFNU341pNrlGWEKo2g+nHIRQ281mimsY0ivTRFVQknH23wa3qDeKzoHQhvLD33JBP5GTZRDB47ppc1i01N/5nrlGR4keyr44S5h2HVqjNb5ZmcjflXTpjOU8ydvoiglOfjo02KqxUqdthnqQQCn7YCEHCNotxp0HVMk4cGIgAX6cRRD42MhQvIzkX8ZQo5P3UdxvkBgRYENqQuFLLPRQvf5Wzmjw1vrVymN5DoNzcJ1iIBHGktU6Sh8d2IUzUK1ygO3tuG1KO86hTdHT8z/JuWttBmi3ac6N/l+rEiUJ9dn12fXZ9dn12fXZ9dn13/Klw/dhWeUTLGUAuDMp5JnULshIAbuBYAyVRg1baSfDBZ4+bgmChtBCYuDNZpfvHyuxgjFaxzupN4Ps+J8dp3XlMukmrbpoKYNP0IZ55T2MWtdE1GCoJ4OMkGc9J12zAmiBYCobtIUZUR53WPDTNl3iT0kpoosuSm5tJoDE46HF9pfLD2jxaSySNZVWJX38pddeiyWiO6aiDRKU2uiErxrji/aZjuJ9hcbOi9hrxXMT3vBXhTutR4rElPEQQsxA60ar9BWpFEDbbvqIahuwtkx3Y02BL74jNNeqbpHcioTjdif4BrU8YFRibybFw7g0lM0/foEqrLFXMBRgRyDbJdm2rarKzW90PNI/RCRlX1Il5KYwOnAQ3lfo2rjCi2eC59O3S+unIBbQxoT8gIa9dAC/m7POpUeQBH1ZB/e+/X+f1X77B//Zj42gy3FgKblYgFDI61dE6WCyyhgpFg60PVQvB+o6Lcq1FfvuALX73Dxeca7EAk8U0fXOKIT6KAnHiGn/iO4NmSlfXcCMoRuG54uQdtXEkLU9d9hYs833j9Nh/Od5jYLCxjjwqeYMI9avlqwb4gqBNdpPFZjI+NjKr7cmO9UR13yRn43OYzmo2Gas2hEocxjvRBis08SdQIFyYorDqZdTsuDM9UO2oVNWzgRBkhskfzZQisbkJgqfHUk0SQ5KAgmtUJ37pzA7NYcs9MDdmpdOUosGcpv/nui8TGomcGVWuU9qKsXEigsGogPwh8jAbGTUZvWPK/fv1X+IVr7+N3SiFab7hOfFGsC5qs7vakk26WYyIfPTdKAq70zjh7sEb0IAu8IUFxMD5I7+X+6MjR+9IJ5osXTF6QNZI9EA+w5p0RvSeqMxhu7Vl0LSiAKcRaZLElSIwOvERdeepBQAUC/81azR+7
/i4//9IHDHemwdBRkEqVWR7e2yKaKj56us2d375K8iCRXLYwUo7HdOHmysp7VcpzuX+OdZqN/lzufbJUQMtzrnh4sMYv/+AV7j/alJghLwhjNFPsjy4YbM945/Zlvvf0CmahGJ/1OLu3hrmXiX3CLAQjB3J4G2sj9g6EX/XS90yrLk4kWjiuDM8pfMzff/Z5xvOMRRWj5hHRtOUzKXJT8/vW72EbjQpRVl4LMjEe57x8/SmH8yGfnKyDEy5Yi45nw5Km54KyE2xLtSjaPSiorOeG2YMV9Ed90iNDeiHrJ54t5fftul7tLagHHp94skMRFJiFPBfxXBCalu+UnjtW7jk5m+ZLv7X2/ifnmvyDrAtbN6XrOI44xSgvSNOGJvc8PFtletajmKaYvMHXWgxAV2UPEtV62HuHtRDlk2X2YWtcbBZenunwrNtsma/arslW4VwPCXYssjbadWsTqAee+bUa/TSjek9oBLoKSvGSbqrQGiR3+3HYZ6JEgp3VqOooDa2foygHXVDht+tGSySXWj7Lv9P1Yy+gXlo9EhKhho3NCY9OVsP8uSUCq47l322+2vPkYJWBKennFW4RoQtNHtW8kT/g5c1DCcZ0YtDYSUZDNhmGLvPHpvKW60Dca0nFOC+hqZGm88NRSuwLGuFo+ACPd+qu8BqbnsZ7RW4q7ldbPLoYERtLZByTJsWET1I1SnxCEGPF1sOik14HR9t2gfkIqr54whTrmrovh41ZSIp0uS4buovE40U5SOMafZRQrXgWu47kzFCvOZq+OAUTxltR4VANVNbIZubCeKTwtAnxoo6S+zYYLag2bSDMSvr4YrO9H1K8NZncj/gwxjolWWNaDpbB6oLqekk9lHFVPZRDT+bXQkZv/y6DWvLyHFJwhvm0aW0MEkd/bUH6MOk+h0+RIRXQZixppEDu1lKAnlsXb5CiJDii/9rjGzysNrBeoZWnnCVS0NVB1uo0//mjr3PndJPZJMOXhiirZcMr5IEWIjHc2D/m57/wLn/qxe+zl48Z7EzFyXkgShYiTz2ycvhXnvm26vxJ2gBelznZlMLbkz9bhiK3YxKXQLVf8/LggMYZpjZFBQsI3W7yLVwd1EvK+o4DpqzvMiHVour4Zq3Dc5uJd171iPo1PnEwjZhPUuHyrFbExnEwHbK5O8YObTAE9Z+SlTc9041YTemkoFLLYGGxw3hu01BQ7dadEKTjEQGcx1Q7oqhps+Ba0z0bgy4Uvfsx8yIhP9TohaLNXOxc7YOzvw3O3pWL+J+9+B1+of8+N9IjNtanDDdmqFFFvWpFgdYXUm5yJoWpM2Jt0PHUwgGulKhi1aiSANiQe2cTD1aI7U0mhZQxjo3+nEFWkl2Z0Ky4kI8G2bE0VPNdRTkK9yHsk/VAhWJcRnumei6JXslz4xL5HKOFpzjKOa973Oo9I40sKgrWB05hEktyLKNo/6BHMpZR+R/58jtUuzXV+rKItJmEsVYrhkRbns5XOL3oM69j+dxbo8YwvrU5pHlN+jCh91EKz1J8KLLTM8/D81X+wJXb9NcXzA77so88E+f21vNKDuhlsHNLlG+pAy0puZWed41R2E/38gv+2sNv8ORsxOIk5/ysT3yuiSeKeCo8pdzUPCtH3Nw7JtpeiDK45a2dJ0yrlE/ubVE9kpFsPJHUhrqKuLZxSrwj/mfVUFSRbeMlry/s7So0inY5Am7J0k2maFLdNbmzMqHpe/ZeOKLYs8wvecp1WTtVX1GuasqRnAvjq1EQC4ALZ5V4vUG5KsrgllvUnpG+tZsAPvloVwq73YLFYQ81M/RHC9TDnPxeQv7EEE9Vt8a8Ah07motExm7Vc8V9sAUyFeDEQmW+rVms6+78FF873z3XxfWSetAW5svPrzWn3bp8jrtUoGsxdJYkD7pmvOUBd4rM1tNpaHlx94h65EgysUtq7RKk4FoW4860r813DeoPu36sHCijPPvZOW61gVKzM5hw/GSESu3SETvMHOu+6tjy
PgJfGk7rPr20Yjwx6FqRRg3/n9Mv8frKE75bXAOvMDakhRdiV9D5PIWHyMUqKMfk4G+7R5BuSldCIldVg3JOuCEtCmU98anpkq6r4ALcykivpqd8a/wCF+Meg7SisZoH4zUuZjmtIs6VEm9C6qhH4oNSrMuMer7r8IknmptOelyONPmpo+7J5hBPXfeas+NlxId4Z3mOL/okM9nY0SL7TfbHuEsaO0soqxzViBUCwMUsxzYaMzNkJ5Jy7tVS2aa8PHSxsazsTpguRt1B3HuqiWZLsrU3QpSuB4rx7TXi/TnpqfgBTR6s4FOHN16iawimf3XbDSGmn1aRD0q8yqRwCFdU0CnliKWTbuMRvJb4k3bxey3dTrQIMvmgqpBYFx1IkIpo1nT2FX6m8Tpmctrn//bez1LNE3ytGXwkp3lLJE/jmvsPtqDUqFpD6jDGQyGkdVPKZ2QWBucVu+mY+4sNfv3eTZqjDJ86orkoSLCKeL3ApgOqgWb6SkV8GAcnZWSuP6qAqONhtbwnsXXwHWqrK9i9dMY/evIqP71zl9JFpMbiS91ZIphAkoznriuedG1BKXRV47VGlzWqrNBFQtOLMYWV+zerwae88/5VMB4TnkFXJOhGoozyuOb+o03ivMZM5CCO5p4mkw0VWgK7R1v5PFq5uS4blPUUuw35owg04jFjYHN3zMUkpzk3KBcQJ+3wsedLL9/n3cd7VE2PqGiRRblPbUE+P8kZPRYPJ2sVKre4KBKydyDwt4hsoht+ceUt/s7kDd6cXGYyz7BW4WqNmQffnxKKHd8dgj54bDW5IvlYipgml7X43ukuX7j6iCfTEYd3N4inBpd6zEwaAxfk1E1tuPODPYmFeXmCzy3RIjiXW1jsyCbWzJcdse9ZmlxTjRyDTzTZqTRWKNn1baLIT8V4N57UKK9RjeLvv/t5NreucXI8JPkkxRRiieCVyO1bfki14nn9K/f4udH7/OrqixTTlCoy6CYS49BciveLMuPkfIBrNMcnQwZzSGZLZatI5g2r/QVnrIhQ5ExTDz2LXUtybpjdGfFkY8SffOH7/P30cxzX66Chd2mKtZrqzoBFKSrEaLFME9BWjHJdJHu5KV2HAvvAXdOVo+kbnhUrPHjnkhTNhRIvvTP5PE3pqUaKykX8yv2X+Mkr95iUKad3+10D4hPHkw+3yY90x4tLLoIichxz7aVTVpKC7z5+idKDctJY1v2Wpxq4OanH9x1NkSwjTwjNdKyIa9cVWufHA5JC8Y3teyw2HvNovsrbb96QPbPQlH5p9bPY9cwvCdG7HGmSqevOvabnqdct1RasvhMJHyvRy6JDe4a3pblZf/WcB6c7MKrZHk55VI0AhFfbtMKQgHxpR3QYdfZDLbqsaylulZcILFFvK0yXpOCDH6DsQ8rB+uaEQqediMmbEELthQR/dLjC9cvHPFDr1E8z8qPQtKTgT4MIKhROzigxTbYGIserK884fqnP+SRHOSVAy3ONWjuZEs6UNI0RS8Dld7t+vCo85flgskOUNehhzVkhDt2bm5MO6WhVPy4CMzECc2sYbk959/QSvbjuJO+N0/zy7Vti+lUadCLdfHLuyc8cySS4ZAfYf+lkvdyEzLxZeodYjy4b9LxCLSpUUYkHVGNRRYWuLMmFIA2doiCMu7xXjMyMN4/28U5xNs8pi4TJIqWcx2JPH9SAqtDikxMOXZfK91u/dYpeqyjXwkgmVnJIAr0jS3ZuMYXFRYp6xTC6b+kfOEGkFoIqmUcZ1aojmiqyA1H6/U+ufsAfuvYhcdpQrYqKbr5lQENTG+pCNsR4Lunm8dx18n+Qw+H08Srjs17nYZVcaJIL39lCqGYZnVCuO0yhqGeJ+LZYGN7XjH4QEY8V6YUlmgtkayqPLm1no29TT7FIcEnYyJUgCbr2mIXtIN/yoCdKj2nYUEt5GKV4UgG1CnlGtZNiqWiIZw1m4TCV/D9VNahFja6doBfaUz/pk91Oye/HsmHPZNTj
Yk8WS/GvGkV6JvmGxUmOqluJu8MUFlPC/bvb/M3bX+Kf//ZrmPf7YmNQhE27CGT4WUI9DKOWcSTIU+jeXQyDfkHdD4aPseoM3jo3YR+KwUKiOI7f3mYtmnNe55TWYKYmqMiC5UPl0aXr4Hu8F9uCokZZi1rI6akWFbp28lVaUech5OnsUUxyprtRZZOBMY5hUmJSC3f7pMd6OYZrjftAvl/VrheHWYgCUBfNc8rXAP/XUryPZxnDwaJDF5WTvUQPa5zX7K6P8UFt2KQy0sGLwgsN0YUhndhuj9ncGlNeqYIdh9x7F9ZMrBz/bPoa/9nt38ebB/sURznNSY6aRGRHmv5TT3Yib6ZaCWO7MBaXaCK6Z9Z7hXWag/mQi1kuLupFizxII+dD3JKrNfkTQ/5MUT4akK2UXRGw2PWUe7UoqdrxiPOYXtOZCIo4QGFzvTQxNHLA6CA0qXsKVmtYGE5vr8NFTH4A+aGXcSDgdwu5j6sNw9dO+amNj/kvnv4+UUofJkTnhvQU4qnQLOK543yaYyLL6upM3OKDSlmaQB9Uzp43Np5QrVvxRspEMICBYlte73dvX2fuEn5i+wHeeAb7Y4Z5IaLoODQLVuwYoufGUFHhuy8IKHUYC7d+SCi4f7Ee7EYU/soCFSJlWuNEPJxWPZra8JuPrnN0MpQR4zwg4IOG/FmgFvSlOY0XYiuhF5qHszW08p2auDXFdVlAB0tpflXsuH75GH+loF7xlKvyTDe56iwwuvGbVSTnirfP9lmJFiSmWSZF1IEMPpV9Lz0LlhbBi8+1WXCInQCpZW3vIgh5eE6yH5qYkATx7HxItFHQHxY8OlmlulyxuFozvSpu/HVf6CQ+EkTeZkGZGtMJmsTZW/YrU0F+5OkdObJTJwpB36LT0uDqChFaLJZJGe1UwCt5tgbvpzw+XiWKLXa9phoGW5vwTJhS7p0plukYykF0lPDexSVeWT+gnqQyHrTBvT4oOFvEriWTKyuClx+lwvuxk8jvnG7SzEWhc3Ayor+24JX1AwnTTMWaXYqm5WjCG89rWwc8PRmxqOPuVZ8VOW4e8a2Pb6AXWmbqUyk64qmVDkgBnTuvWvre2HYztyjr0O0mDqh5gWosWCmcaCzUTTdiACkUTOW7IFjJRtVcTCTnb3rWw1lFP6vY3hqLygU6x/HkXJEdyaYXzQLU6sFOozD6kIc2vfBUA9VtSKaWsUeTauKpJTuuiOcNyUWNrsUpNtmfheICip2GVDfcnW5SnuSiiKtVt/CUFj6ZrlXnjeR1kKSGUVe56YjODYP3U9LDiMEnmuEn4ju12DBSMFRyrxfbimh/jr1eYM4j8eIKxYCuQ6afC3Nr48URPshpdS3jFQ7TcNCETdPTuVoD4BT9B0Y8caatmkRGkq17rASNBt+jYIyqywZdiuovmlv5/20WGwLF68Si6qU0ve2iAFweCu1agwlqkkaRP4pkvGiXiJqykBxF8NYKg/uG9Fy4bu2mB4CG7GGCqcRFeOVOGCdMLG2oaG0NTd9R90NURxiJoJfFUOt5cny4QjxVXNicaZ0yWYg9iFnIhhTPHVEhm7gubWcO+y8rTXxkwDnMvAr+UxqXikIQR1eM2b7kBM6vNijl6UUVW2sTMWXdcN3h1Ma3AESFlTXcdqKNQxd1VzylR2ZpiFeF8cizHrFxHfqDk+Jkd/OC28ebWC+HYmtVosI4Wg9rbBLQhljWPVbx1e2HfOGFhzSrDfrKjGpVkBdnwKH4z+9+jelhn9k0E+uEC0Gf4onYdiQzh7KK5qX5sjBs5L60Stn2M742OuXpR1s0dwZyGC/kfdlM9iCfOJpM4lDa/WHwiaaYpCy2vVh1vDoVjlKzpA3o4JcWzSWaZn7JM70ikRY2kfcLUKwrQWUQpHy4smDt8gWub4Wrlsr3a8fZL18+oB45rlw/5icv3eet8RXe+vCqqMvOgpT9sSWeerLThngicSiX1sYY7YWOwXPPKu1noliNF7z02mOal+Y0
fY/NPPGZplxzqEsFJmv4ux+/zu3xFmahKRYJzz7ZwN0dkB5r+geSXWYKabhErWbDugq0BBcMEFvKYygKdO2prRZ7lOtT/ldf+FW+dO0hxbZjsSPB6y6Bool5YfeI6v4A8zgL/1a+8l6J8rC4ZFHXZ914SfZmxUdPdnjryT7pqSI7kczQ7nUEPo+gyJ7rw1Nu7B7jh01XeKOleTKLkMHYKHQuBdNHt/f4m+9/mW+/+wLZoVgbpGeewRNLOnFEpWfwyDL8RM7N59En5QU9ojBMprkg0iH7TTXS9KGX/lruzoAb2yfUtaF+1pOmyEOz2lCu+y5n1hsp/LmywEV0FhNy1gZkvPsZz4EOcTh7m+fy+iwkd/KucBKQQpoRm0OxZWlyUB/38Hf66Img1NGcrnDWQaGsw1nkjJxzyZniww/3eTBZR88kzB3akWqrnA2FU+O6/bB18P9h1++pgFJKrSql/mul1AdKqfeVUt9QSq0rpf6pUup2+HXtR30f5yXTThUGdZhiC8PPXrlDYWPsWk09dJTrsmm4WNCIFo5dT+Yo5Tm6GGAzuUFlHRGvlOjDVEz3TtIQ2dDGT4Qg1ZAA3hY7Lg6xG87jYyPSVwCjsL0EHxl8luCTWMjj4csOEuqBQH/OyCHb5tHh4eNiB+c0fhphTiO08dxcPeH1jached3jUuG/VKvy3qqRIBx1TzF+b4PeJ7GM0iaeeNaga4GWXSg0bGog2M3bRC8/QS1wozMw7BXda8u2FvxX73+ZD3/tBoO7EemJovfEM3hsMZWin1eoSnccgvbhUA6iUgxHXd/SbNe4CNJzITtWQ8V8WzHb1zR9IWLbFIp1j/fw4qXDQG6Wez59teTiJen2qxUjKfETRZNrGZO6ljgsPzs7XS5cMd4TB9suwmIqhbY4/Cqavgn8IbrCz6UmGOupIMNX2Czq3LhtL8LHBttP5J5qxebqlNZ8so2naefqKrMUdYTvN/jYBePQdmTcwtWhm1HCbWtNQLt4EbvkHKjAq+o/8vSObAjpDB9nI1Dy7EQY/O3YIlrYAE0vO1qUdJ3J45gm97x3cYmD+ZAqWGa0B0A8dejS0ZpXeh1iXIzC9VLQGjfqQxLjeymtP1qTG5pRKuOdVyfyfCbgc0t2dcIf+erbaOUpQlbXys1z3HYln0287ILbAl3y3+QzcWmEapxEyBgjwclh4xerDcgPNJNF2uU4autRyvPK6iGLScbB6UpAdFS36erak6QNixfE0HO6Z+RjLTXr8YxeVKH7DU0ZUW5aeY0aImU5Ox5iJgY3jwK3TRHPloW+KT3RVPaytkDsPN1C3mA8k6JlI52JVUDgYTbhsPRhH1Cxww4deb/sDh+z8PQ+TjBVcBu3iuSpNI6t/5duPDa42nvTmlO2nDX5Pi0K0jaj8cIzm6d8fuspX//cHXxuKTZlf0GBNo55neAjTx7VvH26x298/2WyxzHZkfhvSfhyLZ/NvEE3jpW+5C4eH65A5LqYD/nQ5f7g4R998ipp1LC1NkHvFLhECMbRQgxj/8xr38Vaxe3391FW4Z7kZM8ioql4T8VTQY5dqrtRjTh4q86eBO+XQcamRaNlf87ihpUXzvmPvvS3uRKfcq13it6f0+yXlFuBo4bi2uAU23OCxPWlSdZWxsazWxWjaxf084q6v5wU+Eg+j+phn+zYy9eFk39bLSceANp43jrc49l4KCH3VWjGHKGho2syV1dnUjiUmvTtHisfiCVAPJXnI55YEQzUnrqn6R068gO5B2bhugBjFwsSm7zTI56GEVUhCLPXCh07qlWZNuQHQo+pjnr43GLu5gw/jMkfxsQzJRMKTxeNlOVVt87az721bYgXoVh5LqWhNf9txT4uksibeEyHErcjzGjmqEYOnzv0GxfY3JOeKfoPtKQ2+GXElbjv++7zXtIGBDl/8MEOrmfls8paDyxFa2vUej+1orIWlfph1+8Vgfo/A//Ie/8K8AXgfeDfB77pvX8J+Gb4/Q+9lIIk
kjegakUyqPija29y/2KdZFjh+xbXc7hUyNQ+dcJTSDzvn+9wZeuMapJAUK7kSc0rlw6xm5WQKFsOhJIbqpyYzLXVdYustInlXrVIVxhlECrOMOJTTYuPy583eUTTX6bbOyOEUJuAKw0fTHZkJjw2JBcapTzb6YRJk5KsFfJakK7TbZfM92Q+7FLhYuRHgka1pmt4Ud/pCiZ7EcVGzGIjCgepfK+6F2FjTTUUhKBc9xw9G1GuemaXLeU8Jnq3T++xfG8Xh1HTPFjfaweR+MHUIQxV1E8yFjOFQ88Mm9tj6jemzC47zl/xXLzWUGxJ9d70dAj39MQThXvY52AyxG9U+ECSxyvcwFKvCPnR5tI9myoESgZuTzRR2I2aauU5roeWxdOqTXzsmF3xnL9Rh4ibACtHIZASQoek6KICtKjNvFEQyOVei1eMN+KL5GL4fTv3ia/NKLYcxa6lWgnjswRMIt32H/z8B+xdPaHYtdi+7ca4rZGca8fQ2yX1imexJZEh5boSwnMgk5q5orxS0fQUxZph/PmKas1TjcRgMpl6VDCGjaetD1TrPB6iB4IzeZNLl9/sl3z0bIuzaY8oktxCHzqtrutrOz+19Okh0qiqCSagCh8bfCz3pSNmGthfu6C8WrLYceAUP7H/gGvZCVp5ToseZ89WWJQxTGI6EqYSrzDVOhXrJSLlo/CztEY5R1Q8t4kG9V48hfk4C125FFVFE3FQDGES4Y7Tzlw0ni8723KWsLI2p9mqqFcC+lsq3r7Y5zsPruKmMem9DOXVshMORoV24NB5gwvFjxSTqmuWdKMwHww6RKhY9118iSBgUuQdLFZ4/XMPWHlZnMXLdS9eYUWIF3IKnzi2V6Ys9i2zK45iSw6i/EAKN/PBgN6zcNC2a8ABhaFchXrNkR1pek/lUIwKJ/EpmXArq4E8G/Hc4Z9lvH+yK07Tz6kadQlxbHnw4Q7xheH2420Ovr1L/54RknNAPpeKQ/8plOngYBU1Faf9rqhpUUYnqszqzTXe/60bHNzdRBuH7QlfKX+qOHi6yo30iH/7td8ivzTFblfYoaW8WVDcKpjvema7hrpvgtJOdc95hyKEq8tSdC0iKJ/RVj7jf3nrv+VKdMrfO/0CH052SBJLnDbdWGclLnjv9BI710+J92ddBIuuPeenfdY2JyzKmPPDYXjowy+pp7+66JSRBCTcazr+DwS0SjvOP1mlfF/UZK03n/J0+1hrgpnFDYs9i94saXJEeNCjEyq0MTWm9sz2FaevyLpabGjxNiukwJjvyDoW78Elb7cl3RvjxOX89TnVCO6drmMmmsHWTILdrWSf9p56krE0cS4Ww+Hp0wEgZ1b7Pk3pMIV8Pa+Kt4kKMVZBgdq4bm9vkzm6Pd8LYu1yjzmP6KU1+Qtjpi/I2eO1ZMXWgzC279AvQV3NIpi1JlIEp8eGZFR2IeUqiBDaLDwJAV9OJFpO7Q+7fmQBpZRaAX4W+KsA3vvKe38O/BLw18Jf+2vAv/Ejfxqwkc9Be1zmubQ2ZqgL5mVCFFlU3K5GUbnImwA1qvjk8SZf3XgAVZhVNjCIK3pRxTdu3cUOnIwUUj6lfLA9B7XuMOUuXDhUuMqFWWfoXvS0Es5TWePnC6hq+SpKgfc2WgeucCi3B3FheDwdESfCS4hmoJRn0mR8/9FlkiTMrmvdfdA298RjKDZkA7ApHc9DMuIU1Yq4kDcDKNaWG312KhyVVnYpDuIe9gry+wn1msP3LNGTlPRCDtBqlU65Y4PS42LS+9TnY8p2hhzm4pUjvtCcjXuMBgV6bwGXSkgcOsy2l/w16B1IVzl7fw0UJOeygfRuJ2SP444nZRaKco3OLbyzMHAQZ40QysPnuByTeHAelTqilybEw0o69HT5wLkkoEdKDlsppH1XSLWcN4lxEWGArpe772Y85Q9cv83Vzz/l6q0Dih1RXnntUdphneYnRx/z2toB2c4MMkexLfmLknsl831TwuevP8HtF2x97YA/
9OX3mN5sMFuFEDoHsgjylYLZvme6r4jyBps7GTdZmef7WEZX2oryrs3xa40DWzsOU0C50/Bn3vgO9SIOz67In20im0FrR0DbZChkhF3LF1WNnpfCGysasTSoLaa04tivPPcONljbmOIzS3QasZ1O+IdPPyfPllfi0P6gT3poRFHpwziuXh6mPsiEVePBeuwgFcGGFoTRRe2IStZEk4O+iLsRQzz3zMqE9+7tkT0zpCdGEIoZqruDAwAAbYNJREFUJOMlny56kjA+65H0auI2lcMpfvB4F+71iE8N2TFkz0wY/YoKLxlU/JGvvM3rV59QrwsSUY9EfVf3RN7sEi+dfioCjmajYXbFdaMAF0mI8YOLVa73T9geTGV0oGQUrIuQzVkJ+np9eMovfv17/MxPvUd5a8Fiv2G2L+sunsg+ON8VpaZLVPdc1Csen1lBW1JFudIKUKRQb/kxLtbUPY2PPefvbfDt924KqnEm4+NoIQhL/sQQXyjZN05FyTzfs5Trsr6bVFH3hYBvU4NXiotJjjIOtV6JK79ajrakiA6NWQmjj2DtbY19IFFP1ZooVpNnMX/z6Vf4cu8+f+rF75P2K67fOOTG3jEb61OqTUu5FjhdVdvESgNhCt89495oyRJtx2uu2/r52Y3b/FR+h7998RU+ONvmcDagWCRUs4T4QmMWkOqGx3c3xf4jPFemkuLAnMacP1zFfzQgeyzhw9mFkzSA1HFr8xCbeeq+qCVbpbcpA8oRDmhrdeDDBQPY5yJD6oE0MDIWhNNJH99vuLV3wNpPHDD4xhHFtqPcFFFENRLid5OGLNK9hulVRdNTywZRK9x+gR01zK5KdIxYyUgzqby8pp/cvsfu+pjyRsn0qE+zYsXI9IWK8asN02tQrqmOL+ZiT5ZXxGNBd6NiGanVJmW0I/VyTTG+Zri4oYMCMKgEExFpoaHY+nQYtildx1dOTxWnt9eJjWW4O8FdW3RZgjZr/77vUiB05QLwESYJewUulabGzIQqIfmYqju7CIiYalMqPF2B/LtdvxcE6iZwBPxnSqnvK6X+ilKqD+x4758ChF+3fw/f67Prs+uz67Prs+uz67Prs+v/76/fSwEVAV8G/u/e+y8BM34P47r2Ukr9O0qp7yilvlOdL/jF7bdRqXT8P7/7Pt+cfA6lvJhgBsMytHCDdKFpepD1KtR5zKXkHF2GytXA/7e9P4+1LMvOO7Hf3vvMd3zzEPFizMzIzJoya2AVRRZFiVSLlGRR3YJsyY024RbQMNBttGAbaDUEGO3/3N1wAzZkWFajZXU3qJbkJqmhZZmkqIESWVNmVY4RGfP0Xrx5uPOZt/9Y594XmZWZVSGrIsiK8wGBuO+O5+57zt5rr/Wt7wudjO/cOU/LjdHNjNZGnyKsdkqhJu04olejwEbCMcmDStTLlyjYOrIbLiKHInTBKGzoYwMP5TigtUSpxoj3nJfP0o3TdmmTSVQ7mASc7fZIFwoKHzw/5/3DVcpCMxoEQpJ2SyjB7Pizjo50QfSSxhs5w/OnhM8i0IzOShTtH1r8E4t/nFMEGjPOq0xHlSGIpauq1ZwIRyIR3oTbl0xP7yXL+EKG16ci7EvkXYxcVCI7IW9YZZ+qTJCkwKU0512N6L27QH4UYO4FtN73aGxVO7OJ6GQVUcVNW8mFK7nnCV9kbPF60HgkpG+rRN8mWSyZzMvv6cSyk89alqzny1hVpGXR7ZHSU+lpHD/nwsIR+X4wk/2fZqmmAn7T2nYRSk1vyvnRWTHrXMNarG8oXUMROFgFv717hZM0pOPFdP0JZqIrEU9FkRnixOUf7X+OrXGHdhSj3YLwUr8Se1NkjVMewJ9Yepe/+Pnf4y+c+w4rfh+nnc74AlkL8k5JstnEOtJt5nwQYUYab1RWNh+go3ymdyLEck0eaeJ5yUChhD/n9eCVK5u81rhPe25MpzEhywxTA23hpxnStiFrOhSRK/wnhZTQjBLek1bgaPQ4fqy8dmr66V2L
RD/nWITsrvVXefjeKt1owmI44vLZfYpuTryazzJI07Zi4ehpCt+IqXCVAc6aDtZzKCOXweWCrCWflQdSyhi/klQ8n6rBREFRaMLbPv4xeL2qG3NkZ9lFEI5d53s+eWYID063knYvqGQdpAs22hYyddYpKazif/nyG/xc5yqvtrfR3VQy2ysJRSCcvixUZJ2yEv+UEinKotbjGe+m8BVFblhtDfiN269w/c4azonB7Qtfxroy1+lY5DA8nbMRHHEhPOSrl+7hzsdkqxnJUkHWgMmKZf7Le0wuZKRNTR4oTCeTjFdsyF4d038lJ54Tz7tpBn68WvkeOtC/oPFXx5S+xd91UIWaeeihqozllEeopVSUfm7Mn/3pb5Os5EyWLKN1xWjNkLQ08YIjJtGF5tL6Ad3OiNKzM3HbWba0yo6kc3ZmnN58qAgfOeTtgtEZS9a0XL+7xl9/9DPMOyPigU+cO2wddTjY7OIdGtyhlK7dUSU94lQZ37ycWTFZI+V2q4RQLz6Tchzr7jG/MfwMf+/O5zk8aXLSj1CbAd4jF28gpaXduAVBycE7y+R3mzjjigxuFGVg6VwzImo6ONXNk9K05T9Y/x0uf3aL4bmSwaXKpNqpeGyuaKCVnggp5+2CIrKzzGERqEoK4zH+GJDsRLjbHr0k4PXFLb68/JAyLMmb0lgSdyVLGM+LuLHTM1LZGNhZVqV0oIwd3FbK4isHZM0pH0/OldIobKH4Jw+vsHPU5qev3ARtCZfHDLfaRHMT3G5MdiZldD4n6RrhrnoW1xRk3UKuSXfKPavmJk5lAMbrJYOXcibnstljpaPImtOSr+XSF7bIWqoSBZaMXRFq9FjI/80HmsG1eQaPWthCpAh0pRllMoszFl6h+ERqisCgC8jmCv7MK2/jf+aEdOxResJtyyOZOwtXMvp5qCsBTREZ1nn54WaIj8EPowO1CWxaa79V/f0/IgHUrlJqzVq7rZRaA/Y+7sXW2r8O/HWAxktrdsM7ZGFhyEHZomVi/uGDLzMZ+1X3mcY7MpgJ2PUCb0cOLx57qKWEg6wlE0ZFyktLg34U8NvmCmVsiBZS0qoEYCUTiso0ej7BmJLC98gjRdln9gNBtdjObmtwgcKiPFfSgEUJRuruyWFIA2Yp19KxUMqJU5aKL80/4HZricm6oqlLxonHK2d2ePf6hlwYmlmXlzu0jM6CnmiKQPGzr1/jwXCO7cOzYBXqNuTtknjezMQv88iQRQoz8ehdcAkPS8K9FJ1bslAzGAXolsU/ED+84YYlfPmEC50eNx6tUPguhQ9J21QCZJI+nXa96cJKeS895Q5l8wXKGoJ9hdoTvShVlGRNxWhV4/VFHM16JcMNTXt1gHe2oPfuAsmcIjiyjDaEH2ViKidx0LGq0vJmVrIpQtEY8o/lb5VL2WFW9wQct2CY+ri9SuOo4oMVnixOpaNmC60urfisTQmFvlPpRpVMlbjRIigHcH9rkXuTFVQoIoPNban/p22whabI4XvvXwTAX5hgS0VRSDkUKl6dI+T2u8kSi+6A7/bP89b+OnliGGUBTiklaOsX+HseXh+iXZkQJ4unZGWroNsZkZpwZpyqclst0FWQm0vaWheWz3Uf8Xd3v8KVxT2Ok4ieE1ImemZaK7wjO6v5A1ijsZUeivUq/tNMgd+eKpZrCTCdMYQfBDhjGG2UvHvnDEFf03BTcquZZC7nzx1wOIrI9rqzrsipYJ8uqt+lEu6ksDjjQpTQtcJZjGEYkQdyvO7Y0uhMyBuG7LgpXaGpwjElma2EZ6s2Zqdqx57ytawDzshSDtzq+1c+XbmUIGZ/l5UAb0sW5hW3x/909AVGuUfRczGpwmrpDJo6wLtLE0b9RvWbg+k5FBMzG2edW2wJn+s+4sYb5/FS2dToSqm5aBaAlkYCbbk3WOD3ti6Spg5f2XgAgBoZVK5I50u8MyPSXALiopJpaDUnpP2QSQPppnWk9J4HCm9UcV/WSqIdOd/ipZIz7SH+
/Am3N5eg75K2RVoBRBZmspHh7Tvk8xn2bM6fuvIec85YBGBDLbIVoUK5YFIRwW00R2SFoddvYOdT7I50r9kpP6mEvFUwd7bHSdDFPzAzNXN/z5DOl3TPnTAXTfje9QvcWlrEHDnspAt4B4ZopPBPLOGBSKxMVadlMqjK9FNuVlrMOtpKZ3oeSOnvVrLC33zva9i9AB1LGTY6rIznUzlvDicRl87tsbl1pvoOzIQndTcFG85EicPt6SZGQwqxdfnTq+/wf91ZpEg1WeKhcsgWc4qHLmlTV1IpOS++9Ihb20uU20F1/EiH4EREkacBMFa4cI/uLNIbh5Slwj0xoppeuUHE83rGbe1eF1kEJ55uJKSkGN1xGV+CojWZdQLroqJfOKLUnbwxj59D+8WEueUBSeZArhjvNTBDA2EplePquGxQYLRlbr1H/HCBrFUFsfp0vhHagRDpZY5VxPOacJ9T7SVk7FtuzLYvY21iVZX6paM1Xc/Imw7+vqax5cgmsOK6lcbOfnOqJa1wFNoKsRy/YNXv8e9efoO/9q/+CCympJkG3MpHUsajdE4pGM4or36XT6/h/cAAylq7o5R6qJS6Yq29DvwccLX698vA/7n6/+//oPcCeGN0iTQ3uEHOt3oX2XkwD0Cw7aJT8I9kMMapaO04E+DA57Uv3+KNo3OUQYnbkwW3nwSUrkVvhjgWdvQcjYrwOCVbmrEhPJtglGXsifXIdALSaYmaGRlWLeoVF0NnmbR4P0Ykz0NDsOMIVyoDQmaLe+FbFqKYNa9HmSv0fMp4FHB26ZjPd7Z4z1+XTjArP3o2V5D0HdL5nMY9h8KDV5uPGGQ+9xakk6XxSGGGirQjhrxJRybopKPwhobei5B2De7Ime0A1GZI1i1oPjDSbXhxxOeXt4kLhzLTZG0xayymKu3TuKS6YKcke51VKr65RYU57isjBjsNmncc0rYoL6cdyQxFu0Y6HpxSLD4yhy+sbPEvl9tY46IKBReHTGIX/65kl+KlkmhLAo+pdo1wchR5p8AakbqwbomKZcftenrGIXi4tYBfcY2mSt5ijSGt+VO5fpVL56KJhSha+DKGqhDyosoKcKvfRYPNdCVLIGPq9SuV6yrAKjONvy+8g/K4iZ0vyHZ9ook81xue8sd+9errhFHC5E4bZ6jxtCWbK2dtxZNSkc6VNDblvQcXJJCVDIIcX+jmDBakscEdGYJDyYCajJk9isks45ZmL2nx1jdf5N/62e+xN27RDBOOi9ZM+NWJZZf2+PnO1FalLGeETmWtZGCLEqqOwtLRqLLqMBtWQcd8jrvtYSZCvt4bNdm9P8/cmR6jQYCrq8DWZxYUqsJiCpFRECNncA/HFE0fnRVkvQA/VTNF8jKBUT9gZbnHkWlijejlOKZg0rJMLuS4B6IUr3PIAznvyypzqkpw+oa8yhirAvKWnQmVTnky1oAJCiaFy9/beY2b72xguxnhloN1IY5cvL4lOJFsa+BnmM8ecbzVoQhkwo8eCZ9jymlx/RxXFagzE9KJg7vrVk4CmnJBIm6dQxnA5kmH/GobgO9wjqzvQ6PAZoovvPyAEsW71zfQYyP6QhYCL6OIkeabhw2CoapEauXxIpAxyJqK4Nji9jUnk4AvrW7SuJDy9s0N4oUqwLYQxy5fefUO37l2ia+8fJfz0RGRSfmHW5/FVrYl7gjcgWQD3bF06XbDmM39Odj2cc5MZnxGkXOQ56A1P3PmNoPlgPePVtm9N4+ONe5AY0aadpDwvzjzBn91+IcZHDUwClSihcQdiPjsNJOUN8yMS6cKmT90YiUzP2EmPwKn2ZzSUfze4SXMnXCmml34Cp1UWfRYzoHAyVkIRty7GGMLhX3kUwQin3F26Zj9dsj4XM7cmR5Jb4HJgq4aH0r+/uHrvBjtUQwdTF807/LQVhUXt5Jikbnhj69c5XJ7md/gFeJQOFiFLwFUFulZR5uNClRpMGONfaMj7f7IPBHty5w2XhHieP8FEVT1
jyWockfSpFT4YnyfHTocjufp9kUw2RkLyRtlcNwCp+om/p2tS3xl9SH/9N1XCM8M4a02zgSsMhXXVvhFJsoJ3YxJ5sp1GSEk+mn1YqoM70K0q3CHmsKViog1jzUFVfP+1Z1VnGlApZl1nco6ZVl9eY9H7Xm4W805WUWor4IucZdQM94TuQT75tDlXx6+yB9d/AAcK3qImVdljuWffrzBpZTMeBGamdzKJ+GHVSL/3wK/opTygDvA/7r6in9XKfUXgQfAn/tBb2It/KPNzzC63aH0LN+yFzBDg7MxQt9zcYfMRML8PUPesDQegTNSLAdD3rx5AW8hhqMGOlOMEg+zNqHcjDAJ+Juu7ChnwpmyywzcHK0sA18CqKxxmnGYLqIzQm1eyqhMu/GKsura0mRNja2i3VmrsHvadvrK/C7Xx6vYxGB1jj3y+exL22zFXbwwIzvyqgBK4c+PGRPinJhZiv3/s/1ZHuzO4x/oiixb0tjUDC6XRJv6sbZZiLuGMiiJ5zVJx+AkJaWrcPsKc2FC1mqRzFsuLh7x1s4ZRnsNSYWGlvYjhd8vGa8a3EZKlkgqt/AUbkX+tEZSvM64gJ7L0vohZzo9bnirpz9oqgl2HbII3LGcsO6JJrER73rrzC0NGG/NkzcgG3povyBrl7hDTemXFL7G61eZoUp0zjtR6Asxo7MR7Zsat51gTxpkEfiOwsQleWZovedVrfnSMegOS7KGlGylI60qP1YBwJTMqouS0mhKz0gQoZToHXkBpQG3mWIdVxx3qmymtEJbbKGYehqmbUtjS8qszXvSSemORAlZiIhg7gYUcUizNw3q4aitqm6YKtN2YcCw15FOsBcHZHebZKESSyHgpN8QX7T9U/8+IeFLydAbyq5vslbyz6+9RHtT8WA0x9E4xKjTTBZQ2fNI2XdKnJ9molRSoCotpjJ0wTUzAqrOp8RKS/HVAdn7bVCWucUBo0fzlK7s3A9PmqCgf2MOvy9K2zOl8bQS76wWPp2X2JkshUVPJPvTuOeQNeyM3Ang3QsYdyY4Q1V5X1m0gmw54+uv3OCt3TMM3Q7+oQacqgFDdvRZU1TfRQ1bFNn9jQGTgY85chluiE9kERVoYG/S4ubbGwR7mqLv4Y6lbOcdGtGlmpQUnqZ/HHFm7ZjjoCBv6plPn5zHityHKEj5zuF5/tiLH7A57vJudg49loyqzXSljQaUiix1REndBfVOkxBIXpngNnPONY747fsv4Rw7QkSviPlx6kogE2aYoSdl8TlFtCvXROFDY1OTdKH1AJoPLYcrLX4vuchcawyZxusr3FFFLI5dNsJjvhvlrAZ9JoXHbzx4mfHtDqaUMr/bF8mNeN7gTMqqZCpk4lHHoeh7hLlcM15F7FalRY8drp2s8rXFu7RWYr6jSx7dXyDTokr+cHeO7ZUuf/zcB1yfW+HdDzZY2Tim48fsDFoMnQ5ojd2XjLMzFosaE8vvYYOKDB05FEHV2UulL5eU6FyTFA46l2pBvCDnc3CgZ9nLPFBsNI/59sPzbCwfk5WancNl8lBKdm0/5v6LKa35EcvNIfcCybqAeK1+a/M83ygv0HnfFd21UcnB5xV2Iv6S7kj0lJSCa6O1StbHUlbnznRjNKMYKNjYOGR/c41yMYMjf9ZYY1JbbXSr7roJmLUx/a6L98gDLF5P4yRS7UjbElQ2Hor6+FTwUhclJpEuvCmBO77axT9zF/fQ4cILR1zbCEXQ80jcBUpHdJ18PyN0Mh7eW8TzLM6RlCF1Jpt57WkKX9Yrc1wJHmeQtaVcJtkjhakycLzfEv20ajNdugqdWLJ2gbvjES86bJw5ZL/VZDjw8bbFtaGISrKGFoFUzSzrqUpZ74MDxbvXzrG53qG7PKDfD/G2Xdl0TceicrKYZjKLQOgNSp/OQx+HHyqAsta+BXz5Yx76uR/m9VMUhaY/CvCOZFIbdRV2KeUnz93jd29/FpDsx7QrJFkQuflsruB2fxESzbnzx9y/05BU
tlW8tLbHTb1Eea9RtWRzKqSmoAwtLT8hdDL2/AUxVdWID13Fw3CzoirjVB0BUzPComLypzkEflWrPpWMLzzQzQxVGJyhYs3v8fdufx6VaWzqYWKFViX/8vYLGKeosjoKHEsau6hMhOkmqxbvRPHwnTW8nkzoptJzEVVXy+hcgU50xdlQTFZETFI0dk474UoXkl5A9vkJnc6Y7UGL4u0O7b5YpWQXY8qH4UzmIQgystAla4nqe+GJJYLVlc6Vq/EPDfdurhAsTdBBQZkazJGDOxSfvpnPU64J9xXZPMTfm6d8ZUhwKAGD/7ZH1kQEFhU4Q0O8IrYTwfH0s8CNIU8NZVhSBBqtLWlUosoqP6sVSosgpzWibZP7stO1GnBKilAydtZR2JmAqQIjuyOqLpCpOa9K89mFc2nlgFubUaVJxcxTsfSAXAKowrMU8xnqoTfTBSqnekfqNFCxzqkeipiKgvVKiqLyNIwVSexRrGeoscH2ArQnSsYgk0tyFKK08N+8YTnLYBY+M5FAlIhahnd8/BPL+3fO4EYpeeLgVSUiMZGu7BfiQroZixKlp1kTCSbJq668woqzQcU9M6OU0oScm+tx70WHItcQe2TdkqydsXPQodmMOX9mh2tbq8RNDzPSpO3KJ7EpWUprT/lUOi7kGg0c9DgVvpVipoc1Tct7fRj0QlpjmZxNLHydVy4+oumkvLSwz5t7TdJcPM3Cg8ovbTWh73q4A0nLC5dO8fVztxnlPr/7zku450YMNhvi22hhmPoVP4pZxk0VItBbejIfKWtxtz12jpZx1yfkroOdz0gn4gs489XLHW7vLDEXjEkLWZ28E0360gQ7Et8yEyscR8rSygIFM1uM0gtJ1jL+4TtfILzjEQ1EeDc8kov98CRCL5aUiUN2PoVM4R46NHaoyh8Q7UgGSlm5vnQzw3uzSS9qYlol0bYl2ssZrYsWx28+eJnywOcbnYscbHWI7rs0JnKaOSPhS3m9TDaT1dpyNA75o+du8Ga0wf73VsTEvFT4PSlvq8JiYsXdb29w+/wic+0xvpMTLkyIdxti+5EYfuXqV/gjl2/w9YWbvOue5YXuAWlpeNRvY10JgFXBY4KZVffdtBV/Ku3SKin3zCxDNZW1mPPHPAgs5dmYr168x83jJSa7i6J9557KLtjrTR5ecDDOqXirKuHO4QJOkDMaBNwa+/ixVEy8oWw8lttDHl5dpVWZw/s9ybLqiXlM/sKiTcFvf3AFWyqcfY/Grppd05LFYSYb0Q0mbF6KeensLjfUivwOWz46Ea89dyLG5M4Eyq0IvZyQrmUEDzxZ66py3eDVFEol2omjqYbetKNdjMCH7crVYKz45vZ58qhku99m48IBx+OQccsnHbk0toxks02J7+SYgaky0xKEzrLHlXly3rCzSocqESX/xyofKhddJpNIRnDmGVjNPwQW96HD6LuLxJ/tYUzJ/FKfo6wjn10JMouky2kGdtYFWtFmxm/N8/IfvsO7d7vV5zLrvp3KXpgph1JVXYA/QMrgqXrhYYVY6RRVCrKZ8JnlHeZdIR/mkVz4zkSCE2ckwUW4OuTGrTXcbkJeVh5SnrTd7g5bXFg84ubExYtSysOWcGwqsjjNjDl/TOSksvOrMgtFQDUZgnUqImK169dZKpmnQnzxbJqibGUTsZSStnxUIcfruLIIOGPFYdYg3mriH2tJnbdLrvVWKcYOhXXwUgBNvpDB2KF9RwQlnRcG2DfbMpmOq6HSouyadBWNh4bhhZxSlTS2DNFewf4XNctvlBSeWKOYpCBreKTnE6IbPvaLE6xVxNe6+FW60yqwPY+0De37shsYj33Iq4zXqNLAsBLFOxNZMErXEuw5eDdbqEjSwc64ao/3pgGrwroFhW/wlsYUgyb2vgS17kA0pvwT4b2Iezaz1tXinpIFw7VkbTCbAUFfLsR07KHnUtyxP7s4fD9jtB4Rr+a0bzhVKlcWX4ydnR+lq/BOCmmZNWLpIoGhPbXv
iTMJIDLhTvyZ1bf4L1ZWKCYGlWq8EymZlI5oMlnXoq4MaXk5acuj9C15Q1dt3VU2Z+rnuJDj9lxKVwJvVVjwStRQgtU8KrHHHm5/2larSRZOJ+SsAbgl5sTFHYto3tSeI160NHaEwD9e9HD6wuPII2hc98i/lGDLUxFQZm3BYCbZTKJAFTkznl8pJW0VJ6A1yjU4w4rcVZag4daDZeYWhpwcNSnuNbn4xS1+bvk6//W3v86XX3gIwLkXr3Gtt8Kd+8tkJx5WQdIShXEzKTBWeGkmKar31RLQ5ae74Gn780zJ+cATbaVSsgUF8EeWrvP/+uAnmWuOMT0Hd6iqBVbOgU57xIsX7vOdO+eJj0KsEemNthPzQrTH7zov0AwTeDHhuNegzDSDWER5VQ7KUHmpVY0XjkgIOGMpS3SvKSYXcjLX0pobM1QQ7PtMfQpHRyHNhTHf+c5L6EQRTBSNLUv0lRFZ05Btz2FG4J8owjChH1QLeLW5aT6wqNIl3LWV3EcVmBzncr5PHIq5HCYGbz6myA3mkYtVch1bB/qXhRcDMDwPC3NDhl4oqulayr8iZgjKKSne7NIawKGaY+6qrspv8jv4/couKs5RpT+TS4i8DFcVjBOPwpfjLFJZhPLISABTQLStMHdCsjDi+HwJqwlmLiGZBMwtDzi51+U3jz/H/Ze3UCPD+/urnBw0cXddor4i3LMExwXOJKc0GluViN1BQdY0OOMCM8qwTQVI1kc0vmTu7rgx7gsD/tyL32PRGZKWhre7C9Kubx3cgWJ33KaILNE74cwX0B1IkDbeauIdGfyJZHPcITT2xBpKRXC2ecJmtkbekIBI7UgpyRmpWcCgsxKtLWbHr2QMpPSfB/KEtFuV2AtLWShu7S8CstZ98dIDHF3yrewSJvbQufiqZq2S0lE0Hmqy45CsaQkOZD53xyVJR+E2MsIwxVsvGA8WpXmjIZsbE5eEbs7klQHjkQcDl/h+F39tzMlei2TOYXIczpbxqfdqaRVpYSiiEhObSiNMzQJdEeuEvF2QdB28QSWtMZLzOYukwqETUWPPmyX+sZ7xBKeCsaSGZN4S7inKNzsUDhyvFFWCQVFMS3D2VD9RtMHkX9qVjWu4YziKI+G0LucEWy6FL+tB4WucSYnKSqGjTCsQvuHT8MN04f2bQ1mpljqywK23+5yPjvjd3UtM3dadWCbOtCuk47whF0mw5fIT5+/zcG9exO207GYHby5yZ3cR7RUstEezDhCpwYvJqVaWfhqCsmI+WcikOCWJTeX/Z95g1nLqy6XAcbCFLCCfufiI4XnxBCp9S56ZSgEVvru3gXeiCfYh2FPohYR7uwvMrfShUJV7t0K5JdHSiDyA+FUJdEDsHYrwVKzOarENCPcsc+8YOtcN7QdFZX8imY/gKJeSTCqL42uXHpBHlvxmi/i78wQHSrpp2lBcimndNLOTQ2dIWaqK0mVytDPbmKkkfrqaUVyR43WH8rlJVxEvwujsdJzA6yQML+XMtcbMf36f0rNkTel0mHxtxPFPpCJmmFk4M8EZikhkadRsgUg7cgE0HkkniTKl1KSrCxKg25jQ/IkDvvL52/Sv5MQLMJkXJXJdaYkVPpVCcaW2qx4T9sutLALWSpDsmOp58KK/w5/97Pf4pS9/j5/44k3iZctktaT0LGYimj0/f/EGl+aOmLyQoOdTeq8ns/KqCP3JRbe0fsLkbEHvCyn6p44ZnVW4jVR87hywcxnu0oTmQ/BPLPGinRnbgpxTYSfG61fE61y+S9aSLh4hrGtKD8IdxfhswfAPj7AaQj/DeNM68/T6szijynOulG5QyhKyXDJ7aWWpkktgo7JCsnOp6KQVQUnzms/w6jyNqz7eseJPrr5HoDMunN/npcYu/+r+JR5N2hyNomqspQtpSuyenduOPiX35yU4orCfNS1FKMc25TUML4m9ztQLsDSK/ijg6nCd7FaL7evLhPuywEYH5cwf7aTXoGFSlhYGxEsSCJgU
/sX2C7zZOw+55vD2PHmh8fwMBg6TiUfWtvRfSxidKWeWIWn3tCxQugq7Fgu/wpTgWEbDAJtqsqYsflgwJw6LzZHMCYeinm9SKEvNSmsoQXkBbt/imYILP/2AF37mHoMXcuIFJcKroZCkkznF0WchXkSCr0gTLo5RYwme8kcR5m5AY8vOhHBLY9GXh+ShjGO2nGF0Sfn5AcPPJahCkYWSibBa/AzdQTV3hiJinHYUJ1csk8XT7tLS1RUxWLwpPzO/w28+eJnevS7u2dFsI+hUnpoqly7HtCNzjn9s6X6gcG+E5H0P1mNeX97CWYrxDwy3v30Ot6cZ3OyK1VGscPvg9yv/yilf1cr8rCuelSotKitxw6zqJhQScekIf6rhJPyZy+/QMRN+7dFr3D5axKRSnvWPJJC58WCF8FKfvFE1u4RUrhVgg5LWPYt3YgmOqqx/dYlF7Xh2THnArAyFrro0q2tQpyXpRKgm4p15+h5Tex9l5brBQvKgSet7Abf2FtkcdNkcdKEyEHaHlVackk7u/svCj2zdr6ojuQQqhaew9yNGo4CmL92k8FimXAsd5pevfIs//dl3+NrrN7DGEgUJ86s97Lttorsuzesu0X2nItUrJmOf/VGDlQtHMla+CBlPu+J0Wql7K5hcSTj6jOLk5SnvsMoo5qVowbUKVl/ZI16S2KAIq45bozADA2cmjD4Xz9wwWjcMwa5kvvCqjXHFfVLiqzYLWvOFDH8upvRh8/oyrCZQdfGV1TpRuKdxwyyJUJQ/kET+dAMo4Eynx+RszuhsyZXOLluTLnt7naozQOEORGlYnZmIPcJAM9lukrVLvtB+SJEYdCppxrzU+CcQvBVhdwK2d7uIH57FPylwRyWuK3WOu8fzqNigEyr3bUnXmaRETzIxlB2n6GGCSrJTD7w8R2mNcoRofCY6oQjL2UVRDl3ZLRdweG9OvNmGluDYstgdUhaKpcYI3crE8mMCNtVcXDgi+/yIbndEdr8hJ95yQnJlQjInk5VJobExIG3LieQORVV4tCYCeCBlyMITOQZVwtnoRNrAlZQ+4kWLfXXA3M/s8PmNTcZrMrFOFiXrZJwSHeUSvPkaZ5LPJj1bdUmYsOCzZx5RvD6g/0rO0ZdyJq9PiM+LWvDUR8yYEhUWDCYBq40BZnVc2dbAy+u7fOXFe8SrOVlLMdcek83nuKMqO1OKn5TbV2RLIganCwgbKeXIrQjAckEGTs7Prt9kmPk4nZQitJIhmKZbp1lcrWZyBVN+m0g+5OKLNxF/QwqRgDCp5XeGL/NiuMtrjQf87Nx1uchCCZ5VATrR9HOfyElZXT2h3RrzU1duk3YsyTyMFzVJ10EV8NLcPmuX93np4g6X5w+I13J8P6+U8mXHf3n5gOE56F+2nPvyFkU3Z7ysSZvCEdiYO5FMXFuRh5VlTiDt75MlRdoSpXplYe3FfT5/5hGTMwVx6lKkelaKpMq06rw8lebQYI2ZZYBk0OxjWaFqoUolS2VdCfAaDxXBviWPLL0i5Fc3X+MXVq9yY7RC2vf53rUL9G93USMjyseOFb7M1Fy2IpFPvQj1VAG9tLQ/cwjdjLRbBf0evPjyFsVqyvB8yWQZJkuKpBfwL66/iH+sCHc03on4Rob7adUcAu6dgH9+80UA3J6CQuaXo6uLvHHvPE7P0LmuGX4wh+sUmLGmyDXRSyesrx3DXFqdP5Au5qe2Lb5iY/mY8Vo1Zk6JPfKI7rpMFaSnJr9pYUguxow+E5N2xQJpOAoYpp60yPct7kie9+X5B3xl7j4bl/fFYqUrO/14QUowS5/dI1nPiOcMeah4YekAM9LMt0c4Yylf5pGa8d5KVzqDR+uySOgTh8EkoNucsLA4wJppdqni4FhF1qxMkd2S8Ypi8LmEs5/fIV4txI/R0yTzvpD0PTOzuxiehFhPbF1EfLLaiKViUJ7P5YxfSuSc7Ui52yTQfc/B7gZsjTsszQ1INkQ2AqBoF6QvTOBzA8ZrMvcV
oZk1H0zNrlUp3EMKKUUbp5wpiE8VwUsPSqs5zBr8jRs/yeZ31+nd6+KdKBpb0NwuCE5K9LGLo0uKl4fwyoB0rhT7ncISzY/JG4q0KwHt1CrEOlKq7LoT8qWUvEElAyH8y3xJpEuKQNrkGbpk7VI2uhVf0yQW285we1JSN9U1rQpFcGhJjkJ631xm/zsr+NuO+OBtZwTHubx+rLn0wg7O6ycMLiBB+JzwdrMmhHsKtn3uPViqglsRnXXGBSYp6fUiBkVAaRU/2b2DbmccHzd5ZWGPrGmJl8RmzRrZECsLxcClN4hoeOnMr9SqqsRenpbhdKxZXz3Gu9LHvTQg6WrpdiuRecgo8Er+5Pr7JHPyOcmcZbKgSVtmxj89v3ZI8cqI3ucyhhdK8qYYiGuvmHXVW6XkPLDMSnFRd8JLK/v0r+R4x4b57lBkPGaUHznQmVF71Zn9w+CpB1A1atSoUaNGjRp/0PHUOVBaWebPnAAwyAO+ceci6sjFPxGvp/BIdh/nlo+4VyrCd0PcY8361x7xMJ6HTFU7NxhMPLyGlJX8Qw3HgVg6TIToCEJcjwuX4SDAJNKGqzMhrDmDDDNO0YNJZceSQlFgyxJyKVvYJEUZDY4M1Z3BYlV6qrq6xsLp0QWEW4apDxaA7+R0uyPuH86JxUvlX4YWzZXV+T4PH80TDCTb8DMv3gLgn49eRt2WTo6Gn7L7Woq35RLtSDludLak+4EibUv8G+0WqFR2Ytd7K6j5hHInoPCg8eox/97lb+OqgvdG65RnYuxOIDuoBMIwJc0cEteryiO6Ej8sKbTGOlAeemx2uyx3hqTNCUWpSXLDYNzEPXYqfyNLPPIwOx6jVPPuaJ1ud0ScNSlduLW/yHxzjG5mlI7DKPZ46fI2N9MzmFiEHHUK4R4Ul3KSl3J0GhJ6GSO3pHANJpZt/Th3OEobfPD+BtYtifrCS0lbUpIsHYvKq1p81fXl9OLHTkMlYpFxClku0hW+g5NY/vu3v8ryYp+Wn7AUDPEPNbavSTtCKFUF/O63XiXYGJDnhihI0aoUb7+JmhHTdW65159HATuDFpN4HowliV1KXyQXytiQlYb5L+yTZA5rUZ/70QJpR357lcJK1OfG2QxrXIJjIwRQVzh3WVtKMyazjFfhpxYe8Z3dc6xcPmBncx41llLitO1dLDbUaek6L2dcPxWn2CQFWzVO5AUYDb6LSjLJUDkixqpvumBFH+x/vPUa6c020cabfHf3LKtnj9h5NIedz6DnVpkCVZWHq4xTLlnfacZAxenMg/LzS4d8M75A3vArbR7FStSnv+Kz57TJ8dG5Rk00zQ9csSpRFfG22nUKYVrK5f6bISd/KKf9SExzw6OSfMswUj7OSBGclJhMkV5hZvrsuzk7+x1sqjGplEncToLVrmRUEstSNEB9xdKbBBi/wPZc4S9WpRtdWsoAHt1bJFoasdQacX+yjDNysFaxu9+hPQS/V6Jz2Nzp8Ju8DEAniCnmMsqqHL3wuRMudQ7YGnUxUU7WdHHGlnPRMe9F50lzg35pyHjkoY5cwgOFzjVFo8S93SBvSvNA+47mZC5g/KgpnJBMERwVuOMcb+Ayjh3Crxwz2G3ywsYewxWPS+GYvNTohYSkG8l5WfFGrKMgh6vHK6yuntDxY+7uL+C4Mo6FpytJB2l5/4WXrvLt5XP0hiHpYYDbMzgjRfOe5oZ3FmdhQqMTY+ZKxpMuL1zewTdSQXj/JCA5cvGGRvwB46KSxqgaEtJyVtIrCkUZWrKWZO1RIl1zZ7jAtQ/OEj1waA6EJ+v1qt+3n1P6mrJj6d3rEp4d4Ls5/aJRtdtLNvjOmTbl+QlRlBB/aw5vYDCpJe77PBjP0ZobE297It2i5HyYXxwwjnyyUOH4GjPQnP3SI/qxz9F2B527uAMrz7vri0ilqsZwPiMPXVSUExyKwGERUmVuhWJRuhA90sS5wxdWHrHZGLPbaxEvtDCJFsFOR4RAo1se7kB4hCYp0JNc
uL+9gF/5xk9CUPBLn3+bF9b2ufXuWY6SiNXP7eIa8QHd6zeJ87bIIPQNuXK5M14m6iu8QcFordL4quZBk1hU5rDfa5JnRqQhFqCxXdl45UIVoIRM6m8UPjh5Va53FWUnx30YcN8u0O2O8DojHF1yMg7JDiM8Lxc/WT0twVUTfcWjLKsGpI2L+2xmy/SGIUWjRKeq0n5S5EHly+pqVCFNNrZaCz8NTzmAgpvby0K8Br758ALB1RCdS11c5cyUtvcGTV5c3+PBB+dJV3J+dvkmf+uDL2OGpqofW4qeh/3CgMF2hDvQor49qL5xdWElscdJHGILKXV4fYs7KUlyhRkkQpidxCjPxU4m8rqiwFYBVDkeg9LowAcrJNrWvnyOiHwpvGFJVhl2Jl0RHwwOLfsHXb58/gFv/9bLxOdSmpl0ZrlRyvV7a5BqovsO43M5jfsOh0kDRxWoRM9q4bubc7z+8j3eC9fpdQJss6hq5i5ps+JCxQVmlGBNyI1ba6xsHHOwEzB6OeGPrmxSoLg1XuP6yQrlxCEYVeq6Gl5Y3KOfBlw/CcgDhyKQ1KrOCtCQOg7BnmG4u8TRXIldSSiHLt6RoTGQoMc6Fadn4hDuK0ZncprfDTl61aHbl1p7/q/anPhtWCtxh9A7DlHzxzTP9sm356SLJq2Cmx1fykUeHPcauFGGSR0hiPqG3cMOjx4u0LwvKuLRriU8zMmazmxCQUnt3wwSysBB9UeSli2tUMCiABUn2DgGpaHVwMQW96HP6J1lhg48aFoah1JSPv6sLNKlC827moHTJNo0nFxJ+Je7V9AKGluW4LjEHeQkbY/D31tFWYSb4yvSFkw2FAQi5RBPDLfurrB65pj+IOR3d19E+QX+sSU8KLAO7I7buJ2Ecl8u1Wl7c7pQ0LwtfIQsVHgv9HnvaI3BdxdY/do2KtHYsKD0xGxT5491thRWgqKiRCUppBk2TbFxgs1zUeAHdDPCWguJBDgq1fy5P/Qt/q7+MnrooDopxXsd3Byuj1dJc8NG9wR/o6Dtx7x75wxZy58JXU49KGfee9m0hCo+fHgub+2eEbHAnpwPztjye3cvsTQ3QG8GeD2FN4B4VbrCph1SUJWtPDFAzn0pR4W7kBwHOIl04DmjEpNo/CM9CyqdiWVSyKJVpJqTdxbpPEAW4HGJSSyuW4hPZFpi4oJHww6fmd/mu8mGGBCvTIgHEf6JOjXu9S3hfZf8oM39xQaqEO6KClPi2+0Z4V2nJeEdj+SdJZSF2y8UKMBZTmlGCa8tbHGShdy9toZOxPvNmVhOshDaOb1BxOWVA4Ytj+3eMkXV/WTDAv9eJYBrpdQetWO873RQpWayLLIMqvL/0icOn395m6t6hbONE0a+x+3jBU5uz1M2Cry+JTwspGzWdsQNobTsHbR5YX2fyEnJdiKMqco408XMQjFyOEojvrz8kGJJMTgb8PbOOoOwhX+o8Q4M3p0m4/USu5yggUHiszXukKYOemBw+xZ3WFRSIcWMv1r6zkzXzRppVDKPcX+nlIQbj1ZYeNNURtwVjSOTza8zzskcj8WVPuk7i+Q7HfoLJeGennHK5v0x8Ze2Ods8oZ8F3PTniOe1KOAPFO/dPYP2Chp7ivCgJDjIUJlH4OYkqSzQU95R6GS02glp7lDe7GI1dMOYowsp/rGHOxQ+1IVz+9yzS5xZPmHnoril6awyhndk46oWEorDkIM3Vjh5JWSuMSE+CGlW5tqq0CQXE5wgozxozvhHpdGYQvwDrbF033UoXYe/n7/On3ztHW5Ea1y7eYbzF/axVtH0EvKm5li3Z11saIjuePjH0mCgc9GKmpZuzSRHWQ/nrSbtXWm+GK9SiepKQ4tKc9TY8HdufhG3r3FGwqFr7BY445Lm/JjiXpfGtwIG533yTk5zecR8Y0wSu+jKwcRqhUrLapMIhZLAKd1q8O7QI2rHLF8+5PCdJYKX+kzut8hzKD2FzhXqRM5VXfFjrWtmcjKfhKcbQCko
Dnx0T0T50m7J3IEl7UoXlrScliShw/h+G+e1Y5KFkqUzJ1wfrlA8aIh8fkLVdqi4uHjIuDPg0VGbwiqSXgPQmMSTun6ZE+eOyDqVp2x9qxTWN6B9ufB8D62rwZrE1S48lxqn1ijHmRnATlWnS7eq+SbSLpQ3wL44Qvk5J7c6FD2PvNQUgUxOUx6P5xWUWw7hrmayZLlyZYt7++d49+o58EuiHSNt+hq8fYfxCx4Xlw/ZCVtkuSHeaWCNYrwqZPjWlot1FElboceGo14D1mPWF3rcGSzwjc0LTE6CaoWRtnavb8lDyQg23QQd5hSeI3oamSVvCKsui0R8M9hXdG4qkv1QtJssZG2RmhC3boVuZCRdw/zciN5ygP/InbWS5pH8Zm5PAjSnkXH97hpBO0FX5FydwmjD4h1rol1L2oZJ36Vs5LPOtNI1FImhdc3FP7EVWZLKMkIJ/yEqKXNF2tFCVvYddCuadZqhFNZ1hDxe3S4jV3hoSgjdhS8yA9ImDaUnmY7Cr35/JS3i47MOjfuGyYostM6krFS2K4uRsa3ae+V1kzNAJaVBK8fsexweLONVWdX+FeHFmLigCA139xYA4TD4R6lcRoXD0rlj0mtLZKFieE7R8DIevbdC41DxcHcOZ3FCkRuylqkUzEXh2JnImFSkNSGLl5XqvlLCu6xWn7IRYh0t1kaRj0oVHTPh/LkDNg+6FD1vpmL9xv4GlxcPuba1Srs1prQKN8jB+pIJtI8pFLuGUmv0WGHLEusG0g2pFP07XdRyglcJpOoMnFshJ68UNDdFp8Ub2MqiSc9kBkxqcRIJbqY8sbxZwq5GxY9l4hCBSTFDlSyEzoX/oyyQaRpb0/lIjjkPFON+QJRIVgULj/a6HI9CWmFCUWheXN9jK+iQvNcRjkmioSiZrOfMv23I9x3SDng9S7M15F6rgTWOZEFSmZummmjdq6KfM8oiTpYcfqe8xGCvSfOhdJp6wxKTWu7251lc6nPUa3B7dxEFBAcak5UzYcDhpYL57wqnbvByxoX2kN35Ds542m6uKH1DEUq7/tu76wxPQj5wl9nZ6eJteTT3Fcm8SKiYpJg1nUiWQay2bj1aokwNNHOKoTv7rQu/koo5cvjG+y/QXRnQChLWmz3Wu33GjQnbu11satCpS/OeJj0OsRoO31mSLGIpfM7WVobXSytxRj3r/qW0sjmyFrTGDzJSz5+ppk8zlLbivGCrbIM6JcZPmwMWohF3lxaIthWq1DOhVZ1bbh4vcabV415/nnHqVjwiybKUjQJv08MaaOyUwi8aiqZcbxxK883U5Nm3XLt+FhXm2FKxeCDcoqNRxPr6Ef2rqzNxYN/kvHBxl3ONY8LXM1xdcGtniXjcYNJ3ccclnXaP8Wdz9Pstync7bLdbBCdapE8GJToxXN7YE82mMCV+Yx7rKEzqzlwJVCPHZCJQ3bjlcueFBc5f2Ofhe6s8vLoqAYorPMjuCbNzwGsnpG0HZyT8OJR0D4tQpujZFWGJd9/gjiSACg5krs59JRImIBWDay1MLPOgzkRHy8QFWlmGazk6c4geKdhySXa6PDwXErVj0sSVa8hWsj+O6EyZ5FTbyX/gU+Cz/LN3OHAXKQpN2SxIHUm8UGrCg2mrJFglTS6PGWB8LJ5uAIUMVHAku6jSlRN8vF7i9WR3ZrUi7mjcnuKDrVXUSkzg5Hzj+mXCE8XkQkZwIKqu1iu5sb3M+kKP9fk+vsm5txShC0U2NrLTNpZR7EGqK4+nyk1aQRm4KNeglaL0Hah0oDSglII0QwWBEN2UImshKdQAlFWzbrbpIMdnU75+/h6XowO+N7/B27c3uLa3QvdzB+w/6jJZcvFP5EJ2NkYMGyEvvfiIxWDErYbFOzBYY2Yk0KyhyFoFN947S3R2SJo4FLshwb5msmy59PX7PDie43jQwR2IQjjK4r7TYHwuZzubw+x4+MeKVi6EcoBkuWB85OAOLFf3V3B0iXGK004GXelBZSWlAe9y
n2TDYXQQYCaaZE5aU1VU0JkbMbo6B1axMD8ka0/YaB+z8NUR12+coXTl+6x8ZZs4d9h/OIeJHS6sHHLnnTPkhy5BTqW1Y8lWU7KJwR0KEVtFOZy44oUWiveWCXKcsXva8g4zy4UiF48m60ibbN7yKT2DageS+ajUtq2jKbVCG4U1ptIAk0U3mRPfLZ2qmcYVVNIIvlygNsopjWjneD0h4FtjZ6ULq4SMqzOFqtzCdQpo0CND1oK11WN2TAfnekjhW/ovFaBFt8c/EXucrO+BsbOgCiUk4dXmgA/WF3H7inShINlv034gk4Z/PWT+p3fY2pqHhgisAkzmNc7ISMZh2gGnEcK468g5P91EWEvpVtYkgU/R9FEF/J07X5ROriOfxkNTZSvh4Ooin/v6NtceXGRQhvRcKBYy/BTKSMQOVaU6bJ3K99JzUGUpAa7W0o1nocxk4hLPSYWZwGQvIsynKTRQfkHacRmdy/EODc2HCn0kpckycsTfqpljjXR65pX1TdaUa6d0pcw7OiM+gnmuZ3PltBNSFdKYkIcQ3PerFVjIwHas0O8ETH76GFsoHF2y3BpyZ7FJuO9QeBb3yDD/2j4H40WsY3H7MncUpdgd5fckg6Fz8YC0B0BJlVWzdK8qJssBtgjoVAtq2pKjtAoebc3zwoVdPFOw972V0/bv6W481lx++REPDzZwB7C+cUjDTWl87YDeMMA+aDBaMbPzwxrgG10aCnZHi3Sum0r8UMpgkj0RZf/SkcDLKsVq55gHt89gSnjpa/e4Gp9jvGrwhqKo7SLH1X3LpXTnOTHw8KUlGlUW4fLZfTrehHda68QHIe6JRueVhce8SCyYVDwgnZFB6VJ8HHVFQC6q0ktpKZXCmFSy4lUpsfQqqxNTzoLotCMq517fYlPp1Cs8xf6oQef1A/bn52YMYTORze/RrXkOwg7enqjTRzGE+1Ji1GGOzl30WCRPrFHir+mXjPYadBFxSWsURbOgdcNFlQ5Zs9IeMorDwyadswenXqQF3NpZwnEKfJMz54/xdMF8Z8RRGJGFEuQpZXntzBbvOWuMdhq4J4ZkpcDrGYpAxGdPJiFuo+Dy/AHfW+iiM02cKqx2xF8wSMiaATqR+ez6987xx7/+FvvnG2TXq2zpuNKNqpwW0OA4BXOv7bJjVlBWdAmztkgkCKkbbFCCMuSBkPnDw1Kuz6AaI62wnsV7pGdBkAhqS8A5OIk4/8Iee8tNRvsRTt+IxuKJi+6KSFnhVdQGpWbnp84qa7JWgX+oCQ8sW/02c1eOOLo1j5pLsSWUWpMVIrUj57WDNT8cPfypZ6CASoRS9Dn6n0/5zOUtPsguEBwq3JFL1qq8p+4FZN2Sh4NFmjddshZ85sVNPuifR6cap5ERvBWxH0Qkl2Mcr8Dkqmo9lsnYcQqSxAXHfiia1JUxrSplB2Ndg7VALilNdFULnXJBlJJ27FxVi2S1O64Uk1UJ588d4OuC2+NFXm1vc62xwmSnycXPPGC84JE/6gDi7ffKxg56xdJ0Er63fQZVSMZDF+D2qawGwDYLopsebHUIcilpxPPQ/tIBX5x7SNNNeHe+A1QtpFq6errvO6i8Csa0ZFGGF0vm3tGcBNXk0YPBbhNKhemkOLHFO5GWdp3J+OkcIj9jZeGIZNlhkPgoZWl6KVlhSArDqErVF6WiE8bsjVusNvqsXzjgUbmIsopzrWPmvRHfVpa9YoE5f8zqK3ts73ZJE5+ko6XMMnZorA+YzLmUmxHduRHHWYvJvIuJPayGRjQmXozII+Gy2QMwSWWammtwS8jkYiy9KghQSixbKp+30xNBz9pWdWbRCxnJuOpDVuAfy8KiK0VfSkjmQBnLZFm894pAyfP0afeJzqtuJlvtxqoOLpwS/9ARpXJleWF9n+P5kPlQBMBu7SwxXnXxe1OvQnD33ErMU9TTCx82ex2KszE8DDAjjT6WhdgaRbRjGcQ+JFqyXb4ESlnLVOd+JRBnLdarpgCloMpC
YTRkOTrNKUN3Nj5FVFJ8c45cw9yh7Cb7l0vsfIr70Odmbwl9cUR6GIAvUgRijioBpoklcBU9L1n0KEAV5Uz7SKeKfGJmJcepoGa4KUKcqhIPDaKU+HOWn7p4l4O4wc13NsjvGjlHjPxm3dUBvbyDVZZ40UGnlqStWXp9F6MsDx8u8MqLm1zdWsVmBuVRZWgl61SEEO5b8kgkCKbWE4UvwbYzgZNjkWu4trkqnntKdIO8YYk7cCitonPliIaXsbk9D3gMj1t0WxPiVLqhTFqSd2QR17ksBFYrnGohcQdS9slairRrsVvVqTtwuL25xEsbu+yci8kLycCmOyFYjcpEqydZz3Buuewft5g0XCIvY7495sBEM8FcM7Hk7QLnQeUxVsg8CsxMlJuPEJ0cdbp5tEbOY3VuxHx7jKdzyVZU0i4mk/Jg3hI18GnJVV1zmJy02bFtuDjipdWcjcUTxh3x/Du+N8fipSMaXoq1igdmBZ0ZsJ6UHSvpi6mnomjAKZSBLDOVl5rM0ToTPpS1ismyIl4tCNeGOKZk/MYcbqTw+6IJ1L+6wPIXdjlz6QCAw0GDIqg8WAvovu2eCnemVfefAuMUYiuWTNcDOS4d5Lh3g5mpMRZ0lAufLqvmFy0lXwrFg515uiMp0WaRwtwO8Q8U778SosdagrqFhMahIjrIcSYlh+OAe2aer6w/YHeuRVFqDsYRg94C/okhDyD53gJHFxqEjRSVKdwheINK12tS0oxihgsN4Qn3Fc0Hijf2JKu87Wc0/YQ4dxgnHqN3uygr2bssdfA7uWTojZpJMZzOrwod5KQtl9KRzYgqqs2JJ8GtKiw2KgCDU1kR6RyhW5QW+g7xssPG3AmT1ki0p0pNUSrS3MH1cjFnP1b4vaqakUsHqDNBtPeqgLT/7gIv/9RdDhodjGMpNFi3FDPnQIOW7lJrxC3hB+EpZ6AsZasgXlLoVJHOFXz91RsseUOura4SK590R3RtsvkSlSiCbYPV0g49eX3CvD+S1vlA022PGUYRZgL+7aAqAZ5G71aLncJo4kOYU5qp2EPFZzEKbZGAqVpURYBL/PAoq5Zu5VT1ddg4f8Du/mrV0i5E7GnmZjEc8s1H5xk9aPPqF+6z2Bny6Djg6v01Gu3T9t4ydtjqdeiEMR88WoG7ESaTHVfeEKVWUc9VNOfGZKGLmajKr0mRLJT84uptfnf/EpPMlV1DX8pOi5ePOBku4owU3kAEAUtPvka0PiS/3cHriYCbKhFphxxss5pIk+qkqbJ1urDsbXYZLvgEXobv5iSZQ5y6jGOP9DjAz8SQ82ivzVGmQVu21AIrZ45RmWQAvn3/PJ3WBM/JsWHB7eMFrszvMxdMuNo7x2TREO2VNO4Z0kWHIEwZLMrntBZHZI058khj4pK5aMKjz7p8eeMhb25u0H8UyaKmQLsFtlRCuamMRKXduThVmi8BXbXUlyJhoJVClR4/ceEeH7SEa2CtYvJNsSpRhdT8lYWFn9zBd3KOOhHZxKP3OZ/ojtRzS1cMNHUG8ZkMFWtssyBsx0z6AWE7pth1cYaKrb0u83MjzneO8HTBnd4CrislEtGvUZhmhtp3ZHc05fdElpN7XfRCWkl6yG57vGbJziUENwImOy1MN6WIHZl0y4pUX3GQQHSXrNGiRj7VPdMKq0/vkxJJKZNJWGBiRzJpVhbVr/7EdS5Eh/wd8yUeHXZ4eX2X/lxA4GRsnnSZDBzMRHb3UF0rVVkXreAxLZ3peaebGaVxRAvKh3ROFqbSgK6yK80w4dzKDmlpON884nq4Tt6Q0peu2pi/uLLJaHGPq/srDMs2Tl+TR4pfWL1JPw84HEb8scVrnI1O+M0br1BGJaoQ78nkcwnGKzG/K4KK8VKJzio/NgP4pYjIDsU+iJ5L64EIobpjmbyzjuXg1gLu6pi8MDQ6E1LjkY08jgpDt2cJD3JMWhKujjkOQtAWc+BhEovXO1W3T+YVo8sZTiMjfhQRHpXo5Ri7HbA/
3+CFtT2SwqE3CSiCiDKRLOzdq2vgWbyehbcaHF/06cWSLXUSRWO3xOvl5IGL243Jg6ZkTnPJ/qVdsT0p5lOam+7Mz1NVQYSycH9/jhdWDmi6CW/ePYd/aHAHleVOVU60DUvaNTijaZlFfvfGpiI/bvL+pQDdyGi0YlpBgg0L5sMxSeGQFKZSkZdjUlYaiaxWWFuKNACyMVel8NXyyZRuIRyzvAHd9hjzh4Z8tn3MetjDUPJru18iHRqCI0e08EaKnavLOGfGeF5OfBLQTmVeLLqW0vMgqxTqqywJQLc14bARkTXFQmXqEmCcU91DCSotxi0ogurvHCF1T0rcVoK915iJxU4WZS51RxYz0jQeyKYw7gd4PSkd6rQkHXnsb0WYV0taXsJ6o8feUGyVUJDOlUTbmmInoBiF+AkERyV+v0DlFicumI8mtH5im6zUPLq/gL/rMLq6CK/CRvuYppvgqpJB7vM9rzu7zrKRy939NbyeorlZMFnW+IdV8FFxDIMoZXJFo0xJEKWM3+iIcrqppB5Ki99MGF7SRA8dkQCxliIwIv4blhxcW2R3PiNqxwReRjeM8U3O/aO50/I7shnRaSk8qrjAiQ3GK0VfEEW4Bx9sL7N69oj94xbKLUWrq1SnnqxVlUoV6veZEjnwuZce4r2cczBpcrZ5gqtK3jw8J4NApV1i4Guv3eCbty6SWulo6C2XfPbsNt+8fwHTczATaAcx/Q9F/dP09enJGXoZo4kv3VlROXN913lFOM0LitARjobRKM9IKcHIwqEcI3wZZBH9k+vv8f9sroCxuMe68gZSUML7O2sUN5s0jhTX4wv8yZ//Dv/oqA0PQoa5JvAqtWpjGb87R5wpgoHs0kvfwh/qcbY9YPu3Nig8KWn+0sV3+Vv9r8DAlbRzaGmc63N9sMKD99cwY0WjX5FRc8UfXrvFr/caxPsBpa9IlgsayyM60YSVaMB7S23yZolOjZRBg4LCKqa2FVnTmWXvlJVJMthx8K61oICTZZnYdQq+C5455YI5B65o0TQsjQeK/oNlokwmCB40SP0Gh5cL3L6mdzzPt5YbzM+N0IkYX05dvs17TXILpmPJ9j3yjTFzPVmUrFG0vISfv3yDeXfE/mKT24nDSHl4xxrHK8gzgxukJAuO+ARONDousJ6WjsoqBW21QvlOlW2TdHNoMr62dp+uM0Yry9+6/4ewjkXHorVlNbwyt8uK36ezOuYbR5e42Djk15zXSfc9SteZ2cx89TO3AWiYlIaTMMp99pMm7x5EFJ7BHvn0HwS8Od8FoHXLIb6SsXKvJNzPGK+4fP7sFt87uYiJHeI5gxNpimaJ09eYg1DKTxHEKwXnruxyuX3Ad9obuNe6eMsZk0JJQJlpspYl6RjMxKd0NbqoCJeuCImqaYaO6rf3HLK2J9nFwMHxC/pXcqIHsqrHn5nwhfYmW0mXK2d2uXb9LNf1Mo0wYaA8fDcjH1QTkarKP4GuPLwk4DXVuFJYdG74wk/e5DBusPloHZBy1saXtxgkPoNvL1H4luBQkY0D7hQL9G/OodcnhA/dmTu9SSzOyPJgNEfoZKy1BtzKJPtrHbg1WiLOXeKxx69uvc7F9iHF2EHlCropC5f3WWv0cXTJ9+5cQReKc5/bZqd3hsmCzAXnz+5xP1uSho9EzzJjbl/MW8tYka8lNN8JMDekE3V0qaCzpcjOlji3A/E9q1SQLy4coRctpVU4l0omuSvdm2OfuFQsL/T548t3mZQev/noNazR/OTFO/yrg1c42urSa0fS4XTssTiUDJiNSrpvepJJ7VdemceG1l059yfLSvwJrWQNWo2Y3usGpSxal4zXNItzAz7fPiItDe9tvkTWDGhui5hv1nJwRwX5XsgHyaoYcd/1RL17WFmtWNkILC73mXRdegNfrr2xg0okoFUFtG44qMLBmpD9JYtj4OZwA2ekMBNFM4bmVlFxJS2UVha8quQ8ba6ggE4YszvzU1RkLUO8lvHTS1tM
Cpdh5vPN3Qs0vBSn5+AMFeIDqYjPZDRvuqitJoUHzUKaFaxRrJ854uDRClnbQiej9bZPHoqxccNLWfzCAyIn5bu8QHysaWwqjBOz9KUddt9YBTSq9CgLS/LShDIxYBVO7JJkhotLR9x50BCh3IEE8tnZhLgfzKxdJMst6b/CV1hlUKagsaUZPlzlsAvX1zPcA4fmA4j2cvbbkFaWNM5I/s06cl1FrhRx6vHq3C4tN2avdcyb3Q14GHF0bYH9Tlc2nFVXaHQsrhJpR5G3DO1blZXLWHiJIKKaqtBC+NeWP/Lydc6Gx7iq4L9Lv0p2VxTBB2cdwn1Nu9HnC+fv8E/cVyFT+AcGVXE05lZ6JHcXcDZ9rPKJfbi/UFIsZDh+QZ4YWoNTizXJDlYadoXl4uoBrY2YuHC58d1zqHsNxi9nYpDtFRinIHdK4gVD3jCz181sYT4FTzWAUoVif9zgfPuYrj/hJA25+mANO3bQEy3dGP2SrCGT+Mpyj53xAirKaXYm3DlcwFxt4qXS/rt90ka/NCSOHcrYEcPDAyMtpNV3Px5GpD0fPTKYtFK8LiSq16nYEhSBUw1YlRquyhsYIyrNlTq5f6T4x9ufqXb9WpzJ+1bEyFoa9VaL9o4sFF4frn15lYXukL39QEpKFbdJmRIzUbhjUeeNN1JINOfbfYwuxUxVA2PFVtzl1fPbHE4ieqOQ5eYYoyzvf7CBkyqcibTwO5MSd6D4p1svsr7QY98tKJY1K50RoZvh6JLSarg0wo48rJE6cTAfk2WGYuRWXUyqIupLB5fVUJqK92OFzKwzKWsUgQRPJpZSRNEsKT2FdS3xkhG13L5MPmlHsh/eoSE4hMEFS/PNkNgNaQ8t7QcZ8ZzD6HxBsGuIdiz+sfDO8kmDaDfFHebkoWF70KKfBGztd3HcAveBL15lDqQ9H7SlUFAGJXlo0IVkUMppF15RStBQkWCx0k5r0pJ/9u3PYBYTWs0J7SDB7Wnx1avKeaD5J++9QnN+zCtLuxzFDdLSQRlxUkdoaJjYcudkgcDJyUpNfxxwfv6Y/VGTcHFMOmqCsehM07nqzJSUnSMHb1DgTArA5TiJUI0cqxwxwTYAlmwlw73libH0UklzQ86ddw7WmY8m3J9vku01UGFl+FRIiTcPRYxQsm+gqM77pKgUfAsR10wzCF3yphHug4ayUHzu1QfcWlrEmJIXOz1+9cFrHNxa4NwrO+AXcLNBMm6iChidz+nuKibLos7tjAvyyEiJMy2qluESeXNZFMe5R1qYauEVou84c2n5CT3PzoL17F4T/6FibmApbkl7fXhQYOICZ5zjNzSPfntDSr5fOxAT1Vx+lzd/9wp5q8Dfczh8O2S7s4ZvwT9U9F9T9CcBe0dtgjCV62sCujJmNqmIiSaFYW6tz/FmB6dvUBdGpOcsSS+g8F3CPYN2UyZLEvDpFJoVkVY70mFa+NVu11U8POkyGolE9MpCj6VwxGeWdgA4ScVG4+FkjmHmS6AHDDMfd2VCthei74ViB5SLo4DXky7i8aol3JdNXv9KgXUscb/a5WshkauBxWSWwSjgpfVdPF2QlkbKf4XD9riNo0umtkDTDqppdxsKGu8FuEPLeM2SN+VaEX+6UmRbCs2ZTg93vsDTOaPMZ2/YZLzqkY48vEcuJpYse/O+EP0lSyHXlEktXj+X90wqd4HKgmtaJgeRM9jpNyiiEmcsZXBrFMovePtgnf1HXckWAyehpb0J3qCUTupI01gaE5+0RZomU6QNua5LRzEXTDh+ZUDDy2j6KQe315gsOujccnjQpdsesxghtAIlc2My8mguJuxVFZGsoSh6Lu31AbSED5eHHbJIEToZ6tyIdNLExLJZPr9+yGEnoqVLRud9XLegSA29sIEqhaMWNscMXnCIHjp4PQj3XeJFZh6cKNCvDkhThwlg7oagxUdPFRZrYefBPDtbczhRztpCj/n2iN0wxOlpgluulJKHdkbADw9zkq7L5IxluCHn
mFWVv+eCFZ5zKoKgw4MG73prDDpiE9JpTegXETqT9dgdlRwMQrYbbdREy/pNlYV3FQuNMbdX5nCH4uZhYmjd02QHPqMXUnRf+LzeSPitaFDTAKoU/hdAy42JLvcY3u8wutmlnM8otaXINLbQqKq3RlcuH/qHCKB+OKZUjRo1atSoUaNGjRmecgYK9q8usdOdQ7klduQQPhJC7ZTUFu5nlK7LN25cYn5xgDcf4zgF47GPvhcSHYsZod8vSR42aVzsEQWJlACV5ajsokrpLNFYJnsRGFvpJVWCfmm1258U6Ekm4ntFZaRaiLHs1A+MvJDuJKUIjixb313DnSjCXTFKdGKLO8wxcw7RHnjDylevtNz84Aydsz3CswOKQpOdNEXA08+JX0yYFIqwE9N1c3onETc3l7G5Jpoo/GNpf//n777MpYu7tL2EotRMUpeTkwbhQ9EAciaSnvePMgpfc/TGIvnrx4R+SuYYDk6aUp6IDd7ymFYjZqQteegSFKCUyOQDM586VVh0IlkPqyA7l1BcEnNLW7WEK69Ee8I30o8CdK5onekT+SlFqRmve4xHHu7dgLxR4myMsFaRpwZrAsz6mIEbEuwZEaCsWsY753r02hHxqot3aEgXC/RE+D+24lwcPeowOHJo7k21fqRsM1pXYvjrWIoJqFzNfP2E61RloCzY0orWlZLOrSnPZ/4dDSokDyJ2FyA8gmivJGlPNcY04Z5L2ury3bU2RaNEZQprLM2HlvBQ7IGKQHPw7SWOI4szVgT7cP2lFlZB41yfol2gw5w4MGQNF1VA3hX2ZdrUeD3Z29x7uIQyop0VHgqfRGWahQt9DvIO7qEDywlGWe7cXsHfcTm8EMtO+k6LopIkUJme+WuBEMlVKbtPIWtWu7cSqLobrSM2QVPrjDI2pIXhzFyP0io2T7okN9r4E8X9zUXa8yMGfckkUGUb80gMUuM5TbhfHUsuYpol8tm6Mu1GKT54b4NwfYh3LNlVLBy8vYy5NMQdKIIEggNpHoj25XplWJGuJ8Xse7mjkuamdGbtnu2yeFy1UJ9Yik1NHjli5DqwBIcwXq1sVXY88vs+0TFkrYhoz+INLXe3FmkNIDzIKB3FvfsLQk7NhY/Tak1YbQ64qZZIEo0TG8pc41wYMVzyINOYgZCbPT9jsqJJDn2ifSlhjW52ad2T77t3LuDRYsbc8oDQyzgeRkx6AWpiULmiewfCw4L3Hq2x1B2ymxuKygS09KuMk1GQa8rLE3pLHtF9h5XLBxSl5rAh/Jhy4DI4K92upaPITgLuu3M4Rnbvk4lHfhRU159l/pEl2i9wBwV5KMK2OitxlydM0og4V+gLI7KhR3rg4fcVrpLs48lui/4gJGokOLok8lNaQcJyc0g6b9iKOiSZEV/QY49gXzG8YCm9oqJLOET7BhNXzUCVkKQuKp/LQkqGepJT3GtiuzkqE+6sTi1m1+PocIHuHY07tDNumd8Xk24nLrDaEY7mC33SxCW3SkqZOqKxo7l3NC/+q8OQSeKBEa6Szi3qTsSJidhf6tDeVkQ7JX6vYHDR435rDjMRS5bguMTfdxiUbaxbgmtZOLH4fcuDXlfI/VGjIlFLhmo+mhC5KUVjhO/kjDKPOxOX+MDHP5Ymn8a5Yw6bTcqxgx4b1EpMMooIj7QI8nYGaGXRynKrt4aZOPgnauY9G913CA4speuxfS7EbkwIVkZMTIR1jDQipcKTDQ4U4aFIfeCVNF4cMDQdmg+lcSFbLcgDh+AYdFLiP/Lp7S7yZmNB6DV+SftAeLPhYYl/nJIfhFw9OId3bAj3JdsVHgpHa5y5dC8eM4494tShHDmoVKNTyawLNccRgn4lKK0KWxHBLUc35jnJFXk3Z36th78+Ir/dhLTq9oWKWD9tZBG5jpmd2afgqQdQjQcafdur/M9Oyd6AdPr4Gie2NN/3OTnjYlYnxGMP/SioHOulQwEL4Y5maNrQzglbsXixNXPYl5KIzi3BriuebQtZ
5WBdaXF4QkIsGp4sHAVCFHcUlFUXXpxUxyWrTmM3Z7wiEgrT7o7SQOGLE3XpKLKoSiXnitZNxWA8x/nXtwidjGuPGlJ3LjUXz+4TOhmlVdw7nJdgcluCSZ0x0w1pXXN5cHgGe26CLRXs+YT7mnBP0qOlIxOUNQp3UOAfa9I35uhvZKjY0NisShcTS9ppcbLWEPKpZ/H7MLjTEqpAqxDtkaQUf72GSxaJX9DCwpCV5oCmm1Da0xNKK8v9/hw7I4fyyGWlOeJC84itcYfFaERzOeEt5yy+l3Nx4QhP52hleddb5+LiESzBzkaLwa0OwaF0wSw2R7yyuEs/C7hzsMBCkFJamLy7QB7IRRvdc6R7KbG4Q0m7ooTfoucTylyjnZIyMbgTKR2htXDcykI0ZEpp01eFcECKVgCFTKzKynt7J/Jds0gu9tIR1W+scHMaD6W12h3CZMlWAW1Z6c5Yop1Tl3WQc7/wYVy0oVHQbMZ0lmKyM5qi1LT8hONxSB7Nk7Xk0gzvesQrBc5YShgAZmLwnZwz5w/ZsosYbTnZa9G4K509+kaA99UR8XqMq0vKUlOULs7I4E4kSLPTVPc0oHSFVC5dhBbre2L2W9qqi1Dh7jtcz87gL04oC429HxHtVCKdhYez1Ofsy7ucnA/kM2OXYd9DBQVZ0ydrGZxRWeltqVmXpPggSide865h4EV0xuD3pmakmhFNgrF0oXqjElVWOjKVJIlJIG07eP2cwtUipOcBKKL7DqUrnViN3QJVallYczvzcUQJcdo/rBaVwqJ68t5OXOLfkfJa4QmRt33dQWcw3JBOuZNBOJsq3J4m2rEMLzg0z45oz0t59WgUEQ/myVOH7vyI1PXJA5GeCA5VJe5oad+FYssjbi+Q5mAKaFVyEdapdKASi77R4OAKnF85ZND1yXKD1iXFjQWSOQc1geZyTDA3YDddZNnJSQvRafOdnH23SdpukkeawgPvwKDvdGTfGIGXQJhOydJIsJtNzwdAK4rQ0AjHhJ8ZzuaG3diZdaTmocyNjTsuqnBRRUhZwnELJhdTmnNjFhpjlrpDAien4aZMzrjc3V3ghZUD2eBZxf3DObJ3GmBLrDYo36DyksKZdtlWRO/IxTtR1ZzMbB1obComq1QLq5QIVWFFjLhSqkbB8UGLM+tHzC8e4umcQRpw8yAgDzTF2x1QEPUgi+T57rCQoGLfiMl638PEwkHTaUljUzE0LRqxCLY6o5Jw3xDuiadfHkpZWeeW/vV5gst9TCpadFlD8/D2EqqR4wY5vp9hlMV3c3TPxetJcL/Xj1hf6PHKxg55qbFW4Ts5N25eIm1q9Bi2Dju4bkErTMAtq/WrWn8Ns7nKGVta9yA7jMi+PGD1/CHWKjxTkBYy1gdXF2lsV52PuWK5NWS06lO6oXS9KSkJm1TKvsGh8LYKT1WNXlrkMXIJVrAW70iLPlTArKznjAqyluHR5jxXLm3jtEs8nTPOPaxV9JKAw5Mmnp+RNzvCCXMUlqrBoOJCNzY1JgUeuByVXV54cZstZVG5BqvIM4M6Ek27WXxgKp3Ixzu2PwZPnUSedmSyE7VoSBaqFpwS2rc0yZyhNIq0Y0XI8FaEa8XRPe3IiW8rife0I1Gx2fTItEccWcxyTOkJJ6h0pUPJbya0GzH7yZyoYkeapK1xBwWF72DiEl11cJS+QSdCpnWyihmqNdYxqALGl1OCBx7MKYqJIm2DO3aIO5reS7Iz1okSk8NKdfbOwyW+9MJ92d1bKPYDtpwOC+0Rjx4u4Bw7uIV8R51BsmhRpSzM47Uqo7UZYl25ncxbJquW4ECTR5Zs2yHyNbqwDC6K5YgZGnSiRPMmKHH7wjNROdjtYGZ1YmIq82FLvKDwB4akZXBiQ9KVrMXhUZNJ6rLW6TPnS7v9YdygHwccHrRmLc33dxe4v7tAkRhINVde2iIMU0Zjn/dvncFrpVxZ3WO+PWJn0OKLq5ucax7zTwcvkYcBeaC4vz+Hv5oz74/I
5gz91Ody55D3W4uzYKUILVmHUwuPQrR8sqblpbU9tvtt0twwNh5ZpNGpwRqfrGlmF6XXL8iaBhOXBHsTxms+4U7C4KKcl5RgHWlRVoUi3FViCOrL+TveKNBjTRmWpEONdS3Dc5J5yAOFO7bE84p4ucS6wjdyxpq8UWJGmjJUDE4ijC5ZaowoUQwSn3aQMCwhbUlHWTIncgBpR5F0nap1WrG932FpYQCA3QvAK2fmswCT/RY/9cot9idNHh53SRoKa1zRX/HlXNBwGiBVBsO6CqDKqBLWLKF0Kw2eiaJ0DeVJk7xdQlSKNEIp2i+TgxZf/ez7ZNVB7MYtrptlsp1opk2EhiwwYkMBpF0HZ1ziDDJK3zBZtWAsWRNArJdGZ+S6mKxU3XiuNIIk84jZswfRjiJeMLQfQNLWlA70X4DSFcmEdE6RdzJaDw29VwqstgTbElgVkaVcSvCPfNI5S1KNYd4qcI81fk+TLBTkDU20J/NTvChmytaxTBpg7jfYbwWYbkrYk85PM9acPOiSn+0TuDmBlzFxLMV+wEliaOYi5yKGwZZyvcQZ6qr93pI3hX9ThGK0rUoJ9lShq98Ryu2I4zDhSyub9LKAnVGbgVEkXY1OoH+nS3xWWMP3r69i/RI1Ea00FeVElWhh/5zYaAwv5Lg9QzZXOSLYaXcbuAOFNQ7hUUnS0oTHom3U60e8dPE+/TQQgchMLKfSloamkIvjRdFgmqrSi46PwzBtMspa2KBE+QWXz4gm1PpCj7lgTFoYDiZNHKcUZ4FUkTaFT+oNZR2YEoejvZTJostkrcAZafJIzrl8VzYu9vKIoWrgTBSTtRwz0bg9TX5gMEnFCRorHt1Y4uTskGaQMEld3BPJVOSR6IeVRoL/vGGJ54RLOlm1WKBoFSRzGlUadGZOOXsRsKBn8jSTFXkvaywmNrKpK2D8sEUQi7WIzsHpG8pYk4YOxbCBLuCkU+BV2ZI8ULAV8nDiMLc0YC6aMOePOU4irLYULrhDRbYZkZaKvahEUWmNWaoNvxg8j9cs4a4iXpD1oNhqcLim+OLGJo4qya0mLQy77TmyhsN4VYSbb91YQ4XF6TGfOJQGko6BtmG4IQKc1ljMRLqydarwT6hI+JJQGa1LcBvPQ3BkcMclSdug+5ob185CI2dl5YS5YMJiOGAhGHF40iRNXHTLkrQVqpC1O/fVbK6erFYd9gW4x4Y724v84pWrvH+ySm8SMEk84o4ijj3SbS3CyY7ojPn9xzUZvh/K/oAI698klFID4PpT+8A/eFgEDp71Qfw+RT02n456fD4Z9dh8Ourx+XTU4/PJeB7G5ry1dunjHnjaGajr1tovP+XP/AMDpdQb9fh8POqx+XTU4/PJqMfm01GPz6ejHp9PxvM+NnUXXo0aNWrUqFGjxhOiDqBq1KhRo0aNGjWeEE87gPrrT/nz/qChHp9PRj02n456fD4Z9dh8Ourx+XTU4/PJeK7H5qmSyGvUqFGjRo0aNX4cUJfwatSoUaNGjRo1nhB1AFWjRo0aNWrUqPGEeGoBlFLqF5RS15VSt5RSf/lpfe7vFyilNpRS/0wpdU0p9b5S6j+u7v/PlFJbSqm3qn9/4rHX/KfVeF1XSv3xZ3f0P3oope4ppd6txuCN6r55pdRvKaVuVv/PPfb852lsrjx2fryllOorpf7S83zuKKX+hlJqTyn13mP3PfH5opT6UnXe3VJK/d+UUp/u3fAHAJ8wNv+lUuoDpdQ7SqlfV0p1q/svKKUmj51Df+2x1/zYjQ184vg88bX04zg+nzA2f+excbmnlHqruv+5O3e+D9baH/k/xF3rNnAJ8IC3gVefxmf/fvkHrAFfrG63gBvAq8B/BvwfPub5r1bj5AMXq/Ezz/p7/AjH5x6w+JH7/gvgL1e3/zLwnz+PY/ORMTHADnD+eT53gJ8Bvgi89//P+QJ8G/hJRH/+HwO/+Ky/249obP4twKlu/+ePjc2Fx5/3kff5sRubTxmfJ76W
fhzH5+PG5iOP/1+A/+Pzeu589N/TykD9BHDLWnvHWpsCfxv4paf02b8vYK3dttZ+t7o9AK4BZz7lJb8E/G1rbWKtvQvcQsbxecIvAf9tdfu/Bf7MY/c/r2Pzc8Bta+39T3nOj/34WGt/Bzj6yN1PdL4opdaAtrX2G1Zm/f/usdf8gcXHjY219jettZU3Fd8Ezn7ae/y4jg184rnzSXjuz50pqizS/xz4Hz7tPX5cx+bj8LQCqDPAw8f+3uTTg4cfayilLgCvA9+q7vqPqtT633is7PC8jZkFflMp9aZS6j+o7lux1m6DBKDAcnX/8zY2j+PP8+EJrD53TvGk58uZ6vZH7/9xx7+PZAWmuKiU+p5S6l8opb5e3fc8js2TXEvP4/h8Hdi11t587L7n+tx5WgHUx9U/n0v9BKVUE/hV4C9Za/vA/wO4DLwGbCMpUnj+xuynrLVfBH4R+A+VUj/zKc993sYGAKWUB/xp4P9d3VWfOz8cPmk8nrtxUkr9FSAHfqW6axs4Z619HfjfAX9LKdXm+RubJ72WnrfxAfgLfHjz9tyfO08rgNoENh77+yzw6Cl99u8bKKVcJHj6FWvtrwFYa3ettYW1tgT+a05LLc/VmFlrH1X/7wG/jozDbpUOnqaF96qnP1dj8xh+EfiutXYX6nPnY/Ck58smHy5l/ViPk1Lql4E/Bfy7VWmFqjR1WN1+E+H4vMRzNjb/GtfSczU+SikH+HeAvzO9rz53nl4A9R3gRaXUxWoX/eeBf/CUPvv3Bar68X8DXLPW/leP3b/22NP+bWDa/fAPgD+vlPKVUheBFxFi3o8dlFINpVRrehshvL6HjMEvV0/7ZeDvV7efm7H5CD60A6zPne/DE50vVZlvoJT6WnV9/q8ee82PFZRSvwD8J8CfttaOH7t/SSllqtuXkLG58zyNDTz5tfS8jQ/w88AH1tpZaa4+d3g6XXjVZudPIJ1nt4G/8iyZ88/iH/DTSBrzHeCt6t+fAP574N3q/n8ArD32mr9Sjdd1fky7GKrveQnpdHkbeH96fgALwG8DN6v/55+3sXns+0bAIdB57L7n9txBAsltIEN2vH/xX+d8Ab6MLJa3gb9K5c7wB/nfJ4zNLYTLM517/lr13D9bXXNvA98F/mc/zmPzKePzxNfSj+P4fNzYVPf/TeB/85HnPnfnzkf/1VYuNWrUqFGjRo0aT4haibxGjRo1atSoUeMJUQdQNWrUqFGjRo0aT4g6gKpRo0aNGjVq1HhCOM/6AGo8fSyqVZuSyh+VRdGHhDvU990zex4/8Hnfd+Nj/lQfoxSiPvZlP/x7gv209/iE13zS59onfp/p/erjBU8+9Xt95HP/NT7b/oDHf5jH/rW/80ce+77v/8O4YH3acf3rHtOnPsf+4Nf/0I/Z73vsE1+qHr/5/WfKxzuG2Q+/3/e9/+n7fPT1j3/GRy+jj31selt90mOf9Fn2h3re93/mR173fZ9rP/0Ypu/xMZ+hPvH5T/65H33Pj79tPzTG6rFnfPLrPvqc03vefCf5DWvtL1Dj9y3qAOo5RErKV/UfQ2kFSpKQj99GK1AKpau/lYIP3a4udaVnz/3Y56nHHvvQ8/TsPT7teVYpyZGqjzz38cceu9/O3oMPP0899veHHnv8vT/8nrPXPfa82UKumB2/PPbJzzu9rT78XP39r3v8/T7+PT76eR89xk963sfc/rjH+OHe46OPfdrxft/f/BDH8aH3tp/6WfLPnr6Oj3ue/dBnPf6a0+9y+rf66Otmx376t1Lff/vx100XZDk1H3/v00VWfeR5+kN/28cuidP7dRUs6Mee99Hbmu9/TH/0Nj/sY+Xp7Y/cbx77rMefZ7Af/ltZNOXs/c3jt1U5ew+jSrQqP/Qe5rH3Nx/zHtPXm+p1GjkueY/yQ697/DjM4+9XvX72WZSz9zPVd569x2NjYLCPHV/1WPXbGgWm+qU1YJRCV38bHrutFLp6lUZhlJ79bdZuLlLj9zXq
El6NGjVq1KhRo8YTog6gatSoUaNGjRo1nhB1AFWjRo0aNWrUqPGEqAOoGjVq1KhRo0aNJ0QdQNWoUaNGjRo1ajwh6gCqRo0aNWrUqFHjCVEHUDVq1KhRo0aNGk+IOoCqUaNGjRo1atR4QtQBVI0aNWrUqFGjxhOiDqBq1KhRo0aNGjWeEHUAVaNGjRo1atSo8YSoA6gaNWrUqFGjRo0nRB1A1ahRo0aNGjVqPCHqAKpGjRo1atSoUeMJUQdQNWrUqFGjRo0aT4g6gKpRo0aNGjVq1HhCKGvtsz6GGk8ZSqn/L7D4jD5+ETh4Rp/9LPG8fm+ov/vz+N2f1+8N/+a++4G19hf+DbxPjR8R6gCqxlOFUuoNa+2Xn/VxPG08r98b6u/+PH735/V7w/P93Z831CW8GjVq1KhRo0aNJ0QdQNWoUaNGjRo1ajwh6gCqxtPGX3/WB/CM8Lx+b6i/+/OI5/V7w/P93Z8r1ByoGjVq1KhRo0aNJ0SdgapRo0aNGjVq1HhC1AFUjacKpdR/qZT6QCn1jlLq15VS3Wd9TE8LSqk/p5R6XylVKqV+7Lt0lFK/oJS6rpS6pZT6y8/6eJ4WlFJ/Qym1p5R671kfy9OGUmpDKfXPlFLXqnP9P37Wx/Q0oJQKlFLfVkq9XX3v/9OzPqYaP3rUAVSNp43fAj5rrf08cAP4T5/x8TxNvAf8O8DvPOsD+VFDKWWA/zvwi8CrwF9QSr36bI/qqeFvAs+rfk8O/O+tta8AXwP+w+fkd0+AP2qt/QLwGvALSqmvPdtDqvGjRh1A1XiqsNb+prU2r/78JnD2WR7P04S19pq19vqzPo6nhJ8Abllr71hrU+BvA7/0jI/pqcBa+zvA0bM+jmcBa+22tfa71e0BcA0482yP6kcPKxhWf7rVv5pg/GOOOoCq8Szx7wP/+FkfRI0fCc4ADx/7e5PnYCGtcQql1AXgdeBbz/hQngqUUkYp9RawB/yWtfa5+N7PM5xnfQA1fvyglPonwOrHPPRXrLV/v3rOX0HS/b/yNI/tR40f5rs/J1Afc1+9I39OoJRqAr8K/CVrbf9ZH8/TgLW2AF6reJ2/rpT6rLX2uePBPU+oA6ga/8Zhrf35T3tcKfXLwJ8Cfs7+mOlo/KDv/hxhE9h47O+zwKNndCw1niKUUi4SPP2KtfbXnvXxPG1Ya0+UUv8c4cHVAdSPMeoSXo2nCqXULwD/CfCnrbXjZ308NX5k+A7wolLqolLKA/488A+e8THV+BFDKaWA/wa4Zq39r5718TwtKKWWph3FSqkQ+Hngg2d6UDV+5KgDqBpPG38VaAG/pZR6Syn11571AT0tKKX+baXUJvCTwD9SSv3Gsz6mHxWqRoH/CPgNhEj8d6217z/bo3o6UEr9D8A3gCtKqU2l1F981sf0FPFTwL8H/NHq+n5LKfUnnvVBPQWsAf9MKfUOsnn4LWvt//SMj6nGjxi1EnmNGjVq1KhRo8YTos5A1ahRo0aNGjVqPCHqAKpGjRo1atSoUeMJUQdQNWrUqFGjRo0aT4g6gKpRo0aNGjVq1HhC1AFUjRo1atSoUaPGE6IOoGrUqFGjRo0aNZ4QdQBVo0aNGjVq1KjxhKgDqBo1atSoUaNGjSfE/w9O0XSe0qi/XwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "mel_outputs = tf.reshape(mel_outputs, [-1, 80]).numpy()\n", + "fig = plt.figure(figsize=(10, 8))\n", + "ax1 = fig.add_subplot(311)\n", + "ax1.set_title(f'Predicted Mel-after-Spectrogram')\n", + "im = ax1.imshow(np.rot90(mel_outputs), aspect='auto', interpolation='none')\n", + "fig.colorbar(mappable=im, shrink=0.65, orientation='horizontal', ax=ax1)\n", + "plt.show()\n", + "plt.close()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Let inference other input to check dynamic shape" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"The Commission further recommends that the Secret Service coordinate its planning as closely as possible with all of the Federal agencies from which it receives information.\"\n", + "input_ids = processor.text_to_sequence(input_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "decoder_output, mel_outputs, stop_token_prediction, alignment_history = tacotron2.inference(\n", + " tf.expand_dims(tf.convert_to_tensor(input_ids, dtype=tf.int32), 0),\n", + " tf.convert_to_tensor([len(input_ids)], tf.int32),\n", + " tf.convert_to_tensor([0], dtype=tf.int32),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAhQAAAGoCAYAAAAemnx2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAABFwUlEQVR4nO3dd5yddZn//9d1nzN90ia9kkIITbqhKiiCyKqoqy4qgsh+kV2wu4quu+ju8pO1YFl1MasIKgiIhaAoKEtxaUlogRACIQnJpLcpmX7u+/r9cd+Bk2HKSWbmlJn38/G4H3Pu/jl3Jmeuc32auTsiIiIiAxEUugAiIiJS+hRQiIiIyIApoBAREZEBU0AhIiIiA6aAQkRERAZMAYWIiIgMmAIKkUFmZjeY2X8kr99gZqsKXSYRkaGmgELkAJnZ/Wa228wqejvG3f/q7gvyWa7BYGbrzOwt+T5XREqXAgqRA2Bms4E3AA68s7ClEREpPAUUIgfmQuBR4Abgot4OMrMzzKw+a/04M3vSzJrN7FdmdmtW9cgZZlZvZp81s21mttnMLs469wYz+6GZ/dHM9pjZQ2Y2xcy+k2RKnjezY7OOn2Zmvzaz7Wa21sw+kbXvK2Z2m5n9LCnLCjM7Idn3c2AWcGdyn8/38L4mmNnvzazBzHaZ2V/NLOjtXDM7ycweTo5/2szOyLrW/Wb2NTNbYmaNZnaHmdUl+yrN7BdmtjM5d6mZTd6/fyoRyQcFFCIH5kLgpmR5ay5/5MysHPgtcRBSB/wSeHe3w6YAY4DpwCXAD8xsXNb+9wNfBiYAHcAjwBPJ+u3Atcm9AuBO4OnkWmcCnzKzt2Zd653ALcBYYDHwfQB3/zCwHniHu9e6+9d7eDufBeqBicBk4Evxqa8918ymA38A/iN5358Dfm1mE7OudyHwUWAakAG+l2y/KHkeM4HxwGVAWw/lEZECU0Ahsp/M7DTgIOA2d38ceAn4YA6nngSkge+5e5e7/wZY0u2YLuDfkv13AXuA7DYYv3X3x929nTg4aXf3n7l7CNwK7M1QvB6Y6O7/5u6d7r4G+B/g/Kxr/Z+735Wc+3Pg6NyfAl3AVOCgpKx/9d4nBroAuCu5V+TufwaWAedmHfNzd3/W3VuAfwHeb2ap5D7jgYPdPUzee9N+lFNE8kQBhcj+uwi4x913JOs300e1R5ZpwMZuf3g3dDtmp7tnstZbgdqs9a1Zr9t6WN977EHAtKSaoMHMGoizCNmZlC3d7lNpZukc3gfAN4DVwD1mtsbMruzj2IOA93Ury2nEAcle2c/hZaCMOOvyc+Bu4BYz22RmXzezshzLKCJ5lOuHh4gAZlZFXO2QMrO9f5ArgLFmdrS7P93H6ZuB6WZmWUHFTOIMx2DbAKx19/kHeH6f0xC7ezNxtcdnzewI4D4zW+ru9/Zw7gbiDMT/6+OSM7NezyLOTOxIsidfBb6aNIS9C1gF/GR/3oyIDD1lKET2z7uAEDgcOCZZDgP+StwOoC+PJOdeYWZpMzsPWDhE5VwCNJnZF8ysysxSZnakmb0+x/O3AnN722lmbzezg83MgCbi9xX2cu4vgHeY2VuTclQmDVBnZB1zgZkdbmbVwL8Bt7t7aGZvMrPXJdUfTcSBRoiIFB0FFCL75yLgp+6+3t237F2IGzR+qK8qA3fvBN5D3Niygbhtwe+JG1cOquSb/TuIA561wA7gx8QNHHPxNeDLSRXF53rYPx/4C3Ebj0eAH7r7/T2d6+4bgPOIq1y2E2cs/ol9P39+TtxYdQtQCeztkTKFuLFpE7ASeIA4QBGRImO9t6MSkaFmZo8B17n7TwtdlkIxs/uBX7j7jwtdFhE5cMpQiOSRmZ2ejB2RNrOLgKOAPxW6XCIiA6VGmSL5tQC4jbg3xkvAe919c2GLJCIycKryEBERkQFTlYeIiIgMWElXeZRbhVdSU+hiiIhICbIgwKsr6KoNSI3qwjHCMMBDi0dTCWBqTQPjUp08tbxrh7t
P7PeiA/DWN9X4zl2D0yv68eUdd7v7OYNysRyVdEBRSQ0n2pmFLoaIiJSgoLqG8Jj5bDqtmro3b6YzTLGrqZquPeWQCaAi5F9PvpN3175M3fSNLw91eXbuClly96xBuVZq6osTBuVC+6GkAwoREZGcBSkAUqPjEeozh82mc2wZtRsjyq8ZR1VLF2O7Qkh30D6xktZJ5dx2zRu5Zc164hH2h5YDEdGQ32eoqA2FiIiIDJgyFCIiMvwFKdKzZ9Jw/GR2HBMQdBrpNhi9NqTusW1Ycwvh9Al01VWSbumialMLNU83EO7YiWe68lRIJ/TSzVAooBARESkCcZVH6Q7loIBCRESGvfX/ciJTTttIZ8duoufGE6Wc2vWQ6nQyE0eR3rEbnl5FeTpNMHEC3t5B2NCId3UWuuglQwGFiIhIkSjlRpkKKEREZNiwsnLwCCzpc+ARdvjBjH0xovyPo5n4Yj3jm9dg5eV4VyY+JNNFmIwa7WGIb9uOmUFgYAZ5GlHa8VfKUYoUUIiIiBQJtaEQEREpFDMAgtpabNY0dh5fx/a3dPCxY//KfdsPYUzFNvZ8cwzBijWEbe0QxaNRWjoNQbBvLw53vCuDR4MzYuVIooBCRESkCDgQKkMhIiIiA6UqDxERkQJZf9XJhIfu4WvH/Y77GjtZs7acmiWjuO97J9I8t5bg2V3UvPA4URi+0sDSMxmC6mq8s4duoSU8uFQhKaAQEREpAg7q5SEiIpI3ZgRVVbScdSRHfflpXlwSMvqhWq7/9Cl4JmRO+zq8rQ3PZKhdBr01rwybmnruFlrAP+qlnBvR5GAiIiIyYMpQiIhISUhPn0bbYVNZ+94Ul516H0sb1rDiX45iwf1P42FEJgyxwMACPJPJ7aJFVMXguHp5iIiIyAA5hKUbTyigEBGRIhWksFQqfjl3FiuvGM8xR6+h6p753HfhQnhuNeWdy4jcX2kL4RGvDHRVauLZRkuX2lCIiIjIgClDISIiRcfSaezwg9l+4jhOuvQJOqLNbL1+Iu3/0smstsfj8SOy2z/09rqkGCGlmV0BBRQiIiJFwYGoVGMhFFCIiEixCVLs+tDrGXfhBnZvrmLlF46kYsNuJje9RNTSFo9kWbJZiOFLAYWIiEiRUJVHD8zseuDtwDZ3PzJr+8eBK4AM8Ad3/3yy/YvAJcSDmn3C3e8eqrKJiEhxSh02n81vnkh41m6Cz45mwcZ6wh07SnpI6lzFs40qoOjJDcD3gZ/t3WBmbwLOA45y9w4zm5RsPxw4HzgCmAb8xcwOcXdNSC8iIiNG5AooXsPdHzSz2d02/wNwjbt3JMdsS7afB9ySbF9rZquBhcAjQ1U+EREpDpZOgwXsuPB4gvfsoGFjFwv+bg1R954cUtTyPQ7FIcAbzOwxM3vAzF6fbJ8ObMg6rj7ZJiIiMiLsrfIYjKUQ8t0oMw2MA04CXg/cZmZzocd332NYamaXApcCVFI9RMUUEZF8Sc2awaa3TaNtEsz45igmvriJMAxHXHbCMcISHm8y3yWvB37jsSXEo4xOSLbPzDpuBrCppwu4+yJ3P8HdTyijYsgLLCIiIv3Ld0DxO+DNAGZ2CFAO7AAWA+ebWYWZzQHmA0vyXDYREcmToKaG9MwZNH3gJM668ynaz2hmymNdpP/vWTIbN+HhyGyTH7kNylIIQ9lt9JfAGcAEM6sHrgKuB643s2eBTuAid3dghZndBjxH3J30cvXwEBGRkUTdRnvh7h/oZdcFvRx/NXD1UJVHREQKzIz0rBlsPXsGzW9pIQicjrZO7jl9HrN2PQce4SOs3cRwopEyRUREioIReuk2ylRAISIiUgQciEq4l4cCChERGXxBiqCqEkuliObNpGV2Lem2iD2f2snO+i4m31lN0AVjn91NuGNnoUtbNEq5DUXphkIiIiJSNJShEBGRgTN7ZSCqoKaG7R88irYJRuvcLk4+fDW7GiawtbmKg742hsPXbcVbWokamwkzXQUuePFwVxsKERE
RGQRRCVd5KKAQEZFBYWXlcMwC1ryrlq5REam2gNHPldHw9XFMbG2hblolwaqXCNva8a7OQhdXBpkCChERkSIQD2ylKg8RERmhglGjWPfZ1/H7i7/OnXu2sejmc5l3w3ZoaMZbWgjbO+IDN20m1MBVfVAbChERERkgjUMhIiIjj8WNBzNvPo6NH+vEX3CuOOxsovYOZo9/kaihEc90jbgpyEcyBRQiIiJFIizQTKGDQQGF5E9WP3URKWFmpObPpWPGWI75+pPs/p8TmXr3JjJtbeBOuHNXoUtYkhwr6UaZpVtyERERKRrKUEj+KDshUvKsooLGvz2W4z/9JGv2tPHsCc5Ef4RM9kEegQX6P38AIvXyEBERkYHQOBQiIjIiBNXVrP7K0VQd0sCaD0zDWtp6z0JEYX4LJwWngEJERKQIOKZeHiIiMowFKXZfuJBRF2zElsCMj+0k3Lqt9+PVduKAaWArERERGRB3NPS2iIgMT0FNDfX/cDQtMyNqvjmZeX9dTtjaWuhiSRFSQCEiIlIUjAi1oRARkWHG0mmOf6iJ1Xc7h/ysGXt+HVFLS6GLNWw5pV3lUbolFxERkaKhDIWIiEiR0MBWIiIybKRGj8ZnTeOkm5bzyCXHMb9pG+HqdbhHhS7asOYYUQmPQ1G6oZCIiIgUDWUoREQkZkZQVcXmC4+k8ZAQPnwU/uxKwr2TfcmQU5WHiIiIDIij2UZFRKTUBSmCoxbw/OU1VK2HyY8YrN346iRfrsm+hp4RlvA4FKUbComIiEjRUIZCREToPPs4Wj7ewJi7y5h2707YsJmwubnQxRpRVOUhIiIig0JVHj0ws+vNbJuZPdvDvs+ZmZvZhKxtXzSz1Wa2yszeOlTlEhGRhBmpyZNoO28h6/4Wxnythmk/W4Gvqyfq6AAr3T9ukn9DmVu5ATin+0YzmwmcBazP2nY4cD5wRHLOD80sNYRlExERKSruRuTBoCyFMGRVHu7+oJnN7mHXt4HPA3dkbTsPuMXdO4C1ZrYaWAg8MlTlExEZyYKaGrZeeBR7Tm8hs92Y86uI4OFnCCP15igkTQ6WIzN7J7DR3Z/utms6sCFrvT7Z1tM1LjWzZWa2rIuOISqpiIhIfjkQJVOYD3Tpj5mdkzQxWG1mV/awf4yZ3WlmT5vZCjO7uL9r5q1RpplVA/8MnN3T7h62eU/XcfdFwCKA0VbX4zEiIvJalk7jkRPUVLPzPUeyZyaM+0MN1du6KPvz4+DJR6rZq69l2EmaFPyAuPlBPbDUzBa7+3NZh10OPOfu7zCzicAqM7vJ3Tt7u24+e3nMA+YAT1vc0GcG8ISZLSR+QzOzjp0BbMpj2URERArM8lXlsRBY7e5rAMzsFuKmB9kBhQOjLP6DXQvsAjJ9XTRvAYW7PwNM2rtuZuuAE9x9h5ktBm42s2uBacB8YEm+yiYiMtxZOk1qymRajprGptPSZGojDv1RA9HqdRCGuDISBRePQzFoPWsmmNmyrPVFSYYfem5mcGK3878PLCb+cj8K+DvvZ7rZIQsozOyXwBnEb6oeuMrdf9LTse6+wsxuI46OMsDl7hrnVURE5ADtcPcTetmXSzODtwJPAW8mrmH4s5n91d2bervhUPby+EA/+2d3W78auHqoyiMiMmLsHT8iyTqkxo1j5TUH87bjnmFbewf1j85nwaJG2LQN7+x8bXsJZSsKJk+zjebSzOBi4BqPU1erzWwtcCh91B6Ubv8UERGRYcQxIh+cpR9LgflmNsfMyonHgVrc7Zj1wJkAZjYZWACs6euiGnpbRGQYsXSaYMxoSKex8nIaT5wOf7+d0X8qY921M4jWbWBe56N4KkUUhspGjEDunjGzK4C7gRRwfdL04LJk/3XAvwM3mNkzxFUkX3D3HX1dVwGFiIhIkYjyVHHg7ncBd3Xbdl3W6030PMxDrxRQiIiUmqw5NixdRlBViY0bQ9v8SXSOTbPlPR185MhHaQ3LeWBrOfY/E5n+wCqihkY8yUp
4ps8egFIA7hAOXi+PvFNAISIiUiQGsdto3qlRpoiIiAyYMhQiIqXGAiwwgupq1n38SNpmdXHIwZupTm1iw5LZjPtzFQ9/9UiIIsbsbiTc9TJh32MSSRGIe3mU7vd8BRQiIiJFIsxhYq9ipYBCRKSEWEUFqelTefl907jiojv41p1HULu6jMxtk0mvqGf2gnbSj60k7EhmY1a3UMkTBRQiIiJFYJDn8sg7BRQiIkUuqKxk68XHcu3nfkS7l7Et8xJfefQ8Fr9zIfMbXiDc1QBRSAZINzQStbcXushyQNSGQkRERAZBpDYUIiIyaGzfPyqrvnU0o6Y38I0z3w5hRLR9B/PbHyc0w9JlWGB4FJ/noXpzSGEooBARESkCGilTREQGxoygogLKytj17iPZdkrI8Ues4S3jVzIq1c5/3HwyUz7+Apko3Occ3PGuzle3dV+XklPKbShKt+QiIiJSNJShEBEpEEunCcbX8eJn5hFWRfzzWXfw7ZWzqFw+hi1/mcfv1k3B2jMc9PwSPDs7Ab2PL5FkLqT0xCNlqspDREREBki9PEREJHdm+ClH89YfPUh71MxLi+dR3pDi1+84mVk7NxE1r8IzGZx4sKP9ouyEFIgCChERkSKgkTJFRKR/ydgSli5j2yXHM/F9G/jz+QsJmluY1/gcRE7Y3BwfqyzDiFXKvTwUUIiIiBQDV6NMERHpR8t7FtJZGzDlo2tpWNfBlMvKidatIQQ8DMEjZSakpCmgEBERKQKOenmI9G/v3AT6BiYjUHrGdDa/wfB0xKh/m86hT6+LZwj1ZN4N/b+QRClXeZRu6w8REREpGspQiIiIFAF1GxXJhVK6MoJYWTmp8ePwjniirv933wN87x/+jvLd7QTrNhM17VEjTOlRKQcUqvIQERGRAVOGQkRkoMzAXv1+FtTW8NxXDuI/33wrXZ7mR8cdS3nb03jkhN0n+RJJaHIwERERGRTqNioiMhIFKVJ1Y9l23iG0TTY6xzqTjtrKfx96Mxd87zBu/PdTIQiImus1rbj0z9WGQkREREa4IctQmNn1wNuBbe5+ZLLtG8A7gE7gJeBid29I9n0RuAQIgU+4+91DVTYRkQMWpAgqK2g74wg2XtDFuDEt7H7RqdxqVOw0wp9P4p9uPpmp9hiZ7PYSyk5IP0q92+hQZihuAM7ptu3PwJHufhTwAvBFADM7HDgfOCI554dmlhrCsomIiBSdKJkgbKBLIQxZhsLdHzSz2d223ZO1+ijw3uT1ecAt7t4BrDWz1cBC4JGhKp+IyH4LUlgqxcufPYbZb17HmF8cRN0TXUzcvQmCADq7CLdux93B1ZtDRpZCNsr8KHBr8no6cYCxV32y7TXM7FLgUoBKqoeyfCIiInmjbqMHwMz+GcgAN+3d1MNhPVY4uvsiYBHA6KBOlZIikh9BitTE8TCqhlQ78O4WJrQ8jqUCwjDCM11qJyED5goocmdmFxE31jzT/ZX/ffXAzKzDZgCb8l02ERGRQirlcSjy2m3UzM4BvgC8091bs3YtBs43swozmwPMB5b0e0F9GRCRPAkqK+g6dDorvzSeWb/eRNi0B0sF2KhR8c/y8nisCZERaii7jf4SOAOYYGb1wFXEvToqgD9b/B/vUXe/zN1XmNltwHPEVSGXu6tFk4iIjBxe4gNbDWUvjw/0sPknfRx/NXD1UJVHREYmS6fxMIw/rfdmEPa3rUOQoulvXsfOI1PMvrWTaNMW8AjPZKClhaizS7OHyqAo5TYUGilTREREBkxzeYjIsGTpNMG4cXh7O7R34GGIpVJYZQXe3hFnGCLHgtd+I/TIX8k4BJWV2JjRdI4KGP9sSPWzmwjDeJ+HId7Wltww0NgTMkDqNioiIiKDoJSrPBRQiMjwYkZQVUUwbiyUl0FX5ysdwoLaGkgFcbuKzk6CsjTelYlHuYwiCEPcHTMnqKklc8Qc1pxTRdeYiPFPwajFTxGGYdwmY597Bq/cW+0oZKRSQCEiIlIESn1
yMAUUIlLyLJ2O20OkUnEWYtoktpxWh4Uw+X+N1KhRRGNqaZ01ilR7hKcNN2idXEbbRCMqg7AS3KBiNwQZqDh3G6dPfYrV/3ciVZtSTPztSsKOjn1v/Eo2Isr7e5ZhyEs7waWAQkREpEhopEwREREZ0ZShEJHSs7fxoxmWLiMYM4quI2ax/uxKfF4LB03cTdPSOspajM7/CXnLpFU0h5Xc9ORCUrvK8AC8zAk6nHQLBB2GdcWXbZ4TwdhOpvxsAssXb2J+62MA9NkhtJTz1FI0HPXyEBERkQHTOBQiIvkTpEhPnkg0fixNh4+l4eAAD8CObSR4porae2pIP97FIY0boSsDP0/zQOMMCEMOm95E14TauLI3grJdrfExgLV1QGCQCYmamon27CFS5kEkZwooREREikQpx7AKKESkeAUp8IjUwXPiBg5RRMPxk9l5lBGlwIBUK5S1wNT/DEhvXEe4YydRZ+e+HTn3fko3NRFkTTEedt8vUmBqQyEiIiID4q6AQkRkSOy6aCGTHtjMixdPZuwq2DPTqHj9LtKP1pFuhzFrMtS81IR1ZYheWkdm7zTlfdk7TLYnOQxlJ0QGhQIKERGRIqFeHiIigyy14GBS79vOO/5pKYv+653sOD6icluKqZfuwls3ErW0gkdxW4m+sgzdJ+zyqOf9mthLikAp/wpqpEwREREZMGUoRGRQWUUFQDwteNTn+JI9C1LY8Ycz/ftrCE9u53fBVCZXLGdyEOCdna+doKs/3b/y9fYVsJS/GsqwoUaZIiIiMiCOKaAQEYE4O5GaOplo2w6IHMrS8c9sHuFht8yFOwQpgsoKVn39dZx74lOsuXAWRC/ibnhbe9z2QVkEkaKlgEJERKRIlHLInFNAYWZTgIXE73Wpu28Z0lKJSGkwAwuwIE7TpqZNIRw/ihS8MkcG6RRelo6PTafwshSp5jbIhEkGIyKcOIb2CZWMvnID6YcD1rx9DOG21fEtUil8b5YjSB1YuwyRUlDiA1v128vDzP4eWAK8B3gv8KiZfXSoCyYiIjLi+CAtBZBLhuKfgGPdfSeAmY0HHgauH8qCiUiRC1IENdUENdVEk8YRNLXScMIUghDKxlZQuXkPYU05Fjkd4yvpHJOmq8ponmMEHeBpaD+4nbL6Cqq2Gp1vbIJrZjLnj4+wTw7CAoLyAA+Tthd75+JQewqRopLLOBT1QHPWejOwYWiKIyIiMnK526As/TGzc8xslZmtNrMreznmDDN7ysxWmNkD/V0zlwzFRuAxM7uDOJFyHrDEzD4Tv3m/NodriMgwk6obS+Ob59NWZzQe6sz88yhq6tvpGlNOkHEyoysJMhEts2rwAMIyiMqh9mWna5TRORom3FdB8yyjdaoz799DeHEF3caxxLs68a6CvEWRvMtH4s3MUsAPgLOIkwZLzWyxuz+XdcxY4IfAOe6+3swm9XfdXAKKl5JlrzuSn6NyLLuIiIgUj4XAandfA2BmtxAnC57LOuaDwG/cfT2Au2/r76L9BhTu/tXkhjXu3nIABReRYSQ1bhxMGk/b97vYtDoi3ZRi0hJY/76IuoeqqXu2lZaZVXSMNspanYrGkKAjoqo9JNURktq0E6+pgl0NhLsaqAsMD0MitYmQEc4Z1F4eE8xsWdb6IndflLyezr5NF+qBE7udfwhQZmb3EycQvuvuP+vrhv0GFGZ2MvAToBaYZWZHAx9z93/s71wRERHJkQODF1DscPcTetnX0026R/Rp4HjgTKAKeMTMHnX3F3q7YS6NMr8DvBXYCeDuTwNvzOE8ERERKT71wMys9RnAph6O+ZO7t7j7DuBB4Oi+LprTwFbuvsFsn4BGI8uIjDCWTuORs/Jr83nb8ct57qrXcdjLjVhDM968h7r/rSJq3oOlUoxunU7bjFrKmjJYGJFq7cL2tEFDE2FjM57peqX1WffZxEVGsjzV/C0F5pvZHOKOF+cTt5nIdgfwfTNLA+XEVSLf7uuiuQQUG8zsFMDNrBz
4BLByPwsvIiIi/clDQOHuGTO7ArgbSAHXu/sKM7ss2X+du680sz8By4EI+LG7P9vXdXMJKC4DvkvciKMeuAfot/2EmV0PvB3Y5u5HJtvqgFuB2cA64P3uvjvZ90XgEuLsxyfc/e4cyiYieRBUVvL8D47kp2dcz8evO5F135xKxZoniJIhtz2TgT0tWFka7+jAlz9PxfJ42GwsIApDTe4l0q/8zTbq7ncBd3Xbdl239W8A38j1mrm0oVjg7h9y98nuPsndLwAOy+G8G4Bzum27ErjX3ecD9ybrmNnhxCmXI5Jzfpj0kxUREZESkEtA8V85btuHuz8I7Oq2+TzgxuT1jcC7srbf4u4d7r4WWE3cT1ZECmjP+05k7S+P5r1PrqN6dTnfOO1spn3rMcLVayEK8Uwmzk5APGlX5PFkYemyeJsFaiQhsj+G41weSXfRU4CJe0fFTIwmrnM5EJPdfTOAu2/OGnlrOvBo1nH1ybaeynUpcClAJdUHWAwREZEiU+KzjfbVhqKceOyJNPuOitlEPOvoYMqlT2y8MR6YYxHAaKtThazIYAlSWGAE1dVEh8yiaV4tm8/KMPavNfz6qlOZsfoxMtDn9OGeicfItnQZVl5OMHYMtLUTdXTgXRlwdRATGa56DSjc/QHgATO7wd1fBjCzAKh196YDvN9WM5uaZCemAnuH8sylT6yIiMjwVsJfk3Pp5fG1pCtJCDwOjDGza5PWn/trMXARcE3y846s7Teb2bXANGA+sOQAri8i+8HKyrHyMjh4Fqs/MJZwVMSYGY007ymj4tmABde1E7zwAuGelpzaQli6LJnMqxOAcPtO9e4Q2S+lW+WRS6PMw5OMxLuIu5jMAj7c30lm9kvgEWCBmdWb2SXEgcRZZvYi8Sxn1wC4+wrgNuKJSf4EXO6u3KiIiIwww7FRZpYyMysjDii+7+5dZtZvcd39A73sOrOX468Grs6hPCIyUGb4KUfzXzf/gEpzbm06mjW/PZtgV0DZ0nFMawjprIngqecJ9/bi6OU62dkHD7t9D+ijvYWIDC+5ZCh+RDwIVQ3woJkdRNwwU0RERAbTcM5QuPv3gO9lbXrZzN40dEUSkaEQVFfjh88lKkux/coO3jxjOZ86+lyiPS1YeTnzalYT7d79yrgSVWZ4f20fuu9XRkLkwA3ubKN512+Gwswmm9lPzOyPyfrhxA0qRURERIDcqjxuIJ5AZFqy/gLwqSEqj4gMNjNS48bx/H8dTvW1Wznhh0+SvmMcz51RQ9jQiGcyRK2thNu3vzrqJahnhkgBuA/OUgi5BBQT3P024tnGcPcMmr5cRERk8A3nNhRAi5mNJymimZ0ENA5pqURkcAQpgqMW0PWtZsbfUkbHf1Tz5LZRTAifJGpvL3TpRGQYySWg+AzxwFPzzOwhYCKDP/S2iIiIlHCjzFx6eTxhZqcDC4iH8Frl7l1DXjIROSBWVo4tmEvzgjG0XNjIO2c/zX1XncrkR9eQ2bZDPTFEilj/ozwVr34DCjNLAecCs5Pjz7a4O9m1Q1w2ERGRkaOA7R8GQy5VHncC7cAzJA0zRaQImWGpFBv+6QROeudytnc00P7zOSy9fQJVDUvImIHl0g5bRGT/5RJQzHD3o4a8JCIiIiOalXQbily+rvzRzM4e8pKIiIiMdMO82+ijwG/NLAC6iBtmuruPHtKSiUhuzEiNr2P6H9o5Y+zzfPne49j84UlEa9czwXYRJlOJxyPeqEGmiAyNXAKKbwEnA894vwP7i4iIyAEr4b+yuQQULwLPKpgQKQ6WThOMr8Oqq9h++jRqPrSJ143bxPJ/OYaN66Zx6MvPEra1aehskVJUwv9tcwkoNgP3J5ODdezdqG6jIiIislcuAcXaZClPFhHJB7M4y2CGlZcTVFdjNdXseuNMtrwx4h9Pu5ffbEjTduNUVr0wmoqlywiVlRApXSU+fXkuI2V+NR8FERERGemG5UiZZvYdd/+Umd1JD7U67v7OIS2ZyAi
WnjKZbefOZeJvnsNnTeX5T9fwtiNXUFe2g1tWTKLuwUru/49DGbN1Ax6uA4/6bzOxN+MhIsWrhP+L9pWh+Hny85v5KIiIiIiUrl4DCnd/PHl5jLt/N3ufmX0SeGAoCyYyUqWnT+O5f51O9YQmrvnXu1naNptv3/geXrz+MMq2NXNI2AANTWR27Ny/Cys7ISJDKJeRMi/qYdtHBrkcIiIiI5754CyF0Fcbig8AHwTmmNnirF2jgP38aiQifTKDE1/HxGvXM79mHRt/MptpP9/EF9rOIOrsYnr0MAAa51JEilVfbSgeJh6DYgLxaJl7NQPLh7JQIiIiI9Jw7Dbq7i8DLxMPuy0ig2nv2BKjR7Ph4vmEC5t4z8FP8uBXTmbXs+OYsm4JYSZT6FKKSD4VcGKvwZDLwFYiIiKSDyUcUOTSKFNEBosZBClSo0bR+LfH8rcPPkPLEe3U/n4Uj586mqo7lhK+tA5XdkJESkyfAYWZpczsF/kqjIiIyEg2LHt5ALh7aGYTzazc3TvzVSiR4crKy2l697FEF+5g27Yufv2GIzikeQVBbQ1hS0uhiycihVbCVR65tKFYBzyUdB195RNPs42KiIjIXrkEFJuSJSAeg0JE9lNQWYnNms6Vf/o1V6+dSNNPZnDoH1cRNjZBFBJ2dBS6iCJSDIZzhmLvbKNmVuPuysmKiIgMgUK2fxgM/QYUZnYy8BOgFphlZkcDH3P3fzzQm5rZp4G/J47FngEuBqqBW4HZxNUs73f33Qd6D5GiYEZ06tFcev1vaIlWc83xb4KGjYz2+njUSyvdQWxERLLl0m30O8BbSYbbdvengTce6A3NbDrwCeAEdz8SSAHnA1cC97r7fODeZF1ERGTkcBucpQByGofC3Td02zTQKQXSQJWZpYkzE5uA84Abk/03Au8a4D1ERERKiw/SUgC5NMrcYGanAG5m5cTZhZUHekN332hm3wTWA23APe5+j5lNdvfNyTGbzWzSgd5DpBhYOk3Hmcew4aIM1599BtG2HXhn877TiGtKcRHJUsptKHLJUFwGXA5MB+qBY5L1A2Jm44izEXOAaUCNmV2wH+dfambLzGxZF2oZLyIiUgxy6eWxA/jQIN7zLcBad98OYGa/AU4BtprZ1CQ7MRXY1kt5FgGLAEZbXQnHcjKcpcaOYeNHjqDpsC7mf9/JvLzhtdkIM2UoRGRfJfyR0GtAYWb/RR9vzd0/cYD3XA+cZGbVxFUeZwLLiAfNugi4Jvl5xwFeX0REpPSUeLfRvqo8lgGPA5XAccCLyXIMA2iU6e6PAbcDTxB3GQ2IMw7XAGeZ2YvAWcm6SGnI6v5p6XScnTiqk7m3RaSfXdtzJsICdRsVkWGj1wyFu98IYGYfAd7k7l3J+nXAPQO5qbtfBVzVbXMHcbZCRERkZCrhDEUuvTymEQ+5vStZr022iYxsZqSnTYV0Cq+tZutpdTTPgfe+9SF++Xgnh33mRaK2dsLOPubVUxsKEclWwh8JuQQU1wBPmtl9yfrpwFeGrEQiIiIjVCm3ocill8dPzeyPwInEsdOV7r5lyEsmUqSC6mrCo+ez5j3V1K43KhqchgXQWRdS1phi2RXHsWDJcsKuzj57clhZGu+MlKUQkWEhlwwFwELgDclrB+4cmuKIiIhIKcplcrBrgNcDNyWbPmFmp7j7F4e0ZCJFyNJpGn49lbKgCXu6mqZDQ6jtonplJXNvacK2bCdqaMQzmfiEPrIP3pVRdkJE9lXCHwm5ZCjOBY5x9wjAzG4EngQUUIiIiAiQe5XHWF7t5TFmaIoiUpyC6mqC0aNY+c+zeei8b/H+T32WmjXNzFvxOFYW/xeK2juIov0cnmV/jxeR4a3EB7bKJaD4Gq/28jDiqcuVnRARERlswzmgcPdfmtn9xO0oDPiCennIsGYGFmCB0fr242i5pIFvHX47l972MS6efya19jRRezsA3tXHGBMiIiNIv7ONmtm7gVZ3X+zudwDtZvauIS+ZiIj
ISOODtBRALtOXX+XujXtX3L2B1w6bLTIsWFk5qbpxbL38RL69+kFGf2oDFTfV8Z9HLGTOPy/BOzqIOrsKXUwRGYaMuA3FYCyFkEsbip6Cjlwbc4qIiEiuhnMbCmCZmV0L/ID4rX6ceBZSkf3Tx6iRhRbU1GDl5XQePYf6N1WCOZ895m14VxNjeOaVNhMAxD2oRUQkSy5VHh8HOoFbgV8B7cDlQ1koERGREWeQqjuKtsrD3VuAK/NQFhnuiiw7EVRWYuXlNJ11GNteH/CGM56hKrWCdfcfx8G3NhM2JE2HzPY9scjeh4gMI3n6eDGzc4DvAingx+5+TS/HvR54FPg7d7+9r2vmMvT2IcDngNnZx7v7m3MuuYiIiBQFM0sRN2M4C6gHlprZYnd/rofj/hO4O5fr5tKG4lfAdcCPAQ3tJyIiMlTyk6FYCKx29zUAZnYLcB7wXLfjPg78mngcqn7lElBk3P2/96OgIsXPjIZ3H0PbxICWk1qxl6vYfMl0wpWrmeePQir16rGq4hCRPBnE9g8TzGxZ1void1+UvJ4ObMjaVw+cuE85zKYD7wbezCAGFHea2T8CvwU69m509129nyIiIiIFtMPdT+hln/WwrXso8x3ikbFD696OrBe5BBQXJT//qduN5+Z0B5Eikxo9mtY3LKB1ckDbFGf+VU2ELz2Dl5e9MmGXR8pKiEgB5Oejpx6YmbU+A9jU7ZgTgFuSYGICcK6ZZdz9d71dNJdeHnP2u6giIiKyf/I3bPZSYL6ZzQE2AucDH9ynKFl/+83sBuD3fQUT0Mc4FGb2+azX7+u27//bj4KLFAVLp0lNnsSudxzOy+9xxq7OMP+/6/ENm7DA8Oy2EppaXEQKIB/jULh7BriCuPfGSuA2d19hZpeZ2WUHWva+BrY6P+t19+nKzznQG4qIiEhhuftd7n6Iu89z96uTbde5+3U9HPuR/saggL6rPKyX1z2tixSfIO6pkRozmtaTDmb92wNsTCd4B/N+YpSvXE/UvCee7MsjULsJESm0Ev4Y6iug8F5e97QuIiIiA1SoYbMHQ18BxdFm1kScjahKXpOsVw55yUQGwoxUbQ1bPngEN1z5bb788rsov38uNcsrmfKH9WTqNxKl03gm8+o5rnYTIiIHqteAwt1Tve0TERGRITBMMxQiJSk9YzrPf3Ymf3fGw9z+fCufX3A6QUUrB+15DKKQvTmJfbITIiKFlr9uo0NCAYWIiEgRMEq7x0Nf3UZFSoql0/zrmif4x/vvJd1iPHnxkRz8sbV4RwfhnhaNLSEiMoSUoRARESkWqvIQyS8rKwcgGDuGzCHTOfO6hzi95nn+/Y3vJNrdwFyWE7W1v5qVUHZCREpAKXcbVZWHiIiIDFhBMhRmNhb4MXAkcYLno8Aq4FZgNrAOeL+77y5E+aS4pSZPYuVVs7HqkLcfuZxtHSH3XnoKD66bR2brFmUjRKR0KUOx374L/MndDwWOJp6c5ErgXnefD9ybrIuIiIwcPkhLAeQ9Q2Fmo4E3Ah8BcPdOoNPMzgPOSA67Ebgf+EK+yyfFyU85mk1vqKF1WsRxx61m6g8CIGD1f86GXY3Y9mfJKDMhIlIwhchQzAW2Az81syfN7MdmVgNMdvfNAMnPST2dbGaXmtkyM1vWRUf+Si0iIjKUBmnq8kI17CxEQJEGjgP+292PBVrYj+oNd1/k7ie4+wllVAxVGaVIWFk5uz9yMtOvXUPb69pItRm7v3IQo19qYezTO/H6LYTbd6rdhIgMDyVc5VGIgKIeqHf3x5L124kDjK1mNhUg+bmtAGUTEREpGGUo9oO7bwE2mNmCZNOZwHPAYuCiZNtFwB35LpuIiIgcmEINbPVx4CYzKwfWABcTBze3mdklwHrgfQUqmxRQespkvLUNystYd9kCLnj/vfxq7W62vauag7ctjw+KQjxIEXoEXsJ9rEREuivhj7SCBBTu/hRwQg+7zsxzUURERIqGRsoUyYX1MY9ekCKorGT
H2XO5ZNlTXP7oQ7RNy/DgJQuZ8sENZLZuixteZg+lreyEiEjR0FweIiIixaCAPTQGgwIKyQurqMBSKaLW1n13BCmCmmra3nAoABVNIdeffgre3sGhmZVEzc24mbIRIjIylPBHnQIKERGRImCUdhsKBRSSF5ZOQxC82o7CndTo0az57JF0TM4QtMfNeSY8blRt3Z4cE71yrIiIFDcFFCIiIsWihL8/KaCQvIja2l/JOFgqBRaw6iuHE2ScUS+kqVvVBUDVg88TaRhtERmhrIQzsuo2KiIiIgOmDIXkTXrWDHafNJ3dhwZMPGUzE35hjP/980TNzQB4JkPU11gVIiLDmbqNioiIyGBQLw+RbEEq/pnVZmL7Ja+neQ50jQ2xTqfrxsnULd8dZydSKbyzMz7HAnC1oRARKTUKKERERIqFMhQyonUbyTKorCAYM5qXL5zL2Ddt4YsH38XH/3AC0+93aurbsGdeJOroILIgnpMjk3n1WurhISIjmKo8REREZOAUUMiIZgEYpOrG4h2dbL1pGtXlXbStjBhz/SR++NhZHLLpCQgM7+zEX8lmRD1cS/N2iIiUIgUUIiIixcBV5SEjXPjGowkrU7SPS7HtbzqY+aM0lZtaOXTzWqKGRjIdHT1nHXrapl4eIjKSlXBAoZEyRUREZMCUoZADZwavP5KWqeXseX8TLZtGMXpJFVUPPoOVl5HZ3bj/vTa8h3YVIiIjgKYvFxERkcFRwo3SVeUhIiIiA6YMhew/M6y8HDtsHlu/3EnrU7XM/XKIWyPRilV4ugxv7ziw6osSjs5FRAZKVR4iIiIyMJptVEYKS6exigoslaLh3MMp/+gWJnxlDKmnniZsa3slu+Bdna9OECYiIjmzEm6XrjYUIiIiMmDKUMhrdR/+2gwsoP62BbS1lFMzqp3Ksp2M/udR+OPPEnn02rYPPW0TEZG+lfDHpgIKERGRIqFGmTKspCZMwGqr2XnKVLa/rYO5U3fwN1Oe4darR5GeEFC1vYKxi58hamuPB64KUvsOl222709lKkREhj0FFCIiIsXAKekvYAooZB+piRPZsGgi7W3lRGEHox+tInxhMndvqmHc1pcYvXMXFhhRJvPqSd3Hmyjh/xAiIoWkKg8REREZOAUUUvLM2HnJSVzw6T/yp4smk9qyHW9qJurogDAkCsNXx5no3k9aGQkRkRFPAYWIiEgRKPXZRgs2sJWZpczsSTP7fbJeZ2Z/NrMXk5/jClW2ESdIkTpsPu3nNnHPeceS2rgDb2klam3FOzvxrOyEiIgMEffBWwqgkCNlfhJYmbV+JXCvu88H7k3WRUREpAQUpMrDzGYAfwNcDXwm2XwecEby+kbgfuAL+S7biBOkWPsfCwlntjP//c8QZjJYWTmWCpSZEBHJs1Ku8ihUG4rvAJ8HRmVtm+zumwHcfbOZTerpRDO7FLgUoJLqIS6miIhIHpVwQJH3Kg8zezuwzd0fP5Dz3X2Ru5/g7ieUUTHIpRthzEhPnkjNRmP+d7vwKOnFkenCs8eZEBER6UchMhSnAu80s3OBSmC0mf0C2GpmU5PsxFRgWwHKJiIiUjClXOWR9wyFu3/R3We4+2zgfOB/3f0CYDFwUXLYRcAd+S7bSGJl5aTG17Hm/81lykMNBA0tr+5LpbB0Gkyz24uI5I0DkQ/OUgDFNA7FNcBtZnYJsB54X4HLIyIikl8lnKEoaEDh7vcT9+bA3XcCZxayPCIiInJglNMeodrOOYaxd8RjaPuK1UQvb4ynIoe4cWYQYKlUPAV5kCpkUUVERgzzwVkKoZiqPEREREa2Eh77RwFFCUuNHg1TJmItbXhLS9wQp6oyblAZBJDJQCoF6RS447sb8TDEpk6i5jP1bP/SbOY8+SxhV+drrm3lZXhXJvnl7j4bmIiIyL5U5SEiIlIk8lXlYWbnmNkqM1ttZq+Z6sLMPmRmy5PlYTM7ur9rKkNRaoJU3LbBI5gykY6ZYylrrCJoqsIy4asTw7R
3QDoNZnh5GV5eRseCyaz7mzQLT3iBxjNbCdo3EQZJOwkLXmlDYYHh7R14piu+Zwmn4ERESoaTl14eZpYCfgCcBdQDS81ssbs/l3XYWuB0d99tZm8DFgEn9nVdBRQiIiJFIJ6+PC9f4BYCq919DYCZ3UI8n9YrAYW7P5x1/KPAjP4uqoCiVJgRVFfjh8wmtWUn0Z4WvLqCztFp2iaWEZaPonJXSFlLhkxlilRHRFRm7JlWRqbK6BoNrYd2ULs8oPH6yUTtyUSvSVYCD1+5lWcyGnpbRKS0TTCzZVnri9x9UfJ6OrAha189fWcfLgH+2N8NFVCIiIgUi8FrA7/D3U/oZZ/1sK3H1IiZvYk4oDitvxsqoCg2Zq9ps2DpNBs+v5DWmRkmPpqiLmV4ehJNc6sZ88IeWmbWEKWhbXyKlikpWicbHeMjPO1U7DTKG6FqqzPjLx0EGzYQ7WoYcJlERGTw5anKox6YmbU+A9j0mrKYHQX8GHhbMvhkn9TLQ0REZGRZCsw3szlmVk48r9bi7APMbBbwG+DD7v5CLhdVhqIYJL0srCwdjyGRiI6Yy+7Davnkl27jqt+fQO3aNLuOihhzYSPjK1vouHoe1hVSsbuLqi0hBHEWa9LDrXg6wNo6sbYOCEM8ExI1NBKG4avtJnIsW1BbS9TSun/niYjI/slTLw93z5jZFcDdQAq43t1XmNllyf7rgH8FxgM/NDOATB9VKIACChERkSLheatedve7gLu6bbsu6/XfA3+/P9dUQFFoQYpU3Vh82kTapo+icV4ZDUd38blT/8SecBs/XXkSN59zGoe0rsGb90AUQVkZjWFI5RHtBM1tlK2pJ6ipJppcF/8ybt9NtHNXPFbFQH853YmamwfnvYqIyLClgEJERKRIFGpir8GggKKAgspKtn7kWKJyo3Wak6mOKGtyptyX4g/XnkS0+mXmjd9G1NAYjw0RhvEomWEEHmHPvEiUShG1thLt2QNbthb6LYmIyECUcI86BRQiIiLFwMFKeC5GBRQFEtTU8OK/vY5ofCfpzeXU1BtjXwyp3NyIdWQIX3gJ3Mls3bZPxJo9gqVGsxQRkWKhgEJERKRYqMpDcmbGzr8/ida3NjPqvoCpX1uHlZURbt+BR07kSb5r7y9VCf9yiYjIfirhj3yNlCkiIiIDpgxFHgU1Naz54lFkap2Jv6ul7rEtRA2NkErhYahshIjICJenuTyGhAIKERGRYlHCAYWqPERERGTAFFAMISsrhyCFpdN0nX0Cc+4P6awLmf+LPdT9fiXR2vXxgFWdnSUdlYqIyCBwIBqkpQBU5SEiIlIEDFcbCskST/NKUFVF5rhDmPHNlzh97Cru2TmKFz93GIc8uBQHwuxfmhL+BRIRkUFUwn8PVOUhIiIiA6YMxSDaO9nX3A+9yLUH/Y7vbm/n8X89nq2rJuBbdxDseaqko08RERliJfw3QgGFiIhIMdjbKLNEKaAYBFZWTmryRNZ8u47Ojna2fnsely4+A4CKzFLCIAUelXTkKSIi0hcFFCIiIkVCvTxGIjMsXUZQVcnWm6YwobqV2ptqmbj4BcJdDXgUvnqsshMiIpKLEv5boYBCRESkKLgCiv1hZjOBnwFTiJufLHL375pZHXArMBtYB7zf3Xfnu3w5s4DUhDpeumwu426O6No+igmPLCdsa4fs7ASU9C+IiIhILgoxDkUG+Ky7HwacBFxuZocDVwL3uvt84N5kXUREZGRw4i+gg7EUQN4DCnff7O5PJK+bgZXAdOA84MbksBuBd+W7bPsjPX0qb7znJbpGR4z9/QrK7n2CqKXltdkJERGRXJXwXB4FHSnTzGYDxwKPAZPdfTPEQQcwqZdzLjWzZWa2rIuOvJVVREREelewRplmVgv8GviUuzdZMgdGf9x9EbAIYLTV5S+vk5TPysuxBXOY8eOXefCc+Ry8eSmRshIiIjII1G10P5lZGXEwcZO7/ybZvNXMprr7ZjObCmwrRNlEREQKRgF
F7ixORfwEWOnu12btWgxcBFyT/Lwj32XrTVBTQ9frF9A8q4Kpl6whooENF88i3Lw6tzYTZiX9SyIiItKfQmQoTgU+DDxjZk8l275EHEjcZmaXAOuB9xWgbCIiIoXhQFS6Xz7zHlC4+/8BvTWYODOfZelVkMICw8rLyRy/gC/ecCO/a+hg6fZZ7PzObGrW7cGfey73rIOyEyIi0i8NbCUiIiKDQQHF8GDpNNHCI1hzecDMSbuoSGU4bfxjfP2UtxA1NDIqXI9n1lC6/9wiIiJDQwGFiIhIsVCGQkRERAZEjTJLn6XTpKZO4ejF61m6axdjb5lBtGUybQ4P37UFIg2JISIi0hcFFCIiIkXBwQs0EccgGNEBhVVUEFRX89J1M7n2+Nv4ziUfoGx7C5N2vgSZDN7WrmG1RUQkf9SGQkRERAZEbShKkBl2zOGs+kQl5x31NJuvr+X7Fx9H0Pok4d5Jyko4ShQREcm3kRlQiIiIFKMS/jI74gKK9NQprLt4Lpmj9zDqsQqe/+FhTH5qGVFXZ3xACf9jiohIiSvhv0FBoQsgIiIipW9EZSisrJzV/ziH2g3OmJ+UU/HgE0QdHSUdEYqIyHChycFERERkoByINA5FUUtPmUzLcbOov6CLCX9wajd2UrbkeaL29kIXTUREZFgYEQGFiIhISVCVR5EyIzVmNKu+NY1xYxqp+9V4xj3bgK3dSNjaWujSiYiI7EsBhYiIiAyMa6TMohSkSB06j5WfGMuEe1NMum8XdL5M1NRM2KLshIiIyGAavgGFiIhIKXFwzTZaXNJTJrPmY/PomNvO+AdT1N2whAxAFIJZSddRiYjIMFbCVR4aKVNEREQGbNhlKFILDub5L4+i5kmYcnNA1VMvEUKcnRARESlmJZxBH3YBhYiISElyL+mRModNlUdQXU3DhSdT+T+NjHmokml/baZ6eT3RroZ9sxMlHP2JiIgUK2UoREREikUJf+lVQCEiIlIkvISrPEo7oDDY874T2fOhJv79iDv45F+OZNynJzHxiUchlSKTyRS6hCIiIjkq7enLh00bChERESmcks5QWJCi/NItpG+fxo/+4Y0smN6KP/k8uOPKToiISClxSnpgq5IOKERERIYVDb1dGF11lVS+ZxcVbRsJAd+8pdBFEhERGZFKOqAQEREZLhzwEq7yKLpGmWZ2jpmtMrPVZnZl3wdD1NyMZzJ4qKG1RUSkhLnHVR6DsRRAUQUUZpYCfgC8DTgc+ICZHV7YUomIiOSHRz4oS3/6+/Juse8l+5eb2XH9XbOoAgpgIbDa3de4eydwC3BebweXNXS9uuIeT00uIiIivcrxy/vbgPnJcinw3/1dt9gCiunAhqz1+mTbK8zsUjNbZmbLOqPWvBZORERkSOWnyiOXL+/nAT/z2KPAWDOb2tdFi61RZk8phn1yN+6+CFgEYGbb/8LtLcCO1x4pg2wCe5+zDDU96/zRs86fUn/WBw31DZrZffdf/PYJg3S5SjNblrW+KPn7CT1/eT+x2/m9fcHf3NsNiy2gqAdmZq3PADb1drC7TzSzZe5+wpCXbITTc84fPev80bPOHz3r/rn7OXm6Vb9f3nM8Zh/FVuWxFJhvZnPMrBw4H1hc4DKJiIgMJ7l8ed+vL/hQZAGFu2eAK4C7gZXAbe6+orClEhERGVZy+fK+GLgw6e1xEtDo7r1Wd0DxVXng7ncBd+3HKYv6P0QGgZ5z/uhZ54+edf7oWRcJd8+Y2d4v7yngendfYWaXJfuvI/47fC6wGmgFLu7vuuYlPFWqiIiIFIeiqvIQERGR0qSAQkRERAasZAOK/ZrzQ/plZjPN7D4zW2lmK8zsk8n2OjP7s5m9mPwcl3XOF5Pnv8rM3lq40pceM0uZ2ZNm9vtkXc95iJjZWDO73cyeT36/T9bzHnxm9unks+NZM/ulmVXqOY8sJRlQaM6PIZEBPuvuhwEnAZcnz/RK4F53nw/cm6yT7DsfOAI4B/hh8u8iufkkcU+
mvfSch853gT+5+6HA0cTPXc97EJnZdOATwAnufiRxQ7/z0XMeUUoyoGA/5/yQ/rn7Znd/InndTPyhO534ud6YHHYj8K7k9XnALe7e4e5riVsCL8xroUuUmc0A/gb4cdZmPechYGajgTcCPwFw9053b0DPeyikgSozSwPVxGMW6DmPIKUaUPQ754ccODObDRwLPAZM3tv3OPk5KTlM/wYH7jvA54HsAff1nIfGXGA78NOkiunHZlaDnvegcveNwDeB9cRDMze6+z3oOY8opRpQ7PeQoJIbM6sFfg18yt2b+jq0h236N+iHmb0d2Obuj+d6Sg/b9JxzlwaOA/7b3Y8FWkjS7r3Q8z4ASduI84A5wDSgxswu6OuUHrbpOZe4Ug0o9ntIUOmfmZURBxM3uftvks1b984wl/zclmzXv8GBORV4p5mtI66qe7OZ/QI956FSD9S7+2PJ+u3EAYae9+B6C7DW3be7exfwG+AU9JxHlFINKDTnxyAzMyOuZ17p7tdm7VoMXJS8vgi4I2v7+WZWYWZzgPnAknyVt1S5+xfdfYa7zyb+vf1fd78APech4e5bgA1mtiDZdCbwHHreg209cJKZVSefJWcSt8PScx5Bim7o7Vz0NmxogYtV6k4FPgw8Y2ZPJdu+BFwD3GZmlxB/aLwPIBmm9TbiD+cMcLm7h3kv9fCh5zx0Pg7clHz5WEM8hHCAnvegcffHzOx24Ani5/Yk8VDbteg5jxgaeltEREQGrFSrPERERKSIKKAQERGRAVNAISIiIgOmgEJEREQGTAGFiIiIDJgCCpEBMrPQzJ5KZlp82sw+Y2ZD9n/LzGab2bMDvMaXuq0/PLBS9Xmv2Wb2waG6vogUBwUUIgPX5u7HuPsRwFnAucBVBS7TK3qZxXGfgMLdTxnCIswGFFCIDHMKKEQGkbtvAy4FrrBYysy+YWZLzWy5mX1s77Fm9nkzeybJalyTbDvGzB5Njv1tMkcCZnZ8ctwjwOVZ1+jx+mZ2hpndZ2Y3A89klzG5V1WSVbkp2bYn67wHzOw2M3vBzK4xsw+Z2ZKkrPOS4yaa2a+T+y41s1OT7acn130qmYxrFPGgXW9Itn26nzI/mLzv58zsuqHM9IjIIHN3LVq0DGAB9vSwbTcwmTi4+HKyrQJYRjyB0tuAh4HqZF9d8nM5cHry+t+A7/Sw/RvAs8nr3q5/BvFEWHNyKfPe9eS8BmBqcr2NwFeTfZ/MKs/NwGnJ61nEQ7YD3AmcmryuJR6N9wzg91n36qvM7cQzhKaAPwPvLfS/rxYtWnJbSnLobZESsHc2xbOBo8zsvcn6GOJ5C94C/NTdWwHcfZeZjQHGuvsDybE3Ar/qYfvPiQOSvq7fCSxx97UHUPalnkw5bWYvAfck258B3pS8fgtweDxtAwCjk2zEQ8C1SebjN+5en3XMXv2VeU1y718CpxFP6CUiRU4BhcggM7O5QEg8s6IBH3f3u7sdcw65T9dsfRzb2/XPIM5QHIiOrNdR1nrEq58ZAXCyu7d1O/caM/sDcTuSR83sLftZ5u7vU3MDiJQI1U+KDCIzmwhcB3zf3Z14Art/sHhqeMzsEDOrIf7W/1Ezq06217l7I7DbzN6QXO7DwAPu3gA0mtlpyfYPZd2yt+v3p2vvOQfoHuCKvStmdkzyc567P+Pu/0lclXEo0AyMyrHMCy2eRTgA/g74vwGUUUTySBkKkYGrsniG1jLimRN/DuydAv7HxL0cnrA4978deJe7/yn5I7zMzDqBu4h7XlwEXJcEGntnxiT5eb2ZtRL/Qaav6+dQ5kXAcjN7wt0/1O/Rr/UJ4Admtpz4c+RB4DLgU2b2JuIMzXPAH4kzGxkzexq4AfhuH2V+hLgR5+uSa/72AMomIgWg2UZFpCgkVR6fc/e3F7goInIAVOUhIiIiA6YMhYiIiAyYMhQiIiIyYAooREREZMAUUIiIiMiAKaAQERGRAVNAISIiIgP2/wM5dTCymB1cCwAAAABJRU5
ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "fig = plt.figure(figsize=(8, 6))\n", + "ax = fig.add_subplot(111)\n", + "ax.set_title(f'Alignment steps')\n", + "im = ax.imshow(\n", + " alignment_history[0].numpy(),\n", + " aspect='auto',\n", + " origin='lower',\n", + " interpolation='none')\n", + "fig.colorbar(im, ax=ax)\n", + "xlabel = 'Decoder timestep'\n", + "plt.xlabel(xlabel)\n", + "plt.ylabel('Encoder timestep')\n", + "plt.tight_layout()\n", + "plt.show()\n", + "plt.close()" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlAAAACuCAYAAAD55TMFAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9ebB925bXBX7GnHOttfc+5/y627zmvi57M2mSpoDMAgQVVLAICKMUKMsCtUQrbIsSNAyjqixFMSxRDKqKpopCKBqpABtKCUSzsABBAWkSSciX+XjNfe82v/Y0u1lrzTlH/THGXHuf3733vZtk8oD0zIgTv985e++1VzPnmGN8x3d8h6gqd+Nu3I27cTfuxt24G3fjw4/wN/oE7sbduBt3427cjbtxN/5WG3cO1N24G3fjbtyNu3E37sYPcdw5UHfjbtyNu3E37sbduBs/xHHnQN2Nu3E37sbduBt34278EMedA3U37sbduBt3427cjbvxQxx3DtTduBt3427cjbtxN+7GD3HcOVB34278TTxE5LeJyL/u//+ZIvJXvk7fqyLyzX8dj/8Z/4701/j5bxORPysi1yLyz/5In9/duBt34258rXHnQN2Nu/HDHCLyeRHZi8iNiLwjIv9PETn/kf4eVf2jqvptH+J8frmI/LEf6e8/Of4fcefnO1/6+3/sf//Zf72++2T8auCPqOqFqv77/gx+zo/kF4jIPyYif9mdtHdE5D8TkYsfye946ft+WE7l3bgbd+PrO+4cqLtxN35kxi9Q1XPgJwE/BfhXXn7Dj7KN8fuB/1X7RUReAb4LePx1+v5PA//Dj8SBxEZ46W8/C/g3gF+qqhfAtwO/90fi+34440dyDr3fdd+Nu3E3Pvy4Wzx34278CA5V/TLwB4EfC0sq7J8Skc8Cn/W//c9E5M+JyAsR+W9E5Me3z4vITxSR/95Rj/8QWJ289rNF5M2T3z8pIr9fRB6LyFMR+Q0i8u3AbwS+2xGxF/7eQUT+zyLyRUdTfqOIrE+O9atE5C0R+YqI/KMf4lJ/J/CLRST6778U+I+A6eSYQUT+JRH5QT+/3ysijz7MfRSRbxKR7/HPPRGR3ykiD/y17wH+DuA3+DX+buBTwB/w33+1v++7/P6+EJE/f4qMOYr2a0TkjwM74BtfOoWfAvwJVf2zAKr6TFX/A1W99s//Nr+Hf9if1X8tIp8+Of7f5q89E5G/IiL/4MlraxH5d0TkCyJyKSJ/zJ/F/8/f8sKv47sdTfzjIvLvisgz4P8oIvdF5Lf7c/+CiPwrzRESkejHfiIif1VE/ulTVOv9
rltE/hER+T6/js+JyD9xcq4/W0TeFJFfLSLv+hz5RSLy80Xk+/36/uUP80zvxt34UTdU9e7n7ufu54fxA3we+Dn+/09iyMi/5r8r8IeBR8AaQ6jeBX4aEIFf5p8fgB74AvC/BTrgfw7MwL/ux/rZwJv+/wj8eeDfBc4wR+tn+Gu/HPhjL53jvwf8p34eF8AfAP5Nf+3vBd7BnL4z4Hf5eX/zB1zvHwH+18B/Afw8/9t/B3w38Cbws/1v/zzwJ4FP+PX9JuB3+2uf8e9IH/Ad3wz8XP/ca5hz8e+9fA7v9wz89zeAp8DPxwLFn+u/v3by+S8CPwZIQPfS9/9MYA/8q8BPB4aXXv9twDXwt/s5/vp2z/0efgn4R/zYPwl4AvwYf/3/4t//hj/H/6kf4z33xJ9lBv4ZP9Ya+O3Af+LP8TMYGviP+fv/SeAv+T1/CPyXp8d8v+sG/j7gmwABfhbmWP2kkzmXgf+9v/cfx1DG3+Xf/2OAA/CNf6PX4d3P3c/X++dv+Anc/dz9/K3+45v3DfACc4D+r8DaX1Pg7zx57/8Nd65O/vZXfOP624GvAHLy2n/D+ztQ3+0b2XscEF5yoHxj3ALfdPK37wb+qv//twK/9uS1b+XDOVD/S+B3A98GfL+/dupAfR/wd5187mOYQ5jez1n4Gvf4FwF/9uVzeOkZnDpQ/yLwO146xh8CftnJ5/9PX+M7fx7maL7w5/vrgOiv/Tbg95y89xwomAP9i4E/+tKxfhPwf8CcuT3wne/zfe+5J/4sv3jyewRG4DtO/vZPYHwwgO8B/omT134O73WgvtZ1/8fAP3cy5/Yn133hx/tpJ+//M8Av+hu9Du9+7n6+3j8/mjgZd+Nu/I0cv0hV/8sPeO1LJ///NPDLROSfOflbD3wc25i+rKqnHb6/8AHH/CTwBVXNH+LcXgM2wJ8RkfY3wTZj/Lv/zIf4zpfH7wf+HQzZ+R3v8/qngf9IROrJ3wrwkdM3icjPxNKeYNf0Y0TkdeDfx5CgC8zxeP4hz6t99z8gIr/g5G8d8P89+X15LiJyc/L371DVL6rqHwT+oKfH/g7g/405u7/p5c+r6o2n2D7u3/3TWvrUR8Lu0asYWviDP4RrOZ0/r3JEKtv4AoZm4d9/+v7T/7/v30Tk52HO3bdi93kDfO/JW56qavH/7/3fd05e32MO5N24G/+jGncO1N24G3/9x6lD9CXg16jqr3n5TWLE5TdERE6cqE/x/pvtl4BPiUh6HydKX/r9CbbJ/Rg1jtbL4y3MIWvjUx98KSdforoTkT8I/G+wFND7neM/qqp//OUXROQzJ8f5o7x3A/43sev48ar6VER+EfAbvtrpvM93/w5V/cc/zGfUCgDe/02qFfivnHv1Y09eWu6ZWNXlIwxB/BLwX6vqz335WO6MHbD79ee/xjW839+fYCjep7FUHdjzas/1LSx9955zfL/jicgA/D6sIOA/UdVZRP5jzMG+G3fjbnyVcUcivxt34+s7fgvwT4rITxMbZyLy94mVx/8JjG/yz4pIEpG/H/ipH3Cc/w7bLH+tH2MlIj/dX3sH+ISI9LA4AL8F+Hcd2UFE3hCRv8ff/3uBXy4i3yEiGwyN+LDjXwZ+lqp+/n1e+43Ar2nkahF5TUR+4Yc87gWeFhWRN4Bf9TXe/w63ieD/L+AXiMjf48TqlROiP/EBn781ROQXisgvEZGH/px+KpZm/ZMnb/v5IvIz/D7/a8B/q6pfAv4/wLeKyD8sIp3//BQR+XZ/Fr8V+HUi8nE/t+92R+YxUHkvoX0ZjgT9Xuy+Xvi9/ZV+vfhr/5w/3wdYKvOrjR7jXz0GsqNRf/eHuUd34278j33cOVB34258HYeq/mmMiPsbsJTUD2A8F1R1Av5+//05xqX5/R9wnAL8Aoxs/UWMe/SL/eXvwYjsb4vIE//bv+jf9SdF5AojF3+bH+sPYiTz7/H3fM8P4Xq+oqofpDn16zHi+n8hIteY8/HTPuSh/1WM
fH0J/Gd8wH04Gf8m8K+IVdz9C+7I/ELMwXuMoUK/ig9v855jz+mzwBXmoPzbqvo7T97zuzBn8xnwk4F/CECtUu/vBn4Jhki9DfxbmKMC8C9gKbI/5Z/9t4Cgqjvg1wB/3K/juz7g3P4ZjNP2OeCP+Xn8Vn/tt2Dk/r8A/FngP8ec8vLewyzn+s9ijtdz4H+BPbO7cTfuxtcYcptucTfuxt24G3fjaw0R+W0Yof89el9/Mw1HlH6jqn76a775btyNu/FDGncI1N24G3fjbvwoGWIaUz/fU8BvYAjZf/Q3+rzuxt340TjuHKi7cTfuxt340TMES38+x1J434dpON2Nu3E3foTHDyuFJyJ/L8ZziMD/XVV/7Y/Uid2Nu3E37sbduBt34278zTr+mh0osRYO348p/L6JESJ/qar+pa/6wbtxN+7G3bgbd+Nu3I2/xccPJ4X3U4EfUNXPefXQ78GqXu7G3bgbd+Nu3I27cTd+VI8fjpDmG9xWtH2Tr1GinDZn2t1/hFSQDHGq5HWgnCkSFFUgB8IEUkAUagIEagSScrYeSVIRFEWYa2A39ZDNF5R88rno6FrrZqH2f1EII0epOPXvSKDJvjtMUHt7LWQIM4gqVP+YKoigwY9ZFA1i7wFqEsogSIE4KaL+GaCmgEYIWe2z9aujgPN5oPbQDTNDLIjr4BUNTDWSD8nO6+QnZD+uXy+nSOPp1+lLfxCBINQIGgQE4lhBoXbCfCYIoAGkq9wbDiQpy60UUVSF6zww5WRfWwRUlufezhG1e1mD3/dq93k+h83mwCpmOinLtT4bN9QSTCmnCqL+rDJ2/e0yVGna1xpAXsus00z0P+5Lx37fL+ekp5KBCsGfv52nH1SgRkGjvWd5pi/d0ltzA3jp0GiE2gvVrzeOx2ejUZCihKzkTaCslfP1SPJ78GK3gSrLM9UIBHvGYTxei0aW6W5a42rXmu1excm/L4hduxznYk2yzPt2D8tauX+2ZwgZBS7nNdOcIAunF7k8U1+DUiCUkzXo753uCdJXVt1MkooCU0nMNaBjPB6nHu3AqZZ5eybTI+XBak8QZa6Rfe7IY1o+D20dHB+AKFDt2WkQ6iuZVbJ5FkTJGrg6rJY52z4juV3P6bH0xKZ8wPr6oPGeifHeD4mCipDPA6Xz38NxfcfR7VHV98pe+vzDn+97zklPXoOjXVtetzn8fud16xgf+NrJi8GOXPvAvAGC3c+2lpY1mE6uQ7G53m6EgMxmTxc7V26vwVvnI3ZN7b+nl6wCZWU2jmD3tL2hHbt2QKpQBCl2L0OGdFB7Jo8qfcoM0TRscw0ccked4/E89Pis2jwpF5U+Fc67kYAyaWQ799QajrdMBWYhFLtPbc61eV+jHG26CHltcxMFPa+su5mxREoJyBTstQpxfmlPAOaNwHlhSJlVyASpBLfhFWGqie3Uo0VO7q8s9ratC6mQz2xP2PQTAaVoYDf2Zqvz8VrMnPm5b+xyQ4a0d7v9PnOu7UUabM/VaM+t7b/aKcNqJoXqH5flucwlwhhun3N129TM+/utQYX5InB4+80nqvrae06KH54D9X5Kte+5chH5FcCvAEj3HvLNv+RXMlwq/U1l9WTm8XeuuPmf7BnWM6UE5nfXbL4cGZ4pw5Wy/WggzHB4DQ4fzXz3j/ssrw43DCFzlVc8nzb8+S+/wfxkDRXSdSBOwviooGfFFmEWJNtCkAxxFC4+d9yEgvePHx8Ih9eU/krYvK1cf9oW7OqJsnlSCbMSZrUN2q+0DLb60q5QVjZZw1yZLyLXn0j0l8rZWxOh2MITVebzxLwJrJ/M1GS3cTEGbb8Jx0n+lZ8xcPjkxI/7ljf51OY5yVfUvnR84eYRf+VzH0P2kbgLxNE20+EF9FdKnJRuV5GqSFb7nmoGX+qJAQZQRbtAjYHaB8pKyENg/XhGVNl9pOfdnyyEDGVQuk9s+Tu/4bM8SDs6P6dBMhXhjz7+Zr747CHTIVFvOmQKdDfC6l2h
u1HSwTcxsU17/5oQR1g/rbzz05Xv+gnfzzeePeET/TMKgZuy4vd87idzeblB95Gwj4RJSFth/Y4SR0ijusFS4mjXV4bA5p/6Mj/+4Zd5vb9irom/eP1x/uQPfAO6T8TreDTQ2Z/3U2HzdiVOdp4a7Bx3rwXKWoh75eLN4k6HXUftxA0DSK5osnkR5opGWRb+fBa4eSOy+6jNs/ufq3Rbm1vbjyS6XWV4nnn27QOXP37mp33HD/LR1RWdFH7fX/hJsE2EXTCDdb8QzmfqHFh9YSDuj3NHozml0z3bGaXA8DSweqZcvJkpvQccbjSCO1XTvcjhgT2L4boyrwPvfnfh5/2Uv8CnV0/ppPCH3vkOfvDt16hPe8IklLOKZCFtwzL3wgyrZ9WNoi4OmkbhSz8nsPnkNT/ho1/mUb8la+QHr17l7esLrr98j3QVCJPQX8HwvM1hXXbAMNnzeusfOvALv/V7ebW74QuHV/jzTz/OVz73KukqesADm7cg7fy5uOMbR3dSV8L8y5/xzQ+e8LHVJedp5CYP/IHP/ljmmx5m24DiQVg9FrqtMrzQxbFuQUqYlFBsbS0B0fs5LX6/zXGV5T1hrreDKP+/qFJT4Pm3r9l+TAgzTPeV7lpYP1YuvpzdJpmnoCJoEiSrB4T2e+2EMOsyB83etE1IlmuQ6tfwAaM5ou/nLLa/NRtH0WWzLuuEVOXmEz1PfiKUTaV/FpnvV9JNIN0I87mSX5uRVNEqy72nCDpUSJXVl3r6F+bE9Ndm19p3hmz2jcBybyXfDlzxNkYaYPvRxHRfmM/MlgGEIoQR0gGuv7mQHh2Yr3u6Z4kaYXgmPPxsoXTC7hdf8m2vvsu3nr9LVeHxdMH3Pv0Y77z1wPab0eZw3Aurp0Iczf5v//YbvvkjT/gZr/wgqzDz+cMr/Ml3PsPNfmAaO2oRNAfSk47u0vah/kaX+QYw3gtmm0al9MKT7xSGZz6fvvsF3/XxL/BXXrzO288vqG9u6G4CaQebt5R0qGZ7/dk8/bEd/c98wjc8eMZPuP8mD9OWSKUQGGvH5w+v8D1f/Fb22566SxaIjYG0NXs3PBfSXulu4PF3VdYfveGnvvFFzuLEk+mMP/35T1OuO7rnkdUTs/39tV1L6eD5t1sw2V8Kr3xfJrZz83XQ9oh8ZoFVGYR5E8hrbP3es7m8+/TMj/m2N3k0bFnHmaqBrIG3dvd488UDDn/1gjDZ/j+8ENJOGS51uRdhPu6FLcgKWXnru1f85X/jV35ga6sfjgP1JrfbBHwCE427NVT1NwO/GWD18U9qi25rEsoqUgeIXaFLBRFldq+ydkJeQRnMEeiuhPEVIYgu0SJAECWlwtQbSlJmsUWU1KLzoFDDEpWDoBkIjkJ5JGc3LqBRFqPf3ZjRiqMjYO4zSvWAIohdR9+8YiFMikowwzgpcVY0BUrUZQEDxNk29zIEW/zlaLxadNzMVO0U6Sv3ugP30n65B/eTkGvkiw8eMg4deUhkgCLk88h0JcRR6K/sOtLBzsdQteN3NudNqhnZ2h3RvJDsPEWh9ELt1SKyCCmZARtrohCoKhDNiVqnmb7LTGOy2xbdgHvUh0D1qEI8ItJk36GxUhHGmriuKwB2tbdzEvV7rYbmdLaoRJXqUVKL0qXa/2OoDCHzKG6JqfKV/gGbi5Gxq2TpkNmeF50SZiGvDWkz57ZaRJrMCZIMaW/nWj2El6rUJARsEZLM+TSHyuZe+3xNdg+0U2qbN8Gcrpps3ld3yinCIXfsS8fQZUJfKKPNUY1AV5GghFTJa6V25gR2NxAOfm96WSL+0CLFVWA+CzbHd/bca+dzU6HbQrev9C8yNXWICmNJXJcVmzARQyWmQkkKsxvuhsQ6yhimo8PTIkfgGDCIMoTMOs7kahHwup+5WhXKZMcsvVAGW3dx9A3aN/yXR5JCFIWk1N6+OBRbk7XzyLYayizL34/rsRA41I5ZIzEqc1STn+QYyLznetzx1ihUAQmG2DRH
+vSz6m9Xvf29NsKtazJHzO0IkHZK2styvOg2iWrn0ZzhFqUTxZyyotQu+Jo+OhYqEKu6Y4/ZpWAbfHtOtxxA/39zRqQAQd7/WVRF3A6011qQmdeCLvfM17BwvF9+b0VAg6JR3WQfHdIwsyCpNR7PVdWfqzuJUrF9oKER7lBKsbWb9m5HghAPNlcaYhUnRcWyInTVUV2lDkIexPaIWEhS6aRQRbiX9jxY7Xk8XFBrQqogVcwp8/PNK+j7wioe0XCwPSyENlHsZ0HFPTMSilD9gdRkx2u2o6YjIjPEwlka2XQTfV/Y9UoZFMlCWYE46m52yz7TxUofCvfjnouwp5dCkEp1b/vR2Y6nCvsS0CpoBY0RBPIan8+KpsrQZS7SgXWcGWsyOzEEyjown9u9a1kds922V9t+H4BKmNt9OKJOZQjEsXqWQundDsfRXt99w3EKnsfRUGmNXKYV635mt1K3s8J84XOhBQRFCEkdSTuuX80fHEy08cNxoP4U8C0i8g1YH6ZfgqnYfvAQyBtLjZVqhqR00HWFFAtVZdkYy2CGab5Q1o/NiFCFIJV1nOmkkGNgXzpSrMhQzDHLAiFAUiQqImrrYg724N2hqlHosnmeIdu5xVHpr22RahCGZ+blh6wLUnIKf7bNuqVAStvYkMU4abANvkUP4GmSJEhv75eTlMpy3BMbqwlCVNZx5jyOVMSdyMqDbsd6MAhtcoOjKsxikzzMgAjxgE9GdyyipzeyOGSry8ZSVkfEpEZZDFFNmEPqjmAIlblGAh2FwlQTs0aGkMkabA3UttG4Q+HnUOMxAkfseaPQJZYbXG/l1iDFSkyV6sa1diCzUAeoRShZCQWqCHkAqXZ/gyirMPNKuiFQuZf2rPuZECrbKtSD99QNUCqMMSElkPaABH/+5liHAsNlMcPWN/SQZZOSYveqdEJwP0gjyyaXV/a+6oatdEJM5rSWlRDn41pB4VAS+9JxLx0IohR3VjQooS/EaNFi7ZU6GJqadh5pL5u2GQncYJZemNyYld4Qngafa9v41CDumuy7xpoYq5mLILaulvSH2gbT5q5UiJM5bOqIQECX54+vy3WcuYgHDtJxnkau0grpKtrbdRqaK5QepIZlQw9Z2pRiWwbO40jWSBcLJDXnFJDx6ICFWZFsN0WS3Zc8CEMsVCwQiVT2pSeE6s/VAg+qp3XL8frateKOP/lkMp8ESvo+aA0ix2fjzsLpZ2hLoIImWdKQzflrKRPaLanSYjv7bFXfbCx4y2sx566dhrij7o59yIp0Fhy0+dpQy2PeC/dG5OioiTtlp+fd3lvtvYuDFZozr8s50DlSK8fzl9AMbECr2vUlRVKFYIFnnM3e2txq13RM7dh8bAuAxeacrtk46dGZb07/bBu5itm8lAo5RTQpmny99r6pq5A1LIFdIZCCR9anKWNP32mwfW/oZvpY2ISJTix1nEKli4UpVKpED0jUnNTYgg5FRJasiUZ7fqWzidToBSlWIpUUKjFUO3dhCU4b5cHmsFKjBZgpFDo5blK9T7TzeOC8H7npeuYhU2ugiFJUbM63dZoFOmXoMudxZAiZbRjoukLpI+WsMDmCFGZD+uoA5TxDVMoo5JX4OjdEvqGJdg9sPWpo69AR4AKlt/tlDq0Fy0GUjUys4gV9ymhvxk+jUuYA1QLlMLszmo/3RVpq+UMU2P01O1CqmkXknwb+EMa0+K2q+j/8tR7vbtyNu3E37sbduBt342+V8cNBoFDV/xzrtfThhkDeKHktUNU9afeag5oHHyzqLSuo1QistXNEpyt0UtmEiU0c6aRQCAxdZpsq6h6qdga9Bo90oRqaoYZiaTVou+U7G99IqiENNVkE3W8r3U1lPg8LIhMy6BIu2c/qWaa7mhgfDQ5Xq6dqxKDoYGm+OFVHKhx5u1bCaNEiONR9DLSX6K4mJXWZszRyP+0Ya0dBqBoM/hX8OqFk865pRFwnIjbu063nJ2KptXpEa4zoGchDIypCOljk
UjvQdTGUT5Qoyjoa+hU97MqW6yTXQK3tXCxaWUjOJxEhatB+PjOYuSYgKX3InKeR+3HPWRi5DiserPfkEtgCs3ToFKi1zSePmJ1Q3qLfPAibNHE/7lnJfOv6S7GbLZ7uDdEKGWoRyioYmtX5g1bjWIXZ+CJ508gkLFA4Kk6ItPRIdSixJpjO7Przxkif2lebByshZ0FDIJ8d0c+G9q2ipbmCKCFWe17+0/WFrvPUd4vEg8+9YBFe7RrCAdwcSeMLEtjZs8cZ/Q0d1AiSPUpPyr3uwBCOZOuWctBOoaXPV8Hn6/E+lF4WFCPQ0ia21s/SyCZMFA0MMdNFu5aaqhF1q3GqwmzrcSk+EDvHGCsB9VQqVlwSDC2g2j0srRDEH73o8jipHWZzTkY9Qho2jxrA4muhpcBO178Gu0csRNuTdIyjBu2Q7/lXWNIax4XZHocuSK2cFknAgkxItfepyMI1i7tMHA0KjX1AH0QonrJb5glHrpwKtfrzcZsjqo6mH8+p2b2FpO/nv5Dpxd+jJ39v93xBn22taVSIZuuPx7J5ZWCXOv1CkFQJXaX0hiL214aulS4cEfVwPN+GkC3p/pO05MIBq4ZmSbaiiYqtg7LGfglKiraHlATaK3XyVLcfLohS1bIBkUquAV0MHIt90wglGBp6r7P1vAkjndj/u1gIoRJiJahYjUynlF4dNXoJwY9H+2nrXNFkKHAMRgJPUkmxWGo92PFqL9S52Sqc6mKn2Tl5HGAVZgKV6rSMtiYkKIEKgyNQU6DOAWnFVH5PulDYxJF1HOx8YqHEiA5KAefksRSGGb3juC8sXL3T3+W49tGjjdModg2pskoz6zgzhEx0P+FBv+dxd44MBW0FAckyAI1OUX0xSj19aO9FV99v/LAcqB/q0GAEwvks2k15bhcQRVmlbDd7nSlDMmdlPj6YxrNIodCFzEpmaghswmQPXsXSRYAT/Jf8t7YJ7fBqIwvbOfkbG9I8H+Hy0gkpHr9/WXwn0J5k6J/uiU+v0fiI3UeHBZpfIG5xGFXaedgJlpUZAAJL+uxl7oEGqCtl6DOvdjc8iDvmEJk0MdaO8ziy7mamHJlFqVXQLJ6Dt2td0g+nz0JuZw1aSsGgfbF8dTDeVBs1CWmTLVtRhFU/L0akEAgoQ7BKiEfDjmfDhp14/9TQHOG2QAyixSteagfBOVhtdFK4F/Y8ijdchD33+z03fc84J/KYzGgmNePQyXLcpQJPLF12vztwEfeLUehCIfrGqSr2OMBSvMWIq40zc1qlEbIZ3dO50L5nmQ96JOprFObeU2b3LBWT12bw6Mzo5ZUsTmQZWHhRds+gj3lxXFKqTNHvW1RzIILlcqxy1DhvcbZUXi0Oe0dzBuJ4nN9htnR1f12NP+YVgFKFlD0FVKpXR1WSFFbBUuerONOlwj7YNYShoDlQ1xUkkDdCnECvT9Yu7bm78x10qbDsxPkkoRCiUpPD91Upa0tlNCJ0ex41QpfKUlARUEvhNWcxKtpZtZWl54V4UnXTzumQE7vcs48TFVmcMXPIzflvz36xIZw4M3pcO7cX2O21dZquPk6WE5vQyKunh/C10G0rq6eWdh0fcSsQsrkiNIJ+FdDzjulhv3DbSmd0Bcm6bEbgjn6V29976jAu5y9edMLRqXq/veXkM0tBjMB0YWk6DT4fqxf0uNPUnGqJ+r7HWm5igG6v9JcZDTBvwkLoP30epw6le3LLGl2uvepStDJv5NYcFTimGnEHrdkUX5vRX++kHFNGaSJ1hTnGI0+zEw/CnKYAnmqydNkqzPSh0MVKjJVawuIMamdpP50sIxra+b/PPGzPRv0CgxgogXjA5QGFOR5uI/2ZWBCu9JJ9jdu5NQ7UKmZiMDDCvv/2w1+qZEV9mz7em3U/sz90xpX0lLyl5pWEIHsDDeLBbGt4H96RcXMDolYVuQStHqTVZBymPmSGMLPyaMnsSiGKO8Je6f9yAL+k69re3K7pr2cK769puBNUVscb
IAU2w8SD1Z4xJ5525ZjLbiWPevSap5qoGpg1cagdY03MJVCmADkgky3OWsRyyAo6h6PMQRHCJHQ7435YdRrLxt7tbQMc7wfG+wISjXw9KRJZqlWW61EIh4zuD8TtRMi9oRTJ8svIsWqoVW3FFJjOA/tPGcE47QJxgm5rVRIhq0dBfp8SrPt52XBWMi9Rzy70izOwDD0aBHAnLLJwcoDF8ISsC6F8Ib53ZmjzGWjyCetVNTEVdGX3N4iyzQNjjUbgxRbuQObV4Ya3+ns8UXGEg+M5+Yaj1Ry0xh2Kk1iJ/RTY5Z7LvOZxvLecc66Rw9QxjR16iMgsxL1VScURr67Cc/K3d8qigZ2aBevEHKgYnevikW4tAeZA3MbF2TD0wwx02lWrqgtyi8TZrmdZjCePwyqhjq+3AgSJ1dCiHvIJuVf8ecQJyMIu91znlZFVq1gU5ccPwcqNS45IkeN9OBi62Qio7b6nHQunL47CcFlYv3OgdpHxUbdUmaqTc6eHvUkaVOE6rzxynjiUjjlHmqSCFnF6jC7GvzjhNo0+r7Ie13wVShVmjcvPMi9Pd/Ny4szUNkdY+Iht3hcNiyNlJdPmMNZVpUzmtHc3vrHk42YbJ7g5DJz1E0nW9NElM9o8OAji1XxhNv5bQ6qXMvriG5Cv2fa67XgveRknf1v2oGByLnZM3/idRA4Qs3OUqgKJm/ZZbfbEDtMKFMZ7kTLA9ECOfCkvZrBnocuGF4qeIDLOUfNoYqHD3EKSWpn5if0+vV5ONnQviClDYPuxSFn58ZbXzS6Hao4RAjHVRQZFG3KtFtRUqQs/sHZCOhS33xzvT+OFifhmfxJQn6Br+D0WVVZPK90uGJ9o8MIRAXIgl0DNVtWNBAtG/RwOc2IqNneD359VzAtK3KDOmtyJ8+sec2KskYP29M6BOp3z+jIQUI8/jYvZJHDamggHk/6hCnMJ7EvPoXTkYhkAmW0eh8lsWZhvfx7MbgfPImzrwKF2VAIH7ZagYp4SmoPZoH30+2GVhnG0gDyFyk0ZmDVyU4Yl2I03kbQVuhuhf2H2rSaluwz27Pdyq3payu1tLLhMEG0tznq0vdWQMLsOn3caOKj5CkWD+QPFbIPJErmtnHwfnB2RbDIP1Wzx1xpfXweqQLxp+bKjjk8XKn3IDssrmaOnigrTfU9pAGMxxwlg1shYO8a5gzEuzpEUsTRT9MmYA+LVec0pi6OStsV0d4pLC5wZkS0dzKGaHlj0vnlc6baFIEI8lAVKVHGi7aaHT7xGOe9J+4pkNXQpsZx3HKuX1Sr5LLL7mHB4tYJA2hpysHoM52+70cttlzzOoifzuUc85kBVFQ6142Yc2B9MA8ecRV80jRjnCFRzloJrcDXJhTDVo2FVM9R1COSNV4qJEEol7WG8GWxji8rNYeDx4ZzJ03YpVPal4zquSFKZS0QPkbi3Z5JuhP5S6baO5ABpX6ldXBZh2itxG3h6OGMVM2NNXPZrNmFim3v2Y0e5ScStPdNuK/SX9rlu65ucI1nmIBrZ87Js6CQTRU1/JSdKCWgOZniLIDkQDkJ3LXTXNke6vZJcBqK/nJG5UNcJ0WQbuWt9NajZFl4lzGGJlKpXDlkZPYyvYBA4XkHjjliYodtB3Fe6G7sPT3ZnjCUxrhKHbY/cJNJemKMyz9GjViG2QEM8bXUhx02+shiIMKs9+7NAXgfm8w6p0N14EUYvHM4C01lkvmeVsCg8Hc8AOIsTLw5rDruesLNrrL2jdlMg7gwNqwnG+wF5bmXTaV99k42L81DVnKiKpQlKDZaC9qrI4JIj6QBxb0ZXIxweBVunokw1MXowkaSiUyB4ZaD2hdrbxrcgB7MuKdd0gGlM7OaOIMpQM6s4k+dE2Ft5fRxN38ZSnUrGzgcnxYesaGnryZ2IJgXQApbK4hC9TBYngEyG9omjSMvnqzuI647xfm9aUI40lMEKE+KhEoqSN8J0Ebj5lBhN4rz4ZmOOde2FtINua0GKOW0s
RQVhhugbmDm5x/M1x+7oHFH1qO+FbzQnulGtWk9jYD6P3HyqmnTB87iQ4+Mo5DEgGUpvx2+b3+JAeCWbutNWO7j+RODwsGP9NC33KUxOOoaFjlH6wHQvLs88zI6eNFpEJwtyJVUhGZrVbVtloJh9KLLQIOLkiLzA1aHj2eGM+/2BszgtDryqbdRhts06HoS0tTlXBuEwJ27mgV21wLeTYkGQUx5akBQmcRkEJR7cdrt+Ugvg4lhRsfmedlDWcJgcVKiRuUTCIZD2Qpgw6YeXgvSQ45KimzVRa+BF2XBTVhQNtsfmxGFO1G1Cst2n6FIqtnfZ+pRoZPgX84asget5YLsb0H2iuwoMz62ifriqaDD0PcyGQLUgRbK6rIfPLUeGxgfRSeb29+46L0FpmBUtVgjSbEpRu5cGsETUZTEaSh8m2zNOZQzCfNShEoU4lvciyy+Nr6sDFbJ5wXmjxINBydrBWKIZwmLCi1Itim4O1u4NqyYgKlONjDVREX/AgZwD4eBGd/JUSav+QhY4Xk+8eSublwX2vvlYZP+66ZEMLwJxD7uPF+IhULvA+Zd1gRgl11uG8OqbzpjOLHWyeWcmzpb6KAPMZ8ZziQcljsX4UJ2w/3ixyoAsJr6Gwe1hVNI+I7NDFUGg9mzHni/sHrEtA2dxZBMnOik8zxsur9fMV71pI7kAW9yZHo9pdNi5p50tnDBXZK5+fyvhYOGmihCmYMbnQsnntng1AtW0eOLzZJv+prDdDbzdXyAOAVcVVmkghco6zVwdBsIh2ISdTddn9ULpbtrqsH/KcIyU00EJU+BmHHiWbCFWDdxLe+YSmcdE2EYrPZ5s8fZXSr+tRw0R34gQcxy2uV+cT4CKsD305pDs4pJqkFkIo+sPXdrG310X46gJlFVEz2zJxIM531PslmtpWjRxrORzRzYFiF6h5xFfo+OJsMD7AN2NXX/azfTXibSNXG7XTDkSpaLbxHAppJ2VrpcSDIUqZszKRim+KWrnkZSXUcedRX/dVjl/yyQTDq8G9q8MpJ1y7wsTcSyU2ZDXvIHxkTktcoi8OKypKlzHmcv9Cr1JDFeB2inTYOuvu/FzW0E+U/K5o48C/aXN/9oFiJYCmTUu0gGzBnOkxkjchiWFHyZMm2e0KHH/auDwqkWvQ4lczSuKCp1UDsXnxt6CpboyJyhtjwhoS6dZpaAy7ztuVsOyeQOUMdIdbB7Eg5LP7H7klW1UKpBGkBsLPkxHqSD5RM9J9RbkK3NZqrus6uxoP2Scb+kvWfqpWkloEG6+4ZzrTwXiwUq3peCVshbAhalS70UOD4X9GxlZFWJvG3OdA+UQqV20TfTKtYmemz2e18J8IbaGpNI9baV4x3kpuYKngZdAay5um3zNFb3lMIoqZd1RBqGcVxgK+iISRktDxINp9oHNW1HIY1p4T3iaz0gz9uDyw8zNq4ocIuOXI+dftk0v7QXRSjiUxaaMD1aLhuBwVem2xwCyOY2kQE2WAion0hJGwTA6BO4wLJmLfaUmYT4krg4D73QX3OsPJJ9/eY6LdliYzAavn1anR0T2c3R0fUNMynVZcTP37KeOeY7UKcLsqfgD9Dfq1b8NIYSaAmmspH11bq3pQtXOnL6sgf1sx4sHIe1tDa1eVNsDptN52rntDuzqQEF4d7rHVV4za6CTanvuoUMO0e30URQ5jo5szUd7fjmveD5u2M0d874zuzrB8ELpryvddaaso8nidDZvwiikfaXbZcJY3pPSzuvB6A6Yk9ttxd7XUNQ5MlVDBHelt2tC2JeOXANMwVCu2e7FkvHZtfVbTzIx9t1hzKis+Wrj6+pAiZrDVDaVemVGsiZlnBPX08B+7pj2Hf1kD0bUkJPy0JUuq0Wqp7pDU03kORniotjE9fJ8barNTQm1nUexBbR93bhWmuDmUzC9OhM2mUMaWD0OhFdHVIWdrghzNI/1ppCmI8SeLzpefFMgnyv9C6G/iQu8OJ8b5Ho4BEKOpF1eRO4A
5BCOEOgBuhulu8nEvSEd9iYhjCv2+553dxdMJZmKrVSGULjOA/NVT7xKdB4xh9k2rNXzasJpu+KevW36MhdwhEtUYZws0gwBHRJSerTDnNZGHF2gU8ulliKUm47rYSCEowM1xuQO1Zr9bqD2lawm5BYPLc3mOX6E/SuRwyMxXtBBCMXSS4c5cTWuTqKjwHbqqVMk5RMytKcy0t6uc+HI9GEh2e5yz4u8YRNNIXdXzHniuiPugumkKMYV8BTLkv5QmB4k5nXg8NAU4dPexCjTttySfOiviiF6c6GmnroSpjOhrIXxgaNAByN0SrRye0JLa7FwA8JuotsOpH3kMCVCqOzm3tKVe0PHxkey8B3s5E0QUAMmPNjV4yZYhLCLlLVQB2H1wq5jug81KmknrJ9FIx5jkWz10mApQjgIL/YrphJZpcxuN5CuIt2VRdWlj3TXhgR2O+X6MzDfr8gspJ2R8S2VXYijVY6UKlzPLa9jfIuqtmHFg8uBNF5HwjV/lOmer6uopGLq9IeS2KSJ3dybw7RrKZDI+l3TkCqDbZLTRXAExhX2p8B46Lj2+buTDtlG0k7ori1C3bfUjae9wFMqc7UodS6EydaU1LrYBo1xcSpkdrX+YDpey7+AjBmZ5gXBIQakWJBW7224/kRkuq/06uvbfZyQlbgvhDGTdpE4BnDCdUzF+DQJNATKuhqaHCFuBI2BtDfkKq/MMQMIY3lPCrIJw0pRQ8UqSCl2vp4Ko9bj+Vezj1GVbjcQdgHNhuxqML8wjsf0kUYIB4FtQtu81SOKXnq35X1ldT5RzgL7uqK7iXQ35kxKFqI7nDUFth+NjK9YoB4nob+ppG0hlEpNgfogkV0XCWXRx8vro7PdyOAt4A6ub6ae4hvnxG7uF9L2PndGqs7H9GnTApRsG/Z1jhxyx+PpgrEmdrVnN/aMh44ymmK2OWumx5ZOEPCWsi2DBeQ1mqjkaYq7VqGoMJVIzpHOHbE42nHSPi+Ip5mO1RI8HJxXe5XXbEvPXCNnaWQ/u8J/S2E7IheyH3syiotWYT93RFnxYr9mN3YmhNpblmEpcpgrtQ+LtIMGc0RaAYSM8zEDA2gKSxaqJkP9u30y6svBUeZivkHW6HzcctLFwuZSQ7Tj1OgZStrZnJC5WgCk2DpWRaYT7aEPGF9fEjksRMKmFIxCqYGpRHZTh26TTXrXLQqToFOkiWIGUfa1J2lZWjjULMTK0nLFJrqY3oNDxuKaFTR4dlDnONlDme8Vi9y6QnHeSN9nRJTd/Y75PJlBc5HJMBfqkJjuRfZvZOcrRaazQJjcux6UoqZwnvaB4YkQssHN6cqhSxcESzvobypxNxH28xLBajQjUnPg6mBVDVONzNVQibFYfrm7tui4v1LSHtbPMt3lTMgVGQtSbYKgCrks6QEpFSZP7PedGW+/ZzTtIMEF+VhQDdRezzn6qVrElmNERCnFIFRWlSpQ8pHnUjtx0jeG+t1zUcydHT+OcNibYRpz4pA7rroVhzlBtWfXgvsweio46xKBqosHllWgdrDPHVfziiQX1q6jRrjpiDeGjpVVtUgomf5QXkfK0BCswHQW2L8a2H1MKZtqKOecWD8Vc6qipYP6S5x8rhweBhfkNCMxnxtptfZCHUzPKseWVjg6qsY/qZ6WgDoH5pDYz51FaW4MNSghGGekzWmwayCdIHx+6FZxVwYrjjAtMuOfZVHms8DwwsTsSm+BjaXRbdM+HDpKCexjR77p2FwZmhUnQ2fSHlP23TtPI5mxn89sLdYuQK1mrIrB6td5WKp2cnOg3EC3lJtpaBlvsqg4qdyueRo7nh/W7FNH7iOHbOnN7sacLhBWz+w+7l8z56GlAcKCJgfyHNnrsLTTiHtDLnqPUNMusHqqJ7wR9Y0tm+O0BCUFSjmmkEWOcGNuZBMT7LVnHg2VKgXmfEyB1WBOSJfI9wcOrzQ1fOeABbz1RSXuZ8J+ZgBW986QfaS0e6egOZhQbJWFc4dYNShe
ZNFaF8XxiKThqJOmsNiNJfhSXdAxgzw9aj9F3wBqpbvakHaROhsSYm1HZJmTNnfNbutBKK0KsLbUmSCToJijMPdGKNR1NQcVfL7ZRdcUGF/pObzqyLbTLcKsdNeTBStdRENyzpMLMs7mBOS1LGtmsVe+yTc9peb41hrINbCde+P+TKZeT2jUBzvedGZOu6V7hUNOvLl7wCZtWMeZuUTj6LjzRXWtJP9MS2m1ACdO5ozks8h0/+REBbQGAxWcv7VoecEt7ndLF6tAFwtdKNyUFZd5zbNpw1SPqb2bw2CpcT9OKKY118SZzdmr6BzYjj37ObEfe6axs/W8sr0yD0JcGbK7FPqkxmXy5z77/Jubiq2to9Kz6EHNCXavt8xGdD7esUqqqjAT2TD5dGwB1cm98O8MpS5pwxYALeBFPh7zg8bXn0QONkHK8Sa2UaulAhZugTtb1nNLYKhcdCMAWSOJcvvYlUXszhynkxnTjIo7UdUjWxUWgqPOgRLjEj0sNz7VZQFx8rA1CONFgPUE2ZAM6yXWFH0bXGyifWCbo/GvhLJSQjk+3AUan/OtSBW1vPhh6jj0sxERc6IqzCUSd+Y8pZ1tYv11YXg2WmpO3eidGDozhifOVGkTJsA0L+TYJtaHtOo5jmmQocIspFSYp2QtCFTQUGwfqEIIigyFohY14KrcpYl0JmE+h9of26WAoTRldhKnil1ntZQVUdFNQa/jsTJusrRZmOy+5T4av2cj5I0ZrBfTbSg2bo/cAE0KQzVUCCjrYEKuQei23j7gHuRHlh7RoExfXhOnwP5VO+fw+ETNPQb2r9nzhUbkVjQJRdRUmkWNN6THOakujNc2AymgWcghsp86DywslaURUmdpGi0NmhbbPIeTNdFQKHE5DRcDbBupJqVEnP8XmTfBzts3thbEzNUlIwCZw7Gir+gS8KSD8SvimMjOAdGolJWlsQnBUj1qa8siRiN4TsU4G+JOW8AqAttm1oQfNepyTXmf2G56JnfOd2Nv3IaDWmUVrcsAC89sQQbaHJ89zVWFEahFSF6pmHaVbptJ+8jm3UzaZ+tjKRDnStxORwRmmpeonloRlaOIZi6Lg6HAkr8VQVI4rsX291JtTabI+KBbnhVeXFNpQaIHReNErJXVk5607ckGDJmjWQREEQ9+WgVSTXh3BQvi4mxolng6X+bWrI6jDakcNxVVc7Ka49TOvzlWgORC2s2EebDiAj9/XVTP/bpcsTu3CtOTjU5awFYg7IUyeDB9UrG3HLeLzBcd+1eiyaJUWCpp1Z6L9glNQl4HpgtLN9fOgraeY6UccpQNyWv19RstwOjEHGGxcv79YSAGZXvorf2MGq8riNk3qcG4NgW0GHL17LDh0CUeDPsjify0958jX8Z9asTqulQX5nVgvB+Y7h3vAWJ2JaDL3tVUymu0famOYVFBF09/dcGq1S7zmncPFzw9nC0BzeWkHA6GJNWTYpgGVjRunLVCEaYcyblf5GCIShoy+aynrAN5DlS3b9aZwu2BF3EBUCri80yjB17Zbo9Ue16HR44e+vylr6xiJp10KamY8Da4o+bASNvHjX/qFwCeNj+Zw/UlB+V9Rvia77gbd+Nu3I27cTfuxt24G7fG1x2B0qDoUKkx0PhJLZboYmGXdCm9XkhkLfXWVe53lqxvAmZdsLRN7c0DrhGLtlr03SA7j1haKgMaHwWKmKCalqPXvuiCqEOrreSzQY9BvOEuMDlKFDhWPqkdM+5dhylj6bRcj6XUAapYG4Wyguk8UDc9Md/2fKWycDVukqU95hLJOVA8z50Oli/vrwvdlcH6t1J2pzD7abTY+Brq5+wwfBNmU08rLKKenVLWlfOHOw77ns0wc+3pOhGl6wpdLORkEcRuNyC76GgEt8iarfLM2nW052Nois5WlRJjRlWYcmKeEqEr1JJIO1l4Kt11ceIhtJJoxL6rDHC5M/RpLnEhvMdD42QBjZN0GvnWY5pk/RTyOjHdj8aVPc+G
hgQTAAXQZxZFiSoEq4TSYOXzITjI55EyCnm2Chl15Emj83QGWVITKJAtdTrPkf7AgrBItVYTu90Ah0icoGTXK2ytL8DKjtVShaXzfl6bwHxmxRx1VSEo072OfBYNEew4ImPVUnlaXRJEDQ1oa8vkI1gQSyl+X3fBI1PxiNwiT/HUb63e6T0ruQaejxu2Y780xF56Vamhyi19Ys/GdcSyMI4dtQtcYWnfC79Hes9EeEvvkhOtW085VpCpGGeyrgOqlSLBqnW8ai/MlbjPSBlI20y8mYhRnJCdkV2r3Y6GItV6izjuC8sQqWr/LsJyqkgpLqKpx2g3RigFrRU5jMfDRBdn9QzmYjRVkWwE9XgoxJ1QY7A+hcnz7wEUtYi9ccS9/9jCS+09XadHGYtlhLDwQhaULIbbKY4TdJui9nqjCdTj3L+te8di5GvC5mLUI/et9erD9o3FKHsKsCYWLTWpVuyTN6ZDRrXUUpuXGoU6JMo6UVaGPs33DCmydja29opXnRKMlkAV4xSKiS+X/ngOxdGk/Wjtf6axW1DXlqLUzqo4y2ApTB0D+9Haj1WVpS2StGnTkMKW5a1YQdHJqEmYzhuCZjzFRRhVZamKRh1dV+OWNru4ZFDEzq8hNk/Gc54ezrgcV8zFqwK9EIGo6KqYSaqRkIWQ8OyEp+GKGBG+BmKspK5QU6XrCvN5pqbe0vFDOKKh4lv1krJ3Ha5lPRhpLo5uXwvgrbCsGlWX+ZVCcYpGoBCI9OQamUuATpf5cquYx3W/2vcu2k/v02/z/cbXlwMlWOf2TSZv4tIUcZ4TshpZ9TMvzgqaogn7VdcIWmd0isRk6qIFyz0DBKyZqsKSBmm8nVsckDanoosGqvXXKytvCLwypyx1heksk9e9KTxPyfklJ9eRBLA8dDxA2Mbl+EZkr8imbSAQ977puTpwuxfaCsCqzabDI+Hw2op1UWQ8qmaHGWQM1D4wTunI1WyOS+NljEq3PaoQ3z7pk9+PKqPv+V0H20jLyngGRY2vVLO9rw5KuJg5GyZyjow5UnJ0BwpqLUhSzgYTOB3HjnqRqYdI7ZJxIJyDIgr9Vft+e06mgaSWEi3itCw1VV2gzpGwjfSXZpBWL9QcxhPCn8yV7qaQV8JBxEjCwWQVYqh0oS4VfGGyh6aKSWF4F/XGP4hjYboXnUAvyGWi5OCSC0aurZ4CG+/HZSNrPJV4cGc0CVU8hVNZ9JyqC2qqgKygdDif7kQ53p918P5y3a4iOTLPiXJjRPgwmuOj55n1+UieI6qCrGbrX+VYu0ZhPo/MF0q9yIShEFNlPu8s5bkW6qqY7EebG8G4G7XeXgdgqR+rgjrVZWnrwFLLYWrrJixrRFXYzj3buWcukZuxZ3fol+ojSXYfSgdajYdijUvbPbN0j6rpNo0YCTc4l6WJqFrvsqMmmwZPkzar/fIlnfg/LYAzw10J44x2EUlhCaSWQKSRx0/TWC11lxvhut5OC9SKhABzNofJ+VBaK1RFy0x3XRCNlGS8NGm0hhN/glIhKfGQSQejJNQsx5RIb8RvnQK6UTRbDuO0110ZIK8DtU+Wylj6crqIYTJHXGojH3q02CZomyuturCq0xDCkf6wpGp8DnTG5Vs6EHTV8jTB14h3cWgbf+0w79E1f0zKwQMuD5BDVqt4dr2wVqRROyFvEvO9RF5Zai2vrfF07W1DLYN9d5jse8yBsnMhHoMfUUVmC15zDRaUBWVYzWxXvXP0judYozn9tcM4nCd80BS8JbmYZ9zsQ6OZLDIL0nr02X6V18f7cprWbms0xbqkq4KTvk8rKxdHGAssqwYOxSrhT50ngDRkigudClCGytgnwijUS9PJms9sPoSgxFgWjb0+ZVKs7PvB1p4HcHF05fig3i/Vz+nUV2zaYrm4JpMc53wLIoKtC6qpqbc+gFS8P2E5TmUHZ5rIdNNOCw088XvcvuL9mpa/PL6+CBTYImlDjCCd89EhSutM7TsvQ/dIEwirTOrM
wxxkogZhrpF1tERsa0Jce4t6F+/9lDkHR8OjR87NKTn6fHPgqgTyuuO8y6YFMsrScLj2Qp2CydofCsN1JY7GfwrzsZ2CVUzIIlonCqSAttJ2PxUcEWsR1uFBJO16uivMiVIl7g3tyn1k7lK7dbaxOFGwVRWEfT4S8ZqTdErw9N+Xn5OqIUpBu8h0HqnnhXQ2k0UpvUF2Tdm85sDz6w3T1cDYF9Oe8qhw71FIjEpKxcrsRzcmK5enqCxRkZEjWVpu4E5n2Afm1FNypB8ym9VonIQpkK6PlSVxPFYXAmity4YdJ+NJ1Wpk922xyCiKLpUYzVkjszhP0QP/0gvjw47u2uQnpqeB+QJUvJKrKWMHI4vuXg/MZ8LqmT3gpqRfo2/6i4PhFXQn6BNgUV3GqrrGYvydbATa0oXlfONYCXNk3HeEnZUqm4xBJfSFVT+jXTZ1+jmyWs2M4EUVRsisg9KdW8f2GCpjODNyeQc6VLQT28Q6gX1Y+IFtwwuzGWYT5mtK8LoQPU1LB6++PFYSgRnzkgMv9qtFA2eaE3mKDC51IsX1lwYj5EpxmYZ1tfvVqaEVKpQsph81uWPrPLGGclTfvDQA53b9oQh5rwsPiOxtOLxNjqgsEWrtxAoTvPGtNDRmcRbq0XFqPKiiVlkH9jeLLo4IcFuKuaA5Q84nwo9HhyRMxdDOVaEekqFrvpnXKGifFidNprxsKkvwmNTa2wjIWUb3CU1KviiYV2C2abpvEibD/YEwFUII5khhdqsp1S+bXBSEZNfervnE3qAKXbLigRbMBsyJXzhLuqA11t7gaAPERQ8XpArQrtJ4+U201NaZLgT4/tLQhN1HorUBS1allwdB7ycOD4Jt4memcl/PHbmuQp5cC+rKiOAxWCQexkAdbFNuaz5uHe0aDNVOoZJiZbfOSFBqjDZfNCCp2TqT9ahFFoDjteGGt9I9dkfrvEh/AH6vPFJIYXm2LQiNexPqDY1b6JXq4shW8DZeoVX45nrcH5LtHblJiDgvsTZwQqw5cKmBCYhR6btMOFOu+xVln5DSwc6KT5r3U4owjT0hKOt7M2f9xJPqLVS8Ar5pxQFHkWHl6LyL2Bw7yZKYHXX0qdnO1OacMHjbqyiVA51rJhYLvn0+2Rf6R5auGNZsW6OYv1DeJ9j5gPH1d6DmgFZxFn81mYIcuDkMy1tqZ6XUYTSHaHU2WcrDlYdXYXZl6Z5Zj5UzVCO8Lmq5XtWz/Ai3nJrV82rRZQ2MrypczESvJpMIq37mMHVUj5BCNPgxZCWMlahHgmqbBHBMPzQdoFaBUVMgNoZ/8IcvgKM3Uo4LFFiqAbobd9y6QOVEOEjNoem2tml0u0rYzVZifFrF0CbhqYE+qZjR9rvoQiyUrjKsZtuYMGMdVEnXQtaO6RBJ15FyFhxyPR56ngIzEDeZlAqvvnHJ5fXaVowb9uByDnFU5jO5df/EqzzmZB32xmwCi6perjyJ96RzQbVDPoFe7RpFoyv3Qp0Dk9h9C8Es76aV9h7UUmmtxETsPs3nbdONHF5Npov0sC4VnvZsj/NLI8wXZqzD5Pes02Pl25kZYHFhPrJvEIot7gJp29AlKzNuxPBaQUlWvel6YlJ6dPJKlEmWiL7rC1NOdNEdKRUOh84iLBWrpil+DWIR4lxced0J1rRNTfzcqq3b6lWDcZJFjiJMTVX4+PzymVLOK/FgzyCNStoXwm5Gu2DiplPk6npjt8+fWZ3ikqKsnQnZ5o0uTnXtXPrBSbZMgdwqsUpARiPrxkMltChfzIbU3p+HmxnJpouFR6vS0hRONl2yIK3gIQa0T/ZsZpMukHFa1thS2apqaTgRW1Mx2E8IQERX/XH5dhHtInG7R6fZJ1L1z0Vk1VOHSLko0FfXfbJKxLkI+TwSSkf0oKquugXhab3mUJBojkedIqtXjAIxHZKRtv0Zh9GQ9sN1RxwTaZuI+2zVxl2grJIFh40AHM25k2pk6SOJ3om4taJDz3yevLim9UU0
A9DOUTbZ0M3SHefcy0in+LpcKcPZRMmB+br3goAmMGnPIe5muiRIjY5qKdN9sxllNs2rsrKq2Hpe6C4sWtJqxQx1l6ijIA31TmbgpYp3SqjmQO0DeR+Z+m6pOg5BTZi3q8smvZT9t59JqFNkdxjIJZoEQjxRI2/2JKinMM3uB3dSjUR+nJtxtP2etg+1FjQLwboFAi1I9gKiVqqf25+F5+OGy/2Kw+gBcyPJ73rqHKiDned6mEipoL1Q1jbhLPUp5DlR54AeIlqEq2g9+TSbrVyKvPy5SrODGZr8xUItuRV4QB0gr5S6MuSSgGkpVgt85qVy0AGZlhZUCwZb4Zq6f1aXNPGxvZEtpNtBzlcbX18dqALpMlJXapox1xnJES2Bw763Sq4pEDpdSkzroKz6mZwKQypswsR5PACmRN4am5ZNtSqkwSJ5YIFCgePmrCDeUHH7MYOXlzYD3mJCcyCNppD+ysWWLz9YsXrqxsmPF2dTHrZWHe37g1c7WNQVihzztvjDErEqkI2a9hEQiyCj0F/D6slMd3kw6YH9CGoClnljkfCSLG4VRVnobpT+ptK/mC3qfT/V41MH6uRvWqoZbVVUK7If6fYVHe2Z6C7RbeuCoqW9o0nVVIRllGPlDIAEtIlZDkK/yTx55x7MgXW+zZcpvTkqTV25wdVxVuLBIsWSAlqUOXfW9NdtdCgsG7McxqX6TZxjopuePJiMgewSZbQNpbgcRjzYfbXqGLPczWjlNUwurTA+gtUTW2xxFPIrs2n7faEjr12vyPV5GhLYrid3xuvQoRLPMmUK6D4St9Eq2Rxp0WAOjYmeVpgzUnuPGoXWADtkUymPu2xaUg3NVa8e6o+w+ZQT86Gn6wr3L3ZcXm+ohwBVFv5XKYG5RA77nrVX0cVRlhSJFCHuTIyyrILrGdqGFUcrXY5TJU7BSuxd7yafWzAyT0J+bshZTZ7m83Wps1XkLAsqVWuf4mr0WY/dB5pDXQpWweMErTBDaSbMnax0cDG+2QRfZ0ecpvt1MajLZubl4irmiGp0YT+1ZxKnSjxkQzHWAWQ4iu4dZuLucJQf8EpWLZ6KixEdOnOmglg6KwXKpkeTVdiWlaX+N9sRbm5u8y60Qt9xeJTMydtHowuMkFdms/IqEMZI33dQK2XTmdzDuiLrTOgsPRSTVcZ25wemKdH3mfUwcdMNfuqBUaEOke7GnkE/CMNz0NEzAUlQT6E0G7ak/ovaBn/S0gWgng/sX43ki4r2lbKXWwEuWCVpWFWmXfJq69v2Gtzkdcrm/p4HZ3sOc+JFDkBnPMQBVALdTSCN2Uv0jQNHVBOEvBGSN7CvEbRXZGnGDarF+DshWdurpKy6jPQVHXRJkzWh3jBjosNTQKJagFeEcJ2o6+JOqQtqupCkqYULcx8ZOxO63JeOKMeqOZuguIyD8VIN3Q2L0KMUCzDmi2Pg0q45xsomzaRofSUXtLLxeluaNbj9D7BOM1GUy/1q6WjRRp4T9apDqqG8YzXuV/F9W5PtYxrMESrbZEHXbNc/XQ1cejVj2kPaWqDf9sTGR1pa8py0MTrNkpRBmM+Uuq6mFdbuU1/tvvfFmpO71h8Y0LKPnaX0oi786FbJumjMJUGKBTnmhJrD9R4+4/uMr+lAicgngd8OfBQ77G9W1V8vIo+A/xD4DPB54B9U1edf9VjYJiTqJc97l0r3klBUTBlajuSwOlRUheiQ5E0ZWIWZIcxs4mhE8j6jh5WVdu98kZ+QiY9JTRboN5/J4hRMgzJ88oZH5zseX57DTSJMVv4+zgmZA/O50l3LgqDIXG2zHYRyLxM3mXKIlKEzLkFk6bE3nwvDlacBhsh4LzDfL5aGKAI7N/5bpb+cCDuPbJ2kGWYWDaFGpK1Jj+KhoytmXx5ul3+GcJyEwC2xOyOPOKZ6QgadZlOr3SdKgLALJv3vaZrjpmbppgK3uBRajjlkVeH68Tnd+cR81dNdmxJtHM0wTOeR7UqYHhqykbZ+7Gob
fOoMp63rilY1x6zTIxkVXNk6Qp5sg/bNK28i04WTgfdCHfw2uBMZp6O4nEZvL9JKf3u1lGJQ0k3g8sfNBIfm2SUkmaNVnI8jRRbjkNwx0xAMal4XwqrQDzOTJMpouiWmzus8qNQgebwliD2nJVXhc9f4DMaPKysrD1ZOmldPgakMzPuOB49ueLDZ8+TqjP32jNgVdJWp150ptjunaTx01gpmdIdhMt0dxM5x6RdV2toJHkmqCxPa3Fp0iQ7eKzBV6isTh+2ABmG4iqRtD+LpcEfEjlyi4JpTag58PBGZnUw8NMz2HFFfuxXqIEvKzSQXXIyv2HM8vOLcmddGQzxTsZZHYyQekvW6i6b8XKNN4uCqyjJXZMrUHnavpQVtjpPS7Xo2QLg+WMk/QG4KlwG6BENP7ZNFyl1EY2C61xma7DyWmoTh8Yr4bsSIhqDFvOrGnQpbs4ndVmjl7RosPVV7QVcdFGW6n5gvKropPHiwW0jKXSvoGHv+to+8C8A7u3PkzDbucU7stEfFCNkq5kGGOZKiZQra/t508I5G/QTVPrUrQFl37F8T5MHEZjOx1Q3rL3VLWyOZTdG7W8/GuSsBnMPa+qyFAjpDySaLct5N9LEQHsI2rU3HbG1ozHyWQLH7cE/RM699Vwv20qgmr9Hb/QtRl3RVrUKe0iItIs6VjKmSN9nMZkg2tyddkOgm34IojNF0C7uwIMxhlqPy9Y2l+ksfKMm0Ane5Y93NNi+DnX+71yYI3UR+BRkbMm1rrvQwX5gqd6OvdH3mQbdn081mc1ZeLFBkSRGbtleAXKk9nHcjYzEyfJmj6S4CiFKKFYQYYijUQ2T2djPWe9Y5ozOEg6Cz3wPngNZ9YFx1Ju7qtrG7Ln5Okdas2/jCHjzO2Z3xarZ9cF2v83KUr2h9EmfLPMWzwv20ZxMma6QslfN44FA77g0H3tpko96U4Cl8ljR4iBY8aTyie3afjly1DxofBoHKwP9OVf97EbkA/oyI/GHglwP/lar+WhH5l4B/CfgXv9qBFJu4S0WbmlcYotL1GVzZKRwsxZfPjG8xzonx0LFaT7x5eMBNGZbGpmNNDF3msFaSNyZsOfRFh+UUjfONsgyw/ZRrnmRherLhK9t+cbbSAV5cbTg/OxAejYR3NrTUU5gqYczWo+qh0N8b6fvMtqzMMHolR8i2P7Q2EHkdqZ3xZEg+a5pgnKfvwnZE9iPapcU4NZ5Q7Y7RRFAz+lKh2xfSdiZs98eKu1Vv0Gepx3Te8iDMyGnORmDNx9clGwk9jGboDYK2ay5DtGjj5B6L6i1Sq4hapOqQ8Sc/9YR3L88Xx6d2sngEeS0cXqtL9Y1edZTBFmvaYZD8AaZqzkhdVegqZW2InEUtHZLPSVcH5GBqztpFxgeJvGlEcVk0xdRFGOPo6uXbjLrWlvEFrJ0L3sl9vqdsPtch2jGfK+XjM/1mYro3WOpqVZ2vEem3Srd15XftzfDlQJ2VHONRpdhRtCX68kiozV31fPypFtNpWkNyPRYtnCBy9JX+bGI1zGz3A9c3a155cEO6V3n76X3qdXfUTqmW0olrC1DMcIurojvcPvkGMHPCSTHSpbXQMG5Y29StEbIhsWWMdKtsiuQ1MD4PdNeJOFbrjXgwyLHxErS3/yfvBSgbU69vQp7djWmmzffavbO5GUZHRxaEAFMyrhgyPBhCLFGpU2S+7qzVxiT+WdCihNEcKVE1cdZq874pIu9fb6kQSAd37PZrhgrheodoBFVDn8SUvi1FFyDY77UL1N74IjWZ6nXprDositkraQ5JKeg4MrzIpBtDvxtXMO3dZpwUGtTz3hoJnxW6zUyKlY+cX7Odey76kVwDq4sX/OCzV02MN5tNVRUj309WpLAcr2nkYI5kEDkGKMJiAwniKc0MuaDzDFWRGJbjhKBshonDRUfI3fG4WVDfkLudaZPVlT/Hk6rOkIV6COx3Azfr3ihTJaCti5LbntoZqjdvTBzXzj24+rTzgVy/
yFp7CXm2AhitsjQoN0XvwG7uzKxFXYo+pJjoYtoagl7WcaFiiKf2aVzGejy/1vakvwRNwkSiripvbe9x3k+3kwOiy/vTwYtnYjCu51xI+8j6aaD0kbJhcbpqp3TBWpjs5o6cowV4Kq4j5Vpki2ile7LA82nNNCbqISFjOO6Zwe6/BjeJrcnzGH29uvM0Q3djrZ36F5YmNb2xYFypoRiCPrtoJWG5RwsiWdXO6TDa3p0zcn6GrgfKWp07ileTHjX0pEI9N3HpnfQElNGRhsfTOTfTQOPgLZzkFqQfXLTYU/PhkD0DYBmNrzW+pgOlqm8Bb/n/r0Xk+4A3gF8I/Gx/238A/BG+hgO1yBFk89g1GJQYQiXGinbFHCgv+497Ky3dX65Y3z8gYm05drlnFTOP+i37Yg1mG4ncKihOHgpHY6vt5vmNP/+rJk8/X0D59hvO1hMvnp3R3ZhXW0rg+Tv3WL3ZQfU+VO3zMbD72MDhFei6Yorch3hs9aBKmGyilhVMF6aamkZL+8kUjoTcZVFjzk6pCHlBjKTY4rcWFbIQ6WyDsQ0n7CZTFG+lw63EuBk1OOE91UWsr47jLQRKs0n9L86EoxGabueKNTbI6Wjs2r2uSalrSH3mS2++Qnye6Gah25nMAjiXzCvb0hsH5kMCOjNKnZB2LnSoNnHyGcydIOeFsk6WmhlM/LGmFasnkeFdg7DqOjGdm5OcdpDHI3Jo5cxWqRVutTSwiDg48bmhHZu3lN3HzAkOk7D5bM/uW2DTDF7ASsS94ao5ar4JzEIgUCvMKkZyPpjjq4GFf7Cg97VtipYKMRXiAMWF+Yo5KRSDwEuOdN77kQqyi0zjimnV8epr15z1E28+eUCdA8NmRu9hTpTzAOLKJCKCc2FqZ86IteyQY/WO8/lao26LqNV6RSUj1JuzpxYNjlAO0XjBYo5/WXkUnatX5rWN2NcIds/TeGx0G31DD7OhB3iqkxMjmHaypOFDNnTVetIdna9ahTIZJ4OhLgUPBPEqTFmqtqTIkdPl6ydMraGwOuoAVEvV6xDh5mT3O5ELMWkQjG/iqYk4Ri8osI06OJdjKeiIcfm/3mzpn4+kXU8+00X2Y0ErRysokLkwf+SM6UKQTWa9nnhls+XJ7oz7w4ExW3ulz794xDc8fEaQyg88e3Xh3YS+OEImi51eSMezpTHjWJy4G/y+laXVhUyz261iNkgrWqOR0WcoU+QwJ9tI8zEz0opSKNEQwK4tRJbKvbY2pFh66MVqzTwlhtVs9lMNxUOd9tEZ8owosjeHSIO9NlcLVMSDgzIFk2J2B0omC3DCJMxj5Hq3omw7Gsm8IbHpUBheKIjPAW8IboGuUHsnjBeQ2fak1u5k9VxB7HvyJvD0xTk8uKE4otOGyd74PJkrgdOOEsr68USNA9ODlm5nCaqejGc8vTxj3vbE5oTOjl63VGsrMlLhZh6sbdY2Id78PTjivFREDhj601mroDIHGIOnMsV6823tWZx9pVoXBueRVpdrqa2KsRqP00RuZQkmb41ptsBjs6KerwgTHtC3PUfaVCUUmLeJt8b7nMWJWU2NvajwxetHvPX0PnXXOpxYkCbVeyTeWN/ZMNseKqPPZT0GAV9tfO13nAwR+QzwE4H/FviIO1fNyXr9h3Ksu3E37sbduBt3427cjb9Vx4cmkYvIOfD7gH9eVa/kFub4VT/3K4BfAdBdPLyNWDShNBc3bH29wiikG6j3ARW6zcxh29OtMrvcM5ZEEmtS+2Jck3NcCONh9nSIspCbxSNGcY+7pdfGR4YUxFEI33/G1b0N3J8pg0cXY4RUGR9V7n+/Nd+0Cp9q5f4XgXxeuehnnj49RyZvmjvpUpnX9DcW7vdsfbTiLlBW1VM6x8hvEeSrdUGGFug5NKkE88C7rUX9aZuRw2zoUeM9+XE0ez7ZieIL2lTVuBaN/HpC/tQoi26OESbVdTiOkYIm5/+0SMvPqXZqJaHBKn8+86nHlE8EvvzuA6Yv
rMhDMmHDE0LvPCb69Qx1deSuget0+DmMXoGXKuWVkVEHKxroxXg5dHRXHVKVsjKEynqH6YLcqYtl1s75OmPrD3hyDdlFSXeKvIDtG4a6NZ2l8ZEiN/EWD6tF7CZL0NKajlQ6B6MKJxGuF2Z5S5b23uBVRcYDqMbTano2iLdJsXRJmI3UuSCXAPdmXn10Qy6BZy/OeMYZn3z9OTFUvvT4IXVrVZDGpVNqDpzf23M4dEZKXVv6ua3J6oiEtcuR5bm0RrwyZUcPjBMWii6pR4RjleDc1qGVmpv0AS6a6uhwaMjzSdq9/WBcBdp68tRmyGYfFnG8LNZQepqtKbWn2lAlj9YTLuyPorgtMgduC3VmkAitnDos0gpyLAhJjtgFMbS3yRF4BY+E4PfHLyBAnE0GQUNH6E38USOEfUYPo1XiRTuO+pqNV3vS/gIwNE19eTdUI8yGJpc+UNZYCb0Kj7dnfMODZzw9nPHa+obn44ZvfPiUH3z2Ktt9z/xiRbwxykOchdW7VsQwXBp6118XK0qZrb2L1Gqp7ijLv4ukQeNWhmB2BqAUj+xpMPIyL+yBstAXmh1och6wAFHLVGhSB/MckeC9NpPLn4ihjy2zMN1z8Us15LyOcUFA8+aIbrW9QB0FWSrnPC0+TwnZWwWeNcY2nmDYZ/qt3Y+8PopYLvtNa2LfbIqnx8GKfRbB3CDsr3t26x6tgVt6g74mahSiN7I+lc6I25nhMpG2gXxm31EjqApfvrnPfDUQdtF4hKNVwoa5Li15WjNoUXh3e87VdkXYGhJolb0sSHVZQU0VGSoS7YJkKKgodd8ZiquWEWnIrWVMDLGNh0AeqqfQjihYk/pZsgN9QNeDp4OzFWLECLUSD7K0mLJ7rUdag4IcAo8P5+y7kUPuOBRr9Pz46px81S98NEMJ9cjZ3GW/JxU5nGRxVL288auPD+VAiUiHOU+/U1V/v//5HRH5mKq+JSIfA959v8+q6m8GfjPA+mOftEKOZhST8V1itFJOhaOuQ7CU2VQwMmRXGYZ50aqY1ETMiloZ/fnHbtj91XvM9ywP7XvOEe5rq9APn/Zw/heMQzA+gOc/tpJePTgk7IKPUQlPOs6+ZL3R4uTHrObojA+FclasykystDXOVkWWB1nEPK1hsBGo+8t5Uc41/SqsPDwZIb1uVgRVe5C+WJqGSJjMbrW0iEHsmHBmE+ILwY2ZHAm6QaA6FttKrEs1RydnF/CroBUZhqU6aCnp7gSSLPwljcb9yQTjCyk0Yi9g0Pq68PFHl9zvD3zvFz9OnaIbDRbraB29geuOcR/ZnEC55lAoJcpC+DNCgpL6wjSYFlLT+2rpBanFDIU7JC312vhXWkB657J5xY40Y75srOYAzefC/R8sBhGvzeBN94UX32EbR42yXIt1SLdrry9Dv6cbB7b5WVWaw9q+KbZ5D2bgmlBrg+bjqIRdg5hxaYKjkeaq48nhPsODA9/+ibdZxZnve/cjjGPHajXDPZiv+4V4+5HXLhnnZCnotVfmraEOxutSryIFcyCXfnILf0vdATx2im8FIPRubC+T86h0UeAPs4lsWkrMn3XE+kIqlN42prxhmW946noJjgB8/qhykgovx7Sscyw0sZBOCeoyFLII+TZ71OZd7ey5lFUkrqy8vqXNTP3Y5klNxm26paWmFdNXUsjF7ZCtRQ0QxkzI5siGqKY/tvTCs5yJVg9CsgU8aWcbp2RF+ybI6JyfIaKr7uj8Cay6zFk/8cWrh3zi4gVX04r7/Z6/9M5HyZ4mCrtA2lnwZikub2h+bSmi1ZMDsp9t02spDakwg6R4Iorq193mfIzmRLZATSB2Lm6ojVvlNmywClU5uBCxAF4d2ZxVq2S29SZzoJZIOUSzzcPRYVdPN4eKBZb7CPdn9BBJO1ubeQ3zmW/CXoUXu2p2rpq51GDVv6gRxBdnyDl+GoxjGQ+VPsB805wzFyM9U/J5MVkMTIA4AyHBNAtxFpP2cH7fuI3s972l
xBrfEbtHpTdOVyhqRUudzStN7U0tEDk6cLWKSQI5J3AJXk5TZAG0T9QhUaPy/PKMfEikSVxE+Egp0QjTWqkPZ7ohE6LRbQ6lt7RnCyiCrfHFfrcA56TQtvSNBmKSOMu68/MrQ0TurzGBc7FCDFWbh+5QalJbXl64JOpbm1jT+CDKPnfMJbKfO0uNcvu+LpI5LUUsgrjo67KHnq7przI+TBWeAP8P4PtU9dedvPSfAr8M+LX+73/yNb8Nljy3tVQJSxSv6tVb0W5sPCj1TBYp+o+/+oKiwm7uKf7eXKMpkbvqab2fiV/pFyfHDvzS5HE+R9wrV58JSwQ6PI5MZU3/xpbDUCFE0mpmfijsp457nzseR6OgNZB2yurtxOHTI3LVLbyBOFqrmra5xxFWLyqrJwfi9Uh3kfyps5SfasBIlOe95bmrG2JYtHSkQJlZHDMV44bIOCNzNs6F3UyWKgZ7hihfYzK0yDFF8lk0wja+AQUjIbYmpGWldPdHZh2Iu2gbX+PJRDMaWoSrw8DnP/sRWFUrUZ+Ubmv3qPSGWPQvAuPrakrKo+kgIT4vnPxYOwHviF6yRaDRN4C0hf66lcbapA9jJkz2HUa+VddTYSHhS9alRUKYG5H/SP6MszK8VSm98OybA2WtLjUAq7etIsvmsv1M93ClcJexaByO5KrZrjRMODbSXpq6dvZsymBGRIducUq6a1fjz2Lo52TRWZuLTbsp7a1F0sVrNzxYH/jC84fkHPn2j7zNK8OOz12/wpObM7o+M5/fRzvlcrsmpWKVSAMeGSusKprFQIJ9oCZD5US9umbEkLtckC4S5mr8hFx9oxcYAzr49bsT3NZOqyoN85F7FKMcUbskTBfCdE+PTtXgm9RKbyGeTT4CnDM1Fy+kaM/Cn/cmo9lQmnIuSA6ky7BUtsURmOyZ5DXUwRw57dPC4WrNU6nOUZl1uQ80NLcVZIggOdmqS+ZUiIjxobI5SE38cBGBLcWQp2pNoiUlqyhdm7RGcHX4+QxkBdMzL7i4GExU023pfuooVfjWVx7zzu6CdZr5i299jL7P7K9WpMcdm6dHZLC/gos3J+LOuE4yZcKz68Ue4MroNNvSJfvxdi5LFWJDzRu67fajVmEukVZY0jIA4s6R3psp235Zi1QPgl0fKHjj4bqydRTXxvPavTDkFGVp3xWKHlGvq0R/GRiewfppZToXpvu2brWr5th1hZIDlWDVtaqLMxqiWmVmc6q7wHQR0ND5nDEBz7ISnzdKPqvIpqCTUiQg1YLvxY6WsHDZQram8tMhQQ7E2Ys4GsDQm65cPAixekAeTSqHciLY7MT+xttd9TNxbd0fJAda943auZOgUIfA9GhlwdQYjZTNMThqDksZMImeVM1OiKIaWW0m5q4wFaF7Hl3x35C21pA5r+TYNQDjEeaVUFcR2TcHrxUDWUCazxJSBmITZ21VeWJBO0nNOcy2ASo+j7rKOs0ErJVLroEUC6vVTN4kdGfyFNIbH06iKe9362RtrxTS5Ahym8+n1ekfMD4MAvXTgX8Y+F4R+XP+t38Zc5x+r4j8Y8AXgX/gax1IBdM+am0WBjMIxrv0m9HXRYCwkVMfPbjhnRcXJsaaKiJKjHXp4RNjZbe1p2Y6UC998QmqsWhOFHj1e/OyQT/+iVYVMX7ljG5rC7fWgNwk1o/NgHVb+74mphcmQ4/kJll/vzkc4dpWhg4LbChjOT4chyFVbKHkM4tQxlcGuhRIwTbhBYVqIoqzpdfipEwXYUFMFgOd4vIZSLT+VQIuKKaemghQixlpjgCJDr3J8l/M9OuZqevZP2rlLr4hrm2nEydFt0XXnNGkgr5IPO/OeeMbn/DqestXbu6x/eJrzGeWMgWrdCwDDO9Gxtf8JBoA6Qq6jIYG1F7Q3loFlG3HsLX2JfFgTld3U5ZKE2pdUKuystREmLx0v7N0VCjVYGw82nLV45Y2qlHYv2KVgvc/Z4jWdA43nzSjkA6ypO2ISj5Tpuyt
AWa1VjjizzgpMlgpeKkehbkjV9ZqRibCfBaYzyPD+XoR6UyjidmJGrn6Vv+xFqVnm9frL3aMbz/gSx+f+fQnn/AN957y5vYBf/ndjzDuO6s0miJrXxMiyn43mGqx90arHXRrE1CtIVL2JpKX9ixoWPQ2PLRqwTaBXLS1uxYgUs6d4D2YLETpA2EM3m7DUxGe/mtpuFYNWNYw3zO5C6ohVhpc24e2jo+q8S3tsvRpg2OpvwcDVDG1eRcfTfvj3G2wPsB8Id7mwxCe4kUbNVlvw+GFOfurJxPx2oowtBl8CYbCHA3bbVukppmkGSvj9uuVGJEYF8cDCch6Td0MtuE76gzWw9D+dae2CwuSqoopaAN/+gufYr2eGMdEjMr1u+ecf9YQtbQ1p8JS3NUd9kq4Odgcm2e7jtk36EYreAlxklwMvW4IlASDgfRIbq7XHTsgXHYLQT/t7DhhKEh7ri1j0BDOZq89vZv7ypAKeU7cPN8YWCssPS3jaETtPAi6qoSbaBIYW6u4DdkUyqsjpF1XCKFSREy1u6u2PpuTHAtlXWgk87IKTOeG8HfbFhF4EOSahbqqdKuZWTsDIaHFwdQ+WEVc9qpSLwyavJODeHFDc6BKL0xngdTSztH0wwj2zGsXaBV4KmYHSo70qTAMM7ve+kTWZL1Gq1d8WqBjCCoeqJ6mzMXTmTWZppsmhWoSBoWE9IUcoj2X85nshPi2nzeUcboP08MC6wJqiPLhQSAdOgZHgEI+sbmDWBu3bsVKMTt+uUNjsDm/LqZt1tq/wPH/ORDEKhCLO1Ai1lJsWiVmURPMD9bPMGLXX2MgNGCki1B8z3zZh/iA8WGq8P4Yx/315fF3fbivOY56kalFrPfWjXvjwTaJWgOhL+QzU+0O2TcXNfGumgPzXgy+9Xy5YF73/Xs7nr9zzyLU3cnptrLtNkF8lB5u3ohLqmY+U+hMpEtuOoOBhxn55Mz07D6rx3gkGFDpSftCf6OsngXmT82wSxaFDGL9Dz3dcFpt1UZeB/KZ6bUQlNIF6mAY7O61RL8OrE70VlTkpCrGNubuxpozxunESKd0lD9wHkZLJYDDoi2Prmq6K8610BAW2LRGEyYbhpl53x3z2sUihqKQx7SUsLbRIgmDY4XQFV7s1rz7/AIJyuANj0P2XHk1J2r3qWKLafI0W3Akr1g5rwbjauyKmME7aSVi/ep0ieLb9ZXO0If5pHKJYqrJS2VZsZLe1pqkpRY0wvjQDOlrf+HA+LBjXpshfviXlWff0bhHhipJqtTY9GhgnEy1F5dMkL6ShmyKzoI31jXLVwY1ob5UrcnvWsjnvTn2ySvs3Eou15hc0b0e06ZxUuvv9erMqx+54ul2w1ee3mdYzXzswRUf+dg1z8YNz/YbLt95lTgJuycbuvsjMSrTmdLdGOLbdYUQK5MK5awyj8ZlOlVUVnGDAyzNpgCphjKCEJynIgUTgh0EPQTibMGIFLwq1dPE+HOfKnFvKIEh0ECwNjJ6XmAKqCg5C+nmKMxoooORMPRLqXxDbbpVpnYVOTf+zDxG4pf65VqWFjBF2X0kOupggVCcgZ2twfVjZf20EA+VdLk36RAR29zUApMmZXCrL9zJkGziv/WkYldS8ufqjkeXCGcb9q+s2H2iUi8y89OO4Zks6GVbcypCHCvddUCf9Vxdm/BhemXP9mqFlkB9nuj3wuE1T/vvvfF69PTz+JKjl9KJU2Sb9sIJicGrTvX4ehvRoFdp3JVRCftAlc6CDrcXIUN3LUxdjzxyL9g31bYGtbpz7WKQcpa52IzEsCeFyld2r50I8HKLfxe2kc1bgf5KF+6qBVtx4YqJKClaUKYqTGOiaCQUqN7rTrpqEhi7tKAyx8pAD2o9GJBF9DaYie0Ulbq0qbFAQqwyrjYpldvOy5JWwvYP6//oEiLePLVGoZx1i9PSWiiFLMwughtjRYZC7dxWBJsn2pDexkGqzQnxijrn+y17pSO9uk/IOpOGzHo1k0tg
HDt79heZ2g3LfEyH4/6XHkz0w8w8R/KmY7ov7KdAmJKlwP2RWT9QQTuIVal99Ir6AMl6s0p0Z8nvkwRltZqpVdg/W/N4d0apgZv9QCmBlIo1ba5C6gvzxvr7tepiq8QuxmkWQ/VuCU5/CJ7311eJ3D3beD4zn3VLiiVFy5HP7kq3fHhZWSQhoouactn1aFepVTkcLCQrOSIb28xWzxxObWk7McSlSRi0yZn2LOrJm8cFDYnDbMdLW1P3vnqyQVaFcK8yPAt0NxbJtNLH0vccXlX0EOkfHZhuerK3Uyi9KSBrMMSx9dOiS8xnAXl4YLWaEVFTYY+RMgrzxhypeEjGGVm6oNukt/uolCF4H7hqEWAT/eq7xUFST2k1x0ljoIk0WprPol5LP1S7TyKLFMOQCuXiQFmZjoZOLv1fTDOllaC32wzQ8tJSYNp36Bc25NdnJCgX18pwaW9sve/ymUBXOX9lh37fg0UbJ2TQCoJttHHEURObG6cyFY2HsyyAZKhJdrQi7WVBGMqy8RhSJe3c1dC1iqFItYP+Snn841fsP3IsIU87obtphlQWMqMAGs1AlsGtgusPha7S9d5XcYWlANrtSq5y3PhmyXkP2QXwGrrXkMYgpnB9Gi36/1dPhbzreXb5CPnIyCdff8ZnLp7x+etH/Inv/0bCZUcdKpudXV/+qC2SVT8zeqpCA95HS+hXM2MRyqGjXreS/5Z2DEsrkmX4ve2uvfz6RLuq+rVBc5JkOX9LRThZfzZtrv7amiSXBxZ9VFdZD10hrmfybFwYON4jKXh0nhbuVtpDnoTpalgcgLAPdKPQXWHIZIVub2tbspIOtsmG2dPBM/SX5nicvZMZHh+sjH83WnDSUNxTtMm1oBY0KgbbIEJYHKpTR4i+s7lYiqXiY4DVwPQgGbKWhbKu1GhBXzwI/VWlv6501xNxH9gMgZtLm/vlQUa/cEZUKBd27+b7lYvPRborZbgqrJ5lwljonu+Rmx0LEbyRwuPx2WoQxHOli8CgX+NCbg6KdD63VSFXc15G43z2l56aC7gmkaGB5aonjeJrAVO7V+d6Yc1+y6Bc3N/zxsUll9OKd6/O0b5SU3DxTxZe2nRPqGeZvLG2Qd2NOWJpXyz97EhPSoXz1chcItkbs5c+oHRQrOWKBDUBxunoYJjtsH/TQemvTaCzDEK+h3GnUjVnSgQZFC1hUVpvx4mtAMTBAI3HfpmLVlZDZZMgO+M/1UGYU7Jga3NM9QFoNd3E6ghM2VTQeCw0adPT9awW4nwT/cyNiqDewkxsH76oppt16NjVwGo98ej+lsOc2F6vyCuz0ZLVnTOBasKem2FmipWbzs6/rMTEZBvPVo97pe15ytJORcTS6OfW9By1IFpVkFBdisOc+6vdiloD05gMMSzWw69MkeFsQmKlRg8gJjX5n1yXPbbNY40BIb0XPX6f8fV1oBQkVUMRBos8W+WFqikjlzHCUJnuBbobaE0dnzx+gO6ikR6daFl8gdcx8uzmAYguEeWtigbgyN43KHw+h/ufm4ljZf96z813HnjwcMvlDzykv3QYt7OJv/myp+ZcXK4MkSBCt6+km8irP/kpz282hKEwPuoX4cYyqIm9BWE6D/Tn1sbh8Ei4d7FfJsI0dpZ98E7kJg5mE7FV8mm0zaUtrJqaPpRvqp66WzY0VQhm3BZB0RTRcOpA1YXkKu050BatcJgT85RYTye8k2rnuWgXtWjJo8blPgswBc6//Tk/9WNf4PPXr/D5z3ySrSvHBicAjw8VpsA4JoYepnPxqFII0QLaW+rnQWEopvCMOaZ5FYhnyYX9oKyS5dvPq52rt9ABXyQ0PkBc0njNMZCgfkwXOlwrZ182Ib4yCNtPmPjr+l1DuJZgZbb7Is3RbRmdaIvcoly9dc8sZejPwp+zkZiN8J8HsVYzC59P37OxnR4vb2C6V9l8wxWvnO3Yzx1/5H/4NqSrPHr1mv1Fz/56ACwq1UPk4pVr1l3mRa8LGbVW
30xyXATzmsaVcRc9dRcC1RvNmnNg/6aDCZ2e6hY11d8aZam0W8ivnCAqno62hsDeJHjRSoI627WLqN1zvf35RvJtc8aEOYVygQkdVuPdWJ/Do/PVjKVGITqqEDxC1WDOdLev9M8n4vXhmEoN/jxOG4+ePidvBLtUsA2RxhVZuIxRCF0HIaDecFhiQLtEHgReG7l3ceD6co0+jssmt1ThqRL3M/2LRHczGAqw65geWAuV/kkknymbL5vzVHu/zl0mbkdqn6ifeIVwmAnXByAiuRwdpVrt3Izda8Fau34x2yONGpDSrftg3E1BWqp34f3JsqaNyybkCOp8lCq4XqBpoNV15fWLGx4OO15b3fCTHn2JPzD9OPIuml32Ipd0MNsetpH+hSHeDZVqmztYNe+qy2y6mdBZ54erbuDJlNDO1b8r5vg0x7tpKbW1XQyxTAd1518gKjEZt0pVLHWerfm8drrYvWMlINaSKQm6j7TmyreEcgXjN62NS6diPf22Hw+Mr1TqoMwb/0ARDlN3pMVsCmWIJE+/G6IXrDpLWdqJtVQgeEDjCHvaH/fRB4+2bAZTgr86DOynjnFM1DkunK6WkgvZ6CbjHJG1su5nrtMxSGt96BYn2asoG6crzMWD4UjZdHQXVm2SJ8uwdF2h6zKrLrM9GO95nqPZrik6H8wQQeZA7iN1jkjro+pdH5ZgZqmWNbhO2zr+GuPr6kABaA6s7s3sViwqwaqybDKIukq3wZeAVRXcJCtVLW6rglA12Q2qQLBFY4KHR1RhKbw73fCDOSCPf2LvQmDAZcfzcgGbSlkH5Iltfp/46HPe3lww/PfnRnyeI2kfSGPl8jOR3TdNXNTAt772mBfjmrfvrxaxv9oZ8bEMyvggMFxFyhCYz+Fely1S8GoPciuv1gVtCLMbUw0oVoXDqUJwu8baDHY47ujNo355ErwfLNlE/PwzjUsBUJ04jLK0C6Bi5xEdYvfoz+T+WciHZ69v+UkfeZPPXb/KzdR7ZYltjFq8amWjfOZb3mE/d1wN50wPWqpICemIBjX+R4yVbpUpQ295eVd0DiWQbqLB2PcS031PkY6+4WqLGv2Sg1hJY3O427wIJ45ghNf+rDky04WhWGdvCpffVqit557Pv5Bd9dwJ0uKcPuP2WYPqWsXTPByr/9qcbDwE/10dRUsHaOijPW97U3CS7SnPrna20dw8PmP75Qt0Vfhx3/Im52nk6eGMt0tEzydqN1CjEi9mOkd/wz4s5Pa+y4xTomZZOFwaZFk7LaVipFY5/i0IBFmubYn0DhZ8pEMlZCU3gn3EGnfXY+TdHCIjaNsfZLCHpFWIQ6FmQedAaOcQOEqjcHJPaWlvQQ7mYdVNMaXl2hx1d2Tldu89e47G+2m8r3TjXexDAO9PRgi27k66t7cWLEtXeVj4jNr+1pyn9ro7WgJoNU6VzJlur/B04GqMt3sUTtDdFJMwmbILlM7Ew4AG2H+8IEVYvZk4fLRw9kVzKm8+ZeX4aQfTo55wliAIcV9MPX3VLc4/KR4NQYqLI6ExHqsLXTJFpUXwLUiw18JsttxSVifP3auPpWBrcGnm7T+niInfoiFm9qXj7e09tlNvBHA9Os9GRDanoF5kpgc9GsVbK8mCkNbOOl88Wu/49Pkznk8bnh7OuNyuiakwNT5eazeUKsxxcZ6WS/SgpzlFYYawyZxtRkN150QI1XqKLveOJchs8w8xZLX0cTnHtuas+s8d3hvj7I6POiNcr6G+NlmKto/LPZynZM7fGAkra0W0tGJJHkSkQBmCIbuzEdiNC3pct0sqtVfWD/fkGnj7yX00B7r1zGY1EVYz8/WwOHzm8HrltNj+sT30XKxHc0yLob6htGDMMxKDPbsINqfnskiA1CGSuonxYI2bUypELxxbpUzcVHbDihBMroRDWAIlmWxfreeGUjWua+socsyc+B7YMhPlCFZ8tRG+5jvuxt24G3fjbtyNu3E37sat8XVFoFRAkvfeCpYP
tejfyHwiSreemV+sLBpMWIVTjpy9cc146MhPV0etqK7Swtnu/ki9WRP3J1Glp5taCgGgkfRE4ewr6k2NKzefCdR9ZPVWJO3treHdgS/tXoNVIQ3QjY2/YVVdaQfkwHe++mX+/JM37BrjMSJaqtaSMp9ZBUdvxB52k3EeqgpawqJ7kvZWVRbHSnCYVYqBitYe4hR98i7op5pPLX0grST2JQQqyJHU+gEkOalKnYNFTl7pFLJaCWpkqSo5dvn23HH1Coyk1E1l6Gb+yA98C/q8R6Ny/yvWoiUUS9/OF8L+EwZ1v352w2V6naJOw+icHBna98hCxl+vJ7b9GhTKJOTJKus0BVRh3gTL/Ue1dirONWi9CW+Nqgv36fS5ddcG/998LHL9TZV6fyJ01VoCXCbmC0c2YkWrdbC3aiBrmHkkYcqSj6/FkI+FjBoxtDUduQgLkRZLeS1VlNUq3HAOm+RGLIXGJRqeCXGX2H965tPf9jYf21zx1u4e3//Oa0xvb4h7K9pYHUDPIKbCWT/x5ObMeCoZ6w+HFXakvjDv05JKaefXnoc6r6d2R/TMUhlO7K+myYOCzN58eCxITZYi8Xt9WuBhSJtLmYyOGoh9ceir8UuCuuyFLNH8sa/W7fluvRDtGP3zQNpGk4UolpZLu5ZWOX4uTp4iOxiyo8FE99K+EA6uw+UVh9Z+5Yj83vp+cQ4MWArM0ye6lJT7nGtIVvC12yLhXIj7Sl2JNSt/0R+fQXhp/aoS9rPN8R7SNtBdCeNDpX8WObxm53X/s3bdJpQ5Gco9ZcL1/oiOLTYjsujKOcKtyVLfJHv2ompogep70x6lLuT02nn5+Anvq7WkkiKEIpRQj0hUEFM9OFkPc4nUJHzTvScA/Ik3P0NdV4I3v20aPxogPelYv6PW5Ht/nHtN96tLhVXMfHn3gLeuL5a09Twmlv6TAdOlOsSFEhJmI6S3VP3CS/V1kfrCxWrk+W7NYd+Td675NRTC/Yn6tpXTh2Drm4qhU1GWzAuiaIwLyivV+k62dlFhVnTjwpJg6cHgCHVUYioUJ3tKsPlQHX3Sk3R7Hfw7ZxMKbUVAobAgSFJZqp9Vhc3ZyDQlSo5cb1es1xMyFOLULdXAbR2KCjUHZAXj3ODdI4/MZF448tc6O8f5PBH30c6tKLULxFh5cH9LDMr91YHAseffYepIQ+FiM3INjGM0nahWaNYyKp4pWNDqhVIhxx+vStRT9PirjK97Cq9fzbx6vuULqwtThU1WFZNLIMXKMGTmVKm90l0Z7PbgfMdrmy2Pd2e8WwJBQCt0QybnSNl2lBzQleU2a88tpdm2aI9GW9m/ZoY/HmD30cSnv/NNqgpf4KMMj23mlotMOJ/hyeAVYrjyNeR1ZPsppX944M8+/gTf+OApzw8bniYMzi6W4jpNy5ROvKICDoeOlKoRi9tDLSebT9OI8b5ang8yThPiFWR4+tI4M0uTqTZ8MhybT52kE1ovpBO9KFU9lqU7Sa9erQyKdWNvlSUKQ0FDXCDYgPrG4PySdWHKiToHwsMJ9YaoTTvJZCzg7LUdz3drntycIfORWClFl82x3b+QYZoirz3Yc92bc1SdsJ8HvExYyesAMVtp6+FY6m+bpBwXcAiumm5O/ELM9gU23TeS5vqtQHkxcPjExMc/9ZQX2zX7t85N1TrYxqGn59qOk45aM32XmeZEnnVJgdXeCOTSmQOlnqrMa+sTuIgjNq+v6rJZxwlXZ8d5GJX9R4Ty0ZGPvnbJi92aLz95wHzTs36452/7cV/i2X7Dk+cX1Mu1NaUNyrPthu1uoL8xRzMdhC4WUqg8f7ZGxrDIO5z+uzgcnoayPlsulJlxtXKWHmAh2yYQcj2mJIvxoZoyObAo3sexgsRlg5FtonaVzas7dk83djtOesot67ylFWFJPTfNofH1wtgZuZcs1C4Zz1J06acWSnOCOepaOUE9jHlxJABzZttzeXmcGGQVd5a6SO2Dp6VNRd+uIxBiQGq9
7eGXQneTSS9WZOmI27jcJ3PyrE+djC50OWX6a5vj40Nh98lMvIlLhenmbWG4rEbYjULYW6pE1x3zxx4YB+pmXIIwbVIqtaW/ZHGezB6Y9leoRtQRjS7I29SmizUmn47NkNvcaH3j0kGY7tt1N06Y3b+j56RO7djOPfvc8Revz1y/TJBWjODBZ1nZsctamc+Di1a6gn+uy1zpUyH4xjCXyH43UJ4PaFT6vTsnTcDz/kzdJWrs3OkV4JjWLr3RTbJNSw7eqLnMAXIgXUY0dJR7meT7UJPMkKowBaoHBc12WCES7uyrK5IXQlWQgW5b6S8j+pnqPEH3B0Q53xyoNXBVLahV53WWDmLvIqUaTZKhmwm7287PKd+xDGKq4wq7JxvijfcXHJR6VtDVjOl2tQjqtg0MqTLPkS4WC2YzR+028T3Sg8nSCbIODAdXxz3lJqow5UQKlWe7NZP3d9z0xzLwVzZbhpR5twq1REKozF2ijNGWrBzPz6bXCZfXOZ1SytGx+puOAyXGeUihoqkuitd5jsye0+xigeBNQiNQhYerPT/45BVyjsRULEgr5pWKQIkJngwIMD70TdgNauvm3aL42pnhRuHmU3VxXD73xddtsWRrltpf2wZ9fn7gmz71Jt/7J77ZNWdMPybt7eZuVhOfuf+M73v8ER6d7dwJ8qfim7KcKDq3kunDHI3zBcfcbPaNyMt3jR9gVQlB9dbkFFVDk9RL8T3axUvrAZY2C6ejOU2LY3YySVxHKWRd8v+yySyQU5vwUUG9Ke8JyRCwxZCU0Bc+fu+KH//p7+MvPH+DN1/cZ3t9zzaovVV0lbWSgO/6+Of50+98kqv7yqz4JmHPyBab+n0Tcg10oRr/KqnzhBpPygQSy2DzRiZ3NLKhIXFmqRA5rbpoD+aUJxWy8ZZe+3Mmyjndj2xf9Dx+8hHe+Mlf4fNXKzR3RjINRyfdDKxrPzm6GkIlBiuekKDmODm/oN3XkKxCpHa4fIG6krYu83hZ9CEQ95BW4g63ISSbrySm3Yrnf3XFfK8iHznw837C9wLw2avX2I49IZZFUPTB+Y53n95bWrxohDDCfuwJQYnns+ni7G2dLnIBjvbcki/I7nCAlfi7ZTFla0NJw242J8Ej2+AcmeZEtSDB5qE3dd5k6hiJ20C+sGArnc+UOSAv0sIpkcPJkguyIBcNEQ4HYfPlRNodHd1uq/TbSpjdgRI7d7AWKzLXRY/LHG43svW49haHStUcDfxvjX9YxZZPPa4vibJE6Mcb2AKdRtIzrah0PRKnFeUQ6W6OjY4X+ZDT7/dzLSuYHlbOvpDoL5Xrzyj3P2tr9+aNYFVWGcLra9uch0DcG+olQznajoX47evCS5m1gvbd8T6383gJEQMIB9fLS8e1LNGKMjQZ7yVtj6jA0iS2HoPKtkamEnl1s+XV1294d3fB29sVuq7obA7pfGZvLT30V4HNOyawmw5l4dS0YqAYKtt54IvPHnJ4vnLWuppC+41trCUbQlN3yXhzbQOWNrf8frszrEGZ9x1XouRnK9J1IO7M+SwrpSTjw5o4ph3LmgWHRTOu8SEVC7Bqb6hR2lrXhNo7YT8Fui1sL3tr8DvbvjUdjEgdQiWmwrzvbD8S+67G6ylDZN6IPbK2rhviB7Q2MnkFWgLjwdDPclGMTxiBYsHW6nykDAOMepQAE5efqMJqM3OYOkO3haWqnJPvQ60op7jeVu2TBVtjgWo29BP3L9kkI/x/4fIRu7Hj3Sf30DkgUUmhMqSjZpAEJQ2FrGKaXqRbNkGTwOh2TB1u9Llre/DXZjh9fR0oha4rXB5WTuTEumJnq8LKMbLqZ8iOdkRzBq7GFburlTkaSdEsUL1CB2C2Esy4t6qpJr8fJoclJ5AWSepRgffBX3ZYdoLrsSNMQn/FQtxbfyVyePGAv/RtpvEkxQiYIdsGWHrl6mrND8irvHq+5cV+5d60TZCGzsgkrmdkpxtHqIdEiWopiTksatg1HqHMMItt
PPN7Ek/HKr1ii0nVokEVcSfgJDpuabtTw44v3hOtFwkBzYUwmQNVciC8Myx6SxoN6tXOdsBGODSNIjPKxdOmZQ4MKfOnnnyaL3zpVQD6CeLO1LwNShe2757x/PUNr51tueKVYwWfHh0ZcS2kMAp521FfaRGxvbc5T3kdFjK7OcJ+D33BtnYjVqkppjWibrTk+FpL71x8qTA8HXnxrRv2rwn7j1pPpjcfP+Ti4Y7r8YLYWVqpCbjOviHQVVKfWa1muljoU0HE0tFlZRtrixibk9XEZWuyiLPJSSyVgynA0EG0SrdyImIqWZfoOn/Lnu/85Js87Pf8pecf5d2rc/ZP14S96cjEAgTl6Ytz6hyIN5G4N+QFFW4OHffv7diO1i8tnujCLM/EHQoVWeahichat/o6CGE0oqYGQ0u0C1ZB20jfcOzz2Cqc3FlJ+4zGzlKmN5H+hRByJLyujIcEolbWfxNvVTVpcmMoJ+c7Qzmr7HqxVK/tlazfFoZLTxc3X2Ey8mjXDHsKfoyTNbik7F6qDX955GLaNUREC6Rwcpyw6Cg1x1MdhZKWNgM0V1Nh76pv2B4UzHpMg3i1LThRf46cf96q5rafgPU7wv5128TPv2Rl992NOxZZCU8PhN1odiKcVA26UyZNHkQVmVtw5uXqU0tzhlu25ehAZfprfy5yDGxD8XRxctHk0e1I5lidVtyh8kzCnCNPdmfc7Ac+9ei5bYoSaT3RgtvPeIC4Z5kD5sQVT13ZVJ5y5EnZLDZEZmF4ahFNtzV7mM8iuin0D0amXUftzemAQDrUxfFo3xMnYb5JjFMg3fj+NsPw3NbKfmoSDyxSBkaYF3fgODoV/3/2/ivWti3N78N+3xhjhhV2PPnmunWruqurM3MQA2gakEhZgC0RAmxAsg3yTbBh2BblZxsg4BfLj4QAg4YNiJJlWbIkmDQINptNiWR3s4vdXVVdVbduPnHnvdIMI/jhG2OufatDFQGhYBt3Agcn7b32WnOO8I3/9w95GPuZFmrjYYVcFVWoUN8GQsXeAkEyure2bLYNVZ1vZBa5FEQp5UIqNqrkS4m9kaTsD2x3DyCytcSdpboxakLc6/MKDdz4IwCWN2kSYRSaTBJwVWD0FmvVpkhSaf/nL81RQuUqa3mykq0KjCLsJrIda666GZe3C4ZtdpcV3avSaLjuZvTeMW5qtaGozeQybm1iNCkrcCUXvQbT6dyJzihKFXUOJEm/u6Pze1w/dgTK2khtgyqxXF4AsvlYSInBZLOwWtENqSI+Gh4/vuZ6Pae7bDVnqPSOBWTmCbOA+bTBbkvxtZ+spbo2AVLu7fYPI4jBeNi8Fbj/pUtu1jNWT+fqG1SgZQE+XdCelwJMORNJIJ2OfOW1Mz65PAGgqfw0AbSazwVchi5LG8r2+tBjEJVWhv0JLDp9uNYZkgSNJohaVUuIRHFKCxljnoghL2zq5SSlWCqquhAn/6dUFsEcfgrAOKrvTOFwjF57/N4hNhEWgeHQ5Y2SSaIvWW6rYchMyIQWfQm5rfjmJ09ovz1jnu/H7FWi2kTsmAiVKtu2rwm/+q13kTZw8HTfLnGdTqrJv6kWDc3sDZuhQjl0+4UhOhhnCtmXYtfkKIjJfX4s8lwmhWM5tevirbwmM2jopx0i1z8xZzgQRbre2vD4eMXZasG8HllZLXwQUVTJ6ef3Cy30nYtT8dQ6j5HE1iTGOmHKhtHZKROzGOyZ3JYzA1M716HKEImKsbutLq6TomSMdPehf6zqmG8+f8LYOdLOUZ90/MxPfsp1N+PVzZLQLwmzSFN75RRaclRJNhS9rLnqLLPjjl1ntXDRg98kcdYsth9YYGJG3QZFdu2gyjDpA1jBbHqStdi+BfLXdEGFt9mhXMaMFsQqn9L1YKTSfTXMm9/bsts0ahfi9rxAyIcCdDyaXg0mbS805za3/JgUR4WnVeZ0+Xx0uj5N/LOghZWMUQ8ro5kOKBIiScI01/RNJJ2LImpLUHzaTJi6
6mJ0jCCop9QwKh9vGNSXLSUwFrPtmD8TwszhNuUzapFghqA+VdnMs6i2qnVi+0gYjpUGUWKHZq+ExQtPsmrE6652eq+nKBYtCiWovUm6E9dytziSlDA5C890XuM2nLbvpntQlvwx6PjdCnanc0Q5qoqcyk4je3RRy2vLVEDJhLymJIQknDY9Phi++9kjnbd5jTLeEjLSEmsm5dnn1I5WJo7metsQgyVe1lRrMyFdbpNd0pOmEAyNMGxqNW/N+0qxA0D26LAZ1Yg2NAY/F5pLoVpBvc5pE07vgZ8xOXVP2ZIZkGHKbM2Po9L/Hw4M9W3Mn81OqlU7JKoLhz+MGYXWcTFsM6XlDoVFyh6Y8yjHmWE4BErAeTEn9qo4LbJ+2wvVtcnZq1o8ua22K2MFqY6IN1OkTnJ7Y9aUa+pFO3B5vcCWmq7S+1fuXdlbolXmhRZvuo/JqEX+GCy3XUNK2cpmNEhncBv15/LzxNVmpnEz+TOlmA/7o3piYdPkR6fpD3cQU3RNm1rQPkwUij/o+rGTyHfbhmFw2J2osVqEblSHZpFEjFo9VrdZDm0Si3rgXruhHx2daTNhTtEQDBinG5lfJNoz3ex8zedIqqUijlmimkxi+OmtthRd4Pz8gDRYWARCtNQ3QpwlzEnP/eM1Z/Y+1Y3BL2R/wrmp+KC+zzuPLvj04pib2zlVjgVB0B2xoDMDezg6F1nGJqwbGTszkQa1h2+0gChFTSEOhzuz6wfQpP2f5Xf9/0R2zR4zxKiZXSmRxjE/GwHZn2RlEOJVg2QjumLLAECVEZQiw53eE/sCaCPEoaUEUla3MLsIVCslQ45LRzKGVCdt02ycRlOUIuKOLH1/Kson0WAxc08ajJ6mHJNrr/H6b2Te2ESlKMBEOV3FNMlk714TwVBg+8DRXgeO3h8YDypeccDznxH+5Nsf8r3rB0gTlNAcZL8gZjPNNBq6TU2/qzA20bR6n8dRDUjV9kDHr8cSo9DslGRfrTVaoyxkpfhTEnkCp+iUyQ7HyvqG5WcJt62InxzhH0Tcox1/5ed+lfvVig92D3hmjohJeHo0A5eoXaDLAaKFNCtJcwZ9neh3FfWFzdEs+8vkAtSESKjy/Zb8/gqnrHAiYiK2DjOGyaPM9oEkmqFn+zi1QKeF3kdM7xkX4G4VfWqu1Jupf2jY3sxAtBUqK7M/tPwAUCuZDFv+fPgJzK6CmiUaqDZxf38LUjZGorP672OYWnig94Y+tyFLAHCMEwIyXXcLKUSLG6dS81hb/MIRazWABGhzMUJK0PdMgbz5A3UPEmGWaNnz0UID42FNrC1V2m94fiZ0p4JfJA4+UgL19U/A0XfV9X13z6oHjwhmaDBj1PxFZzB9wG565UcO435NuUMDEK/3RMZ8Y4ecw1nI7zkLTweKIFFRcd+Cy95bBYkfDxR9qm4z2mbYI8vpzgH4zjUGy1tH19xbbPn+pw+1wN7kwOAhH+IGYXaWaK+DotcJJGTOZq1F97itlTyd1wYzCPOXOhbqlR4SwkzwC4M97kitIVZubwg7taHSfi7uEtVK51O1geL2PX+pc79eV6zfMBNSU9b7yYqkFFDTeqUtvHEuRGepVwa70WDdauMBl3P40iSasp0oYlTGpNFC1fTFN0yRw3Eu+EWEsM/mLPvk3cuMUF/niKe1FqYmwNEHehC+2VR0DxLVJuXAdS2uktExeHq41c/eeNV75cNuuOsDJnqQCI3u3YihWlvliwVdHyob6IaKbt0g1xUuh9jr3ipUUdhtGt1XO50nMWortMRW2ToS3d7vbprTUddUjVeTDE787r3h97p+/D5QQH85Y3krOShVJgizRLlgFCIsaeyN9fzGJ2/iO5cVA2ZfvVoIvSXdNNMmb3dM4aOT50YelKr60CLHfH+GjeosvcitueFIC572PBGdJZzPuf5aYPbWiu3TJdWtToBxCcdfukIk8cHz+/zC25/yfHPIq89ajZlJkNqgJmrBKGzYMg0Uezjy6PSW
7VCxtZF+VpNchdsZVWl4g8nKBTMqbGuS9mWjM+BQC/pgphYAsDfSzItcyk7jAgpN5k0SY5gCUG32exH09WtDnEfNi3pV7QsRyqkuMTvo6K73GXl3CxQz6GmmOwxsf2rUIMfKUa3t1G4qkS3uxvKnf/7bPNsc8cH1m/p5vb5/k4NmUz45lpBKK4nlsmNrEt7VgBqsmlEYU16U6qRAXJX9fRL4tgRp5rcdtfWg/18W/fJZhHodWX7vWtsTwNH3LbubJbs3Kg6bjhdWOTlxsGr5kw/yyQJjJuHaRJTI0GfjT2+o1ybnYOViO+q/252eWJurgVgZJOwNMzVyJipSkVzOt8reOhbGhcPPlEfQvdfxr/zUN3l3dsb5uOSXL77CNz58k9RZqBLu1hIWkZuPj7Cj5Hw3sDtVEPbHBuMt7sGG7WuW9KKiuS4+SULIp7cUhNhoIWC8TK3i2DpCvtfJGtzNTu+vD9nMNRtxBl20ojX7hTslGD1hsZgUOmQyazJQXxjs19cA9JcHU8v1c074MU1IRFkoh+PIcCRc5zgMgPkLx/KpFlEuj+HwA8iapERsCtFbcnGDUgKGMY+jUizG/cElG0xiDGlW4w8aYm0IrVGVaFN4MzBfVNhzAe9Jo0fEQIrZ7FCRC8rmVrhoQiZFZ/Q5t+pLy3L5qaK2N19J1DeK3q7e0cNfvdL7ORzXyj/bFYVanChYEhOJ/Wm8rCmKrAXIP7MgZ+JDPpjl341WQ+lOGyS0ui6ogXIeM5Lo70WaCz1M7RF8mZ59IZEvmoHGeX776RNFyHIHonpnzW5d487q/N4h1sJw4DRzbqymc+W41EMOnaIUzYWdkL1YCfNXkWoVMMEw5sIsfrIg1lpUVZu9StoOuT006MCL2RjSeGiuI/U60lyN+NZSYmbcRtWIyUIsLboqqhLXFzWhtprCXNHT0Orh2beWEsfiW4ufaaD6aLTNbgcN3zadKVn1sNBFafKwyoX2eCCEZZjakFNRUdbx/Mg1R1CjmKqVFqXVKtDdrxgW+kOqG8F1MTvCy1QkJQcxCW8dXrHpa4a8boRW+WpF5FG4teNBKYpUpRrHhM3cXh+NmgBHITUR21ncWmjPtSjsT4Ruq2us292hlCTtVhVn9mRTpqRktLn4601zOCd23OXH/gHXj7+AyiRB2+WFz6OJ0QZta2WF13CU4xFM4uV6if3eHOM0lHTvBIsuwJ0S/1Qdp2IGyBMFXfglH6YKBGuf7Gj/6QIzwu4BbH6mI42G+nmF2wr9sdDdjzRXBn7rgP4g4Tw0F0ogH5eaMP7m8TUPFhu+c/5Qq9yhDICEnQXCkEg7JRoXwmFs4ORIFQOnsy2fcaQqg8HoJh9LNS6QbM4WMjhQK/zaqLW/N1hrqBK6iNosMbZ64pXKQkiYbiAZbSOkyoLNeV130+NLAWF1YlZHHfeO11wfzNldHmhRN6JOwQLe543kTs6f8SADOmAbNHS4t7QvXSauZk6MVXg81GqR8P2b+yzrnvZsb8Bm8+towZlbdAeJVEecjRzPdly7wBXgYw1iM1k/xwK4pGKruZI2C3pmAoxzSLUhZXPFqQ2UmOTWoGGxpMT2nUO2DyzDsRZ13zl/yJdOLghD2Z3NZANQDAPt7s7rV4mxjcpXGJT8aTtdTOVAkCT4mCH4PmK2I8yraSNIhZBZO6RTZCDkNkV0mew5M9y+F7D3e776+IzzYcG3bx7xwQeP9MBxNGCWnpgNP0v0g9vtEYESwFwW0e3ZAhm0wHJbMnJRihnBdIF45JS31eV/K3weA36u83K8t8hqMS04Qmv05+fct5gL5M+hDQm6B4Hm0uK2WliGCrZPoD9bQBPgxGPO6gllAKagb9BnGZ0WlVVGqmKVpgBs02t2ZbFMkFiiJvYcKmKcNvxyKFEVayJVTpPi74bs3jXGdJZUO8bjluHITdzGYv4aKpmK36qukK7XaCWboZiUSM7q+G8gDVpXGQraKZlblHtexjDOheRQzt5rHruxzFdw
/XWP6Q0HH6mxpOsi1cpjO489u9GfVVd64h+zqq+u9jfWGN1YihmoMRADk9LQ6DieeCPGKBrZaCwOwqQGTE6LO5IKSvwiEeZJC4liKJv5LZJUNZaSMK9G/ti9j/ja0Qt+/fxNXpwf0c4GRVKpiS6b9KLrku2TIrwLOyGN41I3Ubs104GarY7vw0+8KrP7SKxzCPFG2N7zOh6SmcaCTKrTRLUO2FomZFPbXYpUjQvH7NkGfKQ+aoGWYZH3IZfVflXKwpx8u12eh3VkXBrGpeBbYX5usH1knAv1tVpRjIf7vbAcYPVQpWNcEarMuRoTtleOX2m57cl+MLX7grbxTAB2Srcwo36u7sROB9FqG+lHi2/JLdkMHJRMwgQpCR9c3eNkvmPjDqZg7FI8TUkAUceBX0TGA0N7qaCDJOWPhWAwV5UqN9e6hknUe0hQSyG70TnuNvt9pFAPulWDWC14Jeen2l1uV4cEWAUiylwuiOoPuX44S+qL64vri+uL64vri+uL64vri+tz1489Cy/d1JPRpesTvhcNWTRZ4YXFHg6EWaK5UHO1m9V8eqPVJvfLC7cp86HGk0jzSk+rKgVPmaOj0Hb2r1Q4OAkHiw7+vGqf/9TDpwBsQs2zN454dnHE+PGMh187IyXh/OoA83GLRBiOtS8dHXSfHfDtizn/6s//c/rg+Pj5PSoDSXL4rA2YWWDcWUJlkIUQ1wnfwv22I0TD09tDDVN2gcHuTwMae6K93DH7xtyVysdK1KIlyr5qziTjlDPKRAQjkWStnhydBsBOp+RRpvDP5IO2YJwlNMJsNqhBmQtMdgK5PRcWMFw3VJ1mT+kLgO3S9Gx9EuxlxfITmfKoZuee5rJnOGn0BCraWvns5QkpCMdbqLZpj0Zkrkes9FTtDwLNSUfjPI31HLYd1kTOwgHBC3Fj8WQe2VwJ/T5WjLl1CJms2CT8zKmpY0ikOhEmfpQq6nwrDMcVt+/coztVwuXuywOmDnzlYIWRpCaukhBv1MMq+9yU+BCNlciQQQSCtg3cVtGnapfNBRu9r8VCwgyeVJXE+9y+rBXWNlk+X+JtlAOmJqfVreBDy3dWr02qntPXr/nS8SU/c/iMMVk+2Z3wD3c/qUT/OuI7JQv2QVHB0n61vWBOdyxmPZeLQ9KH9fRMTID+xCKpZlgahgO19oi1nuLGpbYTJUKqDO66R0JAek9qK/zMTAiEorUytU0BcFZDdE1GBGtR1NAJzRX4B5Hj0w23q5me9tmjcXf9XYrX2LhQ5OHgI6G9yuidVZQztDouXCeQtBUxLhQdXbQO6YPSBBqDmVXYO0iTwOcy4aZ5WFSutSPMa8alBogXY8KCvNuUsjeaQOXYx7/s0Z14NKd/Y2BxsmPzckFzZrXLloT+xOk9lgNsr+2+/kToHiT8YaB94WjPYPVOorqxLJ7KJLKwXdyjgvkzyKiE8DSM+j7aHPZYadwLxkwc5zir1IPJiH5/CUk2QunHpip//oXOc9sb3K6sbTAeRsJBAK8tGoXSo86rrPpNObJIgnBQ9dz6lof1ir/6zq/wfxz+PGNQ1Zl+gIy0DHuUNUZFGc3MKnWgRi0SMkeo2LCEFnb3LIvnIyZEzKhh7W4nLD9UfqbtlUPntlHbnpkgL2MktA6Zo+HGqErO7QLVdafct6OGceGobwPVWhgOjfLCdsJokopyCv8JiHXENIG0sxN/tju2k3ptOHLs7hlS7XE3Tn3jMhxid1nlliAM2ZIjKLotXoUQoUbXgEZ9usyoa6/bqfWD3emYSFZIB2pMOX/lJwWuu+1zCO8hqzc0/Nz1+Vena4nbaZ7qohnovJueTzEdLW3pQisJM3QtJmq+6QhJlJjf9xWxjaQc1l7fGtwGmquIHRLDgVpGmKD8s5LJ50fdO4at8vxKtIvOwzi1NLnDa9wHzf9wfOlHLqBExAK/BjxNKf1lETkF/jbwDvAR8FdSSld/4IsknQx2K5Njs59lAlxOuw5O76rt8iSMhrBz
xHc60mBpnjtKaKt6k2jmzfLRmq0/pLnWDLHkdIDssXimAWpGuDo74Be+8jHORH7n+iHPnp1Cgtdfv2Qx7+nHGf3oeO3wlveOz/n1+k3653NNiG+0Pz17bU2Mhl959i7vnlxwcThnHFt9HiYRspkXVSI2CXOrUGyyUJnA9a7lsO15cX3I0DltBRUX5KFIxbWQik5wrpBes5rMp0m5ND38wUOwmEpha831CfuiKf/b5xQzOQxV8gIpUX1SRBLrsyVHvRZHyerkjK3VVtpGVRmgE0Az4DK0ahOzV0JznSYPITPq+0umnfhMizdXPD5c8f7HjxiOwS/0/rgtE8dtMnhrI4tZz3G7ow8OI0nzpqwWQNNjdomq8aQojJVT4uOonDqV0WazyqHCdl5bByIwGjUKtUJ3atjdN7hN4vATz7A0jAcV9t2ef/O1X+U/fPZHwKbsRC5g7J53l/Frhc1NzkTUe6LO6Ckb12UPpD7D7FnNs1c7QfGtKsXSJG/PhVVpifiZ0FypYm33TuD1t8/5+ulz/ujBhwQMv3z1Vbp8z2i0tWouKjWsXWXIXfQ9eKcbfPhgyeWDRrOyMtfqLvzuNp7wmiPMyPwrg+lVPh+dFpD9cYVvLdVq1GI1OyJrW6Bw4WRyiC4+V741sBypP26wPZmUG+lODXjh5tOjLO0HRn7AVysXZFXKfKNEfNhzfWj1c0YBk3BXjvYiO9ynTDytlLQbGsEfNDgzTuPF9vr5pmKisogxyDAqB8jayaU8VY44q3SuOB3DJuU5kBImKNdR2yvZIsQYsFaDhAGMEGYV9tqxdQ0I3A1JLy7fv8vPLUHzyrL8JNHdF8JB4PjXLbZLDAcqYfezilgb7K6Gx0vl0+wi7We3SKd9j7jIa1nj1BMnc9wkRGLjkMpqmn1WTqaUlGBf1hopjuu6Mfq5tqnVAkaIbcTMPfNFz/rlUr8l838mLUzmGCUn+GT4ZHPK3//0Kzw+XBGiKs3ee3zGGC0fNfcYVhUyGPzSKHG60xa0HUUdGmzKvntkIYHyXe2oiRSxNrDVwsgO2urp7umBYnYRMYP68onPxUhKmF4/c6zNpD4rbeDQLnJxqwVMqFVtWt9GLTTmlt07YF3El3ZaPuRbF0hb9bWrNronRCcMS50v1Va/3u5kWp+T5HvsZF9AGLKqN2e/JQ25l1aLJLdjyvC0ff58pXtl1GMuOktoDdVZj+k963eWRCd0JzIZOuvBMWFNFsnsLOvnSzZLT92Ok1jE9OR8P11zUt7zYxupjjvC87kWTwb83Gpx1Tlmz5xyvXbKzSp5fRoinXA75ZZW61xTCLmPhzqte60rJm/A0rL7ASrK1I7+EVp4/yII1P8M+DaQu678deDvpZT+hoj89fz3f/cPegGJENuI29gsX9xvjpJPo7hEGAzNALtHaj5oZ57D5Y7b9Yxk3F4pVcZbZ/Dekk4HwmftnTux/5rJJKycPC4rfvtX3gOgvhXmGdF6tn1AmgcWG+H6k2Ou5RhzPHCw3NEdeNrPGqKFQQz3llu+dvKCjW/4Jx+9oxb6s/1CFkcDdcItRqJzjEs9/fh54nI3pxsqVhuVYcTRYnqzT7XOXlUT36t81uzkrPEVaQo3TXWl6JMzex5CGQBGIxg+R4rLJFdpauVyuD0yZbx6E719pPXw7ug+6VgmhMVuDX6WuSCFfBiLOaC+78L1UMd3jUZJTgjzGj8zE+EyBMMfu/cRl9sZt2f3ppMDZFntqMUbgNSBWeVZVlq1XXczFs3AZqdRMSlbwkSnLtshe6BMTtX5vsVGkZ8wM0i0SCb7F/O4aGE40J99/L6n2ngkVhy9b1mFJeZnI+uhQUyiaj1DFD0IJAUOVDGZnYTrNHm1lCBLVYdlMmOxVQiFp6DPMLmsbqMsSvmZ1U4XQKfoTHlmoRFWX/HM7m/5q1/9Vd6oL+hSzX/68hc43y54+eIYRsEuPXRWRRTZn0wyL6q+0QWpWgvDMYzH2ccoF8mFHFoWJ4mKfIWG
/Mtgd2Z69rHKBWBIJGewm57U2L3TcZGC37UGsUKaV4wzwVTqgVStUialqodY+7yie+SxSz+dnou7cWhUwZZyAeXnkCpIWwdtoJ6Pau/gDf4oMPYWuxPGRbZwsDAudWwORy4T3rN3TGuQUKkXjzUa62JQ7mFRzGbeUpo3lKgbMyRcKqa92b/NMyFvxit/icohtDp+qvJcDbHNBYVN6hFW6yI/LLOpJxVuZyfyvD5L4epriXA8MvukYlzA+g0YDyJmyMaTS4d4l4t4qBqDW82wtQMfCfMaCZEwz6aZpaiJidCoijINERqL3Y2IM3vbA1Dy/NyqqWxGhfTEr6gAAdJljSw7nYOdJaGkX9MVHzddW5KBV5slP3n6kjePr3n/5X1EoK49L9dLtl2D31STU7bbqacfac8TBN1jbCYRJ1F00vSKpLfngRLxobY3ieYmMRya6cBX3Y7T8/9BkvFkehvBbVX4U61G3OVG0chFrd5XVT6QbIW20YObzNPeibyKENUQM7YRSZZxAd2J+paVsPndPcvy4S395ZF6OuV5ZQdIOY5sKCHGOS1AOxXaoUEg7ezn9lNzV5ABiipGoV4FhgPL7buziWCulgiyn+cjynF1ZkoikHkgbR1u2eM7mbymTDYbL8pl8VrwxWBJ9wZ2D1rMKLRWC0d8pHusbua2F9xKf5+dqzVOMiqQUKJ/NoOWvWhB/QD1WdshTrYlFLf9QpAmz+eUJs7mH3T9SAWUiLwB/CXgfwf8L/I//2vAn8t//lvAL/FDCigSSK8nQlUSfN6hW/IiYVxkOEqEw0AaLcuDjquXh0hnqPs7WTZktGQnjJ8sSCee7mH2HOly2yvI76owAeLDnuZ3tC3XPYjIax1hsMhlhdw6RRNc0pT6j1vWtsXmDcRtAYTPXp4QkvDfffI7/BPeoV81qubJ7yv1ZjJU9AZKlIkk8MEgAk3jWV/MkV7l7W6b85v6qEaEQU3tIir/NjuPuEzozKfBkkU1oUrOqttxn4ucGLPiKekamBeJ8j1U6KInAv2A7SK7vubT2xMO256hy0Wuzwo3k5CN/RwiUYiH06M2wniQ2H51QDaO5tyyu3bYA8twYIiNkrn7ruLvPf8q1iRFUHJie7XJ/jujkhUR3VAPm46ZHfHR8HDuueyy/XA+yZRJGYIQUzEp1QXT7XJ7t9mbQILGCwVvCcaSvG6Ubic0NzpO++OK/tDi53oC/0e3X2E7VLB2DL2Z4k5MLkRspyds25ERlxxvY5O2Fwb1uTJDdkjv7zjPZxQmNio3d1EXHPWmKYWYDv67C12sYPlozZdOL7kJM/7uJ3+aF9cH+E8WKhWe6feEUNG+coQ64ReJ+TMlhNY3aTqZkaA9h3FhCU881DGjHyB35msyQpipQaOf6Xy2jarM/Ew33O19y+xKSa/jSatKtFomF+qJSFqKcRFSbVSRQy6KZlqc215Pl5t3IphEvKip1lpgKzE6qe/UGCb7Aj9P+KUeOppPa+wuz9GC3GcD1GpbnN/3HjehMepMnovy6LKVSClaZxV2sheJWrg5S2pqbReMAeOENIDEXOxktY8pJprFDPeu6qf4QLUNw7Hj5K0rvnxywbdePWa3PiAuA3Ft6e4biMIwJJprwY5J25UHif6Jx849clnjtnD7NY3cWH6gflIasaPPu9pEmusRIrjLjdoSzBTxohROd+5ZMorQJfl8i8N4RbFlDHqYa6osFlEEqhTIxfw2zSJm5um6Sj/+oOu1GQW31aLGDGRDdG0Hvdwe8sbimq9/9Tm/9OwrrHcN68u5Ijw3lmqtTutuk9dgx/T+o8uedVE31GShuYTmRudGd7+ivlHTzVJwAczOsr3Bbc6jy2hcyll20+cfY37O+r3VSjfg3VtHExJXrYKmIPiEoH5zZmfwszyxTNLMTdQzMWY03gy6RgwHuo90J0or6DYNzTbviZIPuF2ixO/YXgnVNtsXTAi21Si0cXXH3NNnmwMfp2JcvM4RIiw/3imiKkJ1
1SEhUL15wPqJZluaPmKGSABFJ0dg7TDHA5urGcx0opug468gUFOnrIrE0eBe1DRXuv4XlE06Q/vSThY3SqJPtFf6TBRBzXFUfcotzYQEXZdMjvwp3R3bxT1yW2xAYtz/KiKrH3L9qAjU/wH4XwMHd/7tUUrpOUBK6bmIPPy9vlFE/hrw1wDq+QnNlRZQJP2ghQtTCh1Gw+xkx+a+xdxq/lU/OOqXuYeaYwgkG1NKALMQ6mthGCr8oRp7VWutUItD693NJhl47dE1/T1HTPBvvv2bPKmu+bB/wC+/fI9n58cM0vD2u69orOfZ7SHbDw8xo4ZzhlY5Fycna2IS/vNPfpr7JyvO0iFuU09Ihx+MqmZMJFVJnWqXunGstw2+q1QZkDkyZtRNvl5H3DZvknlQAxpFMMbiN6ebT0h7HgYw+dKIQPGXMWk/SEULqSl6ISXlPtxZwCUqt+ryasF5d8T9C+2NRycMxzIZRsoPFujlHud2ij/Uwq050z5+CWJWCF+RgXY28Op3HpBORo6vdZNMJhcYuaCwua0lwNwNHDgNk1z5hsoGzU+8wx+TCGG0OaSZPZLVJ0pGYbT7VtNi1jMGy9rPEJcRi6ze2J1a1m8I42HC3xtoj3rOuqWGY0Yw24zmBFXoFUWYGbXYjE3SVk1VgkAVEpeQspeShpiST7x3N+j9iR+m3rxOuL2aJf+KFaxfLPmtqzm/xRvYKwdRCCcj7VHPn3jjY4boeLk94JPuiSqb2sBwJFQrmZSKdoBYq+FftQbzWQMx+54BHn22dgTTe+WLzVTpGGptYwwHOs4B6hvojg2hrmiu9BSjHEU+Z3SrqKquBX7h8DOom5H6dobxieZW783ukaV5pVLa4SRqUOxW+WS2i5jtoJMjoiftCuRo4OG9FS9nR7Czk/LIXjuVqWeOmVgdf36ZCHViWAhmVNuDUCkXYziqcpG5d1+XwStfQtLnYP+pRW5kXzzdjb+549+0z6k007xMTU13ZFhvWy5mi/yaIJWa45oc8l0Ok9Gqt086GalnI3x3wWylyCR1pPm0pr7Na+8u5dxJnRPRGs20i3FvM5G5ZEU6X95r4XJNf76LWNh8otp59bxq1RLFnvT0uxnVJnPjBJqjjp9//SmfrY55ujvhrqFw8TXStV4PPI0LfHxxwvdfPODn3/qUxnn6yvLGO9cYEh9fnNBvazU63RnsVg9PbVTeocbzkFMryJ5GGudjdxG38fhlleectpXMmOgf6+vMX8Hn4j2iMOUuCsgYpyIkVobxSJG7+npE/EixiUnZR0qd3CPVytK3DqqIuIQY/dU0I3GzYFwqV6q50vWzOzXUq0hoBV8FRbFLYT5mP6z8d7uVaZyoI7kqN2OVcJKm1I/Svv+cdx55f0kQZoYwOKqbHhkD/YM5obVqeTKW4jEf6PNhwYyJ5tzi1y3x0YBb61ySbCEQp3Vunwk6P+jZ9IZxoc8hOoPrdYFIDuxGUyKqjc69/tiqom5QM1FFwdIdQ+U8rQbdC2w2J/58vE85TaXPr7M/YAr7e10/tIASkb8MvEop/bqI/Lkf+oo/cKWU/ibwNwGWp28mCeDWOUS0T5iZDmyTlaLiRUMiOzXwi09G+ssZ7bjf3Ke23J32n0rDhXFjJy8UM8jnzBP35DG47Rr+5be/zTzrHP/Tl7/AemiYuZG6GRmHFh8NP3v6kr/y2q/xn53+PN/67AnhpiIsAJv4l9/8Nu+1L/nf/vpfoqo9rvKMB2l6UBLUDfVg1rNrl3T3E/V1PoEFi2u8xtH0RuFqL5gQJ3MvdY9NE7k21tknxBlCY7LpYMLWe7+gskjvTzQ/EAWT750ZQh7wC10cs9GhBN0MjUkcHu643h5osWPKyVF5POEgkG7MdDpXifh+MSGBXRtmH1iaK2071CtdpLpjm31HEl++f8G3PjxAzquc1p6mE6O2KZmee4zCamip5pe8Mbvik90pB67nejdj23qic1PrM3oDSbBeJhfeuy3d0EDoNSl8
VnkqG9mYVru8OWOvOxW6h5FwPGLqwNfffMEfOvmEMVo+vLrH7ngkRSF2Vk9WtW4ARSZfbSBlm4BYyeRBpsVrmrg/U+GZMmJVWw1rzafmclNjk5/pvFJEpE7Tid4v8j1vAqGzxAcDJydr/qXXPsASWecexIPZmo+OPNJZ7NpqkYu2C5ubjJzlYnnzViAtArKztJd2ajeGVkn2sXGMy0RsI9Gaqc3lZ+AXSqTvj4X2Uud6rAw+eyCVcVPIpIgeLMLMMR5Y+pNE3FXIobbYxxmYIMxfJDZPhO2bgVQrxFCsK8DgT2bZZywPRKMn7deWN7xzdMlpvWUXKnah4oPre1z+zr3cStNKtL4BPw+kWs1eJRvqxBrCCM1l2I+lUsA2Fckr11BStgoxhtCoeETFIEk3W9FNqYwJE/X3sKgnRKO4nId5rX5N24oPLh9DHXFeD0CpzghBGc91RldIpM4Szmpmt0ooP339mu2v3ae5yoVryfUjUTgkfmHVFuV4oUaaoBs9WhQnsz+DlRgXSbq+UExTyetEZcG77HsFyUXEREKrXBp9bRgHLejfPTrn+dkRk30B+zlvxpQ5PRCicLjoOHt1yK9/+0vMTne09chRvWM1tNS1Z+ydchKzSMB2OgYQGBeytxnJB3E/F3anFjsYWiuKOFqZQp9dlzj4LDLOyrNTdMIvHOLNHpmT/ef3rVDf+NzKUyf+sKjwC4cd1Fg0tpaS22h3gtkZFT4VlDnHm4R5hCvLONcCubnRAmE4EG7fC7xxvOLlYgHsbVTKnjcJFpKuI+OyCDPUJNraOJnoJpvnXyNI2heJhReYBDaPK+RhNf0MIJt8ylRkY9XrrKxt/WnAdkY5ztktfRq307on0+DabWul8ZRQ6LhHotWSRsUB4Va7NbOLiNuGabzoH9jTAQr/qxxsc3GZROecIm9RRVch7g8D8Q7B/A+4fhQE6k8B/z0R+VeAFjgUkf8L8FJEnmT06Qnw6oe+UrpTLOWHoItoTp7PgbrdusHdGMbDxLIdSbcLQqMIjmYmZeQpc4WS0cnRXCXqFQwH2QNDPRY/x4EhAQZWzw/4j87+CFJF5KqiOVeItXviwSTm18Kz7z3g6bNTNj/V8EdPPuJsu+Dyo4d58UtcjguqWeAPv/Mx/+S33tP2o9W+MGi/1Ud1Wk1OvUUKkTl4IYwV9cHAGGvlx2SvDjNo6y5a7ZdPPHjR4qn4v6SIRkKUoiOh7R0reuoNaT9GzT7vSxL6utnQEsAUA7Gg7r0pCfeXG9q3RjbfeTShfcmm/aI9S8ilfM7EcApq3SZmZ/tTE+wLubJxhhbeml/x3bceMH62YDhiamXZDsTwueIvBWEz1ozJsg01b80u+efXb1A7j2Qu1j5LT8DveVSFuF2UX4XDg8CsUijtqpoTvJJ+xzoy3NMP1nxWYzz8jn3EO4tL/jvH3+T/3v+cLnKiWWdhGYmDYDvt/ycDobRt68Jb0XiiyUDO6Gbqet0gTMjPySoHqtzX4k2ji5RRdMLs7zUJfJt47d1z/uTDDwFY2p5KAh/u7vPJ+oQPXt7H7xzVfEQ2NkfWaLtEgqJNJV4jibb0Dj6w7B7p5ml3ev+ivaMGdZnLUinCGa1u5KqczDyLJPi5UN/Y3Kpk78Rs9q83PeNSONbqzeV2+r6aVVRC74mlexCVI7NyilSWHL2oyKL1QTd/G3XcmcT3Lh5gJHEy33Gzaxm8Vd6kucOPyKfVtAjYNhBax5jU6TnmIm39ep2RQkVG3dbAbe50FVVdq6q1kA0UJXNqCgolyj2exmDJ1oz13jgXILYWP4fXnlypyGVQkcPioGMTZ9nRXCb02oRcOOa/r39y4P6jWy4ulizXuh5299XB3W2F5lLH2DjXArFaw8l3BbeL2K1nXLgJfZjUJQAx0yMyuo+DFA2p8N0EmFWTGhOnIbtua6a2v0SItxUf9Q9ovuQ1DHZldH0JihDVNxlZ7BOhNqx2DV96cMHp21terZdcXy3ou4rf3DaM
Nw3VpWN2rePZ7TSSJVaqmpaYD4bZwNZ1Ooeam8KhgeHAUmUuzaQMtTr+Fi8DZowMx/W+SJnWZdkjklmNFhuD2wbCzDE8brOoQtusdhf3CE8sLTohjhn1C4KxmpuZFl5duYv34CIjpjNILvHZixMWt0J9m8dQpQItRZMSbqffq8KefNhqDanx6q2U18e7tJi7aDDo97pdpFonxgMtgJszjxkCw0lNd2xVpZr5m7FSgMR1iWplGO4Fqgu392DMnQBTWjVJ95V2PlJVnt3TY9yGKfRczbGFxTPJBXWiuQ3Y3d7MNNYmt+1yQkYWCRS+pnZASqteUaeUDXMj+32VbFgMZvK0+4OuH1pApZT+PeDf03Eifw74X6aU/kci8r8H/i3gb+Tf/7Mf+tNQnkGY7eE1XYgTMQrWQ6wjbC12EPzrPSGfAP1rA/6mor4uaeK6oGtLSJECMxrmLyOmzYTm3Ae+61qOoC6tVWTxPZ0M0ak7r6oIHGGmOWT1lcE+t/zS7U/zrZ8443bbZp6SEk1/4/x1fuP8db58dEF11OMvWupbM+WvJUnQW9adnv7dWjcXSSip1SbGlzPt+2fUqlpHqrW21AzaAhSX0Yo+YPswuVQX7oRbD+pUnTPxkjNYp2RMDCrFrqy29EqGVx9UnReiGmvuelJdQYw0taVbNXw43Gc273FFhTdtfAl3bSdeme2ZFIGTRX4U6pSoN4lqkwmVtwMlf0gzkxLfvH7Mm/euef+2wZzXU4+7PF/j9wVwGo060oaaPjp2oeaw3hERzuxSTVglF3SD0fs6KIlXW2faMjadFuJuowuZIWmqeDPSS2IwdVaGwPwTR32jG110c/6u/CTv/swZ/aZG1tpWdltDrDTsWPIpx2Z7hzDLG/RGW2Oabxe1B59bKcnkfx+itvUGJa2XXj2Q/12fvwweifN8n5mInz95/Iqvz59yHeb8yuV7fHB1j+tPj0mSML2h6oVYORYvDaFS7lJ9owtle6ny/jKnfKsn2vZMn1e9ylygCrzXhUzGqKomF5lywjJJX7OmmDIkkwO70v/32VhR33uaFDkakhtJ1qldRspOyNtEtQrZgBWWHyscPxztXaGVu6FzQUY9PSaXCG2ktXoPV58dsumPUMFD3ryCbmDtZcqnUX1uVe3vHPDyeK11w3VZ/ea2yvnQNliECYVKJLToBd24zBA+lxiA0XBoJZTrs50yLUv23LImVnBQ9xyebrh9tSTWiXuLrYZS1ypoKQhrsEKqAzQR+XLH2ye3XG1ntN9r6U8Sw/1Ae2/H0DvGiyarRvU+iVfkQYnpcNcFv5hQTkiLKRtu2vPxiirNR0zcb6ShBqn2nL3SutXYFc0qe//5Q8LG0fSQRFtQzVWivY758KVrzLCp+ZB73D9a83MPn/EPrr5C7C1V7WlOOvxK210mKDeve6CFsduAbXTtLaISt1UhSbWJ1FeDEsOdmVp45XLrwPZxhURDPcapOChZj2XzlpxH6bYet7P5IKD70OxFp/dGhPGwnl47OsFtA9Xa4XYqain7YTIRayPsLP0JIMLsTOfg9pFK+N2tJeT2pBnL2qzjulqFPG8N9a0qDM0dpJAgDLuKOhuOmkHnkoSEyxYGE/FezyHUFzuaV9ncMqOlteg4V4VfUvqQB5sPvofvO7prR3c/MX+h7cRqV2gpRVSj8TrOBZ4crPjw7YrhZkmsdcy5nf5/f4Jy4/KaqYa8qnw0Y9SiOBdPJiSk13GZrKrBC+fPddq6K8WS6cpGlsGEVZfNbEHSXdbS777MH/i/f/D1N4C/KCLfA/5i/vsX1xfXF9cX1xfXF9cX1xfX/99f/0JGmimlX0LVdqSULoC/8C/249QIbziSbFYWsaOedArZV6JC5uMikaIoe/8w0C4GDR1uzNSOS7kyrtZCmCX6e6rkkpzrZgdUEZAh7ZjKKSjx3pde8sH6dcwg+MPANoHZGepbNVmLFQwnkera0L40nI0PNQJglpn988SXjy745tlj/uvv
vUvVZj+huFcKmFGwa0N/7CDoe2zO1RcozFQendqIO3fYTqYoj0I+lSGq9D+ZieulgcB6ooV8Chz83u1KRKvrggSFpEiTPrQ9AhX0pCu7HuqKz6mAYoLOwJWjCy33r7S6H2eluZy9VXK7x/ZpOgUnyvtUfyNQHyvb5RM62RepSuASHz+9jzh9T7OzpO2HAPVGVYhTwKUAo6EfHWfdksN6x6fdktfnN9z2LVUV6CykbBMgvZlMPWGPrEhM2J3JHi9RTdqCQ0TDdWMS+iphOs2ZKqosjNA/8BzMBv7jT38BeqtxELn1Y/ocDix7OFxN6fTU7ed6L1yXn3HK7cncRkzWYHdRzflKxlnUU27p2ZfnT+bQ3CXvSoRvnL3GN85e4/L8gOplje2ExUYRlOFYv9j02lYNjdBlfpXbKipUr7VV1VwH7Mywe2gZjpISUTMkXozwSq5WrBK2CVlpKFPETHKKevWn2so1Y6I7zRESBcEIRfmWT6NWWwfjXAjLiFzUasgZyBYCcPCZZ/vAsX69tBjI3Ia9R1pBnZFEahKzeuSrJ2f8pjxhu2lxlScGw7iqsTcKidhOX2+SN5PRtGqPmrks2Y55TI0Lg60kE2aDIrzFRFO0FZekhInrc03la0Ztm0/PNCWdt6PX+Z8SoVEO0fsvHhDWFTIqMTomYTnrWbuDTAC/wydrIvVi4HDR8fGHD3DXDpknzHtrZibRf6p5ntoa1fttRkVkzKhqRpPHnBkK+omi3jGbW2Z6gH5dnpu5vSwJUs6yjE7zP2eLnkU7cNZZJOhalwRSnTh4sGb1aom9tUrq7hXBVRFJ8cKTieg7vprx8tM5z+6dwGCgjvzi65/xpL3h14/fYjPUxAT9WLFdN6Stoz63arZ8kD3PDJMibzgwgHJh3DZgt16fEWRpfODg46hj6k4UVayNIrDO7BXkZCTOGqorVV3IbphUjanSNSPlqC2TKRf1OtFv1DqEUUiN7m2zemTbGcIssZ3rM6hWiWqV6O4J8fUOkjAcWcYDbXsqdUWmlrHt9uteGZdTC72z2W8x5TijeCeTcr++mKAcoeHeTM2HM+pWWn0SFPmNja6jRelod5FYQXOdkf8eZpdxL4rK65Z43Qv7ruKj4VTtGyqQkQn5LNSH0MCw1L2y6pMS9McARhiXFulzKzWkPY/YJ+q1bk52SJlneLdbomuqTngVUqjK8g63+Pe5frxZeJJbM0PmufRxMoMzXknUbm0YjwOYRP20VrLs6x39qzl2d8fCIDGx7usblfqPR+rEW1R6dgcTcSxoll1pG/75B9/lj/25j+ijY2l7vrl6wkW34OXtAZubFnNTcfLOFdYkzs8OMReVQolWXY3Dkecnly+ISfjHn3yVcV2BUcNMSYLZgXEqI01lVCYmb5L+PlQXDj/Pnj4ubyi5kJExqIGdZO5Q2BdNei9lv1jfadfdDRPWHykaUpkHS7JWJcaV1f2nzh4vtgL3+QET5lEXn6gqpEJmL345xZ18yjMqG2vSlkd/LGzmQn2TqFeC22n6u58JoY0kk6A3tN9vGBc5NTzL6G2XsF3QtlGBVncV603Li+qAja+pTOCfPn+L2gW8V15PUfEV1/TJIC4rM+yQqNY2K+AiMRk2Y0VMgrOBOqEGmeX7fSmAYPZwy3un53zjwzdBEqHNHLwkU46ckhR1rI8LmRyyJzFD5iJEmFpJmlFVuDBqJCmFqBv3RUF5rqmx0+JW2tOhSez6mt2qoXpe47Y6Vtdf9lRHPW8/vFSvrdWc9Tif3uNwnC0UokxFZn+szsKLZ1E5PnnBkagFYAkMlaRk5rr2jDWT83d0SVWHXnlMm9cyfJ7Hi+3296m8bpH+TB5RxUIkWz1U64DtAsNxxe6+3rd6xSRYmFzM8/wxgxbRaRlonecXDz/hz5x8l1o8IRluwpzfWr/OP/z+e/gIq9xaqW6MukKTifCDLu5hpptSZ5UjI0nbCRCxTg0vxUfsHRVedKWIzwt0fn8yZs15WcEj
+sw1pXjyGBoPLONh1DbVhSM06ox9uZnTZN6eGZlI0tHpa42d4+LshPlTS2ih+bkrdl2F/3TB/HmWkXd77y0t5snzI+b3BpNiMI+/xJ01YIjqK0SZ9/sNWvmISqYOs8QbRysezVZs+5ru5lDnjOjz6XY1bjmSbu10GMFkvk7aF9oaMJ5IS4/sappPGsIs4Q8CL3cHPNsc8fLmgO5WA2cZheZcFbLAngszlsKf3IYrylcYDyzVKh9skn5dmDuICbf1iI+Eg3q6J5IgGpnWrGQlCwggLBu1Q2icin9yZmCoTPYo0veDEaptpF4JXS8qBBsNpsleWvcH7PutqppnWhyZEYaT/BqX9eR/5Hb6uUqLWULKBOr8bOz+uWCAMXufFUEB+udoZTq46u/5INyqIMJktaH4iICKW1JuY1cme6FNR3r6E2F2lqYWfyFnmxHskAOYMVin5s3++VxpOiHv44Peq/pax0G9TrSXXsfgEFT0UA5lKU1qZp1id9Sjec1SheW05OjcS2G/vs6baR25o2n4Pa8fe5hwiXiAMjEyVykBUYMAQ2toLgzteWL9NvidY/5ZRnb8fsMumyKooZYZdcMCrU5JIDvdQAH9u9cB//3tA/6N+78KwH9x9fN85/whw+ioKq9jvRfW25ZHxyv++Fc+4IOH93j58gh7vu9hf7i9z0HVMX99ze6jA0xSErzc5e0YNYukjlQv3JTW7bYGv4zYjcnxH+wXopRUiVUbPfFkxd30UE2O9Rgz96Sp9idYY/aS4+JnUSTJVvT/yxofgRwqS4g5kiEvOElwpzuGqmb8sNono+fCKbRKyqxu82SNeyQBdNINx4W4l59bKjJXtHjwQqoTzaWiiGTCYjFbm4jxsaAfgt85Vk1DPzoqF9h1FbPDkRiMnrZ2eULm+6yTNO3Rpy5SrfgccbkfHSkJtfO0lYoIypUEtm8F7EnPH339E96aXfEN3tSJ10QVKmzN5HZeTA6rdZbLZtVJCeE1xYGXHPg8xhzOy8QPUyQjjx9RwcB+EdCTkRYhe2POsIy09UiYG/y8wr/Rc3Ky5mtHV/hk2Yw1MRqq2tMfRCQI1UZJ4ONhorkUhkOhuQbfKmqovkxgw94gtWxEoVEOD3Vk0Q5ctCmPSyHOItSRKNA8dZNLuBZfGYXrSgVIXnz3RqzJKhocaz2MFNPd0Fq6E0u90vs6BYtnQjseNFJEuTN2Z/CzSEzCr1x+mddmt7w9O+dFf8Q6NNwMrYKyndu7CPj9ipkcpLJRVolxISxu0hRQWoq/WOn8tDGfbn1ERAgzhwlRx7pB1WG5iMbJ9LxLYa1zNRdVRv204sKzOOrY3VRIJrv32TepbHDSM617RCHtHNXKsHscaV9f87UHL/nVf/rVKaUhRiZEUxFkSCbHCgWbuXmKIBW+T9nwVSFMNmVkmqNqPqm8OKwWgX6myqmjejf5XhVOY7I652M/R97aECotqslzPWbxjxmU9yPRwmA4frwinGxZ38ywzxvcyvLB+48hwuy54+hGDyTKpVJen1/ukcVCJlbVsM7R6nbEeI1jSXb/XLRISGwfV7jOUd94/MxqkUkec1LGQR7/tT6/8Uj9UCSB7dRbiijquZc3duWypklp6LaCFzTw2wVCFGaLnmHeUK11bkYHw7E+r9hZZmeGaq0HFDskxrlQbcK+SChIb1AVbOH16YHc5AONEr6n+Wg0kLkY0pr8zKtb5eba/s5BPiWw6tpvO6/3LwnFd2t+HomVZTgSDj6JU0GXciSVGcA6iEnV94+O17x6TdiJ2nbYnVDfiPoGLnUPGK+FcV7rYXhb4dZBC9ycdKCIWj6MFqA6rytF3Sx37j+uOP+TA8LDxFn7YdePvYAaDtlLI8uGkVGLopJyK5UTF7THXTv1pPFpMrMrJMZSXbud/lt/IpP1fYHkSxZbvGNW+N989g6/dfGElISr75xSX6vabXMSsaMwfyZ0fsGzesH5lxb8q1/+bb5/cJ9fH7+E2Vjwwq98/C7W
Ru4fbHj6sCadNwqN50WgyP5jZ8HnwdrnB4uesv2DkepVpShUfXfxzjCyU3Kh6RMxRyf8ritG3Tgs2ibLmPJEWp3SpfPNnu550sLJ5RPE6EltPS2S466akA87QiiTX5hUDimrAFUVqehDgfHtTiXszU1QJGHMSjIHBUKRNhDaanq9WIwas9FlrLOa0O0lw86qQ/317ZzFvOfydq4oUL+PBIk1Sk68Q7ItSIrNcRrlNB2iwYcEOKzRCi4ZRSC6J4HDxyuOZh1/5PBjfqJ5xt+59zWub+eEUdGKYBzSG1UnzfJptFIzV5dbaCXmxfbgG4Mt2VUp+ydllU6qDVGYWiXIvrBKnmmDNYPe31JomsXIn3zyEQAvXjvgtdkNlQS+cfUGZ+sFq4sFjAZMwgbdeUOtPjHFPT8ZGA6FaqV/709gONS2R3Odc9TyKVLVhRZxkUU9cF7ldoYHDkba2Uh329A9CtQXWvD1pylv2IKxZMdy0VZGJp+SFLlLRpVi2sJLjIe6VDU3Orm6e4ZxAfVaK3szhuxvY7DrMau3hNAbQhK+d/GAb9y+SQpGWz95DtiNpdlqSwC0ndm9pcpZXWOEJOrlZVJidz9bT+TPMewS87O8FjnR1vgoGBHSmFtzcme+xagIYm4xaDwKikrB9G8lqkfqyMGso3tQEy9qYm8QE0lJbTFCLbgyFx3YhcfYgJ87/vJP/Rbvzs74fz7/WUVYFmkqOP1cCxgJimxMiEHedGMu4ovcvxDi76JNkNGJvA6QQO4c1vxciLPAsuqxkvA+r2mSUcqMCA3XrSJPudCmFGzkfSLHAslo2OxqHh6veefkit++eRu70162PfJ0o2E4LCrRlA9CqnyN2dA2WfU8E68oyfo1y6xutJDa+DttyrwnDYHm2kyImNsFLbYqM33mInVOOUIoZpQpiTqX201Pqiyxcbhd0MPAstL1LltEFO84zUlU5eiiHrlZzQmnnjCzqvDtFcmN9xOnj265Xp9iO1XT1VmkMV3lfqa8d9bska/EZHvgOlUiFtSGu8/XCsnr/TMpF7Q+Epa1qnBzhI0W0HZq9RITsTbs7inJuz/JiRQbJXyH5i51R1FeP1ieXxxxcrThbN5qO9ObSZk7HqZsUaIHtfpGRR0AqTJa1GcV6GSUnFV50emhzw4KTkzeTykRnXqgJZvtF3LGIftt6ve9fuwFlLpAkw34rPrc5PaP9sWZuCT9sZBaT/WqmloVUtxLyZNWFF6PuV1nO+iPE/FWpuLKjlpohdroojqqVcL4/gFuKxyc69doPpfJgZNQrYTZBsLZId9+8Jh3Fhd863THjhl2OeJHi38+59PDlgePbrhIYD6dTYNWizVdYKTXvm5zk0iSuG2hPreMR2p+F53BbXIuWCxyTN1gJnjYCuT+sRnCVCGb3Uiq1H9GzcHQyI+QmOJdSk5V0niJ4l4u2440b/e9X/LiacBeVtn6Xk8oaZFln40qCiXooq3IYFZWZPjfhaQ2+1u1ZVCllE4cdXnXe9TMR9ZfqmjOs7Q+o0ahyXlVLvvQZPmzaQNGEj5Y6saz3rSEwZLCXWSzvCem7wMyYpe9QXJBZ4FhsFCDjwZnIphEOhkJRtHR21dLVvWcfzD/Ctvjmp9/8JRfuv2K3otBIfvqxhDmKRdNuTBrclhtLhDMwHTyNR71m6nM/n26zC/JGWKFq1AWe33eGjZsQposCMrnu1eveau+4HfsE759+5hXmyVnnx1DFExnpvtTrYTxQKNa3FNVvI1LdWVOTt9ztUkTP6FA6IpGFE6PciOSt8Ry4rRaw9ftyHLWKyg6s/hT6FeVFh0rVSiFRtWRRXZMSpMKMVlITcRcuwk5cNsyfgyrtw3DccL0paWhi3K0eyTb9jFbPoCRxNGsY32+QDozbXi2M5oj1kN1y1QcMqi5qS7EqlLzbcTdWuKhttGi1YMQSaZNPjQWV9msQFNfmWJLgTOQW3exrTDdOKEWWEGGpDl6oPOWXBQF4dX5IXGwah8W
9PBlTJq4gZMKr06cHG14sFjzR04/5i8cfJPv9Y95cXOgsTZtZPFwQ2UDq/WM8bJWS497PXXr6dY18X3NHvS9SuOBvVlnOXSITJlu8Q6SZnwk3DHcjBVQRWZ2pDFenbVh2thDq/fWbkxulTLxVYv6TttG+R5XgfjpgufP5rx41OefkTh+vOLN42vm7yg06CTSBceLzSGrrmH19FDnU5vwCx3fZtD1Qr2htE1tvGV+5qcDcEpCmDuqlc+8xEhYVMQ7ZpqKMKJrfNxnPZox4bZabPmjGX6uHYVq5RWp8xEp/n6VcrzsIMRO5773huN2x9NwrIczk+gepIygCfXDLV+794p/vJoz3s5y7I2+Tqy0QCS/l+kAZsuBV8fR/jOgJrSDItOx+JbpckVoDH5hCZWqFs1Y0Z26qVgzHtqLcTqQSlD+XrJ3orzW0B8pj7Ha7NesYvlCEoKAv605u62pzzViqdhRjIdCc7UPAm6uoyp0N+XgocWd7TzjYT0pIMmHi3GZi8DR6OeL+2zA5AyyS9kKISK934MSP+T6sRdQdpcRCPb+HCDTyag/idTXmj3Un2pVZTyMB1rhV5tCANR8rOiLu7dkIhy5xZB7p+wfVomDEA/tsicO9d61NWoRVa3Vq8kvmE4hzUXit3/rbT5+4wTnAmbuaVvlIGyaCndecW4PSN5MpOXCBZKk5obp2k2nXOPBdMJ4qi089T7R/4tWEGemEMXJPbac+JO2MkhMZppmjLpA1BbJDq7aa68RH4n5xDsVEDbzqlLCGkOcV1OLMBlhPKxIbSBGS32tJ6RxLlMxEJtEmGeYv9qjbfr++Vy7p/gFBWMwgyXWOQstS6f7bcXJ21dcumPac5fjd3RhjpVKoX1rpmfYtCODt4yj5WjZsXm1wB4OhLV+7vJeJNzh09i84VkhGkV81JE3EyGjwediW+9/5OBox3bbYL4/yw7yid+wb/Kt2WN+8bXP8J3LBqhGk9NHIQZtQZdxVwz7CvoQ3R1jV/K9rDICZfXP0QrU+89dyNOhyZtUNpTV+5NfR7SQ+ztPv0ZtA88vjghnrWZF3ewLy5SJxrNXGlC8a4XhONGei/J9PJityr/HA3BrJfa7TudaKaImI7wIlMicsPcDq6rAYduxqAc+/ugBZu6RNmBfNtTXGSHI1gDAtHFKjqsoXl3DvcDslc0HEf2/6y9XjMtEdZv9etweXSY/03DUTgs4gJXEn3j4Ib/mPNe7lspqW+/6ds74fKZz3d7xKPKCHx2iZu46tmcBbxOzz6rsai1TKsI4F1wOPw/LRsdWdYdkbAWMQ0aTn3mGi3I7nkaLLtPZqf2eRJESTMJ93E4+YhLVZZ9aczdDNuwNLfiDxM/ef8bPH3zKz80+5r+8+Xl+6/o1YjTce/uKnzh9xTvzC172h/zWxRM28xpnIk8Ob9n5iotqzva0wgzqTu86HWfVLsvDQznU6aCLGYlTfx+bTRlFrTaionYI3Ks2fH3+lO+ePOR7swNdt9sIhyPtYmB3NUMuleMld1qoMc+LUKs4oTocGJNQXVvct2b4A+UhrjYtT80hX7v3ii44hmQ53y159uqYNNwR4OS0C0V+9BBQrfbdC51TkmM+9D34mWF332nhMOielUy2qMgokx3yuip6YEgWukOHW1igzvehnPrd3sk+6c8MtUyIkB0gDhCzfc/x8YbVpznf1WQT24PIYTPy4e0p8arBdvngORW6TDYDmiGpY05D6fMaN2axVOZnxsoAYWq9Tf6CKDdsWJqcEWoxPrt/Z+uEUCmSFhP5/hiGA5vbl4VnCfOtPstx4aa9TVv5QhrAm8T80Zr1q4WOK7ffL/0MTKYBiM/JAMdCf9TgdonZ2agWE4MW9+LS52xExqUeyP1cjTjV9sHsQQqaPE+FeLrIiPZ+3/n9rh9rAVViVchQZVFvaNiqEOpEOvCM0eV+st5kv9BsJ4Iw/9hNbY3iQ9SfKheoucp27V32/FihC1apwjMSgMBXH5zxW8sl
oVY/Gb9UrkLx1EgGxsNIaA2zFxpHskqHpKXH1pGmGnmw2HDd9rz89AR7pp4tfsbE4wm1knud0aRtt9X3gssoy8ITDhLmrNYTpwW/tLkYygt0jNNGoCnflnFpp8+kcPhsgiR9a/Yk6lKIOEWRzJCmZHh9QTAHGk0Rs6mY+IifG0w9UB/27OyM6tZpHhJ5EQ96v9zGYgsXJUjeoEVjmHJrAASy+3QSmbyikkkIglzWbGYNZjki3lJy63QC7Z8xuejy3pCimaJmquMOPzikCSRbTd9LbhMYf2cRnpkJNQutIMlid3Ei+YvoyV6MGpD625qDq1w4AnHn6M8b/hlvaDDvNrfTOoOfJ9xmz0nqTzO371YXa4050XZCyGZ/BSEd5zlGZdwvRCH71owLDYlVtMGoW3SlXBY/zy21/DxfvToi7Szu1tJs9g7ssYb+YVBHaG9wGy1i7c7gT9S92XZCf6z+O0lgOE1sXtfP1FwbDRpG75+fa+aaXzhwkU1fQ9gbbDoTaaxnFItdeMKqUiTrjpJGop4KbadRDSlLOCfPqI3BHwaGQz1AbB9VRJvJ/GdZmVczubUPRw4TFGUG6A8Nfh6VjC6JP7X8Ln/+4Nscmy3XcU6XKv759i3+dvWLhGAYnlhiENImry8JdfvOGxJJkDbQPTKYnf4MVTjphlqvDUlSPv3mMTi3+89ktVACPfCE1qlBZd6oTWUwrf7sgkyNC7B1nDbFaVx3js4oSufn6nc2HkTiIuBMYBtr/tarP80vf/AevnM8enzNv/3Of8Pc9PyHz/8oz28PWW9aYhRcFfigv0d/00IQ6i4jQXeK9mE0yGyvxLx7CJgUmcWI10MyemArCMU21oQkLKtelcdWRQbGqMhgeX/DZqtRWcmod9qwzJEkQTfqcQl147n39pqX8yPk4ybHpwg8b7k6b/hHT49VeDDqWFve6NjfPVZSeMnDTFbHjp9rLIgG0KqjdazNNG8kqn9Sf2Tpjq0esDdq3hhaQxr3RsQlImU4UEQ55MOQCUn9le4Sx8kdhXyAiplLZQdFmlWlmDjbLjiedVwd6UG7udR1oj/VovHmZk773KoJbtx/Nola+E1m1VZIM5mKJ3XpZ///TnTNH3Xv0USI4kunnZno7njWhUS92vtKjQuDn5nP7TcFPW+u9eCzeSMye6kEcz8Tqk2mHgzo4cgJvre0h57lW5e8MKcqArGaLdjfC8jpQOwtsrFUK33O1S001+p3lYwwHNW6tgeZkOZJQJUjm5IRbTFagdzh8XOL7SOxzuDFHZrXH3T92FV4ScCVdPc7mV8pU1xMHYgLIXiH2yic2D8MvPull7y8PcC/OkTiHrEqCgK51+P7luZST6aFAGs8k8NwqdCTgZ84fMnJn9xyM7TEPNM3vubVasntxQJz65i/sUbeTFw/WGK3BhmF6lmNXySuRoMReLRccfrVLd/+nTeQQTdShVKZWhG+d7nizyaFjZ747csa/2AktmnaCAr8a8Y7Et68QJl8MpWAunQHbVskB5JVTXbQQex2KkUOjfag1fQuETETl8WMSi40fdDediYEul0kBcPBvEMeJuKHh5rnlhE5GYUwj5MRYzG+1D5+XjgzSVUtCzJS5UC6qCGhOb5GEsQPlqRFxIwwv1DTzRLiane6UodGB4/vKlw7UrmkET8PBsbrFlycFHcKzTPx6mzO4NNTXsLmhSA6wQqE3iKSsC4rX4Kqg+zG5JMphEVE6oi5coTvHCBLddn1bcJm7k59S+bfJfpT5Zz4+Z4XJiUvL09sya0mP9tzvQrBciLvQj7lpmnTmrh8sn8dsWr7UYJMwywRmsTy3RvePL7mj59+yI2f8e3bx3xTlAQvXpA6MDyE2acVw1GauAUghEaDeCUaqtvcLsuqxMLbYlBridJ6NR4kf8D1UPPek1dcHC24vF4wxBozCM2FTnbfZqQ4qzpDY3RzqoEI7QtdnkrwsPFw8GmkPxJ2D4TQJtrLOyij6LwoOZKpSvlwBv94
/R4n1YbXqmtejkdsY82tbzEm0W0cxiVSb5HBgE3EweJym8dbpoic6l7HuK10LJ1XE2/zrsIn5cNNrGSax1IODuUgx16er0VCnMz99mIRiEHoHwZoArJSQj4RwkaL1zBX8nGsABe5Gub8av823/j0DeJVAzbxZ5+8zzY2/PLVV/n2R0+gt9ry6Axjk8AL9a1u4tVaNz/bldaKziXj9/xRUDPEkC0cbCjiD8FlU8JSiJDg2e6IIb7Hh9en2F3e2IHYGUbfkN7agFPSv7nTkraDWpmEVmHlYbB85ckZXzk+45PHp3z8/kNMb9T01oC7KXFDijD4maLlcR4AO63BWqTqHNk9EIqNSHM1KJpfq/oToLruORgj41LR8frW6zp6Wk0igpJfmdivv7OLsP8MxWTSZlf/MarLe5m/Gc11u/1n74Nh3alYhoXHW0t77qhWAIbwWsS5gF80mi8YdJ1LWd0enc0c4T1qbAewJIZR7QtsL1nJnjLPVIvnIhAQv3dWT0YzWiflrNfPPEVNociQQfco1+v46O7rs6ivjZLAfTatznYQdtT9OVYJdpbzz45pTneYtdX3t1Nndbc1jHU1HTjEK7+0vUwTx3nKv4v7z1xQfjXwzjyzOxyvQuI3ISJDBCNIPoSpWODzxqo/eP3wJt8X1xfXF9cX1xfXF9cX1xfXF9fnrh87ByrMlCxXcnGSMCVCS0CJhrl3Wq2F/h4cP7nFSVTI02Veyb6IxG2FYedIi6i915h7ydWe0Gh7RTQK4e/j7Sl/8fRbnPsDvrl+wgc39xmCVbmtURRpGCwHi47XvnTOy8tD4m2NuXbUoxBXNRe3J1zdn/Nn332f87euOL84gFvtdYU6t3AkkYJQ7bKSJHuaxFnEDJbqZZUDPvPn7zUrKTRmgnxLTEUhxqkPh2AGjRZIOTDYhJRPjoLd6UnQbjMaZkRJkK2bjNHMELG7UdV6Uclz8XCmCpOd5fJ6CZI42Or79gvlxpA0RuBzhpGZR2Cjoltjhoy7h4oU1Cthdl6ko/rMxGtba/GpmsEZn3DrsFcgirY7bAeImuFFL8zakWF02MOB7dlCn9fK6dflk5OSUpUHYIsx36gSXDu4CZ42PiK7Gi8JY6OSh4PgR4tJin6MR5HqwY669qx3ltlnjuhUDVP4pG6rz9eMOsbq633iu8RsbZH2z1jR0DQpM8e5no7dxhMaSzLZ7DPHRah6U3/WXdVjysHEpg4IEGzEz4XT+yvePrriz9z7HpUErrzKgls7KipjINkEndXwzjpNPmfJKemz2igXUedP4Yhl0umQZeuDEl41Wy+fAvO8tJLofMWj5YqY4HJdwbhvLZZYDck8P5KOc4mKHNhr9RAzI7TXuZ3QCps3hOGex91Y9Z4KMvH7jI/YLuB2ue9tEo31/P0XX+HydoEf7NSyNTYRVhXVlbba6p2elrt7iWCUZ6X8i0TdjHRB8BetoigJVSYOey8hMybsLiC5DXBXoVUyHZUjo+9TM7myAKPfK3+SFc2V85A6S3tvR0rQ7zLB0KVMzgKszjectp7XY8N6aAirCnM08MaDa96or/iPP/tFnl8cUbwPZNQTfswxUibs2882ZFQgc5j8XKjW+xbr3c/1uSsjO5LSXsiV4Nn6iLPdkuvrBa5X7iQJBlTQsDtop3EgAUyes6aMuSqrYruK710/4MtHF/zZR9/j//zZfVIQ3P2OuvF0u5pkg+aRGrX1MAKya+hsg7l1hFYmWw0EhlNFWMelAC3thVc0KQ+fMFe0sVp55ZzmD1aimMp4TTnc0IwpG1nqOl5IzaVN57ZBSeh3UGiJTLYNJbcu9hYWcHMzR64qRFQR6za6pr12esOiGvjmqqG6rXHsEUMJaXp/Ze0AEKdjrVqoSXBR/pX5rPvIHukua48ZEvaOSpPInp86k4y4ecyokWzaVgCZGfoTRYXmzzNPbybTuleyQElq84FB7UveX7K40LlVrRUB61aG2XO3V8B2ijzZIaO2gO3DRGQv
LW8TEt4ZNUO+c4+KMeq0vm593hMDbgNYwa4HJLW/x0DfXz/2Amo80IfpNkwftvTWlXsgSGfV6bqDMIssmoEPXt4njIYqkBUTWfUjSkx3Fw5/HOgeRtxOcC/3m0ESHZyxlins8p99+gbP1kes+5qrZ0fU53aSrpsA1a0wfrLgxs65edLRtiODJGJWi9lOsJ0lXcz5FfsuP/H4FSEKu++0+SHLvgBqAsUTpIRUmp3BPxow1xVuvYdBy8SFPTQMad/2yZC/ydlTKgHNrQCf9nLocm+7vUHYZGdQzMZiwtzu1MYgRKQfkLHORFYhnjeYQahXavTYAXIPVZ2tZCKJG59VbaXlGHSidKfC7ksD0lsk2rywKFlXcksgWahXaVpIJOlipO0N/VnJZalvFCUHmsjQO44Ot9y8bInHIxLtZOAZi8cN+fkXtciobU0S2fU7YXYeu2nxxhJqLdJJYF0kVLl98fqWP/TGp2x9zXe8pe8WpDoxsM+uMqPgj1I2V8xeVONeyBBqpqy5kMUThdzoW+W8ldRzkzO3irRf4p2Wq1e4WQvr/WLRtiM/9fAFTiJP2hveaS9ozMjT4YRn3TG/9uJNfDRq97C1mrdlE9WtVTWUheZMDyj9iRb/zbXK+mHfBgdVYNa3OX9uFMJoqXrZk1hFeX/3Zxt+8+PXcbUnRoO70dZB2YRdB/Um7e1JMvRe2rC7x5H2opihKuR/865lPAqYXVGtpmkNQbQAv/ucsZpzCDDcNpi1FowI0AtuVNVkfcO0IQ6HKk6pNsrRMPcE7w3tfKC7rrTwDMpRC7XOrWqtRHI1Jc1tudwKUbPPXDAHk61YRMnHuc2QRBDS5NavTt9gNpZOWl3zejO1aLEJWduJ/pBswlZKjg9JMHPPn3nvfX7x4BN+e/MaT18dZ3+eBMHgdtoekTpNhrEmOz+X8O2igg3ZfqO0jSdVa26TJCPEvEaVNmwR+WBgNzqGYIk7lw2Ty1oGqQK7thO3qvBibK+vHWvJHEF97y8+O+XV+SGfPTpWrmIdeXiy4rjdMXcDr81upiDtdWi4HBb85vlrDJ0jVTZbNqR9hmAu6KOD7sRgvKPa7MnUw7GjP7C4LlJtVUUcswhHIph0JyUAJpXvcGCpgdDUDEd2AggmE8ncwiLb8UgSomFfEAShrTw709C8MpkfmxgP1f3/oO7Z+UqtSYTsRq9zMDRmev2U1YFEzVYVr5/DeKucqOrO9457F+/P8e5QoviwzF6MY1KLitzKNwHmL/PaVGwgshjCjNA/CHSD1VzMpN9TyZ37lXlbVJE09/BZMxXUhboQZgkTBHIua6xgyEIjSbB8lltxQ5wsJIqJZqiE8VAPRKVwJn+O4luGj1CrnY/xUekQ/22ECf+3fSWrUlK4q5ICTCIGJZLZbBAYa0iLwPV2Bp/OoE1TvIaSFZVsZ7yGlnZi8YcBb4Rk7OR2bIJuEJM5W/YeufjNJcbDSTbmixXs7htikxeMK+UGhKdzVu95mvs7hqNIfZklxRHsDvw3l/xOJmS6nXJghgPBL/NNrkLmFShxrRQdMQrysGdoKsxgMEHoDzPRtC+2AJkDkBR52jtS5998xO4CsbaY3k8GYH5Z60RAC5ASPEwe4JO/ReUoBpv6gFTOWU6T9bUu9MbrAC6KiELKhDsLn9eJlSQToI8S0lmacyWbF7ms3Vls9iQJs5jdbbU4DIXEGdNkiOdzyCYm4dqRwTtiZ7mVOfKog9uaMIs5nDUTRGcpn3CEUAm2ksxVkWkRKAG9pgdTG8LOkhoQlzg+3HIZhfg48Fe//o/4l+bf5Tf7Nzms3+Mf+y9hXSQ9gHFwyqMxrT6rRmXrw6EWOaVQoY7gDe7a0p8I6Ua9oEItDAcafqrcOIfrQr7fhbeX8HONerF90MDofc2t80ESXz94zpP6GoDv7R7xdHfMP3v6BmPvSLe12lu4RHuj709OBuTSqgVD5uHVt4nuIQyHnv5JwmxUiWl7RfEkKtqmkQj5tBxlMiiU
CGNWEJl8lPUv51qIFwWeYXJOdjsVgJi09xqyvdpC9O/27O431CvY4lSZe5Jozi3VbQZjdjmc1OvCX190xNblYkhN/Xw0PJqveVkfgVhFb1zeyEYmlDdWgssy8WKIa0ZFdYfrFo56Dt661SIlGLp1QxoM6XmlB77RINHqGC8KIMm8p3JwGfQGlAOPfk3mTMmdBPgYdb7shMVn1eRlA0BvwCWqW7NH1fOScFh3HDc7fvr0OX/i8PtsYsN//fRL6n8FyMap5L0vSiQ90JWCP1lgYFo7/TybPObTfuFxTRLx/Oe7vMVkNSSXfNqvnbrBnzcBiYroRAexVo4dKB9rGs+lGMleaDF3K1wdGAeLed7y9OkT0nEAm3h5echVM+PNk2ucRIbasfY137+5z7prCNEQt04PHJImFTBBaM80aNaMTGaSKVuxGK/rdX8sjKPJ8Vx3DqgZjZKk90KqlIsYJZGPcyWUV5tI8c8q0nmTY7qiFUyd1bdVRv7K85TEYtHhmamit9eD2e5J5LsvH9Bfzlh85DRYeKtzoHDyMHd85ETnV8pK1pLUkZq9P12s9Jnt/Y/2HKLipadcTZ1XJYB4D6UJJgQt0n1iXOq+MnslJLEMT0ba8xq32RPNkcyNleyeno1Du7cSXWwVGNkKycPwwBPfHYlBiKtKDxNeRV9uq/fbNEbXpFBsX/ZvT585SE5SkGz8mpzORWx+nrLv6KTK/n+XCg/UgRty8TEqoTqZvXxVTFZpOM2bq+YDm7M5y3PBL2Q61ZshTSn0EsmeJUIfLX4Z71Te+8VRJ3v2mTJJHakz4lBiNNxWvSUnIzWf1JTzfcdmWGAe9PRVhQz5FNnpQ5b353RHkaNOC6jocgFlE2PvqCaSrSbd+8OAO6vwp4LMAzGrJUIN49zQjGEixCWjp1PTe2KjjyxWOUspRc3CM7liTlooqb1B9p2pLCZ6sNmbxlo9kfhInGUX8wiTY7lBVVFOvVr0weT7NAjSJvwyYe5siBpjEmFu8oakv9qPLM1NljSnfZyK7bNS8nBk+6SlvtGCzM9Nfp5qalc8oEB/hs2ZSVJH0quGxZduWF2rn00y5HZfLr5twq1zPluvKpkUdcMtieLJirYvRpCtKrFkFvjZ+8/gPrwxu+KBW3ERFxzbDX/6+H2ePzzi5WrJohlY7VpCMPh5wF06vV8JjTJp4rRR6wdgUlsmi6pRI5P/UKzU92ZayFJp12pLt1hPFAhcAQV9/d2u5pfP3uNeu+H7V/e4fHqM2RrqG0OTUQXNdEu051kNWgXGo8jsuaKqfq7FS3OpPY7x8UBcgh+Kd4wu8NUm5cgNjV8KfWmb6/vaRcMQLGf9gicPrzmrDhivGvxgJ/8dM+ZFvBxqhojrgroliy6Kw2WFXxbDU93UZi/27YbhSG0WCuIxRSHB5J/GYOh8xVcPX3H5eM7Fck5TeWoXuN22+PcPiCnR3c9ijChKaM1tnliLbraSGNc1Rwc7ulGL5hQF2VlKJIr+YDB9wPZB/dTySdgMAQkRBpBa53BBk/fWBWhxDEiMFIPhaqWFuXG64ZneECVq2zgn1oOi904ir8+ueau55B9ef5XvXD9kfT3D1roru4dbrI1025rhulbk6mjEusD6usHdOFWKtrkDkEURZY7bLh8IyAVGJsoDe08eAbsbIc0AmFcjP3H0ituuYV03UExAF5HqqAdJxKdz1F9KX8L2Snu4O9ZdFUiLkbjSoj7WSiA3L2Z4Zny/OeR9k1Tt2hmqW92ghy91mJ36CtmdIJUWb8lmwU/U4qm98pOKc4qUikmpB059y+wuYCbpO1NLT2K+F9lwXAnX6lPkNl4LGidTlyDWOmckt82MU9R3Gkc2selq2npk/TDiNsLiqf6Xnxn8sKC9NZp/l8jGsTqWXBfwrdWfV/aPEDMlQYuHkuiRTKaNjDqPSmxWMQUte6PbJWYxagxMgtllmIrAYWnz2M0k7JiQaCfX7/lzYTyxjEtFnU3Q91EK6SJAkM6w
vZgjbcAMSnC3paVooKo9KQmDQLqpMb0ix7Znn5voDFL8nWTffdD9Jiulc9rH5FuVCfFm8MTa6fvvR034+CHXj72AEq+bcJn1ChVmH5kIy6Mdq60j1IbhOFIbdSLXnDT9NrdNE8Snp3OhuVCTylgLYSYTQvO5n13aRAHMzJNcRbDgur0fhvJT1Fsqyf7rm6uEJMNtWyFtQJaR2XwgJdhezmmeVbhNhlob2XvKVJGUTz+xTtiyGC5GwijYlUpHTdyjQqEq7RyrZmxFlZcUcTK2vF8htBbT7ft+KS/OptOQxVQ7SmYeGdK0vR5zZAxqvlls60UyzwrlySw8w9wwfFJ9PijXgz+M2N5S36T9JE0J0yvaZ3tt87WX+VSUWx36JnNxGsBUkf6rO8ZnbTZ3M3rqHxRx0ZNtmjay4NWJeXnYs9o6VpcLaCL2osr9/AQ58kLCnXZB2KNv1TqoR09lwBhtA0ku4sWQqsRPLp9Piq3/4uxnGaJj6XoetiveWFzz8atTuq4iBkvYKd9F2xyKXLiVIW3NxCvZK+80VHNC8FLKfkb7AWvGQEx64pWQsFuPzcaMev9K66S0zCB0jg8+eMQHUWhfOBYlH2u750vp4qd8lvpG2LyakapEaGD5WWL7UFuKGliKIiwuF1U3YZpvbmdw24jbeKrbWovefJCxvdoGjNGy7WtO5jsen97yyi7x2wXFebjck1AJdae8JdMHQi6YQwOLz5SDZUawuzRlfG1eU25FaBLLT8uz1jHojxoI2j5KTURG4WbXYiTx5x9/F0OiNSNGIh/v7vNfnv+czsHTkA8IVs1+833RwjZhD0ZC5zj/7BjT6ZhprtWI0/jsTxMS1dbn2I6I3XpK27y0A5IItqBMeT7ICDJ4kjHI6JFRDzulsA6trkelkEgZWSUxBa6arSE4y2U/x0jiw809fuOjNxV5rCL3H93w2vKGP3H6AZbEN9ev8cHqHkYS7x2eAfDR6h7vP3uA7y3DToNvVQlaigVhdh4pEnc1L8ztymyUW8xD9+m6sKgGTqsNs8qzqiCQeVt5ThwsOq7a2fT3WFSXhVvm8xySxOHBlqvBMiY1VzadUYWgh/aCqSXktjpWx6XQe51L1VomxFGibqj9vb3zuOtVfTe11pLOtcWLYYr9SK4UCPn3oaBuZjIT9o1QrXIgs09aPAlEa7A+x6xkfubEJ0vkQ13ulLiI95bbviKdDgyHhvqm1qKig/Ek0p9GxkNozpWD2I5J0yJy6y1mB3e1KIjqgj6KcrkyFUaSHkjcLmoBVGXla27JlkJHUqJaJ7VMSHuFsKr0fu9WV6iF3WM9xM6eWmIDw5Ewu8hzIbc79b1CqqPu9Z3j8CO9l+1VUG+uG0t4eQAJqkwvqNYJt41T69L0uYAriGBGQ1NW8AbHFHk0BY+Dql9jpsQY3RclJBj8vlPz+1w/UgElIsfAfwD8dH48/xPgO8DfBt4BPgL+Skrp6oe9VqyU/wS6eCpyUfgP8MbRDd/dtPReMPd6gre4nZ5MTDbuutub1T51pLk0uE2aNixdDJnMytxOb7RvNfX76GjL9qRFoiJF67fyDdlI9mtSYzrj1ZFXIlS3icNvO4YTx3AUGV8LPDha88df/5i/33yVtHXsvNVNctBTjqsCPmhUSfG9iU5Jggdv3rLdtJhn2v7RMFaNHBgHk08F2fQyDwa7GykS9OT0FBZmlUL/IoRWDfrsbtTiwO3bAtIHcGbPkwLMblQH5LrSU8esyuhEZLbssTYyLk4Y5xlezi7tqY4Mh8LsZTaQq4SQbfw1ZFT9ScozK5L9iSSc/ajCdc2Dt6+4sonwag6o+6yfGUxelM0IHgsi+N5RzwcqGzh9/ZrVN+/hHw/Tzyj+SWU87d1y8/vySYvOIZCiThzloSgqKV5gZ3k1HLINDf/5Jz/N5fMj8NrWSU3k6z/xGeNWDVTjPGrL2StJsrq9I5VOxSdIN+NYp/2YzAakE78DPg8Xi2RLCL2/blc8DdiTje+I
DwBkNNiVmcj0RcKbmhxEOkuEFmbnOjfmTy3d/UiYZQHAoO+rO1UTRUV39HWKM7TkNoHbeEzvNZOvV1JsWQybyhOioak8H3/8ALcY1eNlbbRdWuwOYm7r5mytgsRI0rmXnLB4pu+tvdY2yO3blvWXR6QNcFNNsQ62T2DAzxy2y2RSkxCv8R//6Pm7vH5ww4N2DYBPho3X1VRbj3vZvZoP5nvS6WabkjA77NidzzMipa3iWOupNtTqh1OtLSbL1evr/g66m6b5SEhIyGKJUkT1GRWtHNIPYE12sk/c/uEeYxPxsqa6NopsSppQjBJ/hEAfHB/dnvL003vIVmX9h2/e8r967+/yZnXBKrac+UOeVsc8mesJOybDWbdkiJZmNuJdZDTgZwF77Yi14JMWKYWbZ0YBW3hQWiDG2kzEb7+sCK12EA6rjlvfcrXRIqmkENitIexm9O940twj0arZcFJepNuGXIRYRYkFDtueN790w+qNhg/ef4y7tfSnMR+UjBa/kn3f5uAXeq8K10xR3oKgwXCk/+dnwrh0zM4irktYIBo0ciQpQlPidZQj6z9vT1AQTzLSuzD54KeZhqUYbq8MtstRPjYXIxFtZVt9kNGC2ETTjKw/OSRVCZkFNu+EKQ/x4bsXhGi43bSM44LkBLdT9HY8cNMaMS4FOxiMtzpnx4CE4qWowzPUOSA7FhFHmhDw5Az9UQUiOK+ftwhXBE0U8K1hdjvmUGFQGoii++NhZHgYmX1UaQRNBhjcjry2634sCczC45PQnjmqra4JZogko/ev8KJLxqhEGA50/rtOD9xTCzLm4qo2+EZyWL3uDd2xpSULAULCJJ2H6c7eGGuHGYoJ4e9//agI1L8P/L9SSv+6iNTAHPjfAH8vpfQ3ROSvA38d+Hf/wFcRXXBA0ZYxt+QkF0QShRANhwdbNi7gR0tYV7ikgZASNHIhVvo9UqrXpac/qXPvEtzaTBtnygS54p8EqpB77/ScF39sYAiWRT0wcyN9cDy7PmR9PsfsLNVrG3ZvWda3Ne0zR7UpLQ6obwzbfsHT45af/UPP+KNf/ohPV8c8Hx6QXKK6VuKgcxG/8CRnCTV09zI6NhpiNBwdbrgajXIpthY/V3+OmBUT0enCYXxGUHJae0l4L/BjQVfKFSuL5Dyg0taQEHSQF7ffyiK911Mj5MUgc7TqQN9XCLAcmXrVCn2D2dgJddACIBGbrJ6L+8KlLBCl4i1k0bIQzZ45zhcHNPNxT5jOfXuMYHZB21deUQESzJqR6+sFD+/fMj4YkY1TLlV2aPdzfb+IOvdOY8AItgsMx5kjM+ZThxdiSgoWZEXot28f8+z2kNvvnVBvtc3n1iDR8jsHjyBBcyV0TohNYnZu6B5F/BzaC2G3iKQ24tuAGPWYSr0j3FT4mW5sauKqrbPk1B8lOi2C5U5Ya2yc8kkysXHqzYsuSFVIuNYTbCT4mtBpER4dpDc7ZvOet4+viUm42C7YrO5rmwqwnTAeJFbvKBxuRti+EemcEsT1QCEgNp9U8+tabQ0Xt+B98SFYE6lsoIpGYyguZlQRRd4AjL7vJExxLpqhpRwG22nR0X+px/YN9U2iOzb4Vti8mWGYm0pd8vOCLEG5DPXW5xZFQrISr64D50+PuFqfUjgdRYlTr2XyjktO0dX+nm6ybodGEY0wXtX4+5qLaI0eSLqhYhgc/pMZ/akQNlBt7WQwaHI/SsaIGczES1H+kN6/CSkP9TRPE/V+7WoiD+6t6EfH7dap4W0VwSgCZ0ZFUuI8YmeeB7M1Q3A8tadwPGAaz//8q3+Pn2+ecRZm/FfXP8f1OOf7t/e52anCqB8du1vNHsMmHWedAaMcLFW2ZmJ33iDL3C7ihuSMtn8aO3GiYp1o68DMjmx8w27d0PSaCqAqKG3TbW9bxKap7V/Q2X17TB/5vBm43s6oTeDBbM2HM4+3iftPbgDY9hXHs55ZNRKiYeb09Pb9l/fVgPgge2u1iWQSdmuotsrp
M17XjH2Wnj6WcS50p2bizppMjLZ9jrIBsLrOqkms7m3dsZlEMne5OONcRQTVKkw0EVJBibN7djIYkziedayaJe2zitBY/GEkthF7MNI6z9lqwXDVUhc0KQdfh1o0esvktTCvK6k2WqhHXRd1LReGpR4e3Dbkg9++CMFHhgM99JtldiAfEsPSTpwoNbGW3OLXgjLkQ2F1axjayO6dkfmHFWbQYjXdCiZo4Ztcbv+bxPzBhs24RIKqvHXvhnQ6sHug/myycpMYRfmUqiCW5LB9xG3CVFNAPlQvAsZnP7Ej5Ui3N5m3Z/QwAmB6T+EDpztxN7/f9UMLKBE5BP4M8G8DpJQGYBCRfw34c/nL/hbwS/ywAipBnEViY/JmnKF3myZI+tntIYezjsE7wmfzYlKOz7lJ9Y2ZuCwiuXqfe/r7FgmW+nbvyOrnuT8v2b49V9e2NziJ/KXXfpsxWbah5pPdKUO0xCPhM28ZxxnzdmBej7j7kU/mp/RXNe25mQiz9a3QXDv+34c/yZ9+9/u8fXDFy+Eh9KpyGRcq62yXPcNhxeyl5uxFC+5oYPtsSXfa4xqPF0vq78RJpKLSspgxUd96XaQaR5hlLklIUyZecoZU6SCQkONbkp521SFYJ3usLGLN1FrAGPC5nYVC0aB8g+GmQXYW22XrhQwJJ5doLk1ewJmy3SZINl+lzQgpu0QLEvebbiH1t++37F53zIa9Aqu8HndJqy4bGiYhrite+GMev3bFy/fvq2qwJLoLxDaSglr7qzooW/wHLRL9olIC+R3ZoyltyjwOr54dsTgz+9DNLDE2H80IJyHfa4GDgdhY3FrwywQXOVLACSKJ5A3jaJCtw3Zmaj9DRqGWOrZDY3IBZalWY34eojlalXKjTEqMs5piRljSx+fzHmsi68bTNS0Hj1e8dnjLzxw/Y0wWS8RI4mlzzD9+cDIRfKsbfWDD2z3yQUt9rYiwbyPu/o6UhO1NQ7XWQG/QBdovLNbqeCjIXflMRhJWIt3oNKbn1SGysZOFxaTgzJvLuMyLVya3QjZ0rAO7x8r/sl0+OHmhfeZwXV6ffBlXiqDJGKe2RLVWY9uDWcfGzqhujKLLmdpQEDc76LMtyKVEHXdKys1rUxD8+YzuAbhsuBqCIXQO1wuFK6KtBB3vvrXKzSi5XLnILKGlqdJcSjNG4szpa4yBOKuQUMjGRvMMBWRncVvJYoe4Rx4BmXvq2vOwWTMmw+J4x1snV/zZ+9/jv7/8jL+zfY1/cPsTfOPiDWZuZDPU7PqK4C1hUHdn8TLxmzSkN7db091Tf26plZipPP5Kfloq5OWkcyMlRfv64EheEUgaIdg0Fd3cOtI8UDIhQQ/Xsc6k4ByYvagHPnp1n+tXB9SHvXLQ6sCiHrAm8mi54rDqOK53zMzA8+6IrdeUBwTCPBHrSDFXDUnz1VRtqZ9rXAjjHKqdjke1AFB02QSQHN8Tc0ankXxPxkiYWfpDM7UJS/u52qapcC8b+90Q4TLPSQUVgeAN1kQOH60ZPzmh7gS30fZybxMff/yA6tyxuNY1t75lypUrpsHa/mWKYfKtxcX9Oq4olI57PyoCh4HU6/sxo64NmrAhDId6I8u80dfVdmZ/4miuPDE/f5MV2s2FYIaK8as7+hNHfS2TubIZle9qhtyN6BzeRszpQH/dYgeh32XEs4q8cf8agKvtjPWqJW4q3I3Vtd+lzJ8ziubBdPAuk8QvshLP6OcJW+VZaS1ipzmpe0GApvqhJHLzB/83AO8CZ8D/SUR+Q0T+AxFZAI9SSs8B8u8Pf4TX+uL64vri+uL64vri+uL64vr/+etHaeE54BeBfyel9E9E5N9H23U/0iUifw34awD14gSZe/oTQ/vKTAZfSVBDOCNsbmbUzrM5n7N8Ydg9jvQnEfdoq8rR58u9iieTY+8dr1k1I/1wgO2zN49k6/aiNhBVublOeSjfPHvMRbdAJPFydcD1M42IMSc9YV3Rnlmu3BFXNtEedxwf
b+gXPfHyaOJwFCaR+505/8B/hXuna+oro9wfr9V/CIbFrGe3DKQzQ3OV6O4J947XnANh5wilSM7vO9lM+EWr+Dq3jjQUV0+z0QnuDmlzIm7mpG9qq6fbGCfPu7CoptYFAYWeg5DqrMRLKCoTEtYq78pt9DTudjAc6OvENiGXTD480TEFPJMkh1dmvkGlPCBMmk6rbqftquEAxsPE4jOhhGwWdEZ9aKKSboc4qWPadmSzbrGHIzxv2JzU1I+2jM8W2scP2ioqmVuSFEGKTlUwoTEZas8njWxwGKuUo4OApO0ZGWQ6HaWMpiWr2UthZhS96MBvHd2TkYPvViQD/bF667ithTM3ITMxK20UBs9tWUQR/rQPZi6+Qcmi0lqn+VTVOqOGksna+ZQXKsFI4l9/5xsA3IQZ96sVrXg+6U951R3wvasHWBM5ajo4UnTLVpHQzbC94AfLcKKmgvWVUF85um5ObCPSm4lrVU5kalJb7mGOhAj6vrvR0TrPatPypQeXHLw5cLZasD1bYNd3vIzy60SrCJCaEBqGo9zC/nhGrBPRJSqvP6O51v/zMz1RtmfsVY3omJY+ZAn6nh9WHwz4ZUWJgSpt5CmqpyltSXQAeOVWhDyOOR5IO0f4ZIG3ihiaXn3pjIf6JtFeJ5qLMfNMEsNJg91lOb8I4gMYIZrMGctRGIU3pFCS5Faf1bbZVlh+WOEXqqBUNWXS9bKCmA3PjIsYk3Am8MCt+B98+Rv8D4//Ka9Zy6c+8p+c/SEuuoUGP0visO1wNrDuGnxl6bKBrBqdqmDDZDsZM0C1Ual5mZuqbo6kZCbPHZdDdbWVlDA9eG+5GVqG6MAUuEXNNP08MR7q4mxvMi8nozKFMoAzOfdTkU1TRbiuSGcLZBnBwsfv57P7TCVwUkU1Sb2tFEVbjBrRE4Fq3xpPS0909RSjNY3BWW6Bia5Fy6eRcZ7zRD1U25jJ3pKFJ2Ci8mn6Ex3b85eqaKvXUeNdquxnZQu3cb9HJquGlGoToiKfNBouN3OOZh3PjjXkvloBOyFZtaJwHROSZEblLPmm7D+63oYmW6vkEPnkdH0uxHzb7VWUfm5wXcwh62lqgYnXr3c7nSfRKpXGjOjrZS5nc71HtuwuEu6r2KNeCeOzlnAQ6J2h2popGswEXQslJugN4WJBrBOzG83Lq9eZ03Zb8/HqwaRqducVza2KlEBfo9qkPY8rJULtJl6meCG2kTAKciF7TmxkskBJtvB4QSSSqv069ftdP0oB9RnwWUrpn+S//9/QAuqliDxJKT0XkSfAq9/rm1NKfxP4mwCL+2+mNJYevplMBtWVOMP6veHqaom7dErEbhLHb13zU/dfsvUV3zx9T5UvY3GzTozBMG9Gtqcjw7ZWyHknuS+sE9FmGXcxf1tdzxm+cZInMxxl4u3u4Yy00FZS+9zlfLOKq3dHzMxjFlokGA+7R5FURSQI1ScNF7cV87CXc2vQq7DtanCJcZmYv9AFabVr+KnXX7DzFR+fnUASxqBck1grlLz3JVGCqvpfpb1RWOYJIUUZol5IGMH0YWp9mUFJtZJltGRInAR21RPntbqwZkfyZIRFOxAODeE6S66L+3VQcml/Ypi/2AdTRgtEsEHdvIdD5daELHu1vTB7pZYQxhcPEVGDtFE3xgI7T064OR9MksLPBOVBbNcNp6drroD1J4ecfOmKy8MG4+s73KtM3m6TFnTkIs/r/SqmcZ9bKIo4KvsHQS5QGi2GzSxbaIy53YeOs/a5o3tjxM/VAqA/Tfij/GLZe4lWyayydbTne2O90OaaNhdD6omGtneMbqbqMp+5c9lLaDLmi5n4PVT0yXHiNkSEj3f3uRzm/PMXr7FbN8iF8mpeHnnMyhGXniePr/jMG8zLhvZpRX8/MBxH3NpQreDgAw0vRrJ1gc8J7KIETbsLmLGaDALLGLF5o1zMBr736SOaxcA4WtyNpbotBNLsDj8kbPZEMqMWyuMiMR5FmgtLc6lFbL3S
1x4PhM3riXB/gNHgX2g/Tg1KUS+wzB0MCw1h7YaKLz885wNJ7FbZWl2A3tC+UIXvZNyatEAx3T6JvYgCquMO3yoBU2widJaQhCSO2atSiOXNxyk5XmKaRLCSIGSX/cJ9KXYQEnLgbLpjhis6FqpNlshnbxuZB2wd8AfK+ROvRY9IopLA12ZP+ZPtU95wS577Nf/J7R/m2eaIPli6oWI3OlJSvuk4WvzopvaiRJmIym6rZqLVShWQEvKmI3f4ST5OZoYl3Lxs5rYThmAwknhrccWnJ8eMttGQ71EYTwLLR2sNc/5oiW/1Oe4l9pJbYbmglsS9kzVno8H2al2Ch/YiWwIkN5Hqi8O2n8HuJzQGQIIWrlj1p2Ph2b3pGdaqYvOXWqTrvNe2kJ/pv+nnz4TrUbkzxbBVivFvSFOgfLXWcRsaLY5CLfh5ESSpqIksu9f9aa8KK1YCXV8xjI5w6AkHotzJLIzxy0T/MMLSY1/WGC/Y3kzttuJ/l6yuMaoMLJ9LJhNNVSsyjbfJHX+MTIKVXNQunsXJUb3axOkgUoLigamNa0b92vHNnmEwzD6uGJOdWobqjbbnuiUj0ATSTsGV2ZmmW1Qb5cDKKMyfusn+pNrk9VsUbFDSeJg8BCWkyag2NErpkFEIrZL1SyKE5BaxGQNloqbKUNICftj1QwuolNILEflURH4ipfQd4C8A38q//i3gb+Tf/7Mf+loCsrF5gO5vXDGWnCSDt2pMpwxmzy88fEpEiKmZiJMpKioQa7i+WjJb9FStZ3hgsWt1KS4Kos//LD35pATz52l/ss4FQHOVeTcm96/X6pvjNhXjQcX2DY8/0hPg1997yluLK17sDvjnv/ZlZBSGI510ZoTQRlwd6FeNVtezRHdPB/n2tmVzVPPW8orKBNZDw2fDCdFVE7Hedvueech+K7bLLrmNukmrjCJ/Dh/BaYK1GcJUBJANNe3WZ1f2SGxc9qUZtY/bOl0MG0e0cG++4c3DK75bPWR4fsRwIGVA6OZxOjKu6uxinCbStkZqWIbjRP/aiGkCsXOYtcV4g+uUbBqa3AufB5J1FPfbdMd1QaM+EsVVXQZdjOdL5fu8dv+aT28fcHO7oJoPjIta+Qn1/pQemoRvjZ4oi5/UnQ1AfbL2PjflqitPVyeG48TwwGMPRqI3Sl6+MsQ2Kee2y34lt6pom700VCvBvzEyX/YczDpsdua+2bXc2LmmfSdd4MdlIsyjWnskXfRCbTCVFi7RCjakiWwtWfFU1JplLvW7iv/q068zq0Ze3SwZXs6xG41saPNYShaGvmL+TOju1+weV5ycrLncOJafOJJRjl7/eKR/INSXlvZMckBqRp02JdA6ZQJ+JoS7jFaMiRCFmIRHByvW3z4h9Q3WJtozmTaAYs1hBy0cYv7cKjoQ0iLQNZHDb1XYPqsYnbD6cmT+9i2VDVy9OJzk/UVuH/Pi51shzTxsDNuu5o+88zE/c/wMgLkZGJPlo+09/pF8Fbs2mQwt1FcqCiBm3ksR4txWxNPI/Qe31DZgJBGSsOoattfHEw9rPHSZEJ+oVuPEKRQfJkNNmdRMRv2iKouM6j1TeIl3Je3DUfaUy0WOqSJNO7KZK2EyjYKVNIU41xLoE/w/Nkv+4e0v8kvP3uP6ekFKQvK5iBjNVDCZneAyh61a7z2/2otEcxMVlcgbZInMsRltgqyuzYcyIXOCuhHbK6/loOp5b/aKZydHfGtxTHQwHqoH1P3lhoO657cuZsSV0TU9qEHxuNQ4o7KMNdbzJx9/yEfLe3yrfUzqHWk0jD77A24FMhpZQq+nOd9EgktIHREXSWtH2lns4Uicq5eZn1vac0NzpciIoskq8qg2MvHwQmM4/CQo4dto0RFrkzdxDRHfPtaDQrRCbBR9Lt5D0RraS4/xEW913rt1mDoMyRlM7bE20n+6xA3qGxgeD0SrLtn1bOSgHfDRsD2r8/zIhacVYl5fi+I51EwpHKFWpD00uleVdUZtKDLv
L3PcTO+ntXoKjaegzfvXN1n8o2pFLQxN0Iipg5MN1/6Q+aeZjJUP3HvOrBbe1WwktZ5xmBFaXZtD5iaZezu2scXt1JC3xO0MR4pQL57KNO8kHzbFa/EXGrCHA/Gs3QsE7oJLiSxeUdsR7xpFsru4Ly5/n+tHVeH9O8D/NSvwPgD+x2hd+h+JyP8U+AT4N36UFypk02QVFfINpFpTpEOL+q2sKuxgGRfQLnt2oeJXP35bYyjGAvZY/QAAUqRJREFUHLuQCX5mhOqThm5ZExeB6qhnNDXmeUZsIhl6TXmAqpybuCc8F9RDYoEB9xV6IYfOzhOzCyA5LZIejszdwKP6lj9z+B2ufmbOs8sjhm2F2ETa6eJ2crDl1a7CrnTT3D3SgTk/7Pjgg0ecPVzw5GBFyKuEyv3VvweY2mOug+pW08JD60htPiXENBFOCQm3HiaptN16VWxZUW+LbiRlI06TyeMYA/2A8UHl08faIl1WPV9dvsJI4puHR/g2TerANBjccqR/ZGmubW7nMJFHNdFc2yZ17Vl7Q5wJSf4/7f1prGZZlp6HPWvvM37THWOOnKoqa+rqie7RTQ1mt6GWLIuWAQEUIIGw9ZOyKUOATVq//M+ADcEGLNsgKNoETIggKFIiWjabTYmyBInqueasqszKyCGmG3f85jPu7R9rn3NvZGVlZ7S7I6oyzwsE4n7z+b5z9t5rr/Wu9zV9F16nKZXtFazvTIjWQnbq+xJW52ovLf3AwMFik/HqwRlvvHOLa9cX3HjljCdvH9BmLTZYpbhYgydJHDKP+gVBW/WdKkZf0QHxBnzsguu74nCyJn2p4faXFnx59ojUNJQu4qIZ8V/f/wzNNlFRt0ZgGZOcGcq7NZvYMLkX4QtLGcVKIvdCVUbU6xgqc7koB2Kpz51OGE90Aekc0U3lexmO3mKhcZiquXxeHcqB1nP67h52bcieCFl1mQnohE2bkeoJqQ6L4fSdPbKba8y0RtqI8X3tQmrvNuzMNkSvOE7PJ/jzhPH7mu1wkWZs66l2AroY7Y6k62jRDFRqGxLbwstb6iNVFa52dEMQrUP6v+tKQ0uT3e42PYNmFJN+ZsHm1kyJ07mnzRxyvaDYJqzmCdkjzR51XVsq+Nf9uJ1YJdRFxL3NAfvJhoktcWH23I23EDmksbjU0RotLfnIQ9zSJglMlVhtSsEdZ5z1aamgd7WNGZ1p+ScqPPFKBQZt0YTWfn262VR4a1WyoctCGY8pGl0gNrXqP3mPRIFUX0M7aal+cQtAdTwiPbYkaU2e1BRJgjGO2sWY0Bm4bRN+b/0af+Phn+U7D2/oJmdj+266Oqh3SyOYQq6UPz2mEJILHXvxRrW/knlzpXWd4L9p+s2a2nWoOn6v/QSYZYFpdqAw1M7SIoyiqs86tGNHvY15cLrDV24/Ip5VRG/Ffddsd23Y0oXSob73z0/u8c/PvsPjazv8zXd/idP5mPH1Zd9JKJHDRI7WaEnTtYZIPA1gIu2GjaKWzdhiVhFyP1Nnn9AN1qahKSGUq02jfnn1jnaUaZOLBjqWUBEoG7zRTGi08WRnwvqOajQRqiGd9pmqftPrSul5Vh00rNCmqstnI8fOeMtJNWV6D9o0YvWKwe00pNMSYzwXF2PkJCE700xhugzlra1eC510gka19DZQmoHyIbsXAp9An/BWsKu6p4RI62hyHfdNJsQb7RRsU+2K7QK3/ERFNbvONS9B4PLdnPltw/jmmuZ4p9dvsnXIVoVj6wLd6WTL/I6wLvNA09H3299Zc/vOI4o24v3zXZbrVDcDDqgM1TKi3NFNZ3YeMmldxhTNXraJw8677j79PtnmUpHcbGoV014FWZy6fTrQ+hB8rADKe/9V4Oc+5KFf/Tiv7yBoKi2+sCFa9ldk+/XCun0w56HfpZpFNBPPLG74nXdeJX4zV8PTjRCtL5VUbenJjg1uoXoUxaueaFLjrV7QXVeCqpFrRwxOiEc1xX6iJYmQoldRT0IKE8pD
31/4auzq2Xnb0eTC5iLhd3mNe9cP2H9tzf/41jf4ndGr/PbXPqc6LQbM0lA1lnRc0bYJ6YVQj7VFem+8ZbtKWT6esprnIeiKem5XPVVtGW+UfxCvteWexoU6ueutW2Rb4eNIJ7R1gSQxLomgqnXnO0lDpqpFCg+RRZqQq3ZOuRl1A2WFbGLETVnVKe9t9ynaWLU3iqADk4BsLU3sIG2xRdetoxOB+vNBtBS2qwR3OmZ0oYbC2akGsm0auqZqONxdMP+piov3d0kWoSQROhA73pPrOgQTR1nE3MoXvNHe5vjBLq++9oTk+obyJAw4p5mEqxe+pnM98bIJnYtejSLbVndZleikmAQdKA+fmZ5y43DBV/L7PG522LQp07jgJ0YPGL9S8hvf/wpVGZGOaprYIccjzDzC7daU+xa7tHARUTa5Bm8GYtFJOgpK3tHWE68NbmTweUs9hvQ8bAzKFpPaPuPYJga7qVUgtagRtxO0ikI93zhao6XR7Mz3Jb7OfNOHgBVRDke5o+J2ZTnB7dds7nimb6sYbXE/43wWM729ZDIpWDrBP7Ca7m+1q0g7My8F9WwROi2tdg9N4pLvnx3yk3cf8GR/ytHZjGKeEC2tcucCH66bQE3l1FR5EmFatRDanOew29JOBKn0nPqjjPjCMJqHY1npHKIBjCOal7oIVClSqvdeXVj+66998UqKG+3Esh57ETG5rx1B3sDouGX1qkF2KuJNAi4sMgeVLg6P0qeuryS0t0dbT7J2pEfrvgznrcXWqvckZYPQgHO4nRG0HhNSrXZdIUUZNgmaFUZUQBaBg9kaAY4aS7vImaYVd6Zz5qsM54z68jWWsoSTasybi2u8+Z07JKeGPMgc1HsO2an45dfeIbc1p+WYo82E2DhmacFFkXO2GrE2U0yt5R5nLU2uXNVoo7yyziLDx4aua7AT35W2U60N2aoGTGV4sN6h8YZ3F3vq++lBGhtEfGPu5QcgnmThqUSzSfHak140wUZKS3TbJuaN7W1uJRfcic+ZJBWbLOFLh0fktua8yslsw36y4XZ6wbzNOSknfOP0FnPJSZKGyDjiqMUceFZmRP69pJdPMLXyU5ucXvokO/U0udp7RRtIL3w/jnwk2GWN2VTgPS6LtfO7gtFDzebaAkZPXF9u6ux4tAtN+o09NqiUW93YNbVlJy14vN8g349I5x7ztqGaJpQHEfHKMF4HG5NMx0G8ViHaKHj5SWtxmVO5kS5LXPq+K68TgRWnJUpbaAnMhEyMTyIwWjGpJ56LHZ0rTalrs4t96HwW4pXB1m3P0VVldsgfC80iY/NF8NdUzic7RSkAhUOmts+A1cuE822MWNfzjLsK0nKTsc4KUtuwN95SljFtYYnPol67Ued6MKVT+gcglcNWsNnEEHlcquV9UrUI0+xTt2HXjYEsisty3p9EAPUnBQ/IqMWcBAG44BtEIOJ54znMV6xnMRevgjGeso6wb2fkR556Kn2bc+dt09W643NP1gIklJ91akEQIvIuupUGbOuJ18JPvnSfN7IbrBcZJnYc7q5onWG5SSkXKdl7CTtfOWVTxpw+mJAdqTBkdu7BQ3bicTZh8eiAv1b/Cv+Dl95kbCuyo8DdisFHnovzMTu7Gxa5xzwRpiee9W1hU8UcHi5ZrDPKdYKEkpOLgEz1OxAfJO1DNiqNMOjFHa0qzSrVLVKUgbuUaEapq4EloV/bGM2LB5VZvNeAyVoNnrxX/5o0oSMpXxQ5D+Y7yjc68iRrH0TaDEwd8eOENg0kPxPsACq1HfEipOfAOwnTdzziHeWuIZs3QY8plHEqWFcJnz845ntA++aBEjmrUC4KwYy0Hh8bzKjBRi3LJuVzrx3x9sND3rl/yGdeOuahgLw3pVNKj8Y1Ip56R4VG/Yaev6ELdnNloVJOk+rDKF+r8YZFk/GfnPwsX31wh6aOGI8LXj845idnWgqSo5Qij5FJQ3vQMnpoaecp5Z7DT5vLUglgJjXGeNra4N7P
iINCOB7MVmjFhMxoKG9VaoprN7XyS6aJBszbCqmUBN6ZtQIY6yDqSL70j5d7WoIoDp2WKhNHOVW9MfGw8z1hdTehutkwt5FKc5zD6FFE9XiPNvfEjZJGRyfq8WYLR3qyReqWaJuqXlTR6TkptwZgtc64xwFxpOXkeG6JAznUS7cpCWWDWhWAq1nE5oZXgvj9pN/cJAv9rTp18Hqi4z8708yPCQ70NA4kyHlUEoQZhfxBxOix70uZLlJpk2JfF5/8tO2Dd7ztMzrp3LMSyEYVe5MNm/2ETZHgnYq6eid4icmPA+ekK78lYQdft6p0HFkdc4SdrfdQg48tZl1clpRBnxeyUHZpefyd6/j9ijhtoFFvubujC77tb9IuEqQ0OANVbbi/3OV8nZM9ssRr/Z7bz1T8M198k1vZgs9njwG4V14ji65h8EzjAiOexhmO80lfzmwzHYfJEsQZZOto5apXpUPKFp9d6pRpR4zHJzHR2mO3woOTXR7IDvUqYe808LnOYH0HTCPM39/Bp04z7+uwsdhqCdQFIcuuE+Y/efun8B5+8uYjVlVCVUe8t9xjkpRktmFRq7ZVZFpOygmbJiaLGhaikjINhlQaDidrxmnF2dE14qVm1rOtblabnL7JwMUwe9fRBgHRZO164VJVIm+QbamZnSTSCoOHve/odalcJ0IDi3KFTEsfZGgWWsnLtm2xRaMyF+uYiyIn3SnYXp8qiX/tSRai5yJsXOqJBlDJRZAn6MzqG+Uw+Uh5oC7WBhobZDw6QWS71Y2MOBXH7dYHqS6DqE72o010XCI6X2p2SDXIttcsUREqEBW4VNgeCtWeNufk38koriv5vh4pyV45gtFlA1IrxKdqJTS7p4KmCJRTw8lZxrtv3e2HyOhEiDYadIvXoCjeeuzWkcyrXtlfA1Og0mDJ5ZeVho56QK1Bo7fSf2+A3vnhI/BcAyjxoSR33WKqSBVJCx1kPtbd9FkxZpaVNK1lOc9pFiMmCwk+P/QTakdsA72IkkXwH2oNmCwsvCFdHAiYHbksOYfPjY/5Fw6+xUkzpfWGnWhD7S1vrG/x9dPbHJ9f58/d+R6H8ZL3X9nnv3n0GmfHM5o3k6APopkkuxH4b3b5jdf+DNHBlqSEyX1HmwiruwKLmHIcqRBYkaqWjof5YqSB2KgizoPmT2vwMUil38dUEnzjNN3YjCxxcNaW4IvmjdHAKPBSCJyLXkXVK7dCrkzKNGH0BddKH0eIU+VyIoupPBernGqTED1MdCLYOFWXNR6ft8QPLDFKtPNGa98Qgp3EkC60PBgHO5Zk4fuauakvMyInbx2wfTlmlhes0AWr6/SxpSNalppNyyOM9exMCr7+8Da/9tr3SG3DG199hfujXUZZRWkhavXaSNKaJGqZ15Ymi4NSdNQrevtIjV9pWuKVetA1KOnQlPD7j++yXmaMvpYzXnYGmDnfnuzx+J+Z4b2m9LNjQzOylLdr6rEhfyLgDfW1hjyvSCI9D3nc0DjDcpvSZhnMCTV8h0+1Vt+R9LsShoTyrPo0hRKJMfhEle0l7M5CBYx0p6AqRyw+a6nHnnbckl/fkMYNO2lF1URsypjFZ3doUw3gZu94kiXMi5jtSw3NjGDSaRg9FJJ3w2VkdDHo1KG7krCpNOC82lVU1hEn2wkHuysufu8aZa0lxexU07t94Fh35rohE2WVv1TvO6L9gmIWs/vVpBdwrCfC+tWG6a0l47TifDmifjTR687p5O2zKJxbj90adT0IWjPpRZiUfSjXxII0l4tRx+8BqNcJ0VjV2KVRW5sL8Xzh2hOcN1TOMi8zFtuM8slO70zf7Ka9FhXeY7YSmjusZr9BgwyrfB2sQGTxkdWT2Co/Sl0FBNN6Jt8Xttczyust+VbfY9vGNGWEXamdTOMsPjI8uZhQL1PGDazvONy1ip//7LtMoor3N3v8fx99jqqxlHXEZp7rRdRqF5w0wvSBci3jlSc/a8LvEjzg6lY5XVdIt9K22E1Q
izYGU9f9wpUuHfHCsn2Sa/CxkTDWVJB1kQr1TqtaZOECile+9zXrOqqiQn3e6tayPhqTnFp+58EEc1DRLmOO3p5wXOm4sUGTy6WXFIjdnzjFe6HYJPjW0IwtMvEcjta0P6lq3k1t2T5JiZdBVDWYjq9fgm1liLa6ad5c0+sl2nabc6u/idXAuRl53KhltYqDzZWh3NNSV5uqs0RyAbvfv+xY7DqBTaObidYIpjQ8OZ4RZw3FzZayEaJVMCrOPM3tknxckkQt64czkouIqtDGkzbwsVxM0EjSYKFNVWPOxUKbt0GY2OJONfjCyGUp1mh5TJsEtPIzfUcdIkBLdqb11CNhc8NQzwiK655ItBmkmXgmXzin9ULxxi7TeyYkQC5Fn6Wr+FQespZ6D5KLKASZ9CVfIrWfijYhERLWm+01Q5MrrSBd6DXjBR1rRud5HwFZizmPA2/4MhntIsFY0UqM0zHrR6HRZFvxR+H5euF52BlvmeQl5/NDbVcOIowuCHS993ifnZ0N601K/H7at5V3RNar6NKP9TQw+9eQLhztI0M1vSRDIp2RsO6So8Lz1voa15MFqal5VO1yb3uIEce2jbGiJZZ76wOWacY0Knh9/4SHSc2jk5vEC6GaearrDdE8In1H2HnDsr02CZL4V8inwPZkBEAzdSw+Y3CxxxhP/O0R5V5KO2vBeKgMUmvaN15dZs/KHb3YXOjMEufxnaK492BSXViDIaK+TsKErFkNQhtrH2QFN3EfgU8TsBbatn9O21h8bQKpNJgxG8BAOi2p9mIl3PaimNIvPp1tAu6Sv6NB4JVVNtS9kwtDudnh0Z2MneYDliFtKAk63R14L9yYLDk9m/C4mHIjW/Leq3M239/hYpyRh/etp56466ILYm29mGroMPKx7mwFLpVsnVFrjo2wfmeH5Mwwva8Dq01UgDNew8PvX4NxQ4oGg2q+m1DeaPDWkh8JxUXCahP1GSJVdgvcmyBI6EVwexXJKKSMTsbgdZJKYlV0xpj+XOoxB8kJoe+WEg91FfHFO495Mp6wvpWwl9ZEtiU2jqq1nK9GRFGLtY7ttRaftti8ZbnISc880/c9LrVUu47s9pooapnvT3RHWGhWqgoSIdG6xWUxRqRvh9ayjNOyhHgWRcrt2YLjySH5Ewm2GVqWTpa6m+4Uj7vx6WMNamhUyPWV1854e3mHZK6dee1+xRdee8R+uqFyluU2o55IyGYBYnqzbV3kJHgBCm0O1UTH3tXuunjTdc2GLsNtjctT7Qp2nVWEx1eGzdGYb5R39Hp3gistUljGJ0K8ccQbp6XX2vWbk0sXgPBdrwwBvc5VWb6TIKFrIfeeJlMrkk4lOVqoSGPrDI+2M5jHgdLQzQ8C1yCZlRQ/2fDS9TNujJYYPP/4rS9QLxKS40iV9yPPKLzOVqEU6i67q6LCkZx1bdJoAN8dc3spMdIJ8XYriY9tXwqJto5oq9UGH182DIHOC+1BzeH1BSdPZiQPYqppyGS2nTK47ecCW0DVqj9fslCJkK1NsBtDdqwLa7x5mtfoIg26my8aRDxuHWM2hqIVTmoNIg/GG81UN5YibSjmKcmjWMnSXqgOWtL9LetNQvV+go9D5+2F4LYGySOkTfFJpKXM4PKwfr1iHbLPJA6xDjGeujFUOzGTB0E9P/gISqN8VhebsOh7fGlpzpLAy9POO28dzGpeu33CNC7ZNjEXiWYNu0YGreYILtFsVBekKKE9SAjkLfG4pp0Y2kdKj6lm2rzlrcHElx01nUF6uvAapISGHxdpuTWZ64ZES3EELqxHGoPzwrXJmkevW9zJjLjrZswMTWX7QNW0EGcN8axg00z6c98lSZJJxeZLDl8baLSTX6zHJi0invL+iPFjlbwQn6rNTuiybkZKUG82EdFCg+HLzaoKovpEm6h8ai5VyVv/VNX/w2A++uEBAwYMGDBgwIABH8RzL+F5L+xlW04T1QjqMiq2CsKM76ZcXItIn1gm78PFF9WY
sMkkdHupllCvRSJQXWsoljHSGLK5UysX4/vOHFu5y3pzpBHv737rM/zB5GUtdx6nJHNDm3h4bUOSNpgKfvebn9UdxbRmNNXW+fZGSbNrSXcLfvbmYzZNwvfyu+x81xIvNSNR52FHmHj8uMXMI5K5pkTrmcNHMB6VbKY5yYXBBTNSQjeg+hQpmdEloT4dbDR0l3/FiLT1/Q7WG8EHh0ifRLoLjtAMlJVLYlzw+pHQoylte1neEy0bJGlNlles3JjJ+1oHj7ZKsh7nJXyp5OzBLskiUp0aGwwdCz2WctdQ7Wj7b1RwpdNRs3vSSvCs84weCRuf6nGiu5jQoX+ZKWo97VYv152dDX9w72WuHy74/MExf3AxIjpK+pZal3ja1lD4WE2Au3RtEJLrUuYdSbIryaqJtGYtTGt602sfSPLSavdIft+yecnT5J7ywBEvDdO3odoX6msNSIQpDNG5JTkPmbDAp6jHSsDuPAo709BpVvJ4NKKeqidcR5zvOhBdolo1GNOXr5WIrhk0d5pwcZBjxBNZx6aMqauc5jTDbgz5kVBPobjREC8MzQyu3Tnn7L/Xsj7J2fmWJT8Cu7WUhxFx1HLzpTM21xO2RUz1jYnqrjmjn5drqUz98YR02aqJr1U9orKOOFpNufnlJ8xfzdkUqozYVga+lz4lbtpr6aCaOfFKs7Zm74JbX3qinB6BSVZSNDFvLG+wWme4o4xJJ5kUGkD0+pbgKwbJWj3PipcqiptX9otOzWynbwsJUGGJRYjWNWZSIwK2UFKwj1vGhxvW8wz7Vt5nGbosc0fCjbZOCeGNliVpdFx5YxAXSiMtgXQdbjctPotDBiIMgDAW29xjD0rmUYLkLb6wxKuITZlwv45IT03fdNJ1Ktq45fbugmvZispZLsqc7713k/y7KYHKEmxwRG1VRK/raBO0iNpQru58NeXyN9Ustglz6yU/RMng7nJuCVINpnTKh5m0Wl6eJJRHKS7SrNdoZ8uXDh6zmp3zh9VrgCVZSN9ZWc8iTKndXabV0nD00prVNMOsLN6ETEgCTTgPnXaUaS6lSarG0jZWZT3WQk1EA5yvZ5y3O5iNZqld5iHyvUk3HWUEzWC0edyXq5tMiGODLQRvrWYtRMB47CKi3VPDa99KsMkJ3cCZrlma0Q8kciMqAxAkaJTz6ZDEkb8dkx/pd6l2DPUEisxx/3RXrYQWCdHCEm38ZfY+ZIfaBHzW4sqwdvrwuRZM2hInDZEX2iynjVX6oG0t0dr31ibSONqpNnKUOzaQu0NZMFLNKZfC9D0XLMgCv8toKXT19g7Lw5zDgyXHrzdEc83QxxvB1GpT1q3jTRlhI4fdLynr7FJCwEOeNExGBUnUYsUzjiscwslqzHqb4CNPNRHVwyo6jmWoHqSaTWRS09bK8xWvIs/Kx+3iEOWrSV+pkR8tEjkeTi4muB29CKON1j7rsaZs0/Og4WAs6Xnwt8mdkj0PtdyXzIXoTHk3zmrKf3p9xfZiF3HSa9n0vlPhx9GuJG1RrUdCNI+YflUv6mgD4rW9c1mO2Ry2ZKV6bqUXEG0NTZ6p6esrDWQtadIwiUv2ki3tlw33Dg5oFwmjdyPMKJBXxevAS1raImPyHqQXGli4V4Ts83PKItb2dkFJmE7LPm3qg7EnpCc2+A+FSSxc2ADiHD4KwRLSSxjownsZoHpntN1W6IMpb4zqCm1LfJ7iY9UCktaTRC03pktO4wYXH4RSnJYXt2XC566dULcW98aeKj4bLdFFiQoZrl7yNPsN6VmkNfdMyE5dLxjpYqh2Pc1YA5BcBFurerS30l/YLrFKVvUeKS1HqymvHxzzB2++ztnDayxfT7l764yHyQ5uOwr6WULUEZdnFU0ek8yh62ZLto2miI0gkZJXXawK6aABSVde7rrEQCclW3vSC0+1p4tcfmdF01jq4wk737Vsbhmq6w3xtMQDq402CJjY4T242tDeT0MnJchJwsoJ7ILbbWjyhGRxWfpRH8NG
RfTKFhtEFjuBWNOqU3x6FnH09RvgID8WspUndyHobTVA2e4bIGL6jmd73fJkNuOlG+esxwVn9T7j90IweS+nIGe542CiUUIaPL1sEFTsjJjjrQvE8xrTOGzlKOuING64+PYB13/qiMPJmk0acz4fQ2MuNd/kCtercogR6olot+2F5Xvfu8301hLnDMVZxtqNiS8syYUwCoKmUTD7tXUgGvun1bKjrSM9j3C3W6JJ1ctKeCe4XaFYjkJLNUEawiACo3GBqXOd4GtDntTk12rmuXa+CepXhgfzJCU71eAsOdZ2Sy+iXXZeBW5pLtv8xYgSxUObOJ03ZR2MveNIO2VbYXe24Zc//y2MeH77ySscr66Ri6fYJiTBN7L3oqyV5/PYTHm8mLJ6NCE5t0yPlbgPwey2uWw26I2V4Sk9vl4LyF+WH51Ir84tbduXSHwaI0WttIAkwqPfp/cSTFpGWUXb2EuCdgzFNuG95T63x3PS/S3m/lSPI5T320Q3nd0xFlXMV249wtz2OC987f27NGnEwU9e4Lyw2qYYox2pZWMZZ5XOB85QbFRsUpXoPfG4UgmKe6muQ6GTsjgMrflWOUTxhSW+N6XNIFtBfux0s+s7qoLywDqniHa/RhYxs2+qfoUttNGmmz/aRHrZiF53TvS681G4PsIGb7K7YXOQqMl3pYroeq5jTB2HUpTOVVGhYqe9U0VQ1R/vbVm3o96BobtfDMoJWyRM2q5j8qq48GUALXnLwf6K09EEv4mUOjCribOGcV7SNhHFcqauDV4vJBdDM1LvWnOccfxyBFlLM4OysoyO9DqwhX5XlwosI5onCc2kZfJI/S+9gEthczNlfT7Fxx4zDk00lSU+0rJjGgQ5lY7RKp+sciEohaa26rywV1EuUzVIR2gXwSM3UBCoasTFPU/4jyrhPd8AyuiXXm60UyJZ6ADbXpO+TltP1bLAWYFMIHNUrzSQaxfT9jvaZttpDmHg9mzBw8/DcjalySJ8FIjpIfvire6a460GUNUOtCOnxq7+cudlK8ifgKksyRI2NzVToO3moQ58bqmmEeubMf90k5JmNfvjDXcPL1hMUsp3D1RYMZDjK2e4frDgCbAtc/IjHXmbVYqNHEnaYKOwe/NQoq7opjTYjcGWQZQzhe2+IVlYVW7eNhoIhRfqriWQUOsWExRuhXBhdAOju+3cJe/ChduxxaeWNlX5hYsiR8TTJJrtcEHafjvPeJxPGSU1VVBd73Y93ig5tr1ZMZ4VbG7tqB1H4sAZkqUP3T1Q77SQqn1IvAlcrTBJu1hwjVFTR+dADF48F4sR0d4xyasryvsTeHPG8kstO9MNyyzX7pBej9DhvQqnOqsBeSfkqAGrV0Js43vCPl65BMU1R3qqEg2dcq9OeHrdmTJY1AD7szWPfzpm9rVUMz07huxaTRY3+EmBiIpLNq2lbizNKA2ifEHnamtZujE2bfvFwl/l7Tl1PrdV1Iu9AcqTECVVNpknPRWSuYofmkvKSs/9Ux6YDxkLiN/Oec8LB/srJq/OWWRTzFZ31empML5vaNOUNtPnR1vtdOsmG1O1vb5WN+m3iapbz0YFSwdH37pOO3JIZUhPDWkgk9tK5SRMdUXNOQQD7nqJrw3Z/YTybBdvYXIqfdDoEqhm4CLP5D69QrR2NOni3RkVu0h3wvb9DBc4fH0nb+app442NuQbR7TRzFFbRkRTx3YPRkc6L51djEmzmum4wDmDMY6qiWgaS2UTvV6jLtsYeBShq+dqc0e/MHWO73GkHbDW6H2tC6Kbalq+KWNiaUlNw1625SjTbjJj3WXgFPg+3oDbRqxWMfn9iN156I4NjgbifZ/d0ay/78d0JzBog11LvGr7zqT+OmoBY3VxKq90KxkTNnBGSciRLsBdV5gvLfP5SIV95TJ4lqOUdzfXaF425GkN5wTtnsDLWrfK07Sq3F9XEY/XM/K45u74gt3Zhm0W89OHD3qdtg7OG4w4nDd87eQ2vjW41NN4
JZgfzjYwg+OLw95wPFoGHl7qKXdCELDTMn6oEh7duOzmOm2qCRptgWA/2d3C7hb3YLfXO6vHoTs11UAoKoJSfqrrkgHl4Yi6L2gQIozTivYzCy7SCdFGG1Y6Ze0+yEx1fos20gcuSZCKMA3Ecc0ma3FxrGrkYYwhHtdYkvPO3Nz3gXKnyq+ZIRVfjWzLn339LYo2pnGGnaQgMQ25rTmrRvzTV3+CeKnXSrQJIpd3KkzS4i8SsqNIBYTbkDEcCbbWbGavxxh7zBzyBxHZsQaEoBxg74TpWzaQ6C95jqa5/C06dwNbtDo31S3tOAEPcdpQlxEm8tQzHzoRBVMZmswQzy00MaZVTqIAVPWPVgYqbBAp5im2kr4Tq/PRanJRV3jrKCsNsqb7a/7ZO29zM51Te8vfsz/N+e0RNKLq1pXw2bjkz965x5ODCW/evMZ2m1A/zql3tIUx2gi2FOKlBknNWNu5fWQ0fojQTEzIWpm2a3HsUpLQdZCNjh2jY8hPDOX7I3wEj67NaG9WRFlN2qpWSLJy1GNLcZIyulHz0vVzzicF54+mqj9jPeZ7Y4qJpkglcX24K7UhPTPYLSCwva7eW21iyC4iEoG4WxRjG4Q0L8mq4hzeOZ3YGqetmTq/hRPhL3fE3oeOi0tiqLdCVcYczXNM0jIe68IMSmTECcfv7xHNKiZWS1Gdt1CX7djZ2/DK7jnJrz5mGpUclxO+Gb9CtDYho+OJ90ps5FjfTlTltqLflXkb9KUqJVN7ayBxtI3h3mKfn7n1gEc7M9556wbz+zswqZHEY2MtZfjKIgL1NsZYr/osTSdmp6RsU7fQaGeWeCXOukgodwR3raJqU+aRobjZakq9MCQXmtlrMy17bE5HNI1l72DF4ucdPE6JLwzLaMqyFznrAl0gdkwIi34K7UGNTTXaaTdaKi13hexC7TWiTds3D7hYM4xSt6rNs9aMmGk8zawlvYh64U0XEVS90UxkCDzaTNWHndVs4vjrGacvJeR3l1x76RzvhU0ZszrPic5ibKFZYXFQ7BnijRCvHDax/XiRFtUFqrXc0jaWTZmQvL6g/eYOyYVey9FWJ291TQ9aUAY8oqTtVks+050tadxwlk+Q9zWVXx5oyVT2S3Z318yykkWRMh/tq59X1fnxpUSFD5ICUO4YmhGMHwrZmesXC0Qz0cuXw/VbqwcZgGyUYFzcrWnzSG0gTlPqbUYxcRpDBxd6WiE7Nb1Yr3gVBe7LeN14C+W6vqRnNXgnsuB0LhLRElknd2BLWB6N+fvrn+HW9Qttbik1y2Qjp2oNjeqpORvmKSdIZcjOAtUB+jZv769sBERoWukD9k7cGEy4Nk1PE+hsnjpV9UurADRT5lwoLXepWi3rtbnVIG9jkaXF5S5YOOnnmVIwdcSDaJ90UpJGkGy749HgumtC8Qa8g8ffuIE4eOelA/JRSV1b/ruHr5JELWmkna7OS79hsUYzor4OdjOR145v8dyZzPnML5wSmRbnDReVSrfMz8esRCkB+c0Vi50Et44wG9UskkZ1nryxeKsdWz7SMWrEc2u24P1fFOZrfSwbVcRRSxY3nM/HtKcp+UOLaYXsRMguXP+dWYMxHrsynC9H3NxdwhfX1M4w3+TUtaVtDTvTDXnckEYND893WGRjbCmhozQiDV3PmyJVX8DMU+5KT3HI84po7LhoBVsEwUGJdJNUqv5cpzpOYXhyMtPvErqKH61ntM6QWP3d2xsV3qiNWjPSpSyfFfzcnfdw3vA7776Cf5hjKtUl29xQIc6uWSpZe+y0opnUNCcpzUTAS1+incy2LD8bB11GFbd1scdPWkzc4s8TwAbLsxRbOOJNo16qpRCnNdUyoa01e+wSTyNQ7iuVJD+LdYyGZiiJDOZHUcZAjIdlFKJUzQh1u6h6B7786kMezHdYWs+1vSVf2X/MT03e50k9Y9WkGPGIdXhMH9w8WO1wPVvx+ckTbmZLHhdTfn/zKq426mllum6wYDuSeuJZ
yfYw1tq6k6C7pGlHlzjt/DloqNbKrQLNVPVdXBIED1eQnUD9Xsr2ZtJ/106lOdoIj+dTsqTm5nTJ/nij3STiuX+WYgoNBF0e0sto3Xp7t0WyFrGeJG0otzHtOlWbmDbY4YTdoS1dSAEbpGyh40QJ2l7bmPDj6/ZPy3SmD558ZEM3TSfaKNiopalj/DqhTboOD/Xzs6OG7Os59dRqoCmh7CY+JGiEw8man969z2fTIwqf8E5xyDcmd6mTEOhVQp40/OytB3w3v87FYgTv5cFYUkjm0mcV4pA9sSO1N3jy3WtMfrLSDqPXPffuX4O5MjyakafdabGCdhI2Gjh1u54o2ON05pri1ZYCMRosxLqr++LLj2nvGl6enDOOVA1/2+pnHG1mvHe2x2aaI7WhOstoJpY71y9YTlIuzsbIRifcaNNxD4IeS/q0Av54d8skK4mM48H6gCbXrGd1aoi2PnAXpJ/MXGoxqDdcdmL6Rd9Ma7x05dJuVye96m49E8o9j8sc9STq9ZRGR6otUz7a4fgzNfn+ljt7c9hdsL0bU7WW1TaleHtKk6uNwqT1iI9xWxvKcNDkERGN+lYZz3qd8dmbxzz8Ka+TvhfWZYSvDMnjmGitHIgu7W5q6bOA2yoiT2q+eOcxFwc52zpif7Qlj2qSQDyqXMS2jsMuXBcLUwcrDKO8inityurFNZU3idZBdy54R9rSM3lfN0xtql1VnQDfZp4Tz0rcuCGNG81+n+YkJ1E/B3ScO2+VT9HGQjNJei00qTWw6LpXpZMdscEsuA0K34lOw1rCMX2A1+S6mZr9Xsbjz1zH71Uka6GsQ5dQ6P43lTbxtt0GSTwuMrhIO5ermWH9ksOPWvK9LVHUIkDTGtrWEMdqTdO0hvOHE+K5ITuNmY5MX7I1oRyiKtld5pqeDiB123cCEzYo1URbzNWSSVREdau/V98JWcP4ewnlbtxbj0BXJvTY0AllGrBxS3yuIp9FMWL1ikXOYppjg6uh7Pag4X/joM5h88VKTZKDC4Y0wpPTGVVj+cWb7zG2Jc4LL+dnZLbm23VEMY6wa8M4q1Rzqk55/2IXgLKKKLcTDaAx2DLGxYLdOtbrjHK84RfuvEfZBteHsJFqvOE0K3iUzSjXM900lkK07cxtg8xA6LIt5xkPnZBlNWnUsjPa4nwI0sYLEtOyqDNEtIztYo+5oiDsDRRnGWbU0En/dBvUcVpxOFqTJTXnD6/T5soz7sWIG4P3XWAncJyy+toNVhK4f2FDvs6huKbXVid+2UmKbOcZq+spt/MFP/fye3w9vc12k+AWMc2k29BKHzhmecW16YqLqVY+IutUQdwZZlnB7a8sAGi9UacDo24HjTN8M73FZjNRikhkNWN3bsLa6IltSzypqJcpffe513EPUI8t0aalzSIdv5jeteOj8Jw5UJ7Jzpa18bSbjGoqvXeZs6p2+kv797iXHRKZlq+MH1J7y9dXL/Gti5usq4TFwynJmSpWpxd6wh+/c8Bvns64tr/k5nhJFtXYrMWtIuxGiJfS17g7DaI/89J93t3Zo6wjkqjlcLTW9usy42g+pUxG/PyX3ub0tTEnqzHLixEsI9ITdUk3oXTRpWRtpY7ZtqAP2KpdTzNzNA8nbD2cRbvk1zcY47kxW/Ly549YlglFFeOCsSdeSLOKa9M1k6RkXSc8Op/hV1EveuYiHWg+uiz1WBu8gELU3LViahnD9CT6Dia2T5PLRdWefaytoMZolGgqJXC6BFyrwV0+KvHkRJvL8qJvlFzYJsqhquqYTZvwj8+/TNHGbJoEM9fv0Cn5FnsJ+8maf/Xlr/Fusc8/Wn+FJrjBO6t1tmRlAtEQRqMN06zk+Ptj3nrzFu/uFtw5mPPa3WMenu9QHOda3Lee2bigqCMYwWal9Xl8Zz0gkAYydmT63V9nfdDm8PP77zIyFUYcT6oZa58yiSqmUcFPTB7x98ufpqktzToGB+ZBxkOzy3hc8NLtM7Z1TNNqKVQEMtsS
Ry1FHVEsd4kXGhRWZcRaPHlSYyc1/iy6zHiG86KWEnr+XLABiQoXrn8tzU6mBeuDlDYLysG7LSSOdFaSJA07mXKyijriLJ5BbTAbQ7RVAm28hNG9mPo04t7LMVnQsLLGE0UtZVA970rtXoKzu0VL85GERVuI45ZinvLOyT4vH6rXo/PCo8WMzSalmei002Yq2WAlNCH4MA88GHOU57R3DTcmS/ZzFXlc1wmPihnzVUa9SjDLiOxY25JtMCbOT5q+ZJlkhtVdwR9UNItMxUOD2alqz2j2Rpxm5cRFpK2HaY14oT7LIG8xVnjp4IJ6tmJ+M8N5UVWKVgn/xWmOqSNcAlGR9Cal1oasdihRdtYVvrvmwlj10dPlPRvuq3c8PlPSoCkEd54QrYWyMjjXNTaEDGDYI2FCdm0Km1vQ7Dh27s75mb1TJnHJtUS7UVoMjbPU3hCLo/aGZZ3xh+4uRZ7ikhhTW6UilPR6bqYxuDKYlof5xAQjZOULhsaAxmvjT6rzQhfUdCK63kA76srJ0jebtEnY67WX79ONhf3ZhqWfhFKjfrYtVOjTVD5IH3TBV5ifa6A2SGA4m0ZwjeBPU87PE/7h2QQTOlZG44LdvCCKWqK1ITkXzi5UZ8wap9duGO9ZkFqIiqBrFul3b1cR79aHrA5TItvStDY0dETKmRPwTkg3ypPNTx3ZeavZ/4524ZVigvHwzph6I1Q2NCDFHmY1R9EMV1m1DBOYPDa9h+ropAnOGwapDa60xFWglVQaQG6qmAf1Do1TjpAtukrQpRREX5LOWqSKNFvUBVChDGtqXXfsY6NSIl0jUALRccwfuld5Y7fgs9dOOJysWSc1J8tdFdRdXZaMTe3ZrhOO3JQ4anHO0ARBXu8F5zPGSd2fC4jYNjGbOmZTJjR1RBToLia4hsTrVnWxUlhvU3YmBZu4ZbPIwAmuiXoJFmdDw465DEC5+vcPwfPNQDl4de8cs+f5enuXepX2C7BLPN56nlRTfnJ6H4vnUbXDW+tr/MG7L9NWSu6M57bXPTG1noDxOxHRJmI1zvnW7jWawxrZWsYPDKYkqJWrloitlJsUieMr+4+fOr7UNLiRkEU1963jerridj6n2olY3UhY1Sn3zg9YbxNca3Gt1vfNWo+pzRzJhS780mq5a3x7yep4THQWMXlPcIkaoL79yoh8T0sVSdTSOE8cN3qxOOFoPuX9Yk8v/uOYfCOkc8hOW1XU7jRDOiKiR7vLQjeeC92G3cSt3APfm4KqvhC9hx4m7DS3DeIS8rSi3TPUowiOsj4oxGt26fgXhc1FDm8mSEi16sRmiQrHg/v7/MZyTP3OJOyOYHJf+WTilRewzlL+cf4Fvnj9iIN0A05Jm92k2tkrgAa9sW15aXrBw1t75G+lmHcmvH8wJv3MgkleUthMLWfaiOUkJYqckoZFtVuigp7/AvSD3TRq1+BN0BQq4FuLW5yXI957sk+zSDS4jDzkLZ9/+YidtGCVpTRFBI2a9kZfy6lGOQ8+u2V/T3d3cRSEO8OWOzK6q7NVMIt9kLMxOavMYWY1xkF6AemiJdq0/TEZYwJXK1j5FI7ECNFWrTT2xxu+8EtP2DQJidEGh1h0B2fCtr5xFodQ7h/ThDTYW5875PxkSvwkDgubx97LcNucMpS3XepJN6oFlayUU2OLtl+oTAXxukFqh5eYSV5SVxH2mxPe3h/hcv3O8bnF1kJSaCdYsnTEy7Y/x6ZsaXMhWkH8OGJ775A3bu3jUke0tNhCm0hyB+PQFWlqXThVcDZo6oTrPFmpjUU+Lmm/VHNxV/Wd7KjB2FZLBG+PyE4M8VIXQjuy7OwuEfFs3zlA2ohyL+btxpKPKqZ50Sutt1aomqi/jnw/9vR3ia4ESS7SzBxccky8mMsyuoQN0BVtrHakhruLz1nstQLfGJpFCpGDxlz+/uGadjGMDjbMRgWjL9XcyJfktsaIZ15nnJVj3ry4RtVYjKBlLmeo6wjnlGfSnGXEC8Po
kTZ9REHEtCf7JhoMdfNNt/BLqw6DXiSk56W/nprDmihrqIuIjY+DkKaQvbTk5s6SRZFxejKFt5TaIS3YWIVv+2460e7to1capNTv7hstAZc7mvWINvQdql0my1uw62AWHUr4atwN8cqQvGkv5wTJefSSlsMnp1peKu9nVL+d0yaQe5TKIJpxTOeOZNUpaofgJ/XYJwnlNw6pgl9rCqQQSr+aDe/WrnTuiFa1dj9mtu9+lByirEYexmSnXaChzQn1JLsUnwS2N4RkedkEI43Om23u8eMGCt2Y21IFTOOVsDyaYNaWeCWhLO9DJ60Ke9pt0zfxxJOaJvK0aRqqRb7ndNXjjiweNOEa5bDVwZJn9u0YfMyb1yZUBy1SGj33p57s3KkgqehY9mcpHOeUsWpLdVpNbQoXdxzlkVad2uQyOLaBuJ5OlVtpaj0v0VZtlZpZRptZ2uMRJ6OEKG0RExpJUodLBRuSBEqgp9/I9GvlR+D5ZqCA9y922R1t8ZXpyazidTCbRvjPvvMVrh8s2JQJ6+/vgNMM0mitC3Qy933UGpWqkJ3OdTfkYhg/ENospsmE/NT1Niigg8oWjuwk5r/9wy/o7q4TsCy0d35yZ8E0KymKmP/3t39CJ1nryEYVadwQRy07kwLgB2ruVRNRf2OHehKcuxNHFjdsJzUNsPYR6ZkO4vzdmOTrMW0MxbVwscdeyw+NkD+07JzrbrnNAl8mRTsMKl1YO1VtlxhM0aqIpkM98ZxVtWgPZl32rcZPXRSxxRRVz82QTYH1HtOMiK3j1t6C+TajjDNst+PYGNZVwucOTngv2mXz/kHfzWO3Qj3SxWX8lsXFCbsP9VwhkCzbIJrmqbZKkHfvzfjWtSnVjYboPGL/W+HcFg7TeJKLus8onJ9PeNt4JrtbNgdaBspOhHa1wzz3RIJ6bRmhendCHXlsIWTBqDhZetJ5S7yo1ew1qO5qBsD2BNb8xPG1//Z1shPh4KHribbq/WZ45ysvMfrKOcY4bt46p2osZ9mU9D0lh6ffyZlPsjDIJSjn6i6n2vHkJ0KydNRjCdIAAheGdmFJz4T82JE9KTCbimamXMDWR9iixa5KLZm0CZBgSiWvvn+yS71n+yTjQ7dDWUes1hltY3SnasDkDfu7a6xx7OcbXt65YJTUPMmnNJsg+9BGiFOOVbLpyrPh+7tL0Ukt7ah8RXy2RSUwUpyHfFTSSM70nsGLcoS6cmJnVpssWqK1utK7yGDqlibT9n1Ta8C28x1BvO2V19tUu3brMVQ7MH1HS3ZRodeM7cyEBeJIsJWlLGNu7C/wwVMuj2tsCC6/295ga3VHikBUGiLrSKOGZa6LTTI3RI9zvOQs7A5wWYICmG0gPfOkC0d+VKiflvOYZfDUiiySxqGr7TJA6hHuNzYIR24r8Founo4LDr9yhojn3vEB5UuigVhh+2621ulCEhWwrS1Na4nTgosq59hNePd4D+6NNVAQQudxsDBqYdxlUULgYWtPdtYQrRts0fTnBu9ppimm1gYCGocpa9pZhllVoSPY9AT5dJGw3Vps1jCbbijSmHpUkVhPVUTM0prYtNyZzikbS5kmWlYPGY5qanoisXjPvMy48+oJjTO0znB6NqG9U5BONB21LeIuPdpvRF1tMOexbqA2Gvx58dT7jnYC9VjnIQjVhK0QrzTb0vlImlqzry5Rj1IAu/X9oiu1w3gdJ8mootwx+JM4cMm0642Qdeu6H23ffBPGUePwtWCDabL3hjRtWN2uqXctphTlX15pDmGiBPJmpBt33UBottNuGkytlJJ4blXSY6XrZbIQkicRtpKem9g9Fq21E9Qudc1oRzF5VjPZXTH/53LKQGdJg69gZh3bKmYdzXouYic8XO842pFolnAu5EcRttTgK9pqMJcs65AlsnQC1PFas2W9NIkT4qUhmV964HYZsGqm62PXTIPnciO1KZFJCkaTM/n3U9rc40YerMfnjnrq8GKoxkJ2SqjUhDmuagYh
zQEDBgwYMGDAgD9pPOcSnmf5vT0Wt5R8m535XoepzTWtaqqcuc0xNew/8lQzTS8mK+3I6LJOhM4ZtRhIEK9dauIdfqUE5Kh0qm3h0F2hB9M48hN1s05WXVeW6mMgwurhHk92PPFGyE6CN1Pj2VzP2U60Du1yjWjNuMZYT5rVpHFNbFvKRL+PGi4atlXM3cML6n3D8nrK6kJtXfzGgkQq4NgfR9ciq/yQzW2N5NuJw6cOs7asHyakixZbuCs+Ypp16dtQnVO+Su16DRoRUZ0ZeGr3K2WtFi6doXCj7300HxMnWl50qdddmoNkYTh+uIu/JerDFHnsUlPpycKTXbTYbcv4oaFNIF12Eb06cINuEmMJnYojQ7wUqrMYU0N2rhmEeKGZJ7NttCQTGezDKcerSP3dXl5RVxHrRUJyrOJs9TgQegO3wtQSrGjCrrvQ0pctGm0v78sPwcaj1E7E7NwzezsiXmmKvvesCpYf07djLpI92nGLvbNgnFa8eveEB/kO5TLFzq3qn1S6u+va78kgKrodmlCPhfJ6lzHxRPMoaBGFrI/VrIipWlofaZkkSFB0mUQlHGtH57EdXdnlaoZh91jT0l23VZvGbHZy6qnnycSR3Nwwyiqmky1+XFA1Ees4p/FQtqKZGSB90pnTXh5XPwab0MFp9TpeLEfko5LycwXlPEZq3YWqEakje2LhxJNZzf75jncRjKy5XVBeNxRbi1QGU2pZyEfqM2YnDVHcME4aNsWuJh0eCc5acEnQDdMxHW1gc5zxYBtp55v4vpRmrMdaR5P7wKPSbMO6SIjGLfa1FdtFimws7VrLtPGiP12a9U40M9zkEJXdHKbZGglm1R60I7Ybc10m6gNZKGmCtIgLBsuFUFQxiyhllpRc21lxasYUi1Q14rpqfK3zlItg/WDEaTTizO8rf7GG7FRIzzVj0nFsTK0ZXkQuM4u9MKga+ZqiRspWFRhCSczGOm6kUbkUqhppEv2uziE+AjRNks5bkrlle5ZytomIxzVNZalLzfafPJ5xYmZcuzEnjVoq33FKIQmdzKZyyrGJhIeP9pjsbQAthefjkrY1JFGDCSX+rlTuvNC0htLEOBvrmAjlHpeC2a3IRyVJ1FA1EZFx1K2l2Ca0qwiXKF+zvVFwvqdcB4kdJg6D+VFKdWIYPwgnIWQq8rRmdKviPJ7quIwdNmoxxquOaivU25joKAm8L4upY+w2iNB2GcrKEBnHtZtzYtvSOu3K8x68M4hxRJFjFDdcnE7YFjHNWDM9XdYMh17r4Vq9amBc7ziq1EHkiY/iIOBplcO4bjBb03+vooyVJzxZK7nbOAweh+C8MIprTj8D62WGb4xaVjnID7bsjLdsyoTlIodFjN2YoP8ITWkwoSGiTZWruJ6Z3oBdL7iOaOcp76DzURPmCeOx05o4bik3MW2WaieityQrIb7IVZDYCfFOQbW0fdkbL/hG8InylFcvWaIyJt44olWrGazOfukj8NxLeNN7wrrJelFGCOWfQFYE6XkNycphWunZ/eqirqKMdtuldgmMeiW0ei492aqJpjXjjdP6MUAFo+OG7X6kpDfAO/qSxOSBgQf0xqLamg/jR67XD2oTEzzg9ORXOyMu7jjcrCEJzvXphSc7NqyzMbujLdOk1K6xAz3uoo05enlKUUeIEzLrsGEQJ5EGLlmkadJtrd1Q5/Mx1SzHVga7dX26Ub23fEh/Ow2cWt9PxB18EqupMOhjdQNtC9b2xEGAZFFjvjeGAi5eacicmgHbCjgRknnMxdkB7c2SyBAc7yHeONLTElM2uMRQ7GmXXtdl2U0QLpwfW3iKHSEJIn9R6XvBt3gZutUigw8t4ZP3hWYUsSkmzD57weF0TXqjYf5Sxun5BFcrT04iR5op4bCuLfUmpskT4o0gzhIvwI00vd2RobsgwLQeH1Ssez9D6UjSOnDTpWf2fcFHEZuzPS6uN7zy6jE/dechzgsn2wnnmxyAbRUhogt1ZB2ZeJZv
7eKNoTj03Hn1BIDWC4tNRiEzshOhnsTYWMtu1C78fgaXRFoqsUI9sUQbVWqfvuN7kVINojRo7UrY3mi5xq89yUqC+a2hfn/KesdT3a7ZPVgxzQt2RlttT7YNRjy1s7yVXic/SvVE+8B1CJsS8REu15KTrRy8l7PcTfjc64+wdx21s5RNxH6+YdvEfH/nGkhGujBIY7FGzZI7YnKSNKTjhv3bulhqidxoJ41p++6mqrUsDluiuaVNlJNBR6ZG/bZs4Zm8Z0jmGqC32eVc1KbC8jWHLYXsXL3sbOEoHo2pDiK+ePuI9V7CqkxJo4ZtHVFUsV5jQZDTGIdvDefHY7JHEdlZKNU5j+RpX7KTTmVcLqUocA5vLdK2unkJXbHSOnzTkp4ZmmLGmZ/x+G5FNqkozjOi80h5RGsNnNKFBv9tKuSPDLZUU+6O1mDqoKkT5Ay6MrqXQOZOlFtja9/PnbZUVXVC8ORjq3IPm6rfePRig41qVwH4ulEdqzwlOS8ZP7HUUwtY2jQhbnTebzN6zs7J8oDo+haDEpnt1jM6aUlPS8R5qp0EaYX8+wm+0Vb57cRT3mqQylC1o36cdseL0/Z1WwbP5lKDaR8pidrVBueEa2NtHspsTWYbGm9YVBnfK+8Si+HwcMn18arvpItCDe272XW2ZkoyF9KlrlneKKfsYLzh5mtLIuP6rtGuey4yTt8/uUF5lFLONIhysb5H5wGanERc+Bmzm0v28w2J1UanxOiYdF5w3tB4Qxy1HDe7SBv15cjOr45FHAIqiNaOZFHjX4pIbm6YjQuuj1d8f3bI1k2Il13QFGGC64NLLc2jhNM453TcIJHTGL8RfKvBkskbxkHvTuIWElTkMmq5Nlozmp2T3GhY1SnrOuXJasKGfdXREpVeaTLh8OCcLFJphm7uyWyN80LRxsziAodQtZc+fd1vuqpTvr18mXZrqBZ6YZeHOW2mTUWTcUHyE2u2VUzd2D7Q7hq3ChtTHVmiQpQqUbbIaouXXT4Kzz2AMq0nuVASZT3RWnCbdORiz+aWkv2ijWqRQOiWqYRyVxV1vbGMjyBaV5hNzeZAF6tmpEGUC1mgaqrGpflJ6L6xQnami+HmprbDmiDC5xJd0Mtd/ez0AooD5afES9+3wfcifBGMjhzVRCek9Njg5skVewdPdupxUcST+Q0epR6XO26/esIkrvipvQf8/N67ZKbGiMPiiaVl4xLOmjGH8ZJVm3F/u8dXj29zcTHGbbQ2n140xPNCpQtiG+wwhDaPic+3qiMTtFmkrHTS9h4/y5Gq1smuu7+sYGeC1I3+PRtjNnWoB0NzbLVO3gW7Rgfj6LFQLzI62wS8unGbUrlFbWYod5QUG69FjUK3nvxUd1r1SHCxZXtD4AjKPaEphVQcbSLkT9COQE+v6JydO7bWML1nWBd7zCeOV770mH/u1luM7lb8/vnLPFpO2ZYJnzk85Vau6YLjYsI3s9vU9zNMY7AnC8pXD3vxOrWyaDFF4JTFhvUtnYDzY72m1BJARd1GR556KqpZdA/KecyDs1ucfmHOT1x/zK/f+jaH8ZJEGtZOtWBiaSldTIvw19tfoSxmNFPl2hSNLsy/cOc9/svjL+EiVUnvbU7KGm9TfCS4PMJsG9pRRLljSJaq1G5a2NyQkGWCeHXZKdopu8dLJeR2PA4XdVYwwvhBTLm7z3rqqW/UxKOaybhglNS0XhjvFNSTtNcW8pEgW4dLLeXUkI1jTKnCmvFSiLYR78wO+OXX3ia3oXtGPBdVzvZGzMPVNUaPDMlSoAwBn/ckC1i+O6Hy0HzW8MreObO4YNWkVK3l0WLG+s1d7a4SkKkjmV+KoMYLFXisd2K2ByaozKuKOh42t/Q+afR3mr1lqKdPB13xwsAi51vbO4x2t9zeXfAze/cZ2YrDaEWLkImSs5/UM97dHvDG+AaPJjvwT23ouuMyw1TVSFGBNSqcGed6e7OF2USzN0Wpht5J3GeKsxNPua9ZL1umSJuS
m0CPktAZG6l+VpsGY+9e603HajULGc+1zqcupe9UUjFI+qx3vPZBL0ywpSV9v1SdKu+pboyR1pPcP0dah9ufaBCImgd3wryy2uI3W7h5DXu6JB9FnH0hVy2f+556ImE+13HX5MLokaFejZQv6OidI0wZjNGDjEnHH3Ix5E8Eu1UZmnhxyY0pDjq+YqgeVLB8RblCtgpduLEnfpBQRQlvu5mSysVTXm/5/OsPmSYqgW1q4Xw+pm4s06ykai1ni5HKPpjA05t7siclPjbU44j1/SlrpmS31qRxQ+uF5fkISl30452SJG1oVxGjE9PLk7SJRn4GkMIxvQfto4ji0R5vZ7t9FrkdOcxBiassrCPs0mA/s8JsLOOHel5Hj2uSkzWmTkmP1SAchHhZEz+6wH4up9okHK8TmgPDdFRwlo+Jt8FM2hMkOPS3z460ezDaaAYnO9MMeJup2baPYpo0J+vkiKYqDrqopnzr7R3ctOULn33IZ6ankC95dXrKPzqaInXM5KG6T7SJsFjmOGe4trfUgDIEigCNM6yalEhaKhdRt5ayjThZjSmLmC/deYxUKrOSnziyM+VXtllCtDKcn074tS9/h9xWLJuMso24qHLeOjqk3sZIq8F2ctGQvBs2tY+fIO7WR8Yz4q+Kov0pQ0SWwHef2wcOeNE4BE5e9EEMeC4YzvWnB8O5/nTh036+X/HeX/uwB553Buq73vufe86fOeAFQUR+bzjfnw4M5/rTg+Fcf7ownO8fjqELb8CAAQMGDBgw4BkxBFADBgwYMGDAgAHPiOcdQP215/x5A14shvP96cFwrj89GM71pwvD+f4heK4k8gEDBgwYMGDAgE8ChhLegAEDBgwYMGDAM2IIoAYMGDBgwIABA54Rzy2AEpFfF5HvishbIvJXntfnDvjTgYi8JCL/RETeEJFvichfDvfvi8hvicib4f+9K6/5q+H8f1dE/oUXd/QD/jgQESsifygivxFuD+f6EwgR2RWRvysi3wnj+5eHc/3JhYj8r8Ic/k0R+Y9EJBvO98fDcwmgRMQC/wHwLwJfBv51Efny8/jsAX9qaIB/13v/JeCXgL8UzulfAf5z7/3rwH8ebhMe+wvATwC/Dvxfw3Ux4McHfxl448rt4Vx/MvF/Bv6h9/6LwE+j53w4159AiMgd4H8J/Jz3/iuo881fYDjfHwvPKwP1C8Bb3vu3vfcV8LeBP/+cPnvAnwK894+8938Q/l6ik+wd9Lz+zfC0vwn8T8Lffx7429770nt/D3gLvS4G/BhARO4C/yPgr1+5ezjXnzCIyAz4Z4H/EMB7X3nvLxjO9ScZEZCLSASMgIcM5/tj4XkFUHeA96/cvh/uG/AJgIi8Cvws8NvADe/9I9AgC7genjZcAz/e+D8B/2vUqrXDcK4/efgMcAz8P0K59q+LyJjhXH8i4b1/APwfgfeAR8Dce/+PGM73x8LzCqDkQ+4b9BM+ARCRCfAfA/+O937xUU/9kPuGa+DHACLyLwNPvPe//3Ff8iH3Def6xwMR8GeA/5v3/meBNaF880MwnOsfYwRu058HXgNuA2MR+Tc+6iUfct+n9nw/rwDqPvDSldt30TThgB9jiEiMBk9/y3v/98LdRyJyKzx+C3gS7h+ugR9f/Arwr4jIO2j5/c+JyP+L4Vx/EnEfuO+9/+1w+++iAdVwrj+Z+DXgnvf+2HtfA38P+O8znO+PhecVQP0u8LqIvCYiCUpC+wfP6bMH/ClARATlSbzhvf/3rzz0D4C/GP7+i8B/euX+vyAiqYi8BrwO/M7zOt4Bf3x47/+q9/6u9/5VdOz+F977f4PhXH/i4L1/DLwvIl8Id/0q8G2Gc/1JxXvAL4nIKMzpv4ryWYfz/TEQPY8P8d43IvJvA7+Jsvz/hvf+W8/jswf8qeFXgH8T+IaIfDXc978F/vfA3xGRfwsdnP8agPf+WyLyd9DJuAH+kve+fe5HPeBPEsO5/mTifwH8rbDZfRv4n6Gb7eFc
f8Lgvf9tEfm7wB+g5+8PUeuWCcP5/iMxWLkMGDBgwIABAwY8IwYl8gEDBgwYMGDAgGfEEEANGDBgwIABAwY8I4YAasCAAQMGDBgw4BnxXEjkA360cCg3fUWlN0RlPZ4S95AfuKd/Hn/k837gjw+5KR+iJiIf+rKP/57gP+o9fshrftjn+md+n+5++XBRlI/8Xh/43D/GZ/s/4vGP89gf+zt/4LEf+P4f9bqP8Rz/xz2mj3yO/6Nf/7Ef8z/w2A99qVz98wevlA8bZleu7B/y/pfv88HXX/2MDw6jD32s+1t+2GM/7LP8x3reD37mB173A5/rP/oYuvf4kM+QH/r8Z//cD77nh//tn/qN5cozfvjrPvicy3t+/+vlb3rvf50BP7IYAqhPISoqftH8DxEjIJqEvPo3RkAEMeG2CDz1dxjqYvrnfujz5MpjTz3P9O/xUc/zIpojlQ889+pjV+73/Xvw9PPkyu2nHrv63k+/Z/+6K8/rF3KhP3597Ic/7/Jvefq55gdfd/X9Pvw9Pvh5HzzGH/a8D/n7wx7j473HBx/7qOP9gdt8jON46r39R36W/vOXr+PDnuef+qyrr7n8Lpe35YOv64/98rbID/599XXdgqyX5tX3vlxk5QPPM0/d9leGxOX9JgQL5srzPvi34QcfMx/8m4/7mLv8+wP32yufdfV5Fv/0bfGYIGBvxGOv/i2ufw8rDiPuqfewV97ffsh7dK+34XUGPS59D/fU664eh736fuH1/Wfh+vez4Tv373HlN7D4K8cXHgvn1grYcKYNYEUw4bblyt8imPAqg2DF9LftrTcPGfAjjaGEN2DAgAEDBgwY8IwYAqgBAwYMGDBgwIBnxBBADRgwYMCAAQMGPCOGAGrAgAEDBgwYMOAZMQRQAwYMGDBgwIABz4ghgBowYMCAAQMGDHhGDAHUgAEDBgwYMGDAM2IIoAYMGDBgwIABA54RQwA1YMCAAQMGDBjwjBgCqAEDBgwYMGDAgGfEEEANGDBgwIABAwY8I4YAasCAAQMGDBgw4BkxBFADBgwYMGDAgAHPiCGAGjBgwIABAwYMeEYMAdSAAQMGDBgwYMAzYgigBgwYMGDAgAEDnhHivX/RxzDgOUNE/iFw+IwvOwRO/hQO58cRw2/xNIbf42kMv8clht/iaTzL73Hivf/1P82DGfD/H4YAasDHgoj8nvf+5170cfwoYPgtnsbwezyN4fe4xPBbPI3h9/hkYSjhDRgwYMCAAQMGPCOGAGrAgAEDBgwYMOAZMQRQAz4u/tqLPoAfIQy/xdMYfo+nMfwelxh+i6cx/B6fIAwcqAEDBgwYMGDAgGfEkIEaMGDAgAEDBgx4RgwB1ICPDRH5P4jId0Tk6yLy90Vk90Uf04uCiPxrIvItEXEi8qnsqhGRXxeR74rIWyLyV1708bxoiMjfEJEnIvLNF30sLxoi8pKI/BMReSOMk7/8oo/pRUFEMhH5HRH5Wvgt/ncv+pgG/MlgCKAGPAt+C/iK9/6ngO8Bf/UFH8+LxDeB/ynwX73oA3kREBEL/AfAvwh8GfjXReTLL/aoXjj+n8Cg26NogH/Xe/8l4JeAv/Qpvj5K4M95738a+Bng10Xkl17sIQ34k8AQQA342PDe/yPvfRNu/nfA3Rd5PC8S3vs3vPfffdHH8QLxC8Bb3vu3vfcV8LeBP/+Cj+mFwnv/XwFnL/o4fhTgvX/kvf+D8PcSeAO482KP6sXAK1bhZhz+DeTjTwCGAGrAHxf/c+D/86IPYsALwx3g/Su37/MpXSAHfDRE5FXgZ4HffsGH8sIgIlZEvgo8AX7Le/+p/S0+SYhe9AEM+NGCiPxj4OaHPPTvee//0/Ccfw9N0f+t53lszxsf57f4FEM+5L5hVz3gKYjIBPiPgX/He7940cfzouC9b4GfCbzRvy8iX/Hef+q5cj/uGAKoAU/Be/9rH/W4iPxF4F8GftV/wjUw/qjf4lOO+8BLV27fBR6+oGMZ8CMIEYnR
4Olvee//3os+nh8FeO8vROS/RLlyQwD1Y46hhDfgY0NEfh343wD/ivd+86KPZ8ALxe8Cr4vIayKSAH8B+Acv+JgG/IhARAT4D4E3vPf//os+nhcJEbnWdSyLSA78GvCdF3pQA/5EMARQA54F/xdgCvyWiHxVRP7vL/qAXhRE5F8VkfvALwP/mYj85os+pueJ0EzwbwO/iRKE/473/lsv9qheLETkPwL+KfAFEbkvIv/Wiz6mF4hfAf5N4M+FueKrIvIvveiDekG4BfwTEfk6uvH4Le/9b7zgYxrwJ4BBiXzAgAEDBgwYMOAZMWSgBgwYMGDAgAEDnhFDADVgwIABAwYMGPCMGAKoAQMGDBgwYMCAZ8QQQA0YMGDAgAEDBjwjhgBqwIABAwYMGDDgGTEEUAMGDBgwYMCAAc+IIYAaMGDAgAEDBgx4RgwB1IABAwYMGDBgwDPi/weJw7m1erteoAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "mel_outputs = tf.reshape(mel_outputs, [-1, 80]).numpy()\n", + "fig = plt.figure(figsize=(10, 8))\n", + "ax1 = fig.add_subplot(311)\n", + "ax1.set_title(f'Predicted Mel-after-Spectrogram')\n", + "im = ax1.imshow(np.rot90(mel_outputs), aspect='auto', interpolation='none')\n", + "fig.colorbar(mappable=im, shrink=0.65, orientation='horizontal', ax=ax1)\n", + "plt.show()\n", + "plt.close()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/TensorFlowTTS/preprocess/baker_preprocess.yaml b/TensorFlowTTS/preprocess/baker_preprocess.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d4ddd2d1a1dec10327547fb7ff902ec6e2a9d368 --- /dev/null +++ b/TensorFlowTTS/preprocess/baker_preprocess.yaml @@ -0,0 +1,19 @@ +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 24000 # Sampling rate. +fft_size: 2048 # FFT size. +hop_size: 300 # Hop size. (fixed value, don't change) +win_length: 1200 # Window length. + # If set to null, it will be the same as fft_size. +window: "hann" # Window function. +num_mels: 80 # Number of mel basis. +fmin: 80 # Minimum freq in mel basis calculation. +fmax: 7600 # Maximum frequency in mel basis calculation. +global_gain_scale: 1.0 # Will be multiplied to all of waveform. +trim_silence: true # Whether to trim the start and end of silence. +trim_threshold_in_db: 60 # Need to tune carefully if the recording is not good. 
+trim_frame_size: 2048 # Frame size in trimming. +trim_hop_size: 512 # Hop size in trimming. +format: "npy" # Feature file format. Only "npy" is supported. + diff --git a/TensorFlowTTS/preprocess/jsut_preprocess.yaml b/TensorFlowTTS/preprocess/jsut_preprocess.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d4ddd2d1a1dec10327547fb7ff902ec6e2a9d368 --- /dev/null +++ b/TensorFlowTTS/preprocess/jsut_preprocess.yaml @@ -0,0 +1,19 @@ +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 24000 # Sampling rate. +fft_size: 2048 # FFT size. +hop_size: 300 # Hop size. (fixed value, don't change) +win_length: 1200 # Window length. + # If set to null, it will be the same as fft_size. +window: "hann" # Window function. +num_mels: 80 # Number of mel basis. +fmin: 80 # Minimum freq in mel basis calculation. +fmax: 7600 # Maximum frequency in mel basis calculation. +global_gain_scale: 1.0 # Will be multiplied to all of waveform. +trim_silence: true # Whether to trim the start and end of silence. +trim_threshold_in_db: 60 # Need to tune carefully if the recording is not good. +trim_frame_size: 2048 # Frame size in trimming. +trim_hop_size: 512 # Hop size in trimming. +format: "npy" # Feature file format. Only "npy" is supported. + diff --git a/TensorFlowTTS/preprocess/kss_preprocess.yaml b/TensorFlowTTS/preprocess/kss_preprocess.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f6169eda484d96c0e9f911cfd2f2a8d66acaa3b2 --- /dev/null +++ b/TensorFlowTTS/preprocess/kss_preprocess.yaml @@ -0,0 +1,19 @@ +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 # Sampling rate. +fft_size: 1024 # FFT size. +hop_size: 256 # Hop size. (fixed value, don't change) +win_length: null # Window length. 
+ # If set to null, it will be the same as fft_size. +window: "hann" # Window function. +num_mels: 80 # Number of mel basis. +fmin: 80 # Minimum freq in mel basis calculation. +fmax: 7600 # Maximum frequency in mel basis calculation. +global_gain_scale: 1.0 # Will be multiplied to all of waveform. +trim_silence: true # Whether to trim the start and end of silence. +trim_threshold_in_db: 30 # Need to tune carefully if the recording is not good. +trim_frame_size: 2048 # Frame size in trimming. +trim_hop_size: 512 # Hop size in trimming. +format: "npy" # Feature file format. Only "npy" is supported. + diff --git a/TensorFlowTTS/preprocess/libritts_preprocess.yaml b/TensorFlowTTS/preprocess/libritts_preprocess.yaml new file mode 100644 index 0000000000000000000000000000000000000000..899de7d23b30e8f41a9e63e3d31eda715a731d47 --- /dev/null +++ b/TensorFlowTTS/preprocess/libritts_preprocess.yaml @@ -0,0 +1,20 @@ +###########################################################base_preprocess +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 24000 # Sampling rate. +fft_size: 1024 # FFT size. +hop_size: 300 # Hop size. (fixed value, don't change) +win_length: null # Window length. + # If set to null, it will be the same as fft_size. +window: "hann" # Window function. +num_mels: 80 # Number of mel basis. +fmin: 80 # Minimum freq in mel basis calculation. +fmax: 7600 # Maximum frequency in mel basis calculation. +global_gain_scale: 1.0 # Will be multiplied to all of waveform. +trim_silence: true # Whether to trim the start and end of silence. +trim_threshold_in_db: 60 # Need to tune carefully if the recording is not good. +trim_frame_size: 2048 # Frame size in trimming. +trim_hop_size: 512 # Hop size in trimming. +format: "npy" # Feature file format. Only "npy" is supported. 
+trim_mfa: true + diff --git a/TensorFlowTTS/preprocess/ljspeech_preprocess.yaml b/TensorFlowTTS/preprocess/ljspeech_preprocess.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f01bf2089c2136f6e49d01d05047befc2c4824c9 --- /dev/null +++ b/TensorFlowTTS/preprocess/ljspeech_preprocess.yaml @@ -0,0 +1,19 @@ +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 # Sampling rate. +fft_size: 1024 # FFT size. +hop_size: 256 # Hop size. (fixed value, don't change) +win_length: null # Window length. + # If set to null, it will be the same as fft_size. +window: "hann" # Window function. +num_mels: 80 # Number of mel basis. +fmin: 80 # Minimum freq in mel basis calculation. +fmax: 7600 # Maximum frequency in mel basis calculation. +global_gain_scale: 1.0 # Will be multiplied to all of waveform. +trim_silence: true # Whether to trim the start and end of silence. +trim_threshold_in_db: 60 # Need to tune carefully if the recording is not good. +trim_frame_size: 2048 # Frame size in trimming. +trim_hop_size: 512 # Hop size in trimming. +format: "npy" # Feature file format. Only "npy" is supported. + diff --git a/TensorFlowTTS/preprocess/ljspeechu_preprocess.yaml b/TensorFlowTTS/preprocess/ljspeechu_preprocess.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c8c684a6de26fa77328131a6d98006935bb03277 --- /dev/null +++ b/TensorFlowTTS/preprocess/ljspeechu_preprocess.yaml @@ -0,0 +1,19 @@ +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 44100 # Sampling rate. +fft_size: 2048 # FFT size. +hop_size: 512 # Hop size. (fixed value, don't change) +win_length: 2048 # Window length. + # If set to null, it will be the same as fft_size. +window: "hann" # Window function. +num_mels: 80 # Number of mel basis. 
+fmin: 20 # Minimum freq in mel basis calculation. +fmax: 11025 # Maximum frequency in mel basis calculation. +global_gain_scale: 1.0 # Will be multiplied to all of waveform. +trim_silence: false # Whether to trim the start and end of silence +trim_threshold_in_db: 60 # Need to tune carefully if the recording is not good. +trim_frame_size: 2048 # Frame size in trimming. +trim_hop_size: 512 # Hop size in trimming. +format: "npy" # Feature file format. Only "npy" is supported. +trim_mfa: false \ No newline at end of file diff --git a/TensorFlowTTS/preprocess/synpaflex_preprocess.yaml b/TensorFlowTTS/preprocess/synpaflex_preprocess.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f744b29b30b0a78a0da2d80f9910e17f8d227f1a --- /dev/null +++ b/TensorFlowTTS/preprocess/synpaflex_preprocess.yaml @@ -0,0 +1,19 @@ +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 # Sampling rate. +fft_size: 1024 # FFT size. +hop_size: 256 # Hop size. (fixed value, don't change) +win_length: null # Window length. + # If set to null, it will be the same as fft_size. +window: "hann" # Window function. +num_mels: 80 # Number of mel basis. +fmin: 80 # Minimum freq in mel basis calculation. +fmax: 7600 # Maximum frequency in mel basis calculation. +global_gain_scale: 1.0 # Will be multiplied to all of waveform. +trim_silence: true # Whether to trim the start and end of silence. +trim_threshold_in_db: 20 # Need to tune carefully if the recording is not good. +trim_frame_size: 2048 # Frame size in trimming. +trim_hop_size: 512 # Hop size in trimming. +format: "npy" # Feature file format. Only "npy" is supported. 
+ diff --git a/TensorFlowTTS/preprocess/thorsten_preprocess.yaml b/TensorFlowTTS/preprocess/thorsten_preprocess.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f01bf2089c2136f6e49d01d05047befc2c4824c9 --- /dev/null +++ b/TensorFlowTTS/preprocess/thorsten_preprocess.yaml @@ -0,0 +1,19 @@ +########################################################### +# FEATURE EXTRACTION SETTING # +########################################################### +sampling_rate: 22050 # Sampling rate. +fft_size: 1024 # FFT size. +hop_size: 256 # Hop size. (fixed value, don't change) +win_length: null # Window length. + # If set to null, it will be the same as fft_size. +window: "hann" # Window function. +num_mels: 80 # Number of mel basis. +fmin: 80 # Minimum freq in mel basis calculation. +fmax: 7600 # Maximum frequency in mel basis calculation. +global_gain_scale: 1.0 # Will be multiplied to all of waveform. +trim_silence: true # Whether to trim the start and end of silence. +trim_threshold_in_db: 60 # Need to tune carefully if the recording is not good. +trim_frame_size: 2048 # Frame size in trimming. +trim_hop_size: 512 # Hop size in trimming. +format: "npy" # Feature file format. Only "npy" is supported. 
+ diff --git a/TensorFlowTTS/setup.cfg b/TensorFlowTTS/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..6a1fcb7e2125ea4b604746dd025eea19aa457fc3 --- /dev/null +++ b/TensorFlowTTS/setup.cfg @@ -0,0 +1,11 @@ +[aliases] +test=pytest + +[tool:pytest] +addopts = --verbose --durations=0 +testpaths = test + +[flake8] +ignore = H102,W504,H238,D104,H306,H405,D205 +# 120 is a workaround, 79 is good +max-line-length = 120 diff --git a/TensorFlowTTS/setup.py b/TensorFlowTTS/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..71d64287bf02e73fb559be1d64fd7b86cdb9f1a1 --- /dev/null +++ b/TensorFlowTTS/setup.py @@ -0,0 +1,98 @@ +"""Setup Tensorflow TTS libarary.""" + +import os +import sys +from distutils.version import LooseVersion + +import pip +from setuptools import find_packages, setup + +if LooseVersion(sys.version) < LooseVersion("3.6"): + raise RuntimeError( + "TensorFlow TTS requires python >= 3.6, " + "but your Python version is {}".format(sys.version) + ) + +if LooseVersion(pip.__version__) < LooseVersion("19"): + raise RuntimeError( + "pip>=19.0.0 is required, but your pip version is {}. " + 'Try again after "pip install -U pip"'.format(pip.__version__) + ) + +# TODO(@dathudeptrai) update requirement if needed. +requirements = { + "install": [ + "tensorflow-gpu==2.7.0", + "tensorflow-addons>=0.10.0", + "setuptools>=38.5.1", + "huggingface_hub==0.0.8", + "librosa>=0.7.0", + "soundfile>=0.10.2", + "matplotlib>=3.1.0", + "PyYAML>=3.12", + "tqdm>=4.26.1", + "h5py>=2.10.0", + "unidecode>=1.1.1", + "inflect>=4.1.0", + "scikit-learn>=0.22.0", + "pyworld>=0.2.10", + "numba>=0.48", # Fix No module named "numba.decorators" + "jamo>=0.4.1", + "pypinyin", + "g2pM", + "textgrid", + "click", + "g2p_en", + "dataclasses", + "pyopenjtalk", + ], + "setup": ["numpy", "pytest-runner",], + "test": [ + "pytest>=3.3.0", + "hacking>=1.1.0", + ], +} + +# TODO(@dathudeptrai) update console_scripts. 
+entry_points = { + "console_scripts": [ + "tensorflow-tts-preprocess=tensorflow_tts.bin.preprocess:preprocess", + "tensorflow-tts-compute-statistics=tensorflow_tts.bin.preprocess:compute_statistics", + "tensorflow-tts-normalize=tensorflow_tts.bin.preprocess:normalize", + ] +} + +install_requires = requirements["install"] +setup_requires = requirements["setup"] +tests_require = requirements["test"] +extras_require = { + k: v for k, v in requirements.items() if k not in ["install", "setup"] +} + +dirname = os.path.dirname(__file__) +setup( + name="TensorFlowTTS", + version="0.0", + url="https://github.com/tensorspeech/TensorFlowTTS", + author="Minh Nguyen Quan Anh, Alejandro Miguel Velasquez, Dawid Kobus, Eren Gölge, Kuan Chen, Takuya Ebata, Trinh Le Quang, Yunchao He", + author_email="nguyenquananhminh@gmail.com", + description="TensorFlowTTS: Real-Time State-of-the-art Speech Synthesis for TensorFlow 2", + long_description=open(os.path.join(dirname, "README.md"), encoding="utf-8").read(), + long_description_content_type="text/markdown", + license="Apache-2.0", + packages=find_packages(include=["tensorflow_tts*"]), + install_requires=install_requires, + setup_requires=setup_requires, + tests_require=tests_require, + extras_require=extras_require, + entry_points=entry_points, + classifiers=[ + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Intended Audience :: Science/Research", + "Operating System :: POSIX :: Linux", + "License :: OSI Approved :: Apache Software License", + "Topic :: Software Development :: Libraries :: Python Modules", + ], +) diff --git a/TensorFlowTTS/tensorflow_tts/__init__.py b/TensorFlowTTS/tensorflow_tts/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..76fe15da520fb89b10ff5edfa8e58614a99c80dd --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/__init__.py @@ -0,0 +1 @@ +__version__ = "0.0" diff --git 
a/TensorFlowTTS/tensorflow_tts/__pycache__/__init__.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a96f69938e0d62453a235f830f0335e8f2f0d31d Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/__pycache__/__init__.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/bin/__init__.py b/TensorFlowTTS/tensorflow_tts/bin/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/TensorFlowTTS/tensorflow_tts/bin/preprocess.py b/TensorFlowTTS/tensorflow_tts/bin/preprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..89901d6fc39e83b05d78d64af5d3dc8baf748f28 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/bin/preprocess.py @@ -0,0 +1,588 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Perform preprocessing, with raw feature extraction and normalization of train/valid split.""" + +import argparse +import glob +import logging +import os +import yaml + +import librosa +import numpy as np +import pyworld as pw + +from functools import partial +from multiprocessing import Pool +from sklearn.model_selection import train_test_split +from sklearn.preprocessing import StandardScaler +from tqdm import tqdm + +from tensorflow_tts.processor import LJSpeechProcessor +from tensorflow_tts.processor import BakerProcessor +from tensorflow_tts.processor import KSSProcessor +from tensorflow_tts.processor import LibriTTSProcessor +from tensorflow_tts.processor import ThorstenProcessor +from tensorflow_tts.processor import LJSpeechUltimateProcessor +from tensorflow_tts.processor import SynpaflexProcessor +from tensorflow_tts.processor import JSUTProcessor +from tensorflow_tts.processor.ljspeech import LJSPEECH_SYMBOLS +from tensorflow_tts.processor.baker import BAKER_SYMBOLS +from tensorflow_tts.processor.kss import KSS_SYMBOLS +from tensorflow_tts.processor.libritts import LIBRITTS_SYMBOLS +from tensorflow_tts.processor.thorsten import THORSTEN_SYMBOLS +from tensorflow_tts.processor.ljspeechu import LJSPEECH_U_SYMBOLS +from tensorflow_tts.processor.synpaflex import SYNPAFLEX_SYMBOLS +from tensorflow_tts.processor.jsut import JSUT_SYMBOLS + +from tensorflow_tts.utils import remove_outlier + +os.environ["CUDA_VISIBLE_DEVICES"] = "" + + +def parse_and_config(): + """Parse arguments and set configuration parameters.""" + parser = argparse.ArgumentParser( + description="Preprocess audio and text features " + "(See detail in tensorflow_tts/bin/preprocess_dataset.py)." 
+ ) + parser.add_argument( + "--rootdir", + default=None, + type=str, + required=True, + help="Directory containing the dataset files.", + ) + parser.add_argument( + "--outdir", + default=None, + type=str, + required=True, + help="Output directory where features will be saved.", + ) + parser.add_argument( + "--dataset", + type=str, + default="ljspeech", + choices=["ljspeech", "kss", "libritts", "baker", "thorsten", "ljspeechu", "synpaflex", "jsut"], + help="Dataset to preprocess.", + ) + parser.add_argument( + "--config", type=str, required=True, help="YAML format configuration file." + ) + parser.add_argument( + "--n_cpus", + type=int, + default=4, + required=False, + help="Number of CPUs to use in parallel.", + ) + parser.add_argument( + "--test_size", + type=float, + default=0.05, + required=False, + help="Proportion of files to use as test dataset.", + ) + parser.add_argument( + "--verbose", + type=int, + default=0, + choices=[0, 1, 2], + help="Logging level. 0: DEBUG, 1: INFO and WARNING, 2: INFO, WARNING, and ERROR", + ) + args = parser.parse_args() + + # set logger + FORMAT = "%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s" + log_level = {0: logging.DEBUG, 1: logging.WARNING, 2: logging.ERROR} + logging.basicConfig(level=log_level[args.verbose], format=FORMAT) + + # load config + config = yaml.load(open(args.config), Loader=yaml.SafeLoader) + config.update(vars(args)) + # config checks + assert config["format"] == "npy", "'npy' is the only supported format." 
+ return config + + +def ph_based_trim( + config, + utt_id: str, + text_ids: np.array, + raw_text: str, + audio: np.array, + hop_size: int, +) -> (bool, np.array, np.array): + """ + Args: + config: Parsed yaml config + utt_id: file name + text_ids: array with text ids + raw_text: raw text of file + audio: parsed wav file + hop_size: Hop size + Returns: (bool, np.array, np.array) => if trimmed return True, new text_ids, new audio_array + """ + + os.makedirs(os.path.join(config["rootdir"], "trimmed-durations"), exist_ok=True) + duration_path = config.get( + "duration_path", os.path.join(config["rootdir"], "durations") + ) + duration_fixed_path = config.get( + "duration_fixed_path", os.path.join(config["rootdir"], "trimmed-durations") + ) + sil_ph = ["SIL", "END"] # TODO FIX hardcoded values + text = raw_text.split(" ") + + trim_start, trim_end = False, False + + if text[0] in sil_ph: + trim_start = True + + if text[-1] in sil_ph: + trim_end = True + + if not trim_start and not trim_end: + return False, text_ids, audio + + idx_start, idx_end = ( + 0 if not trim_start else 1, + text_ids.__len__() if not trim_end else -1, + ) + text_ids = text_ids[idx_start:idx_end] + durations = np.load(os.path.join(duration_path, f"{utt_id}-durations.npy")) + if trim_start: + s_trim = int(durations[0] * hop_size) + audio = audio[s_trim:] + if trim_end: + e_trim = int(durations[-1] * hop_size) + audio = audio[:-e_trim] + + durations = durations[idx_start:idx_end] + np.save(os.path.join(duration_fixed_path, f"{utt_id}-durations.npy"), durations) + return True, text_ids, audio + + +def gen_audio_features(item, config): + """Generate audio features and transformations + Args: + item (Dict): dictionary containing the attributes to encode. + config (Dict): configuration dictionary. + Returns: + (bool): keep this sample or not. + mel (ndarray): mel matrix in np.float32. + energy (ndarray): energy audio profile. + f0 (ndarray): fundamental frequency. 
def gen_audio_features(item, config):
    """Generate audio features and transformations for one sample.

    Args:
        item (Dict): dictionary with at least ``audio``, ``utt_id``, ``rate``
            (and ``text_ids``/``raw_text`` when MFA trimming is enabled).
        config (Dict): configuration dictionary.

    Returns:
        (bool): keep this sample or not.
        mel (ndarray): mel matrix in np.float32.
        energy (ndarray): energy audio profile.
        f0 (ndarray): fundamental frequency.
        item (Dict): dictionary containing the updated attributes.
    """
    # get info from sample.
    audio = item["audio"]
    utt_id = item["utt_id"]
    rate = item["rate"]

    # check audio properties
    assert len(audio.shape) == 1, f"{utt_id} seems to be multi-channel signal."
    assert np.abs(audio).max() <= 1.0, f"{utt_id} is different from 16 bit PCM."

    # check sample rate; keyword args keep this working on librosa >= 0.10
    # where the positional (y, orig_sr, target_sr) form was removed.
    if rate != config["sampling_rate"]:
        audio = librosa.resample(
            audio, orig_sr=rate, target_sr=config["sampling_rate"]
        )
        logging.info(f"{utt_id} sampling rate is {rate}, not {config['sampling_rate']}, we resample it.")

    # trim silence
    if config["trim_silence"]:
        if "trim_mfa" in config and config["trim_mfa"]:
            _, item["text_ids"], audio = ph_based_trim(
                config,
                utt_id,
                item["text_ids"],
                item["raw_text"],
                audio,
                config["hop_size"],
            )
            # very short files can get trimmed fully if MFA didn't extract
            # any tokens (LibriTTS: maybe take only longer files?)
            if len(audio) < 1:
                logging.warning(
                    f"File have only silence or MFA didnt extract any token {utt_id}"
                )
                return False, None, None, None, item
        else:
            audio, _ = librosa.effects.trim(
                audio,
                top_db=config["trim_threshold_in_db"],
                frame_length=config["trim_frame_size"],
                hop_length=config["trim_hop_size"],
            )

    # resample audio if necessary
    # NOTE(review): this resamples from the ORIGINAL rate even when the audio
    # was already resampled above — confirm callers never hit both branches.
    if "sampling_rate_for_feats" in config:
        audio = librosa.resample(
            audio, orig_sr=rate, target_sr=config["sampling_rate_for_feats"]
        )
        sampling_rate = config["sampling_rate_for_feats"]
        assert (
            config["hop_size"] * config["sampling_rate_for_feats"] % rate == 0
        ), "'hop_size' must be 'int' value. Please check if 'sampling_rate_for_feats' is correct."
        hop_size = config["hop_size"] * config["sampling_rate_for_feats"] // rate
    else:
        sampling_rate = config["sampling_rate"]
        hop_size = config["hop_size"]

    # get spectrogram
    D = librosa.stft(
        audio,
        n_fft=config["fft_size"],
        hop_length=hop_size,
        win_length=config["win_length"],
        window=config["window"],
        pad_mode="reflect",
    )
    S, _ = librosa.magphase(D)  # (#bins, #frames)

    # get mel basis
    fmin = 0 if config["fmin"] is None else config["fmin"]
    fmax = sampling_rate // 2 if config["fmax"] is None else config["fmax"]
    mel_basis = librosa.filters.mel(
        sr=sampling_rate,
        n_fft=config["fft_size"],
        n_mels=config["num_mels"],
        fmin=fmin,
        fmax=fmax,
    )
    # log10 mel, floored at 1e-10 to avoid log(0)
    mel = np.log10(np.maximum(np.dot(mel_basis, S), 1e-10)).T  # (#frames, #bins)

    # check audio and feature length
    audio = np.pad(audio, (0, config["fft_size"]), mode="edge")
    audio = audio[: len(mel) * hop_size]
    assert len(mel) * hop_size == len(audio)

    # extract raw pitch
    _f0, t = pw.dio(
        audio.astype(np.double),
        fs=sampling_rate,
        f0_ceil=fmax,
        frame_period=1000 * hop_size / sampling_rate,
    )
    f0 = pw.stonemask(audio.astype(np.double), _f0, t, sampling_rate)
    # align f0 length with the mel frame count
    if len(f0) >= len(mel):
        f0 = f0[: len(mel)]
    else:
        f0 = np.pad(f0, (0, len(mel) - len(f0)))

    # extract energy (per-frame magnitude norm)
    energy = np.sqrt(np.sum(S ** 2, axis=0))
    assert len(mel) == len(f0) == len(energy)

    # remove outlier f0/energy
    f0 = remove_outlier(f0)
    energy = remove_outlier(energy)

    # apply global gain
    if config["global_gain_scale"] > 0.0:
        audio *= config["global_gain_scale"]
    if np.abs(audio).max() >= 1.0:
        # logging.warn is a deprecated alias of logging.warning
        logging.warning(
            f"{utt_id} causes clipping. It is better to reconsider global gain scale value."
        )
    item["audio"] = audio
    item["mel"] = mel
    item["f0"] = f0
    item["energy"] = energy
    return True, mel, energy, f0, item
def save_statistics_to_file(scaler_list, config):
    """Persist per-feature (mean, scale) statistics as ``stats<name>.npy``.

    Args:
        scaler_list (List): (scaler, name-suffix) pairs; each scaler exposes
            fitted ``mean_`` and ``scale_`` attributes.
        config (Dict): configuration dictionary (uses ``outdir``).
    """
    for scaler, suffix in scaler_list:
        packed = np.stack((scaler.mean_, scaler.scale_)).astype(np.float32)
        target = os.path.join(config["outdir"], f"stats{suffix}.npy")
        np.save(target, packed, allow_pickle=False)


def save_features_to_file(features, subdir, config):
    """Write one utterance's transformed features to disk as .npy files.

    Args:
        features (Dict): dictionary containing the attributes to save.
        subdir (str): data split folder where features will be saved.
        config (Dict): configuration dictionary (uses ``outdir``/``format``).

    Raises:
        ValueError: when ``config["format"]`` is not ``"npy"``.
    """
    if config["format"] != "npy":
        raise ValueError("'npy' is the only supported format.")

    utt_id = features["utt_id"]
    # (feature key, target folder, file-name suffix, dtype)
    layout = (
        ("audio", "wavs", "wave", np.float32),
        ("mel", "raw-feats", "raw-feats", np.float32),
        ("text_ids", "ids", "ids", np.int32),
        ("f0", "raw-f0", "raw-f0", np.float32),
        ("energy", "raw-energies", "raw-energy", np.float32),
    )
    for key, folder, suffix, dtype in layout:
        np.save(
            os.path.join(config["outdir"], subdir, folder, f"{utt_id}-{suffix}.npy"),
            features[key].astype(dtype),
            allow_pickle=False,
        )
"korean_cleaners", + "libritts": None, + "baker": None, + "thorsten": "german_cleaners", + "ljspeechu": "english_cleaners", + "synpaflex": "basic_cleaners", + "jsut": None, + } + + logging.info(f"Selected '{config['dataset']}' processor.") + processor = dataset_processor[config["dataset"]]( + config["rootdir"], + symbols=dataset_symbol[config["dataset"]], + cleaner_names=dataset_cleaner[config["dataset"]], + ) + + # check output directories + build_dir = lambda x: [ + os.makedirs(os.path.join(config["outdir"], x, y), exist_ok=True) + for y in ["raw-feats", "wavs", "ids", "raw-f0", "raw-energies"] + ] + build_dir("train") + build_dir("valid") + + # save pretrained-processor to feature dir + processor._save_mapper( + os.path.join(config["outdir"], f"{config['dataset']}_mapper.json"), + extra_attrs_to_save={"pinyin_dict": processor.pinyin_dict} + if config["dataset"] == "baker" + else {}, + ) + + # build train test split + if config["dataset"] == "libritts": + train_split, valid_split, _, _ = train_test_split( + processor.items, + [i[-1] for i in processor.items], + test_size=config["test_size"], + random_state=42, + shuffle=True, + ) + else: + train_split, valid_split = train_test_split( + processor.items, + test_size=config["test_size"], + random_state=42, + shuffle=True, + ) + logging.info(f"Training items: {len(train_split)}") + logging.info(f"Validation items: {len(valid_split)}") + + get_utt_id = lambda x: os.path.split(x[1])[-1].split(".")[0] + train_utt_ids = [get_utt_id(x) for x in train_split] + valid_utt_ids = [get_utt_id(x) for x in valid_split] + + # save train and valid utt_ids to track later + np.save(os.path.join(config["outdir"], "train_utt_ids.npy"), train_utt_ids) + np.save(os.path.join(config["outdir"], "valid_utt_ids.npy"), valid_utt_ids) + + # define map iterator + def iterator_data(items_list): + for item in items_list: + yield processor.get_one_sample(item) + + train_iterator_data = iterator_data(train_split) + valid_iterator_data = 
iterator_data(valid_split) + + p = Pool(config["n_cpus"]) + + # preprocess train files and get statistics for normalizing + partial_fn = partial(gen_audio_features, config=config) + train_map = p.imap_unordered( + partial_fn, + tqdm(train_iterator_data, total=len(train_split), desc="[Preprocessing train]"), + chunksize=10, + ) + # init scaler for multiple features + scaler_mel = StandardScaler(copy=False) + scaler_energy = StandardScaler(copy=False) + scaler_f0 = StandardScaler(copy=False) + + id_to_remove = [] + for result, mel, energy, f0, features in train_map: + if not result: + id_to_remove.append(features["utt_id"]) + continue + save_features_to_file(features, "train", config) + # partial fitting of scalers + if len(energy[energy != 0]) == 0 or len(f0[f0 != 0]) == 0: + id_to_remove.append(features["utt_id"]) + continue + # partial fitting of scalers + if len(energy[energy != 0]) == 0 or len(f0[f0 != 0]) == 0: + id_to_remove.append(features["utt_id"]) + continue + scaler_mel.partial_fit(mel) + scaler_energy.partial_fit(energy[energy != 0].reshape(-1, 1)) + scaler_f0.partial_fit(f0[f0 != 0].reshape(-1, 1)) + + if len(id_to_remove) > 0: + np.save( + os.path.join(config["outdir"], "train_utt_ids.npy"), + [i for i in train_utt_ids if i not in id_to_remove], + ) + logging.info( + f"removed {len(id_to_remove)} cause of too many outliers or bad mfa extraction" + ) + + # save statistics to file + logging.info("Saving computed statistics.") + scaler_list = [(scaler_mel, ""), (scaler_energy, "_energy"), (scaler_f0, "_f0")] + save_statistics_to_file(scaler_list, config) + + # preprocess valid files + partial_fn = partial(gen_audio_features, config=config) + valid_map = p.imap_unordered( + partial_fn, + tqdm(valid_iterator_data, total=len(valid_split), desc="[Preprocessing valid]"), + chunksize=10, + ) + for *_, features in valid_map: + save_features_to_file(features, "valid", config) + + +def gen_normal_mel(mel_path, scaler, config): + """Normalize the mel spectrogram 
def gen_normal_mel(mel_path, scaler, config):
    """Normalize one mel spectrogram file and save it under norm-feats.

    Args:
        mel_path (string): path of the mel spectrogram to normalize; expected
            layout is ``<root>/<subdir>/<suffix>/<utt_id>-<suffix>.npy``.
        scaler (sklearn.base.BaseEstimator): fitted scaler used to normalize.
        config (Dict): configuration dictionary (uses ``outdir``).
    """
    mel = np.load(mel_path)
    mel_norm = scaler.transform(mel)
    path, file_name = os.path.split(mel_path)
    # recover the split folder ("train"/"valid") and the feature suffix
    *_, subdir, suffix = path.split(os.sep)

    utt_id = file_name.split(f"-{suffix}.npy")[0]
    np.save(
        os.path.join(
            config["outdir"], subdir, "norm-feats", f"{utt_id}-norm-feats.npy"
        ),
        mel_norm.astype(np.float32),
        allow_pickle=False,
    )


def normalize():
    """Normalize mel spectrograms with pre-computed statistics.

    Raises:
        ValueError: when ``config["format"]`` is not ``"npy"``.
    """
    config = parse_and_config()
    if config["format"] == "npy":
        # init scaler with saved values
        scaler = StandardScaler()
        scaler.mean_, scaler.scale_ = np.load(
            os.path.join(config["outdir"], "stats.npy")
        )
        scaler.n_features_in_ = config["num_mels"]
    else:
        raise ValueError("'npy' is the only supported format.")

    # find all "raw-feats" files in both train and valid folders
    glob_path = os.path.join(config["rootdir"], "**", "raw-feats", "*.npy")
    mel_raw_feats = glob.glob(glob_path, recursive=True)
    logging.info(f"Files to normalize: {len(mel_raw_feats)}")

    # check for output directories
    os.makedirs(os.path.join(config["outdir"], "train", "norm-feats"), exist_ok=True)
    os.makedirs(os.path.join(config["outdir"], "valid", "norm-feats"), exist_ok=True)

    # context manager guarantees the worker pool is cleaned up
    # (the pool was previously never closed).
    partial_fn = partial(gen_normal_mel, scaler=scaler, config=config)
    with Pool(config["n_cpus"]) as p:
        list(p.map(partial_fn, tqdm(mel_raw_feats, desc="[Normalizing]")))
glob_fn("raw-energies") + assert ( + len(glob_mel) == len(glob_f0) == len(glob_energy) + ), "Features, f0 and energies have different files in training split." + + logging.info(f"Computing statistics for {len(glob_mel)} files.") + # init scaler for multiple features + scaler_mel = StandardScaler(copy=False) + scaler_energy = StandardScaler(copy=False) + scaler_f0 = StandardScaler(copy=False) + + for mel, f0, energy in tqdm( + zip(glob_mel, glob_f0, glob_energy), total=len(glob_mel) + ): + # remove outliers + energy = np.load(energy) + f0 = np.load(f0) + # partial fitting of scalers + scaler_mel.partial_fit(np.load(mel)) + scaler_energy.partial_fit(energy[energy != 0].reshape(-1, 1)) + scaler_f0.partial_fit(f0[f0 != 0].reshape(-1, 1)) + + # save statistics to file + logging.info("Saving computed statistics.") + scaler_list = [(scaler_mel, ""), (scaler_energy, "_energy"), (scaler_f0, "_f0")] + save_statistics_to_file(scaler_list, config) + + +if __name__ == "__main__": + preprocess() diff --git a/TensorFlowTTS/tensorflow_tts/configs/__init__.py b/TensorFlowTTS/tensorflow_tts/configs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f3fdf751afb44e8ecda350fa2d89de2f080cd887 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/configs/__init__.py @@ -0,0 +1,18 @@ +from tensorflow_tts.configs.base_config import BaseConfig +from tensorflow_tts.configs.fastspeech import FastSpeechConfig +from tensorflow_tts.configs.fastspeech2 import FastSpeech2Config +from tensorflow_tts.configs.melgan import ( + MelGANDiscriminatorConfig, + MelGANGeneratorConfig, +) +from tensorflow_tts.configs.mb_melgan import ( + MultiBandMelGANDiscriminatorConfig, + MultiBandMelGANGeneratorConfig, +) +from tensorflow_tts.configs.hifigan import ( + HifiGANGeneratorConfig, + HifiGANDiscriminatorConfig, +) +from tensorflow_tts.configs.tacotron2 import Tacotron2Config +from tensorflow_tts.configs.parallel_wavegan import ParallelWaveGANGeneratorConfig +from 
tensorflow_tts.configs.parallel_wavegan import ParallelWaveGANDiscriminatorConfig diff --git a/TensorFlowTTS/tensorflow_tts/configs/__pycache__/__init__.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f924dea4da9cbb8ef6e953335cfa5d75f69b71b7 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/__init__.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/configs/__pycache__/base_config.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/base_config.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab86ff8a3647ad1f4c2b9eb74dfcdc6574a3e28a Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/base_config.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/configs/__pycache__/fastspeech.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/fastspeech.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e85720aa02fcb74d997d21037b1bc4ceb6a426a0 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/fastspeech.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/configs/__pycache__/fastspeech2.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/fastspeech2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..43079cd851fb21f92bbde620722c42f5375a8010 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/fastspeech2.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/configs/__pycache__/hifigan.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/hifigan.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..84a95f0f1686c4e58f178473a79d0e4a996e1351 Binary files /dev/null and 
b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/hifigan.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/configs/__pycache__/mb_melgan.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/mb_melgan.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4457205503a35d9d661082ffc7eb82d33795b025 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/mb_melgan.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/configs/__pycache__/melgan.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/melgan.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..69d04776bfd4d85875e1cd78eb9f608943f4c4f6 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/melgan.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/configs/__pycache__/parallel_wavegan.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/parallel_wavegan.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe2d960d542d4080b22b31d606e35e5b8395794a Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/parallel_wavegan.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/configs/__pycache__/tacotron2.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/tacotron2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1d5e758ac5d054bb0fd622dfdf1f9aed203b5da5 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/configs/__pycache__/tacotron2.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/configs/base_config.py b/TensorFlowTTS/tensorflow_tts/configs/base_config.py new file mode 100644 index 0000000000000000000000000000000000000000..a8c9de13dbeb34ab2bfe8e2c50ab9477dad2ca68 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/configs/base_config.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 
# -*- coding: utf-8 -*-
# Copyright 2020 TensorFlowTTS Team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base Config for all config."""

import abc
import yaml
import os

from tensorflow_tts.utils.utils import CONFIG_FILE_NAME


class BaseConfig(abc.ABC):
    """Abstract base for all model configs; adds yaml (de)serialization hooks."""

    def set_config_params(self, config_params):
        """Remember the raw parameter dict so it can be dumped by save_pretrained."""
        self.config_params = config_params

    def save_pretrained(self, saved_path):
        """Save config to file.

        Writes ``self.config_params`` (set via ``set_config_params``) as yaml
        to ``saved_path/CONFIG_FILE_NAME``, creating the directory if needed.
        """
        os.makedirs(saved_path, exist_ok=True)
        with open(os.path.join(saved_path, CONFIG_FILE_NAME), "w") as file:
            yaml.dump(self.config_params, file, Dumper=yaml.Dumper)
# See the License for the specific language governing permissions and
# limitations under the License.
"""FastSpeech Config object."""

import collections

from tensorflow_tts.configs import BaseConfig
from tensorflow_tts.processor.ljspeech import LJSPEECH_SYMBOLS as lj_symbols
from tensorflow_tts.processor.kss import KSS_SYMBOLS as kss_symbols
from tensorflow_tts.processor.baker import BAKER_SYMBOLS as bk_symbols
from tensorflow_tts.processor.libritts import LIBRITTS_SYMBOLS as lbri_symbols
from tensorflow_tts.processor.jsut import JSUT_SYMBOLS as jsut_symbols


# Hyper-parameters shared by the encoder and decoder self-attention stacks.
SelfAttentionParams = collections.namedtuple(
    "SelfAttentionParams",
    [
        "n_speakers",
        "hidden_size",
        "num_hidden_layers",
        "num_attention_heads",
        "attention_head_size",
        "intermediate_size",
        "intermediate_kernel_size",
        "hidden_act",
        "output_attentions",
        "output_hidden_states",
        "initializer_range",
        "hidden_dropout_prob",
        "attention_probs_dropout_prob",
        "layer_norm_eps",
        "max_position_embeddings",
    ],
)


class FastSpeechConfig(BaseConfig):
    """Initialize FastSpeech Config."""

    def __init__(
        self,
        dataset="ljspeech",
        vocab_size=len(lj_symbols),
        n_speakers=1,
        encoder_hidden_size=384,
        encoder_num_hidden_layers=4,
        encoder_num_attention_heads=2,
        encoder_attention_head_size=192,
        encoder_intermediate_size=1024,
        encoder_intermediate_kernel_size=3,
        encoder_hidden_act="mish",
        decoder_hidden_size=384,
        decoder_num_hidden_layers=4,
        decoder_num_attention_heads=2,
        decoder_attention_head_size=192,
        decoder_intermediate_size=1024,
        decoder_intermediate_kernel_size=3,
        decoder_hidden_act="mish",
        output_attentions=True,
        output_hidden_states=True,
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        initializer_range=0.02,
        layer_norm_eps=1e-5,
        max_position_embeddings=2048,
        num_duration_conv_layers=2,
        duration_predictor_filters=256,
        duration_predictor_kernel_sizes=3,
        num_mels=80,
        duration_predictor_dropout_probs=0.1,
        n_conv_postnet=5,
        postnet_conv_filters=512,
        postnet_conv_kernel_sizes=5,
        postnet_dropout_rate=0.1,
        **kwargs
    ):
        """Init parameters for Fastspeech model."""
        # vocabulary / dataset params (the previous comment here wrongly
        # said "encoder params"). The explicit ``vocab_size`` argument is
        # only honored for ljspeech — legacy behavior, kept as-is.
        dataset_symbols = {
            "kss": kss_symbols,
            "baker": bk_symbols,
            "libritts": lbri_symbols,
            "jsut": jsut_symbols,
        }
        if dataset == "ljspeech":
            self.vocab_size = vocab_size
        elif dataset in dataset_symbols:
            self.vocab_size = len(dataset_symbols[dataset])
        else:
            raise ValueError("No such dataset: {}".format(dataset))
        self.initializer_range = initializer_range
        self.max_position_embeddings = max_position_embeddings
        self.n_speakers = n_speakers
        self.layer_norm_eps = layer_norm_eps

        # encoder params
        self.encoder_self_attention_params = SelfAttentionParams(
            n_speakers=n_speakers,
            hidden_size=encoder_hidden_size,
            num_hidden_layers=encoder_num_hidden_layers,
            num_attention_heads=encoder_num_attention_heads,
            attention_head_size=encoder_attention_head_size,
            hidden_act=encoder_hidden_act,
            intermediate_size=encoder_intermediate_size,
            intermediate_kernel_size=encoder_intermediate_kernel_size,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            initializer_range=initializer_range,
            hidden_dropout_prob=hidden_dropout_prob,
            attention_probs_dropout_prob=attention_probs_dropout_prob,
            layer_norm_eps=layer_norm_eps,
            max_position_embeddings=max_position_embeddings,
        )

        # decoder params
        self.decoder_self_attention_params = SelfAttentionParams(
            n_speakers=n_speakers,
            hidden_size=decoder_hidden_size,
            num_hidden_layers=decoder_num_hidden_layers,
            num_attention_heads=decoder_num_attention_heads,
            attention_head_size=decoder_attention_head_size,
            hidden_act=decoder_hidden_act,
            intermediate_size=decoder_intermediate_size,
            intermediate_kernel_size=decoder_intermediate_kernel_size,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            initializer_range=initializer_range,
            hidden_dropout_prob=hidden_dropout_prob,
            attention_probs_dropout_prob=attention_probs_dropout_prob,
            layer_norm_eps=layer_norm_eps,
            max_position_embeddings=max_position_embeddings,
        )

        # duration predictor params
        self.duration_predictor_dropout_probs = duration_predictor_dropout_probs
        self.num_duration_conv_layers = num_duration_conv_layers
        self.duration_predictor_filters = duration_predictor_filters
        self.duration_predictor_kernel_sizes = duration_predictor_kernel_sizes
        self.num_mels = num_mels

        # postnet
        self.n_conv_postnet = n_conv_postnet
        self.postnet_conv_filters = postnet_conv_filters
        self.postnet_conv_kernel_sizes = postnet_conv_kernel_sizes
        self.postnet_dropout_rate = postnet_dropout_rate
+"""FastSpeech2 Config object.""" + + +from tensorflow_tts.configs import FastSpeechConfig + + +class FastSpeech2Config(FastSpeechConfig): + """Initialize FastSpeech2 Config.""" + + def __init__( + self, + variant_prediction_num_conv_layers=2, + variant_kernel_size=9, + variant_dropout_rate=0.5, + variant_predictor_filter=256, + variant_predictor_kernel_size=3, + variant_predictor_dropout_rate=0.5, + **kwargs + ): + super().__init__(**kwargs) + self.variant_prediction_num_conv_layers = variant_prediction_num_conv_layers + self.variant_predictor_kernel_size = variant_predictor_kernel_size + self.variant_predictor_dropout_rate = variant_predictor_dropout_rate + self.variant_predictor_filter = variant_predictor_filter diff --git a/TensorFlowTTS/tensorflow_tts/configs/hifigan.py b/TensorFlowTTS/tensorflow_tts/configs/hifigan.py new file mode 100644 index 0000000000000000000000000000000000000000..753dbe02252191d3a0a65ce48fee461c6357acbb --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/configs/hifigan.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorflowTTS Team +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""HifiGAN Config object.""" + + +from tensorflow_tts.configs import BaseConfig + + +class HifiGANGeneratorConfig(BaseConfig): + """Initialize HifiGAN Generator Config.""" + + def __init__( + self, + out_channels=1, + kernel_size=7, + filters=128, + use_bias=True, + upsample_scales=[8, 8, 2, 2], + stacks=3, + stack_kernel_size=[3, 7, 11], + stack_dilation_rate=[[1, 3, 5], [1, 3, 5], [1, 3, 5]], + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + padding_type="REFLECT", + use_final_nolinear_activation=True, + is_weight_norm=True, + initializer_seed=42, + **kwargs + ): + """Init parameters for HifiGAN Generator model.""" + self.out_channels = out_channels + self.kernel_size = kernel_size + self.filters = filters + self.use_bias = use_bias + self.upsample_scales = upsample_scales + self.stacks = stacks + self.stack_kernel_size = stack_kernel_size + self.stack_dilation_rate = stack_dilation_rate + self.nonlinear_activation = nonlinear_activation + self.nonlinear_activation_params = nonlinear_activation_params + self.padding_type = padding_type + self.use_final_nolinear_activation = use_final_nolinear_activation + self.is_weight_norm = is_weight_norm + self.initializer_seed = initializer_seed + + +class HifiGANDiscriminatorConfig(object): + """Initialize HifiGAN Discriminator Config.""" + + def __init__( + self, + out_channels=1, + period_scales=[2, 3, 5, 7, 11], + n_layers=5, + kernel_size=5, + strides=3, + filters=8, + filter_scales=4, + max_filters=1024, + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + is_weight_norm=True, + initializer_seed=42, + **kwargs + ): + """Init parameters for MelGAN Discriminator model.""" + self.out_channels = out_channels + self.period_scales = period_scales + self.n_layers = n_layers + self.kernel_size = kernel_size + self.strides = strides + self.filters = filters + self.filter_scales = filter_scales + self.max_filters = max_filters + self.nonlinear_activation = 
nonlinear_activation + self.nonlinear_activation_params = nonlinear_activation_params + self.is_weight_norm = is_weight_norm + self.initializer_seed = initializer_seed diff --git a/TensorFlowTTS/tensorflow_tts/configs/mb_melgan.py b/TensorFlowTTS/tensorflow_tts/configs/mb_melgan.py new file mode 100644 index 0000000000000000000000000000000000000000..a97795a500213124c4a6c83a061c8f5a63d5a1fc --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/configs/mb_melgan.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Multi-band MelGAN Config object.""" + +from tensorflow_tts.configs import MelGANDiscriminatorConfig, MelGANGeneratorConfig + + +class MultiBandMelGANGeneratorConfig(MelGANGeneratorConfig): + """Initialize Multi-band MelGAN Generator Config.""" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.subbands = kwargs.pop("subbands", 4) + self.taps = kwargs.pop("taps", 62) + self.cutoff_ratio = kwargs.pop("cutoff_ratio", 0.142) + self.beta = kwargs.pop("beta", 9.0) + + +class MultiBandMelGANDiscriminatorConfig(MelGANDiscriminatorConfig): + """Initialize Multi-band MelGAN Discriminator Config.""" + + def __init__(self, **kwargs): + super().__init__(**kwargs) diff --git a/TensorFlowTTS/tensorflow_tts/configs/melgan.py b/TensorFlowTTS/tensorflow_tts/configs/melgan.py new file mode 100644 index 0000000000000000000000000000000000000000..dad47ed120c906d7f364ea2c50ba4bdd92bde012 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/configs/melgan.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""MelGAN Config object.""" + + +from tensorflow_tts.configs import BaseConfig + + +class MelGANGeneratorConfig(BaseConfig): + """Initialize MelGAN Generator Config.""" + + def __init__( + self, + out_channels=1, + kernel_size=7, + filters=512, + use_bias=True, + upsample_scales=[8, 8, 2, 2], + stack_kernel_size=3, + stacks=3, + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + padding_type="REFLECT", + use_final_nolinear_activation=True, + is_weight_norm=True, + initializer_seed=42, + **kwargs + ): + """Init parameters for MelGAN Generator model.""" + self.out_channels = out_channels + self.kernel_size = kernel_size + self.filters = filters + self.use_bias = use_bias + self.upsample_scales = upsample_scales + self.stack_kernel_size = stack_kernel_size + self.stacks = stacks + self.nonlinear_activation = nonlinear_activation + self.nonlinear_activation_params = nonlinear_activation_params + self.padding_type = padding_type + self.use_final_nolinear_activation = use_final_nolinear_activation + self.is_weight_norm = is_weight_norm + self.initializer_seed = initializer_seed + + +class MelGANDiscriminatorConfig(object): + """Initialize MelGAN Discriminator Config.""" + + def __init__( + self, + out_channels=1, + scales=3, + downsample_pooling="AveragePooling1D", + downsample_pooling_params={"pool_size": 4, "strides": 2,}, + kernel_sizes=[5, 3], + filters=16, + max_downsample_filters=1024, + use_bias=True, + downsample_scales=[4, 4, 4, 4], + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + padding_type="REFLECT", + is_weight_norm=True, + initializer_seed=42, + **kwargs + ): + """Init parameters for MelGAN Discriminator model.""" + self.out_channels = out_channels + self.scales = scales + self.downsample_pooling = downsample_pooling + self.downsample_pooling_params = downsample_pooling_params + self.kernel_sizes = kernel_sizes + self.filters = filters + self.max_downsample_filters = max_downsample_filters 
+ self.use_bias = use_bias + self.downsample_scales = downsample_scales + self.nonlinear_activation = nonlinear_activation + self.nonlinear_activation_params = nonlinear_activation_params + self.padding_type = padding_type + self.is_weight_norm = is_weight_norm + self.initializer_seed = initializer_seed diff --git a/TensorFlowTTS/tensorflow_tts/configs/parallel_wavegan.py b/TensorFlowTTS/tensorflow_tts/configs/parallel_wavegan.py new file mode 100644 index 0000000000000000000000000000000000000000..10dee489b7ed6704f946ca4d5a8cc0e863c25dd0 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/configs/parallel_wavegan.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""ParallelWaveGAN Config object."""


from tensorflow_tts.configs import BaseConfig


class ParallelWaveGANGeneratorConfig(BaseConfig):
    """Initialize ParallelWaveGAN Generator Config."""

    def __init__(
        self,
        out_channels=1,
        kernel_size=3,
        n_layers=30,
        stacks=3,
        residual_channels=64,
        gate_channels=128,
        skip_channels=64,
        aux_channels=80,
        aux_context_window=2,
        dropout_rate=0.0,
        use_bias=True,
        use_causal_conv=False,
        upsample_conditional_features=True,
        upsample_params=None,
        initializer_seed=42,
        **kwargs,
    ):
        """Init parameters for ParallelWaveGAN Generator model.

        Args:
            out_channels (int): Number of output channels.
            kernel_size (int): Kernel size of dilated convolutions.
            n_layers (int): Number of residual conv layers.
            stacks (int): Number of dilation-cycle stacks.
            residual_channels (int): Channels of residual connections.
            gate_channels (int): Channels of gated activations.
            skip_channels (int): Channels of skip connections.
            aux_channels (int): Channels of auxiliary (mel) features.
            aux_context_window (int): Context window for auxiliary features.
            dropout_rate (float): Dropout rate.
            use_bias (bool): Whether to add bias in conv layers.
            use_causal_conv (bool): Whether to use causal convolutions.
            upsample_conditional_features (bool): Whether to upsample the
                conditional features inside the network.
            upsample_params (dict): Upsampling parameters
                (default {"upsample_scales": [4, 4, 4, 4]}).
            initializer_seed (int): Seed for weight initializers.
        """
        self.out_channels = out_channels
        self.kernel_size = kernel_size
        self.n_layers = n_layers
        self.stacks = stacks
        self.residual_channels = residual_channels
        self.gate_channels = gate_channels
        self.skip_channels = skip_channels
        self.aux_channels = aux_channels
        self.aux_context_window = aux_context_window
        self.dropout_rate = dropout_rate
        self.use_bias = use_bias
        self.use_causal_conv = use_causal_conv
        self.upsample_conditional_features = upsample_conditional_features
        # Created per-instance to avoid a shared mutable default argument.
        self.upsample_params = (
            {"upsample_scales": [4, 4, 4, 4]}
            if upsample_params is None
            else upsample_params
        )
        self.initializer_seed = initializer_seed


class ParallelWaveGANDiscriminatorConfig(object):
    """Initialize ParallelWaveGAN Discriminator Config."""

    def __init__(
        self,
        out_channels=1,
        kernel_size=3,
        n_layers=10,
        conv_channels=64,
        use_bias=True,
        dilation_factor=1,
        nonlinear_activation="LeakyReLU",
        nonlinear_activation_params=None,
        initializer_seed=42,
        apply_sigmoid_at_last=False,
        **kwargs,
    ):
        """Init parameters for ParallelWaveGAN Discriminator model.

        Args:
            out_channels (int): Number of output channels.
            kernel_size (int): Kernel size of conv layers.
            n_layers (int): Number of conv layers.
            conv_channels (int): Channels of conv layers.
            use_bias (bool): Whether to add bias in conv layers.
            dilation_factor (int): Dilation factor per layer.
            nonlinear_activation (str): Activation function module name.
            nonlinear_activation_params (dict): Hyperparameters for activation
                function (default {"alpha": 0.2}).
            initializer_seed (int): Seed for weight initializers.
            apply_sigmoid_at_last (bool): Whether to apply sigmoid on the
                final output.
        """
        self.out_channels = out_channels
        self.kernel_size = kernel_size
        self.n_layers = n_layers
        self.conv_channels = conv_channels
        self.use_bias = use_bias
        self.dilation_factor = dilation_factor
        self.nonlinear_activation = nonlinear_activation
        # Created per-instance to avoid a shared mutable default argument.
        self.nonlinear_activation_params = (
            {"alpha": 0.2}
            if nonlinear_activation_params is None
            else nonlinear_activation_params
        )
        self.initializer_seed = initializer_seed
        self.apply_sigmoid_at_last = apply_sigmoid_at_last
"""Tacotron-2 Config object."""


from tensorflow_tts.configs import BaseConfig
from tensorflow_tts.processor.jsut import JSUT_SYMBOLS
from tensorflow_tts.processor.ljspeech import LJSPEECH_SYMBOLS as lj_symbols
from tensorflow_tts.processor.kss import KSS_SYMBOLS as kss_symbols
from tensorflow_tts.processor.baker import BAKER_SYMBOLS as bk_symbols
from tensorflow_tts.processor.libritts import LIBRITTS_SYMBOLS as lbri_symbols
from tensorflow_tts.processor.ljspeechu import LJSPEECH_U_SYMBOLS as lju_symbols
from tensorflow_tts.processor.synpaflex import SYNPAFLEX_SYMBOLS as synpaflex_symbols
from tensorflow_tts.processor.jsut import JSUT_SYMBOLS as jsut_symbols


class Tacotron2Config(BaseConfig):
    """Initialize Tacotron-2 Config."""

    # Symbol tables for datasets whose vocabulary size is fixed by their
    # processor.  "ljspeech" is handled separately so callers may override
    # vocab_size explicitly (matching the historical behavior).
    _DATASET_SYMBOLS = {
        "kss": kss_symbols,
        "baker": bk_symbols,
        "libritts": lbri_symbols,
        "ljspeechu": lju_symbols,
        "synpaflex": synpaflex_symbols,
        "jsut": jsut_symbols,
    }

    def __init__(
        self,
        dataset="ljspeech",
        vocab_size=len(lj_symbols),
        embedding_hidden_size=512,
        initializer_range=0.02,
        layer_norm_eps=1e-6,
        embedding_dropout_prob=0.1,
        n_speakers=5,
        n_conv_encoder=3,
        encoder_conv_filters=512,
        encoder_conv_kernel_sizes=5,
        encoder_conv_activation="mish",
        encoder_conv_dropout_rate=0.5,
        encoder_lstm_units=256,
        reduction_factor=5,
        n_prenet_layers=2,
        prenet_units=256,
        prenet_activation="mish",
        prenet_dropout_rate=0.5,
        n_lstm_decoder=1,
        decoder_lstm_units=1024,
        attention_type="lsa",
        attention_dim=128,
        attention_filters=32,
        attention_kernel=31,
        n_mels=80,
        n_conv_postnet=5,
        postnet_conv_filters=512,
        postnet_conv_kernel_sizes=5,
        postnet_dropout_rate=0.1,
    ):
        """Init parameters for Tacotron-2 model.

        Args:
            dataset (str): Dataset name; selects the vocabulary size from the
                matching processor symbol table ("ljspeech" uses the
                vocab_size argument instead).
            vocab_size (int): Vocabulary size; only honored for "ljspeech".

        Raises:
            ValueError: If `dataset` is not a known dataset name.
        """
        # vocab_size: ljspeech keeps the caller-supplied value; other known
        # datasets are pinned to their processor's symbol table size.
        if dataset == "ljspeech":
            self.vocab_size = vocab_size
        elif dataset in self._DATASET_SYMBOLS:
            self.vocab_size = len(self._DATASET_SYMBOLS[dataset])
        else:
            raise ValueError("No such dataset: {}".format(dataset))

        # encoder param
        self.embedding_hidden_size = embedding_hidden_size
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        self.embedding_dropout_prob = embedding_dropout_prob
        self.n_speakers = n_speakers
        self.n_conv_encoder = n_conv_encoder
        self.encoder_conv_filters = encoder_conv_filters
        self.encoder_conv_kernel_sizes = encoder_conv_kernel_sizes
        self.encoder_conv_activation = encoder_conv_activation
        self.encoder_conv_dropout_rate = encoder_conv_dropout_rate
        self.encoder_lstm_units = encoder_lstm_units

        # decoder param
        self.reduction_factor = reduction_factor
        self.n_prenet_layers = n_prenet_layers
        self.prenet_units = prenet_units
        self.prenet_activation = prenet_activation
        self.prenet_dropout_rate = prenet_dropout_rate
        self.n_lstm_decoder = n_lstm_decoder
        self.decoder_lstm_units = decoder_lstm_units
        self.attention_type = attention_type
        self.attention_dim = attention_dim
        self.attention_filters = attention_filters
        self.attention_kernel = attention_kernel
        self.n_mels = n_mels

        # postnet
        self.n_conv_postnet = n_conv_postnet
        self.postnet_conv_filters = postnet_conv_filters
        self.postnet_conv_kernel_sizes = postnet_conv_kernel_sizes
        self.postnet_dropout_rate = postnet_dropout_rate
100644 index 0000000000000000000000000000000000000000..82519f068746d4b4b6cb6a34d3ac70e56cc55a5f --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/datasets/abstract_dataset.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Abstract Dataset modules.""" + +import abc + +import tensorflow as tf + + +class AbstractDataset(metaclass=abc.ABCMeta): + """Abstract Dataset module for Dataset Loader.""" + + @abc.abstractmethod + def get_args(self): + """Return args for generator function.""" + pass + + @abc.abstractmethod + def generator(self): + """Generator function, should have args from get_args function.""" + pass + + @abc.abstractmethod + def get_output_dtypes(self): + """Return output dtypes for each element from generator.""" + pass + + @abc.abstractmethod + def get_len_dataset(self): + """Return number of samples on dataset.""" + pass + + def create( + self, + allow_cache=False, + batch_size=1, + is_shuffle=False, + map_fn=None, + reshuffle_each_iteration=True, + ): + """Create tf.dataset function.""" + output_types = self.get_output_dtypes() + datasets = tf.data.Dataset.from_generator( + self.generator, output_types=output_types, args=(self.get_args()) + ) + + if allow_cache: + datasets = datasets.cache() + + if is_shuffle: + datasets = datasets.shuffle( + self.get_len_dataset(), + reshuffle_each_iteration=reshuffle_each_iteration, + ) + + if batch_size > 1 and map_fn is None: + 
raise ValueError("map function must define when batch_size > 1.") + + if map_fn is not None: + datasets = datasets.map(map_fn, tf.data.experimental.AUTOTUNE) + + datasets = datasets.batch(batch_size) + datasets = datasets.prefetch(tf.data.experimental.AUTOTUNE) + + return datasets diff --git a/TensorFlowTTS/tensorflow_tts/datasets/audio_dataset.py b/TensorFlowTTS/tensorflow_tts/datasets/audio_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..35751b6c302199a9cd61c6f05ce972971c1af8ca --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/datasets/audio_dataset.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Audio modules.""" + +import logging +import os + +import numpy as np +import tensorflow as tf + +from tensorflow_tts.datasets.abstract_dataset import AbstractDataset +from tensorflow_tts.utils import find_files + + +class AudioDataset(AbstractDataset): + """Tensorflow compatible audio dataset.""" + + def __init__( + self, + root_dir, + audio_query="*-wave.npy", + audio_load_fn=np.load, + audio_length_threshold=0, + ): + """Initialize dataset. + + Args: + root_dir (str): Root directory including dumped files. + audio_query (str): Query to find feature files in root_dir. + audio_load_fn (func): Function to load feature file. + audio_length_threshold (int): Threshold to remove short feature files. 
+ return_utt_id (bool): Whether to return the utterance id with arrays. + + """ + # find all of mel files. + audio_files = sorted(find_files(root_dir, audio_query)) + audio_lengths = [audio_load_fn(f).shape[0] for f in audio_files] + + # assert the number of files + assert len(audio_files) != 0, f"Not found any mel files in ${root_dir}." + + if ".npy" in audio_query: + suffix = audio_query[1:] + utt_ids = [os.path.basename(f).replace(suffix, "") for f in audio_files] + + # set global params + self.utt_ids = utt_ids + self.audio_files = audio_files + self.audio_lengths = audio_lengths + self.audio_load_fn = audio_load_fn + self.audio_length_threshold = audio_length_threshold + + def get_args(self): + return [self.utt_ids] + + def generator(self, utt_ids): + for i, utt_id in enumerate(utt_ids): + audio_file = self.audio_files[i] + audio = self.audio_load_fn(audio_file) + audio_length = self.audio_lengths[i] + + items = {"utt_ids": utt_id, "audios": audio, "audio_lengths": audio_length} + + yield items + + def get_output_dtypes(self): + output_types = { + "utt_ids": tf.string, + "audios": tf.float32, + "audio_lengths": tf.float32, + } + return output_types + + def create( + self, + allow_cache=False, + batch_size=1, + is_shuffle=False, + map_fn=None, + reshuffle_each_iteration=True, + ): + """Create tf.dataset function.""" + output_types = self.get_output_dtypes() + datasets = tf.data.Dataset.from_generator( + self.generator, output_types=output_types, args=(self.get_args()) + ) + + datasets = datasets.filter( + lambda x: x["audio_lengths"] > self.audio_length_threshold + ) + + if allow_cache: + datasets = datasets.cache() + + if is_shuffle: + datasets = datasets.shuffle( + self.get_len_dataset(), + reshuffle_each_iteration=reshuffle_each_iteration, + ) + + # define padded shapes + padded_shapes = { + "utt_ids": [], + "audios": [None], + "audio_lengths": [], + } + + datasets = datasets.padded_batch(batch_size, padded_shapes=padded_shapes) + datasets = 
datasets.prefetch(tf.data.experimental.AUTOTUNE) + return datasets + + def get_len_dataset(self): + return len(self.utt_ids) + + def __name__(self): + return "AudioDataset" diff --git a/TensorFlowTTS/tensorflow_tts/datasets/mel_dataset.py b/TensorFlowTTS/tensorflow_tts/datasets/mel_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..0922fb50d753eecc761f528893e7894550100387 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/datasets/mel_dataset.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Dataset modules.""" + +import logging +import os + +import numpy as np +import tensorflow as tf + +from tensorflow_tts.datasets.abstract_dataset import AbstractDataset +from tensorflow_tts.utils import find_files + + +class MelDataset(AbstractDataset): + """Tensorflow compatible mel dataset.""" + + def __init__( + self, + root_dir, + mel_query="*-raw-feats.h5", + mel_load_fn=np.load, + mel_length_threshold=0, + ): + """Initialize dataset. + + Args: + root_dir (str): Root directory including dumped files. + mel_query (str): Query to find feature files in root_dir. + mel_load_fn (func): Function to load feature file. + mel_length_threshold (int): Threshold to remove short feature files. + + """ + # find all of mel files. 
+ mel_files = sorted(find_files(root_dir, mel_query)) + mel_lengths = [mel_load_fn(f).shape[0] for f in mel_files] + + # assert the number of files + assert len(mel_files) != 0, f"Not found any mel files in ${root_dir}." + + if ".npy" in mel_query: + suffix = mel_query[1:] + utt_ids = [os.path.basename(f).replace(suffix, "") for f in mel_files] + + # set global params + self.utt_ids = utt_ids + self.mel_files = mel_files + self.mel_lengths = mel_lengths + self.mel_load_fn = mel_load_fn + self.mel_length_threshold = mel_length_threshold + + def get_args(self): + return [self.utt_ids] + + def generator(self, utt_ids): + for i, utt_id in enumerate(utt_ids): + mel_file = self.mel_files[i] + mel = self.mel_load_fn(mel_file) + mel_length = self.mel_lengths[i] + + items = {"utt_ids": utt_id, "mels": mel, "mel_lengths": mel_length} + + yield items + + def get_output_dtypes(self): + output_types = { + "utt_ids": tf.string, + "mels": tf.float32, + "mel_lengths": tf.int32, + } + return output_types + + def create( + self, + allow_cache=False, + batch_size=1, + is_shuffle=False, + map_fn=None, + reshuffle_each_iteration=True, + ): + """Create tf.dataset function.""" + output_types = self.get_output_dtypes() + datasets = tf.data.Dataset.from_generator( + self.generator, output_types=output_types, args=(self.get_args()) + ) + + datasets = datasets.filter( + lambda x: x["mel_lengths"] > self.mel_length_threshold + ) + + if allow_cache: + datasets = datasets.cache() + + if is_shuffle: + datasets = datasets.shuffle( + self.get_len_dataset(), + reshuffle_each_iteration=reshuffle_each_iteration, + ) + + # define padded shapes + padded_shapes = { + "utt_ids": [], + "mels": [None, 80], + "mel_lengths": [], + } + + datasets = datasets.padded_batch(batch_size, padded_shapes=padded_shapes) + datasets = datasets.prefetch(tf.data.experimental.AUTOTUNE) + return datasets + + def get_len_dataset(self): + return len(self.utt_ids) + + def __name__(self): + return "MelDataset" diff --git 
a/TensorFlowTTS/tensorflow_tts/inference/__init__.py b/TensorFlowTTS/tensorflow_tts/inference/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1067a42f5982d26c49d5dbab5ac667a1a1383c34 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/inference/__init__.py @@ -0,0 +1,3 @@ +from tensorflow_tts.inference.auto_model import TFAutoModel +from tensorflow_tts.inference.auto_config import AutoConfig +from tensorflow_tts.inference.auto_processor import AutoProcessor diff --git a/TensorFlowTTS/tensorflow_tts/inference/__pycache__/__init__.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1d33293ce450659f13eaddf04052a97ee1632d52 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/__init__.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/inference/__pycache__/auto_config.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/auto_config.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..709c2bceb5fe817ec664a1733bf1df0e8ff99de3 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/auto_config.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/inference/__pycache__/auto_model.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/auto_model.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..decfa12b1f8f9f4149bf383fef5c94727b3cdb12 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/auto_model.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/inference/__pycache__/auto_processor.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/auto_processor.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..474af0ab5734e7d77ea954d29800231561b72064 Binary files /dev/null and 
b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/auto_processor.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/inference/__pycache__/savable_models.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/savable_models.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f20c161a9e55e8b9ab32910740c538f2f1c03fab Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/inference/__pycache__/savable_models.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/inference/auto_config.py b/TensorFlowTTS/tensorflow_tts/inference/auto_config.py new file mode 100644 index 0000000000000000000000000000000000000000..6dc0616cc11ee5ec662e9ec7e14c577d09863693 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/inference/auto_config.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The HuggingFace Inc. team and Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""Tensorflow Auto Config modules."""

import logging
import yaml
import os
from collections import OrderedDict

from tensorflow_tts.configs import (
    FastSpeechConfig,
    FastSpeech2Config,
    MelGANGeneratorConfig,
    MultiBandMelGANGeneratorConfig,
    HifiGANGeneratorConfig,
    Tacotron2Config,
    ParallelWaveGANGeneratorConfig,
)

from tensorflow_tts.utils import CACHE_DIRECTORY, CONFIG_FILE_NAME, LIBRARY_NAME
from tensorflow_tts import __version__ as VERSION
from huggingface_hub import hf_hub_url, cached_download

# Maps the `model_type` key of a config.yaml to its config class.
CONFIG_MAPPING = OrderedDict(
    [
        ("fastspeech", FastSpeechConfig),
        ("fastspeech2", FastSpeech2Config),
        ("multiband_melgan_generator", MultiBandMelGANGeneratorConfig),
        ("melgan_generator", MelGANGeneratorConfig),
        ("hifigan_generator", HifiGANGeneratorConfig),
        ("tacotron2", Tacotron2Config),
        ("parallel_wavegan_generator", ParallelWaveGANGeneratorConfig),
    ]
)


class AutoConfig:
    """Factory that loads the matching config class from a config.yaml."""

    def __init__(self):
        raise EnvironmentError(
            "AutoConfig is designed to be instantiated "
            "using the `AutoConfig.from_pretrained(pretrained_path)` method."
        )

    @classmethod
    def from_pretrained(cls, pretrained_path, **kwargs):
        """Instantiate a config from a local YAML file or a HF Hub repo id.

        Args:
            pretrained_path (str): Path to a config.yaml, or a Hugging Face
                Hub repo id from which the config is downloaded and cached.
            **kwargs: Extra parameters forwarded to the config constructor.

        Returns:
            A config instance selected by the file's `model_type` key.

        Raises:
            ValueError: If the YAML has no recognized `model_type`.
        """
        # A non-file path is treated as a HF Hub repo id: download and cache.
        if not os.path.isfile(pretrained_path):
            # retrieve correct hub url
            download_url = hf_hub_url(
                repo_id=pretrained_path, filename=CONFIG_FILE_NAME
            )

            pretrained_path = str(
                cached_download(
                    url=download_url,
                    library_name=LIBRARY_NAME,
                    library_version=VERSION,
                    cache_dir=CACHE_DIRECTORY,
                )
            )

        with open(pretrained_path) as f:
            # NOTE(review): yaml.Loader supports the full YAML feature set;
            # consider yaml.SafeLoader if configs may come from untrusted repos.
            config = yaml.load(f, Loader=yaml.Loader)

        try:
            model_type = config["model_type"]
            config_class = CONFIG_MAPPING[model_type]
            config_class = config_class(**config[model_type + "_params"], **kwargs)
            config_class.set_config_params(config)
            return config_class
        except Exception as err:
            # Chain the original failure so the real cause is not hidden.
            raise ValueError(
                "Unrecognized config in {}. "
                "Should have a `model_type` key in its config.yaml, or contain one of the following strings "
                "in its name: {}".format(
                    pretrained_path, ", ".join(CONFIG_MAPPING.keys())
                )
            ) from err
"""Tensorflow Auto Model modules."""

import logging
import warnings
import os
import copy

from collections import OrderedDict

from tensorflow_tts.configs import (
    FastSpeechConfig,
    FastSpeech2Config,
    MelGANGeneratorConfig,
    MultiBandMelGANGeneratorConfig,
    HifiGANGeneratorConfig,
    Tacotron2Config,
    ParallelWaveGANGeneratorConfig,
)

from tensorflow_tts.models import (
    TFMelGANGenerator,
    TFMBMelGANGenerator,
    TFHifiGANGenerator,
    TFParallelWaveGANGenerator,
)

from tensorflow_tts.inference.savable_models import (
    SavableTFFastSpeech,
    SavableTFFastSpeech2,
    SavableTFTacotron2
)
from tensorflow_tts.utils import CACHE_DIRECTORY, MODEL_FILE_NAME, LIBRARY_NAME
from tensorflow_tts import __version__ as VERSION
from huggingface_hub import hf_hub_url, cached_download


# Maps a config class to the model class it instantiates.  Order matters:
# subclasses (e.g. FastSpeech2Config) must precede their base classes so the
# isinstance check below does not match the base first.
TF_MODEL_MAPPING = OrderedDict(
    [
        (FastSpeech2Config, SavableTFFastSpeech2),
        (FastSpeechConfig, SavableTFFastSpeech),
        (MultiBandMelGANGeneratorConfig, TFMBMelGANGenerator),
        (MelGANGeneratorConfig, TFMelGANGenerator),
        (Tacotron2Config, SavableTFTacotron2),
        (HifiGANGeneratorConfig, TFHifiGANGenerator),
        (ParallelWaveGANGeneratorConfig, TFParallelWaveGANGenerator),
    ]
)


class TFAutoModel(object):
    """General model class for inferencing."""

    def __init__(self):
        raise EnvironmentError("Cannot be instantiated using `__init__()`")

    @classmethod
    def from_pretrained(cls, pretrained_path=None, config=None, **kwargs):
        """Build a model from a local .h5 file or a HF Hub repo id.

        Args:
            pretrained_path (str): Local weights file, or a Hugging Face Hub
                repo id from which weights (and config) are downloaded.
            config: Config instance; required when loading a local file,
                otherwise fetched from the same repo.
            **kwargs: Extra parameters forwarded to the model constructor.

        Returns:
            A built model with weights loaded (when a path was given).

        Raises:
            ValueError: If config's class is not in TF_MODEL_MAPPING.
        """
        # load weights from hf hub
        if pretrained_path is not None:
            if not os.path.isfile(pretrained_path):
                # retrieve correct hub url
                download_url = hf_hub_url(repo_id=pretrained_path, filename=MODEL_FILE_NAME)

                downloaded_file = str(
                    cached_download(
                        url=download_url,
                        library_name=LIBRARY_NAME,
                        library_version=VERSION,
                        cache_dir=CACHE_DIRECTORY,
                    )
                )

                # load config from repo as well (imported lazily to avoid a
                # circular import with tensorflow_tts.inference).
                if config is None:
                    from tensorflow_tts.inference import AutoConfig

                    config = AutoConfig.from_pretrained(pretrained_path)

                pretrained_path = downloaded_file

        # NOTE(review): assert is stripped under `python -O`; kept for
        # backward compatibility with callers expecting AssertionError.
        assert config is not None, "Please make sure to pass a config along to load a model from a local file"

        for config_class, model_class in TF_MODEL_MAPPING.items():
            # The extra substring check disambiguates config subclasses that
            # would also pass the isinstance test.
            if isinstance(config, config_class) and str(config_class.__name__) in str(
                config
            ):
                model = model_class(config=config, **kwargs)
                model.set_config(config)
                model._build()
                if pretrained_path is not None and ".h5" in pretrained_path:
                    try:
                        model.load_weights(pretrained_path)
                    except Exception:
                        # Was a bare `except:`; narrowed so Ctrl-C and
                        # SystemExit are not swallowed.  Fall back to a
                        # tolerant, name-based load.
                        model.load_weights(
                            pretrained_path, by_name=True, skip_mismatch=True
                        )
                return model

        raise ValueError(
            "Unrecognized configuration class {} for this kind of TFAutoModel: {}.\n"
            "Model type should be one of {}.".format(
                config.__class__,
                cls.__name__,
                ", ".join(c.__name__ for c in TF_MODEL_MAPPING.keys()),
            )
        )
"""Tensorflow Auto Processor modules."""

import logging
import json
import os
from collections import OrderedDict

from tensorflow_tts.processor import (
    LJSpeechProcessor,
    KSSProcessor,
    BakerProcessor,
    LibriTTSProcessor,
    ThorstenProcessor,
    LJSpeechUltimateProcessor,
    SynpaflexProcessor,
    JSUTProcessor,
)

from tensorflow_tts.utils import CACHE_DIRECTORY, PROCESSOR_FILE_NAME, LIBRARY_NAME
from tensorflow_tts import __version__ as VERSION
from huggingface_hub import hf_hub_url, cached_download

# Maps the `processor_name` key of a processor.json to its processor class.
CONFIG_MAPPING = OrderedDict(
    [
        ("LJSpeechProcessor", LJSpeechProcessor),
        ("KSSProcessor", KSSProcessor),
        ("BakerProcessor", BakerProcessor),
        ("LibriTTSProcessor", LibriTTSProcessor),
        ("ThorstenProcessor", ThorstenProcessor),
        ("LJSpeechUltimateProcessor", LJSpeechUltimateProcessor),
        ("SynpaflexProcessor", SynpaflexProcessor),
        ("JSUTProcessor", JSUTProcessor),
    ]
)


class AutoProcessor:
    """Factory that loads the matching text processor from a mapper JSON."""

    def __init__(self):
        raise EnvironmentError(
            "AutoProcessor is designed to be instantiated "
            "using the `AutoProcessor.from_pretrained(pretrained_path)` method."
        )

    @classmethod
    def from_pretrained(cls, pretrained_path, **kwargs):
        """Instantiate a processor from a local JSON file or a HF Hub repo id.

        Args:
            pretrained_path (str): Path to a processor JSON mapper file, or a
                Hugging Face Hub repo id to download it from.
            **kwargs: Unused; accepted for interface symmetry with AutoConfig.

        Returns:
            A processor instance selected by the file's `processor_name` key.

        Raises:
            ValueError: If the JSON has no recognized `processor_name`.
        """
        # A non-file path is treated as a HF Hub repo id: download and cache.
        if not os.path.isfile(pretrained_path):
            # retrieve correct hub url
            download_url = hf_hub_url(repo_id=pretrained_path, filename=PROCESSOR_FILE_NAME)

            pretrained_path = str(
                cached_download(
                    url=download_url,
                    library_name=LIBRARY_NAME,
                    library_version=VERSION,
                    cache_dir=CACHE_DIRECTORY,
                )
            )
        with open(pretrained_path, "r") as f:
            config = json.load(f)

        try:
            processor_name = config["processor_name"]
            processor_class = CONFIG_MAPPING[processor_name]
            processor_class = processor_class(
                data_dir=None, loaded_mapper_path=pretrained_path
            )
            return processor_class
        except Exception as err:
            # Chain the original failure (consistent with AutoConfig) so the
            # real cause is not hidden.
            raise ValueError(
                "Unrecognized processor in {}. "
                "Should have a `processor_name` key in its config.json, or contain one of the following strings "
                "in its name: {}".format(
                    pretrained_path, ", ".join(CONFIG_MAPPING.keys())
                )
            ) from err
"""Tensorflow Savable Model modules."""

import numpy as np
import tensorflow as tf

from tensorflow_tts.models import (
    TFFastSpeech,
    TFFastSpeech2,
    TFMelGANGenerator,
    TFMBMelGANGenerator,
    TFHifiGANGenerator,
    TFTacotron2,
    TFParallelWaveGANGenerator,
)


class SavableTFTacotron2(TFTacotron2):
    """Tacotron-2 wrapper whose plain call() runs inference.

    NOTE(review): presumably this makes the model exportable via the Keras
    save APIs (call() becomes the serving path) -- confirm against usage.
    """

    def __init__(self, config, **kwargs):
        super().__init__(config, **kwargs)

    def call(self, inputs, training=False):
        # inputs is a 3-tuple: [input_ids, input_lengths, speaker_ids];
        # `training` is accepted for Keras compatibility but unused.
        input_ids, input_lengths, speaker_ids = inputs
        return super().inference(input_ids, input_lengths, speaker_ids)

    def _build(self):
        # One dummy forward pass with fixed toy inputs so all weights are
        # created before load_weights is called.
        input_ids = tf.convert_to_tensor([[1, 2, 3, 4, 5, 6, 7, 8, 9]], dtype=tf.int32)
        input_lengths = tf.convert_to_tensor([9], dtype=tf.int32)
        speaker_ids = tf.convert_to_tensor([0], dtype=tf.int32)
        self([input_ids, input_lengths, speaker_ids])


class SavableTFFastSpeech(TFFastSpeech):
    """FastSpeech wrapper whose plain call() runs inference (see above)."""

    def __init__(self, config, **kwargs):
        super().__init__(config, **kwargs)

    def call(self, inputs, training=False):
        # inputs is a 3-tuple: [input_ids, speaker_ids, speed_ratios].
        input_ids, speaker_ids, speed_ratios = inputs
        return super()._inference(input_ids, speaker_ids, speed_ratios)

    def _build(self):
        # Dummy forward pass to materialize weights (see SavableTFTacotron2).
        input_ids = tf.convert_to_tensor([[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]], tf.int32)
        speaker_ids = tf.convert_to_tensor([0], tf.int32)
        speed_ratios = tf.convert_to_tensor([1.0], tf.float32)
        self([input_ids, speaker_ids, speed_ratios])


class SavableTFFastSpeech2(TFFastSpeech2):
    """FastSpeech2 wrapper whose plain call() runs inference (see above)."""

    def __init__(self, config, **kwargs):
        super().__init__(config, **kwargs)

    def call(self, inputs, training=False):
        # inputs is a 5-tuple: [input_ids, speaker_ids, speed_ratios,
        # f0_ratios, energy_ratios].
        input_ids, speaker_ids, speed_ratios, f0_ratios, energy_ratios = inputs
        return super()._inference(
            input_ids, speaker_ids, speed_ratios, f0_ratios, energy_ratios
        )

    def _build(self):
        # Dummy forward pass to materialize weights (see SavableTFTacotron2).
        input_ids = tf.convert_to_tensor([[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]], tf.int32)
        speaker_ids = tf.convert_to_tensor([0], tf.int32)
        speed_ratios = tf.convert_to_tensor([1.0], tf.float32)
        f0_ratios = tf.convert_to_tensor([1.0], tf.float32)
        energy_ratios = tf.convert_to_tensor([1.0], tf.float32)
        self([input_ids, speaker_ids, speed_ratios, f0_ratios, energy_ratios])
+"""Spectrogram-based loss modules.""" + +import tensorflow as tf + + +class TFMelSpectrogram(tf.keras.layers.Layer): + """Mel Spectrogram loss.""" + + def __init__( + self, + n_mels=80, + f_min=80.0, + f_max=7600, + frame_length=1024, + frame_step=256, + fft_length=1024, + sample_rate=16000, + **kwargs + ): + """Initialize.""" + super().__init__(**kwargs) + self.frame_length = frame_length + self.frame_step = frame_step + self.fft_length = fft_length + + self.linear_to_mel_weight_matrix = tf.signal.linear_to_mel_weight_matrix( + n_mels, fft_length // 2 + 1, sample_rate, f_min, f_max + ) + + def _calculate_log_mels_spectrogram(self, signals): + """Calculate forward propagation. + Args: + signals (Tensor): signal (B, T). + Returns: + Tensor: Mel spectrogram (B, T', 80) + """ + stfts = tf.signal.stft( + signals, + frame_length=self.frame_length, + frame_step=self.frame_step, + fft_length=self.fft_length, + ) + linear_spectrograms = tf.abs(stfts) + mel_spectrograms = tf.tensordot( + linear_spectrograms, self.linear_to_mel_weight_matrix, 1 + ) + mel_spectrograms.set_shape( + linear_spectrograms.shape[:-1].concatenate( + self.linear_to_mel_weight_matrix.shape[-1:] + ) + ) + log_mel_spectrograms = tf.math.log(mel_spectrograms + 1e-6) # prevent nan. + return log_mel_spectrograms + + def call(self, y, x): + """Calculate forward propagation. + Args: + y (Tensor): Groundtruth signal (B, T). + x (Tensor): Predicted signal (B, T). + Returns: + Tensor: Mean absolute Error Spectrogram Loss. 
+ """ + y_mels = self._calculate_log_mels_spectrogram(y) + x_mels = self._calculate_log_mels_spectrogram(x) + return tf.reduce_mean( + tf.abs(y_mels - x_mels), axis=list(range(1, len(x_mels.shape))) + ) diff --git a/TensorFlowTTS/tensorflow_tts/losses/stft.py b/TensorFlowTTS/tensorflow_tts/losses/stft.py new file mode 100644 index 0000000000000000000000000000000000000000..631f77837f4598944dc3659cfb7adcb3c1d85ffc --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/losses/stft.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""STFT-based loss modules.""" + +import tensorflow as tf + + +class TFSpectralConvergence(tf.keras.layers.Layer): + """Spectral convergence loss.""" + + def __init__(self): + """Initialize.""" + super().__init__() + + def call(self, y_mag, x_mag): + """Calculate forward propagation. + Args: + y_mag (Tensor): Magnitude spectrogram of groundtruth signal (B, #frames, #freq_bins). + x_mag (Tensor): Magnitude spectrogram of predicted signal (B, #frames, #freq_bins). + Returns: + Tensor: Spectral convergence loss value. + """ + return tf.norm(y_mag - x_mag, ord="fro", axis=(-2, -1)) / tf.norm( + y_mag, ord="fro", axis=(-2, -1) + ) + + +class TFLogSTFTMagnitude(tf.keras.layers.Layer): + """Log STFT magnitude loss module.""" + + def __init__(self): + """Initialize.""" + super().__init__() + + def call(self, y_mag, x_mag): + """Calculate forward propagation. 
+ Args: + y_mag (Tensor): Magnitude spectrogram of groundtruth signal (B, #frames, #freq_bins). + x_mag (Tensor): Magnitude spectrogram of predicted signal (B, #frames, #freq_bins). + Returns: + Tensor: Spectral convergence loss value. + """ + return tf.abs(tf.math.log(y_mag) - tf.math.log(x_mag)) + + +class TFSTFT(tf.keras.layers.Layer): + """STFT loss module.""" + + def __init__(self, frame_length=600, frame_step=120, fft_length=1024): + """Initialize.""" + super().__init__() + self.frame_length = frame_length + self.frame_step = frame_step + self.fft_length = fft_length + self.spectral_convergenge_loss = TFSpectralConvergence() + self.log_stft_magnitude_loss = TFLogSTFTMagnitude() + + def call(self, y, x): + """Calculate forward propagation. + Args: + y (Tensor): Groundtruth signal (B, T). + x (Tensor): Predicted signal (B, T). + Returns: + Tensor: Spectral convergence loss value (pre-reduce). + Tensor: Log STFT magnitude loss value (pre-reduce). + """ + x_mag = tf.abs( + tf.signal.stft( + signals=x, + frame_length=self.frame_length, + frame_step=self.frame_step, + fft_length=self.fft_length, + ) + ) + y_mag = tf.abs( + tf.signal.stft( + signals=y, + frame_length=self.frame_length, + frame_step=self.frame_step, + fft_length=self.fft_length, + ) + ) + + # add small number to prevent nan value. + # compatible with pytorch version. + x_mag = tf.clip_by_value(tf.math.sqrt(x_mag ** 2 + 1e-7), 1e-7, 1e3) + y_mag = tf.clip_by_value(tf.math.sqrt(y_mag ** 2 + 1e-7), 1e-7, 1e3) + + sc_loss = self.spectral_convergenge_loss(y_mag, x_mag) + mag_loss = self.log_stft_magnitude_loss(y_mag, x_mag) + + return sc_loss, mag_loss + + +class TFMultiResolutionSTFT(tf.keras.layers.Layer): + """Multi resolution STFT loss module.""" + + def __init__( + self, + fft_lengths=[1024, 2048, 512], + frame_lengths=[600, 1200, 240], + frame_steps=[120, 240, 50], + ): + """Initialize Multi resolution STFT loss module. + Args: + frame_lengths (list): List of FFT sizes. 
+ frame_steps (list): List of hop sizes. + fft_lengths (list): List of window lengths. + """ + super().__init__() + assert len(frame_lengths) == len(frame_steps) == len(fft_lengths) + self.stft_losses = [] + for frame_length, frame_step, fft_length in zip( + frame_lengths, frame_steps, fft_lengths + ): + self.stft_losses.append(TFSTFT(frame_length, frame_step, fft_length)) + + def call(self, y, x): + """Calculate forward propagation. + Args: + y (Tensor): Groundtruth signal (B, T). + x (Tensor): Predicted signal (B, T). + Returns: + Tensor: Multi resolution spectral convergence loss value. + Tensor: Multi resolution log STFT magnitude loss value. + """ + sc_loss = 0.0 + mag_loss = 0.0 + for f in self.stft_losses: + sc_l, mag_l = f(y, x) + sc_loss += tf.reduce_mean(sc_l, axis=list(range(1, len(sc_l.shape)))) + mag_loss += tf.reduce_mean(mag_l, axis=list(range(1, len(mag_l.shape)))) + + sc_loss /= len(self.stft_losses) + mag_loss /= len(self.stft_losses) + + return sc_loss, mag_loss diff --git a/TensorFlowTTS/tensorflow_tts/models/__init__.py b/TensorFlowTTS/tensorflow_tts/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1190eb7d8dbca4d2e151bd1c6d39757eef91ba6f --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/models/__init__.py @@ -0,0 +1,18 @@ +from tensorflow_tts.models.base_model import BaseModel +from tensorflow_tts.models.fastspeech import TFFastSpeech +from tensorflow_tts.models.fastspeech2 import TFFastSpeech2 +from tensorflow_tts.models.melgan import ( + TFMelGANDiscriminator, + TFMelGANGenerator, + TFMelGANMultiScaleDiscriminator, +) +from tensorflow_tts.models.mb_melgan import TFPQMF +from tensorflow_tts.models.mb_melgan import TFMBMelGANGenerator +from tensorflow_tts.models.hifigan import ( + TFHifiGANGenerator, + TFHifiGANMultiPeriodDiscriminator, + TFHifiGANPeriodDiscriminator +) +from tensorflow_tts.models.tacotron2 import TFTacotron2 +from tensorflow_tts.models.parallel_wavegan import TFParallelWaveGANGenerator 
+from tensorflow_tts.models.parallel_wavegan import TFParallelWaveGANDiscriminator diff --git a/TensorFlowTTS/tensorflow_tts/models/__pycache__/__init__.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/models/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d9bcd3c840ef5f48692f68554f8bac23660280da Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/models/__pycache__/base_model.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/models/__pycache__/base_model.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..abfabee17b2d86d338b1e2609ecac018a7d82511 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/models/__pycache__/base_model.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/models/__pycache__/fastspeech.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/models/__pycache__/fastspeech.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6c00816a8de51caf86ed3b932b2c03e380f3a6c Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/models/__pycache__/fastspeech.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/models/__pycache__/fastspeech2.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/models/__pycache__/fastspeech2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68f039a8c8bab8d343c07f119f952af5ce471075 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/models/__pycache__/fastspeech2.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/models/__pycache__/hifigan.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/models/__pycache__/hifigan.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3227b7c7afd75b23fdcf4ee311bd1abfb8f9002a Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/models/__pycache__/hifigan.cpython-311.pyc 
# -*- coding: utf-8 -*-
# Copyright 2020 TensorFlowTTS Team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base Model for all model."""

import tensorflow as tf
import yaml
import os
import numpy as np

from tensorflow_tts.utils.utils import MODEL_FILE_NAME, CONFIG_FILE_NAME


class BaseModel(tf.keras.Model):
    """Common base class for TensorFlowTTS models.

    Holds the configuration object and provides `save_pretrained` so every
    model serializes config + weights the same way.
    """

    def set_config(self, config):
        """Attach the configuration this model was built from."""
        self.config = config

    def save_pretrained(self, saved_path):
        """Write the model's config and weights into `saved_path`.

        The directory is created if missing; the config is saved first, then
        the weights under the project's canonical file name.
        """
        os.makedirs(saved_path, exist_ok=True)
        self.config.save_pretrained(saved_path)
        weights_path = os.path.join(saved_path, MODEL_FILE_NAME)
        self.save_weights(weights_path)
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Model modules for FastSpeech."""

import numpy as np
import tensorflow as tf

from tensorflow_tts.models import BaseModel


def get_initializer(initializer_range=0.02):
    """Creates a `tf.initializers.truncated_normal` with the given range.

    Args:
        initializer_range: float, initializer range for stddev.

    Returns:
        TruncatedNormal initializer with stddev = `initializer_range`.

    """
    return tf.keras.initializers.TruncatedNormal(stddev=initializer_range)


def gelu(x):
    """Gaussian Error Linear unit."""
    cdf = 0.5 * (1.0 + tf.math.erf(x / tf.math.sqrt(2.0)))
    return x * cdf


def gelu_new(x):
    """Smoother gaussian Error Linear Unit (tanh approximation of GELU)."""
    cdf = 0.5 * (1.0 + tf.tanh((np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3)))))
    return x * cdf


def swish(x):
    """Swish activation function."""
    return tf.nn.swish(x)


def mish(x):
    """Mish activation function: x * tanh(softplus(x))."""
    return x * tf.math.tanh(tf.math.softplus(x))


# Maps activation names used in model configs to callables.
ACT2FN = {
    "identity": tf.keras.layers.Activation("linear"),
    "tanh": tf.keras.layers.Activation("tanh"),
    "gelu": tf.keras.layers.Activation(gelu),
    "relu": tf.keras.activations.relu,
    "swish": tf.keras.layers.Activation(swish),
    "gelu_new": tf.keras.layers.Activation(gelu_new),
    "mish": tf.keras.layers.Activation(mish),
}


class TFEmbedding(tf.keras.layers.Embedding):
    """Faster version of embedding."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def call(self, inputs):
        # Direct gather on the embedding matrix instead of the base-class path.
        inputs = tf.cast(inputs, tf.int32)
        outputs = tf.gather(self.embeddings, inputs)
        return outputs


class TFFastSpeechEmbeddings(tf.keras.layers.Layer):
    """Construct charactor/phoneme/positional/speaker embeddings."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        self.vocab_size = config.vocab_size
        self.hidden_size = config.encoder_self_attention_params.hidden_size
        self.initializer_range = config.initializer_range
        self.config = config

        # Fixed (non-trainable) sinusoidal position table; +1 row for padding.
        self.position_embeddings = TFEmbedding(
            config.max_position_embeddings + 1,
            self.hidden_size,
            weights=[
                self._sincos_embedding(
                    self.hidden_size, self.config.max_position_embeddings
                )
            ],
            name="position_embeddings",
            trainable=False,
        )

        # Speaker embedding/projection only exist for multi-speaker configs.
        if config.n_speakers > 1:
            self.encoder_speaker_embeddings = TFEmbedding(
                config.n_speakers,
                self.hidden_size,
                embeddings_initializer=get_initializer(self.initializer_range),
                name="speaker_embeddings",
            )
            self.speaker_fc = tf.keras.layers.Dense(
                units=self.hidden_size, name="speaker_fc"
            )

    def build(self, input_shape):
        """Build shared charactor/phoneme embedding layers."""
        with tf.name_scope("charactor_embeddings"):
            self.charactor_embeddings = self.add_weight(
                "weight",
                shape=[self.vocab_size, self.hidden_size],
                initializer=get_initializer(self.initializer_range),
            )
        super().build(input_shape)

    def call(self, inputs, training=False):
        """Get charactor embeddings of inputs.

        Args:
            inputs: pair of
                1. charactor, Tensor (int32) shape [batch_size, length].
                2. speaker_id, Tensor (int32) shape [batch_size]
        Returns:
            Tensor (float32) shape [batch_size, length, embedding_size].

        """
        return self._embedding(inputs, training=training)

    def _embedding(self, inputs, training=False):
        """Applies embedding based on inputs tensor."""
        input_ids, speaker_ids = inputs

        input_shape = tf.shape(input_ids)
        seq_length = input_shape[1]

        # Positions start at 1; index 0 is the zeroed padding row.
        position_ids = tf.range(1, seq_length + 1, dtype=tf.int32)[tf.newaxis, :]

        # create embeddings
        inputs_embeds = tf.gather(self.charactor_embeddings, input_ids)
        position_embeddings = self.position_embeddings(position_ids)

        # sum embedding
        embeddings = inputs_embeds + tf.cast(position_embeddings, inputs_embeds.dtype)
        if self.config.n_speakers > 1:
            speaker_embeddings = self.encoder_speaker_embeddings(speaker_ids)
            speaker_features = tf.math.softplus(self.speaker_fc(speaker_embeddings))
            # extended speaker embeddings: broadcast over the time axis.
            extended_speaker_features = speaker_features[:, tf.newaxis, :]
            embeddings += extended_speaker_features

        return embeddings

    def _sincos_embedding(
        self, hidden_size, max_positional_embedding,
    ):
        """Build the sinusoidal position table, shape (max+1, hidden_size).

        Even columns carry sin, odd columns cos; row 0 is zeroed for padding.
        """
        position_enc = np.array(
            [
                [
                    pos / np.power(10000, 2.0 * (i // 2) / hidden_size)
                    for i in range(hidden_size)
                ]
                for pos in range(max_positional_embedding + 1)
            ]
        )

        position_enc[:, 0::2] = np.sin(position_enc[:, 0::2])
        position_enc[:, 1::2] = np.cos(position_enc[:, 1::2])

        # pad embedding.
        position_enc[0] = 0.0

        return position_enc

    def resize_positional_embeddings(self, new_size):
        """Replace the position table to support sequences up to `new_size`."""
        self.position_embeddings = TFEmbedding(
            new_size + 1,
            self.hidden_size,
            weights=[self._sincos_embedding(self.hidden_size, new_size)],
            name="position_embeddings",
            trainable=False,
        )


class TFFastSpeechSelfAttention(tf.keras.layers.Layer):
    """Self attention module for fastspeech."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        if config.hidden_size % config.num_attention_heads != 0:
            raise ValueError(
                "The hidden size (%d) is not a multiple of the number of attention "
                "heads (%d)" % (config.hidden_size, config.num_attention_heads)
            )
        self.output_attentions = config.output_attentions
        self.num_attention_heads = config.num_attention_heads
        self.all_head_size = self.num_attention_heads * config.attention_head_size

        self.query = tf.keras.layers.Dense(
            self.all_head_size,
            kernel_initializer=get_initializer(config.initializer_range),
            name="query",
        )
        self.key = tf.keras.layers.Dense(
            self.all_head_size,
            kernel_initializer=get_initializer(config.initializer_range),
            name="key",
        )
        self.value = tf.keras.layers.Dense(
            self.all_head_size,
            kernel_initializer=get_initializer(config.initializer_range),
            name="value",
        )

        self.dropout = tf.keras.layers.Dropout(config.attention_probs_dropout_prob)
        self.config = config

    def transpose_for_scores(self, x, batch_size):
        """Transpose to calculate attention scores."""
        # (B, T, all_head) -> (B, heads, T, head_size)
        x = tf.reshape(
            x,
            (batch_size, -1, self.num_attention_heads, self.config.attention_head_size),
        )
        return tf.transpose(x, perm=[0, 2, 1, 3])

    def call(self, inputs, training=False):
        """Call logic: scaled dot-product multi-head self attention."""
        hidden_states, attention_mask = inputs

        batch_size = tf.shape(hidden_states)[0]
        mixed_query_layer = self.query(hidden_states)
        mixed_key_layer = self.key(hidden_states)
        mixed_value_layer = self.value(hidden_states)

        query_layer = self.transpose_for_scores(mixed_query_layer, batch_size)
        key_layer = self.transpose_for_scores(mixed_key_layer, batch_size)
        value_layer = self.transpose_for_scores(mixed_value_layer, batch_size)

        attention_scores = tf.matmul(query_layer, key_layer, transpose_b=True)
        dk = tf.cast(
            tf.shape(key_layer)[-1], attention_scores.dtype
        )  # scale attention_scores
        attention_scores = attention_scores / tf.math.sqrt(dk)

        if attention_mask is not None:
            # extended_attention_masks for self attention encoder.
            # Masked positions get a large negative bias before softmax.
            extended_attention_mask = attention_mask[:, tf.newaxis, tf.newaxis, :]
            extended_attention_mask = tf.cast(
                extended_attention_mask, attention_scores.dtype
            )
            extended_attention_mask = (1.0 - extended_attention_mask) * -1e9
            attention_scores = attention_scores + extended_attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = tf.nn.softmax(attention_scores, axis=-1)
        attention_probs = self.dropout(attention_probs, training=training)

        context_layer = tf.matmul(attention_probs, value_layer)
        context_layer = tf.transpose(context_layer, perm=[0, 2, 1, 3])
        context_layer = tf.reshape(context_layer, (batch_size, -1, self.all_head_size))

        outputs = (
            (context_layer, attention_probs)
            if self.output_attentions
            else (context_layer,)
        )
        return outputs


class TFFastSpeechSelfOutput(tf.keras.layers.Layer):
    """Fastspeech output of self attention module."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        self.dense = tf.keras.layers.Dense(
            config.hidden_size,
            kernel_initializer=get_initializer(config.initializer_range),
            name="dense",
        )
        self.LayerNorm = tf.keras.layers.LayerNormalization(
            epsilon=config.layer_norm_eps, name="LayerNorm"
        )
        self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)

    def call(self, inputs, training=False):
        """Call logic: projection + dropout + residual layer-norm."""
        hidden_states, input_tensor = inputs

        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


class TFFastSpeechAttention(tf.keras.layers.Layer):
    """Fastspeech attention module."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        self.self_attention = TFFastSpeechSelfAttention(config, name="self")
        self.dense_output = TFFastSpeechSelfOutput(config, name="output")

    def call(self, inputs, training=False):
        """Run self attention then the output projection, re-masking the result."""
        input_tensor, attention_mask = inputs

        self_outputs = self.self_attention(
            [input_tensor, attention_mask], training=training
        )
        attention_output = self.dense_output(
            [self_outputs[0], input_tensor], training=training
        )
        # Zero out padded positions after the residual/norm.
        masked_attention_output = attention_output * tf.cast(
            tf.expand_dims(attention_mask, 2), dtype=attention_output.dtype
        )
        outputs = (masked_attention_output,) + self_outputs[
            1:
        ]  # add attentions if we output them
        return outputs


class TFFastSpeechIntermediate(tf.keras.layers.Layer):
    """Intermediate representation module."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        self.conv1d_1 = tf.keras.layers.Conv1D(
            config.intermediate_size,
            kernel_size=config.intermediate_kernel_size,
            kernel_initializer=get_initializer(config.initializer_range),
            padding="same",
            name="conv1d_1",
        )
        self.conv1d_2 = tf.keras.layers.Conv1D(
            config.hidden_size,
            kernel_size=config.intermediate_kernel_size,
            kernel_initializer=get_initializer(config.initializer_range),
            padding="same",
            name="conv1d_2",
        )
        # hidden_act may be a config string key into ACT2FN or a callable.
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def call(self, inputs):
        """Call logic: conv -> activation -> conv, then re-mask padding."""
        hidden_states, attention_mask = inputs

        hidden_states = self.conv1d_1(hidden_states)
        hidden_states = self.intermediate_act_fn(hidden_states)
        hidden_states = self.conv1d_2(hidden_states)

        masked_hidden_states = hidden_states * tf.cast(
            tf.expand_dims(attention_mask, 2), dtype=hidden_states.dtype
        )
        return masked_hidden_states


class TFFastSpeechOutput(tf.keras.layers.Layer):
    """Output module."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        self.LayerNorm = tf.keras.layers.LayerNormalization(
            epsilon=config.layer_norm_eps, name="LayerNorm"
        )
        self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)

    def call(self, inputs, training=False):
        """Call logic: dropout + residual layer-norm."""
        hidden_states, input_tensor = inputs

        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


class TFFastSpeechLayer(tf.keras.layers.Layer):
    """Fastspeech module (FFT module on the paper)."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        self.attention = TFFastSpeechAttention(config, name="attention")
        self.intermediate = TFFastSpeechIntermediate(config, name="intermediate")
        self.bert_output = TFFastSpeechOutput(config, name="output")

    def call(self, inputs, training=False):
        """Call logic: attention block -> feed-forward block, masked throughout."""
        hidden_states, attention_mask = inputs

        attention_outputs = self.attention(
            [hidden_states, attention_mask], training=training
        )
        attention_output = attention_outputs[0]
        intermediate_output = self.intermediate(
            [attention_output, attention_mask], training=training
        )
        layer_output = self.bert_output(
            [intermediate_output, attention_output], training=training
        )
        masked_layer_output = layer_output * tf.cast(
            tf.expand_dims(attention_mask, 2), dtype=layer_output.dtype
        )
        outputs = (masked_layer_output,) + attention_outputs[
            1:
        ]  # add attentions if we output them
        return outputs


class TFFastSpeechEncoder(tf.keras.layers.Layer):
    """Fast Speech encoder module."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        self.output_attentions = config.output_attentions
        self.output_hidden_states = config.output_hidden_states
        self.layer = [
            TFFastSpeechLayer(config, name="layer_._{}".format(i))
            for i in range(config.num_hidden_layers)
        ]

    def call(self, inputs, training=False):
        """Call logic: stack of FFT layers, optionally collecting per-layer outputs."""
        hidden_states, attention_mask = inputs

        all_hidden_states = ()
        all_attentions = ()
        for _, layer_module in enumerate(self.layer):
            if self.output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_outputs = layer_module(
                [hidden_states, attention_mask], training=training
            )
            hidden_states = layer_outputs[0]

            if self.output_attentions:
                all_attentions = all_attentions + (layer_outputs[1],)

        # Add last layer
        if self.output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        outputs = (hidden_states,)
        if self.output_hidden_states:
            outputs = outputs + (all_hidden_states,)
        if self.output_attentions:
            outputs = outputs + (all_attentions,)
        return outputs  # outputs, (hidden states), (attentions)


class TFFastSpeechDecoder(TFFastSpeechEncoder):
    """Fast Speech decoder module."""

    def __init__(self, config, **kwargs):
        # Pop the kwarg before the base Layer sees it.
        self.is_compatible_encoder = kwargs.pop("is_compatible_encoder", True)

        super().__init__(config, **kwargs)
        self.config = config

        # create decoder positional embedding
        self.decoder_positional_embeddings = TFEmbedding(
            config.max_position_embeddings + 1,
            config.hidden_size,
            weights=[self._sincos_embedding()],
            name="position_embeddings",
            trainable=False,
        )

        # When encoder hidden size differs from the decoder's, project first.
        if self.is_compatible_encoder is False:
            self.project_compatible_decoder = tf.keras.layers.Dense(
                units=config.hidden_size, name="project_compatible_decoder"
            )

        if config.n_speakers > 1:
            self.decoder_speaker_embeddings = TFEmbedding(
                config.n_speakers,
                config.hidden_size,
                embeddings_initializer=get_initializer(config.initializer_range),
                name="speaker_embeddings",
            )
            self.speaker_fc = tf.keras.layers.Dense(
                units=config.hidden_size, name="speaker_fc"
            )

    def call(self, inputs, training=False):
        """Add positional (and speaker) features, then run the encoder stack.

        Args:
            inputs: [hidden_states, speaker_ids, encoder_mask, decoder_pos]
                where decoder_pos indexes the decoder position table.
        """
        hidden_states, speaker_ids, encoder_mask, decoder_pos = inputs

        if self.is_compatible_encoder is False:
            hidden_states = self.project_compatible_decoder(hidden_states)

        # calculate new hidden states.
        hidden_states += tf.cast(
            self.decoder_positional_embeddings(decoder_pos), hidden_states.dtype
        )

        if self.config.n_speakers > 1:
            speaker_embeddings = self.decoder_speaker_embeddings(speaker_ids)
            speaker_features = tf.math.softplus(self.speaker_fc(speaker_embeddings))
            # extended speaker embeddings: broadcast over the time axis.
            extended_speaker_features = speaker_features[:, tf.newaxis, :]
            hidden_states += extended_speaker_features

        return super().call([hidden_states, encoder_mask], training=training)

    def _sincos_embedding(self):
        """Build the decoder's sinusoidal position table; row 0 zeroed for padding."""
        position_enc = np.array(
            [
                [
                    pos / np.power(10000, 2.0 * (i // 2) / self.config.hidden_size)
                    for i in range(self.config.hidden_size)
                ]
                for pos in range(self.config.max_position_embeddings + 1)
            ]
        )

        position_enc[:, 0::2] = np.sin(position_enc[:, 0::2])
        position_enc[:, 1::2] = np.cos(position_enc[:, 1::2])

        # pad embedding.
        position_enc[0] = 0.0

        return position_enc


class TFTacotronPostnet(tf.keras.layers.Layer):
    """Tacotron-2 postnet."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        self.conv_batch_norm = []
        for i in range(config.n_conv_postnet):
            # Last conv maps back to num_mels; earlier ones keep the filter width.
            conv = tf.keras.layers.Conv1D(
                filters=config.postnet_conv_filters
                if i < config.n_conv_postnet - 1
                else config.num_mels,
                kernel_size=config.postnet_conv_kernel_sizes,
                padding="same",
                name="conv_._{}".format(i),
            )
            batch_norm = tf.keras.layers.BatchNormalization(
                axis=-1, name="batch_norm_._{}".format(i)
            )
            self.conv_batch_norm.append((conv, batch_norm))
        self.dropout = tf.keras.layers.Dropout(
            rate=config.postnet_dropout_rate, name="dropout"
        )
        # tanh on every layer except the last, which is left linear.
        self.activation = [tf.nn.tanh] * (config.n_conv_postnet - 1) + [tf.identity]

    def call(self, inputs, training=False):
        """Call logic: conv/BN/activation/dropout stack, masked at the end."""
        outputs, mask = inputs
        extended_mask = tf.cast(tf.expand_dims(mask, axis=2), outputs.dtype)
        for i, (conv, bn) in enumerate(self.conv_batch_norm):
            outputs = conv(outputs)
            outputs = bn(outputs)
            outputs = self.activation[i](outputs)
            outputs = self.dropout(outputs, training=training)
        return outputs * extended_mask


class TFFastSpeechDurationPredictor(tf.keras.layers.Layer):
    """FastSpeech duration predictor module."""

    def __init__(self, config, **kwargs):
        """Init variables."""
        super().__init__(**kwargs)
        self.conv_layers = []
        # Each block: Conv1D -> LayerNorm -> relu6 -> Dropout.
        for i in range(config.num_duration_conv_layers):
            self.conv_layers.append(
                tf.keras.layers.Conv1D(
                    config.duration_predictor_filters,
                    config.duration_predictor_kernel_sizes,
                    padding="same",
                    name="conv_._{}".format(i),
                )
            )
            self.conv_layers.append(
                tf.keras.layers.LayerNormalization(
                    epsilon=config.layer_norm_eps, name="LayerNorm_._{}".format(i)
                )
            )
            self.conv_layers.append(tf.keras.layers.Activation(tf.nn.relu6))
            self.conv_layers.append(
                tf.keras.layers.Dropout(config.duration_predictor_dropout_probs)
            )
        self.conv_layers_sequence = tf.keras.Sequential(self.conv_layers)
        self.output_layer = tf.keras.layers.Dense(1)

    def call(self, inputs, training=False):
        """Call logic: predict one non-negative duration value per position."""
        encoder_hidden_states, attention_mask = inputs
        attention_mask = tf.cast(
            tf.expand_dims(attention_mask, 2), encoder_hidden_states.dtype
        )

        # mask encoder hidden states
        masked_encoder_hidden_states = encoder_hidden_states * attention_mask

        # pass though first layer
        outputs = self.conv_layers_sequence(masked_encoder_hidden_states)
        outputs = self.output_layer(outputs)
        masked_outputs = outputs * attention_mask
        return tf.squeeze(tf.nn.relu6(masked_outputs), -1)  # make sure positive value.
+ if self.enable_tflite_convertible: + # There is only 1 batch in inference, so we don't have to use + # `tf.While` op with 3-D output tensor. + repeats = durations_gt[0] + real_length = tf.reduce_sum(repeats) + pad_size = max_durations - real_length + # masks : [max_durations] + masks = tf.sequence_mask([real_length], max_durations, dtype=tf.int32) + repeat_encoder_hidden_states = tf.repeat( + encoder_hidden_states[0], repeats=repeats, axis=0 + ) + repeat_encoder_hidden_states = tf.expand_dims( + tf.pad(repeat_encoder_hidden_states, [[0, pad_size], [0, 0]]), 0 + ) # [1, max_durations, hidden_size] + + outputs = repeat_encoder_hidden_states + encoder_masks = masks + else: + outputs = tf.zeros( + shape=[0, max_durations, hidden_size], dtype=encoder_hidden_states.dtype + ) + encoder_masks = tf.zeros(shape=[0, max_durations], dtype=tf.int32) + + def condition( + i, + batch_size, + outputs, + encoder_masks, + encoder_hidden_states, + durations_gt, + max_durations, + ): + return tf.less(i, batch_size) + + def body( + i, + batch_size, + outputs, + encoder_masks, + encoder_hidden_states, + durations_gt, + max_durations, + ): + repeats = durations_gt[i] + real_length = tf.reduce_sum(repeats) + pad_size = max_durations - real_length + masks = tf.sequence_mask([real_length], max_durations, dtype=tf.int32) + repeat_encoder_hidden_states = tf.repeat( + encoder_hidden_states[i], repeats=repeats, axis=0 + ) + repeat_encoder_hidden_states = tf.expand_dims( + tf.pad(repeat_encoder_hidden_states, [[0, pad_size], [0, 0]]), 0 + ) # [1, max_durations, hidden_size] + outputs = tf.concat([outputs, repeat_encoder_hidden_states], axis=0) + encoder_masks = tf.concat([encoder_masks, masks], axis=0) + return [ + i + 1, + batch_size, + outputs, + encoder_masks, + encoder_hidden_states, + durations_gt, + max_durations, + ] + + # initialize iteration i. 
+ i = tf.constant(0, dtype=tf.int32) + _, _, outputs, encoder_masks, _, _, _, = tf.while_loop( + condition, + body, + [ + i, + batch_size, + outputs, + encoder_masks, + encoder_hidden_states, + durations_gt, + max_durations, + ], + shape_invariants=[ + i.get_shape(), + batch_size.get_shape(), + tf.TensorShape( + [ + None, + None, + self.config.encoder_self_attention_params.hidden_size, + ] + ), + tf.TensorShape([None, None]), + encoder_hidden_states.get_shape(), + durations_gt.get_shape(), + max_durations.get_shape(), + ], + ) + + return outputs, encoder_masks + + +class TFFastSpeech(BaseModel): + """TF Fastspeech module.""" + + def __init__(self, config, **kwargs): + """Init layers for fastspeech.""" + self.enable_tflite_convertible = kwargs.pop("enable_tflite_convertible", False) + super().__init__(**kwargs) + self.embeddings = TFFastSpeechEmbeddings(config, name="embeddings") + self.encoder = TFFastSpeechEncoder( + config.encoder_self_attention_params, name="encoder" + ) + self.duration_predictor = TFFastSpeechDurationPredictor( + config, dtype=tf.float32, name="duration_predictor" + ) + self.length_regulator = TFFastSpeechLengthRegulator( + config, + enable_tflite_convertible=self.enable_tflite_convertible, + name="length_regulator", + ) + self.decoder = TFFastSpeechDecoder( + config.decoder_self_attention_params, + is_compatible_encoder=config.encoder_self_attention_params.hidden_size + == config.decoder_self_attention_params.hidden_size, + name="decoder", + ) + self.mel_dense = tf.keras.layers.Dense( + units=config.num_mels, dtype=tf.float32, name="mel_before" + ) + self.postnet = TFTacotronPostnet( + config=config, dtype=tf.float32, name="postnet" + ) + + self.setup_inference_fn() + + def _build(self): + """Dummy input for building model.""" + # fake inputs + input_ids = tf.convert_to_tensor([[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]], tf.int32) + speaker_ids = tf.convert_to_tensor([0], tf.int32) + duration_gts = tf.convert_to_tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], 
tf.int32) + self(input_ids, speaker_ids, duration_gts) + + def resize_positional_embeddings(self, new_size): + self.embeddings.resize_positional_embeddings(new_size) + self._build() + + def call( + self, input_ids, speaker_ids, duration_gts, training=False, **kwargs, + ): + """Call logic.""" + attention_mask = tf.math.not_equal(input_ids, 0) + embedding_output = self.embeddings([input_ids, speaker_ids], training=training) + encoder_output = self.encoder( + [embedding_output, attention_mask], training=training + ) + last_encoder_hidden_states = encoder_output[0] + + # duration predictor, here use last_encoder_hidden_states, u can use more hidden_states layers + # rather than just use last_hidden_states of encoder for duration_predictor. + duration_outputs = self.duration_predictor( + [last_encoder_hidden_states, attention_mask] + ) # [batch_size, length] + + length_regulator_outputs, encoder_masks = self.length_regulator( + [last_encoder_hidden_states, duration_gts], training=training + ) + + # create decoder positional embedding + decoder_pos = tf.range( + 1, tf.shape(length_regulator_outputs)[1] + 1, dtype=tf.int32 + ) + masked_decoder_pos = tf.expand_dims(decoder_pos, 0) * encoder_masks + + decoder_output = self.decoder( + [length_regulator_outputs, speaker_ids, encoder_masks, masked_decoder_pos], + training=training, + ) + last_decoder_hidden_states = decoder_output[0] + + # here u can use sum or concat more than 1 hidden states layers from decoder. 
+ mel_before = self.mel_dense(last_decoder_hidden_states) + mel_after = ( + self.postnet([mel_before, encoder_masks], training=training) + mel_before + ) + + outputs = (mel_before, mel_after, duration_outputs) + return outputs + + def _inference(self, input_ids, speaker_ids, speed_ratios, **kwargs): + """Call logic.""" + attention_mask = tf.math.not_equal(input_ids, 0) + embedding_output = self.embeddings([input_ids, speaker_ids], training=False) + encoder_output = self.encoder( + [embedding_output, attention_mask], training=False + ) + last_encoder_hidden_states = encoder_output[0] + + # duration predictor, here use last_encoder_hidden_states, u can use more hidden_states layers + # rather than just use last_hidden_states of encoder for duration_predictor. + duration_outputs = self.duration_predictor( + [last_encoder_hidden_states, attention_mask] + ) # [batch_size, length] + duration_outputs = tf.math.exp(duration_outputs) - 1.0 + + if speed_ratios is None: + speed_ratios = tf.convert_to_tensor(np.array([1.0]), dtype=tf.float32) + + speed_ratios = tf.expand_dims(speed_ratios, 1) + + duration_outputs = tf.cast( + tf.math.round(duration_outputs * speed_ratios), tf.int32 + ) + + length_regulator_outputs, encoder_masks = self.length_regulator( + [last_encoder_hidden_states, duration_outputs], training=False + ) + + # create decoder positional embedding + decoder_pos = tf.range( + 1, tf.shape(length_regulator_outputs)[1] + 1, dtype=tf.int32 + ) + masked_decoder_pos = tf.expand_dims(decoder_pos, 0) * encoder_masks + + decoder_output = self.decoder( + [length_regulator_outputs, speaker_ids, encoder_masks, masked_decoder_pos], + training=False, + ) + last_decoder_hidden_states = decoder_output[0] + + # here u can use sum or concat more than 1 hidden states layers from decoder. 
+ mel_before = self.mel_dense(last_decoder_hidden_states) + mel_after = ( + self.postnet([mel_before, encoder_masks], training=False) + mel_before + ) + + outputs = (mel_before, mel_after, duration_outputs) + return outputs + + def setup_inference_fn(self): + self.inference = tf.function( + self._inference, + experimental_relax_shapes=True, + input_signature=[ + tf.TensorSpec(shape=[None, None], dtype=tf.int32, name="input_ids"), + tf.TensorSpec(shape=[None,], dtype=tf.int32, name="speaker_ids"), + tf.TensorSpec(shape=[None,], dtype=tf.float32, name="speed_ratios"), + ], + ) + + self.inference_tflite = tf.function( + self._inference, + experimental_relax_shapes=True, + input_signature=[ + tf.TensorSpec(shape=[1, None], dtype=tf.int32, name="input_ids"), + tf.TensorSpec(shape=[1,], dtype=tf.int32, name="speaker_ids"), + tf.TensorSpec(shape=[1,], dtype=tf.float32, name="speed_ratios"), + ], + ) diff --git a/TensorFlowTTS/tensorflow_tts/models/fastspeech2.py b/TensorFlowTTS/tensorflow_tts/models/fastspeech2.py new file mode 100644 index 0000000000000000000000000000000000000000..12f3b2568b5dfc1442839529ff397eb70238d82e --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/models/fastspeech2.py @@ -0,0 +1,312 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The FastSpeech2 Authors and Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tensorflow Model modules for FastSpeech2.""" + +import tensorflow as tf + +from tensorflow_tts.models.fastspeech import TFFastSpeech, get_initializer + + +class TFFastSpeechVariantPredictor(tf.keras.layers.Layer): + """FastSpeech duration predictor module.""" + + def __init__(self, config, **kwargs): + """Init variables.""" + super().__init__(**kwargs) + self.conv_layers = [] + for i in range(config.variant_prediction_num_conv_layers): + self.conv_layers.append( + tf.keras.layers.Conv1D( + config.variant_predictor_filter, + config.variant_predictor_kernel_size, + padding="same", + name="conv_._{}".format(i), + ) + ) + self.conv_layers.append(tf.keras.layers.Activation(tf.nn.relu)) + self.conv_layers.append( + tf.keras.layers.LayerNormalization( + epsilon=config.layer_norm_eps, name="LayerNorm_._{}".format(i) + ) + ) + self.conv_layers.append( + tf.keras.layers.Dropout(config.variant_predictor_dropout_rate) + ) + self.conv_layers_sequence = tf.keras.Sequential(self.conv_layers) + self.output_layer = tf.keras.layers.Dense(1) + + if config.n_speakers > 1: + self.decoder_speaker_embeddings = tf.keras.layers.Embedding( + config.n_speakers, + config.encoder_self_attention_params.hidden_size, + embeddings_initializer=get_initializer(config.initializer_range), + name="speaker_embeddings", + ) + self.speaker_fc = tf.keras.layers.Dense( + units=config.encoder_self_attention_params.hidden_size, + name="speaker_fc", + ) + + self.config = config + + def call(self, inputs, training=False): + """Call logic.""" + encoder_hidden_states, speaker_ids, attention_mask = inputs + attention_mask = tf.cast( + tf.expand_dims(attention_mask, 2), encoder_hidden_states.dtype + ) + + if self.config.n_speakers > 1: + speaker_embeddings = self.decoder_speaker_embeddings(speaker_ids) + speaker_features = tf.math.softplus(self.speaker_fc(speaker_embeddings)) + # extended speaker embeddings + extended_speaker_features = speaker_features[:, tf.newaxis, :] + encoder_hidden_states += 
extended_speaker_features + + # mask encoder hidden states + masked_encoder_hidden_states = encoder_hidden_states * attention_mask + + # pass though first layer + outputs = self.conv_layers_sequence(masked_encoder_hidden_states) + outputs = self.output_layer(outputs) + masked_outputs = outputs * attention_mask + + outputs = tf.squeeze(masked_outputs, -1) + return outputs + + +class TFFastSpeech2(TFFastSpeech): + """TF Fastspeech module.""" + + def __init__(self, config, **kwargs): + """Init layers for fastspeech.""" + super().__init__(config, **kwargs) + self.f0_predictor = TFFastSpeechVariantPredictor( + config, dtype=tf.float32, name="f0_predictor" + ) + self.energy_predictor = TFFastSpeechVariantPredictor( + config, dtype=tf.float32, name="energy_predictor", + ) + self.duration_predictor = TFFastSpeechVariantPredictor( + config, dtype=tf.float32, name="duration_predictor" + ) + + # define f0_embeddings and energy_embeddings + self.f0_embeddings = tf.keras.layers.Conv1D( + filters=config.encoder_self_attention_params.hidden_size, + kernel_size=9, + padding="same", + name="f0_embeddings", + ) + self.f0_dropout = tf.keras.layers.Dropout(0.5) + self.energy_embeddings = tf.keras.layers.Conv1D( + filters=config.encoder_self_attention_params.hidden_size, + kernel_size=9, + padding="same", + name="energy_embeddings", + ) + self.energy_dropout = tf.keras.layers.Dropout(0.5) + + def _build(self): + """Dummy input for building model.""" + # fake inputs + input_ids = tf.convert_to_tensor([[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]], tf.int32) + speaker_ids = tf.convert_to_tensor([0], tf.int32) + duration_gts = tf.convert_to_tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], tf.int32) + f0_gts = tf.convert_to_tensor( + [[10, 10, 10, 10, 10, 10, 10, 10, 10, 10]], tf.float32 + ) + energy_gts = tf.convert_to_tensor( + [[10, 10, 10, 10, 10, 10, 10, 10, 10, 10]], tf.float32 + ) + self( + input_ids=input_ids, + speaker_ids=speaker_ids, + duration_gts=duration_gts, + f0_gts=f0_gts, + 
energy_gts=energy_gts, + ) + + def call( + self, + input_ids, + speaker_ids, + duration_gts, + f0_gts, + energy_gts, + training=False, + **kwargs, + ): + """Call logic.""" + attention_mask = tf.math.not_equal(input_ids, 0) + embedding_output = self.embeddings([input_ids, speaker_ids], training=training) + encoder_output = self.encoder( + [embedding_output, attention_mask], training=training + ) + last_encoder_hidden_states = encoder_output[0] + + # energy predictor, here use last_encoder_hidden_states, u can use more hidden_states layers + # rather than just use last_hidden_states of encoder for energy_predictor. + duration_outputs = self.duration_predictor( + [last_encoder_hidden_states, speaker_ids, attention_mask] + ) # [batch_size, length] + + f0_outputs = self.f0_predictor( + [last_encoder_hidden_states, speaker_ids, attention_mask], training=training + ) + energy_outputs = self.energy_predictor( + [last_encoder_hidden_states, speaker_ids, attention_mask], training=training + ) + + f0_embedding = self.f0_embeddings( + tf.expand_dims(f0_gts, 2) + ) # [barch_size, mel_length, feature] + energy_embedding = self.energy_embeddings( + tf.expand_dims(energy_gts, 2) + ) # [barch_size, mel_length, feature] + + # apply dropout both training/inference + f0_embedding = self.f0_dropout(f0_embedding, training=True) + energy_embedding = self.energy_dropout(energy_embedding, training=True) + + # sum features + last_encoder_hidden_states += f0_embedding + energy_embedding + + length_regulator_outputs, encoder_masks = self.length_regulator( + [last_encoder_hidden_states, duration_gts], training=training + ) + + # create decoder positional embedding + decoder_pos = tf.range( + 1, tf.shape(length_regulator_outputs)[1] + 1, dtype=tf.int32 + ) + masked_decoder_pos = tf.expand_dims(decoder_pos, 0) * encoder_masks + + decoder_output = self.decoder( + [length_regulator_outputs, speaker_ids, encoder_masks, masked_decoder_pos], + training=training, + ) + last_decoder_hidden_states = 
decoder_output[0] + + # here u can use sum or concat more than 1 hidden states layers from decoder. + mels_before = self.mel_dense(last_decoder_hidden_states) + mels_after = ( + self.postnet([mels_before, encoder_masks], training=training) + mels_before + ) + + outputs = ( + mels_before, + mels_after, + duration_outputs, + f0_outputs, + energy_outputs, + ) + return outputs + + def _inference( + self, input_ids, speaker_ids, speed_ratios, f0_ratios, energy_ratios, **kwargs, + ): + """Call logic.""" + attention_mask = tf.math.not_equal(input_ids, 0) + embedding_output = self.embeddings([input_ids, speaker_ids], training=False) + encoder_output = self.encoder( + [embedding_output, attention_mask], training=False + ) + last_encoder_hidden_states = encoder_output[0] + + # expand ratios + speed_ratios = tf.expand_dims(speed_ratios, 1) # [B, 1] + f0_ratios = tf.expand_dims(f0_ratios, 1) # [B, 1] + energy_ratios = tf.expand_dims(energy_ratios, 1) # [B, 1] + + # energy predictor, here use last_encoder_hidden_states, u can use more hidden_states layers + # rather than just use last_hidden_states of encoder for energy_predictor. 
+ duration_outputs = self.duration_predictor( + [last_encoder_hidden_states, speaker_ids, attention_mask] + ) # [batch_size, length] + duration_outputs = tf.nn.relu(tf.math.exp(duration_outputs) - 1.0) + duration_outputs = tf.cast( + tf.math.round(duration_outputs * speed_ratios), tf.int32 + ) + + f0_outputs = self.f0_predictor( + [last_encoder_hidden_states, speaker_ids, attention_mask], training=False + ) + f0_outputs *= f0_ratios + + energy_outputs = self.energy_predictor( + [last_encoder_hidden_states, speaker_ids, attention_mask], training=False + ) + energy_outputs *= energy_ratios + + f0_embedding = self.f0_dropout( + self.f0_embeddings(tf.expand_dims(f0_outputs, 2)), training=True + ) + energy_embedding = self.energy_dropout( + self.energy_embeddings(tf.expand_dims(energy_outputs, 2)), training=True + ) + + # sum features + last_encoder_hidden_states += f0_embedding + energy_embedding + + length_regulator_outputs, encoder_masks = self.length_regulator( + [last_encoder_hidden_states, duration_outputs], training=False + ) + + # create decoder positional embedding + decoder_pos = tf.range( + 1, tf.shape(length_regulator_outputs)[1] + 1, dtype=tf.int32 + ) + masked_decoder_pos = tf.expand_dims(decoder_pos, 0) * encoder_masks + + decoder_output = self.decoder( + [length_regulator_outputs, speaker_ids, encoder_masks, masked_decoder_pos], + training=False, + ) + last_decoder_hidden_states = decoder_output[0] + + # here u can use sum or concat more than 1 hidden states layers from decoder. 
+ mel_before = self.mel_dense(last_decoder_hidden_states) + mel_after = ( + self.postnet([mel_before, encoder_masks], training=False) + mel_before + ) + + outputs = (mel_before, mel_after, duration_outputs, f0_outputs, energy_outputs) + return outputs + + def setup_inference_fn(self): + self.inference = tf.function( + self._inference, + experimental_relax_shapes=True, + input_signature=[ + tf.TensorSpec(shape=[None, None], dtype=tf.int32, name="input_ids"), + tf.TensorSpec(shape=[None,], dtype=tf.int32, name="speaker_ids"), + tf.TensorSpec(shape=[None,], dtype=tf.float32, name="speed_ratios"), + tf.TensorSpec(shape=[None,], dtype=tf.float32, name="f0_ratios"), + tf.TensorSpec(shape=[None,], dtype=tf.float32, name="energy_ratios"), + ], + ) + + self.inference_tflite = tf.function( + self._inference, + experimental_relax_shapes=True, + input_signature=[ + tf.TensorSpec(shape=[1, None], dtype=tf.int32, name="input_ids"), + tf.TensorSpec(shape=[1,], dtype=tf.int32, name="speaker_ids"), + tf.TensorSpec(shape=[1,], dtype=tf.float32, name="speed_ratios"), + tf.TensorSpec(shape=[1,], dtype=tf.float32, name="f0_ratios"), + tf.TensorSpec(shape=[1,], dtype=tf.float32, name="energy_ratios"), + ], + ) diff --git a/TensorFlowTTS/tensorflow_tts/models/hifigan.py b/TensorFlowTTS/tensorflow_tts/models/hifigan.py new file mode 100644 index 0000000000000000000000000000000000000000..ea57fec15152751991937677521ca6242e4e1095 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/models/hifigan.py @@ -0,0 +1,379 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The Hifigan Authors and TensorflowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Hifi Modules.""" + +import numpy as np +import tensorflow as tf + +from tensorflow_tts.models.melgan import TFReflectionPad1d +from tensorflow_tts.models.melgan import TFConvTranspose1d + +from tensorflow_tts.utils import GroupConv1D +from tensorflow_tts.utils import WeightNormalization + +from tensorflow_tts.models import BaseModel +from tensorflow_tts.models import TFMelGANGenerator + + +class TFHifiResBlock(tf.keras.layers.Layer): + """Tensorflow Hifigan resblock 1 module.""" + + def __init__( + self, + kernel_size, + filters, + dilation_rate, + use_bias, + nonlinear_activation, + nonlinear_activation_params, + is_weight_norm, + initializer_seed, + **kwargs + ): + """Initialize TFHifiResBlock module. + Args: + kernel_size (int): Kernel size. + filters (int): Number of filters. + dilation_rate (list): List dilation rate. + use_bias (bool): Whether to add bias parameter in convolution layers. + nonlinear_activation (str): Activation function module name. + nonlinear_activation_params (dict): Hyperparameters for activation function. + is_weight_norm (bool): Whether to use weight norm or not. 
+ """ + super().__init__(**kwargs) + self.blocks_1 = [] + self.blocks_2 = [] + + for i in range(len(dilation_rate)): + self.blocks_1.append( + [ + TFReflectionPad1d((kernel_size - 1) // 2 * dilation_rate[i]), + tf.keras.layers.Conv1D( + filters=filters, + kernel_size=kernel_size, + dilation_rate=dilation_rate[i], + use_bias=use_bias, + ), + ] + ) + self.blocks_2.append( + [ + TFReflectionPad1d((kernel_size - 1) // 2 * 1), + tf.keras.layers.Conv1D( + filters=filters, + kernel_size=kernel_size, + dilation_rate=1, + use_bias=use_bias, + ), + ] + ) + + self.activation = getattr(tf.keras.layers, nonlinear_activation)( + **nonlinear_activation_params + ) + + # apply weightnorm + if is_weight_norm: + self._apply_weightnorm(self.blocks_1) + self._apply_weightnorm(self.blocks_2) + + def call(self, x, training=False): + """Calculate forward propagation. + Args: + x (Tensor): Input tensor (B, T, C). + Returns: + Tensor: Output tensor (B, T, C). + """ + for c1, c2 in zip(self.blocks_1, self.blocks_2): + xt = self.activation(x) + for c in c1: + xt = c(xt) + xt = self.activation(xt) + for c in c2: + xt = c(xt) + x = xt + x + return x + + def _apply_weightnorm(self, list_layers): + """Try apply weightnorm for all layer in list_layers.""" + for i in range(len(list_layers)): + try: + layer_name = list_layers[i].name.lower() + if "conv1d" in layer_name or "dense" in layer_name: + list_layers[i] = WeightNormalization(list_layers[i]) + except Exception: + pass + + +class TFMultiHifiResBlock(tf.keras.layers.Layer): + """Tensorflow Multi Hifigan resblock 1 module.""" + + def __init__(self, list_resblock, **kwargs): + super().__init__(**kwargs) + self.list_resblock = list_resblock + + def call(self, x, training=False): + xs = None + for resblock in self.list_resblock: + if xs is None: + xs = resblock(x, training=training) + else: + xs += resblock(x, training=training) + return xs / len(self.list_resblock) + + +class TFHifiGANGenerator(BaseModel): + def __init__(self, config, **kwargs): + 
super().__init__(**kwargs) + # check hyper parameter is valid or not + assert ( + config.stacks + == len(config.stack_kernel_size) + == len(config.stack_dilation_rate) + ) + + # add initial layer + layers = [] + layers += [ + TFReflectionPad1d( + (config.kernel_size - 1) // 2, + padding_type=config.padding_type, + name="first_reflect_padding", + ), + tf.keras.layers.Conv1D( + filters=config.filters, + kernel_size=config.kernel_size, + use_bias=config.use_bias, + ), + ] + + for i, upsample_scale in enumerate(config.upsample_scales): + # add upsampling layer + layers += [ + getattr(tf.keras.layers, config.nonlinear_activation)( + **config.nonlinear_activation_params + ), + TFConvTranspose1d( + filters=config.filters // (2 ** (i + 1)), + kernel_size=upsample_scale * 2, + strides=upsample_scale, + padding="same", + is_weight_norm=config.is_weight_norm, + initializer_seed=config.initializer_seed, + name="conv_transpose_._{}".format(i), + ), + ] + + # add residual stack layer + layers += [ + TFMultiHifiResBlock( + list_resblock=[ + TFHifiResBlock( + kernel_size=config.stack_kernel_size[j], + filters=config.filters // (2 ** (i + 1)), + dilation_rate=config.stack_dilation_rate[j], + use_bias=config.use_bias, + nonlinear_activation=config.nonlinear_activation, + nonlinear_activation_params=config.nonlinear_activation_params, + is_weight_norm=config.is_weight_norm, + initializer_seed=config.initializer_seed, + name="hifigan_resblock_._{}".format(j), + ) + for j in range(config.stacks) + ], + name="multi_hifigan_resblock_._{}".format(i), + ) + ] + # add final layer + layers += [ + getattr(tf.keras.layers, config.nonlinear_activation)( + **config.nonlinear_activation_params + ), + TFReflectionPad1d( + (config.kernel_size - 1) // 2, + padding_type=config.padding_type, + name="last_reflect_padding", + ), + tf.keras.layers.Conv1D( + filters=config.out_channels, + kernel_size=config.kernel_size, + use_bias=config.use_bias, + dtype=tf.float32, + ), + ] + if 
config.use_final_nolinear_activation: + layers += [tf.keras.layers.Activation("tanh", dtype=tf.float32)] + + if config.is_weight_norm is True: + self._apply_weightnorm(layers) + + self.hifigan = tf.keras.models.Sequential(layers) + + def call(self, mels, **kwargs): + """Calculate forward propagation. + Args: + c (Tensor): Input tensor (B, T, channels) + Returns: + Tensor: Output tensor (B, T ** prod(upsample_scales), out_channels) + """ + return self.inference(mels) + + @tf.function( + input_signature=[ + tf.TensorSpec(shape=[None, None, 80], dtype=tf.float32, name="mels") + ] + ) + def inference(self, mels): + return self.hifigan(mels) + + @tf.function( + input_signature=[ + tf.TensorSpec(shape=[1, None, 80], dtype=tf.float32, name="mels") + ] + ) + def inference_tflite(self, mels): + return self.hifigan(mels) + + def _apply_weightnorm(self, list_layers): + """Try apply weightnorm for all layer in list_layers.""" + for i in range(len(list_layers)): + try: + layer_name = list_layers[i].name.lower() + if "conv1d" in layer_name or "dense" in layer_name: + list_layers[i] = WeightNormalization(list_layers[i]) + except Exception: + pass + + def _build(self): + """Build model by passing fake input.""" + fake_mels = tf.random.uniform(shape=[1, 100, 80], dtype=tf.float32) + self(fake_mels) + + +class TFHifiGANPeriodDiscriminator(tf.keras.layers.Layer): + """Tensorflow Hifigan period discriminator module.""" + + def __init__( + self, + period, + out_channels=1, + n_layers=5, + kernel_size=5, + strides=3, + filters=8, + filter_scales=4, + max_filters=1024, + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + initializer_seed=42, + is_weight_norm=False, + **kwargs + ): + super().__init__(**kwargs) + self.period = period + self.out_filters = out_channels + self.convs = [] + + for i in range(n_layers): + self.convs.append( + tf.keras.layers.Conv2D( + filters=min(filters * (filter_scales ** (i + 1)), max_filters), + kernel_size=(kernel_size, 1), + 
strides=(strides, 1), + padding="same", + ) + ) + self.conv_post = tf.keras.layers.Conv2D( + filters=out_channels, kernel_size=(3, 1), padding="same", + ) + self.activation = getattr(tf.keras.layers, nonlinear_activation)( + **nonlinear_activation_params + ) + + if is_weight_norm: + self._apply_weightnorm(self.convs) + self.conv_post = WeightNormalization(self.conv_post) + + def call(self, x): + """Calculate forward propagation. + Args: + x (Tensor): Input noise signal (B, T, 1). + Returns: + List: List of output tensors. + """ + shape = tf.shape(x) + n_pad = tf.convert_to_tensor(0, dtype=tf.int32) + if shape[1] % self.period != 0: + n_pad = self.period - (shape[1] % self.period) + x = tf.pad(x, [[0, 0], [0, n_pad], [0, 0]], "REFLECT") + x = tf.reshape( + x, [shape[0], (shape[1] + n_pad) // self.period, self.period, x.shape[2]] + ) + for layer in self.convs: + x = layer(x) + x = self.activation(x) + x = self.conv_post(x) + x = tf.reshape(x, [shape[0], -1, self.out_filters]) + return [x] + + def _apply_weightnorm(self, list_layers): + """Try apply weightnorm for all layer in list_layers.""" + for i in range(len(list_layers)): + try: + layer_name = list_layers[i].name.lower() + if "conv1d" in layer_name or "dense" in layer_name: + list_layers[i] = WeightNormalization(list_layers[i]) + except Exception: + pass + + +class TFHifiGANMultiPeriodDiscriminator(BaseModel): + """Tensorflow Hifigan Multi Period discriminator module.""" + + def __init__(self, config, **kwargs): + super().__init__(**kwargs) + self.discriminator = [] + + # add discriminator + for i in range(len(config.period_scales)): + self.discriminator += [ + TFHifiGANPeriodDiscriminator( + config.period_scales[i], + out_channels=config.out_channels, + n_layers=config.n_layers, + kernel_size=config.kernel_size, + strides=config.strides, + filters=config.filters, + filter_scales=config.filter_scales, + max_filters=config.max_filters, + nonlinear_activation=config.nonlinear_activation, + 
nonlinear_activation_params=config.nonlinear_activation_params, + initializer_seed=config.initializer_seed, + is_weight_norm=config.is_weight_norm, + name="hifigan_period_discriminator_._{}".format(i), + ) + ] + + def call(self, x): + """Calculate forward propagation. + Args: + x (Tensor): Input noise signal (B, T, 1). + Returns: + List: list of each discriminator outputs + """ + outs = [] + for f in self.discriminator: + outs += [f(x)] + return outs diff --git a/TensorFlowTTS/tensorflow_tts/models/mb_melgan.py b/TensorFlowTTS/tensorflow_tts/models/mb_melgan.py new file mode 100644 index 0000000000000000000000000000000000000000..7340d840fbb82e2d5dc46991a7e1e097e2bd05b8 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/models/mb_melgan.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The Multi-band MelGAN Authors , Minh Nguyen (@dathudeptrai) and Tomoki Hayashi (@kan-bayashi) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +# +# Compatible with https://github.com/kan-bayashi/ParallelWaveGAN/blob/master/parallel_wavegan/layers/pqmf.py. +"""Multi-band MelGAN Modules.""" + +import numpy as np +import tensorflow as tf +from scipy.signal import kaiser + +from tensorflow_tts.models import BaseModel +from tensorflow_tts.models import TFMelGANGenerator + + +def design_prototype_filter(taps=62, cutoff_ratio=0.15, beta=9.0): + """Design prototype filter for PQMF. 
+ This method is based on `A Kaiser window approach for the design of prototype
+ filters of cosine modulated filterbanks`_.
+ Args:
+ taps (int): The number of filter taps.
+ cutoff_ratio (float): Cut-off frequency ratio.
+ beta (float): Beta coefficient for kaiser window.
+ Returns:
+ ndarray: Impulse response of prototype filter (taps + 1,).
+ .. _`A Kaiser window approach for the design of prototype filters of cosine modulated filterbanks`:
+ https://ieeexplore.ieee.org/abstract/document/681427
+ """
+ # check the arguments are valid
+ assert taps % 2 == 0, "The number of taps mush be even number."
+ assert 0.0 < cutoff_ratio < 1.0, "Cutoff ratio must be > 0.0 and < 1.0."
+
+ # make initial filter
+ omega_c = np.pi * cutoff_ratio
+ with np.errstate(invalid="ignore"):
+ h_i = np.sin(omega_c * (np.arange(taps + 1) - 0.5 * taps)) / (
+ np.pi * (np.arange(taps + 1) - 0.5 * taps)
+ )
+ # fix nan due to indeterminate form
+ h_i[taps // 2] = np.cos(0) * cutoff_ratio
+
+ # apply kaiser window
+ w = kaiser(taps + 1, beta)
+ h = h_i * w
+
+ return h
+
+
+class TFPQMF(tf.keras.layers.Layer):
+ """PQMF module."""
+
+ def __init__(self, config, **kwargs):
+ """Initialize PQMF module. 
+ Args: + config (class): MultiBandMelGANGeneratorConfig + """ + super().__init__(**kwargs) + subbands = config.subbands + taps = config.taps + cutoff_ratio = config.cutoff_ratio + beta = config.beta + + # define filter coefficient + h_proto = design_prototype_filter(taps, cutoff_ratio, beta) + h_analysis = np.zeros((subbands, len(h_proto))) + h_synthesis = np.zeros((subbands, len(h_proto))) + for k in range(subbands): + h_analysis[k] = ( + 2 + * h_proto + * np.cos( + (2 * k + 1) + * (np.pi / (2 * subbands)) + * (np.arange(taps + 1) - (taps / 2)) + + (-1) ** k * np.pi / 4 + ) + ) + h_synthesis[k] = ( + 2 + * h_proto + * np.cos( + (2 * k + 1) + * (np.pi / (2 * subbands)) + * (np.arange(taps + 1) - (taps / 2)) + - (-1) ** k * np.pi / 4 + ) + ) + + # [subbands, 1, taps + 1] == [filter_width, in_channels, out_channels] + analysis_filter = np.expand_dims(h_analysis, 1) + analysis_filter = np.transpose(analysis_filter, (2, 1, 0)) + + synthesis_filter = np.expand_dims(h_synthesis, 0) + synthesis_filter = np.transpose(synthesis_filter, (2, 1, 0)) + + # filter for downsampling & upsampling + updown_filter = np.zeros((subbands, subbands, subbands), dtype=np.float32) + for k in range(subbands): + updown_filter[0, k, k] = 1.0 + + self.subbands = subbands + self.taps = taps + self.analysis_filter = analysis_filter.astype(np.float32) + self.synthesis_filter = synthesis_filter.astype(np.float32) + self.updown_filter = updown_filter.astype(np.float32) + + @tf.function( + experimental_relax_shapes=True, + input_signature=[tf.TensorSpec(shape=[None, None, 1], dtype=tf.float32)], + ) + def analysis(self, x): + """Analysis with PQMF. + Args: + x (Tensor): Input tensor (B, T, 1). + Returns: + Tensor: Output tensor (B, T // subbands, subbands). 
+ """ + x = tf.pad(x, [[0, 0], [self.taps // 2, self.taps // 2], [0, 0]]) + x = tf.nn.conv1d(x, self.analysis_filter, stride=1, padding="VALID") + x = tf.nn.conv1d(x, self.updown_filter, stride=self.subbands, padding="VALID") + return x + + @tf.function( + experimental_relax_shapes=True, + input_signature=[tf.TensorSpec(shape=[None, None, None], dtype=tf.float32)], + ) + def synthesis(self, x): + """Synthesis with PQMF. + Args: + x (Tensor): Input tensor (B, T // subbands, subbands). + Returns: + Tensor: Output tensor (B, T, 1). + """ + x = tf.nn.conv1d_transpose( + x, + self.updown_filter * self.subbands, + strides=self.subbands, + output_shape=( + tf.shape(x)[0], + tf.shape(x)[1] * self.subbands, + self.subbands, + ), + ) + x = tf.pad(x, [[0, 0], [self.taps // 2, self.taps // 2], [0, 0]]) + return tf.nn.conv1d(x, self.synthesis_filter, stride=1, padding="VALID") + + +class TFMBMelGANGenerator(TFMelGANGenerator): + """Tensorflow MBMelGAN generator module.""" + + def __init__(self, config, **kwargs): + super().__init__(config, **kwargs) + self.pqmf = TFPQMF(config=config, dtype=tf.float32, name="pqmf") + + def call(self, mels, **kwargs): + """Calculate forward propagation. 
+ Args: + c (Tensor): Input tensor (B, T, channels) + Returns: + Tensor: Output tensor (B, T ** prod(upsample_scales), out_channels) + """ + return self.inference(mels) + + @tf.function( + input_signature=[ + tf.TensorSpec(shape=[None, None, 80], dtype=tf.float32, name="mels") + ] + ) + def inference(self, mels): + mb_audios = self.melgan(mels) + return self.pqmf.synthesis(mb_audios) + + @tf.function( + input_signature=[ + tf.TensorSpec(shape=[1, None, 80], dtype=tf.float32, name="mels") + ] + ) + def inference_tflite(self, mels): + mb_audios = self.melgan(mels) + return self.pqmf.synthesis(mb_audios) diff --git a/TensorFlowTTS/tensorflow_tts/models/melgan.py b/TensorFlowTTS/tensorflow_tts/models/melgan.py new file mode 100644 index 0000000000000000000000000000000000000000..89d1da8145ac7fd6697c132094bbed9fe9b787db --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/models/melgan.py @@ -0,0 +1,498 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The MelGAN Authors and Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""MelGAN Modules.""" + +import numpy as np +import tensorflow as tf + +from tensorflow_tts.models import BaseModel +from tensorflow_tts.utils import GroupConv1D, WeightNormalization + + +def get_initializer(initializer_seed=42): + """Creates a `tf.initializers.glorot_normal` with the given seed. + Args: + initializer_seed: int, initializer seed. + Returns: + GlorotNormal initializer with seed = `initializer_seed`. 
+ """ + return tf.keras.initializers.GlorotNormal(seed=initializer_seed) + + +class TFReflectionPad1d(tf.keras.layers.Layer): + """Tensorflow ReflectionPad1d module.""" + + def __init__(self, padding_size, padding_type="REFLECT", **kwargs): + """Initialize TFReflectionPad1d module. + + Args: + padding_size (int) + padding_type (str) ("CONSTANT", "REFLECT", or "SYMMETRIC". Default is "REFLECT") + """ + super().__init__(**kwargs) + self.padding_size = padding_size + self.padding_type = padding_type + + def call(self, x): + """Calculate forward propagation. + Args: + x (Tensor): Input tensor (B, T, C). + Returns: + Tensor: Padded tensor (B, T + 2 * padding_size, C). + """ + return tf.pad( + x, + [[0, 0], [self.padding_size, self.padding_size], [0, 0]], + self.padding_type, + ) + + +class TFConvTranspose1d(tf.keras.layers.Layer): + """Tensorflow ConvTranspose1d module.""" + + def __init__( + self, + filters, + kernel_size, + strides, + padding, + is_weight_norm, + initializer_seed, + **kwargs + ): + """Initialize TFConvTranspose1d( module. + Args: + filters (int): Number of filters. + kernel_size (int): kernel size. + strides (int): Stride width. + padding (str): Padding type ("same" or "valid"). + """ + super().__init__(**kwargs) + self.conv1d_transpose = tf.keras.layers.Conv2DTranspose( + filters=filters, + kernel_size=(kernel_size, 1), + strides=(strides, 1), + padding="same", + kernel_initializer=get_initializer(initializer_seed), + ) + if is_weight_norm: + self.conv1d_transpose = WeightNormalization(self.conv1d_transpose) + + def call(self, x): + """Calculate forward propagation. + Args: + x (Tensor): Input tensor (B, T, C). + Returns: + Tensor: Output tensor (B, T', C'). 
+ """ + x = tf.expand_dims(x, 2) + x = self.conv1d_transpose(x) + x = tf.squeeze(x, 2) + return x + + +class TFResidualStack(tf.keras.layers.Layer): + """Tensorflow ResidualStack module.""" + + def __init__( + self, + kernel_size, + filters, + dilation_rate, + use_bias, + nonlinear_activation, + nonlinear_activation_params, + is_weight_norm, + initializer_seed, + **kwargs + ): + """Initialize TFResidualStack module. + Args: + kernel_size (int): Kernel size. + filters (int): Number of filters. + dilation_rate (int): Dilation rate. + use_bias (bool): Whether to add bias parameter in convolution layers. + nonlinear_activation (str): Activation function module name. + nonlinear_activation_params (dict): Hyperparameters for activation function. + """ + super().__init__(**kwargs) + self.blocks = [ + getattr(tf.keras.layers, nonlinear_activation)( + **nonlinear_activation_params + ), + TFReflectionPad1d((kernel_size - 1) // 2 * dilation_rate), + tf.keras.layers.Conv1D( + filters=filters, + kernel_size=kernel_size, + dilation_rate=dilation_rate, + use_bias=use_bias, + kernel_initializer=get_initializer(initializer_seed), + ), + getattr(tf.keras.layers, nonlinear_activation)( + **nonlinear_activation_params + ), + tf.keras.layers.Conv1D( + filters=filters, + kernel_size=1, + use_bias=use_bias, + kernel_initializer=get_initializer(initializer_seed), + ), + ] + self.shortcut = tf.keras.layers.Conv1D( + filters=filters, + kernel_size=1, + use_bias=use_bias, + kernel_initializer=get_initializer(initializer_seed), + name="shortcut", + ) + + # apply weightnorm + if is_weight_norm: + self._apply_weightnorm(self.blocks) + self.shortcut = WeightNormalization(self.shortcut) + + def call(self, x): + """Calculate forward propagation. + Args: + x (Tensor): Input tensor (B, T, C). + Returns: + Tensor: Output tensor (B, T, C). 
+ """ + _x = tf.identity(x) + for layer in self.blocks: + _x = layer(_x) + shortcut = self.shortcut(x) + return shortcut + _x + + def _apply_weightnorm(self, list_layers): + """Try apply weightnorm for all layer in list_layers.""" + for i in range(len(list_layers)): + try: + layer_name = list_layers[i].name.lower() + if "conv1d" in layer_name or "dense" in layer_name: + list_layers[i] = WeightNormalization(list_layers[i]) + except Exception: + pass + + +class TFMelGANGenerator(BaseModel): + """Tensorflow MelGAN generator module.""" + + def __init__(self, config, **kwargs): + """Initialize TFMelGANGenerator module. + Args: + config: config object of Melgan generator. + """ + super().__init__(**kwargs) + + # check hyper parameter is valid or not + assert config.filters >= np.prod(config.upsample_scales) + assert config.filters % (2 ** len(config.upsample_scales)) == 0 + + # add initial layer + layers = [] + layers += [ + TFReflectionPad1d( + (config.kernel_size - 1) // 2, + padding_type=config.padding_type, + name="first_reflect_padding", + ), + tf.keras.layers.Conv1D( + filters=config.filters, + kernel_size=config.kernel_size, + use_bias=config.use_bias, + kernel_initializer=get_initializer(config.initializer_seed), + ), + ] + + for i, upsample_scale in enumerate(config.upsample_scales): + # add upsampling layer + layers += [ + getattr(tf.keras.layers, config.nonlinear_activation)( + **config.nonlinear_activation_params + ), + TFConvTranspose1d( + filters=config.filters // (2 ** (i + 1)), + kernel_size=upsample_scale * 2, + strides=upsample_scale, + padding="same", + is_weight_norm=config.is_weight_norm, + initializer_seed=config.initializer_seed, + name="conv_transpose_._{}".format(i), + ), + ] + + # ad residual stack layer + for j in range(config.stacks): + layers += [ + TFResidualStack( + kernel_size=config.stack_kernel_size, + filters=config.filters // (2 ** (i + 1)), + dilation_rate=config.stack_kernel_size ** j, + use_bias=config.use_bias, + 
nonlinear_activation=config.nonlinear_activation, + nonlinear_activation_params=config.nonlinear_activation_params, + is_weight_norm=config.is_weight_norm, + initializer_seed=config.initializer_seed, + name="residual_stack_._{}._._{}".format(i, j), + ) + ] + # add final layer + layers += [ + getattr(tf.keras.layers, config.nonlinear_activation)( + **config.nonlinear_activation_params + ), + TFReflectionPad1d( + (config.kernel_size - 1) // 2, + padding_type=config.padding_type, + name="last_reflect_padding", + ), + tf.keras.layers.Conv1D( + filters=config.out_channels, + kernel_size=config.kernel_size, + use_bias=config.use_bias, + kernel_initializer=get_initializer(config.initializer_seed), + dtype=tf.float32, + ), + ] + if config.use_final_nolinear_activation: + layers += [tf.keras.layers.Activation("tanh", dtype=tf.float32)] + + if config.is_weight_norm is True: + self._apply_weightnorm(layers) + + self.melgan = tf.keras.models.Sequential(layers) + + def call(self, mels, **kwargs): + """Calculate forward propagation. 
+ Args:
+ c (Tensor): Input tensor (B, T, channels)
+ Returns:
+ Tensor: Output tensor (B, T ** prod(upsample_scales), out_channels)
+ """
+ return self.inference(mels)
+
+ @tf.function(
+ input_signature=[
+ tf.TensorSpec(shape=[None, None, 80], dtype=tf.float32, name="mels")
+ ]
+ )
+ def inference(self, mels):
+ return self.melgan(mels)
+
+ @tf.function(
+ input_signature=[
+ tf.TensorSpec(shape=[1, None, 80], dtype=tf.float32, name="mels")
+ ]
+ )
+ def inference_tflite(self, mels):
+ return self.melgan(mels)
+
+ def _apply_weightnorm(self, list_layers):
+ """Try apply weightnorm for all layer in list_layers."""
+ for i in range(len(list_layers)):
+ try:
+ layer_name = list_layers[i].name.lower()
+ if "conv1d" in layer_name or "dense" in layer_name:
+ list_layers[i] = WeightNormalization(list_layers[i])
+ except Exception:
+ pass
+
+ def _build(self):
+ """Build model by passing fake input."""
+ fake_mels = tf.random.uniform(shape=[1, 100, 80], dtype=tf.float32)
+ self(fake_mels)
+
+
+class TFMelGANDiscriminator(tf.keras.layers.Layer):
+ """Tensorflow MelGAN discriminator module."""
+
+ def __init__(
+ self,
+ out_channels=1,
+ kernel_sizes=[5, 3],
+ filters=16,
+ max_downsample_filters=1024,
+ use_bias=True,
+ downsample_scales=[4, 4, 4, 4],
+ nonlinear_activation="LeakyReLU",
+ nonlinear_activation_params={"alpha": 0.2},
+ padding_type="REFLECT",
+ is_weight_norm=True,
+ initializer_seed=0.02,
+ **kwargs
+ ):
+ """Initialize MelGAN discriminator module.
+ Args:
+ out_channels (int): Number of output channels.
+ kernel_sizes (list): List of two kernel sizes. The prod will be used for the first conv layer,
+ and the first and the second kernel sizes will be used for the last two layers.
+ For example if kernel_sizes = [5, 3], the first layer kernel size will be 5 * 3 = 15.
+ the last two layers' kernel size will be 5 and 3, respectively.
+ filters (int): Initial number of filters for conv layer. 
+ max_downsample_filters (int): Maximum number of filters for downsampling layers. + use_bias (bool): Whether to add bias parameter in convolution layers. + downsample_scales (list): List of downsampling scales. + nonlinear_activation (str): Activation function module name. + nonlinear_activation_params (dict): Hyperparameters for activation function. + padding_type (str): Padding type (support only "REFLECT", "CONSTANT", "SYMMETRIC") + """ + super().__init__(**kwargs) + discriminator = [] + + # check kernel_size is valid + assert len(kernel_sizes) == 2 + assert kernel_sizes[0] % 2 == 1 + assert kernel_sizes[1] % 2 == 1 + + # add first layer + discriminator = [ + TFReflectionPad1d( + (np.prod(kernel_sizes) - 1) // 2, padding_type=padding_type + ), + tf.keras.layers.Conv1D( + filters=filters, + kernel_size=int(np.prod(kernel_sizes)), + use_bias=use_bias, + kernel_initializer=get_initializer(initializer_seed), + ), + getattr(tf.keras.layers, nonlinear_activation)( + **nonlinear_activation_params + ), + ] + + # add downsample layers + in_chs = filters + with tf.keras.utils.CustomObjectScope({"GroupConv1D": GroupConv1D}): + for downsample_scale in downsample_scales: + out_chs = min(in_chs * downsample_scale, max_downsample_filters) + discriminator += [ + GroupConv1D( + filters=out_chs, + kernel_size=downsample_scale * 10 + 1, + strides=downsample_scale, + padding="same", + use_bias=use_bias, + groups=in_chs // 4, + kernel_initializer=get_initializer(initializer_seed), + ) + ] + discriminator += [ + getattr(tf.keras.layers, nonlinear_activation)( + **nonlinear_activation_params + ) + ] + in_chs = out_chs + + # add final layers + out_chs = min(in_chs * 2, max_downsample_filters) + discriminator += [ + tf.keras.layers.Conv1D( + filters=out_chs, + kernel_size=kernel_sizes[0], + padding="same", + use_bias=use_bias, + kernel_initializer=get_initializer(initializer_seed), + ) + ] + discriminator += [ + getattr(tf.keras.layers, nonlinear_activation)( + 
**nonlinear_activation_params + ) + ] + discriminator += [ + tf.keras.layers.Conv1D( + filters=out_channels, + kernel_size=kernel_sizes[1], + padding="same", + use_bias=use_bias, + kernel_initializer=get_initializer(initializer_seed), + ) + ] + + if is_weight_norm is True: + self._apply_weightnorm(discriminator) + + self.disciminator = discriminator + + def call(self, x, **kwargs): + """Calculate forward propagation. + Args: + x (Tensor): Input noise signal (B, T, 1). + Returns: + List: List of output tensors of each layer. + """ + outs = [] + for f in self.disciminator: + x = f(x) + outs += [x] + return outs + + def _apply_weightnorm(self, list_layers): + """Try apply weightnorm for all layer in list_layers.""" + for i in range(len(list_layers)): + try: + layer_name = list_layers[i].name.lower() + if "conv1d" in layer_name or "dense" in layer_name: + list_layers[i] = WeightNormalization(list_layers[i]) + except Exception: + pass + + +class TFMelGANMultiScaleDiscriminator(BaseModel): + """MelGAN multi-scale discriminator module.""" + + def __init__(self, config, **kwargs): + """Initilize MelGAN multi-scale discriminator module. 
+ Args: + config: config object for melgan discriminator + """ + super().__init__(**kwargs) + self.discriminator = [] + + # add discriminator + for i in range(config.scales): + self.discriminator += [ + TFMelGANDiscriminator( + out_channels=config.out_channels, + kernel_sizes=config.kernel_sizes, + filters=config.filters, + max_downsample_filters=config.max_downsample_filters, + use_bias=config.use_bias, + downsample_scales=config.downsample_scales, + nonlinear_activation=config.nonlinear_activation, + nonlinear_activation_params=config.nonlinear_activation_params, + padding_type=config.padding_type, + is_weight_norm=config.is_weight_norm, + initializer_seed=config.initializer_seed, + name="melgan_discriminator_scale_._{}".format(i), + ) + ] + self.pooling = getattr(tf.keras.layers, config.downsample_pooling)( + **config.downsample_pooling_params + ) + + def call(self, x, **kwargs): + """Calculate forward propagation. + Args: + x (Tensor): Input noise signal (B, T, 1). + Returns: + List: List of list of each discriminator outputs, which consists of each layer output tensors. + """ + outs = [] + for f in self.discriminator: + outs += [f(x)] + x = self.pooling(x) + return outs diff --git a/TensorFlowTTS/tensorflow_tts/models/parallel_wavegan.py b/TensorFlowTTS/tensorflow_tts/models/parallel_wavegan.py new file mode 100644 index 0000000000000000000000000000000000000000..c19983f89db22fe5a589e3361c23b05e1e232eb3 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/models/parallel_wavegan.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The TensorFlowTTS Team and Tomoki Hayashi (@kan-bayashi) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Parallel-wavegan Modules. Based on pytorch implementation (https://github.com/kan-bayashi/ParallelWaveGAN)""" + +import tensorflow as tf + +from tensorflow_tts.models import BaseModel + + +def get_initializer(initializer_seed=42): + """Creates a `tf.initializers.he_normal` with the given seed. + Args: + initializer_seed: int, initializer seed. + Returns: + HeNormal initializer with seed = `initializer_seed`. + """ + return tf.keras.initializers.he_normal(seed=initializer_seed) + + +class TFConv1d1x1(tf.keras.layers.Conv1D): + """1x1 Conv1d with customized initialization.""" + + def __init__(self, filters, use_bias, padding, initializer_seed, **kwargs): + """Initialize 1x1 Conv1d module.""" + super().__init__( + filters=filters, + kernel_size=1, + strides=1, + padding=padding, + dilation_rate=1, + use_bias=use_bias, + kernel_initializer=get_initializer(initializer_seed), + **kwargs, + ) + + +class TFConv1d(tf.keras.layers.Conv1D): + """Conv1d with customized initialization.""" + + def __init__(self, *args, **kwargs): + """Initialize Conv1d module.""" + initializer_seed = kwargs.pop("initializer_seed", 42) + super().__init__( + *args, **kwargs, kernel_initializer=get_initializer(initializer_seed) + ) + + +class TFResidualBlock(tf.keras.layers.Layer): + """Residual block module in WaveNet.""" + + def __init__( + self, + kernel_size=3, + residual_channels=64, + gate_channels=128, + skip_channels=64, + aux_channels=80, + dropout_rate=0.0, + dilation_rate=1, + use_bias=True, + use_causal_conv=False, + initializer_seed=42, + **kwargs, + ): + """Initialize 
ResidualBlock module. + + Args: + kernel_size (int): Kernel size of dilation convolution layer. + residual_channels (int): Number of channels for residual connection. + skip_channels (int): Number of channels for skip connection. + aux_channels (int): Local conditioning channels i.e. auxiliary input dimension. + dropout_rate (float): Dropout probability. + dilation_rate (int): Dilation factor. + use_bias (bool): Whether to add bias parameter in convolution layers. + use_causal_conv (bool): Whether to use use_causal_conv or non-use_causal_conv convolution. + initializer_seed (int32): initializer seed. + """ + super().__init__(**kwargs) + self.dropout_rate = dropout_rate + # no future time stamps available + self.use_causal_conv = use_causal_conv + + # dilation conv + self.conv = TFConv1d( + filters=gate_channels, + kernel_size=kernel_size, + padding="same" if self.use_causal_conv is False else "causal", + strides=1, + dilation_rate=dilation_rate, + use_bias=use_bias, + initializer_seed=initializer_seed, + ) + + # local conditionong + if aux_channels > 0: + self.conv1x1_aux = TFConv1d1x1( + gate_channels, + use_bias=False, + padding="same", + initializer_seed=initializer_seed, + name="conv1x1_aux", + ) + else: + self.conv1x1_aux = None + + # conv output is split into two groups + gate_out_channels = gate_channels // 2 + self.conv1x1_out = TFConv1d1x1( + residual_channels, + use_bias=use_bias, + padding="same", + initializer_seed=initializer_seed, + name="conv1x1_out", + ) + self.conv1x1_skip = TFConv1d1x1( + skip_channels, + use_bias=use_bias, + padding="same", + initializer_seed=initializer_seed, + name="conv1x1_skip", + ) + + self.dropout = tf.keras.layers.Dropout(rate=self.dropout_rate) + + def call(self, x, c, training=False): + """Calculate forward propagation. + + Args: + x (Tensor): Input tensor (B, residual_channels, T). + c (Tensor): Local conditioning auxiliary tensor (B, aux_channels, T). 
+ + Returns: + Tensor: Output tensor for residual connection (B, T, residual_channels). + Tensor: Output tensor for skip connection (B, T, skip_channels). + """ + residual = x + x = self.dropout(x, training=training) + x = self.conv(x) + + # split into two part for gated activation + xa, xb = tf.split(x, 2, axis=-1) + + # local conditioning + if c is not None: + assert self.conv1x1_aux is not None + c = self.conv1x1_aux(c) + ca, cb = tf.split(c, 2, axis=-1) + xa, xb = xa + ca, xb + cb + + x = tf.nn.tanh(xa) * tf.nn.sigmoid(xb) + + # for skip connection + s = self.conv1x1_skip(x) + + # for residual connection + x = self.conv1x1_out(x) + x = (x + residual) * tf.math.sqrt(0.5) + + return x, s + + +class TFStretch1d(tf.keras.layers.Layer): + """Stretch2d module.""" + + def __init__(self, x_scale, y_scale, method="nearest", **kwargs): + """Initialize Stretch2d module. + + Args: + x_scale (int): X scaling factor (Time axis in spectrogram). + y_scale (int): Y scaling factor (Frequency axis in spectrogram). + method (str): Interpolation method. + + """ + super().__init__(**kwargs) + self.x_scale = x_scale + self.y_scale = y_scale + self.method = method + + def call(self, x): + """Calculate forward propagation. + + Args: + x (Tensor): Input tensor (B, T, C, 1). + Returns: + Tensor: Interpolated tensor (B, T * x_scale, C * y_scale, 1) + + """ + x_shape = tf.shape(x) + new_size = (x_shape[1] * self.x_scale, x_shape[2] * self.y_scale) + x = tf.image.resize(x, method=self.method, size=new_size) + return x + + +class TFUpsampleNetWork(tf.keras.layers.Layer): + """Upsampling network module.""" + + def __init__( + self, + output_channels, + upsample_scales, + nonlinear_activation=None, + nonlinear_activation_params={}, + interpolate_mode="nearest", + freq_axis_kernel_size=1, + use_causal_conv=False, + **kwargs, + ): + """Initialize upsampling network module. + + Args: + output_channels (int): output feature channels. + upsample_scales (list): List of upsampling scales. 
+ nonlinear_activation (str): Activation function name. + nonlinear_activation_params (dict): Arguments for specified activation function. + interpolate_mode (str): Interpolation mode. + freq_axis_kernel_size (int): Kernel size in the direction of frequency axis. + + """ + super().__init__(**kwargs) + self.use_causal_conv = use_causal_conv + self.up_layers = [] + + for scale in upsample_scales: + # interpolation layer + stretch = TFStretch1d( + scale, 1, interpolate_mode, name="stretch_._{}".format(scale) + ) # ->> outputs: [B, T * scale, C * 1, 1] + self.up_layers += [stretch] + + # conv layer + assert ( + freq_axis_kernel_size - 1 + ) % 2 == 0, "Not support even number freq axis kernel size." + kernel_size = scale * 2 + 1 + conv = tf.keras.layers.Conv2D( + filters=1, + kernel_size=(kernel_size, freq_axis_kernel_size), + padding="causal" if self.use_causal_conv is True else "same", + use_bias=False, + ) # ->> outputs: [B, T * scale, C * 1, 1] + self.up_layers += [conv] + + # nonlinear + if nonlinear_activation is not None: + nonlinear = getattr(tf.keras.layers, nonlinear_activation)( + **nonlinear_activation_params + ) + self.up_layers += [nonlinear] + + def call(self, c): + """Calculate forward propagation. + Args: + c : Input tensor (B, T, C). + Returns: + Tensor: Upsampled tensor (B, T', C), where T' = T * prod(upsample_scales). + """ + c = tf.expand_dims(c, -1) # [B, T, C, 1] + for f in self.up_layers: + c = f(c) + return tf.squeeze(c, -1) # [B, T, C] + + +class TFConvInUpsampleNetWork(tf.keras.layers.Layer): + """Convolution + upsampling network module.""" + + def __init__( + self, + upsample_scales, + nonlinear_activation=None, + nonlinear_activation_params={}, + interpolate_mode="nearest", + freq_axis_kernel_size=1, + aux_channels=80, + aux_context_window=0, + use_causal_conv=False, + initializer_seed=42, + **kwargs, + ): + """Initialize convolution + upsampling network module. + + Args: + upsample_scales (list): List of upsampling scales. 
+ nonlinear_activation (str): Activation function name. + nonlinear_activation_params (dict): Arguments for specified activation function. + mode (str): Interpolation mode. + freq_axis_kernel_size (int): Kernel size in the direction of frequency axis. + aux_channels (int): Number of channels of pre-convolutional layer. + aux_context_window (int): Context window size of the pre-convolutional layer. + use_causal_conv (bool): Whether to use causal structure. + + """ + super().__init__(**kwargs) + self.aux_context_window = aux_context_window + self.use_causal_conv = use_causal_conv and aux_context_window > 0 + + # To capture wide-context information in conditional features + kernel_size = ( + aux_context_window + 1 if use_causal_conv else 2 * aux_context_window + 1 + ) + + self.conv_in = TFConv1d( + filters=aux_channels, + kernel_size=kernel_size, + padding="same", + use_bias=False, + initializer_seed=initializer_seed, + name="conv_in", + ) + self.upsample = TFUpsampleNetWork( + output_channels=aux_channels, + upsample_scales=upsample_scales, + nonlinear_activation=nonlinear_activation, + nonlinear_activation_params=nonlinear_activation_params, + interpolate_mode=interpolate_mode, + freq_axis_kernel_size=freq_axis_kernel_size, + use_causal_conv=use_causal_conv, + name="upsample_network", + ) + + def call(self, c): + """Calculate forward propagation. + + Args: + c : Input tensor (B, T', C). + + Returns: + Tensor: Upsampled tensor (B, T, C), + where T = (T' - aux_context_window * 2) * prod(upsample_scales). + + Note: + The length of inputs considers the context window size. 
+ """ + c_ = self.conv_in(c) + return self.upsample(c_) + + +class TFParallelWaveGANGenerator(BaseModel): + """Parallel WaveGAN Generator module.""" + + def __init__(self, config, **kwargs): + super().__init__(**kwargs) + self.out_channels = config.out_channels + self.aux_channels = config.aux_channels + self.n_layers = config.n_layers + self.stacks = config.stacks + self.kernel_size = config.kernel_size + self.upsample_params = config.upsample_params + + # check the number of layers and stacks + assert self.n_layers % self.stacks == 0 + n_layers_per_stack = self.n_layers // self.stacks + + # define first convolution + self.first_conv = TFConv1d1x1( + filters=config.residual_channels, + use_bias=True, + padding="same", + initializer_seed=config.initializer_seed, + name="first_convolution", + ) + + # define conv + upsampling network + if config.upsample_conditional_features: + self.upsample_params.update({"use_causal_conv": config.use_causal_conv}) + self.upsample_params.update( + { + "aux_channels": config.aux_channels, + "aux_context_window": config.aux_context_window, + } + ) + self.upsample_net = TFConvInUpsampleNetWork(**self.upsample_params) + else: + self.upsample_net = None + + # define residual blocks + self.conv_layers = [] + for layer in range(self.n_layers): + dilation_rate = 2 ** (layer % n_layers_per_stack) + conv = TFResidualBlock( + kernel_size=config.kernel_size, + residual_channels=config.residual_channels, + gate_channels=config.gate_channels, + skip_channels=config.skip_channels, + aux_channels=config.aux_channels, + dilation_rate=dilation_rate, + dropout_rate=config.dropout_rate, + use_bias=config.use_bias, + use_causal_conv=config.use_causal_conv, + initializer_seed=config.initializer_seed, + name="residual_block_._{}".format(layer), + ) + self.conv_layers += [conv] + + # define output layers + self.last_conv_layers = [ + tf.keras.layers.ReLU(), + TFConv1d1x1( + filters=config.skip_channels, + use_bias=config.use_bias, + padding="same", + 
initializer_seed=config.initializer_seed, + ), + tf.keras.layers.ReLU(), + TFConv1d1x1( + filters=config.out_channels, + use_bias=True, + padding="same", + initializer_seed=config.initializer_seed, + ), + tf.keras.layers.Activation("tanh"), + ] + + def _build(self): + mels = tf.random.uniform(shape=[2, 20, 80], dtype=tf.float32) + self(mels, training=tf.cast(True, tf.bool)) + + def call(self, mels, training=False, **kwargs): + """Calculate forward propagation. + + Args: + mels (Tensor): Local conditioning auxiliary features (B, T', C). + Returns: + + Tensor: Output tensor (B, T, 1) + """ + # perform upsampling + if mels is not None and self.upsample_net is not None: + c = self.upsample_net(mels) + + # random noise x + # enccode to hidden representation + x = tf.expand_dims(tf.random.normal(shape=tf.shape(c)[0:2]), axis=2) + x = self.first_conv(x) + skips = 0 + for f in self.conv_layers: + x, h = f(x, c, training=training) + skips += h + skips *= tf.math.sqrt(1.0 / len(self.conv_layers)) + + # apply final layers + x = skips + for f in self.last_conv_layers: + x = f(x) + + return x + + @tf.function( + experimental_relax_shapes=True, + input_signature=[ + tf.TensorSpec(shape=[None, None, 80], dtype=tf.float32, name="mels"), + ], + ) + def inference(self, mels): + """Calculate forward propagation. + + Args: + c (Tensor): Local conditioning auxiliary features (B, T', C). 
+ Returns: + + Tensor: Output tensor (B, T, 1) + """ + # perform upsampling + if mels is not None and self.upsample_net is not None: + c = self.upsample_net(mels) + + # enccode to hidden representation + x = tf.expand_dims(tf.random.normal(shape=tf.shape(c)[0:2]), axis=2) + x = self.first_conv(x) + skips = 0 + for f in self.conv_layers: + x, h = f(x, c, training=False) + skips += h + skips *= tf.math.sqrt(1.0 / len(self.conv_layers)) + + # apply final layers + x = skips + for f in self.last_conv_layers: + x = f(x) + + return x + + +class TFParallelWaveGANDiscriminator(BaseModel): + """Parallel WaveGAN Discriminator module.""" + + def __init__(self, config, **kwargs): + super().__init__(**kwargs) + assert (config.kernel_size - 1) % 2 == 0, "Not support even number kernel size." + assert config.dilation_factor > 0, "Dilation factor must be > 0." + self.conv_layers = [] + for i in range(config.n_layers - 1): + if i == 0: + dilation_rate = 1 + else: + dilation_rate = ( + i if config.dilation_factor == 1 else config.dilation_factor ** i + ) + self.conv_layers += [ + TFConv1d( + filters=config.conv_channels, + kernel_size=config.kernel_size, + padding="same", + dilation_rate=dilation_rate, + use_bias=config.use_bias, + initializer_seed=config.initializer_seed, + ) + ] + self.conv_layers += [ + getattr(tf.keras.layers, config.nonlinear_activation)( + **config.nonlinear_activation_params + ) + ] + self.conv_layers += [ + TFConv1d( + filters=config.out_channels, + kernel_size=config.kernel_size, + padding="same", + use_bias=config.use_bias, + initializer_seed=config.initializer_seed, + ) + ] + + if config.apply_sigmoid_at_last: + self.conv_layers += [ + tf.keras.layers.Activation("sigmoid"), + ] + + def _build(self): + x = tf.random.uniform(shape=[2, 16000, 1]) + self(x) + + def call(self, x): + """Calculate forward propagation. + + Args: + x (Tensor): Input noise signal (B, T, 1). 
+ + Returns: + Tensor: Output tensor (B, T, 1) + """ + for f in self.conv_layers: + x = f(x) + return x diff --git a/TensorFlowTTS/tensorflow_tts/models/tacotron2.py b/TensorFlowTTS/tensorflow_tts/models/tacotron2.py new file mode 100644 index 0000000000000000000000000000000000000000..d3ccaa93c1a4bf04f62200d0cc428365e6b5ac1c --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/models/tacotron2.py @@ -0,0 +1,1040 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The Tacotron-2 Authors, Minh Nguyen (@dathudeptrai), Eren Gölge (@erogol) and Jae Yoo (@jaeyoo) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tacotron-2 Modules.""" + +import collections + +import numpy as np +import tensorflow as tf + +# TODO: once https://github.com/tensorflow/addons/pull/1964 is fixed, +# uncomment this line. +# from tensorflow_addons.seq2seq import dynamic_decode +from tensorflow_addons.seq2seq import BahdanauAttention, Decoder, Sampler + +from tensorflow_tts.utils import dynamic_decode + +from tensorflow_tts.models import BaseModel + + +def get_initializer(initializer_range=0.02): + """Creates a `tf.initializers.truncated_normal` with the given range. + Args: + initializer_range: float, initializer range for stddev. + Returns: + TruncatedNormal initializer with stddev = `initializer_range`. 
+ """ + return tf.keras.initializers.TruncatedNormal(stddev=initializer_range) + + +def gelu(x): + """Gaussian Error Linear unit.""" + cdf = 0.5 * (1.0 + tf.math.erf(x / tf.math.sqrt(2.0))) + return x * cdf + + +def gelu_new(x): + """Smoother gaussian Error Linear Unit.""" + cdf = 0.5 * (1.0 + tf.tanh((np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3))))) + return x * cdf + + +def swish(x): + """Swish activation function.""" + return tf.nn.swish(x) + + +def mish(x): + return x * tf.math.tanh(tf.math.softplus(x)) + + +ACT2FN = { + "identity": tf.keras.layers.Activation("linear"), + "tanh": tf.keras.layers.Activation("tanh"), + "gelu": tf.keras.layers.Activation(gelu), + "relu": tf.keras.activations.relu, + "swish": tf.keras.layers.Activation(swish), + "gelu_new": tf.keras.layers.Activation(gelu_new), + "mish": tf.keras.layers.Activation(mish), +} + + +class TFEmbedding(tf.keras.layers.Embedding): + """Faster version of embedding.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def call(self, inputs): + inputs = tf.cast(tf.expand_dims(inputs, -1), tf.int32) + outputs = tf.gather_nd(self.embeddings, inputs) + return outputs + + +class TFTacotronConvBatchNorm(tf.keras.layers.Layer): + """Tacotron-2 Convolutional Batchnorm module.""" + + def __init__( + self, filters, kernel_size, dropout_rate, activation=None, name_idx=None + ): + super().__init__() + self.conv1d = tf.keras.layers.Conv1D( + filters, + kernel_size, + kernel_initializer=get_initializer(0.02), + padding="same", + name="conv_._{}".format(name_idx), + ) + self.norm = tf.keras.layers.experimental.SyncBatchNormalization( + axis=-1, name="batch_norm_._{}".format(name_idx) + ) + self.dropout = tf.keras.layers.Dropout( + rate=dropout_rate, name="dropout_._{}".format(name_idx) + ) + self.act = ACT2FN[activation] + + def call(self, inputs, training=False): + outputs = self.conv1d(inputs) + outputs = self.norm(outputs, training=training) + outputs = self.act(outputs) + outputs = 
self.dropout(outputs, training=training) + return outputs + + +class TFTacotronEmbeddings(tf.keras.layers.Layer): + """Construct character/phoneme/positional/speaker embeddings.""" + + def __init__(self, config, **kwargs): + """Init variables.""" + super().__init__(**kwargs) + self.vocab_size = config.vocab_size + self.embedding_hidden_size = config.embedding_hidden_size + self.initializer_range = config.initializer_range + self.config = config + + if config.n_speakers > 1: + self.speaker_embeddings = TFEmbedding( + config.n_speakers, + config.embedding_hidden_size, + embeddings_initializer=get_initializer(self.initializer_range), + name="speaker_embeddings", + ) + self.speaker_fc = tf.keras.layers.Dense( + units=config.embedding_hidden_size, name="speaker_fc" + ) + + self.LayerNorm = tf.keras.layers.LayerNormalization( + epsilon=config.layer_norm_eps, name="LayerNorm" + ) + self.dropout = tf.keras.layers.Dropout(config.embedding_dropout_prob) + + def build(self, input_shape): + """Build shared character/phoneme embedding layers.""" + with tf.name_scope("character_embeddings"): + self.character_embeddings = self.add_weight( + "weight", + shape=[self.vocab_size, self.embedding_hidden_size], + initializer=get_initializer(self.initializer_range), + ) + super().build(input_shape) + + def call(self, inputs, training=False): + """Get character embeddings of inputs. + Args: + 1. character, Tensor (int32) shape [batch_size, length]. + 2. speaker_id, Tensor (int32) shape [batch_size] + Returns: + Tensor (float32) shape [batch_size, length, embedding_size]. 
+ """ + return self._embedding(inputs, training=training) + + def _embedding(self, inputs, training=False): + """Applies embedding based on inputs tensor.""" + input_ids, speaker_ids = inputs + + # create embeddings + inputs_embeds = tf.gather(self.character_embeddings, input_ids) + embeddings = inputs_embeds + + if self.config.n_speakers > 1: + speaker_embeddings = self.speaker_embeddings(speaker_ids) + speaker_features = tf.math.softplus(self.speaker_fc(speaker_embeddings)) + # extended speaker embeddings + extended_speaker_features = speaker_features[:, tf.newaxis, :] + # sum all embedding + embeddings += extended_speaker_features + + # apply layer-norm and dropout for embeddings. + embeddings = self.LayerNorm(embeddings) + embeddings = self.dropout(embeddings, training=training) + + return embeddings + + +class TFTacotronEncoderConvs(tf.keras.layers.Layer): + """Tacotron-2 Encoder Convolutional Batchnorm module.""" + + def __init__(self, config, **kwargs): + """Init variables.""" + super().__init__(**kwargs) + self.conv_batch_norm = [] + for i in range(config.n_conv_encoder): + conv = TFTacotronConvBatchNorm( + filters=config.encoder_conv_filters, + kernel_size=config.encoder_conv_kernel_sizes, + activation=config.encoder_conv_activation, + dropout_rate=config.encoder_conv_dropout_rate, + name_idx=i, + ) + self.conv_batch_norm.append(conv) + + def call(self, inputs, training=False): + """Call logic.""" + outputs = inputs + for conv in self.conv_batch_norm: + outputs = conv(outputs, training=training) + return outputs + + +class TFTacotronEncoder(tf.keras.layers.Layer): + """Tacotron-2 Encoder.""" + + def __init__(self, config, **kwargs): + """Init variables.""" + super().__init__(**kwargs) + self.embeddings = TFTacotronEmbeddings(config, name="embeddings") + self.convbn = TFTacotronEncoderConvs(config, name="conv_batch_norm") + self.bilstm = tf.keras.layers.Bidirectional( + tf.keras.layers.LSTM( + units=config.encoder_lstm_units, return_sequences=True + ), + 
name="bilstm", + ) + + if config.n_speakers > 1: + self.encoder_speaker_embeddings = TFEmbedding( + config.n_speakers, + config.embedding_hidden_size, + embeddings_initializer=get_initializer(config.initializer_range), + name="encoder_speaker_embeddings", + ) + self.encoder_speaker_fc = tf.keras.layers.Dense( + units=config.encoder_lstm_units * 2, name="encoder_speaker_fc" + ) + + self.config = config + + def call(self, inputs, training=False): + """Call logic.""" + input_ids, speaker_ids, input_mask = inputs + + # create embedding and mask them since we sum + # speaker embedding to all character embedding. + input_embeddings = self.embeddings([input_ids, speaker_ids], training=training) + + # pass embeddings to convolution batch norm + conv_outputs = self.convbn(input_embeddings, training=training) + + # bi-lstm. + outputs = self.bilstm(conv_outputs, mask=input_mask) + + if self.config.n_speakers > 1: + encoder_speaker_embeddings = self.encoder_speaker_embeddings(speaker_ids) + encoder_speaker_features = tf.math.softplus( + self.encoder_speaker_fc(encoder_speaker_embeddings) + ) + # extended encoderspeaker embeddings + extended_encoder_speaker_features = encoder_speaker_features[ + :, tf.newaxis, : + ] + # sum to encoder outputs + outputs += extended_encoder_speaker_features + + return outputs + + +class Tacotron2Sampler(Sampler): + """Tacotron2 sampler for Seq2Seq training.""" + + def __init__( + self, config, + ): + super().__init__() + self.config = config + # create schedule factor. + # the input of a next decoder cell is calculated by formular: + # next_inputs = ratio * prev_groundtruth_outputs + (1.0 - ratio) * prev_predicted_outputs. 
+ self._ratio = tf.constant(1.0, dtype=tf.float32) + self._reduction_factor = self.config.reduction_factor + + def setup_target(self, targets, mel_lengths): + """Setup ground-truth mel outputs for decoder.""" + self.mel_lengths = mel_lengths + self.set_batch_size(tf.shape(targets)[0]) + self.targets = targets[ + :, self._reduction_factor - 1 :: self._reduction_factor, : + ] + self.max_lengths = tf.tile([tf.shape(self.targets)[1]], [self._batch_size]) + + @property + def batch_size(self): + return self._batch_size + + @property + def sample_ids_shape(self): + return tf.TensorShape([]) + + @property + def sample_ids_dtype(self): + return tf.int32 + + @property + def reduction_factor(self): + return self._reduction_factor + + def initialize(self): + """Return (Finished, next_inputs).""" + return ( + tf.tile([False], [self._batch_size]), + tf.tile([[0.0]], [self._batch_size, self.config.n_mels]), + ) + + def sample(self, time, outputs, state): + return tf.tile([0], [self._batch_size]) + + def next_inputs( + self, + time, + outputs, + state, + sample_ids, + stop_token_prediction, + training=False, + **kwargs, + ): + if training: + finished = time + 1 >= self.max_lengths + next_inputs = ( + self._ratio * self.targets[:, time, :] + + (1.0 - self._ratio) * outputs[:, -self.config.n_mels :] + ) + next_state = state + return (finished, next_inputs, next_state) + else: + stop_token_prediction = tf.nn.sigmoid(stop_token_prediction) + finished = tf.cast(tf.round(stop_token_prediction), tf.bool) + finished = tf.reduce_all(finished) + next_inputs = outputs[:, -self.config.n_mels :] + next_state = state + return (finished, next_inputs, next_state) + + def set_batch_size(self, batch_size): + self._batch_size = batch_size + + +class TFTacotronLocationSensitiveAttention(BahdanauAttention): + """Tacotron-2 Location Sensitive Attention module.""" + + def __init__( + self, + config, + memory, + mask_encoder=True, + memory_sequence_length=None, + is_cumulate=True, + ): + """Init 
variables.""" + memory_length = memory_sequence_length if (mask_encoder is True) else None + super().__init__( + units=config.attention_dim, + memory=memory, + memory_sequence_length=memory_length, + probability_fn="softmax", + name="LocationSensitiveAttention", + ) + self.location_convolution = tf.keras.layers.Conv1D( + filters=config.attention_filters, + kernel_size=config.attention_kernel, + padding="same", + use_bias=False, + name="location_conv", + ) + self.location_layer = tf.keras.layers.Dense( + units=config.attention_dim, use_bias=False, name="location_layer" + ) + + self.v = tf.keras.layers.Dense(1, use_bias=True, name="scores_attention") + self.config = config + self.is_cumulate = is_cumulate + self.use_window = False + + def setup_window(self, win_front=2, win_back=4): + self.win_front = tf.constant(win_front, tf.int32) + self.win_back = tf.constant(win_back, tf.int32) + + self._indices = tf.expand_dims(tf.range(tf.shape(self.keys)[1]), 0) + self._indices = tf.tile( + self._indices, [tf.shape(self.keys)[0], 1] + ) # [batch_size, max_time] + + self.use_window = True + + def _compute_window_mask(self, max_alignments): + """Compute window mask for inference. 
+ Args: + max_alignments (int): [batch_size] + """ + expanded_max_alignments = tf.expand_dims(max_alignments, 1) # [batch_size, 1] + low = expanded_max_alignments - self.win_front + high = expanded_max_alignments + self.win_back + mlow = tf.cast((self._indices < low), tf.float32) + mhigh = tf.cast((self._indices > high), tf.float32) + mask = mlow + mhigh + return mask # [batch_size, max_length] + + def __call__(self, inputs, training=False): + query, state, prev_max_alignments = inputs + + processed_query = self.query_layer(query) if self.query_layer else query + processed_query = tf.expand_dims(processed_query, 1) + + expanded_alignments = tf.expand_dims(state, axis=2) + f = self.location_convolution(expanded_alignments) + processed_location_features = self.location_layer(f) + + energy = self._location_sensitive_score( + processed_query, processed_location_features, self.keys + ) + + # mask energy on inference steps. + if self.use_window is True: + window_mask = self._compute_window_mask(prev_max_alignments) + energy = energy + window_mask * -1e20 + + alignments = self.probability_fn(energy, state) + + if self.is_cumulate: + state = alignments + state + else: + state = alignments + + expanded_alignments = tf.expand_dims(alignments, 2) + context = tf.reduce_sum(expanded_alignments * self.values, 1) + + return context, alignments, state + + def _location_sensitive_score(self, W_query, W_fil, W_keys): + """Calculate location sensitive energy.""" + return tf.squeeze(self.v(tf.nn.tanh(W_keys + W_query + W_fil)), -1) + + def get_initial_state(self, batch_size, size): + """Get initial alignments.""" + return tf.zeros(shape=[batch_size, size], dtype=tf.float32) + + def get_initial_context(self, batch_size): + """Get initial attention.""" + return tf.zeros( + shape=[batch_size, self.config.encoder_lstm_units * 2], dtype=tf.float32 + ) + + +class TFTacotronPrenet(tf.keras.layers.Layer): + """Tacotron-2 prenet.""" + + def __init__(self, config, **kwargs): + """Init 
variables.""" + super().__init__(**kwargs) + self.prenet_dense = [ + tf.keras.layers.Dense( + units=config.prenet_units, + activation=ACT2FN[config.prenet_activation], + name="dense_._{}".format(i), + ) + for i in range(config.n_prenet_layers) + ] + self.dropout = tf.keras.layers.Dropout( + rate=config.prenet_dropout_rate, name="dropout" + ) + + def call(self, inputs, training=False): + """Call logic.""" + outputs = inputs + for layer in self.prenet_dense: + outputs = layer(outputs) + outputs = self.dropout(outputs, training=True) + return outputs + + +class TFTacotronPostnet(tf.keras.layers.Layer): + """Tacotron-2 postnet.""" + + def __init__(self, config, **kwargs): + """Init variables.""" + super().__init__(**kwargs) + self.conv_batch_norm = [] + for i in range(config.n_conv_postnet): + conv = TFTacotronConvBatchNorm( + filters=config.postnet_conv_filters, + kernel_size=config.postnet_conv_kernel_sizes, + dropout_rate=config.postnet_dropout_rate, + activation="identity" if i + 1 == config.n_conv_postnet else "tanh", + name_idx=i, + ) + self.conv_batch_norm.append(conv) + + def call(self, inputs, training=False): + """Call logic.""" + outputs = inputs + for _, conv in enumerate(self.conv_batch_norm): + outputs = conv(outputs, training=training) + return outputs + + +TFTacotronDecoderCellState = collections.namedtuple( + "TFTacotronDecoderCellState", + [ + "attention_lstm_state", + "decoder_lstms_state", + "context", + "time", + "state", + "alignment_history", + "max_alignments", + ], +) + +TFDecoderOutput = collections.namedtuple( + "TFDecoderOutput", ("mel_output", "token_output", "sample_id") +) + + +class TFTacotronDecoderCell(tf.keras.layers.AbstractRNNCell): + """Tacotron-2 custom decoder cell.""" + + def __init__(self, config, enable_tflite_convertible=False, **kwargs): + """Init variables.""" + super().__init__(**kwargs) + self.enable_tflite_convertible = enable_tflite_convertible + self.prenet = TFTacotronPrenet(config, name="prenet") + + # define lstm 
cell on decoder. + # TODO(@dathudeptrai) switch to zone-out lstm. + self.attention_lstm = tf.keras.layers.LSTMCell( + units=config.decoder_lstm_units, name="attention_lstm_cell" + ) + lstm_cells = [] + for i in range(config.n_lstm_decoder): + lstm_cell = tf.keras.layers.LSTMCell( + units=config.decoder_lstm_units, name="lstm_cell_._{}".format(i) + ) + lstm_cells.append(lstm_cell) + self.decoder_lstms = tf.keras.layers.StackedRNNCells( + lstm_cells, name="decoder_lstms" + ) + + # define attention layer. + if config.attention_type == "lsa": + # create location-sensitive attention. + self.attention_layer = TFTacotronLocationSensitiveAttention( + config, + memory=None, + mask_encoder=True, + memory_sequence_length=None, + is_cumulate=True, + ) + else: + raise ValueError("Only lsa (location-sensitive attention) is supported") + + # frame, stop projection layer. + self.frame_projection = tf.keras.layers.Dense( + units=config.n_mels * config.reduction_factor, name="frame_projection" + ) + self.stop_projection = tf.keras.layers.Dense( + units=config.reduction_factor, name="stop_projection" + ) + + self.config = config + + def set_alignment_size(self, alignment_size): + self.alignment_size = alignment_size + + @property + def output_size(self): + """Return output (mel) size.""" + return self.frame_projection.units + + @property + def state_size(self): + """Return hidden state size.""" + return TFTacotronDecoderCellState( + attention_lstm_state=self.attention_lstm.state_size, + decoder_lstms_state=self.decoder_lstms.state_size, + time=tf.TensorShape([]), + attention=self.config.attention_dim, + state=self.alignment_size, + alignment_history=(), + max_alignments=tf.TensorShape([1]), + ) + + def get_initial_state(self, batch_size): + """Get initial states.""" + initial_attention_lstm_cell_states = self.attention_lstm.get_initial_state( + None, batch_size, dtype=tf.float32 + ) + initial_decoder_lstms_cell_states = self.decoder_lstms.get_initial_state( + None, batch_size, 
dtype=tf.float32 + ) + initial_context = tf.zeros( + shape=[batch_size, self.config.encoder_lstm_units * 2], dtype=tf.float32 + ) + initial_state = self.attention_layer.get_initial_state( + batch_size, size=self.alignment_size + ) + if self.enable_tflite_convertible: + initial_alignment_history = () + else: + initial_alignment_history = tf.TensorArray( + dtype=tf.float32, size=0, dynamic_size=True + ) + return TFTacotronDecoderCellState( + attention_lstm_state=initial_attention_lstm_cell_states, + decoder_lstms_state=initial_decoder_lstms_cell_states, + time=tf.zeros([], dtype=tf.int32), + context=initial_context, + state=initial_state, + alignment_history=initial_alignment_history, + max_alignments=tf.zeros([batch_size], dtype=tf.int32), + ) + + def call(self, inputs, states, training=False): + """Call logic.""" + decoder_input = inputs + + # 1. apply prenet for decoder_input. + prenet_out = self.prenet(decoder_input, training=training) # [batch_size, dim] + + # 2. concat prenet_out and prev context vector + # then use it as input of attention lstm layer. + attention_lstm_input = tf.concat([prenet_out, states.context], axis=-1) + attention_lstm_output, next_attention_lstm_state = self.attention_lstm( + attention_lstm_input, states.attention_lstm_state + ) + + # 3. compute context, alignment and cumulative alignment. + prev_state = states.state + if not self.enable_tflite_convertible: + prev_alignment_history = states.alignment_history + prev_max_alignments = states.max_alignments + context, alignments, state = self.attention_layer( + [attention_lstm_output, prev_state, prev_max_alignments], training=training, + ) + + # 4. run decoder lstm(s) + decoder_lstms_input = tf.concat([attention_lstm_output, context], axis=-1) + decoder_lstms_output, next_decoder_lstms_state = self.decoder_lstms( + decoder_lstms_input, states.decoder_lstms_state + ) + + # 5. compute frame feature and stop token. 
+ projection_inputs = tf.concat([decoder_lstms_output, context], axis=-1) + decoder_outputs = self.frame_projection(projection_inputs) + + stop_inputs = tf.concat([decoder_lstms_output, decoder_outputs], axis=-1) + stop_tokens = self.stop_projection(stop_inputs) + + # 6. save alignment history to visualize. + if self.enable_tflite_convertible: + alignment_history = () + else: + alignment_history = prev_alignment_history.write(states.time, alignments) + + # 7. return new states. + new_states = TFTacotronDecoderCellState( + attention_lstm_state=next_attention_lstm_state, + decoder_lstms_state=next_decoder_lstms_state, + time=states.time + 1, + context=context, + state=state, + alignment_history=alignment_history, + max_alignments=tf.argmax(alignments, -1, output_type=tf.int32), + ) + + return (decoder_outputs, stop_tokens), new_states + + +class TFTacotronDecoder(Decoder): + """Tacotron-2 Decoder.""" + + def __init__( + self, + decoder_cell, + decoder_sampler, + output_layer=None, + enable_tflite_convertible=False, + ): + """Initial variables.""" + self.cell = decoder_cell + self.sampler = decoder_sampler + self.output_layer = output_layer + self.enable_tflite_convertible = enable_tflite_convertible + + def setup_decoder_init_state(self, decoder_init_state): + self.initial_state = decoder_init_state + + def initialize(self, **kwargs): + return self.sampler.initialize() + (self.initial_state,) + + @property + def output_size(self): + return TFDecoderOutput( + mel_output=tf.nest.map_structure( + lambda shape: tf.TensorShape(shape), self.cell.output_size + ), + token_output=tf.TensorShape(self.sampler.reduction_factor), + sample_id=tf.TensorShape([1]) + if self.enable_tflite_convertible + else self.sampler.sample_ids_shape, # tf.TensorShape([]) + ) + + @property + def output_dtype(self): + return TFDecoderOutput(tf.float32, tf.float32, self.sampler.sample_ids_dtype) + + @property + def batch_size(self): + return self.sampler._batch_size + + def step(self, time, inputs, 
state, training=False): + (mel_outputs, stop_tokens), cell_state = self.cell( + inputs, state, training=training + ) + if self.output_layer is not None: + mel_outputs = self.output_layer(mel_outputs) + sample_ids = self.sampler.sample( + time=time, outputs=mel_outputs, state=cell_state + ) + (finished, next_inputs, next_state) = self.sampler.next_inputs( + time=time, + outputs=mel_outputs, + state=cell_state, + sample_ids=sample_ids, + stop_token_prediction=stop_tokens, + training=training, + ) + + outputs = TFDecoderOutput(mel_outputs, stop_tokens, sample_ids) + return (outputs, next_state, next_inputs, finished) + + +class TFTacotron2(BaseModel): + """Tensorflow tacotron-2 model.""" + + def __init__(self, config, **kwargs): + """Initalize tacotron-2 layers.""" + enable_tflite_convertible = kwargs.pop("enable_tflite_convertible", False) + super().__init__(self, **kwargs) + self.encoder = TFTacotronEncoder(config, name="encoder") + self.decoder_cell = TFTacotronDecoderCell( + config, + name="decoder_cell", + enable_tflite_convertible=enable_tflite_convertible, + ) + self.decoder = TFTacotronDecoder( + self.decoder_cell, + Tacotron2Sampler(config), + enable_tflite_convertible=enable_tflite_convertible, + ) + self.postnet = TFTacotronPostnet(config, name="post_net") + self.post_projection = tf.keras.layers.Dense( + units=config.n_mels, name="residual_projection" + ) + + self.use_window_mask = False + self.maximum_iterations = 4000 + self.enable_tflite_convertible = enable_tflite_convertible + self.config = config + + def setup_window(self, win_front, win_back): + """Call only for inference.""" + self.use_window_mask = True + self.win_front = win_front + self.win_back = win_back + + def setup_maximum_iterations(self, maximum_iterations): + """Call only for inference.""" + self.maximum_iterations = maximum_iterations + + def _build(self): + input_ids = np.array([[1, 2, 3, 4, 5, 6, 7, 8, 9]]) + input_lengths = np.array([9]) + speaker_ids = np.array([0]) + mel_outputs = 
np.random.normal(size=(1, 50, 80)).astype(np.float32) + mel_lengths = np.array([50]) + self( + input_ids, + input_lengths, + speaker_ids, + mel_outputs, + mel_lengths, + 10, + training=True, + ) + + def call( + self, + input_ids, + input_lengths, + speaker_ids, + mel_gts, + mel_lengths, + maximum_iterations=None, + use_window_mask=False, + win_front=2, + win_back=3, + training=False, + **kwargs, + ): + """Call logic.""" + # create input-mask based on input_lengths + input_mask = tf.sequence_mask( + input_lengths, + maxlen=tf.reduce_max(input_lengths), + name="input_sequence_masks", + ) + + # Encoder Step. + encoder_hidden_states = self.encoder( + [input_ids, speaker_ids, input_mask], training=training + ) + + batch_size = tf.shape(encoder_hidden_states)[0] + alignment_size = tf.shape(encoder_hidden_states)[1] + + # Setup some initial placeholders for decoder step. Include: + # 1. mel_gts, mel_lengths for teacher forcing mode. + # 2. alignment_size for attention size. + # 3. initial state for decoder cell. + # 4. memory (encoder hidden state) for attention mechanism. + self.decoder.sampler.setup_target(targets=mel_gts, mel_lengths=mel_lengths) + self.decoder.cell.set_alignment_size(alignment_size) + self.decoder.setup_decoder_init_state( + self.decoder.cell.get_initial_state(batch_size) + ) + self.decoder.cell.attention_layer.setup_memory( + memory=encoder_hidden_states, + memory_sequence_length=input_lengths, # use for mask attention. + ) + if use_window_mask: + self.decoder.cell.attention_layer.setup_window( + win_front=win_front, win_back=win_back + ) + + # run decode step. 
+ ( + (frames_prediction, stop_token_prediction, _), + final_decoder_state, + _, + ) = dynamic_decode( + self.decoder, + maximum_iterations=maximum_iterations, + enable_tflite_convertible=self.enable_tflite_convertible, + training=training, + ) + + decoder_outputs = tf.reshape( + frames_prediction, [batch_size, -1, self.config.n_mels] + ) + stop_token_prediction = tf.reshape(stop_token_prediction, [batch_size, -1]) + + residual = self.postnet(decoder_outputs, training=training) + residual_projection = self.post_projection(residual) + + mel_outputs = decoder_outputs + residual_projection + + if self.enable_tflite_convertible: + mask = tf.math.not_equal( + tf.cast( + tf.reduce_sum(tf.abs(decoder_outputs), axis=-1), dtype=tf.int32 + ), + 0, + ) + decoder_outputs = tf.expand_dims( + tf.boolean_mask(decoder_outputs, mask), axis=0 + ) + mel_outputs = tf.expand_dims(tf.boolean_mask(mel_outputs, mask), axis=0) + alignment_history = () + else: + alignment_history = tf.transpose( + final_decoder_state.alignment_history.stack(), [1, 2, 0] + ) + + return decoder_outputs, mel_outputs, stop_token_prediction, alignment_history + + @tf.function( + experimental_relax_shapes=True, + input_signature=[ + tf.TensorSpec([None, None], dtype=tf.int32, name="input_ids"), + tf.TensorSpec([None,], dtype=tf.int32, name="input_lengths"), + tf.TensorSpec([None,], dtype=tf.int32, name="speaker_ids"), + ], + ) + def inference(self, input_ids, input_lengths, speaker_ids, **kwargs): + """Call logic.""" + # create input-mask based on input_lengths + input_mask = tf.sequence_mask( + input_lengths, + maxlen=tf.reduce_max(input_lengths), + name="input_sequence_masks", + ) + + # Encoder Step. + encoder_hidden_states = self.encoder( + [input_ids, speaker_ids, input_mask], training=False + ) + + batch_size = tf.shape(encoder_hidden_states)[0] + alignment_size = tf.shape(encoder_hidden_states)[1] + + # Setup some initial placeholders for decoder step. Include: + # 1. batch_size for inference. + # 2. 
        # 2. alignment_size for attention size.
        # 3. initial state for decoder cell.
        # 4. memory (encoder hidden state) for attention mechanism.
        # 5. window front/back to solve long sentence synthesize problems. (call after setup memory.)
        self.decoder.sampler.set_batch_size(batch_size)
        self.decoder.cell.set_alignment_size(alignment_size)
        self.decoder.setup_decoder_init_state(
            self.decoder.cell.get_initial_state(batch_size)
        )
        self.decoder.cell.attention_layer.setup_memory(
            memory=encoder_hidden_states,
            memory_sequence_length=input_lengths,  # use for mask attention.
        )
        if self.use_window_mask:
            self.decoder.cell.attention_layer.setup_window(
                win_front=self.win_front, win_back=self.win_back
            )

        # Run the auto-regressive decode loop (no teacher forcing at inference).
        (
            (frames_prediction, stop_token_prediction, _),
            final_decoder_state,
            _,
        ) = dynamic_decode(
            self.decoder, maximum_iterations=self.maximum_iterations, training=False
        )

        decoder_outputs = tf.reshape(
            frames_prediction, [batch_size, -1, self.config.n_mels]
        )
        stop_token_predictions = tf.reshape(stop_token_prediction, [batch_size, -1])

        # Post-net residual refines the coarse decoder mel prediction.
        residual = self.postnet(decoder_outputs, training=False)
        residual_projection = self.post_projection(residual)

        mel_outputs = decoder_outputs + residual_projection

        # Stacked TensorArray is [max_time, batch, alignment]; transpose to
        # [batch, alignment, max_time] for plotting/inspection.
        alignment_historys = tf.transpose(
            final_decoder_state.alignment_history.stack(), [1, 2, 0]
        )

        return decoder_outputs, mel_outputs, stop_token_predictions, alignment_historys

    @tf.function(
        experimental_relax_shapes=True,
        input_signature=[
            tf.TensorSpec([1, None], dtype=tf.int32, name="input_ids"),
            tf.TensorSpec([1,], dtype=tf.int32, name="input_lengths"),
            tf.TensorSpec([1,], dtype=tf.int32, name="speaker_ids"),
        ],
    )
    def inference_tflite(self, input_ids, input_lengths, speaker_ids, **kwargs):
        """Run inference with a fixed batch size of 1 so the graph is TFLite-convertible.

        Args:
            input_ids: int32 [1, T] phoneme/character id sequence.
            input_lengths: int32 [1] true (unpadded) length of input_ids.
            speaker_ids: int32 [1] speaker index for multi-speaker models.

        Returns:
            Tuple of (decoder_outputs, mel_outputs, stop_token_predictions,
            alignment_historys). When ``enable_tflite_convertible`` is set,
            ``alignment_historys`` is an empty tuple because the
            TensorArray-based history cannot be carried through TFLite.
        """
        # create input-mask based on input_lengths
        input_mask = tf.sequence_mask(
            input_lengths,
            maxlen=tf.reduce_max(input_lengths),
            name="input_sequence_masks",
        )

        # Encoder Step.
        encoder_hidden_states = self.encoder(
            [input_ids, speaker_ids, input_mask], training=False
        )

        batch_size = tf.shape(encoder_hidden_states)[0]
        alignment_size = tf.shape(encoder_hidden_states)[1]

        # Setup some initial placeholders for decoder step. Include:
        # 1. batch_size for inference.
        # 2. alignment_size for attention size.
        # 3. initial state for decoder cell.
        # 4. memory (encoder hidden state) for attention mechanism.
        # 5. window front/back to solve long sentence synthesize problems. (call after setup memory.)
        self.decoder.sampler.set_batch_size(batch_size)
        self.decoder.cell.set_alignment_size(alignment_size)
        self.decoder.setup_decoder_init_state(
            self.decoder.cell.get_initial_state(batch_size)
        )
        self.decoder.cell.attention_layer.setup_memory(
            memory=encoder_hidden_states,
            memory_sequence_length=input_lengths,  # use for mask attention.
        )
        if self.use_window_mask:
            self.decoder.cell.attention_layer.setup_window(
                win_front=self.win_front, win_back=self.win_back
            )

        # run decode step.
        (
            (frames_prediction, stop_token_prediction, _),
            final_decoder_state,
            _,
        ) = dynamic_decode(
            self.decoder,
            maximum_iterations=self.maximum_iterations,
            enable_tflite_convertible=self.enable_tflite_convertible,
            training=False,
        )

        decoder_outputs = tf.reshape(
            frames_prediction, [batch_size, -1, self.config.n_mels]
        )
        stop_token_predictions = tf.reshape(stop_token_prediction, [batch_size, -1])

        residual = self.postnet(decoder_outputs, training=False)
        residual_projection = self.post_projection(residual)

        mel_outputs = decoder_outputs + residual_projection

        if self.enable_tflite_convertible:
            # Drop trailing all-zero (padded) frames: a frame whose absolute
            # sum casts to int32 zero is treated as padding.
            mask = tf.math.not_equal(
                tf.cast(
                    tf.reduce_sum(tf.abs(decoder_outputs), axis=-1), dtype=tf.int32
                ),
                0,
            )
            decoder_outputs = tf.expand_dims(
                tf.boolean_mask(decoder_outputs, mask), axis=0
            )
            mel_outputs = tf.expand_dims(tf.boolean_mask(mel_outputs, mask), axis=0)
            alignment_historys = ()
        else:
            alignment_historys = tf.transpose(
                final_decoder_state.alignment_history.stack(), [1, 2, 0]
            )

        return decoder_outputs, mel_outputs, stop_token_predictions, alignment_historys
diff --git a/TensorFlowTTS/tensorflow_tts/optimizers/__init__.py b/TensorFlowTTS/tensorflow_tts/optimizers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a490ed675cf36db354928cc3abdab4b06343f833
--- /dev/null
+++ b/TensorFlowTTS/tensorflow_tts/optimizers/__init__.py
@@ -0,0 +1,2 @@
from tensorflow_tts.optimizers.adamweightdecay import AdamWeightDecay, WarmUp
from tensorflow_tts.optimizers.gradient_accumulate import GradientAccumulator
diff --git a/TensorFlowTTS/tensorflow_tts/optimizers/adamweightdecay.py b/TensorFlowTTS/tensorflow_tts/optimizers/adamweightdecay.py
new file mode 100644
index 0000000000000000000000000000000000000000..c2d0c1228438896b8afd394be49b17ddb6bee379
--- /dev/null
+++ b/TensorFlowTTS/tensorflow_tts/optimizers/adamweightdecay.py
@@ -0,0 +1,177 @@
# -*- coding: utf-8 -*-
# Copyright 2019 The
# TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AdamW for training self-attention."""


import re

import tensorflow as tf


class WarmUp(tf.keras.optimizers.schedules.LearningRateSchedule):
    """Applies a polynomial warmup schedule on top of a given decay schedule.

    For step < warmup_steps the learning rate is
    ``initial_learning_rate * (step / warmup_steps) ** power``; afterwards it
    defers to ``decay_schedule_fn(step)``.
    """

    def __init__(
        self,
        initial_learning_rate,
        decay_schedule_fn,
        warmup_steps,
        power=1.0,
        name=None,
    ):
        super(WarmUp, self).__init__()
        self.initial_learning_rate = initial_learning_rate
        self.warmup_steps = warmup_steps
        self.power = power
        self.decay_schedule_fn = decay_schedule_fn
        self.name = name

    def __call__(self, step):
        with tf.name_scope(self.name or "WarmUp") as name:
            # Implements polynomial warmup. i.e., if global_step < warmup_steps, the
            # learning rate will be `global_step/num_warmup_steps * init_lr`.
            global_step_float = tf.cast(step, tf.float32)
            warmup_steps_float = tf.cast(self.warmup_steps, tf.float32)
            warmup_percent_done = global_step_float / warmup_steps_float
            warmup_learning_rate = self.initial_learning_rate * tf.math.pow(
                warmup_percent_done, self.power
            )
            # tf.cond keeps the branch selection graph-compatible (step is a tensor).
            return tf.cond(
                global_step_float < warmup_steps_float,
                lambda: warmup_learning_rate,
                lambda: self.decay_schedule_fn(step),
                name=name,
            )

    def get_config(self):
        # Config for Keras serialization; decay_schedule_fn is stored as-is.
        return {
            "initial_learning_rate": self.initial_learning_rate,
            "decay_schedule_fn": self.decay_schedule_fn,
            "warmup_steps": self.warmup_steps,
            "power": self.power,
            "name": self.name,
        }


class AdamWeightDecay(tf.keras.optimizers.Adam):
    """Adam with decoupled L2 weight decay and clip_by_global_norm on gradients.

    Just adding the square of the weights to the loss function is *not* the
    correct way of using L2 regularization/weight decay with Adam, since that will
    interact with the m and v parameters in strange ways.

    Instead we want to decay the weights in a manner that doesn't interact with
    the m/v parameters. This is equivalent to adding the square of the weights to
    the loss with plain (non-momentum) SGD.
    """

    def __init__(
        self,
        learning_rate=0.001,
        beta_1=0.9,
        beta_2=0.999,
        epsilon=1e-7,
        amsgrad=False,
        weight_decay_rate=0.0,
        include_in_weight_decay=None,
        exclude_from_weight_decay=None,
        name="AdamWeightDecay",
        **kwargs
    ):
        super(AdamWeightDecay, self).__init__(
            learning_rate, beta_1, beta_2, epsilon, amsgrad, name, **kwargs
        )
        self.weight_decay_rate = weight_decay_rate
        # Regex lists selecting which variables (by name) are decayed;
        # include takes precedence over exclude — see _do_use_weight_decay.
        self._include_in_weight_decay = include_in_weight_decay
        self._exclude_from_weight_decay = exclude_from_weight_decay

    @classmethod
    def from_config(cls, config):
        """Creates an optimizer from its config with WarmUp custom object."""
        custom_objects = {"WarmUp": WarmUp}
        return super(AdamWeightDecay, cls).from_config(
            config, custom_objects=custom_objects
        )

    def _prepare_local(self, var_device, var_dtype, apply_state):
        super(AdamWeightDecay, self)._prepare_local(var_device, var_dtype, apply_state)
        # NOTE(review): stored under a single global key, not per
        # (device, dtype) pair like the other coefficients.
        apply_state["weight_decay_rate"] = tf.constant(
            self.weight_decay_rate, name="adam_weight_decay_rate"
        )

    def _decay_weights_op(self, var, learning_rate, apply_state):
        # Decoupled weight decay: var -= lr * wd * var, sequenced before the
        # Adam update via control_dependencies in _resource_apply_*.
        do_decay = self._do_use_weight_decay(var.name)
        if do_decay:
            return var.assign_sub(
                learning_rate * var * apply_state["weight_decay_rate"],
                use_locking=self._use_locking,
            )
        return tf.no_op()

    def apply_gradients(self, grads_and_vars, clip_norm=0.5, **kwargs):
        # Global-norm gradient clipping happens here, before the base update.
        grads, tvars = list(zip(*grads_and_vars))
        (grads, _) = tf.clip_by_global_norm(grads, clip_norm=clip_norm)
        return super(AdamWeightDecay, self).apply_gradients(zip(grads, tvars), **kwargs)

    def _get_lr(self, var_device, var_dtype, apply_state):
        """Retrieves the learning rate with the given state."""
        if apply_state is None:
            return self._decayed_lr_t[var_dtype], {}

        apply_state = apply_state or {}
        coefficients = apply_state.get((var_device, var_dtype))
        if coefficients is None:
            # Cache fallback coefficients so later variables reuse them.
            coefficients = self._fallback_apply_state(var_device, var_dtype)
            apply_state[(var_device, var_dtype)] = coefficients

        return coefficients["lr_t"], dict(apply_state=apply_state)

    def _resource_apply_dense(self, grad, var, apply_state=None):
        lr_t, kwargs = self._get_lr(var.device, var.dtype.base_dtype, apply_state)
        decay = self._decay_weights_op(var, lr_t, apply_state)
        with tf.control_dependencies([decay]):
            return super(AdamWeightDecay, self)._resource_apply_dense(
                grad, var, **kwargs
            )

    def _resource_apply_sparse(self, grad, var, indices, apply_state=None):
        lr_t, kwargs = self._get_lr(var.device, var.dtype.base_dtype, apply_state)
        decay = self._decay_weights_op(var, lr_t, apply_state)
        with tf.control_dependencies([decay]):
            return super(AdamWeightDecay, self)._resource_apply_sparse(
                grad, var, indices, **kwargs
            )

    def get_config(self):
        config = super(AdamWeightDecay, self).get_config()
        config.update(
            {"weight_decay_rate": self.weight_decay_rate,}
        )
        return config

    def _do_use_weight_decay(self, param_name):
        """Whether to use L2 weight decay for `param_name`."""
        if self.weight_decay_rate == 0:
            return False

        if self._include_in_weight_decay:
            for r in self._include_in_weight_decay:
                if re.search(r, param_name) is not None:
                    return True

        if self._exclude_from_weight_decay:
            for r in self._exclude_from_weight_decay:
                if re.search(r, param_name) is not None:
                    return False
        return True
diff --git a/TensorFlowTTS/tensorflow_tts/optimizers/gradient_accumulate.py b/TensorFlowTTS/tensorflow_tts/optimizers/gradient_accumulate.py
new file mode 100644
index 0000000000000000000000000000000000000000..4116d2aca619bc4c7bf3427bc718b6cca5d303f8
--- /dev/null
+++ b/TensorFlowTTS/tensorflow_tts/optimizers/gradient_accumulate.py
@@ -0,0 +1,88 @@
"""Gradient Accumulate for training TF2 custom training loop.
Copy from https://github.com/OpenNMT/OpenNMT-tf/blob/master/opennmt/optimizers/utils.py.
"""


import re

import tensorflow as tf


class GradientAccumulator(object):
    """Gradient accumulation utility.
    When used with a distribution strategy, the accumulator should be called in a
    replica context. Gradients will be accumulated locally on each replica and
    without synchronization. Users should then call ``.gradients``, scale the
    gradients if required, and pass the result to ``apply_gradients``.
    """

    # We use the ON_READ synchronization policy so that no synchronization is
    # performed on assignment. To get the value, we call .value() which returns the
    # value on the current replica without synchronization.

    def __init__(self):
        """Initializes the accumulator."""
        self._gradients = []  # per-variable accumulation buffers, created lazily
        self._accum_steps = None  # int64 step counter, created lazily

    @property
    def step(self):
        """Number of accumulated steps."""
        if self._accum_steps is None:
            # Created lazily so the variable is built inside the caller's
            # (possibly distributed) scope, not at accumulator construction.
            self._accum_steps = tf.Variable(
                tf.constant(0, dtype=tf.int64),
                trainable=False,
                synchronization=tf.VariableSynchronization.ON_READ,
                aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA,
            )

        return self._accum_steps.value()

    @property
    def gradients(self):
        """The accumulated gradients on the current replica."""
        if not self._gradients:
            raise ValueError(
                "The accumulator should be called first to initialize the gradients"
            )
        return list(
            gradient.value() if gradient is not None else gradient
            for gradient in self._gradients
        )

    def __call__(self, gradients):
        """Accumulates :obj:`gradients` on the current replica."""
        if not self._gradients:
            _ = self.step  # Create the step variable.
            # One zero-initialized buffer per gradient; None slots stay None.
            self._gradients.extend(
                [
                    tf.Variable(
                        tf.zeros_like(gradient),
                        trainable=False,
                        synchronization=tf.VariableSynchronization.ON_READ,
                    )
                    if gradient is not None
                    else gradient
                    for gradient in gradients
                ]
            )
        if len(gradients) != len(self._gradients):
            raise ValueError(
                "Expected %s gradients, but got %d"
                % (len(self._gradients), len(gradients))
            )

        # read_value=False avoids a useless read of the updated buffer.
        for accum_gradient, gradient in zip(self._gradients, gradients):
            if accum_gradient is not None and gradient is not None:
                accum_gradient.assign_add(gradient, read_value=False)

        self._accum_steps.assign_add(1)

    def reset(self):
        """Resets the accumulated gradients on the current replica."""
        if not self._gradients:
            return
        self._accum_steps.assign(0)
        for gradient in self._gradients:
            if gradient is not None:
                gradient.assign(tf.zeros_like(gradient), read_value=False)
diff --git a/TensorFlowTTS/tensorflow_tts/processor/__init__.py b/TensorFlowTTS/tensorflow_tts/processor/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..efda1d90560fe6567b9064476897a97128f41b09
--- /dev/null
+++ b/TensorFlowTTS/tensorflow_tts/processor/__init__.py
@@ -0,0 +1,10 @@
from tensorflow_tts.processor.base_processor import BaseProcessor

from tensorflow_tts.processor.ljspeech import LJSpeechProcessor
from tensorflow_tts.processor.baker import BakerProcessor
from tensorflow_tts.processor.kss import KSSProcessor
from tensorflow_tts.processor.libritts import LibriTTSProcessor
from tensorflow_tts.processor.thorsten import ThorstenProcessor
from tensorflow_tts.processor.ljspeechu import LJSpeechUltimateProcessor
from tensorflow_tts.processor.synpaflex import SynpaflexProcessor
from tensorflow_tts.processor.jsut import JSUTProcessor
diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/__init__.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index
0000000000000000000000000000000000000000..cef4693c1e25e57ccea9edec30067070eb14142b Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/__init__.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/baker.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/baker.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e31bc2a38d593bb75333fae429f21a1a40bc237 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/baker.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/base_processor.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/base_processor.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d69e783291cb30eb880c24724be6ba80319caa88 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/base_processor.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/jsut.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/jsut.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e68ae111fa1ff8a0b0e32e27e15b2741205a3034 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/jsut.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/kss.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/kss.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d45d783e6949cbc125105396ecb644345d4c6391 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/kss.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/libritts.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/libritts.cpython-311.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..be0202e6a87ce59b393e649d01ea3918211f61b3 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/libritts.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/ljspeech.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/ljspeech.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ba2d2693c07e57d0ffa26224ede170dae882ab40 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/ljspeech.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/ljspeechu.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/ljspeechu.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d9ccf56f5c1777f0e40f1b307b704f0240643a9a Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/ljspeechu.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/synpaflex.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/synpaflex.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e8b3bcaeb89ba66ce1b133e6199f212e0f4f0cb7 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/synpaflex.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/__pycache__/thorsten.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/thorsten.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..826b4cb1bcc23cfc6ac35cd2ab1ace4e1b505354 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/processor/__pycache__/thorsten.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/processor/baker.py b/TensorFlowTTS/tensorflow_tts/processor/baker.py new file mode 100644 index 0000000000000000000000000000000000000000..705e2ecfcc36c22a2b8971f6262b1a60a256dd03 --- /dev/null +++ 
b/TensorFlowTTS/tensorflow_tts/processor/baker.py @@ -0,0 +1,682 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Perform preprocessing and raw feature extraction for Baker dataset.""" + +import os +import re +from typing import Dict, List, Union, Tuple, Any + +import librosa +import numpy as np +import soundfile as sf +from dataclasses import dataclass, field +from pypinyin import Style +from pypinyin.contrib.neutral_tone import NeutralToneWith5Mixin +from pypinyin.converter import DefaultConverter +from pypinyin.core import Pinyin +from tensorflow_tts.processor import BaseProcessor +from tensorflow_tts.utils.utils import PROCESSOR_FILE_NAME + +_pad = ["pad"] +_eos = ["eos"] +_pause = ["sil", "#0", "#1", "#2", "#3"] + +_initials = [ + "^", + "b", + "c", + "ch", + "d", + "f", + "g", + "h", + "j", + "k", + "l", + "m", + "n", + "p", + "q", + "r", + "s", + "sh", + "t", + "x", + "z", + "zh", +] + +_tones = ["1", "2", "3", "4", "5"] + +_finals = [ + "a", + "ai", + "an", + "ang", + "ao", + "e", + "ei", + "en", + "eng", + "er", + "i", + "ia", + "ian", + "iang", + "iao", + "ie", + "ii", + "iii", + "in", + "ing", + "iong", + "iou", + "o", + "ong", + "ou", + "u", + "ua", + "uai", + "uan", + "uang", + "uei", + "uen", + "ueng", + "uo", + "v", + "van", + "ve", + "vn", +] + +BAKER_SYMBOLS = _pad + _pause + _initials + [i + j for i in _finals for j in _tones] + _eos + + +PINYIN_DICT = { + "a": ("^", "a"), + "ai": 
("^", "ai"), + "an": ("^", "an"), + "ang": ("^", "ang"), + "ao": ("^", "ao"), + "ba": ("b", "a"), + "bai": ("b", "ai"), + "ban": ("b", "an"), + "bang": ("b", "ang"), + "bao": ("b", "ao"), + "be": ("b", "e"), + "bei": ("b", "ei"), + "ben": ("b", "en"), + "beng": ("b", "eng"), + "bi": ("b", "i"), + "bian": ("b", "ian"), + "biao": ("b", "iao"), + "bie": ("b", "ie"), + "bin": ("b", "in"), + "bing": ("b", "ing"), + "bo": ("b", "o"), + "bu": ("b", "u"), + "ca": ("c", "a"), + "cai": ("c", "ai"), + "can": ("c", "an"), + "cang": ("c", "ang"), + "cao": ("c", "ao"), + "ce": ("c", "e"), + "cen": ("c", "en"), + "ceng": ("c", "eng"), + "cha": ("ch", "a"), + "chai": ("ch", "ai"), + "chan": ("ch", "an"), + "chang": ("ch", "ang"), + "chao": ("ch", "ao"), + "che": ("ch", "e"), + "chen": ("ch", "en"), + "cheng": ("ch", "eng"), + "chi": ("ch", "iii"), + "chong": ("ch", "ong"), + "chou": ("ch", "ou"), + "chu": ("ch", "u"), + "chua": ("ch", "ua"), + "chuai": ("ch", "uai"), + "chuan": ("ch", "uan"), + "chuang": ("ch", "uang"), + "chui": ("ch", "uei"), + "chun": ("ch", "uen"), + "chuo": ("ch", "uo"), + "ci": ("c", "ii"), + "cong": ("c", "ong"), + "cou": ("c", "ou"), + "cu": ("c", "u"), + "cuan": ("c", "uan"), + "cui": ("c", "uei"), + "cun": ("c", "uen"), + "cuo": ("c", "uo"), + "da": ("d", "a"), + "dai": ("d", "ai"), + "dan": ("d", "an"), + "dang": ("d", "ang"), + "dao": ("d", "ao"), + "de": ("d", "e"), + "dei": ("d", "ei"), + "den": ("d", "en"), + "deng": ("d", "eng"), + "di": ("d", "i"), + "dia": ("d", "ia"), + "dian": ("d", "ian"), + "diao": ("d", "iao"), + "die": ("d", "ie"), + "ding": ("d", "ing"), + "diu": ("d", "iou"), + "dong": ("d", "ong"), + "dou": ("d", "ou"), + "du": ("d", "u"), + "duan": ("d", "uan"), + "dui": ("d", "uei"), + "dun": ("d", "uen"), + "duo": ("d", "uo"), + "e": ("^", "e"), + "ei": ("^", "ei"), + "en": ("^", "en"), + "ng": ("^", "en"), + "eng": ("^", "eng"), + "er": ("^", "er"), + "fa": ("f", "a"), + "fan": ("f", "an"), + "fang": ("f", "ang"), + "fei": ("f", 
"ei"), + "fen": ("f", "en"), + "feng": ("f", "eng"), + "fo": ("f", "o"), + "fou": ("f", "ou"), + "fu": ("f", "u"), + "ga": ("g", "a"), + "gai": ("g", "ai"), + "gan": ("g", "an"), + "gang": ("g", "ang"), + "gao": ("g", "ao"), + "ge": ("g", "e"), + "gei": ("g", "ei"), + "gen": ("g", "en"), + "geng": ("g", "eng"), + "gong": ("g", "ong"), + "gou": ("g", "ou"), + "gu": ("g", "u"), + "gua": ("g", "ua"), + "guai": ("g", "uai"), + "guan": ("g", "uan"), + "guang": ("g", "uang"), + "gui": ("g", "uei"), + "gun": ("g", "uen"), + "guo": ("g", "uo"), + "ha": ("h", "a"), + "hai": ("h", "ai"), + "han": ("h", "an"), + "hang": ("h", "ang"), + "hao": ("h", "ao"), + "he": ("h", "e"), + "hei": ("h", "ei"), + "hen": ("h", "en"), + "heng": ("h", "eng"), + "hong": ("h", "ong"), + "hou": ("h", "ou"), + "hu": ("h", "u"), + "hua": ("h", "ua"), + "huai": ("h", "uai"), + "huan": ("h", "uan"), + "huang": ("h", "uang"), + "hui": ("h", "uei"), + "hun": ("h", "uen"), + "huo": ("h", "uo"), + "ji": ("j", "i"), + "jia": ("j", "ia"), + "jian": ("j", "ian"), + "jiang": ("j", "iang"), + "jiao": ("j", "iao"), + "jie": ("j", "ie"), + "jin": ("j", "in"), + "jing": ("j", "ing"), + "jiong": ("j", "iong"), + "jiu": ("j", "iou"), + "ju": ("j", "v"), + "juan": ("j", "van"), + "jue": ("j", "ve"), + "jun": ("j", "vn"), + "ka": ("k", "a"), + "kai": ("k", "ai"), + "kan": ("k", "an"), + "kang": ("k", "ang"), + "kao": ("k", "ao"), + "ke": ("k", "e"), + "kei": ("k", "ei"), + "ken": ("k", "en"), + "keng": ("k", "eng"), + "kong": ("k", "ong"), + "kou": ("k", "ou"), + "ku": ("k", "u"), + "kua": ("k", "ua"), + "kuai": ("k", "uai"), + "kuan": ("k", "uan"), + "kuang": ("k", "uang"), + "kui": ("k", "uei"), + "kun": ("k", "uen"), + "kuo": ("k", "uo"), + "la": ("l", "a"), + "lai": ("l", "ai"), + "lan": ("l", "an"), + "lang": ("l", "ang"), + "lao": ("l", "ao"), + "le": ("l", "e"), + "lei": ("l", "ei"), + "leng": ("l", "eng"), + "li": ("l", "i"), + "lia": ("l", "ia"), + "lian": ("l", "ian"), + "liang": ("l", "iang"), + "liao": 
("l", "iao"), + "lie": ("l", "ie"), + "lin": ("l", "in"), + "ling": ("l", "ing"), + "liu": ("l", "iou"), + "lo": ("l", "o"), + "long": ("l", "ong"), + "lou": ("l", "ou"), + "lu": ("l", "u"), + "lv": ("l", "v"), + "luan": ("l", "uan"), + "lve": ("l", "ve"), + "lue": ("l", "ve"), + "lun": ("l", "uen"), + "luo": ("l", "uo"), + "ma": ("m", "a"), + "mai": ("m", "ai"), + "man": ("m", "an"), + "mang": ("m", "ang"), + "mao": ("m", "ao"), + "me": ("m", "e"), + "mei": ("m", "ei"), + "men": ("m", "en"), + "meng": ("m", "eng"), + "mi": ("m", "i"), + "mian": ("m", "ian"), + "miao": ("m", "iao"), + "mie": ("m", "ie"), + "min": ("m", "in"), + "ming": ("m", "ing"), + "miu": ("m", "iou"), + "mo": ("m", "o"), + "mou": ("m", "ou"), + "mu": ("m", "u"), + "na": ("n", "a"), + "nai": ("n", "ai"), + "nan": ("n", "an"), + "nang": ("n", "ang"), + "nao": ("n", "ao"), + "ne": ("n", "e"), + "nei": ("n", "ei"), + "nen": ("n", "en"), + "neng": ("n", "eng"), + "ni": ("n", "i"), + "nia": ("n", "ia"), + "nian": ("n", "ian"), + "niang": ("n", "iang"), + "niao": ("n", "iao"), + "nie": ("n", "ie"), + "nin": ("n", "in"), + "ning": ("n", "ing"), + "niu": ("n", "iou"), + "nong": ("n", "ong"), + "nou": ("n", "ou"), + "nu": ("n", "u"), + "nv": ("n", "v"), + "nuan": ("n", "uan"), + "nve": ("n", "ve"), + "nue": ("n", "ve"), + "nuo": ("n", "uo"), + "o": ("^", "o"), + "ou": ("^", "ou"), + "pa": ("p", "a"), + "pai": ("p", "ai"), + "pan": ("p", "an"), + "pang": ("p", "ang"), + "pao": ("p", "ao"), + "pe": ("p", "e"), + "pei": ("p", "ei"), + "pen": ("p", "en"), + "peng": ("p", "eng"), + "pi": ("p", "i"), + "pian": ("p", "ian"), + "piao": ("p", "iao"), + "pie": ("p", "ie"), + "pin": ("p", "in"), + "ping": ("p", "ing"), + "po": ("p", "o"), + "pou": ("p", "ou"), + "pu": ("p", "u"), + "qi": ("q", "i"), + "qia": ("q", "ia"), + "qian": ("q", "ian"), + "qiang": ("q", "iang"), + "qiao": ("q", "iao"), + "qie": ("q", "ie"), + "qin": ("q", "in"), + "qing": ("q", "ing"), + "qiong": ("q", "iong"), + "qiu": ("q", "iou"), + 
"qu": ("q", "v"), + "quan": ("q", "van"), + "que": ("q", "ve"), + "qun": ("q", "vn"), + "ran": ("r", "an"), + "rang": ("r", "ang"), + "rao": ("r", "ao"), + "re": ("r", "e"), + "ren": ("r", "en"), + "reng": ("r", "eng"), + "ri": ("r", "iii"), + "rong": ("r", "ong"), + "rou": ("r", "ou"), + "ru": ("r", "u"), + "rua": ("r", "ua"), + "ruan": ("r", "uan"), + "rui": ("r", "uei"), + "run": ("r", "uen"), + "ruo": ("r", "uo"), + "sa": ("s", "a"), + "sai": ("s", "ai"), + "san": ("s", "an"), + "sang": ("s", "ang"), + "sao": ("s", "ao"), + "se": ("s", "e"), + "sen": ("s", "en"), + "seng": ("s", "eng"), + "sha": ("sh", "a"), + "shai": ("sh", "ai"), + "shan": ("sh", "an"), + "shang": ("sh", "ang"), + "shao": ("sh", "ao"), + "she": ("sh", "e"), + "shei": ("sh", "ei"), + "shen": ("sh", "en"), + "sheng": ("sh", "eng"), + "shi": ("sh", "iii"), + "shou": ("sh", "ou"), + "shu": ("sh", "u"), + "shua": ("sh", "ua"), + "shuai": ("sh", "uai"), + "shuan": ("sh", "uan"), + "shuang": ("sh", "uang"), + "shui": ("sh", "uei"), + "shun": ("sh", "uen"), + "shuo": ("sh", "uo"), + "si": ("s", "ii"), + "song": ("s", "ong"), + "sou": ("s", "ou"), + "su": ("s", "u"), + "suan": ("s", "uan"), + "sui": ("s", "uei"), + "sun": ("s", "uen"), + "suo": ("s", "uo"), + "ta": ("t", "a"), + "tai": ("t", "ai"), + "tan": ("t", "an"), + "tang": ("t", "ang"), + "tao": ("t", "ao"), + "te": ("t", "e"), + "tei": ("t", "ei"), + "teng": ("t", "eng"), + "ti": ("t", "i"), + "tian": ("t", "ian"), + "tiao": ("t", "iao"), + "tie": ("t", "ie"), + "ting": ("t", "ing"), + "tong": ("t", "ong"), + "tou": ("t", "ou"), + "tu": ("t", "u"), + "tuan": ("t", "uan"), + "tui": ("t", "uei"), + "tun": ("t", "uen"), + "tuo": ("t", "uo"), + "wa": ("^", "ua"), + "wai": ("^", "uai"), + "wan": ("^", "uan"), + "wang": ("^", "uang"), + "wei": ("^", "uei"), + "wen": ("^", "uen"), + "weng": ("^", "ueng"), + "wo": ("^", "uo"), + "wu": ("^", "u"), + "xi": ("x", "i"), + "xia": ("x", "ia"), + "xian": ("x", "ian"), + "xiang": ("x", "iang"), + "xiao": 
("x", "iao"), + "xie": ("x", "ie"), + "xin": ("x", "in"), + "xing": ("x", "ing"), + "xiong": ("x", "iong"), + "xiu": ("x", "iou"), + "xu": ("x", "v"), + "xuan": ("x", "van"), + "xue": ("x", "ve"), + "xun": ("x", "vn"), + "ya": ("^", "ia"), + "yan": ("^", "ian"), + "yang": ("^", "iang"), + "yao": ("^", "iao"), + "ye": ("^", "ie"), + "yi": ("^", "i"), + "yin": ("^", "in"), + "ying": ("^", "ing"), + "yo": ("^", "iou"), + "yong": ("^", "iong"), + "you": ("^", "iou"), + "yu": ("^", "v"), + "yuan": ("^", "van"), + "yue": ("^", "ve"), + "yun": ("^", "vn"), + "za": ("z", "a"), + "zai": ("z", "ai"), + "zan": ("z", "an"), + "zang": ("z", "ang"), + "zao": ("z", "ao"), + "ze": ("z", "e"), + "zei": ("z", "ei"), + "zen": ("z", "en"), + "zeng": ("z", "eng"), + "zha": ("zh", "a"), + "zhai": ("zh", "ai"), + "zhan": ("zh", "an"), + "zhang": ("zh", "ang"), + "zhao": ("zh", "ao"), + "zhe": ("zh", "e"), + "zhei": ("zh", "ei"), + "zhen": ("zh", "en"), + "zheng": ("zh", "eng"), + "zhi": ("zh", "iii"), + "zhong": ("zh", "ong"), + "zhou": ("zh", "ou"), + "zhu": ("zh", "u"), + "zhua": ("zh", "ua"), + "zhuai": ("zh", "uai"), + "zhuan": ("zh", "uan"), + "zhuang": ("zh", "uang"), + "zhui": ("zh", "uei"), + "zhun": ("zh", "uen"), + "zhuo": ("zh", "uo"), + "zi": ("z", "ii"), + "zong": ("z", "ong"), + "zou": ("z", "ou"), + "zu": ("z", "u"), + "zuan": ("z", "uan"), + "zui": ("z", "uei"), + "zun": ("z", "uen"), + "zuo": ("z", "uo"), +} + + +zh_pattern = re.compile("[\u4e00-\u9fa5]") + + +def is_zh(word): + global zh_pattern + match = zh_pattern.search(word) + return match is not None + + +class MyConverter(NeutralToneWith5Mixin, DefaultConverter): + pass + + +@dataclass +class BakerProcessor(BaseProcessor): + + pinyin_dict: Dict[str, Tuple[str, str]] = field(default_factory=lambda: PINYIN_DICT) + cleaner_names: str = None + target_rate: int = 24000 + speaker_name: str = "baker" + + def __post_init__(self): + super().__post_init__() + self.pinyin_parser = self.get_pinyin_parser() + + def 
setup_eos_token(self):
        """Return the EOS symbol ("eos") appended to every id sequence."""
        return _eos[0]

    def save_pretrained(self, saved_path):
        """Persist the processor mapper (including the pinyin dict) to `saved_path`."""
        os.makedirs(saved_path, exist_ok=True)
        self._save_mapper(
            os.path.join(saved_path, PROCESSOR_FILE_NAME),
            {"pinyin_dict": self.pinyin_dict},
        )

    def create_items(self):
        """Build dataset items from Baker's prosody label file.

        The label file alternates lines: "<utt_id> <hanzi with #N marks>"
        followed by the space-separated pinyin for that utterance. Each item
        is [phoneme_string, wav_path, utt_id, speaker_name].
        """
        items = []
        if self.data_dir:
            with open(
                os.path.join(self.data_dir, "ProsodyLabeling/000001-010000.txt"),
                encoding="utf-8",
            ) as ttf:
                lines = ttf.readlines()
                for idx in range(0, len(lines), 2):
                    utt_id, chn_char = lines[idx].strip().split()
                    pinyin = lines[idx + 1].strip().split()
                    # NOTE(review): skips utterances containing the literal
                    # pinyin "IY1" or a "B" character in the text — presumably
                    # mislabeled entries; confirm against the dataset.
                    if "IY1" in pinyin or "B" in chn_char:
                        print(f"Skip this: {utt_id} {chn_char} {pinyin}")
                        continue
                    phonemes = self.get_phoneme_from_char_and_pinyin(chn_char, pinyin)
                    wav_path = os.path.join(self.data_dir, "Wave", "%s.wav" % utt_id)
                    items.append(
                        [" ".join(phonemes), wav_path, utt_id, self.speaker_name]
                    )
        self.items = items

    def get_phoneme_from_char_and_pinyin(self, chn_char, pinyin):
        """Align hanzi (with #N prosody marks) and pinyin into a phoneme list.

        Walks characters (index i) and pinyin syllables (index j) in lockstep,
        splitting each syllable into initial + final-with-tone via
        self.pinyin_dict, inserting "#0" word-boundary marks between syllables
        and "sil" at both ends.
        """
        # we do not need #4, use sil to replace it
        chn_char = chn_char.replace("#4", "")
        char_len = len(chn_char)
        i, j = 0, 0
        result = ["sil"]
        while i < char_len:
            cur_char = chn_char[i]
            if is_zh(cur_char):
                if pinyin[j][:-1] not in self.pinyin_dict:
                    # Erhua syllable (e.g. "huar4"): strip the trailing "r",
                    # emit an extra "er5" and consume the "儿" character too.
                    assert chn_char[i + 1] == "儿"
                    assert pinyin[j][-2] == "r"
                    tone = pinyin[j][-1]
                    a = pinyin[j][:-2]
                    a1, a2 = self.pinyin_dict[a]
                    result += [a1, a2 + tone, "er5"]
                    if i + 2 < char_len and chn_char[i + 2] != "#":
                        result.append("#0")

                    i += 2
                    j += 1
                else:
                    tone = pinyin[j][-1]
                    a = pinyin[j][:-1]
                    a1, a2 = self.pinyin_dict[a]
                    result += [a1, a2 + tone]

                    if i + 1 < char_len and chn_char[i + 1] != "#":
                        result.append("#0")

                    i += 1
                    j += 1
            elif cur_char == "#":
                # Prosody mark "#1"/"#2"/"#3": copy the two characters as one symbol.
                result.append(chn_char[i : i + 2])
                i += 2
            else:
                # ignore the unknown char and punctuation
                # result.append(chn_char[i])
                i += 1
        if result[-1] == "#0":
            result = result[:-1]
        result.append("sil")
        # Every pinyin syllable must have been consumed exactly once.
        assert j == len(pinyin)
        return result

    def get_one_sample(self, item):
        """Load audio and convert text for one item; returns a sample dict or None."""
        text, wav_file, utt_id, speaker_name = item

        # normalize audio signal to be [-1, 1], soundfile already norm.
        audio, rate = sf.read(wav_file)
        audio = audio.astype(np.float32)
        if rate != self.target_rate:
            assert rate > self.target_rate  # only downsampling is expected here
            audio = librosa.resample(audio, rate, self.target_rate)

        # convert text to ids
        try:
            text_ids = np.asarray(self.text_to_sequence(text), np.int32)
        except Exception as e:
            # Unmappable symbols make the whole utterance unusable.
            print(e, utt_id, text)
            return None

        sample = {
            "raw_text": text,
            "text_ids": text_ids,
            "audio": audio,
            "utt_id": str(int(utt_id)),
            "speaker_name": speaker_name,
            "rate": self.target_rate,
        }

        return sample

    def get_pinyin_parser(self):
        """Build a pypinyin parser using the neutral-tone-as-5 converter."""
        my_pinyin = Pinyin(MyConverter())
        pinyin = my_pinyin.pinyin
        return pinyin

    def text_to_sequence(self, text, inference=False):
        """Convert text (phoneme string, or raw hanzi if inference=True) to symbol ids."""
        if inference:
            # Raw Chinese text: derive TONE3-style pinyin first, then phonemes.
            pinyin = self.pinyin_parser(text, style=Style.TONE3, errors="ignore")
            new_pinyin = []
            for x in pinyin:
                x = "".join(x)
                if "#" not in x:
                    new_pinyin.append(x)
            phonemes = self.get_phoneme_from_char_and_pinyin(text, new_pinyin)
            text = " ".join(phonemes)
            print(f"phoneme seq: {text}")

        sequence = []
        for symbol in text.split():
            idx = self.symbol_to_id[symbol]
            sequence.append(idx)

        # add eos tokens
        sequence += [self.eos_id]
        return sequence
diff --git a/TensorFlowTTS/tensorflow_tts/processor/base_processor.py b/TensorFlowTTS/tensorflow_tts/processor/base_processor.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad3a254491bf32537b2efc002960167086babe4d
--- /dev/null
+++ b/TensorFlowTTS/tensorflow_tts/processor/base_processor.py
@@ -0,0 +1,231 @@
# -*- coding: utf-8 -*-
# Copyright 2020 TensorFlowTTS Team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# -*- coding: utf-8 -*-
"""Base Processor for all processor."""

import abc
import json
import os
from typing import Dict, List, Union

from dataclasses import dataclass, field


class DataProcessorError(Exception):
    """Raised for invalid processor configuration (e.g. no symbols, no mapper)."""

    pass


@dataclass
class BaseProcessor(abc.ABC):
    """Common skeleton for dataset processors.

    Subclasses provide dataset-specific item creation and text-to-sequence
    conversion; this base class manages the symbol/speaker mappers and their
    (de)serialization.
    """

    data_dir: str
    symbols: List[str] = field(default_factory=list)
    speakers_map: Dict[str, int] = field(default_factory=dict)
    train_f_name: str = "train.txt"
    delimiter: str = "|"
    # Column positions of file, text, speaker_name after splitting a line.
    # NOTE: deliberately left unannotated so it stays a shared class attribute
    # (an annotated dict default would be rejected as a mutable field default).
    positions = {
        "file": 0,
        "text": 1,
        "speaker_name": 2,
    }
    f_extension: str = ".wav"
    saved_mapper_path: str = None
    loaded_mapper_path: str = None
    # extras
    items: List[List[str]] = field(default_factory=list)  # text, wav_path, speaker_name
    symbol_to_id: Dict[str, int] = field(default_factory=dict)
    id_to_symbol: Dict[int, str] = field(default_factory=dict)

    def __post_init__(self):
        if self.loaded_mapper_path is not None:
            self._load_mapper(loaded_path=self.loaded_mapper_path)
            eos = self.setup_eos_token()
            if eos:
                # No-op if the eos token is already present in the symbols list.
                self.add_symbol(eos)
                self.eos_id = self.symbol_to_id[eos]
            return

        if len(self.symbols) < 1:
            raise DataProcessorError("Symbols list is empty but mapper isn't loaded")

        self.create_items()
        self.create_speaker_map()
        self.reverse_speaker = {v: k for k, v in self.speakers_map.items()}
        self.create_symbols()
        if self.saved_mapper_path is not None:
            self._save_mapper(saved_path=self.saved_mapper_path)

        # Processor name, useful for AutoProcessor.
        self._processor_name = type(self).__name__

        eos = self.setup_eos_token()
        if eos:
            self.add_symbol(eos)
            self.eos_id = self.symbol_to_id[eos]

    def __getattr__(self, name: str) -> Union[str, int]:
        # Fallback lookup, only called for missing attributes:
        # "<symbol>_id" -> id of <symbol>; otherwise treat the attribute name
        # itself as a symbol and return its id.
        if "_id" in name:  # map symbol to id
            return self.symbol_to_id[name.replace("_id", "")]
        return self.symbol_to_id[name]  # map symbol to value

    def create_speaker_map(self):
        """
        Create speaker map for dataset.
        """
        sp_id = 0
        for i in self.items:
            speaker_name = i[-1]
            if speaker_name not in self.speakers_map:
                self.speakers_map[speaker_name] = sp_id
                sp_id += 1

    def get_speaker_id(self, name: str) -> int:
        return self.speakers_map[name]

    def get_speaker_name(self, speaker_id: int) -> str:
        # speakers_map maps name -> id, so invert it for the id -> name lookup.
        # (The original indexed speakers_map with the id, which is backwards.)
        return {v: k for k, v in self.speakers_map.items()}[speaker_id]

    def create_symbols(self):
        self.symbol_to_id = {s: i for i, s in enumerate(self.symbols)}
        self.id_to_symbol = {i: s for i, s in enumerate(self.symbols)}

    def create_items(self):
        """
        Method used to create items from training file
        items struct example => text, wav_file_path, speaker_name.
        Note that the speaker_name should be a last.
        """
        with open(
            os.path.join(self.data_dir, self.train_f_name), mode="r", encoding="utf-8"
        ) as f:
            for line in f:
                parts = line.strip().split(self.delimiter)
                wav_path = os.path.join(self.data_dir, parts[self.positions["file"]])
                # Append the audio extension only when it is missing.
                if not wav_path.endswith(self.f_extension):
                    wav_path += self.f_extension
                text = parts[self.positions["text"]]
                speaker_name = parts[self.positions["speaker_name"]]
                self.items.append([text, wav_path, speaker_name])

    def add_symbol(self, symbol: Union[str, list]):
        """Add a symbol (or list of symbols) to the mapper; duplicates are ignored."""
        if isinstance(symbol, str):
            if symbol in self.symbol_to_id:
                return
            self.symbols.append(symbol)
            symbol_id = len(self.symbol_to_id)
            self.symbol_to_id[symbol] = symbol_id
            self.id_to_symbol[symbol_id] = symbol

        elif isinstance(symbol, list):
            for i in symbol:
                self.add_symbol(i)
        else:
            raise ValueError("A new_symbols must be a string or list of string.")

    @abc.abstractmethod
    def get_one_sample(self, item):
        """Get one sample from dataset items.
        Args:
            item: one item in Dataset items.
                Dataset items may include (raw_text, speaker_id, wav_path, ...)

        Returns:
            sample (dict): sample dictionary return all feature used for preprocessing later.
        """
        sample = {
            "raw_text": None,
            "text_ids": None,
            "audio": None,
            "utt_id": None,
            "speaker_name": None,
            "rate": None,
        }
        return sample

    @abc.abstractmethod
    def text_to_sequence(self, text: str):
        return []

    @abc.abstractmethod
    def setup_eos_token(self):
        """Return eos symbol of type string."""
        return "eos"

    def convert_symbols_to_ids(self, symbols: Union[str, list]):
        """Map a symbol or a list of symbols to their integer ids."""
        sequence = []
        if isinstance(symbols, str):
            # Fixed: the original referenced a nonexistent `_symbol_to_id`
            # attribute, which fell through to __getattr__ and failed.
            sequence.append(self.symbol_to_id[symbols])
            return sequence
        elif isinstance(symbols, list):
            for s in symbols:
                if isinstance(s, str):
                    sequence.append(self.symbol_to_id[s])
                else:
                    raise ValueError("All elements of symbols must be a string.")
        else:
            raise ValueError("A symbols must be a string or list of string.")

        return sequence

    def _load_mapper(self, loaded_path: str = None):
        """
        Load all needed mappers from file.
        """
        loaded_path = (
            os.path.join(self.data_dir, "mapper.json")
            if loaded_path is None
            else loaded_path
        )
        with open(loaded_path, "r") as f:
            data = json.load(f)
        self.speakers_map = data["speakers_map"]
        self.symbol_to_id = data["symbol_to_id"]
        self.id_to_symbol = {int(k): v for k, v in data["id_to_symbol"].items()}
        self._processor_name = data["processor_name"]

        # Restore any extra attributes the subclass saved alongside the mapper.
        all_data_keys = data.keys()
        for key in all_data_keys:
            if key not in ["speakers_map", "symbol_to_id", "id_to_symbol"]:
                setattr(self, key, data[key])

    def _save_mapper(self, saved_path: str = None, extra_attrs_to_save: dict = None):
        """
        Save all needed mappers to file.
        """
        saved_path = (
            os.path.join(self.data_dir, "mapper.json")
            if saved_path is None
            else saved_path
        )
        with open(saved_path, "w") as f:
            full_mapper = {
                "symbol_to_id": self.symbol_to_id,
                "id_to_symbol": self.id_to_symbol,
                "speakers_map": self.speakers_map,
                "processor_name": self._processor_name,
            }
            if extra_attrs_to_save:
                full_mapper = {**full_mapper, **extra_attrs_to_save}
            json.dump(full_mapper, f)

    @abc.abstractmethod
    def save_pretrained(self, saved_path):
        """Save mappers to file"""
        pass
" +# _special = "-" +# _letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + +# Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as uppercase letters): +# _arpabet = ["@" + s for s in valid_symbols] + +# Export all symbols: +JSUT_SYMBOLS = ( + [_pad] + [_sil] + valid_symbols + [_eos] +) + +# Regular expression matching text enclosed in curly braces: +_curly_re = re.compile(r"(.*?)\{(.+?)\}(.*)") + + +@dataclass +class JSUTProcessor(BaseProcessor): + """JSUT processor.""" + cleaner_names: str = None + speaker_name: str = "jsut" + train_f_name: str = "text_kana/basic5000.yaml" + + def create_items(self): + items = [] + if self.data_dir: + with open( + os.path.join(self.data_dir, self.train_f_name), encoding="utf-8" + ) as f: + data_json = yaml.load(f, Loader=yaml.FullLoader) + + for k, v in data_json.items(): + utt_id = k + phones = v['phone_level3'] + phones = phones.split("-") + phones = [_sil] + phones + [_sil] + wav_path = os.path.join(self.data_dir, "wav", f"{utt_id}.wav") + items.append( + [" ".join(phones), wav_path, utt_id, self.speaker_name] + ) + self.items = items + + def setup_eos_token(self): + return _eos + + def save_pretrained(self, saved_path): + os.makedirs(saved_path, exist_ok=True) + self._save_mapper(os.path.join(saved_path, PROCESSOR_FILE_NAME), {}) + + def get_one_sample(self, item): + text, wav_path, utt_id, speaker_name = item + + # normalize audio signal to be [-1, 1], soundfile already norm. 
+ audio, rate = sf.read(wav_path) + audio = audio.astype(np.float32) + + # if rate != self.target_rate: + # assert rate > self.target_rate + # audio = librosa.resample(audio, rate, self.target_rate) + + # convert text to ids + text_ids = np.asarray(self.text_to_sequence(text), np.int32) + + sample = { + "raw_text": text, + "text_ids": text_ids, + "audio": audio, + "utt_id": utt_id, + "speaker_name": speaker_name, + "rate": rate, + } + + return sample + + def text_to_sequence(self, text, inference=False): + sequence = [] + # Check for curly braces and treat their contents as ARPAbet: + if inference: + text = pyopenjtalk.g2p(text) + text = text.replace("I", "i") + text = text.replace("U", "u") + print(f"phoneme seq: {text}") + + for symbol in text.split(): + idx = self.symbol_to_id[symbol] + sequence.append(idx) + + # add eos tokens + sequence += [self.eos_id] + return sequence diff --git a/TensorFlowTTS/tensorflow_tts/processor/kss.py b/TensorFlowTTS/tensorflow_tts/processor/kss.py new file mode 100644 index 0000000000000000000000000000000000000000..01fd4833c1b48ec29d72cb1d11253f155e146fee --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/kss.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Perform preprocessing and raw feature extraction for KSS dataset.""" + +import os +import re + +import numpy as np +import soundfile as sf +from dataclasses import dataclass +from tensorflow_tts.processor import BaseProcessor +from tensorflow_tts.utils import cleaners +from tensorflow_tts.utils.korean import symbols as KSS_SYMBOLS +from tensorflow_tts.utils.utils import PROCESSOR_FILE_NAME + +# Regular expression matching text enclosed in curly braces: +_curly_re = re.compile(r"(.*?)\{(.+?)\}(.*)") + + +@dataclass +class KSSProcessor(BaseProcessor): + """KSS processor.""" + + cleaner_names: str = "korean_cleaners" + positions = { + "wave_file": 0, + "text_norm": 2, + } + train_f_name: str = "transcript.v.1.4.txt" + + def create_items(self): + if self.data_dir: + with open( + os.path.join(self.data_dir, self.train_f_name), encoding="utf-8" + ) as f: + self.items = [self.split_line(self.data_dir, line, "|") for line in f] + + def split_line(self, data_dir, line, split): + parts = line.strip().split(split) + wave_file = parts[self.positions["wave_file"]] + text_norm = parts[self.positions["text_norm"]] + wav_path = os.path.join(data_dir, "kss", wave_file) + speaker_name = "kss" + return text_norm, wav_path, speaker_name + + def setup_eos_token(self): + return "eos" + + def save_pretrained(self, saved_path): + os.makedirs(saved_path, exist_ok=True) + self._save_mapper(os.path.join(saved_path, PROCESSOR_FILE_NAME), {}) + + def get_one_sample(self, item): + text, wav_path, speaker_name = item + + # normalize audio signal to be [-1, 1], soundfile already norm. 
+ audio, rate = sf.read(wav_path) + audio = audio.astype(np.float32) + + # convert text to ids + text_ids = np.asarray(self.text_to_sequence(text), np.int32) + + sample = { + "raw_text": text, + "text_ids": text_ids, + "audio": audio, + "utt_id": os.path.split(wav_path)[-1].split(".")[0], + "speaker_name": speaker_name, + "rate": rate, + } + + return sample + + def text_to_sequence(self, text): + + sequence = [] + # Check for curly braces and treat their contents as ARPAbet: + while len(text): + m = _curly_re.match(text) + if not m: + sequence += self._symbols_to_sequence( + self._clean_text(text, [self.cleaner_names]) + ) + break + sequence += self._symbols_to_sequence( + self._clean_text(m.group(1), [self.cleaner_names]) + ) + sequence += self._arpabet_to_sequence(m.group(2)) + text = m.group(3) + + # add eos tokens + sequence += [self.eos_id] + return sequence + + def _clean_text(self, text, cleaner_names): + for name in cleaner_names: + cleaner = getattr(cleaners, name) + if not cleaner: + raise Exception("Unknown cleaner: %s" % name) + text = cleaner(text) + return text + + def _symbols_to_sequence(self, symbols): + return [self.symbol_to_id[s] for s in symbols if self._should_keep_symbol(s)] + + def _arpabet_to_sequence(self, text): + return self._symbols_to_sequence(["@" + s for s in text.split()]) + + def _should_keep_symbol(self, s): + return s in self.symbol_to_id and s != "_" and s != "~" diff --git a/TensorFlowTTS/tensorflow_tts/processor/libritts.py b/TensorFlowTTS/tensorflow_tts/processor/libritts.py new file mode 100644 index 0000000000000000000000000000000000000000..27ed8b0a332118a5e358a1d126f0c01eb28fdeff --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/libritts.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
"""Perform preprocessing and raw feature extraction for LibriTTS dataset."""

import os
import re

import numpy as np
import soundfile as sf
from dataclasses import dataclass

from g2p_en import g2p as grapheme_to_phonem

from tensorflow_tts.processor.base_processor import BaseProcessor
from tensorflow_tts.utils.utils import PROCESSOR_FILE_NAME

g2p = grapheme_to_phonem.G2p()

# Copy the inventory so appending SIL/END does not mutate the g2p library's
# own phoneme list (the original aliased it and modified it in place).
valid_symbols = list(g2p.phonemes)
valid_symbols.append("SIL")
valid_symbols.append("END")

_punctuation = "!'(),.:;? "
# Prepend "@" to ARPAbet symbols to ensure uniqueness.
_arpabet = ["@" + s for s in valid_symbols]

LIBRITTS_SYMBOLS = _arpabet + list(_punctuation)


@dataclass
class LibriTTSProcessor(BaseProcessor):
    """LibriTTS processor.

    In "train" mode transcripts are expected to be phonemized already;
    otherwise raw text is converted with g2p_en.
    """

    mode: str = "train"
    train_f_name: str = "train.txt"
    # Column positions of file, text, speaker_name after splitting a line.
    positions = {
        "file": 0,
        "text": 1,
        "speaker_name": 2,
    }
    f_extension: str = ".wav"
    cleaner_names: str = None

    def create_items(self):
        """Create items (text, wav_path, speaker_name) from the training file."""
        with open(
            os.path.join(self.data_dir, self.train_f_name), mode="r", encoding="utf-8"
        ) as f:
            for line in f:
                parts = line.strip().split(self.delimiter)
                wav_path = os.path.join(self.data_dir, parts[self.positions["file"]])
                # Append the audio extension only when it is missing.
                if not wav_path.endswith(self.f_extension):
                    wav_path += self.f_extension
                text = parts[self.positions["text"]]
                speaker_name = parts[self.positions["speaker_name"]]
                self.items.append([text, wav_path, speaker_name])

    def get_one_sample(self, item):
        """Load one (text, wav_path, speaker_name) item as a sample dict."""
        text, wav_path, speaker_name = item
        audio, rate = sf.read(wav_path, dtype="float32")

        text_ids = np.asarray(self.text_to_sequence(text), np.int32)

        sample = {
            "raw_text": text,
            "text_ids": text_ids,
            "audio": audio,
            # os.path.basename is portable; the original split on "/" only,
            # which breaks on Windows paths.
            "utt_id": os.path.basename(wav_path).split(".")[0],
            "speaker_name": speaker_name,
            "rate": rate,
        }

        return sample

    def setup_eos_token(self):
        return None  # because we do not use this

    def save_pretrained(self, saved_path):
        """Write the mapper file into saved_path."""
        os.makedirs(saved_path, exist_ok=True)
        self._save_mapper(os.path.join(saved_path, PROCESSOR_FILE_NAME), {})

    def text_to_sequence(self, text):
        """Convert text (phonemes in train mode, raw text otherwise) to ids."""
        if (
            self.mode == "train"
        ):  # in train mode text should be already transformed to phonemes
            return self.symbols_to_ids(self.clean_g2p(text.split(" ")))
        else:
            return self.inference_text_to_seq(text)

    def inference_text_to_seq(self, text: str):
        return self.symbols_to_ids(self.text_to_ph(text))

    def symbols_to_ids(self, symbols_list: list):
        return [self.symbol_to_id[s] for s in symbols_list]

    def text_to_ph(self, text: str):
        return self.clean_g2p(g2p(text))

    def clean_g2p(self, g2p_text: list):
        """Prefix phonemes with "@"; replace a trailing space/SIL with @END."""
        data = []
        for i, txt in enumerate(g2p_text):
            if i == len(g2p_text) - 1:
                if txt != " " and txt != "SIL":
                    data.append("@" + txt)
                else:
                    data.append(
                        "@END"
                    )  # TODO try learning without end token and compare results
                break
            if txt != " ":
                data.append("@" + txt)
        return data
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Perform preprocessing and raw feature extraction for LJSpeech dataset.""" + +import os +import re + +import numpy as np +import soundfile as sf +from dataclasses import dataclass +from tensorflow_tts.processor import BaseProcessor +from tensorflow_tts.utils import cleaners +from tensorflow_tts.utils.utils import PROCESSOR_FILE_NAME + +valid_symbols = [ + "AA", + "AA0", + "AA1", + "AA2", + "AE", + "AE0", + "AE1", + "AE2", + "AH", + "AH0", + "AH1", + "AH2", + "AO", + "AO0", + "AO1", + "AO2", + "AW", + "AW0", + "AW1", + "AW2", + "AY", + "AY0", + "AY1", + "AY2", + "B", + "CH", + "D", + "DH", + "EH", + "EH0", + "EH1", + "EH2", + "ER", + "ER0", + "ER1", + "ER2", + "EY", + "EY0", + "EY1", + "EY2", + "F", + "G", + "HH", + "IH", + "IH0", + "IH1", + "IH2", + "IY", + "IY0", + "IY1", + "IY2", + "JH", + "K", + "L", + "M", + "N", + "NG", + "OW", + "OW0", + "OW1", + "OW2", + "OY", + "OY0", + "OY1", + "OY2", + "P", + "R", + "S", + "SH", + "T", + "TH", + "UH", + "UH0", + "UH1", + "UH2", + "UW", + "UW0", + "UW1", + "UW2", + "V", + "W", + "Y", + "Z", + "ZH", +] + +_pad = "pad" +_eos = "eos" +_punctuation = "!'(),.:;? 
" +_special = "-" +_letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + +# Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as uppercase letters): +_arpabet = ["@" + s for s in valid_symbols] + +# Export all symbols: +LJSPEECH_SYMBOLS = ( + [_pad] + list(_special) + list(_punctuation) + list(_letters) + _arpabet + [_eos] +) + +# Regular expression matching text enclosed in curly braces: +_curly_re = re.compile(r"(.*?)\{(.+?)\}(.*)") + + +@dataclass +class LJSpeechProcessor(BaseProcessor): + """LJSpeech processor.""" + + cleaner_names: str = "english_cleaners" + positions = { + "wave_file": 0, + "text": 1, + "text_norm": 2, + } + train_f_name: str = "metadata.csv" + + def create_items(self): + if self.data_dir: + with open( + os.path.join(self.data_dir, self.train_f_name), encoding="utf-8" + ) as f: + self.items = [self.split_line(self.data_dir, line, "|") for line in f] + + def split_line(self, data_dir, line, split): + parts = line.strip().split(split) + wave_file = parts[self.positions["wave_file"]] + text_norm = parts[self.positions["text_norm"]] + wav_path = os.path.join(data_dir, "wavs", f"{wave_file}.wav") + speaker_name = "ljspeech" + return text_norm, wav_path, speaker_name + + def setup_eos_token(self): + return _eos + + def save_pretrained(self, saved_path): + os.makedirs(saved_path, exist_ok=True) + self._save_mapper(os.path.join(saved_path, PROCESSOR_FILE_NAME), {}) + + def get_one_sample(self, item): + text, wav_path, speaker_name = item + + # normalize audio signal to be [-1, 1], soundfile already norm. 
+ audio, rate = sf.read(wav_path) + audio = audio.astype(np.float32) + + # convert text to ids + text_ids = np.asarray(self.text_to_sequence(text), np.int32) + + sample = { + "raw_text": text, + "text_ids": text_ids, + "audio": audio, + "utt_id": os.path.split(wav_path)[-1].split(".")[0], + "speaker_name": speaker_name, + "rate": rate, + } + + return sample + + def text_to_sequence(self, text): + sequence = [] + # Check for curly braces and treat their contents as ARPAbet: + while len(text): + m = _curly_re.match(text) + if not m: + sequence += self._symbols_to_sequence( + self._clean_text(text, [self.cleaner_names]) + ) + break + sequence += self._symbols_to_sequence( + self._clean_text(m.group(1), [self.cleaner_names]) + ) + sequence += self._arpabet_to_sequence(m.group(2)) + text = m.group(3) + + # add eos tokens + sequence += [self.eos_id] + return sequence + + def _clean_text(self, text, cleaner_names): + for name in cleaner_names: + cleaner = getattr(cleaners, name) + if not cleaner: + raise Exception("Unknown cleaner: %s" % name) + text = cleaner(text) + return text + + def _symbols_to_sequence(self, symbols): + return [self.symbol_to_id[s] for s in symbols if self._should_keep_symbol(s)] + + def _arpabet_to_sequence(self, text): + return self._symbols_to_sequence(["@" + s for s in text.split()]) + + def _should_keep_symbol(self, s): + return s in self.symbol_to_id and s != "_" and s != "~" diff --git a/TensorFlowTTS/tensorflow_tts/processor/ljspeechu.py b/TensorFlowTTS/tensorflow_tts/processor/ljspeechu.py new file mode 100644 index 0000000000000000000000000000000000000000..a81d1a1f6526f29d1aec4bf03db051458866539d --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/ljspeechu.py @@ -0,0 +1,252 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Perform preprocessing and raw feature extraction for LJSpeech Ultimate dataset.""" + +import os +import re + +import numpy as np +import soundfile as sf +from dataclasses import dataclass +from tensorflow_tts.processor import BaseProcessor +from tensorflow_tts.utils import cleaners +from tensorflow_tts.utils.utils import PROCESSOR_FILE_NAME +from g2p_en import G2p as grapheme_to_phn + +valid_symbols = [ + "AA", + "AA0", + "AA1", + "AA2", + "AE", + "AE0", + "AE1", + "AE2", + "AH", + "AH0", + "AH1", + "AH2", + "AO", + "AO0", + "AO1", + "AO2", + "AW", + "AW0", + "AW1", + "AW2", + "AY", + "AY0", + "AY1", + "AY2", + "B", + "CH", + "D", + "DH", + "EH", + "EH0", + "EH1", + "EH2", + "ER", + "ER0", + "ER1", + "ER2", + "EY", + "EY0", + "EY1", + "EY2", + "F", + "G", + "HH", + "IH", + "IH0", + "IH1", + "IH2", + "IY", + "IY0", + "IY1", + "IY2", + "JH", + "K", + "L", + "M", + "N", + "NG", + "OW", + "OW0", + "OW1", + "OW2", + "OY", + "OY0", + "OY1", + "OY2", + "P", + "R", + "S", + "SH", + "T", + "TH", + "UH", + "UH0", + "UH1", + "UH2", + "UW", + "UW0", + "UW1", + "UW2", + "V", + "W", + "Y", + "Z", + "ZH", +] + +_pad = "pad" +_eos = "eos" +_punctuation = "!'(),.:;?" # Unlike LJSpeech, we do not use spaces since we are phoneme only and spaces lead to very bad attention performance with phonetic input. 
+_special = "-" + +# Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as uppercase letters): +_arpabet = ["@" + s for s in valid_symbols] + +# Export all symbols: +LJSPEECH_U_SYMBOLS = [_pad] + list(_special) + list(_punctuation) + _arpabet + [_eos] + +# Regular expression matching text enclosed in curly braces: +_curly_re = re.compile(r"(.*?)\{(.+?)\}(.*)") + + +_arpa_exempt = _punctuation + _special + +arpa_g2p = grapheme_to_phn() + + +@dataclass +class LJSpeechUltimateProcessor(BaseProcessor): + """LJSpeech Ultimate processor.""" + + cleaner_names: str = "english_cleaners" + positions = { + "wave_file": 0, + "text_norm": 1, + } + train_f_name: str = "filelist.txt" + + def create_items(self): + if self.data_dir: + with open( + os.path.join(self.data_dir, self.train_f_name), encoding="utf-8" + ) as f: + self.items = [self.split_line(self.data_dir, line, "|") for line in f] + + def split_line(self, data_dir, line, split): + parts = line.strip().split(split) + wave_file = parts[self.positions["wave_file"]] + text_norm = parts[self.positions["text_norm"]] + wav_path = os.path.join(data_dir, wave_file) + speaker_name = "ljspeech" + return text_norm, wav_path, speaker_name + + def setup_eos_token(self): + return _eos + + def save_pretrained(self, saved_path): + os.makedirs(saved_path, exist_ok=True) + self._save_mapper(os.path.join(saved_path, PROCESSOR_FILE_NAME), {}) + + def to_arpa(self, in_str): + phn_arr = arpa_g2p(in_str) + phn_arr = [x for x in phn_arr if x != " "] + + arpa_str = "{" + in_chain = True + + # Iterative array-traverse approach to build ARPA string. 
Phonemes must be in curly braces, but not punctuation + for token in phn_arr: + if token in _arpa_exempt and in_chain: + arpa_str += " }" + in_chain = False + + if token not in _arpa_exempt and not in_chain: + arpa_str += " {" + in_chain = True + + arpa_str += " " + token + + if in_chain: + arpa_str += " }" + + return arpa_str + + def get_one_sample(self, item): + text, wav_path, speaker_name = item + + # Check if this line is already an ARPA string by searching for the trademark curly brace. If not, we apply + if not "{" in text: + text = self.to_arpa(text) + + # normalize audio signal to be [-1, 1], soundfile already norm. + audio, rate = sf.read(wav_path) + audio = audio.astype(np.float32) + + # convert text to ids + text_ids = np.asarray(self.text_to_sequence(text), np.int32) + + sample = { + "raw_text": text, + "text_ids": text_ids, + "audio": audio, + "utt_id": os.path.split(wav_path)[-1].split(".")[0], + "speaker_name": speaker_name, + "rate": rate, + } + + return sample + + def text_to_sequence(self, text): + sequence = [] + # Check for curly braces and treat their contents as ARPAbet: + while len(text): + m = _curly_re.match(text) + if not m: + sequence += self._symbols_to_sequence( + self._clean_text(text, [self.cleaner_names]) + ) + break + sequence += self._symbols_to_sequence( + self._clean_text(m.group(1), [self.cleaner_names]) + ) + sequence += self._arpabet_to_sequence(m.group(2)) + text = m.group(3) + + # add eos tokens + sequence += [self.eos_id] + return sequence + + def _clean_text(self, text, cleaner_names): + for name in cleaner_names: + cleaner = getattr(cleaners, name) + if not cleaner: + raise Exception("Unknown cleaner: %s" % name) + text = cleaner(text) + return text + + def _symbols_to_sequence(self, symbols): + return [self.symbol_to_id[s] for s in symbols if self._should_keep_symbol(s)] + + def _arpabet_to_sequence(self, text): + return self._symbols_to_sequence(["@" + s for s in text.split()]) + + def _should_keep_symbol(self, s): + 
return s in self.symbol_to_id and s != "_" and s != "~" diff --git a/TensorFlowTTS/tensorflow_tts/processor/pretrained/baker_mapper.json b/TensorFlowTTS/tensorflow_tts/processor/pretrained/baker_mapper.json new file mode 100644 index 0000000000000000000000000000000000000000..82aaf7649ad843f376b544e8f2a160f298e9f83a --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/pretrained/baker_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"pad": 0, "sil": 1, "#0": 2, "#1": 3, "#2": 4, "#3": 5, "^": 6, "b": 7, "c": 8, "ch": 9, "d": 10, "f": 11, "g": 12, "h": 13, "j": 14, "k": 15, "l": 16, "m": 17, "n": 18, "p": 19, "q": 20, "r": 21, "s": 22, "sh": 23, "t": 24, "x": 25, "z": 26, "zh": 27, "a1": 28, "a2": 29, "a3": 30, "a4": 31, "a5": 32, "ai1": 33, "ai2": 34, "ai3": 35, "ai4": 36, "ai5": 37, "an1": 38, "an2": 39, "an3": 40, "an4": 41, "an5": 42, "ang1": 43, "ang2": 44, "ang3": 45, "ang4": 46, "ang5": 47, "ao1": 48, "ao2": 49, "ao3": 50, "ao4": 51, "ao5": 52, "e1": 53, "e2": 54, "e3": 55, "e4": 56, "e5": 57, "ei1": 58, "ei2": 59, "ei3": 60, "ei4": 61, "ei5": 62, "en1": 63, "en2": 64, "en3": 65, "en4": 66, "en5": 67, "eng1": 68, "eng2": 69, "eng3": 70, "eng4": 71, "eng5": 72, "er1": 73, "er2": 74, "er3": 75, "er4": 76, "er5": 77, "i1": 78, "i2": 79, "i3": 80, "i4": 81, "i5": 82, "ia1": 83, "ia2": 84, "ia3": 85, "ia4": 86, "ia5": 87, "ian1": 88, "ian2": 89, "ian3": 90, "ian4": 91, "ian5": 92, "iang1": 93, "iang2": 94, "iang3": 95, "iang4": 96, "iang5": 97, "iao1": 98, "iao2": 99, "iao3": 100, "iao4": 101, "iao5": 102, "ie1": 103, "ie2": 104, "ie3": 105, "ie4": 106, "ie5": 107, "ii1": 108, "ii2": 109, "ii3": 110, "ii4": 111, "ii5": 112, "iii1": 113, "iii2": 114, "iii3": 115, "iii4": 116, "iii5": 117, "in1": 118, "in2": 119, "in3": 120, "in4": 121, "in5": 122, "ing1": 123, "ing2": 124, "ing3": 125, "ing4": 126, "ing5": 127, "iong1": 128, "iong2": 129, "iong3": 130, "iong4": 131, "iong5": 132, "iou1": 133, "iou2": 134, "iou3": 135, "iou4": 136, "iou5": 137, "o1": 138, "o2": 
139, "o3": 140, "o4": 141, "o5": 142, "ong1": 143, "ong2": 144, "ong3": 145, "ong4": 146, "ong5": 147, "ou1": 148, "ou2": 149, "ou3": 150, "ou4": 151, "ou5": 152, "u1": 153, "u2": 154, "u3": 155, "u4": 156, "u5": 157, "ua1": 158, "ua2": 159, "ua3": 160, "ua4": 161, "ua5": 162, "uai1": 163, "uai2": 164, "uai3": 165, "uai4": 166, "uai5": 167, "uan1": 168, "uan2": 169, "uan3": 170, "uan4": 171, "uan5": 172, "uang1": 173, "uang2": 174, "uang3": 175, "uang4": 176, "uang5": 177, "uei1": 178, "uei2": 179, "uei3": 180, "uei4": 181, "uei5": 182, "uen1": 183, "uen2": 184, "uen3": 185, "uen4": 186, "uen5": 187, "ueng1": 188, "ueng2": 189, "ueng3": 190, "ueng4": 191, "ueng5": 192, "uo1": 193, "uo2": 194, "uo3": 195, "uo4": 196, "uo5": 197, "v1": 198, "v2": 199, "v3": 200, "v4": 201, "v5": 202, "van1": 203, "van2": 204, "van3": 205, "van4": 206, "van5": 207, "ve1": 208, "ve2": 209, "ve3": 210, "ve4": 211, "ve5": 212, "vn1": 213, "vn2": 214, "vn3": 215, "vn4": 216, "vn5": 217, "eos": 218}, "id_to_symbol": {"0": "pad", "1": "sil", "2": "#0", "3": "#1", "4": "#2", "5": "#3", "6": "^", "7": "b", "8": "c", "9": "ch", "10": "d", "11": "f", "12": "g", "13": "h", "14": "j", "15": "k", "16": "l", "17": "m", "18": "n", "19": "p", "20": "q", "21": "r", "22": "s", "23": "sh", "24": "t", "25": "x", "26": "z", "27": "zh", "28": "a1", "29": "a2", "30": "a3", "31": "a4", "32": "a5", "33": "ai1", "34": "ai2", "35": "ai3", "36": "ai4", "37": "ai5", "38": "an1", "39": "an2", "40": "an3", "41": "an4", "42": "an5", "43": "ang1", "44": "ang2", "45": "ang3", "46": "ang4", "47": "ang5", "48": "ao1", "49": "ao2", "50": "ao3", "51": "ao4", "52": "ao5", "53": "e1", "54": "e2", "55": "e3", "56": "e4", "57": "e5", "58": "ei1", "59": "ei2", "60": "ei3", "61": "ei4", "62": "ei5", "63": "en1", "64": "en2", "65": "en3", "66": "en4", "67": "en5", "68": "eng1", "69": "eng2", "70": "eng3", "71": "eng4", "72": "eng5", "73": "er1", "74": "er2", "75": "er3", "76": "er4", "77": "er5", "78": "i1", "79": "i2", "80": 
"i3", "81": "i4", "82": "i5", "83": "ia1", "84": "ia2", "85": "ia3", "86": "ia4", "87": "ia5", "88": "ian1", "89": "ian2", "90": "ian3", "91": "ian4", "92": "ian5", "93": "iang1", "94": "iang2", "95": "iang3", "96": "iang4", "97": "iang5", "98": "iao1", "99": "iao2", "100": "iao3", "101": "iao4", "102": "iao5", "103": "ie1", "104": "ie2", "105": "ie3", "106": "ie4", "107": "ie5", "108": "ii1", "109": "ii2", "110": "ii3", "111": "ii4", "112": "ii5", "113": "iii1", "114": "iii2", "115": "iii3", "116": "iii4", "117": "iii5", "118": "in1", "119": "in2", "120": "in3", "121": "in4", "122": "in5", "123": "ing1", "124": "ing2", "125": "ing3", "126": "ing4", "127": "ing5", "128": "iong1", "129": "iong2", "130": "iong3", "131": "iong4", "132": "iong5", "133": "iou1", "134": "iou2", "135": "iou3", "136": "iou4", "137": "iou5", "138": "o1", "139": "o2", "140": "o3", "141": "o4", "142": "o5", "143": "ong1", "144": "ong2", "145": "ong3", "146": "ong4", "147": "ong5", "148": "ou1", "149": "ou2", "150": "ou3", "151": "ou4", "152": "ou5", "153": "u1", "154": "u2", "155": "u3", "156": "u4", "157": "u5", "158": "ua1", "159": "ua2", "160": "ua3", "161": "ua4", "162": "ua5", "163": "uai1", "164": "uai2", "165": "uai3", "166": "uai4", "167": "uai5", "168": "uan1", "169": "uan2", "170": "uan3", "171": "uan4", "172": "uan5", "173": "uang1", "174": "uang2", "175": "uang3", "176": "uang4", "177": "uang5", "178": "uei1", "179": "uei2", "180": "uei3", "181": "uei4", "182": "uei5", "183": "uen1", "184": "uen2", "185": "uen3", "186": "uen4", "187": "uen5", "188": "ueng1", "189": "ueng2", "190": "ueng3", "191": "ueng4", "192": "ueng5", "193": "uo1", "194": "uo2", "195": "uo3", "196": "uo4", "197": "uo5", "198": "v1", "199": "v2", "200": "v3", "201": "v4", "202": "v5", "203": "van1", "204": "van2", "205": "van3", "206": "van4", "207": "van5", "208": "ve1", "209": "ve2", "210": "ve3", "211": "ve4", "212": "ve5", "213": "vn1", "214": "vn2", "215": "vn3", "216": "vn4", "217": "vn5", "218": "eos"}, 
"speakers_map": {"baker": 0}, "processor_name": "BakerProcessor", "pinyin_dict": {"a": ["^", "a"], "ai": ["^", "ai"], "an": ["^", "an"], "ang": ["^", "ang"], "ao": ["^", "ao"], "ba": ["b", "a"], "bai": ["b", "ai"], "ban": ["b", "an"], "bang": ["b", "ang"], "bao": ["b", "ao"], "be": ["b", "e"], "bei": ["b", "ei"], "ben": ["b", "en"], "beng": ["b", "eng"], "bi": ["b", "i"], "bian": ["b", "ian"], "biao": ["b", "iao"], "bie": ["b", "ie"], "bin": ["b", "in"], "bing": ["b", "ing"], "bo": ["b", "o"], "bu": ["b", "u"], "ca": ["c", "a"], "cai": ["c", "ai"], "can": ["c", "an"], "cang": ["c", "ang"], "cao": ["c", "ao"], "ce": ["c", "e"], "cen": ["c", "en"], "ceng": ["c", "eng"], "cha": ["ch", "a"], "chai": ["ch", "ai"], "chan": ["ch", "an"], "chang": ["ch", "ang"], "chao": ["ch", "ao"], "che": ["ch", "e"], "chen": ["ch", "en"], "cheng": ["ch", "eng"], "chi": ["ch", "iii"], "chong": ["ch", "ong"], "chou": ["ch", "ou"], "chu": ["ch", "u"], "chua": ["ch", "ua"], "chuai": ["ch", "uai"], "chuan": ["ch", "uan"], "chuang": ["ch", "uang"], "chui": ["ch", "uei"], "chun": ["ch", "uen"], "chuo": ["ch", "uo"], "ci": ["c", "ii"], "cong": ["c", "ong"], "cou": ["c", "ou"], "cu": ["c", "u"], "cuan": ["c", "uan"], "cui": ["c", "uei"], "cun": ["c", "uen"], "cuo": ["c", "uo"], "da": ["d", "a"], "dai": ["d", "ai"], "dan": ["d", "an"], "dang": ["d", "ang"], "dao": ["d", "ao"], "de": ["d", "e"], "dei": ["d", "ei"], "den": ["d", "en"], "deng": ["d", "eng"], "di": ["d", "i"], "dia": ["d", "ia"], "dian": ["d", "ian"], "diao": ["d", "iao"], "die": ["d", "ie"], "ding": ["d", "ing"], "diu": ["d", "iou"], "dong": ["d", "ong"], "dou": ["d", "ou"], "du": ["d", "u"], "duan": ["d", "uan"], "dui": ["d", "uei"], "dun": ["d", "uen"], "duo": ["d", "uo"], "e": ["^", "e"], "ei": ["^", "ei"], "en": ["^", "en"], "ng": ["^", "en"], "eng": ["^", "eng"], "er": ["^", "er"], "fa": ["f", "a"], "fan": ["f", "an"], "fang": ["f", "ang"], "fei": ["f", "ei"], "fen": ["f", "en"], "feng": ["f", "eng"], "fo": ["f", "o"], "fou": 
["f", "ou"], "fu": ["f", "u"], "ga": ["g", "a"], "gai": ["g", "ai"], "gan": ["g", "an"], "gang": ["g", "ang"], "gao": ["g", "ao"], "ge": ["g", "e"], "gei": ["g", "ei"], "gen": ["g", "en"], "geng": ["g", "eng"], "gong": ["g", "ong"], "gou": ["g", "ou"], "gu": ["g", "u"], "gua": ["g", "ua"], "guai": ["g", "uai"], "guan": ["g", "uan"], "guang": ["g", "uang"], "gui": ["g", "uei"], "gun": ["g", "uen"], "guo": ["g", "uo"], "ha": ["h", "a"], "hai": ["h", "ai"], "han": ["h", "an"], "hang": ["h", "ang"], "hao": ["h", "ao"], "he": ["h", "e"], "hei": ["h", "ei"], "hen": ["h", "en"], "heng": ["h", "eng"], "hong": ["h", "ong"], "hou": ["h", "ou"], "hu": ["h", "u"], "hua": ["h", "ua"], "huai": ["h", "uai"], "huan": ["h", "uan"], "huang": ["h", "uang"], "hui": ["h", "uei"], "hun": ["h", "uen"], "huo": ["h", "uo"], "ji": ["j", "i"], "jia": ["j", "ia"], "jian": ["j", "ian"], "jiang": ["j", "iang"], "jiao": ["j", "iao"], "jie": ["j", "ie"], "jin": ["j", "in"], "jing": ["j", "ing"], "jiong": ["j", "iong"], "jiu": ["j", "iou"], "ju": ["j", "v"], "juan": ["j", "van"], "jue": ["j", "ve"], "jun": ["j", "vn"], "ka": ["k", "a"], "kai": ["k", "ai"], "kan": ["k", "an"], "kang": ["k", "ang"], "kao": ["k", "ao"], "ke": ["k", "e"], "kei": ["k", "ei"], "ken": ["k", "en"], "keng": ["k", "eng"], "kong": ["k", "ong"], "kou": ["k", "ou"], "ku": ["k", "u"], "kua": ["k", "ua"], "kuai": ["k", "uai"], "kuan": ["k", "uan"], "kuang": ["k", "uang"], "kui": ["k", "uei"], "kun": ["k", "uen"], "kuo": ["k", "uo"], "la": ["l", "a"], "lai": ["l", "ai"], "lan": ["l", "an"], "lang": ["l", "ang"], "lao": ["l", "ao"], "le": ["l", "e"], "lei": ["l", "ei"], "leng": ["l", "eng"], "li": ["l", "i"], "lia": ["l", "ia"], "lian": ["l", "ian"], "liang": ["l", "iang"], "liao": ["l", "iao"], "lie": ["l", "ie"], "lin": ["l", "in"], "ling": ["l", "ing"], "liu": ["l", "iou"], "lo": ["l", "o"], "long": ["l", "ong"], "lou": ["l", "ou"], "lu": ["l", "u"], "lv": ["l", "v"], "luan": ["l", "uan"], "lve": ["l", "ve"], "lue": ["l", 
"ve"], "lun": ["l", "uen"], "luo": ["l", "uo"], "ma": ["m", "a"], "mai": ["m", "ai"], "man": ["m", "an"], "mang": ["m", "ang"], "mao": ["m", "ao"], "me": ["m", "e"], "mei": ["m", "ei"], "men": ["m", "en"], "meng": ["m", "eng"], "mi": ["m", "i"], "mian": ["m", "ian"], "miao": ["m", "iao"], "mie": ["m", "ie"], "min": ["m", "in"], "ming": ["m", "ing"], "miu": ["m", "iou"], "mo": ["m", "o"], "mou": ["m", "ou"], "mu": ["m", "u"], "na": ["n", "a"], "nai": ["n", "ai"], "nan": ["n", "an"], "nang": ["n", "ang"], "nao": ["n", "ao"], "ne": ["n", "e"], "nei": ["n", "ei"], "nen": ["n", "en"], "neng": ["n", "eng"], "ni": ["n", "i"], "nia": ["n", "ia"], "nian": ["n", "ian"], "niang": ["n", "iang"], "niao": ["n", "iao"], "nie": ["n", "ie"], "nin": ["n", "in"], "ning": ["n", "ing"], "niu": ["n", "iou"], "nong": ["n", "ong"], "nou": ["n", "ou"], "nu": ["n", "u"], "nv": ["n", "v"], "nuan": ["n", "uan"], "nve": ["n", "ve"], "nue": ["n", "ve"], "nuo": ["n", "uo"], "o": ["^", "o"], "ou": ["^", "ou"], "pa": ["p", "a"], "pai": ["p", "ai"], "pan": ["p", "an"], "pang": ["p", "ang"], "pao": ["p", "ao"], "pe": ["p", "e"], "pei": ["p", "ei"], "pen": ["p", "en"], "peng": ["p", "eng"], "pi": ["p", "i"], "pian": ["p", "ian"], "piao": ["p", "iao"], "pie": ["p", "ie"], "pin": ["p", "in"], "ping": ["p", "ing"], "po": ["p", "o"], "pou": ["p", "ou"], "pu": ["p", "u"], "qi": ["q", "i"], "qia": ["q", "ia"], "qian": ["q", "ian"], "qiang": ["q", "iang"], "qiao": ["q", "iao"], "qie": ["q", "ie"], "qin": ["q", "in"], "qing": ["q", "ing"], "qiong": ["q", "iong"], "qiu": ["q", "iou"], "qu": ["q", "v"], "quan": ["q", "van"], "que": ["q", "ve"], "qun": ["q", "vn"], "ran": ["r", "an"], "rang": ["r", "ang"], "rao": ["r", "ao"], "re": ["r", "e"], "ren": ["r", "en"], "reng": ["r", "eng"], "ri": ["r", "iii"], "rong": ["r", "ong"], "rou": ["r", "ou"], "ru": ["r", "u"], "rua": ["r", "ua"], "ruan": ["r", "uan"], "rui": ["r", "uei"], "run": ["r", "uen"], "ruo": ["r", "uo"], "sa": ["s", "a"], "sai": ["s", "ai"], "san": 
["s", "an"], "sang": ["s", "ang"], "sao": ["s", "ao"], "se": ["s", "e"], "sen": ["s", "en"], "seng": ["s", "eng"], "sha": ["sh", "a"], "shai": ["sh", "ai"], "shan": ["sh", "an"], "shang": ["sh", "ang"], "shao": ["sh", "ao"], "she": ["sh", "e"], "shei": ["sh", "ei"], "shen": ["sh", "en"], "sheng": ["sh", "eng"], "shi": ["sh", "iii"], "shou": ["sh", "ou"], "shu": ["sh", "u"], "shua": ["sh", "ua"], "shuai": ["sh", "uai"], "shuan": ["sh", "uan"], "shuang": ["sh", "uang"], "shui": ["sh", "uei"], "shun": ["sh", "uen"], "shuo": ["sh", "uo"], "si": ["s", "ii"], "song": ["s", "ong"], "sou": ["s", "ou"], "su": ["s", "u"], "suan": ["s", "uan"], "sui": ["s", "uei"], "sun": ["s", "uen"], "suo": ["s", "uo"], "ta": ["t", "a"], "tai": ["t", "ai"], "tan": ["t", "an"], "tang": ["t", "ang"], "tao": ["t", "ao"], "te": ["t", "e"], "tei": ["t", "ei"], "teng": ["t", "eng"], "ti": ["t", "i"], "tian": ["t", "ian"], "tiao": ["t", "iao"], "tie": ["t", "ie"], "ting": ["t", "ing"], "tong": ["t", "ong"], "tou": ["t", "ou"], "tu": ["t", "u"], "tuan": ["t", "uan"], "tui": ["t", "uei"], "tun": ["t", "uen"], "tuo": ["t", "uo"], "wa": ["^", "ua"], "wai": ["^", "uai"], "wan": ["^", "uan"], "wang": ["^", "uang"], "wei": ["^", "uei"], "wen": ["^", "uen"], "weng": ["^", "ueng"], "wo": ["^", "uo"], "wu": ["^", "u"], "xi": ["x", "i"], "xia": ["x", "ia"], "xian": ["x", "ian"], "xiang": ["x", "iang"], "xiao": ["x", "iao"], "xie": ["x", "ie"], "xin": ["x", "in"], "xing": ["x", "ing"], "xiong": ["x", "iong"], "xiu": ["x", "iou"], "xu": ["x", "v"], "xuan": ["x", "van"], "xue": ["x", "ve"], "xun": ["x", "vn"], "ya": ["^", "ia"], "yan": ["^", "ian"], "yang": ["^", "iang"], "yao": ["^", "iao"], "ye": ["^", "ie"], "yi": ["^", "i"], "yin": ["^", "in"], "ying": ["^", "ing"], "yo": ["^", "iou"], "yong": ["^", "iong"], "you": ["^", "iou"], "yu": ["^", "v"], "yuan": ["^", "van"], "yue": ["^", "ve"], "yun": ["^", "vn"], "za": ["z", "a"], "zai": ["z", "ai"], "zan": ["z", "an"], "zang": ["z", "ang"], "zao": ["z", "ao"], 
"ze": ["z", "e"], "zei": ["z", "ei"], "zen": ["z", "en"], "zeng": ["z", "eng"], "zha": ["zh", "a"], "zhai": ["zh", "ai"], "zhan": ["zh", "an"], "zhang": ["zh", "ang"], "zhao": ["zh", "ao"], "zhe": ["zh", "e"], "zhei": ["zh", "ei"], "zhen": ["zh", "en"], "zheng": ["zh", "eng"], "zhi": ["zh", "iii"], "zhong": ["zh", "ong"], "zhou": ["zh", "ou"], "zhu": ["zh", "u"], "zhua": ["zh", "ua"], "zhuai": ["zh", "uai"], "zhuan": ["zh", "uan"], "zhuang": ["zh", "uang"], "zhui": ["zh", "uei"], "zhun": ["zh", "uen"], "zhuo": ["zh", "uo"], "zi": ["z", "ii"], "zong": ["z", "ong"], "zou": ["z", "ou"], "zu": ["z", "u"], "zuan": ["z", "uan"], "zui": ["z", "uei"], "zun": ["z", "uen"], "zuo": ["z", "uo"]}} \ No newline at end of file diff --git a/TensorFlowTTS/tensorflow_tts/processor/pretrained/jsut_mapper.json b/TensorFlowTTS/tensorflow_tts/processor/pretrained/jsut_mapper.json new file mode 100644 index 0000000000000000000000000000000000000000..a6d22b485bb59c6dc5c18f8cc20f7dbda62cc904 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/pretrained/jsut_mapper.json @@ -0,0 +1,90 @@ +{ + "symbol_to_id": { + "pad": 0, + "sil": 1, + "N": 2, + "a": 3, + "b": 4, + "by": 5, + "ch": 6, + "cl": 7, + "d": 8, + "dy": 9, + "e": 10, + "f": 11, + "g": 12, + "gy": 13, + "h": 14, + "hy": 15, + "i": 16, + "j": 17, + "k": 18, + "ky": 19, + "m": 20, + "my": 21, + "n": 22, + "ny": 23, + "o": 24, + "p": 25, + "pau": 26, + "py": 27, + "r": 28, + "ry": 29, + "s": 30, + "sh": 31, + "t": 32, + "ts": 33, + "u": 34, + "v": 35, + "w": 36, + "y": 37, + "z": 38, + "eos": 39 + }, + "id_to_symbol": { + "0": "pad", + "1": "sil", + "2": "N", + "3": "a", + "4": "b", + "5": "by", + "6": "ch", + "7": "cl", + "8": "d", + "9": "dy", + "10": "e", + "11": "f", + "12": "g", + "13": "gy", + "14": "h", + "15": "hy", + "16": "i", + "17": "j", + "18": "k", + "19": "ky", + "20": "m", + "21": "my", + "22": "n", + "23": "ny", + "24": "o", + "25": "p", + "26": "pau", + "27": "py", + "28": "r", + "29": "ry", + "30": "s", + 
"31": "sh", + "32": "t", + "33": "ts", + "34": "u", + "35": "v", + "36": "w", + "37": "y", + "38": "z", + "39": "eos" + }, + "speakers_map": { + "jsut": 0 + }, + "processor_name": "JSUTProcessor" +} \ No newline at end of file diff --git a/TensorFlowTTS/tensorflow_tts/processor/pretrained/kss_mapper.json b/TensorFlowTTS/tensorflow_tts/processor/pretrained/kss_mapper.json new file mode 100644 index 0000000000000000000000000000000000000000..5feaa0a766a93581f9fabfa36005bf37968385d1 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/pretrained/kss_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"pad": 0, "-": 7, "!": 2, "'": 3, "(": 4, ")": 5, ",": 6, ".": 8, ":": 9, ";": 10, "?": 11, " ": 12, "\u1100": 13, "\u1101": 14, "\u1102": 15, "\u1103": 16, "\u1104": 17, "\u1105": 18, "\u1106": 19, "\u1107": 20, "\u1108": 21, "\u1109": 22, "\u110a": 23, "\u110b": 24, "\u110c": 25, "\u110d": 26, "\u110e": 27, "\u110f": 28, "\u1110": 29, "\u1111": 30, "\u1112": 31, "\u1161": 32, "\u1162": 33, "\u1163": 34, "\u1164": 35, "\u1165": 36, "\u1166": 37, "\u1167": 38, "\u1168": 39, "\u1169": 40, "\u116a": 41, "\u116b": 42, "\u116c": 43, "\u116d": 44, "\u116e": 45, "\u116f": 46, "\u1170": 47, "\u1171": 48, "\u1172": 49, "\u1173": 50, "\u1174": 51, "\u1175": 52, "\u11a8": 53, "\u11a9": 54, "\u11aa": 55, "\u11ab": 56, "\u11ac": 57, "\u11ad": 58, "\u11ae": 59, "\u11af": 60, "\u11b0": 61, "\u11b1": 62, "\u11b2": 63, "\u11b3": 64, "\u11b4": 65, "\u11b5": 66, "\u11b6": 67, "\u11b7": 68, "\u11b8": 69, "\u11b9": 70, "\u11ba": 71, "\u11bb": 72, "\u11bc": 73, "\u11bd": 74, "\u11be": 75, "\u11bf": 76, "\u11c0": 77, "\u11c1": 78, "\u11c2": 79, "eos": 80}, "id_to_symbol": {"0": "pad", "1": "-", "2": "!", "3": "'", "4": "(", "5": ")", "6": ",", "7": "-", "8": ".", "9": ":", "10": ";", "11": "?", "12": " ", "13": "\u1100", "14": "\u1101", "15": "\u1102", "16": "\u1103", "17": "\u1104", "18": "\u1105", "19": "\u1106", "20": "\u1107", "21": "\u1108", "22": "\u1109", "23": "\u110a", "24": "\u110b", 
"25": "\u110c", "26": "\u110d", "27": "\u110e", "28": "\u110f", "29": "\u1110", "30": "\u1111", "31": "\u1112", "32": "\u1161", "33": "\u1162", "34": "\u1163", "35": "\u1164", "36": "\u1165", "37": "\u1166", "38": "\u1167", "39": "\u1168", "40": "\u1169", "41": "\u116a", "42": "\u116b", "43": "\u116c", "44": "\u116d", "45": "\u116e", "46": "\u116f", "47": "\u1170", "48": "\u1171", "49": "\u1172", "50": "\u1173", "51": "\u1174", "52": "\u1175", "53": "\u11a8", "54": "\u11a9", "55": "\u11aa", "56": "\u11ab", "57": "\u11ac", "58": "\u11ad", "59": "\u11ae", "60": "\u11af", "61": "\u11b0", "62": "\u11b1", "63": "\u11b2", "64": "\u11b3", "65": "\u11b4", "66": "\u11b5", "67": "\u11b6", "68": "\u11b7", "69": "\u11b8", "70": "\u11b9", "71": "\u11ba", "72": "\u11bb", "73": "\u11bc", "74": "\u11bd", "75": "\u11be", "76": "\u11bf", "77": "\u11c0", "78": "\u11c1", "79": "\u11c2", "80": "eos"}, "speakers_map": {"kss": 0}, "processor_name": "KSSProcessor"} \ No newline at end of file diff --git a/TensorFlowTTS/tensorflow_tts/processor/pretrained/libritts_mapper.json b/TensorFlowTTS/tensorflow_tts/processor/pretrained/libritts_mapper.json new file mode 100644 index 0000000000000000000000000000000000000000..e7c56710d19138ddbd1afc319eca568dd5bccb99 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/pretrained/libritts_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"@": 0, "@": 1, "@": 2, "@": 3, "@AA0": 4, "@AA1": 5, "@AA2": 6, "@AE0": 7, "@AE1": 8, "@AE2": 9, "@AH0": 10, "@AH1": 11, "@AH2": 12, "@AO0": 13, "@AO1": 14, "@AO2": 15, "@AW0": 16, "@AW1": 17, "@AW2": 18, "@AY0": 19, "@AY1": 20, "@AY2": 21, "@B": 22, "@CH": 23, "@D": 24, "@DH": 25, "@EH0": 26, "@EH1": 27, "@EH2": 28, "@ER0": 29, "@ER1": 30, "@ER2": 31, "@EY0": 32, "@EY1": 33, "@EY2": 34, "@F": 35, "@G": 36, "@HH": 37, "@IH0": 38, "@IH1": 39, "@IH2": 40, "@IY0": 41, "@IY1": 42, "@IY2": 43, "@JH": 44, "@K": 45, "@L": 46, "@M": 47, "@N": 48, "@NG": 49, "@OW0": 50, "@OW1": 51, "@OW2": 52, "@OY0": 53, "@OY1": 54, "@OY2": 
55, "@P": 56, "@R": 57, "@S": 58, "@SH": 59, "@T": 60, "@TH": 61, "@UH0": 62, "@UH1": 63, "@UH2": 64, "@UW": 65, "@UW0": 66, "@UW1": 67, "@UW2": 68, "@V": 69, "@W": 70, "@Y": 71, "@Z": 72, "@ZH": 73, "@SIL": 74, "@END": 75, "!": 76, "'": 77, "(": 78, ")": 79, ",": 80, ".": 81, ":": 82, ";": 83, "?": 84, " ": 85}, "id_to_symbol": {"0": "@", "1": "@", "2": "@", "3": "@", "4": "@AA0", "5": "@AA1", "6": "@AA2", "7": "@AE0", "8": "@AE1", "9": "@AE2", "10": "@AH0", "11": "@AH1", "12": "@AH2", "13": "@AO0", "14": "@AO1", "15": "@AO2", "16": "@AW0", "17": "@AW1", "18": "@AW2", "19": "@AY0", "20": "@AY1", "21": "@AY2", "22": "@B", "23": "@CH", "24": "@D", "25": "@DH", "26": "@EH0", "27": "@EH1", "28": "@EH2", "29": "@ER0", "30": "@ER1", "31": "@ER2", "32": "@EY0", "33": "@EY1", "34": "@EY2", "35": "@F", "36": "@G", "37": "@HH", "38": "@IH0", "39": "@IH1", "40": "@IH2", "41": "@IY0", "42": "@IY1", "43": "@IY2", "44": "@JH", "45": "@K", "46": "@L", "47": "@M", "48": "@N", "49": "@NG", "50": "@OW0", "51": "@OW1", "52": "@OW2", "53": "@OY0", "54": "@OY1", "55": "@OY2", "56": "@P", "57": "@R", "58": "@S", "59": "@SH", "60": "@T", "61": "@TH", "62": "@UH0", "63": "@UH1", "64": "@UH2", "65": "@UW", "66": "@UW0", "67": "@UW1", "68": "@UW2", "69": "@V", "70": "@W", "71": "@Y", "72": "@Z", "73": "@ZH", "74": "@SIL", "75": "@END", "76": "!", "77": "'", "78": "(", "79": ")", "80": ",", "81": ".", "82": ":", "83": ";", "84": "?", "85": " "}, "speakers_map": {"200": 0, "1841": 1, "3664": 2, "6454": 3, "8108": 4, "2416": 5, "4680": 6, "6147": 7, "412": 8, "2952": 9, "8838": 10, "2836": 11, "1263": 12, "5322": 13, "3830": 14, "7447": 15, "1116": 16, "8312": 17, "8123": 18, "250": 19}, "processor_name": "LibriTTSProcessor"} \ No newline at end of file diff --git a/TensorFlowTTS/tensorflow_tts/processor/pretrained/ljspeech_mapper.json b/TensorFlowTTS/tensorflow_tts/processor/pretrained/ljspeech_mapper.json new file mode 100644 index 
0000000000000000000000000000000000000000..71bcd42422041bd42e59cbb35722f76ca87717d8 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/pretrained/ljspeech_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"pad": 0, "-": 1, "!": 2, "'": 3, "(": 4, ")": 5, ",": 6, ".": 7, ":": 8, ";": 9, "?": 10, " ": 11, "A": 12, "B": 13, "C": 14, "D": 15, "E": 16, "F": 17, "G": 18, "H": 19, "I": 20, "J": 21, "K": 22, "L": 23, "M": 24, "N": 25, "O": 26, "P": 27, "Q": 28, "R": 29, "S": 30, "T": 31, "U": 32, "V": 33, "W": 34, "X": 35, "Y": 36, "Z": 37, "a": 38, "b": 39, "c": 40, "d": 41, "e": 42, "f": 43, "g": 44, "h": 45, "i": 46, "j": 47, "k": 48, "l": 49, "m": 50, "n": 51, "o": 52, "p": 53, "q": 54, "r": 55, "s": 56, "t": 57, "u": 58, "v": 59, "w": 60, "x": 61, "y": 62, "z": 63, "@AA": 64, "@AA0": 65, "@AA1": 66, "@AA2": 67, "@AE": 68, "@AE0": 69, "@AE1": 70, "@AE2": 71, "@AH": 72, "@AH0": 73, "@AH1": 74, "@AH2": 75, "@AO": 76, "@AO0": 77, "@AO1": 78, "@AO2": 79, "@AW": 80, "@AW0": 81, "@AW1": 82, "@AW2": 83, "@AY": 84, "@AY0": 85, "@AY1": 86, "@AY2": 87, "@B": 88, "@CH": 89, "@D": 90, "@DH": 91, "@EH": 92, "@EH0": 93, "@EH1": 94, "@EH2": 95, "@ER": 96, "@ER0": 97, "@ER1": 98, "@ER2": 99, "@EY": 100, "@EY0": 101, "@EY1": 102, "@EY2": 103, "@F": 104, "@G": 105, "@HH": 106, "@IH": 107, "@IH0": 108, "@IH1": 109, "@IH2": 110, "@IY": 111, "@IY0": 112, "@IY1": 113, "@IY2": 114, "@JH": 115, "@K": 116, "@L": 117, "@M": 118, "@N": 119, "@NG": 120, "@OW": 121, "@OW0": 122, "@OW1": 123, "@OW2": 124, "@OY": 125, "@OY0": 126, "@OY1": 127, "@OY2": 128, "@P": 129, "@R": 130, "@S": 131, "@SH": 132, "@T": 133, "@TH": 134, "@UH": 135, "@UH0": 136, "@UH1": 137, "@UH2": 138, "@UW": 139, "@UW0": 140, "@UW1": 141, "@UW2": 142, "@V": 143, "@W": 144, "@Y": 145, "@Z": 146, "@ZH": 147, "eos": 148}, "id_to_symbol": {"0": "pad", "1": "-", "2": "!", "3": "'", "4": "(", "5": ")", "6": ",", "7": ".", "8": ":", "9": ";", "10": "?", "11": " ", "12": "A", "13": "B", "14": "C", "15": "D", "16": "E", "17": "F", 
"18": "G", "19": "H", "20": "I", "21": "J", "22": "K", "23": "L", "24": "M", "25": "N", "26": "O", "27": "P", "28": "Q", "29": "R", "30": "S", "31": "T", "32": "U", "33": "V", "34": "W", "35": "X", "36": "Y", "37": "Z", "38": "a", "39": "b", "40": "c", "41": "d", "42": "e", "43": "f", "44": "g", "45": "h", "46": "i", "47": "j", "48": "k", "49": "l", "50": "m", "51": "n", "52": "o", "53": "p", "54": "q", "55": "r", "56": "s", "57": "t", "58": "u", "59": "v", "60": "w", "61": "x", "62": "y", "63": "z", "64": "@AA", "65": "@AA0", "66": "@AA1", "67": "@AA2", "68": "@AE", "69": "@AE0", "70": "@AE1", "71": "@AE2", "72": "@AH", "73": "@AH0", "74": "@AH1", "75": "@AH2", "76": "@AO", "77": "@AO0", "78": "@AO1", "79": "@AO2", "80": "@AW", "81": "@AW0", "82": "@AW1", "83": "@AW2", "84": "@AY", "85": "@AY0", "86": "@AY1", "87": "@AY2", "88": "@B", "89": "@CH", "90": "@D", "91": "@DH", "92": "@EH", "93": "@EH0", "94": "@EH1", "95": "@EH2", "96": "@ER", "97": "@ER0", "98": "@ER1", "99": "@ER2", "100": "@EY", "101": "@EY0", "102": "@EY1", "103": "@EY2", "104": "@F", "105": "@G", "106": "@HH", "107": "@IH", "108": "@IH0", "109": "@IH1", "110": "@IH2", "111": "@IY", "112": "@IY0", "113": "@IY1", "114": "@IY2", "115": "@JH", "116": "@K", "117": "@L", "118": "@M", "119": "@N", "120": "@NG", "121": "@OW", "122": "@OW0", "123": "@OW1", "124": "@OW2", "125": "@OY", "126": "@OY0", "127": "@OY1", "128": "@OY2", "129": "@P", "130": "@R", "131": "@S", "132": "@SH", "133": "@T", "134": "@TH", "135": "@UH", "136": "@UH0", "137": "@UH1", "138": "@UH2", "139": "@UW", "140": "@UW0", "141": "@UW1", "142": "@UW2", "143": "@V", "144": "@W", "145": "@Y", "146": "@Z", "147": "@ZH", "148": "eos"}, "speakers_map": {"ljspeech": 0}, "processor_name": "LJSpeechProcessor"} \ No newline at end of file diff --git a/TensorFlowTTS/tensorflow_tts/processor/pretrained/ljspeechu_mapper.json b/TensorFlowTTS/tensorflow_tts/processor/pretrained/ljspeechu_mapper.json new file mode 100644 index 
0000000000000000000000000000000000000000..1ce3c97bffa5cd901494df44da1b18d16fc3e2d6 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/pretrained/ljspeechu_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"pad": 0, "-": 1, "!": 2, "'": 3, "(": 4, ")": 5, ",": 6, ".": 7, ":": 8, ";": 9, "?": 10, "@AA": 11, "@AA0": 12, "@AA1": 13, "@AA2": 14, "@AE": 15, "@AE0": 16, "@AE1": 17, "@AE2": 18, "@AH": 19, "@AH0": 20, "@AH1": 21, "@AH2": 22, "@AO": 23, "@AO0": 24, "@AO1": 25, "@AO2": 26, "@AW": 27, "@AW0": 28, "@AW1": 29, "@AW2": 30, "@AY": 31, "@AY0": 32, "@AY1": 33, "@AY2": 34, "@B": 35, "@CH": 36, "@D": 37, "@DH": 38, "@EH": 39, "@EH0": 40, "@EH1": 41, "@EH2": 42, "@ER": 43, "@ER0": 44, "@ER1": 45, "@ER2": 46, "@EY": 47, "@EY0": 48, "@EY1": 49, "@EY2": 50, "@F": 51, "@G": 52, "@HH": 53, "@IH": 54, "@IH0": 55, "@IH1": 56, "@IH2": 57, "@IY": 58, "@IY0": 59, "@IY1": 60, "@IY2": 61, "@JH": 62, "@K": 63, "@L": 64, "@M": 65, "@N": 66, "@NG": 67, "@OW": 68, "@OW0": 69, "@OW1": 70, "@OW2": 71, "@OY": 72, "@OY0": 73, "@OY1": 74, "@OY2": 75, "@P": 76, "@R": 77, "@S": 78, "@SH": 79, "@T": 80, "@TH": 81, "@UH": 82, "@UH0": 83, "@UH1": 84, "@UH2": 85, "@UW": 86, "@UW0": 87, "@UW1": 88, "@UW2": 89, "@V": 90, "@W": 91, "@Y": 92, "@Z": 93, "@ZH": 94, "eos": 95}, "id_to_symbol": {"0": "pad", "1": "-", "2": "!", "3": "'", "4": "(", "5": ")", "6": ",", "7": ".", "8": ":", "9": ";", "10": "?", "11": "@AA", "12": "@AA0", "13": "@AA1", "14": "@AA2", "15": "@AE", "16": "@AE0", "17": "@AE1", "18": "@AE2", "19": "@AH", "20": "@AH0", "21": "@AH1", "22": "@AH2", "23": "@AO", "24": "@AO0", "25": "@AO1", "26": "@AO2", "27": "@AW", "28": "@AW0", "29": "@AW1", "30": "@AW2", "31": "@AY", "32": "@AY0", "33": "@AY1", "34": "@AY2", "35": "@B", "36": "@CH", "37": "@D", "38": "@DH", "39": "@EH", "40": "@EH0", "41": "@EH1", "42": "@EH2", "43": "@ER", "44": "@ER0", "45": "@ER1", "46": "@ER2", "47": "@EY", "48": "@EY0", "49": "@EY1", "50": "@EY2", "51": "@F", "52": "@G", "53": "@HH", "54": "@IH", "55": 
"@IH0", "56": "@IH1", "57": "@IH2", "58": "@IY", "59": "@IY0", "60": "@IY1", "61": "@IY2", "62": "@JH", "63": "@K", "64": "@L", "65": "@M", "66": "@N", "67": "@NG", "68": "@OW", "69": "@OW0", "70": "@OW1", "71": "@OW2", "72": "@OY", "73": "@OY0", "74": "@OY1", "75": "@OY2", "76": "@P", "77": "@R", "78": "@S", "79": "@SH", "80": "@T", "81": "@TH", "82": "@UH", "83": "@UH0", "84": "@UH1", "85": "@UH2", "86": "@UW", "87": "@UW0", "88": "@UW1", "89": "@UW2", "90": "@V", "91": "@W", "92": "@Y", "93": "@Z", "94": "@ZH", "95": "eos"}, "speakers_map": {"ljspeech": 0}, "processor_name": "LJSpeechUltimateProcessor"} \ No newline at end of file diff --git a/TensorFlowTTS/tensorflow_tts/processor/pretrained/synpaflex_mapper.json b/TensorFlowTTS/tensorflow_tts/processor/pretrained/synpaflex_mapper.json new file mode 100644 index 0000000000000000000000000000000000000000..48a6850558442a420a826f07835c3fe51c1a15e6 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/pretrained/synpaflex_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"pad": 0, "!": 1, "/": 2, "'": 3, "(": 4, ")": 5, ",": 6, "-": 7, ".": 8, ":": 9, ";": 10, "?": 11, " ": 12, "A": 13, "B": 14, "C": 15, "D": 16, "E": 17, "F": 18, "G": 19, "H": 20, "I": 21, "J": 22, "K": 23, "L": 24, "M": 25, "N": 26, "O": 27, "P": 28, "Q": 29, "R": 30, "S": 31, "T": 32, "U": 33, "V": 34, "W": 35, "X": 36, "Y": 37, "Z": 38, "a": 39, "b": 40, "c": 41, "d": 42, "e": 43, "f": 44, "g": 45, "h": 46, "i": 47, "j": 48, "k": 49, "l": 50, "m": 51, "n": 52, "o": 53, "p": 54, "q": 55, "r": 56, "s": 57, "t": 58, "u": 59, "v": 60, "w": 61, "x": 62, "y": 63, "z": 64, "\u00e9": 65, "\u00e8": 66, "\u00e0": 67, "\u00f9": 68, "\u00e2": 69, "\u00ea": 70, "\u00ee": 71, "\u00f4": 72, "\u00fb": 73, "\u00e7": 74, "\u00e4": 75, "\u00eb": 76, "\u00ef": 77, "\u00f6": 78, "\u00fc": 79, "\u00ff": 80, "\u0153": 81, "\u00e6": 82, "eos": 83}, "id_to_symbol": {"0": "pad", "1": "!", "2": "/", "3": "'", "4": "(", "5": ")", "6": ",", "7": "-", "8": ".", "9": ":", 
"10": ";", "11": "?", "12": " ", "13": "A", "14": "B", "15": "C", "16": "D", "17": "E", "18": "F", "19": "G", "20": "H", "21": "I", "22": "J", "23": "K", "24": "L", "25": "M", "26": "N", "27": "O", "28": "P", "29": "Q", "30": "R", "31": "S", "32": "T", "33": "U", "34": "V", "35": "W", "36": "X", "37": "Y", "38": "Z", "39": "a", "40": "b", "41": "c", "42": "d", "43": "e", "44": "f", "45": "g", "46": "h", "47": "i", "48": "j", "49": "k", "50": "l", "51": "m", "52": "n", "53": "o", "54": "p", "55": "q", "56": "r", "57": "s", "58": "t", "59": "u", "60": "v", "61": "w", "62": "x", "63": "y", "64": "z", "65": "\u00e9", "66": "\u00e8", "67": "\u00e0", "68": "\u00f9", "69": "\u00e2", "70": "\u00ea", "71": "\u00ee", "72": "\u00f4", "73": "\u00fb", "74": "\u00e7", "75": "\u00e4", "76": "\u00eb", "77": "\u00ef", "78": "\u00f6", "79": "\u00fc", "80": "\u00ff", "81": "\u0153", "82": "\u00e6", "83": "eos"}, "speakers_map": {"synpaflex": 0}, "processor_name": "SynpaflexProcessor"} diff --git a/TensorFlowTTS/tensorflow_tts/processor/pretrained/thorsten_mapper.json b/TensorFlowTTS/tensorflow_tts/processor/pretrained/thorsten_mapper.json new file mode 100644 index 0000000000000000000000000000000000000000..58deeaabb152027c58509a65d80bf2099c94ed6f --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/pretrained/thorsten_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"pad": 0, "-": 1, "!": 2, "'": 3, "(": 4, ")": 5, ",": 6, ".": 7, "?": 8, " ": 9, "A": 10, "B": 11, "C": 12, "D": 13, "E": 14, "F": 15, "G": 16, "H": 17, "I": 18, "J": 19, "K": 20, "L": 21, "M": 22, "N": 23, "O": 24, "P": 25, "Q": 26, "R": 27, "S": 28, "T": 29, "U": 30, "V": 31, "W": 32, "X": 33, "Y": 34, "Z": 35, "a": 36, "b": 37, "c": 38, "d": 39, "e": 40, "f": 41, "g": 42, "h": 43, "i": 44, "j": 45, "k": 46, "l": 47, "m": 48, "n": 49, "o": 50, "p": 51, "q": 52, "r": 53, "s": 54, "t": 55, "u": 56, "v": 57, "w": 58, "x": 59, "y": 60, "z": 61, "eos": 62}, "id_to_symbol": {"0": "pad", "1": "-", "2": "!", "3": "'", "4": 
"(", "5": ")", "6": ",", "7": ".", "8": "?", "9": " ", "10": "A", "11": "B", "12": "C", "13": "D", "14": "E", "15": "F", "16": "G", "17": "H", "18": "I", "19": "J", "20": "K", "21": "L", "22": "M", "23": "N", "24": "O", "25": "P", "26": "Q", "27": "R", "28": "S", "29": "T", "30": "U", "31": "V", "32": "W", "33": "X", "34": "Y", "35": "Z", "36": "a", "37": "b", "38": "c", "39": "d", "40": "e", "41": "f", "42": "g", "43": "h", "44": "i", "45": "j", "46": "k", "47": "l", "48": "m", "49": "n", "50": "o", "51": "p", "52": "q", "53": "r", "54": "s", "55": "t", "56": "u", "57": "v", "58": "w", "59": "x", "60": "y", "61": "z", "62": "eos"}, "speakers_map": {"thorsten": 0}, "processor_name": "ThorstenProcessor"} \ No newline at end of file diff --git a/TensorFlowTTS/tensorflow_tts/processor/synpaflex.py b/TensorFlowTTS/tensorflow_tts/processor/synpaflex.py new file mode 100644 index 0000000000000000000000000000000000000000..61e9780ced17bb69ce8e71b8b910b751e9a2903e --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/synpaflex.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Perform preprocessing and raw feature extraction for SynPaFlex dataset.""" + +import os +import re + +import numpy as np +import soundfile as sf +from dataclasses import dataclass +from tensorflow_tts.processor import BaseProcessor +from tensorflow_tts.utils import cleaners + +_pad = "pad" +_eos = "eos" +_punctuation = "!/\'(),-.:;? " +_letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzéèàùâêîôûçäëïöüÿœæ" + +# Export all symbols: +SYNPAFLEX_SYMBOLS = ( + [_pad] + list(_punctuation) + list(_letters) + [_eos] +) + +# Regular expression matching text enclosed in curly braces: +_curly_re = re.compile(r"(.*?)\{(.+?)\}(.*)") + + +@dataclass +class SynpaflexProcessor(BaseProcessor): + """SynPaFlex processor.""" + + cleaner_names: str = "basic_cleaners" + positions = { + "wave_file": 0, + "text": 1, + "text_norm": 2 + } + train_f_name: str = "synpaflex.txt" + + def create_items(self): + if self.data_dir: + with open( + os.path.join(self.data_dir, self.train_f_name), encoding="utf-8" + ) as f: + self.items = [self.split_line(self.data_dir, line, "|") for line in f] + + def split_line(self, data_dir, line, split): + parts = line.strip().split(split) + wave_file = parts[self.positions["wave_file"]] + text = parts[self.positions["text"]] + wav_path = os.path.join(data_dir, "wavs", f"{wave_file}.wav") + speaker_name = "synpaflex" + return text, wav_path, speaker_name + + def setup_eos_token(self): + return _eos + + def get_one_sample(self, item): + text, wav_path, speaker_name = item + + # normalize audio signal to be [-1, 1], soundfile already norm. 
+ audio, rate = sf.read(wav_path) + audio = audio.astype(np.float32) + + # convert text to ids + text_ids = np.asarray(self.text_to_sequence(text), np.int32) + + sample = { + "raw_text": text, + "text_ids": text_ids, + "audio": audio, + "utt_id": os.path.split(wav_path)[-1].split(".")[0], + "speaker_name": speaker_name, + "rate": rate, + } + + return sample + + def text_to_sequence(self, text): + sequence = [] + # Check for curly braces and treat their contents as ARPAbet: + while len(text): + m = _curly_re.match(text) + if not m: + sequence += self._symbols_to_sequence( + self._clean_text(text, [self.cleaner_names]) + ) + break + sequence += self._symbols_to_sequence( + self._clean_text(m.group(1), [self.cleaner_names]) + ) + sequence += self._arpabet_to_sequence(m.group(2)) + text = m.group(3) + + # add eos tokens + sequence += [self.eos_id] + return sequence + + def _clean_text(self, text, cleaner_names): + for name in cleaner_names: + cleaner = getattr(cleaners, name) + if not cleaner: + raise Exception("Unknown cleaner: %s" % name) + text = cleaner(text) + return text + + def _symbols_to_sequence(self, symbols): + return [self.symbol_to_id[s] for s in symbols if self._should_keep_symbol(s)] + + def _sequence_to_symbols(self, sequence): + return [self.id_to_symbol[s] for s in sequence] + + def _arpabet_to_sequence(self, text): + return self._symbols_to_sequence(["@" + s for s in text.split()]) + + def _should_keep_symbol(self, s): + return s in self.symbol_to_id and s != "_" and s != "~" + + def save_pretrained(self, saved_path): + os.makedirs(saved_path, exist_ok=True) + self._save_mapper(os.path.join(saved_path, PROCESSOR_FILE_NAME), {}) diff --git a/TensorFlowTTS/tensorflow_tts/processor/thorsten.py b/TensorFlowTTS/tensorflow_tts/processor/thorsten.py new file mode 100644 index 0000000000000000000000000000000000000000..437d6bbd80480a1610f9230fe7757509de34dbb5 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/processor/thorsten.py @@ -0,0 +1,131 @@ +# -*- 
# -*- coding: utf-8 -*-
# Copyright 2020 TensorFlowTTS Team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Perform preprocessing and raw feature extraction for the Thorsten (German) dataset."""

import os
import re

import numpy as np
import soundfile as sf
from dataclasses import dataclass
from tensorflow_tts.processor import BaseProcessor
from tensorflow_tts.utils import cleaners
from tensorflow_tts.utils.utils import PROCESSOR_FILE_NAME

_pad = "pad"
_eos = "eos"
_punctuation = "!'(),.? "
_special = "-"
_letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"

# Export all symbols; list order fixes the symbol-id assignment.
THORSTEN_SYMBOLS = (
    [_pad] + list(_special) + list(_punctuation) + list(_letters) + [_eos]
)

# Regular expression matching text enclosed in curly braces:
_curly_re = re.compile(r"(.*?)\{(.+?)\}(.*)")


@dataclass
class ThorstenProcessor(BaseProcessor):
    """Processor for the single-speaker Thorsten German dataset."""

    cleaner_names: str = "german_cleaners"
    positions = {
        "wave_file": 0,
        "text_norm": 1,
    }
    train_f_name: str = "metadata.csv"

    def create_items(self):
        """Read metadata.csv and build the list of (text, wav_path, speaker) items."""
        if not self.data_dir:
            return
        metadata_path = os.path.join(self.data_dir, self.train_f_name)
        with open(metadata_path, encoding="utf-8") as meta_file:
            self.items = [
                self.split_line(self.data_dir, row, "|") for row in meta_file
            ]

    def split_line(self, data_dir, line, split):
        """Split one metadata row into (normalized_text, wav_path, speaker_name)."""
        fields = line.strip().split(split)
        wave_file = fields[self.positions["wave_file"]]
        normalized_text = fields[self.positions["text_norm"]]
        wav_path = os.path.join(data_dir, "wavs", f"{wave_file}.wav")
        speaker_name = "thorsten"
        return normalized_text, wav_path, speaker_name

    def setup_eos_token(self):
        """Return the end-of-sequence token string."""
        return _eos

    def save_pretrained(self, saved_path):
        """Write this processor's mapper file into saved_path (created if needed)."""
        os.makedirs(saved_path, exist_ok=True)
        self._save_mapper(os.path.join(saved_path, PROCESSOR_FILE_NAME), {})

    def get_one_sample(self, item):
        """Load one utterance: float32 audio plus encoded text ids.

        soundfile already returns audio scaled to [-1, 1], so no extra
        normalization is applied here.
        """
        text, wav_path, speaker_name = item

        audio, rate = sf.read(wav_path)
        audio = audio.astype(np.float32)

        # Encode the normalized transcript as symbol ids.
        text_ids = np.asarray(self.text_to_sequence(text), np.int32)

        utt_id = os.path.split(wav_path)[-1].split(".")[0]
        return {
            "raw_text": text,
            "text_ids": text_ids,
            "audio": audio,
            "utt_id": utt_id,
            "speaker_name": speaker_name,
            "rate": rate,
        }

    def text_to_sequence(self, text):
        """Encode text to symbol ids; {...} spans are read as ARPAbet phonemes."""
        sequence = []
        remaining = text
        while remaining:
            match = _curly_re.match(remaining)
            if match is None:
                # No braces left: clean and encode everything that remains.
                cleaned = self._clean_text(remaining, [self.cleaner_names])
                sequence += self._symbols_to_sequence(cleaned)
                break
            before, arpabet, remaining = match.groups()
            sequence += self._symbols_to_sequence(
                self._clean_text(before, [self.cleaner_names])
            )
            sequence += self._arpabet_to_sequence(arpabet)

        # Terminate every sequence with the eos id.
        sequence += [self.eos_id]
        return sequence

    def _clean_text(self, text, cleaner_names):
        """Apply each named cleaner from tensorflow_tts.utils.cleaners in order."""
        for cleaner_name in cleaner_names:
            cleaner_fn = getattr(cleaners, cleaner_name)
            if not cleaner_fn:
                raise Exception("Unknown cleaner: %s" % cleaner_name)
            text = cleaner_fn(text)
        return text

    def _symbols_to_sequence(self, symbols):
        """Map symbols to ids, dropping any symbol that should not be kept."""
        return [
            self.symbol_to_id[symbol]
            for symbol in symbols
            if self._should_keep_symbol(symbol)
        ]

    def _arpabet_to_sequence(self, text):
        """Encode whitespace-separated ARPAbet codes via their '@'-prefixed symbols."""
        prefixed = ["@" + code for code in text.split()]
        return self._symbols_to_sequence(prefixed)

    def _should_keep_symbol(self, s):
        """Keep only known symbols; the literals '_' and '~' are always excluded."""
        return s in self.symbol_to_id and s not in ("_", "~")
class BasedTrainer(metaclass=abc.ABCMeta):
    """Abstract training-loop skeleton shared by all trainer implementations.

    Owns the global step/epoch counters, the tensorboard summary writer and
    the per-loss Mean metrics; subclasses provide the model(s), optimizer(s)
    and the per-step training/evaluation logic.
    """

    def __init__(self, steps, epochs, config):
        """Initialize counters, config and the tensorboard summary writer.

        Args:
            steps (int): Initial global steps.
            epochs (int): Initial global epochs.
            config (dict): Config dict loaded from yaml configuration file;
                must contain "outdir" for the summary writer.
        """
        self.steps = steps
        self.epochs = epochs
        self.config = config
        self.finish_train = False
        self.writer = tf.summary.create_file_writer(config["outdir"])
        self.train_data_loader = None
        self.eval_data_loader = None
        self.train_metrics = None
        self.eval_metrics = None
        self.list_metrics_name = None

    def init_train_eval_metrics(self, list_metrics_name):
        """Create one Mean metric per loss name, for train and eval each."""
        self.train_metrics = {
            name: tf.keras.metrics.Mean(name="train_" + name, dtype=tf.float32)
            for name in list_metrics_name
        }
        self.eval_metrics = {
            name: tf.keras.metrics.Mean(name="eval_" + name, dtype=tf.float32)
            for name in list_metrics_name
        }

    def reset_states_train(self):
        """Reset train metrics after they have been written to tensorboard."""
        for metric in self.train_metrics.values():
            metric.reset_states()

    def reset_states_eval(self):
        """Reset eval metrics after they have been written to tensorboard."""
        for metric in self.eval_metrics.values():
            metric.reset_states()

    def update_train_metrics(self, dict_metrics_losses):
        """Fold one step's loss values into the running train metrics."""
        for name, value in dict_metrics_losses.items():
            self.train_metrics[name].update_state(value)

    def update_eval_metrics(self, dict_metrics_losses):
        """Fold one step's loss values into the running eval metrics."""
        for name, value in dict_metrics_losses.items():
            self.eval_metrics[name].update_state(value)

    def set_train_data_loader(self, train_dataset):
        """Set train data loader (MUST be called before run())."""
        self.train_data_loader = train_dataset

    def get_train_data_loader(self):
        """Return the train data loader."""
        return self.train_data_loader

    def set_eval_data_loader(self, eval_dataset):
        """Set eval data loader (MUST be called before run())."""
        self.eval_data_loader = eval_dataset

    def get_eval_data_loader(self):
        """Return the eval data loader."""
        return self.eval_data_loader

    @abc.abstractmethod
    def compile(self):
        """Wire models and optimizers together (subclass responsibility)."""
        pass

    @abc.abstractmethod
    def create_checkpoint_manager(self, saved_path=None, max_to_keep=10):
        """Create checkpoint management."""
        pass

    def run(self):
        """Run epochs until a subclass flags finish_train."""
        self.tqdm = tqdm(
            initial=self.steps, total=self.config["train_max_steps"], desc="[train]"
        )
        while True:
            self._train_epoch()
            if self.finish_train:
                break
        self.tqdm.close()
        logging.info("Finish training.")

    @abc.abstractmethod
    def save_checkpoint(self):
        """Save checkpoint."""
        pass

    @abc.abstractmethod
    def load_checkpoint(self, pretrained_path):
        """Load checkpoint."""
        pass

    def _train_epoch(self):
        """Run one pass over the training data loader."""
        for train_steps_per_epoch, batch in enumerate(self.train_data_loader, 1):
            # one step of training
            self._train_step(batch)

            # periodic bookkeeping: logging, evaluation, checkpointing
            self._check_log_interval()
            self._check_eval_interval()
            self._check_save_interval()

            # stop mid-epoch once the step budget is exhausted
            if self.finish_train:
                return

        # epoch-level bookkeeping after a full pass over the dataset
        self.epochs += 1
        self.train_steps_per_epoch = train_steps_per_epoch
        logging.info(
            f"(Steps: {self.steps}) Finished {self.epochs} epoch training "
            f"({self.train_steps_per_epoch} steps per epoch)."
        )

    @abc.abstractmethod
    def _eval_epoch(self):
        """One epoch evaluation."""
        pass

    @abc.abstractmethod
    def _train_step(self, batch):
        """One step training."""
        pass

    @abc.abstractmethod
    def _check_log_interval(self):
        """Save log interval."""
        pass

    @abc.abstractmethod
    def fit(self):
        """High-level entry point combining data loaders, resume and run()."""
        pass

    def _check_eval_interval(self):
        """Run an eval epoch every eval_interval_steps steps."""
        if self.steps % self.config["eval_interval_steps"] == 0:
            self._eval_epoch()

    def _check_save_interval(self):
        """Save a checkpoint every save_interval_steps steps."""
        if self.steps % self.config["save_interval_steps"] == 0:
            self.save_checkpoint()
            logging.info(f"Successfully saved checkpoint @ {self.steps} steps.")

    def generate_and_save_intermediate_result(self, batch):
        """Generate and save intermediate result (no-op by default)."""
        pass

    def _write_to_tensorboard(self, list_metrics, stage="train"):
        """Write each metric's current value as a scalar summary under stage/."""
        with self.writer.as_default():
            for key, value in list_metrics.items():
                tf.summary.scalar(stage + "/" + key, value.result(), step=self.steps)
            self.writer.flush()
class GanBasedTrainer(BasedTrainer):
    """Customized trainer module for GAN TTS training (MelGAN, GAN-TTS, ParallelWaveGAN)."""

    def __init__(
        self,
        steps,
        epochs,
        config,
        strategy,
        is_generator_mixed_precision=False,
        is_discriminator_mixed_precision=False,
    ):
        """Initialize trainer.

        Args:
            steps (int): Initial global steps.
            epochs (int): Initial global epochs.
            config (dict): Config dict loaded from yaml format configuration file.
            strategy: tf.distribute strategy used to run/reduce replica work.
            is_generator_mixed_precision (bool): wrap the generator optimizer
                in a dynamic loss-scale optimizer.
            is_discriminator_mixed_precision (bool): same for the discriminator.
        """
        super().__init__(steps, epochs, config)
        self._is_generator_mixed_precision = is_generator_mixed_precision
        self._is_discriminator_mixed_precision = is_discriminator_mixed_precision
        self._strategy = strategy
        self._already_apply_input_signature = False
        # Accumulators are only consumed when gradient_accumulation_steps > 1.
        self._generator_gradient_accumulator = GradientAccumulator()
        self._discriminator_gradient_accumulator = GradientAccumulator()
        self._generator_gradient_accumulator.reset()
        self._discriminator_gradient_accumulator.reset()

    def init_train_eval_metrics(self, list_metrics_name):
        """Create metrics inside the strategy scope so they are replica-aware."""
        with self._strategy.scope():
            super().init_train_eval_metrics(list_metrics_name)

    def get_n_gpus(self):
        """Return the number of replicas in sync (GPUs)."""
        return self._strategy.num_replicas_in_sync

    def _get_train_element_signature(self):
        return self.train_data_loader.element_spec

    def _get_eval_element_signature(self):
        return self.eval_data_loader.element_spec

    def set_gen_model(self, generator_model):
        """Set generator class model (MUST)."""
        self._generator = generator_model

    def get_gen_model(self):
        """Get generator model."""
        return self._generator

    def set_dis_model(self, discriminator_model):
        """Set discriminator class model (MUST)."""
        self._discriminator = discriminator_model

    def get_dis_model(self):
        """Get discriminator model."""
        return self._discriminator

    def set_gen_optimizer(self, generator_optimizer):
        """Set generator optimizer (MUST), wrapping for mixed precision if enabled."""
        self._gen_optimizer = generator_optimizer
        if self._is_generator_mixed_precision:
            self._gen_optimizer = tf.keras.mixed_precision.experimental.LossScaleOptimizer(
                self._gen_optimizer, "dynamic"
            )

    def get_gen_optimizer(self):
        """Get generator optimizer."""
        return self._gen_optimizer

    def set_dis_optimizer(self, discriminator_optimizer):
        """Set discriminator optimizer (MUST), wrapping for mixed precision if enabled."""
        self._dis_optimizer = discriminator_optimizer
        if self._is_discriminator_mixed_precision:
            self._dis_optimizer = tf.keras.mixed_precision.experimental.LossScaleOptimizer(
                self._dis_optimizer, "dynamic"
            )

    def get_dis_optimizer(self):
        """Get discriminator optimizer."""
        return self._dis_optimizer

    def compile(self, gen_model, dis_model, gen_optimizer, dis_optimizer):
        """Attach generator/discriminator models and their optimizers."""
        self.set_gen_model(gen_model)
        self.set_dis_model(dis_model)
        self.set_gen_optimizer(gen_optimizer)
        self.set_dis_optimizer(dis_optimizer)

    def _train_step(self, batch):
        """Run one distributed training step, tracing tf.functions on first use."""
        if self._already_apply_input_signature is False:
            # Trace forward/evaluate/predict once, with fixed input signatures,
            # so later calls reuse the same concrete functions.
            train_element_signature = self._get_train_element_signature()
            eval_element_signature = self._get_eval_element_signature()
            self.one_step_forward = tf.function(
                self._one_step_forward, input_signature=[train_element_signature]
            )
            self.one_step_evaluate = tf.function(
                self._one_step_evaluate, input_signature=[eval_element_signature]
            )
            self.one_step_predict = tf.function(
                self._one_step_predict, input_signature=[eval_element_signature]
            )
            self._already_apply_input_signature = True

        # run one_step_forward
        self.one_step_forward(batch)

        # update counts
        self.steps += 1
        self.tqdm.update(1)
        self._check_train_finish()

    def _one_step_forward(self, batch):
        """Run the per-replica step on every replica and sum the losses."""
        per_replica_losses = self._strategy.run(
            self._one_step_forward_per_replica, args=(batch,)
        )
        return self._strategy.reduce(
            tf.distribute.ReduceOp.SUM, per_replica_losses, axis=None
        )

    @abc.abstractmethod
    def compute_per_example_generator_losses(self, batch, outputs):
        """Compute per example generator losses and return dict_metrics_losses
        Note that all element of the loss MUST has a shape [batch_size] and
        the keys of dict_metrics_losses MUST be in self.list_metrics_name.

        Args:
            batch: dictionary batch input return from dataloader
            outputs: outputs of the model

        Returns:
            per_example_losses: per example losses for each GPU, shape [B]
            dict_metrics_losses: dictionary loss.
        """
        per_example_losses = 0.0
        dict_metrics_losses = {}
        return per_example_losses, dict_metrics_losses

    @abc.abstractmethod
    def compute_per_example_discriminator_losses(self, batch, gen_outputs):
        """Compute per example discriminator losses and return dict_metrics_losses
        Note that all element of the loss MUST has a shape [batch_size] and
        the keys of dict_metrics_losses MUST be in self.list_metrics_name.

        Args:
            batch: dictionary batch input return from dataloader
            gen_outputs: outputs of the generator

        Returns:
            per_example_losses: per example losses for each GPU, shape [B]
            dict_metrics_losses: dictionary loss.
        """
        per_example_losses = 0.0
        dict_metrics_losses = {}
        return per_example_losses, dict_metrics_losses

    def _calculate_generator_gradient_per_batch(self, batch):
        """Forward + backward for the generator on one (sub-)batch.

        Returns (gradients, loss) when gradient_accumulation_steps == 1,
        otherwise only the loss (gradients are held in the accumulator).
        """
        outputs = self._generator(**batch, training=True)
        (
            per_example_losses,
            dict_metrics_losses,
        ) = self.compute_per_example_generator_losses(batch, outputs)
        # Average over the *global* batch: local batch x replicas x accumulation.
        per_replica_gen_losses = tf.nn.compute_average_loss(
            per_example_losses,
            global_batch_size=self.config["batch_size"]
            * self.get_n_gpus()
            * self.config["gradient_accumulation_steps"],
        )

        if self._is_generator_mixed_precision:
            # Scale the loss, differentiate, then unscale the gradients.
            scaled_loss = self._gen_optimizer.get_scaled_loss(per_replica_gen_losses)
            scaled_gradients = tf.gradients(
                scaled_loss, self._generator.trainable_variables
            )
            gradients = self._gen_optimizer.get_unscaled_gradients(scaled_gradients)
        else:
            gradients = tf.gradients(
                per_replica_gen_losses, self._generator.trainable_variables
            )

        # gradient accumulate for generator here
        if self.config["gradient_accumulation_steps"] > 1:
            self._generator_gradient_accumulator(gradients)

        # accumulate loss into metrics
        self.update_train_metrics(dict_metrics_losses)

        if self.config["gradient_accumulation_steps"] == 1:
            return gradients, per_replica_gen_losses
        else:
            return per_replica_gen_losses

    def _calculate_discriminator_gradient_per_batch(self, batch):
        """Forward + backward for the discriminator on one (sub-)batch.

        Same return convention as _calculate_generator_gradient_per_batch.
        """
        (
            per_example_losses,
            dict_metrics_losses,
        ) = self.compute_per_example_discriminator_losses(
            batch, self._generator(**batch, training=True)
        )

        per_replica_dis_losses = tf.nn.compute_average_loss(
            per_example_losses,
            global_batch_size=self.config["batch_size"]
            * self.get_n_gpus()
            * self.config["gradient_accumulation_steps"],
        )

        if self._is_discriminator_mixed_precision:
            scaled_loss = self._dis_optimizer.get_scaled_loss(per_replica_dis_losses)
            scaled_gradients = tf.gradients(
                scaled_loss, self._discriminator.trainable_variables
            )
            gradients = self._dis_optimizer.get_unscaled_gradients(scaled_gradients)
        else:
            gradients = tf.gradients(
                per_replica_dis_losses, self._discriminator.trainable_variables
            )

        # accumulate loss into metrics
        self.update_train_metrics(dict_metrics_losses)

        # gradient accumulate for discriminator here
        if self.config["gradient_accumulation_steps"] > 1:
            self._discriminator_gradient_accumulator(gradients)

        if self.config["gradient_accumulation_steps"] == 1:
            return gradients, per_replica_dis_losses
        else:
            return per_replica_dis_losses

    def _one_step_forward_per_replica(self, batch):
        """One generator step (and, once started, one discriminator step) per replica."""
        per_replica_gen_losses = 0.0
        per_replica_dis_losses = 0.0

        if self.config["gradient_accumulation_steps"] == 1:
            (
                gradients,
                per_replica_gen_losses,
            ) = self._calculate_generator_gradient_per_batch(batch)
            self._gen_optimizer.apply_gradients(
                zip(gradients, self._generator.trainable_variables)
            )
        else:
            # Gradient accumulation: slice the macro-batch into sub-batches,
            # accumulate gradients, then apply them once.
            for i in tf.range(self.config["gradient_accumulation_steps"]):
                reduced_batch = {
                    k: v[
                        i
                        * self.config["batch_size"] : (i + 1)
                        * self.config["batch_size"]
                    ]
                    for k, v in batch.items()
                }

                # run 1 step accumulate
                reduced_batch_losses = self._calculate_generator_gradient_per_batch(
                    reduced_batch
                )

                # sum per_replica_losses
                per_replica_gen_losses += reduced_batch_losses

            gradients = self._generator_gradient_accumulator.gradients
            self._gen_optimizer.apply_gradients(
                zip(gradients, self._generator.trainable_variables)
            )
            self._generator_gradient_accumulator.reset()

        # one step discriminator
        # recompute y_hat after 1 step generator for discriminator training.
        if self.steps >= self.config["discriminator_train_start_steps"]:
            if self.config["gradient_accumulation_steps"] == 1:
                (
                    gradients,
                    per_replica_dis_losses,
                ) = self._calculate_discriminator_gradient_per_batch(batch)
                self._dis_optimizer.apply_gradients(
                    zip(gradients, self._discriminator.trainable_variables)
                )
            else:
                # Same accumulation scheme as the generator above.
                for i in tf.range(self.config["gradient_accumulation_steps"]):
                    reduced_batch = {
                        k: v[
                            i
                            * self.config["batch_size"] : (i + 1)
                            * self.config["batch_size"]
                        ]
                        for k, v in batch.items()
                    }

                    # run 1 step accumulate
                    reduced_batch_losses = (
                        self._calculate_discriminator_gradient_per_batch(reduced_batch)
                    )

                    # sum per_replica_losses
                    per_replica_dis_losses += reduced_batch_losses

                gradients = self._discriminator_gradient_accumulator.gradients
                self._dis_optimizer.apply_gradients(
                    zip(gradients, self._discriminator.trainable_variables)
                )
                self._discriminator_gradient_accumulator.reset()

        return per_replica_gen_losses + per_replica_dis_losses

    def _eval_epoch(self):
        """Evaluate model one epoch."""
        logging.info(f"(Steps: {self.steps}) Start evaluation.")

        # calculate loss for each batch
        for eval_steps_per_epoch, batch in enumerate(
            tqdm(self.eval_data_loader, desc="[eval]"), 1
        ):
            # eval one step
            self.one_step_evaluate(batch)

            if eval_steps_per_epoch <= self.config["num_save_intermediate_results"]:
                # save intermediate results
                self.generate_and_save_intermediate_result(batch)

        logging.info(
            f"(Steps: {self.steps}) Finished evaluation "
            f"({eval_steps_per_epoch} steps per epoch)."
        )

        # average loss
        for key in self.eval_metrics.keys():
            logging.info(
                f"(Steps: {self.steps}) eval_{key} = {self.eval_metrics[key].result():.4f}."
            )

        # record
        self._write_to_tensorboard(self.eval_metrics, stage="eval")

        # reset
        self.reset_states_eval()

    def _one_step_evaluate_per_replica(self, batch):
        """Loss-only generator (and discriminator) pass for evaluation."""
        ################################################
        # one step generator.
        outputs = self._generator(**batch, training=False)
        _, dict_metrics_losses = self.compute_per_example_generator_losses(
            batch, outputs
        )

        # accumulate loss into metrics
        self.update_eval_metrics(dict_metrics_losses)

        ################################################
        # one step discriminator
        if self.steps >= self.config["discriminator_train_start_steps"]:
            _, dict_metrics_losses = self.compute_per_example_discriminator_losses(
                batch, outputs
            )

            # accumulate loss into metrics
            self.update_eval_metrics(dict_metrics_losses)

        ################################################

    def _one_step_evaluate(self, batch):
        self._strategy.run(self._one_step_evaluate_per_replica, args=(batch,))

    def _one_step_predict_per_replica(self, batch):
        outputs = self._generator(**batch, training=False)
        return outputs

    def _one_step_predict(self, batch):
        outputs = self._strategy.run(self._one_step_predict_per_replica, args=(batch,))
        return outputs

    @abc.abstractmethod
    def generate_and_save_intermediate_result(self, batch):
        return

    def create_checkpoint_manager(self, saved_path=None, max_to_keep=10):
        """Create checkpoint management."""
        if saved_path is None:
            saved_path = self.config["outdir"] + "/checkpoints/"

        os.makedirs(saved_path, exist_ok=True)

        self.saved_path = saved_path
        self.ckpt = tf.train.Checkpoint(
            steps=tf.Variable(1),
            epochs=tf.Variable(1),
            gen_optimizer=self.get_gen_optimizer(),
            dis_optimizer=self.get_dis_optimizer(),
        )
        self.ckp_manager = tf.train.CheckpointManager(
            self.ckpt, saved_path, max_to_keep=max_to_keep
        )

    def save_checkpoint(self):
        """Save checkpoint state plus generator/discriminator weights as .h5."""
        self.ckpt.steps.assign(self.steps)
        self.ckpt.epochs.assign(self.epochs)
        self.ckp_manager.save(checkpoint_number=self.steps)
        utils.save_weights(
            self._generator,
            self.saved_path + "generator-{}.h5".format(self.steps)
        )
        utils.save_weights(
            self._discriminator,
            self.saved_path + "discriminator-{}.h5".format(self.steps)
        )

    def load_checkpoint(self, pretrained_path):
        """Restore counters, optimizers and weights from a checkpoint."""
        self.ckpt.restore(pretrained_path)
        self.steps = self.ckpt.steps.numpy()
        self.epochs = self.ckpt.epochs.numpy()
        self._gen_optimizer = self.ckpt.gen_optimizer
        # re-assign iterations (global steps) for gen_optimizer.
        self._gen_optimizer.iterations.assign(tf.cast(self.steps, tf.int64))
        # re-assign iterations (global steps) for dis_optimizer: the
        # discriminator only starts after discriminator_train_start_steps.
        try:
            discriminator_train_start_steps = self.config[
                "discriminator_train_start_steps"
            ]
            discriminator_train_start_steps = tf.math.maximum(
                0, self.steps - discriminator_train_start_steps
            )
        except Exception:
            discriminator_train_start_steps = self.steps
        self._dis_optimizer = self.ckpt.dis_optimizer
        self._dis_optimizer.iterations.assign(
            tf.cast(discriminator_train_start_steps, tf.int64)
        )

        # load weights.
        utils.load_weights(
            self._generator,
            self.saved_path + "generator-{}.h5".format(self.steps)
        )
        utils.load_weights(
            self._discriminator,
            self.saved_path + "discriminator-{}.h5".format(self.steps)
        )

    def _check_train_finish(self):
        """Stop at train_max_steps, or pause exactly when the discriminator starts."""
        if self.steps >= self.config["train_max_steps"]:
            self.finish_train = True

        if (
            self.steps != 0
            and self.steps == self.config["discriminator_train_start_steps"]
        ):
            self.finish_train = True
            logging.info(
                f"Finished training only generator at {self.steps}steps, pls resume and continue training."
            )

    def _check_log_interval(self):
        """Log train metrics to console and tensorboard every log_interval_steps."""
        if self.steps % self.config["log_interval_steps"] == 0:
            for metric_name in self.list_metrics_name:
                logging.info(
                    f"(Step: {self.steps}) train_{metric_name} = {self.train_metrics[metric_name].result():.4f}."
                )
            self._write_to_tensorboard(self.train_metrics, stage="train")

            # reset
            self.reset_states_train()

    def fit(self, train_data_loader, valid_data_loader, saved_path, resume=None):
        """Wire data loaders, optionally resume from a checkpoint, then train.

        Args:
            train_data_loader: training tf.data dataset (pre-distribution).
            valid_data_loader: validation tf.data dataset (pre-distribution).
            saved_path: directory where checkpoints are written.
            resume: checkpoint path to restore, or None/"" to start fresh.
        """
        self.set_train_data_loader(train_data_loader)
        self.set_eval_data_loader(valid_data_loader)
        self.train_data_loader = self._strategy.experimental_distribute_dataset(
            self.train_data_loader
        )
        self.eval_data_loader = self._strategy.experimental_distribute_dataset(
            self.eval_data_loader
        )
        with self._strategy.scope():
            self.create_checkpoint_manager(saved_path=saved_path, max_to_keep=10000)
            # Bug fix: `resume` defaults to None and len(None) raises TypeError.
            # Guard against None while keeping ""/1-char strings as "no resume".
            if resume is not None and len(resume) > 1:
                self.load_checkpoint(resume)
                logging.info(f"Successfully resumed from {resume}.")
            self.run()
class Seq2SeqBasedTrainer(BasedTrainer, metaclass=abc.ABCMeta):
    """Customized trainer module for Seq2Seq TTS training (Tacotron, FastSpeech)."""

    def __init__(
        self, steps, epochs, config, strategy, is_mixed_precision=False,
    ):
        """Initialize trainer.

        Args:
            steps (int): Initial global steps.
            epochs (int): Initial global epochs.
            config (dict): Config dict loaded from yaml format configuration file.
            strategy (tf.distribute): Strategy for distributed training.
            is_mixed_precision (bool): Use mixed_precision training or not.
        """
        super().__init__(steps, epochs, config)
        self._is_mixed_precision = is_mixed_precision
        self._strategy = strategy
        self._model = None
        self._optimizer = None
        self._trainable_variables = None

        # check if we already apply input_signature for train_step.
        self._already_apply_input_signature = False

        # create gradient accumulator (used when gradient_accumulation_steps > 1)
        self._gradient_accumulator = GradientAccumulator()
        self._gradient_accumulator.reset()

    def init_train_eval_metrics(self, list_metrics_name):
        """Create metrics inside the strategy scope so they are replica-aware."""
        with self._strategy.scope():
            super().init_train_eval_metrics(list_metrics_name)

    def set_model(self, model):
        """Set generator class model (MUST)."""
        self._model = model

    def get_model(self):
        """Get generator model."""
        return self._model

    def set_optimizer(self, optimizer):
        """Set optimizer (MUST), wrapping for mixed precision if enabled."""
        self._optimizer = optimizer
        if self._is_mixed_precision:
            self._optimizer = tf.keras.mixed_precision.experimental.LossScaleOptimizer(
                self._optimizer, "dynamic"
            )

    def get_optimizer(self):
        """Get optimizer."""
        return self._optimizer

    def get_n_gpus(self):
        """Return the number of replicas in sync (GPUs)."""
        return self._strategy.num_replicas_in_sync

    def compile(self, model, optimizer):
        """Attach model and optimizer, then resolve the trainable variable set."""
        self.set_model(model)
        self.set_optimizer(optimizer)
        self._trainable_variables = self._train_vars()

    def _train_vars(self):
        """Return trainable variables, filtered by config["var_train_expr"] if set.

        var_train_expr is a "|"-separated list of substrings; a variable is
        trained when any substring occurs in its name.
        """
        if self.config["var_train_expr"]:
            list_train_var = self.config["var_train_expr"].split("|")
            return [
                v
                for v in self._model.trainable_variables
                if self._check_string_exist(list_train_var, v.name)
            ]
        return self._model.trainable_variables

    def _check_string_exist(self, list_string, inp_string):
        """Return True if any substring in list_string occurs in inp_string."""
        return any(string in inp_string for string in list_string)

    def _get_train_element_signature(self):
        return self.train_data_loader.element_spec

    def _get_eval_element_signature(self):
        return self.eval_data_loader.element_spec

    def _train_step(self, batch):
        """Run one distributed training step, tracing tf.functions on first use."""
        if self._already_apply_input_signature is False:
            # Trace forward/evaluate/predict once with fixed input signatures.
            train_element_signature = self._get_train_element_signature()
            eval_element_signature = self._get_eval_element_signature()
            self.one_step_forward = tf.function(
                self._one_step_forward, input_signature=[train_element_signature]
            )
            self.one_step_evaluate = tf.function(
                self._one_step_evaluate, input_signature=[eval_element_signature]
            )
            self.one_step_predict = tf.function(
                self._one_step_predict, input_signature=[eval_element_signature]
            )
            self._already_apply_input_signature = True

        # run one_step_forward
        self.one_step_forward(batch)

        # update counts
        self.steps += 1
        self.tqdm.update(1)
        self._check_train_finish()

    def _one_step_forward(self, batch):
        """Run the per-replica step on every replica and sum the losses."""
        per_replica_losses = self._strategy.run(
            self._one_step_forward_per_replica, args=(batch,)
        )
        return self._strategy.reduce(
            tf.distribute.ReduceOp.SUM, per_replica_losses, axis=None
        )

    def _calculate_gradient_per_batch(self, batch):
        """Forward + backward for one (sub-)batch.

        Returns (gradients, loss) when gradient_accumulation_steps == 1,
        otherwise only the loss (gradients are held in the accumulator).
        """
        outputs = self._model(**batch, training=True)
        per_example_losses, dict_metrics_losses = self.compute_per_example_losses(
            batch, outputs
        )
        # Average over the *global* batch: local batch x replicas x accumulation.
        per_replica_losses = tf.nn.compute_average_loss(
            per_example_losses,
            global_batch_size=self.config["batch_size"]
            * self.get_n_gpus()
            * self.config["gradient_accumulation_steps"],
        )

        if self._is_mixed_precision:
            # Scale the loss, differentiate, then unscale the gradients.
            scaled_loss = self._optimizer.get_scaled_loss(per_replica_losses)
            scaled_gradients = tf.gradients(scaled_loss, self._trainable_variables)
            gradients = self._optimizer.get_unscaled_gradients(scaled_gradients)
        else:
            gradients = tf.gradients(per_replica_losses, self._trainable_variables)

        # gradient accumulate here
        if self.config["gradient_accumulation_steps"] > 1:
            self._gradient_accumulator(gradients)

        # accumulate loss into metrics
        self.update_train_metrics(dict_metrics_losses)

        if self.config["gradient_accumulation_steps"] == 1:
            return gradients, per_replica_losses
        else:
            return per_replica_losses

    def _one_step_forward_per_replica(self, batch):
        """Apply one optimizer update per replica, with optional accumulation."""
        if self.config["gradient_accumulation_steps"] == 1:
            gradients, per_replica_losses = self._calculate_gradient_per_batch(batch)
            # NOTE(review): the trailing 1.0 positional argument lands in
            # apply_gradients' second parameter — confirm this is intended.
            self._optimizer.apply_gradients(
                zip(gradients, self._trainable_variables), 1.0
            )
        else:
            # Gradient accumulation: slice the macro-batch into sub-batches,
            # accumulate gradients, then apply them once.
            per_replica_losses = 0.0
            for i in tf.range(self.config["gradient_accumulation_steps"]):
                reduced_batch = {
                    k: v[
                        i
                        * self.config["batch_size"] : (i + 1)
                        * self.config["batch_size"]
                    ]
                    for k, v in batch.items()
                }

                # run 1 step accumulate
                reduced_batch_losses = self._calculate_gradient_per_batch(reduced_batch)

                # sum per_replica_losses
                per_replica_losses += reduced_batch_losses

            gradients = self._gradient_accumulator.gradients
            self._optimizer.apply_gradients(
                zip(gradients, self._trainable_variables), 1.0
            )
            self._gradient_accumulator.reset()

        return per_replica_losses

    @abc.abstractmethod
    def compute_per_example_losses(self, batch, outputs):
        """Compute per example losses and return dict_metrics_losses
        Note that all element of the loss MUST has a shape [batch_size] and
        the keys of dict_metrics_losses MUST be in self.list_metrics_name.

        Args:
            batch: dictionary batch input return from dataloader
            outputs: outputs of the model

        Returns:
            per_example_losses: per example losses for each GPU, shape [B]
            dict_metrics_losses: dictionary loss.
        """
        per_example_losses = 0.0
        dict_metrics_losses = {}
        return per_example_losses, dict_metrics_losses

    def _eval_epoch(self):
        """Evaluate model one epoch."""
        logging.info(f"(Steps: {self.steps}) Start evaluation.")

        # calculate loss for each batch
        for eval_steps_per_epoch, batch in enumerate(
            tqdm(self.eval_data_loader, desc="[eval]"), 1
        ):
            # eval one step
            self.one_step_evaluate(batch)

            if eval_steps_per_epoch <= self.config["num_save_intermediate_results"]:
                # save intermediate results
                self.generate_and_save_intermediate_result(batch)

        logging.info(
            f"(Steps: {self.steps}) Finished evaluation "
            f"({eval_steps_per_epoch} steps per epoch)."
        )

        # average loss
        for key in self.eval_metrics.keys():
            logging.info(
                f"(Steps: {self.steps}) eval_{key} = {self.eval_metrics[key].result():.4f}."
            )

        # record
        self._write_to_tensorboard(self.eval_metrics, stage="eval")

        # reset
        self.reset_states_eval()

    def _one_step_evaluate_per_replica(self, batch):
        """Loss-only forward pass for evaluation."""
        outputs = self._model(**batch, training=False)
        _, dict_metrics_losses = self.compute_per_example_losses(batch, outputs)

        self.update_eval_metrics(dict_metrics_losses)

    def _one_step_evaluate(self, batch):
        self._strategy.run(self._one_step_evaluate_per_replica, args=(batch,))

    def _one_step_predict_per_replica(self, batch):
        outputs = self._model(**batch, training=False)
        return outputs

    def _one_step_predict(self, batch):
        outputs = self._strategy.run(self._one_step_predict_per_replica, args=(batch,))
        return outputs

    @abc.abstractmethod
    def generate_and_save_intermediate_result(self, batch):
        return

    def create_checkpoint_manager(self, saved_path=None, max_to_keep=10):
        """Create checkpoint management."""
        if saved_path is None:
            saved_path = self.config["outdir"] + "/checkpoints/"

        os.makedirs(saved_path, exist_ok=True)

        self.saved_path = saved_path
        self.ckpt = tf.train.Checkpoint(
            steps=tf.Variable(1), epochs=tf.Variable(1), optimizer=self.get_optimizer()
        )
        self.ckp_manager = tf.train.CheckpointManager(
            self.ckpt, saved_path, max_to_keep=max_to_keep
        )

    def save_checkpoint(self):
        """Save checkpoint state plus model weights as .h5."""
        self.ckpt.steps.assign(self.steps)
        self.ckpt.epochs.assign(self.epochs)
        self.ckp_manager.save(checkpoint_number=self.steps)
        utils.save_weights(
            self._model,
            self.saved_path + "model-{}.h5".format(self.steps)
        )

    def load_checkpoint(self, pretrained_path):
        """Restore counters, optimizer and model weights from a checkpoint."""
        self.ckpt.restore(pretrained_path)
        self.steps = self.ckpt.steps.numpy()
        self.epochs = self.ckpt.epochs.numpy()
        self._optimizer = self.ckpt.optimizer
        # re-assign iterations (global steps) for optimizer.
        self._optimizer.iterations.assign(tf.cast(self.steps, tf.int64))

        # load weights.
        utils.load_weights(
            self._model,
            self.saved_path + "model-{}.h5".format(self.steps)
        )

    def _check_train_finish(self):
        """Stop once train_max_steps is reached."""
        if self.steps >= self.config["train_max_steps"]:
            self.finish_train = True

    def _check_log_interval(self):
        """Log train metrics to console and tensorboard every log_interval_steps."""
        if self.steps % self.config["log_interval_steps"] == 0:
            for metric_name in self.list_metrics_name:
                logging.info(
                    f"(Step: {self.steps}) train_{metric_name} = {self.train_metrics[metric_name].result():.4f}."
                )
            self._write_to_tensorboard(self.train_metrics, stage="train")

            # reset
            self.reset_states_train()

    def fit(self, train_data_loader, valid_data_loader, saved_path, resume=None):
        """Wire data loaders, optionally resume from a checkpoint, then train.

        Args:
            train_data_loader: training tf.data dataset (pre-distribution).
            valid_data_loader: validation tf.data dataset (pre-distribution).
            saved_path: directory where checkpoints are written.
            resume: checkpoint path to restore, or None/"" to start fresh.
        """
        self.set_train_data_loader(train_data_loader)
        self.set_eval_data_loader(valid_data_loader)
        self.train_data_loader = self._strategy.experimental_distribute_dataset(
            self.train_data_loader
        )
        self.eval_data_loader = self._strategy.experimental_distribute_dataset(
            self.eval_data_loader
        )
        with self._strategy.scope():
            self.create_checkpoint_manager(saved_path=saved_path, max_to_keep=10000)
            # Bug fix: `resume` defaults to None and len(None) raises TypeError.
            # Guard against None while keeping ""/1-char strings as "no resume".
            if resume is not None and len(resume) > 1:
                self.load_checkpoint(resume)
                logging.info(f"Successfully resumed from {resume}.")
            self.run()
normalize_numbers +from tensorflow_tts.utils.outliers import remove_outlier +from tensorflow_tts.utils.strategy import ( + calculate_2d_loss, + calculate_3d_loss, + return_strategy, +) +from tensorflow_tts.utils.utils import find_files, MODEL_FILE_NAME, CONFIG_FILE_NAME, PROCESSOR_FILE_NAME, CACHE_DIRECTORY, LIBRARY_NAME +from tensorflow_tts.utils.weight_norm import WeightNormalization diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/__init__.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b0a5b41217aad5eca1d8e49dc09d1c28f8aeb26c Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/cleaners.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/cleaners.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fc839b6bf214224ed6a963226baca6de707e60ed Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/cleaners.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/decoder.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/decoder.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bf4a0727d04c271ed2206df196f7ab6e76547483 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/decoder.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/griffin_lim.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/griffin_lim.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6fbc010905052be95cb4aa75cd3e6e7ed18af12a Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/griffin_lim.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/group_conv.cpython-311.pyc 
b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/group_conv.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25a6bbe20ebda0870815e194fb01d4b7f46689ee Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/group_conv.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/korean.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/korean.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e3426a2bed7d825d0d218a1229bba66f3473ff86 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/korean.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/number_norm.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/number_norm.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b6da327d375c3a6456ec5cd41c970ec7c6b0e33 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/number_norm.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/outliers.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/outliers.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8a303c7ff5248c2bc51e73aff544b7d57dc43b5f Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/outliers.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/strategy.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/strategy.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be89d035e0f766034e03f0952b924e62dba66c24 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/strategy.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/utils.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/utils.cpython-311.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..e9d0f3f29483c22346b1075293219aef43faf49a Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/utils.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/__pycache__/weight_norm.cpython-311.pyc b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/weight_norm.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3c8ba9ccbd9a6ce758fc1de53327d536b3d4e222 Binary files /dev/null and b/TensorFlowTTS/tensorflow_tts/utils/__pycache__/weight_norm.cpython-311.pyc differ diff --git a/TensorFlowTTS/tensorflow_tts/utils/cleaners.py b/TensorFlowTTS/tensorflow_tts/utils/cleaners.py new file mode 100644 index 0000000000000000000000000000000000000000..5532f5f97861fe7cc5999cb511be33b9b292eec8 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/utils/cleaners.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2017 Keith Ito +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +import re + +from tensorflow_tts.utils.korean import tokenize as ko_tokenize +from tensorflow_tts.utils.number_norm import normalize_numbers +from unidecode import unidecode + +try: + from german_transliterate.core import GermanTransliterate +except: + pass + +# Regular expression matching whitespace: +_whitespace_re = re.compile(r"\s+") + +# List of (regular expression, replacement) pairs for abbreviations: +_abbreviations = [ + (re.compile("\\b%s\\." % x[0], re.IGNORECASE), x[1]) + for x in [ + ("mrs", "misess"), + ("mr", "mister"), + ("dr", "doctor"), + ("st", "saint"), + ("co", "company"), + ("jr", "junior"), + ("maj", "major"), + ("gen", "general"), + ("drs", "doctors"), + ("rev", "reverend"), + ("lt", "lieutenant"), + ("hon", "honorable"), + ("sgt", "sergeant"), + ("capt", "captain"), + ("esq", "esquire"), + ("ltd", "limited"), + ("col", "colonel"), + ("ft", "fort"), + ] +] + + +def expand_abbreviations(text): + for regex, replacement in _abbreviations: + text = re.sub(regex, replacement, text) + return text + + +def expand_numbers(text): + return normalize_numbers(text) + + +def lowercase(text): + return text.lower() + + +def collapse_whitespace(text): + return re.sub(_whitespace_re, " ", text) + + +def convert_to_ascii(text): + return unidecode(text) + + +def basic_cleaners(text): + """Basic pipeline that lowercases and collapses whitespace without transliteration.""" + text = lowercase(text) + text = collapse_whitespace(text) + return text + + +def transliteration_cleaners(text): + """Pipeline for non-English text that transliterates to ASCII.""" + text = convert_to_ascii(text) + text = lowercase(text) + text = collapse_whitespace(text) + return text + + +def english_cleaners(text): + 
"""Pipeline for English text, including number and abbreviation expansion.""" + text = convert_to_ascii(text) + text = lowercase(text) + text = expand_numbers(text) + text = expand_abbreviations(text) + text = collapse_whitespace(text) + return text + + +def korean_cleaners(text): + """Pipeline for Korean text, including number and abbreviation expansion.""" + text = ko_tokenize( + text + ) # '존경하는' --> ['ᄌ', 'ᅩ', 'ᆫ', 'ᄀ', 'ᅧ', 'ᆼ', 'ᄒ', 'ᅡ', 'ᄂ', 'ᅳ', 'ᆫ'] + return text + +def german_cleaners(text): + """Pipeline for German text, including number and abbreviation expansion.""" + try: + text = GermanTransliterate(replace={';': ',', ':': ' '}, sep_abbreviation=' -- ').transliterate(text) + except NameError: + raise ModuleNotFoundError("Install german_transliterate package to use german_cleaners") + return text \ No newline at end of file diff --git a/TensorFlowTTS/tensorflow_tts/utils/decoder.py b/TensorFlowTTS/tensorflow_tts/utils/decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..f85fa03793e571a3af249bd0e9569cd5119d9dc6 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/utils/decoder.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlow Authors, All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from typing import Any, Optional, Tuple, Union + +import tensorflow as tf +from tensorflow.python.ops import control_flow_util +from tensorflow_addons.seq2seq import Decoder +from tensorflow_addons.seq2seq.decoder import ( + BaseDecoder, + _prepend_batch, + _transpose_batch_time, +) +from tensorflow_addons.utils.types import Number, TensorLike + + +def dynamic_decode( + decoder: Union[Decoder, BaseDecoder], + output_time_major: bool = False, + impute_finished: bool = False, + maximum_iterations: Optional[TensorLike] = None, + parallel_iterations: int = 32, + swap_memory: bool = False, + training: Optional[bool] = None, + scope: Optional[str] = None, + enable_tflite_convertible: bool = False, + **kwargs +) -> Tuple[Any, Any, Any]: + """Perform dynamic decoding with `decoder`. + Calls initialize() once and step() repeatedly on the Decoder object. + Args: + decoder: A `Decoder` instance. + output_time_major: Python boolean. Default: `False` (batch major). If + `True`, outputs are returned as time major tensors (this mode is + faster). Otherwise, outputs are returned as batch major tensors (this + adds extra time to the computation). + impute_finished: Python boolean. If `True`, then states for batch + entries which are marked as finished get copied through and the + corresponding outputs get zeroed out. This causes some slowdown at + each time step, but ensures that the final state and outputs have + the correct values and that backprop ignores time steps that were + marked as finished. + maximum_iterations: A strictly positive `int32` scalar, the maximum + allowed number of decoding steps. Default is `None` (decode until the + decoder is fully done). + parallel_iterations: Argument passed to `tf.while_loop`. + swap_memory: Argument passed to `tf.while_loop`. + training: Python boolean. Indicates whether the layer should behave + in training mode or in inference mode. Only relevant + when `dropout` or `recurrent_dropout` is used. + scope: Optional name scope to use. 
+ enable_tflite_convertible: Python boolean. If `True`, then the variables + of `TensorArray` become of 1-D static shape. Also zero pads in the + output tensor will be discarded. Default: `False`. + **kwargs: dict, other keyword arguments for dynamic_decode. It might + contain arguments for `BaseDecoder` to initialize, which takes all + tensor inputs during call(). + Returns: + `(final_outputs, final_state, final_sequence_lengths)`. + Raises: + ValueError: if `maximum_iterations` is provided but is not a scalar. + """ + with tf.name_scope(scope or "decoder"): + is_xla = not tf.executing_eagerly() and control_flow_util.GraphOrParentsInXlaContext( + tf.compat.v1.get_default_graph() + ) + + if maximum_iterations is not None: + maximum_iterations = tf.convert_to_tensor( + maximum_iterations, dtype=tf.int32, name="maximum_iterations" + ) + if maximum_iterations.shape.ndims != 0: + raise ValueError("maximum_iterations must be a scalar") + tf.debugging.assert_greater( + maximum_iterations, + 0, + message="maximum_iterations should be greater than 0", + ) + elif is_xla: + raise ValueError("maximum_iterations is required for XLA compilation.") + + if isinstance(decoder, Decoder): + initial_finished, initial_inputs, initial_state = decoder.initialize() + else: + # For BaseDecoder that takes tensor inputs during call. + decoder_init_input = kwargs.pop("decoder_init_input", None) + decoder_init_kwargs = kwargs.pop("decoder_init_kwargs", {}) + initial_finished, initial_inputs, initial_state = decoder.initialize( + decoder_init_input, **decoder_init_kwargs + ) + + if enable_tflite_convertible: + # Assume the batch_size = 1 for inference. + # So we can change 2-D TensorArray into 1-D by reshaping it. 
+ zero_outputs = tf.nest.map_structure( + lambda shape, dtype: tf.reshape( + tf.zeros(_prepend_batch(decoder.batch_size, shape), dtype=dtype), + [-1], + ), + decoder.output_size, + decoder.output_dtype, + ) + else: + zero_outputs = tf.nest.map_structure( + lambda shape, dtype: tf.zeros( + _prepend_batch(decoder.batch_size, shape), dtype=dtype + ), + decoder.output_size, + decoder.output_dtype, + ) + + if maximum_iterations is not None: + initial_finished = tf.logical_or(initial_finished, 0 >= maximum_iterations) + initial_sequence_lengths = tf.zeros_like(initial_finished, dtype=tf.int32) + initial_time = tf.constant(0, dtype=tf.int32) + + def _shape(batch_size, from_shape): + if not isinstance(from_shape, tf.TensorShape) or from_shape.ndims == 0: + return None + else: + batch_size = tf.get_static_value( + tf.convert_to_tensor(batch_size, name="batch_size") + ) + if enable_tflite_convertible: + # Since we can't use 2-D TensoArray and assume `batch_size` = 1, + # we use `from_shape` dimension only. + return from_shape + return tf.TensorShape([batch_size]).concatenate(from_shape) + + dynamic_size = maximum_iterations is None or not is_xla + # The dynamic shape `TensoArray` is not allowed in TFLite yet. + dynamic_size = dynamic_size and (not enable_tflite_convertible) + + def _create_ta(s, d): + return tf.TensorArray( + dtype=d, + size=0 if dynamic_size else maximum_iterations, + dynamic_size=dynamic_size, + element_shape=_shape(decoder.batch_size, s), + ) + + initial_outputs_ta = tf.nest.map_structure( + _create_ta, decoder.output_size, decoder.output_dtype + ) + + def condition( + unused_time, + unused_outputs_ta, + unused_state, + unused_inputs, + finished, + unused_sequence_lengths, + ): + return tf.logical_not(tf.reduce_all(finished)) + + def body(time, outputs_ta, state, inputs, finished, sequence_lengths): + """Internal while_loop body. + Args: + time: scalar int32 tensor. + outputs_ta: structure of TensorArray. 
+ state: (structure of) state tensors and TensorArrays. + inputs: (structure of) input tensors. + finished: bool tensor (keeping track of what's finished). + sequence_lengths: int32 tensor (keeping track of time of finish). + Returns: + `(time + 1, outputs_ta, next_state, next_inputs, next_finished, + next_sequence_lengths)`. + ``` + """ + (next_outputs, decoder_state, next_inputs, decoder_finished) = decoder.step( + time, inputs, state, training + ) + decoder_state_sequence_lengths = False + if decoder.tracks_own_finished: + next_finished = decoder_finished + lengths = getattr(decoder_state, "lengths", None) + if lengths is not None: + # sequence lengths are provided by decoder_state.lengths; + # overwrite our sequence lengths. + decoder_state_sequence_lengths = True + sequence_lengths = tf.cast(lengths, tf.int32) + else: + next_finished = tf.logical_or(decoder_finished, finished) + + if decoder_state_sequence_lengths: + # Just pass something through the loop; at the next iteration + # we'll pull the sequence lengths from the decoder_state again. + next_sequence_lengths = sequence_lengths + else: + next_sequence_lengths = tf.where( + tf.logical_not(finished), + tf.fill(tf.shape(sequence_lengths), time + 1), + sequence_lengths, + ) + + tf.nest.assert_same_structure(state, decoder_state) + tf.nest.assert_same_structure(outputs_ta, next_outputs) + tf.nest.assert_same_structure(inputs, next_inputs) + + # Zero out output values past finish + if impute_finished: + + def zero_out_finished(out, zero): + if finished.shape.rank < zero.shape.rank: + broadcast_finished = tf.broadcast_to( + tf.expand_dims(finished, axis=-1), zero.shape + ) + return tf.where(broadcast_finished, zero, out) + else: + return tf.where(finished, zero, out) + + emit = tf.nest.map_structure( + zero_out_finished, next_outputs, zero_outputs + ) + else: + emit = next_outputs + + # Copy through states past finish + def _maybe_copy_state(new, cur): + # TensorArrays and scalar states get passed through. 
+ if isinstance(cur, tf.TensorArray): + pass_through = True + else: + new.set_shape(cur.shape) + pass_through = new.shape.ndims == 0 + if not pass_through: + broadcast_finished = tf.broadcast_to( + tf.expand_dims(finished, axis=-1), new.shape + ) + return tf.where(broadcast_finished, cur, new) + else: + return new + + if impute_finished: + next_state = tf.nest.map_structure( + _maybe_copy_state, decoder_state, state + ) + else: + next_state = decoder_state + + if enable_tflite_convertible: + # Reshape to 1-D. + emit = tf.nest.map_structure(lambda x: tf.reshape(x, [-1]), emit) + + outputs_ta = tf.nest.map_structure( + lambda ta, out: ta.write(time, out), outputs_ta, emit + ) + return ( + time + 1, + outputs_ta, + next_state, + next_inputs, + next_finished, + next_sequence_lengths, + ) + + res = tf.while_loop( + condition, + body, + loop_vars=( + initial_time, + initial_outputs_ta, + initial_state, + initial_inputs, + initial_finished, + initial_sequence_lengths, + ), + parallel_iterations=parallel_iterations, + maximum_iterations=maximum_iterations, + swap_memory=swap_memory, + ) + + final_outputs_ta = res[1] + final_state = res[2] + final_sequence_lengths = res[5] + + final_outputs = tf.nest.map_structure(lambda ta: ta.stack(), final_outputs_ta) + + try: + final_outputs, final_state = decoder.finalize( + final_outputs, final_state, final_sequence_lengths + ) + except NotImplementedError: + pass + + if not output_time_major: + if enable_tflite_convertible: + # Reshape the output to the original shape. 
+ def _restore_batch(x): + return tf.expand_dims(x, [1]) + + final_outputs = tf.nest.map_structure(_restore_batch, final_outputs) + + final_outputs = tf.nest.map_structure(_transpose_batch_time, final_outputs) + + return final_outputs, final_state, final_sequence_lengths diff --git a/TensorFlowTTS/tensorflow_tts/utils/griffin_lim.py b/TensorFlowTTS/tensorflow_tts/utils/griffin_lim.py new file mode 100644 index 0000000000000000000000000000000000000000..29f256f1f5a7b7f0664a3b0789ae8f08d8e555e2 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/utils/griffin_lim.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Griffin-Lim phase reconstruction algorithm from mel spectrogram.""" + +import os + +import librosa +import numpy as np +import soundfile as sf +import tensorflow as tf +from sklearn.preprocessing import StandardScaler + + +def griffin_lim_lb( + mel_spec, stats_path, dataset_config, n_iter=32, output_dir=None, wav_name="lb" +): + """Generate wave from mel spectrogram with Griffin-Lim algorithm using Librosa. + Args: + mel_spec (ndarray): array representing the mel spectrogram. + stats_path (str): path to the `stats.npy` file containing norm statistics. + dataset_config (Dict): dataset configuration parameters. + n_iter (int): number of iterations for GL. + output_dir (str): output directory where audio file will be saved. + wav_name (str): name of the output file. 
+ Returns: + gl_lb (ndarray): generated wave. + """ + scaler = StandardScaler() + scaler.mean_, scaler.scale_ = np.load(stats_path) + + mel_spec = np.power(10.0, scaler.inverse_transform(mel_spec)).T + mel_basis = librosa.filters.mel( + dataset_config["sampling_rate"], + n_fft=dataset_config["fft_size"], + n_mels=dataset_config["num_mels"], + fmin=dataset_config["fmin"], + fmax=dataset_config["fmax"], + ) + mel_to_linear = np.maximum(1e-10, np.dot(np.linalg.pinv(mel_basis), mel_spec)) + gl_lb = librosa.griffinlim( + mel_to_linear, + n_iter=n_iter, + hop_length=dataset_config["hop_size"], + win_length=dataset_config["win_length"] or dataset_config["fft_size"], + ) + if output_dir: + output_path = os.path.join(output_dir, f"{wav_name}.wav") + sf.write(output_path, gl_lb, dataset_config["sampling_rate"], "PCM_16") + return gl_lb + + +class TFGriffinLim(tf.keras.layers.Layer): + """Griffin-Lim algorithm for phase reconstruction from mel spectrogram magnitude.""" + + def __init__(self, stats_path, dataset_config, normalized: bool = True): + """Init GL params. + Args: + stats_path (str): path to the `stats.npy` file containing norm statistics. + dataset_config (Dict): dataset configuration parameters. + """ + super().__init__() + self.normalized = normalized + if normalized: + scaler = StandardScaler() + scaler.mean_, scaler.scale_ = np.load(stats_path) + self.scaler = scaler + self.ds_config = dataset_config + self.mel_basis = librosa.filters.mel( + self.ds_config["sampling_rate"], + n_fft=self.ds_config["fft_size"], + n_mels=self.ds_config["num_mels"], + fmin=self.ds_config["fmin"], + fmax=self.ds_config["fmax"], + ) # [num_mels, fft_size // 2 + 1] + + def save_wav(self, gl_tf, output_dir, wav_name): + """Generate WAV file and save it. + Args: + gl_tf (tf.Tensor): reconstructed signal from GL algorithm. + output_dir (str): output directory where audio file will be saved. + wav_name (str): name of the output file. 
+ """ + encode_fn = lambda x: tf.audio.encode_wav(x, self.ds_config["sampling_rate"]) + gl_tf = tf.expand_dims(gl_tf, -1) + if not isinstance(wav_name, list): + wav_name = [wav_name] + + if len(gl_tf.shape) > 2: + bs, *_ = gl_tf.shape + assert bs == len(wav_name), "Batch and 'wav_name' have different size." + tf_wav = tf.map_fn(encode_fn, gl_tf, dtype=tf.string) + for idx in tf.range(bs): + output_path = os.path.join(output_dir, f"{wav_name[idx]}.wav") + tf.io.write_file(output_path, tf_wav[idx]) + else: + tf_wav = encode_fn(gl_tf) + tf.io.write_file(os.path.join(output_dir, f"{wav_name[0]}.wav"), tf_wav) + + @tf.function( + input_signature=[ + tf.TensorSpec(shape=[None, None, None], dtype=tf.float32), + tf.TensorSpec(shape=[], dtype=tf.int32), + ] + ) + def call(self, mel_spec, n_iter=32): + """Apply GL algorithm to batched mel spectrograms. + Args: + mel_spec (tf.Tensor): normalized mel spectrogram. + n_iter (int): number of iterations to run GL algorithm. + Returns: + (tf.Tensor): reconstructed signal from GL algorithm. 
+ """ + # de-normalize mel spectogram + if self.normalized: + mel_spec = tf.math.pow( + 10.0, mel_spec * self.scaler.scale_ + self.scaler.mean_ + ) + else: + mel_spec = tf.math.pow( + 10.0, mel_spec + ) # TODO @dathudeptrai check if its ok without it wavs were too quiet + inverse_mel = tf.linalg.pinv(self.mel_basis) + + # [:, num_mels] @ [fft_size // 2 + 1, num_mels].T + mel_to_linear = tf.linalg.matmul(mel_spec, inverse_mel, transpose_b=True) + mel_to_linear = tf.cast(tf.math.maximum(1e-10, mel_to_linear), tf.complex64) + + init_phase = tf.cast( + tf.random.uniform(tf.shape(mel_to_linear), maxval=1), tf.complex64 + ) + phase = tf.math.exp(2j * np.pi * init_phase) + for _ in tf.range(n_iter): + inverse = tf.signal.inverse_stft( + mel_to_linear * phase, + frame_length=self.ds_config["win_length"] or self.ds_config["fft_size"], + frame_step=self.ds_config["hop_size"], + fft_length=self.ds_config["fft_size"], + window_fn=tf.signal.inverse_stft_window_fn(self.ds_config["hop_size"]), + ) + phase = tf.signal.stft( + inverse, + self.ds_config["win_length"] or self.ds_config["fft_size"], + self.ds_config["hop_size"], + self.ds_config["fft_size"], + ) + phase /= tf.cast(tf.maximum(1e-10, tf.abs(phase)), tf.complex64) + + return tf.signal.inverse_stft( + mel_to_linear * phase, + frame_length=self.ds_config["win_length"] or self.ds_config["fft_size"], + frame_step=self.ds_config["hop_size"], + fft_length=self.ds_config["fft_size"], + window_fn=tf.signal.inverse_stft_window_fn(self.ds_config["hop_size"]), + ) diff --git a/TensorFlowTTS/tensorflow_tts/utils/group_conv.py b/TensorFlowTTS/tensorflow_tts/utils/group_conv.py new file mode 100644 index 0000000000000000000000000000000000000000..5d748f655c832a83eceaad051753d3b1698ef71b --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/utils/group_conv.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# This code is copy from https://github.com/tensorflow/tensorflow/pull/36773. 
+"""Group Convolution Modules.""" + +from tensorflow.python.framework import tensor_shape +from tensorflow.python.keras import activations, constraints, initializers, regularizers +from tensorflow.python.keras.engine.base_layer import Layer +from tensorflow.python.keras.engine.input_spec import InputSpec +from tensorflow.python.keras.layers import Conv1D, SeparableConv1D +from tensorflow.python.keras.utils import conv_utils +from tensorflow.python.ops import array_ops, nn, nn_ops + + +class Convolution(object): + """Helper class for convolution. + Note that this class assumes that shapes of input and filter passed to + __call__ are compatible with input_shape and filter_shape passed to the + constructor. + Arguments + input_shape: static shape of input. i.e. input.get_shape(). + filter_shape: static shape of the filter. i.e. filter.get_shape(). + padding: see convolution. + strides: see convolution. + dilation_rate: see convolution. + name: see convolution. + data_format: see convolution. 
+ """ + + def __init__( + self, + input_shape, + filter_shape, + padding, + strides=None, + dilation_rate=None, + name=None, + data_format=None, + ): + """Helper function for convolution.""" + num_total_dims = filter_shape.ndims + if num_total_dims is None: + num_total_dims = input_shape.ndims + if num_total_dims is None: + raise ValueError("rank of input or filter must be known") + + num_spatial_dims = num_total_dims - 2 + + try: + input_shape.with_rank(num_spatial_dims + 2) + except ValueError: + raise ValueError("input tensor must have rank %d" % (num_spatial_dims + 2)) + + try: + filter_shape.with_rank(num_spatial_dims + 2) + except ValueError: + raise ValueError("filter tensor must have rank %d" % (num_spatial_dims + 2)) + + if data_format is None or not data_format.startswith("NC"): + input_channels_dim = tensor_shape.dimension_at_index( + input_shape, num_spatial_dims + 1 + ) + spatial_dims = range(1, num_spatial_dims + 1) + else: + input_channels_dim = tensor_shape.dimension_at_index(input_shape, 1) + spatial_dims = range(2, num_spatial_dims + 2) + + filter_dim = tensor_shape.dimension_at_index(filter_shape, num_spatial_dims) + if not (input_channels_dim % filter_dim).is_compatible_with(0): + raise ValueError( + "number of input channels is not divisible by corresponding " + "dimension of filter, {} % {} != 0".format( + input_channels_dim, filter_dim + ) + ) + + strides, dilation_rate = nn_ops._get_strides_and_dilation_rate( + num_spatial_dims, strides, dilation_rate + ) + + self.input_shape = input_shape + self.filter_shape = filter_shape + self.data_format = data_format + self.strides = strides + self.padding = padding + self.name = name + self.dilation_rate = dilation_rate + self.conv_op = nn_ops._WithSpaceToBatch( + input_shape, + dilation_rate=dilation_rate, + padding=padding, + build_op=self._build_op, + filter_shape=filter_shape, + spatial_dims=spatial_dims, + data_format=data_format, + ) + + def _build_op(self, _, padding): + return 
nn_ops._NonAtrousConvolution( + self.input_shape, + filter_shape=self.filter_shape, + padding=padding, + data_format=self.data_format, + strides=self.strides, + name=self.name, + ) + + def __call__(self, inp, filter): + return self.conv_op(inp, filter) + + +class Conv(Layer): + """Abstract N-D convolution layer (private, used as implementation base). + This layer creates a convolution kernel that is convolved + (actually cross-correlated) with the layer input to produce a tensor of + outputs. If `use_bias` is True (and a `bias_initializer` is provided), + a bias vector is created and added to the outputs. Finally, if + `activation` is not `None`, it is applied to the outputs as well. + Note: layer attributes cannot be modified after the layer has been called + once (except the `trainable` attribute). + Arguments: + rank: An integer, the rank of the convolution, e.g. "2" for 2D convolution. + filters: Integer, the dimensionality of the output space (i.e. the number + of filters in the convolution). + kernel_size: An integer or tuple/list of n integers, specifying the + length of the convolution window. + strides: An integer or tuple/list of n integers, + specifying the stride length of the convolution. + Specifying any stride value != 1 is incompatible with specifying + any `dilation_rate` value != 1. + padding: One of `"valid"`, `"same"`, or `"causal"` (case-insensitive). + data_format: A string, one of `channels_last` (default) or `channels_first`. + The ordering of the dimensions in the inputs. + `channels_last` corresponds to inputs with shape + `(batch_size, ..., channels)` while `channels_first` corresponds to + inputs with shape `(batch_size, channels, ...)`. + dilation_rate: An integer or tuple/list of n integers, specifying + the dilation rate to use for dilated convolution. + Currently, specifying any `dilation_rate` value != 1 is + incompatible with specifying any `strides` value != 1. 
+ groups: Integer, the number of channel groups controlling the connections + between inputs and outputs. Input channels and `filters` must both be + divisible by `groups`. For example, + - At `groups=1`, all inputs are convolved to all outputs. + - At `groups=2`, the operation becomes equivalent to having two + convolutional layers side by side, each seeing half the input + channels, and producing half the output channels, and both + subsequently concatenated. + - At `groups=input_channels`, each input channel is convolved with its + own set of filters, of size `input_channels / filters` + activation: Activation function to use. + If you don't specify anything, no activation is applied. + use_bias: Boolean, whether the layer uses a bias. + kernel_initializer: An initializer for the convolution kernel. + bias_initializer: An initializer for the bias vector. If None, the default + initializer will be used. + kernel_regularizer: Optional regularizer for the convolution kernel. + bias_regularizer: Optional regularizer for the bias vector. + activity_regularizer: Optional regularizer function for the output. + kernel_constraint: Optional projection function to be applied to the + kernel after being updated by an `Optimizer` (e.g. used to implement + norm constraints or value constraints for layer weights). The function + must take as input the unprojected variable and must return the + projected variable (which must have the same shape). Constraints are + not safe to use when doing asynchronous distributed training. + bias_constraint: Optional projection function to be applied to the + bias after being updated by an `Optimizer`. + trainable: Boolean, if `True` the weights of this layer will be marked as + trainable (and listed in `layer.trainable_weights`). + name: A string, the name of the layer. 
+ """ + + def __init__( + self, + rank, + filters, + kernel_size, + strides=1, + padding="valid", + data_format=None, + dilation_rate=1, + groups=1, + activation=None, + use_bias=True, + kernel_initializer="glorot_uniform", + bias_initializer="zeros", + kernel_regularizer=None, + bias_regularizer=None, + activity_regularizer=None, + kernel_constraint=None, + bias_constraint=None, + trainable=True, + name=None, + **kwargs + ): + super(Conv, self).__init__( + trainable=trainable, + name=name, + activity_regularizer=regularizers.get(activity_regularizer), + **kwargs + ) + self.rank = rank + if filters is not None and not isinstance(filters, int): + filters = int(filters) + self.filters = filters + self.groups = groups or 1 + if filters is not None and filters % self.groups != 0: + raise ValueError( + "The number of filters must be evenly divisible by the number of " + "groups. Received: groups={}, filters={}".format(groups, filters) + ) + self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, "kernel_size") + if not all(self.kernel_size): + raise ValueError( + "The argument `kernel_size` cannot contain 0(s). " + "Received: %s" % (kernel_size,) + ) + self.strides = conv_utils.normalize_tuple(strides, rank, "strides") + self.padding = conv_utils.normalize_padding(padding) + if self.padding == "causal" and not isinstance(self, (Conv1D, SeparableConv1D)): + raise ValueError( + "Causal padding is only supported for `Conv1D`" + "and ``SeparableConv1D`." 
+ ) + self.data_format = conv_utils.normalize_data_format(data_format) + self.dilation_rate = conv_utils.normalize_tuple( + dilation_rate, rank, "dilation_rate" + ) + self.activation = activations.get(activation) + self.use_bias = use_bias + self.kernel_initializer = initializers.get(kernel_initializer) + self.bias_initializer = initializers.get(bias_initializer) + self.kernel_regularizer = regularizers.get(kernel_regularizer) + self.bias_regularizer = regularizers.get(bias_regularizer) + self.kernel_constraint = constraints.get(kernel_constraint) + self.bias_constraint = constraints.get(bias_constraint) + self.input_spec = InputSpec(ndim=self.rank + 2) + + def build(self, input_shape): + input_shape = tensor_shape.TensorShape(input_shape) + input_channel = self._get_input_channel(input_shape) + if input_channel % self.groups != 0: + raise ValueError( + "The number of input channels must be evenly divisible by the number " + "of groups. Received groups={}, but the input has {} channels " + "(full input shape is {}).".format( + self.groups, input_channel, input_shape + ) + ) + kernel_shape = self.kernel_size + (input_channel // self.groups, self.filters) + + self.kernel = self.add_weight( + name="kernel", + shape=kernel_shape, + initializer=self.kernel_initializer, + regularizer=self.kernel_regularizer, + constraint=self.kernel_constraint, + trainable=True, + dtype=self.dtype, + ) + if self.use_bias: + self.bias = self.add_weight( + name="bias", + shape=(self.filters,), + initializer=self.bias_initializer, + regularizer=self.bias_regularizer, + constraint=self.bias_constraint, + trainable=True, + dtype=self.dtype, + ) + else: + self.bias = None + channel_axis = self._get_channel_axis() + self.input_spec = InputSpec( + ndim=self.rank + 2, axes={channel_axis: input_channel} + ) + + self._build_conv_op_input_shape = input_shape + self._build_input_channel = input_channel + self._padding_op = self._get_padding_op() + self._conv_op_data_format = 
conv_utils.convert_data_format( + self.data_format, self.rank + 2 + ) + self._convolution_op = Convolution( + input_shape, + filter_shape=self.kernel.shape, + dilation_rate=self.dilation_rate, + strides=self.strides, + padding=self._padding_op, + data_format=self._conv_op_data_format, + ) + self.built = True + + def call(self, inputs): + if self._recreate_conv_op(inputs): + self._convolution_op = Convolution( + inputs.get_shape(), + filter_shape=self.kernel.shape, + dilation_rate=self.dilation_rate, + strides=self.strides, + padding=self._padding_op, + data_format=self._conv_op_data_format, + ) + self._build_conv_op_input_shape = inputs.get_shape() + + # Apply causal padding to inputs for Conv1D. + if self.padding == "causal" and self.__class__.__name__ == "Conv1D": + inputs = array_ops.pad(inputs, self._compute_causal_padding()) + + outputs = self._convolution_op(inputs, self.kernel) + + if self.use_bias: + if self.data_format == "channels_first": + if self.rank == 1: + # nn.bias_add does not accept a 1D input tensor. 
+ bias = array_ops.reshape(self.bias, (1, self.filters, 1)) + outputs += bias + else: + outputs = nn.bias_add(outputs, self.bias, data_format="NCHW") + else: + outputs = nn.bias_add(outputs, self.bias, data_format="NHWC") + + if self.activation is not None: + return self.activation(outputs) + return outputs + + def compute_output_shape(self, input_shape): + input_shape = tensor_shape.TensorShape(input_shape).as_list() + if self.data_format == "channels_last": + space = input_shape[1:-1] + new_space = [] + for i in range(len(space)): + new_dim = conv_utils.conv_output_length( + space[i], + self.kernel_size[i], + padding=self.padding, + stride=self.strides[i], + dilation=self.dilation_rate[i], + ) + new_space.append(new_dim) + return tensor_shape.TensorShape( + [input_shape[0]] + new_space + [self.filters] + ) + else: + space = input_shape[2:] + new_space = [] + for i in range(len(space)): + new_dim = conv_utils.conv_output_length( + space[i], + self.kernel_size[i], + padding=self.padding, + stride=self.strides[i], + dilation=self.dilation_rate[i], + ) + new_space.append(new_dim) + return tensor_shape.TensorShape([input_shape[0], self.filters] + new_space) + + def get_config(self): + config = { + "filters": self.filters, + "kernel_size": self.kernel_size, + "strides": self.strides, + "padding": self.padding, + "data_format": self.data_format, + "dilation_rate": self.dilation_rate, + "groups": self.groups, + "activation": activations.serialize(self.activation), + "use_bias": self.use_bias, + "kernel_initializer": initializers.serialize(self.kernel_initializer), + "bias_initializer": initializers.serialize(self.bias_initializer), + "kernel_regularizer": regularizers.serialize(self.kernel_regularizer), + "bias_regularizer": regularizers.serialize(self.bias_regularizer), + "activity_regularizer": regularizers.serialize(self.activity_regularizer), + "kernel_constraint": constraints.serialize(self.kernel_constraint), + "bias_constraint": 
constraints.serialize(self.bias_constraint), + } + base_config = super(Conv, self).get_config() + return dict(list(base_config.items()) + list(config.items())) + + def _compute_causal_padding(self): + """Calculates padding for 'causal' option for 1-d conv layers.""" + left_pad = self.dilation_rate[0] * (self.kernel_size[0] - 1) + if self.data_format == "channels_last": + causal_padding = [[0, 0], [left_pad, 0], [0, 0]] + else: + causal_padding = [[0, 0], [0, 0], [left_pad, 0]] + return causal_padding + + def _get_channel_axis(self): + if self.data_format == "channels_first": + return 1 + else: + return -1 + + def _get_input_channel(self, input_shape): + channel_axis = self._get_channel_axis() + if input_shape.dims[channel_axis].value is None: + raise ValueError( + "The channel dimension of the inputs " + "should be defined. Found `None`." + ) + return int(input_shape[channel_axis]) + + def _get_padding_op(self): + if self.padding == "causal": + op_padding = "valid" + else: + op_padding = self.padding + if not isinstance(op_padding, (list, tuple)): + op_padding = op_padding.upper() + return op_padding + + def _recreate_conv_op(self, inputs): + """Recreate conv_op if necessary. + Check if the input_shape in call() is different from that in build(). + For the values that are not None, if they are different, recreate + the _convolution_op to avoid the stateful behavior. + Args: + inputs: The input data to call() method. + Returns: + `True` or `False` to indicate whether to recreate the conv_op. + """ + call_input_shape = inputs.get_shape() + for axis in range(1, len(call_input_shape)): + if ( + call_input_shape[axis] is not None + and self._build_conv_op_input_shape[axis] is not None + and call_input_shape[axis] != self._build_conv_op_input_shape[axis] + ): + return True + return False + + +class GroupConv1D(Conv): + """1D convolution layer (e.g. temporal convolution). 
+ This layer creates a convolution kernel that is convolved + with the layer input over a single spatial (or temporal) dimension + to produce a tensor of outputs. + If `use_bias` is True, a bias vector is created and added to the outputs. + Finally, if `activation` is not `None`, + it is applied to the outputs as well. + When using this layer as the first layer in a model, + provide an `input_shape` argument + (tuple of integers or `None`, e.g. + `(10, 128)` for sequences of 10 vectors of 128-dimensional vectors, + or `(None, 128)` for variable-length sequences of 128-dimensional vectors. + Examples: + >>> # The inputs are 128-length vectors with 10 timesteps, and the batch size + >>> # is 4. + >>> input_shape = (4, 10, 128) + >>> x = tf.random.normal(input_shape) + >>> y = tf.keras.layers.Conv1D( + ... 32, 3, activation='relu',input_shape=input_shape)(x) + >>> print(y.shape) + (4, 8, 32) + Arguments: + filters: Integer, the dimensionality of the output space + (i.e. the number of output filters in the convolution). + kernel_size: An integer or tuple/list of a single integer, + specifying the length of the 1D convolution window. + strides: An integer or tuple/list of a single integer, + specifying the stride length of the convolution. + Specifying any stride value != 1 is incompatible with specifying + any `dilation_rate` value != 1. + padding: One of `"valid"`, `"causal"` or `"same"` (case-insensitive). + `"causal"` results in causal (dilated) convolutions, e.g. `output[t]` + does not depend on `input[t+1:]`. Useful when modeling temporal data + where the model should not violate the temporal order. + See [WaveNet: A Generative Model for Raw Audio, section + 2.1](https://arxiv.org/abs/1609.03499). + data_format: A string, + one of `channels_last` (default) or `channels_first`. + groups: Integer, the number of channel groups controlling the connections + between inputs and outputs. Input channels and `filters` must both be + divisible by `groups`. 
For example, + - At `groups=1`, all inputs are convolved to all outputs. + - At `groups=2`, the operation becomes equivalent to having two + convolutional layers side by side, each seeing half the input + channels, and producing half the output channels, and both + subsequently concatenated. + - At `groups=input_channels`, each input channel is convolved with its + own set of filters, of size `input_channels / filters` + dilation_rate: an integer or tuple/list of a single integer, specifying + the dilation rate to use for dilated convolution. + Currently, specifying any `dilation_rate` value != 1 is + incompatible with specifying any `strides` value != 1. + activation: Activation function to use. + If you don't specify anything, no activation is applied ( + see `keras.activations`). + use_bias: Boolean, whether the layer uses a bias vector. + kernel_initializer: Initializer for the `kernel` weights matrix ( + see `keras.initializers`). + bias_initializer: Initializer for the bias vector ( + see `keras.initializers`). + kernel_regularizer: Regularizer function applied to + the `kernel` weights matrix (see `keras.regularizers`). + bias_regularizer: Regularizer function applied to the bias vector ( + see `keras.regularizers`). + activity_regularizer: Regularizer function applied to + the output of the layer (its "activation") ( + see `keras.regularizers`). + kernel_constraint: Constraint function applied to the kernel matrix ( + see `keras.constraints`). + bias_constraint: Constraint function applied to the bias vector ( + see `keras.constraints`). + Input shape: + 3D tensor with shape: `(batch_size, steps, input_dim)` + Output shape: + 3D tensor with shape: `(batch_size, new_steps, filters)` + `steps` value might have changed due to padding or strides. + Returns: + A tensor of rank 3 representing + `activation(conv1d(inputs, kernel) + bias)`. + Raises: + ValueError: when both `strides` > 1 and `dilation_rate` > 1. 
+ """ + + def __init__( + self, + filters, + kernel_size, + strides=1, + padding="valid", + data_format="channels_last", + dilation_rate=1, + groups=1, + activation=None, + use_bias=True, + kernel_initializer="glorot_uniform", + bias_initializer="zeros", + kernel_regularizer=None, + bias_regularizer=None, + activity_regularizer=None, + kernel_constraint=None, + bias_constraint=None, + **kwargs + ): + super().__init__( + rank=1, + filters=filters, + kernel_size=kernel_size, + strides=strides, + padding=padding, + data_format=data_format, + dilation_rate=dilation_rate, + groups=groups, + activation=activations.get(activation), + use_bias=use_bias, + kernel_initializer=initializers.get(kernel_initializer), + bias_initializer=initializers.get(bias_initializer), + kernel_regularizer=regularizers.get(kernel_regularizer), + bias_regularizer=regularizers.get(bias_regularizer), + activity_regularizer=regularizers.get(activity_regularizer), + kernel_constraint=constraints.get(kernel_constraint), + bias_constraint=constraints.get(bias_constraint), + **kwargs + ) diff --git a/TensorFlowTTS/tensorflow_tts/utils/korean.py b/TensorFlowTTS/tensorflow_tts/utils/korean.py new file mode 100644 index 0000000000000000000000000000000000000000..a9f2c28ceead6c7aa8ef966125244a06e60d0800 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/utils/korean.py @@ -0,0 +1,529 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team, Jaehyoung Kim(@crux153) and Taehoon Kim(@carpedm20) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Code based on https://github.com/carpedm20/multi-speaker-tacotron-tensorflow +"""Korean related helpers.""" + +import ast +import json +import os +import re + +from jamo import h2j, hangul_to_jamo, j2h, jamo_to_hcj + +etc_dictionary = { + "2 30대": "이삼십대", + "20~30대": "이삼십대", + "20, 30대": "이십대 삼십대", + "1+1": "원플러스원", + "3에서 6개월인": "3개월에서 육개월인", +} + +english_dictionary = { + "Devsisters": "데브시스터즈", + "track": "트랙", + # krbook + "LA": "엘에이", + "LG": "엘지", + "KOREA": "코리아", + "JSA": "제이에스에이", + "PGA": "피지에이", + "GA": "지에이", + "idol": "아이돌", + "KTX": "케이티엑스", + "AC": "에이씨", + "DVD": "디비디", + "US": "유에스", + "CNN": "씨엔엔", + "LPGA": "엘피지에이", + "P": "피", + "L": "엘", + "T": "티", + "B": "비", + "C": "씨", + "BIFF": "비아이에프에프", + "GV": "지비", + # JTBC + "IT": "아이티", + "IQ": "아이큐", + "JTBC": "제이티비씨", + "trickle down effect": "트리클 다운 이펙트", + "trickle up effect": "트리클 업 이펙트", + "down": "다운", + "up": "업", + "FCK": "에프씨케이", + "AP": "에이피", + "WHERETHEWILDTHINGSARE": "", + "Rashomon Effect": "", + "O": "오", + "OO": "오오", + "B": "비", + "GDP": "지디피", + "CIPA": "씨아이피에이", + "YS": "와이에스", + "Y": "와이", + "S": "에스", + "JTBC": "제이티비씨", + "PC": "피씨", + "bill": "빌", + "Halmuny": "하모니", ##### + "X": "엑스", + "SNS": "에스엔에스", + "ability": "어빌리티", + "shy": "", + "CCTV": "씨씨티비", + "IT": "아이티", + "the tenth man": "더 텐쓰 맨", #### + "L": "엘", + "PC": "피씨", + "YSDJJPMB": "", ######## + "Content Attitude Timing": "컨텐트 애티튜드 타이밍", + "CAT": "캣", + "IS": "아이에스", + "K": "케이", + "Y": "와이", + "KDI": "케이디아이", + "DOC": "디오씨", + "CIA": "씨아이에이", + "PBS": "피비에스", + "D": "디", + "PPropertyPositionPowerPrisonP" "S": "에스", + "francisco": "프란시스코", + "I": "아이", + "III": "아이아이", ###### + "No joke": "노 조크", + "BBK": "비비케이", + "LA": "엘에이", + "Don": "", + "t worry be happy": " 워리 비 해피", + "NO": "엔오", ##### + "it was our sky": "잇 워즈 아워 스카이", + "it is our sky": "잇 이즈 아워 스카이", #### + "NEIS": "엔이아이에스", ##### + "IMF": "아이엠에프", 
+ "apology": "어폴로지", + "humble": "험블", + "M": "엠", + "Nowhere Man": "노웨어 맨", + "The Tenth Man": "더 텐쓰 맨", + "PBS": "피비에스", + "BBC": "비비씨", + "MRJ": "엠알제이", + "CCTV": "씨씨티비", + "Pick me up": "픽 미 업", + "DNA": "디엔에이", + "UN": "유엔", + "STOP": "스탑", ##### + "PRESS": "프레스", ##### + "not to be": "낫 투비", + "Denial": "디나이얼", + "G": "지", + "IMF": "아이엠에프", + "GDP": "지디피", + "JTBC": "제이티비씨", + "Time flies like an arrow": "타임 플라이즈 라이크 언 애로우", + "DDT": "디디티", + "AI": "에이아이", + "Z": "제트", + "OECD": "오이씨디", + "N": "앤", + "A": "에이", + "MB": "엠비", + "EH": "이에이치", + "IS": "아이에스", + "TV": "티비", + "MIT": "엠아이티", + "KBO": "케이비오", + "I love America": "아이 러브 아메리카", + "SF": "에스에프", + "Q": "큐", + "KFX": "케이에프엑스", + "PM": "피엠", + "Prime Minister": "프라임 미니스터", + "Swordline": "스워드라인", + "TBS": "티비에스", + "DDT": "디디티", + "CS": "씨에스", + "Reflecting Absence": "리플렉팅 앱센스", + "PBS": "피비에스", + "Drum being beaten by everyone": "드럼 빙 비튼 바이 에브리원", + "negative pressure": "네거티브 프레셔", + "F": "에프", + "KIA": "기아", + "FTA": "에프티에이", + "Que sais-je": "", + "UFC": "유에프씨", + "P": "피", + "DJ": "디제이", + "Chaebol": "채벌", + "BBC": "비비씨", + "OECD": "오이씨디", + "BC": "삐씨", + "C": "씨", + "B": "씨", + "KY": "케이와이", + "K": "케이", + "CEO": "씨이오", + "YH": "와이에치", + "IS": "아이에스", + "who are you": "후 얼 유", + "Y": "와이", + "The Devils Advocate": "더 데빌즈 어드보카트", + "YS": "와이에스", + "so sorry": "쏘 쏘리", + "Santa": "산타", + "Big Endian": "빅 엔디안", + "Small Endian": "스몰 엔디안", + "Oh Captain My Captain": "오 캡틴 마이 캡틴", + "AIB": "에이아이비", + "K": "케이", + "PBS": "피비에스", + # IU + "ASMR": "에이에스엠알", + "V": "브이", + "PD": "피디", + "CD": "씨디", + "ANR": "에이엔알", + "Twenty Three": "투엔티 쓰리", + "Through The Night": "쓰루 더 나잇", + "MD": "엠디", +} + +num_to_kor = { + "0": "영", + "1": "일", + "2": "이", + "3": "삼", + "4": "사", + "5": "오", + "6": "육", + "7": "칠", + "8": "팔", + "9": "구", +} + +unit_to_kor1 = {"%": "퍼센트", "cm": "센치미터", "mm": "밀리미터", "km": "킬로미터", "kg": "킬로그람"} +unit_to_kor2 = {"m": "미터"} + +upper_to_kor = { + "A": "에이", + "B": "비", + "C": "씨", + "D": 
"디", + "E": "이", + "F": "에프", + "G": "지", + "H": "에이치", + "I": "아이", + "J": "제이", + "K": "케이", + "L": "엘", + "M": "엠", + "N": "엔", + "O": "오", + "P": "피", + "Q": "큐", + "R": "알", + "S": "에스", + "T": "티", + "U": "유", + "V": "브이", + "W": "더블유", + "X": "엑스", + "Y": "와이", + "Z": "지", +} + + +""" +초성과 종성은 같아보이지만, 다른 character이다. + +'_-!'(),-.:;? ᄀᄁᄂᄃᄄᄅᄆᄇᄈᄉᄊᄋᄌᄍᄎᄏᄐᄑ하ᅢᅣᅤᅥᅦᅧᅨᅩᅪᅫᅬᅭᅮᅯᅰᅱᅲᅳᅴᅵᆨᆩᆪᆫᆬᆭᆮᆯᆰᆱᆲᆳᆴᆵᆶᆷᆸᆹᆺᆻᆼᆽᆾᆿᇀᇁᇂ~' + +'_': 0, '-': 7, '!': 2, "'": 3, '(': 4, ')': 5, ',': 6, '.': 8, ':': 9, ';': 10, +'?': 11, ' ': 12, 'ᄀ': 13, 'ᄁ': 14, 'ᄂ': 15, 'ᄃ': 16, 'ᄄ': 17, 'ᄅ': 18, 'ᄆ': 19, 'ᄇ': 20, +'ᄈ': 21, 'ᄉ': 22, 'ᄊ': 23, 'ᄋ': 24, 'ᄌ': 25, 'ᄍ': 26, 'ᄎ': 27, 'ᄏ': 28, 'ᄐ': 29, 'ᄑ': 30, +'ᄒ': 31, 'ᅡ': 32, 'ᅢ': 33, 'ᅣ': 34, 'ᅤ': 35, 'ᅥ': 36, 'ᅦ': 37, 'ᅧ': 38, 'ᅨ': 39, 'ᅩ': 40, +'ᅪ': 41, 'ᅫ': 42, 'ᅬ': 43, 'ᅭ': 44, 'ᅮ': 45, 'ᅯ': 46, 'ᅰ': 47, 'ᅱ': 48, 'ᅲ': 49, 'ᅳ': 50, +'ᅴ': 51, 'ᅵ': 52, 'ᆨ': 53, 'ᆩ': 54, 'ᆪ': 55, 'ᆫ': 56, 'ᆬ': 57, 'ᆭ': 58, 'ᆮ': 59, 'ᆯ': 60, +'ᆰ': 61, 'ᆱ': 62, 'ᆲ': 63, 'ᆳ': 64, 'ᆴ': 65, 'ᆵ': 66, 'ᆶ': 67, 'ᆷ': 68, 'ᆸ': 69, 'ᆹ': 70, +'ᆺ': 71, 'ᆻ': 72, 'ᆼ': 73, 'ᆽ': 74, 'ᆾ': 75, 'ᆿ': 76, 'ᇀ': 77, 'ᇁ': 78, 'ᇂ': 79, '~': 80 +""" + +_pad = "pad" +_eos = "eos" +_punctuation = "!'(),-.:;? 
" +_special = "-" + +_jamo_leads = [chr(_) for _ in range(0x1100, 0x1113)] +_jamo_vowels = [chr(_) for _ in range(0x1161, 0x1176)] +_jamo_tails = [chr(_) for _ in range(0x11A8, 0x11C3)] + +_letters = _jamo_leads + _jamo_vowels + _jamo_tails + +symbols = [_pad] + list(_special) + list(_punctuation) + _letters + [_eos] + +_symbol_to_id = {c: i for i, c in enumerate(symbols)} +_id_to_symbol = {i: c for i, c in enumerate(symbols)} + +quote_checker = """([`"'"“‘])(.+?)([`"'"”’])""" + + +def is_lead(char): + return char in _jamo_leads + + +def is_vowel(char): + return char in _jamo_vowels + + +def is_tail(char): + return char in _jamo_tails + + +def get_mode(char): + if is_lead(char): + return 0 + elif is_vowel(char): + return 1 + elif is_tail(char): + return 2 + else: + return -1 + + +def _get_text_from_candidates(candidates): + if len(candidates) == 0: + return "" + elif len(candidates) == 1: + return jamo_to_hcj(candidates[0]) + else: + return j2h(**dict(zip(["lead", "vowel", "tail"], candidates))) + + +def jamo_to_korean(text): + text = h2j(text) + + idx = 0 + new_text = "" + candidates = [] + + while True: + if idx >= len(text): + new_text += _get_text_from_candidates(candidates) + break + + char = text[idx] + mode = get_mode(char) + + if mode == 0: + new_text += _get_text_from_candidates(candidates) + candidates = [char] + elif mode == -1: + new_text += _get_text_from_candidates(candidates) + new_text += char + candidates = [] + else: + candidates.append(char) + + idx += 1 + return new_text + + +def compare_sentence_with_jamo(text1, text2): + return h2j(text1) != h2j(text2) + + +def tokenize(text, as_id=False): + # jamo package에 있는 hangul_to_jamo를 이용하여 한글 string을 초성/중성/종성으로 나눈다. 
+ text = normalize(text) + tokens = list( + hangul_to_jamo(text) + ) # '존경하는' --> ['ᄌ', 'ᅩ', 'ᆫ', 'ᄀ', 'ᅧ', 'ᆼ', 'ᄒ', 'ᅡ', 'ᄂ', 'ᅳ', 'ᆫ', '~'] + + if as_id: + return [_symbol_to_id[token] for token in tokens] + else: + return [token for token in tokens] + + +def tokenizer_fn(iterator): + return (token for x in iterator for token in tokenize(x, as_id=False)) + + +def normalize(text): + text = text.strip() + + text = re.sub("\(\d+일\)", "", text) + text = re.sub("\([⺀-⺙⺛-⻳⼀-⿕々〇〡-〩〸-〺〻㐀-䶵一-鿃豈-鶴侮-頻並-龎]+\)", "", text) + + text = normalize_with_dictionary(text, etc_dictionary) + text = normalize_english(text) + text = re.sub("[a-zA-Z]+", normalize_upper, text) + + text = normalize_quote(text) + text = normalize_number(text) + + return text + + +def normalize_with_dictionary(text, dic): + if any(key in text for key in dic.keys()): + pattern = re.compile("|".join(re.escape(key) for key in dic.keys())) + return pattern.sub(lambda x: dic[x.group()], text) + else: + return text + + +def normalize_english(text): + def fn(m): + word = m.group() + if word in english_dictionary: + return english_dictionary.get(word) + else: + return word + + text = re.sub("([A-Za-z]+)", fn, text) + return text + + +def normalize_upper(text): + text = text.group(0) + + if all([char.isupper() for char in text]): + return "".join(upper_to_kor[char] for char in text) + else: + return text + + +def normalize_quote(text): + def fn(found_text): + from nltk import sent_tokenize # NLTK doesn't along with multiprocessing + + found_text = found_text.group() + unquoted_text = found_text[1:-1] + + sentences = sent_tokenize(unquoted_text) + return " ".join(["'{}'".format(sent) for sent in sentences]) + + return re.sub(quote_checker, fn, text) + + +number_checker = "([+-]?\d[\d,]*)[\.]?\d*" +count_checker = "(시|명|가지|살|마리|포기|송이|수|톨|통|점|개|벌|척|채|다발|그루|자루|줄|켤레|그릇|잔|마디|상자|사람|곡|병|판)" + + +def normalize_number(text): + text = normalize_with_dictionary(text, unit_to_kor1) + text = normalize_with_dictionary(text, 
unit_to_kor2) + text = re.sub( + number_checker + count_checker, lambda x: number_to_korean(x, True), text + ) + text = re.sub(number_checker, lambda x: number_to_korean(x, False), text) + return text + + +num_to_kor1 = [""] + list("일이삼사오육칠팔구") +num_to_kor2 = [""] + list("만억조경해") +num_to_kor3 = [""] + list("십백천") + +# count_to_kor1 = [""] + ["하나","둘","셋","넷","다섯","여섯","일곱","여덟","아홉"] +count_to_kor1 = [""] + ["한", "두", "세", "네", "다섯", "여섯", "일곱", "여덟", "아홉"] + +count_tenth_dict = { + "십": "열", + "두십": "스물", + "세십": "서른", + "네십": "마흔", + "다섯십": "쉰", + "여섯십": "예순", + "일곱십": "일흔", + "여덟십": "여든", + "아홉십": "아흔", +} + + +def number_to_korean(num_str, is_count=False): + if is_count: + num_str, unit_str = num_str.group(1), num_str.group(2) + else: + num_str, unit_str = num_str.group(), "" + + num_str = num_str.replace(",", "") + num = ast.literal_eval(num_str) + + if num == 0: + return "영" + + check_float = num_str.split(".") + if len(check_float) == 2: + digit_str, float_str = check_float + elif len(check_float) >= 3: + raise Exception(" [!] Wrong number format") + else: + digit_str, float_str = check_float[0], None + + if is_count and float_str is not None: + raise Exception(" [!] 
`is_count` and float number does not fit each other") + + digit = int(digit_str) + + if digit_str.startswith("-"): + digit, digit_str = abs(digit), str(abs(digit)) + + kor = "" + size = len(str(digit)) + tmp = [] + + for i, v in enumerate(digit_str, start=1): + v = int(v) + + if v != 0: + if is_count: + tmp += count_to_kor1[v] + else: + tmp += num_to_kor1[v] + + tmp += num_to_kor3[(size - i) % 4] + + if (size - i) % 4 == 0 and len(tmp) != 0: + kor += "".join(tmp) + tmp = [] + kor += num_to_kor2[int((size - i) / 4)] + + if is_count: + if kor.startswith("한") and len(kor) > 1: + kor = kor[1:] + + if any(word in kor for word in count_tenth_dict): + kor = re.sub( + "|".join(count_tenth_dict.keys()), + lambda x: count_tenth_dict[x.group()], + kor, + ) + + if not is_count and kor.startswith("일") and len(kor) > 1: + kor = kor[1:] + + if float_str is not None: + kor += "쩜 " + kor += re.sub("\d", lambda x: num_to_kor[x.group()], float_str) + + if num_str.startswith("+"): + kor = "플러스 " + kor + elif num_str.startswith("-"): + kor = "마이너스 " + kor + + return kor + unit_str diff --git a/TensorFlowTTS/tensorflow_tts/utils/number_norm.py b/TensorFlowTTS/tensorflow_tts/utils/number_norm.py new file mode 100644 index 0000000000000000000000000000000000000000..b36b8b0085a573ab24f4f73a963ff0a82f80c64f --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/utils/number_norm.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2017 Keith Ito +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in +# all copies or 
substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +"""Number norm module.""" + + +import re + +import inflect + +_inflect = inflect.engine() +_comma_number_re = re.compile(r"([0-9][0-9\,]+[0-9])") +_decimal_number_re = re.compile(r"([0-9]+\.[0-9]+)") +_pounds_re = re.compile(r"£([0-9\,]*[0-9]+)") +_dollars_re = re.compile(r"\$([0-9\.\,]*[0-9]+)") +_ordinal_re = re.compile(r"[0-9]+(st|nd|rd|th)") +_number_re = re.compile(r"[0-9]+") + + +def _remove_commas(m): + return m.group(1).replace(",", "") + + +def _expand_decimal_point(m): + return m.group(1).replace(".", " point ") + + +def _expand_dollars(m): + match = m.group(1) + parts = match.split(".") + if len(parts) > 2: + return match + " dollars" # Unexpected format + dollars = int(parts[0]) if parts[0] else 0 + cents = int(parts[1]) if len(parts) > 1 and parts[1] else 0 + if dollars and cents: + dollar_unit = "dollar" if dollars == 1 else "dollars" + cent_unit = "cent" if cents == 1 else "cents" + return "%s %s, %s %s" % (dollars, dollar_unit, cents, cent_unit) + elif dollars: + dollar_unit = "dollar" if dollars == 1 else "dollars" + return "%s %s" % (dollars, dollar_unit) + elif cents: + cent_unit = "cent" if cents == 1 else "cents" + return "%s %s" % (cents, cent_unit) + else: + return "zero dollars" + + +def _expand_ordinal(m): + return _inflect.number_to_words(m.group(0)) + + +def _expand_number(m): + num = int(m.group(0)) + if num > 1000 and num < 3000: + if num == 2000: + return "two thousand" + elif num > 2000 and num < 2010: + return "two 
thousand " + _inflect.number_to_words(num % 100) + elif num % 100 == 0: + return _inflect.number_to_words(num // 100) + " hundred" + else: + return _inflect.number_to_words( + num, andword="", zero="oh", group=2 + ).replace(", ", " ") + else: + return _inflect.number_to_words(num, andword="") + + +def normalize_numbers(text): + text = re.sub(_comma_number_re, _remove_commas, text) + text = re.sub(_pounds_re, r"\1 pounds", text) + text = re.sub(_dollars_re, _expand_dollars, text) + text = re.sub(_decimal_number_re, _expand_decimal_point, text) + text = re.sub(_ordinal_re, _expand_ordinal, text) + text = re.sub(_number_re, _expand_number, text) + return text diff --git a/TensorFlowTTS/tensorflow_tts/utils/outliers.py b/TensorFlowTTS/tensorflow_tts/utils/outliers.py new file mode 100644 index 0000000000000000000000000000000000000000..d12088e4cf641b88861129778e1d404efe2356b2 --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/utils/outliers.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Outliers detection and remove.""" +import numpy as np + + +def is_outlier(x, p25, p75): + """Check if value is an outlier.""" + lower = p25 - 1.5 * (p75 - p25) + upper = p75 + 1.5 * (p75 - p25) + return x <= lower or x >= upper + + +def remove_outlier(x, p_bottom: int = 25, p_top: int = 75): + """Remove outlier from x.""" + p_bottom = np.percentile(x, p_bottom) + p_top = np.percentile(x, p_top) + + indices_of_outliers = [] + for ind, value in enumerate(x): + if is_outlier(value, p_bottom, p_top): + indices_of_outliers.append(ind) + + x[indices_of_outliers] = 0.0 + + # replace by mean f0. + x[indices_of_outliers] = np.max(x) + return x diff --git a/TensorFlowTTS/tensorflow_tts/utils/strategy.py b/TensorFlowTTS/tensorflow_tts/utils/strategy.py new file mode 100644 index 0000000000000000000000000000000000000000..30918c7ad7d0a998679f5bd993b399e828f3d96a --- /dev/null +++ b/TensorFlowTTS/tensorflow_tts/utils/strategy.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""Strategy util functions."""
import tensorflow as tf


def return_strategy():
    """Pick a tf.distribute strategy from the number of visible GPUs.

    Returns:
        tf.distribute.Strategy: CPU one-device strategy when no GPU is
        visible, GPU one-device strategy for exactly one GPU, and
        MirroredStrategy for multi-GPU hosts.
    """
    physical_devices = tf.config.list_physical_devices("GPU")
    if not physical_devices:
        return tf.distribute.OneDeviceStrategy(device="/cpu:0")
    if len(physical_devices) == 1:
        return tf.distribute.OneDeviceStrategy(device="/gpu:0")
    return tf.distribute.MirroredStrategy()


def calculate_3d_loss(y_gt, y_pred, loss_fn):
    """Calculate 3d loss, normally a mel-spectrogram loss.

    Args:
        y_gt: Ground-truth tensor, shape [B, T, D].
        y_pred: Predicted tensor, shape [B, T', D].
        loss_fn: Callable returning either a single loss tensor or a tuple
            of loss tensors.

    Returns:
        A per-sample loss tensor of shape [B], or a list of such tensors
        when *loss_fn* returns a tuple.
    """
    y_gt_T = tf.shape(y_gt)[1]
    y_pred_T = tf.shape(y_pred)[1]

    # There may be a length mismatch when training on multiple GPUs; slice
    # the longer tensor so the loss is computed over a common time length.
    if y_gt_T > y_pred_T:
        y_gt = tf.slice(y_gt, [0, 0, 0], [-1, y_pred_T, -1])
    elif y_pred_T > y_gt_T:
        y_pred = tf.slice(y_pred, [0, 0, 0], [-1, y_gt_T, -1])

    loss = loss_fn(y_gt, y_pred)
    # `isinstance(...) is False` replaced with the idiomatic `not isinstance`.
    if not isinstance(loss, tuple):
        # Reduce every non-batch axis -> shape [B].
        loss = tf.reduce_mean(loss, list(range(1, len(loss.shape))))
    else:
        loss = [
            tf.reduce_mean(item, list(range(1, len(item.shape))))  # shape [B]
            for item in loss
        ]
    return loss
def find_files(root_dir, query="*.wav", include_root_dir=True):
    """Find files recursively.

    Args:
        root_dir (str): Root directory to search (symlinks are followed).
        query (str): fnmatch-style filename pattern.
        include_root_dir (bool): If False, the ``root_dir + "/"`` prefix is
            stripped from each result.

    Returns:
        list: List of matching file paths.
    """
    matches = []
    for dirpath, _, filenames in os.walk(root_dir, followlinks=True):
        matches.extend(
            os.path.join(dirpath, name)
            for name in fnmatch.filter(filenames, query)
        )

    if not include_root_dir:
        prefix = root_dir + "/"
        matches = [path.replace(prefix, "") for path in matches]

    return matches
def save_weights(model, filepath):
    """Save model weights.

    Same as ``model.save_weights(filepath)``, but supports saving to S3 or
    GCS buckets using the TensorFlow GFile API.

    Args:
        model (tf.keras.Model): Model to save.
        filepath (str): Path to save the model weights to.
    """
    if _path_requires_gfile(filepath):
        # Remote destination: write to a local temp file first, then copy it
        # to the target via GFile (overwriting to preserve the original
        # save_weights semantics).
        suffix = os.path.splitext(filepath)[1]
        with tempfile.NamedTemporaryFile(suffix=suffix) as temp_file:
            model.save_weights(temp_file.name)
            tf.io.gfile.copy(temp_file.name, filepath, overwrite=True)
    else:
        model.save_weights(filepath)
class WeightNormalization(tf.keras.layers.Wrapper):
    """Layer wrapper to decouple magnitude and direction of the layer's weights.

    This wrapper reparameterizes a layer by decoupling the weight's
    magnitude and direction. This speeds up convergence by improving the
    conditioning of the optimization problem. It has an optional data-dependent
    initialization scheme, in which initial values of weights are set as functions
    of the first minibatch of data. Both the weight normalization and data-
    dependent initialization are described in [Salimans and Kingma (2016)][1].

    #### Example
    ```python
    net = WeightNorm(tf.keras.layers.Conv2D(2, 2, activation='relu'),
                     input_shape=(32, 32, 3), data_init=True)(x)
    net = WeightNorm(tf.keras.layers.Conv2DTranspose(16, 5, activation='relu'),
                     data_init=True)
    net = WeightNorm(tf.keras.layers.Dense(120, activation='relu'),
                     data_init=True)(net)
    net = WeightNorm(tf.keras.layers.Dense(num_classes),
                     data_init=True)(net)
    ```
    #### References
    [1]: Tim Salimans and Diederik P. Kingma. Weight Normalization: A Simple
         Reparameterization to Accelerate Training of Deep Neural Networks. In
         _30th Conference on Neural Information Processing Systems_, 2016.
         https://arxiv.org/abs/1602.07868
    """

    def __init__(self, layer, data_init=True, **kwargs):
        """Initialize WeightNorm wrapper.

        Args:
            layer: A `tf.keras.layers.Layer` instance. Supported layer types are
                `Dense`, `Conv2D`, and `Conv2DTranspose`. Layers with multiple
                inputs are not supported.
            data_init: `bool`, if `True` use data dependent variable
                initialization.
            **kwargs: Additional keyword args passed to `tf.keras.layers.Wrapper`.

        Raises:
            ValueError: If `layer` is not a `tf.keras.layers.Layer` instance.
        """
        if not isinstance(layer, tf.keras.layers.Layer):
            raise ValueError(
                "Please initialize `WeightNorm` layer with a `tf.keras.layers.Layer` "
                "instance. You passed: {input}".format(input=layer)
            )

        layer_type = type(layer).__name__
        # Untested layer types only trigger a warning, not an error — the
        # wrapper may still work as long as the layer exposes a `kernel`.
        if layer_type not in [
            "Dense",
            "Conv2D",
            "Conv2DTranspose",
            "Conv1D",
            "GroupConv1D",
        ]:
            warnings.warn(
                "`WeightNorm` is tested only for `Dense`, `Conv2D`, `Conv1D`, `GroupConv1D`, "
                "`GroupConv2D`, and `Conv2DTranspose` layers. You passed a layer of type `{}`".format(
                    layer_type
                )
            )

        super().__init__(layer, **kwargs)

        self.data_init = data_init
        self._track_trackable(layer, name="layer")
        # Axis of the kernel that indexes output filters: Conv2DTranspose
        # stores filters on axis -2; the other supported layers use -1.
        self.filter_axis = -2 if layer_type == "Conv2DTranspose" else -1

    def _compute_weights(self):
        """Generate weights with normalization."""
        # Determine the axis along which to expand `g` so that `g` broadcasts to
        # the shape of `v`.
        new_axis = -self.filter_axis - 3

        # kernel = g * v / ||v||, computed over the non-filter axes.
        self.layer.kernel = tf.nn.l2_normalize(
            self.v, axis=self.kernel_norm_axes
        ) * tf.expand_dims(self.g, new_axis)

    def _init_norm(self):
        """Set the norm of the weight vector."""
        # g <- ||v||, so the initial effective kernel equals the layer's
        # original initialization.
        kernel_norm = tf.sqrt(
            tf.reduce_sum(tf.square(self.v), axis=self.kernel_norm_axes)
        )
        self.g.assign(kernel_norm)

    def _data_dep_init(self, inputs):
        """Data dependent initialization."""
        # Normalize kernel first so that calling the layer calculates
        # `tf.dot(v, x)/tf.norm(v)` as in (5) in ([Salimans and Kingma, 2016][1]).
        self._compute_weights()

        # Temporarily disable the activation so the pre-activation statistics
        # can be measured; restored below.
        activation = self.layer.activation
        self.layer.activation = None

        use_bias = self.layer.bias is not None
        if use_bias:
            bias = self.layer.bias
            self.layer.bias = tf.zeros_like(bias)

        # Since the bias is initialized as zero, setting the activation to zero and
        # calling the initialized layer (with normalized kernel) yields the correct
        # computation ((5) in Salimans and Kingma (2016))
        x_init = self.layer(inputs)
        # Mean/variance over every axis except the last (features).
        norm_axes_out = list(range(x_init.shape.rank - 1))
        m_init, v_init = tf.nn.moments(x_init, norm_axes_out)
        # 1e-10 guards against division by zero for constant activations.
        scale_init = 1.0 / tf.sqrt(v_init + 1e-10)

        self.g.assign(self.g * scale_init)
        if use_bias:
            self.layer.bias = bias
            self.layer.bias.assign(-m_init * scale_init)
        self.layer.activation = activation

    def build(self, input_shape=None):
        """Build `Layer`.

        Args:
            input_shape: The shape of the input to `self.layer`.

        Raises:
            ValueError: If `Layer` does not contain a `kernel` of weights
        """
        if not self.layer.built:
            self.layer.build(input_shape)

        if not hasattr(self.layer, "kernel"):
            raise ValueError(
                "`WeightNorm` must wrap a layer that" " contains a `kernel` for weights"
            )

        # Normalize over every kernel axis except the filter axis.
        self.kernel_norm_axes = list(range(self.layer.kernel.shape.ndims))
        self.kernel_norm_axes.pop(self.filter_axis)

        # `v` is the direction variable (the wrapped layer's original kernel).
        self.v = self.layer.kernel

        # to avoid a duplicate `kernel` variable after `build` is called
        self.layer.kernel = None
        # `g` is the per-filter magnitude variable.
        self.g = self.add_weight(
            name="g",
            shape=(int(self.v.shape[self.filter_axis]),),
            initializer="ones",
            dtype=self.v.dtype,
            trainable=True,
        )
        # Flag variable so (data-dependent) initialization runs exactly once,
        # on the first call.
        self.initialized = self.add_weight(
            name="initialized", dtype=tf.bool, trainable=False
        )
        self.initialized.assign(False)

        super().build()

    def call(self, inputs):
        """Call `Layer`."""
        # NOTE(review): truth-testing a tf.Variable works eagerly; presumably
        # this layer is only built/called eagerly or retraced — confirm before
        # relying on it inside a tf.function.
        if not self.initialized:
            if self.data_init:
                self._data_dep_init(inputs)
            else:
                # initialize `g` as the norm of the initialized kernel
                self._init_norm()

            self.initialized.assign(True)

        self._compute_weights()
        output = self.layer(inputs)
        return output

    def compute_output_shape(self, input_shape):
        return tf.TensorShape(self.layer.compute_output_shape(input_shape).as_list())
"ang1": 43, "ang2": 44, "ang3": 45, "ang4": 46, "ang5": 47, "ao1": 48, "ao2": 49, "ao3": 50, "ao4": 51, "ao5": 52, "e1": 53, "e2": 54, "e3": 55, "e4": 56, "e5": 57, "ei1": 58, "ei2": 59, "ei3": 60, "ei4": 61, "ei5": 62, "en1": 63, "en2": 64, "en3": 65, "en4": 66, "en5": 67, "eng1": 68, "eng2": 69, "eng3": 70, "eng4": 71, "eng5": 72, "er1": 73, "er2": 74, "er3": 75, "er4": 76, "er5": 77, "i1": 78, "i2": 79, "i3": 80, "i4": 81, "i5": 82, "ia1": 83, "ia2": 84, "ia3": 85, "ia4": 86, "ia5": 87, "ian1": 88, "ian2": 89, "ian3": 90, "ian4": 91, "ian5": 92, "iang1": 93, "iang2": 94, "iang3": 95, "iang4": 96, "iang5": 97, "iao1": 98, "iao2": 99, "iao3": 100, "iao4": 101, "iao5": 102, "ie1": 103, "ie2": 104, "ie3": 105, "ie4": 106, "ie5": 107, "ii1": 108, "ii2": 109, "ii3": 110, "ii4": 111, "ii5": 112, "iii1": 113, "iii2": 114, "iii3": 115, "iii4": 116, "iii5": 117, "in1": 118, "in2": 119, "in3": 120, "in4": 121, "in5": 122, "ing1": 123, "ing2": 124, "ing3": 125, "ing4": 126, "ing5": 127, "iong1": 128, "iong2": 129, "iong3": 130, "iong4": 131, "iong5": 132, "iou1": 133, "iou2": 134, "iou3": 135, "iou4": 136, "iou5": 137, "o1": 138, "o2": 139, "o3": 140, "o4": 141, "o5": 142, "ong1": 143, "ong2": 144, "ong3": 145, "ong4": 146, "ong5": 147, "ou1": 148, "ou2": 149, "ou3": 150, "ou4": 151, "ou5": 152, "u1": 153, "u2": 154, "u3": 155, "u4": 156, "u5": 157, "ua1": 158, "ua2": 159, "ua3": 160, "ua4": 161, "ua5": 162, "uai1": 163, "uai2": 164, "uai3": 165, "uai4": 166, "uai5": 167, "uan1": 168, "uan2": 169, "uan3": 170, "uan4": 171, "uan5": 172, "uang1": 173, "uang2": 174, "uang3": 175, "uang4": 176, "uang5": 177, "uei1": 178, "uei2": 179, "uei3": 180, "uei4": 181, "uei5": 182, "uen1": 183, "uen2": 184, "uen3": 185, "uen4": 186, "uen5": 187, "ueng1": 188, "ueng2": 189, "ueng3": 190, "ueng4": 191, "ueng5": 192, "uo1": 193, "uo2": 194, "uo3": 195, "uo4": 196, "uo5": 197, "v1": 198, "v2": 199, "v3": 200, "v4": 201, "v5": 202, "van1": 203, "van2": 204, "van3": 205, "van4": 206, "van5": 
207, "ve1": 208, "ve2": 209, "ve3": 210, "ve4": 211, "ve5": 212, "vn1": 213, "vn2": 214, "vn3": 215, "vn4": 216, "vn5": 217, "eos": 218}, "id_to_symbol": {"0": "pad", "1": "sil", "2": "#0", "3": "#1", "4": "#2", "5": "#3", "6": "^", "7": "b", "8": "c", "9": "ch", "10": "d", "11": "f", "12": "g", "13": "h", "14": "j", "15": "k", "16": "l", "17": "m", "18": "n", "19": "p", "20": "q", "21": "r", "22": "s", "23": "sh", "24": "t", "25": "x", "26": "z", "27": "zh", "28": "a1", "29": "a2", "30": "a3", "31": "a4", "32": "a5", "33": "ai1", "34": "ai2", "35": "ai3", "36": "ai4", "37": "ai5", "38": "an1", "39": "an2", "40": "an3", "41": "an4", "42": "an5", "43": "ang1", "44": "ang2", "45": "ang3", "46": "ang4", "47": "ang5", "48": "ao1", "49": "ao2", "50": "ao3", "51": "ao4", "52": "ao5", "53": "e1", "54": "e2", "55": "e3", "56": "e4", "57": "e5", "58": "ei1", "59": "ei2", "60": "ei3", "61": "ei4", "62": "ei5", "63": "en1", "64": "en2", "65": "en3", "66": "en4", "67": "en5", "68": "eng1", "69": "eng2", "70": "eng3", "71": "eng4", "72": "eng5", "73": "er1", "74": "er2", "75": "er3", "76": "er4", "77": "er5", "78": "i1", "79": "i2", "80": "i3", "81": "i4", "82": "i5", "83": "ia1", "84": "ia2", "85": "ia3", "86": "ia4", "87": "ia5", "88": "ian1", "89": "ian2", "90": "ian3", "91": "ian4", "92": "ian5", "93": "iang1", "94": "iang2", "95": "iang3", "96": "iang4", "97": "iang5", "98": "iao1", "99": "iao2", "100": "iao3", "101": "iao4", "102": "iao5", "103": "ie1", "104": "ie2", "105": "ie3", "106": "ie4", "107": "ie5", "108": "ii1", "109": "ii2", "110": "ii3", "111": "ii4", "112": "ii5", "113": "iii1", "114": "iii2", "115": "iii3", "116": "iii4", "117": "iii5", "118": "in1", "119": "in2", "120": "in3", "121": "in4", "122": "in5", "123": "ing1", "124": "ing2", "125": "ing3", "126": "ing4", "127": "ing5", "128": "iong1", "129": "iong2", "130": "iong3", "131": "iong4", "132": "iong5", "133": "iou1", "134": "iou2", "135": "iou3", "136": "iou4", "137": "iou5", "138": "o1", "139": "o2", 
"140": "o3", "141": "o4", "142": "o5", "143": "ong1", "144": "ong2", "145": "ong3", "146": "ong4", "147": "ong5", "148": "ou1", "149": "ou2", "150": "ou3", "151": "ou4", "152": "ou5", "153": "u1", "154": "u2", "155": "u3", "156": "u4", "157": "u5", "158": "ua1", "159": "ua2", "160": "ua3", "161": "ua4", "162": "ua5", "163": "uai1", "164": "uai2", "165": "uai3", "166": "uai4", "167": "uai5", "168": "uan1", "169": "uan2", "170": "uan3", "171": "uan4", "172": "uan5", "173": "uang1", "174": "uang2", "175": "uang3", "176": "uang4", "177": "uang5", "178": "uei1", "179": "uei2", "180": "uei3", "181": "uei4", "182": "uei5", "183": "uen1", "184": "uen2", "185": "uen3", "186": "uen4", "187": "uen5", "188": "ueng1", "189": "ueng2", "190": "ueng3", "191": "ueng4", "192": "ueng5", "193": "uo1", "194": "uo2", "195": "uo3", "196": "uo4", "197": "uo5", "198": "v1", "199": "v2", "200": "v3", "201": "v4", "202": "v5", "203": "van1", "204": "van2", "205": "van3", "206": "van4", "207": "van5", "208": "ve1", "209": "ve2", "210": "ve3", "211": "ve4", "212": "ve5", "213": "vn1", "214": "vn2", "215": "vn3", "216": "vn4", "217": "vn5", "218": "eos"}, "speakers_map": {"baker": 0}, "processor_name": "BakerProcessor", "pinyin_dict": {"a": ["^", "a"], "ai": ["^", "ai"], "an": ["^", "an"], "ang": ["^", "ang"], "ao": ["^", "ao"], "ba": ["b", "a"], "bai": ["b", "ai"], "ban": ["b", "an"], "bang": ["b", "ang"], "bao": ["b", "ao"], "be": ["b", "e"], "bei": ["b", "ei"], "ben": ["b", "en"], "beng": ["b", "eng"], "bi": ["b", "i"], "bian": ["b", "ian"], "biao": ["b", "iao"], "bie": ["b", "ie"], "bin": ["b", "in"], "bing": ["b", "ing"], "bo": ["b", "o"], "bu": ["b", "u"], "ca": ["c", "a"], "cai": ["c", "ai"], "can": ["c", "an"], "cang": ["c", "ang"], "cao": ["c", "ao"], "ce": ["c", "e"], "cen": ["c", "en"], "ceng": ["c", "eng"], "cha": ["ch", "a"], "chai": ["ch", "ai"], "chan": ["ch", "an"], "chang": ["ch", "ang"], "chao": ["ch", "ao"], "che": ["ch", "e"], "chen": ["ch", "en"], "cheng": ["ch", "eng"], 
"chi": ["ch", "iii"], "chong": ["ch", "ong"], "chou": ["ch", "ou"], "chu": ["ch", "u"], "chua": ["ch", "ua"], "chuai": ["ch", "uai"], "chuan": ["ch", "uan"], "chuang": ["ch", "uang"], "chui": ["ch", "uei"], "chun": ["ch", "uen"], "chuo": ["ch", "uo"], "ci": ["c", "ii"], "cong": ["c", "ong"], "cou": ["c", "ou"], "cu": ["c", "u"], "cuan": ["c", "uan"], "cui": ["c", "uei"], "cun": ["c", "uen"], "cuo": ["c", "uo"], "da": ["d", "a"], "dai": ["d", "ai"], "dan": ["d", "an"], "dang": ["d", "ang"], "dao": ["d", "ao"], "de": ["d", "e"], "dei": ["d", "ei"], "den": ["d", "en"], "deng": ["d", "eng"], "di": ["d", "i"], "dia": ["d", "ia"], "dian": ["d", "ian"], "diao": ["d", "iao"], "die": ["d", "ie"], "ding": ["d", "ing"], "diu": ["d", "iou"], "dong": ["d", "ong"], "dou": ["d", "ou"], "du": ["d", "u"], "duan": ["d", "uan"], "dui": ["d", "uei"], "dun": ["d", "uen"], "duo": ["d", "uo"], "e": ["^", "e"], "ei": ["^", "ei"], "en": ["^", "en"], "ng": ["^", "en"], "eng": ["^", "eng"], "er": ["^", "er"], "fa": ["f", "a"], "fan": ["f", "an"], "fang": ["f", "ang"], "fei": ["f", "ei"], "fen": ["f", "en"], "feng": ["f", "eng"], "fo": ["f", "o"], "fou": ["f", "ou"], "fu": ["f", "u"], "ga": ["g", "a"], "gai": ["g", "ai"], "gan": ["g", "an"], "gang": ["g", "ang"], "gao": ["g", "ao"], "ge": ["g", "e"], "gei": ["g", "ei"], "gen": ["g", "en"], "geng": ["g", "eng"], "gong": ["g", "ong"], "gou": ["g", "ou"], "gu": ["g", "u"], "gua": ["g", "ua"], "guai": ["g", "uai"], "guan": ["g", "uan"], "guang": ["g", "uang"], "gui": ["g", "uei"], "gun": ["g", "uen"], "guo": ["g", "uo"], "ha": ["h", "a"], "hai": ["h", "ai"], "han": ["h", "an"], "hang": ["h", "ang"], "hao": ["h", "ao"], "he": ["h", "e"], "hei": ["h", "ei"], "hen": ["h", "en"], "heng": ["h", "eng"], "hong": ["h", "ong"], "hou": ["h", "ou"], "hu": ["h", "u"], "hua": ["h", "ua"], "huai": ["h", "uai"], "huan": ["h", "uan"], "huang": ["h", "uang"], "hui": ["h", "uei"], "hun": ["h", "uen"], "huo": ["h", "uo"], "ji": ["j", "i"], "jia": ["j", "ia"], 
"jian": ["j", "ian"], "jiang": ["j", "iang"], "jiao": ["j", "iao"], "jie": ["j", "ie"], "jin": ["j", "in"], "jing": ["j", "ing"], "jiong": ["j", "iong"], "jiu": ["j", "iou"], "ju": ["j", "v"], "juan": ["j", "van"], "jue": ["j", "ve"], "jun": ["j", "vn"], "ka": ["k", "a"], "kai": ["k", "ai"], "kan": ["k", "an"], "kang": ["k", "ang"], "kao": ["k", "ao"], "ke": ["k", "e"], "kei": ["k", "ei"], "ken": ["k", "en"], "keng": ["k", "eng"], "kong": ["k", "ong"], "kou": ["k", "ou"], "ku": ["k", "u"], "kua": ["k", "ua"], "kuai": ["k", "uai"], "kuan": ["k", "uan"], "kuang": ["k", "uang"], "kui": ["k", "uei"], "kun": ["k", "uen"], "kuo": ["k", "uo"], "la": ["l", "a"], "lai": ["l", "ai"], "lan": ["l", "an"], "lang": ["l", "ang"], "lao": ["l", "ao"], "le": ["l", "e"], "lei": ["l", "ei"], "leng": ["l", "eng"], "li": ["l", "i"], "lia": ["l", "ia"], "lian": ["l", "ian"], "liang": ["l", "iang"], "liao": ["l", "iao"], "lie": ["l", "ie"], "lin": ["l", "in"], "ling": ["l", "ing"], "liu": ["l", "iou"], "lo": ["l", "o"], "long": ["l", "ong"], "lou": ["l", "ou"], "lu": ["l", "u"], "lv": ["l", "v"], "luan": ["l", "uan"], "lve": ["l", "ve"], "lue": ["l", "ve"], "lun": ["l", "uen"], "luo": ["l", "uo"], "ma": ["m", "a"], "mai": ["m", "ai"], "man": ["m", "an"], "mang": ["m", "ang"], "mao": ["m", "ao"], "me": ["m", "e"], "mei": ["m", "ei"], "men": ["m", "en"], "meng": ["m", "eng"], "mi": ["m", "i"], "mian": ["m", "ian"], "miao": ["m", "iao"], "mie": ["m", "ie"], "min": ["m", "in"], "ming": ["m", "ing"], "miu": ["m", "iou"], "mo": ["m", "o"], "mou": ["m", "ou"], "mu": ["m", "u"], "na": ["n", "a"], "nai": ["n", "ai"], "nan": ["n", "an"], "nang": ["n", "ang"], "nao": ["n", "ao"], "ne": ["n", "e"], "nei": ["n", "ei"], "nen": ["n", "en"], "neng": ["n", "eng"], "ni": ["n", "i"], "nia": ["n", "ia"], "nian": ["n", "ian"], "niang": ["n", "iang"], "niao": ["n", "iao"], "nie": ["n", "ie"], "nin": ["n", "in"], "ning": ["n", "ing"], "niu": ["n", "iou"], "nong": ["n", "ong"], "nou": ["n", "ou"], "nu": ["n", 
"u"], "nv": ["n", "v"], "nuan": ["n", "uan"], "nve": ["n", "ve"], "nue": ["n", "ve"], "nuo": ["n", "uo"], "o": ["^", "o"], "ou": ["^", "ou"], "pa": ["p", "a"], "pai": ["p", "ai"], "pan": ["p", "an"], "pang": ["p", "ang"], "pao": ["p", "ao"], "pe": ["p", "e"], "pei": ["p", "ei"], "pen": ["p", "en"], "peng": ["p", "eng"], "pi": ["p", "i"], "pian": ["p", "ian"], "piao": ["p", "iao"], "pie": ["p", "ie"], "pin": ["p", "in"], "ping": ["p", "ing"], "po": ["p", "o"], "pou": ["p", "ou"], "pu": ["p", "u"], "qi": ["q", "i"], "qia": ["q", "ia"], "qian": ["q", "ian"], "qiang": ["q", "iang"], "qiao": ["q", "iao"], "qie": ["q", "ie"], "qin": ["q", "in"], "qing": ["q", "ing"], "qiong": ["q", "iong"], "qiu": ["q", "iou"], "qu": ["q", "v"], "quan": ["q", "van"], "que": ["q", "ve"], "qun": ["q", "vn"], "ran": ["r", "an"], "rang": ["r", "ang"], "rao": ["r", "ao"], "re": ["r", "e"], "ren": ["r", "en"], "reng": ["r", "eng"], "ri": ["r", "iii"], "rong": ["r", "ong"], "rou": ["r", "ou"], "ru": ["r", "u"], "rua": ["r", "ua"], "ruan": ["r", "uan"], "rui": ["r", "uei"], "run": ["r", "uen"], "ruo": ["r", "uo"], "sa": ["s", "a"], "sai": ["s", "ai"], "san": ["s", "an"], "sang": ["s", "ang"], "sao": ["s", "ao"], "se": ["s", "e"], "sen": ["s", "en"], "seng": ["s", "eng"], "sha": ["sh", "a"], "shai": ["sh", "ai"], "shan": ["sh", "an"], "shang": ["sh", "ang"], "shao": ["sh", "ao"], "she": ["sh", "e"], "shei": ["sh", "ei"], "shen": ["sh", "en"], "sheng": ["sh", "eng"], "shi": ["sh", "iii"], "shou": ["sh", "ou"], "shu": ["sh", "u"], "shua": ["sh", "ua"], "shuai": ["sh", "uai"], "shuan": ["sh", "uan"], "shuang": ["sh", "uang"], "shui": ["sh", "uei"], "shun": ["sh", "uen"], "shuo": ["sh", "uo"], "si": ["s", "ii"], "song": ["s", "ong"], "sou": ["s", "ou"], "su": ["s", "u"], "suan": ["s", "uan"], "sui": ["s", "uei"], "sun": ["s", "uen"], "suo": ["s", "uo"], "ta": ["t", "a"], "tai": ["t", "ai"], "tan": ["t", "an"], "tang": ["t", "ang"], "tao": ["t", "ao"], "te": ["t", "e"], "tei": ["t", "ei"], "teng": 
["t", "eng"], "ti": ["t", "i"], "tian": ["t", "ian"], "tiao": ["t", "iao"], "tie": ["t", "ie"], "ting": ["t", "ing"], "tong": ["t", "ong"], "tou": ["t", "ou"], "tu": ["t", "u"], "tuan": ["t", "uan"], "tui": ["t", "uei"], "tun": ["t", "uen"], "tuo": ["t", "uo"], "wa": ["^", "ua"], "wai": ["^", "uai"], "wan": ["^", "uan"], "wang": ["^", "uang"], "wei": ["^", "uei"], "wen": ["^", "uen"], "weng": ["^", "ueng"], "wo": ["^", "uo"], "wu": ["^", "u"], "xi": ["x", "i"], "xia": ["x", "ia"], "xian": ["x", "ian"], "xiang": ["x", "iang"], "xiao": ["x", "iao"], "xie": ["x", "ie"], "xin": ["x", "in"], "xing": ["x", "ing"], "xiong": ["x", "iong"], "xiu": ["x", "iou"], "xu": ["x", "v"], "xuan": ["x", "van"], "xue": ["x", "ve"], "xun": ["x", "vn"], "ya": ["^", "ia"], "yan": ["^", "ian"], "yang": ["^", "iang"], "yao": ["^", "iao"], "ye": ["^", "ie"], "yi": ["^", "i"], "yin": ["^", "in"], "ying": ["^", "ing"], "yo": ["^", "iou"], "yong": ["^", "iong"], "you": ["^", "iou"], "yu": ["^", "v"], "yuan": ["^", "van"], "yue": ["^", "ve"], "yun": ["^", "vn"], "za": ["z", "a"], "zai": ["z", "ai"], "zan": ["z", "an"], "zang": ["z", "ang"], "zao": ["z", "ao"], "ze": ["z", "e"], "zei": ["z", "ei"], "zen": ["z", "en"], "zeng": ["z", "eng"], "zha": ["zh", "a"], "zhai": ["zh", "ai"], "zhan": ["zh", "an"], "zhang": ["zh", "ang"], "zhao": ["zh", "ao"], "zhe": ["zh", "e"], "zhei": ["zh", "ei"], "zhen": ["zh", "en"], "zheng": ["zh", "eng"], "zhi": ["zh", "iii"], "zhong": ["zh", "ong"], "zhou": ["zh", "ou"], "zhu": ["zh", "u"], "zhua": ["zh", "ua"], "zhuai": ["zh", "uai"], "zhuan": ["zh", "uan"], "zhuang": ["zh", "uang"], "zhui": ["zh", "uei"], "zhun": ["zh", "uen"], "zhuo": ["zh", "uo"], "zi": ["z", "ii"], "zong": ["z", "ong"], "zou": ["z", "ou"], "zu": ["z", "u"], "zuan": ["z", "uan"], "zui": ["z", "uei"], "zun": ["z", "uen"], "zuo": ["z", "uo"]}} \ No newline at end of file diff --git a/TensorFlowTTS/test/files/kss_mapper.json b/TensorFlowTTS/test/files/kss_mapper.json new file mode 100644 index 
0000000000000000000000000000000000000000..5feaa0a766a93581f9fabfa36005bf37968385d1 --- /dev/null +++ b/TensorFlowTTS/test/files/kss_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"pad": 0, "-": 7, "!": 2, "'": 3, "(": 4, ")": 5, ",": 6, ".": 8, ":": 9, ";": 10, "?": 11, " ": 12, "\u1100": 13, "\u1101": 14, "\u1102": 15, "\u1103": 16, "\u1104": 17, "\u1105": 18, "\u1106": 19, "\u1107": 20, "\u1108": 21, "\u1109": 22, "\u110a": 23, "\u110b": 24, "\u110c": 25, "\u110d": 26, "\u110e": 27, "\u110f": 28, "\u1110": 29, "\u1111": 30, "\u1112": 31, "\u1161": 32, "\u1162": 33, "\u1163": 34, "\u1164": 35, "\u1165": 36, "\u1166": 37, "\u1167": 38, "\u1168": 39, "\u1169": 40, "\u116a": 41, "\u116b": 42, "\u116c": 43, "\u116d": 44, "\u116e": 45, "\u116f": 46, "\u1170": 47, "\u1171": 48, "\u1172": 49, "\u1173": 50, "\u1174": 51, "\u1175": 52, "\u11a8": 53, "\u11a9": 54, "\u11aa": 55, "\u11ab": 56, "\u11ac": 57, "\u11ad": 58, "\u11ae": 59, "\u11af": 60, "\u11b0": 61, "\u11b1": 62, "\u11b2": 63, "\u11b3": 64, "\u11b4": 65, "\u11b5": 66, "\u11b6": 67, "\u11b7": 68, "\u11b8": 69, "\u11b9": 70, "\u11ba": 71, "\u11bb": 72, "\u11bc": 73, "\u11bd": 74, "\u11be": 75, "\u11bf": 76, "\u11c0": 77, "\u11c1": 78, "\u11c2": 79, "eos": 80}, "id_to_symbol": {"0": "pad", "1": "-", "2": "!", "3": "'", "4": "(", "5": ")", "6": ",", "7": "-", "8": ".", "9": ":", "10": ";", "11": "?", "12": " ", "13": "\u1100", "14": "\u1101", "15": "\u1102", "16": "\u1103", "17": "\u1104", "18": "\u1105", "19": "\u1106", "20": "\u1107", "21": "\u1108", "22": "\u1109", "23": "\u110a", "24": "\u110b", "25": "\u110c", "26": "\u110d", "27": "\u110e", "28": "\u110f", "29": "\u1110", "30": "\u1111", "31": "\u1112", "32": "\u1161", "33": "\u1162", "34": "\u1163", "35": "\u1164", "36": "\u1165", "37": "\u1166", "38": "\u1167", "39": "\u1168", "40": "\u1169", "41": "\u116a", "42": "\u116b", "43": "\u116c", "44": "\u116d", "45": "\u116e", "46": "\u116f", "47": "\u1170", "48": "\u1171", "49": "\u1172", "50": "\u1173", "51": 
"\u1174", "52": "\u1175", "53": "\u11a8", "54": "\u11a9", "55": "\u11aa", "56": "\u11ab", "57": "\u11ac", "58": "\u11ad", "59": "\u11ae", "60": "\u11af", "61": "\u11b0", "62": "\u11b1", "63": "\u11b2", "64": "\u11b3", "65": "\u11b4", "66": "\u11b5", "67": "\u11b6", "68": "\u11b7", "69": "\u11b8", "70": "\u11b9", "71": "\u11ba", "72": "\u11bb", "73": "\u11bc", "74": "\u11bd", "75": "\u11be", "76": "\u11bf", "77": "\u11c0", "78": "\u11c1", "79": "\u11c2", "80": "eos"}, "speakers_map": {"kss": 0}, "processor_name": "KSSProcessor"} \ No newline at end of file diff --git a/TensorFlowTTS/test/files/libritts_mapper.json b/TensorFlowTTS/test/files/libritts_mapper.json new file mode 100644 index 0000000000000000000000000000000000000000..e7c56710d19138ddbd1afc319eca568dd5bccb99 --- /dev/null +++ b/TensorFlowTTS/test/files/libritts_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"@": 0, "@": 1, "@": 2, "@": 3, "@AA0": 4, "@AA1": 5, "@AA2": 6, "@AE0": 7, "@AE1": 8, "@AE2": 9, "@AH0": 10, "@AH1": 11, "@AH2": 12, "@AO0": 13, "@AO1": 14, "@AO2": 15, "@AW0": 16, "@AW1": 17, "@AW2": 18, "@AY0": 19, "@AY1": 20, "@AY2": 21, "@B": 22, "@CH": 23, "@D": 24, "@DH": 25, "@EH0": 26, "@EH1": 27, "@EH2": 28, "@ER0": 29, "@ER1": 30, "@ER2": 31, "@EY0": 32, "@EY1": 33, "@EY2": 34, "@F": 35, "@G": 36, "@HH": 37, "@IH0": 38, "@IH1": 39, "@IH2": 40, "@IY0": 41, "@IY1": 42, "@IY2": 43, "@JH": 44, "@K": 45, "@L": 46, "@M": 47, "@N": 48, "@NG": 49, "@OW0": 50, "@OW1": 51, "@OW2": 52, "@OY0": 53, "@OY1": 54, "@OY2": 55, "@P": 56, "@R": 57, "@S": 58, "@SH": 59, "@T": 60, "@TH": 61, "@UH0": 62, "@UH1": 63, "@UH2": 64, "@UW": 65, "@UW0": 66, "@UW1": 67, "@UW2": 68, "@V": 69, "@W": 70, "@Y": 71, "@Z": 72, "@ZH": 73, "@SIL": 74, "@END": 75, "!": 76, "'": 77, "(": 78, ")": 79, ",": 80, ".": 81, ":": 82, ";": 83, "?": 84, " ": 85}, "id_to_symbol": {"0": "@", "1": "@", "2": "@", "3": "@", "4": "@AA0", "5": "@AA1", "6": "@AA2", "7": "@AE0", "8": "@AE1", "9": "@AE2", "10": "@AH0", "11": "@AH1", "12": "@AH2", 
"13": "@AO0", "14": "@AO1", "15": "@AO2", "16": "@AW0", "17": "@AW1", "18": "@AW2", "19": "@AY0", "20": "@AY1", "21": "@AY2", "22": "@B", "23": "@CH", "24": "@D", "25": "@DH", "26": "@EH0", "27": "@EH1", "28": "@EH2", "29": "@ER0", "30": "@ER1", "31": "@ER2", "32": "@EY0", "33": "@EY1", "34": "@EY2", "35": "@F", "36": "@G", "37": "@HH", "38": "@IH0", "39": "@IH1", "40": "@IH2", "41": "@IY0", "42": "@IY1", "43": "@IY2", "44": "@JH", "45": "@K", "46": "@L", "47": "@M", "48": "@N", "49": "@NG", "50": "@OW0", "51": "@OW1", "52": "@OW2", "53": "@OY0", "54": "@OY1", "55": "@OY2", "56": "@P", "57": "@R", "58": "@S", "59": "@SH", "60": "@T", "61": "@TH", "62": "@UH0", "63": "@UH1", "64": "@UH2", "65": "@UW", "66": "@UW0", "67": "@UW1", "68": "@UW2", "69": "@V", "70": "@W", "71": "@Y", "72": "@Z", "73": "@ZH", "74": "@SIL", "75": "@END", "76": "!", "77": "'", "78": "(", "79": ")", "80": ",", "81": ".", "82": ":", "83": ";", "84": "?", "85": " "}, "speakers_map": {"200": 0, "1841": 1, "3664": 2, "6454": 3, "8108": 4, "2416": 5, "4680": 6, "6147": 7, "412": 8, "2952": 9, "8838": 10, "2836": 11, "1263": 12, "5322": 13, "3830": 14, "7447": 15, "1116": 16, "8312": 17, "8123": 18, "250": 19}, "processor_name": "LibriTTSProcessor"} \ No newline at end of file diff --git a/TensorFlowTTS/test/files/ljspeech_mapper.json b/TensorFlowTTS/test/files/ljspeech_mapper.json new file mode 100644 index 0000000000000000000000000000000000000000..71bcd42422041bd42e59cbb35722f76ca87717d8 --- /dev/null +++ b/TensorFlowTTS/test/files/ljspeech_mapper.json @@ -0,0 +1 @@ +{"symbol_to_id": {"pad": 0, "-": 1, "!": 2, "'": 3, "(": 4, ")": 5, ",": 6, ".": 7, ":": 8, ";": 9, "?": 10, " ": 11, "A": 12, "B": 13, "C": 14, "D": 15, "E": 16, "F": 17, "G": 18, "H": 19, "I": 20, "J": 21, "K": 22, "L": 23, "M": 24, "N": 25, "O": 26, "P": 27, "Q": 28, "R": 29, "S": 30, "T": 31, "U": 32, "V": 33, "W": 34, "X": 35, "Y": 36, "Z": 37, "a": 38, "b": 39, "c": 40, "d": 41, "e": 42, "f": 43, "g": 44, "h": 45, "i": 46, "j": 
47, "k": 48, "l": 49, "m": 50, "n": 51, "o": 52, "p": 53, "q": 54, "r": 55, "s": 56, "t": 57, "u": 58, "v": 59, "w": 60, "x": 61, "y": 62, "z": 63, "@AA": 64, "@AA0": 65, "@AA1": 66, "@AA2": 67, "@AE": 68, "@AE0": 69, "@AE1": 70, "@AE2": 71, "@AH": 72, "@AH0": 73, "@AH1": 74, "@AH2": 75, "@AO": 76, "@AO0": 77, "@AO1": 78, "@AO2": 79, "@AW": 80, "@AW0": 81, "@AW1": 82, "@AW2": 83, "@AY": 84, "@AY0": 85, "@AY1": 86, "@AY2": 87, "@B": 88, "@CH": 89, "@D": 90, "@DH": 91, "@EH": 92, "@EH0": 93, "@EH1": 94, "@EH2": 95, "@ER": 96, "@ER0": 97, "@ER1": 98, "@ER2": 99, "@EY": 100, "@EY0": 101, "@EY1": 102, "@EY2": 103, "@F": 104, "@G": 105, "@HH": 106, "@IH": 107, "@IH0": 108, "@IH1": 109, "@IH2": 110, "@IY": 111, "@IY0": 112, "@IY1": 113, "@IY2": 114, "@JH": 115, "@K": 116, "@L": 117, "@M": 118, "@N": 119, "@NG": 120, "@OW": 121, "@OW0": 122, "@OW1": 123, "@OW2": 124, "@OY": 125, "@OY0": 126, "@OY1": 127, "@OY2": 128, "@P": 129, "@R": 130, "@S": 131, "@SH": 132, "@T": 133, "@TH": 134, "@UH": 135, "@UH0": 136, "@UH1": 137, "@UH2": 138, "@UW": 139, "@UW0": 140, "@UW1": 141, "@UW2": 142, "@V": 143, "@W": 144, "@Y": 145, "@Z": 146, "@ZH": 147, "eos": 148}, "id_to_symbol": {"0": "pad", "1": "-", "2": "!", "3": "'", "4": "(", "5": ")", "6": ",", "7": ".", "8": ":", "9": ";", "10": "?", "11": " ", "12": "A", "13": "B", "14": "C", "15": "D", "16": "E", "17": "F", "18": "G", "19": "H", "20": "I", "21": "J", "22": "K", "23": "L", "24": "M", "25": "N", "26": "O", "27": "P", "28": "Q", "29": "R", "30": "S", "31": "T", "32": "U", "33": "V", "34": "W", "35": "X", "36": "Y", "37": "Z", "38": "a", "39": "b", "40": "c", "41": "d", "42": "e", "43": "f", "44": "g", "45": "h", "46": "i", "47": "j", "48": "k", "49": "l", "50": "m", "51": "n", "52": "o", "53": "p", "54": "q", "55": "r", "56": "s", "57": "t", "58": "u", "59": "v", "60": "w", "61": "x", "62": "y", "63": "z", "64": "@AA", "65": "@AA0", "66": "@AA1", "67": "@AA2", "68": "@AE", "69": "@AE0", "70": "@AE1", "71": "@AE2", "72": "@AH", 
"73": "@AH0", "74": "@AH1", "75": "@AH2", "76": "@AO", "77": "@AO0", "78": "@AO1", "79": "@AO2", "80": "@AW", "81": "@AW0", "82": "@AW1", "83": "@AW2", "84": "@AY", "85": "@AY0", "86": "@AY1", "87": "@AY2", "88": "@B", "89": "@CH", "90": "@D", "91": "@DH", "92": "@EH", "93": "@EH0", "94": "@EH1", "95": "@EH2", "96": "@ER", "97": "@ER0", "98": "@ER1", "99": "@ER2", "100": "@EY", "101": "@EY0", "102": "@EY1", "103": "@EY2", "104": "@F", "105": "@G", "106": "@HH", "107": "@IH", "108": "@IH0", "109": "@IH1", "110": "@IH2", "111": "@IY", "112": "@IY0", "113": "@IY1", "114": "@IY2", "115": "@JH", "116": "@K", "117": "@L", "118": "@M", "119": "@N", "120": "@NG", "121": "@OW", "122": "@OW0", "123": "@OW1", "124": "@OW2", "125": "@OY", "126": "@OY0", "127": "@OY1", "128": "@OY2", "129": "@P", "130": "@R", "131": "@S", "132": "@SH", "133": "@T", "134": "@TH", "135": "@UH", "136": "@UH0", "137": "@UH1", "138": "@UH2", "139": "@UW", "140": "@UW0", "141": "@UW1", "142": "@UW2", "143": "@V", "144": "@W", "145": "@Y", "146": "@Z", "147": "@ZH", "148": "eos"}, "speakers_map": {"ljspeech": 0}, "processor_name": "LJSpeechProcessor"} \ No newline at end of file diff --git a/TensorFlowTTS/test/files/mapper.json b/TensorFlowTTS/test/files/mapper.json new file mode 100644 index 0000000000000000000000000000000000000000..7945e8958c9ee73b037f3b8f858bac6bf267bc54 --- /dev/null +++ b/TensorFlowTTS/test/files/mapper.json @@ -0,0 +1,17 @@ +{ + "speakers_map": { + "test_one": 0, + "test_two": 1 + }, + "symbol_to_id": { + "a": 0, + "b": 1, + "@ph": 2 + }, + "id_to_symbol": { + "0": "a", + "1": "b", + "2": "@ph" + }, + "processor_name": "KSSProcessor" + } \ No newline at end of file diff --git a/TensorFlowTTS/test/files/train.txt b/TensorFlowTTS/test/files/train.txt new file mode 100644 index 0000000000000000000000000000000000000000..31d77d626b9cbf87dde274eb2efb1ff2a2879203 --- /dev/null +++ b/TensorFlowTTS/test/files/train.txt @@ -0,0 +1,2 @@ +speaker1/libri1.wav|in fact its just a test.|One 
+speaker2/libri2|in fact its just a speaker number one.|Two diff --git a/TensorFlowTTS/test/test_auto.py b/TensorFlowTTS/test/test_auto.py new file mode 100644 index 0000000000000000000000000000000000000000..5247df9e130f61efabdeb7f2af9bc24fb0bd3a13 --- /dev/null +++ b/TensorFlowTTS/test/test_auto.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import os + +import pytest +import tensorflow as tf + +from tensorflow_tts.inference import AutoConfig +from tensorflow_tts.inference import AutoProcessor +from tensorflow_tts.inference import TFAutoModel + +os.environ["CUDA_VISIBLE_DEVICES"] = "" + +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", +) + + +@pytest.mark.parametrize( + "mapper_path", + [ + "./test/files/baker_mapper.json", + "./test/files/kss_mapper.json", + "./test/files/libritts_mapper.json", + "./test/files/ljspeech_mapper.json", + ] +) +def test_auto_processor(mapper_path): + processor = AutoProcessor.from_pretrained(pretrained_path=mapper_path) + processor.save_pretrained("./test_saved") + processor = AutoProcessor.from_pretrained("./test_saved/processor.json") + + +@pytest.mark.parametrize( + "config_path", + [ + "./examples/fastspeech/conf/fastspeech.v1.yaml", + "./examples/fastspeech/conf/fastspeech.v3.yaml", + "./examples/fastspeech2/conf/fastspeech2.v1.yaml", + 
"./examples/fastspeech2/conf/fastspeech2.v2.yaml", + "./examples/fastspeech2/conf/fastspeech2.kss.v1.yaml", + "./examples/fastspeech2/conf/fastspeech2.kss.v2.yaml", + "./examples/melgan/conf/melgan.v1.yaml", + "./examples/melgan_stft/conf/melgan_stft.v1.yaml", + "./examples/multiband_melgan/conf/multiband_melgan.v1.yaml", + "./examples/tacotron2/conf/tacotron2.v1.yaml", + "./examples/tacotron2/conf/tacotron2.kss.v1.yaml", + "./examples/parallel_wavegan/conf/parallel_wavegan.v1.yaml", + "./examples/hifigan/conf/hifigan.v1.yaml", + "./examples/hifigan/conf/hifigan.v2.yaml", + ] +) +def test_auto_model(config_path): + config = AutoConfig.from_pretrained(pretrained_path=config_path) + model = TFAutoModel.from_pretrained(pretrained_path=None, config=config) + + # test save_pretrained + config.save_pretrained("./test_saved") + model.save_pretrained("./test_saved") + + # test from_pretrained + config = AutoConfig.from_pretrained("./test_saved/config.yml") + model = TFAutoModel.from_pretrained("./test_saved/model.h5", config=config) diff --git a/TensorFlowTTS/test/test_base_processor.py b/TensorFlowTTS/test/test_base_processor.py new file mode 100644 index 0000000000000000000000000000000000000000..1c44d59547bb28cda69dfd6ea00db05d926f2024 --- /dev/null +++ b/TensorFlowTTS/test/test_base_processor.py @@ -0,0 +1,107 @@ +import pytest +from tensorflow_tts.processor.base_processor import BaseProcessor, DataProcessorError +import string +from dataclasses import dataclass +from shutil import copyfile + + +@dataclass +class LJ(BaseProcessor): + def get_one_sample(self, item): + sample = { + "raw_text": None, + "text_ids": None, + "audio": None, + "utt_id": None, + "speaker_name": None, + "rate": None, + } + return sample + + def text_to_sequence(self, text): + return ["0"] + + def setup_eos_token(self): + return None + + def save_pretrained(self, saved_path): + return super().save_pretrained(saved_path) + + +@pytest.fixture +def processor(tmpdir): + 
copyfile("test/files/train.txt", f"{tmpdir}/train.txt") + processor = LJ(data_dir=tmpdir, symbols=list(string.ascii_lowercase)) + return processor + + +@pytest.fixture +def mapper_processor(tmpdir): + copyfile("test/files/train.txt", f"{tmpdir}/train.txt") + copyfile("test/files/mapper.json", f"{tmpdir}/mapper.json") + processor = LJ(data_dir=tmpdir, loaded_mapper_path=f"{tmpdir}/mapper.json") + return processor + + +def test_items_creation(processor): + # Check text + assert processor.items[0][0] == "in fact its just a test." + assert processor.items[1][0] == "in fact its just a speaker number one." + + # Check path + assert processor.items[0][1].split("/")[-1] == "libri1.wav" + assert processor.items[1][1].split("/")[-1] == "libri2.wav" + + # Check speaker name + assert processor.items[0][2] == "One" + assert processor.items[1][2] == "Two" + + +def test_mapper(processor): + # check symbol to id mapper + assert processor.symbol_to_id["a"] == 0 + + # check id to symbol mapper + assert processor.id_to_symbol[0] == "a" + + # check speaker mapper + assert processor.speakers_map["One"] == 0 + assert processor.speakers_map["Two"] == 1 + + +def test_adding_symbols(processor): + # check symbol to id mapper + assert processor.symbol_to_id["a"] == 0 + + # check id to symbol mapper + assert processor.id_to_symbol[0] == "a" + + old_processor_len = len(processor.symbols) + + # Test adding new symbol + processor.add_symbol("O_O") + + assert processor.symbol_to_id["a"] == 0 + assert ( + processor.symbol_to_id["O_O"] == len(processor.symbols) - 1 + ) # new symbol should have last id + + assert processor.id_to_symbol[0] == "a" + assert processor.id_to_symbol[len(processor.symbols) - 1] == "O_O" + + assert old_processor_len == len(processor.symbols) - 1 + + +def test_loading_mapper(mapper_processor): + assert mapper_processor.symbol_to_id["a"] == 0 + assert mapper_processor.symbol_to_id["@ph"] == 2 + + assert mapper_processor.speakers_map["test_one"] == 0 + assert 
mapper_processor.speakers_map["test_two"] == 1 + + assert mapper_processor.id_to_symbol[0] == "a" + assert mapper_processor.id_to_symbol[2] == "@ph" + + # Test failed creation + with pytest.raises(DataProcessorError): + failed = LJ(data_dir="test/files") diff --git a/TensorFlowTTS/test/test_fastspeech.py b/TensorFlowTTS/test/test_fastspeech.py new file mode 100644 index 0000000000000000000000000000000000000000..62045b5262212c82610db3e1d90a0517b1eae4a0 --- /dev/null +++ b/TensorFlowTTS/test/test_fastspeech.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import os + +import pytest +import tensorflow as tf + +from tensorflow_tts.configs import FastSpeechConfig +from tensorflow_tts.models import TFFastSpeech + +os.environ["CUDA_VISIBLE_DEVICES"] = "" + +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", +) + + +@pytest.mark.parametrize("new_size", [100, 200, 300]) +def test_fastspeech_resize_positional_embeddings(new_size): + config = FastSpeechConfig() + fastspeech = TFFastSpeech(config, name="fastspeech") + fastspeech._build() + fastspeech.save_weights("./test.h5") + fastspeech.resize_positional_embeddings(new_size) + fastspeech.load_weights("./test.h5", by_name=True, skip_mismatch=True) + + +@pytest.mark.parametrize("num_hidden_layers,n_speakers", [(2, 1), (3, 2), (4, 3)]) +def test_fastspeech_trainable(num_hidden_layers, n_speakers): + config = FastSpeechConfig( + encoder_num_hidden_layers=num_hidden_layers, + decoder_num_hidden_layers=num_hidden_layers + 1, + n_speakers=n_speakers, + ) + + fastspeech = TFFastSpeech(config, name="fastspeech") + optimizer = tf.keras.optimizers.Adam(lr=0.001) + + # fake inputs + input_ids = tf.convert_to_tensor([[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]], tf.int32) + attention_mask = tf.convert_to_tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], tf.int32) + speaker_ids = tf.convert_to_tensor([0], tf.int32) + duration_gts = tf.convert_to_tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], tf.int32) + + mel_gts = tf.random.uniform(shape=[1, 10, 80], dtype=tf.float32) + + @tf.function + def one_step_training(): + with tf.GradientTape() as tape: + mel_outputs_before, _, duration_outputs = fastspeech( + input_ids, speaker_ids, duration_gts, training=True + ) + duration_loss = tf.keras.losses.MeanSquaredError()( + duration_gts, duration_outputs + ) + mel_loss = tf.keras.losses.MeanSquaredError()(mel_gts, mel_outputs_before) + loss = duration_loss + mel_loss + gradients = tape.gradient(loss, fastspeech.trainable_variables) + 
optimizer.apply_gradients(zip(gradients, fastspeech.trainable_variables)) + + tf.print(loss) + + import time + + for i in range(2): + if i == 1: + start = time.time() + one_step_training() + print(time.time() - start) diff --git a/TensorFlowTTS/test/test_fastspeech2.py b/TensorFlowTTS/test/test_fastspeech2.py new file mode 100644 index 0000000000000000000000000000000000000000..60ff44055c4ec0a7e36f25eda0dfa889a0a56fb1 --- /dev/null +++ b/TensorFlowTTS/test/test_fastspeech2.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import os +import yaml + +import pytest +import tensorflow as tf + +from tensorflow_tts.configs import FastSpeech2Config +from tensorflow_tts.models import TFFastSpeech2 +from tensorflow_tts.utils import return_strategy + +from examples.fastspeech2.train_fastspeech2 import FastSpeech2Trainer + +os.environ["CUDA_VISIBLE_DEVICES"] = "" + +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", +) + + +@pytest.mark.parametrize("new_size", [100, 200, 300]) +def test_fastspeech_resize_positional_embeddings(new_size): + config = FastSpeech2Config() + fastspeech2 = TFFastSpeech2(config, name="fastspeech") + fastspeech2._build() + fastspeech2.save_weights("./test.h5") + fastspeech2.resize_positional_embeddings(new_size) + fastspeech2.load_weights("./test.h5", by_name=True, skip_mismatch=True) + + +@pytest.mark.parametrize( + "var_train_expr, config_path", + [ + (None, "./examples/fastspeech2/conf/fastspeech2.v1.yaml"), + ("embeddings|encoder", "./examples/fastspeech2/conf/fastspeech2.v1.yaml"), + ("embeddings|encoder", "./examples/fastspeech2/conf/fastspeech2.v2.yaml"), + ("embeddings|encoder", "./examples/fastspeech2/conf/fastspeech2.baker.v2.yaml"), + ("embeddings|encoder", "./examples/fastspeech2/conf/fastspeech2.kss.v1.yaml"), + ("embeddings|encoder", "./examples/fastspeech2/conf/fastspeech2.kss.v2.yaml"), + ], +) +def test_fastspeech2_train_some_layers(var_train_expr, config_path): + config = FastSpeech2Config(n_speakers=5) + model = TFFastSpeech2(config) + model._build() + optimizer = tf.keras.optimizers.Adam(lr=0.001) + + with open(config_path) as f: + config = yaml.load(f, Loader=yaml.Loader) + + config.update({"outdir": "./"}) + config.update({"var_train_expr": var_train_expr}) + + STRATEGY = return_strategy() + + trainer = FastSpeech2Trainer( + config=config, strategy=STRATEGY, steps=0, epochs=0, is_mixed_precision=False, + ) + trainer.compile(model, optimizer) + + 
len_trainable_vars = len(trainer._trainable_variables) + all_trainable_vars = len(model.trainable_variables) + + if var_train_expr is None: + tf.debugging.assert_equal(len_trainable_vars, all_trainable_vars) + else: + tf.debugging.assert_less(len_trainable_vars, all_trainable_vars) + + +@pytest.mark.parametrize("num_hidden_layers,n_speakers", [(2, 1), (3, 2), (4, 3)]) +def test_fastspeech_trainable(num_hidden_layers, n_speakers): + config = FastSpeech2Config( + encoder_num_hidden_layers=num_hidden_layers, + decoder_num_hidden_layers=num_hidden_layers + 1, + n_speakers=n_speakers, + ) + + fastspeech2 = TFFastSpeech2(config, name="fastspeech") + optimizer = tf.keras.optimizers.Adam(lr=0.001) + + # fake inputs + input_ids = tf.convert_to_tensor([[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]], tf.int32) + attention_mask = tf.convert_to_tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], tf.int32) + speaker_ids = tf.convert_to_tensor([0], tf.int32) + duration_gts = tf.convert_to_tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], tf.int32) + f0_gts = tf.convert_to_tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], tf.float32) + energy_gts = tf.convert_to_tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], tf.float32) + + mel_gts = tf.random.uniform(shape=[1, 10, 80], dtype=tf.float32) + + @tf.function + def one_step_training(): + with tf.GradientTape() as tape: + mel_outputs_before, _, duration_outputs, _, _ = fastspeech2( + input_ids, speaker_ids, duration_gts, f0_gts, energy_gts, training=True, + ) + duration_loss = tf.keras.losses.MeanSquaredError()( + duration_gts, duration_outputs + ) + mel_loss = tf.keras.losses.MeanSquaredError()(mel_gts, mel_outputs_before) + loss = duration_loss + mel_loss + gradients = tape.gradient(loss, fastspeech2.trainable_variables) + optimizer.apply_gradients(zip(gradients, fastspeech2.trainable_variables)) + + tf.print(loss) + + import time + + for i in range(2): + if i == 1: + start = time.time() + one_step_training() + print(time.time() - start) diff --git 
a/TensorFlowTTS/test/test_hifigan.py b/TensorFlowTTS/test/test_hifigan.py new file mode 100644 index 0000000000000000000000000000000000000000..933d3dea93849907aba0f15f1dad3a55e10e0bbe --- /dev/null +++ b/TensorFlowTTS/test/test_hifigan.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import os + +import pytest +import tensorflow as tf + +from tensorflow_tts.configs import ( + HifiGANDiscriminatorConfig, + HifiGANGeneratorConfig, + MelGANDiscriminatorConfig, +) +from tensorflow_tts.models import ( + TFHifiGANGenerator, + TFHifiGANMultiPeriodDiscriminator, + TFMelGANMultiScaleDiscriminator, +) + +from examples.hifigan.train_hifigan import TFHifiGANDiscriminator + +os.environ["CUDA_VISIBLE_DEVICES"] = "" + +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", +) + + +def make_hifigan_generator_args(**kwargs): + defaults = dict( + out_channels=1, + kernel_size=7, + filters=128, + use_bias=True, + upsample_scales=[8, 8, 2, 2], + stacks=3, + stack_kernel_size=[3, 7, 11], + stack_dilation_rate=[[1, 3, 5], [1, 3, 5], [1, 3, 5]], + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + padding_type="REFLECT", + use_final_nolinear_activation=True, + is_weight_norm=True, + initializer_seed=42, + ) + defaults.update(kwargs) + return defaults + + +def 
make_hifigan_discriminator_args(**kwargs): + defaults_multisperiod = dict( + out_channels=1, + period_scales=[2, 3, 5, 7, 11], + n_layers=5, + kernel_size=5, + strides=3, + filters=8, + filter_scales=4, + max_filters=1024, + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + is_weight_norm=True, + initializer_seed=42, + ) + defaults_multisperiod.update(kwargs) + defaults_multiscale = dict( + out_channels=1, + scales=3, + downsample_pooling="AveragePooling1D", + downsample_pooling_params={"pool_size": 4, "strides": 2,}, + kernel_sizes=[5, 3], + filters=16, + max_downsample_filters=1024, + use_bias=True, + downsample_scales=[4, 4, 4, 4], + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + padding_type="REFLECT", + ) + defaults_multiscale.update(kwargs) + return [defaults_multisperiod, defaults_multiscale] + + +@pytest.mark.parametrize( + "dict_g, dict_d, dict_loss", + [ + ({}, {}, {}), + ({"kernel_size": 3}, {}, {}), + ({"filters": 1024}, {}, {}), + ({"stack_kernel_size": [1, 2, 3]}, {}, {}), + ({"stack_kernel_size": [3, 5, 7], "stacks": 3}, {}, {}), + ({"upsample_scales": [4, 4, 4, 4]}, {}, {}), + ({"upsample_scales": [8, 8, 2, 2]}, {}, {}), + ({"filters": 1024, "upsample_scales": [8, 8, 2, 2]}, {}, {}), + ], +) +def test_hifigan_trainable(dict_g, dict_d, dict_loss): + batch_size = 4 + batch_length = 4096 + args_g = make_hifigan_generator_args(**dict_g) + args_d_p, args_d_s = make_hifigan_discriminator_args(**dict_d) + + args_g = HifiGANGeneratorConfig(**args_g) + args_d_p = HifiGANDiscriminatorConfig(**args_d_p) + args_d_s = MelGANDiscriminatorConfig(**args_d_s) + + generator = TFHifiGANGenerator(args_g) + + discriminator_p = TFHifiGANMultiPeriodDiscriminator(args_d_p) + discriminator_s = TFMelGANMultiScaleDiscriminator(args_d_s) + discriminator = TFHifiGANDiscriminator(discriminator_p, discriminator_s) diff --git a/TensorFlowTTS/test/test_mb_melgan.py b/TensorFlowTTS/test/test_mb_melgan.py new file 
mode 100644 index 0000000000000000000000000000000000000000..d6045040915e10877f42b5c1fe983f89c9a98102 --- /dev/null +++ b/TensorFlowTTS/test/test_mb_melgan.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import tensorflow as tf + +import logging +import os + +import numpy as np +import pytest + +from tensorflow_tts.configs import MultiBandMelGANGeneratorConfig +from tensorflow_tts.models import TFPQMF, TFMelGANGenerator + +os.environ["CUDA_VISIBLE_DEVICES"] = "" + +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", +) + + +def make_multi_band_melgan_generator_args(**kwargs): + defaults = dict( + out_channels=1, + kernel_size=7, + filters=512, + use_bias=True, + upsample_scales=[8, 8, 2, 2], + stack_kernel_size=3, + stacks=3, + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + padding_type="REFLECT", + subbands=4, + tabs=62, + cutoff_ratio=0.15, + beta=9.0, + ) + defaults.update(kwargs) + return defaults + + +@pytest.mark.parametrize( + "dict_g", + [ + {"subbands": 4, "upsample_scales": [2, 4, 8], "stacks": 4, "out_channels": 4}, + {"subbands": 4, "upsample_scales": [4, 4, 4], "stacks": 5, "out_channels": 4}, + ], +) +def test_multi_band_melgan(dict_g): + args_g = make_multi_band_melgan_generator_args(**dict_g) + args_g = MultiBandMelGANGeneratorConfig(**args_g) + generator = 
TFMelGANGenerator(args_g, name="multi_band_melgan") + generator._build() + + pqmf = TFPQMF(args_g, name="pqmf") + + fake_mels = tf.random.uniform(shape=[1, 100, 80], dtype=tf.float32) + fake_y = tf.random.uniform(shape=[1, 100 * 256, 1], dtype=tf.float32) + y_hat_subbands = generator(fake_mels) + + y_hat = pqmf.synthesis(y_hat_subbands) + y_subbands = pqmf.analysis(fake_y) + + assert np.shape(y_subbands) == np.shape(y_hat_subbands) + assert np.shape(fake_y) == np.shape(y_hat) diff --git a/TensorFlowTTS/test/test_melgan.py b/TensorFlowTTS/test/test_melgan.py new file mode 100644 index 0000000000000000000000000000000000000000..3f91390b4623305eca68dc9a32c2a79a8d920d91 --- /dev/null +++ b/TensorFlowTTS/test/test_melgan.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import os + +import pytest +import tensorflow as tf + +from tensorflow_tts.configs import MelGANDiscriminatorConfig, MelGANGeneratorConfig +from tensorflow_tts.models import TFMelGANGenerator, TFMelGANMultiScaleDiscriminator + +os.environ["CUDA_VISIBLE_DEVICES"] = "" + +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", +) + + +def make_melgan_generator_args(**kwargs): + defaults = dict( + out_channels=1, + kernel_size=7, + filters=512, + use_bias=True, + upsample_scales=[8, 8, 2, 2], + stack_kernel_size=3, + stacks=3, + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + padding_type="REFLECT", + ) + defaults.update(kwargs) + return defaults + + +def make_melgan_discriminator_args(**kwargs): + defaults = dict( + out_channels=1, + scales=3, + downsample_pooling="AveragePooling1D", + downsample_pooling_params={"pool_size": 4, "strides": 2,}, + kernel_sizes=[5, 3], + filters=16, + max_downsample_filters=1024, + use_bias=True, + downsample_scales=[4, 4, 4, 4], + nonlinear_activation="LeakyReLU", + nonlinear_activation_params={"alpha": 0.2}, + padding_type="REFLECT", + ) + defaults.update(kwargs) + return defaults + + +@pytest.mark.parametrize( + "dict_g, dict_d, dict_loss", + [ + ({}, {}, {}), + ({"kernel_size": 3}, {}, {}), + ({"filters": 1024}, {}, {}), + ({"stack_kernel_size": 5}, {}, {}), + ({"stack_kernel_size": 5, "stacks": 2}, {}, {}), + ({"upsample_scales": [4, 4, 4, 4]}, {}, {}), + ({"upsample_scales": [8, 8, 2, 2]}, {}, {}), + ({"filters": 1024, "upsample_scales": [8, 8, 2, 2]}, {}, {}), + ], +) +def test_melgan_trainable(dict_g, dict_d, dict_loss): + batch_size = 4 + batch_length = 4096 + args_g = make_melgan_generator_args(**dict_g) + args_d = make_melgan_discriminator_args(**dict_d) + + args_g = MelGANGeneratorConfig(**args_g) + args_d = MelGANDiscriminatorConfig(**args_d) + + generator = TFMelGANGenerator(args_g) + discriminator = 
TFMelGANMultiScaleDiscriminator(args_d) diff --git a/TensorFlowTTS/test/test_melgan_layers.py b/TensorFlowTTS/test/test_melgan_layers.py new file mode 100644 index 0000000000000000000000000000000000000000..1c311e21d5aa989de05b6b21c1dec8a37917990b --- /dev/null +++ b/TensorFlowTTS/test/test_melgan_layers.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Minh Nguyen (@dathudeptrai) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import os + +import numpy as np +import pytest +import tensorflow as tf + +from tensorflow_tts.models.melgan import ( + TFConvTranspose1d, + TFReflectionPad1d, + TFResidualStack, +) + +os.environ["CUDA_VISIBLE_DEVICES"] = "" + +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s", +) + + +@pytest.mark.parametrize("padding_size", [(3), (5)]) +def test_padding(padding_size): + fake_input_1d = tf.random.normal(shape=[4, 8000, 256], dtype=tf.float32) + out = TFReflectionPad1d(padding_size=padding_size)(fake_input_1d) + assert np.array_equal( + tf.keras.backend.int_shape(out), [4, 8000 + 2 * padding_size, 256] + ) + + +@pytest.mark.parametrize( + "filters,kernel_size,strides,padding,is_weight_norm", + [(512, 40, 8, "same", False), (768, 15, 8, "same", True)], +) +def test_convtranpose1d(filters, kernel_size, strides, padding, is_weight_norm): + fake_input_1d = tf.random.normal(shape=[4, 8000, 256], dtype=tf.float32) + conv1d_transpose = TFConvTranspose1d( 
+ filters=filters, + kernel_size=kernel_size, + strides=strides, + padding=padding, + is_weight_norm=is_weight_norm, + initializer_seed=42, + ) + out = conv1d_transpose(fake_input_1d) + assert np.array_equal(tf.keras.backend.int_shape(out), [4, 8000 * strides, filters]) + + +@pytest.mark.parametrize( + "kernel_size,filters,dilation_rate,use_bias,nonlinear_activation,nonlinear_activation_params,is_weight_norm", + [ + (3, 256, 1, True, "LeakyReLU", {"alpha": 0.3}, True), + (3, 256, 3, True, "ReLU", {}, False), + ], +) +def test_residualblock( + kernel_size, + filters, + dilation_rate, + use_bias, + nonlinear_activation, + nonlinear_activation_params, + is_weight_norm, +): + fake_input_1d = tf.random.normal(shape=[4, 8000, 256], dtype=tf.float32) + residual_block = TFResidualStack( + kernel_size=kernel_size, + filters=filters, + dilation_rate=dilation_rate, + use_bias=use_bias, + nonlinear_activation=nonlinear_activation, + nonlinear_activation_params=nonlinear_activation_params, + is_weight_norm=is_weight_norm, + initializer_seed=42, + ) + out = residual_block(fake_input_1d) + assert np.array_equal(tf.keras.backend.int_shape(out), [4, 8000, filters]) diff --git a/TensorFlowTTS/test/test_parallel_wavegan.py b/TensorFlowTTS/test/test_parallel_wavegan.py new file mode 100644 index 0000000000000000000000000000000000000000..dce89f91c17f650f81e0f7902828a8fb88d16c28 --- /dev/null +++ b/TensorFlowTTS/test/test_parallel_wavegan.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 TensorFlowTTS Team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os

import pytest
import tensorflow as tf

from tensorflow_tts.configs import (
    ParallelWaveGANGeneratorConfig,
    ParallelWaveGANDiscriminatorConfig,
)
from tensorflow_tts.models import (
    TFParallelWaveGANGenerator,
    TFParallelWaveGANDiscriminator,
)

# Hide all GPUs so the test runs on CPU and is reproducible on any machine.
os.environ["CUDA_VISIBLE_DEVICES"] = ""

logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
)


def make_pwgan_generator_args(**kwargs):
    """Return default ParallelWaveGAN generator config kwargs.

    Any keyword argument overrides the corresponding default, so each
    parametrized case only has to spell out what differs.
    """
    defaults = dict(
        out_channels=1,
        kernel_size=3,
        n_layers=30,
        stacks=3,
        residual_channels=64,
        gate_channels=128,
        skip_channels=64,
        aux_channels=80,
        aux_context_window=2,
        dropout_rate=0.0,
        use_bias=True,
        use_causal_conv=False,
        upsample_conditional_features=True,
        upsample_params={"upsample_scales": [4, 4, 4, 4]},
        initializer_seed=42,
    )
    defaults.update(kwargs)
    return defaults


def make_pwgan_discriminator_args(**kwargs):
    """Return default ParallelWaveGAN discriminator config kwargs.

    Any keyword argument overrides the corresponding default.
    """
    defaults = dict(
        out_channels=1,
        kernel_size=3,
        n_layers=10,
        conv_channels=64,
        use_bias=True,
        dilation_factor=1,
        nonlinear_activation="LeakyReLU",
        nonlinear_activation_params={"alpha": 0.2},
        initializer_seed=42,
        apply_sigmoid_at_last=False,
    )
    defaults.update(kwargs)
    return defaults


@pytest.mark.parametrize(
    "dict_g, dict_d",
    [
        ({}, {}),
        (
            {"kernel_size": 3, "aux_context_window": 5, "residual_channels": 128},
            {"dilation_factor": 2},
        ),
        ({"stacks": 4, "n_layers": 40}, {"conv_channels": 128}),
    ],
)
def test_parallelwavegan_trainable(dict_g, dict_d):
    """Smoke-test that generator and discriminator build and run a forward pass.

    BUG FIX: the test was named ``test_melgan_trainable`` — a copy-paste
    leftover from the MelGAN test file — although it exercises the
    ParallelWaveGAN models; renamed for consistency with what it tests.
    """
    # Fake local conditioning features: [batch, frames, aux_channels].
    random_c = tf.random.uniform(shape=[4, 32, 80], dtype=tf.float32)

    args_g = make_pwgan_generator_args(**dict_g)
    args_d = make_pwgan_discriminator_args(**dict_d)

    args_g = ParallelWaveGANGeneratorConfig(**args_g)
    args_d = ParallelWaveGANDiscriminatorConfig(**args_d)

    generator = TFParallelWaveGANGenerator(args_g)
    generator._build()
    discriminator = TFParallelWaveGANDiscriminator(args_d)
    discriminator._build()

    generated_audios = generator(random_c, training=True)
    discriminator(generated_audios)

    generator.summary()
    discriminator.summary()


# -*- coding: utf-8 -*-
# Copyright 2020 Minh Nguyen (@dathudeptrai)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os
import time
import yaml

import numpy as np
import pytest
import tensorflow as tf

from tensorflow_tts.configs import Tacotron2Config
from tensorflow_tts.models import TFTacotron2
from tensorflow_tts.utils import return_strategy

from examples.tacotron2.train_tacotron2 import Tacotron2Trainer

# Hide all GPUs so the test runs deterministically on CPU.
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

logging.basicConfig(
    level=logging.WARNING,
    format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
)


@pytest.mark.parametrize(
    "var_train_expr, config_path",
    [
        ("embeddings|decoder_cell", "./examples/tacotron2/conf/tacotron2.v1.yaml"),
        (None, "./examples/tacotron2/conf/tacotron2.v1.yaml"),
        (
            "embeddings|decoder_cell",
            "./examples/tacotron2/conf/tacotron2.baker.v1.yaml",
        ),
        ("embeddings|decoder_cell", "./examples/tacotron2/conf/tacotron2.kss.v1.yaml"),
    ],
)
def test_tacotron2_train_some_layers(var_train_expr, config_path):
    """Check that ``var_train_expr`` restricts the trainer's trainable set.

    When ``var_train_expr`` is None every model variable must be trainable;
    otherwise the filtered set must be strictly smaller than the full set.
    """
    model_config = Tacotron2Config(n_speakers=5, reduction_factor=1)
    model = TFTacotron2(model_config, name="tacotron2")
    model._build()
    # BUG FIX: the ``lr`` alias was deprecated and later removed from
    # tf.keras optimizers; use ``learning_rate``.
    optimizer = tf.keras.optimizers.Adam(learning_rate=0.001)

    with open(config_path) as f:
        # NOTE(review): yaml.Loader can construct arbitrary Python objects.
        # Acceptable here only because these config files ship with the repo;
        # never use it on untrusted input.
        config = yaml.load(f, Loader=yaml.Loader)

    config.update({"outdir": "./"})
    config.update({"var_train_expr": var_train_expr})

    STRATEGY = return_strategy()

    trainer = Tacotron2Trainer(
        config=config, strategy=STRATEGY, steps=0, epochs=0, is_mixed_precision=False,
    )
    trainer.compile(model, optimizer)

    len_trainable_vars = len(trainer._trainable_variables)
    all_trainable_vars = len(model.trainable_variables)

    if var_train_expr is None:
        tf.debugging.assert_equal(len_trainable_vars, all_trainable_vars)
    else:
        tf.debugging.assert_less(len_trainable_vars, all_trainable_vars)


@pytest.mark.parametrize(
    "n_speakers, n_chars, max_input_length, max_mel_length, batch_size",
    [(2, 15, 25, 50, 2),],
)
def test_tacotron2_trainable(
    n_speakers, n_chars, max_input_length, max_mel_length, batch_size
):
    """Run two optimization steps on random data to verify the model trains.

    The first step triggers ``tf.function`` tracing; only the second step is
    timed so the reported run-time reflects steady-state execution.
    """
    config = Tacotron2Config(n_speakers=n_speakers, reduction_factor=1)
    model = TFTacotron2(config, name="tacotron2")
    model._build()

    # Fake inputs.
    input_ids = tf.random.uniform(
        [batch_size, max_input_length], maxval=n_chars, dtype=tf.int32
    )
    # BUG FIX: input lengths were hard-coded as a length-2 constant, silently
    # assuming batch_size == 2; build them from ``batch_size`` so the
    # parametrization can actually vary the batch.
    input_lengths = tf.constant([max_input_length] * batch_size, tf.int32)
    speaker_ids = tf.convert_to_tensor([0] * batch_size, tf.int32)
    mel_gts = tf.random.uniform(shape=[batch_size, max_mel_length, 80])
    mel_lengths = np.random.randint(
        max_mel_length, high=max_mel_length + 1, size=[batch_size]
    )
    # Ensure at least one sample spans the full padded length.
    mel_lengths[-1] = max_mel_length
    mel_lengths = tf.convert_to_tensor(mel_lengths, dtype=tf.int32)
    # NOTE: the original also built an all-zero ``stop_tokens`` tensor that was
    # never used — the stop targets are derived from ``mel_lengths`` inside the
    # training step below — so that dead code is removed.

    # BUG FIX: ``lr`` -> ``learning_rate`` (deprecated alias removed in tf.keras).
    optimizer = tf.keras.optimizers.Adam(learning_rate=0.001)

    binary_crossentropy = tf.keras.losses.BinaryCrossentropy(from_logits=True)

    @tf.function(experimental_relax_shapes=True)
    def one_step_training(input_ids, speaker_ids, mel_gts, mel_lengths):
        with tf.GradientTape() as tape:
            mel_preds, post_mel_preds, stop_preds, alignment_history = model(
                input_ids,
                input_lengths,
                speaker_ids,
                mel_gts,
                mel_lengths,
                training=True,
            )
            loss_before = tf.keras.losses.MeanSquaredError()(mel_gts, mel_preds)
            loss_after = tf.keras.losses.MeanSquaredError()(mel_gts, post_mel_preds)

            # Stop-token targets: 1.0 from the last valid frame onwards.
            stop_gts = tf.expand_dims(
                tf.range(tf.reduce_max(mel_lengths), dtype=tf.int32), 0
            )  # [1, max_len]
            stop_gts = tf.tile(stop_gts, [tf.shape(mel_lengths)[0], 1])  # [B, max_len]
            stop_gts = tf.cast(
                tf.math.greater_equal(stop_gts, tf.expand_dims(mel_lengths, 1) - 1),
                tf.float32,
            )

            # Calculate stop_token loss.
            stop_token_loss = binary_crossentropy(stop_gts, stop_preds)

            loss = stop_token_loss + loss_before + loss_after

        gradients = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(gradients, model.trainable_variables))
        return loss, alignment_history

    for i in range(2):
        if i == 1:
            start = time.time()
        loss, alignment_history = one_step_training(
            input_ids, speaker_ids, mel_gts, mel_lengths
        )
        print(f" > loss: {loss}")
    total_runtime = time.time() - start
    print(f" > Total run-time: {total_runtime}")
    # BUG FIX: the original printed ``total_runtime / 10`` as an "average",
    # a leftover from a 10-iteration benchmark; exactly one step is timed
    # here, so the total IS the per-step time.
    print(f" > Per-step run-time: {total_runtime}")