diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000000000000000000000000000000000..59c4a32d1aecdadbf4f1018d0c7db5a000462a67 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,34 @@ +# http://editorconfig.org + +root = true + +[*] +indent_style = space +indent_size = 4 +trim_trailing_whitespace = true +insert_final_newline = true +charset = utf-8 +end_of_line = lf + +[*.bat] +indent_style = tab +end_of_line = crlf + +[*.{json,jsonc}] +indent_style = space +indent_size = 2 + +[.vscode/*.{json,jsonc}] +indent_style = space +indent_size = 4 + +[*.{yml,yaml,toml}] +indent_style = space +indent_size = 2 + +[*.md] +trim_trailing_whitespace = false + +[Makefile] +indent_style = tab +indent_size = 8 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..9ccdb5e5bcee3977bdecd257268fa0a0d5216bb4 --- /dev/null +++ b/.gitignore @@ -0,0 +1,241 @@ +# Created by https://www.toptal.com/developers/gitignore/api/linux,windows,macos,visualstudiocode,python +# Edit at https://www.toptal.com/developers/gitignore?templates=linux,windows,macos,visualstudiocode,python + +### Linux ### +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. 
+#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# End of https://www.toptal.com/developers/gitignore/api/linux,windows,macos,visualstudiocode,python + +# setuptools-scm _version file +src/animatediff/_version.py + +# local misc and temp +/misc/ +/temp/ + +# envrc +.env* +!.envrc.example diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..54df47d0f60d9bc89a3fad6d0fe9ce010cf82bbe --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +# See https://pre-commit.com for more information +ci: + autofix_prs: true + autoupdate_branch: "main" + autoupdate_commit_msg: "[pre-commit.ci] pre-commit autoupdate" + autoupdate_schedule: weekly + +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.0.281" + hooks: + - id: ruff + args: ["--fix", "--exit-non-zero-on-fix"] + + - repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black + args: ["--line-length=110"] + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000000000000000000000000000000000..cf3bd9a12992e2dbc7e39c8d2ca24f4a7baf9d9e --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,75 @@ +{ + "editor.insertSpaces": true, + "editor.tabSize": 4, + "files.trimTrailingWhitespace": true, + "editor.rulers": [100, 120], + + "files.associations": { + "*.yaml": "yaml" + }, + + "files.exclude": { + "**/.git": true, + "**/.svn": true, + "**/.hg": true, + "**/CVS": true, + "**/.DS_Store": true, + "**/Thumbs.db": true, + "**/__pycache__": true + }, + + "[python]": { + "editor.wordBasedSuggestions": false, + "editor.formatOnSave": true, + "editor.defaultFormatter": "ms-python.black-formatter", + "editor.codeActionsOnSave": { + "source.organizeImports": true + } + }, + "python.analysis.include": ["./src", "./scripts", "./tests"], + + "python.linting.enabled": false, + "python.linting.pylintEnabled": false, + "python.linting.flake8Enabled": true, + "python.linting.flake8Args": ["--config=${workspaceFolder}/setup.cfg"], + + "[json]": { + "editor.tabSize": 2, + "editor.detectIndentation": false, + "editor.formatOnSave": true, + "editor.formatOnSaveMode": "file" + }, + + 
"[toml]": { + "editor.tabSize": 2, + "editor.detectIndentation": false, + "editor.formatOnSave": true, + "editor.formatOnSaveMode": "file", + "editor.defaultFormatter": "tamasfe.even-better-toml", + "editor.rulers": [80, 100] + }, + "evenBetterToml.formatter.columnWidth": 88, + + "[yaml]": { + "editor.detectIndentation": false, + "editor.tabSize": 2, + "editor.formatOnSave": true, + "editor.formatOnSaveMode": "file" + }, + "yaml.format.bracketSpacing": true, + "yaml.format.proseWrap": "preserve", + "yaml.format.singleQuote": false, + "yaml.format.printWidth": 110, + + "[markdown]": { + "files.trimTrailingWhitespace": false + }, + + "css.lint.validProperties": ["dock", "content-align", "content-justify"], + "[css]": { + "editor.formatOnSave": true + }, + + "remote.autoForwardPorts": false, + "remote.autoForwardPortsSource": "process" +} diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000000000000000000000000000000000000..39af6c5aac86be244b193e1042f00890e1ba927e
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,2 @@
+# setuptools_scm will grab all tracked files, minus these exclusions
+prune .vscode
diff --git a/README.md b/README.md
index 013a5fc2b23a574b174835fe978ac6c64262dcea..0a450559de13a0b15d2fde6d7fd86d64d75e283d 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,100 @@
 ---
-title: Vid2Vid Using Text Prompt
-emoji: 😻
-colorFrom: green
-colorTo: pink
-sdk: gradio
-sdk_version: 4.32.2
+title: Vid2Vid-using-Text-prompt
 app_file: app.py
-pinned: false
+sdk: gradio
+sdk_version: 3.35.2
 ---
+
+# Video2Video Generation using Text Prompts
+
+This repository contains a pipeline for video-to-video generation driven by text prompts. The system leverages AnimateDiff for animation and OpenPose ControlNet for pose estimation, and it incorporates a prompt-travelling method for improved coherence between the original and generated videos. Users can interact with the pipeline through a Gradio app or a standard Python program.
+
+## Techniques used
+
+- **AnimateDiff**: Generates high-quality animations from text prompts and an input image.
+- **OpenPose ControlNet**: Provides accurate pose estimation to guide the animation process.
+- **Prompt Travelling**: Improves coherence between the input video and the generated output.
+- **User Interfaces**:
+  - **Gradio App**: An intuitive web-based interface for easy interaction.
+  - **Python Program**: A script-based interface for users who prefer the command line.
+
+### Base models
+
+- [XXMix_9realistic](https://civitai.com/models/47274): Recommended base model for life-like video
+- [Mistoon_Anime](https://civitai.com/models/24149/mistoonanime): Recommended base model for anime-style video
+
+### Motion modules
+
+- [mm_sd_v15_v2](https://huggingface.co/guoyww/animatediff/blob/main/mm_sd_v15_v2.ckpt): Motion module used for generating segments of the final video from the generated images (recommended)
+- [mm_sd_v15](https://huggingface.co/guoyww/animatediff/blob/main/mm_sd_v15.ckpt) and [mm_sd_v14](https://huggingface.co/guoyww/animatediff/blob/main/mm_sd_v14.ckpt) can also be used.
+
+### ControlNets
+
+- [control_v11p_sd15_openpose](https://huggingface.co/lllyasviel/ControlNet-v1-1/blob/main/control_v11p_sd15_openpose.pth): ControlNet for pose estimation from the given video
+- Support for depth and canny ControlNets is planned, to improve generated-video quality.
+
+### Prompt Travelling
+
+Prompt travelling is a technique for instructing the model, frame by frame, what to do with the output image.
+For example, if the prompt body contains `30 - face: up, camera: zoomed out, right-hand: waving`, the 30th output frame is generated according to that prompt. A sample prompt.json fragment is shown at the end of the Usage section below.
+
+## Installation
+
+To set up the environment and install the necessary dependencies, follow these steps:
+
+1. **Clone the repository:**
+
+   ```bash
+   git clone https://github.com/TheNetherWatcher/Vid2Vid-using-Text-prompt.git
+   cd Vid2Vid-using-Text-prompt
+   ```
+
+2. **Create and activate a virtual environment:**
+
+   ```bash
+   python -m venv venv
+   source venv/bin/activate  # On Windows, use `venv\Scripts\activate`
+   ```
+
+3. **Install the required packages:**
+
+   ```bash
+   pip install -e .
+   pip install -e .[stylize]
+   ```
+
+## Usage
+
+### Model weights
+
+- Download the model weights from the links above (or from other sources) and put them [here](./data/models/huggingface); put the downloaded motion modules [here](data/models/motion-module)
+- On the first run you might get errors like "model weights not found". In that case, go to the stylize directory and, in the most recently created folder, edit the model name in the prompt.json file. Better support for this is under development.
+
+### Gradio App
+
+To run the Gradio app, execute the following command:
+
+```bash
+python app.py
+```
+
+The Gradio app provides an interface for uploading a video and a text prompt as input, and it outputs the generated video.
+
+### Commandline
+
+```bash
+python test.py
+```
+
+After running this, you will be prompted to enter the location of the video, a positive prompt (the changes you want to make to the video), and a negative prompt.
+The negative prompt has a default value, but you can edit it if you like.
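+
+### Example: prompt.json fragment
+
+As a rough illustration of the two notes above, here is a minimal sketch of the prompt.json fields you would typically edit. The field names follow the sample configs under `config/prompts/`, and the `path` value is the one app.py writes; the frame numbers and prompt text are invented for illustration, and a real generated file contains many more keys:
+
+```json
+{
+  "path": "models/huggingface/xxmix9realistic_v40.safetensors",
+  "head_prompt": "masterpiece, best quality",
+  "prompt_map": {
+    "0": "1girl, walking, arms relaxed",
+    "30": "face: up, camera: zoomed out, right-hand: waving"
+  }
+}
+```
+
+`path` selects the base-model checkpoint, `head_prompt` is shared by all frames (app.py fills it with your positive prompt), and `prompt_map` maps a frame number to the prompt that takes effect at that frame; this is how prompt travelling is expressed in the config.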
+
+## Upcoming Developments
+
+- LoRA support, and support for more ControlNets (e.g. canny, depth, edge)
+- Gradio app support for using different ControlNets and LoRAs
+- CLI options for controlling execution on different systems
+
+## Credits
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+- [AnimateDiff](https://github.com/guoyww/AnimateDiff)
+- [Prompt Travelling using AnimateDiff](https://github.com/s9roll7/animatediff-cli-prompt-travel)
diff --git a/app.py b/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..b28d6ce0e71c8d3e9bfd8bd98e829c1ebf537d90
--- /dev/null
+++ b/app.py
@@ -0,0 +1,82 @@
+import json
+import os
+import asyncio
+import gradio as gr
+
+async def stylize(video):
+    command = f"animatediff stylize create-config {video}"  # generate a stylize config for the input video
+    process = await asyncio.create_subprocess_shell(
+        command,
+        stdout=asyncio.subprocess.PIPE,
+        stderr=asyncio.subprocess.PIPE
+    )
+    stdout, stderr = await process.communicate()
+    if process.returncode == 0:
+        return stdout.decode()
+    else:
+        raise RuntimeError(f"animatediff failed: {stderr.decode()}")  # propagate CLI failures to the caller
+
+async def start_video_edit(prompt_file):
+    command = f"animatediff stylize generate {prompt_file}"  # run the actual generation
+    process = await asyncio.create_subprocess_shell(
+        command,
+        stdout=asyncio.subprocess.PIPE,
+        stderr=asyncio.subprocess.PIPE
+    )
+    stdout, stderr = await process.communicate()
+    if process.returncode == 0:
+        return stdout.decode()
+    else:
+        raise RuntimeError(f"animatediff failed: {stderr.decode()}")  # propagate CLI failures to the caller
+
+def edit_video(video, pos_prompt):
+    x = asyncio.run(stylize(video))
+    x = x.split("stylize.py")  # parse the CLI's log output (fragile: depends on the exact log format)
+    config = x[18].split("config =")[-1].strip()  # path of the generated prompt config
+    d = x[19].split("stylize_dir = ")[-1].strip()  # stylize working directory
+
+    with open(config, 'r+') as f:
+        data = json.load(f)
+        data['head_prompt'] = pos_prompt
+        data["path"] = "models/huggingface/xxmix9realistic_v40.safetensors"  # base model checkpoint
+
+    os.remove(config)
+    with open(config, 'w') as f:
+        json.dump(data, f, indent=4)
+
+    out = asyncio.run(start_video_edit(d))
+    out = out.split("Stylized results are output to ")[-1]
+    out = out.split("stylize.py")[0].strip()
+
+    cwd = os.getcwd()
+    video_dir = cwd + "/" + out
+
+    video_extensions = {'.mp4', '.avi', '.mkv', '.mov', '.flv', '.wmv'}
+    video_path = None
+
+    for dirpath, dirnames, filenames in os.walk(video_dir):
+        for filename in filenames:
+            if os.path.splitext(filename)[1].lower() in video_extensions:
+                video_path = os.path.join(dirpath, filename)
+                break
+        if video_path:
+            break
+
+    return video_path
+
+with gr.Blocks() as interface:
+    gr.Markdown("## Video Processor with Text Prompts")
+    with gr.Row():
+        with gr.Column():
+            positive_prompt = gr.Textbox(label="Positive Prompt")
+            video_input = gr.Video(label="Input Video")
+        with gr.Column():
+            video_output = gr.Video(label="Processed Video")
+
+    process_button = gr.Button("Process Video")
+    process_button.click(fn=edit_video,
+                         inputs=[video_input, positive_prompt],
+                         outputs=video_output
+                         )
+
+interface.launch(share=True)
diff --git a/config/GroundingDINO/GroundingDINO_SwinB_cfg.py b/config/GroundingDINO/GroundingDINO_SwinB_cfg.py
new file mode 100644
index 0000000000000000000000000000000000000000..f490c4bbd598a35de43d36ceafcbd769e7ff21bf
--- /dev/null
+++ b/config/GroundingDINO/GroundingDINO_SwinB_cfg.py
@@ -0,0 +1,43 @@
+batch_size = 1
+modelname = "groundingdino"
+backbone = "swin_B_384_22k"
+position_embedding = "sine"
+pe_temperatureH = 20
+pe_temperatureW = 20
+return_interm_indices = [1, 2, 3]
+backbone_freeze_keywords = None
+enc_layers = 6
+dec_layers = 6
+pre_norm = False
+dim_feedforward = 2048
+hidden_dim = 256 +dropout = 0.0 +nheads = 8 +num_queries = 900 +query_dim = 4 +num_patterns = 0 +num_feature_levels = 4 +enc_n_points = 4 +dec_n_points = 4 +two_stage_type = "standard" +two_stage_bbox_embed_share = False +two_stage_class_embed_share = False +transformer_activation = "relu" +dec_pred_bbox_embed_share = True +dn_box_noise_scale = 1.0 +dn_label_noise_ratio = 0.5 +dn_label_coef = 1.0 +dn_bbox_coef = 1.0 +embed_init_tgt = True +dn_labelbook_size = 2000 +max_text_len = 256 +text_encoder_type = "bert-base-uncased" +use_text_enhancer = True +use_fusion_layer = True +use_checkpoint = True +use_transformer_ckpt = True +use_text_cross_attention = True +text_dropout = 0.0 +fusion_dropout = 0.0 +fusion_droppath = 0.1 +sub_sentence_present = True diff --git a/config/GroundingDINO/GroundingDINO_SwinT_OGC.py b/config/GroundingDINO/GroundingDINO_SwinT_OGC.py new file mode 100644 index 0000000000000000000000000000000000000000..9158d5f6260ec74bded95377d382387430d7cd70 --- /dev/null +++ b/config/GroundingDINO/GroundingDINO_SwinT_OGC.py @@ -0,0 +1,43 @@ +batch_size = 1 +modelname = "groundingdino" +backbone = "swin_T_224_1k" +position_embedding = "sine" +pe_temperatureH = 20 +pe_temperatureW = 20 +return_interm_indices = [1, 2, 3] +backbone_freeze_keywords = None +enc_layers = 6 +dec_layers = 6 +pre_norm = False +dim_feedforward = 2048 +hidden_dim = 256 +dropout = 0.0 +nheads = 8 +num_queries = 900 +query_dim = 4 +num_patterns = 0 +num_feature_levels = 4 +enc_n_points = 4 +dec_n_points = 4 +two_stage_type = "standard" +two_stage_bbox_embed_share = False +two_stage_class_embed_share = False +transformer_activation = "relu" +dec_pred_bbox_embed_share = True +dn_box_noise_scale = 1.0 +dn_label_noise_ratio = 0.5 +dn_label_coef = 1.0 +dn_bbox_coef = 1.0 +embed_init_tgt = True +dn_labelbook_size = 2000 +max_text_len = 256 +text_encoder_type = "bert-base-uncased" +use_text_enhancer = True +use_fusion_layer = True +use_checkpoint = True +use_transformer_ckpt = True +use_text_cross_attention = True +text_dropout = 0.0 +fusion_dropout = 0.0 +fusion_droppath = 0.1 +sub_sentence_present = True diff --git a/config/inference/default.json b/config/inference/default.json new file mode 100644 index 0000000000000000000000000000000000000000..9c33560882502c6bf73f8eddd958da82ce97b969 --- /dev/null +++ b/config/inference/default.json @@ -0,0 +1,27 @@ +{ + "unet_additional_kwargs": { + "unet_use_cross_frame_attention": false, + "unet_use_temporal_attention": false, + "use_motion_module": true, + "motion_module_resolutions": [1, 2, 4, 8], + "motion_module_mid_block": false, + "motion_module_decoder_only": false, + "motion_module_type": "Vanilla", + "motion_module_kwargs": { + "num_attention_heads": 8, + "num_transformer_block": 1, + "attention_block_types": ["Temporal_Self", "Temporal_Self"], + "temporal_position_encoding": true, + "temporal_position_encoding_max_len": 24, + "temporal_attention_dim_div": 1 + } + }, + "noise_scheduler_kwargs": { + "num_train_timesteps": 1000, + "beta_start": 0.00085, + "beta_end": 0.012, + "beta_schedule": "linear", + "steps_offset": 1, + "clip_sample": false + } +} diff --git a/config/inference/motion_sdxl.json b/config/inference/motion_sdxl.json new file mode 100644 index 0000000000000000000000000000000000000000..bf6158d41e382ce9e5cdbb0421369c2d599b1189 --- /dev/null +++ b/config/inference/motion_sdxl.json @@ -0,0 +1,23 @@ +{ + "unet_additional_kwargs": { + "unet_use_temporal_attention": false, + "use_motion_module": true, + "motion_module_resolutions": [1, 2, 4, 8], + 
"motion_module_mid_block": false, + "motion_module_type": "Vanilla", + "motion_module_kwargs": { + "num_attention_heads": 8, + "num_transformer_block": 1, + "attention_block_types": ["Temporal_Self", "Temporal_Self"], + "temporal_position_encoding": true, + "temporal_position_encoding_max_len": 32, + "temporal_attention_dim_div": 1 + } + }, + "noise_scheduler_kwargs": { + "num_train_timesteps": 1000, + "beta_start": 0.00085, + "beta_end": 0.020, + "beta_schedule": "scaled_linear" + } +} diff --git a/config/inference/motion_v2.json b/config/inference/motion_v2.json new file mode 100644 index 0000000000000000000000000000000000000000..174a8fa36c12b9a645d91cb426076330e93c961c --- /dev/null +++ b/config/inference/motion_v2.json @@ -0,0 +1,28 @@ +{ + "unet_additional_kwargs": { + "use_inflated_groupnorm": true, + "unet_use_cross_frame_attention": false, + "unet_use_temporal_attention": false, + "use_motion_module": true, + "motion_module_resolutions": [1, 2, 4, 8], + "motion_module_mid_block": true, + "motion_module_decoder_only": false, + "motion_module_type": "Vanilla", + "motion_module_kwargs": { + "num_attention_heads": 8, + "num_transformer_block": 1, + "attention_block_types": ["Temporal_Self", "Temporal_Self"], + "temporal_position_encoding": true, + "temporal_position_encoding_max_len": 32, + "temporal_attention_dim_div": 1 + } + }, + "noise_scheduler_kwargs": { + "num_train_timesteps": 1000, + "beta_start": 0.00085, + "beta_end": 0.012, + "beta_schedule": "linear", + "steps_offset": 1, + "clip_sample": false + } +} diff --git a/config/inference/sd15-unet.json b/config/inference/sd15-unet.json new file mode 100644 index 0000000000000000000000000000000000000000..6db217f3331db78e6daf0a8f46f12e4529de3fdf --- /dev/null +++ b/config/inference/sd15-unet.json @@ -0,0 +1,89 @@ +{ + "sample_size": 64, + "in_channels": 4, + "out_channels": 4, + "center_input_sample": false, + "flip_sin_to_cos": true, + "freq_shift": 0, + "down_block_types": [ + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D" + ], + "mid_block_type": "UNetMidBlock2DCrossAttn", + "up_block_types": [ + "UpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D" + ], + "only_cross_attention": false, + "block_out_channels": [320, 640, 1280, 1280], + "layers_per_block": 2, + "downsample_padding": 1, + "mid_block_scale_factor": 1, + "act_fn": "silu", + "norm_num_groups": 32, + "norm_eps": 1e-5, + "cross_attention_dim": 768, + "transformer_layers_per_block": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "attention_head_dim": 8, + "num_attention_heads": null, + "dual_cross_attention": false, + "use_linear_projection": false, + "class_embed_type": null, + "addition_embed_type": null, + "addition_time_embed_dim": null, + "num_class_embeds": null, + "upcast_attention": false, + "resnet_time_scale_shift": "default", + "resnet_skip_time_act": false, + "resnet_out_scale_factor": 1.0, + "time_embedding_type": "positional", + "time_embedding_dim": null, + "time_embedding_act_fn": null, + "timestep_post_act": null, + "time_cond_proj_dim": null, + "conv_in_kernel": 3, + "conv_out_kernel": 3, + "projection_class_embeddings_input_dim": null, + "class_embeddings_concat": false, + "mid_block_only_cross_attention": null, + "cross_attention_norm": null, + "addition_embed_type_num_heads": 64, + "_use_default_values": [ + "transformer_layers_per_block", + "use_linear_projection", + "num_class_embeds", + "addition_embed_type", + "cross_attention_norm", + "conv_out_kernel", 
+ "encoder_hid_dim_type", + "projection_class_embeddings_input_dim", + "num_attention_heads", + "only_cross_attention", + "class_embed_type", + "resnet_time_scale_shift", + "addition_embed_type_num_heads", + "timestep_post_act", + "mid_block_type", + "mid_block_only_cross_attention", + "time_embedding_type", + "addition_time_embed_dim", + "time_embedding_dim", + "encoder_hid_dim", + "resnet_skip_time_act", + "conv_in_kernel", + "upcast_attention", + "dual_cross_attention", + "resnet_out_scale_factor", + "time_cond_proj_dim", + "class_embeddings_concat", + "time_embedding_act_fn" + ], + "_class_name": "UNet2DConditionModel", + "_diffusers_version": "0.6.0" +} diff --git a/config/inference/sd15-unet3d.json b/config/inference/sd15-unet3d.json new file mode 100644 index 0000000000000000000000000000000000000000..bda0b03d27577e4384bdbc1b0d4e126a08b79f03 --- /dev/null +++ b/config/inference/sd15-unet3d.json @@ -0,0 +1,64 @@ +{ + "sample_size": 64, + "in_channels": 4, + "out_channels": 4, + "center_input_sample": false, + "flip_sin_to_cos": true, + "freq_shift": 0, + "down_block_types": [ + "CrossAttnDownBlock3D", + "CrossAttnDownBlock3D", + "CrossAttnDownBlock3D", + "DownBlock3D" + ], + "mid_block_type": "UNetMidBlock3DCrossAttn", + "up_block_types": [ + "UpBlock3D", + "CrossAttnUpBlock3D", + "CrossAttnUpBlock3D", + "CrossAttnUpBlock3D" + ], + "only_cross_attention": false, + "block_out_channels": [320, 640, 1280, 1280], + "layers_per_block": 2, + "downsample_padding": 1, + "mid_block_scale_factor": 1, + "act_fn": "silu", + "norm_num_groups": 32, + "norm_eps": 1e-5, + "cross_attention_dim": 768, + "attention_head_dim": 8, + "dual_cross_attention": false, + "use_linear_projection": false, + "class_embed_type": null, + "num_class_embeds": null, + "upcast_attention": false, + "resnet_time_scale_shift": "default", + "use_motion_module": true, + "motion_module_resolutions": [1, 2, 4, 8], + "motion_module_mid_block": false, + "motion_module_decoder_only": false, + "motion_module_type": "Vanilla", + "motion_module_kwargs": { + "num_attention_heads": 8, + "num_transformer_block": 1, + "attention_block_types": ["Temporal_Self", "Temporal_Self"], + "temporal_position_encoding": true, + "temporal_position_encoding_max_len": 24, + "temporal_attention_dim_div": 1 + }, + "unet_use_cross_frame_attention": false, + "unet_use_temporal_attention": false, + "_use_default_values": [ + "use_linear_projection", + "mid_block_type", + "upcast_attention", + "dual_cross_attention", + "num_class_embeds", + "only_cross_attention", + "class_embed_type", + "resnet_time_scale_shift" + ], + "_class_name": "UNet3DConditionModel", + "_diffusers_version": "0.6.0" +} diff --git a/config/prompts/01-ToonYou.json b/config/prompts/01-ToonYou.json new file mode 100644 index 0000000000000000000000000000000000000000..033fdd951b1df1397952785094f665fcc4d4564d --- /dev/null +++ b/config/prompts/01-ToonYou.json @@ -0,0 +1,24 @@ +{ + "name": "ToonYou", + "base": "", + "path": "models/sd/toonyou_beta3.safetensors", + "motion_module": "models/motion-module/mm_sd_v15.ckpt", + "compile": false, + "seed": [ + 10788741199826055000, 6520604954829637000, 6519455744612556000, + 16372571278361864000 + ], + "scheduler": "k_dpmpp", + "steps": 30, + "guidance_scale": 8.5, + "clip_skip": 2, + "prompt": [ + "1girl, solo, best quality, masterpiece, looking at viewer, purple hair, orange hair, gradient hair, blurry background, upper body, dress, flower print, spaghetti strap, bare shoulders", + "1girl, solo, masterpiece, best quality, cherry blossoms, hanami, 
pink flower, white flower, spring season, wisteria, petals, flower, plum blossoms, outdoors, falling petals, white hair, black eyes,", + "1girl, solo, best quality, masterpiece, looking at viewer, purple hair, orange hair, gradient hair, blurry background, upper body, dress, flower print, spaghetti strap, bare shoulders", + "1girl, solo, best quality, masterpiece, cloudy sky, dandelion, contrapposto, alternate hairstyle" + ], + "n_prompt": [ + "worst quality, low quality, cropped, lowres, text, jpeg artifacts, multiple view" + ] +} diff --git a/config/prompts/02-Lyriel.json b/config/prompts/02-Lyriel.json new file mode 100644 index 0000000000000000000000000000000000000000..9dc50afc96ae3d3d9b1d14b8caf0fc7630ab72a6 --- /dev/null +++ b/config/prompts/02-Lyriel.json @@ -0,0 +1,25 @@ +{ + "name": "Lyriel", + "base": "", + "path": "models/sd/lyriel_v16.safetensors", + "motion_module": "models/motion-module/mm_sd_v15.ckpt", + "seed": [ + 10917152860782582000, 6399018107401806000, 15875751942533906000, + 6653196880059937000 + ], + "scheduler": "k_dpmpp", + "steps": 25, + "guidance_scale": 7.5, + "prompt": [ + "dark shot, epic realistic, portrait of halo, sunglasses, blue eyes, tartan scarf, white hair by atey ghailan, by greg rutkowski, by greg tocchini, by james gilleard, by joe fenton, by kaethe butcher, gradient yellow, black, brown and magenta color scheme, grunge aesthetic!!! graffiti tag wall background, art by greg rutkowski and artgerm, soft cinematic light, adobe lightroom, photolab, hdr, intricate, highly detailed, depth of field, faded, neutral colors, hdr, muted colors, hyperdetailed, artstation, cinematic, warm lights, dramatic light, intricate details, complex background, rutkowski, teal and orange", + "A forbidden castle high up in the mountains, pixel art, intricate details2, hdr, intricate details, hyperdetailed5, natural skin texture, hyperrealism, soft light, sharp, game art, key visual, surreal", + "dark theme, medieval portrait of a man sharp features, grim, cold stare, dark colors, Volumetric lighting, baroque oil painting by Greg Rutkowski, Artgerm, WLOP, Alphonse Mucha dynamic lighting hyperdetailed intricately detailed, hdr, muted colors, complex background, hyperrealism, hyperdetailed, amandine van ray", + "As I have gone alone in there and with my treasures bold, I can keep my secret where and hint of riches new and old. Begin it where warm waters halt and take it in a canyon down, not far but too far to walk, put in below the home of brown." 
+ ], + "n_prompt": [ + "3d, cartoon, lowres, bad anatomy, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, worst quality, low quality, normal quality, jpeg artifacts, signature, watermark, username, blurry, artist name, young, loli, elf, 3d, illustration", + "3d, cartoon, anime, sketches, worst quality, low quality, normal quality, lowres, normal quality, monochrome, grayscale, skin spots, acnes, skin blemishes, bad anatomy, girl, loli, young, large breasts, red eyes, muscular", + "dof, grayscale, black and white, bw, 3d, cartoon, anime, sketches, worst quality, low quality, normal quality, lowres, normal quality, monochrome, grayscale, skin spots, acnes, skin blemishes, bad anatomy, girl, loli, young, large breasts, red eyes, muscular,badhandsv5-neg, By bad artist -neg 1, monochrome", + "holding an item, cowboy, hat, cartoon, 3d, disfigured, bad art, deformed,extra limbs,close up,b&w, wierd colors, blurry, duplicate, morbid, mutilated, [out of frame], extra fingers, mutated hands, poorly drawn hands, poorly drawn face, mutation, deformed, ugly, blurry, bad anatomy, bad proportions, extra limbs, cloned face, disfigured, out of frame, ugly, extra limbs, bad anatomy, gross proportions, malformed limbs, missing arms, missing legs, extra arms, extra legs, mutated hands, fused fingers, too many fingers, long neck, Photoshop, video game, ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, mutation, mutated, extra limbs, extra legs, extra arms, disfigured, deformed, cross-eye, body out of frame, blurry, bad art, bad anatomy, 3d render" + ] +} diff --git a/config/prompts/03-RcnzCartoon.json b/config/prompts/03-RcnzCartoon.json new file mode 100644 index 0000000000000000000000000000000000000000..fcf0bf0a4e2c9ec9f03843ea8b24cd1af061a897 --- /dev/null +++ b/config/prompts/03-RcnzCartoon.json @@ -0,0 +1,25 @@ +{ + "name": "RcnzCartoon", + "base": "", + "path": "models/sd/rcnzCartoon3d_v10.safetensors", + "motion_module": "models/motion-module/mm_sd_v15.ckpt", + "seed": [ + 16931037867122268000, 2094308009433392000, 4292543217695451000, + 15572665120852310000 + ], + "scheduler": "k_dpmpp", + "steps": 25, + "guidance_scale": 7.5, + "prompt": [ + "Jane Eyre with headphones, natural skin texture,4mm,k textures, soft cinematic light, adobe lightroom, photolab, hdr, intricate, elegant, highly detailed, sharp focus, cinematic look, soothing tones, insane details, intricate details, hyperdetailed, low contrast, soft cinematic light, dim colors, exposure blend, hdr, faded", + "close up Portrait photo of muscular bearded guy in a worn mech suit, light bokeh, intricate, steel metal [rust], elegant, sharp focus, photo by greg rutkowski, soft lighting, vibrant colors, masterpiece, streets, detailed face", + "absurdres, photorealistic, masterpiece, a 30 year old man with gold framed, aviator reading glasses and a black hooded jacket and a beard, professional photo, a character portrait, altermodern, detailed eyes, detailed lips, detailed face, grey eyes", + "a golden labrador, warm vibrant colours, natural lighting, dappled lighting, diffused lighting, absurdres, highres,k, uhd, hdr, rtx, unreal, octane render, RAW photo, photorealistic, global illumination, subsurface scattering" + ], + "n_prompt": [ + "deformed, distorted, disfigured, poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, mutated hands and fingers, disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation", + "nude, cross eyed, tongue, 
open mouth, inside, 3d, cartoon, anime, sketches, worst quality, low quality, normal quality, lowres, normal quality, monochrome, grayscale, skin spots, acnes, skin blemishes, bad anatomy, red eyes, muscular", + "easynegative, cartoon, anime, sketches, necklace, earrings worst quality, low quality, normal quality, bad anatomy, bad hands, shiny skin, error, missing fingers, extra digit, fewer digits, jpeg artifacts, signature, watermark, username, blurry, chubby, anorectic, bad eyes, old, wrinkled skin, red skin, photograph By bad artist -neg, big eyes, muscular face,", + "beard, EasyNegative, lowres, chromatic aberration, depth of field, motion blur, blurry, bokeh, bad quality, worst quality, multiple arms, badhand" + ] +} diff --git a/config/prompts/04-MajicMix.json b/config/prompts/04-MajicMix.json new file mode 100644 index 0000000000000000000000000000000000000000..b462b5f19cf8299fb6afd0f178afd708aae84325 --- /dev/null +++ b/config/prompts/04-MajicMix.json @@ -0,0 +1,25 @@ +{ + "name": "MajicMix", + "base": "", + "path": "models/sd/majicmixRealistic_v5Preview.safetensors", + "motion_module": "models/motion-module/mm_sd_v15.ckpt", + "seed": [ + 1572448948722921000, 1099474677988590700, 6488833139725636000, + 18339859844376519000 + ], + "scheduler": "k_dpmpp", + "steps": 25, + "guidance_scale": 7.5, + "prompt": [ + "1girl, offshoulder, light smile, shiny skin best quality, masterpiece, photorealistic", + "best quality, masterpiece, photorealistic, 1boy, 50 years old beard, dramatic lighting", + "best quality, masterpiece, photorealistic, 1girl, light smile, shirt with collars, waist up, dramatic lighting, from below", + "male, man, beard, bodybuilder, skinhead,cold face, tough guy, cowboyshot, tattoo, french windows, luxury hotel masterpiece, best quality, photorealistic" + ], + "n_prompt": [ + "ng_deepnegative_v1_75t, badhandv4, worst quality, low quality, normal quality, lowres, bad anatomy, bad hands, watermark, moles", + "nsfw, ng_deepnegative_v1_75t,badhandv4, worst quality, low quality, normal quality, lowres,watermark, monochrome", + "nsfw, ng_deepnegative_v1_75t,badhandv4, worst quality, low quality, normal quality, lowres,watermark, monochrome", + "nude, nsfw, ng_deepnegative_v1_75t, badhandv4, worst quality, low quality, normal quality, lowres, bad anatomy, bad hands, monochrome, grayscale watermark, moles, people" + ] +} diff --git a/config/prompts/05-RealisticVision.json b/config/prompts/05-RealisticVision.json new file mode 100644 index 0000000000000000000000000000000000000000..ba93d964c342f5de995047b1cc38a6bb1fe34db0 --- /dev/null +++ b/config/prompts/05-RealisticVision.json @@ -0,0 +1,25 @@ +{ + "name": "RealisticVision", + "base": "", + "path": "models/sd/realisticVisionV20_v20.safetensors", + "motion_module": "models/motion-module/mm_sd_v15.ckpt", + "seed": [ + 5658137986800322000, 12099779162349365000, 10499524853910854000, + 16768009035333712000 + ], + "scheduler": "k_dpmpp", + "steps": 25, + "guidance_scale": 7.5, + "prompt": [ + "b&w photo of 42 y.o man in black clothes, bald, face, half body, body, high detailed skin, skin pores, coastline, overcast weather, wind, waves, 8k uhd, dslr, soft lighting, high quality, film grain, Fujifilm XT3", + "close up photo of a rabbit, forest, haze, halation, bloom, dramatic atmosphere, centred, rule of thirds, 200mm 1.4f macro shot", + "photo of coastline, rocks, storm weather, wind, waves, lightning, 8k uhd, dslr, soft lighting, high quality, film grain, Fujifilm XT3", + "night, b&w photo of old house, post apocalypse, forest, 
storm weather, wind, rocks, 8k uhd, dslr, soft lighting, high quality, film grain" + ], + "n_prompt": [ + "semi-realistic, cgi, 3d, render, sketch, cartoon, drawing, anime, text, close up, cropped, out of frame, worst quality, low quality, jpeg artifacts, ugly, duplicate, morbid, mutilated, extra fingers, mutated hands, poorly drawn hands, poorly drawn face, mutation, deformed, blurry, dehydrated, bad anatomy, bad proportions, extra limbs, cloned face, disfigured, gross proportions, malformed limbs, missing arms, missing legs, extra arms, extra legs, fused fingers, too many fingers, long neck", + "semi-realistic, cgi, 3d, render, sketch, cartoon, drawing, anime, text, close up, cropped, out of frame, worst quality, low quality, jpeg artifacts, ugly, duplicate, morbid, mutilated, extra fingers, mutated hands, poorly drawn hands, poorly drawn face, mutation, deformed, blurry, dehydrated, bad anatomy, bad proportions, extra limbs, cloned face, disfigured, gross proportions, malformed limbs, missing arms, missing legs, extra arms, extra legs, fused fingers, too many fingers, long neck", + "blur, haze, deformed iris, deformed pupils, semi-realistic, cgi, 3d, render, sketch, cartoon, drawing, anime, mutated hands and fingers, deformed, distorted, disfigured, poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, disconnected limbs, mutation, mutated, ugly, disgusting, amputation", + "blur, haze, deformed iris, deformed pupils, semi-realistic, cgi, 3d, render, sketch, cartoon, drawing, anime, art, mutated hands and fingers, deformed, distorted, disfigured, poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, disconnected limbs, mutation, mutated, ugly, disgusting, amputation" + ] +} diff --git a/config/prompts/06-Tusun.json b/config/prompts/06-Tusun.json new file mode 100644 index 0000000000000000000000000000000000000000..95e400d6ff4f848228adebbc87c8d64481b34e27 --- /dev/null +++ b/config/prompts/06-Tusun.json @@ -0,0 +1,23 @@ +{ + "name": "Tusun", + "base": "models/sd/moonfilm_reality20.safetensors", + "path": "models/sd/TUSUN.safetensors", + "motion_module": "models/motion-module/mm_sd_v15.ckpt", + "seed": [ + 10154078483724687000, 2664393535095473700, 4231566096207623000, + 1713349740448094500 + ], + "scheduler": "k_dpmpp", + "steps": 25, + "guidance_scale": 7.5, + "lora_alpha": 0.6, + "prompt": [ + "tusuncub with its mouth open, blurry, open mouth, fangs, photo background, looking at viewer, tongue, full body, solo, cute and lovely, Beautiful and realistic eye details, perfect anatomy, Nonsense, pure background, Centered-Shot, realistic photo, photograph, 4k, hyper detailed, DSLR, 24 Megapixels, 8mm Lens, Full Frame, film grain, Global Illumination, studio Lighting, Award Winning Photography, diffuse reflection, ray tracing", + "cute tusun with a blurry background, black background, simple background, signature, face, solo, cute and lovely, Beautiful and realistic eye details, perfect anatomy, Nonsense, pure background, Centered-Shot, realistic photo, photograph, 4k, hyper detailed, DSLR, 24 Megapixels, 8mm Lens, Full Frame, film grain, Global Illumination, studio Lighting, Award Winning Photography, diffuse reflection, ray tracing", + "cut tusuncub walking in the snow, blurry, looking at viewer, depth of field, blurry background, full body, solo, cute and lovely, Beautiful and realistic eye details, perfect anatomy, Nonsense, pure background, Centered-Shot, realistic photo, photograph, 4k, hyper detailed, DSLR, 24 Megapixels, 8mm 
Lens, Full Frame, film grain, Global Illumination, studio Lighting, Award Winning Photography, diffuse reflection, ray tracing", + "character design, cyberpunk tusun kitten wearing astronaut suit, sci-fic, realistic eye color and details, fluffy, big head, science fiction, communist ideology, Cyborg, fantasy, intense angle, soft lighting, photograph, 4k, hyper detailed, portrait wallpaper, realistic, photo-realistic, DSLR, 24 Megapixels, Full Frame, vibrant details, octane render, finely detail, best quality, incredibly absurdres, robotic parts, rim light, vibrant details, luxurious cyberpunk, hyperrealistic, cable electric wires, microchip, full body" + ], + "n_prompt": [ + "worst quality, low quality, deformed, distorted, disfigured, bad eyes, bad anatomy, disconnected limbs, wrong body proportions, low quality, worst quality, text, watermark, signatre, logo, illustration, painting, cartoons, ugly, easy_negative" + ] +} diff --git a/config/prompts/07-FilmVelvia.json b/config/prompts/07-FilmVelvia.json new file mode 100644 index 0000000000000000000000000000000000000000..cf53c4fd1eeac864ceeba88b0af17003485d214f --- /dev/null +++ b/config/prompts/07-FilmVelvia.json @@ -0,0 +1,26 @@ +{ + "name": "FilmVelvia", + "base": "models/sd/majicmixRealistic_v4.safetensors", + "path": "models/sd/FilmVelvia2.safetensors", + "motion_module": "models/motion-module/mm_sd_v15.ckpt", + "seed": [ + 358675358833372800, 3519455280971924000, 11684545350557985000, + 8696855302100400000 + ], + "scheduler": "k_dpmpp", + "steps": 25, + "guidance_scale": 7.5, + "lora_alpha": 0.6, + "prompt": [ + "a woman standing on the side of a road at night,girl, long hair, motor vehicle, car, looking at viewer, ground vehicle, night, hands in pockets, blurry background, coat, black hair, parted lips, bokeh, jacket, brown hair, outdoors, red lips, upper body, artist name", + ", dark shot,0mm, portrait quality of a arab man worker,boy, wasteland that stands out vividly against the background of the desert, barren landscape, closeup, moles skin, soft light, sharp, exposure blend, medium shot, bokeh, hdr, high contrast, cinematic, teal and orange5, muted colors, dim colors, soothing tones, low saturation, hyperdetailed, noir", + "fashion photography portrait of 1girl, offshoulder, fluffy short hair, soft light, rim light, beautiful shadow, low key, photorealistic, raw photo, natural skin texture, realistic eye and face details, hyperrealism, ultra high res, 4K, Best quality, masterpiece, necklace, cleavage, in the dark", + "In this lighthearted portrait, a woman is dressed as a fierce warrior, armed with an arsenal of paintbrushes and palette knives. Her war paint is composed of thick, vibrant strokes of color, and her armor is made of paint tubes and paint-splattered canvases. She stands victoriously atop a mountain of conquered blank canvases, with a beautiful, colorful landscape behind her, symbolizing the power of art and creativity. 
bust Portrait, close-up, Bright and transparent scene lighting, " + ], + "n_prompt": [ + "cartoon, anime, sketches,worst quality, low quality, deformed, distorted, disfigured, bad eyes, wrong lips, weird mouth, bad teeth, mutated hands and fingers, bad anatomy, wrong anatomy, amputation, extra limb, missing limb, floating limbs, disconnected limbs, mutation, ugly, disgusting, bad_pictures, negative_hand-neg", + "cartoon, anime, sketches,worst quality, low quality, deformed, distorted, disfigured, bad eyes, wrong lips, weird mouth, bad teeth, mutated hands and fingers, bad anatomy, wrong anatomy, amputation, extra limb, missing limb, floating limbs, disconnected limbs, mutation, ugly, disgusting, bad_pictures, negative_hand-neg", + "wrong white balance, dark, cartoon, anime, sketches,worst quality, low quality, deformed, distorted, disfigured, bad eyes, wrong lips, weird mouth, bad teeth, mutated hands and fingers, bad anatomy, wrong anatomy, amputation, extra limb, missing limb, floating limbs, disconnected limbs, mutation, ugly, disgusting, bad_pictures, negative_hand-neg", + "wrong white balance, dark, cartoon, anime, sketches,worst quality, low quality, deformed, distorted, disfigured, bad eyes, wrong lips, weird mouth, bad teeth, mutated hands and fingers, bad anatomy, wrong anatomy, amputation, extra limb, missing limb, floating limbs, disconnected limbs, mutation, ugly, disgusting, bad_pictures, negative_hand-neg" + ] +} diff --git a/config/prompts/08-GhibliBackground.json b/config/prompts/08-GhibliBackground.json new file mode 100644 index 0000000000000000000000000000000000000000..fbe981aeed0a2a6d6f3ee15b5dd13c51c28cf553 --- /dev/null +++ b/config/prompts/08-GhibliBackground.json @@ -0,0 +1,23 @@ +{ + "name": "GhibliBackground", + "base": "models/sd/CounterfeitV30_25.safetensors", + "path": "models/sd/lora_Ghibli_n3.safetensors", + "motion_module": "models/motion-module/mm_sd_v15.ckpt", + "seed": [ + 8775748474469046000, 5893874876080607000, 11911465742147697000, + 12437784838692000000 + ], + "scheduler": "k_dpmpp", + "steps": 25, + "guidance_scale": 7.5, + "lora_alpha": 1, + "prompt": [ + "best quality,single build,architecture, blue_sky, building,cloudy_sky, day, fantasy, fence, field, house, build,architecture,landscape, moss, outdoors, overgrown, path, river, road, rock, scenery, sky, sword, tower, tree, waterfall", + "black_border, building, city, day, fantasy, ice, landscape, letterboxed, mountain, ocean, outdoors, planet, scenery, ship, snow, snowing, water, watercraft, waterfall, winter", + ",mysterious sea area, fantasy,build,concept", + "Tomb Raider,Scenography,Old building" + ], + "n_prompt": [ + "easynegative,bad_construction,bad_structure,bad_wail,bad_windows,blurry,cloned_window,cropped,deformed,disfigured,error,extra_windows,extra_chimney,extra_door,extra_structure,extra_frame,fewer_digits,fused_structure,gross_proportions,jpeg_artifacts,long_roof,low_quality,structure_limbs,missing_windows,missing_doors,missing_roofs,mutated_structure,mutation,normal_quality,out_of_frame,owres,poorly_drawn_structure,poorly_drawn_house,signature,text,too_many_windows,ugly,username,uta,watermark,worst_quality" + ] +} diff --git a/config/prompts/concat_2horizontal.bat b/config/prompts/concat_2horizontal.bat new file mode 100644 index 0000000000000000000000000000000000000000..1c7663ca12ec0135bb0516c5e795ee2f960982b2 --- /dev/null +++ b/config/prompts/concat_2horizontal.bat @@ -0,0 +1 @@ +ffmpeg -i %1 -i %2 -filter_complex "[0:v][1:v]hstack=inputs=2[v]" -map "[v]" -crf 15 2horizontal.mp4 
diff --git a/config/prompts/copy_png.bat b/config/prompts/copy_png.bat
new file mode 100644
index 0000000000000000000000000000000000000000..4085b42bcc00d17a6ed4d65914be44373fae75b7
--- /dev/null
+++ b/config/prompts/copy_png.bat
@@ -0,0 +1,13 @@
+
+:: Duplicates image %1 into a zero-padded sequence 00001.png .. N.png, where N is
+:: taken from %1's numeric file name (%~n1), then renames %1 itself to 00000.png.
+setlocal enableDelayedExpansion
+FOR /l %%N in (1,1,%~n1) do (
+    set "n=00000%%N"
+    set "TEST=!n:~-5!"
+    echo !TEST!
+    copy /y %1 !TEST!.png
+)
+
+ren %1 00000.png
+
diff --git a/config/prompts/ignore_tokens.txt b/config/prompts/ignore_tokens.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e9f87a6c4e6b03a33df48bf3afdf8e551625c34b
--- /dev/null
+++ b/config/prompts/ignore_tokens.txt
@@ -0,0 +1,4 @@
+motion_blur
+blurry
+realistic
+depth_of_field
diff --git a/config/prompts/img2img_sample.json b/config/prompts/img2img_sample.json
new file mode 100644
index 0000000000000000000000000000000000000000..5aa4b976c8074a9fdbb4d331e421b768475585b2
--- /dev/null
+++ b/config/prompts/img2img_sample.json
@@ -0,0 +1,272 @@
+{
+    "name": "sample",
+    "path": "share/Stable-diffusion/mistoonAnime_v20.safetensors",
+    "motion_module": "models/motion-module/mm_sd_v15_v2.ckpt",
+    "compile": false,
+    "seed": [
+        12345
+    ],
+    "scheduler": "k_dpmpp_sde",
+    "steps": 20,
+    "guidance_scale": 10,
+    "unet_batch_size": 1,
+    "clip_skip": 2,
+    "prompt_fixed_ratio": 0.5,
+    "head_prompt": "(style of studio ghibli:1.2), (masterpiece, best quality)",
+    "prompt_map": {
+        "0": "forest, water, river, outdoors,"
+    },
+    "tail_prompt": "",
+    "n_prompt": [
+        "(worst quality:2), (bad quality:2), (normal quality:2), lowers, bad anatomy, bad hands, (multiple views),"
+    ],
+    "lora_map": {
+        "share/models/Lora/Ghibli_v6.safetensors": 1.0
+    },
+    "motion_lora_map": {
+    },
+    "ip_adapter_map": {
+        "enable": false,
+        "input_image_dir": "",
+        "prompt_fixed_ratio": 0.5,
+        "save_input_image": true,
+        "resized_to_square": false,
+        "scale": 0.5,
+        "is_plus_face": false,
+        "is_plus": true,
+        "is_light": false
+    },
+    "img2img_map":{
+        "enable": true,
+        "init_img_dir" : "init_imgs/sample0",
+        "save_init_image": true,
+        "denoising_strength" : 0.85
+    },
+    "controlnet_map": {
+        "input_image_dir" : "",
+        "max_samples_on_vram": 0,
+        "max_models_on_vram" : 1,
+        "save_detectmap": true,
+        "preprocess_on_gpu": true,
+        "is_loop": true,
+
+        "controlnet_tile":{
+            "enable": true,
+            "use_preprocessor":true,
+            "preprocessor":{
+                "type" : "none",
+                "param":{
+                }
+            },
+            "guess_mode":false,
+            "controlnet_conditioning_scale": 1.0,
+            "control_guidance_start": 0.0,
+            "control_guidance_end": 1.0,
+            "control_scale_list":[0.5,0.4,0.3,0.2,0.1]
+        },
+        "controlnet_ip2p":{
+            "enable": true,
+            "use_preprocessor":true,
+            "guess_mode":false,
+            "controlnet_conditioning_scale": 1.0,
+            "control_guidance_start": 0.0,
+            "control_guidance_end": 1.0,
+            "control_scale_list":[0.5,0.4,0.3,0.2,0.1]
+        },
+        "controlnet_lineart_anime":{
+            "enable": true,
+            "use_preprocessor":true,
+            "guess_mode":false,
+            "controlnet_conditioning_scale": 1.0,
+            "control_guidance_start": 0.0,
+            "control_guidance_end": 1.0,
+            "control_scale_list":[0.5,0.4,0.3,0.2,0.1]
+        },
+        "controlnet_openpose":{
+            "enable": true,
+            "use_preprocessor":true,
+            "guess_mode":false,
+            "controlnet_conditioning_scale": 1.0,
+            "control_guidance_start": 0.0,
+            "control_guidance_end": 1.0,
+            "control_scale_list":[0.5,0.4,0.3,0.2,0.1]
+        },
+        "controlnet_softedge":{
+            "enable": true,
+            "use_preprocessor":true,
+            "preprocessor":{
+                "type" : "softedge_pidsafe",
+                "param":{
+                }
+            },
+            "guess_mode":false,
+            "controlnet_conditioning_scale": 1.0,
+
"control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_shuffle": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_depth": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_canny": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_inpaint": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mlsd": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_normalbae": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_scribble": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_seg": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v1": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v2": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mediapipe_face": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ref": { + "enable": false, + "ref_image": "ref_image/ref_sample.png", + "attention_auto_machine_weight": 0.3, + "gn_auto_machine_weight": 0.3, + "style_fidelity": 0.5, + "reference_attn": true, + "reference_adain": false, + "scale_pattern":[1.0] + } + }, + "upscale_config": { + "scheduler": "k_dpmpp_sde", + "steps": 20, + "strength": 0.5, + "guidance_scale": 10, + "controlnet_tile": { + "enable": true, + "controlnet_conditioning_scale": 1.0, + "guess_mode": 
false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_line_anime": { + "enable": false, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ip2p": { + "enable": false, + "controlnet_conditioning_scale": 0.5, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ref": { + "enable": false, + "use_frame_as_ref_image": false, + "use_1st_frame_as_ref_image": false, + "ref_image": "ref_image/path_to_your_ref_img.jpg", + "attention_auto_machine_weight": 1.0, + "gn_auto_machine_weight": 1.0, + "style_fidelity": 0.25, + "reference_attn": true, + "reference_adain": false + } + }, + "output":{ + "format" : "mp4", + "fps" : 8, + "encode_param":{ + "crf": 10 + } + } +} diff --git a/config/prompts/inpaint_sample.json b/config/prompts/inpaint_sample.json new file mode 100644 index 0000000000000000000000000000000000000000..e4bf2d49e58463024623709c82e1199b35a62600 --- /dev/null +++ b/config/prompts/inpaint_sample.json @@ -0,0 +1,299 @@ +{ + "name": "sample", + "path": "share/Stable-diffusion/mistoonAnime_v20.safetensors", + "motion_module": "models/motion-module/mm_sd_v15_v2.ckpt", + "compile": false, + "seed": [ + 12345 + ], + "scheduler": "k_dpmpp_sde", + "steps": 20, + "guidance_scale": 10, + "unet_batch_size": 1, + "clip_skip": 2, + "prompt_fixed_ratio": 0.5, + "head_prompt": "(style of studio ghibli:1.2), (masterpiece, best quality)", + "prompt_map": { + "0": "cyberpunk,robot cat, robot" + }, + "tail_prompt": "", + "n_prompt": [ + "(worst quality:2), (bad quality:2), (normal quality:2), lowers, bad anatomy, bad hands, (multiple views)," + ], + "lora_map": { + "share/models/Lora/Ghibli_v6.safetensors": 1.0 + }, + "motion_lora_map": { + }, + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cyberpunk", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false, + "scale": 0.5, + "is_plus_face": false, + "is_plus": true, + "is_light": false + }, + "img2img_map":{ + "enable": true, + "init_img_dir" : "init_imgs/sample1", + "save_init_image": true, + "denoising_strength" : 0.85 + }, + "region_map" : { + "0":{ + "enable": true, + "mask_dir" : "mask/sample1", + "save_mask": true, + "is_init_img" : true, + "condition":{ + "prompt_fixed_ratio": 0.5, + "head_prompt": "(masterpiece, best quality)", + "prompt_map": { + "0": "cyberpunk,robot cat, robot" + }, + "tail_prompt": "", + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cyberpunk", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false + } + } + }, + "background":{ + "is_init_img" : false, + "hint" : "background's condition refers to the one in root" + } + }, + "controlnet_map": { + "input_image_dir" : "controlnet_image/cat", + "max_samples_on_vram": 0, + "max_models_on_vram" : 1, + "save_detectmap": true, + "preprocess_on_gpu": true, + "is_loop": true, + + "controlnet_tile":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "none", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ip2p":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale":0.5, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[] + 
}, + "controlnet_lineart_anime":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_openpose":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_softedge":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "softedge_pidsafe", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_shuffle": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_depth": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_canny": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_inpaint": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mlsd": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_normalbae": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 0.25, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[] + }, + "controlnet_scribble": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_seg": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v1": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v2": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mediapipe_face": { + 
"enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ref": { + "enable": false, + "ref_image": "ref_image/ref_sample.png", + "attention_auto_machine_weight": 0.3, + "gn_auto_machine_weight": 0.3, + "style_fidelity": 0.5, + "reference_attn": true, + "reference_adain": false, + "scale_pattern":[1.0] + } + }, + "upscale_config": { + "scheduler": "k_dpmpp_sde", + "steps": 20, + "strength": 0.5, + "guidance_scale": 10, + "controlnet_tile": { + "enable": true, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_line_anime": { + "enable": false, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ip2p": { + "enable": false, + "controlnet_conditioning_scale": 0.5, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ref": { + "enable": false, + "use_frame_as_ref_image": false, + "use_1st_frame_as_ref_image": false, + "ref_image": "ref_image/path_to_your_ref_img.jpg", + "attention_auto_machine_weight": 1.0, + "gn_auto_machine_weight": 1.0, + "style_fidelity": 0.25, + "reference_attn": true, + "reference_adain": false + } + }, + "output":{ + "format" : "mp4", + "fps" : 8, + "encode_param":{ + "crf": 10 + } + } +} diff --git a/config/prompts/prompt_travel.json b/config/prompts/prompt_travel.json new file mode 100644 index 0000000000000000000000000000000000000000..c1846328b25670586b767c558b5836c56bd16448 --- /dev/null +++ b/config/prompts/prompt_travel.json @@ -0,0 +1,316 @@ +{ + "name": "sample", + "path": "share/Stable-diffusion/mistoonAnime_v20.safetensors", + "context_schedule" : "uniform", + "lcm_map":{ + "enable":false, + "start_scale":0.15, + "end_scale":0.75, + "gradient_start":0.2, + "gradient_end":0.75 + }, + "gradual_latent_hires_fix_map":{ + "enable": false, + "scale": { + "0": 0.5, + "0.7": 1.0 + }, + "reverse_steps": 5, + "noise_add_count":3 + }, + "motion_module": "models/motion-module/mm_sd_v15_v2.ckpt", + "compile": false, + "seed": [ + 341774366206100 + ], + "scheduler": "euler_a", + "steps": 20, + "guidance_scale": 10, + "unet_batch_size": 1, + "clip_skip": 2, + "prompt_fixed_ratio": 0.5, + "head_prompt": "masterpiece, best quality, a beautiful and detailed portriat of muffet, monster girl,((purple body:1.3)),humanoid, arachnid, anthro,((fangs)),pigtails,hair bows,5 eyes,spider girl,6 arms,solo", + "prompt_map": { + "0": "smile standing,((spider webs:1.0))", + "32": "(((walking))),((spider webs:1.0))", + "64": "(((running))),((spider webs:2.0)),wide angle lens, fish eye effect", + "96": "(((sitting))),((spider webs:1.0))" + }, + "tail_prompt": "clothed, open mouth, awesome and detailed background, holding teapot, holding teacup, 6 hands,detailed hands,storefront that sells pastries and tea,bloomers,(red and black clothing),inside,pouring into teacup,muffetwear", + "n_prompt": [ + "(worst quality, low quality:1.4),nudity,simple background,border,mouth closed,text, patreon,bed,bedroom,white background,((monochrome)),sketch,(pink body:1.4),7 arms,8 arms,4 arms" + ], + "lora_map": { + "share/Lora/muffet_v2.safetensors" : 1.0, + "share/Lora/add_detail.safetensors" : 1.0 + }, + "motion_lora_map": { + "models/motion_lora/v2_lora_PanLeft.ckpt":1.0 + }, + "ip_adapter_map": { + 
"enable": true, + "input_image_dir": "ip_adapter_image/test", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false, + "scale": 0.5, + "is_full_face": false, + "is_plus_face": false, + "is_plus": true, + "is_light": false + }, + "controlnet_map": { + "input_image_dir" : "controlnet_image/test", + "max_samples_on_vram": 0, + "max_models_on_vram" : 0, + "save_detectmap": true, + "preprocess_on_gpu": true, + "is_loop": true, + + "controlnet_tile":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "none", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_ip2p":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_lineart_anime":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_openpose":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_softedge":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "softedge_pidsafe", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_shuffle": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_depth": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_canny": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_inpaint": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_lineart": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_mlsd": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + 
"control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_normalbae": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_scribble": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_seg": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "qr_code_monster_v1": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "qr_code_monster_v2": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_mediapipe_face": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "animatediff_controlnet": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1], + "control_region_list":[] + }, + "controlnet_ref": { + "enable": false, + "ref_image": "ref_image/ref_sample.png", + "attention_auto_machine_weight": 0.3, + "gn_auto_machine_weight": 0.3, + "style_fidelity": 0.5, + "reference_attn": true, + "reference_adain": false, + "scale_pattern":[1.0] + } + }, + "upscale_config": { + "scheduler": "k_dpmpp_sde", + "steps": 20, + "strength": 0.5, + "guidance_scale": 10, + "controlnet_tile": { + "enable": true, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_line_anime": { + "enable": false, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ip2p": { + "enable": false, + "controlnet_conditioning_scale": 0.5, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ref": { + "enable": false, + "use_frame_as_ref_image": false, + "use_1st_frame_as_ref_image": false, + "ref_image": "ref_image/path_to_your_ref_img.jpg", + "attention_auto_machine_weight": 1.0, + "gn_auto_machine_weight": 1.0, + "style_fidelity": 0.25, + "reference_attn": true, + "reference_adain": false + } + }, + "output":{ + "format" : "gif", + "fps" : 8, + "encode_param":{ + "crf": 10 + } + } +} diff --git a/config/prompts/prompt_travel_multi_controlnet.json b/config/prompts/prompt_travel_multi_controlnet.json new file mode 100644 index 
0000000000000000000000000000000000000000..107d02f7ec3e1820a0b0ad155c707e31a86afe36 --- /dev/null +++ b/config/prompts/prompt_travel_multi_controlnet.json @@ -0,0 +1,238 @@ +{ + "name": "sample", + "path": "share/Stable-diffusion/mistoonAnime_v20.safetensors", + "motion_module": "models/motion-module/mm_sd_v14.ckpt", + "compile": false, + "seed": [ + 341774366206100 + ], + "scheduler": "k_dpmpp_sde", + "steps": 20, + "guidance_scale": 10, + "clip_skip": 2, + "head_prompt": "masterpiece, best quality, a beautiful and detailed portriat of muffet, monster girl,((purple body:1.3)),humanoid, arachnid, anthro,((fangs)),pigtails,hair bows,5 eyes,spider girl,6 arms,solo", + "prompt_map": { + "0": "smile standing,((spider webs:1.0))", + "32": "(((walking))),((spider webs:1.0))", + "64": "(((running))),((spider webs:2.0)),wide angle lens, fish eye effect", + "96": "(((sitting))),((spider webs:1.0))" + }, + "tail_prompt": "clothed, open mouth, awesome and detailed background, holding teapot, holding teacup, 6 hands,detailed hands,storefront that sells pastries and tea,bloomers,(red and black clothing),inside,pouring into teacup,muffetwear", + "n_prompt": [ + "(worst quality, low quality:1.4),nudity,simple background,border,mouth closed,text, patreon,bed,bedroom,white background,((monochrome)),sketch,(pink body:1.4),7 arms,8 arms,4 arms" + ], + "lora_map": { + "share/Lora/muffet_v2.safetensors" : 1.0, + "share/Lora/add_detail.safetensors" : 1.0 + }, + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/test", + "save_input_image": true, + "resized_to_square": false, + "scale": 0.5, + "is_plus_face": true, + "is_plus": true + }, + "controlnet_map": { + "input_image_dir" : "controlnet_image/test", + "max_samples_on_vram": 200, + "max_models_on_vram" : 3, + "save_detectmap": true, + "preprocess_on_gpu": true, + "is_loop": true, + + "controlnet_tile":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "none", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ip2p":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart_anime":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_openpose":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_softedge":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "softedge_pidsafe", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_shuffle": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_depth": { + "enable": true, + "use_preprocessor":true, + 
"guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_canny": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_inpaint": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mlsd": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_normalbae": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_scribble": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_seg": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ref": { + "enable": false, + "ref_image": "ref_image/ref_sample.png", + "attention_auto_machine_weight": 0.3, + "gn_auto_machine_weight": 0.3, + "style_fidelity": 0.5, + "reference_attn": true, + "reference_adain": false, + "scale_pattern":[1.0] + } + }, + "upscale_config": { + "scheduler": "k_dpmpp_sde", + "steps": 20, + "strength": 0.5, + "guidance_scale": 10, + "controlnet_tile": { + "enable": true, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_line_anime": { + "enable": false, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ip2p": { + "enable": true, + "controlnet_conditioning_scale": 0.5, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ref": { + "enable": false, + "use_frame_as_ref_image": false, + "use_1st_frame_as_ref_image": true, + "ref_image": "ref_image/path_to_your_ref_img.jpg", + "attention_auto_machine_weight": 1.0, + "gn_auto_machine_weight": 1.0, + "style_fidelity": 0.25, + "reference_attn": true, + "reference_adain": false + } + }, + "output":{ + "preview_steps": [10], + "format" : "gif", + "fps" : 8, + "encode_param":{ + "crf": 10 + } + } +} diff --git a/config/prompts/region_sample.json b/config/prompts/region_sample.json new file mode 100644 index 0000000000000000000000000000000000000000..4095981be6531c6f722ebe620458045130301c66 --- /dev/null +++ b/config/prompts/region_sample.json @@ -0,0 +1,299 @@ +{ + "name": "sample", + "path": 
"share/Stable-diffusion/mistoonAnime_v20.safetensors", + "motion_module": "models/motion-module/mm_sd_v15_v2.ckpt", + "compile": false, + "seed": [ + 12345 + ], + "scheduler": "k_dpmpp_sde", + "steps": 20, + "guidance_scale": 10, + "unet_batch_size": 1, + "clip_skip": 2, + "prompt_fixed_ratio": 0.5, + "head_prompt": "(style of studio ghibli:1.2), (masterpiece, best quality)", + "prompt_map": { + "0": "forest, cute orange cat, outdoors," + }, + "tail_prompt": "", + "n_prompt": [ + "(worst quality:2), (bad quality:2), (normal quality:2), lowers, bad anatomy, bad hands, (multiple views)," + ], + "lora_map": { + "share/models/Lora/Ghibli_v6.safetensors": 1.0 + }, + "motion_lora_map": { + }, + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cat", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false, + "scale": 0.5, + "is_plus_face": false, + "is_plus": true, + "is_light": false + }, + "img2img_map":{ + "enable": true, + "init_img_dir" : "init_imgs/sample1", + "save_init_image": true, + "denoising_strength" : 0.7 + }, + "region_map" : { + "0":{ + "enable": true, + "mask_dir" : "mask/sample0", + "save_mask": true, + "is_init_img" : false, + "condition":{ + "prompt_fixed_ratio": 0.5, + "head_prompt": "(style of studio ghibli:1.2), (masterpiece, best quality)", + "prompt_map": { + "0": "house, cute dog, rain, street, outdoors" + }, + "tail_prompt": "", + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cyberpunk", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false + } + } + }, + "background":{ + "is_init_img" : false, + "hint" : "background's condition refers to the one in root" + } + }, + "controlnet_map": { + "input_image_dir" : "", + "max_samples_on_vram": 0, + "max_models_on_vram" : 1, + "save_detectmap": true, + "preprocess_on_gpu": true, + "is_loop": true, + + "controlnet_tile":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "none", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ip2p":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart_anime":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_openpose":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_softedge":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "softedge_pidsafe", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_shuffle": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_depth": { + 
"enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_canny": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_inpaint": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mlsd": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_normalbae": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_scribble": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_seg": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v1": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v2": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mediapipe_face": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ref": { + "enable": false, + "ref_image": "ref_image/ref_sample.png", + "attention_auto_machine_weight": 0.3, + "gn_auto_machine_weight": 0.3, + "style_fidelity": 0.5, + "reference_attn": true, + "reference_adain": false, + "scale_pattern":[1.0] + } + }, + "upscale_config": { + "scheduler": "k_dpmpp_sde", + "steps": 20, + "strength": 0.5, + "guidance_scale": 10, + "controlnet_tile": { + "enable": true, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_line_anime": { + "enable": false, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ip2p": { + "enable": false, + "controlnet_conditioning_scale": 0.5, + "guess_mode": false, + "control_guidance_start": 
0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ref": { + "enable": false, + "use_frame_as_ref_image": false, + "use_1st_frame_as_ref_image": false, + "ref_image": "ref_image/path_to_your_ref_img.jpg", + "attention_auto_machine_weight": 1.0, + "gn_auto_machine_weight": 1.0, + "style_fidelity": 0.25, + "reference_attn": true, + "reference_adain": false + } + }, + "output":{ + "format" : "mp4", + "fps" : 8, + "encode_param":{ + "crf": 10 + } + } +} diff --git a/config/prompts/region_sample2.json b/config/prompts/region_sample2.json new file mode 100644 index 0000000000000000000000000000000000000000..4d8d249d27749e6679d4523755e129772c2e2967 --- /dev/null +++ b/config/prompts/region_sample2.json @@ -0,0 +1,299 @@ +{ + "name": "sample", + "path": "share/Stable-diffusion/mistoonAnime_v20.safetensors", + "motion_module": "models/motion-module/mm_sd_v15_v2.ckpt", + "compile": false, + "seed": [ + 12345 + ], + "scheduler": "k_dpmpp_sde", + "steps": 20, + "guidance_scale": 10, + "unet_batch_size": 1, + "clip_skip": 2, + "prompt_fixed_ratio": 0.5, + "head_prompt": "(style of studio ghibli:1.2), (masterpiece, best quality)", + "prompt_map": { + "0": "forest, outdoors," + }, + "tail_prompt": "", + "n_prompt": [ + "(worst quality:2), (bad quality:2), (normal quality:2), lowers, bad anatomy, bad hands, (multiple views)," + ], + "lora_map": { + "share/models/Lora/Ghibli_v6.safetensors": 1.0 + }, + "motion_lora_map": { + }, + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cat", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false, + "scale": 0.5, + "is_plus_face": false, + "is_plus": true, + "is_light": false + }, + "img2img_map":{ + "enable": true, + "init_img_dir" : "init_imgs/sample1", + "save_init_image": true, + "denoising_strength" : 0.7 + }, + "region_map" : { + "0":{ + "enable": true, + "mask_dir" : "mask/sample1", + "save_mask": true, + "is_init_img" : false, + "condition":{ + "prompt_fixed_ratio": 0.5, + "head_prompt": "(masterpiece, best quality)", + "prompt_map": { + "0": "cyberpunk,robot cat, robot" + }, + "tail_prompt": "", + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cyberpunk", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false + } + } + }, + "background":{ + "is_init_img" : false, + "hint" : "background's condition refers to the one in root" + } + }, + "controlnet_map": { + "input_image_dir" : "", + "max_samples_on_vram": 0, + "max_models_on_vram" : 1, + "save_detectmap": true, + "preprocess_on_gpu": true, + "is_loop": true, + + "controlnet_tile":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "none", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ip2p":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart_anime":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_openpose":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + 
"control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_softedge":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "softedge_pidsafe", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_shuffle": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_depth": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_canny": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_inpaint": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mlsd": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_normalbae": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_scribble": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_seg": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v1": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v2": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mediapipe_face": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ref": { + "enable": false, + "ref_image": "ref_image/ref_sample.png", + "attention_auto_machine_weight": 0.3, + "gn_auto_machine_weight": 0.3, + 
"style_fidelity": 0.5, + "reference_attn": true, + "reference_adain": false, + "scale_pattern":[1.0] + } + }, + "upscale_config": { + "scheduler": "k_dpmpp_sde", + "steps": 20, + "strength": 0.5, + "guidance_scale": 10, + "controlnet_tile": { + "enable": true, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_line_anime": { + "enable": false, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ip2p": { + "enable": false, + "controlnet_conditioning_scale": 0.5, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ref": { + "enable": false, + "use_frame_as_ref_image": false, + "use_1st_frame_as_ref_image": false, + "ref_image": "ref_image/path_to_your_ref_img.jpg", + "attention_auto_machine_weight": 1.0, + "gn_auto_machine_weight": 1.0, + "style_fidelity": 0.25, + "reference_attn": true, + "reference_adain": false + } + }, + "output":{ + "format" : "mp4", + "fps" : 8, + "encode_param":{ + "crf": 10 + } + } +} diff --git a/config/prompts/region_sample3.json b/config/prompts/region_sample3.json new file mode 100644 index 0000000000000000000000000000000000000000..31e73d34d398ca6aae3c9644f2b921cd0c3fedf1 --- /dev/null +++ b/config/prompts/region_sample3.json @@ -0,0 +1,299 @@ +{ + "name": "sample", + "path": "share/Stable-diffusion/mistoonAnime_v20.safetensors", + "motion_module": "models/motion-module/mm_sd_v15_v2.ckpt", + "compile": false, + "seed": [ + 12345 + ], + "scheduler": "k_dpmpp_sde", + "steps": 20, + "guidance_scale": 10, + "unet_batch_size": 1, + "clip_skip": 2, + "prompt_fixed_ratio": 0.5, + "head_prompt": "(style of studio ghibli:1.2), (masterpiece, best quality)", + "prompt_map": { + "0": "forest, outdoors," + }, + "tail_prompt": "", + "n_prompt": [ + "(worst quality:2), (bad quality:2), (normal quality:2), lowers, bad anatomy, bad hands, (multiple views)," + ], + "lora_map": { + "share/models/Lora/Ghibli_v6.safetensors": 1.0 + }, + "motion_lora_map": { + }, + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cat", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false, + "scale": 0.5, + "is_plus_face": false, + "is_plus": true, + "is_light": false + }, + "img2img_map":{ + "enable": true, + "init_img_dir" : "init_imgs/sample1", + "save_init_image": true, + "denoising_strength" : 0.85 + }, + "region_map" : { + "0":{ + "enable": true, + "mask_dir" : "mask/sample1", + "save_mask": true, + "is_init_img" : false, + "condition":{ + "prompt_fixed_ratio": 0.5, + "head_prompt": "(masterpiece, best quality)", + "prompt_map": { + "0": "cyberpunk,robot cat, robot" + }, + "tail_prompt": "", + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cyberpunk", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false + } + } + }, + "background":{ + "is_init_img" : false, + "hint" : "background's condition refers to the one in root" + } + }, + "controlnet_map": { + "input_image_dir" : "controlnet_image/cat", + "max_samples_on_vram": 0, + "max_models_on_vram" : 1, + "save_detectmap": true, + "preprocess_on_gpu": true, + "is_loop": true, + + "controlnet_tile":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "none", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, 
+ "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ip2p":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale":0.5, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[] + }, + "controlnet_lineart_anime":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_openpose":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_softedge":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "softedge_pidsafe", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_shuffle": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_depth": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_canny": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_inpaint": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mlsd": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_normalbae": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 0.25, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[] + }, + "controlnet_scribble": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_seg": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v1": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + 
"control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v2": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mediapipe_face": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ref": { + "enable": false, + "ref_image": "ref_image/ref_sample.png", + "attention_auto_machine_weight": 0.3, + "gn_auto_machine_weight": 0.3, + "style_fidelity": 0.5, + "reference_attn": true, + "reference_adain": false, + "scale_pattern":[1.0] + } + }, + "upscale_config": { + "scheduler": "k_dpmpp_sde", + "steps": 20, + "strength": 0.5, + "guidance_scale": 10, + "controlnet_tile": { + "enable": true, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_line_anime": { + "enable": false, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ip2p": { + "enable": false, + "controlnet_conditioning_scale": 0.5, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ref": { + "enable": false, + "use_frame_as_ref_image": false, + "use_1st_frame_as_ref_image": false, + "ref_image": "ref_image/path_to_your_ref_img.jpg", + "attention_auto_machine_weight": 1.0, + "gn_auto_machine_weight": 1.0, + "style_fidelity": 0.25, + "reference_attn": true, + "reference_adain": false + } + }, + "output":{ + "format" : "mp4", + "fps" : 8, + "encode_param":{ + "crf": 10 + } + } +} diff --git a/config/prompts/region_txt2img.json b/config/prompts/region_txt2img.json new file mode 100644 index 0000000000000000000000000000000000000000..cbf738c6ae8111bb16ef166e93aca4548f812e15 --- /dev/null +++ b/config/prompts/region_txt2img.json @@ -0,0 +1,324 @@ +{ + "name": "sample", + "path": "share/Stable-diffusion/mistoonAnime_v20.safetensors", + "motion_module": "models/motion-module/mm_sd_v15_v2.ckpt", + "compile": false, + "seed": [ + -1 + ], + "scheduler": "k_dpmpp_sde", + "steps": 20, + "guidance_scale": 10, + "unet_batch_size": 1, + "clip_skip": 2, + "prompt_fixed_ratio": 0.5, + "head_prompt": "(style of studio ghibli:1.2), (masterpiece, best quality)", + "prompt_map": { + "0": "town, outdoors," + }, + "tail_prompt": "", + "n_prompt": [ + "(worst quality:2), (bad quality:2), (normal quality:2), lowers, bad anatomy, bad hands, (multiple views)," + ], + "lora_map": { + }, + "motion_lora_map": { + }, + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cyberpunk", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false, + "scale": 0.5, + "is_plus_face": false, + "is_plus": true, + "is_light": false + }, + "img2img_map":{ + "enable": false, + "init_img_dir" : "init_imgs/sample1", + "save_init_image": true, + "denoising_strength" : 0.7 + }, + "region_map" : { + "0":{ + "enable": true, + "crop_generation_rate": 0.1, + "mask_dir" : "mask/area0", + "save_mask": true, + "is_init_img" : false, + "condition":{ + "prompt_fixed_ratio": 1.0, + "head_prompt": "", + "prompt_map": { + "0": "((standing)),1girl, upper body,", + "8": "((smile)),1girl, upper body,", + "16": "(((arms_up))),1girl, upper body, 
", + "24": "(((waving ))),1girl, upper body" + }, + "tail_prompt": "(style of studio ghibli:1.2), (masterpiece, best quality)", + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/girl", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false + } + } + }, + "1":{ + "enable": true, + "crop_generation_rate": 0.1, + "mask_dir" : "mask/area1", + "save_mask": true, + "is_init_img" : false, + "condition":{ + "prompt_fixed_ratio": 0.5, + "head_prompt": "((car)),(style of studio ghibli:1.2), (masterpiece, best quality)", + "prompt_map": { + "0": "street, road,no human" + }, + "tail_prompt": "", + "ip_adapter_map": { + "enable": true, + "input_image_dir": "ip_adapter_image/cyberpunk", + "prompt_fixed_ratio": 0.5, + "save_input_image": true, + "resized_to_square": false + } + } + }, + "background":{ + "is_init_img" : false, + "hint" : "background's condition refers to the one in root" + } + }, + "controlnet_map": { + "input_image_dir" : "", + "max_samples_on_vram": 0, + "max_models_on_vram" : 1, + "save_detectmap": true, + "preprocess_on_gpu": true, + "is_loop": true, + + "controlnet_tile":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "none", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ip2p":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart_anime":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_openpose":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_softedge":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "softedge_pidsafe", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_shuffle": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_depth": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_canny": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_inpaint": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart": { + "enable": true, + "use_preprocessor":true, + 
"guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mlsd": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_normalbae": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_scribble": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_seg": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v1": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "qr_code_monster_v2": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_mediapipe_face": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ref": { + "enable": false, + "ref_image": "ref_image/ref_sample.png", + "attention_auto_machine_weight": 0.3, + "gn_auto_machine_weight": 0.3, + "style_fidelity": 0.5, + "reference_attn": true, + "reference_adain": false, + "scale_pattern":[1.0] + } + }, + "upscale_config": { + "scheduler": "k_dpmpp_sde", + "steps": 20, + "strength": 0.5, + "guidance_scale": 10, + "controlnet_tile": { + "enable": true, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_line_anime": { + "enable": false, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ip2p": { + "enable": false, + "controlnet_conditioning_scale": 0.5, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ref": { + "enable": false, + "use_frame_as_ref_image": false, + "use_1st_frame_as_ref_image": false, + "ref_image": "ref_image/path_to_your_ref_img.jpg", + "attention_auto_machine_weight": 1.0, + "gn_auto_machine_weight": 1.0, + "style_fidelity": 0.25, + "reference_attn": true, + "reference_adain": false + } + }, + "output":{ + "format" : "mp4", + "fps" : 8, + "encode_param":{ + "crf": 10 + } + } +} diff --git a/config/prompts/sample_lcm.json b/config/prompts/sample_lcm.json new file mode 100644 index 0000000000000000000000000000000000000000..af8c4d1b95f23279407cf52c628c475e95f58147 --- /dev/null +++ b/config/prompts/sample_lcm.json @@ -0,0 +1,298 @@ +{ + "name": "simple", + "path": 
"share/Stable-diffusion/mistoonAnime_v20.safetensors", + "motion_module": "models/motion-module/mm_sd_v15_v2.ckpt", + "lcm_map":{ + "enable":true, + "start_scale":0.15, + "end_scale":0.75, + "gradient_start":0.2, + "gradient_end":0.75 + }, + "compile": false, + "seed": [ + 123456 + ], + "scheduler": "euler_a", + "steps": 8, + "guidance_scale": 3, + "clip_skip": 2, + "prompt_fixed_ratio": 1.0, + "head_prompt": "1girl, wizard, circlet, earrings, jewelry, purple hair,", + "prompt_map": { + "0": "", + "8": "((fire magic spell, fire background))", + "16": "((ice magic spell, ice background))", + "24": "((thunder magic spell, thunder background))", + "32": "((skull magic spell, skull background))", + "40": "((wind magic spell, wind background))", + "48": "((stone magic spell, stone background))", + "56": "((holy magic spell, holy background))", + "64": "((star magic spell, star background))", + "72": "((plant magic spell, plant background))", + "80": "((meteor magic spell, meteor background))" + }, + "tail_prompt": "", + "n_prompt": [ + "(worst quality, low quality:1.4),nudity,border,text, patreon, easynegative, negative_hand-neg" + ], + "is_single_prompt_mode":false, + "lora_map": { + "share/Lora/add_detail.safetensors":1.0 + }, + "ip_adapter_map": { + "enable": false, + "input_image_dir": "ip_adapter_image/test", + "save_input_image": true, + "resized_to_square": false, + "scale": 0.5, + "is_plus_face": false, + "is_plus": true + }, + "img2img_map":{ + "enable": false, + "init_img_dir" : "init_imgs/test", + "save_init_image": true, + "denoising_strength" : 0.8 + }, + "region_map" : { + "0":{ + "enable":false, + "mask_dir" : "mask/r0", + "save_mask": true, + "is_init_img" : false, + "condition":{ + "prompt_fixed_ratio": 0.5, + "head_prompt": "1girl, wizard, circlet, earrings, jewelry, purple hair,", + "prompt_map": { + "0": "", + "8": "((fire magic spell, fire background))", + "16": "((ice magic spell, ice background))", + "24": "((thunder magic spell, thunder background))", + "32": "((skull magic spell, skull background))", + "40": "((wind magic spell, wind background))", + "48": "((stone magic spell, stone background))", + "56": "((holy magic spell, holy background))", + "64": "((star magic spell, star background))", + "72": "((plant magic spell, plant background))", + "80": "((meteor magic spell, meteor background))" + }, + "tail_prompt": "", + "ip_adapter_map": { + "enable": false, + "input_image_dir": "ip_adapter_image/test", + "save_input_image": true, + "resized_to_square": false + } + } + }, + "background":{ + "is_init_img" : false, + "hint" : "background's condition refers to the one in root" + } + }, + "controlnet_map": { + "input_image_dir" : "controlnet_image/test9999", + "max_samples_on_vram": 200, + "max_models_on_vram" : 3, + "save_detectmap": true, + "preprocess_on_gpu": true, + "is_loop": true, + + "controlnet_tile":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "none", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5] + }, + "controlnet_ip2p":{ + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart_anime":{ + "enable": true, + 
"use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_openpose":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "dwpose", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_softedge":{ + "enable": true, + "use_preprocessor":true, + "preprocessor":{ + "type" : "softedge_pidsafe", + "param":{ + } + }, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_shuffle": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_depth": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_canny": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_inpaint": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_lineart": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 0.5, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5] + }, + "controlnet_mlsd": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_normalbae": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_scribble": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_seg": { + "enable": true, + "use_preprocessor":true, + "guess_mode":false, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[0.5,0.4,0.3,0.2,0.1] + }, + "controlnet_ref": { + "enable": false, + "ref_image": "ref_image/naga.png", + "attention_auto_machine_weight": 0.5, + "gn_auto_machine_weight": 0.5, + "style_fidelity": 0.5, + "reference_attn": true, + "reference_adain": true, + "scale_pattern":[1.0] + } + }, + "upscale_config": { + "scheduler": "k_dpmpp_sde", + "steps": 20, + "strength": 0.5, + "guidance_scale": 10, + "controlnet_tile": { + "enable": true, + "controlnet_conditioning_scale": 1.0, + 
"guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_line_anime": { + "enable": false, + "controlnet_conditioning_scale": 1.0, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ip2p": { + "enable": false, + "controlnet_conditioning_scale": 0.5, + "guess_mode": false, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0 + }, + "controlnet_ref": { + "enable": false, + "use_frame_as_ref_image": false, + "use_1st_frame_as_ref_image": false, + "ref_image": "ref_image/path_to_your_ref_img.jpg", + "attention_auto_machine_weight": 1.0, + "gn_auto_machine_weight": 1.0, + "style_fidelity": 0.25, + "reference_attn": true, + "reference_adain": false + } + }, + "output":{ + "format" : "mp4", + "fps" : 8, + "encode_param":{ + "crf": 10 + } + } +} diff --git a/config/prompts/to_8fps_Frames.bat b/config/prompts/to_8fps_Frames.bat new file mode 100644 index 0000000000000000000000000000000000000000..570efb39c33b988b7a912102469b520beabe79a0 --- /dev/null +++ b/config/prompts/to_8fps_Frames.bat @@ -0,0 +1 @@ +ffmpeg -i %1 -start_number 0 -vf "scale=512:768,fps=8" %%04d.png \ No newline at end of file diff --git a/data/models/DWPose/dw-ll_ucoco_384.onnx b/data/models/DWPose/dw-ll_ucoco_384.onnx new file mode 100644 index 0000000000000000000000000000000000000000..df84ce34881c5701a29e09badd8c96f5c17bd214 --- /dev/null +++ b/data/models/DWPose/dw-ll_ucoco_384.onnx @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:724f4ff2439ed61afb86fb8a1951ec39c6220682803b4a8bd4f598cd913b1843 +size 134399116 diff --git a/data/models/DWPose/yolox_l.onnx b/data/models/DWPose/yolox_l.onnx new file mode 100644 index 0000000000000000000000000000000000000000..d6ff7914feb199e342967b877f8b2ea3179db915 --- /dev/null +++ b/data/models/DWPose/yolox_l.onnx @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7860ae79de6c89a3c1eb72ae9a2756c0ccfbe04b7791bb5880afabd97855a411 +size 216746733 diff --git a/data/models/README.md b/data/models/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b8ba997a3717007ebbf63705478cf7d8f8a18e2d --- /dev/null +++ b/data/models/README.md @@ -0,0 +1,4 @@ +## Folder that contains the weight + +Put the weights of the base model in the huggingface folder and that of the motion module in the motion module folder + diff --git a/data/models/WD14tagger/model.onnx b/data/models/WD14tagger/model.onnx new file mode 100644 index 0000000000000000000000000000000000000000..04e43d8b1e43c798db00b19ee67a934fe1e32c09 --- /dev/null +++ b/data/models/WD14tagger/model.onnx @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b8cef913be4c9e8d93f9f903e74271416502ce0b4b04df0ff1e2f00df488aa03 +size 326197340 diff --git a/data/models/WD14tagger/selected_tags.csv b/data/models/WD14tagger/selected_tags.csv new file mode 100644 index 0000000000000000000000000000000000000000..71796801c13109547bc017d40fc6f5b89bfd9cc0 --- /dev/null +++ b/data/models/WD14tagger/selected_tags.csv @@ -0,0 +1,9084 @@ +tag_id,name,category,count +9999999,general,9,807858 +9999998,sensitive,9,3771700 +9999997,questionable,9,769899 +9999996,explicit,9,560281 +470575,1girl,0,4225150 +212816,solo,0,3515897 +13197,long_hair,0,2982517 +8601,breasts,0,2323580 +469576,looking_at_viewer,0,2089971 +3389,blush,0,2040471 +1815,smile,0,1903619 +15080,short_hair,0,1568265 +11906,open_mouth,0,1565950 +16751,bangs,0,1516840 +10959,blue_eyes,0,1225129 
+566835,multiple_girls,0,1120328 +429,skirt,0,1100620 +87788,blonde_hair,0,1098200 +403247,large_breasts,0,1083979 +412368,simple_background,0,1074818 +16867,brown_hair,0,1072209 +12590,shirt,0,1001030 +13200,black_hair,0,981413 +380350,hair_ornament,0,939495 +8526,red_eyes,0,897316 +1882,thighhighs,0,890813 +5735,gloves,0,886283 +383159,long_sleeves,0,883900 +540830,1boy,0,881194 +2373,hat,0,879697 +515193,white_background,0,874291 +2241,dress,0,838290 +4563,bow,0,795194 +464575,ribbon,0,793922 +9294,navel,0,786293 +375387,holding,0,732899 +1821,2girls,0,729721 +6126,animal_ears,0,722334 +4607,cleavage,0,693321 +658573,hair_between_eyes,0,692014 +376054,bare_shoulders,0,656975 +1709,twintails,0,648384 +16578,brown_eyes,0,645874 +16613,jewelry,0,644654 +667868,medium_breasts,0,642525 +12289,sitting,0,630993 +417660,very_long_hair,0,622920 +572080,closed_mouth,0,618062 +464906,underwear,0,610036 +8889,nipples,0,591774 +16509,school_uniform,0,585679 +10960,green_eyes,0,584783 +10953,blue_hair,0,564360 +15675,standing,0,551783 +15654,purple_eyes,0,536623 +466499,collarbone,0,520875 +391,panties,0,506334 +3843,jacket,0,493731 +15674,tail,0,487490 +1681,monochrome,0,478584 +444,swimsuit,0,467619 +608813,full_body,0,463008 +465619,closed_eyes,0,455512 +464561,hair_ribbon,0,449452 +89189,yellow_eyes,0,447582 +376766,white_shirt,0,435867 +547463,upper_body,0,434670 +2355,ponytail,0,431021 +11449,weapon,0,430315 +11429,pink_hair,0,427100 +16442,purple_hair,0,426385 +8101,ass,0,423113 +4334,braid,0,417832 +464559,flower,0,411874 +63,comic,0,411524 +3522,ahoge,0,408654 +16581,white_hair,0,407226 +472154,short_sleeves,0,389311 +384553,:d,0,387533 +622137,hetero,0,384576 +374844,hair_bow,0,381335 +513837,greyscale,0,377967 +16580,grey_hair,0,375103 +1300281,male_focus,0,371503 +2750,heart,0,361897 +2363,pantyhose,0,356177 +484168,sidelocks,0,354421 +6539,bikini,0,349809 +3870,thighs,0,348316 +2365,nude,0,341086 +5403,red_hair,0,338682 +390728,multicolored_hair,0,336863 +660909,cowboy_shot,0,336374 +4569,sweat,0,334084 +383282,pleated_skirt,0,332323 +2376,hairband,0,329485 +13804,earrings,0,328755 +465265,small_breasts,0,325542 +5827,boots,0,320845 +13879,outdoors,0,320641 +301022,lying,0,312125 +4352,censored,0,310189 +194013,frills,0,305699 +664375,parted_lips,0,304757 +387884,detached_sleeves,0,297463 +461042,one_eye_closed,0,294831 +1575,food,0,294463 +1707,japanese_clothes,0,289163 +8388,green_hair,0,286703 +568656,multiple_boys,0,286561 +375669,open_clothes,0,286373 +2866,wings,0,284183 +384774,necktie,0,281254 +2785,horns,0,279479 +406,sky,0,279456 +4190,penis,0,276603 +8672,shoes,0,273266 +6532,glasses,0,264431 +3985,shorts,0,263445 +11826,barefoot,0,260159 +6054,teeth,0,259880 +4378,pussy,0,254400 +268819,serafuku,0,250901 +400314,sleeveless,0,247486 +931006,solo_focus,0,246572 +431446,alternate_costume,0,245996 +4025,choker,0,245304 +435324,day,0,243849 +10863,tongue,0,243841 +15522,pointy_ears,0,237263 +474820,black_gloves,0,236325 +1731,socks,0,235479 +2646,hairclip,0,224901 +10228,elbow_gloves,0,224390 +1793,fang,0,220644 +9843,midriff,0,217384 +2177,striped,0,215308 +581144,puffy_sleeves,0,214965 +2772,shiny,0,213040 +478849,collared_shirt,0,210998 +9864,belt,0,210075 +410129,looking_back,0,208023 +10926,pants,0,206605 +401228,sword,0,203029 +657955,artist_name,0,200583 +455880,black_thighhighs,0,199445 +464549,cloud,0,197453 +375171,indoors,0,197331 +1407561,virtual_youtuber,0,190790 +72,cat_ears,0,189097 +6010,tears,0,187990 +474821,white_gloves,0,187659 +539367,hand_up,0,184840 
+411263,signature,0,183546 +400123,hair_flower,0,183132 +71730,dark_skin,0,182935 +399836,3girls,0,181979 +16252,spread_legs,0,179394 +1842,cum,0,177804 +5948,hood,0,176872 +449194,2boys,0,176291 +2357,sex,0,176106 +502710,tongue_out,0,174092 +9168,miniskirt,0,173815 +394970,wide_sleeves,0,172751 +537684,blunt_bangs,0,172693 +401481,on_back,0,172347 +375020,fingerless_gloves,0,171771 +11628,bowtie,0,171509 +374628,black_skirt,0,171449 +426781,medium_hair,0,170863 +16750,pink_eyes,0,168674 +5576,armpits,0,168191 +389378,sailor_collar,0,166004 +445,kimono,0,163145 +632214,grey_background,0,159811 +1573,water,0,158702 +4068,necklace,0,156154 +375229,off_shoulder,0,153621 +467104,stomach,0,153138 +4333,bag,0,151509 +454933,hair_bun,0,151363 +1747,chibi,0,151305 +473983,clothes_lift,0,150801 +1254363,twitter_username,0,150476 +234192,from_behind,0,150284 +656624,star_(symbol),0,150164 +3198,scarf,0,149539 +6346,cape,0,149374 +102962,nail_polish,0,148880 +659098,black_footwear,0,148127 +670071,holding_weapon,0,147624 +3796,bra,0,147612 +14620,white_dress,0,147542 +87676,orange_hair,0,147142 +261,yuri,0,146254 +125238,sweatdrop,0,145506 +4659,armor,0,143723 +465583,rabbit_ears,0,143630 +4019,mole,0,143178 +374791,white_panties,0,142500 +399541,hair_over_one_eye,0,141370 +384884,grin,0,140525 +6107,blurry,0,140047 +393959,huge_breasts,0,138103 +641577,looking_at_another,0,137202 +14599,:o,0,137129 +4152,uniform,0,136675 +13199,black_eyes,0,136390 +273,apron,0,136126 +510962,character_name,0,135131 +6176,vest,0,133100 +39127,black_dress,0,132376 +682673,mosaic_censoring,0,131028 +475187,arm_up,0,130488 +10484,vaginal,0,130137 +470807,red_bow,0,129853 +9882,high_heels,0,129652 +524070,shiny_hair,0,128887 +52138,twin_braids,0,127920 +399827,arms_up,0,126645 +378032,flat_chest,0,126144 +381629,side_ponytail,0,126049 +4052,collar,0,125695 +11904,bracelet,0,125557 +2060,feet,0,125245 +515329,covered_nipples,0,124425 +511141,from_side,0,122971 +560958,dated,0,122085 +413564,two-tone_hair,0,121190 +89368,aqua_eyes,0,120894 +3477,sweater,0,120879 +440465,speech_bubble,0,120449 +375002,white_thighhighs,0,119709 +590165,english_text,0,118831 +6295,leotard,0,118770 +379615,open_jacket,0,116898 +1515536,official_alternate_costume,0,116756 +390401,red_ribbon,0,116423 +633894,dark-skinned_female,0,115383 +652604,two_side_up,0,114391 +464586,tree,0,113682 +464553,cup,0,113550 +490655,blue_sky,0,113376 +1931,sketch,0,112175 +684639,puffy_short_sleeves,0,111946 +5501,lips,0,111075 +428173,blue_skirt,0,110413 +10644,zettai_ryouiki,0,109096 +719985,streaked_hair,0,108228 +5569,coat,0,108099 +547860,black_jacket,0,107631 +395321,crop_top,0,107314 +754325,groin,0,107206 +427008,fingernails,0,106798 +4867,wet,0,105408 +1445905,v-shaped_eyebrows,0,104747 +389404,cat_tail,0,104720 +438623,neckerchief,0,103697 +95405,orange_eyes,0,103149 +1486996,animal_ear_fluff,0,102828 +431755,head_tilt,0,102674 +451371,see-through,0,102344 +391631,gradient,0,101686 +451155,hand_on_hip,0,100931 +589,gun,0,100672 +4311,legs,0,100492 +494251,one-piece_swimsuit,0,100304 +494744,shiny_skin,0,100275 +479939,sleeves_past_wrists,0,100013 +551772,parted_bangs,0,99655 +579466,looking_to_the_side,0,99629 +464572,pillow,0,99176 +412078,wrist_cuffs,0,98658 +89228,grey_eyes,0,98579 +10923,torn_clothes,0,98539 +464535,book,0,98180 +465277,plaid,0,97928 +457408,black_pantyhose,0,97171 +59,maid,0,97157 +537200,symbol-shaped_pupils,0,96639 +481846,hands_up,0,95746 +379475,sash,0,95553 +539584,fur_trim,0,95386 +3314,kneehighs,0,95270 
+463173,maid_headdress,0,95208 +7820,military,0,94582 +16718,black_panties,0,94548 +358,cosplay,0,94464 +475418,bare_arms,0,93864 +13132,petals,0,93775 +12552,pubic_hair,0,93487 +524961,black_shirt,0,93152 +2335,fox_ears,0,92753 +128,loli,0,92684 +531371,gradient_background,0,92533 +394174,short_shorts,0,92401 +449676,ascot,0,90602 +1594634,clothing_cutout,0,90293 +822149,completely_nude,0,90178 +375372,dutch_angle,0,89835 +416202,eyelashes,0,88539 +492223,bar_censor,0,88359 +1277433,mole_under_eye,0,88173 +1128907,pokemon_(creature),0,88046 +542846,no_humans,0,87715 +469714,bare_legs,0,87348 +2813,window,0,87096 +7577,open_shirt,0,86869 +464579,sparkle,0,86798 +8641,dress_shirt,0,86540 +10447,kneeling,0,86034 +407186,sleeveless_shirt,0,85905 +389813,single_braid,0,85125 +2687,v,0,84806 +1390441,black_headwear,0,84355 +382397,strapless,0,84204 +412555,4girls,0,84095 +6536,bell,0,83798 +5126,hug,0,83531 +390035,no_bra,0,83268 +7926,saliva,0,83232 +468554,double_bun,0,83167 +474500,black_ribbon,0,82872 +5032,uncensored,0,82813 +94007,aqua_hair,0,82429 +9344,bodysuit,0,81762 +2508,blood,0,80986 +2585,bed,0,80885 +202817,hoodie,0,80505 +387214,military_uniform,0,80184 +6028,sideboob,0,80142 +1247160,black_bow,0,79946 +1269639,covered_navel,0,79696 +464584,tattoo,0,79445 +391128,gradient_hair,0,79377 +444539,skindentation,0,79260 +467863,neck_ribbon,0,79084 +13853,pussy_juice,0,79048 +1736,profile,0,78791 +9312,makeup,0,78404 +443395,thigh_strap,0,78303 +427348,leaning_forward,0,78279 +513475,multiple_views,0,78169 +3918,4koma,0,77795 +411323,capelet,0,77781 +1797,mask,0,77477 +465719,muscular,0,76977 +2217,anus,0,76396 +1595,no_panties,0,76376 +10422,witch_hat,0,76061 +600177,detached_collar,0,75584 +28200,toes,0,75467 +5565,:3,0,75433 +675314,copyright_name,0,75245 +442167,alternate_hairstyle,0,74848 +626,underboob,0,74757 +3472,night,0,74665 +395400,buttons,0,74509 +565513,floating_hair,0,74507 +5832,fruit,0,74401 +568880,sleeveless_dress,0,74380 +460262,depth_of_field,0,74158 +553142,blurry_background,0,73931 +1393877,feet_out_of_frame,0,73883 +381555,headband,0,73755 +529256,fake_animal_ears,0,73059 +402217,^_^,0,72948 +377140,blue_dress,0,72944 +4075,cameltoe,0,72587 +465950,cum_in_pussy,0,72511 +51528,fox_tail,0,72338 +457597,swept_bangs,0,72173 +13176,shadow,0,71857 +374620,black_bikini,0,71747 +503552,red_skirt,0,71298 +420531,nose_blush,0,71248 +4528,bottomless,0,71247 +4320,glowing,0,71120 +1231230,side-tie_bikini_bottom,0,71041 +460802,rose,0,70153 +478565,bed_sheet,0,69319 +1094664,colored_skin,0,69055 +4009,turtleneck,0,68964 +464808,holding_hands,0,68738 +458933,facial_hair,0,68495 +464546,chain,0,68464 +442865,headgear,0,68404 +464534,bird,0,68263 +4108,pov,0,67888 +377844,siblings,0,67882 +2279,headphones,0,67786 +11325,ocean,0,67516 +567316,6+girls,0,67320 +479563,low_twintails,0,67147 +3449,heterochromia,0,67128 +394222,arm_support,0,66756 +10701,animal,0,66577 +546821,halterneck,0,66448 +374938,frown,0,66197 +4244,leaf,0,65930 +258190,beret,0,65845 +1345229,white_headwear,0,65590 +1915,umbrella,0,65482 +490999,on_bed,0,65478 +653206,one_side_up,0,65367 +144876,embarrassed,0,65057 +395448,thigh_boots,0,64869 +446950,fangs,0,64492 +1875867,upper_teeth_only,0,64490 +8714,watermark,0,64472 +444002,from_above,0,64438 +377124,back,0,64206 +436054,highleg,0,64134 +580545,blue_background,0,63957 +1328533,ass_visible_through_thighs,0,63880 +421663,wavy_hair,0,63724 +469652,garter_straps,0,63656 +1382794,black_choker,0,63389 +4010,halo,0,63200 +546609,blue_bow,0,62796 
+3988,scar,0,62791 +426936,white_bikini,0,62532 +447393,on_side,0,62264 +418899,plaid_skirt,0,62245 +2904,chair,0,61608 +484666,transparent_background,0,61514 +541727,wariza,0,61484 +481508,facial_mark,0,61342 +10231,mouth_hold,0,61142 +452032,looking_away,0,61139 +400120,traditional_media,0,61046 +2799,beach,0,61036 +2091,bandages,0,61017 +1723,parody,0,61010 +663669,female_pubic_hair,0,61006 +419938,expressionless,0,60732 +1303252,brown_footwear,0,60667 +499624,blush_stickers,0,60546 +8830,shirt_lift,0,60141 +419309,thick_thighs,0,60024 +463179,no_shoes,0,59911 +698161,holding_sword,0,59858 +600250,hair_tubes,0,59809 +375404,chinese_clothes,0,59795 +8091,drill_hair,0,59557 +572767,grabbing,0,59427 +475775,arms_behind_back,0,59272 +375476,soles,0,59215 +3920,obi,0,58917 +572731,heart-shaped_pupils,0,58894 +4207,eating,0,58688 +467856,clothes_pull,0,58462 +415974,looking_down,0,58352 +2993,phone,0,58200 +612641,black_shorts,0,57955 +403649,thigh_gap,0,57892 +514890,black_pants,0,57867 +436576,short_dress,0,57816 +3593,topless,0,57655 +4831,piercing,0,57372 +2770,pantyshot,0,57314 +534835,hair_intakes,0,57252 +2270,eyepatch,0,57130 +511136,border,0,56981 +8831,skirt_lift,0,56906 +476621,floral_print,0,56816 +460404,stuffed_toy,0,56698 +466226,bound,0,56676 +2689,formal,0,56558 +709734,playboy_bunny,0,56534 +647551,flying_sweatdrops,0,56366 +375176,crossed_arms,0,56301 +574407,wavy_mouth,0,56126 +425206,magical_girl,0,56021 +3234,erection,0,55814 +387991,abs,0,55656 +2726,moon,0,55466 +688777,half-closed_eyes,0,55390 +413672,leg_up,0,55163 +420366,from_below,0,54898 +389777,red_dress,0,54821 +477028,cleavage_cutout,0,54599 +3986,sandals,0,54528 +3209,table,0,54497 +13227,happy,0,54420 +356542,sunlight,0,54218 +1850,oral,0,54046 +1976,cover,0,53985 +465810,squatting,0,53344 +1801537,single_hair_bun,0,53328 +2716,cat,0,53145 +419387,testicles,0,53002 +584749,pink_background,0,52864 +5524,sunglasses,0,52501 +488167,scrunchie,0,52459 +1288957,white_footwear,0,52395 +563478,dark-skinned_male,0,52274 +547073,underwear_only,0,52043 +461293,cum_on_body,0,51426 +413179,trembling,0,51424 +445388,bob_cut,0,51373 +3949,ring,0,51372 +9354,bdsm,0,51131 +221,school_swimsuit,0,51107 +582201,mob_cap,0,50771 +385639,wolf_ears,0,50613 +6059,blazer,0,50589 +468534,light_brown_hair,0,50353 +580232,white_jacket,0,50297 +459291,standing_on_one_leg,0,50267 +3875,sleeping,0,50236 +450107,thick_eyebrows,0,50035 +1556,backpack,0,49896 +520398,white_skirt,0,49388 +8068,demon_girl,0,49382 +492544,frilled_dress,0,49309 +643561,eyes_visible_through_hair,0,49304 +15399,breast_grab,0,49237 +6128,cardigan,0,49158 +431235,knee_boots,0,48944 +7952,suspenders,0,48876 +464588,hat_ribbon,0,48794 +465525,crossed_legs,0,48759 +319,lingerie,0,48692 +379915,stuffed_animal,0,48687 +5831,katana,0,48654 +589398,hood_down,0,48528 +466990,;d,0,48266 +487156,3boys,0,48154 +15737,bat_wings,0,48120 +391680,horse_ears,0,48063 +3649,helmet,0,47863 +487205,cloudy_sky,0,47782 +3239,cellphone,0,47781 +464903,crying,0,47537 +9114,antenna_hair,0,47482 +900563,own_hands_together,0,47389 +7455,tank_top,0,47356 +10833,bottle,0,47321 +10229,suit,0,47203 +6364,grass,0,47129 +516477,outstretched_arms,0,47051 +4526,cross,0,46975 +464539,bug,0,46589 +609507,holding_food,0,46523 +4474,fire,0,46465 +419429,frilled_skirt,0,46415 +429999,tiara,0,46411 +1836990,aged_down,0,46406 +464574,polka_dot,0,46378 +14452,feathers,0,46364 +510254,breasts_out,0,46228 +1936,crossover,0,46213 +4188,crown,0,46184 +539837,high_ponytail,0,46117 +375459,looking_up,0,46055 
+580738,black_hairband,0,46027 +9714,bent_over,0,46026 +3592,undressing,0,45997 +397327,blue_shirt,0,45944 +701697,white_bow,0,45887 +421662,5girls,0,45886 +462569,straddling,0,45739 +575982,light_smile,0,45730 +464565,knife,0,45555 +634316,pectorals,0,45480 +1337464,x_hair_ornament,0,45361 +464573,plant,0,45164 +14814,couple,0,45140 +219401,denim,0,45131 +432696,on_stomach,0,44802 +487562,wing_collar,0,44774 +16700,>_<,0,44725 +3846,robot,0,44517 +576310,white_flower,0,44392 +452195,hair_bobbles,0,44248 +8709,fellatio,0,44136 +517832,outstretched_arm,0,43838 +404507,sharp_teeth,0,43781 +498000,blue_ribbon,0,43753 +4850,lipstick,0,43621 +4232,tan,0,43535 +381279,girl_on_top,0,43530 +465046,cat_girl,0,43463 +458482,short_twintails,0,43394 +1297467,lifted_by_self,0,43380 +537,bondage,0,43370 +397117,curtains,0,43288 +463397,white_socks,0,43090 +526340,letterboxed,0,43050 +423613,animal_print,0,42743 +1451588,muscular_male,0,42629 +413908,spiked_hair,0,42548 +403577,pointing,0,42540 +463115,pink_bow,0,42424 +611487,juliet_sleeves,0,42359 +395015,monster_girl,0,42318 +645320,sex_from_behind,0,42283 +521420,slit_pupils,0,42116 +2328,polearm,0,41911 +374979,all_fours,0,41887 +619736,blue_jacket,0,41861 +10707,sisters,0,41457 +534982,^^^,0,41381 +507741,frilled_sleeves,0,41351 +656169,hand_on_own_chest,0,41116 +492682,red_necktie,0,40987 +1388799,blue_sailor_collar,0,40975 +410004,crescent,0,40913 +82326,?,0,40842 +4027,staff,0,40813 +569748,black_background,0,40690 +401137,clenched_teeth,0,40516 +7558,panty_pull,0,40452 +405345,cherry_blossoms,0,40306 +464713,head_wings,0,39969 +820510,horse_girl,0,39913 +12464,brooch,0,39793 +2849,goggles,0,39780 +470790,demon_horns,0,39763 +1710,towel,0,39695 +11879,blouse,0,39694 +584911,shaded_face,0,39622 +589376,red_flower,0,39599 +515302,green_skirt,0,39506 +655303,fox_girl,0,39434 +1369969,ground_vehicle,0,39280 +447919,cover_page,0,39211 +471601,black_bra,0,39210 +302,elf,0,39186 +358992,bike_shorts,0,39177 +1019196,otoko_no_ko,0,39156 +4123,wind,0,39127 +2362,casual,0,39098 +8072,black_socks,0,39083 +401601,loafers,0,39011 +379387,t-shirt,0,38864 +489235,motion_lines,0,38824 +561547,shoulder_armor,0,38616 +403060,gauntlets,0,38611 +383830,no_pants,0,38528 +464540,building,0,38491 +93927,pink_panties,0,38441 +401836,messy_hair,0,38068 +415668,single_thighhigh,0,37934 +471498,multiple_tails,0,37915 +3046,kiss,0,37833 +466654,wristband,0,37663 +389882,group_sex,0,37616 +314230,breast_press,0,37538 +389705,between_breasts,0,37481 +465870,surprised,0,37404 +4543,striped_panties,0,37300 +524399,hat_bow,0,37085 +479729,gem,0,37068 +464542,butterfly,0,36971 +683385,red_footwear,0,36919 +390594,red_shirt,0,36911 +395533,sheath,0,36903 +374782,sneakers,0,36836 +469701,rabbit_tail,0,36820 +516211,tassel,0,36640 +464564,instrument,0,36574 +5746,box,0,36505 +521712,ear_piercing,0,36426 +448007,drooling,0,36412 +103483,fishnets,0,36371 +1227044,ribbon_trim,0,36289 +575322,clenched_hand,0,36272 +405311,sex_toy,0,36243 +1281370,red_bowtie,0,36203 +385882,third_eye,0,36102 +628293,skirt_set,0,36091 +12667,child,0,36073 +3540,hakama,0,36071 +446647,pale_skin,0,35749 +3474,portrait,0,35713 +464568,musical_note,0,35511 +427968,revealing_clothes,0,35499 +4241,rope,0,35471 +594766,star_(sky),0,35465 +376491,wet_clothes,0,35421 +4275,steam,0,35400 +2252,candy,0,35362 +166531,pink_dress,0,35227 +376002,genderswap,0,35168 +2119,facial,0,35125 +380831,demon_tail,0,35104 +368,dog_ears,0,35085 +3417,anal,0,35064 +390662,foreshortening,0,35051 +1008243,holding_gun,0,34916 
+5648,nature,0,34895 +376043,covering,0,34886 +566918,adapted_costume,0,34783 +6526,side-tie_panties,0,34714 +515769,black_nails,0,34714 +166133,night_sky,0,34326 +404,christmas,0,34266 +10847,breath,0,34088 +4555,ejaculation,0,34061 +15672,veil,0,34028 +395223,scenery,0,34026 +378850,armband,0,34010 +448202,peaked_cap,0,34001 +533054,waist_apron,0,33989 +556011,lace_trim,0,33903 +421107,convenient_censoring,0,33709 +631529,white_apron,0,33648 +5855,couch,0,33563 +468477,arms_behind_head,0,33416 +465152,china_dress,0,33413 +1865,bandaid,0,33315 +704500,holding_cup,0,33262 +1314596,black_leotard,0,33183 +601824,male_pubic_hair,0,32925 +524897,interlocked_fingers,0,32856 +1302826,mole_under_mouth,0,32799 +4000,microphone,0,32717 +417741,bridal_gauntlets,0,32598 +375764,bara,0,32520 +538298,strapless_dress,0,32510 +448225,tokin_hat,0,32508 +1568,yaoi,0,32378 +478557,straight_hair,0,32366 +456255,front-tie_top,0,32272 +426598,bow_panties,0,32233 +2755,lace,0,32198 +561,mecha,0,32195 +531068,hakama_skirt,0,32173 +723773,hand_fan,0,32170 +477288,white_ribbon,0,32158 +389108,glowing_eyes,0,32156 +566461,anger_vein,0,32053 +466984,...,0,32033 +659059,breasts_apart,0,31889 +664258,no_headwear,0,31856 +602295,hair_over_shoulder,0,31780 +670638,clothes_writing,0,31699 +469125,jingle_bell,0,31677 +146061,baseball_cap,0,31672 +593298,yellow_background,0,31665 +1288118,hair_flaps,0,31467 +457883,string_bikini,0,31418 +674623,feathered_wings,0,31394 +428523,hooded_jacket,0,31351 +400041,cum_on_breasts,0,31297 +1411175,bikini_top_only,0,31290 +1416353,red_headwear,0,31191 +383851,twin_drills,0,31075 +665452,facing_viewer,0,31063 +389553,skin_tight,0,31044 +486674,multiple_penises,0,31006 +1441885,semi-rimless_eyewear,0,30943 +401968,red_nails,0,30789 +1275600,bright_pupils,0,30787 +647129,black_necktie,0,30692 +643253,web_address,0,30676 +376528,:<,0,30640 +3508,angry,0,30615 +611670,grey_shirt,0,30549 +320292,cloak,0,30520 +1441877,eyewear_on_head,0,30414 +554980,motor_vehicle,0,30386 +593296,red_background,0,30382 +11410,claws,0,30300 +389814,side_braid,0,30190 +407678,wolf_tail,0,30179 +1316316,pelvic_curtain,0,30158 +635112,light_particles,0,30146 +1593554,light_purple_hair,0,30050 +670636,multicolored_clothes,0,30015 +410734,carrying,0,30007 +392990,micro_bikini,0,29907 +467811,knees_up,0,29889 +645083,smartphone,0,29850 +4172,corset,0,29825 +2329,tentacles,0,29635 +635786,index_finger_raised,0,29578 +648130,clothing_aside,0,29578 +390596,purple_dress,0,29414 +547132,extra_ears,0,29398 +382270,rifle,0,29351 +15764,striped_thighhighs,0,29349 +668761,white_border,0,29331 +374845,mary_janes,0,29321 +15260,beard,0,29165 +581,paizuri,0,29093 +392810,vertical_stripes,0,29091 +488864,red_jacket,0,29047 +15689,:p,0,29013 +1388801,red_neckerchief,0,28937 +717927,short_hair_with_long_locks,0,28919 +566116,scar_on_face,0,28750 +462808,tareme,0,28700 +1452299,neck_bell,0,28677 +9474,licking,0,28656 +15749,furry,0,28624 +549225,single_horn,0,28498 +77257,strap_slip,0,28432 +429369,finger_to_mouth,0,28430 +668635,pom_pom_(clothes),0,28411 +2102,snow,0,28403 +510802,french_braid,0,28301 +10902,close-up,0,28288 +149598,androgynous,0,28230 +1463605,1other,0,28215 +533356,areola_slip,0,28137 +3284,forehead,0,28104 +375723,puffy_nipples,0,28072 +448477,buckle,0,28057 +458223,horse_tail,0,28024 +685432,two-tone_background,0,27949 +422340,full_moon,0,27938 +390257,eye_contact,0,27886 +819964,pink_flower,0,27858 +4536,tsurime,0,27654 +563256,yellow_bow,0,27619 +4530,gift,0,27609 +396969,seiza,0,27454 
+389402,upskirt,0,27366 +491144,blue_bikini,0,27350 +470798,pink_nails,0,27118 +455456,santa_hat,0,27080 +1364406,genderswap_(mtf),0,27054 +11813,lens_flare,0,27008 +1501540,skin_fang,0,27006 +376594,spikes,0,26942 +375519,armlet,0,26928 +656165,hand_on_own_face,0,26903 +2907,desk,0,26794 +462583,between_legs,0,26647 +486611,brown_gloves,0,26624 +379489,side_slit,0,26588 +467493,handgun,0,26485 +2447,camisole,0,26462 +384552,wading,0,26457 +486327,faceless,0,26424 +586765,low_ponytail,0,26408 +397411,restrained,0,26372 +10905,pendant,0,26314 +394151,plate,0,26272 +379725,dual_persona,0,26249 +577,masturbation,0,26248 +467585,highleg_leotard,0,26243 +584958,spoken_heart,0,26233 +378743,curvy,0,26139 +615165,green_bow,0,26102 +529213,maid_apron,0,26053 +393879,alcohol,0,26052 +418395,after_sex,0,26042 +616524,grey_skirt,0,26031 +1656,handjob,0,25938 +491758,sleeves_rolled_up,0,25936 +435433,red_gloves,0,25869 +658106,o-ring,0,25822 +435555,heavy_breathing,0,25815 +1585391,abyssal_ship,0,25726 +14946,eyeshadow,0,25687 +665184,ribbed_sweater,0,25673 +459933,drinking_glass,0,25619 +571690,hair_scrunchie,0,25536 +491112,cowgirl_position,0,25531 +535373,cross-laced_footwear,0,25481 +1491014,blue_headwear,0,25429 +2863,broom,0,25426 +5474,ball,0,25393 +684620,puffy_long_sleeves,0,25158 +1455296,sleeves_past_fingers,0,25090 +438458,clenched_hands,0,25038 +673911,hood_up,0,25014 +626528,cropped_legs,0,25012 +406964,floating,0,24989 +376117,wide_hips,0,24969 +4261,forest,0,24911 +689532,low-tied_long_hair,0,24901 +382111,breast_hold,0,24875 +3943,smoke,0,24872 +9311,zipper,0,24870 +375441,dress_lift,0,24870 +392024,tray,0,24870 +396680,personification,0,24869 +662952,headwear_removed,0,24860 +545992,high_heel_boots,0,24765 +643274,partially_submerged,0,24661 +390681,headset,0,24628 +4596,halloween,0,24593 +10279,hair_rings,0,24560 +380540,legs_up,0,24503 +521477,half_updo,0,24493 +469517,doujin_cover,0,24453 +520397,pink_skirt,0,24451 +523327,starry_sky,0,24440 +536573,colored_sclera,0,24438 +470314,pencil_skirt,0,24405 +1295582,strapless_leotard,0,24362 +479374,single_glove,0,24336 +471755,machinery,0,24312 +374915,clothed_sex,0,24306 +497607,blue_nails,0,24306 +497007,backlighting,0,24291 +15261,freckles,0,24267 +671227,tearing_up,0,24176 +11527,reflection,0,24165 +465145,tanlines,0,24162 +609,fish,0,24161 +432529,sweater_vest,0,24147 +658950,holding_book,0,24143 +511594,arm_behind_back,0,24075 +549356,arm_at_side,0,24024 +453340,santa_costume,0,24020 +1629722,large_pectorals,0,23996 +494669,spot_color,0,23925 +8243,flying,0,23894 +463127,white_bra,0,23883 +615735,asymmetrical_legwear,0,23876 +646879,brown_background,0,23852 +592555,panties_under_pantyhose,0,23749 +634781,nontraditional_miko,0,23719 +460324,red_bikini,0,23696 +149799,happy_birthday,0,23664 +491367,cropped_jacket,0,23653 +494241,long_fingernails,0,23613 +402062,!,0,23577 +531403,kemonomimi_mode,0,23538 +479146,sailor_dress,0,23441 +651957,clothed_female_nude_male,0,23322 +14442,walking,0,23236 +1851,fingering,0,23218 +574271,science_fiction,0,23187 +1651,rain,0,23181 +403173,white_pantyhose,0,23165 +7581,garter_belt,0,23110 +483898,frilled_bikini,0,23102 +460502,dual_wielding,0,23083 +572753,6+boys,0,23077 +520991,pink_ribbon,0,23054 +6175,cuffs,0,23036 +1373022,red-framed_eyewear,0,23024 +664922,dragon_horns,0,23024 +427674,epaulettes,0,23020 +376923,black_wings,0,23007 +464538,bubble,0,22982 +388067,demon_wings,0,22937 +4925,thong,0,22871 +617615,legs_apart,0,22854 +4648,teacup,0,22847 +1569,condom,0,22825 +442816,veins,0,22823 
+465382,crossdressing,0,22797 +1258089,ribbon-trimmed_sleeves,0,22795 +1251593,holding_phone,0,22789 +7450,gym_uniform,0,22731 +475720,short_ponytail,0,22714 +529447,arm_behind_head,0,22709 +1916,cake,0,22701 +602223,out_of_frame,0,22689 +2169,innertube,0,22668 +583857,oni_horns,0,22662 +669624,contrapposto,0,22656 +10376,naughty_face,0,22568 +593297,green_background,0,22526 +633528,alternate_breast_size,0,22467 +643898,purple_background,0,22460 +1373029,black-framed_eyewear,0,22424 +1810,rape,0,22416 +6441,beads,0,22362 +654772,knee_up,0,22316 +610698,hat_ornament,0,22311 +1636487,one-hour_drawing_challenge,0,22279 +570718,fur_collar,0,22266 +617356,blue_shorts,0,22246 +713987,outside_border,0,22190 +728008,thighband_pantyhose,0,22190 +8198,meme,0,22166 +10402,bowl,0,22146 +2056,toenails,0,22138 +3354,cumdrip,0,22082 +1242758,blue_flower,0,22045 +416676,denim_shorts,0,22042 +394305,curly_hair,0,22035 +457726,track_jacket,0,22026 +1388800,black_sailor_collar,0,21962 +1388933,light_blush,0,21928 +374849,school_bag,0,21924 +508016,pocket,0,21923 +10543,spread_pussy,0,21902 +403286,toned,0,21876 +514515,pink_shirt,0,21867 +5875,doggystyle,0,21840 +688711,white_sleeves,0,21801 +412202,:q,0,21781 +1278062,hand_in_own_hair,0,21700 +1292999,spoken_ellipsis,0,21670 +375986,empty_eyes,0,21646 +610272,purple_skirt,0,21645 +572906,crying_with_eyes_open,0,21630 +583299,goggles_on_head,0,21629 +166757,green_dress,0,21584 +479955,4boys,0,21539 +168390,bulge,0,21519 +466881,sun_hat,0,21508 +377078,cum_in_mouth,0,21505 +452086,lolita_fashion,0,21497 +427050,shiny_clothes,0,21494 +9872,pauldrons,0,21475 +397215,outline,0,21438 +2378,buruma,0,21384 +670088,hand_on_another's_head,0,21384 +389,futanari,0,21376 +1592077,topless_male,0,21362 +1441886,under-rim_eyewear,0,21290 +579793,frilled_apron,0,21156 +560247,white_pupils,0,21124 +464578,skull,0,21119 +492380,jitome,0,21103 +684644,gold_trim,0,21037 +398273,long_legs,0,21023 +2489,sunset,0,21004 +464566,monster,0,20950 +1258262,frilled_shirt_collar,0,20942 +559163,emphasis_lines,0,20888 +426491,hands_on_hips,0,20853 +1119509,high-waist_skirt,0,20812 +7585,new_year,0,20801 +106450,shield,0,20770 +580379,aged_up,0,20754 +1670660,animal_hands,0,20736 +554098,mole_on_breast,0,20679 +4145,spear,0,20661 +538475,asymmetrical_hair,0,20605 +472916,female_masturbation,0,20559 +534168,v_arms,0,20548 +483919,single_earring,0,20521 +6439,running,0,20477 +4039,dog,0,20400 +384087,angel_wings,0,20387 +391713,long_skirt,0,20326 +1874884,breasts_squeezed_together,0,20292 +442474,competition_swimsuit,0,20256 +11882,watch,0,20197 +380572,dog_tail,0,20182 +546229,black_belt,0,20142 +1233478,black_serafuku,0,20119 +10031,faceless_male,0,20106 +652293,legs_together,0,20102 +10538,ice,0,20076 +391297,white_skin,0,20066 +1293269,blue_footwear,0,20063 +434996,o_o,0,20055 +454379,=_=,0,20055 +18013,ass_grab,0,20045 +509884,impossible_clothes,0,19994 +1247162,purple_bow,0,19993 +493843,head_rest,0,19941 +494245,red_scarf,0,19940 +9831,teddy_bear,0,19932 +399655,striped_bikini,0,19911 +537668,poke_ball,0,19839 +613885,brown_skirt,0,19829 +442316,pouch,0,19819 +1559,minigirl,0,19805 +452445,+_+,0,19786 +1396724,white_sailor_collar,0,19749 +1515358,blue_theme,0,19669 +16554,plump,0,19626 +2467,ghost,0,19612 +2336,cigarette,0,19600 +538012,hat_removed,0,19598 +394759,hand_in_pocket,0,19584 +474995,brown_pantyhose,0,19528 +520850,bespectacled,0,19521 +494842,braided_ponytail,0,19474 +417888,age_difference,0,19463 +690177,tress_ribbon,0,19460 +648056,paw_pose,0,19458 
+375606,open_coat,0,19416 +465871,rabbit,0,19393 +8565,sarashi,0,19391 +494869,black_coat,0,19379 +658682,knees_together_feet_apart,0,19360 +15571,shawl,0,19349 +1255562,folded_ponytail,0,19273 +553367,turret,0,19264 +1271922,black_vest,0,19257 +1835738,blue_one-piece_swimsuit,0,19186 +2268,popsicle,0,19177 +462546,hand_between_legs,0,19129 +15916,hair_down,0,19087 +483081,high_collar,0,19049 +578153,drinking_straw,0,19046 +502548,yellow_shirt,0,18994 +477354,sheathed,0,18974 +375883,dragon_girl,0,18958 +399,yukata,0,18933 +620577,brown_jacket,0,18918 +1811,twins,0,18785 +382969,bow_(weapon),0,18780 +15200,robe,0,18714 +3661,oni,0,18708 +462209,thought_bubble,0,18690 +451370,upside-down,0,18665 +2967,jeans,0,18614 +417866,serious,0,18609 +401289,yellow_ribbon,0,18572 +466335,object_insertion,0,18543 +417883,striped_shirt,0,18515 +506510,military_hat,0,18514 +464562,injury,0,18443 +405124,smirk,0,18439 +398889,circlet,0,18423 +600222,light_rays,0,18405 +3603,lollipop,0,18324 +16721,blue_panties,0,18317 +11285,@_@,0,18293 +473529,;),0,18288 +452112,tabard,0,18277 +380289,missionary,0,18275 +1303583,blue_necktie,0,18271 +534968,mini_hat,0,18270 +717047,page_number,0,18270 +1386163,patreon_username,0,18260 +394368,bouncing_breasts,0,18250 +446472,red_rose,0,18244 +1814875,eyebrows_hidden_by_hair,0,18221 +467008,hoop_earrings,0,18199 +400,pajamas,0,18122 +572346,wide-eyed,0,18110 +1822,threesome,0,18100 +1611666,dakimakura_(medium),0,18057 +467285,crystal,0,18054 +4033,lantern,0,18052 +1369802,blurry_foreground,0,18022 +1314750,white_hairband,0,17984 +427398,butt_crack,0,17982 +458932,sideburns,0,17937 +1517199,mature_female,0,17906 +712723,striped_bow,0,17905 +1045681,white_shorts,0,17904 +493026,tentacle_hair,0,17900 +392585,contemporary,0,17864 +374921,sports_bra,0,17840 +5705,ofuda,0,17806 +561624,water_drop,0,17796 +1320204,black_bowtie,0,17724 +15770,top_hat,0,17717 +379945,hairpin,0,17711 +475744,breastplate,0,17707 +1256698,partially_fingerless_gloves,0,17688 +502047,blood_on_face,0,17681 +423620,:t,0,17653 +472727,wooden_floor,0,17584 +538768,height_difference,0,17567 +11135,rock,0,17531 +508272,belt_buckle,0,17522 +387213,handbag,0,17511 +448882,|_|,0,17504 +447403,jumping,0,17482 +461117,snowing,0,17469 +544306,colored_eyelashes,0,17455 +3478,mirror,0,17445 +252271,mug,0,17428 +1283885,crossed_bangs,0,17403 +4357,chocolate,0,17384 +513807,hug_from_behind,0,17364 +872616,white_kimono,0,17362 +461492,green_jacket,0,17340 +456193,outstretched_hand,0,17340 +677856,star_hair_ornament,0,17329 +464536,bouquet,0,17310 +4909,chopsticks,0,17307 +381092,paper,0,17304 +527682,object_hug,0,17298 +394992,shoulder_bag,0,17284 +610013,pink_lips,0,17263 +6207,sportswear,0,17239 +7532,stairs,0,17184 +643286,full-face_blush,0,17181 +1349206,cropped_torso,0,17141 +1532466,colored_inner_hair,0,17112 +1648,oekaki,0,17056 +667171,holding_umbrella,0,17024 +662799,animal_hood,0,17005 +460907,one_eye_covered,0,17000 +464567,mountain,0,16918 +507245,glint,0,16875 +409364,covering_breasts,0,16833 +469668,raised_eyebrows,0,16829 +8807,city,0,16812 +1631677,mature_male,0,16767 +1671,underwater,0,16731 +2074,vibrator,0,16722 +2312,valentine,0,16712 +9434,wedding_dress,0,16709 +456688,multicolored_eyes,0,16704 +548722,enmaided,0,16697 +553015,open_book,0,16653 +498828,adjusting_clothes,0,16652 +1405752,meme_attire,0,16650 +1582,miko,0,16636 +594859,backless_outfit,0,16613 +379632,panties_aside,0,16594 +701042,bandaged_arm,0,16573 +472407,turtleneck_sweater,0,16443 +16609,bloomers,0,16435 +486,doll,0,16425 
+411759,cum_on_hair,0,16414 +7426,sign,0,16400 +458728,waving,0,16390 +1249693,borrowed_character,0,16369 +7783,pout,0,16369 +5953,snake,0,16363 +478640,hair_bell,0,16356 +7486,fighting_stance,0,16351 +11858,forehead_mark,0,16335 +397690,motion_blur,0,16322 +376102,realistic,0,16156 +1226391,yellow_flower,0,16135 +8705,ice_cream,0,16133 +464582,strawberry,0,16122 +442898,skirt_hold,0,16109 +379820,aircraft,0,16077 +472197,white_pants,0,16042 +1243463,orange_bow,0,16034 +560,card,0,16015 +617355,grey_jacket,0,16004 +2809,spoon,0,15980 +433182,!?,0,15963 +499629,headpiece,0,15949 +397574,panties_around_one_leg,0,15937 +2205,mouse_ears,0,15932 +428330,brother_and_sister,0,15870 +1802,leash,0,15849 +456585,pink_bra,0,15847 +401340,nose,0,15816 +375144,nurse_cap,0,15808 +456370,hands_in_pockets,0,15808 +570942,front-tie_bikini_top,0,15724 +471090,hair_tie,0,15710 +1260354,text_focus,0,15704 +8418,fence,0,15687 +687736,short_eyebrows,0,15675 +1339995,green_headwear,0,15662 +13207,bandana,0,15650 +643257,white_leotard,0,15646 +667463,visor_cap,0,15637 +2319,fantasy,0,15635 +1441883,round_eyewear,0,15633 +594664,green_shirt,0,15616 +16139,mustache,0,15597 +1332796,round_teeth,0,15590 +1335533,holding_staff,0,15576 +421198,center_opening,0,15520 +10549,clitoris,0,15483 +15224,sand,0,15473 +446622,hime_cut,0,15432 +3429,drink,0,15413 +509379,on_floor,0,15395 +1352777,eighth_note,0,15359 +406736,covering_mouth,0,15357 +464544,can,0,15347 +165438,anklet,0,15332 +1081309,mouth_mask,0,15326 +378561,blue_skin,0,15320 +417996,overflow,0,15310 +645753,orange_background,0,15299 +1755,dildo,0,15284 +464569,palm_tree,0,15266 +10440,door,0,15240 +1252945,scar_across_eye,0,15238 +16738,geta,0,15231 +1226390,purple_flower,0,15227 +3313,dragon,0,15216 +660726,rabbit_girl,0,15211 +375266,top-down_bottom-up,0,15186 +645579,fake_tail,0,15181 +513625,asymmetrical_bangs,0,15171 +548703,sitting_on_person,0,15168 +435262,stubble,0,15155 +1681088,furry_female,0,15151 +588674,furrowed_brow,0,15142 +457114,dragon_tail,0,15125 +15126,interracial,0,15118 +13027,fork,0,15080 +465444,antennae,0,15070 +384441,alternate_color,0,15070 +676924,anchor_symbol,0,15035 +383337,sailor_hat,0,15022 +1569657,arrow_(projectile),0,14974 +1262298,off-shoulder_dress,0,14966 +1757,music,0,14950 +501706,card_(medium),0,14946 +477439,white_wings,0,14916 +2367,blindfold,0,14860 +487236,long_dress,0,14814 +444411,licking_lips,0,14807 +1310938,white_one-piece_swimsuit,0,14803 +1569656,arrow_(symbol),0,14797 +553797,sharp_fingernails,0,14786 +407647,adjusting_hair,0,14768 +478262,double_v,0,14763 +527256,green_ribbon,0,14760 +662424,frog_hair_ornament,0,14727 +460642,spread_arms,0,14713 +549355,arms_at_sides,0,14706 +394528,winter_uniform,0,14666 +2562,camera,0,14662 +5228,wand,0,14618 +432198,arched_back,0,14618 +498413,jacket_on_shoulders,0,14614 +1509969,hip_focus,0,14602 +421492,veiny_penis,0,14598 +1516547,brown_headwear,0,14574 +681331,heart_hair_ornament,0,14555 +594129,head_out_of_frame,0,14554 +8891,zoom_layer,0,14541 +7508,straw_hat,0,14516 +461529,apple,0,14510 +524661,folding_fan,0,14476 +5267,labcoat,0,14458 +397045,railing,0,14457 +494337,purple_ribbon,0,14443 +9260,blanket,0,14441 +5160,gag,0,14424 +387050,sun,0,14422 +297980,hanging_breasts,0,14410 +1358345,pink_footwear,0,14406 +1835743,black_one-piece_swimsuit,0,14389 +541716,yokozuwari,0,14316 +450779,emblem,0,14316 +464583,sunflower,0,14308 +448621,arm_warmers,0,14308 +4318,pasties,0,14304 +465048,dog_girl,0,14302 +392031,groping,0,14294 +622688,muscular_female,0,14294 
+2614,shota,0,14290 +10801,android,0,14284 +640898,alternate_hair_length,0,14279 +457574,horizon,0,14277 +403785,lowleg,0,14275 +390589,branch,0,14230 +660814,dot_nose,0,14212 +526693,holding_clothes,0,14179 +5621,smoking,0,14176 +414971,purple_shirt,0,14154 +3714,cannon,0,14150 +1304046,black_bodysuit,0,14119 +1363594,red_hairband,0,14114 +472943,black_sclera,0,14107 +1505129,chinese_text,0,14085 +8104,angel,0,14083 +1349592,blue_kimono,0,14078 +9044,leg_lift,0,14074 +526218,bandeau,0,14074 +420598,highleg_panties,0,14066 +9351,gothic_lolita,0,14051 +646549,red_cape,0,14046 +1351963,ass_focus,0,14019 +380423,goatee,0,14013 +460323,pink_bikini,0,13999 +393578,beanie,0,13998 +397380,name_tag,0,13995 +445092,wince,0,13987 +656161,hand_on_own_cheek,0,13981 +453244,wolf_girl,0,13969 +578430,floral_background,0,13951 +1256471,off-shoulder_shirt,0,13946 +451332,bun_cover,0,13942 +407056,unbuttoned,0,13931 +15395,clock,0,13912 +594404,hat_flower,0,13902 +529493,jack-o'-lantern,0,13886 +560473,partially_visible_vulva,0,13886 +578,lactation,0,13869 +1263229,hand_on_headwear,0,13839 +550131,large_areolae,0,13796 +1343866,black_sleeves,0,13790 +1077303,underbust,0,13739 +408040,hitodama,0,13722 +1359441,collared_dress,0,13716 +562882,blue_pants,0,13705 +442577,ahegao,0,13675 +516350,white_coat,0,13658 +547289,drawstring,0,13624 +524779,microskirt,0,13600 +133767,swimsuit_under_clothes,0,13598 +541903,purple_nails,0,13596 +505450,gift_box,0,13585 +552306,criss-cross_halter,0,13571 +451767,food_on_face,0,13549 +409425,bookshelf,0,13542 +4974,gangbang,0,13540 +6032,tabi,0,13538 +484456,back-to-back,0,13531 +8822,eyeball,0,13531 +2898,basket,0,13522 +560167,character_doll,0,13507 +381163,petticoat,0,13506 +15987,logo,0,13506 +376830,untied,0,13497 +390591,nipple_slip,0,13485 +667849,bandaid_on_face,0,13474 +444095,tail_ornament,0,13468 +1256690,suspender_skirt,0,13451 +4816,gakuran,0,13430 +462124,large_penis,0,13430 +609887,blue_gloves,0,13421 +643824,grabbing_from_behind,0,13400 +448279,cabbie_hat,0,13375 +1312862,blue_bowtie,0,13370 +670983,hand_on_another's_shoulder,0,13361 +305065,saliva_trail,0,13305 +453768,fairy_wings,0,13293 +509,pocky,0,13291 +572684,white_scarf,0,13274 +409592,argyle,0,13257 +455932,size_difference,0,13250 +518422,covered_mouth,0,13250 +663804,armored_dress,0,13203 +537093,lace-up_boots,0,13197 +1354790,yellow_ascot,0,13188 +621064,red_vest,0,13166 +519572,toenail_polish,0,13148 +14859,pointless_censoring,0,13141 +639717,taut_clothes,0,13141 +1332478,shrug_(clothing),0,13135 +550405,holding_flower,0,13117 +420,onsen,0,13111 +1764,guitar,0,13111 +11030,silhouette,0,13100 +457877,leaning_back,0,13073 +558436,silent_comic,0,13061 +588702,vambraces,0,13060 +530083,kneepits,0,13036 +460911,tiles,0,13010 +375026,headdress,0,12999 +10713,unzipped,0,12965 +469042,2koma,0,12953 +1329246,center_frills,0,12943 +535691,butterfly_hair_ornament,0,12934 +375285,winter_clothes,0,12896 +232297,gohei,0,12889 +493832,military_vehicle,0,12879 +149791,drinking,0,12869 +583983,black_cape,0,12865 +713203,crotch_seam,0,12849 +522051,on_head,0,12835 +1602539,vision_(genshin_impact),0,12775 +455615,cow_print,0,12766 +481511,red_lips,0,12763 +5252,cow_ears,0,12756 +399930,highleg_swimsuit,0,12754 +474819,brown_thighhighs,0,12740 +392133,heart_censor,0,12731 +2586,witch,0,12722 +1601823,retro_artstyle,0,12713 +454489,no_nose,0,12712 +4308,pen,0,12700 +1799095,female_child,0,12686 +413878,electricity,0,12677 +470562,spaghetti_strap,0,12671 +406034,arm_grab,0,12648 +43263,anchor,0,12642 
+23249,everyone,0,12603 +559022,shirt_tucked_in,0,12589 +2276,pool,0,12575 +11173,sundress,0,12575 +16704,glass,0,12539 +1804525,bridal_garter,0,12500 +571873,checkered_clothes,0,12495 +450702,naval_uniform,0,12444 +378454,wind_lift,0,12417 +1241714,korean_text,0,12403 +742715,adjusting_eyewear,0,12401 +818365,selfie,0,12344 +1571375,poke_ball_(basic),0,12343 +416906,bangle,0,12329 +379970,bound_wrists,0,12313 +10280,flag,0,12313 +488169,cleft_of_venus,0,12302 +482679,facing_away,0,12302 +11270,mittens,0,12284 +464560,frog,0,12276 +843399,double-breasted,0,12249 +435550,headpat,0,12235 +423612,wedding_ring,0,12233 +84427,reading,0,12185 +3468,classroom,0,12185 +469224,blue_thighhighs,0,12162 +720637,chromatic_aberration,0,12156 +517935,purple_bikini,0,12141 +280408,salute,0,12131 +542666,flipped_hair,0,12106 +297241,smug,0,12103 +1447826,notice_lines,0,12078 +1372775,pinafore_dress,0,12076 +1328010,fur-trimmed_jacket,0,12052 +2880,scythe,0,12042 +450337,ankle_boots,0,12038 +2181,cheerleader,0,12038 +644684,chestnut_mouth,0,12015 +693118,absurdly_long_hair,0,12009 +1574,magic,0,11985 +246,nun,0,11976 +659072,muneate,0,11975 +1781,car,0,11969 +458819,talking,0,11958 +668671,holding_knife,0,11922 +607626,hair_stick,0,11915 +481383,wristwatch,0,11893 +230,waitress,0,11884 +1374209,food-themed_hair_ornament,0,11861 +3242,airplane,0,11859 +1307840,stud_earrings,0,11851 +624352,strap_gap,0,11840 +643027,light_blue_hair,0,11820 +376451,inverted_nipples,0,11812 +411783,potted_plant,0,11776 +1297465,pulled_by_self,0,11774 +1667541,pom_pom_(cheerleading),0,11740 +2780,beachball,0,11717 +3880,light,0,11690 +1609981,uneven_legwear,0,11671 +680569,foot_focus,0,11656 +665407,hand_to_own_mouth,0,11651 +1053124,sideways_glance,0,11642 +390064,gagged,0,11622 +492202,flat_cap,0,11622 +458,nurse,0,11587 +1315939,brown_belt,0,11579 +416331,animal_costume,0,11554 +495530,bikini_under_clothes,0,11545 +397948,road,0,11538 +2802,earmuffs,0,11535 +684289,spoken_question_mark,0,11533 +661340,holding_poke_ball,0,11521 +408359,bare_back,0,11507 +8318,nipple_tweak,0,11485 +536,bukkake,0,11472 +656170,hands_on_own_chest,0,11451 +1230896,horn_ornament,0,11447 +3992,bucket,0,11441 +1416387,kita_high_school_uniform,0,11437 +1113504,hair_behind_ear,0,11436 +152,ninja,0,11416 +516299,style_parody,0,11403 +391568,assault_rifle,0,11397 +1387418,back_bow,0,11386 +704492,armored_boots,0,11381 +376460,fishnet_pantyhose,0,11378 +1278425,rigging,0,11372 +374936,lamp,0,11370 +414765,hair_over_eyes,0,11368 +471181,eyeliner,0,11367 +12457,tatami,0,11350 +543958,bags_under_eyes,0,11328 +446289,cow_horns,0,11318 +2565,battle,0,11310 +6312,femdom,0,11272 +1301111,white_sweater,0,11258 +4572,candle,0,11252 +393028,controller,0,11245 +4436,bench,0,11240 +476771,bush,0,11232 +42918,scales,0,11213 +407744,happy_sex,0,11210 +440780,sailor_shirt,0,11194 +646445,mask_on_head,0,11186 +5214,overalls,0,11182 +1327458,steaming_body,0,11182 +1685196,hakama_short_skirt,0,11165 +626241,triangular_headpiece,0,11147 +411560,sexually_suggestive,0,11144 +11883,cityscape,0,11138 +688713,snake_hair_ornament,0,11138 +7580,mecha_musume,0,11109 +465707,biting,0,11105 +461439,tube_top,0,11104 +1328421,crown_braid,0,11099 +613947,toeless_legwear,0,11091 +484631,naked_shirt,0,11046 +15131,riding,0,11036 +468449,brown_dress,0,11032 +605892,pov_hands,0,11004 +441419,:>,0,11000 +473301,bow_bra,0,10993 +544390,>:),0,10964 +498201,mini_crown,0,10958 +1516549,pink_headwear,0,10944 +479176,bird_wings,0,10934 +452205,stuffed_bunny,0,10882 +374409,bad_anatomy,0,10880 
+331704,bodystocking,0,10879 +15272,bridal_veil,0,10876 +1862786,reaching_towards_viewer,0,10864 +572672,japanese_armor,0,10861 +482857,joints,0,10857 +2231,fairy,0,10857 +568920,+++,0,10851 +699839,halloween_costume,0,10845 +1372733,blue_leotard,0,10838 +404958,pilot_suit,0,10829 +464543,cable,0,10826 +705211,fake_horns,0,10826 +412037,open_fly,0,10811 +843857,red_kimono,0,10769 +393840,purple_panties,0,10765 +526025,pleated_dress,0,10761 +374533,creature,0,10746 +6158,orgasm,0,10743 +506,dark,0,10736 +558230,ice_wings,0,10733 +532943,covering_crotch,0,10708 +5104,shade,0,10696 +211409,wet_shirt,0,10693 +6318,space,0,10690 +682963,claw_pose,0,10690 +7495,sarong,0,10662 +1308012,red_hakama,0,10624 +669033,triangle_mouth,0,10620 +1669227,hugging_own_legs,0,10590 +1257079,print_kimono,0,10568 +535186,breast_pocket,0,10565 +386389,bracer,0,10564 +390180,bathing,0,10559 +185924,key,0,10559 +1389122,black_collar,0,10551 +480899,5boys,0,10529 +394881,school_desk,0,10512 +394136,red_panties,0,10506 +1801550,single_side_bun,0,10504 +463399,randoseru,0,10500 +1303436,red_choker,0,10497 +464570,peach,0,10486 +389456,holster,0,10474 +599873,air_bubble,0,10456 +493887,food_in_mouth,0,10448 +576561,alternate_eye_color,0,10424 +484924,heart_hands,0,10415 +16149,sack,0,10410 +547893,indian_style,0,10398 +525747,lolita_hairband,0,10392 +1611664,oil-paper_umbrella,0,10388 +695937,white_outline,0,10369 +414957,confetti,0,10366 +4965,dagger,0,10358 +648077,heart_of_string,0,10355 +421188,against_wall,0,10350 +603647,frilled_shirt,0,10347 +1685910,painting_(medium),0,10330 +1693074,own_hands_clasped,0,10329 +613597,print_legwear,0,10327 +656166,hands_on_own_face,0,10322 +507378,yellow_bikini,0,10320 +580906,low_wings,0,10316 +466164,laughing,0,10311 +669725,holding_bag,0,10306 +459290,snowflakes,0,10304 +469422,paw_print,0,10283 +670635,cross-laced_clothes,0,10276 +394722,wet_hair,0,10266 +528129,one_breast_out,0,10265 +614988,pink_jacket,0,10262 +466325,autumn_leaves,0,10259 +394150,blue_rose,0,10234 +377998,petite,0,10222 +5626,antlers,0,10214 +610524,star-shaped_pupils,0,10212 +408438,hair_up,0,10209 +1262171,holding_bottle,0,10182 +614271,lace-trimmed_legwear,0,10161 +551848,polka_dot_background,0,10147 +1795966,cone_hair_bun,0,10144 +8180,drunk,0,10135 +461736,alternate_hair_color,0,10128 +15020,hibiscus,0,10120 +1447858,feather_hair_ornament,0,10080 +464545,carrot,0,10075 +502136,whisker_markings,0,10073 +468789,paw_gloves,0,10067 +434704,string,0,10059 +653480,yellow_jacket,0,10056 +2559,incest,0,10052 +1681089,furry_male,0,10041 +411770,cum_in_ass,0,10031 +4358,plugsuit,0,10025 +372,bath,0,10024 +464674,stretching,0,10003 +288161,office_lady,0,9994 +15771,raccoon_ears,0,9993 +1335363,arm_tattoo,0,9987 +4026,pumpkin,0,9981 +602796,star_print,0,9977 +1285325,wrist_scrunchie,0,9952 +1297657,zipper_pull_tab,0,9949 +1320887,anal_object_insertion,0,9943 +474598,tate_eboshi,0,9933 +473213,red_thighhighs,0,9930 +656167,hand_on_another's_face,0,9907 +450266,wine_glass,0,9903 +617956,brown_shirt,0,9902 +1314823,black_sweater,0,9902 +9586,habit,0,9879 +469551,facepaint,0,9873 +496617,marker_(medium),0,9871 +594366,bat_(animal),0,9870 +1250483,holding_fan,0,9859 +409720,scabbard,0,9851 +1222476,crescent_hair_ornament,0,9841 +412048,ribbon_choker,0,9807 +3444,computer,0,9803 +4960,parasol,0,9779 +638006,thighlet,0,9776 +576287,blood_on_clothes,0,9756 +592643,navel_cutout,0,9752 +616593,purple_gloves,0,9741 +13810,torii,0,9738 +1481917,sakuragaoka_high_school_uniform,0,9728 +375110,haori,0,9727 
+544230,asymmetrical_wings,0,9708 +9481,teapot,0,9705 +456270,tasuki,0,9704 +720760,holding_tray,0,9683 +485597,grey_dress,0,9677 +251010,tiger_ears,0,9659 +4787,axe,0,9654 +158473,string_panties,0,9651 +1369967,watercraft,0,9647 +1410771,artist_logo,0,9625 +14607,tight,0,9619 +414233,cum_on_clothes,0,9606 +488078,purple_jacket,0,9606 +375461,belly,0,9587 +15202,nipple_piercing,0,9585 +6182,shibari,0,9571 +461639,happy_new_year,0,9568 +613998,mismatched_legwear,0,9561 +477517,chinese_zodiac,0,9552 +9504,magatama,0,9538 +3748,slippers,0,9521 +1799094,male_child,0,9515 +555246,black_border,0,9514 +426594,lace-trimmed_panties,0,9497 +871328,black_kimono,0,9495 +4939,precum,0,9450 +672705,pink_kimono,0,9438 +408248,transparent,0,9427 +125420,bald,0,9406 +428808,wagashi,0,9402 +1316394,hip_vent,0,9399 +614827,tachi-e,0,9376 +1260880,fang_out,0,9362 +2414,vampire,0,9361 +1393212,yellow_neckerchief,0,9349 +547199,bikini_skirt,0,9343 +675233,open_cardigan,0,9334 +472448,highleg_bikini,0,9302 +499324,red_shorts,0,9295 +666816,hand_on_own_head,0,9293 +1516550,purple_headwear,0,9284 +723097,after_vaginal,0,9283 +419116,tied_shirt,0,9268 +448185,garrison_cap,0,9268 +1310698,curled_horns,0,9264 +1749079,sample_watermark,0,9235 +589024,arm_strap,0,9233 +394083,knee_pads,0,9225 +1339640,midriff_peek,0,9216 +483988,condom_wrapper,0,9209 +881062,grabbing_own_breast,0,9198 +377888,torn_pantyhose,0,9190 +1244676,one-piece_tan,0,9186 +400382,mouse_tail,0,9163 +678558,hand_on_own_chin,0,9161 +1276712,layered_sleeves,0,9154 +3473,giant,0,9154 +500472,evil_smile,0,9152 +4964,nosebleed,0,9148 +1355714,crescent_hat_ornament,0,9140 +9983,dancing,0,9125 +456515,tiger_print,0,9101 +474210,leg_ribbon,0,9099 +396079,looking_afar,0,9094 +686321,half_gloves,0,9094 +397156,presenting,0,9077 +390148,green_skin,0,9064 +504750,princess_carry,0,9062 +492982,layered_dress,0,9050 +480027,purple_thighhighs,0,9049 +615659,blue_vest,0,9031 +376270,pantyhose_pull,0,9024 +388908,raglan_sleeves,0,9022 +200140,magic_circle,0,9019 +375526,short_kimono,0,9014 +500740,very_short_hair,0,9011 +578891,shoulder_blades,0,9004 +515909,asymmetrical_clothes,0,8994 +473248,blue_bra,0,8975 +578643,striped_background,0,8966 +562575,green_bikini,0,8960 +785234,shoulder_cutout,0,8944 +11019,scared,0,8937 +461530,balloon,0,8936 +618117,legwear_under_shorts,0,8925 +444507,deep_skin,0,8924 +75228,aura,0,8909 +2246,wall,0,8904 +451190,layered_skirt,0,8899 +1451201,draph,0,8887 +411966,used_condom,0,8885 +460555,hair_censor,0,8876 +1618371,body_fur,0,8864 +493064,over_shoulder,0,8857 +454128,head_wreath,0,8850 +547384,tail_raised,0,8841 +468991,kicking,0,8840 +395583,mother_and_daughter,0,8838 +533391,open_hand,0,8832 +520839,sound_effects,0,8828 +461159,reference_sheet,0,8828 +1635846,tracen_school_uniform,0,8818 +541599,mechanical_arms,0,8817 +470019,one_knee,0,8804 +449932,w,0,8800 +166945,yellow_dress,0,8796 +11870,manly,0,8789 +498546,male_underwear,0,8784 +397155,huge_ass,0,8779 +513423,graphite_(medium),0,8775 +1672,tea,0,8754 +521120,backless_dress,0,8747 +1303251,grey_pants,0,8746 +1318267,public_indecency,0,8718 +538586,hat_feather,0,8714 +1233477,crop_top_overhang,0,8702 +14461,open_kimono,0,8696 +596449,pigeon-toed,0,8687 +8577,paintbrush,0,8685 +1302741,red_ascot,0,8670 +1420938,holding_polearm,0,8660 +5857,leather,0,8653 +473218,lace-trimmed_bra,0,8648 +1462380,disposable_cup,0,8626 +669790,futa_with_female,0,8624 +15258,torpedo,0,8616 +574178,o-ring_top,0,8610 +522720,naked_towel,0,8608 +572821,star_earrings,0,8595 
+574501,ringed_eyes,0,8594 +1740,mermaid,0,8590 +567329,limited_palette,0,8586 +547394,o-ring_bikini,0,8586 +1257811,on_couch,0,8586 +1253894,holding_microphone,0,8580 +516029,shimenawa,0,8576 +728807,animification,0,8571 +558829,animal_on_head,0,8562 +1379635,mmf_threesome,0,8547 +1256688,bandaged_leg,0,8535 +462974,squiggle,0,8526 +501583,in_container,0,8526 +416486,photo_(object),0,8523 +1388797,blue_neckerchief,0,8514 +469978,glowing_eye,0,8491 +12724,field,0,8487 +484616,maple_leaf,0,8481 +507586,yellow_skirt,0,8480 +3102,greaves,0,8472 +1575551,gap_(touhou),0,8465 +484880,architecture,0,8439 +10369,bedroom,0,8419 +383117,breast_sucking,0,8415 +388762,sleeveless_turtleneck,0,8392 +513428,watercolor_(medium),0,8390 +1617412,bare_pectorals,0,8389 +646363,company_name,0,8382 +384905,symmetrical_docking,0,8373 +421507,leg_grab,0,8371 +1349338,red_leotard,0,8367 +563862,gameplay_mechanics,0,8356 +380303,bound_arms,0,8353 +593165,out-of-frame_censoring,0,8346 +1409552,ooarai_school_uniform,0,8335 +430765,sunbeam,0,8334 +13126,lineart,0,8325 +1081137,imminent_penetration,0,8323 +617541,brown_pants,0,8310 +10704,bamboo,0,8306 +516432,imminent_kiss,0,8301 +8433,thumbs_up,0,8295 +440369,multiple_persona,0,8287 +726933,clothes_around_waist,0,8264 +447310,green_nails,0,8240 +576693,tile_floor,0,8235 +726825,shoulder_tattoo,0,8188 +811495,undercut,0,8166 +718360,polka_dot_bow,0,8151 +13851,halftone,0,8144 +3356,peeing,0,8144 +396470,arm_cannon,0,8124 +545704,blue_hairband,0,8107 +412080,sleeve_cuffs,0,8091 +443796,under_covers,0,8086 +4065,explosion,0,8083 +459097,cowbell,0,8081 +639869,white_cape,0,8076 +481547,holding_hair,0,8074 +7834,naked_apron,0,8071 +514087,cross_necklace,0,8065 +4633,singing,0,8060 +465393,pee,0,8051 +546667,print_shirt,0,8035 +5199,leggings,0,8021 +7929,shackles,0,8009 +4429,pencil,0,8008 +530616,checkered_floor,0,8005 +412376,stitches,0,7998 +1379926,black_hoodie,0,7998 +637674,slingshot_swimsuit,0,7995 +1301235,pubic_tattoo,0,7995 +12391,double_penetration,0,7977 +1447595,foot_out_of_frame,0,7969 +2570,idol,0,7969 +108777,bra_lift,0,7966 +374955,ruins,0,7962 +2100,summer,0,7947 +6203,vines,0,7946 +9331,falling,0,7937 +443371,raccoon_tail,0,7937 +515552,striped_dress,0,7913 +8301,black_cat,0,7906 +16791,strap,0,7905 +411657,torn_thighhighs,0,7901 +1396655,white_capelet,0,7899 +460204,hair_over_breasts,0,7896 +1441862,purple_footwear,0,7895 +491150,^o^,0,7888 +491645,head_fins,0,7886 +530962,arm_under_breasts,0,7879 +11651,annoyed,0,7846 +2566,mushroom,0,7841 +2215,house,0,7822 +463475,iron_cross,0,7811 +1271756,black_scarf,0,7811 +1488686,tilted_headwear,0,7803 +1429562,two-tone_dress,0,7795 +682686,spoken_exclamation_mark,0,7795 +186201,skirt_pull,0,7793 +581500,plaid_vest,0,7793 +30851,3koma,0,7783 +1441878,eyewear_removed,0,7776 +662950,name_connection,0,7769 +3898,tank,0,7761 +1320888,vaginal_object_insertion,0,7760 +491943,dark_persona,0,7756 +53988,fox_mask,0,7754 +457806,artist_self-insert,0,7751 +399391,spiked_bracelet,0,7746 +1257081,scar_on_cheek,0,7741 +610074,platform_footwear,0,7726 +430066,id_card,0,7720 +1364407,genderswap_(ftm),0,7710 +1393342,erune,0,7710 +1501454,tinted_eyewear,0,7706 +496894,face-to-face,0,7693 +11555,bathroom,0,7684 +5157,sad,0,7681 +403904,french_kiss,0,7681 +16170,television,0,7676 +384906,asymmetrical_docking,0,7672 +400657,one-eyed,0,7665 +388149,ranguage,0,7639 +715545,duel_monster,0,7629 +650433,invisible_chair,0,7628 +421031,huge_weapon,0,7615 +1278539,long_sideburns,0,7615 +268105,pants_pull,0,7606 
+532949,trigger_discipline,0,7603 +395294,head_scarf,0,7603 +724764,hand_on_own_thigh,0,7601 +395929,bikini_pull,0,7592 +502731,hachimaki,0,7590 +471436,doughnut,0,7587 +720163,partially_unbuttoned,0,7566 +698860,hand_on_own_knee,0,7563 +1644111,official_alternate_hairstyle,0,7560 +392495,tiptoes,0,7543 +1470809,red_horns,0,7539 +1441860,grey_footwear,0,7531 +238471,babydoll,0,7528 +1370047,white_choker,0,7528 +458210,bikini_armor,0,7519 +408887,shoulder_pads,0,7517 +419695,sakazuki,0,7507 +745997,holding_animal,0,7496 +474306,headphones_around_neck,0,7496 +465836,cutoffs,0,7481 +404056,yukkuri_shiteitte_ne,0,7470 +376034,white_eyes,0,7469 +838605,animal_focus,0,7469 +1392820,black_neckerchief,0,7468 +1328011,fur-trimmed_sleeves,0,7459 +525152,white_fur,0,7451 +1450619,wide_shot,0,7448 +414159,x-ray,0,7447 +665137,sandwiched,0,7438 +485014,white_rose,0,7434 +3138,bread,0,7434 +658791,necktie_between_breasts,0,7425 +1554924,m_legs,0,7415 +2998,cunnilingus,0,7414 +4924,camouflage,0,7408 +560670,narrow_waist,0,7400 +490069,disembodied_limb,0,7396 +462735,old,0,7394 +390530,grey_skin,0,7379 +500,fundoshi,0,7377 +537886,shorts_under_skirt,0,7375 +575497,starry_background,0,7371 +434799,0_0,0,7365 +1383260,purple_kimono,0,7360 +406340,oversized_clothes,0,7355 +474715,crescent_moon,0,7354 +1701063,holding_another's_wrist,0,7353 +584250,summer_uniform,0,7346 +498950,orange_skirt,0,7335 +401680,sweater_dress,0,7335 +665739,holding_gift,0,7327 +612000,constricted_pupils,0,7324 +464580,split,0,7321 +629455,striped_necktie,0,7315 +392034,dripping,0,7314 +538023,on_ground,0,7311 +529429,covered_eyes,0,7294 +403779,navel_piercing,0,7290 +420828,sliding_doors,0,7286 +703196,leaf_hair_ornament,0,7283 +420298,big_hair,0,7279 +385088,earphones,0,7265 +10403,futon,0,7263 +6202,winter,0,7255 +732590,interspecies,0,7250 +1363856,star_hat_ornament,0,7249 +472069,cross-section,0,7243 +279773,body_writing,0,7234 +715714,voice_actor_connection,0,7232 +1379636,ffm_threesome,0,7231 +1267724,symbol_in_eye,0,7230 +473742,two_tails,0,7216 +661337,aqua_nails,0,7214 +510068,splashing,0,7209 +467794,cow_girl,0,7208 +1456682,armpit_crease,0,7206 +534254,brick_wall,0,7205 +415106,neck_ring,0,7199 +142348,wardrobe_malfunction,0,7198 +590917,leaf_on_head,0,7197 +10802,bonnet,0,7196 +478328,rolling_eyes,0,7194 +491802,standing_sex,0,7190 +1468069,multiple_others,0,7172 +5861,bishounen,0,7162 +478321,checkered_background,0,7160 +595503,facial_tattoo,0,7160 +412952,arm_garter,0,7155 +374961,no_pupils,0,7144 +581653,smoking_pipe,0,7135 +3971,handcuffs,0,7116 +684956,frilled_bow,0,7114 +666125,holding_fruit,0,7112 +416892,;o,0,7110 +462594,bobby_socks,0,7106 +558653,casual_one-piece_swimsuit,0,7099 +1515359,pink_theme,0,7096 +710372,orange_shirt,0,7095 +108765,broom_riding,0,7094 +588956,untied_bikini,0,7093 +4940,foreskin,0,7082 +1264677,anchor_hair_ornament,0,7082 +391251,breast_rest,0,7080 +438708,frilled_panties,0,7062 +569780,pink_rose,0,7060 +464551,cookie,0,7055 +1238881,yellow_necktie,0,7052 +606614,shoes_removed,0,7051 +606732,soaking_feet,0,7044 +447060,spiked_collar,0,7038 +1622419,straight-on,0,7034 +1269638,handheld_game_console,0,7006 +390893,water_bottle,0,7005 +551141,arm_ribbon,0,7002 +395658,christmas_tree,0,7002 +388207,yin_yang,0,7000 +402799,tiger_tail,0,6999 +501739,folded,0,6990 +2918,fireworks,0,6979 +490606,afterimage,0,6977 +492645,gyaru,0,6975 +493164,forehead_jewel,0,6972 +596854,blue_scarf,0,6969 +1400566,toned_male,0,6962 +423161,object_on_head,0,6956 +490261,partially_colored,0,6915 
+511662,arm_guards,0,6904 +483436,fish_tail,0,6898 +392008,chalkboard,0,6879 +410002,sheet_grab,0,6869 +562151,short_over_long_sleeves,0,6864 +1269062,mitakihara_school_uniform,0,6848 +1441887,rimless_eyewear,0,6847 +439130,anniversary,0,6846 +452549,!!,0,6845 +1508967,purple_theme,0,6844 +3509,syringe,0,6844 +1228172,asymmetrical_gloves,0,6843 +511640,single_shoe,0,6836 +1551197,1990s_(style),0,6832 +2298,watermelon,0,6825 +3531,bridge,0,6818 +487745,sitting_on_lap,0,6811 +2576,fat,0,6804 +148527,stool,0,6804 +5006,rice,0,6803 +12286,badge,0,6793 +526607,print_bikini,0,6792 +54490,monitor,0,6790 +673732,goat_horns,0,6781 +390703,purple_skin,0,6770 +543207,east_asian_architecture,0,6768 +436870,:/,0,6764 +1491182,vehicle_focus,0,6762 +498466,implied_sex,0,6760 +385431,submachine_gun,0,6752 +396065,green_panties,0,6749 +16507,lion_ears,0,6743 +663521,shoe_soles,0,6742 +1447648,sailor_senshi_uniform,0,6742 +1261156,green_vest,0,6736 +559585,playing_instrument,0,6736 +1637638,heart_brooch,0,6717 +1328025,fur-trimmed_coat,0,6714 +381114,red_skin,0,6712 +546385,heart_earrings,0,6708 +1505172,engrish_text,0,6700 +675356,multicolored_background,0,6696 +614744,red_pants,0,6695 +643811,micro_shorts,0,6693 +1898,hammer,0,6666 +1330221,fur-trimmed_gloves,0,6666 +714822,red_theme,0,6657 +1313352,brown_sweater,0,6654 +416507,torn_shirt,0,6653 +418177,ripples,0,6653 +606748,chibi_inset,0,6650 +606296,dappled_sunlight,0,6636 +1248463,brown_coat,0,6636 +1326379,red_capelet,0,6625 +604394,pointing_at_viewer,0,6623 +676529,licking_penis,0,6622 +724584,hooded_cloak,0,6622 +1600,beer,0,6619 +376671,scroll,0,6603 +1661326,content_rating,0,6602 +378581,brothers,0,6595 +1708,sake,0,6594 +3904,rainbow,0,6593 +1733109,jaggy_lines,0,6576 +1490428,fringe_trim,0,6558 +464558,egg,0,6557 +585852,skirt_removed,0,6555 +374606,giantess,0,6555 +706995,erection_under_clothes,0,6541 +1314745,blue_choker,0,6538 +1392012,white_bodysuit,0,6532 +399604,fucked_silly,0,6527 +8988,sniper_rifle,0,6526 +622607,official_style,0,6521 +1397428,white_tank_top,0,6513 +413865,planet,0,6511 +1382120,holding_instrument,0,6509 +483418,mini_top_hat,0,6506 +509903,tying_hair,0,6502 +1354336,grey_shorts,0,6501 +375594,pink_thighhighs,0,6498 +1375840,rabbit_hair_ornament,0,6488 +69696,undershirt,0,6482 +439891,d:,0,6477 +1542129,skin-covered_horns,0,6470 +8696,bathtub,0,6469 +608355,karakasa_obake,0,6453 +473284,frilled_bra,0,6450 +11231,feeding,0,6449 +1422110,ears_through_headwear,0,6435 +933253,on_chair,0,6435 +1251101,off-shoulder_sweater,0,6427 +482927,belt_pouch,0,6405 +501783,miqo'te,0,6396 +1431068,holding_stuffed_toy,0,6394 +4547,lowleg_panties,0,6386 +3577,motorcycle,0,6373 +391915,print_panties,0,6371 +468075,jiangshi,0,6364 +2409,error,0,6362 +394743,cushion,0,6357 +1732562,bikini_bottom_only,0,6351 +466142,fishnet_thighhighs,0,6345 +1260411,holding_plate,0,6341 +464547,cherry,0,6336 +16710,sepia,0,6336 +1304069,yellow_bowtie,0,6334 +694174,torogao,0,6327 +2182,police,0,6319 +474091,downblouse,0,6318 +14297,cyborg,0,6309 +507496,chess_piece,0,6302 +511571,zzz,0,6294 +374967,power_lines,0,6288 +464550,coin,0,6279 +1401801,black_capelet,0,6271 +6230,latex,0,6269 +202427,3:,0,6262 +376018,ribs,0,6257 +482376,game_controller,0,6245 +465523,race_queen,0,6245 +487559,lamppost,0,6233 +462578,merry_christmas,0,6216 +474717,front_ponytail,0,6216 +616616,panties_removed,0,6210 +460159,body_blush,0,6210 +561394,reverse_cowgirl_position,0,6205 +393313,nervous,0,6205 +393361,seductive_smile,0,6198 +1261355,bead_bracelet,0,6191 
+1450868,chaldea_uniform,0,6191 +6425,scissors,0,6177 +4803,birthday,0,6176 +1320207,white_bowtie,0,6172 +1169097,frilled_hairband,0,6172 +1262142,holding_bouquet,0,6169 +519374,white_belt,0,6166 +3569,sleepy,0,6162 +642359,faulds,0,6156 +5011,horse,0,6155 +420311,\m/,0,6152 +495147,hair_slicked_back,0,6150 +378042,bear_ears,0,6149 +502301,tail_wagging,0,6145 +13859,perspective,0,6139 +375373,long_coat,0,6137 +476548,pinky_out,0,6133 +1312164,green_necktie,0,6130 +34553,dirty,0,6130 +606828,bead_necklace,0,6129 +1316159,leaning_to_the_side,0,6125 +8354,reclining,0,6121 +651956,clothed_male_nude_female,0,6120 +1302909,brown_shorts,0,6118 +1424093,otonokizaka_school_uniform,0,6112 +109153,saucer,0,6111 +15994,shirt_pull,0,6107 +545740,taut_shirt,0,6107 +458796,sheep_horns,0,6107 +681480,light_frown,0,6101 +461699,blank_eyes,0,6090 +6235,harness,0,6090 +835846,white_collar,0,6090 +546417,girl_sandwich,0,6090 +422720,uwabaki,0,6087 +423348,chest_hair,0,6087 +378899,bride,0,6079 +1684829,heads_together,0,6079 +417892,shell,0,6078 +398297,shouting,0,6077 +238935,track_suit,0,6077 +504234,punching,0,6076 +488261,blue_cape,0,6061 +1328339,multi-strapped_bikini,0,6061 +15959,loose_socks,0,6059 +664517,kariginu,0,6058 +527888,animalization,0,6048 +1313798,blue_sweater,0,6045 +712121,trait_connection,0,6043 +1515356,green_theme,0,6040 +351213,unfinished,0,6040 +383707,pun,0,6039 +381678,broken,0,6035 +1385413,white_hoodie,0,6033 +493465,playing_card,0,6014 +2762,coffee,0,6011 +379375,red_coat,0,5995 +493130,plaid_shirt,0,5995 +593668,huge_ahoge,0,5994 +418912,cum_string,0,5987 +1385535,grey_sweater,0,5986 +377360,take_your_pick,0,5971 +435834,torso_grab,0,5965 +397487,pillow_hug,0,5965 +1329446,green_shorts,0,5958 +1248639,grey_gloves,0,5953 +1364404,male_swimwear,0,5951 +464537,bruise,0,5949 +465180,ball_gag,0,5938 +473775,pointy_hair,0,5932 +1437338,brown_cardigan,0,5931 +391704,pervert,0,5928 +1468296,beamed_eighth_notes,0,5921 +14258,nekomata,0,5908 +375915,animal_hat,0,5879 +579943,hair_pulled_back,0,5875 +1613133,assertive_female,0,5874 +1320534,pink_bowtie,0,5868 +415942,jester_cap,0,5864 +329,bestiality,0,5862 +379427,skeleton,0,5861 +695377,planted,0,5847 +1152618,bandaid_on_leg,0,5846 +710095,striped_bowtie,0,5842 +516937,oversized_object,0,5840 +541644,aqua_background,0,5839 +563425,waist_cape,0,5834 +1367825,green_kimono,0,5834 +487850,tentacle_sex,0,5833 +399265,fat_mons,0,5833 +659487,black_tank_top,0,5828 +466467,burger,0,5826 +390768,old_man,0,5822 +3860,cooking,0,5819 +1322401,black_horns,0,5817 +1396773,fur-trimmed_capelet,0,5814 +1282122,green_footwear,0,5807 +440458,xd,0,5781 +421520,no_eyes,0,5769 +9894,lap_pillow,0,5767 +1375839,holding_chopsticks,0,5765 +526026,multicolored_dress,0,5757 +399455,poolside,0,5755 +16739,clipboard,0,5755 +1326641,purple_bowtie,0,5755 +1251890,suggestive_fluid,0,5752 +471574,over-kneehighs,0,5752 +644510,dark_background,0,5752 +2311,pregnant,0,5750 +626633,spoken_musical_note,0,5741 +375185,doll_joints,0,5739 +15670,beach_umbrella,0,5737 +466986,horn_ribbon,0,5730 +380747,speed_lines,0,5726 +379609,bra_pull,0,5725 +1394264,holding_bow_(weapon),0,5725 +2648,nightgown,0,5717 +145788,fur,0,5713 +1041483,split_mouth,0,5711 +580402,plaid_scarf,0,5706 +462351,animal_nose,0,5706 +1428556,standing_split,0,5701 +1474967,grey_headwear,0,5699 +467264,yawning,0,5693 +1367435,falling_petals,0,5680 +3249,wine,0,5677 +377,mouse,0,5676 +1616,kotatsu,0,5667 +1275190,twisted_torso,0,5662 +414161,cum_on_ass,0,5661 +1246198,mechanical_halo,0,5658 
+1750248,onee-shota,0,5654 +408582,expressions,0,5652 +1689923,pectoral_cleavage,0,5650 +623300,bubble_skirt,0,5648 +420744,shelf,0,5647 +473283,purple_bra,0,5644 +2786,loincloth,0,5630 +1318186,braided_bun,0,5618 +464571,pillar,0,5618 +520820,female_orgasm,0,5616 +1484630,see-through_sleeves,0,5615 +378072,huge_penis,0,5614 +1425519,tokiwadai_school_uniform,0,5612 +5086,whip,0,5610 +1316339,sweating_profusely,0,5600 +506311,weapon_over_shoulder,0,5599 +516375,biceps,0,5593 +492465,grey_thighhighs,0,5590 +422021,cow_tail,0,5579 +412879,playing_games,0,5575 +603198,red_sweater,0,5574 +552246,red_collar,0,5571 +422654,gigantic_breasts,0,5569 +421658,cardboard_box,0,5565 +582833,ear_blush,0,5561 +639323,skull_hair_ornament,0,5557 +560578,multicolored_skin,0,5557 +615907,pink_gloves,0,5555 +547756,bare_tree,0,5555 +1496184,tassel_earrings,0,5537 +387233,paper_fan,0,5535 +1316985,shark_tail,0,5523 +410239,superhero,0,5520 +41161,street,0,5515 +460438,damaged,0,5505 +407260,polka_dot_panties,0,5502 +663579,holding_spoon,0,5500 +480577,:|,0,5494 +3516,river,0,5492 +8671,female_ejaculation,0,5488 +432914,flat_color,0,5483 +449907,topknot,0,5483 +1318935,aqua_necktie,0,5482 +660963,diamond_(shape),0,5468 +665765,animal_collar,0,5465 +438648,stand_(jojo),0,5463 +389066,castle,0,5462 +5473,tape,0,5460 +172313,time_paradox,0,5460 +393283,orb,0,5456 +2282,footjob,0,5450 +556987,sparkling_eyes,0,5450 +482590,=3,0,5445 +11458,baseball_bat,0,5437 +12438,grapes,0,5437 +15117,police_uniform,0,5434 +3767,laptop,0,5430 +605308,between_fingers,0,5424 +495048,lily_(flower),0,5422 +490010,extra_arms,0,5422 +1410613,patreon_logo,0,5421 +474502,explosive,0,5420 +302968,wet_panties,0,5417 +473214,red_bra,0,5416 +547348,extra_eyes,0,5410 +511691,sweater_lift,0,5404 +821623,female_pervert,0,5400 +1251292,fur-trimmed_cape,0,5393 +634063,flag_print,0,5382 +1631734,single_mechanical_arm,0,5382 +511642,single_sock,0,5372 +652562,book_stack,0,5372 +6413,uterus,0,5372 +1398817,thighhighs_under_boots,0,5356 +1303018,blue_coat,0,5353 +613923,argyle_legwear,0,5348 +1719049,split-color_hair,0,5347 +564515,vertical-striped_thighhighs,0,5345 +1303914,pink_choker,0,5343 +413872,breast_envy,0,5329 +1416880,yellow_footwear,0,5328 +1408307,black_blindfold,0,5327 +6188,dressing,0,5323 +582261,unmoving_pattern,0,5319 +399834,forehead_protector,0,5312 +614881,open_vest,0,5307 +623331,company_connection,0,5304 +399073,waistcoat,0,5304 +727102,hand_on_own_stomach,0,5303 +1492952,super_crown,0,5298 +1320446,facing_another,0,5296 +491474,gym_shirt,0,5296 +1290625,single_bare_shoulder,0,5293 +501384,long_braid,0,5282 +413907,;p,0,5280 +3288,penguin,0,5278 +626213,diagonal_stripes,0,5277 +665797,hand_on_own_ass,0,5270 +381455,shopping_bag,0,5251 +568318,bokeh,0,5237 +577266,chain-link_fence,0,5236 +660644,heart_cutout,0,5234 +407913,bursting_breasts,0,5231 +403477,electric_guitar,0,5230 +474204,frilled_thighhighs,0,5228 +526806,tengu-geta,0,5227 +416268,v-neck,0,5223 +2806,exhibitionism,0,5222 +456272,dog_tags,0,5216 +710056,fox_shadow_puppet,0,5215 +9127,dango,0,5212 +471020,covering_face,0,5210 +1243442,competition_school_swimsuit,0,5209 +10322,albino,0,5205 +582827,adjusting_headwear,0,5204 +425624,charm_(object),0,5199 +4491,monocle,0,5192 +9449,internal_cumshot,0,5189 +2083,meat,0,5184 +388072,costume_switch,0,5183 +434225,purple_lips,0,5180 +1398298,fur-trimmed_dress,0,5174 +415545,twilight,0,5171 +405409,tail_ribbon,0,5170 +5814,autumn,0,5169 +1418952,colored_tips,0,5163 +55663,danmaku,0,5161 +375456,zombie,0,5161 
+374357,yandere,0,5160 +492671,moaning,0,5160 +632123,clothes_removed,0,5159 +1258734,looking_ahead,0,5157 +405981,breast_suppress,0,5155 +656330,hands_on_own_cheeks,0,5141 +1548634,braided_bangs,0,5130 +4440,wolf,0,5123 +379597,shorts_pull,0,5123 +1008745,sanpaku,0,5121 +471598,tail_bow,0,5119 +1441881,holding_eyewear,0,5118 +540619,holding_hat,0,5118 +1406853,serval_print,0,5115 +1618887,infection_monitor_(arknights),0,5114 +432947,no_socks,0,5102 +485638,blood_splatter,0,5101 +1782390,traditional_bowtie,0,5099 +442229,flame,0,5094 +400587,reaching,0,5094 +450547,water_gun,0,5082 +448399,dougi,0,5074 +436889,stomach_bulge,0,5063 +485218,trading_card,0,5058 +406885,thong_bikini,0,5056 +374989,quiver,0,5054 +651810,holding_card,0,5051 +481812,teardrop,0,5044 +587579,floating_object,0,5042 +388210,gold,0,5036 +666921,holding_fork,0,5027 +7818,anal_beads,0,5024 +1356896,armpit_peek,0,5023 +486149,military_jacket,0,5019 +423823,ship,0,5013 +2688,knight,0,5010 +439147,cum_on_tongue,0,4999 +2364,onigiri,0,4999 +424779,thigh_holster,0,4992 +449465,firing,0,4985 +603226,heart_print,0,4974 +10367,prosthesis,0,4974 +456933,microphone_stand,0,4972 +578856,pink_sweater,0,4971 +578948,closed_umbrella,0,4969 +398857,mind_control,0,4963 +655244,cross_earrings,0,4960 +398955,flower_field,0,4958 +431532,fake_screenshot,0,4953 +438177,father_and_daughter,0,4953 +455753,leg_warmers,0,4938 +10495,cowboy_hat,0,4938 +615718,green_gloves,0,4935 +9469,noodles,0,4928 +11067,jumpsuit,0,4928 +646772,solid_circle_eyes,0,4920 +10045,family,0,4917 +11062,fox,0,4913 +494896,hydrangea,0,4913 +383172,train_interior,0,4907 +468509,paper_lantern,0,4903 +409832,robot_joints,0,4893 +1268860,chest_jewel,0,4891 +502421,glaring,0,4887 +1345874,weapon_on_back,0,4887 +715118,ear_covers,0,4883 +784749,leotard_under_clothes,0,4883 +405861,rose_petals,0,4880 +417653,convenient_leg,0,4879 +553870,sleeveless_jacket,0,4875 +533114,turn_pale,0,4870 +410885,skyscraper,0,4869 +449502,dimples_of_venus,0,4869 +547349,heart_ahoge,0,4867 +398219,breast_lift,0,4865 +1269470,chest_tattoo,0,4864 +8475,bicycle,0,4861 +640872,pov_crotch,0,4861 +1304062,yellow_hairband,0,4850 +1347239,holding_wand,0,4848 +508932,side-by-side,0,4841 +398888,vase,0,4840 +1168235,halter_dress,0,4833 +1587249,oripathy_lesion_(arknights),0,4833 +459964,unsheathing,0,4830 +3152,lance,0,4830 +3778,amputee,0,4811 +661260,yellow_nails,0,4811 +1528415,roswaal_mansion_maid_uniform,0,4808 +619558,peeking_out,0,4794 +591983,disembodied_penis,0,4794 +549705,red_bodysuit,0,4786 +821048,pink_hairband,0,4785 +492769,picture_frame,0,4783 +513605,colored_pencil_(medium),0,4777 +1322968,old_school_swimsuit,0,4774 +12858,bone,0,4765 +538432,full_armor,0,4765 +6328,chick,0,4764 +389995,red_wings,0,4763 +561421,striped_ribbon,0,4762 +486934,no_mouth,0,4760 +695469,plaid_bow,0,4757 +511644,single_wing,0,4757 +105306,condom_in_mouth,0,4753 +615970,yellow_sclera,0,4752 +430739,large_bow,0,4751 +429487,double_handjob,0,4750 +1378403,purple_leotard,0,4748 +1298744,frilled_choker,0,4746 +1281052,brown_bow,0,4746 +466015,kitsune,0,4740 +439983,knees,0,4739 +1384761,blue_hoodie,0,4738 +1393856,hooded_coat,0,4730 +515350,knees_to_chest,0,4721 +531754,long_bangs,0,4721 +679252,solid_oval_eyes,0,4715 +418178,waves,0,4712 +473022,side_braids,0,4709 +479932,strap_pull,0,4702 +410928,multiple_4koma,0,4699 +390918,elbow_pads,0,4698 +433344,pussy_peek,0,4694 +459940,beer_mug,0,4688 +1230827,dark_blue_hair,0,4680 +7770,naked_ribbon,0,4672 +1312162,short_necktie,0,4670 +495851,open_hoodie,0,4663 
+1286651,heart-shaped_box,0,4659 +1076628,holding_ball,0,4657 +667024,mixed_bathing,0,4652 +481294,detached_wings,0,4652 +1553182,grabbing_another's_hair,0,4650 +1582095,bow_hairband,0,4650 +1258905,holding_can,0,4647 +8594,ladle,0,4645 +494456,utility_pole,0,4640 +469544,navel_hair,0,4640 +449974,fur_hat,0,4637 +498156,panty_peek,0,4635 +11990,spring_onion,0,4633 +7469,revolver,0,4633 +475381,striped_scarf,0,4631 +512126,shushing,0,4626 +1862825,one-piece_swimsuit_pull,0,4620 +386870,trident,0,4616 +1326581,brown_vest,0,4613 +430114,popped_collar,0,4601 +419682,bra_strap,0,4601 +1369553,blunt_ends,0,4599 +11935,whistle,0,4591 +10205,notebook,0,4583 +1639310,arthropod_girl,0,4578 +482236,striped_skirt,0,4574 +406023,projectile_cum,0,4572 +626243,aiguillette,0,4572 +387742,gourd,0,4565 +396968,reverse_trap,0,4564 +647130,white_necktie,0,4561 +585249,faceless_female,0,4558 +539284,heart_pasties,0,4548 +489801,pillow_hat,0,4543 +7529,ringlets,0,4542 +248665,money,0,4538 +415000,impossible_shirt,0,4533 +633774,bandaid_on_nose,0,4531 +482941,flight_deck,0,4529 +524012,pinstripe_pattern,0,4526 +5723,fedora,0,4524 +1309652,holding_strap,0,4517 +684687,grabbing_own_ass,0,4516 +1632773,heart-shaped_chocolate,0,4508 +383468,lowleg_bikini,0,4503 +400837,crazy_eyes,0,4498 +511643,single_elbow_glove,0,4496 +888554,cropped_shirt,0,4493 +4022,maebari,0,4492 +396696,evening,0,4487 +550308,>:(,0,4482 +1319919,hands_on_own_knees,0,4480 +1533927,garreg_mach_monastery_uniform,0,4462 +431528,gears,0,4461 +360746,waking_up,0,4460 +517481,winter_coat,0,4457 +394089,lock,0,4453 +698475,showgirl_skirt,0,4444 +1590430,squatting_cowgirl_position,0,4434 +1518573,cable_knit,0,4433 +616137,reverse_grip,0,4429 +510943,orange_bikini,0,4427 +1631539,slime_(substance),0,4424 +177781,cane,0,4413 +1336152,asymmetrical_sleeves,0,4410 +9857,throne,0,4405 +476779,ice_cream_cone,0,4399 +570005,green_pants,0,4399 +1389146,white_neckerchief,0,4396 +400555,business_suit,0,4395 +569436,blood_on_hands,0,4394 +1409996,flower_knot,0,4394 +15294,lightning,0,4392 +1295644,vertical-striped_shirt,0,4391 +4069,sailor,0,4390 +520554,blue_fire,0,4386 +356574,tower,0,4385 +461563,poster_(object),0,4383 +1411020,white_bloomers,0,4381 +707075,tile_wall,0,4378 +496288,orange_(fruit),0,4377 +4174,tomboy,0,4372 +1263450,print_skirt,0,4370 +392700,machine_gun,0,4369 +394179,crotchless,0,4365 +399003,skinny,0,4360 +379975,bound_legs,0,4355 +505613,thigh_grab,0,4352 +553763,unbuttoned_shirt,0,4350 +12857,wedgie,0,4349 +6280,waterfall,0,4348 +378695,statue,0,4343 +463152,poking,0,4335 +291535,pearl_necklace,0,4324 +1269471,leg_tattoo,0,4318 +392453,butterfly_wings,0,4317 +498646,crack,0,4314 +1574663,two-sided_fabric,0,4313 +1392010,blue_bodysuit,0,4312 +8591,bustier,0,4311 +648630,boy_on_top,0,4310 +1390702,multicolored_jacket,0,4309 +409818,polka_dot_bikini,0,4305 +4898,baby,0,4305 +375076,crowd,0,4303 +1576265,bra_visible_through_clothes,0,4302 +669436,streaming_tears,0,4300 +7444,dark_elf,0,4296 +550765,wrist_ribbon,0,4293 +559442,red_buruma,0,4290 +673216,toeless_footwear,0,4286 +5094,snowman,0,4282 +1411717,medium_skirt,0,4280 +1389116,red_scrunchie,0,4280 +1285261,pink_necktie,0,4279 +500990,torn_dress,0,4279 +396001,pocket_watch,0,4276 +568513,animal_on_shoulder,0,4273 +380477,whiskers,0,4271 +464541,bullet,0,4267 +492359,towel_on_head,0,4265 +547742,open_hands,0,4265 +1302924,yellow_shorts,0,4260 +475366,trench_coat,0,4259 +379182,energy,0,4259 +10049,crotch,0,4257 +4478,bodypaint,0,4254 +437197,surgical_mask,0,4251 
+841221,very_dark_skin,0,4246 +9176,boat,0,4244 +1257578,frilled_collar,0,4210 +11620,spread_anus,0,4209 +572,toilet,0,4200 +9172,kitchen,0,4200 +188179,letter,0,4198 +4568,crab,0,4196 +656163,hand_on_another's_cheek,0,4194 +466143,wine_bottle,0,4190 +1970,drawing,0,4189 +405636,locker,0,4189 +1312163,purple_necktie,0,4185 +395984,dress_pull,0,4182 +408969,no_pussy,0,4179 +3093,bear,0,4179 +1515362,yellow_theme,0,4178 +441477,defeat,0,4173 +609001,jacket_removed,0,4173 +1444169,yellow_headwear,0,4168 +640090,red_eyeshadow,0,4164 +427529,fat_man,0,4163 +507242,monster_boy,0,4162 +492418,orange_dress,0,4154 +483830,imagining,0,4153 +13099,hot,0,4152 +596219,sagging_breasts,0,4149 +431758,locked_arms,0,4147 +618610,yellow_gloves,0,4145 +568650,grey_pantyhose,0,4143 +416506,torn_pants,0,4143 +1258092,ribbon-trimmed_legwear,0,4139 +483292,gradient_eyes,0,4134 +723993,bird_tail,0,4133 +1492981,bikini_bottom_aside,0,4129 +579607,towel_around_neck,0,4122 +1313101,green_bowtie,0,4121 +391942,long_tongue,0,4121 +434827,soccer_uniform,0,4119 +1276067,floppy_ears,0,4117 +415277,recording,0,4113 +683424,cat_hair_ornament,0,4111 +539090,imminent_rape,0,4106 +151645,evil_grin,0,4104 +797417,ear_bow,0,4100 +395355,fusion,0,4099 +380931,pirate_hat,0,4097 +3997,landscape,0,4096 +1320211,orange_bowtie,0,4094 +613646,on_desk,0,4091 +4365,tsundere,0,4090 +882518,blue_butterfly,0,4083 +2606,milk,0,4080 +1428577,ryouou_school_uniform,0,4078 +524381,ear_ornament,0,4074 +616376,assisted_exposure,0,4073 +1619425,crescent_pin,0,4071 +589477,argyle_background,0,4066 +10332,crow,0,4066 +400542,road_sign,0,4062 +636788,frilled_pillow,0,4057 +1350141,cat_cutout,0,4054 +596179,anal_tail,0,4052 +6453,death,0,4050 +667200,hand_on_another's_chin,0,4045 +486797,unsheathed,0,4035 +1928,tiger,0,4028 +1389971,nintendo_switch,0,4027 +633106,mask_removed,0,4024 +526090,checkered_skirt,0,4024 +648045,sideways_mouth,0,4023 +686687,hatching_(texture),0,4023 +640153,muted_color,0,4023 +1270233,1koma,0,4022 +1537333,interface_headset,0,4021 +537720,track_pants,0,4021 +485999,coffee_mug,0,4013 +482913,black_suit,0,4012 +657475,back_cutout,0,4011 +426587,yellow_panties,0,4004 +9618,library,0,4003 +477117,raised_eyebrow,0,4003 +468460,torn_skirt,0,4000 +634270,bat_print,0,3999 +1429011,holding_candy,0,3996 +98920,starfish,0,3990 +1505313,romaji_text,0,3990 +3045,alien,0,3987 +1350220,cat_lingerie,0,3983 +1346561,holding_cigarette,0,3981 +609685,two-tone_skin,0,3980 +494236,taking_picture,0,3967 +466061,album_cover,0,3963 +465851,defloration,0,3962 +427758,kiseru,0,3959 +3291,teacher,0,3955 +1403308,virgin_killer_sweater,0,3952 +661261,planted_sword,0,3951 +560881,soap_bubbles,0,3949 +460488,feet_up,0,3949 +509953,holding_paper,0,3947 +677008,school_chair,0,3940 +1235097,blood_on_weapon,0,3938 +586284,heart_background,0,3935 +1405351,mountainous_horizon,0,3929 +16128,you_gonna_get_raped,0,3928 +403588,pink_skin,0,3928 +415839,armchair,0,3927 +4855,school,0,3926 +216327,shy,0,3923 +1245131,bikini_tan,0,3923 +1291919,holding_shield,0,3922 +502180,blue_wings,0,3921 +605808,snout,0,3920 +1303985,blue_buruma,0,3918 +491861,sleeves_pushed_up,0,3916 +376585,stick,0,3912 +516128,backboob,0,3909 +1413412,obijime,0,3907 +725289,multicolored_nails,0,3907 +599652,spoken_blush,0,3906 +1393858,white_camisole,0,3903 +12336,costume,0,3902 +1291748,print_bow,0,3901 +1551196,1980s_(style),0,3900 +4119,suitcase,0,3895 +631144,halftone_background,0,3894 +668478,holding_pen,0,3891 +1289945,orange_jacket,0,3887 +460085,mechanical_wings,0,3885 
+584023,shark_girl,0,3880 +473300,mandarin_orange,0,3876 +551333,uchiwa,0,3874 +605145,soul_gem,0,3874 +1415843,white_ascot,0,3872 +521411,husband_and_wife,0,3872 +397397,cum_pool,0,3871 +614707,no_legwear,0,3869 +491550,lanyard,0,3865 +419496,male_masturbation,0,3864 +1390882,blue_sleeves,0,3858 +1307533,carrot_hair_ornament,0,3855 +582680,unaligned_breasts,0,3852 +380538,bubble_blowing,0,3850 +473410,dragon_wings,0,3846 +1298903,half-closed_eye,0,3837 +649373,command_spell,0,3834 +6381,clover,0,3832 +473818,tissue_box,0,3832 +406620,pouring,0,3831 +390579,colorful,0,3830 +390,demon,0,3827 +1574241,dolphin_shorts,0,3827 +382729,striped_pantyhose,0,3822 +1350193,musical_note_hair_ornament,0,3814 +584126,pointing_up,0,3814 +426470,silk,0,3812 +494868,black_cloak,0,3809 +1253174,employee_uniform,0,3807 +673956,falling_leaves,0,3805 +1342157,fur-trimmed_boots,0,3804 +493515,game_console,0,3803 +466779,belt_collar,0,3803 +399131,barcode,0,3799 +589889,v_over_eye,0,3798 +616844,blood_from_mouth,0,3797 +385804,lifebuoy,0,3793 +1256300,multicolored_skirt,0,3787 +375424,the_pose,0,3783 +675901,frilled_hat,0,3782 +394046,power_armor,0,3780 +411038,:>=,0,3776 +639592,heart_necklace,0,3776 +571473,bandaid_on_knee,0,3770 +1399133,grey_cardigan,0,3770 +464548,clone,0,3768 +1497310,button_gap,0,3767 +432388,leather_jacket,0,3762 +410903,black_skin,0,3761 +406397,against_glass,0,3754 +891436,food_focus,0,3750 +1161088,2021,0,3749 +1246561,brown_scarf,0,3748 +383694,jar,0,3747 +429161,paizuri_under_clothes,0,3746 +473292,blue_socks,0,3744 +706041,cross-shaped_pupils,0,3735 +684169,ambiguous_gender,0,3732 +1312340,red_rope,0,3730 +631542,pointing_at_self,0,3729 +7801,lamia,0,3720 +560091,cat_hood,0,3720 +394795,baggy_pants,0,3718 +382106,fisheye,0,3712 +6376,lake,0,3712 +1555383,fur-trimmed_headwear,0,3711 +1471843,waist_bow,0,3710 +677023,underboob_cutout,0,3709 +1338623,purple_hairband,0,3708 +376168,puddle,0,3707 +414773,aiming,0,3706 +506307,thong_leotard,0,3699 +716981,holding_own_arm,0,3697 +638022,see-through_silhouette,0,3695 +504035,film_grain,0,3687 +11028,swimming,0,3684 +423074,photo_background,0,3682 +422775,dark_nipples,0,3681 +525461,smokestack,0,3681 +1516548,orange_headwear,0,3680 +570382,hands_in_hair,0,3675 +11828,flexible,0,3672 +1344144,metal_collar,0,3672 +1622516,earth_(planet),0,3661 +2191,shop,0,3659 +544274,backwards_hat,0,3658 +1454257,2others,0,3657 +400545,red_moon,0,3653 +379878,thorns,0,3647 +375225,flaccid,0,3644 +538953,zouri,0,3640 +411896,no_nipples,0,3638 +512713,aqua_bow,0,3638 +476596,sode,0,3637 +458486,mother_and_son,0,3636 +1280405,sleeveless_kimono,0,3635 +553771,cheek-to-cheek,0,3632 +1397752,two-tone_shirt,0,3626 +1316028,american_flag_legwear,0,3623 +662240,mini_wings,0,3622 +472867,arm_hug,0,3622 +222138,shrine,0,3622 +386432,leaning,0,3620 +565064,framed,0,3619 +1766885,demon_slayer_uniform,0,3619 +508193,novelty_censor,0,3618 +1304117,yellow_sweater,0,3618 +404134,spread_ass,0,3614 +672073,brown_fur,0,3613 +392612,pentagram,0,3612 +1161089,2022,0,3598 +702916,imminent_vaginal,0,3589 +456376,pince-nez,0,3589 +466449,spider_lily,0,3589 +652486,v-fin,0,3586 +460863,spider_web,0,3579 +1262158,holding_broom,0,3577 +390314,nude_cover,0,3577 +46079,robot_ears,0,3572 +553007,strapless_bikini,0,3572 +443901,shide,0,3568 +618153,pink_scarf,0,3568 +567636,painting_(object),0,3567 +476371,showering,0,3565 +500976,netorare,0,3564 +423625,bikini_lift,0,3564 +2476,hose,0,3563 +2288,octopus,0,3561 +1468959,horror_(theme),0,3561 +1409116,red_sailor_collar,0,3559 
+538327,black_flower,0,3556 +1417504,nanamori_school_uniform,0,3556 +85824,pole,0,3553 +461531,banana,0,3553 +451769,toe_scrunch,0,3552 +1875949,lower_teeth_only,0,3550 +390569,cat_paws,0,3549 +902927,collared_jacket,0,3549 +9799,kunai,0,3548 +1281555,aran_sweater,0,3547 +1392011,purple_bodysuit,0,3546 +1356361,grey_vest,0,3545 +540428,arm_around_shoulder,0,3543 +605759,skirt_suit,0,3542 +522913,multiple_wings,0,3541 +5980,rooftop,0,3536 +1399176,two-tone_fur,0,3536 +399479,fog,0,3529 +3375,chicken,0,3529 +546453,cat_boy,0,3529 +1498357,single_leg_pantyhose,0,3525 +1312906,rei_no_himo,0,3525 +405756,hypnosis,0,3518 +1282899,improvised_gag,0,3516 +586174,neon_trim,0,3516 +578557,digital_media_player,0,3514 +411148,remote_control,0,3514 +376648,fishing_rod,0,3510 +481454,copyright,0,3508 +1477193,lightning_bolt_symbol,0,3506 +1468130,paradis_military_uniform,0,3506 +1345564,partially_unzipped,0,3505 +1770616,blue_gemstone,0,3502 +1730,what,0,3501 +1335474,sleeves_past_elbows,0,3501 +395305,wizard_hat,0,3495 +1330707,cake_slice,0,3493 +621745,brand_name_imitation,0,3491 +1385010,kissing_cheek,0,3491 +382125,rapier,0,3490 +541609,open_door,0,3489 +623390,glowing_weapon,0,3489 +660422,leaning_on_person,0,3488 +6424,cage,0,3488 +481427,steepled_fingers,0,3485 +589286,purple_rose,0,3482 +638728,print_gloves,0,3476 +667663,elbow_rest,0,3474 +1391985,grey_hoodie,0,3473 +1409115,green_sailor_collar,0,3470 +1328598,holding_bowl,0,3469 +1328649,orange_flower,0,3466 +516646,macaron,0,3460 +375919,demon_boy,0,3458 +717483,flower-shaped_pupils,0,3455 +718139,food_print,0,3452 +581054,keyboard_(computer),0,3449 +566804,single_sleeve,0,3449 +2709,needle,0,3448 +468016,;q,0,3442 +9275,butt_plug,0,3440 +383907,swimsuit_aside,0,3436 +380328,pompadour,0,3434 +7646,slave,0,3432 +643102,loose_necktie,0,3431 +525997,print_dress,0,3426 +485061,hair_tucking,0,3425 +524552,shiny_pokemon,0,3424 +1684874,laevatein_(touhou),0,3423 +576406,folded_fan,0,3420 +400245,opaque_glasses,0,3419 +410131,perky_breasts,0,3418 +11912,wrench,0,3418 +4434,violin,0,3418 +378993,energy_sword,0,3413 +411511,adjusting_swimsuit,0,3412 +1419007,stirrup_legwear,0,3406 +460475,naked_sweater,0,3406 +652354,too_many,0,3405 +664058,orange_ribbon,0,3405 +754200,oppai_loli,0,3403 +378543,egg_vibrator,0,3402 +384248,trefoil,0,3402 +379189,floor,0,3400 +477228,drum,0,3399 +483981,american_flag,0,3396 +12570,armpit_hair,0,3394 +8255,dancer,0,3393 +560040,framed_breasts,0,3390 +382440,pantylines,0,3389 +4768,sheep,0,3387 +441110,office_chair,0,3384 +1315028,lower_body,0,3378 +11366,policewoman,0,3376 +513303,afloat,0,3376 +1312614,blue_hakama,0,3375 +4191,sushi,0,3368 +531372,mouse_girl,0,3367 +9510,cream,0,3364 +575181,o-ring_bottom,0,3363 +207834,mascara,0,3351 +458795,suction_cups,0,3350 +10769,transformation,0,3346 +383173,train,0,3336 +634393,heart_choker,0,3335 +1411341,hooded_capelet,0,3331 +446070,lion_tail,0,3327 +1257056,holding_towel,0,3322 +554588,suit_jacket,0,3321 +482172,contrail,0,3320 +507625,calligraphy_brush,0,3319 +548668,excalibur_(fate/stay_night),0,3316 +1560432,reverse_outfit,0,3315 +1483713,multiple_rings,0,3314 +723098,after_anal,0,3310 +262264,tight_pants,0,3305 +1330167,american_flag_dress,0,3305 +554092,food_on_body,0,3304 +711562,striped_tail,0,3303 +1247747,hair_strand,0,3302 +458711,hand_under_clothes,0,3301 +639641,bra_removed,0,3298 +449248,scope,0,3298 +543339,bandaids_on_nipples,0,3297 +514260,left-handed,0,3297 +422830,paper_bag,0,3297 +611884,uneven_eyes,0,3293 +463490,spacecraft,0,3288 +395198,test_tube,0,3287 
+4470,duel,0,3286 +667460,real_life_insert,0,3284 +1219136,hand_on_own_arm,0,3284 +2073,hairpods,0,3283 +675353,frilled_gloves,0,3283 +1421763,blue_capelet,0,3283 +147971,stylus,0,3283 +430704,medical_eyepatch,0,3283 +5171,map,0,3282 +1413751,print_bowtie,0,3281 +450123,staring,0,3275 +479792,circle,0,3273 +1416797,pink_cardigan,0,3272 +408052,gym_shorts,0,3270 +1374290,covered_collarbone,0,3266 +522010,character_profile,0,3264 +1303383,blue_scrunchie,0,3262 +1305030,green_leotard,0,3255 +684190,gradient_sky,0,3255 +1582181,string_of_fate,0,3252 +8295,concept_art,0,3249 +49941,lemon,0,3248 +375251,egyptian,0,3247 +10359,middle_finger,0,3242 +151042,envelope,0,3238 +393891,no_shirt,0,3235 +1636976,footwear_bow,0,3234 +750849,holding_mask,0,3232 +10983,shotgun,0,3231 +1504889,rudder_footwear,0,3230 +386503,binoculars,0,3222 +1233722,maid_bikini,0,3221 +380805,flashing,0,3221 +411223,father_and_son,0,3221 +626303,mismatched_gloves,0,3218 +163224,fins,0,3217 +1475826,holding_pokemon,0,3217 +392360,stage,0,3215 +619121,hair_beads,0,3214 +459009,penis_on_face,0,3213 +641554,tomoe_(symbol),0,3211 +687681,partially_undressed,0,3209 +521852,pussy_juice_trail,0,3206 +758409,taur,0,3206 +403195,squirrel_ears,0,3206 +1335364,stomach_tattoo,0,3205 +687202,pussy_juice_stain,0,3203 +1238711,pink_shorts,0,3202 +16144,rubber_duck,0,3201 +674492,dress_bow,0,3200 +5567,pizza,0,3200 +2553,basketball,0,3198 +455038,capri_pants,0,3198 +724241,holding_camera,0,3197 +1641895,gloved_handjob,0,3196 +1612103,milestone_celebration,0,3195 +394619,furisode,0,3192 +638426,arm_belt,0,3190 +399418,shore,0,3188 +1227744,multiple_crossover,0,3186 +419379,bandage_over_one_eye,0,3185 +1337096,white_vest,0,3185 +508748,horn_bow,0,3183 +548829,chewing_gum,0,3183 +613840,happy_halloween,0,3183 +619633,baozi,0,3180 +1387105,pink_leotard,0,3179 +1512867,multiple_hair_bows,0,3177 +1229697,drop_shadow,0,3174 +447030,candy_apple,0,3174 +396737,striped_bra,0,3170 +409293,open_dress,0,3169 +459215,finger_gun,0,3169 +481814,background_text,0,3169 +1422082,digimon_(creature),0,3167 +234,gothic,0,3165 +471771,pumps,0,3164 +535308,body_markings,0,3164 +540545,rod_of_remorse,0,3162 +562136,lip_biting,0,3161 +379713,bad_feet,0,3158 +413820,deep_penetration,0,3156 +447189,spitroast,0,3155 +467674,sake_bottle,0,3154 +549346,blue_pantyhose,0,3153 +535048,humanization,0,3152 +1035339,red_headband,0,3151 +387248,banner,0,3148 +10469,duck,0,3147 +1454264,heart-shaped_eyewear,0,3141 +1425516,uranohoshi_school_uniform,0,3140 +560677,arm_rest,0,3139 +1311981,meiji_schoolgirl_uniform,0,3136 +3759,parfait,0,3135 +510576,swim_trunks,0,3131 +411566,nightcap,0,3126 +538859,kindergarten_uniform,0,3125 +1559152,reverse_bunnysuit,0,3125 +492468,plaid_dress,0,3117 +822828,reverse_suspended_congress,0,3114 +684686,grabbing_another's_ass,0,3111 +1410709,ejaculating_while_penetrated,0,3110 +375882,cuts,0,3109 +1408536,holding_lollipop,0,3106 +379595,breast_slip,0,3103 +1400488,pink_neckerchief,0,3103 +536524,tablet_pc,0,3103 +1436828,obiage,0,3100 +425142,submerged,0,3097 +1319842,forked_eyebrows,0,3096 +588209,irrumatio,0,3095 +1582503,scar_on_chest,0,3095 +585211,energy_gun,0,3094 +1341593,bandaged_hand,0,3090 +493348,christmas_ornaments,0,3089 +769705,holding_another's_arm,0,3088 +666708,w_arms,0,3085 +551537,lying_on_person,0,3082 +590746,goggles_around_neck,0,3082 +420283,leotard_aside,0,3081 +502536,paint_splatter,0,3079 +10692,sandwich,0,3076 +7641,piggyback,0,3073 +420128,upright_straddle,0,3072 +601423,yellow_scarf,0,3072 +568804,anus_peek,0,3070 
+1326150,purple_vest,0,3069 +1415656,grey_sailor_collar,0,3068 +473217,green_bra,0,3066 +481604,penis_awe,0,3059 +545136,coat_on_shoulders,0,3058 +676352,pokephilia,0,3056 +1590671,chest_harness,0,3056 +1394528,single_sidelock,0,3055 +10919,lineup,0,3054 +15667,riding_crop,0,3052 +1217551,black_armor,0,3052 +1358847,polos_crown,0,3051 +7631,harpy,0,3050 +599594,prosthetic_arm,0,3048 +438208,throwing,0,3047 +666377,hair_spread_out,0,3045 +592316,colored_pubic_hair,0,3044 +15004,toy,0,3043 +484752,clothes_grab,0,3043 +398608,hair_twirling,0,3042 +424585,viewfinder,0,3040 +511645,single_kneehigh,0,3040 +403196,squirrel_tail,0,3040 +378782,hiding,0,3039 +1613301,obliques,0,3038 +488003,leg_hair,0,3035 +1299132,holding_box,0,3034 +54471,midair,0,3034 +1312165,orange_necktie,0,3030 +1627141,gae_bolg_(fate),0,3028 +464585,tomato,0,3027 +1670357,furry_with_non-furry,0,3027 +626701,bird_ears,0,3025 +1417279,holding_underwear,0,3024 +891880,lion_girl,0,3023 +669594,bat_hair_ornament,0,3022 +1339523,bone_hair_ornament,0,3013 +486697,frilled_swimsuit,0,3012 +676138,asymmetrical_footwear,0,3012 +1409831,kuromorimine_military_uniform,0,3012 +1839475,chips_(food),0,3011 +682128,hands_on_another's_shoulders,0,3002 +646029,egasumi,0,2995 +1408498,japari_symbol,0,2995 +1328271,layered_bikini,0,2991 +656168,hands_on_another's_face,0,2989 +573688,reindeer_antlers,0,2983 +1373025,blue-framed_eyewear,0,2980 +464577,shooting_star,0,2980 +380157,shovel,0,2977 +990162,heart_in_eye,0,2975 +464576,seagull,0,2972 +583009,arm_around_waist,0,2970 +385637,earbuds,0,2968 +466613,slime_girl,0,2964 +7994,fighting,0,2963 +1481015,hands_in_opposite_sleeves,0,2961 +702200,sailor_bikini,0,2961 +474988,speaker,0,2960 +688378,multi-tied_hair,0,2960 +1332539,pubic_hair_peek,0,2956 +513153,magazine_(weapon),0,2956 +390685,darkness,0,2953 +659834,spoken_squiggle,0,2947 +493576,feather_boa,0,2942 +461372,mobile_suit,0,2942 +720478,lifting_person,0,2941 +539465,tiger_girl,0,2941 +378767,bleeding,0,2938 +567734,shikishi,0,2938 +440347,leopard_print,0,2938 +347508,miniboy,0,2936 +1258923,star_in_eye,0,2934 +378810,grinding,0,2930 +557508,loose_belt,0,2930 +1371098,purple_umbrella,0,2930 +600421,after_fellatio,0,2930 +12721,gas_mask,0,2928 +489904,arms_around_neck,0,2926 +1455730,raccoon_girl,0,2926 +470690,pancake,0,2924 +1291177,orange_bodysuit,0,2921 +378151,megaphone,0,2920 +467500,beer_can,0,2920 +11436,deepthroat,0,2919 +506137,jacket_around_waist,0,2918 +4550,grenade,0,2914 +1327681,red_sleeves,0,2914 +461788,scowl,0,2912 +1336138,red_belt,0,2911 +1382349,goggles_on_headwear,0,2904 +488365,letterman_jacket,0,2903 +1277869,horned_headwear,0,2898 +471357,treble_clef,0,2897 +1308608,bag_charm,0,2897 +612379,bubble_tea,0,2897 +447984,vegetable,0,2896 +375460,ceiling,0,2895 +405671,soda_can,0,2894 +692860,alternate_legwear,0,2892 +526914,licking_finger,0,2891 +559692,clothed_pokemon,0,2891 +707150,finger_on_trigger,0,2889 +5597,newspaper,0,2886 +399829,magazine_cover,0,2883 +446690,gloom_(expression),0,2882 +584478,leaf_print,0,2882 +375231,shirt_tug,0,2881 +1301494,spoken_interrobang,0,2880 +1455114,hair_tie_in_mouth,0,2880 +4262,surreal,0,2880 +1235204,bike_shorts_under_skirt,0,2878 +546810,orange_sky,0,2875 +487143,bamboo_forest,0,2872 +399543,flip-flops,0,2871 +2230,guro,0,2869 +588534,in_box,0,2866 +460940,lily_pad,0,2865 +644588,long_eyelashes,0,2865 +713253,cross_hair_ornament,0,2865 +541137,kanzashi,0,2862 +380265,skates,0,2861 +1385298,animal_ear_headphones,0,2861 +1384365,orange_hairband,0,2855 +481088,constellation,0,2848 
+1397450,plantar_flexion,0,2847 +531896,2020,0,2845 +13976,frying_pan,0,2844 +2377,piano,0,2842 +424961,remote_control_vibrator,0,2842 +546133,ghost_tail,0,2841 +1291900,yellow_fur,0,2839 +577161,in_tree,0,2836 +442883,city_lights,0,2835 +483264,yellow_rose,0,2833 +486613,extra,0,2832 +12733,thinking,0,2832 +480578,hagoromo,0,2832 +1273311,vertical-striped_dress,0,2831 +633428,bunny_print,0,2829 +1423707,holding_scythe,0,2829 +847095,sparkle_background,0,2828 +13273,stained_glass,0,2828 +828858,hand_over_own_mouth,0,2825 +1253185,snowflake_hair_ornament,0,2824 +1298076,winged_arms,0,2822 +7526,crotch_rope,0,2818 +629659,food_on_head,0,2818 +9335,5koma,0,2817 +462213,peeing_self,0,2816 +377929,sheep_ears,0,2813 +507697,purple_pantyhose,0,2812 +1327007,holding_axe,0,2811 +1837805,traditional_youkai,0,2811 +2571,mochi,0,2810 +14111,candy_cane,0,2809 +1304169,au_ra,0,2806 +476448,kote,0,2801 +389804,quad_tails,0,2800 +1335455,watson_cross,0,2799 +663513,brown_bag,0,2798 +1853327,bar_(place),0,2796 +722453,thumb_ring,0,2795 +1252645,orange_nails,0,2794 +471821,debris,0,2793 +1794320,bow-shaped_hair,0,2792 +1400482,earclip,0,2790 +549590,uneven_gloves,0,2786 +379306,public_nudity,0,2785 +1552754,breast_curtains,0,2785 +572410,ankle_ribbon,0,2782 +1259682,ribbed_dress,0,2780 +530963,arms_under_breasts,0,2779 +390154,left-to-right_manga,0,2779 +1307725,green_scarf,0,2779 +1298625,open-chest_sweater,0,2778 +585305,ear_ribbon,0,2775 +458232,flats,0,2774 +9921,panda,0,2773 +426621,grey_panties,0,2773 +468765,pith_helmet,0,2773 +16978,writing,0,2772 +513195,bird_on_head,0,2771 +1379954,diffraction_spikes,0,2771 +590943,neck_ruff,0,2770 +662265,object_namesake,0,2769 +1408401,white_scrunchie,0,2769 +1373356,yellow_kimono,0,2767 +511736,heart_in_mouth,0,2767 +475209,button_badge,0,2763 +447780,peeking,0,2761 +4807,spill,0,2758 +1234394,see-through_shirt,0,2755 +662500,caterpillar_tracks,0,2755 +618149,yugake,0,2754 +1315142,pink_scrunchie,0,2754 +4773,puppet,0,2754 +404857,cupcake,0,2753 +501057,breastless_clothes,0,2752 +2641,paint,0,2751 +402497,lotus,0,2750 +634400,light_green_hair,0,2744 +380355,chained,0,2743 +467080,thank_you,0,2742 +633519,american_flag_bikini,0,2740 +1400511,fur-trimmed_hood,0,2739 +1205954,looking_at_penis,0,2738 +413033,gaping,0,2738 +25477,chemise,0,2737 +548543,arm_hair,0,2736 +1874313,covering_own_eyes,0,2735 +484394,four-leaf_clover,0,2735 +649327,wide_ponytail,0,2734 +457141,lace_panties,0,2733 +1344905,holding_dagger,0,2733 +1215711,numbered,0,2733 +610726,shared_clothes,0,2731 +1714873,chest_sarashi,0,2729 +406406,shoe_dangle,0,2727 +652707,domino_mask,0,2727 +4177,lotion,0,2724 +279898,shell_casing,0,2722 +409789,rubber_boots,0,2718 +478568,medal,0,2716 +10624,exercise,0,2713 +2833,collage,0,2711 +1384369,green_hairband,0,2709 +8808,hallway,0,2704 +666648,mitsudomoe_(shape),0,2704 +378,cheese,0,2702 +406895,tunic,0,2702 +743282,layered_clothes,0,2701 +494802,on_shoulder,0,2701 +464876,bento,0,2699 +602690,stuffed_cat,0,2699 +1770615,red_gemstone,0,2695 +1373278,black_ascot,0,2693 +619493,ears_down,0,2691 +395226,shoulder_carry,0,2688 +1441861,orange_footwear,0,2687 +1350604,cat_ear_panties,0,2687 +1435434,two-tone_skirt,0,2685 +1474162,gekkoukan_high_school_uniform,0,2684 +503705,paw_shoes,0,2682 +397148,huge_nipples,0,2680 +433752,yunomi,0,2679 +558702,pinching,0,2676 +1383796,rabbit_hood,0,2675 +15996,moonlight,0,2673 +2490,pirate,0,2673 +656171,hand_on_another's_chest,0,2673 +405984,snot,0,2672 +9002,stethoscope,0,2671 +530091,corruption,0,2670 
+535299,red_sclera,0,2668 +1316609,mole_on_thigh,0,2667 +724362,single_detached_sleeve,0,2664 +417408,keyhole,0,2663 +619992,world_war_ii,0,2662 +626084,bangs_pinned_back,0,2662 +478424,striker_unit,0,2662 +1322674,latin_cross,0,2660 +1474402,qing_guanmao,0,2656 +631299,pillarboxed,0,2655 +12650,subtitled,0,2654 +644051,aiming_at_viewer,0,2652 +399594,urethra,0,2652 +2626,volleyball,0,2652 +640241,helmet_removed,0,2652 +709557,torn_bodysuit,0,2652 +1183480,blue_belt,0,2651 +1471518,horseshoe_ornament,0,2650 +5272,dice,0,2649 +429439,fetal_position,0,2649 +587679,vibrator_under_clothes,0,2648 +11981,kyuubi,0,2644 +465450,trash_can,0,2644 +1242915,rose_print,0,2642 +10731,chainsaw,0,2641 +1312757,bra_peek,0,2641 +420964,hip_bones,0,2640 +3237,goldfish,0,2639 +484121,leg_lock,0,2639 +1390670,breast_tattoo,0,2639 +505313,torn_sleeves,0,2635 +470159,jealous,0,2634 +1383010,purple_choker,0,2634 +1291568,pink_apron,0,2634 +473427,green_thighhighs,0,2632 +1330014,year_of_the_tiger,0,2632 +494260,purple_wings,0,2630 +407041,profanity,0,2629 +375792,kogal,0,2629 +2848,wedding,0,2628 +447824,doorway,0,2627 +1449257,gem_uniform_(houseki_no_kuni),0,2625 +350,ramen,0,2624 +1280149,crescent_earrings,0,2624 +528555,flat_ass,0,2623 +397935,ladder,0,2623 +378452,hand_in_panties,0,2622 +650514,wet_swimsuit,0,2618 +8874,pudding,0,2617 +555933,sword_of_hisou,0,2617 +1441888,over-rim_eyewear,0,2615 +519703,alternate_universe,0,2615 +413035,soccer_ball,0,2614 +515648,coattails,0,2613 +1261354,feather_trim,0,2613 +414783,padlock,0,2612 +1229660,multiple_horns,0,2612 +11316,abstract,0,2611 +1262166,seamed_legwear,0,2605 +15201,soldier,0,2605 +1468295,quarter_note,0,2603 +617590,wa_maid,0,2602 +449458,bullpup,0,2602 +474982,white_robe,0,2602 +667185,burn_scar,0,2600 +1467259,diagonal_bangs,0,2598 +10309,fashion,0,2597 +663385,crossed_ankles,0,2597 +1554926,wide_spread_legs,0,2594 +605757,pant_suit,0,2593 +673352,flaming_eye,0,2591 +675804,nervous_smile,0,2591 +8796,electric_fan,0,2586 +10631,paddle,0,2584 +619600,zombie_pose,0,2584 +1297470,lifted_by_another,0,2583 +400527,skirt_tug,0,2582 +449624,bass_guitar,0,2582 +12326,owl,0,2578 +695265,clitoral_stimulation,0,2577 +1355655,grey_coat,0,2576 +1178138,snowflake_print,0,2576 +1901,orgy,0,2575 +682862,hand_on_another's_hip,0,2573 +1434382,spade_(shape),0,2572 +1444529,linea_alba,0,2572 +469445,sleepwear,0,2572 +534387,heart_pillow,0,2567 +1438791,holding_pom_poms,0,2566 +1515727,strap_between_breasts,0,2565 +11243,shower_head,0,2564 +1397369,year_of_the_ox,0,2563 +1392007,pink_bodysuit,0,2562 +547698,sunburst,0,2561 +32298,duffel_bag,0,2558 +540097,turtle_shell,0,2557 +615041,finger_to_cheek,0,2553 +619145,orange_gloves,0,2552 +609122,multiple_belts,0,2552 +595006,green_sweater,0,2550 +1431325,mismatched_bikini,0,2548 +1395493,eyepatch_bikini,0,2548 +375886,sheep_girl,0,2547 +1376523,weibo_username,0,2544 +1161083,2018,0,2544 +670773,happy_valentine,0,2543 +69844,pig,0,2542 +1518440,mouth_drool,0,2542 +14861,chef_hat,0,2541 +649248,red_pupils,0,2541 +1304208,black_cardigan,0,2539 +622606,anime_coloring,0,2537 +1396558,frilled_hair_tubes,0,2537 +10340,shark,0,2537 +13958,locker_room,0,2535 +525182,santa_bikini,0,2535 +4031,negligee,0,2533 +1344732,striped_jacket,0,2533 +821841,2019,0,2532 +476787,talons,0,2531 +380173,seashell,0,2528 +422957,come_hither,0,2527 +1410787,st._gloriana's_school_uniform,0,2527 +9554,g-string,0,2526 +722842,frilled_kimono,0,2525 +1161081,2016,0,2522 +1416804,orange_scrunchie,0,2521 +1283223,shibari_over_clothes,0,2520 
+1515354,brown_theme,0,2518 +1201870,prone_bone,0,2517 +428664,embers,0,2517 +1582500,scar_on_arm,0,2515 +578713,giving,0,2514 +656916,animal_penis,0,2514 +1336997,green_coat,0,2514 +527684,doll_hug,0,2510 +1338640,yellow_scrunchie,0,2508 +10168,suspension,0,2506 +475709,bandaid_on_pussy,0,2505 +1314965,horizontal_pupils,0,2504 +381372,cold,0,2502 +602427,wrestling_outfit,0,2502 +499060,ainu_clothes,0,2500 +586749,mismatched_footwear,0,2499 +1530508,scar_on_nose,0,2498 +1515361,white_theme,0,2496 +632927,alternate_headwear,0,2496 +1723482,flame-tipped_tail,0,2496 +550123,sitting_on_face,0,2495 +11433,nipple_rings,0,2494 +228096,spacesuit,0,2492 +677406,multiple_earrings,0,2492 +471929,cum_on_stomach,0,2492 +835764,untied_panties,0,2491 +640801,clothes_in_mouth,0,2490 +4011,blade,0,2489 +390993,sigh,0,2487 +9101,cigar,0,2487 +404436,ribbed_shirt,0,2487 +642455,shirt_removed,0,2487 +14074,raincoat,0,2487 +1437532,two-tone_jacket,0,2486 +551584,updo,0,2485 +398742,pastry,0,2484 +402204,holding_panties,0,2483 +495857,arm_around_neck,0,2483 +725441,black_headband,0,2482 +1393295,orange_choker,0,2481 +515468,head-mounted_display,0,2479 +556999,two-footed_footjob,0,2477 +379432,lights,0,2477 +399308,walk-in,0,2476 +405623,dog_boy,0,2474 +633092,arm_held_back,0,2473 +374959,still_life,0,2472 +607667,checkered_scarf,0,2472 +1393855,holding_flag,0,2471 +685091,hand_on_another's_back,0,2471 +388452,sweets,0,2470 +376473,mallet,0,2470 +393166,bloom,0,2470 +10736,spandex,0,2468 +1319452,hand_on_another's_arm,0,2467 +612542,bow_bikini,0,2462 +1538888,kitauji_high_school_uniform,0,2459 +9511,spotlight,0,2458 +533035,undersized_clothes,0,2458 +1451957,naoetsu_high_school_uniform,0,2457 +663171,sun_symbol,0,2456 +495002,skirt_around_one_leg,0,2455 +527607,magazine_(object),0,2455 +461542,red_sash,0,2454 +480444,shouji,0,2452 +883683,white_feathers,0,2450 +523426,hand_grab,0,2450 +10412,shaved_ice,0,2449 +374334,open_shorts,0,2448 +540150,puckered_lips,0,2446 +446993,open_collar,0,2445 +692418,suspender_shorts,0,2443 +472424,skull_and_crossbones,0,2442 +470441,plastic_bag,0,2441 +410525,typo,0,2441 +473040,tombstone,0,2439 +374379,hairy,0,2438 +542457,2015,0,2435 +11943,zero_suit,0,2433 +433704,super_saiyan,0,2428 +1409556,ooarai_military_uniform,0,2427 +409526,thigh_sex,0,2426 +816551,jackal_ears,0,2425 +1515357,orange_theme,0,2424 +1403936,bodysuit_under_clothes,0,2424 +1827216,cube_hair_ornament,0,2422 +375083,tuxedo,0,2421 +2316,carpet,0,2421 +701781,calendar_(medium),0,2420 +637971,heart_tattoo,0,2420 +468923,screaming,0,2419 +13265,cum_on_boy,0,2417 +11717,mohawk,0,2417 +647581,look-alike,0,2416 +1322421,bandaid_on_cheek,0,2416 +3917,toothbrush,0,2415 +486062,on_lap,0,2414 +570082,swirl_lollipop,0,2414 +572679,rubbing_eyes,0,2414 +462454,small_penis,0,2412 +1141250,black_fur,0,2412 +469020,rice_bowl,0,2410 +523488,flower_pot,0,2410 +6339,dusk,0,2409 +1274212,green_cape,0,2409 +617471,female_pov,0,2408 +375037,striped_socks,0,2408 +1257075,number_tattoo,0,2406 +1434570,hadanugi_dousa,0,2405 +691977,egyptian_clothes,0,2405 +7867,orc,0,2401 +418007,o3o,0,2397 +662701,brown_ribbon,0,2397 +514768,grey_sky,0,2395 +505860,tally,0,2394 +390347,flask,0,2391 +1161082,2017,0,2391 +242,princess,0,2389 +862221,club_(weapon),0,2388 +404541,hexagram,0,2388 +1791866,holding_smoking_pipe,0,2388 +645791,patterned_background,0,2386 +445826,phimosis,0,2386 +601821,nape,0,2386 +423600,bit_gag,0,2385 +512534,squirrel_girl,0,2383 +569544,condom_on_penis,0,2382 +1673151,uneven_sleeves,0,2382 
+1835734,red_one-piece_swimsuit,0,2380 +1410099,purple_sleeves,0,2380 +1437301,black_bag,0,2379 +375272,evening_gown,0,2371 +573757,beach_towel,0,2370 +643796,power_symbol,0,2368 +510070,:i,0,2368 +407322,smell,0,2367 +587435,side_cutout,0,2366 +580695,big_belly,0,2366 +13691,bomber_jacket,0,2365 +1304570,cat_ear_headphones,0,2364 +556491,convenient_arm,0,2363 +669792,futa_with_male,0,2362 +453104,cameo,0,2357 +400905,talisman,0,2355 +623327,creator_connection,0,2354 +1513962,tam_o'_shanter,0,2353 +396063,asphyxiation,0,2352 +459289,blue_lips,0,2352 +1268355,carrot_necklace,0,2352 +528325,red_pantyhose,0,2347 +413782,d-pad,0,2340 +297185,drumsticks,0,2339 +493298,police_hat,0,2339 +419192,monkey_tail,0,2339 +456655,broken_glass,0,2339 +440796,triangle,0,2338 +479397,white_headband,0,2336 +3874,sink,0,2335 +474252,path,0,2335 +382047,sunrise,0,2335 +1484339,diagonal-striped_bow,0,2333 +455175,smiley_face,0,2331 +676653,mouse_(computer),0,2331 +1300235,purple_pants,0,2329 +513424,millipen_(medium),0,2329 +613439,roman_numeral,0,2328 +1227710,tail_through_clothes,0,2327 +1695986,tied_up_(nonsexual),0,2326 +622943,cover_image,0,2326 +484578,disembodied_head,0,2324 +517255,plum_blossoms,0,2324 +1373030,pink-framed_eyewear,0,2319 +776463,enpera,0,2318 +395568,cyberpunk,0,2316 +1551114,covered_abs,0,2316 +390096,hill,0,2314 +494832,faux_traditional_media,0,2311 +480812,steam_censor,0,2309 +699975,shoulder_spikes,0,2309 +1441884,rectangular_eyewear,0,2307 +673610,green_choker,0,2307 +526974,coffee_cup,0,2304 +377197,desert,0,2303 +3054,curry,0,2301 +1631337,shimakaze_(kancolle)_(cosplay),0,2301 +12396,ice_cube,0,2299 +425654,beak,0,2297 +1505132,russian_text,0,2297 +398959,trick_or_treat,0,2295 +1407477,black_scrunchie,0,2295 +380573,quill,0,2294 +589610,spider_web_print,0,2294 +16614,diadem,0,2294 +4381,ufo,0,2291 +426146,fur_coat,0,2290 +1787327,shuuchiin_academy_school_uniform,0,2290 +631055,tree_shade,0,2288 +502177,bird_girl,0,2288 +1396985,holding_sheath,0,2287 +578084,black_tail,0,2287 +393321,scratches,0,2287 +466898,strangling,0,2286 +700189,finger_in_another's_mouth,0,2286 +665350,microdress,0,2285 +189096,anal_fingering,0,2284 +634002,matching_outfit,0,2284 +488880,fourth_wall,0,2284 +426384,clitoral_hood,0,2282 +8371,hamster,0,2282 +1343573,holding_arrow,0,2279 +856710,neck_tattoo,0,2279 +2909,cyclops,0,2273 +498627,short_sword,0,2272 +410605,shared_scarf,0,2270 +581705,sitting_on_desk,0,2270 +410379,praying,0,2268 +13606,cervix,0,2268 +506562,tri_tails,0,2264 +222208,eraser,0,2262 +1321704,string_of_flags,0,2262 +160066,driving,0,2261 +11486,jellyfish,0,2261 +419919,diving_mask,0,2261 +1528617,retrofit_(azur_lane),0,2261 +435180,bad_hands,0,2260 +1853,tribadism,0,2257 +666137,aqua_bikini,0,2257 +462978,??,0,2257 +575698,torn_cape,0,2255 +613216,multicolored_legwear,0,2254 +1312065,chinese_knot,0,2254 +1320347,octarian,0,2254 +401614,tennis_uniform,0,2253 +554120,dirty_face,0,2251 +1637933,evolutionary_line,0,2251 +506895,ankle_cuffs,0,2250 +1392360,black_camisole,0,2248 +1533601,sideless_outfit,0,2246 +673694,large_tail,0,2246 +1262143,phone_screen,0,2246 +5871,restaurant,0,2244 +539295,feather_hair,0,2244 +16335,spatula,0,2243 +453895,mechanical_parts,0,2243 +613940,white_cat,0,2242 +1793456,cooperative_fellatio,0,2241 +8721,breast_bondage,0,2241 +1376935,blue_ascot,0,2240 +490993,colored_tongue,0,2240 +1430633,two-tone_bikini,0,2240 +399776,torch,0,2240 +400119,melting,0,2239 +381796,satchel,0,2239 +1524489,looking_at_object,0,2238 +534340,calendar_(object),0,2238 +384086,jet,0,2234 
+854287,ribbed_legwear,0,2231 +543351,crossed_bandaids,0,2231 +548910,chain_necklace,0,2230 +109323,cliff,0,2230 +1252969,white_nails,0,2230 +1342770,strappy_heels,0,2227 +844443,red_umbrella,0,2225 +564233,hand_on_hilt,0,2221 +1514281,shark_hair_ornament,0,2221 +539390,insignia,0,2220 +410599,crotchless_panties,0,2217 +423171,prayer_beads,0,2214 +1286664,kabedon,0,2214 +490692,shading_eyes,0,2213 +434965,studded_belt,0,2213 +1321301,purple_shorts,0,2212 +1391834,feather-trimmed_sleeves,0,2212 +1570687,starter_pokemon_trio,0,2212 +640820,spoken_anger_vein,0,2211 +457376,milk_bottle,0,2211 +1276335,fur-trimmed_legwear,0,2210 +550560,oni_mask,0,2210 +703219,multicolored_wings,0,2205 +615392,hair_horns,0,2205 +14927,dessert,0,2204 +1577661,dynamax_band,0,2204 +3139,samurai,0,2203 +493584,striped_pants,0,2203 +379208,smelling,0,2202 +466731,ear_tag,0,2200 +1443349,blue_cardigan,0,2200 +144057,harem_outfit,0,2200 +381887,caught,0,2199 +1513986,creature_and_personification,0,2198 +712510,bare_hips,0,2197 +1452471,hikarizaka_private_high_school_uniform,0,2196 +539585,sleeping_upright,0,2196 +379758,cave,0,2196 +479466,fine_art_parody,0,2195 +444189,arabian_clothes,0,2195 +1402052,white_cloak,0,2195 +1304064,black_apron,0,2195 +455722,curtain_grab,0,2192 +381727,open_skirt,0,2191 +491230,horseback_riding,0,2190 +1422860,two-tone_swimsuit,0,2190 +427930,liquid,0,2189 +1447394,whistle_around_neck,0,2188 +1447450,d-pad_hair_ornament,0,2187 +638777,ritual_baton,0,2186 +87501,wire,0,2185 +480182,keyboard_(instrument),0,2185 +950553,yoga_pants,0,2185 +687728,hands_on_own_head,0,2184 +664440,lace-trimmed_dress,0,2183 +543455,bolt_action,0,2182 +458230,strapless_bra,0,2181 +423438,racket,0,2181 +458132,winged_hat,0,2180 +514239,holding_condom,0,2180 +482447,tailcoat,0,2180 +1098217,drawing_tablet,0,2177 +1399795,no_eyewear,0,2173 +540767,abstract_background,0,2172 +1297466,pulled_by_another,0,2171 +488260,purple_cape,0,2171 +501554,sweater_around_waist,0,2168 +595850,hooded_cape,0,2167 +513765,broken_horn,0,2164 +633435,magical_musket,0,2162 +1243347,mega_pokemon,0,2161 +8279,church,0,2160 +572264,gold_chain,0,2160 +1344680,holding_doll,0,2159 +379555,graffiti,0,2159 +519892,naked_coat,0,2158 +486210,tie_clip,0,2158 +433619,dreadlocks,0,2157 +1076181,flower_(symbol),0,2157 +10861,town,0,2156 +9032,attack,0,2155 +519804,black_lips,0,2153 +839099,green_lips,0,2152 +482936,long_pointy_ears,0,2150 +1609053,twitching_penis,0,2149 +438849,pill,0,2149 +478965,light_bulb,0,2148 +457115,when_you_see_it,0,2147 +453756,;3,0,2147 +405207,arch,0,2145 +644564,butterfly_print,0,2145 +683211,carrying_over_shoulder,0,2141 +467696,black_rose,0,2139 +389161,french_fries,0,2139 +427771,omelet,0,2138 +637281,mechanical_legs,0,2137 +1446770,instant_loss,0,2136 +2281,festival,0,2135 +1280102,single_hair_intake,0,2135 +1411065,anzio_school_uniform,0,2135 +403072,rocket_launcher,0,2131 +1338775,patterned_clothing,0,2126 +425236,pulling,0,2124 +1315557,grey_bow,0,2123 +553947,>_o,0,2122 +644284,holding_sign,0,2121 +530881,spilling,0,2120 +721674,purple_scarf,0,2119 +713394,hand_on_another's_thigh,0,2118 +1448342,two-tone_gloves,0,2118 +7728,tankini,0,2116 +442279,vial,0,2115 +1411023,st._gloriana's_military_uniform,0,2114 +560724,belly_chain,0,2113 +635980,impossible_leotard,0,2112 +430139,single_vertical_stripe,0,2112 +658525,white_cardigan,0,2112 +514196,cat_print,0,2111 +481778,stage_lights,0,2110 +562075,plaid_bikini,0,2108 +489623,glowstick,0,2106 +403183,nail,0,2105 +5058,surfboard,0,2105 +588453,laurel_crown,0,2105 
+543193,spread_wings,0,2104 +396962,sweatband,0,2101 +465814,video_camera,0,2100 +9534,trumpet,0,2099 +643426,reverse_upright_straddle,0,2099 +540105,fleeing,0,2098 +682638,flower_earrings,0,2097 +1300390,bandaged_head,0,2097 +421313,product_placement,0,2096 +422126,moss,0,2096 +396859,budget_sarashi,0,2096 +420749,turban,0,2095 +586819,energy_wings,0,2094 +1438722,plunging_neckline,0,2092 +1669073,official_alternate_hair_length,0,2092 +497966,penis_grab,0,2091 +551641,thigh_ribbon,0,2091 +5983,briefs,0,2090 +485269,clothes_hanger,0,2089 +423281,mini-hakkero,0,2089 +1605926,see-through_legwear,0,2088 +475076,cat_hat,0,2087 +472623,hammer_and_sickle,0,2087 +437149,tongue_piercing,0,2087 +421506,head_grab,0,2086 +1353605,standing_on_liquid,0,2085 +1726780,egg_(food),0,2085 +1281566,spiked_armlet,0,2085 +417809,denim_skirt,0,2084 +561087,covering_nipples,0,2083 +1420004,blur_censor,0,2082 +1594627,monocle_hair_ornament,0,2081 +648389,spread_fingers,0,2079 +448292,porkpie_hat,0,2078 +1350232,orange_shorts,0,2074 +667237,oversized_animal,0,2073 +661191,aqua_skirt,0,2072 +669534,clothes_down,0,2070 +435549,veranda,0,2069 +442051,dust,0,2069 +1769934,finger_in_own_mouth,0,2068 +541927,manga_(object),0,2068 +402726,spanked,0,2067 +482791,tusks,0,2067 +1338653,fur_scarf,0,2066 +521817,black_feathers,0,2065 +700045,stray_pubic_hair,0,2065 +1289190,vertical-striped_skirt,0,2063 +1269637,hands_on_headwear,0,2061 +488487,ankle_socks,0,2061 +389909,potato_chips,0,2061 +631042,pool_ladder,0,2058 +1307801,feather_earrings,0,2057 +460050,sleep_molestation,0,2055 +473703,car_interior,0,2050 +405548,tail_bell,0,2050 +519626,on_table,0,2050 +12284,gown,0,2049 +1394292,blue_serafuku,0,2048 +527651,naked_sheet,0,2048 +879036,false_smile,0,2048 +6267,whale,0,2048 +3993,mop,0,2043 +1277599,ribbon-trimmed_skirt,0,2043 +491714,santa_dress,0,2042 +1257581,bowl_hat,0,2042 +699964,pocky_day,0,2042 +448198,shako_cap,0,2041 +589583,kappougi,0,2040 +1370858,mechanical_horns,0,2039 +1453261,low_twin_braids,0,2038 +378768,dreaming,0,2037 +1782786,median_furrow,0,2033 +447322,korean_clothes,0,2031 +381083,sketchbook,0,2031 +1327736,purple_coat,0,2030 +653262,back_tattoo,0,2029 +439517,nose_bubble,0,2026 +2130,tanuki,0,2026 +541092,tegaki,0,2026 +419258,child_on_child,0,2023 +466441,sweet_potato,0,2023 +104991,have_to_pee,0,2022 +1254442,scissor_blade,0,2022 +513425,nib_pen_(medium),0,2022 +287085,boxing_gloves,0,2017 +1389873,orange_kimono,0,2017 +8235,shuriken,0,2016 +1465099,black_sports_bra,0,2016 +477241,birthday_cake,0,2014 +615345,bunny_pose,0,2014 +1349485,clover_hair_ornament,0,2012 +784913,wife_and_wife,0,2012 +581551,covered_face,0,2012 +10884,crotch_rub,0,2010 +385780,high_contrast,0,2005 +1343930,black_hakama,0,2005 +1391600,borrowed_garments,0,2004 +711978,clothed_animal,0,2003 +453979,gusset,0,2002 +1282702,wooden_bucket,0,2002 +460720,wind_chime,0,2002 +555866,pillow_grab,0,2002 +1233843,impossible_bodysuit,0,2002 +686220,pink_sleeves,0,2002 +1279634,mating_press,0,2000 +576093,hands_on_lap,0,1999 +511515,grimace,0,1998 +5114,turtle,0,1997 +495234,inflatable_toy,0,1997 +396726,wreath,0,1997 +6327,afro,0,1996 +551169,transparent_umbrella,0,1993 +1721304,dyed_bangs,0,1992 +585,taiyaki,0,1991 +661899,glowing_sword,0,1991 +521722,nengajou,0,1991 +118742,team_rocket,0,1991 +454425,juice_box,0,1989 +493037,drawer,0,1989 +380358,setsubun,0,1989 +1743,baseball,0,1984 +450205,prehensile_hair,0,1984 +580340,picture_(object),0,1983 +408804,bad_proportions,0,1982 +1618015,slime_(creature),0,1982 
+549041,vocaloid_append,0,1982 +1231742,three-dimensional_maneuver_gear,0,1982 +702226,holding_cat,0,1981 +717296,hand_in_another's_hair,0,1981 +416114,symmetry,0,1979 +470701,cube,0,1978 +559145,school_hat,0,1977 +409034,bite_mark,0,1976 +403942,earpiece,0,1974 +4510,crepe,0,1973 +416356,corpse,0,1973 +662965,tokkuri,0,1972 +461237,skewer,0,1972 +379744,counter,0,1972 +1468298,beamed_sixteenth_notes,0,1971 +43623,scepter,0,1971 +10638,boxers,0,1970 +2560,wrestling,0,1970 +1360724,brown_cape,0,1970 +175633,comb,0,1969 +1372686,shark_hood,0,1968 +1427258,holding_sack,0,1967 +1463294,holding_controller,0,1967 +1374594,grey-framed_eyewear,0,1964 +1343802,holding_basket,0,1962 +516012,clothed_masturbation,0,1962 +516930,hanfu,0,1959 +535055,warship,0,1959 +549629,ankle_lace-up,0,1955 +699142,diamond-shaped_pupils,0,1954 +589659,in_bucket,0,1952 +1770614,green_gemstone,0,1951 +1204400,playing_with_own_hair,0,1951 +558703,cheek_pinching,0,1949 +488,flute,0,1949 +408254,tripping,0,1948 +494375,shared_umbrella,0,1948 +416493,red_sky,0,1948 +646637,cocktail_glass,0,1948 +421443,spinning,0,1947 +419280,between_thighs,0,1947 +1240137,swim_briefs,0,1947 +558384,on_grass,0,1944 +1667144,mod3_(girls'_frontline),0,1944 +552818,parted_hair,0,1942 +1637,failure,0,1941 +473206,grey_socks,0,1940 +378083,barrel,0,1940 +666929,light_trail,0,1939 +1392029,multicolored_bodysuit,0,1937 +502382,hyur,0,1937 +422427,sports_bikini,0,1935 +465416,sleeveless_sweater,0,1935 +1456557,hair_through_headwear,0,1931 +608539,column_lineup,0,1930 +574719,multicolored_swimsuit,0,1930 +657424,candlestand,0,1928 +383903,vending_machine,0,1927 +498946,turnaround,0,1926 +1330708,purple_sweater,0,1926 +422821,safety_pin,0,1925 +414746,spider_girl,0,1925 +1334708,holding_paintbrush,0,1925 +499,toast,0,1924 +393832,impregnation,0,1924 +374542,daisy,0,1923 +477184,large_hat,0,1923 +498823,burning,0,1923 +550621,head_back,0,1922 +600284,cheek_poking,0,1921 +605455,volleyball_uniform,0,1920 +1227460,hands_on_another's_head,0,1919 +514943,torn_shorts,0,1919 +5001,squid,0,1915 +1691759,censored_nipples,0,1914 +4864,lion,0,1913 +1435804,bisexual_female,0,1913 +616023,aqua_dress,0,1913 +1442709,fewer_digits,0,1913 +377200,balcony,0,1912 +604468,eye_mask,0,1912 +565089,caustics,0,1911 +1258088,ribbon-trimmed_clothes,0,1911 +1499947,looking_over_eyewear,0,1911 +1314744,yellow_choker,0,1910 +541224,heart_hands_duo,0,1910 +1582499,scar_on_forehead,0,1910 +1401067,face_to_breasts,0,1909 +688904,overskirt,0,1909 +447307,stone,0,1909 +16006,union_jack,0,1907 +478305,striped_sleeves,0,1906 +577309,multiple_condoms,0,1906 +484182,nose_piercing,0,1905 +1399079,holding_drink,0,1905 +381921,energy_ball,0,1905 +7737,soap,0,1904 +452530,ok_sign,0,1904 +1039733,licking_nipple,0,1902 +1606642,white_male_underwear,0,1902 +880368,humanoid_robot,0,1901 +459931,prostitution,0,1900 +1713028,two-tone_bowtie,0,1900 +5707,chaps,0,1899 +379983,under_table,0,1898 +418677,gate,0,1896 +374503,strawberry_shortcake,0,1895 +481420,blood_stain,0,1893 +453886,cosmetics,0,1892 +380490,alarm_clock,0,1889 +1405888,harvin,0,1889 +1296384,multicolored_gloves,0,1888 +530877,lace_bra,0,1887 +490349,head_bump,0,1884 +2834,69,0,1879 +16150,keychain,0,1879 +412179,polka_dot_bra,0,1876 +598410,ceiling_light,0,1875 +468817,flower_wreath,0,1875 +523897,fried_egg,0,1872 +1344665,yellow_vest,0,1871 +1791862,industrial_pipe,0,1870 +1383063,clover_print,0,1870 +536151,snake_tail,0,1870 +486179,naked_cape,0,1869 +419890,rug,0,1869 +4651,dominatrix,0,1867 +492858,lemon_slice,0,1863 
+412890,tissue,0,1862 +420546,spear_the_gungnir,0,1860 +380154,squirrel,0,1860 +585851,bound_ankles,0,1859 +484124,deer_ears,0,1859 +1261917,looking_at_phone,0,1853 +1686527,jack-o'_challenge,0,1853 +1739,shrimp,0,1852 +1412053,magatama_necklace,0,1852 +476158,traffic_light,0,1851 +1700602,thick_arms,0,1851 +433424,if_they_mated,0,1850 +657469,rainbow_order,0,1849 +1926,chart,0,1847 +1408089,star_choker,0,1847 +1404865,brown_sailor_collar,0,1846 +378821,watering_can,0,1845 +1306104,orange_scarf,0,1845 +400230,akeome,0,1845 +4313,pointer,0,1844 +1614785,finger_to_own_chin,0,1843 +8806,train_station,0,1842 +1386147,holding_hammer,0,1840 +617893,sleeping_on_person,0,1840 +413859,footprints,0,1837 +1331430,bandaid_on_arm,0,1837 +1506804,pearl_(gemstone),0,1834 +546293,happy_tears,0,1833 +708792,checkered_necktie,0,1833 +473239,yellow_bra,0,1832 +5494,harem,0,1832 +711883,checkered_kimono,0,1832 +1552758,breast_curtain,0,1832 +406727,against_tree,0,1831 +1440529,two-tone_ribbon,0,1831 +1345684,holding_syringe,0,1830 +7827,whisk,0,1829 +664405,plaid_necktie,0,1829 +706581,condom_packet_strip,0,1828 +440956,fluffy,0,1828 +8781,bomb,0,1827 +521350,color_guide,0,1826 +15412,refrigerator,0,1825 +377878,hairdressing,0,1824 +1371311,aqua_bowtie,0,1824 +460146,hitachi_magic_wand,0,1823 +546937,cracked_skin,0,1821 +1307800,skull_earrings,0,1819 +493288,chasing,0,1818 +394047,circle_cut,0,1817 +1289525,cross_scar,0,1817 +408365,tape_gag,0,1816 +155336,poncho,0,1816 +612699,dirty_clothes,0,1815 +1332003,champagne_flute,0,1815 +1342774,rabbit_house_uniform,0,1815 +1551,drill,0,1814 +1403984,brown_capelet,0,1814 +443220,you're_doing_it_wrong,0,1813 +538158,shared_food,0,1811 +1414514,purple_sailor_collar,0,1811 +428250,plectrum,0,1809 +1793464,cheating_(relationship),0,1808 +528616,drone,0,1808 +1805482,non-humanoid_robot,0,1807 +566340,loose_clothes,0,1807 +1806377,homurahara_academy_school_uniform,0,1806 +3946,faucet,0,1806 +388868,m4_carbine,0,1804 +470667,board_game,0,1803 +514885,after_kiss,0,1802 +612635,sidesaddle,0,1801 +3537,sticker,0,1801 +379794,cafe,0,1800 +638838,in_food,0,1799 +15222,alley,0,1797 +585021,spoken_sweatdrop,0,1797 +621654,pale_color,0,1796 +419998,:x,0,1796 +512237,identity_censor,0,1795 +664066,aqua_ribbon,0,1794 +548122,strawberry_print,0,1791 +382271,destruction,0,1791 +690509,purple_sky,0,1790 +400296,gatling_gun,0,1789 +83990,tennis_racket,0,1789 +10415,railroad_tracks,0,1789 +876848,bandage_on_face,0,1786 +1641903,eye_focus,0,1785 +1489840,white_horns,0,1785 +478617,stripper_pole,0,1785 +1336190,drill_locks,0,1785 +396286,cockpit,0,1784 +716395,dixie_cup_hat,0,1784 +188238,crossbow,0,1783 +1793517,legwear_garter,0,1781 +472,monkey,0,1781 +3342,beltbra,0,1779 +674367,starry_sky_print,0,1778 +390436,birdcage,0,1777 +1076157,futanari_masturbation,0,1776 +478170,anti-materiel_rifle,0,1776 +1231015,character_print,0,1775 +14562,holly,0,1775 +911862,mask_pull,0,1775 +590039,animal_feet,0,1774 +719573,doyagao,0,1773 +10911,pond,0,1773 +1286826,cloud_print,0,1771 +471577,red_socks,0,1771 +511657,alternate_form,0,1770 +375862,butler,0,1769 +521377,winged_helmet,0,1769 +961827,pink_hoodie,0,1767 +148824,dove,0,1766 +1356833,frilled_capelet,0,1764 +610944,mixed_media,0,1762 +433470,fanny_pack,0,1762 +403432,hooves,0,1761 +11395,swing,0,1761 +1343849,fellatio_gesture,0,1761 +1287705,song_name,0,1760 +380828,open_bra,0,1757 +653053,strapless_shirt,0,1757 +390242,variations,0,1756 +1333731,hands_on_own_thighs,0,1756 +1329387,deviantart_username,0,1756 +1448570,gold_earrings,0,1756 
+1373028,yellow-framed_eyewear,0,1753 +478938,impossible_dress,0,1751 +380329,briefcase,0,1750 +1344462,blue_apron,0,1749 +16992,see-through_dress,0,1749 +2684,fishing,0,1747 +412950,rubble,0,1747 +1365001,black_robe,0,1746 +6389,tickling,0,1746 +13185,spitting,0,1745 +228746,spider,0,1745 +655271,naked_jacket,0,1745 +444545,pocky_kiss,0,1744 +629987,radio_antenna,0,1744 +440406,buck_teeth,0,1744 +501689,body_hair,0,1743 +585543,pussy_juice_puddle,0,1742 +4144,mace,0,1742 +1253396,character_hair_ornament,0,1742 +533490,adjusting_gloves,0,1741 +3532,ink,0,1740 +670532,holding_pencil,0,1739 +1438761,high-waist_pants,0,1739 +2431,missile,0,1738 +8309,straitjacket,0,1738 +473259,artificial_vagina,0,1738 +638540,multicolored_bikini,0,1737 +1597364,chain_leash,0,1736 +461218,selfcest,0,1734 +675346,frilled_ribbon,0,1732 +1328016,o-ring_choker,0,1730 +440374,leg_belt,0,1729 +4948,mimikaki,0,1728 +466439,sparks,0,1726 +1521615,skin_fangs,0,1726 +1510922,other_focus,0,1725 +15614,tube,0,1724 +430228,pole_dancing,0,1724 +1617800,see-through_leotard,0,1724 +593715,bird_on_hand,0,1722 +391066,roller_skates,0,1722 +1416546,red_hoodie,0,1722 +1258390,holding_leash,0,1721 +406767,super_robot,0,1721 +633809,eastern_dragon,0,1720 +1353409,fine_fabric_emphasis,0,1719 +555276,surprise_kiss,0,1717 +819855,puffy_shorts,0,1716 +400313,beam_rifle,0,1716 +557675,no_wings,0,1715 +414749,helm,0,1715 +1308909,barcode_tattoo,0,1714 +477329,brushing_hair,0,1711 +429442,patch,0,1711 +1472233,high-waist_shorts,0,1711 +464556,dolphin,0,1710 +1476489,pokemon_on_head,0,1709 +389904,cabinet,0,1706 +1696268,cowboy_western,0,1706 +570969,red_armor,0,1706 +1675931,kalashnikov_rifle,0,1705 +521422,skull_print,0,1705 +688139,implied_futanari,0,1703 +15124,pain,0,1701 +376636,confused,0,1701 +550124,sitting_in_tree,0,1701 +487161,breasts_on_head,0,1700 +547292,reflective_floor,0,1700 +475729,through_clothes,0,1699 +77315,worried,0,1698 +394155,instrument_case,0,1698 +603830,on_person,0,1698 +507655,freediving,0,1695 +1637843,crescent_facial_mark,0,1695 +443879,gorget,0,1695 +1372792,fox_boy,0,1694 +524875,nail_art,0,1693 +473417,yellow_thighhighs,0,1692 +1415469,torpedo_tubes,0,1692 +641695,aerial_fireworks,0,1691 +1253523,\||/,0,1690 +1113198,latex_bodysuit,0,1690 +909672,star_halo,0,1689 +1416333,cherry_blossom_print,0,1687 +11960,tanabata,0,1686 +1427966,yellow_cardigan,0,1686 +450823,x_x,0,1686 +445622,leopard_ears,0,1686 +1489808,poke_ball_symbol,0,1685 +499937,lip_piercing,0,1685 +1233871,circle_name,0,1684 +423915,wooden_sword,0,1684 +468068,spread_toes,0,1684 +859661,red_cloak,0,1683 +468635,coral,0,1682 +520227,cowlick,0,1682 +481514,white_suit,0,1682 +645771,seigaiha,0,1680 +431012,tangzhuang,0,1679 +1410984,kuromorimine_school_uniform,0,1679 +1559079,light_areolae,0,1678 +11745,breast_smother,0,1677 +396795,menu,0,1676 +1378575,monsterification,0,1676 +1284171,after_paizuri,0,1676 +1319620,wing_hair_ornament,0,1676 +1531697,mismatched_pupils,0,1673 +679014,greatsword,0,1673 +377843,cousins,0,1671 +399148,voyakiloid,0,1671 +1383226,ofuda_on_clothes,0,1671 +614243,plaid_background,0,1670 +695921,white_sash,0,1670 +10429,shinsengumi,0,1670 +1441680,holstered_weapon,0,1670 +723924,very_long_fingernails,0,1670 +1819565,jirai_kei,0,1670 +5873,confession,0,1669 +670645,tactical_clothes,0,1668 +495132,autobot,0,1667 +459174,dress_tug,0,1666 +1345660,leaning_on_object,0,1666 +420651,blinds,0,1665 +396872,ribbon_bondage,0,1665 +516894,tokusatsu,0,1664 +469082,milk_carton,0,1663 +392117,insect_wings,0,1662 
+1499389,patchwork_skin,0,1660 +526470,thighhighs_pull,0,1658 +385733,baseball_uniform,0,1658 +622860,spring_(season),0,1658 +494944,flexing,0,1658 +1426293,yasogami_school_uniform,0,1656 +1174935,hand_on_own_leg,0,1654 +1262479,plaid_bowtie,0,1653 +542055,polka_dot_dress,0,1652 +567592,untucked_shirt,0,1649 +610495,bondage_outfit,0,1647 +418420,stuck,0,1647 +1418114,green_hoodie,0,1647 +413866,breasts_on_glass,0,1646 +586859,beckoning,0,1646 +1641153,icho_private_high_school_uniform,0,1646 +476416,biker_clothes,0,1645 +1323958,pink_vest,0,1645 +10293,marker,0,1645 +1346997,holding_whip,0,1644 +1275036,german_clothes,0,1644 +1268348,pawpads,0,1644 +442232,scarlet_devil_mansion,0,1643 +663528,wooden_wall,0,1643 +667138,hikimayu,0,1643 +475212,very_long_sleeves,0,1640 +1307726,magatama_earrings,0,1639 +1684797,grabbing_another's_chin,0,1636 +418987,park_bench,0,1636 +379603,hair_brush,0,1634 +379164,messy,0,1632 +1411720,medium_dress,0,1631 +549281,in_the_face,0,1631 +1242767,star_necklace,0,1631 +427833,palms,0,1631 +1328688,moon_(ornament),0,1630 +437999,full_nelson,0,1629 +444634,torn,0,1629 +403915,wheel,0,1628 +2232,tarot,0,1627 +478821,teasing,0,1627 +539771,incoming_gift,0,1627 +6303,naginata,0,1627 +1623670,diamond_button,0,1626 +1373026,green-framed_eyewear,0,1623 +610198,leotard_pull,0,1622 +660019,color_connection,0,1620 +462556,cuddling,0,1620 +1496652,gold_hairband,0,1619 +599839,horned_helmet,0,1618 +1434344,pencil_dress,0,1618 +1402835,scene_reference,0,1617 +464552,cucumber,0,1616 +1530483,elite_ii_(arknights),0,1616 +8989,viera,0,1612 +540139,different_reflection,0,1612 +1088997,bear_hair_ornament,0,1612 +667586,single_pauldron,0,1612 +815986,fortissimo,0,1612 +1400382,barbell_piercing,0,1611 +544708,fish_girl,0,1611 +454517,unconscious,0,1610 +574422,claw_(weapon),0,1609 +379311,chandelier,0,1608 +377993,petting,0,1608 +450208,chocolate_bar,0,1607 +713479,2014,0,1607 +476495,dowsing_rod,0,1607 +469396,role_reversal,0,1605 +375381,hair_in_mouth,0,1602 +565136,projected_inset,0,1601 +664546,taut_dress,0,1599 +692799,choko_(cup),0,1598 +613128,embellished_costume,0,1596 +1379633,fff_threesome,0,1595 +457418,fighter_jet,0,1595 +442863,heckler_&_koch,0,1594 +393579,mechanical_pencil,0,1593 +514889,polo_shirt,0,1593 +508957,health_bar,0,1593 +684621,puffy_detached_sleeves,0,1593 +700119,cropped_vest,0,1592 +421489,ushanka,0,1591 +403478,suppressor,0,1590 +411198,leg_hold,0,1589 +1396796,green_neckerchief,0,1589 +1260991,pink_pants,0,1588 +669791,futa_with_futa,0,1588 +723472,stiletto_heels,0,1588 +1274842,two-handed,0,1587 +412925,cupless_bra,0,1587 +642312,handsfree_ejaculation,0,1586 +1598810,nontraditional_playboy_bunny,0,1586 +1403286,black_mask,0,1586 +412357,lounge_chair,0,1585 +664263,bikini_top_removed,0,1585 +430449,spine,0,1585 +442852,old_woman,0,1585 +1439570,tall_female,0,1585 +406711,strap-on,0,1584 +1342607,holding_lantern,0,1584 +512488,crotch_plate,0,1583 +679286,faux_figurine,0,1582 +12083,graveyard,0,1582 +668887,shallow_water,0,1580 +1362227,holding_stick,0,1580 +469368,striped_gloves,0,1579 +683983,poke_ball_print,0,1579 +1515352,aqua_theme,0,1578 +388093,hakurei_reimu_(cosplay),0,1578 +600210,perineum,0,1578 +452130,neon_lights,0,1578 +492105,groin_tendon,0,1577 +395436,reindeer_costume,0,1577 +721109,visible_air,0,1577 +592444,mars_symbol,0,1576 +399999,whipped_cream,0,1575 +452157,hagoita,0,1575 +759492,wolf_boy,0,1575 +538746,lipstick_tube,0,1574 +542357,red_cross,0,1573 +405026,large_insertion,0,1573 +410111,bowing,0,1570 +480324,staff_(music),0,1570 
+495020,crazy_smile,0,1570 +397693,shikigami,0,1569 +703456,sorcerer's_sutra_scroll,0,1569 +1426454,holding_clipboard,0,1568 +1505130,german_text,0,1567 +391475,fireflies,0,1567 +29555,office,0,1567 +451262,notepad,0,1566 +1440623,jacket_partially_removed,0,1566 +1514129,sunburst_background,0,1565 +545758,wisteria,0,1563 +550778,checkerboard_cookie,0,1563 +1517305,presenting_armpit,0,1563 +582361,head_between_breasts,0,1562 +1374895,spiked_tail,0,1561 +660754,aqua_shirt,0,1560 +537783,bamboo_broom,0,1559 +1781164,gem_(symbol),0,1559 +1296314,single_gauntlet,0,1558 +15578,eagle,0,1557 +1240006,bishamonten's_pagoda,0,1554 +485203,video_game,0,1553 +585862,reach-around,0,1553 +617315,pantyhose_under_shorts,0,1552 +1438860,black_sash,0,1551 +699609,frilled_cuffs,0,1550 +16279,air_conditioner,0,1549 +657500,fish_hair_ornament,0,1548 +892083,green_apron,0,1545 +2812,ramune,0,1545 +1314693,grey_scarf,0,1545 +1505167,simplified_chinese_text,0,1544 +473599,fingers,0,1543 +9781,pacifier,0,1543 +585009,sword_over_shoulder,0,1542 +399875,beach_chair,0,1541 +1296346,stationary_restraints,0,1540 +501344,fallen_down,0,1539 +1274924,hand_on_another's_ass,0,1539 +671484,stuffed_shark,0,1539 +632854,naked_hoodie,0,1539 +1557024,mouth_veil,0,1539 +8775,mahjong,0,1538 +1464685,collared_cape,0,1537 +394989,hexagon,0,1537 +476602,kusazuri,0,1537 +590941,santa_boots,0,1536 +378038,pentacle,0,1535 +693272,tan_background,0,1535 +1373024,white-framed_eyewear,0,1534 +1775923,circled_9,0,1534 +467745,japanese_flag,0,1533 +453530,power_suit,0,1532 +1665195,reference_inset,0,1531 +1781330,nijigasaki_academy_school_uniform,0,1531 +607070,caressing_testicles,0,1529 +1247633,narrowed_eyes,0,1529 +1791857,painting_(action),0,1528 +633053,hooded_sweater,0,1527 +1835746,pink_one-piece_swimsuit,0,1526 +440361,lipstick_mark,0,1526 +650139,clothed_female_nude_female,0,1526 +1258091,ribbon-trimmed_collar,0,1526 +524796,age_progression,0,1525 +1558733,tulip_hat,0,1525 +3489,dinosaur,0,1523 +598269,kissing_forehead,0,1523 +1328687,earth_(ornament),0,1523 +420294,yellow_skin,0,1522 +1333530,grey_fur,0,1521 +714422,sports_car,0,1520 +664350,tail_censor,0,1520 +9587,love_letter,0,1520 +724196,thigh_pouch,0,1520 +1265860,pink_blood,0,1519 +482956,caution_tape,0,1518 +415621,blueberry,0,1518 +712801,pussy_piercing,0,1517 +472861,ovum,0,1517 +478228,title,0,1515 +503126,ribbon_in_mouth,0,1514 +374573,centaur,0,1514 +10562,truck,0,1513 +628220,claw_ring,0,1511 +541729,rotational_symmetry,0,1510 +1333955,side-tie_leotard,0,1510 +1298471,blood_from_eyes,0,1509 +1424925,holding_needle,0,1509 +1320208,grey_necktie,0,1509 +1452545,kibito_high_school_uniform,0,1509 +656250,arm_between_breasts,0,1506 +629486,used_tissue,0,1504 +1346802,spiked_shell,0,1503 +1678267,palette_(object),0,1502 +485953,puffy_pants,0,1502 +1446284,looking_at_animal,0,1502 +615984,bendy_straw,0,1502 +1560,mascot,0,1500 +1424211,sakugawa_school_uniform,0,1500 +16695,magnifying_glass,0,1499 +1482386,tiger_boy,0,1499 +1923,truth,0,1498 +528496,holding_shoes,0,1498 +415310,overgrown,0,1498 +712381,thigh_cutout,0,1498 +1298573,sanshoku_dango,0,1496 +416448,yagasuri,0,1495 +1672391,viewer_holding_leash,0,1495 +1450243,weibo_logo,0,1494 +635465,twitching,0,1493 +452581,vibrator_under_panties,0,1491 +414700,buttjob,0,1491 +607798,arm_wrap,0,1491 +1859633,a_certain_high_school_uniform,0,1490 +362945,garden,0,1488 +399302,drying,0,1487 +593466,flame_print,0,1487 +1179281,bear_girl,0,1487 +1323483,fangs_out,0,1486 +1396969,holding_innertube,0,1485 +453719,plant_girl,0,1484 
+1390886,brown_kimono,0,1484 +180889,morning,0,1483 +16135,cotton_candy,0,1483 +376147,hanging,0,1482 +460511,hand_puppet,0,1481 +437614,boned_meat,0,1481 +714136,hand_on_another's_stomach,0,1481 +1234797,foot_up,0,1478 +397906,party_hat,0,1478 +709214,alternate_skin_color,0,1478 +520939,wiping_tears,0,1477 +655903,fake_facial_hair,0,1477 +1373018,striped_kimono,0,1477 +10125,halberd,0,1477 +461720,buzz_cut,0,1477 +1637102,doughnut_hair_bun,0,1476 +465754,spanking,0,1476 +544204,bowl_cut,0,1476 +705551,pixiv_id,0,1476 +475794,column,0,1475 +699449,raimon,0,1474 +8918,inflation,0,1473 +516011,playstation_portable,0,1472 +492764,heart_tail,0,1472 +14637,tight_shirt,0,1471 +394771,utility_belt,0,1471 +544443,penis_out,0,1470 +387935,spirit,0,1470 +421056,spiked_club,0,1468 +466411,invisible_penis,0,1467 +1568979,plaid_headwear,0,1467 +617246,leather_belt,0,1467 +1325039,oral_invitation,0,1467 +434522,gym_storeroom,0,1465 +1747766,lapels,0,1464 +625484,glitch,0,1464 +405444,helicopter,0,1463 +533100,sitting_on_object,0,1462 +1609998,after_ejaculation,0,1462 +1411201,kiyosumi_school_uniform,0,1462 +1305007,chest_belt,0,1461 +590857,okamisty,0,1461 +670994,chocolate_on_body,0,1460 +1240596,mole_on_neck,0,1460 +389933,harem_pants,0,1459 +376486,skyline,0,1459 +1495030,red-tinted_eyewear,0,1459 +628115,open_window,0,1458 +14860,chef,0,1458 +480571,mechanization,0,1458 +662800,fake_antlers,0,1458 +9590,jersey,0,1458 +637922,2013,0,1457 +656303,shirt_in_mouth,0,1456 +1441520,horns_through_headwear,0,1456 +398229,people,0,1455 +381954,untying,0,1454 +483333,pavement,0,1454 +392935,tree_stump,0,1453 +410544,handkerchief,0,1452 +1257314,idol_clothes,0,1452 +622601,window_shade,0,1451 +493565,timestamp,0,1451 +418781,pet_play,0,1450 +399061,camcorder,0,1449 +595751,ass_cutout,0,1449 +1605421,halloween_bucket,0,1449 +580394,dirndl,0,1447 +1227348,back-seamed_legwear,0,1445 +642015,expressive_hair,0,1445 +472201,baguette,0,1444 +439406,shiba_inu,0,1444 +1378645,constellation_print,0,1444 +535,molestation,0,1442 +1373313,excessive_pubic_hair,0,1442 +11754,neck,0,1442 +1317604,panty_straps,0,1442 +1476167,holding_handheld_game_console,0,1442 +556841,sitting_on_stairs,0,1441 +504308,redesign,0,1440 +1549885,crane_(machine),0,1439 +974780,stomach_cutout,0,1439 +1267456,holding_leaf,0,1438 +389048,donation_box,0,1437 +523847,omurice,0,1436 +426749,2012,0,1435 +1378370,black_buruma,0,1433 +470025,open_pants,0,1432 +1279772,yellow_wings,0,1432 +473475,orange_thighhighs,0,1431 +547320,excessive_cum,0,1431 +478325,2010,0,1431 +399246,stadium,0,1430 +658254,holding_helmet,0,1430 +1507608,h&k_hk416,0,1430 +1279653,holding_brush,0,1429 +451782,brushing_teeth,0,1427 +461401,treasure_chest,0,1427 +494781,makizushi,0,1427 +1441880,hand_on_eyewear,0,1427 +565996,goat_ears,0,1427 +414107,gamepad,0,1426 +1602480,index_fingers_together,0,1425 +405498,panda_ears,0,1424 +162717,triforce,0,1424 +431374,curtsey,0,1423 +479141,raised_fist,0,1422 +526532,oversized_shirt,0,1422 +1397563,fur-trimmed_skirt,0,1422 +629378,ruffling_hair,0,1422 +1321158,brown_necktie,0,1422 +1442004,eyewear_on_headwear,0,1421 +380988,goblin,0,1417 +1374287,ribbed_bodysuit,0,1417 +673730,palm_leaf,0,1416 +1518970,pectoral_grab,0,1416 +646260,pointless_condom,0,1414 +421385,kanji,0,1414 +381861,\o/,0,1411 +1414389,pink_sailor_collar,0,1409 +8234,radio,0,1408 +545201,micro_panties,0,1407 +587990,bodice,0,1407 +500820,stone_lantern,0,1406 +1422826,sangvis_ferri,0,1406 +384435,onmyouji,0,1405 +1285391,turtleneck_dress,0,1405 
+1667891,pom_pom_hair_ornament,0,1404 +467946,coca-cola,0,1403 +12086,globe,0,1403 +1163924,blue_fur,0,1403 +539536,blue_headband,0,1401 +494162,monkey_ears,0,1399 +1478255,dangle_earrings,0,1397 +665766,carrying_under_arm,0,1396 +501337,39,0,1396 +594236,expressive_clothes,0,1394 +662045,lotion_bottle,0,1394 +534991,pillbox_hat,0,1394 +379633,fertilization,0,1393 +460870,spell_card,0,1392 +536429,cupping_hands,0,1392 +400656,red_hood,0,1392 +431361,baseball_mitt,0,1391 +1598686,male_playboy_bunny,0,1389 +1835740,purple_one-piece_swimsuit,0,1388 +3197,takoyaki,0,1387 +508969,cow_boy,0,1387 +380,priest,0,1386 +1413926,orange_sailor_collar,0,1386 +497917,thrusters,0,1386 +525748,dakimakura_(object),0,1384 +420884,countdown,0,1384 +1613542,electrokinesis,0,1384 +569173,spread_pussy_under_clothes,0,1384 +644441,fiery_hair,0,1383 +480430,bear_print,0,1381 +433011,polka_dot_swimsuit,0,1381 +1436311,u.a._school_uniform,0,1380 +617594,alternate_weapon,0,1379 +396034,propeller,0,1378 +497295,happi,0,1378 +586366,prosthetic_leg,0,1378 +592445,venus_symbol,0,1377 +547081,incoming_attack,0,1377 +610123,forehead-to-forehead,0,1376 +632009,blonde_pubic_hair,0,1376 +683919,ajirogasa,0,1376 +1448374,reflective_water,0,1375 +12254,tribal,0,1374 +450649,shin_guards,0,1374 +663848,naked_kimono,0,1374 +593015,finger_to_face,0,1373 +560134,u_u,0,1373 +11353,twincest,0,1370 +593552,plaid_pants,0,1370 +1415445,nanairogaoka_middle_school_uniform,0,1370 +1230150,leg_between_thighs,0,1369 +549142,yes-no_pillow,0,1369 +401422,screen,0,1369 +1324575,tapir_tail,0,1369 +16095,glomp,0,1368 +389416,impaled,0,1368 +488375,rainbow_gradient,0,1368 +578012,talking_on_phone,0,1367 +638293,novel_cover,0,1367 +1670355,furry_with_furry,0,1367 +646459,striped_shorts,0,1366 +1332978,short_jumpsuit,0,1366 +16601,overcoat,0,1365 +5323,kettle,0,1364 +1624910,sidepec,0,1364 +473249,print_bra,0,1363 +1409636,furrification,0,1363 +723185,stone_floor,0,1362 +389374,cat_teaser,0,1361 +395712,christmas_lights,0,1361 +384969,intravenous_drip,0,1360 +424974,no_testicles,0,1358 +481054,grey_bra,0,1358 +1454017,debt,0,1358 +991822,blue_eyeshadow,0,1356 +14704,nike,0,1355 +419304,crosswalk,0,1355 +659938,poker_chip,0,1355 +494704,2011,0,1355 +572582,masturbation_through_clothes,0,1354 +1344701,grey_border,0,1354 +649700,full-package_futanari,0,1353 +8411,sheet_music,0,1353 +683789,wooden_table,0,1351 +1288040,incoming_food,0,1351 +1249322,hauchiwa,0,1349 +1282898,cloth_gag,0,1349 +394997,sweatpants,0,1349 +1515355,black_theme,0,1348 +1455250,chest_strap,0,1348 +449943,stab,0,1347 +389647,clock_tower,0,1346 +1665347,full-body_tattoo,0,1345 +1249520,drawing_bow,0,1344 +8617,potion,0,1344 +680046,carrying_person,0,1344 +563716,italian_flag,0,1342 +374948,school_briefcase,0,1341 +1373027,brown-framed_eyewear,0,1341 +1268086,pink_bag,0,1341 +593696,leather_gloves,0,1340 +903375,flip_phone,0,1339 +635016,broken_chain,0,1339 +548895,frilled_socks,0,1338 +594785,blowing_kiss,0,1338 +405,soccer,0,1337 +468165,mixing_bowl,0,1337 +395851,log,0,1336 +1454154,black_pubic_hair,0,1336 +375527,kimono_skirt,0,1336 +562826,slapping,0,1334 +4577,snorkel,0,1334 +587202,shoelaces,0,1333 +9355,humiliation,0,1333 +2839,lube,0,1333 +1608327,mixed-language_text,0,1333 +626479,orange_slice,0,1333 +585203,veiny_breasts,0,1333 +1761435,onee-loli,0,1332 +500152,breathing_fire,0,1332 +559601,cheering,0,1331 +505317,lily_of_the_valley,0,1331 +1287296,diving_mask_on_head,0,1330 +473312,camellia,0,1329 +3408,origami,0,1329 +410102,yes,0,1328 +621930,mandarin_collar,0,1328 
+1276269,ribbed_sleeves,0,1327 +1406765,jaguar_ears,0,1327 +408186,frottage,0,1326 +565797,tropical_drink,0,1326 +705858,partially_underwater_shot,0,1326 +624870,imminent_fellatio,0,1326 +1584543,pokedex_number,0,1326 +600401,wooden_fence,0,1325 +1317361,multicolored_bow,0,1325 +1396461,brown_hairband,0,1323 +1387512,orange_ascot,0,1323 +413721,latex_gloves,0,1322 +457946,stone_wall,0,1321 +1402473,blue_sash,0,1320 +1578780,diagonal-striped_necktie,0,1320 +496388,._.,0,1317 +1354736,green_flower,0,1316 +9396,april_fools,0,1316 +568512,bird_on_shoulder,0,1315 +1719547,sailor_moon_redraw_challenge_(meme),0,1314 +11756,cup_ramen,0,1313 +573617,food-themed_clothes,0,1313 +564677,playstation_controller,0,1313 +61654,tail_grab,0,1313 +486968,red_bandana,0,1313 +639779,mechanical_tail,0,1312 +393602,severed_head,0,1311 +58241,platform_heels,0,1311 +482510,plaid_panties,0,1309 +722706,jojo_pose,0,1309 +1793460,cooperative_paizuri,0,1308 +525108,hands_on_feet,0,1307 +8803,cleaning,0,1307 +843166,yellow_hoodie,0,1307 +609486,single_strap,0,1307 +539990,tail_between_legs,0,1306 +1365638,bandaid_on_hand,0,1306 +473470,wrist_guards,0,1305 +9628,scooter,0,1304 +1314748,pink_collar,0,1304 +1422416,twitter_logo,0,1304 +1780,cow,0,1303 +1409793,grey_kimono,0,1303 +546044,sleeveless_hoodie,0,1302 +698497,gloves_removed,0,1301 +563461,full-length_zipper,0,1301 +1262922,head_on_another's_shoulder,0,1301 +1383769,dot_mouth,0,1300 +395433,traditional_clothes,0,1300 +668383,lace-trimmed_sleeves,0,1300 +1259138,mole_on_ass,0,1300 +1410489,shower_(place),0,1299 +477993,bad_perspective,0,1298 +391175,stove,0,1298 +424623,unitard,0,1298 +825568,shortstack,0,1298 +392839,torn_panties,0,1297 +661926,ankle_strap,0,1296 +1574672,two-sided_cape,0,1295 +16550,stream,0,1294 +12727,phonograph,0,1293 +10399,x,0,1293 +15305,steampunk,0,1293 +488193,slap_mark,0,1293 +543247,eyebrow_piercing,0,1293 +416904,cat_on_head,0,1292 +612104,respirator,0,1292 +963327,yordle,0,1292 +592457,nippleless_clothes,0,1291 +389021,deer,0,1290 +388798,hatsune_miku_(cosplay),0,1289 +1606641,black_male_underwear,0,1288 +595244,hamaya,0,1286 +444567,tail_wrap,0,1286 +524686,hand_net,0,1285 +511641,single_pantsleg,0,1285 +1476395,pokemon_on_shoulder,0,1284 +380213,sponge,0,1283 +1504221,purple_tail,0,1283 +1318470,yellow_pupils,0,1282 +1601080,animal_ear_legwear,0,1282 +431872,balancing,0,1281 +629555,desk_lamp,0,1280 +1475076,blue_horns,0,1278 +686531,undone_necktie,0,1277 +477355,chainmail,0,1277 +695246,hakama_pants,0,1276 +1414657,purple_capelet,0,1275 +531186,gold_armor,0,1274 +473618,clitoris_piercing,0,1274 +433212,bookmark,0,1274 +15307,werewolf,0,1274 +374909,whispering,0,1272 +518423,black_leggings,0,1271 +637282,pants_rolled_up,0,1269 +690442,boots_removed,0,1269 +992674,emoji,0,1269 +695232,mismatched_pubic_hair,0,1268 +492677,bolo_tie,0,1267 +534114,cocktail_dress,0,1267 +1507443,sidelighting,0,1266 +410474,picnic_basket,0,1265 +1554055,fiery_horns,0,1265 +11623,bad_end,0,1263 +1238376,multicolored_shirt,0,1263 +8500,eggplant,0,1263 +1325030,new_school_swimsuit,0,1263 +399269,tablecloth,0,1262 +1400049,on_bench,0,1262 +685325,shiny_legwear,0,1261 +1586950,sobu_high_school_uniform,0,1261 +628271,turning_head,0,1260 +1392009,green_bodysuit,0,1260 +457562,fake_mustache,0,1259 +1565319,power_suit_(metroid),0,1258 +652288,alphes_(style),0,1258 +1713829,mithra_(ff11),0,1257 +382610,skateboard,0,1257 +1542756,turtleneck_leotard,0,1256 +1411229,orange_neckerchief,0,1256 +392251,fireplace,0,1256 +1394090,see-through_skirt,0,1256 
+551591,deerstalker,0,1255 +415735,sideways,0,1254 +414232,cum_on_hands,0,1254 +726190,breast_conscious,0,1254 +613598,polka_dot_legwear,0,1254 +418106,dark_penis,0,1253 +4654,flustered,0,1253 +666658,grey_bikini,0,1252 +711344,hand_on_own_shoulder,0,1252 +1237065,sunflower_hair_ornament,0,1251 +1304010,pink_pupils,0,1250 +702474,ribbed_leotard,0,1250 +3334,kigurumi,0,1249 +798294,club_(shape),0,1249 +1429613,tomoeda_elementary_school_uniform,0,1249 +543408,real_world_location,0,1249 +1492501,shinda_sekai_sensen_uniform,0,1249 +4930,doctor,0,1248 +565279,german_flag,0,1248 +616223,no_gloves,0,1248 +1501189,stitched_face,0,1247 +652468,brown_bikini,0,1246 +674500,large_testicles,0,1246 +1762322,greyscale_with_colored_background,0,1246 +507478,aqua_panties,0,1245 +726943,pink_coat,0,1245 +383146,panties_on_head,0,1245 +1274419,spoken_character,0,1244 +519211,ryona,0,1244 +447287,high_kick,0,1243 +9348,wheelchair,0,1243 +1413210,blue_collar,0,1243 +1824446,lycoris_uniform,0,1243 +1349438,animal_bag,0,1242 +491661,hat_tip,0,1242 +496346,impossible_swimsuit,0,1241 +643516,pyrokinesis,0,1241 +632853,fake_wings,0,1240 +603398,lace-trimmed_skirt,0,1239 +1781886,stroking_own_chin,0,1239 +383835,runes,0,1239 +1492898,green_sleeves,0,1239 +443587,sunscreen,0,1238 +427020,stepped_on,0,1238 +575113,kimono_pull,0,1238 +651163,kourindou_tengu_costume,0,1238 +583212,fingering_through_clothes,0,1237 +492714,curry_rice,0,1237 +495137,tulip,0,1236 +330013,pie,0,1236 +488259,skull_mask,0,1236 +11216,soup,0,1235 +11603,paper_airplane,0,1235 +557999,wiping_face,0,1235 +1771225,lord_camelot_(fate),0,1235 +522405,kanabou,0,1234 +684234,perpendicular_paizuri,0,1234 +548540,puckered_anus,0,1234 +588173,sex_machine,0,1233 +420441,teamwork,0,1232 +439204,friends,0,1231 +629461,duffel_coat,0,1231 +176537,bartender,0,1231 +416557,ammunition_belt,0,1230 +179157,tent,0,1230 +510652,flashback,0,1230 +541435,cellphone_picture,0,1227 +465875,age_regression,0,1227 +600052,butterfly_on_hand,0,1227 +518510,dust_cloud,0,1227 +1372592,bath_yukata,0,1227 +1283900,single_fingerless_glove,0,1226 +491621,cheek_bulge,0,1225 +609657,animal_hug,0,1225 +1501551,sakuramon,0,1225 +330372,sausage,0,1224 +603173,molten_rock,0,1223 +3419,shinai,0,1222 +15057,nearly_naked_apron,0,1221 +468159,sparkler,0,1220 +1403859,3d_background,0,1220 +498369,naked_bandage,0,1220 +549321,cum_on_penis,0,1219 +1271959,short_sidetail,0,1218 +434471,angel_and_devil,0,1217 +475249,large_wings,0,1217 +710257,odd_one_out,0,1217 +688960,holding_pillow,0,1216 +1835593,ornate_ring,0,1216 +509739,burnt_clothes,0,1215 +452607,2009,0,1215 +1377225,bruise_on_face,0,1215 +1856853,asticassia_school_uniform,0,1215 +1369179,orange_fur,0,1214 +1461344,papakha,0,1214 +164246,teruterubouzu,0,1212 +408976,bat_ears,0,1212 +9574,sick,0,1211 +11998,open_robe,0,1211 +1410988,pravda_school_uniform,0,1211 +1585295,athletic_leotard,0,1209 +11273,harp,0,1208 +1330230,black_wristband,0,1208 +13916,tempura,0,1206 +577080,hand_on_lap,0,1206 +1388990,two-tone_legwear,0,1205 +995808,penis_size_difference,0,1205 +476199,striped_sweater,0,1205 +2660,lettuce,0,1204 +570296,giving_up_the_ghost,0,1204 +411779,ankh,0,1204 +451184,holographic_interface,0,1203 +547391,winged_footwear,0,1203 +468977,split_screen,0,1203 +660817,opening_door,0,1203 +537130,arm_blade,0,1202 +513421,acrylic_paint_(medium),0,1202 +822829,suspended_congress,0,1201 +374515,hawaiian_shirt,0,1201 +578544,leg_armor,0,1201 +1419859,fur-trimmed_cloak,0,1201 +1281997,asymmetrical_horns,0,1201 +473050,crate,0,1200 
+405415,milking_machine,0,1199 +10246,wig,0,1199 +1361800,bow_earrings,0,1198 +518732,anilingus,0,1197 +735235,eye_of_horus,0,1197 +652086,gathers,0,1196 +271102,ladybug,0,1196 +5632,laser,0,1195 +628425,tiered_tray,0,1195 +10428,wading_pool,0,1194 +494464,uvula,0,1194 +700361,watermelon_bar,0,1194 +656386,hands_on_another's_cheeks,0,1194 +1392006,yellow_bodysuit,0,1193 +685266,sandals_removed,0,1192 +472922,inset,0,1191 +1627136,excalibur_morgan_(fate),0,1191 +389703,ema,0,1190 +1342945,behind_another,0,1190 +8260,ferris_wheel,0,1189 +636872,lizard_tail,0,1189 +251194,gym,0,1188 +15187,machine,0,1186 +5497,fountain,0,1186 +515008,cum_on_self,0,1186 +1292445,torn_scarf,0,1186 +421282,pasta,0,1184 +3493,voyeurism,0,1183 +891632,artificial_eye,0,1183 +671651,hair_ears,0,1183 +1349107,candy_hair_ornament,0,1183 +1090326,bird_mask,0,1182 +1355967,on_vehicle,0,1181 +437001,living_clothes,0,1181 +664061,grey_ribbon,0,1179 +426371,through_wall,0,1179 +1534682,aqua_headwear,0,1179 +542830,chest_of_drawers,0,1178 +658981,open_belt,0,1178 +629694,leopard_tail,0,1178 +1853184,kamiyama_high_school_uniform_(hyouka),0,1178 +6259,park,0,1177 +324075,ballerina,0,1177 +373823,ketchup,0,1175 +1251057,ginkgo_leaf,0,1175 +9706,snail,0,1174 +1360843,neckwear_grab,0,1174 +419773,iphone,0,1174 +12011,potato,0,1173 +456884,brown_panties,0,1172 +511422,newhalf,0,1172 +487203,overcast,0,1172 +1354455,year_of_the_rat,0,1172 +1561175,champion_uniform,0,1172 +456589,leather_boots,0,1171 +374749,heartbeat,0,1171 +541262,disgust,0,1170 +1514953,cropped_shoulders,0,1170 +1504868,eyebrow_cut,0,1170 +504744,load_bearing_vest,0,1169 +1230967,rook_(chess),0,1169 +433522,cheek_squash,0,1168 +648546,belt_boots,0,1168 +1457585,hooded_cardigan,0,1168 +582353,lace-trimmed_gloves,0,1167 +383854,native_american,0,1167 +653049,red_eyeliner,0,1166 +252960,tengu,0,1166 +1494963,orange-tinted_eyewear,0,1166 +379968,breast_expansion,0,1166 +419074,hitting,0,1165 +514538,hands_on_ass,0,1164 +708149,blue_armor,0,1164 +674293,gift_bag,0,1164 +1241321,striped_horns,0,1164 +442124,orange_panties,0,1163 +1394304,honeycomb_(pattern),0,1163 +662440,konohagakure_symbol,0,1163 +667190,plate_armor,0,1163 +1023384,white_serafuku,0,1161 +1506275,riding_pokemon,0,1161 +535323,art_brush,0,1160 +1605653,utensil_in_mouth,0,1160 +494932,hickey,0,1159 +626246,crystal_hair,0,1158 +574252,mismatched_sleeves,0,1157 +1384370,two-tone_hairband,0,1156 +660778,knees_apart_feet_together,0,1154 +412641,steering_wheel,0,1154 +383675,bus_stop,0,1153 +1235654,gradient_clothes,0,1153 +393169,torn_jeans,0,1153 +721796,kesa,0,1153 +16114,chalk,0,1152 +554447,dark_aura,0,1152 +1086828,bow_(music),0,1152 +530749,orange_pantyhose,0,1151 +404788,wrestling_ring,0,1151 +381519,vibrator_in_thighhighs,0,1150 +522500,dark_green_hair,0,1150 +408474,flashlight,0,1150 +456305,pink_pantyhose,0,1150 +494923,futa_on_male,0,1150 +703702,hooded_track_jacket,0,1150 +1399177,brown_cloak,0,1150 +1719544,they_had_lots_of_sex_afterwards_(meme),0,1150 +553041,flower_necklace,0,1149 +211997,battle_axe,0,1149 +1379986,alpaca_ears,0,1149 +501457,lalafell,0,1148 +1405177,purple_belt,0,1146 +1469769,grey_sleeves,0,1146 +3533,laundry,0,1145 +1845182,guiding_hand,0,1145 +483079,shards,0,1145 +1452744,collared_coat,0,1145 +1294981,digitigrade,0,1145 +1349162,holding_balloon,0,1144 +68505,bikesuit,0,1143 +1400652,torpedo_launcher,0,1143 +493245,theft,0,1142 +509961,battle_rifle,0,1142 +724856,low_neckline,0,1142 +11370,island,0,1142 +1465616,eyewear_strap,0,1142 +1682706,phoenix_crown,0,1142 
+419288,cum_on_feet,0,1141 +469760,oven_mitts,0,1141 +641957,bishop_(chess),0,1141 +1589331,off-shoulder_bikini,0,1141 +478077,ready_to_draw,0,1140 +374636,unicorn,0,1140 +781851,user_interface,0,1139 +1509564,holding_game_controller,0,1139 +494838,soda_bottle,0,1139 +5525,chimney,0,1138 +1932,ipod,0,1138 +616555,uchikake,0,1138 +1479378,silver_trim,0,1138 +596634,gradient_legwear,0,1138 +659417,mechanical_ears,0,1136 +1440480,holding_water_gun,0,1136 +397626,guitar_case,0,1136 +1473511,petals_on_liquid,0,1136 +68500,ruler,0,1134 +687207,round_window,0,1134 +62962,buruma_pull,0,1133 +1810603,back_focus,0,1133 +2370,cactus,0,1132 +611089,implied_yuri,0,1131 +441185,ballet_slippers,0,1131 +424682,horse_penis,0,1131 +599606,fuuin_no_tsue,0,1129 +10326,archery,0,1129 +615286,pinching_sleeves,0,1129 +1306233,triangle_print,0,1129 +615369,blue_sclera,0,1129 +10023,toilet_use,0,1128 +1259665,bow_choker,0,1128 +601192,mechanical_hands,0,1128 +1476659,french_text,0,1127 +543093,motherly,0,1127 +533771,kitchen_knife,0,1127 +165653,shirt_grab,0,1127 +1314077,dice_hair_ornament,0,1126 +529177,ootachi,0,1126 +482051,drum_set,0,1125 +563204,dumbbell,0,1125 +526083,brown_socks,0,1124 +682286,title_parody,0,1124 +632801,blue_tongue,0,1124 +451182,grimoire,0,1123 +393428,vaulting_horse,0,1123 +1853211,single_hair_ring,0,1122 +623242,light_censor,0,1121 +1350077,mask_around_neck,0,1121 +4861,lighter,0,1120 +406035,legs_over_head,0,1120 +5884,champagne,0,1120 +482391,bad_food,0,1119 +432965,red_oni,0,1119 +484724,pouty_lips,0,1119 +688664,adjusting_legwear,0,1117 +1262324,shared_bathing,0,1117 +1339894,fishnet_top,0,1117 +1345473,ribbon-trimmed_dress,0,1117 +12413,coffin,0,1117 +1467809,3others,0,1116 +1334628,anchor_print,0,1116 +10938,reindeer,0,1115 +1820926,sleeveless_turtleneck_leotard,0,1114 +1277344,soap_censor,0,1113 +644571,flock,0,1113 +375275,climbing,0,1113 +522628,upshorts,0,1113 +396796,luggage,0,1113 +685558,cat_day,0,1113 +16355,sundae,0,1112 +607136,split_ponytail,0,1112 +471850,cum_in_clothes,0,1111 +422468,satin,0,1111 +642029,queen_(chess),0,1111 +516468,fake_cover,0,1110 +877384,hooded_robe,0,1110 +1764814,compass_rose_halo,0,1110 +705059,fish_print,0,1109 +1260944,heart-shaped_lock,0,1109 +398497,mouth_pull,0,1108 +473421,pink_socks,0,1108 +388012,legs_folded,0,1108 +472508,drum_(container),0,1108 +653485,torn_leotard,0,1108 +842878,liquid_hair,0,1108 +5563,comparison,0,1107 +1399082,heart_button,0,1107 +473988,cooler,0,1105 +707712,consensual_tentacles,0,1105 +413758,ammunition,0,1105 +1346569,ass_support,0,1105 +618005,pennant,0,1104 +1425103,body_freckles,0,1104 +652996,salaryman,0,1104 +10157,honey,0,1103 +723350,weight_conscious,0,1102 +657530,spoken_object,0,1101 +634911,photo_inset,0,1101 +1835748,striped_one-piece_swimsuit,0,1100 +724478,pink_wings,0,1100 +550954,drawing_sword,0,1100 +1262645,yellow_apron,0,1100 +557859,puppet_strings,0,1100 +668848,multiple_monochrome,0,1099 +421120,shell_bikini,0,1099 +519382,decepticon,0,1099 +463031,anal_hair,0,1098 +684567,looking_at_mirror,0,1098 +499809,butterfly_net,0,1097 +375377,boar,0,1097 +1290693,looking_at_breasts,0,1096 +416752,strap_lift,0,1096 +536575,cheek_press,0,1095 +1467811,6+others,0,1095 +1607725,stuffed_winged_unicorn,0,1095 +652002,no_lineart,0,1094 +1409106,head_on_pillow,0,1094 +399923,washing,0,1094 +497039,test_plugsuit,0,1094 +374998,abuse,0,1093 +1411111,keizoku_military_uniform,0,1093 +566734,tanzaku,0,1092 +458295,rising_sun_flag,0,1092 +572545,wall_of_text,0,1092 +1349919,brown_bowtie,0,1092 
+1402459,blue_cloak,0,1092 +1231611,arrow_through_heart,0,1092 +1438305,holding_baseball_bat,0,1091 +547746,patterned,0,1091 +689483,plaid_jacket,0,1091 +414165,cum_on_pussy,0,1090 +575592,glowing_wings,0,1090 +403486,party_popper,0,1090 +661553,grey_belt,0,1089 +462342,after_rape,0,1089 +1247470,chocolate_on_breasts,0,1089 +665720,candy_wrapper,0,1089 +614276,bow_legwear,0,1088 +27649,phallic_symbol,0,1088 +1475838,blue_overalls,0,1088 +496608,master_sword,0,1088 +541640,button_eyes,0,1088 +643517,purple_fire,0,1088 +533033,covering_ass,0,1087 +482734,bicorne,0,1087 +1257558,round_image,0,1087 +575714,c:,0,1087 +388391,screwdriver,0,1086 +419287,swim_cap,0,1085 +436891,combat_boots,0,1085 +15731,clothes,0,1085 +476893,torn_swimsuit,0,1085 +507338,fishnet_gloves,0,1085 +317,bus,0,1084 +598531,dropping,0,1084 +499376,wrinkled_skin,0,1084 +483095,birthmark,0,1084 +1334561,loose_bowtie,0,1083 +1193565,jimiko,0,1082 +1102042,strawberry_hair_ornament,0,1081 +386413,clog_sandals,0,1081 +1411060,saunders_military_uniform,0,1080 +4051,duster,0,1079 +474653,cutting_board,0,1079 +411563,forked_tongue,0,1079 +1361077,mole_above_mouth,0,1078 +608993,uncommon_stimulation,0,1078 +1336227,red_bag,0,1078 +1499492,k/da_(league_of_legends),0,1078 +411498,tearing_clothes,0,1077 +2795,picnic,0,1077 +7702,hairjob,0,1076 +13242,hanetsuki,0,1076 +512928,white_tail,0,1076 +485392,denim_jacket,0,1076 +716813,sword_behind_back,0,1075 +1353739,borrowed_design,0,1075 +663596,aestus_estus,0,1075 +549266,grocery_bag,0,1074 +1258750,mechanical_eye,0,1074 +11435,spreader_bar,0,1073 +457698,comforting,0,1073 +8722,detective,0,1073 +634879,fume,0,1073 +1271601,bunny-shaped_pupils,0,1073 +701116,weighing_scale,0,1072 +417929,tennis_ball,0,1072 +1326209,yellow_sash,0,1072 +665044,holding_own_tail,0,1072 +1367101,holding_scissors,0,1072 +1708252,licking_another's_face,0,1071 +490582,onbashira,0,1071 +1881200,ashford_academy_school_uniform,0,1070 +1327857,red_apron,0,1070 +597826,santa_gloves,0,1070 +1483316,single_horizontal_stripe,0,1069 +479250,tricorne,0,1069 +699876,imminent_anal,0,1069 +473049,doily,0,1068 +1299138,back-print_panties,0,1068 +1750472,galaxy_expedition_team_survey_corps_uniform,0,1068 +555432,head_down,0,1067 +1335609,cross_print,0,1067 +1379921,camouflage_jacket,0,1067 +1391097,grey_neckerchief,0,1067 +1512007,pill_earrings,0,1067 +705129,holding_jacket,0,1065 +547495,morning_glory,0,1065 +1270497,pearl_bracelet,0,1064 +699336,sharp_toenails,0,1064 +1505137,thai_text,0,1064 +1255353,loungewear,0,1064 +502383,elezen,0,1064 +365028,stealth_sex,0,1063 +1579902,nata_(tool),0,1062 +547034,guided_breast_grab,0,1061 +540293,vampire_costume,0,1061 +602058,shoulder_strap,0,1060 +659959,scarf_over_mouth,0,1058 +489995,bulletproof_vest,0,1058 +1274530,angora_rabbit,0,1058 +374852,wakizashi,0,1057 +637714,holding_legs,0,1055 +1445390,aria_company_uniform,0,1055 +4760,campfire,0,1055 +390867,soda,0,1055 +378755,beans,0,1055 +1346040,pink_hakama,0,1055 +1352539,squidbeak_splatoon,0,1055 +204274,upshirt,0,1053 +1440630,pink_capelet,0,1053 +1283735,grey_nails,0,1052 +10639,yarn,0,1052 +378487,telescope,0,1050 +1248044,tooth_necklace,0,1050 +564526,vertical-striped_pantyhose,0,1050 +463235,training_bra,0,1049 +759865,accidental_exposure,0,1049 +49867,summer_festival,0,1049 +479155,necktie_grab,0,1049 +598441,yin_yang_orb,0,1049 +1421701,smokestack_hair_ornament,0,1049 +462605,snack,0,1048 +481999,motorcycle_helmet,0,1048 +612924,"don't_say_""lazy""",0,1048 +527131,canvas_(object),0,1047 +1510188,dildo_riding,0,1046 
+490687,prehensile_tail,0,1046 +610305,hand_to_head,0,1046 +12725,headless,0,1046 +1697967,tassel_hair_ornament,0,1046 +1530487,off-shoulder_jacket,0,1046 +437409,hospital_bed,0,1045 +1265402,heart_balloon,0,1045 +646372,asa_no_ha_(pattern),0,1044 +1381675,holding_stylus,0,1044 +432413,lute_(instrument),0,1044 +682869,extra_mouth,0,1044 +507520,goat_girl,0,1044 +1399019,multicolored_fur,0,1044 +375033,bored,0,1043 +1583922,pointy_footwear,0,1043 +1408058,brown_leotard,0,1043 +1601,clapping,0,1041 +466838,walkie-talkie,0,1040 +550352,hand_on_forehead,0,1039 +989449,paw_print_background,0,1039 +1559228,2000s_(style),0,1038 +439288,basketball_uniform,0,1038 +1326377,black_corset,0,1038 +378166,star_of_david,0,1038 +1468127,survey_corps_(emblem),0,1038 +1557673,pendant_choker,0,1037 +591683,under_kotatsu,0,1037 +5254,thermometer,0,1035 +180296,wetsuit,0,1035 +581056,multiple_braids,0,1035 +375906,hot_dog,0,1035 +573761,trash_bag,0,1035 +1825415,shuujin_academy_school_uniform,0,1035 +10673,vore,0,1034 +374312,pilot,0,1034 +430316,messenger_bag,0,1034 +456535,spit_take,0,1033 +1450741,oohashi_high_school_uniform,0,1032 +658794,poke_ball_theme,0,1032 +515827,frog_print,0,1032 +1344962,egg_hair_ornament,0,1032 +510057,sand_sculpture,0,1031 +433234,erect_clitoris,0,1031 +470625,torn_gloves,0,1031 +558698,sig_sauer,0,1030 +712047,belly_grab,0,1030 +572583,radiation_symbol,0,1029 +542338,snake_hair,0,1029 +643402,shorts_under_dress,0,1028 +760104,hand_on_another's_waist,0,1027 +1768621,heart_o-ring,0,1027 +1401971,backless_leotard,0,1027 +1326818,fur-trimmed_kimono,0,1026 +1610148,poster_(medium),0,1025 +465863,cum_on_legs,0,1025 +428847,ugly_man,0,1024 +398109,ice_skates,0,1023 +578777,outstretched_leg,0,1022 +1575195,crocodilian_tail,0,1022 +1514232,breast_focus,0,1021 +1469130,multiple_straps,0,1021 +618406,yellow_pants,0,1020 +9213,diaper,0,1020 +643912,tail_piercing,0,1020 +1372846,pink_pajamas,0,1018 +269258,chat_log,0,1018 +615253,applying_makeup,0,1017 +9017,kaijuu,0,1017 +1556082,print_headwear,0,1017 +1258484,key_necklace,0,1017 +6160,cocktail,0,1016 +398803,cowboy_boots,0,1016 +1243775,colored_shadow,0,1016 +1330451,multicolored_cape,0,1016 +7877,nintendo_ds,0,1015 +1409572,yellow_leotard,0,1015 +600996,socks_removed,0,1014 +501095,berry,0,1014 +649302,hydrokinesis,0,1014 +572593,noh_mask,0,1014 +1090981,tantou,0,1013 +5231,tonfa,0,1013 +627286,covering_one_eye,0,1013 +671725,purple_eyeshadow,0,1013 +1716159,human_scabbard,0,1013 +407500,easel,0,1012 +1465373,triangle_earrings,0,1012 +1293722,bandaged_neck,0,1011 +394852,hair_flip,0,1010 +1295529,frilled_shorts,0,1010 +669112,broken_weapon,0,1010 +542449,furigana,0,1008 +698204,multiple_piercings,0,1008 +465806,voice_actor,0,1007 +482205,crayon,0,1007 +1386020,st._gloriana's_(emblem),0,1007 +714474,thighhighs_over_pantyhose,0,1006 +385968,player_2,0,1006 +604753,panzerkampfwagen_iv,0,1006 +533783,reins,0,1005 +504063,ninja_mask,0,1005 +1373454,instagram_username,0,1005 +4910,corn,0,1004 +389154,screw,0,1004 +425258,naked_overalls,0,1003 +438516,zabuton,0,1003 +6481,tools,0,1003 +713580,torn_jacket,0,1003 +1604448,duel_academy_uniform_(yu-gi-oh!_gx),0,1002 +1250999,red_apple,0,1002 +1327189,glowing_horns,0,1002 +1734694,musou_isshin_(genshin_impact),0,1002 +1242015,vanishing_point,0,1001 +714848,rectangular_mouth,0,1001 +552812,in_cup,0,1000 +508103,tentacles_under_clothes,0,1000 +1302681,orange_pants,0,1000 +406104,psychic,0,1000 +383197,toilet_paper,0,999 +434844,folding_chair,0,999 +222856,good_end,0,998 +317438,war,0,998 
+1311812,green_hakama,0,998 +586561,penises_touching,0,997 +1505168,traditional_chinese_text,0,997 +375718,daruma_doll,0,997 +1502082,brown_sweater_vest,0,997 +539907,calico,0,996 +1324009,lactation_through_clothes,0,995 +5248,snowball,0,995 +458547,wood,0,995 +1881201,eden_academy_school_uniform,0,995 +727101,overall_shorts,0,994 +3114,thread,0,994 +452907,chewing,0,993 +542208,blank_stare,0,993 +1411474,bc_freedom_military_uniform,0,993 +682863,hand_on_another's_leg,0,991 +489621,corded_phone,0,991 +1516826,drinking_straw_in_mouth,0,991 +1341736,hanten_(clothes),0,989 +1423058,facing_to_the_side,0,989 +416845,toast_in_mouth,0,988 +642887,wiping_sweat,0,988 +601148,huge_bow,0,988 +1353525,from_outside,0,987 +665502,saiyan_armor,0,987 +1353702,hands_on_own_ass,0,987 +576788,drying_hair,0,986 +8664,hoop,0,986 +483367,pornography,0,986 +385619,facepalm,0,986 +620608,no_tail,0,986 +469426,tiger_stripes,0,985 +689166,king_(chess),0,984 +412915,pet_bowl,0,983 +1770613,purple_gemstone,0,983 +687072,dark_areolae,0,983 +8787,cd,0,982 +527949,troll_face,0,982 +1538197,square_4koma,0,982 +1238314,transparent_wings,0,981 +1582502,scar_on_stomach,0,981 +614593,white_snake,0,981 +633878,pursed_lips,0,980 +663918,holding_fishing_rod,0,980 +1360081,purple_scrunchie,0,980 +1193485,dudou,0,979 +1304063,yellow_bag,0,979 +1411064,anzio_military_uniform,0,979 +383664,pier,0,978 +668886,animal_on_lap,0,977 +666509,>o<,0,977 +2374,shopping,0,977 +1366557,ink_tank_(splatoon),0,977 +400507,sailor_senshi,0,976 +487760,under_tree,0,975 +1636694,sleeve_garter,0,975 +1415933,fur-trimmed_shorts,0,975 +1498023,adapted_turret,0,975 +1430264,coin_hair_ornament,0,975 +460344,ear_biting,0,974 +1492155,eyewear_hang,0,974 +565669,telstar,0,973 +430138,double_vertical_stripe,0,973 +662488,palms_together,0,973 +417908,white_tiger,0,973 +538324,manga_cover,0,972 +560409,streamers,0,972 +467246,dotted_line,0,972 +1397657,cat_ear_legwear,0,972 +1335718,vibrator_cord,0,971 +10497,rocket,0,971 +593433,rice_on_face,0,971 +543900,hat_over_one_eye,0,971 +43071,blind,0,971 +590886,bird_legs,0,970 +1476481,multicolored_horns,0,969 +1509772,industrial_piercing,0,969 +16929,barbed_wire,0,968 +1663181,alice_(alice_in_wonderland)_(cosplay),0,968 +406420,popcorn,0,968 +11381,frogtie,0,967 +11835,ballet,0,967 +589292,slashing,0,966 +1443767,team_rocket_uniform,0,966 +1339824,cropped_hoodie,0,966 +504393,looking_outside,0,965 +1609648,cum_on_pectorals,0,965 +1527638,bubble_tea_challenge,0,965 +1793017,griffin_&_kryuger_military_uniform,0,965 +608070,boxer_briefs,0,964 +1515353,grey_theme,0,963 +1638312,footwear_ribbon,0,963 +551459,kine,0,963 +463437,pencil_case,0,963 +677487,brown_wings,0,962 +1484368,white_bag,0,961 +475500,kissing_hand,0,961 +583243,telekinesis,0,961 +1237688,open_bodysuit,0,959 +74341,floating_island,0,959 +1595384,blank_censor,0,959 +1303272,print_jacket,0,959 +1295636,shark_costume,0,959 +456565,flying_kick,0,958 +1457953,sparse_pubic_hair,0,958 +1440492,green_capelet,0,958 +1347286,yellow_coat,0,958 +445618,wet_dress,0,958 +643256,tentacle_pit,0,958 +1324856,virgin_killer_outfit,0,958 +583869,character_censor,0,957 +393237,bandolier,0,957 +1396368,frilled_ascot,0,956 +4638,drugs,0,956 +669932,wrist_wrap,0,956 +1582504,scar_on_neck,0,956 +568887,single_boot,0,956 +569329,catholic,0,956 +478362,kepi,0,956 +1720557,swimsuit_cover-up,0,956 +593295,red_border,0,955 +494539,texture,0,955 +471805,pastel_colors,0,954 +548034,keystone,0,954 +514359,naked_scarf,0,953 +8173,bokken,0,952 +1478006,holding_vegetable,0,952 
+1282931,raimon_soccer_uniform,0,952 +394999,spooning,0,951 +80452,unzipping,0,951 +665405,white_umbrella,0,951 +1629910,star_brooch,0,951 +500760,flower_ornament,0,951 +1373023,purple-framed_eyewear,0,950 +476104,saber_(weapon),0,950 +567635,portrait_(object),0,950 +565478,skirt_basket,0,949 +1070114,single_stripe,0,949 +447171,lowleg_pants,0,949 +1575185,polka_dot_headwear,0,949 +1342773,fleur_de_lapin_uniform,0,949 +1337534,boobplate,0,948 +464554,dandelion,0,948 +649117,multiple_swords,0,946 +1677790,blood_on_knife,0,946 +1845191,glaive_(polearm),0,946 +4640,hanbok,0,946 +1484444,yellow_butterfly,0,945 +497027,pointy_breasts,0,945 +54102,noose,0,943 +16756,aquarium,0,943 +1426213,multiple_riders,0,943 +381183,brick,0,943 +507308,voile,0,943 +13597,triple_penetration,0,942 +1298917,brown_apron,0,942 +1592986,rabbit_boy,0,942 +477804,rainbow_hair,0,942 +16833,sidewalk,0,941 +1681601,mash_kyrielight_(dangerous_beast)_(cosplay),0,941 +1506800,diamond_(gemstone),0,940 +1382643,flaming_weapon,0,940 +1271930,nanodesu_(phrase),0,940 +1406788,otter_ears,0,940 +3614,stain,0,939 +1350036,crystal_earrings,0,939 +1349615,red_fur,0,939 +1448822,brown_hoodie,0,939 +468645,child_drawing,0,938 +255403,cleaver,0,938 +382573,akanbe,0,937 +1243923,backpack_removed,0,937 +405411,team_9,0,937 +753278,analog_clock,0,936 +603414,space_helmet,0,936 +645622,sleeveless_coat,0,934 +460553,no_eyebrows,0,934 +507379,yellow_belt,0,934 +600871,ar-15,0,934 +468827,pushing,0,933 +519098,yarn_ball,0,933 +660791,fur_cape,0,933 +9535,icing,0,932 +12002,foam,0,932 +605380,vertical-striped_bikini,0,932 +538478,haniwa_(statue),0,932 +474917,peace_symbol,0,931 +396687,hourglass,0,931 +540308,baggy_clothes,0,931 +1201168,undressing_another,0,931 +568529,barefoot_sandals,0,931 +1309935,notched_ear,0,931 +12456,dvd_cover,0,930 +1386653,falchion_(fire_emblem),0,930 +8697,porch,0,929 +453294,houndstooth,0,929 +1407009,japari_bun,0,929 +413107,puffy_cheeks,0,928 +1265662,lace-trimmed_hairband,0,928 +393568,amulet,0,927 +1423578,brown_collar,0,926 +417078,bayonet,0,926 +632074,owl_ears,0,925 +548331,bamboo_steamer,0,924 +394157,papers,0,924 +545481,hand_on_leg,0,924 +584716,camouflage_pants,0,924 +1350589,bandaid_on_forehead,0,924 +1478379,dress_flower,0,924 +540095,bilingual,0,923 +4219,henshin,0,923 +1553320,two-tone_headwear,0,923 +487790,studded_bracelet,0,923 +1430735,black_garter_belt,0,922 +1363208,blue_bag,0,922 +712473,mummy_costume,0,921 +1426299,tokisadame_school_uniform,0,921 +1258824,print_shorts,0,921 +1493305,heel_up,0,920 +471824,searchlight,0,919 +1509089,between_pectorals,0,919 +1427498,holding_pizza,0,918 +673693,kouhaku_nawa,0,918 +1058783,aviator_sunglasses,0,918 +1375732,snap-fit_buckle,0,918 +693639,striped_hoodie,0,918 +1393881,green_bag,0,917 +191357,loose_shirt,0,917 +617955,polka_dot_skirt,0,917 +1312469,purple_hakama,0,916 +409179,smoking_gun,0,915 +657589,crotch_cutout,0,915 +1759071,cetacean_tail,0,915 +1301010,orange_sweater,0,914 +408039,crystal_ball,0,914 +415025,convenience_store,0,914 +390528,seaweed,0,914 +564002,guided_penetration,0,913 +1154025,ankle_wrap,0,913 +517244,anglerfish,0,913 +585400,inverted_cross,0,912 +15147,concert,0,912 +1289607,nursing_handjob,0,912 +1607304,linear_hatching,0,911 +377994,playing,0,911 +448692,saddle,0,911 +643356,dress_removed,0,911 +10014,washing_machine,0,910 +239,valkyrie,0,909 +1518587,striped_headwear,0,909 +710509,antique_firearm,0,908 +1365439,jaguar_print,0,908 +652982,visor_(armor),0,907 +9716,strawberry_panties,0,906 +438209,checkered_flag,0,906 
+1378073,ears_visible_through_hair,0,905 +617340,strapless_swimsuit,0,905 +537085,objectification,0,905 +182494,audience,0,904 +1450864,head_chain,0,904 +663896,hand_on_another's_neck,0,903 +465140,breastfeeding,0,903 +1403674,pink_camisole,0,903 +670641,clothes_between_breasts,0,902 +664838,green_wings,0,902 +5119,pinwheel,0,902 +521714,cursive,0,902 +1274515,double_w,0,902 +663895,hand_on_own_neck,0,901 +474428,blowing,0,901 +692139,penguin_hood,0,901 +690466,monster_energy,0,901 +15469,coconut,0,900 +1334692,side_drill,0,900 +589458,_,0,900 +515225,sperm_cell,0,900 +498585,cute_&_girly_(idolmaster),0,899 +5302,elvaan,0,899 +8482,waiter,0,899 +374917,prison_clothes,0,899 +657147,fur_boots,0,899 +478497,sleep_mask,0,899 +1424580,oda_uri,0,899 +462190,public_use,0,898 +399273,adidas,0,898 +696698,gold_bikini,0,897 +593473,coke-bottle_glasses,0,896 +4047,pickaxe,0,894 +491766,painterly,0,894 +599986,cutting_hair,0,893 +457725,traffic_cone,0,893 +585968,heads-up_display,0,893 +609352,themed_object,0,892 +550434,side_slit_shorts,0,892 +1508825,pouring_onto_self,0,892 +504098,m1911,0,891 +562229,food_stand,0,891 +1258067,hands_on_own_stomach,0,891 +1494889,lion_boy,0,891 +2944,airship,0,890 +600288,tail_feathers,0,890 +502629,bullet_hole,0,889 +471358,bass_clef,0,889 +720344,round-bottom_flask,0,888 +10079,double_dildo,0,888 +577453,lace_gloves,0,888 +15251,undead,0,888 +427033,hologram,0,887 +1323466,brown_nails,0,887 +395164,napkin,0,886 +433870,broken_heart,0,886 +539119,ultra_ball,0,886 +2430,recorder,0,885 +564894,united_states,0,885 +1490333,yellow_sleeves,0,885 +401062,x3,0,885 +1456713,cross_choker,0,884 +607245,cropped_arms,0,883 +579303,tail_ring,0,883 +4281,hole,0,882 +1303240,polka_dot_scrunchie,0,882 +495021,rider_belt,0,882 +570043,pine_tree,0,881 +661864,pink_belt,0,880 +652399,araki_hirohiko_(style),0,880 +1393886,multicolored_kimono,0,879 +1414615,mole_on_stomach,0,879 +514585,plaid_bra,0,879 +581487,hishaku,0,879 +9656,crazy,0,878 +559663,unamused,0,878 +1451576,checkered_sash,0,878 +1437142,purple_hoodie,0,878 +699281,oversized_food,0,877 +500653,mahjong_tile,0,877 +1433794,holding_saucer,0,876 +585602,jeweled_branch_of_hourai,0,876 +1326378,black_umbrella,0,876 +406042,exhausted,0,876 +430683,sling,0,876 +15227,screentones,0,875 +1480168,white_sports_bra,0,875 +668837,ghost_costume,0,875 +395011,tube_dress,0,874 +463435,parka,0,874 +416210,dirty_feet,0,874 +497015,wringing_clothes,0,873 +1582498,scar_on_leg,0,873 +378541,gills,0,872 +468583,melon_bread,0,872 +430376,bear_costume,0,872 +381280,lighthouse,0,872 +716956,puff_and_slash_sleeves,0,872 +607934,tentacles_on_male,0,871 +1593160,unusually_open_eyes,0,871 +1328259,multiple_moles,0,870 +401136,kimono_lift,0,869 +1292517,glowing_butterfly,0,869 +388245,cum_in_nose,0,868 +473178,\n/,0,868 +405504,apron_lift,0,867 +672588,cardigan_vest,0,867 +663869,looking_through_legs,0,867 +681979,double_\m/,0,867 +460270,sparrow,0,865 +138127,art_nouveau,0,865 +1404921,pink_ascot,0,864 +10268,net,0,864 +564789,romper,0,864 +545590,easter_egg,0,864 +1881202,st._chronica_academy_school_uniform,0,864 +1826631,tracen_training_uniform,0,864 +607513,arms_around_waist,0,863 +645655,wall_clock,0,863 +376621,wa_lolita,0,863 +1336108,crime_prevention_buzzer,0,863 +635240,star_pasties,0,862 +418811,acoustic_guitar,0,861 +1786594,tokyo-3_middle_school_uniform,0,860 +637908,adapted_uniform,0,860 +389325,grave,0,859 +388208,orange_hoodie,0,859 +399680,arachne,0,859 +549347,green_pantyhose,0,859 +3912,p90,0,858 +1874891,object_through_head,0,858 
+423080,hat_with_ears,0,857 +546085,brown_bra,0,856 +1300914,leash_pull,0,856 +1470700,black_undershirt,0,855 +1374880,bralines,0,855 +461078,squinting,0,854 +697415,storefront,0,854 +389926,panty_lift,0,853 +1292892,cracked_wall,0,853 +16792,golf_club,0,852 +799867,futasub,0,852 +1504031,white_butterfly,0,852 +428710,buster_sword,0,852 +1371471,anchor_necklace,0,852 +513996,lyrics,0,851 +531189,foliage,0,851 +405020,wheelbarrow,0,851 +1235655,gradient_dress,0,850 +1822470,wataboushi,0,849 +390026,chalice,0,849 +594313,shoulder_holster,0,849 +1384374,grey_hairband,0,849 +529750,glowing_hair,0,849 +1360271,grey_cape,0,849 +472653,stained_panties,0,849 +10056,grill,0,848 +413290,hand_under_shirt,0,848 +1411208,purple_neckerchief,0,848 +1270535,curtained_hair,0,848 +1613886,animal_ear_headwear,0,848 +164255,nudist,0,846 +1243627,penis_peek,0,846 +438748,breast_poke,0,845 +533029,dragging,0,845 +1715534,baton_(conducting),0,845 +468850,tall,0,844 +582333,ojou-sama_pose,0,844 +664894,aqua_gloves,0,844 +457370,masochism,0,844 +570496,struggling,0,844 +675461,rolling_suitcase,0,844 +561654,animal_skull,0,844 +16918,tutu,0,843 +1448672,tsab_ground_military_uniform,0,843 +499780,holding_breath,0,843 +477179,tire,0,842 +1317436,satin_panties,0,842 +1398131,hooded_bodysuit,0,842 +690260,vibrator_on_nipple,0,841 +482315,breast_padding,0,841 +1298677,armpit_cutout,0,841 +1372787,multi-strapped_panties,0,841 +469235,amplifier,0,840 +400231,kotoyoro,0,840 +1208724,string_bra,0,840 +1337502,heart_lock_(kantai_collection),0,840 +393062,skirt_flip,0,839 +626440,season_connection,0,839 +688469,spiked_choker,0,839 +381750,thermos,0,838 +8409,spaghetti,0,838 +1344738,snowflake_background,0,838 +600460,group_picture,0,837 +496226,multiple_legs,0,837 +8698,windowsill,0,837 +1229971,shoulder_cannon,0,837 +1411084,pravda_military_uniform,0,837 +1411077,chi-hatan_military_uniform,0,837 +439167,urethral_insertion,0,836 +46728,excited,0,836 +386926,polar_bear,0,836 +742744,bean_bag_chair,0,835 +555321,hand_gesture,0,835 +1468128,training_corps_(emblem),0,835 +1575514,yuigaoka_school_uniform,0,835 +4517,gymnastics,0,834 +594978,naked_tabard,0,834 +702958,holding_ribbon,0,834 +469710,energy_drink,0,834 +419383,wallet,0,833 +1835765,evangelion_(mecha),0,833 +609535,witch_(madoka_magica),0,833 +1642271,yurigaoka_girls_academy_school_uniform,0,833 +1396676,dressing_another,0,832 +557780,alternate_wings,0,831 +1376753,crescent_print,0,831 +680577,color_trace,0,831 +1316299,multicolored_scarf,0,831 +720999,aqua_jacket,0,831 +383546,stole,0,831 +1242619,fake_nails,0,830 +509088,penis_in_panties,0,829 +646377,kikumon,0,829 +1375970,hair_flowing_over,0,829 +664951,coat_removed,0,829 +571951,flaming_sword,0,828 +1355541,lattice,0,828 +488521,canopy_bed,0,828 +1328348,group_name,0,828 +614256,bunny_hat,0,827 +411205,pigeon,0,827 +1441859,aqua_footwear,0,826 +505302,bar_stool,0,826 +679205,catchphrase,0,826 +1545323,multicolored_headwear,0,826 +1452498,grey_capelet,0,826 +1518971,pectoral_press,0,826 +550074,in_water,0,825 +1345310,collared_vest,0,825 +1407027,jaguar_tail,0,825 +397136,trigram,0,824 +228097,astronaut,0,824 +1377986,riyo_(lyomsnpmp)_(style),0,824 +1475691,hanasakigawa_school_uniform,0,824 +653176,circle_skirt,0,823 +1392086,grey_bodysuit,0,823 +1247424,brick_floor,0,823 +1529528,yellow_raincoat,0,823 +403070,grenade_launcher,0,823 +409820,cat_costume,0,822 +620885,latex_legwear,0,822 +440354,yo-yo,0,822 +440482,leg_wrap,0,822 +556519,electrical_outlet,0,822 +562990,bath_stool,0,821 
+1358423,multicolored_footwear,0,821 +400267,dumpling,0,821 +1281944,anchor_choker,0,821 +511246,blank_speech_bubble,0,820 +1397788,brown_choker,0,820 +472542,beam_saber,0,820 +487949,arm_above_head,0,819 +455483,shorts_around_one_leg,0,819 +394402,wheat,0,818 +15450,onion,0,818 +586471,rounded_corners,0,818 +1605157,colored_nipples,0,818 +476906,spray_can,0,817 +1501670,double_fox_shadow_puppet,0,817 +533099,sitting_on_rock,0,816 +507276,paper_crane,0,816 +628065,eyepatch_removed,0,816 +1324692,pink_fur,0,816 +1232450,sparkle_print,0,816 +1417853,heart_collar,0,816 +1734,karaoke,0,815 +5364,ganguro,0,815 +1409664,floating_scarf,0,815 +1406324,no_blindfold,0,815 +544688,bunching_hair,0,814 +11414,nipple_clamps,0,814 +678345,maneki-neko,0,814 +574527,surcoat,0,814 +473342,imperial_japanese_army,0,814 +1549436,cutout_above_navel,0,814 +1281282,taimanin_suit,0,814 +1327704,nejiri_hachimaki,0,813 +1371202,yellow_cape,0,812 +622973,chicken_(food),0,812 +261984,snow_bunny,0,812 +1092127,condom_belt,0,812 +1281553,german_flag_bikini,0,811 +121795,inline_skates,0,810 +488001,tape_measure,0,810 +10727,bib,0,809 +504316,hands_on_hilt,0,809 +330159,green_tea,0,809 +1318429,pendant_watch,0,809 +15619,sand_castle,0,809 +839892,no_mole,0,809 +14739,hammock,0,808 +393203,handstand,0,808 +1287846,ammunition_pouch,0,808 +1453635,cartoon_bone,0,808 +1161750,boy_sandwich,0,807 +1251054,okobo,0,807 +484561,arm_cuffs,0,806 +488430,seat,0,805 +535613,upturned_eyes,0,805 +1827382,st._theresa's_girls_academy_school_uniform,0,805 +721903,fur_cloak,0,805 +642323,teacher_and_student,0,804 +704182,character_signature,0,804 +713277,no_eyepatch,0,804 +682665,super_soaker,0,804 +1474354,holding_ladle,0,803 +1483518,orange_sleeves,0,803 +413459,inflatable_raft,0,803 +462239,chinese_new_year,0,803 +483045,kinchaku,0,802 +519941,finger_biting,0,802 +399166,cursor,0,802 +684797,hands_on_another's_hips,0,802 +1617036,pokemon_move,0,802 +2477,lightsaber,0,801 +498775,orange_skin,0,801 +9166,windmill,0,801 +1390981,ribbon_braid,0,801 +1296117,sideways_hat,0,801 +1113314,thick_lips,0,800 +553675,ehoumaki,0,800 +1152233,star_tattoo,0,800 +1467949,single_ear_cover,0,800 +1835737,green_one-piece_swimsuit,0,799 +531424,multiple_heads,0,799 +695119,blood_in_hair,0,798 +593299,pink_border,0,798 +1319348,holding_bucket,0,797 +1257023,dog_penis,0,797 +509238,dialogue_box,0,797 +603240,vertical-striped_panties,0,797 +622330,oversized_limbs,0,797 +548294,nib_pen_(object),0,796 +596588,jacket_pull,0,796 +558489,white_armor,0,796 +394916,2008,0,796 +613430,bagged_fish,0,795 +662803,honeycomb_background,0,795 +433742,frozen,0,795 +475494,dissolving,0,795 +1505371,star_guardian_(league_of_legends),0,795 +714122,ball_and_chain_restraint,0,794 +1322883,flag_background,0,794 +659790,eldritch_abomination,0,794 +1648109,hololive_idol_uniform,0,794 +659364,licking_armpit,0,793 +1272545,orange_leotard,0,793 +1320993,print_necktie,0,793 +7947,beltskirt,0,792 +475951,ornament,0,792 +591348,breast_cutout,0,792 +614915,pink_cape,0,792 +1713082,neck_tassel,0,792 +549921,swinging,0,791 +822043,mundane_utility,0,791 +705927,frilled_leotard,0,790 +1569456,pizza_slice,0,790 +1299125,elbows_on_table,0,789 +7700,weightlifting,0,789 +410619,toe_ring,0,788 +701996,nipple_bar,0,788 +1728479,kousaka_kirino's_school_uniform,0,788 +1393397,orange_goggles,0,788 +6352,elephant,0,787 +1293918,bicycle_basket,0,787 +3499,kappa,0,787 +5946,boxcutter,0,787 +592592,personality_switch,0,787 +547300,misunderstanding,0,787 +1544416,clothes_between_thighs,0,787 
+721466,pointy_nose,0,787 +1648167,x-shaped_pupils,0,787 +452532,two-handed_handjob,0,786 +392867,melon,0,786 +702627,ass-to-ass,0,786 +548516,pinstripe_shirt,0,786 +741418,card_parody,0,786 +504065,cephalopod_eyes,0,785 +1391806,two-tone_bow,0,785 +1390068,joy-con,0,785 +5466,mummy,0,784 +398573,cartridge,0,783 +619281,blue_border,0,783 +383898,bullying,0,783 +472408,sneezing,0,783 +4899,stats,0,783 +1365868,holding_envelope,0,783 +496190,kabuto_(helmet),0,783 +1396770,animal_on_arm,0,782 +5547,pet,0,782 +1522400,tented_shirt,0,782 +582033,clothes_tug,0,782 +411403,tongs,0,782 +1488405,mole_on_cheek,0,781 +444001,legband,0,781 +379013,note,0,781 +1409157,flower_tattoo,0,781 +553040,flower_bracelet,0,780 +1577478,super_saiyan_1,0,780 +1398925,speaking_tube_headset,0,780 +468311,tail_hug,0,779 +1495160,mismatched_eyebrows,0,779 +1860041,multiple_drawing_challenge,0,779 +1481466,purple_horns,0,779 +1426179,luna_nova_school_uniform,0,779 +573016,hands_on_shoulders,0,778 +1432417,brown_tail,0,778 +419647,stakes_of_purgatory,0,778 +492703,long_neck,0,777 +723519,chin_strap,0,777 +393452,leaf_umbrella,0,777 +715181,post-apocalypse,0,777 +550385,linked_piercing,0,776 +15836,butter,0,776 +379180,zora,0,776 +1376893,pink_eyeshadow,0,776 +499526,green_socks,0,775 +2730,massage,0,775 +1464853,blue-tinted_eyewear,0,775 +658977,wooden_chair,0,774 +397923,group_hug,0,774 +657414,panties_under_buruma,0,774 +382030,beetle,0,774 +529455,tight_dress,0,774 +1347574,mole_on_body,0,774 +284608,tea_set,0,773 +453888,hand_mirror,0,773 +416380,gokkun,0,773 +399607,hair_dryer,0,773 +643401,dragon_boy,0,773 +463449,sugar_cube,0,772 +501887,levitation,0,772 +467444,kerchief,0,772 +1427360,gold_choker,0,772 +646311,polka_dot_shirt,0,771 +446404,cheek_pull,0,771 +1283338,american_flag_print,0,771 +1411059,saunders_school_uniform,0,771 +501282,lipgloss,0,770 +1352247,variable_fighter,0,770 +375406,clown,0,770 +507447,family_crest,0,770 +416825,omikuji,0,769 +524801,hakurei_shrine,0,769 +641133,aqua_bra,0,769 +214260,keep_out,0,769 +1378265,z-ring,0,769 +435855,messy_room,0,768 +1339258,logo_parody,0,768 +1252372,sitting_on_bench,0,768 +1441858,lap_pillow_invitation,0,768 +474442,plume,0,768 +1136370,sextuplets,0,768 +1303976,print_sarong,0,767 +534139,popped_button,0,767 +1433477,floating_cape,0,767 +1245350,implied_fellatio,0,767 +168769,band_uniform,0,766 +712902,pumpkin_hair_ornament,0,765 +7839,beam,0,765 +375496,kagami_mochi,0,765 +716637,argyle_sweater,0,765 +380200,2007,0,765 +1571287,berry_(pokemon),0,765 +7461,lizard,0,764 +587390,hand_on_headphones,0,764 +1680143,pegasus_knight_uniform_(fire_emblem),0,764 +447159,fishing_line,0,764 +605773,head_on_hand,0,763 +317012,zeon,0,763 +646375,sayagata,0,763 +635045,solid_eyes,0,762 +1340897,blue_feathers,0,762 +1373241,meka_(overwatch),0,762 +1594642,holomyth,0,762 +6527,cart,0,761 +449699,grand_piano,0,761 +460701,pagoda,0,761 +15902,jetpack,0,761 +1008938,clothes_theft,0,760 +1315295,holding_another's_hair,0,760 +683907,jingasa,0,760 +2145,juice,0,759 +691041,yellow_sky,0,759 +499998,sitting_backwards,0,758 +1336500,pointing_at_another,0,758 +606330,condom_box,0,758 +1552258,fish_boy,0,758 +960973,cardigan_around_waist,0,758 +623986,green_sclera,0,758 +5099,pegasus,0,757 +746897,pussy_juice_drip_through_clothes,0,757 +1289376,bishamonten's_spear,0,757 +1539213,flower_over_eye,0,756 +511500,head_on_chest,0,756 +1421046,holding_cane,0,756 +509654,forced_orgasm,0,755 +1548224,butterfly_brooch,0,755 +382343,adjusting_panties,0,754 +457036,steak,0,754 
+1412249,green_scrunchie,0,754 +5817,sauna,0,753 +443238,wardrobe_error,0,752 +3438,japan,0,751 +5402,crowbar,0,751 +1510994,sweaty_clothes,0,751 +1398336,year_of_the_dog,0,751 +527344,body_armor,0,751 +701994,pov_across_table,0,750 +14011,height_chart,0,749 +460435,ivy,0,749 +421692,game_boy,0,749 +426382,bear_tail,0,749 +778226,greek_clothes,0,749 +1039471,prostration,0,748 +1255173,shoujo_kitou-chuu,0,748 +1386173,pixiv_username,0,748 +1366556,splattershot_(splatoon),0,748 +9745,hungry,0,747 +385480,buruma_aside,0,747 +399404,skull_necklace,0,747 +467744,mount_fuji,0,747 +1611404,food-themed_earrings,0,747 +452085,hospital_gown,0,746 +658126,cream_on_face,0,746 +396015,gunblade,0,746 +646003,curled_fingers,0,746 +484040,winding_key,0,745 +9347,puppy,0,745 +1059619,kissing_penis,0,745 +375353,soviet,0,745 +475528,ankle_grab,0,744 +8921,white_day,0,744 +5102,wizard,0,744 +397561,radish,0,744 +471296,infinity,0,744 +693309,sleeve_grab,0,743 +531095,book_hug,0,742 +1375741,extra_faces,0,742 +3742,ferret,0,741 +436459,acorn,0,741 +583137,ear_protection,0,741 +1239921,pink_sky,0,740 +1353065,falling_feathers,0,740 +1496708,hitodama_print,0,740 +1257082,clock_eyes,0,740 +729551,kiwi_(fruit),0,739 +1193090,military_helmet,0,739 +1343008,orange_vest,0,738 +395496,cloth,0,738 +384700,dock,0,738 +541741,strapless_bottom,0,738 +563180,nintendo_3ds,0,738 +386212,diving,0,737 +630151,bikini_shorts,0,737 +1429582,dress_swimsuit,0,737 +9476,minotaur,0,736 +381360,wrong_feet,0,736 +1469257,single_epaulette,0,736 +563949,french_flag,0,735 +1424344,sling_bikini_top,0,735 +458787,testicle_grab,0,734 +450248,sickle,0,733 +429978,between_toes,0,733 +120759,mat,0,733 +474597,mitre,0,733 +1657712,veiny_arms,0,733 +576346,adjusting_necktie,0,732 +1305509,box_of_chocolates,0,732 +596273,behind_back,0,732 +1489407,martial_arts_belt,0,732 +1595590,pectoral_focus,0,732 +393640,aurora,0,731 +554570,jockstrap,0,731 +478042,track_uniform,0,731 +11347,army,0,730 +404973,galaxy,0,730 +1520977,print_mug,0,730 +487783,leather_pants,0,729 +375625,model_kit,0,729 +1344687,holding_letter,0,729 +1344492,on_motorcycle,0,729 +1001710,shell_necklace,0,729 +1303975,blue_sarong,0,728 +489578,whiskey,0,728 +467700,salad,0,728 +548098,mast,0,728 +485599,silver_dress,0,728 +1377852,hair_on_horn,0,728 +642272,above_clouds,0,727 +1427943,yellow_tank_top,0,727 +12070,battleship,0,727 +611065,lace_choker,0,727 +445428,guard_rail,0,726 +564925,polka_dot_ribbon,0,726 +1286244,rei_no_pool,0,726 +490067,holding_head,0,726 +630542,butterfly_sitting,0,725 +1379634,mmm_threesome,0,725 +1392344,white_sarong,0,724 +524545,box_art,0,724 +630404,pumpkin_hat,0,724 +287045,hanami,0,724 +1262145,hands_on_ground,0,724 +1562798,rhodes_island_logo,0,724 +495149,throwing_knife,0,723 +1404665,aqua_neckerchief,0,723 +411775,clothesline,0,723 +483853,bulletin_board,0,722 +665892,cross-laced_legwear,0,722 +1411110,keizoku_school_uniform,0,722 +441997,track_and_field,0,721 +877388,long_tail,0,721 +621690,no_mask,0,721 +678477,shared_speech_bubble,0,720 +434547,oonusa,0,720 +1601728,gladiator_sandals,0,720 +1325214,seal_(animal),0,719 +1660195,long_earlobes,0,719 +474347,affectionate,0,718 +1495060,pants_tucked_in,0,718 +1386603,orange_cape,0,717 +401696,hook,0,717 +552418,detached_hair,0,717 +1282902,solid_circle_pupils,0,716 +571097,crane_(animal),0,716 +486022,vacuum_cleaner,0,716 +409759,cleave_gag,0,716 +1657724,just_the_tip,0,716 +1597931,pancake_stack,0,716 +1419059,blue_tank_top,0,716 +82088,sewing,0,715 +449709,uneven_twintails,0,715 
+1070070,fishnet_bodysuit,0,715 +1481865,brown_corset,0,715 +420279,sock_pull,0,714 +648669,lowleg_skirt,0,714 +12500,pendulum,0,714 +1441951,gold_footwear,0,714 +490118,pool_of_blood,0,713 +468109,tengu_mask,0,713 +523961,hair_between_breasts,0,713 +562139,glove_biting,0,713 +1419480,maid_day,0,713 +1260902,folded_hair,0,712 +1494953,pink-tinted_eyewear,0,712 +4410,dragonfly,0,711 +487992,inkwell,0,711 +1326625,implied_yaoi,0,711 +1273445,miracle_mallet,0,711 +547020,fidgeting,0,710 +707514,stone_stairs,0,710 +1423439,livestream,0,710 +1417085,double_horizontal_stripe,0,709 +109159,nyan,0,708 +119610,triplets,0,708 +561371,qr_code,0,708 +1329132,cherry_hair_ornament,0,708 +500191,rabbit_costume,0,707 +1247324,implied_fingering,0,707 +1466798,holding_money,0,706 +1324059,holding_jewelry,0,706 +663412,holding_own_foot,0,706 +1338731,holding_skull,0,706 +533238,milky_way,0,706 +1450600,very_wide_shot,0,706 +607013,stitched_mouth,0,706 +809507,eye_black,0,706 +675626,in_palm,0,705 +537098,watching_television,0,705 +376977,parrot,0,705 +479840,severed_limb,0,705 +1416325,multicolored_hairband,0,705 +576440,naked_cloak,0,704 +638337,multicolored_stripes,0,704 +572327,splatter,0,704 +467754,foot_hold,0,703 +876691,cum_in_container,0,703 +54531,blood_bag,0,703 +532082,knight_(chess),0,703 +1398335,year_of_the_pig,0,703 +512129,symbolism,0,702 +1386018,ooarai_(emblem),0,702 +1627128,caliburn_(fate),0,700 +1076064,covering_one_breast,0,700 +384589,lyre,0,700 +1247797,bike_shorts_under_shorts,0,700 +1532157,green_eyeshadow,0,700 +1515529,shindan_maker,0,700 +465272,kyuudou,0,699 +378206,joystick,0,699 +615409,bag_of_chips,0,699 +1426900,white_bird,0,699 +1375854,dream_soul,0,699 +393506,shrimp_tempura,0,698 +4374,meta,0,698 +1505135,italian_text,0,698 +971148,underbutt,0,698 +605322,checkered_shirt,0,698 +1353857,blood_on_arm,0,698 +1258093,ribbon-trimmed_headwear,0,697 +694148,candlelight,0,697 +1531641,light_blue_background,0,697 +580277,green_tail,0,696 +470859,purple_socks,0,696 +632045,no_jacket,0,696 +476447,kurokote,0,695 +613159,homu,0,695 +1470859,black_garter_straps,0,694 +502186,broken_window,0,694 +478554,contrast,0,694 +1258486,musical_note_print,0,694 +381761,cola,0,694 +1574670,two-sided_dress,0,694 +649072,grimoire_of_alice,0,693 +394720,afterglow,0,693 +407128,crumbs,0,693 +477060,hair_lift,0,693 +1386162,facebook_username,0,693 +411331,long_toenails,0,692 +525804,self_hug,0,692 +552753,perfume_bottle,0,692 +1401831,purple_ascot,0,691 +643172,shotgun_shell,0,691 +502652,ink_(medium),0,690 +624228,sky_print,0,690 +535487,hand_over_eye,0,690 +1631697,alternate_pectoral_size,0,689 +1263595,soap_bottle,0,688 +505038,duel_disk,0,688 +434881,baby_bottle,0,688 +1238293,grey_wings,0,687 +1749113,arthropod_limbs,0,687 +1470190,frilled_sailor_collar,0,687 +427948,tying,0,686 +397500,electric_plug,0,686 +389771,syrup,0,686 +391741,fingersmile,0,686 +2685,kickboard,0,685 +1252120,roundel,0,685 +1578028,heart_on_chest,0,685 +1370485,crescent_rose,0,685 +10240,hardhat,0,684 +433080,koi,0,684 +664262,bikini_bottom_removed,0,684 +662225,floating_book,0,683 +3513,goat,0,683 +1417605,blue_shawl,0,683 +4854,cast,0,682 +437278,masu,0,682 +168887,age_comparison,0,682 +572094,h&k_ump,0,681 +14528,bathrobe,0,680 +518347,yamakasa,0,680 +1494956,purple-tinted_eyewear,0,680 +1318748,floating_weapon,0,680 +492292,behind-the-head_headphones,0,679 +513429,watercolor_pencil_(medium),0,679 +725442,blue_bandana,0,679 +1345296,skeleton_print,0,679 +1332216,propeller_hair_ornament,0,679 +1370713,pink_umbrella,0,678 
+417910,duckling,0,678 +433244,first_aid_kit,0,677 +446984,gun_to_head,0,677 +504867,digital_dissolve,0,677 +1253889,joestar_birthmark,0,677 +719987,ghost_pose,0,677 +1289258,bokura_wa_ima_no_naka_de,0,676 +389367,pineapple,0,675 +1349482,igote,0,675 +618480,sitting_on_shoulder,0,675 +1230297,single_wrist_cuff,0,675 +1513506,patchwork_clothes,0,675 +1353604,walking_on_liquid,0,674 +529805,shamoji,0,674 +547795,breaking,0,674 +410219,scylla,0,674 +720294,weasel_ears,0,674 +974485,hoodie_lift,0,674 +516577,groom,0,673 +1867868,motosu_school_uniform,0,673 +374958,cyrillic,0,672 +622480,food_art,0,671 +451855,studded_collar,0,671 +1556083,camouflage_headwear,0,671 +510646,blue_pupils,0,670 +489491,platform_boots,0,670 +14645,marshmallow,0,670 +403,chikan,0,669 +1344657,cat_bag,0,669 +473987,tooth,0,669 +1253133,spade_hair_ornament,0,669 +403466,medallion,0,668 +1240072,hair_color_connection,0,668 +381685,pig_ears,0,668 +565798,crazy_straw,0,668 +1574664,two-sided_skirt,0,668 +616835,calligraphy_brush_(medium),0,668 +1455458,brown_flower,0,668 +191640,ak-47,0,667 +378288,blob,0,667 +542882,against_railing,0,667 +1268234,alternate_hair_ornament,0,667 +1274150,print_sleeves,0,667 +449449,beretta_92,0,666 +556112,red_tail,0,666 +571640,jacket_over_swimsuit,0,666 +381749,tamagoyaki,0,666 +657138,cat_mask,0,666 +646318,necktie_removed,0,666 +413830,wakamezake,0,665 +537572,teeth_hold,0,665 +496893,checkered_dress,0,665 +1429073,holding_beachball,0,664 +1433301,two-tone_leotard,0,664 +1331283,bridal_legwear,0,664 +1494960,yellow-tinted_eyewear,0,664 +510173,easter,0,664 +690728,missile_pod,0,663 +396000,bowler_hat,0,663 +561682,clownfish,0,663 +412041,biwa_lute,0,663 +4947,groceries,0,662 +1243501,gibson_les_paul,0,662 +1322934,gesugao,0,662 +1578455,cumulonimbus_cloud,0,662 +589932,negative_space,0,661 +423622,pelt,0,661 +1609367,tail_around_leg,0,661 +1391188,hand_tattoo,0,661 +1684976,craft_essence_(fate),0,661 +590202,pinstripe_suit,0,660 +1453787,golden_arms,0,660 +1799877,tracen_swimsuit,0,660 +1401249,grey_leotard,0,659 +727812,loaded_interior,0,659 +3927,gao,0,659 +1252918,sitting_on_table,0,659 +1317359,floating_clothes,0,659 +673705,hand_rest,0,659 +1440820,usekh_collar,0,659 +1328313,shell_hair_ornament,0,658 +1325319,wrist_bow,0,658 +1402775,white_mask,0,658 +1391449,cropped_sweater,0,658 +551086,protecting,0,657 +1333942,exposed_pocket,0,657 +609075,red_mask,0,656 +1231690,slim_legs,0,655 +1425321,hogwarts_school_uniform,0,655 +457678,animal_slippers,0,655 +1249968,little_red_riding_hood_(grimm)_(cosplay),0,655 +1435997,eye_trail,0,655 +528408,soft_serve,0,654 +613600,checkered_legwear,0,654 +1435395,grey_tank_top,0,653 +1583008,side-tie_peek,0,653 +1861772,crisis_management_form_(machimazo),0,653 +647108,flying_teardrops,0,652 +666041,multiple_torii,0,652 +546978,two-finger_salute,0,652 +529223,cellphone_charm,0,652 +1297290,split_theme,0,652 +1460120,fur-trimmed_footwear,0,652 +1327283,cracked_floor,0,651 +563545,braiding_hair,0,650 +791840,blue_umbrella,0,650 +1328442,green_belt,0,649 +658222,stuffed_penguin,0,649 +1497365,pov_doorway,0,649 +728040,flat_chest_grab,0,648 +524807,unfastened,0,647 +172018,nail_bat,0,647 +11665,seatbelt,0,646 +723518,arms_between_legs,0,645 +522469,centauroid,0,642 +562074,plaid_ribbon,0,641 +679788,wet_towel,0,641 +1782264,sticker_on_face,0,641 +1714871,midriff_sarashi,0,640 +1684568,paint_splatter_on_face,0,640 +464635,cattail,0,640 +707905,object_on_breast,0,640 +1329330,bunny_day,0,640 +1881204,starlight_academy_school_uniform,0,639 
+655737,pants_under_skirt,0,636 +1442516,paw_print_pattern,0,636 +518157,peony_(flower),0,635 +1427573,brown_sleeves,0,635 +551767,pastry_bag,0,633 +1012946,breasts_on_table,0,633 +449872,walther,0,632 +1535711,cross_tie,0,632 +543243,chrysanthemum,0,631 +1407354,brown_neckerchief,0,629 +1468297,sixteenth_note,0,629 +647474,stuffed_dog,0,628 +1325576,four-leaf_clover_hair_ornament,0,628 +1397372,year_of_the_rooster,0,628 +549834,person_on_head,0,628 +1588824,lifebuoy_ornament,0,628 +492648,yellow_socks,0,626 +651505,animal_on_hand,0,625 +1414486,red_mittens,0,625 +1291060,rabbit_on_head,0,623 +1672268,qingxin_flower,0,618 +385430,hatsune_miku,4,78616 +12239,hakurei_reimu,4,67710 +12242,kirisame_marisa,4,62327 +12248,remilia_scarlet,4,46894 +12249,flandre_scarlet,4,43434 +12314,izayoi_sakuya,4,42465 +1301082,admiral_(kancolle),4,34812 +1478495,artoria_pendragon_(fate),4,33041 +9123,alice_margatroid,4,32516 +388200,kochiya_sanae,4,32504 +12247,patchouli_knowledge,4,32120 +12244,konpaku_youmu,4,31146 +1720,cirno,4,30904 +12246,yakumo_yukari,4,29607 +427142,komeiji_koishi,4,27406 +12308,shameimaru_aya,4,25978 +12307,fujiwara_no_mokou,4,24550 +12304,reisen_udongein_inaba,4,24375 +11374,hong_meiling,4,23457 +592924,akemi_homura,4,23123 +427143,komeiji_satori,4,22757 +592925,kaname_madoka,4,22080 +12245,saigyouji_yuyuko,4,21758 +1631074,kaga_(kancolle),4,20549 +395218,inubashiri_momiji,4,20497 +12250,yakumo_ran,4,18043 +390427,kagamine_rin,4,17416 +602257,konpaku_youmu_(ghost),4,17065 +384842,moriya_suwako,4,17047 +2082,rumia,4,16841 +593109,miki_sayaka,4,16686 +427145,kaenbyou_rin,4,16675 +401582,kazami_yuuka,4,16551 +1275729,shimakaze_(kancolle),4,16285 +427184,reiuji_utsuho,4,16263 +6,saber,4,15967 +1275718,hibiki_(kancolle),4,15954 +474,chen,4,15887 +12302,kamishirasawa_keine,4,15388 +473327,tatara_kogasa,4,15022 +383392,kawashiro_nitori,4,14973 +1414209,mash_kyrielight,4,14919 +415326,hinanawi_tenshi,4,14700 +589430,sakura_kyouko,4,14333 +593108,tomoe_mami,4,14245 +1275728,shigure_(kancolle),4,14238 +12306,houraisan_kaguya,4,13824 +1799,koakuma,4,13641 +1350122,kongou_(kancolle),4,13106 +1602203,ganyu_(genshin_impact),4,12981 +12300,mystia_lorelei,4,12948 +12303,inaba_tewi,4,12571 +12313,ibuki_suika,4,12550 +1275720,inazuma_(kancolle),4,12537 +475833,souryuu_asuka_langley,4,12338 +503349,hijiri_byakuren,4,12211 +1630449,akagi_(kancolle),4,12210 +473267,nazrin,4,11834 +393597,kagamine_len,4,11428 +503343,houjuu_nue,4,11360 +1270870,tenryuu_(kancolle),4,11216 +1473623,tamamo_(fate),4,11128 +1631504,yuudachi_(kancolle),4,10901 +12305,yagokoro_eirin,4,10879 +384841,yasaka_kanako,4,10876 +457810,megurine_luka,4,10737 +1275719,ikazuchi_(kancolle),4,10591 +1479739,jeanne_d'arc_alter_(fate),4,10481 +413783,mizuhashi_parsee,4,10406 +1471064,abigail_williams_(fate),4,10091 +1275715,fubuki_(kancolle),4,10071 +456150,akiyama_mio,4,9944 +1275711,akatsuki_(kancolle),4,9910 +1631523,zuikaku_(kancolle),4,9855 +1391754,fujimaru_ritsuka_(female),4,9711 +1391753,fujimaru_ritsuka_(male),4,9444 +638495,toyosatomimi_no_miko,4,9247 +1631219,nagato_(kancolle),4,9212 +1630699,hamakaze_(kancolle),4,9178 +13814,morichika_rinnosuke,4,9131 +1335350,haruna_(kancolle),4,9124 +9466,suzumiya_haruhi,4,8980 +7441,fate_testarossa,4,8966 +6313,link,4,8924 +465977,hoshiguma_yuugi,4,8846 +3861,pikachu,4,8817 +1686237,raiden_shogun,4,8798 +1433170,nero_claudius_(fate),4,8774 +1351818,kashima_(kancolle),4,8665 +472387,nakano_azusa,4,8571 +1593594,gawr_gura,4,8530 +1533309,houshou_marine,4,8514 +384197,kagiyama_hina,4,8503 
+465646,tohsaka_rin,4,8476 +599169,shanghai_doll,4,8466 +9715,nagato_yuki,4,8457 +700405,nishizumi_miho,4,8294 +12299,wriggle_nightbug,4,8246 +1333465,rem_(re:zero),4,8222 +7493,daiyousei,4,8196 +638499,mononobe_no_futo,4,8163 +1728232,avatar_(ff14),4,8144 +1631280,ryuujou_(kancolle),4,8080 +455424,hirasawa_yui,4,8070 +465688,shiki_eiki,4,7982 +600200,kyubey,4,7934 +12311,onozuka_komachi,4,7924 +1380245,suzuya_(kancolle),4,7856 +1452630,scathach_(fate),4,7702 +12342,usami_renko,4,7645 +1581467,lumine_(genshin_impact),4,7630 +395918,misaka_mikoto,4,7624 +391888,tifa_lockhart,4,7606 +1277919,inkling,4,7466 +8552,takamachi_nanoha,4,7465 +1401122,yorha_no._2_type_b,4,7437 +503351,toramaru_shou,4,7431 +12108,illyasviel_von_einzbern,4,7414 +1631004,houshou_(kancolle),4,7348 +506041,maribel_hearn,4,7317 +1233711,imaizumi_kagerou,4,7306 +8327,ayanami_rei,4,7267 +1639113,shigure_kai_ni_(kancolle),4,7221 +1408636,jeanne_d'arc_(fate),4,7191 +378876,joseph_joestar,4,7142 +1639135,yuudachi_kai_ni_(kancolle),4,7052 +1231258,matoi_ryuuko,4,7016 +456151,tainaka_ritsu,4,6997 +1635696,hu_tao_(genshin_impact),4,6878 +452135,producer_(idolmaster),4,6859 +1275723,murakumo_(kancolle),4,6824 +1405000,serval_(kemono_friends),4,6806 +592977,nishikino_maki,4,6768 +1765420,jeanne_d'arc_alter_(avenger)_(fate),4,6725 +593046,sonoda_umi,4,6721 +1277794,amatsukaze_(kancolle),4,6636 +544560,himekaidou_hatate,4,6634 +1585358,northern_ocean_princess,4,6621 +1631456,ushio_(kancolle),4,6609 +662185,shibuya_rin,4,6569 +615562,toujou_nozomi,4,6537 +94496,princess_zelda,4,6535 +1600114,zhongli_(genshin_impact),4,6477 +1438816,okita_souji_(fate),4,6473 +1285206,atago_(kancolle),4,6461 +638538,kaku_seiga,4,6444 +1275734,yukikaze_(kancolle),4,6441 +1631360,shoukaku_(kancolle),4,6390 +503350,murasa_minamitsu,4,6356 +1511608,inkling_girl,4,6321 +592974,yazawa_nico,4,6312 +713730,kafuu_chino,4,6283 +593043,ayase_eli,4,6236 +1682530,tamamo_no_mae_(fate/extra),4,6180 +1631488,yamato_(kancolle),4,6121 +466373,dawn_(pokemon),4,6100 +1479836,shirakami_fubuki,4,6085 +413645,nagae_iku,4,6078 +1523054,manjuu_(azur_lane),4,6061 +1531173,usada_pekora,4,6052 +487910,joseph_joestar_(young),4,6030 +1275712,akebono_(kancolle),4,6011 +1233709,hata_no_kokoro,4,5911 +1631124,kitakami_(kancolle),4,5907 +54494,kujo_jotaro,4,5902 +1649146,gilgamesh_(fate),4,5822 +570051,ibaraki_kasen,4,5806 +1287532,megumin,4,5792 +669326,kaito_(vocaloid),4,5790 +1239690,rensouhou-chan,4,5717 +1369674,lillie_(pokemon),4,5687 +7438,matou_sakura,4,5678 +1596257,mona_(genshin_impact),4,5668 +1733099,yae_miko,4,5664 +1435530,shuten_douji_(fate),4,5656 +473328,kumoi_ichirin,4,5646 +413652,kurodani_yamame,4,5643 +619180,kasodani_kyouko,4,5610 +638583,soga_no_tojiko,4,5592 +619181,miyako_yoshika,4,5551 +1427460,prinz_eugen_(kancolle),4,5516 +101752,hoshii_miki,4,5496 +1631257,ooyodo_(kancolle),4,5453 +456152,kotobuki_tsumugi,4,5431 +1600563,keqing_(genshin_impact),4,5414 +1448304,astolfo_(fate),4,5413 +1630648,female_admiral_(kancolle),4,5408 +13108,chun-li,4,5374 +1372463,darjeeling_(girls_und_panzer),4,5362 +382074,hiiragi_kagami,4,5341 +1361469,mutsu_(kancolle),4,5321 +1631410,tatsuta_(kancolle),4,5315 +1478494,cu_chulainn_(fate),4,5269 +1630515,asashio_(kancolle),4,5250 +466360,may_(pokemon),4,5242 +1532746,byleth_(fire_emblem),4,5192 +1243755,kijin_seija,4,5186 +1424365,pyra_(xenoblade),4,5178 +7439,kinomoto_sakura,4,5176 +1593595,mori_calliope,4,5176 +1328621,clownpiece,4,5157 +713720,nishizumi_maho,4,5153 +1593596,ninomae_ina'nis,4,5129 +593045,minami_kotori,4,5119 
+717860,rosa_(pokemon),4,5119 +1405300,kaban_(kemono_friends),4,5109 +1648389,yor_briar,4,5092 +10658,emiya_shirou,4,5021 +1495213,minato_aqua,4,5017 +713721,itsumi_erika,4,4979 +466669,c.c.,4,4976 +710903,meiko_(vocaloid),4,4975 +11941,samus_aran,4,4963 +383424,aki_minoriko,4,4963 +615290,hilda_(pokemon),4,4950 +1630531,bismarck_(kancolle),4,4949 +1646740,archer_(fate),4,4941 +1448306,mordred_(fate),4,4940 +1599785,aether_(genshin_impact),4,4932 +1631313,sendai_(kancolle),4,4923 +1533332,marnie_(pokemon),4,4923 +1464562,minamoto_no_raikou_(fate),4,4918 +1631252,ooi_(kancolle),4,4907 +656409,yuzuki_yukari,4,4892 +382075,izumi_konata,4,4858 +452021,hieda_no_akyuu,4,4837 +434767,caesar_anthonio_zeppeli,4,4796 +1350123,akashi_(kancolle),4,4794 +1233706,sekibanki,4,4768 +9071,amami_haruka,4,4760 +727352,anchovy_(girls_und_panzer),4,4757 +1507496,takao_(kancolle),4,4740 +1293664,musashi_(kancolle),4,4740 +699794,nanami_chiaki,4,4735 +1328650,junko_(touhou),4,4731 +510663,yoko_littner,4,4679 +1590956,mythra_(xenoblade),4,4644 +1682533,nero_claudius_(fate/extra),4,4616 +1734279,okita_souji_(koha-ace),4,4610 +1442931,amiya_(arknights),4,4608 +1275730,shiranui_(kancolle),4,4605 +7866,tsukino_usagi,4,4576 +1339317,d.va_(overwatch),4,4559 +593044,kousaka_honoka,4,4558 +638500,futatsuiwa_mamizou,4,4557 +1248945,wo-class_aircraft_carrier,4,4554 +698982,asuna_(sao),4,4534 +1478256,medusa_(fate),4,4530 +1631305,sazanami_(kancolle),4,4523 +1631308,souryuu_(kancolle),4,4520 +1243757,sukuna_shinmyoumaru,4,4516 +1631484,yamashiro_(kancolle),4,4489 +424565,shijou_takane,4,4488 +383855,aki_shizuha,4,4481 +1275727,samidare_(kancolle),4,4471 +1593598,watson_amelia,4,4470 +700403,akiyama_yukari,4,4468 +1630992,hiei_(kancolle),4,4468 +21,suigintou,4,4430 +1631272,ro-500_(kancolle),4,4425 +1246753,senketsu,4,4424 +1544889,gloria_(pokemon),4,4412 +10365,cloud_strife,4,4410 +1667133,hk416_(girls'_frontline),4,4402 +1593597,takanashi_kiara,4,4377 +9072,kisaragi_chihaya,4,4365 +279302,dio_brando,4,4360 +422767,gumi,4,4359 +1572405,klee_(genshin_impact),4,4357 +1333552,kirishima_(kancolle),4,4342 +1342225,verniy_(kancolle),4,4342 +12295,letty_whiterock,4,4323 +445942,gardevoir,4,4320 +1595272,venti_(genshin_impact),4,4305 +1231257,kiryuuin_satsuki,4,4282 +1081167,mordred_(fate/apocrypha),4,4277 +9078,minase_iori,4,4276 +1518952,nekomata_okayu,4,4269 +9079,kikuchi_makoto,4,4243 +615222,hoshizora_rin,4,4221 +1421198,bb_(fate),4,4202 +1631098,kasumi_(kancolle),4,4202 +1631042,iowa_(kancolle),4,4196 +460796,ex-keine,4,4122 +1532747,byleth_(fire_emblem)_(female),4,4110 +1328656,hecatia_lapislazuli,4,4100 +1769318,jeanne_d'arc_(ruler)_(fate),4,4087 +1442946,texas_(arknights),4,4083 +1440040,atago_(azur_lane),4,4071 +472979,saber_alter,4,4066 +422691,oshino_shinobu,4,4060 +425446,ganaha_hibiki,4,4060 +1492223,taihou_(azur_lane),4,4049 +1631502,yuubari_(kancolle),4,4039 +1631121,kiso_(kancolle),4,4032 +1526564,doctor_(arknights),4,4023 +660315,shimamura_uzuki,4,4003 +1610604,tartaglia_(genshin_impact),4,4003 +52967,princess_peach,4,3999 +1531312,uruha_rushia,4,3998 +466315,ash_ketchum,4,3989 +1316173,watanabe_you,4,3972 +1442306,zero_two_(darling_in_the_franxx),4,3955 +1525565,hoshimachi_suisei,4,3955 +405454,aerith_gainsborough,4,3938 +15550,morrigan_aensland,4,3935 +1516925,makima_(chainsaw_man),4,3935 +9750,asahina_mikuru,4,3932 +445830,red_(pokemon),4,3928 +1622755,asuna_(blue_archive),4,3923 +1533307,shirogane_noel,4,3913 +1526509,skadi_(arknights),4,3894 +1536480,formidable_(azur_lane),4,3879 
+665139,shirasaka_koume,4,3864 +1630996,hiryuu_(kancolle),4,3855 +1440089,prinz_eugen_(azur_lane),4,3855 +1569865,karyl_(princess_connect!),4,3855 +665166,takagaki_kaede,4,3850 +1629450,meltryllis_(fate),4,3848 +1341413,djeeta_(granblue_fantasy),4,3814 +1233049,sagisawa_fumika,4,3784 +1630687,graf_zeppelin_(kancolle),4,3779 +646093,ultimate_madoka,4,3776 +1561709,bremerton_(azur_lane),4,3770 +1645004,rice_shower_(umamusume),4,3745 +701140,komaeda_nagito,4,3741 +1233712,wakasagihime,4,3724 +1518950,inugami_korone,4,3716 +1564340,paimon_(genshin_impact),4,3714 +1515171,sakura_miko,4,3713 +11498,kirby,4,3683 +1328609,kishin_sagume,4,3650 +1641843,eula_(genshin_impact),4,3650 +1649150,medusa_(rider)_(fate),4,3649 +592976,koizumi_hanayo,4,3644 +1549065,tokoyami_towa,4,3617 +1646765,mejiro_mcqueen_(umamusume),4,3612 +1511455,yumemi_riamu,4,3603 +9075,takatsuki_yayoi,4,3591 +1627707,miyamoto_musashi_(fate),4,3590 +9681,kyon,4,3589 +1312747,hestia_(danmachi),4,3577 +1631228,naka_(kancolle),4,3575 +496900,black_rock_shooter_(character),4,3567 +1493247,bowsette,4,3536 +1514981,selene_(pokemon),4,3530 +1440003,belfast_(azur_lane),4,3530 +1509133,sirius_(azur_lane),4,3507 +663994,kanzaki_ranko,4,3498 +1549061,amane_kanata,4,3497 +1500247,oozora_subaru,4,3471 +1627592,ishtar_(fate),4,3469 +593780,super_sonico,4,3466 +1646762,daiwa_scarlet_(umamusume),4,3462 +413714,kisume,4,3456 +664384,jougasaki_mika,4,3455 +12257,ikari_shinji,4,3441 +1649152,cu_chulainn_(fate/stay_night),4,3437 +1585359,seaport_princess,4,3432 +620127,narukami_yuu,4,3429 +8882,lily_white,4,3428 +1549082,kiryu_coco,4,3421 +1350743,aqua_(konosuba),4,3417 +11055,furude_rika,4,3411 +448496,cynthia_(pokemon),4,3404 +1630645,fairy_(kancolle),4,3384 +700407,takebe_saori,4,3382 +1378250,i-19_(kancolle),4,3374 +1651926,kamisato_ayaka,4,3363 +1452466,ereshkigal_(fate),4,3357 +1316172,tsushima_yoshiko,4,3355 +8048,sailor_moon,4,3346 +1631521,zuihou_(kancolle),4,3341 +670056,kuroki_tomoko,4,3329 +1502759,ookami_mio,4,3321 +550231,gokou_ruri,4,3312 +634453,barnaby_brooks_jr.,4,3307 +13270,cammy_white,4,3303 +662047,sanya_v._litvyak,4,3290 +687394,nitta_minami,4,3274 +14152,kakyoin_noriaki,4,3265 +1440013,unicorn_(azur_lane),4,3230 +421104,eila_ilmatar_juutilainen,4,3226 +781225,lucina_(fire_emblem),4,3217 +1236324,serena_(pokemon),4,3217 +660302,ahri_(league_of_legends),4,3215 +701382,hinata_hajime,4,3213 +1467839,trainer_(umamusume),4,3201 +1528907,corrin_(fire_emblem),4,3191 +591909,stocking_(psg),4,3188 +1265245,ichinose_shiki,4,3182 +1388968,ouma_kokichi,4,3182 +649985,kaburagi_t._kotetsu,4,3179 +1275725,naganami_(kancolle),4,3171 +11853,haruno_sakura,4,3163 +12309,medicine_melancholy,4,3157 +1631030,i-58_(kancolle),4,3154 +1717604,shenhe_(genshin_impact),4,3154 +1586173,yukihana_lamy,4,3150 +12296,lunasa_prismriver,4,3143 +1500444,commander_(azur_lane),4,3135 +1527899,kitagawa_marin,4,3133 +1685272,ouro_kronii,4,3132 +1561704,higuchi_madoka,4,3130 +1507493,maya_(kancolle),4,3125 +1462685,kokkoro_(princess_connect!),4,3125 +16266,son_goku,4,3124 +8553,yagami_hayate,4,3122 +12107,kotomine_kirei,4,3116 +1631482,yamakaze_(kancolle),4,3099 +1275724,murasame_(kancolle),4,3081 +724152,anastasia_(idolmaster),4,3064 +382076,hiiragi_tsukasa,4,3059 +1646764,gold_ship_(umamusume),4,3051 +1598280,xiao_(genshin_impact),4,3049 +1685271,nanashi_mumei,4,3036 +563892,kirito,4,3035 +1532725,edelgard_von_hresvelg,4,3027 +1317066,doremy_sweet,4,3026 +1292960,midoriya_izuku,4,3021 +1586175,shishiro_botan,4,3007 +466818,misty_(pokemon),4,3006 
+1447551,florence_nightingale_(fate),4,3001 +419123,shirogane_naoto,4,2998 +1295748,mercy_(overwatch),4,2997 +413395,kujo_jolyne,4,2994 +649693,jack_the_ripper_(fate/apocrypha),4,2990 +1316171,sakurauchi_riko,4,2988 +1822797,boo_tao_(genshin_impact),4,2987 +722007,katyusha_(girls_und_panzer),4,2970 +413396,giorno_giovanna,4,2969 +713714,kay_(girls_und_panzer),4,2969 +403357,nia_teppelin,4,2963 +1342592,shimada_arisu,4,2960 +1646761,tokai_teio_(umamusume),4,2957 +1667136,ump45_(girls'_frontline),4,2944 +644424,nami_(one_piece),4,2942 +12355,uzumaki_naruto,4,2929 +1631514,z1_leberecht_maass_(kancolle),4,2922 +398775,sheryl_nome,4,2917 +1594094,pecorine_(princess_connect!),4,2909 +1491767,murasaki_shion,4,2907 +1441167,ayanami_(azur_lane),4,2900 +660098,futaba_anzu,4,2897 +1292959,uraraka_ochako,4,2895 +1386438,saihara_shuuichi,4,2891 +1480186,takarada_rikka,4,2888 +1529309,anya_(spy_x_family),4,2877 +9077,miura_azusa,4,2873 +1631389,taihou_(kancolle),4,2873 +1631516,z3_max_schultz_(kancolle),4,2866 +3030,mario,4,2863 +445932,eevee,4,2860 +1478507,iskandar_(fate),4,2845 +1275713,akigumo_(kancolle),4,2844 +1631134,kuma_(kancolle),4,2842 +375279,waver_velvet,4,2840 +13543,hyuuga_hinata,4,2831 +620175,mikasa_ackerman,4,2829 +407911,jonathan_joestar,4,2825 +1533759,barbara_(genshin_impact),4,2822 +9074,hagiwara_yukiho,4,2821 +1631474,warspite_(kancolle),4,2821 +1630673,fusou_(kancolle),4,2818 +1054766,chloe_von_einzbern,4,2812 +1462591,narmaya_(granblue_fantasy),4,2808 +437574,shirai_kuroko,4,2803 +1631023,i-401_(kancolle),4,2794 +711691,motoori_kosuzu,4,2792 +663996,jougasaki_rika,4,2791 +1337035,leon_(pokemon),4,2791 +1631239,non-human_admiral_(kancolle),4,2788 +1262414,izumi_sagiri,4,2785 +411462,aisaka_taiga,4,2782 +11349,ryuuguu_rena,4,2779 +539110,yuki_miku,4,2778 +700406,reizei_mako,4,2774 +1317065,usami_sumireko,4,2771 +1417248,common_raccoon_(kemono_friends),4,2769 +1431828,elizabeth_bathory_(fate),4,2767 +1799500,asuna_(bunny)_(blue_archive),4,2765 +508752,pyonta,4,2764 +1276066,hoto_cocoa,4,2751 +660394,midorikawa_nao,4,2740 +1380244,kumano_(kancolle),4,2730 +1240694,ruby_rose,4,2718 +170637,lelouch_lamperouge,4,2716 +1627605,kama_(fate),4,2713 +406263,satonaka_chie,4,2711 +1524374,lappland_(arknights),4,2710 +476837,holo,4,2701 +1544892,raihan_(pokemon),4,2699 +407910,higashikata_josuke,4,2697 +1705933,monster_hunter_(character),4,2693 +1442947,ch'en_(arknights),4,2693 +660393,kise_yayoi,4,2690 +1246772,mankanshoku_mako,4,2690 +539789,beatrice_(umineko),4,2688 +1599614,jean_(genshin_impact),4,2687 +12514,shiranui_mai,4,2686 +664492,maekawa_miku,4,2679 +1630489,aoba_(kancolle),4,2677 +1631010,hyuuga_(kancolle),4,2676 +1582962,fischl_(genshin_impact),4,2675 +1333466,emilia_(re:zero),4,2672 +1429424,tokitsukaze_(kancolle),4,2667 +9076,akizuki_ritsuko,4,2662 +1627620,kiyohime_(fate),4,2659 +603070,charlotte_(madoka_magica),4,2658 +1500246,nakiri_ayame,4,2653 +1325863,robin_(fire_emblem),4,2652 +8955,luna_child,4,2647 +8956,star_sapphire,4,2628 +1405055,fennec_(kemono_friends),4,2627 +1440018,illustrious_(azur_lane),4,2624 +1275726,oboro_(kancolle),4,2615 +1465315,tomoe_gozen_(fate),4,2604 +1530284,bea_(pokemon),4,2602 +1361608,boko_(girls_und_panzer),4,2601 +1499605,natsuiro_matsuri,4,2595 +1631194,mogami_(kancolle),4,2594 +473279,unzan,4,2591 +1533296,shiranui_flare,4,2591 +1631303,satsuki_(kancolle),4,2588 +1630519,ashigara_(kancolle),4,2586 +471219,kousaka_kirino,4,2581 +1719003,sakamata_chloe,4,2579 +761961,weiss_schnee,4,2575 +1442945,exusiai_(arknights),4,2573 
+1629409,artoria_pendragon_(lancer)_(fate),4,2566 +1452972,yorigami_shion,4,2562 +1631066,jintsuu_(kancolle),4,2559 +1275732,uzuki_(kancolle),4,2559 +1631126,kiyoshimo_(kancolle),4,2558 +1631081,kagerou_(kancolle),4,2547 +510605,makise_kurisu,4,2545 +1631452,urakaze_(kancolle),4,2540 +8954,sunny_milk,4,2533 +633863,tachibana_arisu,4,2533 +1385142,isokaze_(kancolle),4,2523 +1316036,takami_chika,4,2519 +1435235,nitocris_(fate),4,2515 +1526515,nessa_(pokemon),4,2515 +1528908,corrin_(fire_emblem)_(female),4,2502 +195216,kallen_stadtfeld,4,2498 +12112,emiya_kiritsugu,4,2496 +414823,erica_hartmann,4,2487 +414822,miyafuji_yoshika,4,2485 +1631447,unryuu_(kancolle),4,2484 +1275717,hatsuyuki_(kancolle),4,2465 +665397,koshimizu_sachiko,4,2462 +1468120,nakano_nino,4,2457 +1275731,shiratsuyu_(kancolle),4,2455 +1623068,karin_(blue_archive),4,2455 +11053,houjou_satoko,4,2444 +1702683,diarmuid_ua_duibhne_(lancer)_(fate),4,2444 +707204,dizzy_(guilty_gear),4,2442 +700933,hayami_kanade,4,2442 +717140,hishikawa_rikka,4,2440 +1337527,mika_(girls_und_panzer),4,2440 +1515819,power_(chainsaw_man),4,2433 +1593304,don-chan_(usada_pekora),4,2426 +1333601,ram_(re:zero),4,2417 +1631385,taigei_(kancolle),4,2414 +1630472,akitsu_maru_(kancolle),4,2411 +10284,asakura_ryouko,4,2404 +1589924,simon_(ttgl),4,2403 +512930,alice_(alice_in_wonderland),4,2399 +1667138,wa2000_(girls'_frontline),4,2389 +1292961,bakugou_katsuki,4,2386 +1443053,nakano_miku,4,2384 +1318502,callie_(splatoon),4,2383 +1435236,oda_nobunaga_(fate),4,2383 +1644978,agnes_tachyon_(umamusume),4,2380 +420872,gertrud_barkhorn,4,2379 +660390,aoki_reika,4,2379 +9081,futami_mami,4,2375 +378867,dark_magician_girl,4,2373 +1403841,yoshida_yuuko_(machikado_mazoku),4,2365 +1762231,kirima_syaro,4,2363 +1757377,yelan_(genshin_impact),4,2355 +1770073,nishikigi_chisato,4,2355 +1630723,hatsuzuki_(kancolle),4,2350 +12405,nagisa_kaworu,4,2340 +1631174,michishio_(kancolle),4,2338 +1631299,saratoga_(kancolle),4,2336 +1595577,surtr_(arknights),4,2336 +394054,louise_francoise_le_blanc_de_la_valliere,4,2335 +1271042,kiana_kaslana,4,2326 +1670936,accelerator_(toaru_majutsu_no_index),4,2324 +1630708,harusame_(kancolle),4,2324 +482064,makinami_mari_illustrious,4,2308 +12094,arcueid_brunestud,4,2303 +438007,kamijou_touma,4,2299 +4587,shana,4,2288 +1627610,katsushika_hokusai_(fate),4,2287 +554006,tachibana_kanade,4,2286 +1584981,shiroko_(blue_archive),4,2283 +12297,merlin_prismriver,4,2282 +1440020,takao_(azur_lane),4,2282 +1387675,kamado_nezuko,4,2273 +1318503,marie_(splatoon),4,2268 +1279297,yura_(kancolle),4,2267 +1267920,re-class_battleship,4,2266 +499269,miyu_edelfelt,4,2250 +395970,ranka_lee,4,2248 +658637,chitanda_eru,4,2248 +385134,ushiromiya_battler,4,2247 +1631440,u-511_(kancolle),4,2246 +12298,lyrica_prismriver,4,2240 +439625,kaenbyou_rin_(cat),4,2236 +591958,kirigiri_kyouko,4,2236 +1287911,hino_akane_(smile_precure!),4,2236 +1452342,amamiya_ren,4,2236 +1398811,kanna_kamui,4,2229 +1631478,yahagi_(kancolle),4,2226 +1821363,nahida_(genshin_impact),4,2225 +1524970,reisalin_stout,4,2221 +670005,yukine_chris,4,2219 +700404,isuzu_hana,4,2219 +549180,kashiwazaki_sena,4,2218 +1378260,lana_(pokemon),4,2210 +1533761,amber_(genshin_impact),4,2210 +1631206,mutsuki_(kancolle),4,2206 +403861,kyonko,4,2204 +1399517,tippy_(gochiusa),4,2203 +557446,ethan_(pokemon),4,2201 +1440130,laffey_(azur_lane),4,2200 +1255756,t-head_admiral,4,2192 +479706,yuuki_makoto,4,2190 +1440202,enterprise_(azur_lane),4,2187 +1481047,akai_haato,4,2180 +1630695,haguro_(kancolle),4,2172 +615600,toshinou_kyouko,4,2165 
+601487,enoshima_junko,4,2163 +1602944,mudrock_(arknights),4,2159 +1566117,kal'tsit_(arknights),4,2151 +1447375,tamamo_cat_(fate),4,2150 +1631032,i-8_(kancolle),4,2149 +1630524,atlanta_(kancolle),4,2148 +1630210,abukuma_(kancolle),4,2147 +12564,uchiha_sasuke,4,2142 +1483850,napoleon_bonaparte_(fate),4,2142 +454513,hanekawa_tsubasa,4,2141 +1811190,nilou_(genshin_impact),4,2136 +1515525,mayuzumi_fuyuko,4,2133 +99,shinku,4,2131 +1631433,tone_(kancolle),4,2131 +1630477,akizuki_(kancolle),4,2130 +1602206,qiqi_(genshin_impact),4,2128 +598066,houjou_hibiki,4,2127 +9080,futami_ami,4,2125 +1055516,akari_(pokemon),4,2120 +12106,kousaka_tamaki,4,2116 +1452975,yorigami_jo'on,4,2113 +1440110,akagi_(azur_lane),4,2111 +1283598,bronya_zaychik,4,2108 +626945,shokuhou_misaki,4,2107 +1765430,jeanne_d'arc_alter_(swimsuit_berserker)_(fate),4,2096 +1311294,gran_(granblue_fantasy),4,2093 +1678980,sangonomiya_kokomi,4,2091 +1316167,kurosawa_dia,4,2088 +1605761,hina_(blue_archive),4,2086 +573907,blue_oak,4,2085 +662472,takanashi_rikka,4,2085 +717136,aida_mana,4,2083 +1631116,kisaragi_(kancolle),4,2074 +1250847,junketsu,4,2072 +1236625,eren_yeager,4,2070 +1366306,rowlet,4,2069 +1316169,matsuura_kanan,4,2066 +615289,hilbert_(pokemon),4,2062 +1234829,yang_xiao_long,4,2059 +1402355,kagari_atsuko,4,2058 +1596241,diluc_(genshin_impact),4,2051 +405240,amagi_yukiko,4,2049 +853361,bb_(fate/extra),4,2042 +1719009,la+_darknesss,4,2042 +1432299,matara_okina,4,2040 +657125,ia_(vocaloid),4,2033 +1183449,nonna_(girls_und_panzer),4,2033 +1252196,akuma_homura,4,2030 +1448715,kizuna_akari,4,2029 +1431862,nero_claudius_(swimsuit_caster)_(fate),4,2027 +1585360,battleship_princess,4,2020 +1667140,ump9_(girls'_frontline),4,2019 +426767,ikamusume,4,2018 +691541,perrine_h._clostermann,4,2017 +1631333,shikinami_(kancolle),4,2017 +1685270,hakos_baelz,4,2016 +530171,kurumi_erika,4,2014 +1316166,kunikida_hanamaru,4,2013 +1631090,kamikaze_(kancolle),4,2012 +1515818,denji_(chainsaw_man),4,2010 +527779,han_juri,4,2003 +1587880,gotou_hitori,4,2001 +599208,charlotte_e._yeager,4,2000 +1271044,raiden_mei,4,1994 +1308178,octoling,4,1994 +409162,senjougahara_hitagi,4,1991 +1630675,gambier_bay_(kancolle),4,1990 +1631046,ise_(kancolle),4,1985 +610107,akaza_akari,4,1983 +1387121,tamamo_no_mae_(swimsuit_lancer)_(fate),4,1983 +1464778,nakano_yotsuba,4,1983 +1631071,jun'you_(kancolle),4,1980 +660392,hoshizora_miyuki,4,1977 +1631188,miyuki_(kancolle),4,1976 +16066,raising_heart,4,1975 +416172,kujikawa_rise,4,1967 +1467626,anastasia_(fate),4,1960 +1440070,kaga_(azur_lane),4,1958 +1630677,gangut_(kancolle),4,1953 +1159816,mirko,4,1952 +1646759,silence_suzuka_(umamusume),4,1950 +1627569,fou_(fate),4,1949 +1631265,pola_(kancolle),4,1949 +663681,honda_mio,4,1948 +591906,panty_(psg),4,1945 +772426,ohtsuki_yui,4,1941 +1250855,hex_maniac_(pokemon),4,1941 +1658781,scaramouche_(genshin_impact),4,1941 +502807,shinki_(touhou),4,1940 +386434,piplup,4,1933 +1452668,hassan_of_serenity_(fate),4,1932 +414820,lynette_bishop,4,1929 +476084,saten_ruiko,4,1929 +1488442,bb_(swimsuit_mooncancer)_(fate),4,1926 +16082,ryougi_shiki,4,1923 +41,suiseiseki,4,1919 +1610754,yu_mei-ren_(fate),4,1911 +11050,sonozaki_mion,4,1910 +1509453,fu_hua,4,1909 +1625332,yuuka_(blue_archive),4,1908 +1540881,w_(arknights),4,1905 +1580066,suzuran_(arknights),4,1905 +1817174,inkling_boy,4,1902 +553633,lyra_(pokemon),4,1894 +1645002,nice_nature_(umamusume),4,1890 +721694,tatsumaki,4,1888 +1631016,i-168_(kancolle),4,1887 +1716513,avatar_(ff11),4,1885 +384405,irisviel_von_einzbern,4,1885 
+445847,charizard,4,1883 +1459161,tsukino_mito,4,1883 +1675937,kaedehara_kazuha,4,1883 +1472743,ibaraki_douji_(fate),4,1882 +1387748,katsuki_yuuri,4,1881 +1316168,kurosawa_ruby,4,1876 +1631510,yuugumo_(kancolle),4,1875 +1378262,mallow_(pokemon),4,1874 +1406378,shoebill_(kemono_friends),4,1874 +1770074,inoue_takina,4,1871 +1257126,p-head_producer,4,1869 +16122,nico_robin,4,1865 +1449263,nia_(xenoblade),4,1865 +1387712,viktor_nikiforov,4,1855 +1408667,kochou_shinobu,4,1852 +1670077,springfield_(girls'_frontline),4,1850 +1680379,mash_kyrielight_(dangerous_beast),4,1850 +1544632,mostima_(arknights),4,1850 +660389,christa_renz,4,1848 +1239637,jakuzure_nonon,4,1842 +1631211,nachi_(kancolle),4,1842 +1534498,meltryllis_(swimsuit_lancer)_(fate),4,1842 +1275722,makigumo_(kancolle),4,1841 +1506818,neptune_(neptune_series),4,1840 +7579,vita,4,1836 +438339,rotom,4,1834 +664082,miyamoto_frederica,4,1825 +383738,vivio,4,1822 +1542251,orange_pekoe_(girls_und_panzer),4,1822 +1533460,lysithea_von_ordelia,4,1817 +1384409,akamatsu_kaede,4,1815 +7477,noumi_kudryavka,4,1811 +512688,brendan_(pokemon),4,1811 +394317,rosalina,4,1811 +1693513,elizabeth_bathory_(fate/extra_ccc),4,1810 +938279,lyn_(fire_emblem),4,1809 +1268743,ujimatsu_chiya,4,1806 +171918,android_18,4,1802 +666034,akagi_miria,4,1801 +713731,tedeza_rize,4,1800 +543959,sakura_miku,4,1799 +1630542,choukai_(kancolle),4,1799 +1629430,mysterious_heroine_xx_(fate),4,1795 +1525541,gojou_satoru,4,1789 +1222730,sesshouin_kiara,4,1788 +1629420,jeanne_d'arc_alter_santa_lily_(fate),4,1787 +433649,johnny_joestar,4,1783 +1631163,maru-yu_(kancolle),4,1781 +1160566,blake_belladonna,4,1780 +1549045,tsunomaki_watame,4,1780 +14180,lilith_aensland,4,1770 +1631225,nagatsuki_(kancolle),4,1764 +565443,shinjou_akane,4,1763 +628777,yuzuriha_inori,4,1760 +1447957,nero_claudius_(bride)_(fate),4,1760 +1533066,hilda_valentine_goneril,4,1760 +1631102,katsuragi_(kancolle),4,1757 +1664919,yoimiya_(genshin_impact),4,1753 +1406537,shima_rin,4,1752 +1239070,akatsuki_kirika,4,1748 +717139,kenzaki_makoto,4,1746 +1525756,sonia_(pokemon),4,1745 +382079,takara_miyuki,4,1742 +1346647,shinomiya_kaguya,4,1742 +1533350,morpeko,4,1737 +1670933,index_(toaru_majutsu_no_index),4,1734 +596343,mima_(touhou),4,1731 +753977,sinon,4,1731 +8276,bardiche,4,1730 +1341257,toga_himiko,4,1730 +1440145,javelin_(azur_lane),4,1726 +1631425,teruzuki_(kancolle),4,1725 +1631244,noshiro_(kancolle),4,1723 +1251354,phosphophyllite,4,1721 +414816,sakamoto_mio,4,1720 +1442305,hiro_(darling_in_the_franxx),4,1719 +399147,yowane_haku,4,1717 +664359,tachibana_hibiki_(symphogear),4,1717 +7914,mizuno_ami,4,1716 +10146,koizumi_itsuki,4,1715 +1631402,tama_(kancolle),4,1714 +1257062,pepperoni_(girls_und_panzer),4,1714 +1405419,lucky_beast_(kemono_friends),4,1713 +568921,aegis_(persona),4,1709 +713449,morikubo_nono,4,1709 +1852825,iono_(pokemon),4,1704 +4906,lum,4,1703 +1670940,last_order_(toaru_majutsu_no_index),4,1702 +1630685,gotland_(kancolle),4,1698 +1631495,yayoi_(kancolle),4,1693 +1631373,suzukaze_(kancolle),4,1690 +1292966,asui_tsuyu,4,1689 +1644992,manhattan_cafe_(umamusume),4,1688 +1631198,sensei_(blue_archive),4,1686 +1735921,asashio_kai_ni_(kancolle),4,1680 +1602199,ningguang_(genshin_impact),4,1677 +1623528,dodoco_(genshin_impact),4,1672 +1441509,andou_(girls_und_panzer),4,1669 +301250,yae_sakura,4,1668 +1631357,shirayuki_(kancolle),4,1667 +1275721,kuroshio_(kancolle),4,1666 +5036,signum,4,1663 +1685269,ceres_fauna,4,1663 +1667145,ak-12_(girls'_frontline),4,1662 +424944,hanamura_yousuke,4,1661 
+1350741,darkness_(konosuba),4,1661 +496829,makoto_nanaya,4,1657 +681874,shiomi_syuko,4,1657 +1403842,chiyoda_momo,4,1657 +7794,aino_minako,4,1656 +606667,kirino_ranmaru,4,1655 +1446811,st._louis_(azur_lane),4,1653 +529058,hanasaki_tsubomi,4,1649 +567872,n_(pokemon),4,1648 +399582,lucario,4,1646 +1432467,morgan_le_fay_(fate),4,1646 +401222,noel_vermillion,4,1644 +1317049,seiran_(touhou),4,1643 +1248475,jinx_(league_of_legends),4,1640 +1646757,special_week_(umamusume),4,1639 +1295749,tracer_(overwatch),4,1635 +1630510,asashimo_(kancolle),4,1634 +1631376,suzutsuki_(kancolle),4,1632 +1303014,producer_(idolmaster_cinderella_girls_anime),4,1629 +1672736,twilight_(spy_x_family),4,1629 +378207,matou_kariya,4,1628 +1734277,oda_nobunaga_(koha-ace),4,1628 +1344591,matsuno_karamatsu,4,1624 +1459170,hachimiya_meguru,4,1624 +488853,kasumi_(doa),4,1623 +1524282,lio_fotia,4,1622 +1243759,horikawa_raiko,4,1621 +423669,saber_lily,4,1619 +1581062,shiomi_kotone,4,1617 +386311,otonashi_kotori,4,1613 +664313,moroboshi_kirari,4,1612 +386242,sakata_gintoki,4,1611 +1300728,oumae_kumiko,4,1604 +1344594,matsuno_jyushimatsu,4,1603 +1407560,kizuna_ai,4,1602 +436994,sakurai_momoka,4,1601 +593802,minamino_kanade,4,1600 +1631112,kinugasa_(kancolle),4,1599 +467096,leaf_(pokemon),4,1595 +1630499,arashio_(kancolle),4,1595 +1344590,matsuno_osomatsu,4,1595 +1678981,kujou_sara,4,1594 +1639112,sendai_kai_ni_(kancolle),4,1593 +1344593,matsuno_ichimatsu,4,1593 +1383241,lusamine_(pokemon),4,1592 +1629361,ushiwakamaru_(fate),4,1591 +1501023,artoria_pendragon_(lancer_alter)_(fate),4,1590 +1799501,karin_(bunny)_(blue_archive),4,1590 +1401124,yorha_no._9_type_s,4,1589 +663432,kamiya_nao,4,1588 +658795,mimura_kanako,4,1585 +716185,nishizumi_shiho,4,1585 +15221,hanyuu,4,1583 +1584593,felicia_(vampire),4,1582 +1529256,robin_(fire_emblem)_(female),4,1581 +527774,monkey_d._luffy,4,1577 +1630529,ayanami_(kancolle),4,1570 +1273262,rensouhou-kun,4,1569 +1525544,itadori_yuuji,4,1569 +1255499,satou_kazuma,4,1567 +13036,misumi_nagisa,4,1560 +1631054,isuzu_(kancolle),4,1560 +42,souseiseki,4,1559 +396068,watatsuki_no_yorihime,4,1557 +1248309,sendai_hakurei_no_miko,4,1557 +1630445,agano_(kancolle),4,1556 +1824247,etna_(disgaea),4,1554 +1386288,harukawa_maki,4,1554 +1467675,symboli_rudolf_(umamusume),4,1553 +1493508,princess_king_boo,4,1552 +1417247,japanese_crested_ibis_(kemono_friends),4,1551 +1245442,i-class_destroyer,4,1550 +1586177,omaru_polka,4,1546 +1338854,mei_(overwatch),4,1543 +1734866,musashi_kai_ni_(kancolle),4,1543 +712088,leafa,4,1540 +601490,monokuma,4,1540 +727253,hojo_karen,4,1540 +1522661,angelina_(arknights),4,1539 +1316170,ohara_mari,4,1537 +1542253,rosehip_(girls_und_panzer),4,1536 +1231206,levi_(shingeki_no_kyojin),4,1535 +1407467,northern_white-faced_owl_(kemono_friends),4,1535 +697698,kirigaya_suguha,4,1531 +445901,gengar,4,1525 +1682833,satono_diamond_(umamusume),4,1525 +1561724,fukumaru_koito,4,1523 +657914,tokisaki_kurumi,4,1522 +1631141,libeccio_(kancolle),4,1522 +1560134,morpeko_(full),4,1522 +1560614,saren_(princess_connect!),4,1521 +1408471,camilla_(fire_emblem),4,1520 +674693,skyla_(pokemon),4,1514 +1814848,takodachi_(ninomae_ina'nis),4,1514 +1423783,eternity_larva,4,1512 +479648,hirasawa_ui,4,1510 +427708,ushiromiya_ange,4,1509 +354943,edward_elric,4,1505 +700409,tsumiki_mikan,4,1504 +1447804,oshida_(girls_und_panzer),4,1504 +547415,funami_yui,4,1503 +1253671,gamagoori_ira,4,1503 +1596245,kaeya_(genshin_impact),4,1501 +1304225,mikazuki_munechika,4,1500 +1561956,bremerton_(scorching-hot_training)_(azur_lane),4,1500 
+595668,hatsune_miku_(append),4,1496 +404724,kagura_(gintama),4,1495 +1382909,kawakaze_(kancolle),4,1495 +1789597,asakura_toru,4,1495 +1629414,artoria_pendragon_(alter_swimsuit_rider)_(fate),4,1494 +11427,kasugano_sakura,4,1492 +1446812,honolulu_(azur_lane),4,1491 +1266581,ramlethal_valentine,4,1490 +8586,chibi_usa,4,1489 +1525545,fushiguro_megumi,4,1489 +1409154,tohru_(maidragon),4,1487 +1295790,widowmaker_(overwatch),4,1485 +559717,purple_heart,4,1484 +10546,vegeta,4,1479 +1627626,leonardo_da_vinci_(fate),4,1479 +1317200,endeavor_(boku_no_hero_academia),4,1475 +401594,palutena,4,1474 +1631052,isonami_(kancolle),4,1474 +1654650,skadi_the_corrupting_heart_(arknights),4,1474 +1459802,shirase_sakuya,4,1472 +414819,francesca_lucchini,4,1471 +1524986,specter_(arknights),4,1471 +418600,kamui_gakupo,4,1470 +1317067,ringo_(touhou),4,1470 +1467838,oguri_cap_(umamusume),4,1469 +432194,tina_branford,4,1465 +657793,cure_peace,4,1462 +1593680,ashiya_douman_(fate),4,1462 +1627803,yang_guifei_(fate),4,1462 +432816,kishibe_rohan,4,1459 +1303947,kashuu_kiyomitsu,4,1454 +1344592,matsuno_choromatsu,4,1454 +1551569,nian_(arknights),4,1454 +1613891,albedo_(genshin_impact),4,1451 +1629640,martha_(fate),4,1450 +722867,ymir_(shingeki_no_kyojin),4,1448 +1534779,artoria_pendragon_(swimsuit_ruler)_(fate),4,1448 +1407816,silver_fox_(kemono_friends),4,1443 +1594464,bloop_(gawr_gura),4,1443 +474547,uiharu_kazari,4,1441 +1631100,katori_(kancolle),4,1440 +56878,fujibayashi_kyou,4,1439 +8151,hino_rei,4,1437 +1493611,kemomimi-chan_(naga_u),4,1437 +1631208,myoukou_(kancolle),4,1435 +1630559,enemy_aircraft_(kancolle),4,1432 +1577501,lisa_(genshin_impact),4,1431 +1631408,tashkent_(kancolle),4,1429 +403378,yuffie_kisaragi,4,1428 +1644998,mihono_bourbon_(umamusume),4,1425 +445833,jessie_(pokemon),4,1424 +375109,midna,4,1423 +1695267,noire_(neptune_series),4,1422 +470503,konjiki_no_yami,4,1421 +1639094,murakumo_kai_ni_(kancolle),4,1419 +1528228,le_malin_(azur_lane),4,1418 +1622591,koharu_(blue_archive),4,1417 +11051,sonozaki_shion,4,1414 +436223,nanasaki_ai,4,1413 +1499780,jeanne_d'arc_(swimsuit_archer)_(fate),4,1413 +1631062,jervis_(kancolle),4,1411 +1257906,elsa_(frozen),4,1410 +1627580,helena_blavatsky_(fate),4,1410 +1405975,grey_wolf_(kemono_friends),4,1410 +428744,yuri_lowell,4,1409 +1627695,marie_antoinette_(fate),4,1409 +344358,roronoa_zoro,4,1407 +1410565,fujiwara_chika,4,1407 +1644994,mayano_top_gun_(umamusume),4,1405 +437685,bayonetta,4,1404 +1409820,ezo_red_fox_(kemono_friends),4,1401 +550010,tsukikage_yuri,4,1400 +1627756,scathach_skadi_(fate),4,1400 +1292970,yaoyorozu_momo,4,1399 +439343,ragna_the_bloodedge,4,1397 +1288044,popuko,4,1397 +1757557,jeanne_d'arc_alter_(ver._shinjuku_1999)_(fate),4,1397 +466937,higashi_setsuna,4,1396 +1630671,furutaka_(kancolle),4,1396 +1627727,osakabe-hime_(fate),4,1396 +8545,kamio_misuzu,4,1394 +1768632,mysterious_heroine_x_alter_(fate),4,1393 +1586174,momosuzu_nene,4,1393 +8107,bulma,4,1391 +1186381,arch_bishop_(ragnarok_online),4,1391 +674691,elesa_(pokemon),4,1391 +1421907,komi_shouko,4,1391 +1368824,takamaki_anne,4,1390 +518079,tiki_(fire_emblem),4,1389 +126661,furukawa_nagisa,4,1389 +1862749,aris_(blue_archive),4,1389 +1307612,nishi_kinuyo,4,1388 +1524993,saria_(arknights),4,1388 +401823,sonic_the_hedgehog,4,1387 +1365551,sakura_futaba,4,1387 +1786496,artoria_caster_(fate),4,1387 +1344595,matsuno_todomatsu,4,1385 +1495228,konno_junko,4,1385 +602082,charlotte_dunois,4,1384 +392481,madotsuki,4,1383 +1631161,mamiya_(kancolle),4,1383 +54495,jean_pierre_polnareff,4,1380 
+1630726,hayashimo_(kancolle),4,1379 +707037,hoshi_syoko,4,1378 +1258989,failure_penguin,4,1378 +1532748,byleth_(fire_emblem)_(male),4,1371 +1599722,aru_(blue_archive),4,1371 +1304908,yamato-no-kami_yasusada,4,1369 +606875,little_red_riding_hood_(grimm),4,1368 +1572074,lucifer_(helltaker),4,1367 +419122,tatsumi_kanji,4,1364 +384100,natsume_rin,4,1363 +544077,kris_(pokemon),4,1362 +1534338,kicchou_yachie,4,1362 +1589926,kamina_(ttgl),4,1360 +592111,oshawott,4,1357 +695285,sakuma_mayu,4,1356 +682208,bridget_(guilty_gear),4,1354 +1630503,ark_royal_(kancolle),4,1351 +1459804,tsukioka_kogane,4,1351 +610092,yoshikawa_chinatsu,4,1349 +1525577,hoshiguma_(arknights),4,1348 +361258,ranma-chan,4,1347 +1462453,morino_rinze,4,1347 +714995,yuno_(hidamari_sketch),4,1344 +520787,elin,4,1342 +1631092,kamoi_(kancolle),4,1342 +1517401,lize_helesta,4,1342 +970886,roll_(mega_man),4,1338 +382240,nekomusume,4,1337 +1667148,m4a1_(girls'_frontline),4,1337 +935913,alice_margatroid_(pc-98),4,1335 +1588986,abigail_williams_(swimsuit_foreigner)_(fate),4,1334 +1243753,tsukumo_benben,4,1332 +510496,ryu_(street_fighter),4,1330 +446937,reiuji_utsuho_(bird),4,1330 +1533840,kurokoma_saki,4,1330 +8152,kino_makoto,4,1329 +1667154,an-94_(girls'_frontline),4,1329 +1674212,irys_(hololive),4,1327 +1551542,dido_(azur_lane),4,1325 +13037,yukishiro_honoka,4,1323 +424220,silver_(pokemon),4,1322 +1460817,kuwayama_chiyuki,4,1321 +1581646,noelle_(genshin_impact),4,1321 +1386217,umikaze_(kancolle),4,1320 +1419028,komano_aunn,4,1318 +1805355,suletta_mercury,4,1316 +1377502,tamura_yuri,4,1311 +1412756,theresa_apocalypse,4,1309 +1467925,vodka_(umamusume),4,1308 +1526355,wattson_(apex_legends),4,1308 +1539717,reze_(chainsaw_man),4,1308 +547150,myoudouin_itsuki,4,1307 +1433946,marina_(splatoon),4,1307 +1479089,okita_souji_alter_(fate),4,1306 +1631145,little_boy_admiral_(kancolle),4,1305 +1514982,elio_(pokemon),4,1305 +1451890,kaguya_luna,4,1304 +1618002,slime_(genshin_impact),4,1303 +816414,joseph_joestar_(old),4,1302 +1630693,hagikaze_(kancolle),4,1302 +1732422,kasumi_kai_ni_(kancolle),4,1302 +1584986,hoshino_(blue_archive),4,1301 +428824,bruno_bucciarati,4,1299 +1307567,cagliostro_(granblue_fantasy),4,1299 +1627148,koyanskaya_(fate),4,1299 +445414,bianca_(pokemon),4,1297 +663499,totoki_airi,4,1297 +658638,oreki_houtarou,4,1296 +1375862,mimikyu,4,1296 +1513845,scorbunny,4,1296 +387299,prisma_illya,4,1292 +1360561,natsuki_subaru,4,1292 +1438046,android_21,4,1292 +1631358,shouhou_(kancolle),4,1291 +451173,haramura_nodoka,4,1290 +617637,oomuro_sakurako,4,1290 +1695263,blanc_(neptune_series),4,1289 +1667146,g11_(girls'_frontline),4,1287 +1462526,higuchi_kaede,4,1286 +1549066,himemori_luna,4,1285 +1649113,gilles_de_rais_(caster)_(fate),4,1283 +487228,sengoku_nadeko,4,1283 +1304230,tsurumaru_kuninaga,4,1280 +396219,tokiko_(touhou),4,1276 +1597302,beidou_(genshin_impact),4,1275 +405776,riesz,4,1274 +1222667,ike_(fire_emblem),4,1274 +1630643,etorofu_(kancolle),4,1274 +1667152,m4_sopmod_ii_(girls'_frontline),4,1273 +1527621,marianne_von_edmund,4,1273 +1432308,nishida_satono,4,1272 +12091,tohno_akiha,4,1270 +1630653,fletcher_(kancolle),4,1269 +1392841,altera_(fate),4,1268 +1533372,miyamoto_musashi_(swimsuit_berserker)_(fate),4,1268 +416940,su-san,4,1267 +1258306,ninomiya_asuka,4,1267 +1682831,kitasan_black_(umamusume),4,1266 +1470527,natori_sana,4,1265 +489852,akizuki_ryo,4,1264 +1296572,todoroki_shouto,4,1259 +663460,sasaki_chie,4,1257 +1667150,st_ar-15_(girls'_frontline),4,1256 +1535146,schwarz_(arknights),4,1255 +1226975,diana_cavendish,4,1254 
+653217,nepgear,4,1253 +1630497,arashi_(kancolle),4,1252 +1407454,atalanta_(fate),4,1249 +710980,kadotani_anzu,4,1247 +1406536,kagamihara_nadeshiko,4,1246 +1654886,kudamaki_tsukasa,4,1246 +622669,alisa_ilinichina_amiella,4,1245 +570743,kirin_(armor),4,1244 +1630636,enemy_lifebuoy_(kancolle),4,1244 +1525687,blue_poison_(arknights),4,1244 +1677020,fairy_knight_tristan_(fate),4,1242 +599168,hourai_doll,4,1241 +475559,araragi_koyomi,4,1240 +7449,daidouji_tomoyo,4,1238 +618147,honma_meiko,4,1238 +1511279,murasaki_shikibu_(fate),4,1237 +1508231,sunazuka_akira,4,1236 +532518,cure_marine,4,1233 +1544952,piers_(pokemon),4,1233 +464857,sailor_mercury,4,1231 +430852,hachikuji_mayoi,4,1231 +437588,kuma_(persona_4),4,1231 +1762182,ranni_the_witch,4,1231 +408655,kasane_teto,4,1230 +1631154,maikaze_(kancolle),4,1230 +1522707,siege_(arknights),4,1230 +1440177,z23_(azur_lane),4,1229 +433648,gyro_zeppeli,4,1228 +1305388,namazuo_toushirou,4,1226 +421102,minna-dietlinde_wilcke,4,1225 +1630544,colorado_(kancolle),4,1224 +1432310,teireida_mai,4,1223 +1627780,tokitarou_(fate),4,1222 +389156,frederica_bernkastel,4,1221 +1631255,ooshio_(kancolle),4,1221 +4908,kero,4,1220 +1513846,sobble,4,1219 +1525770,hop_(pokemon),4,1218 +1719005,hakui_koyori,4,1218 +445954,bulbasaur,4,1215 +618301,ivan_karelin,4,1215 +542683,sonya_(kill_me_baby),4,1214 +593284,naegi_makoto,4,1214 +1529257,robin_(fire_emblem)_(male),4,1214 +1425456,seele_vollerei,4,1213 +1631260,oyashio_(kancolle),4,1211 +1282191,hoshino_fumina,4,1210 +1388692,touhoku_kiritan,4,1210 +181219,kirijou_mitsuru,4,1208 +1631270,richelieu_(kancolle),4,1208 +596093,caren_hortensia,4,1205 +1300741,kousaka_reina,4,1205 +1305386,honebami_toushirou,4,1204 +1625330,hibiki_(blue_archive),4,1204 +597205,silica,4,1201 +11052,maebara_keiichi,4,1199 +734427,sona_(league_of_legends),4,1199 +592340,snivy,4,1197 +1675744,selen_tatsuki,4,1196 +633741,furutani_himawari,4,1194 +375278,tohsaka_tokiomi,4,1193 +1414917,alpaca_suri_(kemono_friends),4,1193 +1512399,wraith_(apex_legends),4,1193 +1299395,amanogawa_kirara,4,1192 +513774,shantae,4,1190 +1624311,nagi_(kannagi),4,1189 +1303946,midare_toushirou,4,1187 +1531714,baltimore_(azur_lane),4,1185 +533537,ciel_(tsukihime),4,1184 +1407495,eurasian_eagle_owl_(kemono_friends),4,1184 +1536889,kanroji_mitsuri,4,1181 +1622307,hasumi_(blue_archive),4,1181 +1472521,medea_(fate),4,1180 +1630505,asagumo_(kancolle),4,1178 +1443455,igarashi_futaba_(shiromanta),4,1178 +1441567,monika_(doki_doki_literature_club),4,1177 +681655,oikawa_shizuku,4,1175 +1242334,ta-class_battleship,4,1174 +1575935,nagatoro_hayase,4,1171 +1181678,boo_(mario),4,1168 +1341419,anila_(granblue_fantasy),4,1168 +1288043,pipimi,4,1166 +1383156,gladion_(pokemon),4,1163 +1571821,modeus_(helltaker),4,1163 +1542249,assam_(girls_und_panzer),4,1162 +1630538,chitose_(kancolle),4,1162 +1242336,ru-class_battleship,4,1160 +1827774,sanji_(one_piece),4,1158 +1533833,haniyasushin_keiki,4,1158 +1641844,yanfei_(genshin_impact),4,1157 +1399519,tamaki_iroha,4,1156 +7459,white_mage,4,1155 +1719006,kazama_iroha,4,1153 +375868,kururugi_suzaku,4,1152 +672515,abe_nana,4,1152 +1277829,sento_isuzu,4,1152 +1318793,zeta_(granblue_fantasy),4,1152 +1667157,m16a1_(girls'_frontline),4,1149 +717606,yukinoshita_yukino,4,1148 +1696728,rydia_(ff4),4,1147 +445832,james_(pokemon),4,1145 +445831,green_(pokemon),4,1145 +1292110,shidare_hotaru,4,1145 +1442807,marie_(girls_und_panzer),4,1145 +1374966,kamado_tanjirou,4,1144 +1544888,victor_(pokemon),4,1143 +1631180,mikuma_(kancolle),4,1142 
+1305416,saniwa_(touken_ranbu),4,1142 +8760,kusanagi_motoko,4,1139 +1596439,mutsuki_(blue_archive),4,1139 +12208,shihouin_yoruichi,4,1138 +709319,yuigahama_yui,4,1134 +533539,kohaku_(tsukihime),4,1133 +1536880,zara_(azur_lane),4,1133 +644430,rias_gremory,4,1132 +1631019,i-26_(kancolle),4,1131 +1405299,emperor_penguin_(kemono_friends),4,1130 +1781047,artoria_pendragon_(alter_swimsuit_rider)_(second_ascension)_(fate),4,1129 +1630536,chikuma_(kancolle),4,1128 +1467946,haru_urara_(umamusume),4,1128 +1667163,commander_(girls'_frontline),4,1127 +1406070,jaguar_(kemono_friends),4,1127 +7559,luigi,4,1126 +1630717,hatsushimo_(kancolle),4,1126 +1496142,minamoto_sakura,4,1126 +10145,tsuruya,4,1125 +1630667,fumizuki_(kancolle),4,1125 +558421,high_priest_(ragnarok_online),4,1122 +1630715,hatsukaze_(kancolle),4,1122 +464855,sailor_venus,4,1121 +459048,momozono_love,4,1121 +1627521,boudica_(fate),4,1118 +575975,iris_(pokemon),4,1117 +544078,lucas_(pokemon),4,1116 +425768,umbreon,4,1116 +528716,yui_(angel_beats!),4,1116 +746341,tharja_(fire_emblem),4,1116 +1399291,pod_(nier_automata),4,1116 +1368823,niijima_makoto,4,1115 +374467,princess_daisy,4,1114 +1724523,irida_(pokemon),4,1114 +1452674,frankenstein's_monster_(fate),4,1113 +727697,sylveon,4,1113 +1492381,shiina_yuika,4,1113 +630836,huang_baoling,4,1112 +1525780,rotom_phone,4,1112 +502557,nu-13,4,1111 +1639053,fubuki_kai_ni_(kancolle),4,1111 +1495229,mizuno_ai,4,1111 +1657251,elira_pendora,4,1111 +1492855,hawks_(boku_no_hero_academia),4,1110 +12518,leona_heidern,4,1108 +1401930,nemoto_hina,4,1108 +712140,kawashima_momo,4,1107 +1589903,yin_(darker_than_black),4,1105 +1239071,tsukuyomi_shirabe,4,1105 +1498626,rita_rossweisse,4,1105 +445848,squirtle,4,1104 +1292974,ashido_mina,4,1104 +1645011,tamamo_cross_(umamusume),4,1104 +1496188,yuzuki_choco,4,1104 +1626315,dusk_(arknights),4,1103 +1556681,rex_(xenoblade),4,1102 +1349628,fubuki_(one-punch_man),4,1100 +1125853,enkidu_(fate),4,1098 +1304228,izumi-no-kami_kanesada,4,1097 +464856,sailor_mars,4,1096 +1431995,minamoto_no_raikou_(swimsuit_lancer)_(fate),4,1095 +1594449,kashino_(azur_lane),4,1095 +1542542,higashiyama_kobeni,4,1094 +1631147,littorio_(kancolle),4,1093 +1846970,rebecca_(cyberpunk),4,1093 +1281721,slaine_troyard,4,1092 +1619061,jumpy_dumpty,4,1091 +1232066,armin_arlert,4,1089 +555,haro,4,1088 +1440247,yamashiro_(azur_lane),4,1088 +1534340,joutouguu_mayumi,4,1088 +1589993,chongyun_(genshin_impact),4,1088 +1629448,passionlip_(fate),4,1086 +1631088,kako_(kancolle),4,1086 +1631518,zara_(kancolle),4,1086 +1846070,hibiki_(cheerleader)_(blue_archive),4,1086 +423518,berserker_(fate/zero),4,1085 +420502,guido_mista,4,1085 +1631246,nowaki_(kancolle),4,1085 +1629366,xuangzang_sanzang_(fate),4,1085 +396070,watatsuki_no_toyohime,4,1084 +425771,glaceon,4,1083 +713156,hoshimiya_ichigo,4,1082 +1630728,hayasui_(kancolle),4,1080 +1518710,hisakawa_hayate,4,1079 +1489783,carpaccio_(girls_und_panzer),4,1077 +1298160,kotonoha_akane,4,1077 +1391739,karna_(fate),4,1076 +528478,sabrina_(pokemon),4,1074 +1644983,eishin_flash_(umamusume),4,1073 +550750,cure_sunshine,4,1072 +1447775,katou_asuka,4,1072 +1631068,johnston_(kancolle),4,1072 +421683,angel_(kof),4,1071 +1481030,caenis_(fate),4,1071 +1517396,ange_katrina,4,1070 +1518711,hisakawa_nagi,4,1070 +422389,cyndaquil,4,1069 +902082,tateyama_ayano,4,1069 +1295903,pharah_(overwatch),4,1069 +1560602,kyouka_(princess_connect!),4,1069 +421660,tsunade_(naruto),4,1067 +1533343,bede_(pokemon),4,1067 +542685,oribe_yasuna,4,1065 +1332548,chi-chi_(dragon_ball),4,1064 
+472786,america_(hetalia),4,1063 +657796,cure_beauty,4,1062 +1527167,azura_(fire_emblem),4,1060 +7583,fukuzawa_yumi,4,1059 +564326,nakamura_yuri,4,1059 +1517393,inui_toko,4,1059 +1611363,kureiji_ollie,4,1059 +416695,uryuu_ryuunosuke,4,1058 +1589994,xingqiu_(genshin_impact),4,1058 +394620,yuuki_mikan,4,1057 +1814893,magical_mirai_miku,4,1055 +1629357,medb_(fate),4,1055 +1462508,tokino_sora,4,1054 +8305,tomoe_hotaru,4,1052 +1627153,medjed_(fate),4,1051 +1522347,suzuhara_lulu,4,1051 +1515913,reines_el-melloi_archisorte,4,1050 +1440644,inuyama_aoi,4,1049 +12258,katsuragi_misato,4,1046 +503437,eve_(elsword),4,1046 +1177793,katou_megumi,4,1046 +439063,rachel_alucard,4,1043 +12210,kuchiki_rukia,4,1042 +15312,millia_rage,4,1040 +1685273,tsukumo_sana,4,1040 +532519,cure_blossom,4,1039 +587823,tsurumaki_maki,4,1039 +1631192,mochizuki_(kancolle),4,1038 +1541326,hoshikawa_sara,4,1038 +1789592,osaki_amana,4,1036 +6177,moogle,4,1034 +601379,takagi-san,4,1034 +1620953,hifumi_(blue_archive),4,1034 +654355,kazanari_tsubasa,4,1033 +1515526,serizawa_asahi,4,1033 +485033,sora_(kingdom_hearts),4,1032 +1732814,kongou_kai_ni_(kancolle),4,1031 +1607439,arjuna_(fate),4,1031 +1343724,miyamizu_mitsuha,4,1031 +382261,subaru_nakajima,4,1029 +596939,akali,4,1029 +1402193,nanachi_(made_in_abyss),4,1027 +1420942,yuuki_setsuna_(love_live!),4,1027 +445845,charmander,4,1025 +428743,estellise_sidos_heurassein,4,1025 +592339,tepig,4,1024 +1387115,scathach_(swimsuit_assassin)_(fate),4,1024 +1593360,bibi_(tokoyami_towa),4,1023 +1617664,rosaria_(genshin_impact),4,1023 +186196,winry_rockbell,4,1021 +1241333,sanageyama_uzu,4,1020 +101524,bowser,4,1019 +15840,kos-mos,4,1018 +1155353,kars_(jojo),4,1018 +664191,tada_riina,4,1018 +1241962,kishinami_hakuno_(female),4,1017 +1541815,rabbit_yukine,4,1017 +1334378,albedo_(overlord),4,1017 +1366296,popplio,4,1017 +1448421,pearl_(splatoon),4,1016 +1525546,kugisaki_nobara,4,1016 +1554856,hayakawa_aki,4,1016 +1304904,horikawa_kunihiro,4,1014 +1522439,niwatari_kutaka,4,1014 +1280966,kaneki_ken,4,1013 +1722586,abigail_williams_(traveling_outfit)_(fate),4,1013 +1509864,prinz_eugen_(unfading_smile)_(azur_lane),4,1012 +1530489,eyjafjalla_(arknights),4,1012 +1646767,twin_turbo_(umamusume),4,1012 +7566,kamikita_komari,4,1010 +628623,sorceress_(dragon's_crown),4,1009 +12099,komaki_manaka,4,1008 +1462463,arisugawa_natsuha,4,1008 +1503277,makaino_ririmu,4,1008 +1519338,nursery_rhyme_(fate),4,1007 +1631276,roma_(kancolle),4,1007 +1436151,uzaki_hana,4,1006 +87514,cure_black,4,1005 +1318266,clarisse_(granblue_fantasy),4,1004 +425767,espeon,4,1003 +1631106,kazagumo_(kancolle),4,1002 +1334050,frisk_(undertale),4,1002 +1462454,sonoda_chiyoko,4,999 +9090,mizunashi_akari,4,998 +385133,ushiromiya_maria,4,998 +705643,ib_(ib),4,998 +716728,kitashirakawa_tamako,4,998 +1341416,ferry_(granblue_fantasy),4,995 +1412049,mitake_ran,4,994 +1350177,violet_evergarden,4,993 +1805391,miorine_rembran,4,993 +1627618,king_hassan_(fate),4,992 +1243761,tsukumo_yatsuhashi,4,991 +1386434,momota_kaito,4,990 +715313,marth_(fire_emblem),4,989 +1319288,yoroizuka_mizore,4,989 +1459806,tanaka_mamimi,4,989 +1631437,tsushima_(kancolle),4,988 +1285741,suzukaze_aoba,4,986 +1643885,mari_(blue_archive),4,985 +657794,cure_march,4,983 +1639062,haruna_kai_ni_(kancolle),4,983 +286,kooh,4,982 +176593,takeba_yukari,4,982 +1631396,takanami_(kancolle),4,982 +1548047,vikala_(granblue_fantasy),4,982 +1229728,kitazawa_shiho,4,981 +1292136,raphtalia,4,981 +1462457,komiya_kaho,4,981 +677075,reiner_braun,4,979 +1387746,yuri_plisetsky,4,978 
+539676,taneshima_popura,4,977 +1631040,intrepid_(kancolle),4,977 +1333583,kozakura_marry,4,974 +1630474,akitsushima_(kancolle),4,974 +657791,cure_happy,4,971 +1660217,lucoa_(maidragon),4,971 +431436,momo_velia_deviluke,4,970 +1806092,shiroko_(swimsuit)_(blue_archive),4,970 +1768637,miyu_(blue_archive),4,970 +1403111,sucy_manbavaran,4,969 +464858,sailor_jupiter,4,968 +1513843,grookey,4,967 +14569,jill_valentine,4,966 +402795,meowth,4,966 +717859,nate_(pokemon),4,965 +717540,yotsuba_alice,4,965 +7570,hina_ichigo,4,964 +12207,inoue_orihime,4,964 +480039,manabe_nodoka,4,963 +605171,kujou_karen,4,963 +1435314,oyama_mahiro,4,962 +401814,saotome_alto,4,960 +594282,cure_melody,4,960 +16268,son_gohan,4,958 +1257905,shirayuki_hime,4,958 +487246,araragi_karen,4,956 +618134,karina_lyle,4,956 +1783170,meltryllis_(swimsuit_lancer)_(first_ascension)_(fate),4,955 +1579977,cheshire_(azur_lane),4,955 +1654887,iizunamaru_megumu,4,955 +1303902,miss_cloud,4,952 +472784,japan_(hetalia),4,951 +1362417,elaina_(majo_no_tabitabi),4,951 +1290263,danua,4,950 +1349627,saitama_(one-punch_man),4,948 +1464773,nakano_itsuki,4,948 +7587,toudou_shimako,4,946 +655527,kayneth_el-melloi_archibald,4,946 +445933,vaporeon,4,946 +1442307,ichigo_(darling_in_the_franxx),4,946 +1584985,nonomi_(blue_archive),4,946 +354942,alphonse_elric,4,945 +13066,okazaki_yumemi,4,945 +1572402,xiangling_(genshin_impact),4,945 +9687,kula_diamond,4,944 +1373495,airfield_princess,4,943 +6000,baiken,4,942 +1230568,isabelle_(animal_crossing),4,939 +1401126,yorha_type_a_no._2,4,938 +1684882,florence_nightingale_(trick_or_treatment)_(fate),4,938 +554323,may_(guilty_gear),4,937 +1583984,kino_(kino_no_tabi),4,937 +445840,jigglypuff,4,937 +597292,fujisaki_chihiro,4,936 +472787,united_kingdom_(hetalia),4,934 +1767936,cu_chulainn_alter_(fate),4,934 +1654888,tenkyuu_chimata,4,934 +664000,ichihara_nina,4,933 +717654,kiriya_aoi,4,931 +615130,anjou_naruko,4,930 +1630481,amagi_(kancolle),4,929 +1436229,uehara_ayumu,4,929 +533538,hisui_(tsukihime),4,928 +701392,mioda_ibuki,4,928 +1644977,agnes_digital_(umamusume),4,928 +508153,okabe_rintarou,4,927 +713607,sawa_azusa,4,927 +1473353,arthur_pendragon_(fate),4,926 +1766156,astolfo_(sailor_paladin)_(fate),4,926 +376498,zidane_tribal,4,925 +1323324,yuna_(ff10),4,922 +456946,assassin_(fate/zero),4,922 +610702,oktavia_von_seckendorff,4,919 +628849,yuuki_(sao),4,919 +1630548,commandant_teste_(kancolle),4,919 +438329,torchic,4,918 +471218,aragaki_ayase,4,918 +745262,celestia_ludenberg,4,918 +1631110,kinu_(kancolle),4,916 +1645007,seiun_sky_(umamusume),4,916 +1677018,fairy_knight_gawain_(fate),4,916 +10082,sephiroth,4,915 +688703,miia_(monster_musume),4,915 +1606513,len_(tsukihime),4,914 +1672711,suomi_(girls'_frontline),4,914 +12093,tohno_shiki,4,913 +606666,shindou_takuto,4,913 +1631480,yamagumo_(kancolle),4,913 +620348,keith_goodman,4,912 +1405056,lion_(kemono_friends),4,912 +1055501,micaiah_(fire_emblem),4,911 +439615,misaka_imouto,4,910 +1478568,merlin_(fate),4,910 +1445023,hikawa_hina,4,908 +567695,cheren_(pokemon),4,907 +1468121,nakano_ichika,4,907 +1677022,fairy_knight_lancelot_(fate),4,907 +16210,lenna_charlotte_tycoon,4,901 +388724,lala_satalin_deviluke,4,900 +1783064,miyamoto_musashi_(swimsuit_berserker)_(second_ascension)_(fate),4,900 +1591636,goh_(pokemon),4,899 +1515002,aisha_landar,4,898 +425936,doujima_nanako,4,898 +1533061,bernadetta_von_varley,4,898 +1627134,euryale_(fate),4,897 +676594,lulu_(league_of_legends),4,897 +474262,slime_(dragon_quest),4,896 +471221,kousaka_kyousuke,4,896 
+1630493,aquila_(kancolle),4,896 +1235294,kurokawa_eren,4,895 +1304226,hotarumaru,4,894 +1444059,hammann_(azur_lane),4,894 +1459172,sakuragi_mano,4,894 +1564800,moona_hoshinova,4,894 +1598866,sucrose_(genshin_impact),4,894 +1672682,type_95_(girls'_frontline),4,892 +1631536,hilichurl_(genshin_impact),4,891 +1762549,mordred_(memories_at_trifas)_(fate),4,889 +1319289,kasaki_nozomi,4,889 +16119,star_platinum,4,888 +428825,narancia_ghirga,4,888 +743241,racing_miku,4,888 +1644970,grass_wonder_(umamusume),4,888 +1487055,sasaki_saku,4,888 +378846,tenjouin_asuka,4,887 +252261,godzilla,4,887 +1667166,ro635_(girls'_frontline),4,887 +1439987,cleveland_(azur_lane),4,887 +1311289,lyria_(granblue_fantasy),4,886 +1631984,ako_(blue_archive),4,886 +1314604,sakurajima_mai,4,885 +1534941,claude_von_riegan,4,885 +664381,mukai_takumi,4,884 +1254335,marie_rose,4,884 +1366290,okumura_haru,4,884 +663434,kohinata_miho,4,883 +1688938,vira_(granblue_fantasy),4,883 +1439979,graf_zeppelin_(azur_lane),4,883 +1477331,osaki_tenka,4,883 +596161,steven_stone,4,882 +601489,maizono_sayaka,4,882 +1459180,kazano_hiori,4,882 +547417,sugiura_ayano,4,881 +587354,lux_(league_of_legends),4,881 +1235700,seiren_(suite_precure),4,880 +662499,nibutani_shinka,4,880 +396114,lei_lei,4,878 +1631064,jingei_(kancolle),4,876 +1298161,kotonoha_aoi,4,875 +1420690,hikawa_sayo,4,875 +1500657,hayasaka_ai,4,875 +1525261,platinum_(arknights),4,875 +16208,faris_scherwiz,4,873 +378317,garnet_til_alexandros_xvii,4,873 +621741,nyarlathotep_(nyaruko-san),4,873 +1292965,jirou_kyouka,4,873 +1524996,ifrit_(arknights),4,873 +1524346,pramanix_(arknights),4,872 +1275716,hatsuharu_(kancolle),4,871 +1690906,kamisato_ayato,4,871 +661692,dante_(devil_may_cry),4,870 +660387,annie_leonhardt,4,870 +1631235,nenohi_(kancolle),4,870 +1246021,momoe_nagisa,4,870 +1456824,nekomusume_(gegege_no_kitarou_6),4,870 +12213,matsumoto_rangiku,4,868 +1630540,chiyoda_(kancolle),4,867 +12095,aozaki_aoko,4,866 +1462223,hatoba_tsugu,4,866 +1557434,ceobe_(arknights),4,866 +1606545,diona_(genshin_impact),4,865 +726764,sakura_chiyo,4,864 +1591011,nia_(blade)_(xenoblade),4,864 +1569872,yuuki_(princess_connect!),4,864 +1594283,shinano_(azur_lane),4,864 +1594498,alice_zuberg,4,863 +1257904,aino_megumi,4,863 +1337528,aki_(girls_und_panzer),4,863 +411080,lopunny,4,862 +665521,ogata_chieri,4,861 +893075,suou_momoko,4,861 +1461094,shizuka_rin,4,860 +1480638,nadia_la_arwall,4,859 +526169,trunks_(dragon_ball),4,859 +491993,chibi_miku,4,859 +657795,cure_sunny,4,859 +1258956,anna_(frozen),4,859 +466365,mega_man_(character),4,858 +1552816,blaze_(arknights),4,858 +1719012,takane_lui,4,858 +1455448,nagato_(azur_lane),4,857 +465401,ten'ou_haruka,4,856 +981832,guts_(berserk),4,855 +503373,furudo_erika,4,854 +701163,kisaragi_shintarou,4,854 +1340376,murata_himeko,4,854 +1544890,allister_(pokemon),4,854 +87516,cure_white,4,853 +1829230,octoling_girl,4,853 +1267895,ayane_(doa),4,851 +452145,miyanaga_saki,4,851 +487224,kanbaru_suruga,4,851 +890228,alisa_(girls_und_panzer),4,851 +1478519,cu_chulainn_(caster)_(fate),4,851 +1304346,yagen_toushirou,4,851 +1479891,rimuru_tempest,4,851 +1427681,luoxiaohei,4,851 +1572068,cerberus_(helltaker),4,851 +602083,laura_bodewig,4,849 +606793,alastor_(shakugan_no_shana),4,848 +416104,eva_02,4,848 +461883,chikorita,4,848 +1630999,hiyou_(kancolle),4,848 +557902,priest_(ragnarok_online),4,847 +1243506,magical_ruby,4,847 +411757,roy_(fire_emblem),4,847 +425769,leafeon,4,847 +717114,kasumigaoka_utaha,4,847 +1644982,curren_chan_(umamusume),4,847 +1246632,monomi_(danganronpa),4,846 
+1537880,dimitri_alexandre_blaiddyd,4,845 +1627523,bradamante_(fate),4,845 +520564,lucy_heartfilia,4,844 +588022,cecilia_alcott,4,844 +1275714,arare_(kancolle),4,844 +381743,yumehara_nozomi,4,843 +652455,maou_(maoyuu),4,843 +712653,kondou_taeko,4,843 +1631178,mikazuki_(kancolle),4,842 +1682834,matikane_tannhauser_(umamusume),4,842 +437349,garchomp,4,841 +877404,yokoyama_nao,4,840 +1631405,tanikaze_(kancolle),4,840 +1412894,tsurumaki_kokoro,4,840 +1584990,arona_(blue_archive),4,840 +1592675,bianca_(dq5),4,838 +1631297,samuel_b._roberts_(kancolle),4,837 +1578300,taihou_(enraptured_companion)_(azur_lane),4,837 +1733431,murasame_kai_ni_(kancolle),4,836 +1631290,sagiri_(kancolle),4,836 +1792537,zabaniyya_(housamo),4,836 +1533059,dorothea_arnault,4,836 +662826,himejima_akeno,4,835 +1631170,matsuwa_(kancolle),4,834 +1439994,akashi_(azur_lane),4,833 +1525557,nanami_kento,4,832 +1551985,sirius_(azure_horizons)_(azur_lane),4,832 +1222099,nanao_yuriko,4,830 +1551455,silence_(arknights),4,830 +445935,flareon,4,829 +623899,shirabe_ako,4,829 +382135,reinforce_zwei,4,828 +467599,takoluka,4,828 +1270627,jouga_maya,4,828 +1327418,maria_cadenzavna_eve,4,825 +1205702,kirishima_touka,4,825 +1560591,yui_(princess_connect!),4,825 +1345441,asahina_mirai,4,825 +1572071,justice_(helltaker),4,825 +1645715,zero_(mega_man),4,824 +1239636,inumuta_houka,4,824 +1605760,iori_(blue_archive),4,824 +1285682,iroha_(samurai_spirits),4,823 +1644965,air_groove_(umamusume),4,823 +691588,pit_(kid_icarus),4,822 +457596,diego_brando,4,822 +1279056,revy_(black_lagoon),4,821 +1124739,angela_balzac,4,821 +1515527,izumi_mei,4,821 +14172,rainbow_mika,4,820 +398745,lambdadelta,4,820 +420074,izayoi_aki,4,820 +1528909,corrin_(fire_emblem)_(male),4,819 +1631288,sado_(kancolle),4,818 +1536980,zara_(poolside_coincidence)_(azur_lane),4,818 +1644973,narita_brian_(umamusume),4,817 +1631002,hornet_(kancolle),4,817 +1390755,cosmog,4,816 +1263716,kiss-shot_acerola-orion_heart-under-blade,4,815 +1247199,braixen,4,815 +1630508,asakaze_(kancolle),4,815 +1571010,takasaki_yuu,4,815 +507427,anegasaki_nene,4,814 +1572599,rosmontis_(arknights),4,813 +1753908,goldenglow_(arknights),4,812 +439179,mohammed_avdol,4,811 +1291135,elphelt_valentine,4,811 +8711,prinny,4,810 +654784,lord_el-melloi_ii,4,810 +435392,boa_hancock,4,809 +628333,yumi_(senran_kagura),4,809 +1293670,yamada_elf,4,809 +536735,yuuki_juudai,4,807 +666040,takamori_aiko,4,807 +1351075,yunyun_(konosuba),4,807 +176585,yamagishi_fuuka,4,806 +50896,pichu,4,806 +429524,mudkip,4,806 +1408788,brown_bear_(kemono_friends),4,806 +434747,kira_yoshikage,4,805 +1630704,harukaze_(kancolle),4,805 +1439976,kisaragi_(azur_lane),4,805 +1353706,mumei_(kabaneri),4,804 +1367142,litten,4,804 +487247,araragi_tsukihi,4,803 +711467,kamukura_izuru,4,803 +1462595,andira_(granblue_fantasy),4,803 +1631293,sakawa_(kancolle),4,802 +1369698,hau_(pokemon),4,801 +378592,usada_hikaru,4,799 +400139,kaiou_michiru,4,799 +1790620,clumsy_nun_(diva),4,799 +1760945,kuki_shinobu,4,799 +555676,makoto_(street_fighter),4,798 +538729,cure_moonlight,4,798 +1629426,mysterious_heroine_x_(fate),4,798 +1765868,jeanne_d'arc_alter_(avenger)_(third_ascension)_(fate),4,798 +1506004,amagi_(azur_lane),4,798 +1628204,momoi_(blue_archive),4,798 +1768617,scathach_(piercing_bunny)_(fate),4,794 +668055,kohinata_miku,4,793 +1628203,midori_(blue_archive),4,793 +414192,rx-78-2,4,792 +436529,toon_link,4,792 +1304942,female_saniwa_(touken_ranbu),4,792 +1443456,takeda_harumi_(shiromanta),4,792 +616166,caitlyn_(league_of_legends),4,791 
+1374971,female_protagonist_(pokemon_go),4,791 +1420943,tennouji_rina,4,791 +684942,garry_(ib),4,790 +713599,koyama_yuzu,4,790 +1302247,sawamura_spencer_eriri,4,790 +1695265,vert_(neptune_series),4,789 +1644990,king_halo_(umamusume),4,789 +1846972,lucy_(cyberpunk),4,789 +1764432,jeanne_d'arc_(third_ascension)_(fate),4,788 +1252365,calem_(pokemon),4,787 +1270628,natsu_megumi,4,787 +1472931,y'shtola_rhul,4,786 +664518,kobayakawa_sae,4,786 +390798,eva_01,4,783 +462517,yamabuki_inori,4,783 +1407120,humboldt_penguin_(kemono_friends),4,783 +1456294,admiral_graf_spee_(azur_lane),4,783 +1233044,yuuki_haru,4,782 +420577,shampoo_(ranma_1/2),4,781 +455510,cure_peach,4,781 +1432652,jack-o'_valentine,4,781 +1578189,atago_(stunning_speedster)_(azur_lane),4,781 +1398095,hinatsuru_ai,4,780 +437875,takei_hisa,4,779 +1452666,brynhildr_(fate),4,779 +664499,hino_akane_(idolmaster),4,778 +1627556,edmond_dantes_(fate),4,778 +701389,sonia_nevermind,4,777 +1579913,yamato_(one_piece),4,777 +1404427,tsuchinoko_(kemono_friends),4,776 +1569874,yuni_(princess_connect!),4,776 +1561721,ichikawa_hinana,4,776 +1211856,kiki_(majo_no_takkyuubin),4,775 +1419237,tanned_cirno,4,775 +1405052,moose_(kemono_friends),4,775 +1600783,ryoumen_sukuna_(jujutsu_kaisen),4,775 +1379601,guzma_(pokemon),4,773 +765367,chrom_(fire_emblem),4,771 +1593833,crewmate_(among_us),4,771 +16164,celes_chere,4,770 +545377,pannacotta_fugo,4,770 +1680386,senji_muramasa_(fate),4,770 +1645001,narita_taishin_(umamusume),4,770 +1489740,aki_rosenthal,4,770 +633080,takakura_himari,4,769 +1306594,ichigo_hitofuri,4,769 +1645009,super_creek_(umamusume),4,769 +1637361,pyra_(pro_swimmer)_(xenoblade),4,769 +1416553,maruyama_aya,4,768 +1634345,izuna_(blue_archive),4,768 +664409,sajo_yukimi,4,767 +1388311,elizabeth_bathory_(brave)_(fate),4,767 +1768610,saber_alter_(ver._shinjuku_1999)_(fate),4,767 +1701951,arataki_itto,4,765 +1766600,abigail_williams_(third_ascension)_(fate),4,764 +1667213,m200_(girls'_frontline),4,764 +9881,meer_campbell,4,763 +702015,matoi_(pso2),4,763 +693236,ibuki_(street_fighter),4,762 +601752,milla_maxwell,4,762 +664406,natalia_(idolmaster),4,762 +1462589,zooey_(granblue_fantasy),4,762 +1522437,feater_(arknights),4,762 +1589614,pochita_(chainsaw_man),4,762 +1675004,elysia_(honkai_impact),4,762 +381201,konpaku_youki,4,761 +14899,yamamura_sadako,4,761 +1245116,ri-class_heavy_cruiser,4,761 +1471118,roon_(azur_lane),4,761 +668312,senkawa_chihiro,4,760 +1394776,acerola_(pokemon),4,760 +1475191,aoba_moca,4,760 +790426,ara_haan,4,759 +1346609,beatrix_(granblue_fantasy),4,759 +1667171,g36_(girls'_frontline),4,759 +1705709,irene_(arknights),4,759 +700633,ene_(kagerou_project),4,757 +1629302,gawain_(fate),4,757 +1259432,oomori_yuuko,4,757 +1505470,galo_thymos,4,757 +1624414,neru_(blue_archive),4,757 +1447950,nearl_(arknights),4,756 +1591026,pneuma_(xenoblade),4,756 +1572332,helltaker_(character),4,756 +1337529,mikko_(girls_und_panzer),4,755 +1782427,bb_(swimsuit_mooncancer)_(second_ascension)_(fate),4,755 +1706706,toutetsu_yuuma,4,755 +1257345,harime_nui,4,754 +1631233,nelson_(kancolle),4,754 +1784572,abigail_williams_(swimsuit_foreigner)_(third_ascension)_(fate),4,754 +506315,yumeko_(touhou),4,753 +428827,trish_una,4,751 +1631216,nagara_(kancolle),4,751 +1435684,merlin_(fate/prototype),4,751 +1398097,sora_ginko,4,750 +1538828,ingrid_brandl_galatea,4,749 +1587911,tomimi_(arknights),4,749 +1790903,lisbeth_(sao),4,748 +1643688,azusa_(blue_archive),4,748 +1657252,pomu_rainpuff,4,748 +1249877,iori_rinko,4,747 +4858,chocobo,4,746 +419659,rita_mordio,4,745 
+552918,kaine_(nier),4,743 +1573721,st._louis_(luxurious_wheels)_(azur_lane),4,743 +396395,naoe_riki,4,742 +662005,mifune_miyu,4,742 +1440255,yuudachi_(azur_lane),4,742 +1531303,swire_(arknights),4,742 +597426,eirika_(fire_emblem),4,741 +427882,bazett_fraga_mcremitz,4,740 +423092,dead_master,4,740 +708151,kido_tsubomi,4,740 +1515003,rena_erindel,4,739 +1246940,matoba_risa,4,739 +1420954,nakasu_kasumi,4,739 +1480691,platelet_(hataraku_saibou),4,739 +432714,trafalgar_law,4,738 +1441359,shoukaku_(azur_lane),4,738 +1674016,manya_(dq4),4,737 +464859,sailor_saturn,4,737 +693559,ingo_(pokemon),4,737 +717909,vi_(league_of_legends),4,737 +1586655,eunectes_(arknights),4,737 +1667329,oberon_(fate),4,737 +11594,lina_inverse,4,736 +1304907,gokotai,4,736 +1657250,finana_ryugu,4,736 +1644995,meisho_doto_(umamusume),4,735 +1597904,shishio_chris,4,735 +1526525,jill_stingray,4,734 +1783171,meltryllis_(swimsuit_lancer)_(second_ascension)_(fate),4,734 +672299,oshino_ougi,4,733 +1630639,enemy_naval_mine_(kancolle),4,733 +1310847,hunter_(bloodborne),4,733 +1711621,master_3_(housamo),4,733 +1528128,himeno_(chainsaw_man),4,733 +254440,kirlia,4,732 +432818,hirose_koichi,4,732 +680146,mary_(ib),4,732 +712651,isobe_noriko,4,732 +1305420,shokudaikiri_mitsutada,4,732 +1781830,jeanne_d'arc_(swimsuit_archer)_(first_ascension)_(fate),4,732 +1627248,shun_(blue_archive),4,732 +400328,okita_sougo,4,731 +1228013,regina_(dokidoki!_precure),4,731 +1631231,natori_(kancolle),4,730 +1527683,yamper,4,730 +1538944,sussurro_(arknights),4,730 +1627531,carmilla_(fate),4,727 +1413408,small-clawed_otter_(kemono_friends),4,727 +402521,meta_knight,4,726 +1344498,vane_(granblue_fantasy),4,726 +1663193,white_rabbit_(alice_in_wonderland),4,725 +1237385,akaboshi_koume,4,725 +1522750,projekt_red_(arknights),4,724 +1478614,wakan_tanka,4,723 +1496206,kuzuha_(nijisanji),4,723 +1504965,hagoromo_lala,4,723 +1626978,reisen_(touhou_bougetsushou),4,722 +1305382,kogitsunemaru,4,722 +1627732,paul_bunyan_(fate),4,722 +1431868,nitocris_(swimsuit_assassin)_(fate),4,721 +1627625,lavinia_whateley_(fate),4,720 +716715,cure_heart,4,719 +1299400,haruno_haruka,4,719 +1755026,fujimaru_ritsuka_(male)_(polar_chaldea_uniform),4,719 +1240487,yusa_kozue,4,718 +1400058,satanichia_kurumizawa_mcdowell,4,718 +1631012,i-13_(kancolle),4,718 +1505029,leonardo_da_vinci_(rider)_(fate),4,718 +935914,kazami_yuuka_(pc-98),4,717 +520900,sf-a2_miki,4,716 +1631108,kikuzuki_(kancolle),4,716 +1468152,roboco-san,4,716 +1261730,elesis_(elsword),4,715 +593801,cure_rhythm,4,714 +1386435,yumeno_himiko,4,712 +1708017,sirius_(scorching-hot_seirios)_(azur_lane),4,712 +1281005,naomi_(girls_und_panzer),4,711 +742395,toudou_yurika,4,711 +1479269,tomioka_giyuu,4,711 +1597903,ryugasaki_rene,4,711 +1665751,t-head_trainer,4,711 +1447393,alina_gray,4,710 +462261,nikka_edvardine_katajainen,4,709 +1765330,elizabeth_bathory_(first_ascension)_(fate),4,708 +1496591,nikaidou_saki,4,708 +1693731,mash_kyrielight_(swimsuit_of_perpetual_summer),4,707 +1667168,dinergate_(girls'_frontline),4,707 +422506,erika_(pokemon),4,706 +1631249,okinami_(kancolle),4,706 +1443050,zuikaku_(azur_lane),4,706 +1631262,perth_(kancolle),4,706 +1790618,froggy_nun_(diva),4,706 +1665128,gorou_(genshin_impact),4,706 +566218,platinum_the_trinity,4,705 +1791907,utsumi_erice,4,705 +644034,kariya_masaki,4,704 +1386439,iruma_miu,4,704 +1477969,sister_cleaire,4,704 +1272430,isolated_island_oni,4,703 +1646429,fukuda_haru,4,701 +391387,akita_neru,4,700 +516743,lance_(pokemon),4,700 +523901,shinonome_nano,4,700 +1237658,error_musume,4,699 
+1627167,stheno_(fate),4,698 +1399945,tanya_degurechaff,4,698 +1644996,mejiro_dober_(umamusume),4,698 +1758613,ui_(blue_archive),4,698 +1229736,satake_minako,4,697 +1528912,takumi_(fire_emblem),4,697 +1676518,thoma_(genshin_impact),4,697 +1631044,irako_(kancolle),4,695 +1786499,artoria_caster_(second_ascension)_(fate),4,695 +1762593,kirisawa_juuzou_(character),4,694 +1305340,morgana_(persona_5),4,694 +1533069,rhea_(fire_emblem),4,694 +1555021,sei_shounagon_(fate),4,694 +1386133,ogata_hyakunosuke,4,693 +1755030,fujimaru_ritsuka_(female)_(polar_chaldea_uniform),4,691 +1276392,otokura_yuuki,4,689 +1401541,izayoi_liko,4,689 +1459805,yukoku_kiriko,4,689 +559713,black_heart,4,687 +1346345,takimoto_hifumi,4,687 +1441569,yuri_(doki_doki_literature_club),4,686 +1644984,fine_motion_(umamusume),4,686 +1644976,admire_vega_(umamusume),4,686 +1630689,grecale_(kancolle),4,686 +1571004,shibuya_kanon,4,686 +1452677,semiramis_(fate),4,685 +1627797,wu_zetian_(fate),4,685 +1607938,getou_suguru,4,684 +376441,arle_nadja,4,683 +664614,ryuzaki_kaoru,4,683 +1797173,hyakumantenbara_salome,4,683 +1619673,puru-see_(hoshizuki_(seigetsu)),4,682 +1152270,ibuki_tsubasa,4,681 +1227600,mochizuki_anna,4,681 +1275005,ousaka_shizuku,4,681 +1782428,bb_(swimsuit_mooncancer)_(third_ascension)_(fate),4,679 +1693037,le_malin_(listless_lapin)_(azur_lane),4,679 +1768854,mysterious_heroine_x_alter_(first_ascension)_(fate),4,678 +1631348,shinshuu_maru_(kancolle),4,678 +480982,asbel_lhant,4,677 +1316228,nakagawa_natsuki,4,677 +1667164,g41_(girls'_frontline),4,677 +404920,sage_(dq3),4,676 +1331206,kawakami_mai,4,676 +1423942,eris_greyrat,4,676 +1560517,utage_(arknights),4,676 +1405059,sand_cat_(kemono_friends),4,675 +1680295,mika_(blue_archive),4,675 +664407,himekawa_yuki,4,674 +1631118,kishinami_(kancolle),4,674 +1285963,alena_(dq4),4,672 +1645008,smart_falcon_(umamusume),4,672 +497689,cheria_barnes,4,671 +1631097,kasuga_maru_(kancolle),4,671 +1432300,yatadera_narumi,4,669 +1387122,mordred_(swimsuit_rider)_(fate),4,667 +1529378,sakata_kintoki_(fate),4,666 +1408392,golden_snub-nosed_monkey_(kemono_friends),4,666 +1780213,hina_(swimsuit)_(blue_archive),4,666 +1875916,tiki_(adult)_(fire_emblem),4,665 +1386436,amami_rantarou,4,664 +548087,hasegawa_kobato,4,663 +1257848,hoshimiya_kate,4,663 +1335159,sugimoto_saichi,4,663 +596743,fukawa_touko,4,662 +1476820,yozora_mel,4,662 +1789595,saijo_juri,4,661 +1327917,yuel_(granblue_fantasy),4,660 +1460549,nekomiya_hinata,4,660 +688953,igarashi_kyoko,4,658 +1639090,maya_kai_ni_(kancolle),4,658 +1588338,tomoe_gozen_(swimsuit_saber)_(fate),4,658 +1657407,blue_poison_(shoal_beat)_(arknights),4,658 +1856336,hasumi_(gym_uniform)_(blue_archive),4,656 +1598223,kayoko_(blue_archive),4,655 +1434774,uehara_himari,4,654 +1344209,roxy_migurdia,4,653 +1837736,saren_(summer)_(princess_connect!),4,653 +1485459,jean_bart_(azur_lane),4,653 +713347,katagiri_sanae,4,652 +1440285,queen_elizabeth_(azur_lane),4,652 +1667205,9a-91_(girls'_frontline),4,651 +1479271,rengoku_kyoujurou,4,650 +1386290,keebo,4,649 +1599618,whislash_(arknights),4,646 +971934,vampy,4,645 +1639099,naganami_kai_ni_(kancolle),4,644 +1326283,jeanne_d'arc_(granblue_fantasy),4,643 +1644991,matikanefukukitaru_(umamusume),4,643 +1645010,sweep_tosho_(umamusume),4,642 +879608,toyokawa_fuka,4,641 +1387119,kiyohime_(swimsuit_lancer)_(fate),4,641 +1836895,karyl_(summer)_(princess_connect!),4,641 +1596254,razor_(genshin_impact),4,640 +1480768,siro_(dennou_shoujo_youtuber_siro),4,639 +1491126,tsuyuri_kanao,4,637 +1667169,negev_(girls'_frontline),4,636 
+1440310,eldridge_(azur_lane),4,636 +1284942,midway_princess,4,635 +1420829,matsubara_kanon,4,633 +1541801,magallan_(arknights),4,633 +1782915,ijichi_nijika,4,632 +1411715,okusawa_misaki,4,628 +1875918,tiki_(young)_(fire_emblem),4,627 +1790615,spicy_nun_(diva),4,625 +1067206,hayasaka_mirei,4,623 +1781831,jeanne_d'arc_(swimsuit_archer)_(second_ascension)_(fate),4,623 +1420746,udagawa_tomoe,4,621 +1445024,shirasagi_chisato,4,621 +1678878,la_pluma_(arknights),4,619 +1265247,sato_shin,4,617 +1491428,takamiya_rion,4,612 +1623487,crypto_(apex_legends),4,612 +1533772,uzuki_sayaka,4,607 +1533771,kumada_masaru,4,600 diff --git a/data/models/huggingface/stable-diffusion-v1-5/feature_extractor/preprocessor_config.json b/data/models/huggingface/stable-diffusion-v1-5/feature_extractor/preprocessor_config.json new file mode 100644 index 0000000000000000000000000000000000000000..0d9d33b883843d1b370da781f3943051067e1b2c --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/feature_extractor/preprocessor_config.json @@ -0,0 +1,28 @@ +{ + "crop_size": { + "height": 224, + "width": 224 + }, + "do_center_crop": true, + "do_convert_rgb": true, + "do_normalize": true, + "do_rescale": true, + "do_resize": true, + "feature_extractor_type": "CLIPFeatureExtractor", + "image_mean": [ + 0.48145466, + 0.4578275, + 0.40821073 + ], + "image_processor_type": "CLIPImageProcessor", + "image_std": [ + 0.26862954, + 0.26130258, + 0.27577711 + ], + "resample": 3, + "rescale_factor": 0.00392156862745098, + "size": { + "shortest_edge": 224 + } +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/model_index.json b/data/models/huggingface/stable-diffusion-v1-5/model_index.json new file mode 100644 index 0000000000000000000000000000000000000000..00d856d11246e19af02e1b1dd8f13e1cecba052c --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/model_index.json @@ -0,0 +1,34 @@ +{ + "_class_name": "StableDiffusionPipeline", + "_diffusers_version": "0.23.0", + "_name_or_path": "runwayml/stable-diffusion-v1-5", + "feature_extractor": [ + "transformers", + "CLIPImageProcessor" + ], + "requires_safety_checker": true, + "safety_checker": [ + "stable_diffusion", + "StableDiffusionSafetyChecker" + ], + "scheduler": [ + "diffusers", + "PNDMScheduler" + ], + "text_encoder": [ + "transformers", + "CLIPTextModel" + ], + "tokenizer": [ + "transformers", + "CLIPTokenizer" + ], + "unet": [ + "diffusers", + "UNet2DConditionModel" + ], + "vae": [ + "diffusers", + "AutoencoderKL" + ] +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/safety_checker/config.json b/data/models/huggingface/stable-diffusion-v1-5/safety_checker/config.json new file mode 100644 index 0000000000000000000000000000000000000000..15a40eb9a92b1c5921815b9e6925ea089701c222 --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/safety_checker/config.json @@ -0,0 +1,28 @@ +{ + "_name_or_path": "/home/user/.cache/huggingface/hub/models--runwayml--stable-diffusion-v1-5/snapshots/1d0c4ebf6ff58a5caecab40fa1406526bca4b5b9/safety_checker", + "architectures": [ + "StableDiffusionSafetyChecker" + ], + "initializer_factor": 1.0, + "logit_scale_init_value": 2.6592, + "model_type": "clip", + "projection_dim": 768, + "text_config": { + "dropout": 0.0, + "hidden_size": 768, + "intermediate_size": 3072, + "model_type": "clip_text_model", + "num_attention_heads": 12 + }, + "torch_dtype": "float32", + "transformers_version": "4.34.1", + "vision_config": { + "dropout": 0.0, + "hidden_size": 1024, + "intermediate_size": 4096, + "model_type": 
"clip_vision_model", + "num_attention_heads": 16, + "num_hidden_layers": 24, + "patch_size": 14 + } +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/safety_checker/model.safetensors b/data/models/huggingface/stable-diffusion-v1-5/safety_checker/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3664190c342ff99f7f7ab5bda9648e375ef361e0 --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/safety_checker/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb351a5ded815c3ff744968ad9c6b218d071b9d313d04f35e813b84b4c0ffde8 +size 1215979664 diff --git a/data/models/huggingface/stable-diffusion-v1-5/scheduler/scheduler_config.json b/data/models/huggingface/stable-diffusion-v1-5/scheduler/scheduler_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7a9c8f41a4f07aa3c337f572aeb0a04746d716d9 --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/scheduler/scheduler_config.json @@ -0,0 +1,15 @@ +{ + "_class_name": "PNDMScheduler", + "_diffusers_version": "0.23.0", + "beta_end": 0.012, + "beta_schedule": "scaled_linear", + "beta_start": 0.00085, + "clip_sample": false, + "num_train_timesteps": 1000, + "prediction_type": "epsilon", + "set_alpha_to_one": false, + "skip_prk_steps": true, + "steps_offset": 1, + "timestep_spacing": "leading", + "trained_betas": null +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/text_encoder/config.json b/data/models/huggingface/stable-diffusion-v1-5/text_encoder/config.json new file mode 100644 index 0000000000000000000000000000000000000000..fb2abf83e58e16f2e2048c06ae0969ab5f4ace2c --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/text_encoder/config.json @@ -0,0 +1,25 @@ +{ + "_name_or_path": "/home/user/.cache/huggingface/hub/models--runwayml--stable-diffusion-v1-5/snapshots/1d0c4ebf6ff58a5caecab40fa1406526bca4b5b9/text_encoder", + "architectures": [ + "CLIPTextModel" + ], + "attention_dropout": 0.0, + "bos_token_id": 0, + "dropout": 0.0, + "eos_token_id": 2, + "hidden_act": "quick_gelu", + "hidden_size": 768, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "max_position_embeddings": 77, + "model_type": "clip_text_model", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 1, + "projection_dim": 768, + "torch_dtype": "float32", + "transformers_version": "4.34.1", + "vocab_size": 49408 +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/text_encoder/model.safetensors b/data/models/huggingface/stable-diffusion-v1-5/text_encoder/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..34cca545bfc07f566fa79345178b6db761f9d53b --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/text_encoder/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:778d02eb9e707c3fbaae0b67b79ea0d1399b52e624fb634f2f19375ae7c047c3 +size 492265168 diff --git a/data/models/huggingface/stable-diffusion-v1-5/tokenizer/merges.txt b/data/models/huggingface/stable-diffusion-v1-5/tokenizer/merges.txt new file mode 100644 index 0000000000000000000000000000000000000000..76e821f1b6f0a9709293c3b6b51ed90980b3166b --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/tokenizer/merges.txt @@ -0,0 +1,48895 @@ +#version: 0.2 +i n +t h +a n +r e +a r +e r +th e +in g +o u +o n +s t +o r +e n +o n +a l +a t +e r +i t +i n +t o +r o +i s +l e +i c +a t +an d +e d 
+o f +c h +o r +e s +i l +e l +s t +a c +o m +a m +l o +a n +a y +s h +r i +l i +t i +f or +n e +ð Ł +r a +h a +d e +o l +v e +s i +u r +a l +s e +' s +u n +d i +b e +l a +w h +o o +d ay +e n +m a +n o +l e +t o +ou r +i r +g h +w it +i t +y o +a s +s p +th is +t s +at i +yo u +wit h +a d +i s +a b +l y +w e +th e +t e +a s +a g +v i +p p +s u +h o +m y +. . +b u +c om +s e +er s +m e +m e +al l +c on +m o +k e +g e +ou t +en t +c o +f e +v er +a r +f ro +a u +p o +c e +gh t +ar e +s s +fro m +c h +t r +ou n +on e +b y +d o +t h +w or +er e +k e +p ro +f or +d s +b o +t a +w e +g o +h e +t er +in g +d e +b e +ati on +m or +a y +e x +il l +p e +k s +s c +l u +f u +q u +v er +ðŁ ĺ +j u +m u +at e +an d +v e +k ing +m ar +o p +h i +.. . +p re +a d +r u +th at +j o +o f +c e +ne w +a m +a p +g re +s s +d u +no w +y e +t ing +y our +it y +n i +c i +p ar +g u +f i +a f +p er +t er +u p +s o +g i +on s +g r +g e +b r +p l +' t +m i +in e +we e +b i +u s +sh o +ha ve +to day +a v +m an +en t +ac k +ur e +ou r +â Ģ +c u +l d +lo o +i m +ic e +s om +f in +re d +re n +oo d +w as +ti on +p i +i r +th er +t y +p h +ar d +e c +! ! +m on +mor e +w ill +t ra +c an +c ol +p u +t e +w n +m b +s o +it i +ju st +n ing +h ere +t u +p a +p r +bu t +wh at +al ly +f ir +m in +c a +an t +s a +t ed +e v +m ent +f a +ge t +am e +ab out +g ra +no t +ha pp +ay s +m an +h is +ti me +li ke +g h +ha s +th an +lo ve +ar t +st e +d ing +h e +c re +w s +w at +d er +it e +s er +ac e +ag e +en d +st r +a w +st or +r e +c ar +el l +al l +p s +f ri +p ho +p or +d o +a k +w i +f re +wh o +sh i +b oo +s on +el l +wh en +il l +ho w +gre at +w in +e l +b l +s si +al i +som e +ðŁ Ĵ +t on +d er +le s +p la +ï ¸ +e d +s ch +h u +on g +d on +k i +s h +an n +c or +. . +oun d +a z +in e +ar y +fu l +st u +ou ld +st i +g o +se e +ab le +ar s +l l +m is +b er +c k +w a +en ts +n o +si g +f e +fir st +e t +sp e +ac k +i f +ou s +' m +st er +a pp +an g +an ce +an s +g ood +b re +e ver +the y +t ic +com e +of f +b ack +as e +ing s +ol d +i ght +f o +h er +happ y +p ic +it s +v ing +u s +m at +h om +d y +e m +s k +y ing +the ir +le d +r y +u l +h ar +c k +t on +on al +h el +r ic +b ir +vi e +w ay +t ri +d a +p le +b ro +st o +oo l +ni ght +tr u +b a +re ad +re s +ye ar +f r +t or +al s +c oun +c la +t ure +v el +at ed +le c +en d +th ing +v o +ic i +be st +c an +wor k +la st +af ter +en ce +p ri +p e +e s +i l +âĢ ¦ +d re +y s +o ver +i es +ðŁ ij +com m +t w +in k +s un +c l +li fe +t t +a ch +l and +s y +t re +t al +p ol +s m +du c +s al +f t +' re +ch e +w ar +t ur +ati ons +ac h +m s +il e +p m +ou gh +at e +st ar +wee k +! !! +c lu +th ere +n er +t om +s el +ï¸ ı +wor ld +v es +c am +go t +in ter +of f +u m +ton ight +o ther +h ou +loo k +j e +i d +si on +be au +at t +el i +or t +re c +f f +st er +su pp +g en +be en +il y +te am +m m +i c +pe op +it t +at s +on ly +mb er +en g +b ri +m p +k now +b ur +b ar +in s +lo w +sh e +ro w +â Ŀ +t ro +peop le +vi a +lo w +ag a +be t +x t +f ac +ch ar +e ar +w al +s en +f am +b le +n ati +is h +n or +g ame +li ve +s co +le y +d on +ic k +b all +ver y +the se +p an +i a +at ing +c r +a re +g ir +ma ke +st re +sho w +. 
" +f l +u p +d r +than ks +il li +w om +st s +i g +s ur +ever y +c ur +vie w +le t +in to +mo st +n a +in di +g ar +ha d +s ou +v ed +an t +iti on +ma de +f ol +un i +it ed +ðŁ ı +ic al +th r +read y +ch ec +d ra +k es +boo k +e p +si c +mor ning +ne ws +c au +c t +w ell +an c +pho to +th an +or s +bir th +g g +ou t +ne xt +som e +en ing +stor y +ch ri +do wn +hom e +f fe +fre e +d a +b or +f il +ci al +than k +si de +le ar +qu e +l ine +t en +at es +ye ars +m y +pho to +beau ti +ri ght +n u +for m +shi p +b an +th er +d ays +g am +as on +g y +ðŁ İ +birth day +se t +ic k +e t +st ill +com ing +ta ke +ðŁ ĩ +b b +s ol +s on +d en +e p +mu sic +the m +de n +wh y +f oo +c ra +am az +w n +h ol +t ting +w r +u e +ma g +c ro +l an +c lo +b ra +a k +s ing +c al +re ad +' ve +jo h +b ab +d ri +b lo +bi g +er ic +in t +t or +tr y +l a +le g +hou se +m ic +v al +beauti ful +l itt +chec k +ne w +ver s +s w +ar i +pla y +h er +âĢ ĵ +w in +m a +con gr +sch ool +f un +. @ +he al +ic h +d el +wh ere +l on +ke t +tw o +mu ch +wat ch +v en +d ed +a st +k ed +b as +go ing +m p +e ver +w ays +ro o +de sig +l y +s ed +to p +l in +ch an +to o +it ing +d ent +gh ts +t y +sp o +ne ed +b lu +in st +be ing +âĿ ¤ +w el +l s +hi m +m ay +st ing +n a +el y +litt le +g a +n at +tom or +m c +h on +w ant +a ir +pi c +am eric +p er +le ss +wee k +ve l +a h +c ap +ch am +g er +ti m +tomor row +ne ss +st ate +h al +ser v +z e +o s +p at +v is +ex c +s in +f f +c ity +c en +an y +b el +su mm +t in +w ould +loo king +k o +ce le +fam ily +m er +po w +hel p +bu s +c o +c le +sel f +en s +ic s +th o +an i +ch o +le ad +b s +t wee +th ink +for e +ch il +vi de +di d +al e +ch i +v il +en ds +w ing +p as +' ll +v ol +s a +g s +man y +j ec +be fore +gra ph +n y +ur ing +w il +d d +bu il +f av +st ed +tr an +l ing +ou d +d ge +fi el +nati onal +st a +c er +w ere +in a +se ason +c ou +n ed +amaz ing +ti ons +cele br +n s +a th +he ad +s day +d ar +lo c +v in +an other +g oo +s at +n y +jo in +pre s +s es +s ing +an a +in ing +.. .. +c our +ï¸ ı +ac t +cau se +li ght +am s +t a +b al +f c +hi gh +off ici +t t +chri st +d ic +d ay +ra l +h or +: ) +vi si +n am +o b +ma s +gh t +re ally +t un +fin d +thr ough +por t +u t +ti ve +st y +n e +or e +ðŁĺ Ĥ +supp ort +ne ver +ev en +ðŁ Ķ +h a +y a +l d +u k +r an +j am +wi th +me di +d es +ne y +ch ing +al e +h y +k in +! ! 
+d y +pl ace +al so +b le +wh ich +bl ack +b li +s ay +par k +pl ay +ir e +vide o +week end +a il +ke y +p t +w ard +fri day +d in +ine ss +g ro +b en +al ways +t ball +ag o +m il +c y +pro duc +di sc +un der +ple ase +sp or +fu ll +e y +ðŁ Ļ +is e +iti es +c at +k no +u se +fo re +k er +ar t +hi gh +op en +s an +e f +our s +sh ed +st ri +d ro +aga in +i m +ðŁ ĵ +en jo +fu n +ge tting +p en +g er +c li +an y +ever y +e u +wom en +â ľ +e st +c ould +r y +" @ +th ou +sh a +comm un +b er +d ents +di s +wh ile +aw ay +di o +h am +g la +d ate +k a +mis s +un ch +w on +in f +roo m +g a +re al +ex per +di rec +sh ould +sp r +g ol +l ong +bet ter +or i +e y +i ence +il s +z z +h an +f ound +v s +â Ļ +po st +ti c +par t +m en +ren ce +ce ss +v ic +s il +sho p +ðŁĺ Ĥ +f ood +v al +sti c +y ou +s ays +e lec +st ar +o c +l and +i d +c tion +fiel d +s of +st art +wat er +fri ends +on es +ðŁ Į +f la +f ar +wh ite +par ty +in st +gr ou +t v +every one +m ent +j a +ch a +pr in +an ts +d uring +l at +l ar +we st +th en +k a +y oun +in sp +in te +we en +visi t +aga inst +re le +he ad +c es +to wn +loo ks +th re +re gi +ren t +pro jec +gir l +se ar +w o +m om +c ar +h un +pu bli +d i +p le +c all +c ri +u m +for d +per fe +fri end +h ard +ssi on +te st +pla ying +ar ound +be cause +ke ts +me et +sat ur +ar ti +wor k +j un +v en +r un +me mber +por t +su per +t wit +s am +el s +t ly +ad v +ati ve +at h +s ure +av ail +la r +s qu +ar ds +ev ent +m en +l l +o ver +lo gy +it al +tim es +m al +b ack +c oo +ma king +st ru +â ģ +it u +sh ar +g an +c as +s n +summ er +pic ture +f an +h in +christ mas +c y +pr oud +cham pi +desig n +pp ing +ho pe +c a +avail able +ma y +we d +photo graph +spe cial +sal e +sto p +er y +a we +al ity +hi story +am a +pre si +b ru +wor king +d one +d r +k en +fe at +w ood +ate st +sun day +mo vi +vel y +s le +f ace +sp ec +stu dents +b y +ha m +sp on +bus iness +d at +i e +i p +so ci +g lo +h and +re cor +r s +me e +ke ep +p ur +heal th +sh e +com ple +go d +da vi +col lec +li st +r a +clu b +t ers +in clu +th ings +pl an +â ĺ +joh n +sh ing +at ul +so on +blu e +g or +satur day +w on +congr atul +se e +âĿ¤ ï¸ı +tho se +ðŁĺ į +fin al +d ou +it h +o wn +ro ad +t our +a st +indi a +ti l +n d +f er +fav or +su l +lear n +fir e +ju st +grou p +a h +r ac +bo dy +u r +c are +à ¸ +p lo +o h +po s +gi ve +te ch +su b +c ent +er ing +y m +il ity +f ic +lon don +v ir +gu ys +b a +ðŁ ¤ +bab y +sc re +ðŁĺ į +tru mp +un der +chan ge +i an +col le +ss es +l er +ss ed +n ice +ann oun +pow er +s ar +a king +min i +s li +s wee +k ar +fu l +c ru +ac tion +a ther +) . 
+[elided data payload: several thousand additional byte-level BPE merge rules, one space-separated "token token" pair per line (e.g. "st and", "de vel", "congratul ations"), including byte-encoded emoji and punctuation sequences such as "ðŁĺ Ĥ" and "âĿ¤ ï¸ı", added here as a tokenizer vocabulary file]
+ðŁĴ ¯ +ev ent +âĢįâĻĤ ï¸ı +kind ness +pro posed +acou stic +a es +defen der +dan ce +h tt +w at +vo y +ðŁ¤ ĺ +au s +cli ff +sear ching +beauti fully +in qu +at l +speci alist +ðŁIJ ¶ +da i +tra ils +class ics +inst ant +v ous +re venue +mar ch +kir k +fr inge +fire works +tri via +âĺ ħ +tr action +wal ter +mo to +l ily +att itude +cli mb +sc an +sav ings +c w +fa ith +cred its +ab led +gra ff +auto graph +he he +ran ch +ha d +ro gers +ðŁĮ ¹ +f in +re qu +fol k +ad ditional +lyn n +u ber +dol lars +lo gic +wor th +so m +the sis +p ound +bi c +st ur +cer am +spen cer +en tered +v amp +organi zed +âľ Ī +pp s +tr on +merce des +no ti +compet itive +do w +ous ness +vic tor +gr illed +na i +pu tin +ab ra +bl ame +alex and +anim al +dec ent +p ent +inter ior +:' ) +but ler +bal let +ðŁĴ Ķ +albu ms +down s +la d +si r +pla in +p ers +blon de +dis c +paki stan +se ment +ga a +w age +ch as +man i +co ps +terr it +lo l +lau ghter +ri vers +magnific ent +lam p +w b +new sle +char ts +ble ssing +p unch +lon gest +fl oral +cu tie +fare well +sto pping +mb b +bu d +chee se +de cla +si m +mc donald +de ter +you th +t ch +fre der +kin dle +fer n +at or +as leep +p ond +spr int +p ounds +la zy +gh e +fundra ising +dead ly +gran de +dou g +he y +lin da +consi dering +i um +gol den +vi k +auth ors +di ss +u ally +appropri ate +mor ning +y le +hon oring +foli o +be c +re bec +fin land +formu la +corn wall +sh ay +cau sing +bl end +sig nal +t ent +kash mir +nation als +har mony +sc out +acce ssi +he ight +medi eval +impro vement +ke es +prac tical +car d +de par +hu n +om ing +cal gary +ste l +bu bble +gur u +ma h +unex pe +n h +ed a +me at +i ge +si o +god dess +in ches +tun es +br itt +sti on +ra j +âĻ « +mer cy +ðŁĴ ĺ +sen ds +i est +pol ici +val e +reduc ed +as ap +vi jay +defen sive +celebr ations +ri ders +med itation +har mon +g ing + ¡ +program ming +in au +sud den +m h +replac ement +sk u +j ar +gra des +ta st +k itt +brand ing +k aw +boo t +f ought +p ays +g f +iz ation +ho p +k k +activi st +v end +coast al +cha os +ðŁĶ ´ +se me +bill board +li fting +cu mb +sc al +ðŁĸ ¤ +stru ck +l v +indie dev +beat en +jun gle +al right +destin y +m ing +k c +ch ances +om an +q atar +cra f +tra ined +pri x +char m +o tive +s mu +e c +and ers +hand ed +al ban +certain ly +arri ving +i ze +sa i +tr ack +pain ter +hu mble +appo intment +head line +manag ing +mo d +as pe +andre a +à ¤ +ethi op +un ited +exi st +bal i +k ad +n t +d red +re x +recogni ze +tam pa +be ers +ati a +he els +no te +transport ation +tur tle +re de +hipho p +sp icy +sp urs +⬠ĩ +cor p +ther n +to ast +hur ry +proper ties +ma ge +mar co +ele ments +bou ti +syn drome +ms g +develop er +gra ders +he im +re sil +off ices +del ay +di men +vin tag +barbar a +ðŁĺ ± +vene zu +cu lar +fac ed +bar n +ðŁĺ Ĩ +survi vor +wor m +confu sed +passion ate +Ø ± +identi fy +electr icity +sou ls +brad ley +repor tedly +lun ch +shel f +eli a +swee t +smoo th +emplo yment +am el +manhatt an +ste am +oun ts +ye p +li ving +un e +descri be +ca res +man ila +sha wn +ac ted +bas h +st even +re st +pet ition +div ine +wel sh +rac e +platin um +ðŁĮ ¸ +p b +extra ordinary +solidar ity +m all +on ion +schedu led +game of +fer gu +de ms +nor m +p k +tri als +polici es +publi shing +st ole +fron t +charac ter +van ia +ex ce +sti e +sc a +resi dential +sa iling +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ +spons ors +th ick +champag ne +she pher +continu ing +ven ice +per th +na p +a ster +y ak +un limited +cho ices +ne o +hi v +repor ter +bru ssels +f old +dy s +se mi +la wn +it alia +wi fi +as k +em ed 
+fr ame +monit oring +ste ad +i da +gr in +is a +fli p +re stric +offen sive +atta ched +di sh +wh y +philli ps +gre et +p als +mix tape +v ou +fiel der +spar k +alber ta +g len +ca sh +s ri +u ri +ro dri +entreprene urs +climate change +p sy +d le +em ents +lin ked +nether lands +acci dentally +oppos ition +vel vet +ra ys +c w +om o +m f +lmfa o +newsle tter +: ) +toi let +liter ature +di sp +phili p +uni form +sudden ly +head er +cool er +-- - +prou d +bri g +nis san +scienti st +j ah +con centr +pac ks +appo inted +so ap +eng age +cho se +âĻ ¡ +se tup +jeal ous +har ry +g ation +tun nel +te mp +osc ars +dec ade +recomm ended +child ren +ab a +anxi ety +ve ments +sal on +pho too +organi z +mach ines +ab s +vil le +hy pe +ti ff +emer ging +av geek +[ # +contribu tion +bra dy +re sto +g mail +fit z +photo shoot +hel met +h t +eleg ant +ug anda +nur sing +or leans +pen n +na h +foo tage +em a +w o +w ad +concer ns +ve re +re mark +who ever +str ang +p t +qu it +sh ang +histor y +s ick +perman ent +ill ness +col d +visi on +he m +ar row +con vic +pin k +oc cup +bal d +ex hau +u of +am o +on t +ãĥ » +adop t +la id +smo ked +inter pre +ess enti +associ ated +b d +bb y +fi er +inst all +dipl om +con diti +c f +w ak +any a +gr aci +fi sher +s ss +ap r +il it +mus ician +symph ony +cor d +h ack +le gi +l v +bless ings +hum or +sc ra +e ti +min ster +trav elling +bu sh +jewell ery +li me +!! ! +pregn ant +pe e +lo b +cap ital +ip a +pen cil +la bor +duc ks +prou dly +wedd ing +dere k +m w +pe g +valent ine +an gu +re treat +pro spect +dang er +vul ner +up set +, # +sr k +x im +thur sday +n fl +kis ses +re ds +cr ack +re ward +c u +ko k +me te +aband oned +it t +me als +sp ell +stan bul +del ays +ru m +le op +gu m +no va +super man +ch ick +m is +dram atic +inno cent +r ounds +re c +auti sm +bangla desh +mor al +mo vie +sp oo +k la +âĥ £ +ou ting +mess i +ab road +loo kin +a im +q i +st ack +colla ge +à ¯ +hud son +sc an +ho e +ch au +oc cur +comm ander +ho les +ðŁİ Ħ +bi as +v on +stick er +ma k +responsi bility +colum bus +sa int +ed mon +rac ism +far ms +w en +gul f +may o +!!!! !!!! +corpor ation +ba chel +el a +inter nal +je ep +fol lows +di alogue +de rer +smart phone +he len +rich mond +equ ity +s land +b g +ne ar +av i +memph is +we ir +discu ssed +bad ge +p up +mi stake +phen omen +un ite +ðŁ Ľ +de pic +ri des +in augu +n at +sof twitter +comb ination +gosp el +âļ ¾ +ad mission +retro gaming +ðŁIJ ¾ +sch u +mb o +jun ction +al arm +à ¦ +gr ac +kh ali +k ul +m ale +cap tion +wi sh +te re +cor ps +ru bber +play station +er in +effici ent +l or +jo kes +in ary +nor man +lu is +inaugu ral +ch ed +âļ½ ï¸ı +di p +to e +str at +aa c +am u +pi er +co tt +comm and +tt en +sn oo +cu be +clo ses +class ical +s word +expre ssion +reach ing +n app +co st +affe ct +ric o +gi f +brea the +tri be +or tho +h ay +l g +fri es +n m +hi ding +richar ds +en de +mic ro +capit ol +cop y +ro m +regi me +mary land +tax i +di al +embar ra +un believ +ch t +v s +elim in +o dd +pen ny +sound track +l ings +trans ition +rema ining +a is +mali k +? !? 
+rand om +def end +ul tra +tru m +danc er +st ol +dri ve +a ver +ro ast +defin ition +se an +excit ement +partic ul +su rely +sh av +ber y +di shes +com m +is ol +i am +ob li +gho st +hugh es +chi efs +b as +conserv ative +speci al +fe min +sh ri +n ancy +inte l +tu ne +ðŁĩ ª +jo el +gg le +mo to +ðŁĺ Ķ +bu ck +d ag +antic ip +mont ana +gu id +fro g +ec raft +op e +dri ves +nu mer +x y +color ful +wednesday wisdom +illu min +bey on +inau gur +deep ly +pre fer +for tune +coo ked +ti ble +âĺ ķ +swe ater +it ter +tt y +u i +gi e +com plic +~ ~ +tax es +cu ps +di verse +sam anth +âłĢ âłĢ +ba king +sy mp +wa i +be half +mer cur +travel s +ðŁİī ðŁİ +or ia +eng aged +jump ing +reti red +n aked +p uni +speed way +sci ences +rehear sal +on ym +dy ou +pl ates +r ati +kri sh +jaz z +car ol +ra f +pen alty +tim eline +ru by +engine ers +ra f +bel le +do se +che on +esc ap +me g +ran k +or d +me gan +mer ch +ec lipse +âĺº ï¸ı +ple dge +kir k +per si +leice ster +sa k +w k +saf ely +yy y +je t +promis ed +j c +en ne +no ah +re no +re a +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +tra il +ðŁij Ģ +f d +soo o +ri min +w k +ภ² +i al +x ox +bis cu +d ale +fan dom +particip ating +fla g +privi lege +pe ach +mach ine +bo ston +gro ss +o g +mir acle +adop tion +u ss +mon sters +be ij +clar ke +pu shing +pra ying +ar o +d n +ell is +apol lo +od ds +refuge e +to w +b p +ðŁĩ¬ðŁĩ § +h end +app eared +memb ership +pe an +du m +viol ent +v y +potat oes +aw w +greet ings +t ts +ac on +sh ane +photograph ed +cra b +temper atures +cu ba +c fc +wel com +he l +in nings +m k +co de +kno ck +gra ss +swe dish +p ta +ick y +v at +lin ing +s q +sa p +ar c +announ cing +sk ins +cit yof +br ing +co x +gam er +it arian +i da +h d +ros se +sad ly +ge o +âļ ¡ï¸ı +tag s +fa ther +chan ge +l ance +whis key +adel aide +te c +stick ers +marke t +class y +bad ass +flo rence +lin er +fro st +k ate +ac on +scand al +es sex +ðŁĺ ı +vi vi +dr ill +blo ggers +recomm end +d ha +ac res +ro ma +bu y +gro cer +er ia +ma har +ff er +patter ns +ver i +com pu +st ev +ang a +ment or +do o +it ali +cdn poli +on ly +conduc t +elec tro +de f +wh ale +prepar ation +bicy cle +vi ral +turn out +bra ss +qu ad +hospit ality +pack aging +den cy +ceme tery +abo ard +dre aming +pic ture +t all +inv ent +ad mi +o e +tem ps +qu an +fun dam +pro mp +resi dence +mu d +sour i +âĦ ¢ +graff iti +gi f +d nd +com p +s war +pe eps +pale stine +devil s +san g +assi stance +bi ke +missi ssi +inter viewed +ne phew +dru ms +v and +gentle men +n sw +inst a +leban on +ee ee +oli via +ver y +rou gh +industri es +m ation +ðŁĺ Ĵ +bar rel +n ay +po ps +moder n +ill y +are st +on ents +protec ting +v ans +e o +vi kings +restaur ants +re ck +jac kie +andre w +w illing +he ath +citiz en +disc rimin +๠Ī +stu art +m ys +hi p +tran sp +" ? 
+te x +su shi +ke d +cro ssed +dist ur +pe dia +f ate +some how +mo th +proce ssing +is s +r in +u ts +yy c +ver t +lg bt +re id +on to +arab ia +habit at += = +stre ak +simp son +addic tion +wim ble +deli vers +challeng ing +ðŁİ ¶ +fran ch +e du +s me +ai ds +hur st +th am +tari an +remem bered +palestin ian +fe es +tru m +sket ch +ur u +fit ting +jes se +ðŁĶ¥ ðŁĶ¥ +---- ---- +ba ch +ici a +colo red +da h +associ ate +int el +s eller +p u +stu ffed +ac s +b s +sh in +cooper ation +certific ate +ab u +ingredi ents +re v +in ge +el der +christi an +bun dle +th ic +dir t +beij ing +comm it +ted dy +ed u +to day +s field +w yn +confir ms +lo o +j v +ene ss +al pha +vir us +ari um +gr ind +bri dges +introduc tion +pol ls +bac ter +z ach +termin al +ra iders +fla vor +zom bie +vo d +sp reading +gameof thrones +effici ency +lat ely +ale m +twee t +cri mes +cl er +de y +dg ed +hy un +pay ments +cir cus +ðŁĺŃ ðŁĺŃ +mis souri +lu b +episo des +c age +po s +mat ching +tumb lr +lin ed +ge st +am bi +nar r +ing ton +regu l +blo wn +is le +co co +on don +joshu a +tour ing +sm a +sau sage +best friend +bo eing +desi re +sav age +ra pper +de vo +te ar +take over +cow boys +po ker +par ag +pp e +h int +we ars +se th +ro les +l anc +man ga +form at +fl yer +c ay +mo or +ba ke +spla sh +v ad +ker ala +proce eds +sil ly +reflec tion +di str +wi d +su it +ci vic +yan kees +by n +migr ation +di stin +or ch +fe mini +quali fying +tu ri +o be +hun dred +cra p +wan g +mathe mat +bu re +expo sure +fergu son +seme ster +re serv +pl ym +a hu +fac ial +wa x +wor ried +ca b +vi o +as a +co d +to pics +p cs +hal o +rescu ed +horiz on +ar k +âļ ª +hol ly +el f +ul ti +pu p +quali fied +attend ance +ati vely +destro y +y c +for th +photoo ftheday +c ents +ic eland +meas ures +de sk +port folio +artic les +direc tors +dat ab +e w +creep y +oun ding +hon oured +mi st +j it +men tioned +port able +iti c +d ann +friday feeling +am id +ti ger +scri p +helicop ter +hard ware +expl or +work place +austri a +beat les +ber nar +spi der +disc o +cul t +lim its +shor tly +fin al +nin ja +lu ke +le bron +wal mart +o il +van illa +shi re +ye g +ak y +c s +bl er +collec ted +t g +rol led +speci als +b ff +pier re +sh im +vi er +flash back +restor ation +individu als +pro d +fre aking +tu rer +o a +re fre +mor oc +gre et +re yn +care ful +our ing +u sh +is d +g ill +vie w +thunder storm +b led +pic nic +guar di +pi g +ar k +syl vania +bann ed +u cl +vi jay +ori um +av engers +believ es +eu r +monu ment +concer ned +la bs +ber g +a ap +vi sh +sing les +can cel +z el +ar ab +ru th +too th +ar ta +sh af +chair s +r ack +dise ases +crow d +cl y +fle x +christ ma +artif icial +tom at +fin e +dra ws +advoc ate +fran ce +Ù Ĭ +ðŁĺ ³ +heav y +s our +compre hen +no ble +aa p +hin du +cor al +g ars +ow en +n l +st all +yel low +mar ina +in ver +suppor t +tou gh +promis es +pi e +master piece +sco re +for ce +mor tg +crypto currency +o x +r ors +rock in +pro vin +ho g +no stal +oak land +pat rick +inclu sion +tra ffic +ah med +a ha +lux ury +con secu +de mon +âĸ º +b lowing +st ag +: " +encoura ge +ben e +sku ll +do dge +bu ster +kin son +wit ne +er ror +lo west +fel low +à ° +sh re +bl ur +vir gin +compos er +sli p +mor nings +ga ins +tab le +gra in +ari st +braz ilian +w we +tu es +ribb on +an ag +di st +sac rif +em brace +entreprene ur +af fili +de o +t ali +touri st +fat al +ì Ĭ +autom atic +ðŁĩ µ +we ak +wel fare +confir m +benjam in +fi ghts +alleg ed +me ad +strugg ling +pro secu +che f +à ¨ +propos al +er n +ðŁĺ Ħ +dy k +on gs +hon g +m 
ack +mel on +on ent +ru sh +d ap +tol er +pro pag +c ze +trans lation +wal let +cott age +sa il +constitu tion +ðŁĴ Ģ +mun ici +fav or +storm hour +i h +ðŁĺ Į +approach ing +pin ned +j ed +niger ian +n ach +sh at +particul arly +mc don +camer as +anni e +admini str +he at +electr ical +char ming +gib son +bouti que +ex posed +ac tor +pil low +beach es +genu ine +margare t +ben nett +lou isi +pos itions +el y +shin y +ten tion +architec t +ren tal +ac qui +goo gle +sub way +mom ent +ðŁļ ¨ +ri m +metho ds +cy cli +nor folk +Ù Ī +over whel +ra pid +we ar +happy birthday +progre ssive +ðŁĴ ¥ +co gn +pap a +f ool +philosoph y +pol ar +jim my +wi g +ðŁĴ ĭ +oper ating +reduc tion +ph i +fla gs +to the +o di +a res +k oo +k ang +ar kansas +ash ton +wimble don +sci fi +attrac tive +mississi ppi +logi sts +ral ph +la bel +gradu ates +ma ha +home town +âľĮ ï¸ı +foun ded +on the +li z +trans l +mini mum +pre sti +ta m +gener ations +re bel +journ alists +par am +mc m +acry lic +death s +tes la +w t +bry ant +jer us +i stanbul +muham mad +ri ley +k ris +work shops +is o +coun ts +stre t +prote cted +trin ity +man ual +r hin +r il +pleas ant +le mon +ner d +har der +dar ren +bur y +ra h +bas is +mi gu +occa sion +li sts +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ı +e b +de cre +hamp ton +ìĿ ´ +tra vis +trans form +puer to +nh l +av oc +tri ps +unexpe cted +ve t +di dyou +bar ber +st ages +m son +re presented +for t +l al +pp le +nic ely +ignor e +qu il +qu inn +h k +carri er +remin ded +am ong +pass enger +el len +gue z +sc ape +mu ral +youn gest +ma sh +d ill +rout ine +stain less +jack son +gand hi +th al +on ers +edit orial +convers ations +sd ale +autom ation +i ke +า ภ+ðŁĩ ª +hau l +la ying +men tions +am en +abor tion +i bi +coun ties +ca therine +man ds +jam e +roll er +au t +n am +o logical +cep tion +ran king +tox ic +sn acks +victor ian +bang kok +psycho logy +re g +ang ela +respon d +sty le +sophi e +dak ota +achiev ed +mar ked +imper ial +in as +glo ves +sli m +confi dent +att acked +gg er +lon ely +valentine sday +re b +craft beer +orig in +zim bab +ce iling +te ens +other wise +w b +f ers +day sof +advis or +y ah +âĻ ª +en der +republic ans +av a +skir t +pi pel +chi e +jan e +ja x +ðŁĺ ĭ +âľ Ĭ +j ays +bre tt +bal o +cru cial +d har +as is +de au +lloy d +chat ting +âĿĦ ï¸ı +rel ay +remark able +n s +we t +bris bane +ðŁĶ ´ +tion ally +f k +la yer +house hold +consecu tive +es is +pend ant +st ir +crit ic +su gar +photo shop +pa res +arti stic +do dgers +c un +cra fted +am end +bo at +âŃIJ ï¸ı +egyp tian +sa w +tra ge +small er +ox y +pa ired +nex t +i res +tac o +o y +u c +st i +a erial +: // +dr o +dot com +gg ins +r pg +ay e +le an +stri ker +lo bby +prote sts +pri ority +congre ss +am ate +inv it +r ington +mom my +th us +allow ing +pione er +enfor cement +g ori +tal k +dra g +du mb +bul let +san ge +er y +tar gets +ðŁĩ ¦ +he ather +consi der +seaf ood +ve st +ris ks +% . +p g +sac red +he ating +kick ed +tto t +. 
- +chan di +co ven +po ol +pul se +i a +ro ster +shakespe are +es a +car go +pean ut +tro op +ac tion +tab let +home work +cast le +stru ction +mus icians +free zing +bu tt +justin bieber +j j +bah rain +an them +au dit +didyou know +na vig +guid ance +âĸ ¶ +tur f +n un +fic ations +ye men +char ging +x c +bron cos +su bur +p ale +bor ing +among st +for the +em per +om fg +p j +expe cting +ðŁĴ « +st l +ad min +expect ations +sw an +shoo t +oooo o +min ent +ãĢ IJ +wall ace +stan g +satur day +adop ted +dou bles +hom ie +ome z +d han +vent ure +surroun ding +fi le +mob ility +de es +w ski +broo ke +emb ro +re members +kar a +test im +bo tan +m tv +sacrif ice +jerus alem +d l + ´ +proper ly +ili on +as i +leg it +co pe +m cla +recy cling +lar ger +ðŁĴ ĵ +pat ric +gener ous +ja red +p f +mol ly +thom as +ju dges +h b +sor ts +bl vd +o ven +enter ing +plan es +be et +integr ation +boo ked +fre ed +ver n +ash es +to pped +de pot +welcom ed +ren a +m ick +d and +see ks +gam er +ran kings +ren e +mu t +whis ky +fire fighters +gu es +ga ther +tour ney +de men +y ang +new ton +autom otive +back yard +deta iled +mi st +to bac +fi ber +un usual +grat itude +sp are +ne ys +: * +per i +flo ating +fin alist +don ating +dre ss +bro ad +be the +econom ics +tai wan +ed wards +plu g +pra iri +val en +bab a +f ad +an as +har per +dis order +app lied +p att +bi kin +li ver +cu ri +carol ine +ann er +juli an +wal king +mal col +screen shot +co ding +skin care +activi sts +myster ious +ex act +blo cking +mercur y +bat ter +du mp +âľ Į +en se +li sh +ridic ulous +prote sters +ðŁĻ Ī +lu st +swe at +as s +ali ke +co dy +re ments +win ds +as pir +vi enna +pra y +.. .@ +bo i +cand le +assi sts +te e +der son +p ony +f ence +con spir +âĺħ âĺħ +oo th +e pic +ba rely +a unt +b am +diamon ds +end less +scre ens +can cer +gr o +p st +pro spec +mo sque +help ful +ou ri +bro ther +gu jar +cri sti +ine z +to wers +ad dresses +gra y +bur ton +re tweeted +ðŁ¤ Ķ +n ity +du ck +super vis +jo an +kin der +sanc tu +pi ed +âı ° +ł ï¸ı +m ati +reven ge +ce ster +eli fe +desig ners +back ed +bo li +wei ght +cou ch +su res +s its +shri mp +la gos +auth orities +os ity +hol ly +compu ting +fac tors +ab e +pan els +ram ad +sent ence +missi on +hol m +r b +d ads +shang hai +mon ey +she ets +sk ate +thre w +cup cakes +infin ite +l is +practic ing +ess ay +ka i +as ci +mo b +u gh +hol mes +re gg +ik h +mo ck +collec tions +pe p +o va +sal t +nan dez +co y +thre ats +tex ts +cin nam +pregn ancy +pen ding +stam p +flow er +g is +agre ed +pay ne +ro ver +ph ra +sof t +f fin +fa thers +pass engers +aw ays +al a +h es +li van +in s +samu el +ingu i +h of +j j +chen nai +cat al +om ic +he ath +ni ece +pump ed +integr ated +are l +no m +produc tivity +wan ting +vis a +di ana +tw il +it v +cam ps +ro wing +d ley +black and +gu ards +b ells +re verse +vi be +ric ky +mo ss +ny t +âĺ Ģï¸ı +el le +tro y +cu dd +ev an +women s +fo to +mi stakes +wick ed +mi l +c led +me mes +co smo +schol ar +ren o +ðŁĺ Ģ +v ents +# âĢ¦ +terrori sts +ca sey +cardin als +ðŁĺĬ ðŁĺĬ +venezu ela +bol a +liter acy +t w +en o +con tains +au stin +fin anci +ev an +har vard +origin ally +chev ro +her ald +nott ingham +manag ers +âŀ ¡ +accep ting +wal sh +tutor ial +entrepreneur ship +yach t +requi rements +glen n +pe de +unfortun ately +ach ing +dais y +gi an +night mare +âĿ Ĺ +r ina +b art +ema ils +oppo site +who m +sa ke +pu zzle +da shi +par ty +blan ket +bus es +lo re +beau ty +reas on +pun jab +winds or +func tional +exi sting +hel lo +gli mp +con vin +la k +scre aming 
+rebec ca +bli ss +north west +infin ity +cosme tics +pul ling +coffe e +pl ing +op ho +colom bia +interior design +( + +emo tions +sa c +sun glasses +sav es +d f +six th +al y +ðŁĺ » +de en +dev ast +polit icians +lac rosse +g u +pe i +jav a +comb ine +coal ition +er ts +survi v +ch ad +stri an +n n +de vi +coun c +concer n +contro ller +bre ast +j ury +tu m +introduc es +la di +mobi le +al z +ste ady +nur ses +h acking +on line +oce an +ðŁİ Ħ +a am +ju ven +ic c +louisi ana +ar te +street art +is on +wn s +fr m +p anda +no ir +main tain +del ay +symp toms +thor n +ge ome +ter n +carri ed +p ru +pan or +as sy +per u +clou d +sp ra +pe di +e ste +tag ged +ðŁĺ Ŀ +shado ws +naz i +ا٠Ħ +cor ri +âĻ¥ âĻ¥ +j ad +ðŁĩ « +form al +spo ken +ðŁĮ ŀ +enjo y +lo pez +out look +in ho +w ander +Ù ħ +ma ya +pe e +d ine +ãĢ ij +brief ing +suppor ter +ar ily +ght ers +natur ally +doctor who +j en +v ar +new year +re se +si mm +re x +con sequ +tomat oes +bur st +bra vo +bur gers +cr acking +nor theast +bi om +mush room +mar que +dou ble +ni er +v ag +tw enty +key board +win ni +jama ica +par ish +: - +mental health +ali zing +ren der +wa king +ðŁİ Ĥ +g ly +na than +wa shing +mel issa +jun g +loy al +chil i +song writer +guit arist +bo wie +neighb ors +onym ous +as set +ta i +head quarters +ðŁĮ Ī +i hear +ci gare +sur g +) " +re pl +dar ling +ðŁĻ Ħ +z ak +sa re +ãħ ĭ +mic key +ware house +mass age +ine es +did nt +i w +hur ts +eng aging +mag ic +women in +k itten +mor s +c art +tit ans +colle ague +compe ting +er an +k hal +mar ble +dem and +del ight +et ary +bli zz +lou ise +m ls +fini shes +experim ent +conduc ted +electr onics +itt ers +car ing +wh ats +sym bol +jun g +e cu +pi x +con text +char ger +ðŁĺ ĩ +re ig +fra g +ë ĭ +ch ad +tru e +ker ry +def ending +a int +au ton +check out +bar nes +less ly +d t +m me +clou dy +second ary +are z +_ : +app a +const ant +" ) +ve ts +jo b +i ent +ðŁĺŃðŁĺŃ ðŁĺŃ +m j +fren ch +di ver +davi es +hh hh +e book +๠ī +mar iti +bree ze +susp ended +mat o +vi et +ra hu +se i +bol t +en ary +le is +kar l +fr amed +expla ining +ab c +de aling +nat o +ja ke +exp and +leon ard +establi shed +du b +ar men +el led +voc al +nichol as +ori ent +k yo +illustr ated +ah h +danc ers +milli on +ge ta +po pp +as u +mur dered +gi ble +sto ked +gri ffin +maxi mum +adri an +en counter +ther o +david son +ðŁį » +holi day +ev o +asse ts +car son +memor able +âļ ½ +ob am +represent ative +cb d +tr icks +vo gue +vo ice +mm mm +sebasti an +cli f +ath y +par alle +ðŁ¤ · +pa k +ev acu +e ats +ا Ø +tou ched +organ ised +spir its +can ad +gui ded +frame work +ðŁĮ Ł +pe d +natur al +ag ar +replac ed +anch or +ti t +sha h +organ is +super ior +r n +ch ro +eric a +st ill +cor on +chu ck +loc ks +or gan +ro sen +sc am +ben ed +/ # +ke en +tre vor +vamp ire +sor ted +! 
' +af ford +in tro +gr ace +ðŁĺ ľ +sau r +kick starter +influ en +v u +y up +po c +ðŁİ ¥ +a ar +s ang +tre k +et sy +tb h +scre am +chevro let +pix el +shepher d +an or +gabri el +tw ood +sd cc +me ters +develop ers +clo sure +v w +twit ch +ì Ĺ +se oul +pr ice +ho g +n ish +hill ary +scrat ch +in cen +wag on +dis ability +pan ther +ch ats +g d +wit z +sus sex +l ate +den mark +ger ald +cancel led +net te +i x +nav al +bap tist +te t +y ad +ma th +ho y +r andy +po int +intel lec +fru its +w ool +gu in +pr on +the ft +con dem +mar ry +n ola +architec ts +cin cin +roc kets +gentle man +ex plan +t ate +do e +ra ises +wild life +w l +insi der +blan c +w p +for sale +ny c +po well +unbeliev able +pen s +goo dies +mu stang +p ens +st ays +squ ash +xox o +near by +ever ton +co co +le agu +k han +stu d +south west +con struc +s worth +cro atia +le a +su ms +aim s +e an +van ess +iti ous +pa thy +arc ade +b end +sugge sts +sac ram +roy als +ri er +em ir +in cl +an k +clar k +ri ght +vac c +ठ¾ +tan e +li b +u sc +sal es +hu h +s ally +ver a +p ga +gro ws +dru m +tre e +eth ics +sug gest +is ab +se aled +pre viously +anim ated +ab du +ri ses +glo b +pre dat +scar f +del ic +om ar +ll i +sx sw +py thon +ne bra +fun k +reflec t +pav ilion +tic ally +ch asing +bak ery +inva sion +ko h +believ ed +co hen +con qu +cra fts +nat i +cle ver +govern ance +sam ples +fa ils +â Ķ +ti mo +r itu +stri king +inclu sive +sho cking +can t +requi res +dra wings +à¸ Ń +purch ased +du m +z ach +war ner +con sole +man sion +foun tain +circu m +e sh +is land +mil k +pro fits +hali fax +ri val +âľĪ ï¸ı +jen ny +sand ra +ny e +k elly +y al +qu ad +no s +inste in +fin alists +mid fielder +cu e +excep tional +a an +sa pp +gett in +sa a +f ati +sl ice +vol k +s wal +la sting +sum mary +it as +sm o +s z +âĺ Ĩ +ip l +fl ames +ene ws +ha v +hoo die +pitch er +win dy +re vol +centr al +ton ite +ðŁİī ðŁİī +sol ved +mil wau +organiz ations +wee ts +re fin +s th +ãĥ ¼ +el in +ton a +cinnam on +ðŁİ ¨ +ðŁİ ģ +ron aldo +pen insu +ome ga +el ds +desig ning +e igh +blu et +ben z +nu g +ash a +robo ts +su dan +choo sing +en do +ser ge +clo sely +hand y +fing er +be ing +ar te +survi ved +fl ame +mile stone +gu t +d war +fu tures +é e +el o +fri dge +eli c +ou ch +u b +p v +tit an +col lar +st ation +nev ada +aur ora +r d +dun can +âģ ł +bri en +mar sh +Ð ¾ +to tal +ch ry +s ers +su ffe +ra chel +colle ge +to days +cour ts +ch it +re united +gym na +gen esis +be side +re presentation +ch ant +collec tor +ra k +ath ens +ni gh +mun ich +langu ages +fl u +particip ation +__ _ +c v +spec trum +so da +co ver +refe ren +ab bo +ap a +public ation +ed m +mon ica +ar my +ðŁļ Ģ +div or +dr y +stre ams +robo tics +ci der +bull ying +appro val +sto ke +plat forms +sier ra +ex tin +i b +ha yes +succe ed +suff er +at ically +da i +lyn ch +h ound +del ines +ack now +d ated +exclu sively +he res +fac ilit +dam aged +char ter +la kers +fal con +unve iled +wel ove +e ase +pati ence +l one +gent le +gene tic +produc ing +g our +shann on +bil ities +zimbab we +p int +dau ghters +liter ary +bel le +cl am +surroun ded +k any +ne il +pir ate +rang er +hb d +nat alie +bel ong +olym pi +emb assy +sc ol +en er +ak in +lo ren +b h +: / +di va +den im +hi pp +ðŁĩµ ðŁĩ +arn old +? 
' +we ren +em power +dis abled +man or +rasp berry +b af +aw ful +dru mmer +kar dashi +n ash +machine learning +ch u +rebel s +tim ing +mon roe +ton gue +ran ge +pup ils +re ss +amaz on +b z +har ley +pal mer +ballo on +s ings +ic ec +j b +c ers +g ps +whi st +ri se +l t +oo oo +c attle +shoo ter +vod ka +uc l +mt g +le sli +jon as +di spo +at ric +ste in +vintag e +fir ms +flo yd +cow boy +soo oo +is aac +war craft +disney land +beauti ful +be am +franch ise +bu n +k ag +an on +tur bo +swee p +made in +kar achi +dete ctive +penn sylvania +contro versi +vitam in +a side +chron ic +descri bes +remo val +ha h +ap er +ten ed +u to +bad ly +mir ac +f ry +ye a +in jec +ther mal +comp act +th or +te ed +ur gent +l ite +g illi +sop hom +ic o +che m +p m +for k +fre ak +ch ak +recipi ent +i y +ni k +model ing +c ans +ðŁı Ģ +del ux +se am +surviv ors +rad ical +investig ating +reli able +f m +tur t +ligh thouse +to ol +go wn +) ) +bo ts +auto graph +a id +bu ffe +h mm +horri ble +ssi onal +ann i +๠Ģ +k its +sch i +eter nal +hu ss +sens itive +r u +tast es +chec ks +im o +por tion +sk ate +e den +half time +fri ed +ri hanna +ti se +fl ick +ca in +s gt +âľ Ķ +sh au +sta ined +ra ffle +dro ve +sal man +princi ples +sh o +ar u +je ss +gu ine +gar bage +my an +jel ly +dis ru +z ia +q ld +ent ries +la v +fle w +ad mit +objec ts +comp are +ny times +cann es +p n +suff ol +ro c +d ana +e gg +hi st +coun sel +' ! +phy si +imag ination +ad just +explo sion +plym outh +hor ror +elli ott +bour ne +de x +bre ed +au dio +lob ster +disappo inted +nation wide +( ( +incre ases +austr ali +ce dar +star ing +rac ial +e is +g mt +visi ons +stay ed +discu ssions +de an +cur tis +mai den +stel lar +happ iest +h wy +pre season +car av +mon days +hospit als +glimp se +schol ars +ja i +ter race +ann a +goo se +gra ded +lot us +hun g +grocer y +stam ps +emper or +sc oop +in ser +c as +exist ence +he al +fal cons +mar vel +reduc ing +terri fic +magne tic +perfor ms +bar re +p us +tre ating +ic on +w h +decla red +tra uma +do d +come dian +nik on +bu gs +as m +mont gom +ibi za +comprehen sive +ha s +san ti +fellow ship +da sh +p sal +louis ville +sp y +fau lt +d the +fi led +vi sta +de sc +fe ars +you tu +sp s +es p +ri g +cri me +ber ger +wonder land +k ent +in formed +stev ens +my th +ast on +ir i +visit or +at ri +produc ers +al la +person ally +separ ate +agen cies +af ri +il an +spo ke +n ina +squ ad +di ves +de pend +li v +fier ce +enter taining +cha in +sc at +bor ders +pal ette +sp ro +os is +der by +tobac co +zi o +willi e +ju vent +zoo m +hol y +enti rely +af e +mart inez +be ds +pe a +bull dogs +ðŁĩª ðŁĩ +ib m +ne on +ethiop ia +team mates +plan ting +tw er +any time +for bes +ó n +run way +ner vous +ro ger +p ile +ch anc +apo caly +u w +o i +dr ought +territ ory +br ick +cre atures +go in +w aff +gre n +sou theast +je an +am bul +ed ited +stra p +c v +aar on +ãĥ» ãĥ» +t su +descri ption +kin dly +clu tch +im mer +en or +women sday +or ange +ra g +ob vious +hy der +chann els +man go +me yer +ra ining +ge tty +pil gri +coordin ator +up load +ninten do +don uts +san chez +app arel +j r +zz i +, @ +jeff erson +accessi ble +great ly +e id +initi al +budd ha +par is +ma scot +â¬ĩ ï¸ı +sch war +si ri +sp inning +mortg age +e cho +end ange +ge dly +chlo e +enh ance +kar nat +k ry +explo res +ðŁĴ ģ +af fair +ic als +all a +dar t +dolph ins +diffe rences +squir rel +au gh +dr ones +ell en +re store +pa w +un for +pi ke +hil ton +colla b +consu mers +co inci +out comes +pp p +a q +coup on +li est +si ms +k ho +av es +spo on 
+pu dding +cor byn +hat ers +ex ams +sla ve +. ! +p sa +app les +tam il +se d +co ke +zz o +lo sange +car bon +cla ir +... ) +k hu +cra ig +explor ation +sanctu ary +su e +al way +demen tia +won ders +super hero +pakistan i +brown s +bluet ooth +lo cker +mar c +ev entu +delux e +rodri guez +âĿ¤ âĿ¤ +ro bb +ðŁĴ ¦ +lin ux +ten s +intellig ent +se ed +vo ter +s ler +pe aks +inter n +teen age +peninsu la +hand ling +ti e +cou sins +wen dy +me e +à¹Ģ ภ+din o +ðŁĴ ° +ðŁĺ ĥ +ze e +s bury +trage dy +b k +bo re +z in +war ns +idi ot +tou ching +contin ental +tac os +saf ari +wa shed +po dium +morri son +fore sts +c bc +al on +partic ular +be ads +inv ented +lo ch +li ghter +where ver +i de +docu ments +a we +k r +no where +min er +st it +ro x +contribu te +har dy +cl an +ob ject +ca it +ðŁĴķ ðŁĴķ +happ ier +vege tables +t art +g ag +nom inee +heav ily +pan ic +j d +there sa +at m +u ph +s fc +su ri +drin k +n al +re vel +k l +avoc ado +nom ination +ma donna +shar on +malcol m +control led +sh ers +revi val +legis lation +shoo ts +n in +comm entary +pro s +human rights +str anger +mit ch +pipel ine +leg ally +th u +gil bert +tol l +gran ted +gh s +ir anian +refre shing +du k +ab i +pri me +jose ph +mo sa +stati stics +produc tions +mer ry +pat el +sa x +human itarian +struc tures +e missions +town s +fre el +ster ing +rat ings +alle gedly +cab in +st l +w ade +fl yers +tri m +promis ing +z u +bal lot +compar ison +free ze +ou ter +great ness +as sign +snow y +r ale +tor ies +med iter +kno ck +consult ant +cincin nati +analy st +sc oo +je ws +appro xim +pu re +portra its +cy rus +ation al +lo ans +acqu is +el u +accep table +uni on +water color +ru st +batt les +per fu +seas onal +ser ial +mind set +ri ot +fel d +enni al +clo set +pri est +tan ks +int l +scre w +bu m +ab dul +ou x +expla ined +ric a +imag ing +law yers +bu ried +ãĥ»ãĥ» ãĥ» +ear l +âĢ ķ +l ton +resto red +stri pes +fo ss +de mands +ste aling +alex is +mun d +ak er +ur us +war dro +hu gs +gen re +e go +Ù Ħ +particip ated +bab es +ban quet +ti ous +he mi +ds b +lo st +milwau kee +jen ner +ge m +ou tra +lo ses +id i +re ps +ðŁİ § +regu lation +fla w +f ang +vibr ant +ram p +ra ins +well being +so viet +vie wers +de po +libr aries +bi go +ser y +g ill +de struction +co z +c x +bri dal +al ds +plan ted +amate ur +lu d +che ering +show cas +pro file +i u +ver tical +pack ers +wiz ard +ski p +s light +be au +air ways +mu ch +re ra +ðŁĮ Ĭ +ab sor +pati o +pack ages +s ells +ment ally +ðŁĺ ¢ +reyn olds +k are +tri bun +wal t +kn it +ta ste +sur rey +boun ce +cre ature +b are +bet ting +su re +mi ley +laugh s +al ore +cy n +t l +arti st +ann ah +war mer +dynam ics +lunch time +mariti me +vulner able +ðŁĴ ĥ +wol ver +dur ham +const antly +am in +si bl +: @ +bul let +k ach +angel o +wil der +doo m +desk top +law suit +k ca +hen derson +inv iting +bet ty +ta wards +ra fa +le aked +and i +ge ms +af l +vel o +mediter ran +pro be +to tten +steph anie +sn ation +com be +q s +over come +assas sin +ra v +fil ip +winni peg +sh il +determin ed +k as +ou tre +regre t +gui des +aa a +ðŁĺ Ī +wi ves +mani fe +er ly +sm y +sh ima +x ing +pix el +jac ob +ac commod +to y +on o +po o +ti er +an swe +ðŁĴ ģ +ro sa +le ase +bel ongs +th ar +eventu ally +nei ther +go a +ski ing +at ra +ag h +broad casting +f ury +py ram +d ice +volk swag +wom ens +provi der +bom bs +miss ile +whi p +d ick +nor we +back up +el der +mat ure +concer ts +gi ous +sque e +good morning +bra ves +^ _ +au ssie +lun a +mal es +he ck +for tn +rome o +steel ers +p n +pe er +re presents + « +kat 
y +migu el +requ ire +cha ins +l ur +immedi ate +ti mber +âĸ¶ ï¸ı +advoc acy +ex port +an z +tiff any +auth or +ðŁİ Ī +du des +chil ly +hi d +har m +bu g +mon ster +terri er +tu c +story telling +ta k +in ti +immigr ants +b is +reach es +com passion +john ny +contribu tions +ðŁIJ ¶ +mechan ical +impre ssion +ran ks +ko be +men ting +bloss om +pab lo +buil der +bom bing +tw el +sul livan +om o +pe te +de mi +ku dos +w bb +t gif +mass ach +neighb or +che fs +eng ines +pun e +ga ined +phan tom +s days +ext end +gr an +cent ers +jac qu +dat asci +sleep y +el vis +answe red +s lot +con y +flexi ble +ti ally +le tics +% , +andre ws +si ble +mom ma +vin o +do x +invit ational +twil ight +j ade +ill ery +joh ns +f ou +p v +-- -> +break down +billi on +prin ter +mon d +c bc +mag gie +legi on +du b +kur t +po or +paren ting +regi ons +bikin i +be ware +si onal +au burn +kid ding +amp les +sp an +con tempor +c ic +ha bits +ak o +pre fe +bud dies +it z +em ily +person nel +moun tain +ver sus +ðŁĺ ¬ +ear ning +s ink +dar i +u u +s win +i ster +bru tal +n ac +kat a +clo th +am and +ðŁĶ Ĺ +ne o +alu min +week ends +nebra ska +co des +delay ed +brun o +pro ven +in c +i ght +fl an +or o +lam bert +regu lat +w f +massach use +kardashi an +bern ard +fi esta +volcan o +grand pa +anc a +d re +st itu +mean ing +fo am +au ck +at ed +r l +hot el +pers ons +dy nasty +ell or +ma i +am ne +sty ling +avi er +e g +vege tarian +, âĢ¦ +foun ders +sta in +g d +cy cles +sky line +trac tor +exi sts +tra l +kid ney +mar il +inst ag +se tte +addic t +tri angle +flash back +controversi al +z on +p ins +i as +tr ay +town ship +deleg ates +sp am +h ms +cr ane +peop les +o lo +fac tion +but es +on ica +deleg ation +new profile +eli er +mc a +w and +g ely +losange les +ber ke +ti ve +dis rup +zz a +cas a +jor dan +ford shire +ga thered +ic hi +atten dees +à¸Ń ภ+pe ppers +co in +bour bon +ern ity +ro tary +behavi our +jere my +team work +compli ance +tre mend +ðŁĩ § +bu hari +cam bo +bu yers +ha gen +bu ds +bay ern +mon te +sm ells +an za +ath lon +descri bed +work force +gi ving +ap i +invest ments +da il +sel ena +datab ase +th um +mor tal +stu dent +bu yer +do ver +gar ten +att le +loy alty +gen oci +holo cau +theat ers +ru ling +ven us +pat ent +ch un +ab by +awa ke +mass acre +bang alore +break ing +simm ons +ju sti +hal e +ed chat +gg les +haw k +mar king +head lines +stro m +co ve +breath taking +med als +hair cut +christ ine +tele graph +gujar at +ju ra +can e +sho re +propag anda +mu eller +.... .... 
+sa vi +stom ach +thro ws +ta b +war m +j ong +reno wned +hi r +ra is +mush rooms +guaran teed +bo a +m j +revolu tionary +certi fication +bru ins +jo in +w es +pas sport +c g +sex u +cap able +w v +ton es +jac kets +ac compan +spin ach +fore ver +bla ir +wat ts +g l +cou ples +prairi e +newprofile pic +logi stics +massachuse tts +jagu ar +o id +we al +under water +mo z +y i +ma ths +myan mar +pre ps +suffe red +tr ace +wal i +ah hh +bor g +st itch +cu lin +real ise +infe ction +discrimin ation +sh ame +an kle +hu mid +y t +brac ket +tru ck +tri u +ea ster +commun ity +post card +invol ving +ty ler +car amel +over view +ex amples +integr ity +base ment +instru ments +ani um +at us +gh er +laun dry +achi eve +gen eva +pr icing +hyder abad +beli ef +me ta +j aw +accoun ting +lead er +cristi ano +cou ture +cy p +vis ed +, ,, +k nu +h ick +break er +br am +ra b +mo or +ham as +gradu ating +pupp ies +ak h +ta h +ach es +ri e +op ini +g ta +re ign +tra gic +re ver +p ill +pine apple +tou ches +da re +le ys +il o +inter iors +sc outs +bar t +en zie +don o +bro ck +christi ans +ense mble + · +cine mas +new port +air line +win ston +le igh +cont ents +pre scri +ur ge +tr out +fic ally +il ia +sub si +are r +âļ¾ ï¸ı +w ounded +ðŁĻ Ĥ +pe pper +ðŁĴ ŀ +fit ted +af f +re sur +thursday thoughts +z ero +archae ology +di v +je e +i on +awa iting +co zy +beauti es +bal d +dat a +gri zz +stal k +kin ds +cle ared +jess ic +regu lar +ali ens +plac e +bo s +bi zar +thisi s +ðŁĴ Ģ +totten ham +ma fia +s lam +ari ana +car roll +back pack +care y +uni v +r g +pe p +dig it +tatt oos +ag on +volunte ering +diffe ren +consu mption +ka thr +head phones +t shirt +o b +ele ment +re tail +sh ru +al gori +contain er +consci ous +fi l +com ing +ra sh +u rope +def ine +gi or +femini st +flow ing +rout es +gl aci +fer t +somer set +ant es +twee ps +$ $ +h our +endange red +year sof +ro h +po pped +bac king +ba sil +bra ke +mon aco +lgbt q +pra gue +ut ility +cas si +gate way +haun ted +sch ul +ðŁİ µ +shou ld +walking dead +comple ting +dann y +montgom ery +pengu in +ss i +mer chandi +ðŁij ij +chur ch +h ates +cap tain +brea thing +ce t +fair ly +approach es +compan ion +surpri sing +kany e +pe y +hin di +targe ted +lor ds +de ut +di gging +ger man +ru t +ener gy +close st +y un +apo logi +ภ± +s ack +ru p +dd y +port al +d ough +b ats +ðŁĵ ° +at ur +graph er +pi res +mo tors +ðŁĮ ¹ +j c +dan g +tu k +clu e +us c +pag e +d less +bro ws +ju s +ad ing +re marks +oo m +car dio +ste fan +arm strong +âĢ¢ âĢ¢ +ni est +belgi an +bi op +so y +lo f +í ĥ +q t +flashback friday +ce e +ģ ภ+wre ck +mar ines +amend ment +wardro be +vo y +bur ned +guit ars +ra inf +li fel +ssi l +oun ce +exter nal +c key +me sh +she ikh +inv itation +sugge sti +pop corn +phenomen al +an onymous +tun a +chic ago +o val +del y +loc als +( & +pro f +no vel +fin der +spar ks +la ven +in fu +nic ks +qu ant +ra e +exe c +dist ingui +st ances +mu tual +sh al +unve ils +edmon ton +zan ia +a dio +vie wer +brad ford +audit orium +qu is +re act +htt p +l ero +chee ky +impac ts +ta k +ed t +desper ate +t ay +ì Ħ +sett le +bar gain +resu me +un ite +thro wn +ke st +se ys +mar ching +am it +decl ine +sch ar +me tr +stan ford +lin ke +ber ra +dol ls +rug by +jam i +b or +road trip +dino saur +mi k +sun der +re m +b k +over seas +nau ghty +imple mentation +iam srk +lun cheon +fir ing +mi ami +pere z +the e +z on +gi fted +con version +ceram ic +¡ ï¸ı +pe dro +ì Ĩ +v ick +! 
@ +he ed +si d +b w +docu ment +pl un +gr ants +fant asy +predic tions +vali d +car ved +gradu ated +ðŁijį ðŁı» +nation ally +ch y +af l +re sso +blan k +ri vals +j ig +e ties +om ics +une mp +b ound +sk o +inspec tion +par al +high s +cri sp +b ans +ob a +[ @ +co spla +costu mes +rec all +mou th +ni gel +b ts +ter a +ko v +do cs +west minster +dic t +gra vity +kar i +ro gue +t ted +war k +ida ho +w end +aw i +queen sland +proce sses +cli ffe +m ick +com pens +op ol +the y +cl ari +wiki pedia +salman khan +haz ard +pre ston +swee test +pd f +che es +tr ilo +south africa +bur nt +( $ +con tain +t p +sub mitted +sound cloud +at u +re z +word press +corru pt +n f +ma ker +í ķ +par as +adv ent +ri al +ca fe +fo ssil +!!!! !!! +co ws +c j +sp ur +institu tions +land mark +ent it +re ut +h is +alz heim +we mb +regg ae +mo squ +st at +identi fied +deal er +re am +re land +ten sion +ðŁĩ © +wra pping +deep er +fr at +red dit +ar is +moroc co +.. " +b low +ma pping +pri orities +ing a +swa p +re wards +conspir acy +creati ve +c j +congre ssional +vau lt +ple x +sophom ore +shad ow +ele ss +ðŁĺ ħ +dar ts +aldu b +anno ying +pro ps +n as +alumin um +h bo +offen se +j ill +oni ons +la ur +ta e +har dest +sh ro +ga ining +meas ure +ed tech +cyp rus +tar a +ang eli +car lo +go on +all i +im plic +ju pit +resil ience +ha il +bal anced +) ... +joy ce +gr a +th eli +defin ed +shi pped +main ly +min a +l m +sac ri +o ber +p im +claim ing +ent ers +co rey +bo k +cri ed +cool ing +dani elle +pharmac y +thor ough +ca ke +k lo +outre ach +z ens +digital marketing +val ent +sn p +her b +mr w +caf é +cap tures +no tre +triu mph +pan cakes +cu mber +spi ke +d ation +bi gg +sp er +crit ical +am al +too th +foun ding +a stro +' # +quan tum +th ames +un c +pri de +air bus +kno cked +un defeated +mediterran ean +cal cu +clo wn +sens or +ham mer +for give +cu shi +ber ry +maje stic +elec t +polit an +g ta +k ari +bur ke +sea hawks +volkswag en +re i +landsc apes +cas u +grand father +list ened +/ / +star trek +rainf all +fur ry +vi er +star k +rif le +ff a +leg es +hillary clinton +min us +correc tly +architec tural +pre ce +up side +box er +ðŁĻĮ ðŁı¼ +is ai +de t +pro vo +tis sue +spoo ky +ve led +re con +prospec ts +que bec +âļ « +ig no +anat omy +shap es +w p +p interest +hor e +an es +pick up +ti p +pra desh +hu gh +co e +po k +gram my +well ington +sti gate +ri gh +lea p +king ston +scen ic +go sh +v ani +au g +s ary +zi er +bure au +lin son +con te +fra gr +all an +g aw +lan a +colli sion +surve ill +ren ais +ar range +s ali +do in +br ance +bren dan +our se +in coming +suspen sion +à ´ +l la +educ ators +in tri +da e +bio graphy +bul gar +villa in +go thic +rw anda +e w +may or +meet up +democr at +mor gan +su dden +te sco +car rot +bom ber +mck in +re ne +fun day +agricul tural +haha h +show time +form ing +col a +scor pi +quo te +po ppy +s life +d az +tu b +ne n +mo t +ðŁĺ » +s ore +elder ly +o ve +skin ny +um i +anc o +man ship +we re +g v +k ah +fol ding +ne at +samanth a +dan ish +uk rain +humid ity +nu tri +jak arta +cand les +oooo oooo +at ile +streng th +i bra +bap ti +charle ston +fr ames +girl s +clear ing +glu ten +# # +super natural +ju bi +ph one +he in +dr un +le ak +invest or +y er +dom ain +ball room +mi sh +app li +off shore +bla ze +dor o +âĺķ ï¸ı +win ery +shar if +ad ore +n ir +saf er +si gh +as cri +strong ly +trac y +ck er +ol l +faith ful +ey ed +deli ghtful +vis m +karnat aka +tit an +wh ar +jer seys +re fur +heav en +gri p +pan ama +pre li +glu ten +o dd +cont ent +pon ti +tion ing +e 
commerce +feder ation +flaw less +ge ar +ti res +by r +pol ice +cu ban +tri butes +tic ul +chur ches +nur sery +di aries +muse ums +snapp ed +i van +wi ght +touri sts +ramad an +t rent +prophe t +won dered +focu sing +hi d +ic ons +i q +ambul ance +pi st +fun niest +time less +sr ilan +bu ys +ki ds +colour ful +a shi +ch ir +mu m +ðŁĵ ļ +let ter +x en +reut ers +pre serve +in ting +ste p +fu ji +uni ver +i u +show down +po ems +surveill ance +suspec ted +ta e +sol ving +tom b +mother sday +car pen +recru it +pil ots +bro c +mix ing +fri days +ty r +represent atives +tra pped +abdu l +free style +clu ster +âļ łï¸ı +k d +sk ill +pit t +ex o +commer ci +muse um +loc ally +g ina +no bel +immun e +fr ac +cap su +main ed +attemp ts +bull dog +be spoke +sing ers +sp elling +seg ment +nat ures +tic k +lip stick +clean er +gett able +preci sion +âĢ¼ ï¸ı +th ood +re ef +no pe +bill y +di gi +mu si +ri val +figu red +tal ity +sun ny +ber k +aw ww +awa its +un real +co pen +asy lum +ex otic +bu en +mo ck +en able +arch y +fr a +pla stic +al mond +amp li +displa ys +abbo tt +s me +x p +ðŁĻ ĥ +graph ic +i ved +mar a +cau tion +lea ks +en berg +ul u +unic orn +cann on +appren tic +ðŁĺĺ ðŁĺĺ +b ball +wil low +at ics +am as +manufac turer +campaig ns +port ers +flo ors +l su +ty pe +ke j +honor ary +it im +to le +min ecraft +d x +ma sh +ri o +consequ ences +ron ald +go ssi +suffol k +mu se +r bi +live music +i van +ðŁİ ¤ +le u +patri ot +man it +lan ca +home decor +de ar +sig ma +ti de +str ings +v ita +sequ el +try na +inve stigate +bor is +ve gan +barri er +mind fulness +web b +hu stle +in da +tan zania +str ay +tex as +c ag +diagno sis +wom an +g w +ob session +l ative +nu fc +fl ynn +moment um +sof a +wal d +vege table +tu cker +supp er +se ab +ar ro +se ag +ven ting +counc ill +sp lat +cal cul +.. # +com fy +odi sha +sto pp +war fare +ca es +à ¨ +co y +price less +in sec +ðŁĺ Ľ +contro ls +empower ment +datasci ence +per pe +gen ic +e res +tru deau +man o +sla very +expand ing +ma he +fa iling +s aga +photograph s +cre st +re on +surf ing +hi e +ðŁį Ģ +ja e +fel lows +south ampton +sol om +ce ster +tab ility +hor n +se ct +he e +cole man +at las +explo rer +consul tation +copy right +organi zing +den ied +mon keys +noo dles +br is +fl or +dou gh +bon ds +sho cked +eco system +care fully +w m +apart ments +cur ve +san diego +must ard +comm en +cere mon +e ch +ru th +ðŁĻĮ ðŁı» +hawa i +fil med +te ar +as ingly +ca ir +wat t +instru ment +ou tta +ye ol +river side +ë ° +. 
: +nor wich +alo g +migr ants +new man +ri de +spr ink +targe ting +beli eve +tor ch +reflec ts +per mission +ff man +ene mies +bas ics +se ized +sun days +le i +hass an +en do +h c +st ad +le ments +kk kk +nan o +shar k +man a +on ic +treat ments +ear ly +collabor ative +shu ttle +bran ches +mis ses +mained cm +ap ers +ky le +carri e +leis ure +sh et +bir ding +adv ances +ðŁĵ Ŀ +popu lar +di ane +a be +re war +neigh bour +k pop +remem brance +play ground +ru b +krish na +e bola +inqu iry +ep a +lu min +organ isation +abra ham +norm ally +pre ten +jan et +w t +ðŁĴ İ +encoura ging +a stic +bu mp +syd ney +s z +ss ss +gar rett +ðŁĵ » +consul ting +roman ia +spo tting +chanc ellor +ar ma +presti gious +ðĿ IJ +t ad +cry st +compe tit +rati o +cat aly +bro w +j ur +vi king +commu te +y day +la yers +du mb +esc al +genoci de +f ill +gu pta +ste pping +se i +fo to +wild cats +col i +projec t +ear nings +st r +ge ons +comple tion +b m +decor ated +craw ford +af ghan +sc are +visi bility +hi b +direc tion +stro ll +christ ina +alter nate +cl are +sty list +be hold +s ance +leop ard +acqui red +narr ative +ash i +the a +?? ?? +pe as +at ch +sli des +le en +renew able +eng lish +qu ir +co aster +r x +fo ols +match day +mis m +amaz ing +z ig +ke ting +won t +to wel +di ab +sta ke +n m +mel t +e than +gra pe +polit ician +sm en +í ĺ +re o +wedd ings +cat cher +or acle +me mo +ðŁĮ ´ +ec k +rob bie +norwe gian +oper ator +am or +se wing +ju l +x ie +u v +fif ty +me ga +tatt oo +liber als +u pri +traffic king +richard son +su v +ki p +mess y +tremend ous +gl ou +cour tney +la d +stere o +my ers +i dio +^_ ^ +man ning +dy e +w d +thr one +jun k +as u +provin cial +k ook +wr c +fine art +hamp shire +renais sance +b red +fall out +s j +sn l +al am +tor ture +fy i +sh ines +pa w +ch ar +hen ry +c row +aci ous +di an +pa ige +ba re +stock holm +scen ery +ðŁĩ · +jef frey +pu sh +decor ation +ne d +cu te +brig ade +laven der +inv ites +e sports +vo ir +dri ed +tran spl +sur geon +no vels +pul ls +son y +lun ar +man e +i vy +fru str +dor set +sa i +tor res +ssi on +shut down +suggesti ons +writ ing +e o +battle field +u ga +ðŁIJ ¾ +vac u +spl ac +g it +u g +high land +% ) +mer maid +sacram ento +ta ils +p w +ka h +t ell +enh anced +ì ķ +auck land +cru el +ðŁ¤ © +au dre +sail or +gram mar +g love +de on +infl am +fresh ly +k ell +zi p +christi e +mil d +di xon +instru ctor +g ence +ãħ ł +sub jec +constitu tional +crow ds +in visible +ru ins +da k +si p +pla que +p ouring +comple x +z ine +ste ad +f let +trans mission +lo way +ar un +incre asingly +au d +transp aren +cro wned +sc oun +blizz ard +lux u +fi ers +achieve ments +hun ters +rock ed +bas in +vio let +pro ves +achiev ing +pro sper +se ga +flo at +vi an +xi v +pol ic +tur a +approxim ately +wander lust +keep ers +geta way +co d +pol is +br yan +col ts +tal ents +yo gur +gluten free +wri st +gr y +cze ch +ðŁİ Ī +ev ille +ðŁı Ī +to x +dani els +am er +bi ds +weare one +me tab +g t +boy z +pd x +pos session +pu shed +shr ine +reali stic +tri gger +na vi +ru mors +n af +jen kins +tr un +comm uni +Ã Ĺ +gam ers +arm or +moham med +bal cony +y ah +stron gest +rhy thm +unfor gettable +k p +ho bb +custo dy +greg or +r ita +aes thetic +il ation +sponsor ing +n ay +kid napp +sh s +ra jas +me g +signific antly +butt ons +la c +ver sions +essenti als +opini ons +k ro +d printing +wi dely +d k +ur an +y al +reque sted +c n +cur ric +plu m +gr un +v m +dev on +m yo +rel ation +juvent us +rou ge +min ority +min es +jupit er +n ine +oxy gen +fran kie +une sco +fab ric 
+disgu sting +sal man +dete ction +lan ka +d ac +ðŁĩ« ðŁĩ· +argu ment +shel ves +cel tics +rober to +pi gs +he dge +fau l +pow ering +butter flies +fi r +re make +att i +com o +emp ha +kend all +poke mon +se ating +d ans +bald win +ðŁij » +lesli e +one direction +ti mber +im an +fon t +e der +di on +ste ph +for mat +gre gory +pro p +he x +ru in +sor y +inf er +n aw +bar ak +sd gs +kar ao +lu sh +v ander +end ent +g is +a fro +soc cer +ay an +t uni +lun g +da yof +alex a +mar ath +addic ted +ag ile +hy gi +light weight +ì § +mand ela +jo ey +anc y +hu m +bi r +memor ial +jim in +ging er +v ak +jav ascri +cro ps +orig ins +d ari +pi per +im port +aggre ssive +predic tion +re pairs +cr acker +voy age +ni ke +mu mmy +linke din +country side +bor der +gla ss +per t +s als +sho e +autograph ed +wal nut +colle gi +sal ary +pa iring +ðŁĮ ¸ +cath ol +swee the +defe ats +streng then +roof top +impro vements +barri ers +ur u +t ally +ru led +ðŁĨ ļ +nai ja +emo ji +per cent +gi o +pro bs +on ce +adm its +pa ths +li ar +day tona +pe ters +cal i +cal li +mu g +o sa +ap h +ab y +hy de +eth nic +pla ins +ol f +haha hahaha +holi c +?! ?! +su bli +bl acks +mo t +gh ton +lo vin +b rent +bar u +l ati +de w +ate au +q a +pain ful +bu sters +st atic +ðŁĩ¨ðŁĩ ¦ +note book +out fits +si es +r f +floo ds +Ñ Ģ +thro at +su ici +ro vers +beng al +pre pares +blo g +mini ature +Ø ¨ +am phi +com b +r sp +in timate +green e +Ì ĩ +al tar +surg ical +ves sel +... ? +gav in +g ator +threat ened +z ar +rob bery +di er +promo ted +y g +x s +su bs +inter viewing +threat ening +do zen +me ado +water fall +nintendo switch +cal um +mini sters +dro p +univers ities +war ned +tac tics +ðŁĩ ² +refu se +ad ju +v ast +ðŁĺ ´ +mc fc +lib ya +no filter +distribu ted +re ser +ron nie +de co +javascri pt +mon k +intere sts +fle x +mar tha +sti es +oo d +ðŁ¤£ ðŁ¤£ +e un +b ali +g omez +sti mul +moder ate +d ity +ir is +stra w +consist ent +direc tions +adop t +sal sa +cro o +reco vered +black friday +lan caster +accep t +weareone exo +buil ds +free man +air plane +diti on +bel ong +jam ie +pit ching +li f +om in +cri spy +pre pping +ve g +chan g +accompli shed +graci as +dolph in +elec tor +culin ary +super bowl +wal a +pur suit +black berry +be an +cardin al +pro ved +immigr ant +stric tly +holocau st +pass age +ha us +cou p +pur se +har ass +< < +le ed +ado be +st ad +legis lat +par ked +pri yan +sil va +kri st +s the +fun ky +ig a +sett lement +ph s +t mrw +stre ssed +hun t +ho ckey +treas ures +cham bers +ol u +hu t +mar ley +tex ture +wilder ness +mm ing +poten tially +om aha +ju dy +to es +spo iler +distingui shed +feli x +ah u +recommend ations +zom bies +hit ler +tri ple +colla pse +motiv ated +ulti mat +gg ling +so y +ci gar +fo ren +vine yard +gl itter +fin dings +colon ial +hun ter +eri k +den s +beet le +lot te +sub tle +s matter +tru sted +experim ental +nam ents +ðŁĺ Ĩ +regi on +acquis ition +bre eding +quarter back +am reading +oo td +ru de +initi atives +st out +hy ung +out come +al fred +mic s +exper tise +bacter ia +pengu ins +jump er +valen cia +bar k +ing day +sell ers +contrac ts +hou ston +commissi oned +adap tation +swan sea +santi ago +common wealth +ju dging +sub mission +sco rer +tom my +ñ o +ex quis +fil ing +explan ation +alli son +wemb ley +ri dge +chev y +san tos +own ership +cogn itive +favour ites +sh ed +phil anthro +dele ted +go dd +s nor +gui delines +ff ing +je ep +cli ps +sw amp +an or +guil d +bol ton +spring field +munici pal +goal keeper +ye on +ðŁĺįðŁĺį ðŁĺįðŁĺį +ãħĭ ãħĭ +water front +gra ve 
+contempor ary +ar ity +ÃŃ a +sle eps +sy rup +al am +pi re +co yo +moto gp +ty son +kej ri +cir cul +sing ly +cr unch +complic ated +nostal gia +k op +mo ve +k ale +mac ro +mid west +h ans +tri bal +nu de +௠į +bey once +congratul ate +cat er +leagu e +ðŁĻ Ĭ +la dder +cra shed +tech nic +karao ke +harass ment +ro ts +experi encing +kri sten +ðŁĩ ³ +ðŁ¤ Ĺ +reflec tions +guin ness +illustr ator +ðŁĻı ðŁı» +cen ter +nar row +comm ons +regul ations +Ù Ĩ +har m +cro ft +cu ssion +hong kong +st ical +intern ship +zo e +cho p +hoo ds +estim ated +batter ies +berke ley +smooth ie +shau n +cro s +~ ~ +cam pe +hu mp +b g +proto type +cl ick +shaw n +re viewed +tem pl +p f +jed i +blo gs +ray mond +as th +ba h +av ail +scot ch +leaf s +nik ki +to k +hol low +ur ges +of t +un like +lat in +u e +cat ering +mil i +alter nati +ma ver +Ð ¸ +ag le +pre order +lu x +cu cu +ðŁijı ðŁijı +t art +âĿ¤âĿ¤ âĿ¤ +arab ic +rapi dly +ar rang +all en +travel tuesday +pa ws +flo ws +st ability +flu id +ca pp +can berra +uu uu +sp ani +demon stration +m la +plac ement +m w +presi dents +awe som +bever ly +ani st +ne al +father sday +referen dum +la hore +o aks +deb bie +half way +gho sts +de bor +matthe ws +fi at +t fw +pre sen +rob i +de d +bro ck +laugh ed +am ounts +bam boo +kinder garten +eat en +mtv hottest +break out +u sic +fra ser +legis lative +p ang +modu le +sam my +go ver +ear ns +expe dition +gar h +concep ts +char lie +la va +bachel or +veg gies +deter mine +el lie +un locked +fru it +dal la +cou pe +wash ington +depo sit +iv ory +pau la +chic ag +gu cci +ðŁİ ĥ +cul tiv +pier ce +li fted +stu mb +re cover +musc les +conduc ting +cb s +mcla ren +sophi a +cel lu +oce ans +up loaded +game play +mal dives +kim ber +avo i +rac er +ca ine +cav s +h ana +li ga +ra ven +inter vention +inaugur ation +oo h +at traction +merchandi se +tune in +li king +juni ors +int ended +att acking +aqu arium +i wd +comp onents +sur ing +cent u +yogur t +ðŁı ĥ +show room +op tical +ty our +ju dge +yi eld +an to +pl c +transparen cy +recy cled +chi ef +ar om +ambassad ors +plan et +âĿĦ ï¸ı +om ed +vaness a +cour t +mar gar +hal ey +v r +reg ina +pd ates +hi span +live stream +âģ £ +ya hoo +gal la +secu red +w ir +bene ath +off l +n il +am b +ye g +out let +u te +pe ep +lind say +bent ley +... ! 
+he el +trilo gy +vo s +ty re +there fore +tor onto +ab i +simp li +ja e +exten sive +eleph ants +s or +orient ation +im peach +re play +constru cted +peter son +pa is +por ted +custom s +colla p +ad u +high lands +sal em +shel by +ko vic +stra in +ro sie +sen ators +snap s +bo bb +suz uki +bla des +k p +lo lo +gener ate +si ght +ma e +struc tural +predic t +jump ed +ah mad +sun g +just ice +gla m +vol vo +jubi lee +de tention +lo sses +pu ri +every time +Ð ° +ra o +ed ge +li mer +rese mb +har old +re tri +sacri fic +surpri ses +am c +srilan ka +bar bie +men s +fin n +ag s +ukrain ian +em brac +î IJ +flav ors +hom er +lau re +ou th +pr iced +ver de +fir m +ah s +cu b +tre y +par anor +pro fit +in dv +who a +har sh +al ot +crit ics +hu bby +fi gur +gi ra +ca stro +chan el +in put +origin als +ten ant +yy yy +ture rs +lincol n +co on +lear n +ch ou +ac are +o les +din er +hy p +bizar re +mc r +let sgo +decor ating +ðŁĮ İ +al ison +ar vin +f d +reha b +mccar thy +lot tery +da h +minne apolis +eli gible +diagno sed +emer ald +destin ations +s ans +or y +bla zers +n v +ba il +digital art +no c +mal ta +sol ar +pi pes +alleg ations +no ck +po pe +bri d +premi er +n x +present ations +ef a +bo ws +val ve +opp onent +Į ë +visu al +ing le +cate gor +e ter +po is +dan i +at tract +neu tral +th ene +cra shes +fred die +ut ili +c st +awak ening +slo ven +quali fy +pro of +fair y +le v +fre ight +enjo ys +cup cake +flav our +â ķ +protec tive +ðŁijı ðŁı» +is u +ad mir +h mmm +continu ous +ai res +rap tors +showcas ing +y uk +pa ste +follow er +instru ctions +sp ru +@ __ +the o +debu ts +ve tte +sto w +es of +ach ed +sul tan +sand wich +som alia +franc o +car ne +flu ffy +al pine +jas mine +he ated +viol in +ple ss +divor ce +per former +phi es +port sm +dar a +kir by +lo p +chill i +for th +sky pe +ðŁĩ®ðŁĩ ¹ +celebr ities +ed y +ve e +po ison +ey el +gra bs +ssi c +un o +wester n +rail road +am er +numer ous +s v +fo w +fi st +âĢ ĭ +reque sts +mar tial +em my +accept ance +lau ra +ภ´ +er up +hyun dai +out lander +u tt +wrest le +esp resso +demand ing +g dp +geo graphy +sas kat +tro ll +confe der +su es +se m +be ts +t ful +to sh +teach es +col oured +gal way +mac y +dis orders +bb cra +at em +fen der +lit ter +e sh +provi ders +renov ation +nomin ate +ps g +nomin ations +jen na +shar p +some day +z ur +bra ins +che shire +pre y +hu go + ¿ +to ken +r v +car r +tac tical +zel da +kay la +fern ando +photograph ers +j our +umb rella +woo dy +congress man +du mp +le vy +ju an +d azz +sign als +la in +an u +mic hel +por ch +al den +sibl ings +y ale +pe el +sw ick +gg in +ll c +k ale +s con +il d +pat reon +re el +qu in +wit t +mar ty +moo dy +ton i +der y +g ators +speci fically +dd in +ly on +tr ick +meado ws +p j +bor gh +vi k +tu r +bron x +pu ff +lan tern +ðŁ¤ ¦ +g ently +be stie +fac t +refu sed +fas ci +mp y +ðŁĶ µ +cross over +mead ow +indian apolis +duc ation +sle y +loo m +mix er +new music +film maker +prosper ity +li m +week end +cre amy +neu tr +lu ther +h v +nor thern +tw o +h ra +cat ches +appear ances +ha bit +kitt ens +n v +illa c +inf an +regar dless +liz ard +dun k +cur tain +ac om +in tu +ve z +e min +fl ats +calend ars +em power +ru ined +hun gary +vi d +we x +u lum +aber deen +o sa +k t +ma ssi +se emed +s den +' ? 
+tele phone +de fi +insp ires +me ow +z ones +bl ind +pl y +tuc son +advent ure +ge d +oy ster +ðŁijıðŁijı ðŁijı +out put +tt t +metal lic +sma sh +ucl a +sco ts +perfe ct +lu cy +regular ly +sp ic +rel ative +ath ers +mis e +batt ling +deci des +mat a +occu pied +random ly +cat softwitter +gi an +ball y +al ties +al lies +im men +sy rac +ðŁĴľ ðŁĴľ +l lan +au r +k ut +lam ar +affe cts +n ra +star war +ðŁ¤ ĺ +sc ram +en chan +pro cess +luxu rious +ar ray +sher lock +comp ati +dor f +stre ss +m su +s with +sal a +sof instagram +fo il +under stood +qu ay +r p +c ade +ja w +en ab +en coun +ðŁİī : +do ck +satur n +mu ll +lay out +ra rely +happ ily +fix ture +or ph +over looking +her bs +m itt +pil lar +nol an +pe tty +str y +u i +mu k +o res +o vers +á µ +re creation +we sley +ri t +kejri wal +sto cking +g v +subscri bers +moo se +ma e +ber t +opp re +assign ment +u ro +high lighting +cal vin +we igh +cambo dia +av on +ke m +dis abilities +read y +char gers +p ads +iz ing +illi an +tru ste +col leges +associ ates +alban y +mil ton +cr on +bu r +har dly +si ghts +anti ques +e cho +surpri singly +ha iti +cap t +ph p +op io +ine quality +equ al +ken y +sch mid +autograph s +ren t +qu er +cit rus +challeng ed +te c +epi de +fe st +z hou +li me +citizen ship +cry stal +convin ced +mess enger +copen hagen +âĿĹ ï¸ı +war ran +develop ments +ï¸ı âĥ£ +fore x +hi ro +sne akers +xi de +vi va +stere o +bat ting +ss el +ho st +beng al +critic ism +q c +cr un +attemp ted +ry e +determin ation +cre ations +d read +label s +pos se +anc er +joh an +si ster +partner ships +les bian +k st +guaran tee +bar o +fix ing +ma son +m ous +chem icals +t less +bio diversity +par o +bhar at +ac ol +refu ge +en te +t iti +dys sey +respon ds +lef to +in er +se vel +rahu l +ol ine +frank fur +cho reo +enjoy able +c to +strugg les +wood land +heavy weight +gen s +rece p +ac cred +ðŁĺ ¡ +trans formed +list en +at op +n k +sur ge +be re +gover nor +prison ers +clau de +t ill +mu lator +emo tion +water loo +star t +ðŁĩ º +clean ed +grand mother +fear less +afric an +astron omy +ðŁı ģ +ภĻ +the world +su itable +anth ony +k and +tt en +meaning ful +disc lo +jaco bs +à ¸ +tom linson +ghe tti +ty pho +sub stan +as co +te k +nag ar +mu d +am on +vacc ine +f ty +fle sh +no el +infl ation +portu gue +glam our +tra m +v re +te qu +roun dup +w yn +rejec ted +mosa ic +si ghting +cal f +o ta +com position +go pro +gonz ale +e ed +b ard +tu e +effec tively +we en +al to +ri bs +rel ate +thir sty +fu rious +di m +ch ard +perfu me +s ny +chur chill +k of +master class +wa ve +ðŁĶ µ +er in +own s +to be +sk illed +te m +go f +en i +tor i +cra zy +l ick +resi stant +ici al +ag ar +! 
: +g ali +del aware +bl itz +koh li +pu ck +avail ability +hi malay +influ ential +cro chet +victor i +read ing +ho bby +vie t +j as +en gra +sk ul +ðŁĩ² ðŁĩ +educ ate +tech no +distric ts +blu es +se tt +seven th +lear ns +ee ee +apocaly pse +hang out +cru el +mu tu +bru h +hel en +she er +c tion +kle in +tex ans +ce real +sh ine +ne red +gra s +am bro +f ella +hin du +matthe w +li ma +mir anda +je wel +so ho +euro vision +neighb ours +chand ler +be sides +ðŁ¥ ° +ast ros +thu mbs +ren ault +ra ve +hi red +ðŁĸ ¤ +it ary +z or +bla zer +k ine +ea u +kat y +dc comics +pe c +ro dgers +water proof +kill ers +super int +pre serv +as so +brew ers +promo tional +sc am +villa ges +sket ches +ju icy +for life +au dit +so lo +fundam ental +len e +philipp ine +t end +conserv atives +sponsor ship +dd le +a ine +h tc +os i +hul k +w af +ภĻ +evalu ation +ant ine +sle e +robert son +roo sevel +ag i +sophi stic +emplo yers +bubb les +ko wski +inter action +sh u +bou le +ic an +j are +han k +leg itim +k nicks +kar ma +recei ver +per ks +u h +sta ir +sun i +labor atory +gra ves +voc als +oo t +c ture +thri ve +tic o +ãĥ ³ +b w +carto ons +mcdon alds +dra w +y ung +pl er +li d +eth ical +groo ve +ent a +international womensday +pat ron +wor ries +ðŁİ ħ +ðŁij ĭ +ka therine +di az +tor i +bach chan +tru st +min eral +ic om +buil ders +bor n +col oring +lat te +ca se +revolu tion +tra der +ox id +chi pot +inst antly +sou thern +se hun +pro b +her nandez +lis bon +hu awe +p ong +me a +ro oney +wheel chair +ke en +be tt +cor in +regulat ory +di splac +ka ren +sch em +sun sets +wh ales +remin is +he p +hi de +mar cel +pand ora +do yle +th fc +ot to +no kia +trans gender +ko v +hawai ian +sha ve +so vere +exc er +nick i +pu g +st or +ro th +wee t +leg al +dig nity +po w +hom age +ðŁĩ³ ðŁĩ +s re +can on +la x +wo ah +quart z +ñ a +gree ting +flick r +nai robi +advoc ates +an c +vi i +eu gene +th ra +c re +el an +pen sion +th letics +ton i +re agan +x v +sto re +ben ch +har lem +todd ler +sent enced +âĻ¥ ï¸ı +glob ally +che aper +u f +ma m +nic o +ik u +tho u +ni st +dam i +th ala +rho des +sal e +bow ls +â Ī +las vegas +sanc tions +adm ire +mat ched +un able +travel er +ele ven +straw berries +âĢĶâĢĶ âĢĶâĢĶ +stu dio +jac ques +im s +valu ed +s no +cheese cake +n xt +e os +s x +f x +ton ic +hat ch +chic ks +gra ds +hand ic +r ory +as p +ri pped +denti st +n en +lu fc +âľ Ĭ +di ge +hop kins +sher man +f da +for all +ash ley +str and +h y +liqu or +buffe t +ess ence +phar ma +suri ya +ðŁĴĻ ðŁĴĻ +festi vals +z an +re fresh +pur ple +uni forms +kenne th += ) +as an +hel sin +transform ers +k ali +person alized +chal k +bo bby +â Į +the mes +depar ture +prin t +illustr ations +qui et +agre es +gri ff +Ø ³ +m iti +toge ther +conven ience +ab ar +car lo +turt les +info sec +some what +ar lington +scholar ships +emir ates +mu ms +st ella +auton om +fe ather +g ore +nom inees +fragr ance +Ñ Ĥ +w ong +thea stern +gr e +z illa +is i +bump er +go o +do zens +ab duc +âļª ï¸ı +o ils +don ors +sil icon +i pod +fortn ite +ðŁĴ ¨ +tor o +spark ling +consci ousness +pal a +nu m +moun ted +ffin s +thi eves +team mate +pra b +om er +ta pes +bo d +mit su +ste w +e re +p bs +tu sc +lo we +ra de +parliam entary +h m +ed gar +ðŁijĩ ðŁijĩ +to a +a gh +hon i +s late +ge ek +ap t +hard t +ta p +horiz on +grow th +make over +hi l +paper back +id an +reha bil +gi u +possi bilities +let tu +fran co +bo ss +ach er +does nt +mo e +ta ker +huss ain +ml k +di l +th ia +ham a +real ised +raven s +curric ulum +m ith +k night +ted x +r v +isai ah +cumb 
ria +birth days +f ing +pre z +mu barak +exquis ite +clear ance +y en +par i +ev o +à º +modi fied +app lying +imple ment +disco vering +chap man +indie game +dis k +crowd funding +mach in +li vel +sty led +âĿ Į +ma king +rehear sals +nutr iti +subscri ption +and ro +cre ators +car ries +ky lie +cam den +appren tice +tax pay +c ca +tuesday thoughts +pis sed +er man +dete c +freed om +mer i +.. ! +psal m +sun light +per spec +be ings +book store +rock star +fun ctions +p ence +fav es +z n +obam acare +sp ill +coven try +pi geon +pi vo +ba it +kol kata +av al +don or +wa h +privi leg +tra ditions +rajas than +ten ess +portugue se +yn es +tack les +de fic +tor n +pol ling +thor ne +in a +bened ict +bar ry +cal ories +ver dict +save the +nor ton +off ice +main stream +impro ves +fr on +respon ding +real tor +scotti sh +de clar +r l +shi v +supp lier +re sting +swee ts +qu i +. âĢ¦ +whit ney +startu p +thank you +teach er +h alls +ha ve +hand made +pro ving +quar tet +ro chester +li an +virtu al +mend es +of icial +mid lands +x box +meas uring +o vo +accommod ation +bri des +collegi ate +intellec tual +in car +ni ag +ðŁį · +sf w +coco a +co ats +civil ians +presi dency +mat rix +sweethe art +tri athlon +wag ner +ra dic +plann er +the o +execu tion +k um +the walkingdead +sc ar +ro tation +blo gging +bom b +re son +bb les +st are +assi sted +e do +brand ed +war nings +thor pe +acknow le +satis fied +sho res +ri d +dor a +phys ically +bi gh +appro ves +ha h +ric al +vers atile +pret end +lu m +ab hi +ye e +sp it +ãĢ Į +dj s +ash tra +j t +ven ues +gram mys +cy clo +tr acker +over watch +repl ica +el yn +nr l +lind sey +hom o +ballo ons +kitch en +si s +am os +ende av +ðŁĴ » +a rec +thu g +hoo ked +hr c +new york +bur gh +americ as +patric ia +ug u +ap athy +ha st +psy chi +cor k +petro l +ðŁİ ¬ +ak u +po pping +psycho logical +au x +g ma +cad illac +wa ste +auth ent +bri stol +nam e +que er +to ber +jer ry +com in +ch ant +privileg ed +op ar +lo ser +tex t +mar ker +stri es +equ ally +ak i +christ mas +gare th +ble w +em ma +imag in +se als +che at +conditi oning +j ana +ren s +dar ies +o asis +disc ounts +coun cil +i ka +shir ley +vou cher +al ps +w x +q r +dri ft +attemp ting +ut c +Ø ª +gonzale z +m f +jo ker +paralle l +pa re +aspe cts +proce du +n p +am a +rale igh +bright en +gu ire +radi ation +cre scent +ho b +il le +str and +v ore +n ard +che st +di wali +av atar +al der +d ling +pa thetic +ðŁĴ ĺ +spir it +jor ge +film making +ðŁĻı ðŁĻı +challeng er +b j +down town +ht ml +ade qu +twi sted +in ely +( ' +wra ps +oper ational +y ne +n us +mag net +market place +health ier +snap shot +dam on +inter ven +fe derer +ow ls +biscu its +j p +ro deo +blue berry +lec tion +fron tier +summ ers +re yes +pede strian +go l +caf fe +refur bi +bou lder +me ghan +speci alty +la ss +e i +suspec ts +appro x +rr r +ra th +st im +cru shed +he d +wh un +lo af +cr ore +river a +gene tics +so ck +wa sted +ny pd +answ ering +do ve +bel la +ol in +du n +fi ji +pre tty +spar kle +y un +j d +euro pa +li fts +am ber +mu r +te k +boy d +roy alty +in do +ri b +go tham +ti est +inst alling +ke mp +the photo +cos mic +) )) +whole sale +loy ment +eas y +su ing +sett led +af p +pro ver +suppor tive +re es +ne ath +deli ber +c é +wel come +pic oftheday +new born +pat ty +sun s +si est +fl int +diffe rently +spo ilers +troop er +g ins +cor y +look out +equi pped +ta pe +to by +resear cher +u sh +ke yes +al ma +induc tion +k w +k har +sl ick +bri de +e ur +cra ving +book ings +ch es +tr unk +vern on +sp her +cryst als +rel 
atively +pom pe +uni ons +val ley +par a +w ant +ok c +de af +ser gio +len non +sh ay +cr a +v at +he e +t we +liqu id +pol y +ðŁİ ģ +b ent +be aring +motor sport +bar be +te sti +han i +fin ancing +astron aut +water colour +ri sh +comic con +gar t +wr ong +ber n +it an +ste pped +fil ters +c low +me x +dem ons +all o +expand ed +comm and +et ers +go ats +si ri +y r +pot tery +mari on +i le +el an +san to +person a +du ke +hom eless +li ghted +wheel er +chang er +cab bage +sur real +ham burg +sma shed +str an +k not +i art +ob i +be dro +di al +th ick +b ingo +fu s +vacu um +con ve +ati ve +accur acy +accoun t +re fer +ri z +spider man +ban a +r ite +u b +ab s +medic al +lin k +si em +> >>> +be tra +g lowing +re actions +pupp et +spa ghetti +ang s +re medi +pray for +roy ce +char lotte +£ ï¸ı +gh et +affe cting +ro de +soci alist +mo ses +az i +o it +re porters +cd t +ap ing +s nat +minim al +wa ist +sie ge +>> >> +ri g +schmid t +h are +ec a +thor n +he mp +es the +cly de +th a +don ut +moham ed +ling erie +le gg +carpen ter +perform ers +de a +imag ined +cur se +la sh +ct r +agu a +ro ar +gr i +ro le +j fk +resur rec +roosevel t +maril yn +sm alle +will is +wa ited +char ities +the res +li k +origin al +car i +c ough +cru ci +la gun +contra st +k ou +arm our +re moving +t ent +maz da +bri ghter +thi ef +cor ner +tequ ila +buzz ing +al bi +p am +az ure +disc oun +pixel art +possi bility +ham ont +tra des +bu da +hi ve +vers y +fin ch +tran spa +em i +terri fying +in qui +g ba +sub stitu +collec ti +plac ing +cin dy +k ann +pa tho +diamon d +mour inho +guine a +anthro po +air s +pu mps +ì ļ +pas o +cur ling +an ita +resi dency +ne wh +jo on +cigare tte +que ue +ex trac +gam es +spl en +ex press +public ly +bon nie +tribun e +ba ek +reason able +c or +timo thy +she eran +Ä ± +f dn +su tton +concentr ation +carav an +x avier +al ger +cy lin +freder ick +ner ve +pe ak +lettu ce +j ail +pre game +kav an +up graded +eco logy +squad ron +gra pes +goo g +pa stry +ðŁĹ £ +ãĥ¼ ãĥ +mil ano +awa z +presen ter +ðŁĮ ¿ +her d +king s +tem plate +fl our +h v +k ley +i ya +spe c +at er +frankfur t +co ch +tex ting +del i +communi st +regi ment +ele anor +anticip ated +ðŁijĮ ðŁı» +thephoto hour +ran o +survi ving +simul ation +daw son +ar in +aqu a +m or +âĢ¦ . +cin o +ira qi +sh az +dun dee +we s +dra u +hann ah +s news +occup ation +ste en +x m +ang les +sett ings +gur u +kno x +or ca +shap ing +w ent +dr illing +zz ie +br i +kis sing +fin d +ma ine +âŃIJï¸ı âŃIJï¸ı +ðŁĮ į +lar ry +bu sted +ta vern +acti vely +- " +replac ing +no d +un lock +. 
" +âŀ ¤ +affili ate +to w +l n +happy newyear +di f +j m +green wich +contro versy +daw g +con dol +sav annah +compens ation +touch down +te o +amb itious +embro i +convic ted +iart g +bar ack +tr ance +testim ony +au dition +thum b +my ths +be x +que z +orch id +den y +entit led +hoo d +gr ant +in box +blue jays +r illa +smalle st +bur den +in famous +divi ded +boun daries +t ter +el t +wy oming +be verage +me sm +one ws +budd hist +y ana +as sad +is ms +bar rett +predic ted +back to +tw it +e there +cap tains +escap ed +ay o +lam borgh +gard ner +la ps +k al +adverti sement +insec ts +na po +am en +ac y +r and +g k +te h +k athle +tri dge +pan cake +at ro +pyram id +bu la +paral ym +gau ge +en cies +tom y +biscu it +but cher +quali fier +coun ty +ke i +po ols +dar ker +should ers +ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +sp re +( " +writ ers +g m +ðŁİ ĵ +k nit +hu ff +mt b +philli es +o st +den is +g art +licen sed +inter face +ex cel +d well +from the +co fficial +az zi +appear ing +fore st +n ana +ke ith +manufac turers +beck ham +) ? +e se +col ony +delic ate +ut ter +mc in +transpl ant +pre ferred +par d +ari e +hu b +po ds +perspec tives +pic t +del u +app er +be than +p mo +crimin als +femin ism +sh ack +circum stances +fel las +prote sting +wa x +sugge sted +t ator +dre w +om ni +fa ke +kath y +re b +del ine +ber ni +mi sty +ðŁij © +er able +break through +men swear +millenni als +chan yeol +la z +inser t +rep lies +phra se +n x +ihear tawards +audre y +gran ite +rac ec +ori e +ter ra +innov ations +britt any +at eral +pe ar +bio logical +sh ments +institu tion +m sn +frequ ency +d man +neg lec +t f +ste fan +fox news +ty po +comm s +sequ ence +car men +wh ites +econom ist +exe ter +se um +re sorts +cas ually +bun de +divi de +Ø ¹ +ga g +cre ed +reti re +cau cus +rapi ds +wrestle mania +tul sa +sunder land +fundam ent +o di +yam aha +v ary +intri gu +el se +be acon +an gie +tra ded +tran sm +g ents +kn itting +gal ac +ðĿ Ĺ +u to +sea side +hol t +re rs +far go +train ers +mon soon +b ale +sou ght +mad die +h w +co li +fr an +fav s +ðŁĴ Ķ +int ent +r ally +s bs +lemon ade +barack obama +bre ad +stick y +explo sive +chel ten +t j +as soc +ram en +hom ies +v log +mi ster +lor d +âĢįâĻ Ģï¸ı +aly ssa +sketch book +ru mble +cat ch +migr ant +discipl ine +un likely +chronic les +fl ora +sl ams +am id +s boro +coo p +ju mps +tran qu +mel is +sof ia +en ri +gab e +sy ri +nicol as +cha i +w v +be cky +foo ty +ta o +suppo se +ðŁĺįðŁĺį ðŁĺįðŁĺį +plu sh +ri sh +ðŁ¤ ĵ +k ha +satur days +ac cent +he c +lim it +carl ton +wi red +taylor swift +ðŁĺ ij +sq l +har ro +recipi ents +g at +go p +th of +amaz ed +gh an +ðŁıĨ ðŁıĨ +por to +cla re +di stant +na c +ohi o +ðŁĻı ðŁı¼ +mt n +anti bio +dino sa +me sa +par tial +b v +lear nt +lov ato +questi on +ex tract +gossi p +gi bb +niag ara +ðŁij ¨ +displa yed +so oner +ste vie +nug gets +ml n +bro m +tur b +give aways +stu pi +bl ink +c ili +conven ient +mo h +vi ve +f ric +cau se +cham ber +cu les +ne arest +is se +small biz +t j +canadi ans +smar ter +bra sil +ra re +que tte +w ha +cand le +at omic +ðŁijį ðŁijį +warri or +relax ed +stri ps +ne ur +k ka +r fc +jen sen +reco vering +respon ses +sal am +ortho dox +acti ve +ell ers +n it +âŃ IJ +metro politan +centu ries +vi da +gra ding +transpa rent +sim ple +do ts +superint endent +elev ator +autom ated +red skins +ima m +summer time +jona than +ge aring +michel le +confl ic +m ice +to te +publi sh +pa x +) - +na iled +á ´ +tele scope +ser bia +ba b +ape u +st ically +sen ti +r ats +isol ated +grou p +hat red +paranor mal 
+stan ley +ali on +safe ty +l s +ठ° +nex us +alexand ra +mas ks ++ + +tr on +au k +brother hood +brow se +mix es +sim one +mu sk +appro ve +lo la +ex p +per th +fu turi +un seen +d m +chel se +sc outing +o we +portsm outh +k ram +mi ze +di spen +su p +d lc +adver t +tere sa +is le +cy cle +met all +shi elds +marin ers +ra z +ing en +fun d +an go +jon es +o ka +mad den +broc coli +domin ic +situ ations +mer o +cric ke +puni shment +d b +sha king +ðŁĺ ļ +m q +ari ans +le h +cla w +we ds +d ure +ni el +j elly +gour met +tra ders +le vi +w ages +kne es +wi se +heaven ly +avi d +melo dy +z ack +ban anas +apprentic e +pro p +fun ny +o de +respec ted +me gan +fe wer +dra fted +med it +gra pe +us army +cru sad +vo cali +prepar ations +non sense +us age +th r +ro th +wiz ards +insi de +promo tions +mon a +red sox +si g +eleg ance +ch ia +univer sal +ãĢ į +ra ja +un ga +pol lin +filip ino +ak a +t sun +ik on +bi king +decor ations +z ac +cade ts +hum our +ag m +re ppin +vac cin +elo ve +u w +dia be +galla gher +az er +do l +a while +pro minent +wel sh +t ann +' ) +bi en +wa g +in al +c wc +wic ket +ur st +q anon +x e +out door +dun n +star r +co logy +ric ky +u efa +reb ounds +s music +inf ant +ðŁĻ ĭ +so p +u mber +hand ing +beg in +sor ting +ha sh +sp ati +re k +buda pest +black hawks +dele te +ro m +can did +auth ori +de bris +spe cul +inter section +marri ott +im ran +ðŁĺģ ðŁĺģ +cru ises +ram sey +rafa el +aware ness +vas cular +beyon cé +ru g +ðŁĺ Į +festi v +ar am +s able +bas il +p ill +flo oring +un beaten +implic ations +u f +w ound +for ge +poin ting +po ts +popular ity +ðŁijı ðŁı» +mani pul +s lots +deb ates +abs ence +ver mont +never forget +wri st +gl oria +ren ce +hu sk +mel ting +ðŁİ Ł +br aces +tim ely +transform ing +am ps +ma k +po e +ah an +gener ally +nd p +ale ppo +unic ef +pro fs +nor d +ma sk +jackson ville +v v +sh ells +bloom ing +oper ators +char coal +ne ville +ma gi +chi p +sam a +ir an +re forms +accu mul +ru e +æ ľ +web sites +ga on +devast ating +sto s +glaci er +ra pp +chipot le +pr a +or ous +rom ney +seas on +decor ative +c isco +dit ch +compla in +ll o +assu me +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ +n els +cent ric +ft w +car rots +tat a +can ter +per ience +li ers +demo s +bl unt +oper ate +reserv ations +le ah +sub stance +di son +an te +elec tion +v ue +squ are +non profit +ca a +f su +y am +ãĤ ¤ +v ladi +comple tes +mar i +philli p +ne ill +er as +ka it +men do +mahar ashtra +g p +dan e +provi dence +ther apeu +juven ile +me mo +in corpor +aa aa +seven teen +teen ager +à £ +or ns +wi de +cu teness +tw d +ff les +bar a +com edy +over time +y az +bar on +unemp loyment +ðŁij ĭ +exter ior +den se +cent res +match up +history month +artif icial +qu it +e sk +war n +cr itic +j af +ðŁĵ ² +inform ative +fu els +recy cle +nam ing +stri pe +sol ic +mole cular +dee pi +con vo +s sel +na e +de scent +ti z +accoun tability +ter ry +r ito +sl ay +em o +dem ol +sens ation +co v +tor e +round table +y ol +excu ses +ॠį +tur quo +hh hh +pod casts +cele b +me ssi +li o +man n +contribu ted +u z +gener ator +ele ts +veg gie +indu l +en suring +detro it +pun jab +tran spor +instru ction +ad d +por cel +pan eli +cir cles +persi st +clay ton +sp n +dog softwitter +is nt +sp r +retail ers +p w +hun gar +el ena +mon aster +gu atem +je ssie +an z +ra shi +fle e +car ving +fau x +l al +hen ri +d jo +du ll +s ana +lar a +glo be +cri mson +com pass +pau se +na b +lion el +ba ths +u fo +invent ory +sin gh +sat an +ðŁĩ ¸ +ce ments +in form +gener ated +bi den +av g +tas ks +de er +sa u +ja iled +pa stel +sc 
c +na il +steel e +per is +lamborgh ini +pur sue +mar gin +u ch +bo sch +dra in +cl ara +bo m +lat ino +web ster +rose mary +r ha +s oun +billion aire +not ch +percent age +con or +' " +hom es +earth day +h ort +big gest +di sin +wal ton +edit ors +im ma +om ar +equi valent +pharmac eu +ah med +cam eo +han ni +under rated +ge ment +micro bi +v oo +honor able +obe sity +âļ ¡ï¸ı +limer ick +invol vement +st agram +boule vard +bur g +blackand white +liber ation +fi ve +inter im +sm m +rival ry +cap abilities +stat ements +thu mb +ve d +sw ans +bar ber +e que +seren a +hel m +noo dle +sam pling +n awaz +sing le +thunder storms +sh on +in ev +ë ¯ +to pp +orch ard +bi an +ðŁĺ Ķ +door step +salv ation +marke ting +r ons +cle mson +ra vi +in take +stand with +sin a +ha iku +ple y +elector al +ph illy +la ys +electr ic +cap turing +u pp +er gy +believ ing +cul tures +es day +inva sive +ed ed +spee ch +end ur +viet nam +boy cott +pe de +deli ver +ðŁĴĸ ðŁĴĸ +mer chant +st ir +den ies +poc kets +o ti +cu ddle +ro land +mm ed +den ed +lear ners +hoo p +sour cing +h acked +di m +environ ments +ben son +jud icial +wor cester +pear ls +govern ments +arri vals +cor ners +tun ing +la bour +y m +or dering +le wi +i fe +hygi ene +thou ghtful +indone sian +campaig ning +princi ple +assau l +ru bb +at v +wil ly +en tre +il i +ph on +du ties +âĻ¥ âĻ¥ +sn akes +lo op +am ar +conver tible +bon ding +ment oring +max well +ethere um +destro ying +ax is +ca iro +fin nish +sho ck +ðŁĺ IJ +cal eb +com a +pe dal +co re +contin ent +el son +temp o +helsin ki +ac p +tack ling +st ated +bl a +dou b +sma shing +a ja +camer on +disru ption +warm th +being salmankhan +bullet in +o de +syrac use +ar an +mc gregor +bul k +an ton +confir mation +sp ine +im ran +instru c +jac ks +chi o +pal m +str e +embarra ssing +un t +elimin ate +to ss +c ise +a ws +oni sts +sh inee +jo s +ho se +li vely +opp onents +mo vements +recogni zing +sandwich es +sh akes +exerc ises +se at +profe ssion +merry christmas +lu gg +adopt dont +mar vin +byr ne +un le +he t +ku wait +rah man +aspe ct +humb led +gen es +f and +long time +) ; +cam pu +an gus +ðŁijį ðŁı¼ +q uran +sle eves +s lic +¸ ë +twel ve +your e +i ke +go gh +b st +dic tionary +reflec ting +to on +yar n +em bed +ðŁı ´ +re serves +floo ded +ver iz +du sk +estab lish +pro li +au d +ritu al +or bit +declar ation +recor dings +cam o +cas sette +good luck +cu tter +bo p +b ho +che ating +paci fic +ma res +tim er +col t +tr ous +tomor row +han sen +ci e +w ang +ban i +circu lar +ac ute +far mer +co ys +p se +ir ving +w j +haw kins +b ison +ur day +cru ising +o te +k ath +whi stle +your selves +ant is +sla sh +thorough ly +ke sh +ser ie +ex em +en ig +guil d +sh red +ho gan +ap o +ä ¸ +pu zz +ne tball +au ssi +panor ama +ws j +av is +ar ming +hum ph +brow ser +cri es +fo ggy +mat te +ðŁĮ » +it er +tal lest +by ron +cap tiv +je su +any ways +flag ship +p ton +we y +fay ette +financi al +f oul +solom on +jenni fer +cucu mber +ar gue +tex tile +wrest ler +john ston +pa stor +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ +cac tus +edi ble +re served +ric hie +met res +ingredi ent +h ella +un to +ch ol +cele bs +po ets +gra ham +hay den +coinci dence +b aw +communic ate +flet cher +/ - +tole do +ecu ador +coun sel +s laughter +line ar +at p +os u +jo el +ev ed +conqu er +ru stic +plic ity +recogn ise +room mate +cr acked +jas per +ph er +ðŁĮ º +wo ven +mo ist +ff c +ste ering +ni sh +stand ings +frequ ent +ar di +haz el +as msg +bau m +d art +si dd +nat h +ch ero +card board +c ss +n sfw +pa ir +ðŁĺį ðŁĺĺ +occur red +homeless 
ness +mal one +ph e +xi a +pad dy +decl are +theat re +b f +per sian +ta d +ax e +susp icious +lam b +mu cho +sen ior +st as +k ite +st ing +gra d +k af +wat ering +Ø ¯ +spi ral +th ms +educ ator +jer ome +of c +clo ck +su l +pe mb +.... ..... +park way +de aux +restric tions +m ons +need le +e j +le agues +water melon +am an +pl enary +max im +w ab +coming soon +bry ce +vi gil +super market +fortun ate +turquo ise +presi dent +li v +inter ns +feel in +fix tures +stun t +st aged +premi eres +lo k +prac titi +shor tage +log ne +ve c +con cor +roc ke +li g +com posed +syn thetic +di p +cam ila +ch is +j ou +su san +eye brows +supp lement +satis faction +moham mad +ti bet +house of +pu n +as sam +shado whun +psy ched +se duc +mand atory +her bert +sc allo +stream ers +proto col +block buster +produc es +sch nei +lau rel +tri be +time hop +pl a +mod elling +tv time +mtv stars +wi dow +me tric +ch am +con do +flow ering +ale c +d ms +inten sity + ¨ +mccar tney +islam abad +k b +f fi +ph al +anal og +f ond +h acks +positi vity +treat y +sub marine +conne ct +sel en +categor ies +cu b +organi ze +si k +quote oftheday +remin ding +am or +loc king +ðŁijı ðŁı¼ +comp ound +et te +b out +rec ur +fe rence +mi zz +tren d +hip ster +for tress +forth coming +preli min +o dyssey +ang p +del ici +even ings +ðŁĶ ¹ +i q +d w +da ir +kathr yn +christian ity +moon light +ha b +wh oo +f bf +se th +genu inely +pa x +char ity +deplo yed +b nb +bu cs +ju dg +con ge +plant ation +im press +car a +sc lub +sco py +land ers +compla ints +b ama +re build +x y +real ism +sh our +le in +brac elets +mer a +assas sin +an chor +ðŁijĮ ðŁı¼ +lin en +con fron +chronic le +comm ent +cat alog +il les +gor ge +me try +jung kook +love my +sent in +se em +fit ness +alli ed +ts man +digital transformation +pr an +lo ft +min ton +alden richards +en vel +cher ish +certain ty +zz z +rhin o +per kins +en rich +cape town +ome ter +sec tions +ske leton +def enders +ðŁĺ Ŀ +pen c +bri t +ja h +capital ism +ðŁ¥ ĩ +baz aar +re me +ex t +kk k +conver t +stor my +b ye +kar an +chry sler +ad os +pre ssed +syn c +ation day +dang er +bad ges +refu ses +em powering +ly m +ex ports +adoptdont shop +ðŁĩ ¯ +th c +awa ited +focu ses +fin ed +o at +haha hah +âģ © +n family +fi ona +luck ily +thr illing +ty ping +out break +di es +he u +craw l +ne sses +o ath +scri pts +gee ks +ðŁIJ Ŀ +p b +mathemat ics +al is +________ ________ +gymna stics +acti vism +recommend ation +gre n +wa in +cour ty +n apol +cau li +hor nets +g als +jo ckey +dir ty +at ar +enor mous +pe st +greg ation +an os +ii ii +def ends +black historymonth +at x +mb c +lugg age +wit ch +co b +la sts +cu m +gg g +ba thing +n ar +ce bu +ðŁį ĥ +navig ation +min e +re jo +ðŁİ Ģ +gif tide +re ta +use less +pu ll +defic it +al lu +ati me +it v +tr illion +pu e +ac ies +proce dure +l ori +jen ny +c ad +ul ously +dr ac +promo tes +ing the +can u +woo hoo +na omi +zar dari +ts u +be ir +sd g +le ver +we ber +ab ud +lun d +crow ded +deplo yment +ter rain +ken ny +ho f +witne ssed +lo ch +j k +bul ly +w ren +poe try +do ff +ww i +mo red +din i +cul ture +promp t + ¥ +maur ice +to pps +r m +cor respon +ab out +jewel s +gi br +eag le +ðŁĺĺ ðŁĺĺðŁĺĺ +l ending +sou ven +ç Ķ +contemporary art +establi shment +j ong +âĢ¦ " +gat or +patri otic +mc coy +v ape +human e +feli z +coach ella +re posting +ste als +fu ller +n ering +at ra +( - +bla ke +he ather +wor ms +discipl inary +rede mption +y ard +am in +" @_ +d nc +t ds +k appa +ne wark +comm its +spe ars +j ams +t and +msn bc +inter medi +aim ed +at ic 
+teen th +observ ation +kash mir +kavan augh +ou l +san francisco +re u +bel ated +cho w +pass word +st ills +deta ined +sar i +day ton +dar ren +itali an +ar th +amu sic +ar bit +w m +v m +he m +dou g +my r +a sho +pre v +vin d +bra h +sta g +ภµ +pre views +gu k +con taining +leon ardo +sad dle +ru shing +st av +lon gh +gam bling +ve gas +reserv ation +end ale +bal a +fl a +vari ant +he dge +bulgar ia +nat ali +we aver +sol st +encoura ged +ap c +as parag +ne st +cycli sts +fe l +ìĬ ¤ +overwhel ming +pey ton +j it +a post +mb le +ble eding +neighbour hood +a very +expre ssions +mac donald +gi gs +mon ds +illu sion +n ct +cam ero +over head +my th +ol y +vi o +et v +lau rie +unve iling +pri or +con n +iron man +di ff +day in +crit ici +con go +re vision +wal e +direc tor +p ines +black pink +gar ner +cur ated +manit oba +h ac +common ly +bar ton +.... # +mor tality +live smatter +philos op +shor ter +con vince +fre ak +vend ors +insi ghtful +el ly +sens ors +e led +s berg +weight loss +u kip +sp ur +priv ate +qu a +ss c +, ... +supervis or +advis er +amaz ingly +less er +at es +mah on +oooo oo +sar as +pmo india +waff le +un ders +toler ance +sculp tures +her sh +kno cking +smo ke +cathol ic +gri m +tra veled +fli p +ge off +dinosa urs +sle pt +scar let +ok i +compla int +ob sc +nam i +la g +cross fit +u fc +mc cain +refe ree +sad ness +pen ny +li eu +mo de +ki er +vol s +w is +el on +she a +ba o +son ia +cla ire +em manuel +moist ure +di gest +vi ii +t eller +ch on +access ory +night club +foss il +aw an +hu sky +ab original +brand on +ffici ent +cou gars +ste d +ad mitted +igno red +content marketing +ag as +v ase +execu ted +negoti ations +she ad +n and +tab lets +go th +ts al +d fw +on ep +protec tor +sp ho +gaz ette +andre as +ss er +comp ilation +ha v +contain ers +bro ker +soc al +porcel ain +hy uk +air ing +ðŁĴ ° +publi sher +scen ario +spart ans +re viewing +itu des +ed el +pear son +ba sh +mau i +a ad +ðŁĮ Ĭ +li u +ul ate +program mes +fav our +web design +real ty +motiv ational +cro sses +' ... +bus ch +adjust able +ar jun +mist ak +dimen sion +pi stol +weigh s +en y +unve il +indy car +gor don +f ade +fran ken +qual ities +bet t +loc ate +ker r +sp c +confu sion +ne e +luck y +bas es +dep ends +fire fighter +ol a +re t +mar oon +ðŁĶ Ĭ +w am +defin ing +whe at +bi l +é s +b hai +psy ch +ta u +ic ans +thi k +ob ile +inspec tor +ìĨ Įë +ill on +go s +ev angel +fa i +si st +voc ation +bur ge +chi stan +renew ed +enthusi asm +en ting +ag ri +ike a +m sc +aero space +sens iti +memo ir +hosp ice +co caine +der ry +mechan ics +Ħ ภ+tin o +reduc es +collec tors +in justice +supp re +v ana +ab un +nap a +su sa +os lo +e ff +en core +lic ence +ched dar +z al +moun t +ðŁĴ IJ +threat ens +!! 
" +archi e +fu tsal +scu ba +jo s +gn on +se xi +s official +compar ing +domin ant +tof theday +fa it +propos als +gi ft +y as +cn c +l r +ha b +reser voir +beli efs +gener al +mar ti +t d +est e +ì ł +wi l +ðŁij ¯ +ðŁĶ « +sp x +et work +excer pt +e instein +hir o +sil hou +team ed +per ception +corri dor +mental health +hin ts +ben ny +induc ted +sw x +wi desp +spe ak +cher yl +dru g +ðŁĺ ķ +h f +asparag us +myster ies +fitz gerald +off er +therap ist +care er +dam aging +ts d +per u +wei bo +y ay +phoeni x +disc re +mac book +bar ker +stig ma +sp read +roc kies +kang ar +bri dg +pa i +bi shop +ta iled +capsu le +ðŁĴ ĵ +ge of +roy ale +short listed +o ste +ash amed +ch app +key e +cl a +screen shot +austri an +nati ve +en ight +juli et +michel e +ðŁĮ ´ +travel ers +pi l +football er +win chester +ðŁĻ Ħ +azer bai +gold eng +organis ations +interpre tation +predat or +ofthe week +lo gan +pok é +mari e +cal la +t nt +cin de +ge tic +fit fam +gra v +ow ens +ðŁĮ ± +shoot out +sal is +commissi ons +co he +p tic +ni xon +hi a +amb ition +mar ine +cruel ty +t k +cru de +sal ty +jim a +mon go +ir ony +on wards +arre sts +strang ers +ig er +cycli st +ra g +exten ds +tra dio +bour g +mo i +el la +e able +lex us +au l +der a +histor ian +mor ton +ti ff +man ner +ko t +d k +po inted +mar qu +a an +en ey +du blin +on poli +em ili +secre t +fl o +âļ ¡ +ba j +ste ep +accompan ied +rum ours +dev i +purch asing +fi g +pu b +sch oo +autonom ous +go alie +x ia +autom atically +re vers +ter o +fu ku +titan ic +shoo k +sand als +see kers +exc av +nor dic +bigo live +ba ke +r att +z ak +ne p +ðŁĺ ¤ +cand y +billi ons +book worm +pp et +à ³ +sur faces +sc ars +phil ip +do gg +ci gars +co te +transl ated +cur ator +sin dh +han gover +bre wer +on es +el ton +ðŁĴª ðŁı¼ +mar cu +elli ot +righ te +di oce +ru ss +rail ways +grand son +as cen +apo logy +awa it +mob ili +re spir +parti san +oli vi +stri ke +yo o +white house +expre ssed +pu ps +bed ford +cul tur +fro gs +fly ing +cav ali +c ds +fri ger +street photography +re solve +tali ban +kan g +cru shing +ju m +ðŁĺ Ĵ +william son +tan g +cur ly +t man +veter an +fa ire +artificial intelligence +un anim +pre n +back drop +fr ances +oc cer +doro thy +work ing +ar thr +conver ted +day light +serv ant +pad dle +compla ining +thir ty +nad al +ak u +ibra him +ad dressed +p iss +green house +batt alion +si mulator +out lets +embroi dery +ðŁĵ ± +fis cal +ger ard +sas sy +ðŁİī ðŁİīðŁİī +vent ures +mer it +public ity +ðŁij Ī +sophistic ated +c tu +conven tional +condol ences +isra el +tra dition +ar an +te ss +gla d +ðŁĺĬ ðŁĺĬ +correc tion +ge on +am d +or ship +be ast +ch ment +ì ŀ +nic o +wk nd +wel s +cushi on +beli e +vo c +idio ts +under neath +pu ma +corn ell +en ation +lu l +swa ch +ab ig +u rer +mi e +form erly +ca f +er nal +chor us +juli us +sen ator +âľ į +wh ir +salv ador +ph d +uni fied +boo ster +graph ical +w rec +son ny +mi z +dere rs +s all +ven s +tusc any +wi d +y ong +kur ds +w az +trol ls +mac ro +cat urday +pre ssing +sa sha +cent ennial +gu sts +em c +be fore +den ise +cu st +ðŁĵ ¢ +lo oo +base l +eng land +y olo +ar du +manife sto +do ha +ì ľ +kni ves +bourne mouth +bi bl +bar b +al icia +Ø © +com er +cycl one +g it +ane ws +character i +vent ura +in tra +sf giants +hu t +be a +dar win +ell er +al v +re ese +bl y +kar an +conclu sion +man ny +fla kes +unite blue +nad u +co pp +ed ges +lanca shire +i als +o tta +philipp e +l ent +che e +ment ors +festi val +an ism +compli mentary +r j +pu g +d ine +we i +cli ffs +sar my +ti veness +treas ury +il and 
+after math +rabb i +ou n +bou quet +herit age +zi on +sur render +shen an +in ks +kar l +gh ty +pol icing +exam ination +ce y +per su +measure ment +hydro gen +lu han +âłĢâłĢ âłĢâłĢ +war i +о Ð +j y +fow ler +mis h +al fre +âĺ ij +bb naija +cat alogue +recogn ised +sa ver +hu skies +col in +mun do +si va +p ng +discoun ted +man utd +fre sno +de vin +prelimin ary +tro phies +pla stics +du g +pro cu +indi go +g ard +dy lan +pit ches +ground breaking +in son +bl ac +an thology +f h +expl ic +r ard +admi ral +so chi +la shes +splen did +en vy +ad v +sex y +festiv ities +stic king +bi b +thr ill +op p +ari el +botan ical +endur ance +fe males +br icks +vat ican +black pool +ber mu +br ough +roll er +bi d +sue de +sloven ia +mm ing +ml b +med alist +di ans +rehabil itation +ne on +s go +li thu +ram os +z ed +pi anist +inten sive +broad band +stu dy +peter sburg +lu ca +ah hhh +phys ician +dill on +tele com +gri ef +mu n +ac ro +si ded +s ly +blo ws +classic cars +tri um +ar gy +? : +h ri +marsh mal +âĢ ĵ +to pping +war saw +tran sc +preserv ation +b av +re friger +experim ents +ä º +gl it +sli ga +g age +fac tor +flav ours +br ony +sp o +cook book +carri age +aw ay +ny fw +on ian +w g +simp sons +ro lex +ðŁı ¿ +cro sby +ãħ ¤ +cre di +syn dic +pu bs +ali fe +poor ly +mac ed +ðŁĺ ŀ +behin dthe +w enger +n ats +ðŁİ Ł +rubb ish +procedu res +typho on +opho bia +er do +fu el +vi era +bu mps +millenni um +new zealand +lec tures +it on +mil ky +respon ded +ê ° +landsc ape +.. @ +bo ther +âĸ ¶ +z hang +huawe i +tu ition +s worn +in u +y or +pa olo +au ditions +ab il +malay sian +ho ps +fe athers +mp le +au ts +ã o +boun ty +ic he +ì ĺ +sh q +pin ot +ge ars +disapp ear +video games +t na +alzheim er +ðŁĮ ŀ +a ji +under wear +swit ching +sign age +o scar +ec on +dro w +cl int +pl ated +gun dy +emb lem +ho es +ici st +nel ly +juni or +road show +miner als +at le +alexand ria +ac claimed +v ell +shi va +ad he +en ne +amne sty +h ounds +councill or +ðŁĴ ¦ +aes the +part nering +influ enced +mag no +fl are +extin ction +civil ian +maje sty +va il +law makers +rac ks +mc c +ori an +sp ices +er rors +may er +co ca +pa i +s ooooo +reti ring +ba thro +ðŁĻĮ ðŁĻĮ +âĸ ª +su f +endor sement +buil ding +broo ch +pal la +arvin d +ag ent +kar ate +r hi +c tv +ta ine +um m +ba x +reig ns +uni of +enterpri ses +adel e +fla ke +at tire +bru ce +ba hamas +gra vy +sa in +che ek +tri vi +lo v +e en +bb lo +lady gaga +itt a +. 
"- +du stin +observ atory +eigh th +bloom berg +kh s +f cc +gi st +commemor ate +ve er +sexu ality +ed c +nic ole +vac ancy +u ser +son a +:' ( +dipl oma +t end +up grades +Å Ł +jura ssic +cardi ac +dr s +widesp read +à ł +dail ies +vend or +sim plicity +wi der +len ses +supp lements +de pos +ob served +vin es +parti ally +renew al +collabor ate +ali g +fin ity +ph u +zz y +pe tit +ðŁĵ ħ +z in +i gu +sm ack +fall on +ðŁĵ £ +back wards +comp onent +o so +compati ble +bin ding +zur ich +thom e +w ounds +ly ric +fresh men +sne aky +fi bro +di et +emplo yer +in sect +h ated +sch er +raz or +n sw +boo ker +califor ni +av fc + ° +preten ding +pep si +al is +un titled +k art +grand parents +e the +o ck +lux emb +visu als +small business +abdul lah +min ho +su baru +h ra +reve aling +heart breaking +clar ity +am g +sl r +** ** +âŀ ĸ +recor d +ici ary +min ded +ye h +exce ssive +knu ck +icec ream +tru th +ev ic +ta stic +ant arc +ren dering +, , +mit t +loren zo +st patrick +bound ary +zi g +vo cab +osa ka +fur n +tu n +gu l +s ounding +blo gger +utter ly +g af +adv ancing +l cd +mar gin +lifel ong +solst ice +sh ra +wa its +ple ar +bre ach +en ligh +ad er +itt le +c ation +ho on +stu died +?? ??? +k ash +ev angeli +ps l +wei ghts +met als +ty res +tur no +wi e +car b +g ale +se al +sun ite +am ic +patter son +á n +eu ph +up stairs +quali fiers +khali fa +apple music +ìĨĮë ħ +vau ghan +al ter +cru iser +mu a +t ana +kat rina +id ols +spo iled +secre tly +fi bre +part nered +um es +gi ov +com et +screenshot saturday +k eller +fil tr +fe t +con way +pe u +bad minton +gi d +m ound +don key +bu ff +lea ther +lar gely +bro ch +int ments +am use +r k +sto ve +impac ted +con t +cr acks +prison er +bar i +contrac tor +ori oles +domin ate +pol ar +am elia +dr c +ðŁijĮ ðŁijĮ +vi st +su arez +injec tion +blo oms +ðŁļ¨ ðŁļ¨ +sti ff +pay pal +sno wing +thur sdays +goo se +we dge +educ ated +weak ness +de cker +abud ha +bree zy +Û Į +hope ful +o bi +rai der +gh am +de u +se ve +par tly +fu t +infu sed +mer ri +than e +some time +hu e +me in +cre dit +sli ding +ran de +cher ry +dead pool +sh ol +ar am +under wood +sky e +distur bing +m nt +poli shed +guardi ans +ha dn +pic asso +ari us +ak shay +ir ri +j h +happ en +la kh +dal ton +at the +s well +mar sha +re h +cour s +j kt +top us +serv ice +r ink +hack ers +dono van +hor o +tc m +may hem +cha se +dev ops +ken sing +sc up +sh ere +quali fication +c live +ton g +n ancy +mar is +der dale +ber man +cinde rella +jol ly +ci c +loo t +collecti bles +hom icide +g ge +epide mic +su ites +mu ddy +gi mme +e rec +- * +tal la +lis le +embro ide +ðŁĩ© ðŁĩª +veriz on +ve ctor +be anie +arti san +ga in +flo res +vi gil +u so +ðŁĻı ðŁı½ +grin ding +gh er +air ports +respon sive +shaf t +can cel +ceremon ies +e me +at ari +bru shes +eag er +bo hemi +children s +yan kee +ma a +suspen se +mor an +mac ar +sun flower +cre w +vo id +ke ar +fashi oned +jen nings +sunday funday +sub missions +me ad +her man +wa i +crit ically +le um +baek hyun +for cing +co bra +ãģ ® +acqu ire +al k +ge ology +pri mar +import antly +ire z +bunde sliga +curi osity +sen a +stric t +con soli +win ters +ven om +chelten ham +ðŁį º +cen a +t at +ba in +glo ver +under cover +as ses +car n +memorial day +am eli +i rene +ch on +syn thesis +spe edy +mitsu bi +sla yer +compos ite +under stands +pe w +inter rup +hen ri +mor row +an om +thof july +g lee +thre e +ðŁĺ ® +and hi +ch att +renew ables +ye s +trans fers +!!!! !!!! 
+bab u +du ter +lo ops +pe ers +o ilers +pau lo +ic ation +h mu +war a +mer cer +hom eland +fu ji +ale y +year book +re m +re en +ab sur +bo is +] : +caes ar +shot gun +kur dish +o ren +ra e +anci es +ty pic +f h +def ault +re plic +lu k +trans actions +r ys +infan try +ðŁį ¾ +cho w +chick ens +ba gh +wy att +ay e +gg i +bre ws +ed itions +mi ra +commen cement +pre su +peris cope +ic hi +guatem ala +zam bia +pain ts +wit ches +wan i +un dere +cro y +vo ws +us mc +hear ted +theat res +shu ffle +le vel +mul tic +squee ze +fer n +app et +post al +mal t +on board +ld nt +co o +s sc +k ac +ðŁĺ ĩ +sc rap +mar cos +deal ers +ann u +mill er +co ve +ul ary +vladi mir +be ef +th ur +pick led +se same +bengal uru +mo tt +kathle en +hi st +no tor +dr ank +du chess +snow fall +e ff +tin y +j n +sy our +speci alists +scot us +bay lor +eve rest +mali bu +pre m +harm ful +l ali +b ates +g ye +differen ti +and ra +geome try +el over +black out +== == +ko ta +inter act +asi an +la yo +samu rai +fi del +exhau sted +gla di +pd t +spher ic +anti qu +guit ar +stu ri +ho pper +ang le +f ills +sla p +mi th +rod ney +ong i +in som +pre venting +cassi dy +ap ho +ore gon +lo in +ham mond +contribu ting +f n +gar ri +ori on +comp elling +escap ing +aim ing +plu mb +bi stro +be asts +concer ning +bo e +do pp +shop local +stumb led +âĤ ¹ +naz is +âĢįâĻĤ ï¸ı +gest ure +war ts +us open +hi ggins +char li +hang s +bom bers +° : +fe eds +c ch +st il +nic ola +ðŁĵ º +clam ation +tro pic +af ro +ou k +expen ses +der rick +al ine +fa w +reg ard +im er +sat in +thi um +ry der +pear l +te ss +mm mmm +sen ses +ðŁĩ ¹ +positi ve +exhau st +occu r +nor ris +lil ly +is les +direc ting +yo fficial +count less +sam ar +on stage +flo ck +mir rors +arch er +mo i +k d +vi v +in os +si kh +le i +sen sory +br its +kno x +chest nut +op y +coli seum +z af +di vin +adap ter +:) )) +tem ple +ku n +hel mets +t df +gu ide +m old +o ids +lu ther +he is +monaster y +sp ree +k lu +brit ney +jagu ars +gre ats +c cc +ky rie +machin ery +cric ket +re ro +ab o +aspir ing +semi finals +ale ss +sig natures +var d +me th +her bal +hol den +king dom +ap or +reg gie +ore o +palestin ians +em mys +sec tional +ro i +ney mar +qu el +cu ll +l ka +haz el +estim ate +ul ties +go w +be a +purch ases +bel ts +protec ts +m é +gue ssing +bb o +clau dia +fr acking +jon ny +el k +cel tic +al mighty +ra je +courty ard +ig i +can es +ðŁĴª ðŁı» +bank rup +le thal +âľĮ ï¸ı +graphic design +vad er +penc ils +rough ly +dan te +m fg +const ell +cam el +j b +bloss oms +en to +balo chistan +cine mato +ill ard +jer sey +con sent +dent ed +con templ +sch er +hol i +lou gh +st our +a yo +begin ners +cur b +v hs +a jax +du ff +av eng +dom est +commit ting +ai red +cha p +hedge hog +disappo inting +freel ance +in land +char ms +ðŁĺį âĿ¤ï¸ı +ai sh +m x +buck le +ti dal +per mit +bo ating +ra cha +kend rick +b ello +b hi +ple a +estim ates +l b +apo logies +jay a +bb l +ast oni +inter state +main taining +el bow +mu p +ep it +ðŁĺ ¡ +viol ations +def end +be h +sl c +am ir +pur i +ti um +fi fa +blur ry +scri m +ðŁĻı ðŁı¾ +ma ple +rel atives +âĺ Ŀ +cho c +con nor +⾨ ⾨ +whi sp +list ings +ma ze +than king +ri dd +grass roots +shi fting +desper ately +gor illa +den i +ju les +stra th +g ley +ja in +bu ick +t anner +ðŁĴ Ŀ +ga e +pri m +it ors +n ano +separ ation +armen ia +bor deaux +ðŁ ħ +pj net +bu rial +e bon +glo ss +re new +gri er +spe eds +comic books +sym boli +pur poses +ãħł ãħł +spati al +no table +ci on +n ps +ho ffman +nor man +rt g +du sty +situ ated +tr an +k fc +em en +nic 
kel +hast ings +sett ling +gr it +l ena +w aw +art s +gu m +ca regi +le wis +sapp hire +rememb er +embed ded +t lc +bl at +serge ant +el sa +boot camp +bow man +photo graphic +pill ars +direction ers +classi fied +no is +ve er +barre ls +wh oop +ðŁĺ± ðŁĺ± +fe male +petro leum +medi a +e fc +poké mon +ठķ +enthusi astic +var un +pro files +pedi atric +acci dents +con rad +jan g +jo jo +ac or +ob server +l f +live stock +for gi +fo s +el m +an and +go e +c ere +avoi ding +gri t +om an +thank fully +scat tered +nick y +cylin der +chees y +di ver +mahe sh +cav es +ear liest +qu inte +subjec ts +b end +gul f +vocali st +glu e +pat ches +un stopp +sny der +demonstr ating +pi o +hor ns +wic kets +and the +r ama +yo on +stra ight +bed time +or ang +bul lets +sa urus +min ers +inci dents +! ... +ðŁİ ¸ +ag ers +hand les +stat es +in ity +d ons +incredi ble +emin em +avi v +ru dy +moz art +folk lore +appli ances +mt l +fre y +di as +hu a +page ant +stri ve +im prison +bul lish +r ana +al erts +bb mas +hy per +derby shire +re cre +re dd +debor ah +cosmo s +law son +mel anie +psy cho +ho or +doo dles +sni per +shad y +man tle +canadi an +new year +inter actions +separ ated +cor ds +spiritu ality +ap u +it o +p ct +pel osi +rebel lion +se iz +wor cester +sec tors +ul i +san ta +Ð µ +ðŁĩªðŁĩ ¸ +bi ased +class ical +gam ma +dee plear +emer ge +back er +sur ance +hand crafted +ðŁİ ¥ +franc is +mill an +ic i +cro wn +wo w +stri ped +un fair +relax ation +³ ï¸ı +embrac ing +she alth +pale o +martin i +dist illery +wr ink +or k +na th +hay ley +cour thouse +si ber +sa di +quiet ly +mel t +m sm +me h +smart phones +rel ent +pp ing +war wick +co logne +gli a +cot ton +pro g +lon e +ip sw +star ters +expan ds +u mp +su ed +ski pper +infe ctions +ing le +à ¡ +cler k +demonstr ate +ac ar +ðŁĺĤðŁĺĤ ðŁĺĤ +ti bet +bun s +alo m +demol ition +ssi a +g st +[ ] +so ar +âĺ Ģ +ðŁĺ ª +ðŁĵ Ĭ +dee pest +beyon d +are t +att ends +activ ated +di mit +âļª ï¸ı +high lighted +magaz ines +rum or +az za +steph ens +dol ph +sho ckey +mat s +we av +mel an +serv ers +tra um +ku sh +æ Ĺ +bab ys +pa z +a al +la use +break ers +canter bury +ul ture +mi ri +euro s +tane ous +impre ssions +du tch +il d +gh i +pur due +adequ ate +l p +sy ner +ang ler +du rable +gal ore +ro wn +mg mt +ðŁĵ Į +lu cia +âĺij ï¸ı +zay n +bor row +. 
( +north umber +cru sh +eng a +su sh +extra vag +t out +ma hal +ali stic +ther mo +gall eries +es se +chi bi +attrac tions +lex ington +legislat ure +docu mented +resi den +brow nies +w f +st ool +plan ets +sho ppers +conduc tor +ms p +tr icky +fru ity +end ra +feel the +whi pped +hair style +re fer +oo k +oc topus +audi ences +ku mar +after no +op tim +c fl +ni p +gen i +alpha bet +ann ab +lam in +accep ts +l ng +ðŁĺ « +t ine +ac om +cheer leaders +t k +gr on +v g +k ung +ja x +dha bi +r ss +mack enzie +beir ut +clean up +gy psy +st ell +bur ger +hurric anes +educ ation +st ina +âĻ¡ âĻ¡ +unfortun ate +jere mi +bad ger +at ers +: âĢ¦ +ter ra +subli me +stu d +y mca +mr u +duter te +bren nan +bul b +mel o +yl on +hack er +c red +gu d +as an +pad illa +embroide red +vietnam ese +pione ers +projec tion +re boot +id c +an ey +pri mer +suff ers +win ding +p on +sto day +mor n +u ch +all in +adid as +eliza beth +tu ck +o graphy +ðŁļ Ģ +be g +os borne +ghet to +r h +cn n +ir ma +ma kin +cab les +mur ders +oc ks +inst a +al as +si k +cu ff +la re +foo dies +o vic +at om +geome tric +em pathy +ภµ +cent enary +newsp apers +administr ative +ðŁİ Ĭ +sti ve +contrac tors +le tt +tas mania +awesom eness +den sity +ve en +prince ton +frequ ently +re ject +gh i +modu lar +ceram ics +sh ag +ki wi +can vas +sweat shirt +an j +ti mm +napol i +il er +appe als +hamil ton +ma yo +we ave +arrang ed +whar f +occu py +b vb +as aki +ot ter +nor m +vi es +de tox +tion al +dere k +id ad +ad missions +constitu ency +u pper +woo t +allo y +se ve +lu b +un comfortable +ed win +ab re +d wight +ar che +virtu ally +sp ol +pri e +ai i +er r +swit ch +bar ack +se ok +cou l +wn t +pou l +o live +caffe ine +cardi ff +notor ious +de mp +ex cess +bar r +t ford +a jay +bump ed +my thology +shel ley +fal con +shakespe are +must angs +no ted +bon e +civil ization +sy d +par sons +un official +hy ped +sp ends +oppo sed +v ings +space x +noti fication +deci ding +bio tech +out si +sal ah +! . +fe d +ss y +c ms +bad gers +cr o +ela ine +n ba +dy our +n ant +honey moon +climb ed +conom y +ath a +m ell +ne bula +nature photography +juli e +bm x +inve sted +mon o +lieu tenant +wat kins +techn ician +o se +ka e +ì Ľ +mc queen +pre ach +trav eller +flexi bility +ze bra +reta iler +p ant +ben der +brand t +squ id +war rant +veri fied +cas s +pier cing +hon ours +t ying +mor ris +kis sed +op rah +panor amic +me i +splat oon +wich ita +ari as +gal li +indy ref +good times +athe ist +confe ssion +ow ski +re pping +ad ditions +mechan ism +z im +j ans +su f +cho pped +beg innings +vitam ins +ãħ¤ ãħ¤ +or th +po les +ru b +antarc tica +indie film +web cam +ket ch +bre tt +cle ment +her on +defe ating +hydr o +buc ket +wand ering +sid ney +future of +b inge +on ies +knock out +administr ator +syn the +l ent +jan i +bar ley +premier league +ner ds +cr m +bra s +bot any +evol ved +rot ter +ro wed +tum or +weal thy +Â Ń +mon arch +li shed +da hl +ðŁİ ĥ +bu ch +ken yan +Ø § +red ness +assemb led +se mit +hud der +shro p +ran i +lear ning +mor y +iti a +geo graphic +worl dof +f b +pho sp +boo gie +am ped +? ... 
+che w +dwar f +ar us +s sen +ru sty +recru its +h k +gar de +app lause +vol umes +invol ves +ta c +hand bag +trans late +ffe l +se ym +aqu atic +trans fer +zo di +and r +acade mia +cr ater +te z +ar se +adap t +col oni +snow man +mal i +hang in +di schar +oy sters +pho e +colon el +w ba +hispan ic +thri ving +sh y +ag les +sales force +cre me +so les +la fayette +â ī +ter ia +ach a +sp erson +go go +car ly +the ore +am ore +vo x +af t +ãĤ ¹ +stap le +mu ffin +di agram +ino x +su stained +av ent +me ta +arbit r +dec ay +ado le +Ð ½ +ec ol +ph o +n k +o cu +gr anny +ç a +luxemb our +stad t +alber to +le vit +am as +d x +or phan +co bb +as c +lo gy +immen se +chan ts +off line +p ent +bre x +w inger +plan e +i el +nichol s +ca thy +nar uto +low ed +/ // +ignor ance +cat astro +you ts +sch en +buil d +haz i +s ine +critical role +du g +dete ct +lo gs +en amel +stpatrick sday +ed die +co pa +cigare ttes +ho ff +kay a +la goon +ra pha +air borne +choo se +puer tor +ke v +gui ding +fro sty +bor ough +mir a +ðŁİ Ĭ +cade t +anu sh +yo gi +e ger +fl ing +slo pe +nin th +we ston +foot wear +f n +may weather +a am +pla in +stair case +witne sses +work outs +ro bust +dex ter +co hort +ðŁļ Ĺ +sp ell +ha ze +o om +organ ising +wild fire +cont acts +av on +min o +upd ating +ðŁį » +li thium +ing ual +k is +au ga +lo com +de duc +u da +th ak +boy le +mp er +hot tie +eri k +re vised +is la +travel photography +oo za +en qui +confe rences +clo ver +g room +cur ves +live on +per f +displac ed +bo log +xx xx +ðŁĺ© ðŁĺ© +te al +ve ssels +rain forest +cal ci +pan ther +gira ffe +ta sted +imag ery +pad res +day time +bas s +ri pe +opio id +nu e +vin yl +invent or +sen s +process or +mu t +gad gets +bibl ical +shann on +jacqu eline +car y +the resistance +ali en +n vi +co sy +bi har +fo ley +ren d +mu gs +fa ken +cl one +ni allo +gra bbed +chi hu +power house +n tt +chero kee +spon ge +imple menting +rh ine +le one +ðŁį Ģ +pret tiest +infra red +impro v +swit ched +tu bes +con tr +bl k +projec ted +be aver +yo t +bbcra dio +thi gh +per secu +apologi ze +w ack +po ster +oli ver +az a +lou d +( ?) 
+f the +women shi +spar row +blu sh +us able +sc ales +it ative +peu ge +ne eding +legg ings +glam orous +mat ur +c z +wat t +da b +tam ar +et sym +bau er +heart felt +h n +else where +bir ch +alu mini +hu ck +e me +j l +traf ford +d z +por tions +ana sta +arthr itis +esp n +ber gen +viol ation +yo shi +c z +northumber land +clo sures +ðŁĩ¯ ðŁĩ +smi ley +r w +tel ugu +inten si +gre gg +ve ga +dun geon +south bound +ba il +domin ican +semi final +chap ters +h itch +van ity +trans iti +recomm ends +sati sf +bar ca +queen s +( ( +de struc +stra it +ra vi +dess erts +in tru +har am +k os +fo e +fat ty +pais ley +magn itude +dri dge +com ey +schem es +vision ary +our t +down loaded +ðŁĻĮ ðŁı½ +gd pr +lan i +p wc +gu ad +nic est +stake holders +re ferred +george town +arvind kejriwal +schnei der +in doors +all star +strand ed +gen der +ze pp +ma sses +ðŁIJ ± +pati ently +bl dg +z ab +we arab +vi vid +he ck +d ella +sy mb +je opar +la ger +à ª +comb ines +ne c +br ay +flo p +tx wx +jo ys +pon t +pro found +sur round +mad hu +ma ble +ay r +te as +n sa +open ly +er nest +ãĥ © +to po +g na +anti oxid +ti an +e tr +c ello +ma thi +gener osity +b iting +man ic +kel sey +chee ks +ten der +w th +pron oun +ultimat ely +gu sta +ari anag +ger ry +ble ed +red dy +mic h +mitsubi shi +oper ated +sex ually +ma u +cl lr +vi ds +co c +mel ted +ðŁĮ Ī +q ld +ite ch +instru mental +end game +ðŁĵ ĸ +ener gi +brow nie +tam il +at in +domin ated +pra ises +fire place +sens ational +men a +k arti +un prece +ru pt +ori ental +mc cor +tour naments +scen ter +re eves +prescri ption +sam e +fra u +tru ffle +em bo +roman s +bla sts +techno logical +pr at +b sb +y ar +tren dy +ac l +al ad +ðŁį ģ +o hh +bankrup t +tho ven +regar ds +is er +war wick +vine yards +real m +niallo fficial +do ta +ge mini +to do +v able +¨ ¨ +la u +wre ath +ju ve +nat asha +le ver +lor i +hor ser +cc tv +air bnb +es anders +sin clair +ema biggest +high school +con test +optimi stic +t te +ðŁĴķ ðŁĴķ +ss d +ye e +hel ena +con sen +ric ks +jes se +an ic +ðŁİ ¯ +re acts +ro be +independ ence +vol tage +m ington +s ant +à¸Ļ ภ+-------- -------- +sentin el +ke tt +rehear sing +aaaa aaaa +sof the +stir ling +sear ch +wi gan +stand out +sna il +pent agon +Ä ģ +ch lor +cru st +net any +chemi st +disapp eared +ric ardo +sp iders +bo se +war ren +me ssing +bann ers +gu el +par ach +ma id +coun ted +epi le +bon fire +speech less +se tter +meas ured +rejec ts +nik ki +le ster +foren sic +fab rics +alo ha +pre served +wat ford +deta iling +dar th +bo u +car ly +... 
' +tail gate +noti fications +å ¤ +pas sive +trous ers +balo ch +ro ther +typic ally +à ¥ +sp it +wi z +sic ily +technic ally +ex pose +st age +hu bb +cre am +cap s +po ke +sle ek +ju ne +tempor arily +de z +awak ens +l ame +_ - +ji ha +tues days +advis ed +advis ors +exi sted +dis agree +news room +lo sers +world tour +dr ying +al di +har ness +foot print +hobb it +p mln +i ro +que red +asse ss +gaz e +sa b +th ian +í Ĭ +ti f +ob serve +ev il +dra wer +swee p +cor y +co dy +kyo to +cal lum +n inj +lau rent +be i +sket ching +custom ized +du r +regre ts +knox ville +ìķ Ħ +mess aging +grac ie +abun dance +bi dding +bre wed +fl ouri +therapeu tic +alt itude +ho gs +bur ner +elec tro +wonder fully +he ater +post pon +li very +r all +ad as +a ac +sau l +brook lyn +play house +âĻ¥âĻ¥ âĻ¥ +char itable +in y +z ah +compet itions +be av +plu gged +o is +do om +astron om +speci alized +max i +ta ps +cellu lar +depre ssed +folklore thursday +cri b +e mul +ë° © +fi gh +ru z +car lisle +spe ar +side walk +de i +depend ent +lac es +nh s +ðŁĮ Ļ +reali zing +net work +ric he +re gin +re fresh +st ral +pa thology +pla id +psyched elic +hin d +u ka +algori thm +lin king +progre ssi +fe y +d ade +hydr ated +b ant +fam ed +cot sw +bo ise +as c +rac ing +ja vier +ww en +mar lins +poo p +swe pt +toni ghts +we f +ani me +slo vak +âŀĸ âŀĸ +cla us +lem me +cli ppers +re ls +arianag rande +r te +ko t +thal apathy +hungar ian +zu ma +y von +is u +jour neys +clin ics +be be +ww f +n ws +super heroes +er it +sle ague +identi fication +mo tto +ba i +sour ced +ill er +ap i +pri se +unprece dented +dam as +tuni sia +dra in +undere stim +e ther +quarter ly +rewar ding +al ham +wolver ine +cab ine +hyp no +nad ine +hav ana +da e +ðŁĵ Ī +dr on +read ings +b ati +pic o +mer ci +iti an +wal kers +el ope +mi key +god zilla +bur lington +abu ja +social ism +at ility +sh ell +harry potter +g no +ab ur +re leg +fel ici +ro gen +neuro science +inst in +ath am +vou chers +j arre +fu se +def ici +monte rey +de port +mid day +pp ard +fre ed +ame ter +wil t +n ingham +pr att +liber ty +slo gan +o to +pr i +co ated +c pd +ne tt +il las +mal awi +evol ve +accessi bility +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ +or nament +b p +el is +son line +chi ro +fl ick +ib m +ar ak +en ables +gar land +san e +cu ties +tri p +rotter dam +n ys +lam ps +lu cas +bo g +ra ils +travel led +hic ks +en u +sab ha +scru b +hi er +hart ford +fo o +fer nandez +tre vor +mat tress +appo intments +ale j +fe i +o logist +saf ar +oc ta +sr c +sha un +ambi ent +dri c +bi ker +she e +must ache +h ta +bo one +her ty +car dio +bra kes +rec ital +consi sts +overwhel med +cau l +robb ins +im it +al th +ur l +bi bli +on ne +black livesmatter +diffic ulties +tel ang +tall er +ðŁĵ Ĩ +deb ating +bur rito +mo vember +strength ening +bo e +te stam +mirac les +base ball +re nee +ðŁijī ðŁı» +al fa +âĺ ĺ +unstopp able +ec s +g mo +giftide as +path way +fen cing +ðŁİ ¤ +b ham +ra s +sk o +d led +thel ast +magn um +bin ary +wil de +wil der +wh ati +barbe cue +h ism +can oe +kur di +eli ve +advant ages +mad ame +bi er +mis sing +enter tain +air force +y ama +c is +hash tags +j is +ve il +dream y +ten se +may ward +ch ateau +hunt ington +âļ ĵ +v all +up on +bl ouse +dun es +ðŁĺ ´ +fert ility +m ole +curren cies +st u +ber lin +toa sted +div as +wal t +lar k +por a +hit ter +um er +chil led +bal ancing +fa is +y in +or tiz +east enders +h ate +ur al +ap ril +tim el +à ± +per o +sto cked +respec ts +th t +best friends +giving tuesday +be ad +inv ent +im i +nap les +comb ining +tok ens +thir st +ma sc +par 
rot +sp u +dent on +* -* +t res +subur ban +wid th +si ve +con tender +siri us +lo k +troop ers +outra ge +tur bo +frag ile +me ssed +do h +disc ord +netany ahu +re sign +forgi veness +mo han +mun ch +cam ou +identi fying +enab ling +hot ter +thorn ton +jai pur +ar ya +ðŁı» âĢįâĻĢï¸ı +mu staf +maj ors +o ke +du ffy +roh ing +til t +ðŁĩ®ðŁĩ ³ +rock star +she ep +hend rix +ra v +in vention +do u +lagun a +gru mpy +sw is +im pe +) ' +you ths +bun ker +st ache +oppo se +indi es +acceler ate +ml p +ed en +w ann +k ail +akshay kumar +su pt +pol ym +midd leton +extra ordin +wil son +australi an +alumini um +way ne +alum nus +mat ics +gri m +er nie +opp a +competit ors +rand all +h ence +decla res +pre aching +sha he +can e +sustain able +stap les +le dge +ad ena +doctor al +bur gundy +decor ate +ren dered +ri sen +pr ank +di or +bee thoven +flo or +ac com +to t +ho dg +touri sm +say in +objec tive +mar kers +premi ership +en abled +camou fla +gi ant +Ñ ģ +smo key +ric ket +pan g +de pending +s ation +evol ving +inter cep +cen sus +tof the +re en +mendo za +trum pet +marke ters +an it +ðŁĻ Ĭ +north western +v la +foto gra +blackand white +che wan +wi g +tro om +ginger bread +k n +ro mero +n fc +or chi +fun ko +sour ce +f s +ra ped +o st +tar ot +ann ually +ðŁĺ ¬ +r ill +del av +.. !! +se s +can n +medic are +ph el +ape x +guardi an +rema ined +r pm +a ñ +story month +instag ood +neighb our +p ing +sem ite +my stic +as cot +mat er +hand ful +dang ers +ti d +ana heim +opol y +sh allow +nami bia +tor ia +procu rement +big bang +announ cements +prosecu tor +beng als +sal le +en roll +ga stro +sugge stion +ba k +ha ul +budd hism +berni esanders +flu te +fati gue +cyn thia +cho i +ir win +gu a +str ous +h p +ba p +satisf ying +play a +ðŁİ ¼ +inst ap +al ice +t p +irri gation +ðŁĩ¬ðŁĩ § +in tric +clu es +ple x +sa x +he pat +dump ed +signific ance +by u +medic ation +pro v +tough est +corn ish +âŀ ľ +kel ley +u v +si zz +si bling +me st +di stor +diplom atic +aun tie +b hat +son ic +bren da +pump kins +ro ch +black burn +ur ged +shi a +arrange ments +floo d +sa unders +lec turer +nou ri +popul ations +diplom acy +consist ently +ðŁ¤ Ļ +t mund +cauli flower +l ily +vocab ulary +vari eties +coo ker +up town +qu ent +mo sa +re inde +velo city +spru ce +social medi +i ber +volun tary +proce ssed +bal tic +y ang +leban ese +d p +dol ly +arrange ment +y uri +cran berry +kal yan +elev ation +cli ff +pu shes +ìĬ ¤ +sil ic +co wx +eter nity +sla ves +vine gar +glou cester +con tained +breaking news +aga inst +renov ated +norm andy +hero in +ys m +mo ds +gre ek +un di +tren ch +v h +encoura ges +head ache +gr ange +: ' +ever green +Ù Ĭ +reck on +ab used +th ru +cho ice +ti dy +col der +scho ice +ha in +bru m +li ars +bre it +yor ker +sh ack +he idi +micha els +sco pic +fasci st +play ful +ca c +yas ss +sh ad +.. ? 
+qu en +ram irez +clif ton +pr s +best fan +âģ ł +gener ating +head set +disappo intment +abstr act +bo iled +paren thood +azerbai jan +exhib iting +bom bay +oli vier +ko so +un lea +mat ernity +iz er +si ves +r hu +col l +saskat chewan +fre akin +de k +na g +stab ili +ðŁį ķ +organi zer +bo sses +ar u +u va +at able +ta un +after wards +fert ili +ver ge +az i +mor ph +๠ģภ+jer k +cosme tic +ko w +stru st +ap ache +post cards +for mul +ì ĭ +spin al +jack pot +elec tri +Ã Ń +lo y +gra der +diab lo +ar di +he sit +f w +arch ery +pa sh +the ories +repe al +re live +per cy +âĺ Ĩ +im in +syn chron +sham poo +coup ons +o to +la i +thou ght +luxembour g +mo v +ðŁĺ ¥ +ge mma +se ated +m ga +strat ford +un certainty +shi fts +est o +fo ol +fire arms +cor rie +ki ki +appa rent +p ills +olym pia +fi d +elev ated +de cks +ignor ing +av alan +ro v +whist le +p tsd +milit ants +robo tic +pac ers +quil t +bankrupt cy +lic h +per cussion +celebr ity +al s +( ; +su t +pokemon go +h g +off s +gibr altar +scre ams +billi e +gen ome +mar in +be ams +arch bishop +em in +bedro oms +g ated +ol ly +warran ty +at own +cudd les +gun na +k ic +vi ve +cy mru +nar row +pro b +le o +refe rences +manufac tured +cho pper +brun swick +sem is +don ia +r ye +man o +hur ting +? # +hol li +investig ations +c els +ðŁĵ ŀ +le ster +temp les +sto rey +mc mahon +toi lets +wo of +ï¸ İ +le verage +at om +night mares +victor ious +haun ting +custom er +ag i +yo ongi +mon ty +ver onica +w ur +inti mid +blan kets +volu tion +j m +âĺ İ +am on +jud ith +ðŁĺİ ðŁĺİ +distr acted +dri p +hurric ane +and es +revel ation +tro op +ab leg +col lin +tibet an +wor rying +inter nationally +eat er +camero on +brad or +y uk +ðŁĴĹ ðŁĴĹ +tra k +slo pes +ci er +ne a +ol er +ta ka +albi on +volcan ic +am n +a fi +ob stac +face time +ger ing +n pr +metall ica +organ ic +ðŁĴ ¡ +ki dd +d ances +pemb ro +wash er +m its +om er +emo tionally +tan go +ip o +do cks +scan ning +spec s +tho m +the ology +emer gen +om i +g pa +selec tions +un necessary +ima ge +ter s +induc ed +gi gan +rent als +supp lied +m fa +shan kar +lat er +pa jam +cla ve +Ù ģ +ma hin +carl son +avi an +ano va +kati e +aj ith +design ated +chocol ates +investig ators +gla zed +prin cess +er ry +ra gn +ou rable +hr u +sun dance +peuge ot +steam punk +gh lin +gre ase +hi res +z ap +per ce +j ill +tom e +he hehe +joy ful +mae stro +ni shed +gene alo +v ich +p its +fox es +good man +emer son +lo bes +con verse +o ats +thom son +ra him +mal ware +ah i +man kind +re sin +im g +sw ood +kin der +sc roll +ar a +sak ura +ro bbed +xi on +ny a +c ism +ce dar +be in +mour ning +tor to +heath row +done gal +bar b +hydr ation +k or +elim ination +su pdates +hill s +appe ti +star red +ko m +gw en +dd d +cra y +sc anner +personal ised +seren ity +re design +meta ph +box ed +judg ment +no se +ë ¹ +er ad +ac ne +supp liers +ener getic +v om +as ap +ðŁĶ ¸ +ir vine +hat ch +la ss +ad ren +waff les +accur ately +ici o +itt le +se un +occup y +web cam +thene w +ent es +ga i +j w +accoun table +vis or +ir rit +licen sing +hudder sfield +gen ie +ðŁİ ¾ +atmo spheric +ten sions +spart an +clif ford +ol an +north bound +ame en +cen sor +u el +ster y +$ $ +far rell +hy ster +cl t +se dan +rep lied +descri bing +micro wave +sla b +pro sp +assi sting +ru bio +e than +hh hhh +gu ay +z man +ra ise +roll ing +o e +n ile +ambro se +scar borough +hero ic +coo ks +mor t +chop ra +ðŁĮ · +to b +shav ing +stac ey +dor m +motor sports +wi ki +fol ds +sp iced +stress ful +liter al +fu dge +pe ggy +wa ite +tre sses +se sh +pr ic +ðŁİ ħ 
+fri ght +r va +mumb ai +po m +tt v +cel lar +tom e +andro id +dor is +tsun ami +tin der +o ec +m wc +dor tmund +no thin +l iti +so u +believe in +at u +kno cks +mag ni +ss sss +ro hit +ine ws +ang i +m andy +ke ttle +intermedi ate +av ant +cur l +endor sed +ori o +ur t +consider ation +wi res +shel ters +b ino +vik ram +imple mented +ly dia +bu k +paro dy +c news +under graduate +canu cks +sam i +polit ically +ro tten +gh z +tex tiles +over load +moder ni +recre ational +fli r +bat on +typo graphy +ov ation +intrigu ing +pilgri mage +al ge +ad ays +tcm party +sp elled +cur ls +boo ze +ste m +ann es +ir ls +spon ge +sho pper +sig nation +bra ss +mi stress +le ah +beg inner +lau derdale +augu st +pre school +ta ping +tai pei +execu tives +b d +rhe tor +esc or +immun o +deeplear ning +stat ues +it us +manu script +ly ric +cor vette +mol ly +la ge +de p +cn bc +le st +je ssi +fi fe +griff ith +oppo sing +ran g +dr ills +respec tful +p ity +d ell +har ding +play boy +blo ke +shut out +k ili +o sp +se attle +bc poli +mis es +journ als +team ing +es ther +fre ddy +Ķ ï¸ı +metr ics +no tre +gar ry +for ty +navi gate +perio ds +bened ic +j id +da w +ance stors +restor ing +con g +aller gy +tit anium +c ence +lean ing +ab bas +v ast +uc f +roof ing +e man +seve rely +vo gue +ve au +in bound +d z +tane ously +stret ching +man chester +dr yer +dav is +kan th +the game +it ted +re tain +el les +conge stion +frat ernity +ol lie +lo ki +fre ely +cho o +pon y +sc ep +tab ly +bal t +rock n +di me +lo gging +ðŁį · +ad u +ha voc +water ford +char is +swee tie +run ning +ner d +erdo gan +z ara +weigh ing +fif ty +pre cise +low ell +kurdi stan +r yo +or th +syn th +lin ers +phenomen on +art illery +il legally +constru ct +nostal gic +gar th +al ta +shel ton +a sean +w ander +dur ban +di versi +bon o +cl on +le man +sh un +obstac les +appet ite +fe eder +respir atory +di xie +formu la +an to +so ber +extin ct +au c +ing les +legitim ate +; ; +min nie +ipsw ich +dram atically +ðŁijı ðŁı¼ +ingh am +milit ary +mon et +us navy +for k +dun no +play er +q otd +st oo +ex or +ethiop ian +film fest +pe red +c ate +sau di +in ner +sin cere +tion ality +ale e +de eds +cooper ative +ir onic +cro cod +br ary +post season +cam per +can ary +e in +exten sions +nb d +sher wood +spo kane +hu mp +jit su +ê ¹ +dar yl +p si +stab bed +offer ings +expe cts +cav al +body building +fr aming +f ca +ye arly +bom bed +sk il +resear ching +jud iciary +gree ted +tu dor +mil o +innov ate +ðŁĺ Ľ +r hs +ru by +contribu tor +fam er +soci ally +m lin +fi ery +ut ter +beau t +it os +de voted +rain bow +bar ney +pe ren +ar jun +r na +gab by +ut i +hann ity +pick le +ser v +qu akes +pp e +fe m +wh itec +j n +victor ies +ðŁ§ ¡ +gol fer +congratul ates +resul ting +mechan ic +ur ve +cen tered +kie v +an s +in cub +< < +c mo +bestfan army +dap h +en ham +on cology +ku sh +t xt +ori ented +fashion able +c sr +sa hara +r ack +pd p +han son +ภĩ +ti ers +ra r +pan am +in sky +sa hi +testam ent +asth ma +in her +fisher ies +or der +ho we +gall on +ep is +suz anne +drow ning +paneli sts +ðŁĺ ² +ë ¦ +al ach +commemor ative +at tribu +ðŁij » +mo o +visi onal +week sary +gu st +ak in +poin te +ee e +di spar +ni pp +dent al +st all +pi an +bor e +ul ster +tic k +ir r +tae hyung +micro phone +bermu da +ga ard +el er +plumb ing +hu gely +âļ« ï¸ı +race way +cam bridge +mar cel +burn ley +to ast +holly wood +fa sting +me red +hib ition +ca pped +benef icial +ow ning +cont amin +arab ian +to on +cap ac +hul u +sm ir +nutri ents +se in +graph s +con ditional +ðŁij 
ħ +or ac +play in +nor the +tor nad +mar ian +ju mbo +lex i +incredible india +road to +uk one +confu sing +sp h +shan k +pi ed +mq m +positi vely +sher ry +path ways +consi ders +tof u +argu ments +resil ient +che tt +with dra +ter o +ated ly +sw ana +he b +fli ght +har ley +decre ase +kind le +book shop +³ ï¸ı +marty rs +sm ur +mc cl +concer to +sti me +rejo ice +app lau +cle ment +mer kel +jai me +im mortal +isle of +mar co +youtu ber +stal king +me too +st ack +sp ouse +u st +lu v +âļ¾ ï¸ı +eque strian +ev ing +fl in +nick name +the big +as ar +st acks +wal ker +bor a +kidnapp ed +hur ling +humb old +rec alls +co pper +ann is +se o +mer ger +mu ir +ad dy +ðŁĴª ðŁĴª +be x +cr acy +con an +congratul ation +mid st +âĻ ¬ +for bi +op tic +cr ate +crocod ile +mad agas +secur ing +ast on +o gue +savi or +salis bury +love it +fuji film +cast les +as st +ar rows +sp acious +tr s +poly vore +progre ssion +m ri +nel son +bi m +indic ator +o da +pe pe +re signation +gu t +sne aker +log ically +az y +are lla +te aring +jo shi +ssion ism +q pr +mari ah +p x +ble ed +mi an +med ley +we iss +ker ry +gat ory +at al +madi son +av enger +nab y +pl and +gi les +fresh water +d ington +ta j +demonstr ates +n tv +bul bs +sunday morning +pe ake +souven ir +wa h +ton nes +m kt +complex ity +con den +ross i +b ing +y ds +su k +n go +mid land +ol y +life is +ri pple +mo reno +dd ers +tu s +á ĥ +bou l +x a +hol dings +wn y +shadowhun ters +ke i +asp ire +m ous +ow en +so ak +skir ts +moun taine +stor ming +ch rome +ri ots +sar ato +amaz e +less ness +nav ar +crit eria +ra fa +indul ge +ay er +por to +nam o +........ ........ +yi elds +val le +j h +mac ron +sa ins +dur ant +tra ilers +wo t +confeder ate +sh rin +id ol +form ally +ten e +motor cycles +than g +no de +bang er +dal y +p ats +enroll ment +au ctions +at al +ar bor +lo gos +de arest +trans action +dom ingo +fle a +ser mon +de ck +sin cere +questi oning +juli o +was p +pre tz +armen ian +k ham +inflam mation +picture sque +acci dental +film makers +ðŁĺ ļ +ðŁĴ į +ca sey +so b +yee zy +good will +parag ra +ss ly +fe ather +dy ed +assassin ation +na de +b cs +app lies +femin ine +fe u +ext ent +depu ties +l ack +psy chic +go i +kill ings +pse u +ðŁ¤ ª +un c +mar l +tan e +mck enna +sur fer +influ ences +free way +hack ney +mal aria +el and +te au +rema stered +Ø ± +raz or +gg y +cor ro +lak sh +fla ir +honest y +hoor ay +de pp +am c +wedne sdays +q a +ed its +- $ +se villa +dou bled +human ities +c cot +som os +r ine +af a +si oux +re construction +wel ding +th reads +am ish +encoura gement +po der +bo ck +bal m +p tions +stand up +accompli shments +guar ding +convic tion +ac ion +napo leon +depic ting +att ack +su i +wear able +âĸª ï¸ı +pot ter +esc ort +vis e +to ts +bo on +event profs +angu lar +womenshi storymonth +bar row +sch i +ac comp +ti k +l end +kensing ton +wol fe +st acked +cra shing +exhi bit +wing ed +sab rina +ma sa +k ms +alway s +et t +pla sma +counsel ing +pick les +nfl draft +mr s +inev itable +coura geous +staf ford +writers life +ho s +e j +gh yun +trade mark +adri an +influen cer +coron ation +ra ging +explo red +usa f +excep tion +eu x +tan ker +sw ami +pac ket +ðŁij¨ âĢį +f en +she en +a ero +j l +re gal +nw t +au ster +meh ta +char ge +a ste +b ate +inf eld +racec ourse +collap sed +fle ece +z il +al lie +alternati ves +geor ges +ðŁĵ į +quir ky +fc b +nat geo +philanthro py +bra i +every day +ðŁIJ ° +ach ers +ja an +fin es +q i +fisher man +distin ct +gri mes +nation alist +comm ence +ro wn +âĢ ³ +z ing +f ter +hr w +baro que +bl 
ender +kitt y +hoo ks +c ited +w anda +consen sus +reinde er +an and +supp ly +me ds +v n +ol ph +rat chet +shel don +secur ities +ë°© íĥ +cro m +mosqu ito +j eric +im mac +dimen sions +â ¤ +di ssi +sponge bob +dami en +steven son +jo anne +del ish +yi kes +than x +surve ys +postpon ed +alco holic +al ised +ðŁĻı ðŁı» +do ch +sen tim +mered ith +com pares +b ago +happy days +mo ss +ãħ ĭ +ne c +gn ment +frustr ated +comb in +ri v +ec lec +col lo +compli ment +actor slife +ct to +nic ar +op hon +apar the +man t +ja de +trol ley +optimi zation +eye on +eco logical +qui st +ep he +ॠĩ +cin co +appo ints +old school +c pr +behavi oral +min aj +:- ( +tag ging +ev al +jo aqu +ðŁĺ « +ha k +de me +jama ican +so s +hy att +hand book +libr arian +hanni bal +pump ing +ch om +f man +ga i +hu ll +respon ders +green ville +n us +vau gh +ðŁİī ðŁİī +ta xi +gold berg +man tra +te ase +forbi dden +metho dist +ati vity +* *** +ec t +mc gr +Ħ ëĭ +se b +amid st +disapp ear +thy ro +phili ps +er ina +v icious +stream er +million aire +ma p +str ick +hack athon +gh a +ed ic +mi ka +pe ck +ill i +anto ine +ar ca +op tic +ma ure +ðŁĩ¦ ðŁĩº +cla shes +man ly +âĺ ģ +al var +and res +me i +el m +ww ww +al tered +l te +ê¹ Ģ +mo jo +for rest +thal ai +non t +spee ches +acknow ledge +ign ite +x factor +ðŁ¥ Ĥ +mead ow +disru pt +debu ted +scrim mage +pharmaceu tical +fi dd +found ations +philosop her +et al +publi shers +bo ys +c ke +ru gged +opti mism +re be +phil harmon +nar cis +ral lies +lu is +go blue +fol ded +un acceptable +optim al +li sa +pol aro ++ . +en za +âĿ £ï¸ı +mon opoly +grace ful +dair y +du a +diffic ulty +judge ment +o si +mer sey +flu x +new found +ter ns +dimen sional +in vic +al ba +am it +abudha bi +alger ia +autom obile +the ad +lo tion +acceler ator +vac ant +iti on +lu f +al ic +pl l +bla zing +ba z +sen e +ðŁij ¼ +villa ins +direc tory +eis en +to ck +broch ure +ri pp +hb d +zayn malik +nic he +lo lol +certific ates +mor se +fac up +x ham +un wanted +im ports +carne gie +fan sign +mo u +r alph +destroy er +sw ing +trek king +cili ation +pit bull +g aps +ho well +defin itive +mc le +f ps +et z +bol ly +lyn n +gan o +at ure +fur suit +co il +na v +but ts +tro jans +eu re +en ko +sch umer +horri fic +install ment +br b +subur bs +a bel +vi r +de sh +cun ningham +ðŁIJ » +span n +sch we +ke mp +tr u +ste alth +qu es +le w +deli ghts +ko ch +hu mili +cr iti +il t +sp ells +mi ley +car ic +ðŁį ´ +lc fc +substitu te +oun g +? !! +af fir +predic table +class of +er r +cy press +chand ra +age ing +__ __ +ther land +don caster +el in +yo shi +sail ors +har ris +jo anna +niger ians +h ers +pla gue +pro cra +k no +can ton +busine s +un h +pra kash +c in +bow en +co ating +m als +be gging +smith son +ponti ac +sp ies +dam ian +pl ine +und ant +al ta +one ss +shame less +da q +bb m +wal es +stam pede +ser um +Ù Ĩ +cataly st +x n +ab sc +free zer +ch un +ari os +mc cre +fore head +he ars +damas cus +tac oma +ardu ino +encoun ters +stan ton +lg b +ab as +" .. 
+ke te +drac ula +ele m +g ne +zepp elin +la brador +pul p +op tional +or n +russi ans +san itation +hil ary +etsym ntt +pen alties +au st +ig ans +olympi an +medic aid +vers ace +va pe +re stra +pe ep +sexi est +st alls +di le +the a +punjab i +pupp y +tuesday motivation +ðŁĵ ļ +the flash +roc ket +mo dest +chihu ahu +on na +k sa +hur dles +ca ve +fail ures +sp lit +bo ho +gur l +disappo int +ho ward +nug get +fran z +stal ert +kaz akh +for getting +sch ri +ag ate +am at +eve rett +du et +veter inary +juli an +ch ills +bra ve +ghost busters +lan do +gre ets +profit able +d é +ti r +ze e +om en +pd x +gray son +har i +fix es +stab bing +swim mer +symb ols +compli ments +po se +func tioning +th nx +gi r +corpor ations +bar low +lo e +off season +distin ctive +marvel ous +nik on +enri que +ky u +ja ws +amo to +lom bar +travel blogger +fa h +ouri sm +tri stan +so e +ce ase +ðŁı ħ +z ac +mck enzie +taxpay ers +swim suit +bl o +les ley +kan sas +w ks +ki el +provo king +my les +str ing +kangar oo +galac tic +fif th +s ke +we ir +ll is +mat ory +ðŁĩ ¿ +un ci +re productive +roo ting +ti des +gad get +.... ...... +alex ander +bow ler +scre w +apo log +eri ka +wal ters +shet ty +lan e +ban ter +as ant +me so +v ain +" "" +us i +fer din +accomp lish +man sfield +bom bar +collabor ating +cla p +it ure +s da +smo ky +na k +im person +car la +com ra +bur gl +lo co +ti es +in hi +trac ey +se is +diss er +rr rr +dra y +prote ct +cor ona +hun ger +ck en +c eli +trou bled +predat ors +fic tional +shav ed +riche st +metab oli +ful ham +gro oming +mono chrome +wa sting +as co +ast e +ti sta +remedi es +ung soo +south end +perman ently +bu mble +procra stin +ident ical +practic ally +ma scul +su ke +assu red +val erie +devi ant +grizz lies +thi er +pur a +ne pal +not ts +bil ateral +spo il +car mel +cine matic +ph l +ni fty +ma o +hypo cri +la ser +pan try +mathemat ical +el isa +coordin ation +bel mont +a it +radi ant +bo iler +man g +f ag +cr c +h ams +br in +â¬ĩ ï¸ı +famil ia +âĿ £ +sab er +ru pert +gg an +rit z +mic h +sal ford +le vi +gra l +ðŁĴ ¤ +n ino +ce d +business man +ul tr +sim ply +compre ssion +pa ins +hal t +ë°©íĥ Ħ +landsc aping +n f +croo ked +er d +itt in +ddle ston +sur passed +ino a +da g +bl en +exten ding +at ing +al gae +ball er +u mar +snoo ker +col lu +flo wn +thu b +ridic ulously +ki sh +op le +di re +as ser +ari sto +sc iss +h ating +trou ble +syl via +suc cul +plo ts +sincere ly +al er +laure ate +br ack +att n +rif les +me to +collec tible +cu omo +conte stant +consist ency +ant z +rang es +abig ail +de b +mini ster +grow ers +an oo +hoo ver +dream er +nu cle +resear ch +mi y +sha hid +ma v +d honi +cin i +do j +hin dus +part ying +dal i +alon so +inform al +clark son +it ton +ki an +cit yo +mor i +la sted +as pen +libr ary +susp ici +qu at +den ial +fol der +ch ori +swee ping +eni x +ðŁį Ĥ +Ø Ń +nas car +handmade hour +mou l +heat wave +em er +exam ine +ib n +gr ind +po v +tion ist +m bo +she ila +integr ate +om es +take away +cer v +con nie +tic ket +ce led +bi en +visu ally +madagas car +sor ry +gu i +park run +tra its +la be +pois oning +ॠĢ +vi able +bohemi an +denti stry +bad os +spr outs +mask ed +te ddy +ðŁĺ · +sa f +sa as +ji ang +ti ght +spe aker +withdra wal +bc n +as signed +class rooms +fle ming +ðŁĴ « +super girl +tot als +table top +e books +horizon tal +cra z +flu sh +j ard +c dc +er son +ãħ ł +green wood +ni h +co x +ad a +lit re +go ing +v icky +cur ved +lou ie +gra ins +hy e +lon ge +reme dy +tra inee +san jay +super stars +ma ser +man u +s age +wh l +ðŁĺĤ ðŁĺŃ 
+ðŁijį ðŁı» +m sd +en z +rab hu +j oo +gh u +ac er +e po +resurrec tion +justice for +bl ended +mo da +avalan che +france sco +re spective +g s +ye ast +wel ch +devo tion +ge tin +athe ism +am ic +carol yn +lo c +ld nont +ave c +us da +le gged +bra very +b lower +cow boy +he h +sti ble +buff al +chann el +run chat +âĺķ ï¸ı +ide ology +best seller +y oo +pe anu +bon ne +fel ic +edi son +fr actu +naren dra +pp ets +seym our +ri viera +he ctor +necess arily +bi anca +soci eties +the best +w g +sent ences +win k +vacc ines +pal ooza +jam ming +as f +mp us +agre ements +ec k +ba c +hon ore +com pul +wild cat +im posed +yo ga +hud son +can celed +l ich +fu zzy +es que +ch uk +w vu +se k +fli pping +r hon +wi shed +wh a +cap ability +len ovo +ìĨĮëħ Ħëĭ +vi vo +tv d +nor a +sil k +pas adena +yo semite +valu ation +clo cks +u ber +mr c +dar kest +au bre +ss o +bell y +wrest lers +kill in +lou der +buck ley +ge el +ad on +un s +appe aling +ðŁij ¯ +semit ism +list ens +fit z +ãĥ³ ãĥ +ny lon +ar ty +seem ingly +hal a +su ited +et y +she ds +mu ffins +ap ric +um ents +u ta +jam mu +chelse afc +star z +yo ko +roo t +clean sing +di ar +pione ering +ihear tradio +dig iti +fin dyour +can o +ðŁĴ İ +z ol +spac ecraft +six ers +moi sturi +b ile +ti sts +hor ton +rang ing +colum bi +mete oro +senti ment +ep l +foo th +text book +drain age +r ly +sc ue +imran khan +ðŁĴ ¸ +margar ita +ed dy +predic ts +gamer gate +advis e +growth hacking +love you +ug and +v f +beng hazi +s later +ne wor +ch el +independence day +p np +cul len +hoo dies +num bered +brit t +t sa +kl tu +s ages +mom o +onep lus +col l +gu ts +w ta +mesm eri +enh ancing +chiro prac +j is +teen agers +m one +constell ation +sweep stakes +e ze +slovak ia +la ye +pear ce +wa ver +po gba +k ron +sur geons +mar x +ti d +gg a +desc end +p ours +upri sing +wal la +sab bath +bachel ore +mack in +k am +peter borough +hor a +ðŁĮŁ ðŁĮŁ +think big +r j +hy drau +sp al +univers it +ðŁı ī +mail online +league of +ten ants +w ally +lan ce +heav ens +dd r +bol ts +am ir +i phone +ci gar +en du +re i +el abor +r inging +john son +characteri stics +sal oon +algori thms +tal kin +m tn +di ve +region als +ff ice +hat i +deviant art +so tto +shir o +l ama +k we +f aded +por ting +tu mmy +est ates +buen os +ðŁ¦ ģ +beli ever +pen etr +dar n +sp ite +can opy +fashi oni +t illa +pet als +eli jah +bra wl +marty r +ë°©íĥĦ ìĨĮëħĦëĭ +mid town +eric h +d apper +sm town +me gam +ww w +le le +on s +cat fish +fir th +fossil friday +ball park +th aw +pot ent +illi e +cre ep +car p +so ap +gun dam +infe c +yy yyy +ठ¨ +z ag +rit t +calcu lator +bo ca +ok o +to ad +threat en +refin ed +olym pic +accompli shment +bacter ial +a ji +tat um +feli z +she ed +j at +th ic +jam al +ðĿ ĺ +lin a +ðŁIJ ¯ +jo king +yot po +pin ch +ak ron +her b +motiv ation +li a +ho stage +cre ek +gam ble +russ ell +patt i +fo tos +c pc +bro ken +back the +cla ys +u mm +stock ton +mat ernal +ü r +la kel +cent ury +be k +infe cted +ภ¡ +smack down +man ned +ta hoe +sm es +bas a +su la +augu sta +. 
* +rohing ya +gre ed +counsel or +silhou ette +gra vit +cla use +' - +bo bc +occa sions +now adays +dic tat +be ard +n ally +brigh test +kab ul +inc india +dhan ush +archae ological +che ape +mizz ou +d hi +ov ski +bax ter +asse mble +à ¢ +gi gi +ac am +wis ely +haz ard +north ampton +âľĪ ï¸ı +me th +bla sting +re unite +mu lus +ali zes +t read +mil a +ed ward +ko va +pe sto +ðŁij ¶ +vit z +hydrau lic +refurbi shed +mo tel +isab ella +hom me +sever ance +uph ol +mis erable +f ari +lat ter +ef er +crack ers +es l +ac io +yy j +in an +ec b +z ind +pan as +tru cking +re ed +sh aker +burge ss +em pire +ag nes +n ington +art works +fr s +ti le +bi ome +eu n +ch ong +americ ana +god father +go blin +i shi +! ). +temp ted +gen omics +mand ate +ck y +ðŁĴĻ ðŁĴĽ +som ali +br andy +in ven +spoke sperson +pc b +yu an +h g +fa z +starwar s +ro wan +blue grass +don g +d day +trin idad +er ton +ban ning +re tention +cu red +tober fest +re set +we is +deta ched +behindthe scenes +immun ity +ph a +bra y +ðŁij ½ +ran cho +ram say +est onia +nd tv +] . +cab aret +tar o +d v +show cases +plu m +ðŁij ¸ +son oma +pre pa +memor ab +e stu +drive way +u les +magn us +x r +nn n +much as +en ge +stre amed +fore stry +audio book +tro y +reck less +kil om +ru ler +ra k +proce ssion +i ons +po ole +noc tur +wh s +farm house +per a +par me +hypocri sy +s ics +v ant +cas k +holi stic +au st +Ð ¿ +in do +ðŁij© âĢį +di so +disp atch +ol sen +make it +en nis +cent re +ar range +ðŁĮ ¼ +sal ted +ea siest +f ate +reg atta +mo zz +ac an +sin i +g ically +ch ops +chick en +work in +ha gg +invol ve +wee ds +book day +wake up +ky r +michel in +fu ss +re juven +vac ancies +incar cer +m st +sc ents +sovere ign +kick er +à § +bo d +âĢĶ > +sa h +mob il +shrop shire +oph one +dress er +mis suni +hep burn +i mo +foli age +diagno stic +as san +cycl ing +guil t +c sa +puertor ico +win elover +wake field +do ggy +k he +pa pp +co g +al lot +cu ck +poe tic +mi o +re vit +mag ician +ç ¥ +ant enna +west wood +mber g +lux e +oat meal +Ø ¬ +te at +ffe e +sear ches +l ly +plu to +el on +let tering +inno cence +fa i +ann on +telang ana +ma it +neu ral +can ni +ar oma +a stor +fe x +co cac +mon etary +f ent +un sure +' @ +indi rec +teh ran +isol ation +li bs +make up +merce des +ff y +he tero +de o +sco m +cur sed +veteran sday +franken stein +shre ws +de co +ge ese +lefto ver +ha did +vari able +acade mics +carol in +under going +vari ation +na h +ssi er +gamer sunite +pur suing +emer ged +ll ers +control ling +ro aring +mete or +vol t +daw gs +be aver +is life +bathro oms +aci onal +pre vent +lake district +in als +y ani +gra bbing +sac ks +le z +sw ay +k ool +time s +klo pp +la de +con cord +resul ted +revi ve +recon ciliation +ol and +az z +gir o +mand arin +de en +nutriti onal +is coming +van i +aw www +der ived +love your +stop the +shou ting +nov ak +ðŁĻĮ ðŁı¾ +lo af +displa ying +sunday with +ma guire +ch eri +ðŁı Ł +re match +qu ic +Ú © +y in +ðŁĺ ¹ +ili ve +z ip +our ke +down loads +sw at +missi ss +care rs +t ment +proper ty +hahahaha haha +gi bbs +sur rey +ar ise +tic ism +sti a +ir ling +fro g +co se +bas sist +fore ig +lea u +pil lows +hol la +eli e +disclo sure +peanu ts +inte ch +ww c +plun ge +trium ph +cor i +sli ppers +ðŁĻı ðŁĻı +neutr ality +ma re +hair y +gang ster +hu mming +cust ard +mer lin +ale a +s by +dam p +mo han +ver bal +j st +gu tted +b jor +un finished +ðŁĩ¯ðŁĩ µ +un happy +âļ« ï¸ı +by pass +at su +fis cher +sa v +afric ans +re use +mid way +demo lished +ger rard +her cules +Ä Ł +medic ines +cl icking +sur round +jo 
ong +wav ing +tri bes +wet lands +offici el +argu ing +l le +do va +su zy +club house +ne gro +ob tain +ga o +gl ance +assi st +ch os +ãĤ ¢ +âĺ ķ +adri d +occur s +st ans +par don +livel i +emplo yed +re visit +ff xiv +bb le +ne aring +min er +ðŁĺ ¹ +giov anni +up to +mar vell +mar se +to wels +cb n +engine ered +y elling +spart an +si ans +ðŁĻĮ ðŁı¼ +se v +coyo te +sta di +t cm +app en +shenan igans +open access +so aked +ma squ +le vine +stro kes +l k +aparthe id +hipho p +char don +may may +ha asan +stri pped +fr o +scri ption +f ton +h f +pri sons +marsh al +ķ ãĤ +an cho +com promise +classi fication +buzz feed +bblo ggers +deser ving +) / +s way +ob o +camp ers +poder nfamily +p oured +bri e +squir rels +se ize +: # +le k +ti mb +st acy +nas daq +repe atedly +br at +mi ghty +competit or +mah one +de si +o ke +bm w +shi e +f cb +cheape st +minim alist +par amount +n ate +har as +insan ity +lat eral +ment ality +mo zam +ta pped +yad av +u sp +b way +the od +bil t +ra ids +em press +adap ted +pat ron +nut shell +ag ra +be aded +sundaywith marsha +vi king +proce ed +main tained +thinkbig sundaywithmarsha +sn es +mus ica +to wer +ch ab +bo k +sm t +insul t +harve sting +windo w +ru ther +be ige +dec al +indic ate +ma iling +ri ft +po le +ander son +ch oral +sp ride +l ili +ev elyn +imrankhan pti +.... " +ke red +un dp +water falls +se ars +le mans +world series +ri el +ani e +app ar +score rs +lam p +a than +phys icians +qu inoa +refu sing +vu itton +unle ash +s la +pat i +shou ts +inten tions +fo amed +europe an +neighbor hoods +me er +man son +du h +br at +con es +bow l +kazakh stan +ठ¿ +in appropriate +del hi +ketch up +ful ton +s ys +consul t +gar field +to go +f ml +f led +b ds +facilit ate +ree bok +selfi e +elev ate +activ ate +bi ble +ca wx +b ys +cam ille +sy ou +sk ool +her t +w bc +ple dges +recor der +po sh +ac re +so aking +mat il +v sco +shoot ings +pla r +e con +ðŁĻĮ ðŁı» +rashi d +u bi +ðŁ¤ ¤ +sw inging +wi pe +rap tor +m su +music video +dur ham +at tic +apar ty +fe tus +activ ation +aa z +motiv ate +ðŁĴķ ðŁĴķðŁĴķ +j al +ठ® +ag on +sche er +stal ker +fo ster +az zo +tele gram +vi gor +s laugh +screen shots +entrepre neu +kri stin +inten tion +ch illi +fr action +don a +ge a +tc u +s ite +la k +em il +d nt +bor o +wil kinson +re cu +ato day +t anya +bl anco +cd n +brilli antly +g cc +ac c +evacu ated +ther ine +den ny +cait lin +she pard +pou ch +hand held +sou theastern +ha a +à ´ +re solutions +led ger +sr in +r ar +shat tered +chim ney +im with +mete or +hand led +ra ke +town send +en han +shi py +duc t +tw x +inflam matory +war hammer +theat rical +gro s +sk ar +sco tty +ni el +tit o +tin i +conne ction +_ . 
+goldeng lobes +sha q +ðŁı ³ï¸ı +hall way +fron ts +effec tiveness +gla ston +d hs +ex pi +to h +c pl +sc s +re o +ha g +resemb lance +hor an +abu sive +qu er +virtu e +cho lester +a q +shan e +m ce +carri ers +di stress +re wind + ¡ +voo doo +int act +ann o +ðŁĺ ¤ +pi led +adi a +ãĥ ³ +en ow +di gs +light ly +goo fy +turb ine +governor s +con te +re open +pa h +i ve +cra fting +swee ps +jo di +an de +zu cker +kaw aii +o ko +v ai +out line +kri sti +ts n +insp o +qu int +fil thy +lyn ne +listen ers +depar ting +or d +t weed +, & +ale k +sel fish +nor ther +recogni zes +i ps +be s +a ed +w ills +pe at +surround ings +mon uments +ais le +be cker +la v +quant ity +v ah +helicop ters +tu cked +alv arez +sha pe +o bey +ad diti +road side +m ite +bl ers +ep age +j au +ignor ant +b ins +lu lu +x o +c fo +ee eee +apprentice ship +shef fiel +to i +ho k +faken ews +deplo y +aid an +husk ers +ãĢ İ +west brook +mi ster +confi gur +car r +fic a +proceed ings +ha w +ste ak +mur derer +pay day +a jo +p vc +don ates +bi af +nom nom +be it +k ali +x rp +ahmed abad +se mic +che y +x tra +an twer +head lining +squ ares +roun ded +flu ore +bol d +disa sters +am oo +gener ic +cran es +brief ly +gi g +auster ity +anticip ation +for ti +treas urer +cann y +ce cil +dete cted +check list +ภ§ +pam ela +bar bados +an field +hear ty +tx lege +peren ni +arro g +ing ram +âĹ ı +ty ne +spo on +r ation +am ba +m be +cam el +h hs +york shire +reflec tive +fre aks +to k +ju do +partic les +du bs +ban jo +accred itation +prover bs +over dose +inte gral +gu ang +mc s +super car +af b +al vin +ail s +x tre +st aging +tw ent +rabb its +mar o +inste m +dol l +cr ay +sant ana +ble ach +mini ons +che ap +man t +di vers +catal onia +lo is +mat ri +cou gar +kay ak +e gre +p so +a ia +å ® +char lton +tr acked +sc ari +pe tt +f wd +x in +gra vel +br ic +bigg boss +ar den +hu gging +pal ms +st v +li mb +the movie +handic ap +ri me +z ai +stu b +indi a +lithu ania +rhy th +p ita +maced onia +high ered +brid get +schwar z +ske let +hi kes +ant arctic +c ps +mash up +Ð ° +n ell +chand ra +he ir +an us +sher idan +mi mi +muse u +bec ca +an ir +bar rie +dioce se +compar able +ðŁı³ï¸ı âĢį +yuk on +me p +hor mon +mer ic +al f +con quered +christ church +ðŁĴĻ ðŁĴĻ +hazard ous +poo h +cont ing +retro spective +par ame +na ir +con sor +ho tra +astoni shing +cater pillar +u man +ti sm +t vs +serv ic +croy don +mor ales +c g +cu m +te ur +scan ada +s all +magno lia +el ise +th our +à® ¿ +ag omez +phel ps +ë°©íĥĦìĨĮëħĦëĭ ¨ +wh os +weav ing +si sd +pro poses +cro ws +pre sale +econom ies +bernar do +sha hid +air show +mc cann +hor ticul +nr l +du el +mongo lia +tou lou +requi rement +struc tured +ed i +o lives +he a +cu ter +Ð º +enthusi ast +harri et +domin ion +sub mer +ðŁį ĥ +sa ab +nes burg +mo ff +def ended +bur t +rewar ded +gold man +op tics +khali d +house holds +buc kets +ce cil +che ss +substan tial +ef l +oper ation +evalu ate +st n +rece ssion +l ll +tom as +tru ths +ak bar +s words +p act +embarra ss +ha o +ay urve +scrip ture +ny cc +op t +di ameter +sc ented +organi zers +re lat +ha e +dream ers +de se +ðŁĮ » +restric ted +n ale +r hp +dol an +mun ster +ha ired +consult ants +jo ints +hu mil +d ill +relent less +t é +af il +ut ilities +japan ese +condem n +pet ite +colli de +q f +peach es +cou rier +l ore +âĺİ ï¸ı +reli ability +ch uk +ðŁĻ ĥ +stu res +ge ther +ho stel +bi er +- _- +â ĩ +e ze +ta ilo +di ent +blu ff +chu ffed +pil ip +mon arch +e em +bu chan +b ick +op au +ku ps +ภ¢ +pist ons +sp ins +m and +ce st +bur ne +v ile +cher 
ries +bec kett +need les +pan ch +ë Ĥ +haha h +trou bles +insi sts +do you +g mc +mor tar +deleg ate +in n +g anda +sin atra +ठ¤ +spee ding +pu pil +pre mises +ali gnment +pi kach +as us +j alan +Ø µ +lime stone +fol kl +parme san +ce il +mo y +shawn mendes +ac up +hu st +ot es +med ina +ma di +gta v +censor ship +ar g +swe eney +sy kes +col o +foot steps +cann ed +adv ance +gta online +healthy living +ðŁį ¾ +a ig +p ality +oc s +he brew +im minent +berk shire +jeremi ah +out going +bak er +entr ata +ma ids +gro ves +bo c +a del +m fw +con science +arm ys +nut ella +conte stalert +novel ist +la h +ban ker +marque z +ðŁı ¡ +to ff +out age +gr p +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ +musc le +du dley +nvi dia +mi di +m uni +ess ays +dat ac +car ter +ภ£ +t ans +i ves +public ations +al er +ok wx +il u +cu tt +har p +out law +luther an +br ill +bo lic +do well +green land +be sties +path i +pay ton +gue st +har den +ðŁ¤ © +ann ed +evacu ation +po ised +mc der +b han +o i +envel ope +ci d +ca vi +ta pas +book review +grey hound +âĻ ª +fe ud +lun gs +for te +rai der +ff er +oni x +dep end +yn wa +rel ating +de vs +ðŁĴ IJ +acqui res +d ha +j yo +priv ati +can ine +k b +cra b +sar din +imag ining +k j +em por +down hill +ne z +ta eyeon +nick imin +gb p +à µ +w ap +sec co +ma shed +ðŁĴ¥ ðŁĴ¥ +augu stine +diss ol +dic tator +â ĵ +vi per +ed fringe +vau x +hard work +book let +no x +chi ff +ðŁĴ ¨ +observ ations +xbox one +u sher +ke er +lu p +dal las +cal gary +ma dra +di ous +k bs +wood ward +hero ine +lu mber +sea world +o ws +mc ke +maver ick +gu la +cross roads +fan g +s ade +nik ol +chee tah +me c +pp g +er ick +ðŁİ µ +tox ic +bj j +viol a +sp ire +ch ino +tra vis +institu tional +ha as +low ry +w ac +ea e +hu mid +mp ton +ru ck +je w +c ine +zim mer +se f +bhar at +fre es +aam ir +ðŁĴ ħ +z inc +wan e +multi player +royal wedding +e el +preci pit +qu ery +kimber ly +isa bel +ful fill +ig an +vau l +pan e +sc y +dig it +gun n +u tah +dog day +fi on +xia omi +da c +el ast +cha vez +ro blo +g ine +ten th +ab h +ke to +hur dle +na dia +memorab ilia +ha bs +qu an +h w +hv ac +pix ar +ec cle +kram er +accu ses +ðŁĴļ ðŁĴļ +per se +mean time +wa hl +atle tico +âĢ¢âĢ¢ âĢ¢âĢ¢ +ott oman +no vo +k us +conne cted +tru sts +d mv +spen cer +rahu lg +do ve +sto kes +bolog na +enthusi asts +à ª +rockstar games +ted cruz +du ras +s acked +late x +immer sive +cer t +lu cin +princi pals +fa res +sa ils +far n +am ent +saf fron +quent in +check point +fer ris +ex cur +ðŁijī ðŁı¼ +bai ley +se h +ter re +mad am +s band +wan derers +cumber batch +yy c +digit ally +blackandwhite photography +roll in +moroc can +ðŁĮ ħ +din ner +d well +to om +m ye +ez ra +cp fc +war hol +me er +jon ah +no aa +s gate +so on +secu lar +g ating +ti o +dri ver +si ssy +assan ge +ta th +ed mund +bobc ats +ra ji +po stage +stu ds +m gm +kat o +edin burgh +meet the +shir t +fa a +mens fashion +sp reads +wi m +car ts +phoe be +j ars +bot swana +Ù Ĥ +ed war +sk ar +ri ve +gu sty +c tv +ferdin and +su therland +nickimin aj +k v +si us +bee ch +re z +desi res +on ial +camp o +quar ry +lor raine +gil more +ig gy +µ ï¸ı +ho pping +avi z +ðŁĮ º +uni sex +dedic ate +att itudes +ste er +jun kie +rail way +y b +whi sper +key an +k us +ju g +di x +a ins +sum mon +ov ich +sy ed +her ald +ma ison +me ded +wild flower +main land +ri sky +ru kh +over looked +ki c +destro ys +nam an +ki p +z ano +champion sleague +ban dit +quin cy +smi le +cal vin +open ings +ta pp +ol ulu +spec tro +accred ited +ap k +pra ised +bar nett +pol len +premi ered +selen agomez +tou red +screen ings 
+uu u +mis o +en se +adam lambert +guel ph +har yana +hu tto +le ar +l tc +po ached +brex it +æ Ŀ +tt c +pa vement +mon gers +ro e +ad ers +ling ton +particip ant +ca red +ga il +y ates +lan tic +dash board +jo o +feli pe +ssi onist +bu m +s end +a eri +thu gs +luci fer +a he +dete ctor +fil ly +gas oline +ham per +hump day +the ta +the band +fore casts +o hhh +lo bb +hol l +cp u +az u +ad ar +hai ley +bu b +car t +quo ted +an archy +pan cre +twit art +al den +st ash +the less +or ni +belie bers +mor mon +partic le +avi ation +⬠Ĩ +webcam toy +sad dened +cru is +ham let +n ct +roll ins +marque e +saw yer +reli ance +a ura +di ec +soo thing +sig nings +ak is +à ³ +at kins +aer op +ðŁĮ ¿ +y ab +sh ari +con nol +du bbed +manufac ture +convin cing +feelthe bern +ra u +pu lit +on ec +gem stone +ur ging +bag u +ga h +aci ds +fi anc +zodi ac +sn oop +her rera +initi ated +ven ge +profess ors +pro di +stron ger +e mission +bb a +hal le +ta pp +haw an +wh im +compe ted +myr tle +ir port +cold play +ach e +ske p +m son +ss ic +calli graphy +swim mers +me y +pp c +thri ft +po c +re places +commu ter +âģ¦ âģ¦@ +go ers +lo gue +para dig +bas kets +sensiti vity +joh an +atl antis +& & +suit case +anxi ous +l h +str i +gal loway +stre ad +war den +gr ounded +ffici ency +li feat +reli c +disgu ise +island ers +f cofficial +classical music +b mc +en field +bi que +oak ley +bat man +sla ying +ner ves +mul tit +calci um +projec tor +scott sdale +ant ino +gri ps +kim mel +des mond +prote stors +hi atus +metaboli sm +conclu ded +press er +ti pping +sli de +e to +hun ting +aus open +ri k +pp ery +innov ators +pitch ers +ag ger +fun gi +z ad +proli fic +rockn roll +bl ames +ct ar +stam ford +q ad +mozz arella +insan ely +den ver +ph ouse +nom ad +ï ¿ +s ris +pro du +hen ley +pag an +am trak +ru bi +in cl +tu tor +sco tia +wo es +sing apo +fun nel +turn bull +know ledge +gri mm +real madrid +we are +missi les +con sol +emo jis +sne ak +smi ths +ru iz +br ou +i el +ha ver +ðŁĮ ļ +kin gof +basil ica +circul ation +prin ters +ta pping +ri dley +dra gged +ha j +writ er +fundament als +personal ities +me tre +stereo types +bur le +best of +n ffc +ha th +mini stries +a ali +trac ing +pav ed +ł ï¸ı +g ic +insp ire +tu g +ha re +repe ated +ex pon +lol li +rho de +pre cin +install ations +instag ram +az ar +i es +sole ly +du kes +mission ary +van guard +fursuit friday +on d +pol ari +ma st +har an +jos é +jack ed +ec oun +al ities +ne ph +ra vel +moder ated +sco w +s fb +uru guay +as o +ni g +au du +p ints +lat ina +ben z +m itting +char ted +mat ology +cit ro +biop ic +ðŁij Ń +djo kovic +fox y +agu il +so to +an ada +sin king +sc rap +hair s +bethan y +fact friday +ðŁIJ IJ +unlea shed +) ( +contra dic +ram on +coast line +y ong +sn sd +li gan +p ome +mit age +ge tt +wat i +ri sk +so aring +bru sh +f pl +av an +å Ĩ +lar son +sh ear +mul til +blu r +multi media +chun ky +par i +n ani +weir d +cholester ol +char les +dream ed +tan ning +puzz les +fr am +hand ball +ch ag +beli ze +al u +bang s +Ñ Ħ +detec tives +mc g +ish q +bo thered +saf c +mp ing +ten eri +g ays +sail or +an gi +mul ticul +gue ssed +ros é +high ways +bro om +chatt anoo +- ' +see ker +on ed +at f +lu c +> < +bar i +per cep +jewel ry +as ph +sor row +sl ing +mam moth +jac kie +ë § +wilt shire +sa o +can cell +im paired +tor ial +bre ed +guy en +jud ice +tit le +pro spective +applic ants +ðŁį Ĭ +epis cop +e id +b yo +stock ings +ðŁĴĥ ðŁĴĥ +ll p +sna g +keep it +l ough +ol son +matur ity +!! !" 
+cop ter +i sha +bl i +wil mington +tr youts +th ai +ðŁ¥ ³ +pe bble +kra ft +f p + º +ssi vely +li vin +contest ants +tex tures +jo an +h dr +film festival +prov ence +wi do +op end +c si +sto wn +cro ati +ad just +host ile +analy sts +il an +cu ppa +bru m +newfound land +good win +me tt +mall orca +plu gs +bu k +bb hutto +wrest le +sa ire +sho pped +for za +le head +vi vo +ba st +ro xy +reg is +hard working +hon olulu +desp air +young sters +ni g +impro mp +roll tide +de emed +tre ason +ru shed +for ged +ff f +pikach u +bri ggs +do it +ac cent +la us +gla ze +compet ent +a ho +photo g +mid field +le go +har vard +min orities +re illy +slic ed +once upon +initi ally +financi ally +landscape photography +har dro +qu o +mm ers +par kinson +smu gg +read iness +bru tally +glou cester +mp ed +bbhutto zardari +mur der +ye d +dat aviz +sr t +dow ning +bi ans +m ü +fle ck +fli pped +s ly +brilli ance +ri m +k um +bubb a +ko i +knit ted +sor g +ma is +ðŁĮ ² +ti ss +su stain +sen su +ak han +zi est +exam ines +chardon nay +user name +short list +re bs +on o +dar ing +hard wood +che que +righte ous +light ening +dir k +shra dd +du ra +down stairs +sh al +ami gos +ru ff +s law +ri es +red nation +man us +ðŁĩ§ ðŁĩ· +distin ction +u bun +dur an +mi gra +thi ans +la ver +domest ic +k x +jaz zy +justi fy +belong ing +insul ation +color stv +drun ken +chann eling +qu and +xi ii +enligh ten +kan o +fati ma +teen choice +terri fied +p ba +as ley +met museum +dun e +pack er +ki o +ðŁĴľ ðŁĴľ +bo iler +fas cism +ar mored +back grounds +in mates +embarra ssed +defin es +th d +we go +silic one +lo on +el ding +bor rowed +he mp +ak sh +kaw asaki +br y +de af +kill er +dispo sal +ðŁĩ ° +glaston bury +un covered +o xide +po ff +d ant +k j +ku ro +dri zzle +peop les +fe e +pro pri +dd lovato +pi ggy +ot is +aller gies +u bis +pengu in +ser a +vi z +prosp erous +ici des +tornad oes +sene gal +web cast +sto red +enchan ted +bb cone +bay area +entrepreneu rial +rednation rising +experim enting +ang an +lot to +they re +por e +er p +seren e +east wood +bro kers +bar ge +stal lion +timber lake +tailo red +dy stop +b ate +lat ors +di xit +bran son +dynam o +ky lie +shame ful +bt wn +spring time +mix ture +s ounded +lu ton +dad es +mal a +op ra +en ic +rahulg andhi +se wer +~~ ~~ +ky u +nor theastern +ca er +bc u +nir vana +kitch ens +ous y +al m +river dale +hid den +fl int +sp d +pat rons +katy perry +au gh +exhib itions +sm c +shu ts +at ore +da in +some thing +ber th +bo g +por ter +gen to +con cussion +ang lic +ro we +gr illing +scar lett +master ing +mor nin +comm ented +si me +si zing +christ y +ce os +st m +at ry +tari ffs +vac ation +pre judice +p su +paren tal +far age +can a +cap com +koso vo +you re +men stru +stal in +grape fruit +br an +che sa +dav en +exc el +!! 
) +๠Į +distribu tor +ce a +bride sma +millenni al +wa in +ob serving +mis ery +plan etary +expo sing +bra ised +comp ton +don gha +q l +spring steen +th ul +syl ve +cab o +pal ad +niel sen +gaz ing +ba ja +r oud +orchi ds +johan nesburg +se man +d ji +oper ative +affe ction +eclec tic +at c +mut ant +aw x +nic e +mel bourne +indu lg +tu lip +dias pora +wel p +big gie +mississ auga +retri ever +or an +tam my +c ta +hipp o +seas oned +ger mans +eng v +marvell ous +im f +rela ys +mon tan +maur iti +me ister +as surance +reig ning +su fficient +han e +no thing +pos se +nav y +in love +brigh ton +en qu +ch ung +sweat y +es c +cal ed +man s +nicar agua +sl ices +mo cha +washington post +bb n +dam ned +grow ing +en burg +lo an +me s +wh oops +believ ers +spi el +vo daf +l at +s led +cricke ter +brown e +golf ers +bar ra +wat chers +lu igi +sw amy +mom s +pit ched +san tor +cr s +si re +sc amp +bo de +ste war +jon ny +ent ity +pac qui +mind ful +min india +bear ded +temp t +scorpi on +eat on +authori zed +ar to +s vp +op athy +cch ini +house music +disney world +âĢĶ @ +pro pose +di y +expen se +ten g +pupp ets +sm el +d aca +per ry +fin n +boo sting +lefto vers +cou gs +satell ites +man y +az e +g ong +fi e +metho do +fer ries +ðŁ¤Ķ ðŁ¤Ķ +explore rs +load er +attrac ted +il ton +godd amn +pi azza +doc tr +sav ing +paragra ph +visu alization +may ors +work flow +ack les +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ +ठ¸ +twer k +clu t +lo ver +te ases +si an +o te +deter ior +accor d +l fw +swar ovski +nat al +tra ps +k ina +analy ze +laye red +bever ages +un it +ran som +pe shaw +dest ined +astro logy +si pping +miley cyrus +cam ino +marshmal low +bli ss +out back +fa q +int oler +humil ity +po ppin +hallo ween +mon tene +op hy +nu n +tattoo ed +a as +ðŁĮ ³ +dale y +qual ity +du sa +fisher men +swi f +ter rac +st au +le in +trol ling +ship ment +garden er +march madness +head band +gr t +bur nett +w and +!!!! !!!!! 
+gh e +du x +hu d +war ner +ðŁĩ ¦ +ex ile +rescu e +rat a +d han +duc ati +dro wn +bl ends +spi e +alli gator +simul taneously +broo ke +u ke +k har +comm union +ri ka +ford fc +chin atown +you rown +me y +can al +syste matic +de pri +ox ford +an il +w ut +equ ation +be z +fle ur +the good +lang ley +ad ity +ed ith +al fie +о ÑĤ +en cry +br ill +ex emp +ce sar +mb ling +ab ri +sc icom +j ing +school ing +mi ka +mechan isms +impromp tu +rhe a +moo re +crime a +be sto +wri ght +el ders +ro ds +kam al +folkl ore +be et +mini on +reli eve +thr o +team usa +pas cal +made with +boli via +itt i +free bies +desi red +best selling +l iness +la den +ke ane +mi sts +hipp ie +atta chment +@ / +se w +flan agan +âĿĹ ï¸ı +supre mac +stl cards +si as +q u +rh ys +ste ep +val leys +v w +pav ing +disp at +al ison +por te +id u +new sc +soc ket +mo s +co star +re vo +prote ins +stanley cup +m cal +ear ring +se cs +mc lean +cap ric +nick elo +ad en +v c +shou se +adap tive +maxi mize +entertain er +pro se +gri ffi +six teen +lam ar +mi rage +saudi arabia +awe ather +ru st +in filtr +fashion week +ðŁĺĬðŁĺĬ ðŁĺĬ +selec tive +bubb le +a den +fen nel +deci sive +m ta +mock ing +mb les +st amp +mu le +bernar do +gr in +po tt +j ingle +vet tel +colom bian +cam o +motivation monday +ba han +p ly +dh ary +k ami +x men +sleep er +gar a +my sti +confi dential +conflic ts +p neu +ce s +insur tech +clean se +me rely +va is +tu x +the great +shar on +ma j +hol a +eco systems +aj ay +aa j +hu sh +har mon +backto school +wiki leaks +reflec ted +ðŁĺ ĵ +commemor ating +ac et +buck ingham +messi ah +tu ous +hor net +to be +d q +he ine +mi g +pl ate +nichol son +sp ie +cumber land +nor mal +pho bia +happy halloween +city fc +mc el +gilli an +ke to +lu de +de mise +su ga +str ate +mcgr ath +visit scotland +foo led +cb r +gc se +col ori +po td +missuni verse +fin ances +ma poli +for ks +Ø ´ +cann on +medic inal +ðŁĹ ĵ +kh o +wre ck +pan to +bag el +gu ll +syndic ate +ic y +pr c +ki en +zi ka +ti sh +pe ta +c co +li za +ch ut +ex traction +el g +gl i +fu eled +pos it +respec tively +leice ster +br ink +vulner ability +im ported +e sha +ðŁ¦ ħ +r ural +re ll +gam ing +atlan tic +aband on +no ah +re solved +pro state +aller gic +ps d +âĺ ¹ +dun geon +fang irl +illumin ated +m hs +white sox +d ently +ck o +endor se +over ly +dazz ling +prior iti +night life +ut il +be have +flam en +east bound +ðŁĴ Ł +ilove you +gov uk +mozam bique +alle gi +dr i +testim onial +ath s +ì§ Ģ +mm y +shab by +pro secco +friend ships +cal am +dam ages +off set +jura ssic +jun o +arre ll +ðŁĴ © +interven tions +dare devil +car ver +run away +ran e +truste es +ha ute +dep ths +ðŁİ Ń +me in +sacrific es +con cier +ne sting +i zzy +me tam +ilove my +ur ine +du lu +mal hotra +ve ins +night ly +co at +an di +he witt +lon el +ci ble +wr ite +jen nie +sant ac +ĸ ï¸ı +str ato +singapo re +sop rano +kri sten +cheer ful +flee twood +fa iri +m eli +wa st +tur nt +sfor sale +sc rolling +angel ina +ren dition +jeric ho +nick y +or b +fla vo +patri ot +ash eville +sick ness +re fund +aggre ssion +b pl +ãĥ ĥ +elu sive +thi story +hang er +bu ffs +vil las +at kinson +sp h +ja it +decl ined +wo k +supre macy +oo tball +ey ang +ðŁİ ĵ +s ford +ath i +consu me +road ster +e so +u pro +reci pe +au f +uc i +ar on +oo oh +cs go +re ich +mc d +min ute +ladi es +pun k +rut gers +mee k +ariz on +ta j +land lord +de gra +autu mn +lyn x +us f +b hi +fairy tale +dongha e +bet sy +explo ded +chen nai +op a +pro tag +br ant +ðŁĵ °: +g f +pal li +ðŁı¼ âĢįâĻĢï¸ı +su t +ill ini +colum nist 
+shir tless +de centr +sear ched +ec or +bu ggy +s ack +ðŁĺĤ ðŁĺŃ +de t +ther i +or naments +bring back +to v +quarter finals +ic he +con stra +gi er +buchan an +vi x +kay aking +mu stread +swal low +mel b +sc af +op al +may oral +har at +ðŁ¦ ĭ +schedu les +id f +ha gue +ro z +a ah +d mc +du plic +ca che +orph an +frac ture +rec on +ch av +bun nies +al ain +mustaf a +ðŁİ Ļ +vac ations +dynam ite +tex ted +broad caster +ðŁĴ £ +ste amed +rock er +di etary +luxury travel +inaugur ated +sa wards +vaugh n +lincoln shire +click ed +kra ja +f anc +remo ves +layo ffs +mc far +bre eds +win nie +jon ghyun +incen tive +vari ations +pat ton +atur day +persist ent +pr un +pi ers +dal es +æ ĸ +breast feeding +r ance +ta wa +Ĥ âĸ +mur doch +cap tive +thi stle +nic a +commod ity +cou ldnt +board walk +graci ous +practiti oners +n gc +scru m +ner o +camoufla ge +col on +he i +phys icist +saturday morning +ten er +si won +colum ns +bru ne +y vr +ba ir +reti res +hal am +cab er +shaz am +min u +cas cade +milk shake +gri d +d ren +vin cent +so dium +plat ter +cheer leader +chen ko +y ak +elimin ated +ty po +y man +re think +âĿ Ĺ +ts ville +bernardo kath +ex tr +ðŁĺģ ðŁĺģðŁĺģ +ta o +re per +mo ths +em powered +c iting +transpor ted +mon ks +san at +cle ars +bachelore tte +camp bell +racha el +har le +hand ler +climb s +inter ference +rele ase +sh and +r bs +hr h +ãģ ª +val le +r é +sli me +w akes +chu bby +slo an +el ves +ath en +attor neys +micro scope +ston er +sc aling +o be +c out +se man +mid week +bal sam +ðŁĺį âĿ¤ +ti ful +v ish +lo tta +ri pping +re mn +ti re +le ap +ha vent +la by +hi mach +whisp ers +we in +ðŁİ ¸ +wild flowers +se le +u cc +li ability +az ine +sw ings +k ya +ta ir +re main +e do +flo ps +poc ket +grand ad +exam iner +gr is +ffe ct +ðŁijĬ ðŁı» +stud ded +heart beat +de acon +firm ly +infec tious +ste f +out lines +le asing +cla ws +sen se +tab s +hoo t +mo sul +spa wn +co a +hog warts +ve in +alban ia +manu el +b ino +vaux hall +scot land +go bucks +mat ty +phy sio +tor ino +const able +investig ated +s lower +mistak en +bay er +wild fires +vo ic +x on +time to +chas sis +bar ric +pi on +bald head +woo k +regi str +dra fts +b hs +li gue +l ick +staf fordshire +baf ta +dar ry +je anne +ven ding +cor p +⼠³ï¸ı +kid dos +fen way +ca o +west bound +ðŁĺ Ļ +dv r +quick er +bla h +goo die +ðŁĴĭ ðŁĴĭ +vo x +esp er +fac ade +cor relation +red bull +rou p +decl ining +chi ve +mc gee +tur o +in der +f eller +fu g +il ysm +mar di +peshaw ar +ki eran +ine ma +meat balls +pe ck +depre ssing +sen sing +gi z +dd ington +spring watch +ro aming +yellow stone +horse shoe +am man +week day +ol or +ðŁ¥ ° +boo sts +spr int +scar ves +je e +bee tro +cl an +all the +ìĦ ¸ë +enlighten ment +ado be +re generation +? @ +cont ag +yach ts +to u +mor a +en voy +r ani +go li +dhanush kraja +wood working +streng ths +se di +disc s +ar ina +sc on +lit e +ano ther +ðŁ¥ Ĭ +ye men +gu ern +sav vy +lo yed +biom ed +heart break +comra des +milli e +pat ch +un f +jar vis +bl aming +commemor ation +ge y +å ¥ +cardio vascular +alig ned +docu ment +. ? +aesthe tics +em u +the irs +le h +ps ic +si f +pl ateau +ex pend +domin ating +rob es +mauriti us +excep tionally +hom er +discover ies +bra un +ten nant +insul in +ðŁİ ® +car bs +te as +? !" +zi e +franco is +brow sing +th ol +cla rence +hel per +ob tained +cas sie +le es +! 
, +pome gran +hu bs +presti ge +] [ +mach er +bott led +pun ch +pi pe +o ch +gall ons +deliver ies +u ra +un day +mon de +depic ts +re gency +outra geous +khal ed +car o +he arti +za g +develop mental +over coming +stati stical +flavo red +for ds +cre atives +lau rence +di as +sun screen +in ked +pre acher +n ul +impac ting +auti stic +âļ Ķï¸ı +o ss +pel icans +cele ste +v b +ru mp +mc gra +fair fax +hu mor +bbc news +row ling +cal der +seam less +ag ne +p ti +mix ed +t shirts +mer ci +b tob +women instem +genealo gy +pre ven +l our +cra dle +gi use +Ð ¾ +chron o +fair ness +chocol ate +tor y +as da +pre scott +stret ched +al man +u il +re charge +in tre +ob st +hosp ital +hay ward +teneri fe +fried man +vap ing +confe ssions +ye ah +bal li +luck now +cor pse +sculp tor +amp ton +t pp +indic ates +sur plus +tru man +ðĿ Ļ +sin ha +in vo +sovere ign +ke v +establi shing +engra ved +assu ming +ðŁı ģ +sou za +fab i +ton ed +oun ge +del oit +dow ney +no ble +om or +car tridge +ðŁı IJ +u hur +hol loway +succe sses +r sa +âĦ ¢ +ma zz +tw d +disc ourse +. < +y at +satis fy +com pri +ठ¹ +graph ite +disser tation +ar ter +í Ķ +b ally +zom bi +ly ons +a ic +u bc +pra da +e il +da x +cla i +grand daughter +extravag anza +chall enge +ðŁ¤ ŀ +po ver +primar ily +dad dy +man a +bi kers +inqui ries +da un +fel ine +gener ative +he f +benef iting +lind sey +pol ka +demonstr ated +al le +rand y +o su +low key +weir dest +red bull +our y +n ous +wood stock +cre denti +nic er +g ado +aly ss +ap h +prepa redness +station ary +incorpor ated +dy er +sarato ga +cele sti +: " +antibio tics +or gs +inde fin +ap ron +и Ð +fif teen +no f +ðŁĶ Ŀ +ph x +te ga +m z +organiz ational +on air +band ung +pleas ures +mor i +secre tari +rac coon +ca shi +pil ates +k on +geof frey +la o +kam p +depart ments +back packing +an am +à « +crack down +aun ty +on do +li zzie +ph ers +cu n +ðŁĩ ± +k pop +pu t +inten tional +connol ly +bar clays +hs fb +swin don +u ku +s ally +a int +âľ ħ +pen ang +up lifting +epile psy +inter ro +bun gal +go ku +blue berries +ठ¦ +u ssia +sil ky +mou red +i stic +bri efs +me ats +go b +ch aser +state wide +pra sad +gl itch +ar in +ban ff +memb er +ðŁĺŃ âĿ¤ï¸ı +lo ving +hall a +ภ¡ +smo kers +yak u +scicom m +physi o +sw ol +lem ons +gel ato +ch ool +capit als +ki stan +ti ghts +spi kes +trav ellers +ik lan +commissi oning +ar ine +emabiggest fans +empha sis +front line +pad dock +destruc tive +ba ha +l inger +je wish +shet land +mc gin +mon key +ko z +s one +raj ini +te h +y en +c vs +masqu er +gir ly +we sle +was nt +bro dy +termin ator +gil le +mag gi +bir die +jeopar dy +cu bic +vm ware +intric ate +an up +to pia +east on +sab res +investig ates +bu sting +bil ingual +valent ino +in format +fer re +advent ur +hydr ate +for sy +az iz +san to +e de +whist ler +continu ously +d ham +un used +ji had +addic tive +vi dy +do b +i do +fi ed +ni versary +n one +fu er +ðŁĺį ðŁĺĺ +coven ant +prin table +immac ulate +o em +cl t +serv ants +consu med +un released +sc um +pack aged +me re +ìĦ¸ë ¸ +to by +ta f +spo ons +me al +f ball +fair field +jan et +silver stone +dart mouth +follow me +voy ager +kom bat +anni ver +ene w +mag dal +ho ve +sa th +grizz ly +car di +gart ner +sand y +kan ye +post ure +po ign +im pulse +radio logy +horiz ons +si am +aish war += => +no che +tr is +el yn +com me +du i +ce c +councill ors +cudd ling +creep ing +loc ke +manag es +trans ferred +ne cks +di er +dan o +v ick +lun ches +d he +en sures +cri ss +ul ster +bann on +cont enders +sp am +sweet ness +med al +hon duras +arc tic +ultra 
sound +in fr +disco vers +ei ffel +ca sters +ru ben +du st +awe ed +atri um +lest we +se ared +ðŁĵº : +ty ne +ex changes +little mix +l le +astron auts +hersh ey +work day +kno b +so v +re signs +today show +der man +an th +af c +ta ster +sw oo +sa eed +per ing +narrow ly +rn li +best buy +panas onic +obst acle +farmer s +ðŁİ Ļ +pa wan +ki est +ang ers +absur d +oh my +sin o +pist achi +sp ice +giu li +prime time +ko w +k ens +ex agger +! ?! +u ba +midd les +ju dd +e jec +slam med +pen sions +of a +re create +b hp +xx l +liver pool +thre sh +pur ity +ni eu +hol ics +wr ath +ra do +gli o +am ma +dile mma +cr u +lets go +.... @ +âĿ ĵ +sugge sting +tru mps +hor us +f v +ic om +refer ring +predic tive +tar ts +ge tte +so ck +glo ssy +pin ky +al ec +thy me +ou ra +thero ad +pe tr +cr am +p fi +dv n +me ier +incen tives +tun nels +mobi l +rec ap +extra s +upri ght +rev amp +per severance +, - +ot p +mir ror +ar wx +ger ry +ma her +g or +hom epage +am is +ag ra +made le +best friend +sirius xm +bun dles +admir ing +t dsb +ðŁį ģ +ch as +slow ing +ro h +wall papers +âĢ¦ / +tek ken +gang s +tal a +lind say +shou l +line backer +tool kit +ur anium +caly p +ab rams +mat thi +ðŁı ¿ +hon ourable +da yo +ver sail +tan k +st c +fr itz +spl end +pat ag +anno yed +on day +devast ated +chattanoo ga +national ism +mas sey +jen n +tail or +dev gn +org ans +zu cchini +on fox +sat ire +wex ford +dis grace +no to +vol ta +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı +à ¶ +home owners +poin ter +m cr +au sten +day sto +mo ons +pal ma +gra zing +e so +influen cers +shahid kapoor +compli ant +measure ments +develop s +y d +par l +p vt +rand olph +tor tured +ger ald +eli as +deepi kap +war mup +hick ory +g ap +co ffin +am our +re neg +moun ting +seven s +ig le +hi er +dec ad +tri ght +esc apes +wer ner +t fl +ful filled +ni ger +sour dough +re aper +choo ses +spin ner +week nd +fil tered +sh uk +kat i +old ham +open source +kh anna +at elier +conne c +opho bic +gla s +complic ations +ar son +counc ils +sm ol +as sy +lur king +ling ui +han ks +e in +Ù ħ +ru gs +n guyen +nou veau +men ace +le v +alad din +ru ining +round about +k m +con or +shoo ps +may day +traum atic +prab has +ka iser +k ita +rou ter +pe dro +re tar +stun ner +spani sh +distur bed +acade my +e learning +wit ty +sen g +fer al +av y +sta b +ke aton +ur du +ko to +hu i +coo ke +ari an +the personal +u ma +se ap +a sting +rhetor ic +hand writing +munici pality +consor tium +ðŁIJ Ł +glasgo w +ra ya +eli za +polym er +bro th +prac ti +correspon dent +addic ts +gay le +ail ing +o fe +p li +hear tw +st itch +sight ings +prie sts +sam o +slo th +good wood +roc co +sab c +summ it +l ace +pres ley +itt en +cin cy +thepersonal network +s week +pe gas +af con +regi stry +ci m +le th +dic ap +cand ice +flu ent +sm ack +pede stri +al oud +car ac +priyan kach +p gh +ir ons +dol ce +lat via +dece ased +thero ck +cla p +cen e +fo am +morris sey +gre t +essenti ally +com cast +be agle +argu es +ing ed +- âĢ¦ +sa g +ha san +ðŁĻ Ĩ +ðŁį ° +nh ra +kann ada +indic ators +on er +bri xton +at as +screen play +sor ority +sha heed +he em +class mates +tain ment +es i +breast cancer +zucker berg +aur or +en cia +ref ers +kae per +vor tex +com part +lym ph +photograph ing +ste ff +rest ling +par sley +mom ento +th man +lac king +du tt +ocu lus +fin o +fren zy +ra sc +der n +dis missed +noo k +met gala +sh ill +rapha el +maver icks +exhib its +eag erly +c pa +amen ities +. 
âłĢ +exo dus +ern st +lit a +deal t +womens march +i ain +score board +campe ones +c en +ti ki +garri son +fidel ity +bra g +road map +psy chop +lo e +ble u +ðŁijĬ ðŁı¼ +sau vi +spr inger +temp tation +ru dolph +ac ura +wic z +parach ute +stro l +len ny +zi k +dom s +nb af +al pac +vivi an +ro ve +pre et +perpe tu +sna ke +air soft +infl atable +prin ces +ati e +ffe y +pati ent +m ire +chel le +sl ack +groo vy +# : +up loading +!!!!!!!! !!!!!!!! +siem ens +provi sion +v fx +need y +f ats +to poli +bhu tto +sa thletics +alu ms +t winning +south western +adop ting +last night +man ne +la ga +tw ell +ac ia +-- -- +eye wear +hur ley +fle e +sa ch +pe cker +cost ly +is k +cr ates +polic y +ero sion +in go +wer k +ðŁIJ į +torto ise +therap ies +inter net +chihuahu a +ri ps +fre i +ed or +tai ji +t fc +do d +demp sey +christ in +chen g +hi ps +gra eme +com passionate +cavali ers +histor ic +soul ful +crimin al +ja c +vin ci +expi red +sur at +turi smo +k ona +se aweed +ber ts +le ica +expre ssing +a al +wor t +break fast +her ring +am used +rhu barb +mar tian +cospla yer +y ash +stri al +ra ul +refer ral +dw ts +j w +ad ler +cur tains +gu r +val ence +tyr one +sw fc +coach ed +re born +diabe tic +cho ke +nor folk +investig ative +ðŁĴ¯ ðŁĴ¯ +z id +v mas +phi e +objec tives +âľ ĭ +over due +di vers +mat su +ðŁİŁ ï¸ı +casu alties +ภ§ +al k +stand ardi +re alist +arti facts +pand or +ke x +in vin +( !) +ine y +par aly +mr t +fay e +the voice +on ga +de ed +skin ner +az wx +speci men +priyankach opra +nu evo +bar kley +toulou se +resu mes +football ers +cit i +fe tch +è re +lestwe forget +ðŁĻ ĭ +ch unk +dri fting +manipul ation +equ als +pu tt +ky ungsoo +âĿ¤ï¸ı # +ela stic +par ano +fo y +do ping +cin cy +ss ler +interrup ted +al ay +ado res +ame thy +con voy +ãĢ ı +Ĭ ãģ +black list +gener als +sa chin +bru shed +oun ces +non stop +illi ams +bt sarmy +u av +ru ff +bur ma +bi k +defen ce +schul tz +bo asts +lonel iness +go re +trans forms +alum na +@ @ +ra ppers +ne hru +car o +himalay an +wearab les +ge h +pepper mint +re development +flam ingo +cos by +big baldhead +ag ri +bare foot +sco pes +re gram +gh ana +ðŁİ « +i heart +sa die +carri e +microbi al +ku ala +sk ater +quer que +âĻ © +gen res +reas oning +ch ased +as o +sli pped +en can +vam os +ker s +ad verse +mo il +commod ities +with you +sil ent +hy pe +an de +am ination +whi spe +lit z +âļ½ï¸ı âļ½ï¸ı +ri ff +pp y +lam bs +gan esh +ab sent +regu lator +marse ille +en roll +par cel +wa p +by rd +ðŁĩ Ń +tu ber +country music +par l +contro llers +responsi bilities +we y +ch ate +montene gro +chic o +mil an +l ms +tra inees +appropri ately +un certain +popp ies +ed sheeran +nutr itious +gar o +deut sch +awe some +ãĥ ¼ +comfor tably +land marks +et i +re usable +daniel le +ro sal +co les +just ic +c cs +f anny +ni m +mc u +clin ch +at ene +mer ge +im db +ang lo +uc cino +pan ini +an not +bur berry +feat ure +predic ting +fashioni sta +s ask +imag inary +mm o +south sudan +spe ar +hu bble +jo inthe +coyo tes +sli go +ko dak +sit com +polaro id +roo ted +corru p +ðŁĻĮ ðŁĻĮ +bris ban +at z +ah l +re my +tal ent +aval on +ra da +pau line +locom otive +go ons +ne mo +maser ati +ic u +stu tt +histor ically +sm b +pres by +avo id +so oners +rhine stone +w ad +ri sing +tro t +mo des +reg ent +optimi ze +re ece +sm u +ver ti +newyork city +cor tez +ra c +in case +sin c +fiel ding +e tta +tiff any +al monds +sad dle +k rat +mat ter +g low +star ving +gl o +cra ppy +sl ur +st d +monit ors +recei pt +maymay entrata +mc il +un is +rain bows +cal dwell +pacqui 
ao +j op +a fe +hoo k +es sen +wiz ard +medi an +fla ws +com s +âĿ Ħ +ing h +ha ynes +anton io +tem plates +ou ter +na w +cardi gan +bel grade +ðŁĴ ī +hom o +a ise +ro pes +no ve +what you +tri gge +concep tion +ad ukone +na di +fri ars +sw er +adju sted +hot line +san ity +kau r +down loading +c gi +ten or +eth nic +app alach +ภ¸ +pa g +gol ds +on set +investig ator +car tel +peace fully +jarre tt +cat alan +poli o +n um +fru stration +dhar ma +my life +âľĮ ðŁı» +aber deen +mu sa +bin der +spark ly +fle eing +instin ct +co ping +domin ance +ill ers +er a +u conn +lo oms +living ston +gal i +he s +c ma +bel a +se ley +mon k +la ch +mar x + ´ +m erica +woman in +es sex +ra ina +jim i +nep tune +z ack +chine se +mart ins +chand elier +her n +with us +ear l +asph alt +modu les +st p +ul la +psychi atric +mile age +captiv ating +si der +men to +mor t +tran ce +tal bot +ab by +ì ĥ +âľĮ ðŁı¼ +j ak +daw n +turn up +scre wed +fe ds +blue print +ðŁĴĸ ðŁĴĸ +har sh +er os +insom nia +ban kers +ta emin +mis conduct +hu mber +gi di +edu ardo +con a +musc ular +consu ming +ra sh +don nie +di pped +col lie +samu el +melt down +ðŁĺįðŁĺį ðŁĺį +me z +exam ining +schwar tz +pri stine +ðŁIJ Ŀ +ve it +ful filling +an esthe +gue sses +dra ft +som me +soli d +pati onal +ho ped +evolu tionary +all er +enter tained +sli ps +lud wig +conclu des +sen sible +bon net +cra ze +tra s +haz ards +const antine +ed ics +star trek +to c +occu pational +in cheon +deepikap adukone +pizz as +new comer +de part +oppre ssion +ebon y +foss ils +tro jan +el en +ste aks +k hou +positi oning +ug by +red cross +ak h +dol ce +us mnt +pp en +dil ig +ma vs +call er +cost ello +⼠Ħ +dy n +thing s +rhin os +a xi +sar kar +con vocation +att ers +ss ss +fun gus +eu gen +russ o +squ at +w sb +eli on +william sburg +s off +defici ency +be arer +o kin +key stone +t wain +cal ming +break able +wa res +horser acing +com bs +bun ting +u it +t land +ðŁĴĻðŁĴĻ ðŁĴĻ +ga stron +sab ot +ick ers +commissi oners +sen ate +ii ot +ath ena +nit rogen +an tony +ero tic +di alo +mis sou +hypo cr +âľ Ī +kaeper nick +can v +d roo +clevel and +o sh +mon sta +stefan o +^ ) +sh ul +po ison +ha e +commerci als +ma ul +nit ro +co worker +alo e +vap or +t ents +russi an +qu id +question able +mid get +po ker +girl friends +sin the +erit rea +ten ure +depos its +buc keyes +spot ter +theod ore +trin ity +joaqu in +u cci +follow the +caf c +mp a +ðŁIJ » +plo tting +dom ino +ta ek +sion ally +dicap rio +pa p +car mel +ig er +bt cc +beth le +www bigbaldhead +foo die +bagh dad +mason ry +off ended +à · +ภģ +sc ro +vers es +ori ent +ar ches +pi yu +know your +gre e +ta kers +gu ard +dish on +bucket list +bha fc +war dly +ðŁİīðŁİ Ĭ +leigh ton +pe w +stra y +assaul ted +in hal +ly fe +amar keting +l x +kat z +ubun tu +me o +carto onist +turno ver +mi z +dis like +mul len +mo f +bl and +hi des +emer ges +chori zo +truste e +ma hog +lan sing +paralym pic +fa int +fa una +ch al +sn ar +cat h +bent on +cast illo +sli ppery +apric ot +oec d +bar o +l z +he ming +clow ns +co workers +peru vian +commu ters +y ell +ðŁļ ´ +under ing +v j +tt p +fli pk +w ana +soc ent +Ĥâĸ Ĥâĸ +ठĤ +oo sa +jag ger +di sm +e less +d ham +cali f +a official +ec lip +harro gate +gra pp +com rade +n tr +concentr ate +thi ghs +bit coin +bel arus +ë ĵ +end uring +now watching +industri al +pi p +ar on +ar at + ® +whit by +oooo ooo +sa ree +tic als +mis leading +yo on +year s +sle igh +roman ian +sciss ors +vam pires +ac up +ab ba +th weeksary +cent ri +fl ye +u o +c bi +bu ena +sin d +mar ino +bur r +re 
building +ठ² +anniver saire +ac ca +ðŁĴĢ ðŁĴĢ +gett ing +tu lips +wolf pack +âľį ï¸ı +more than +ta kin +ðŁ¤ĺ ðŁı» +u be +mon ic +dou bts +mo wer +co balt +don ne +specul ation +argu ably +kak u +htt ps +prosecu tion +din ah +stam atic +disclo sed +bever ly +fl wx +cra bs +extraordin aire +war mest +imper i +o logists +trac es +par c +lake side +am r +ter i +hour ly +domin ation +ar row +shrews bury +ance stry +wr angler +trigge red +pen sac +roo ster +survi ves +a on +bo ko +val or +love is +la g +pe y +fo cal +out laws +bl anc +artic ho +wit s +marsh all +die go +support small +u ca +sa h +je et +syn ago +gover ning +ðŁĴ ¬ +sal ads +cre ate +miri am +cen sored +ami de +no u +z eta +allegi ance +* ) +bl m +ric an +pa stors +oly mpus +blo c +whir l +star ry +pr one +y k +p ne +congratul ating +be v +so ber +love island +sa ir +an ing +tutor ials +q e +lun d +in ist +cle ver +taxpay er +ali z +wren ch +dd ling +cap ri +h pa +ðŁı» âĢįâĻĤï¸ı +na j +o j +futuri stic +jelly fish +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ +cel ery +plan k +fil a +ne me +un healthy +lec tions +ðŁ§ ¡ +rit chie +n ws +mi kha +wonder woman +âĢ İ +hip stamatic +ka g +ðŁĴľðŁĴľ ðŁĴľ +poul try +mo w +wor ds +lo ff +ðŁ¤£ ðŁ¤£ +relat able +re mixes +keny atta +ke m +re signed +fo d +stra igh +j lo +hu tch +box ers +colle en +mag s +instruc tional +ko l +attrac ts +pra g +account ant +go ggles +br u +th ole +mar row +leu ke +oc to +pon ds +bubb ly +he ist +ìĹ ij +im p +a har +ha unt +hall mark +psy ch +kkkk kkkk +col umb +jump suit +cost co +si delines +ag gies +over turned +ni b +key chain +fu k +f af +mi am +assist ants +cy cled +ri der +dam mit +red wings +mag es +kin s +ì Ĥ +ho d +son t +carol ine +" ' +cu le +bra id +fel ony +ar ities +ruther ford +depic tion +isab elle +ro ach +k day +fifth harmony +em y +li gam +bari sta +albu querque +gro ss +ðŁį º +oo ks +ðŁij ¼ +dun can +try in +jag s +g ould +li tho +âģ £ +а Ð +sam my +tun g +cas ser +apo lo +aaaa a +man g +as ics +sh en +p ye +tur bul +ss p +saint sfc +on lin +n anny +he ster +do z +ภĶ +th read +ren ts +kh and +ðŁĴª ðŁı½ +un conditional +rob son +car re +ph on +sacrific ed + £ +auto s +par ker +oc a +log in +kee gan +hard cover +dough nuts +ðŁĮ İ +spit fire +refresh ments +saskat oon +commod ore +j f +rub ber +halam adrid +child care +stra da +io m +ri k +dak ar +ther mom +cro pped +gar u +ali k +ven i +i ft +si ka +ritu als +z ul +e ch + © +su dan +l land +i me +do cker +ì ¤ +fe ared +fa o +wal ter +no g +mutu als +l h +ali gn +mon ia +concep tart +ðŁĻı ðŁı¼ +sco e +compet ence +sw ine +ly me +laun ch +green er +abstract art +inqu is +gran ada +ga elic +flu ff +d backs +grave yard +ba be +acade mic +adventur ous +joh ann +~ ! 
+bi bi +| # +pl ings +gett y +as b +âĿ¤ï¸ı @ +staf f +religi ons +bang or +world bookday +me gh +de vin +ash ore +meri dian +gi thub +qui z +all stars +be stest +ir resi +ack er +do te +war rington +pol ly +newor leans +cr ou +wi gs +che y +smithson ian +la sag +de tour +bor is +stra ps +mari ah +inten tionally +ko h +ðŁį ¸ +ssi an +mar issa +cor al +episcop al +casu alty +tom o +supply chain +sam p +on go +ro o +cavi ar +p fw +clau dio +buff alo +s ations +mat ty +snap back +l ds +al arms +mat te +âĺ Ķï¸ı +conditi oner +d ors +he x +fi zz +a stri +sus sex +secur ity +qa eda +all star +cocac ola +as one +cl icks +sc ans +mu te +he avier +ðŁİ § +âĺ ŀ +lv l +book boost +youtu be +fla shes +f jor +c su +explo de +do dge +cair n +gonz ales +th ill +pel le +hart ley +renew able +re tin +e stre +costar ica +shipy ard +nc fc +pri ya +a ghan +an ath +plu gin +co rey +re bound +or u +kat rin +hor mone +gi m +mahin dra +s sus +park land +har per +fanta stic +infer no +ep ilo +wrest ling +fe ct +c it +ac oun +to ssed +monu mental +char tered +bu st +pe tra +âĮ ļ +wildflower hour +sweat ers +* . +bl er +ate ch +go wan +demo graphic +bra l +suici de +renov ations +vu el +sin ister +ar mani +miso gy +ph arrell +nap s +un iting +crusad ers +cor gi +insu red +than i +no or +g q +d ada +bicy cles +snu ggle +sch an +ten berg +ss al +fe mme +bo il +½ ï¸ı +re ap +occur ring +hus sein +divi d +sto ke +sh alom +na ia +o lic +frustr ating +Ù ĩ +ig s +gro ver +scen arios +n ds +bru tality +med alli +bu on +sas s +skate boarding +ony x +lor ry +ny u +gau tam +mm ings +gu g +end i +lo thian +comm ando +chal k +ph ora +asse ssing +ti gh +crun chy +ad ay +is l +ci ara +pilgri ms +kam al +p to +brit anni +t ani +sm c +l ure +app store +ab y +golf ing +cl c +fa u +an as +shu tting +regul ated +carn age +scow boys +all enge +c ma +humbold t +rel le +ku mb +her i +refin ery +sound check +d wayne +bos nia +i sp +the alth +anni v +relev ance +my a +bag gage +dre ad +s bc +th ed +bu h +hi jab +lo id +ke w +c te +respec t +lovel ies +cu bes +celebr ate +dir t +sav ers +_ , +gar ment +pulit zer +mas jid +beat port +al arts +encry ption +s ner +ple ads +found ry +sym metry +ru mi +birth place +scallo ps +supp le +pivo tal +t ati +no de +so d +pro xim +tr ics +col dest +bren t +mand u +cla ir +e ach +and alu +hi ddleston +ðŁIJ º +mel ts +v ance +pin n +se ments +scre ened +sa chs +o bl +ic ha +âĺĺ ï¸ı +school ers +heal ed +lo gged +ðŁ¤ĺ ðŁı¼ +ic us +bore dom +b ish +b ffs +tal king +sure sh +hoo kem +de on +de fl +ei leen +ðŁį ķ +women intech +ri sotto +rang er +adverti se +ภģภ+tel ly +la go +dart moor +d ong +sk ates +lo go +un ner +mail box +ma sala +lo oooo +amethy st +che wing +c bb +australi ans +rc mp +game art +# ... +kor n +extre mism +fruit ful +anci ent +pu bg +pol ite +wh it +mur als +m gr +line man +dav ao +ste ms +ten nis +av age +tu pac +gigan tic +hs bc +auto biography +up the +ี à¹Ī +re gal +fig uring +ku l +mis sy +hoo p +gra s +for ums +back lash +abduc ted +p nw +min ic +bu tt +bott oms +at on +ven g +ðŁĮ ı +del aney +prab hu +fan club +over haul +health ye +sy no +aa f +ren amed +kim i +un cle +man city +se u +qu anti +este em +um in +en zo +mel vin +under go +j har +far ah +coast ers +humph rey +mh z +children s +^ . 
+d hi +disrup tive +integr ating +r nb +over sized +a ide +ne au +docu mentation +ðŁijĢ ðŁijĢ +pal o +hear th +ri yad +pun ctu +abc news +secu res +boy band +bir ch +ju co +tra ff +legislat ors +bay a +ãĤ ¯ +no ises +collec ts +s warm +k ner +bi shops +stur geon +snapp ing +mo l +fre aky +chair person +tro p +lyn ch +car cin +art sy +e sto +cha i +fl ur +inv ali +sau sages +im el +j or +fun fact +wit ter +puni shed +ac ons +h ya +re versi +em c +dif fu +z x +sp aw +cla d +d mit +hol land +fre sco +pay roll +ab undant +stu ffing +mor o +c ny +boy cott +wend y +ele ven +pro voc +pil ot +tr x +be ad +climate action +ri on +assi e +ì ĸ +o sm +islam ic +ho ar +good reads +al ici +afterno ons +spoke sman +jo lie +it as +masc ara +âĻ© âĻ« +pre vail +beetro ot +lu jah +k li +dod ger + » +ru le +l n +scre am +ho bart +col bert +r tc +er m +pat ro +quo ting +s live +que st +non fiction +semin ary +prosecu tors +ve st +express way +g ge +nau tical +et f +ðŁİīðŁİ Ĭ +dur ation +cha ired +the film +fab io +she h +can o +ðŁĴª ðŁı» +with draw +! :) +cor pus +phen om +yel p +la wn +ent om +snapp er +but te +pin ball +pro xy +libr e +alle vi +n ada +gabri el +fo wl +eure ka +daph ne +tu nes +pun ched +wh ore +jo g +ren tial +man ners +o pe +wh ufc +gu th +revol t +sne aker +philharmon ic +ho ste +sovereign ty +ðŁĻıðŁĻı ðŁĻı +fish ing +sci art +fe ta +i pp +dump ing +kel own +gir i +dig its +sal u +san jay +twee ters +sp as +col chester +sc ab +ma dd +๠Ħภ+Ä ĩ +ged don +march for +do p +maure en +un plugged +di do +fashion blogger +up a +mex ic +tar y +pol ye +jame son +v t +grin der +mad dy +consult ancy +¬ ë +leagueof legends +ac cents +um ni +jane iro +tu ss +h ens +ampli fier +to shi +pret tier +pre vents +new town +red wood +vant age +ball ard +ar tof +a she +a sion +lac ey +ap at +gro ve +ภĦ +rw and +real tors +tra itor +bed ding +ö r +zi on +fla shing +cam pan +boom er +secretari at +ab ol +liti gation +cont amination +se dly +shred ded +in for +do herty +bench mark +ro che +skate board +sho vel +i zz +to pper +o ster +laby rin +autu m +k ong +hum mus +vi z +tech news +kla us +am using +socialmedi amarketing +i des +cast ell +ste e +underestim ate +cal ab +pa ign +b illing +unanim ously +g mb +fly fishing +hath away +commerci al +colour ing +skul ls +pivo t +te p +tb c +motor way +x press +construc tive +pu k +under lying +kir sten +mani ac +cha o +se ma +chiff on +ðŁijĮ ðŁı» +ver ona +kom o +stan doff +wi ped +c ated +bla ir +wor kin +m sc +bethle hem +swi pe +unexpe c +pe es +pe tri +orig ami +ðŁij ħ +mex ico +flav or +ru dd +cannab is +mar u +ri ddle +wor shi +sil on +sch at +ap se +tang er +bi ous +e er +questi oned +o zar +dan k +angle sey +char an +bak u +compe ten +re pri +bat ter +sa xon +cal ves +leng ths +$ $$ +âŀ ¡ï¸ı +immer sion +ga unt +car ry +cy to +b anda +shu tt +experi ence +el gin +mous se +ta z +ê µ +in correct +en z +b ham +mor on +so ver +ar un +ti pped +la ble +de arly +bau tista +í Ļ +mor tal +woo p +dt la +sho cks +dav os +ðŁĵ Ŀ +swim wear +her man +ðŁijĩ ðŁijĩ +z ir +neglec ted +grac ed +campu ses +av s +ar ora +swach hb +live pd +ac cra +enqui ries +shoo ters +kur t +vancou ver +brad ley +gar da +g ü +ol la +attrac ting +up ton +ne win +lu mia +furn ace +ev ers +e on +sw a +roo kies +a oc +v ss +bris ket +tor ch +yo da +heart land +tac o +ph ony +food bank +ab bey +bab ylon +u y +gre ate +expre sses +d andy +sc apes +survi vor +ron d +e ci +ha vin +ab el +chil dish +tor que +wav y +ur self +kanye west +year of +ale stine +o brien +al fon +sk ag +kore an +anchor age +val eri 
+de w +ðŁİ ¨ +land slide +car ole +christ en +go phers +af i +priyan ka +q q +power of +it te +pc so +tw ol +pr y +intellec tu +guer rero +pi les +wish list +w ren +time table +ë ı +prodi gy +gibb ons +. / +ne ur +anz ac +mur ray +vie st +pla ster +la ir +art gallery +inter continental +g br +bell ator +nam joon +mam mals +am el +y aw +saras ota +cam ar +bud ding +sum mari +aco sta +la sh +ey ou +post graduate +instruc tors +ti g +const ant +were wolf +ic os +cla s +glen n +bud ge +ðŁĻ Ĥ +er ta +sta ins +persecu tion +cumb ri +o ch +syner gy +hu ang +scand in +mid terms +comment ator +regar ded +perpe tual +bo iling +al p +lan ge +sch le +fac eli +twee ta +ri dden +ok toberfest +charlotte sville +ik lan +jo u +ch atham +b sc +ðŁį ¦ +stra uss +mel low +xx xx +happy hour +re actor +ww er +distr action +at orial +ðŁĴª ðŁı¼ +twin peaks +fay ette +a or +ko k +bro om +sy fy +ou se +am ag +Ø · +ubis oft +lu lu +hall mark +stu art +it ya +si deline +venge ance +re lu +sex ism +boun cing +un ites +gu stav +te ssa +stu mp +pro clamation +ima x +divid end +col by +ðŁį İ +play wright +un safe +co smo +ðŁĩ²ðŁĩ ½ +cup board +constitu ents +ang lia +ram page +ðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺį +than ked +take aways +shro ff +de bat +kh ur +conduc ts +format s +à © +port age +graph ers +u ten +pre m +mo ines +condem ns +s ous +l ps +f cs +deal ership +leuke mia +bure au +ski d +guardi ola +ca ster +thir d +avoi ded +en cyclo +c sr +vi xx +analy zing +she ar +dulu th +shap iro +chan ting +stre sses +as be +mil itia +ãĥ ª +col lin +arsen e +sure sh +teach ings +yi xing +sh ill +nu des +sv u +clear water +war ped +pro life +artist son +it u +versail les +galax y +ax el +spring st +cal a +hu hu +sc u +commit ments +exe ter +poign ant +mo tion +conserv atory +row dy +rec alled +mu sk +emb elli +so the +âĺ Ģ +sto pper +sch ild +to pe +el mo +zi el +j om +barn sley +snow den +on tour +jour ney +hills borough +par ole +w ts +mo ving +ag ility +tiv o +ff ers +kindle unlimited +g wen +ann an +ah mad +tex tured +hepat itis +dra m +insi ders +tis sues +ãĥ Ħ +fc barcelona +cr atic +na acp +pe can +f gm +custom ize +concer t +g sm +pe g +p one +justin trudeau +super cars +happy holidays +bu lar +ado x +lap tops +digital health +destin ation +gradu ally +áĥ ¦ +popp y +ss l +inhi bit +star light +of fro +glo omy +x per +hal der +im plants +le to +hass el +a as +un told +en ci +liber ia +or an +con tests +il ah +sma g +sc out +mari anne +cr yo +schedu ling +lo s +kan e +stutt gart +ne se +law rence +da in +pho tom +car ou +ภ£ +g wy +national dogday +roa sting +band camp +kentu cky +stret ches +ke rel +ca she +ãĤ ¸ +sta x +tran si +dog gie +at ric +hal le +ci vic +brow ning +lein ster +cat day +high land +joy ous +in cumb +or lando +ro mo +col ton +del ta +car ab +ro tc +aster oid +goose bumps +mo logy +yo ko +an ds +tomor rows +red carpet +sm p +ca sio +ðŁ¤£ðŁ¤£ ðŁ¤£ +se au +rejec tion +rot ating +bi partisan +th un +mat i +bon i +ol l +ener gye +do it +l j +mother hood +lou ise +neck laces +el ite +ni x +l cs +en v +gl u +le sh +cran k +su sie +m clau +so tu +crow ley +rat ri +use d +bre ton +alfre do +ye o +travel pics +ti pp +elli son +sax ophone +me red +heu ghan +ta ine +f es +vi ro +suppo sedly +i as +dige stive +y le +li zzy +wildlife photography +bri anna +west field +ra ined +am her +ðŁĺĦ ðŁĺĦ +distribu te +bott om +pre serving +oil and +craf ty +de scen +col ling +shakespeare sunday +r wc +ang led +ci an +t ations +mon tage +me yers +france sca +ðŁĮ · +wi ggins +san ford +volunte er +car ra +bar k +vari ed +pl in +am u +kap 
il +rock ers +qu ind +br ane +in mate +ent al +impro vis +michi gan +re tweeting +progre ssing +mercedes benz +smo ker +physi ology +dor ado +watt pad +h wa +sr bachchan +w ga +vol atility +hi re +ac ap +wn ba +hein z +stit ches +kidnapp ing +bur ys +lim b +f itters +thumb nail +ton e +mir and +desi rable +ad dison +tar an +tamil nadu +spec tator +soci ology +amit shah +remo tely +âĻ ¦ +ham id +r ds +g lee +smooth ly +sch ro +er c +lali ga +he als +us f +ni shi +d hu +un il +h le +tro mb +bhu tan +pilip inas +se ung +whit man +te y +min ce +snow boarding +re au +k ker +av o +zach ary +ran veer +ti k +gover n +qu al +beck y +anthropo logy +att en +grocer ies +de bit +war p +sil icon +hawa ii +ðŁĴ ħ +pomegran ate +pe er +orang es +people schoice +end ure +ðŁĴĽ ðŁĴĽ +ãĤ¹ ãĥ +ac ial +a haha +stu k +imper ial +bl ond +pow der +kno ts +vin ce +wood lands +den a +watch in +mat cha +ma hat +galax ies +middles brough +k ö +stre e +resc ues +wal do +lero y +desp ic +real ities +tm nt +ha q +un o +pe c +bolly wood +blin ds +design thinking +he ms +and hra +ab sen +fan s +ste ch +shire hour +bla ine +shak ti +pu rely +ðŁı ı +tra fal +ke ynes +gr ate +to bias +spon taneous +satur ated +caval ry +pri sc +ðŁĺ ij +wh t +pas si +~~ ~ +vir at +patt inson +la o +weir do +sym pathy +ju da +occa sionally +cred ited +stat u +es co +hil ly +esc ape +dischar ge +se er +may nard +sud bury +z lat +or al +we er +encoun tered +sm elling +over sight +ê ¸ +that cher +mack ay +you can +fre ep +freed oms +prophe cy +ho e +ishq ba +dra ke +qu its +pel led +tur k +o vi +wesle yan +new music +leg g +ch eng +h illi +ay y +pan ties +ad versity +ad jac +vaccin ation +ju ke +ga c +exce ed +time sof +sta ining +ep cot +v ital +up ward +bethe sda +apar k +ma hi +camp fire +enchan ting +rha pso +h z +na ver +fa x +vali dation +ac ad +ny r +as ym +coordin ated +depar ted +all ery +var ies +spr ite +chap lin +ss occer +s wat +bre t +relu ct +tunes app +super star +reminis cing +o co +home grown +dough nut +un canny +la pd +thyro id +! 
âĿ¤ï¸ı +botan ic +bre s +sp ade +i ste +echo es +du lil +bur sting +qui ero +ðŁij İ +loy ola +amuse ment +ha ils +sleep y +burgl ary +âľ ı +ro gue +cot land +mo ors +low er +wic ked +ðŁĶ Ĭ +compet iti +argent ine +yvon ne +karti keyan +ili ary +gat sby +precin ct +six ty +na ji +cam s +practiti oner +ðŁĺ³ ðŁĺ³ +pu ne +neg li +juli en +inv aded +cali br +cla m +duba i +mu k +lan tic +produc t +fe dex +ï¸ı : +eu ra +dari us +s ling +virtual reality +home stead +ðŁı³ï¸ıâĢį ðŁĮĪ +pac ed +in ha +pul mon +la zy +premi ering +ma stered +in he +con gregation +ba jo +sport ing +new jersey +hor ny +lma oo +leng thy +du t +yo gh +swe aring +philosoph ical +pap ua +in ski +know les +dy ke +âĢ ² +to ken +mc guire +ri ot +probab ility +mc con +gro s +su mat +c ite +da a +on da +mad dow +che w +board games +spar ked +re claimed +ad hd +ny se +imwith her +equ inox +boo ths +balsam ic +ha zy +dor chester +ag os +se aw +moder ator +seri ea +ander sen +pilgri m +âŃIJ âŃIJ +itch en +hal li +x ton +nathan iel +mun ition +celesti al +ga f +zo om +mark le +pen thouse +cal e +s fa +bar king +tu cket +em ery +cal orie +li que +ad ar +mc nam +tor tilla +wood pecker +mo town +bad ger +ayr shire +scram ble +dd ay +cra ziest +per rie +cho co +cast e +i ot +wre cked +selec ting +uss r +gra ft +pun t +lab ou +ir st +ba ek +Û Į +su ki +que u +ach at +te ster +aug mented +wc vb +sin ks +ðŁĵ » +ra ke +inter ne +be cause +belle vue +une arth +light en +ðŁĺ £ +turn around +labe led +unemp loyed +twitter kurds +le ia +h ye +great er +ðŁIJ İ +tim ed +i red +e tt +limit ations +cab e +s out +bee ch +anni hil +re trac +yo ona +ang er +den nis +supp lying +di z +" ( +sc ur +gun man +su ho +sauvi gnon +ภ¥ +wi ley +land on +choreo graphy +pre historic +ðŁı ĥ +var gas +assess ments +pinn acle +di i +chamber lain +ì Ī +v p +present ers +deut sche +sun shine +sal utes +r one +bu siest +- .- +motor ists +hemi sphere +al wx +ps p +ow a +den ying +cho c +gu tier +han uk +mus kete +jait ley +se wage +t ame +thin kers +shi m +se quo +pap ar +middle east +k wa +ke g +patag onia +no y +bar ça +take off +he a +à ¬ +n sc +g dc +ðŁij Ī +mou stache +mel ania +thr a +â¬Ĩ ï¸ı +pier ced +ze us +fon ts +ber a +it iner +q atar +contr ary +ire land +i fy +ou los +commun al +fin s +un paid +pa a +ðŁijĩ ðŁı» +ri os +ou p +f iller +cafe teria +à¸ Ń +kas i +cali ber +z ulu +v sco +ts ford +dragon fly +smo kin +pi st +psycho logist +diplom at +we bs +buc cane +à® ¾ +motiv ational +du ne +ba e +c fs +with out +er on +i ac +ate e +pen sion +fra zier +en sis +sk is +par ting +ger y +territ ories +nach os +eni ght +ever lasting +msd honi +tel e +sp un +po di +sab ah +environ mentally +ce ase +beau mont +mar ta +kel vin +ho ff +sun il +n da +co b +sh ale +ree dus +un boxing +u bio +re opened +n all +capsu les +mar r +himalay as +swee ter +ja z +f mr +twee ter +dha ka +na u +de mi +d fs +ta urus +fad ing +it utes +ci p +over flow +jef frey +don ny +car tunesapp +ðŁį ij +prefe cture +danc ed +c pt +ple asing +ital k +earth quakes +ul ation +hi o +ãĢ ĭ +ant an +nutri ent +de ere +selec ts +enrich ment +r iti +tram pol +bl amed +j ia +contribu tors +chesa peake +pi geons +tribun al +mad uro +w su +ilo ve +effici ently +dar cy +war ms +ar ra +ec u +ho wer +strugg led +rajini kanth +ðŁĺ¢ ðŁĺ¢ +hou sing +str at +eli x +disp ro +raf fic +thi erry +na sty +c fb +staf fing +al ma +back ers +hen son +sky walker +reale state +roo s +ness y +chan ce +cair ns +c ci +pe dal +ly ft +cross word +wait er +only in +kru ger +k ir +alej andro +car tier +car rera +re paired +ou at 
+un clear +un breakable +today in +qu eries +jo dy +gen ital +win ner +to l +kelown a +fascin ated +ãĥ ¬ +sris ri +squ ared +spr ung +negoti ate +priv ately +av en +>> >>> +g ical +gav in +chester field +zu mba +or r +nat alia +impeach ment +mn l +car at +criti que +credi ble +trac y +tan i +musi k +jig saw +gam bia +tol kien +fe u +as per +sav ory +fo xx +f itt +mar lon +l rt +v ell +p br +imprison ed +i om +chu l +wind shield +kay e +ba a +chor d +s art +al gon +minister ial +nat geo +la zio +nor ms +ðŁijį ðŁijį +lic king +fut bol +un sung +dalla scowboys +sh red +distur b +dev ine +be ards +ch f +b day +ro sso +ig or +ay i +si ren +k air +sti les +ro f +mag nets +un cover +mou se +bang ing +si ghted +spe ople +impac t +row land +kir a +environ ment +love the +p sis +mish ra +gl endale +ca jun +o che +de ception +sex ist +stra ws +s ga +buff er +apost le +sp l +pop up +ðŁļ Ĺ +r g +up er +ball in +i dy +occa sional +national park +ðŁı Ĭ +u an +innov ation +ภ« +te aparty +re tte +counter fe +b ha +rec s +ig en +ðŁĮ IJ +humming bird +cu r +ha ven +la zar +pue blo +: : +zi onist +op ath +inver ness +promo ter +carto on +cabine ts +mahog any +surve ying +r ational +feel ing +testi fy +so w +oc on +ภ¢ +ne el +mar is +sol itary +che mo +rad cliffe +sim ons +ros ary +new er +jo die +re tali +pra wn +pad dy +hen ge +k ala +im plant +at y +bren twood +par adox +ene z +re designed +p our +wy d +al de +௠ģ +sol d +biomed ical +๠Ĥ +tt tt +mat teo +ys er +new ton +de bun +ner dy +loo l +wo on +elisa beth +ec c +wh i +ach o +salv age +sal aries +qu ity +navig ating +oph thal +con soles +re built +o pec +ast ers +sho red +set list +kathr yn +rhy mes +re visiting +ash ish +li ft +re post +sole il +âı ± +weal th +sa at +we c +king james +flipk art +field work +se gu +mo dal +bu b +are rs +ðŁį Ĵ +clo oney +pad dington +necess ity +guth rie +pen te +li mo +jo sie +ar tin +en c +l hs +betra yal +info graphics +i er +mo a +hear ings +bon jour +sym bolic +ag ro +wed ges +krist ina +wild flower +athle tic +photograph y +pe sh +ca hill +chi lean +gou l +fi oren +ðŁij ¶ +z il +sk im +bad oo +deli a +tre ble +n cc +ðŁĩ¦ ðŁĩ +a house +bul lock +sol itude +ا٠Ĩ +can cers +futureof work +hu tch +water shed +war mongers +sp illed +colom bo +mo th +associ ations +weigh ed +global goals +not just +christ i +tor g +swe ating +man eu +clu sters +âĢ¼ï¸ı âĢ¼ï¸ı +ta ped +ul y +tru sting +yu suf +te in +ra b +, ,,, +sin ai +audi ble +explic it +cro wns +sch iz +at least +ðŁĹ £ +de bra +je suit +ene gger +z hen +one sie +i it +ss f +gur gaon +chak ra +bear cats +k ran +k awa +reque sting +han over +g end +sor os +mer cy +lovel y +do omed +tim my +ku z +ul l +ab ram +sa ison +ãĥ « +clean ers +re mo +circu its +bar red +o th +mo ist +madele ine +gall o +u j +per mits +hea viest +car ols +az te +gior gio +flo ats +decl aring +us rc +min at +craf ts +pri ma +conven i +nickelo deon +danc ing +ceremon ial +blo gg +tw p +anglic an +she k +k nick +( (( +hubb ard +harve y +hit man +fen g +we some +for za +s word +op us +bro m +gi bility +z al +m unch +dance hall +gre edy +hd mi +re birth +ðŁĺĭ ðŁĺĭ +s world +figur ine +com post +k f +engra ving +gior no +st ana +k man +ham ster +compos ers +aj e +func tionality +pol k +is ons +air planes +te se +hor rors +musc at +gi ven +sp ence +ðŁĩ¸ ðŁĩ +eli ot +ach illes +fre ck +crypto currencies +sou ther +hal o +bor neo +polit ic +hahahaha h +up state +si ena +obsc ure +hau sen +lloy d +happy friday +motor bike +bon a +americ as +hol s +- ( +spor ty +un aware +reven ues +christop her +bank sy +av 
an +ev apor +com press +eyel iner +to dos +buff y +renewable energy +ly rical +ar chan +rapi st +fair trade +lma ooo +beat z +pro active +la pse +ir ical +revers al +po de +mcin tyre +mac au +ãĥ ķãĤ +nash grier +f sa +g all +çĶ Ł +perpe tr +il ya +configur ation +% ; +str ange +rac i +ภĩ +pic kups +kov sky +mam mal +w ps +g able +compar ative +z h +save our +da vey +on etsy +mu ssels +mis er +cri stina +electr on +cra ve +lo ren +precipit ation +m z +ðŁį « +vin cen +snow board +no ida +ah n +marin ated +g tr +town hall +min is +bethe l +adv an +su ra +shi el +fur ry +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +lyn d +so il +sc ence +sen eca +shar jah +dick ens +credenti als +av ar +per k +requ iring +pre fer +j ian +de ca +r ach +ing for +del e +be ep +ðŁĴ » +cis ely +hu ddle +green sboro +haw king +ho ax +hang ar +ç ľ +mis o +lo vin +gre ta +ab ad +logi e +at an +snow flake +mahe sh +fear the +al kal +bobb lehead +ba hn +ju dged +fu tu +feli x +ðŁį ĵ +pi ke +der iv +notic es +au er +dis super +or da +wi pes +am ino +stri kers +foo tb +dram as +pun ching +score less +heming way +bi h +bal lad +chat ter +am mo +kle in +fabric ation +kari m +z end +hi sto +vol ta +rock y +marke ter +xtre me +sequ encing +paradig m +cle ats +boom ing +âģł âģł +block ade +promp ts +yogh urt +pur pose +nu r +regu late +nois y +ing rid +bird watching +bar tender +Ù ĥ +wor dof +cha otic +shor ty +el dest +z app +onceupon atime +fl yo +rit os +mike quind +ðŁIJ ´ +regi stering +. ] +ad ol +gg gg +pur ge +kid lit +ar bor +val ves +synago gue +o th +unanim ous +veri fication +dar rell +ãģ Ħ +vander bilt +tape stry +pro sper +did dy +dra fting +de cep +marqu is +st int +michael jackson +pee led +men us +bb b +sc are +ema il +wri gley +it is +f ell +some thin +bar ra +ed gar +di pping +pu ddle +sla de +lear ner +jal en +ðŁ§ IJ +the daily +mikequind azzi +ju x +iq bal +mckin ney +ra iser +ef an +dr one +cat o +pic ket +cro we +l att +uk o +giuse ppe +hin i +synthe si +ponti fex +song writing +to d +swit ches +din ners +h q +gabri elle +pensac ola +cir cle +expo ses +ev s +riyad h +pro men +o ck +sa j +cit ation +brew co +jo si +ep aper +dri f +point less +tang led +cri pp +line ups +fairi es +daz e +mour n +bla dder +sal z +bur undi +book mark +the people +sub sequ +princi pal +sk er +court ney +a oki +rac ers +ad m +mom a +critical role +hou n +shed ding +sa ka +ace ous +mck ay +hus bands + ½ +me da +accu sations +ro sel +nc is +witne ssing +or ama +go ds +hil ton +el man +ÃŃ n +meg ap +cra ven +announ cer +crit eri +sheffiel dissuper +milit ant +consu l +hoo ded +aby ss +b x +ma dam +lo cu +mary am +manic ure +grat is +ac tresses +ros ario +this dayin +king ly +gn ome +cel ine +r ous +he el +lil ac +vish al +ab h +thor ns +s ls +ne al +construc ting +be ren +s lang +ma ins +far ra +sar ko +pai ge +gu iller +l ala +ice berg +nou n +plann ers +u mmm +ou ses +ill ary +ma an +box ing +zi pper +srin agar +migu el +o str +mp o +responsi bly +lan terns +appli ance +x b +gren ade +neglec t +dy sle +ham mock +ne ctar +wit cher +r gv +di ence +ser bian +seed ed +cru z +bi sh +sp he +e q +sky rim +alge bra +phil ately +bungal ow +ge off +y ves +demand ed +consider ations +the vamp +pawan kalyan +co ded +grit ty +erup tion +se infeld +uni denti +ëĭ Ī +wor m +ac us +se ung +dun g +ro land +su d +di visions +ab lanc +shor test +j f +p oun +plant based +be to +tough er +mc o +don et +mark us +v fl +ðŁı ł +open ing +co ward +caber net +o xi +burle sque +sand ra +su mo +consi st +tho t +cay man +motor ola +gutier rez +d slr +y w +no bel +nov ice +moms 
demand +grun ge +sp or +d cc +pre sses +sli st +allot ment +voc ational +ft c +pu ja +lo ven +utt arak +tan dem +sh ep +come dians +anat om +cant wait +healthye ating +west side +mar gins +chi ang +asbe stos +stupi dity +proble matic +fit bit +: $ +ceil ings +shu a +protec tions +bio tic +beng ali +re sts +bien nale +tim o +cul min +e minent +affe ction +unbeliev ably +individu ally +canvas sing +wh itt +nov asco +chin son +h pe +go w +gloucester shire +pa o +thresh old +chev ron +s ine +we ther +pp ie +aqu ino +antwer p +âĸ ¬ +po on +inst af +equ ine +cinemato graphy +nbaf inals +vali ant +kil kenny +te rence +syste mic +sr l +p ound +made ira +pl ough +tre cht +mat ed +mp d +ransom ware +ph in +li qui +bb ce +boom er +i standwith +con ju +r te +nar a +foo lish +da shing +vier nes +br ite +da u +juni per +ai da +you now +ra zer +de i +repe ating +comfor ting +adjac ent +e to +ca sted +chat ur +mu er +syn th +san itary +mac le +independ ent +law ful +e erie +h or +ðŁĴ Ń +am rit +vel o +station ery +mu f +may may +contempl ating +elabor ate +gre gor +dri es +ac col +ภļ +schwarz enegger +ill nesses +day break +follow back +collu sion +electr onic +jo vi +hiro shima +ta w +hom ec +mic ah +qu itting +fro sting +ben fica +hel i +s ical +pic cad +corpor ate +ment orship +you are +sing er +shi va +ru ne +ing er +ri um +play able +doo p +wil low +ter re +ni p +at d +war bler +profession ally +er ase +proce ed +pedestri ans +mis chief +ben ding +alas kan +c kett +mo p +dd les +shut ter +ge ared +atene o +ma deline +g ations +o sha +der ick +sw ild +an gry +pat ents +hun k +decre ased +fr y +ðŁĴĸðŁĴĸ ðŁĴĸ +sal on +quant ities +d ario +ni gel +ku ma +jen n +happ ye +xx x +rex perience +pro s +au sch +rele ssly +ham burger +fuku shima +er ne +stat ec +ren d +may field +j one +lef ty +bern stein +sm il +gener ates +fore station +band its +ta yo +r ca +ac ci +rodri go +kn app +elo vers +vege tation +u ral +le ft +ħ ï¸ı +worl dre +sur i +embar k +w son +ba you +mu ller +mo vers +ðŁķ º +presby ter +l f +cre e +bat b +sal am +demonstr ations +an ec +n pc +it ics +to graphy +re inst +thur st +tal e +off ences +smart city +bro tha +ofthe year +in valuable +ear n +ðŁijı ðŁı½ +kre mlin +gra dy +town fc +guern sey +ma ha +contag ious +dre x +be en +( £ +nati vity +k tm +somer halder +comp ounds +íķ ĺ +" âĢ¦ +af g +ott news +h ound +fire fly +cil an +donet sk +volunte ered +ak ira +è ª +sing ul +st h +dro wned +mand o +he ir +ðŁİīðŁİ Ī +tax is +y uki +vel d +k ans +el k +ran ts +hash tag +t eng +ro g +a at +gru b +e ber +in india +colo ssus +sig ni +so ever +mile stones +der o +differen tial +phu ket +master mind +an gh +mel ani +bro ker +actor vijay +stun ned +continu ity +af fl +vo cal +perenni al +fianc é +in complete +hun ts +re issue +domin ates +tur meric +ro am +ri on +bag ged +nas sau +fu t +x ox +national trust +jo ye +san o +hearth stone +dis respect +le es +h se +siber ian +offe e +re stock +wolf gang +re gan +plan o +un wind +re par +mil le +] , +skul l +fat ally +concep tual +ðŁĮ ² +f é +ber to +b ms +u a +mag na +notre dame +le te +la undering +heartw arming +buffe tt +go at +pe abo +wind mill +v ac +continu ally +az alea +mem brane +can cels +make yourown +athe red +p to +tor pe +ðŁĺ ł +ðŁĴ § +sc ares +le aking +z et +pix els +ac i +kh il +marath i +ðŁĻı ðŁı½ +u la +tam u +chandi garh +z agre +aa b +pronoun ced +aubre y +sand er +pun ta +har low +ic elan +celebr atory +so t +unci ation +stru ly +mc dowell +deepi ka +remin ders +my stical +ct c +chat ted +s ica +bar gains +ch hat +ru bin +m net 
+oiland gas +pel ican +o at +mor ality +k our +i h +nu clear +gc u +ric her +vene zia +m ma +le ith +ac company +rich mond +sports net +ba ahu +smu ggling +mm i +ðŁĩ®ðŁĩ ª +twi sts +sahi b +.... . +amb itions +il lo +histor ical +fo rec +show biz +pon ies +chas ers +remo del +will ing +prince sses +am ple +cushi ons +ac les +lot r +da ch +an the +in corporate +new bury +ki ri +fried rich +ab v +ball ers +alber t +ðŁij Ń +let i +nan op +ci de +anal o +n sf +)) )) +griffi ths +valen ci +ro ano +fun run +babys itting +ca day +ent re +u ck +slu g +tic al +the sims +ro ar +car ney +g am +sto we +fi d +bun ny +sham rock +pe cu +mol ina +go cougs +con tributes +transform ation +mo y +v aj +sever y +antioxid ants +thir teen +sight seeing +l j +reversi ble +odd ly +hoo kah +nou vel +hal al +fe i +stab les +mul t +ho pped +bra ids +inter change +ghana ian +ww ww +eth no +con junction +ago v +ye ti +earth and +ts p +con serve +heir loom +metaph or +woo f +tor io +self less +n wa +em ilia +yl ene +y xe +gi ar +moder ating +pro bz +b fi +ne er +du mmy +hanuk kah +we bber +k v +eye brow +dag ger +su mp +ra ges +ork ney +tb o +hal sey +assign ments +tr onic +scri b +co on +an war +# âĢİ +jal ape +flori da +qu aid +haw keyes +âĻ¡ âĻ¡ +street car +ro g +dat lantic +gran ola +un changed +expect ation +Ù ĩ +mar lin +gu mmy +ðŁĻı ðŁı¾ +awareness month +oil painting +mu th +per ch +jun to +villa gers +mor g +che ated +web comic +the future +d ps +la kings +men tioning +vo or +ident ities +accor d +mc gu +l pga +rum our +massi vely +m pls +heal y +d ate +sp oli +re visited +on t +al and +scru tiny +lakel and +bl ending +< / +an kara +jami edor +metab olic +f ences +ann y +å ħ +semic on +oo tt +space ship +wack y +le ta +ap ac +she e +in herit +do res +ðŁĩ¨ðŁĩ ¦ +gent e +tw ick +ri ms +gal ve +de ville +king fisher +scorpi o +ow l +al ar +vari an +ðŁĹ ĵ +vene tian +star dust +then orth +q ing +har rington +consul ate +spectac le +ho bbs +tur ks +gre er +mat ing +ðŁİ Ģ +ðŁĮ Ģ +direc ts +í ĭ +pompe o +vo iced +la os +tz u +pro me +pri sm +mer c +fortun ately +bc fc +mcdon nell +not sorry +smi led +t ba +for war +mid term +dar by +we instein +up grading +wol ff +bron co +cab ello +ðŁ¥ ĩ +fi able +shar pe +bat tered +sat o +myth ical +instap ic +pre pped +eni um +e spo +di aper +explan ations +who pping +ragn ar +pe el +antibio tic +l acks +harri son +li sm +au l +qu ail +martin a +sent encing +sc ams +di di +tr onics +ãħł ãħł +go ff +za in +param ore +cha ined +clin ton +li ff +cott ages +em on +reve rend +consu mer +ce an +t any +lum pur +e bay +sto ol +ðŁĺ» ðŁĺ» +ta pro +h ath +modern art +just ine +prover b +app y +tra x +mani fest +am bu +nai k +pe pp +r sd +mer chants +kitch ener +shi fted +li zz +âĺħâĺħ âĺħâĺħ +âĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶ +uto pia +tom o +ou ted +com ers +chiroprac tic +book club +cin dy +pro hibition +se uss +ë¯ ¼ +thin kin +rr rr +go fund +t ack +om b +catastro phic +ling u +guild ford +bo td +ॠĭ +plan ter +^ ^ +win k +kath mandu +sto ppers +smooth ies +re efs +hin d +bell amy +Ħ ë +waste water +vo or +nat l +! 
] +re el +y ap +scoo by +work space +corin thians +bl un +obli gation +g bbo +dy son +cra vings +ell ington +dap l +wre xham +earthand clouds +uk runchat +positi oned +kal b +four square +jo ck +im pending +even ing +ath y +pro claimed +c ites +ann apolis +san i +mar th +ir l +accom mo +ka a +fin a +y aa +di sper +ec ar +bha k +will y +ðŁĺĢ ðŁĺĢ +mcder mott +mo j +gener ational +u said +train ing +lon ely +lo res +impe cc +âĢ IJ +beav ers +ma ki +he b +aap l +å ı +wolver hampton +leader board +me u +c fa +easter n +hu r +civil war +ou rage +hor ned +le high +awar ds +evi dent +gi gab +r ous +ma del +ro byn +ur gently +k ors +en as +heis man +bam bam +fab ian +f om +evalu ating +assemb ly +out sourcing +hun tsville +ðŁĶ ª +justi fied +cashi er +sp aper +buc keye +analy tical +illumin ati +au tho +o j +sha de +geel ong +wh ey +he aton +terri bly +ele k +un charted +sd live +moto cross +her mes +dar shan +dar lington +cash mere +gri pping +cilan tro +pun ish +... : +ðŁĴ Ħ +inst ance +der i +lo bal +muk her +sp ar +thin ker +fre mont +com piled +color ado +vig ne +sm d +whe ad +villa ge +le ek +formula e +ta res +persist ence +?? ???? +ped ago +he z +alzheim ers +vul ture +off ence +is great +suff ra +kick in +h mmmm +broad way +ï¸ı @ +art i +alli son +endor ses +ry u +lolli pop +soy bean +kend all +cer a +inv ade +( ðŁĵ·: +conver ter +car pets +ho bo +fr it +pe ac +es qu +ern an +ou f +an il +di ffer +ch ing +bre cht +sp g +daven port +stra va +sever n +n gos +stor ians +fe te +parame dic +j hb +al amo +sne aking +gold coast +roof s +isi l +depic ted +projec tions +nu mb +o ss +ep i +glu cose +zid ane +infin iti +íĺ Ħ +ran som +ton ics +fal k +g ler +ou tw +re ss +week ly +the on +n ole +ðŁĩªðŁĩ º +vol ley +sum mar +neg ativity +sam son +ye w +aus votes +ju l +ju dy +f art +pra yed +pal ate +multicul tural +double header +cycl ones +pier re +ãģ ¨ +âĺ łï¸ı +rt w +conver ting +wir ral +l ari +ir relevant +austin mahone +an che +ya an +sd f +$ . 
+explo ding +ulti mate +prof ici +gofund me +cell ence +ep stein +bul lied +sep tic +à® ¤ +lu mber +cu ff +vsco cam +pl or +ภ¥ +se ok +ro to +venezu elan +sor ta +spir ited +daniel padilla +team sisd +radio active +icelan dic +ðŁĴ ¤ +ver e +accommo date +shi pp +ot ter +ol ina +e go +su la +san antonio +de as +simil arities +âļ ¾ +y om +bro ward +å ° +can cun +veri fy +on te +candle light +ìł ķ +inf ants +az am +ðŁĺ ° +le ven +un stable +bloom ington +x ford +con tour +y p +innov ator +histor ies +po y +lolo lol +ex pires +cat alo +bill boards +an ab +el ic +novasco tia +fa ire +ìĿ ´ +rock well +gr ille +az tec +joh or +ur struly +fi ren +dun lop +id le +port man +jo es +tx hsfb +hol m +cham ele +under world +lo ss +ti em +therap ists +past ure +pa ste +ing now +vul can +ra gon +lar kin +o shi +ho co +child hood +umb rel +success or +kath y +iz en +° ï¸ı +share holders +ol ga +ai b +he ap +fl aming +ro u +air tel +rat t +z ane +vo w +thor ough +sn ag +par th +un conscious +ve y +new release +gh ee +croati an +facilit ating +swan son +astor ia +to logy +master y +ðŁ¤ ij +bil bao +trou pe +the ori +chey enne +ro tt +shore line +gra sso +master chef ++ ) +vi x +ellen show +as g +an ak +ku ya +safar ilive +debu ting +blu m +list ener +v ins +book shelf +smart cities +makeyourown lane +; ; +ðŁIJ ¯ +ri zz +on ward +bull dog +bear ish +vir uses +fri gh +lin den +we iser +sn t +gon a +dre sden +fl anders +cu k +wheel ing +ba u +atu esday +surf ers +swi ft +mc call +arbitr ation +aw d +mon c +b ine +at x +re fr +mi ro +po sey +n are +rit ter +âģ ¦ +play book +blow out +sports manship +s oooooo +malay alam +gri ms +bur bank +infin ity +sar gent +oit nb +joseph ine +ski pping +par kin +excur sion +semin ars +jo har +par tridge +post game +ll ll +blan che +temp ting +m na +lu ka +is ers +to ffee +bar ron +he mmings +sa e +go hawks +cu pid +li mbs +con se +un common +z ada +head shot +so ils +pione er +mam ma +sem itic +pan dey +jamiedor nan +spl its +vel a +son i +ra ff +t mobile +âŀ ĸ +pra wns +lit er +enjo yment +egg plant +tu b +cultur al +us ic +suspici on +sy cam +summ ed +ma du +ho ck +up wards +eye ing +ri ve +assas sins +âĤ ¬ +out fy +chi ves +t ner +la is +por ridge +sad dest +w cc +vick i +sna ils +biz italk +mill an +ðŁĮ į +sam oa +j ing +mi key +gu j +chel ms +eli gibility +arma da +thro p +surger ies +ãĤ ¿ +mo hawk +ex its +me m +is lington +c me +land fill +kait lyn +ðŁİ ¼ +combin ations +tomorrow land +ver b +cor a +pre cisely +na om +ðŁĨ ķ +shr ink +sof tly +merce de +mand el +poo dle +ball erina +sop h +jux ta +y at +ary an +hesit ate +lo wered +gu lar +dungeon sand +ron an +my ri +sp f +men opau +gra sp +pa thi +fe asi +fla w +shi story +ste ward +gg le +fay re +cli que +credi bility +yo g +sec tion +mu sko +se ville +no tt +cal m +mate o +indic ted +fi ba +by l +lin o +u kin +!! # +enig ma +siri us +bu sc +ðŁį Ĭ +mac kerel +psal ms +a at +tomorrow spaper +ðŁĺ ĸ +p fc +........ ... +shre k +mul let +o sh +danger ously +immen sely +am ur +ðŁį Ĥ +pro por +sy a +london marathon +abo ve +obli gatory +pro v +ra cha +alex is +pri mary +sh h +ether net +d stv +cou gar +un lucky +ni l +steak house +mel a +fc bayern +cause way +ca therine +fluore scent +nx t +to kyo +au sp +releg ation +qui zz +shored itch +proud tobe +promo s +inter acting +home brew +da esh +w pg +stead ily +provin ces +bal lots +i ah +al to +< << +you u +ri ley +prefe rence +tra verse +incen se +am munition +ho dges +# @ +hail state +tart an +witch craft +vent ilation +liber tarian +! âĢ¦ +ow es +% ! 
+ong chang +bru shing +le ic +fi ber +under attack +down load +ex pir +hy o +pompe y +mc bride +y ag +stre e +com bat +ten ding +ai ra +gug gen +ab ra +in na +fli ps +aw al +m ach +dol lar +inspir ations +z um +o du +it ty +video game +aqu aman +har u +bel fast +je b +but ch +us gs +calcu lus +go yal +mor gen +x finity +stand up +contrac ep +sab re +na be +in secure +gener ously +epit ome +l w +t ca +narr atives +don nell +pand as +ber gh +tu t +ker al +fel icity +br ampton +quinte t +nom ore +ðŁĶ ij +lo i +alham dulil +ðŁĶ¥ ðŁĶĹ +ston er +shaw l +clin ical +bren dan +gon e +fla wed +tri ppy +j g +al location +po aching +ve vo +mo cks +lef tist +bon uses +condem ned +abil ity +st ating +microbi ome +bio logist +for you +wahl berg +ss or +ift ar +w ul +ÑĦ оÑĤ +pom er +me me +ver te +tre ll +tra it +in let +hormon es +deliber ately +vill ar +battle ship +p bl +tw enti +ho kies +dal ail +say a +may fair +han s +die ts +⾨ ⾨ +od in +hot spur +pap i +k ana +k amp +fin na +flo tus +ti ans +unic orns +tribe ca +chang ers +fore ground +out a +inv aders +gett ys +tomorrowspaper stoday +mac millan +hand written +w fp +u de +state of +base d +âĺģ ï¸ı +cas m +psy ched +histor ians +fol d +d da +ag grav +p ans +green way +au sv +ðŁĺ ¶ +shradd ha +inde x +be sti +zim mer +t ness +eye shadow +ot te +go ts +distribu ting +pro min +yo l +ace a +tram rahim +hoo per +supre me +jam min +intu itive +quali fications +sli m +sid di +jay ne +tri pping +g tx +pun s +e manuel +om g +mid summer +in to +succul ent +ri en +new mexico +o or +hoo king +in f +ðŁ¤ Ŀ +flir ting +na hi +g friend +t ps +hel ix +z s +on ie +ct f +kri s +irresi stible +fla p +ðŁijıðŁı» ðŁijıðŁı» +us wnt +ru d +ram ps +pin oy +ot w +lol z +low ering +favor ite +t mc +phra ses +her mi +aver aging +em br +ben o +estu ary +sle eve +ribb ons +ta sh +ภ¹ +x f +aw gs +sun ited +brew eries +anir ud +pun ches +ol die +ip ads +wi fey +land lords +d ji +gun ner +íķ ´ +tex an +ex op +cas sandra +s off +ðŁļ « +igh ton +bak ers +awareness week +v all +ear p +bts bbmas +apologi zes +âļĵ ï¸ı +was ps +states man +snat ch +watch dog +ra fi +after party +spi ke +j er +peri ph +r nc +mu ll +le en +shi es +li eu +urstruly mahesh +mer ton +de sai +shi f +ðŁĮ ± +pe dic +gos ling +arrang ing +ww g +gen y +you uu +netfli x +e ttes +k wi +bernar dino +am iga +Ø ¨ +kashmir i +t ings +emer itus +de cat +ab domin +dc i +pha ses +d jan +be am +op ry +i shed +the ellenshow +the st +habit ats +to ons +mclau ghlin +ri pper +micro biology +tal aga +clu eless +ss u +cro che +bro mance +longe vity +zagre b +prev ented +tra ve +spo ilt +darry l +migra ine +al cat +dd dd +vi v +ser pent +mat tel +jam a +con quest +î Ħ +sam sung +presbyter ian +ket ch +fire fox +mo tif +le c +cho pping +cher no +j ann +ðŁIJ ° +pro lon +wake up +conver gence +mersey side +heart broken +lo oming +hal lucin +mai ze +commun ism +mo h +twitter storians +serge y +res eller +favor able +ed gy +re iter +mal aga +live me +ka hn +pul sion +big g +kim kardashian +ati o +tyr anny +ru ption +q ant +pro ven +by z +pu shaw +kri stin +e er +tar dis +ri z +awak en +mi ko +un documented +path finder +indirec t +resemb les +h ler +conce aled +scand al +re im +d nb +cr itters +attend ant +apprentice ships +aa u +scre amed +l su +fa h +har bour +ed d +bat sman +li ss +mi sha +spani el +it f +advan cement +fa c +close up +cecil ia +medi c +narcis si +lav ish +gi ac +ma ys +le it +wine wednesday +pushaw ard +let to +curren ts +bug atti +out ine +w j +un do +ler osis +devo tional +ðŁij « +on na +fais al +sa una +himach al 
+am ii +à® ® +di zzy +screen writing +ph x +sp n +ick i +ag irl +fi shes +wb z +pi m +bo ar +ac id +! .. +rocke feller +n ga +dra stically +simpli fy +dru mming +autum nal +gur mee +lor de +jo ann +give up +b our +am ura +der land +sim pler +wat son +tri dent +concor dia +bel lum +bre k +dum plings +vi on +dungeonsand dragons +sp ri +ascen sion +wil datlantic +u st +rob ins +legi on +insi st +jar o +gue ss +so b +bigh it +pool side +negoti ating +mc gill +bil d +techn icians +miti gation +ajay devgn +b to +ant en +cosmo politan +ðŁĺĬðŁĺĬ ðŁĺĬðŁĺĬ +patri oti +temp er +promen ade +nav ajo +nam m +wrink les +dc fc +le ach +bru nette +r f +cout inho +al ti +tradition ally +op tome +na z +accord ingly +rec ard +de ets +sw ell +po sure +whit ening +strang er +illi on +here ford +u wu +ro bber +cotsw olds +cl en +gor ge +nam aste +re lish +gri ff +adren aline +bla sio +val e +ê ² +toler ate +rail minindia +jen sen +ho ven +el lu +ob sole +eisen hower +unidenti fied +than niversary +body guard +Ø ¯ +i dge +sch al +stock port +sn i +re taining +po po +pix ie +oli thic +ki er +ha jj +sa z +cor bin +!!!! !!!!!! +v it +me gat +de h +circu it +af fleck +theore tical +hope less +u ab +slu mp +b ice +jam med +let stalk +can i +side ways +labyrin th +re fs +ha hn +jare d +ðŁį ¹ +jam bo +ph yl +enhan cement +c tr +ful lest +se ye +do ba +cho ic +yo s +cb j +andr é +re watch +pri ma +doctr ine +for gets +u hm +ar ound +u le +art lovers +shi raz +har th +ex tor +Å ¡ +unexpec tedly +eli us +y x +em my +se ac +ðŁijĩðŁijĩ ðŁijĩ +correc ted +com bu +wom anc +cou gh +what son +publi shes +divers ity +back bone +lock down +mesmeri zing +nor te +ma b +desig ner +í ģ +ra gh +mole cules +get outside +the beatles +semicon duc +nach o +lun es +ham mers +sul tan +o on +fe ren +att ach +ar qu +uttarak hand +s ash +; - +tre ad +i ko +ar thur +scandin avian +r ation +ga el +charge able +fish y +v ma +hand bags +char a +ay ne +de fam +sett lers +qad ri +pal ais +in wx +apocaly ptic +poo ja +a es +at ories +proof ing +n lp +ts la +v ina +li do +dee phouse +informat ics +v v +pp ings +di ss +à ¯ +uhur u +st ony +betra yed +b aff +my ra +as pen +allow ance +tam ara +ci f +cor bett +ser ge +di go +ambi gu +pain ters +p cr +p ca +nom s +lo ft +ve e +opend ata +ðŁIJ ± +alex andre +identi fies +fantasy football +re production +brom ley +ware agle +mm er +p ss +cu es +ay at +hut chinson +sar ac +jack man +ira h +ap ink +col s +aussi es +ex ecs +day ton +ðŁĻ Ĩ +im v +har am +chuck le +authent icity +ar do +incub ator +ภª +photo shopped +embrac ed +fight for +gor man +zz zz +schol astic +cri sps +te apo +mid night +ga ine +col lier +s ate +de tte +å Ń +imag ine +i ff +tw ili +i fication +teat ro +nor ma +es ur +emergen cies +rise up +r inger +hass le +cait lyn +tranqu il +vers a +se b +over look +gin i +bo go +se re +may ne +henri k +contamin ated +rhapso dy +pro portion +wildatlantic way +âģ© . +organis ers +tran e +stand ard +sper m +laun cher +ric ci +her ts +paper work +showcas ed +mer yl +pen a +p imp +disa strous +^. 
^ +phar a +x is +fron tal +sw irl +sp ills +swag ger +smart watch +sizz ling +savi our +cat ar +bb cr +refurbi shment +dr is +citro en +absor b +patrioti sm +il leg +chro mo +fresh ers +ru s +lim iting +ef ish +down ed +man dir +hazel nut +p all +mac on +disappear ing +quali fies +bo on +bar racks +am ine +gen dere +ðŁļ ĺ +j es +ãĥ Ń +qu ito +middle weight +sch au +quad ru +aci ones +limit less +ðŁijĮ ðŁı½ +ch man +ar av +regulat ors +it up +batter sea +mil ford +g z +tic king +gh ou +cru shes +tu tu +dread ful +fam ine +for change +dalail ama +ðŁĴ į +whit aker +hash mi +h us +vo d +bet te +aa ah +iso o +ðŁ¥ Ī +ha ar +la ine +b v +all day +spr out +indie games +free bie +gree ks +but ler +ill in +ha al +ware ness +si ma +public health +gam a +wa a +oun g +goo oo +okin awa +off enders +im pose +ho c +young ster +story teller +sc ap +figh ter ++ , +whit es +music monday +re za +go ducks +bri a +mi um +cas per +cru mbs +a ad +marti alarts +ch p +ri gged +tn g +harve sted +sa k +do jo +mill wall +b nw +oc d +histor yof +t mr +si rens +fan ci +caregi vers +vir a +son i +recur ring +acknowle dged +ðŁı Ł +oph ile +bu cky +stre ssing +roo k +di gger +vi val +san do +fle et +si ers +sel caday +refre shed +anti fa +a que +po lo +disappear ance +de mb +âĮļ ï¸ı +ren ted +ber ger +g mb +cu la +ss al +goo dy +u hh +marcel o +w anna +soft ware +shop small +turt le +tom as +fri sco +ðŁĺį ðŁĴķ +jim enez +c su +day z +an do +wyn ne +choreo grapher +cerv ical +trail blazers +ed g +zend aya +travel blog +el s +whole some +co g +lab out +ar ney +del le +su isse +ma si +ine se +om be +fi ddle +re claim +pa u +wat cher +sla in +ber ty +opti mum +el ites +min is +tur key +patro ls +ger ard +au reli +wild ly +wal tz +br gy +w ob +cre st ++ ++ +ve z +fro sted +davi do +the x +param edics +p into +han k +du pont +ur g +fo stering +micro poetry +spec tre +---- > +ne uro +fri da +music al +galve ston +e ffic +sc ape +pal azzo +th all +pro visional +p js +au re +ðŁĶ ľ +mam amoo +kit ties +cre e +wa k +lo ool +lu pus +cn blue +à º +ðŁİ ¬ +rac ed +tro se +om as +stri de +co ors +⤠µï¸ı +in comparable +cy ril +broad er +arec lipse +ðŁį Ķ +inter val +ti ru +co working +w aco +a ham +a bee +flouri sh +the times +ol ini +kick boxing +lu cer +at la +as un +casser ole +mi aw +lobb ying +jan ice +cir que +re flex +le ary +sanat omy +tem pest +se mb +mur dering +us av +ro bo +on et +p cc +nati ves +life of +sa ha +ruth less +rel ates +appeti zer +pye ongchang +nor d +er u +a thing +ug ly +pl ying +bran ce +organ ise +kend ra +dat o +chees es +par ma +burn out +a stra +pre toria +adjust ment +uk u +sl o +li ken +fav ors +cli ve +be ets +snow donia +go tv +sy n +open house +pan i +portra yed +sl ated +me cca +ren al +supportsmall streamers +staf fs +da o +bi ker +vik tor +tit us +admi red +ðŁĵ ± +hurric an +he ats +gl ory +photo genic +mer i +de por +burn ham +or angu +dj ing +impre ssionism +ign ition +ca i +w ynn +de pe +cove ted +colla gen +sau s +or nam +administr ators +ss on +nh politics +hahahaha hahahaha +aspir ations +r gb +swol len +so we +sc r +diver gent +hou ghton +han oi +d ory +ni ki +land ry +b cci +ðŁijĮ ðŁijĮ +is mail +tri pod +her d +bhat t +dress age +tab by +ingu ish +hur on +à³ į +à ł +to das +evangel ical +chor ds +st john +slo ppy +marty r +face book +ali ght +sen sei +kath niel +r ites +zi one +u o +revel ations +weight lifting +pan o +nc wx +ac ton +à® ķ +Ø ² +som a +à¸ Ĺ +respec ting +mar che +fore man +be tty +ki k +shi bu +po on +argy le +k swx +et z +mar bella +brac kets +stand by +fire side +defi ance 
+v ex +britanni a +in habit +appo int +piyu sh +le ash +sci ento +fla sk +sen na +> : +at roc +sand erson +id lib +dhan ush +ðŁĺ Ļ +en thr +hit ch +de dly +al ley +dor k +mon do +cudd ly +mis sin +ye sss +night ing +j pn +w ary +ump ire +ma z +ê ³ +bab s +ĭ ãģ +stan ford +posse ssed +exce eded +ðŁĶ ¶ +wall art +tra p +j il +hi bis +sp ying +scri be +khali l +trans lator +lu mb +di zed +ch c +super vision +shut ter +ja g +_ * +yester days +ms f +hi hi +gonz aga +gille spie +vive k +ec static +this morning +ch us +ed es +ston ed +be es +ðŁĩ¹ ðŁĩ +tur in +ho ver +at rics +ster n +sam heughan +auti sm +mi ya +eye witness +writ ings +travel tips +chut ney +px rtg +keny ans +my stic +k rit +/ $ +red head +world ly +am us +op la +le ve +gab bana +se en +o clock +gang a +keen an +sc ent +ol dies +go green +corner stone +comp ly +con cours +ðŁİ¶ ðŁİ¶ +ha an +con fis +aw son +cle op +î Ģ +su zu +sau té +al gar +subscri ber +este emed +ãĤ¤ ãĥ +worth while +mel rose +flo ck +bri ghtly +viol inist +p ere +sli pping +and co +si gh +ha van +cu lo +m sa +fibro sis +matil da +ra fting +aw ard +ë ª +mm mm +ge aux +ste iner +sin n +help ers +beet les +ai mee +tai wan +pistachi o +mac beth +m zan +descend ants +on sale +in r +il m +grou se +sa ig +mo w +bi gre +adjust ments +tu la +mathe w +transl ates +mu h +bol lah +ðŁĴĽ ðŁĴĻ +amo res +ab outs +bomb shell +bla ster +x avi +s ns +k roger +ga ther +erad ic +daf t +chem o +ben ches +ðŁĩ© ðŁĩ +ut v +our a +n ko +gator ade +biaf ra +ok state +im danielpadilla +dom ains +open ingday +kid do +do i +ric e +day care +mac millan +ba thurst +cheer leading +ðŁ¦ ģ +cash back +k won +hob bies +exem pl +ries ling +âļ ª +ag les +ny s +every thing +nav is +ad di +magne sium +faceli ft +ark ham +grand es +extre mist +don at +vit ality +pump kin +be tta +sl td +arti san +li by +pe aked +ah hhhh +mary am +assi m +un sc +ment e +al aya +low ers +ar as +gri ev +le ip +gr ati +cri ses +spr ints +exe cute +w to +ms d +mag ical +re viewer +spark les +juke box +ðŁĺĤ âĿ¤ï¸ı +pay back +licen ses +dun kin +bel t +lake wood +h ateful +bud gets +rev amped +ph erson +ky iv +went worth +ro sen +cru ise +gi ggle +def star +assassin scre +ym outh +win kle +w fc +band wagon +b kk +w iring +kear ney +south side +pe tit +! 
ðŁĺį +nor dic +mir za +mu gabe +v l +scon es +k tv +sand al +du c +m alls +ðŁĴŀ ðŁĴŀ +it c +al ay +im pair +un rest +flo ss +c é +ab ou +var ying +muse o +ser ver +di ya +hibis cus +ero y +mer ritt +fin dom +f pp +un usually +go tt +conting ent +ali aa +ball on +jo l +hi ked +zy me +ay r +ag n +ga z +perio dic +spar ty +practi sing +lin ton +tal is +cy pri +womanin biz +radio disney +ðŁĮ ¼ +jump ers +endo cr +ðŁļ¨ ðŁļ¨ +and on +shar apo +mi er +ma sonic +fac tories +vi en +bb ers +ìĽ IJ +hol d +ke bab +be ak +approach ed +ac milan +mun ro +ko sher +excell ency +negoti ation +walt disneyworld +cr ouch +te asing +suppre ssion +en ya +b ce +transformation tuesday +cal lie +vis was +p gat +ic ted +end ings +esc u +recru ited +it fc +collabor ations +g ino +snu ck +ausch witz +i fc +x ii +ke sha +ger vais +clo ak +x l +sa ad +prob ation +pre cau +mac in +anasta si +le k +e azy +daysof code +mariah carey +yo g +stit ched +boy friends +sh ar +ph ile +ag u +twin kle +phi shing +week ender +ic ton +gurmee tramrahim +al ton +l eness +all an +pen ultimate +kry stal +go u +lan de +dis mant +ab using +nor se +pat erson +ed mun +ap an +xi umin +sk el +cat walk +re act +wal led +t angle +br yn +ve to +super moon +cas ablanc +appreci ates +ski d +bo th +catal ina +ele ague +cyber monday +cau tious +ðŁ¤ ĵ +nov o +hamp ton +ha ye +jose f +var an +lo bos +roano ke +orph ans +tt in +squ ads +ishqba aaz +black panther +e tu +k sh +cru mble +cess na +reli eved +scul ly +pollin ators +explore canada +ki es +kam loops +kir an +pri mal +sett lements +hot spot +brain storming +ce dric +bi ennial +sh ant +âĻ¡âĻ¡ âĻ¡ +do on +hear n +walk way +fe m +ve al +deport ation +tox ins +elimin ating +descen ding +by the +bla sphe +ha sta +comple ment +as cent +ri ga +provo st +âĸ ª +wee ping +anti semitism +employe e +unearth ed +pin o +natali e +bla d +ang ola +lock heed +in ian +ag r +ni ster +im pala +m ke +fan atic +âĺħ âĺħ +ðŁij ¸ +lu ch +simpli fied +gall ery +econom ic +cy borg +con i +sel ma +in ception +ko ala +dv ds +cre sted +m mor +visi ble +n sd +ðŁĻĮ ðŁı½ +w under +refriger ator +re opening +e era +carou sel +as p +balli stic +victor y +mo tive +tre y +sharapo va +si i +mon ter +int end +west chester +sp e +cy mb +vi dal +ll ama +uni v +fin er +crafts manship +jazz fest +b ch +ag gio +n cc +lamb da +tranqu ility +cis co +ba den +so bbing +of i +go ta +ru mored +war med +ore an +ac ton +mar ci +gh ani +âľ ĵ +as sorted +pembro ke +pen elope +da f +at ty +aim o +pretz el +carni val +than os +ko chi +mer sal +ham radio +ar twit +cas c +guer rilla +kush ner +k app +al ise +todd lers +steward ship +o tti +ter ri +tem pe +rest less +vit o +zay ed +rsp b +pi on +hi ppo +haw thorne +in as +am ily +nut cracker +lo p +d ali +tro pic +ðŁ¤ ł +ul o +jare dle +py rene +pale o +usa ir +m ould +it ated +gene tically +biom ass +ðŁĩ³ðŁĩ ± +do dd +practic ed +monarch s +un manned +m buhari +am al +photo gra +ko ol +bren don +ju ices +cu re +world bank +poin ters +ðŁĴ Ŀ +tur f +le ds +bor ussia +bapti sm +warwick shire +moun ts +gay o +be gg +co pied +asi ans +k g +moder nist +gi d +front man +concentr ated +y t +sc avenger +iron ically +adi c +ps n +ðŁ¥ ī +cultur ally +yu v +mac arthur +fertili zer +be withyou +ri gor +min ors +z oning +âĸ ł +ri r +adole scent +vin ny +ren g +sand stone +gu et +we sth +ple dged +lac ed +sp ide +v ai +ty coon +seiz ure +du p +appalach ian +ro k +cathol ics +sey chel +posse ss +la ger +jo di +cham p +stra s +d ina +cent uri +cal der +blur ay +ðŁĩ¨ðŁĩ ³ +mo do +an nette +youtu bers +chap s +ang ling 
+label ing +a qui +pk wy +ly le +bi sexual +lit ur +dug out +li bby +grey sanatomy +sub stances +august us +rall ying +fi del +ing ue +äº º +hallmark channel +tooth brush +m á +adi rond +ag gi +ðŁĵį : +cru sade +tax ation +k z +i ver +dou bling +room ie +wa b +en rolled +az on +a ju +grand children +as df +ðŁ¥ º +mat ic +ough ton +utili ze +ðŁĴ £ +pon der +rais in +dys function +co bain +butter nut +e man +su red +dri an +and friends +with the +on omy +heine ken +bri dal +leader ship +pyram ids +deutsch land +jo cel +bo wel +y qr +horse power +be acon +ing eni +gra dient +fer mented +mo om +thing y +pot assi +wrist band +bor d +bo died +ðŁĺŃ ðŁĺį +ma pp +ka u +cyber punk +ph ish +loo king +co ates +ap ur +am ie +uk labour +at in +g la +adop table +shel by +v illi +ri ya +m ingly +cli mber +bumble bee +ðŁĺ ¸ +c sd +âĿ ¥ +hospit alized +c ki +hat er +ch r +re tina +it a +fan base +beat rice +gwy ne +go ss +fo s +favor ited +swachhb harat +mal ade +mon mouth +" [ +si van +sh hh +command ing +sains burys +wee d +g man +ss w +rep tile +iv y +tro pics +roll ers +over cast +ex position +masquer ade +man crush +wa ist +spr inter +sle et +le vin +j pg +_ ( +o pel +explo it +ap a +po we +wrec king +jong in +or b +er ick +bo sco +pra ising +ber tr +to wing +in security +ku t +resto cked +rr p +prescri bed +trafal gar +per t +g ases +app rais +g har +music als +âĸ¬ âĸ¬ +mc fad +ag ony +conditi on +equi p +shi k +atra vel +ðŁĩ¿ ðŁĩ¦ +ke h +abduc tion +pe oria +wil kins +g ms +as d +ev i +ðŁĴĹ ðŁĴĹðŁĴĹ +u z +mo c +halle lujah +guad alu +lou vre +dra wing +go ve +ph ant +fri e +web dev +program mer +z able +games com +clari fy +li th +kin ky +âĿ £ +labour doorstep +son ata +ju ris +mai den +vi adu +buch arest +conditi oned +capit alist +u de +ps b +sp ca +lul la +footh ills +kay o +bon d +wom b +roun der +ce sar +bur sts +ap ra +sw oon +sab rin +fra grant +cle arer +ku brick +cli max +jour no +ag le +ðŁı½ âĢįâĻĢï¸ı +poo ch +hal e +sol it +sal mon +organis ms +bron son +art en +hodg son +alo ve +vent ure +bb i +ae a +ðŁIJ ¢ +ld n +d nr +o zone +el las +man ny +azz ur +un beat +tru ffles +th ong +ma ñ +las ers +ley e +gettys burg +back packs +or is +ma ison +craw ling +la bra +cl ing +dra gging +ste al +dou bt +de van +ck ers +agent sof +photo bomb +elon musk +abo y +dist ances +story line +sp i +nor than +europe ans +wh ale +ser pent +ðŁļ ² +fi or +tr it +ox o +awar ding +class mate +su fc +smar test +rich es +pr k +big foot +ar mb +bi polar +dw elling +om ars +k wan +gri me +m eng +freder ick +navar ro +sorry notsorry +jaredle to +pa ve +sl ack +barn sley +att ar +evic tion +accumul ation +o ir +cat chy +wel ter +vik as +has see +nik ita +mo yes +mathe ws +shi v +gat wick +pro filing +compan ions +mar rake +an tics +ðŁĻĮðŁĻĮ ðŁĻĮ +se se +bo i +bart lett +poison ous +ab uses +ym m +kam pala +guggen heim +imv kohli +dol om +bre e +thro ttle +gare th +fitz patrick +un ya +par ad +mar got +j nr +we a +potassi um +p nc +disgu ised +cra sh +ren ergy +ill ic +coup led +ni els +ci ones +æĹ ¥ +im ent +despic able +d ye +what cha +conne ctions +paralym pics +gaunt let +wait rose +suici dal +star ship +vap or +st ou +law maker +coo led +si mo +then o +offro ad +ja den +bas que +vick y +lu kaku +centr o +tri sh +strate gist +medic ations +hor st +b fc +gra il +sharp ly +ad itya +tom b +kau fman +tri pad +sam ba +pastor al +brit ney +sag an +hill side +mas ons +sar a +z one +x u +to tes +rob bie +app en +mon tag +der o +short film +charis matic +tat ors +ki ba +and ri +al arming +split ting +ic ar +th ug +scari est 
+sylve ster +an an +u trecht +a difference +me ade +bu ster +air strikes +cu ffs +account ants +ðŁĺ¡ ðŁĺ¡ +new t +bo tt +issu ing +cl ancy +wwen etwork +kyu hyun +rese mble +pajam as +sin k +kin ney +sul ph +or k +li es +la gh +or ton +ra hul +d sc +we will +re am +collo qui +shar ia +hec tic +sar casm +land er +tm z +endor f +ro z +ham mered +fri s +w adi +pope francis +he it +flash light +un born +op es +hol iness +ðŁIJ ¦ +nach t +im sa +gr acing +bj p +ver ts +c sc +home owner +a que +bigo try +anni e +bag h +âĿ¤ï¸ı ðŁĺį +car i +thom p +dispo sable +cardio logy +pat ented +hh hhhh +ld r +stephen son +cro res +fan ning +cli mat +ðŁijį ðŁijįðŁijį +ðŁijį ðŁı¼ +aer on +piccad illy +bank rupt +sil via +emplo y +don ny +commen ting +screen writer +io ta +ce an +anc ers +tu an +street wear +ठ¯ +sk ine +esp a +asi f +os ce +she ppard +more cam +bott le +der s +orac le +google play +aver aged +edmon ton +steph an +sister hood +cru sted +stag gering +methodo logy +congress woman +c abo +tri ggers +mil ky +gli de +tooth paste +room mates +nu ff +gu am +sprink les +alternati ve +wat fordfc +uof t +hal ey +cont acted +bun dy +pro stitu +gh ar +pre ston +on site +hil ar +g ts +c att +hamp stead +? ?! +ðŁĩ§ ðŁĩ +bbc qt +aless andro +resi st +ma idan +t ko +shad ing +pin up +gal lo +sin u +at ec +fun k +ac lu +stri des +rhy me +wet land +bbc springwatch +t ins +wild card +st our +flamen co +pau la +onto logy +gang sta +am ade +ãĤ « +t bs +skelet al +run ner +jard in +harri er +hun ted +z hen +believein film +de mean +au diti +re start +chon dri +âĿ¤ï¸ı ðŁĴĻ +mcla ren +ga b +sh um +au sa +lewi sham +y pg +k jv +fur nished +dor o +bon ded +mor ty +lat itude +_ ) +lo va +water ways +vin ai +shor th +drun k +c ay +ay ana +kap lan +capp uccino +spr o +life boat +has bro +spol ice +tor on +do ing +dam n +sh ree +foun tains +ent ation +mar u +boar der +to pless +j ada +chan ning +ul ls +en closure +gib son +fractu red +brit ton +à ¶ +t ous +por th +dra f +tra iling +mar gate +eli fe +down ward +lin n +gla des +girl power +ak rish +u ki +ron da +ts c +appreci ationday +vis ing +lo om +ðŁį ³ +mex ican +ar gos +y ya +jad ine +south port +d end +si sta +rede em +men g +bra xton +antioxid ant +s key +mp g +fin ding +vibr ation +ce u +kh art +di mini +cl ine +shel ly +hin es +ī ï¸ı +to pical +no ver +ma xx +prim itive +illustr ate +b ounds +tren ton +join tly +breed ers +u chi +wakeup america +b ada +ðŁĹ £ï¸ı +gu acam +sp heres +pere gr +youth ful +lo lo +bir min +t ly +jeremy corbyn +defe cts +co sm +a rent +v aa +bag els +medi ac +cori ander +ic ago +g haz +ab bas +re model +struc turing +pu m +out law +ad ani +r bc +gul ls +n li +confu se +ðŁijĩ ðŁı¼ +vil a +mcnam ara +correc tions +mug hal +ser i +re gain +ss b +lea ve +haha hah +gran de +di stressed +re chargeable +ho a +hou sed +sti l +attribu ted +opath ic +di ps +pri t +head phone +conclu de +pil o +he t +ut sa +nit in +je m +sni ppet +tutor ing +op er +sun k +en sla +cha u +ac orn +quinte ss +ran kin +affili ated +our lives +cl int +se ater +isa ac +ba shing +sme ar +nur se +doo dling +" ; +sa ku +atroc ities +im am +g fs +viol ating +comm end +brad shaw +er ville +b illed +b be +thul hu +i phones +moo se +di os +re w +me thane +strang ely +whis ky +ti ghtly +spiel berg +radi us +notic ing +wi f +ig nati +i fa +ap is +w ali +ha itian +bu shes +y z +v l +ex ited +asse l +tru ec +dom en +ash er +in king +newyear seve +hend ricks +bat i +ìĿ´ ì +rich ter +mon santo +con line +agre at +ðŁ¤ ¯ +master pieces +ar n +rough s +cle ve +se v +fashi ons +to ya +sh ail 
+cop eland +aqu ari +dec als +are you +y aya +a str +fon t +ml m +ar ca +pp or +pol lock +xper ia +conserv ation +chain saw +ag gie +?! ?!? +si le +sh on +ìĹ IJ +note books +marque tte +de us +bb led +spic er +mc cabe +nor wich +modi fication +boo sted +stru m +sales man +bang le +nis san +hez bollah +brea sts +a af +anth us +sk er +ow ed +her os +gi fs +fo sters +eat ers +du es +_ / +lymph oma +sf am +me gal +afri di +ag ic +p amp +jeal ousy +ðŁijĮ ðŁı¼ +calcul ate +napp ing +g ale +ðŁ¦ Ħ +lub bock +assu med +ren ting +íĥ ľ +subur b +ãĤ · +tech nic +u cla +in front +gar net +ster oids +stri ving +ho war +mo ver +le ton +bull do +is in +ci ao +sn z +fore front +d ams +mid wife +ma wards +cla pton +we in +subsi dies +spr oud +rother ham +phan tom +ar ach +spi el +rac ket +sel amat +no on +l bc +enti ally +ðŁĴ ¸ +sil ve +m oud +kine tic +y asi +ðŁİ © +o ol +mi ku +i za +fer a +flo ren +barber shop +groo t +z est +ne ars +stan is +z and +police man +juris dic +form ations +appar atus +sp d +arti fact +to sc +motiv ating +womanc rush +re dro +diagno stics +ra za +out fitters +el xn +dod gy +ry n +sh d +ortho don +ol de +jay anti +bal ances +quic kest +can ton +friday reads +! * +na a +a ak +ðŁĶ · +behavi ors +rasp berries +ä » +polit ical +cam il +å ľ +di k +ast ounding +lie be +novel ty +tur moil +sul ly +spring break +hon ouring +cc g +ðŁı Ĵ +my little +ky c +pro ms +ðŁķ Ĭ +à ¨ +bi ge +av ril +ðŁĩµðŁĩ ° +mari on +as ants +sur ya +oc tag +luf than +ac ron +fayette ville +ti que +love s +en ca +de kalb +ta ver +de vote +aux iliary +joh annes +tread mill +ay an +qu r +donald son +cher yl +" .... +s ven +kir sty +gun ners +ra dish +o ahu +v sky +i ble +con course +b ps +elo qu +ash ford +te bow +roblo x +ma da +dri ving +th day +spro ject +m ms +band ed +. !! +libr arians +flan nel +intoler ance +her al +ç µ +neme sis +list a +tar ak +cry pt +star plus +vish nu +sc ale +cr is +% ), +j illian +regg ae +pegas us +ol in +ip ment +man ic +l fc +godd ard +ite am +parl our +anch ors +lee minho +talla hassee +ant it +d ho +kid ney +y ash +batt led +az ad +gar is +faul kner +sni ff +papar azzi +ed m +phy llis +con tested +aa ay +se ca +k ton +vel ve +rain ier +for um +tam pab +ho sp +trac tors +ox fordshire +no tion +guang zhou +ðŁĺ ¯ +ref ill +wednesday motivation +sli der +mukher jee +pr att +fon taine +alph on +af ar +ts i +pest icides +fi ends +mo cking +bra w +tran sat +do ses +co res +hom ophobia +docu menting +zlat an +con doms +s é +sun set +kun st +ton ga +ภª +v ation +sp ray +chow der +ra ps +palla dium +nor wood +music history +hoo ker +si si +osp rey +ph ys +conce ded +bob cat +ar mad +ze it +Ù Ħ +ðŁĺģ ðŁĺģ +mer idi +ðŁĩ· ðŁĩº +corn wall +! ), +touch downs +ze it +chal et +mm m +al che +gor illa +fo ss +ati ku +lumin ous +ivan ka +be ek +sta res +sw iss +âĿ¤âĿ¤ âĿ¤âĿ¤ +scru bs +me ath +gusta v +jo gging +confe tti +as os +ers fc +breit bart +applic able +autho red +ya ho +h in +displac ement +j v +ðŁĮ¹ ðŁĮ¹ +ot c +non profits +diec ast +gu sto +inte stin +c ages +me en +lu kas +moon ey +ðŁĺ · +very day +tor ah +is sion +wa c +lever aging +ish able +cu se +le wood +may an +turn table +ju ice +tru sty +tu p +eti quette +supervis ors +stu n +gu zman +confe ren +ric o +fe ast +back ward +pol aris +mic he +jo g +h ing +field house +vel ing +sho cker +esc ence +ठ¾ +vi be +anasta sia +mar ched +kill ing +Ķ ë +fe tt +exop lan +... 
( +snow day +lo h +ir ani +la khs +del a +po caly +boom ers +dictat orship +ac er +tur keys +quarter final +muskete ers +ðŁĴĽ ðŁĴļ +sf x +museum week +sc ala +ri sis +( ðŁĵ· +ãĢ Ĥ +z ies +bo eh +hu es +lu sci +dol a +impeach trump +roo d +don caster +tor re +hero es +fo yer +tar i +blur red +ke w +frank ly +dro id +ap al +Ð ¼ +y af +bre t +par agu +cac ao +ðŁĻĮ ðŁı¾ +ru e +head aches +shaw ty +char ley +pal er +go wns +correc tional +ðŁĺ© ðŁĺ© +breaking bad +ol ing +da p +endeav our +cit adel +tra d +incumb ent +medit ate +foo ted +ðŁĴ µ +shab bat +dayof the +wil lem +gal way +to red +marri age +f illion +sleeve less +aud itor +jin young +invin cible +kad una +a and +volcan oes +mon eti +indie gogo +buccane ers +ðŁijī ðŁı½ +ãĢ Ĥ +lay ton +cuck oo +hu mber +buzz er +Ï ī +to re +stra ins +sto m +pa ine +s we +du ff +z ou +si mi +li pp +ur n +se agu +ðŁĶ ® +sun dae +hi c +ðŁĺ ¨ +bull pen +u per +flyo ver +al dridge +glo bes +ali es +ken zie +ge es +y cle +sp lin +mag enta +j ha +bal u +gh orn +ti pper +wick er +taste of +con clave +ch ale +inv asi +cat er +dio xide +me gab +win n +at p +transform ative +nest led +hi g +bri dging +lil ies +chee red +bad dest +sc rolls +real is +dipl o +ðŁĶ « +conce ssion +prefe rences +explo des +er gon +introduc tory +ine au +ch af +som es +land rover +spir ation +sex y +sco recard +illustr ates +soul mate +wi en +inter disciplinary +fore casting +ent ities +glu ed +en lar +cur t +percep tions +boot leg +mi re +asho k +v az +hor ne +cal le +ac ulture +ther oy +night time +oc al +character design +ar mist +ðŁĺı ðŁĺı +yah oo +ac eae +to se +even to +sou t +nay anth +wh om +v are +ri gging +gen us +hi ve +com mands +sti e +day a +ethan ol +en f +hi fi +flu ence +cle mson +re invent +thermom eter +humor ous +emer ging +aci ón +ðŁĺĺ ðŁĺį +s ity +haw ke +accompan ying +t ility +ðŁĺ ª +re cess +protag onist +l ery +dun dal +int l +britt any +q bs +off the +marri ages +how to +viol ated +adel aide +wit t +lanc er +pak v +hu me +st ade +bra gging +ou tright +ad c +super st +real time +cu res +garden ers +ero ck +dale jr +ver o +bar tol +mo ti +mc fly +v pn +st ink +over rated +guer ra +e tis +ath ome +twd family +th ab +tn x +rafa el +family travel +x ley +sat anic +equ ations +ru dy +wal dorf +stan i +tu be +meas les +zimmer man +obli gations +i ously +bow ser +trans former +sho ppe +shak en +gh ouse +to d +ke tball +share holder +mar ca +kp mg +ak an +given chy +coast al +au th +roller coaster +mar ches +coordin ate +cine ma +apprentic es +par lor +mit o +men on +consider able +bar re +glo ss +enh ances +jaz eera +fal mouth +thra sh +stat en +k zn +eng el +samanth ap +flo ppy +sal om +ðŁıĨ ðŁıĨ +w ack +deliber ate +osc ill +herit ag +du sted +orni thology +pad dle +fer ns +bar un +cl ans +anticip ate +a ay +mat ically +é ĩ +tu mble +post man +unic ef +tro tter +op d +leaf let +ge ist +cease fire +scre ws +cre ation +wal nuts +longh orns +under statement +ab b +proxim ity +na x +un ity +turn pike +orda ined +dub step +chak ra +me ch +love her +look alike +donne in +vir on +Ù Ī +bang ers +vari ants +out dated +in ta +cri sto +sp elt +food and +f on +stefan i +margin al +hu tton +ti ara +tel ford +qu en +fair grounds +que tta +mikha il +heal er +v ball +ty re +under grad +gl end +hom ers +scri bed +main tains +po che +mis sal +mar ko +u as +á n +sh p +con vey +pad re +sab a +pu glia +madhu ri +pa xton +chap lain +n ago +ca si +... !!! 
+fli rt +sal eh +k are +di re +stam ped +extre me +ðŁĺĥ ðŁĺĥ +ho ppy +guadalu pe +advant aged +eu char +p low +un n +mac qu +port land +cla sh +pe s +lou bout +y p +keep ing +arca dia +fran kie +fi u +de th +encyclo pedia +si ze +inve sts +ðŁį © +geo logical +fran ç +con front +ðŁĺ ¥ +d ys +af m +tex an +graph ene +repost app +ac f +ur sula +gaz a +dd led +fu m +wsb tv +m be +fron tiers +chrono graph +ke s +inter faith +tab oo +spar ta +won do +flori st +em braces +ca w +no el +arch ers +ðŁIJ · +roman o +ban an +sh akers +melo dies +geo thermal +se phora +ìļ ° +оР´ +pro c +hand shake +pan de +popul ated +slow down +hor tons +registr ations +un deni +lan ts +pas sover +thak ur +li ef +adhe sive +pe tal +micro scopy +memph is +confir ming +air drop +mesm er +perce ived +ming le +lifel ine +gh j +worcester shire +pas sions +ach er +el lar +ah o +firen ze +bar ang +letter man +hat field +lu cha +je ter +e shop +william s +horo scope +pre de +east bourne +dur ga +di version +al trin +seis mic +premi osm +nar co +ti r +ori g +or m +land fall +ci ous +lin do +max ine +x ico +tra y +os wald +c ba +ric otta +n cr +mar au +ภ² +gladi ator +ch ery +lun g +u me +po psic +lon ging +can als +ta ya +decentr alized +sho pp +pres sures +mahar aj +eti had +wal greens +succe ssion +sign aling +li g +staf fer +north korea +def ying +as ma +de g +peri meter +oak ville +m sk +balti more +rece ip +de ple +ðŁĺŃ ðŁĺĤ +jambo ree +> .< +rsp b +puni sher +consider ably +in tothe +pari sian +acceler ated +polye ster +low es +fr ying +sauté ed +mou ths +seychel les +ra x +go dis +dak ota +house wives +the me +mat inee +black bird +ye sung +pre fers +pelle gr +in ated +trun ks +stronger together +re pet +re pairing +ped als +toler ant +her r +dun ne +indic ation +decat ur +b tv +exhibit ors +ik on +friday motivation +bra gg +live tweet +al ves +womens art +foreig ners +wal lets +min dy +lan ey +bb in +tv miaw +lif ter +tar get +tam e +dr ou +astro photography +mp c +g pu +nord strom +fric tion +run off +lov able +sp nfamily +ext ingui +bloo dy +sch el +arti stry +sw ish +scar ce +ph ils +max im +pos sum +com promised +sty li +sc fc +is sa +birmin gham +sket ched +angel ica +ordin ance +je ts +conqu er +ðŁĺ IJ +online shopping +s ori +reason ably +nue stro +ar turo +ch l +benef ici +spho to +wel t +ni kk +ðŁ¤ ŀ +dan ao +for mid +as se +af irst +âľ Ĥ +gil lette +as sor +an onym +sel ca +fe mi +bear able +y and +ar mory +cre pe +celtic fc +bra vo +in expensive +de lec +ge cko +new market +snow flakes +kab ir +con tra +can ning +mor pho +gar wal +ðŁĴĥ ðŁı» +fight ing +mu tation +woo dy +ju gg +gr aces +premiosm tvmiaw +kenne dy +gu p +sa e +op ha +off spring +fini sher +bet ts +span ning +mar j +h one +sh ing +contin ents +samanthap rabhu +un related +l acy +explo sions +benjam in +sophi e +no ting +micro soft +as sen +a hoy +i ker +ho fer +mo e +ah madi +yan n +an ak +ma hi +be u +aha h +creep er +baahu bali +am at +pri ory +haw keye +deloit te +sko da +print making +assemb ling +mirac ulous +no ch +sw o +leg a +oper ates +border lands +eli e +stron gh +rep tiles +pir ate +un fold + ¯ +qual comm +un predictable +ot r +rose wood +direc tional +counsel ors +corn ell +liber ated +j ad +ir regular +bulgar ian +high ness +vodaf one +sw ild +mini mize +gra zie +๠ĩ +r stats +stre ep +ome tric +humb le +lu mp +l ille +b ü +home depot +tripad visor +ki wan +a via +er z +ex ico +du f +blu men +mi zing +ar ma +in im +con stan +sor a +ju al +au n +tw ell +tren ches +her a +r k +po plar +recipe oftheday +ll an +bhu ban +short ages +ing 
don +bridge water +ðŁIJ ĺ +fortn ite +cam den +un cture +pro w +colon ies +t ks +n go +b hm +live pd +spl ace +sli ke +happye aster +ter rence +revol ver +j ed +yy yy +office of +m ts +exist ential +r ourke +explore bc +sse d +pri est +vix en +si ding +k pa +a har +ju ic +ob struc +foren sics +uk mfg +cancell ation +we ary +ab q +ele c +pri zed +deb ts +me zz +salv atore +m dc +gre tte +c gc +th on +snow storm +ts ch +cook ery +å ¹ +wa xing +n acional +mur s +ra ve +cap es +ger main +dri pping +sub mitting +ome lette +iter ation +aj es +shim mer +fu eling +ðŁĩ§ ðŁĩª +li po +bo bble +un follow +islam ist +hi ber +cat s +agentsof shield +sen si +____ _ +ster ia +inst al +ausp icious +har row +over land +femini sts +inst ant +char iot +blind ness +sp ed +sc arec +nu it +mini atures +ho seok +glo ck +fifa worldcup +e te +dis m +we iner +ex foli +ear ts +ภĶ +my art +man il +iss ant +form a +in cu +buffal ob +in tim +mc cul +anj ali +po po +un doub +hil a +fun gal +thank ful +fu tur +en dish +ren ds +th ar +she ff +ring o +nichol ls +io wa +po tom +cl ams +ãģ Ħ +acon f +stadi ums +di mp +di k +residen ces +do v +caric ature +seagu ll +kl m +confe ss +sla pped +cele b +turb ines +pp v +nur ture +el ab +.... .# +tu ff +de press +al far +amii bo +di spon +e wing +que er +friend s +for re +âĺ ¼ +sw t +aqu arius +head liner +cur d +fi gs +o tters +love fl +kare em +go vegan +fri yay +consol ation +at ri +ì§ Ħ +âĺĿ ï¸ı +poly ne +gu ed +o ya +la us +intestin al +cam illa +scal p +pi r +leed s +horri fying +bore tum +dand elion +fer rer +ell ic +as x +so ren +re loaded +ale ague +navig ator +ine tte +add ams +al chemist +ak shay +dystop ian +awe c +n aya +al isa +ai led +ag or +avi ator +ali zer +smo bile +findyour park +cop ying +to ddy +sh ti +mon ger +cal houn +nap kin +break up +y atra +se thu +ric hi +eras mus +fer ry +am ore +prac tise +bo bo +power point +oo se +li ffe +chin a +sh ka +fad navis +du ane +war on +fal se +ðŁļ Ĥ +wa shes +disc ip +==== ==== +g k +ab b +stub born +medi eval +p ci +ðŁį ª +maril yn +h yo +man di +cr i +prede cess +continu ation +om usic +s lat +wh al +mall ory +bon n +shen zhen +ca i +âĺ ĥ +sa fest +for wards +dra wers +bla sted +sle e +mor phe +mb ta +dumb ass +ÑĦоÑĤ о +alhamdulil lah +ec lub +al beit +heal ey +ayurve da +adverti sed +cro cs +itt les +bry son +be i +nj pw +honore e +fu sed +ðŁĶ ĺ +mul tin +n aga +de parts +ko p +kin o +jhar khand +ed na +ax le +mil ton +supremac ist +marrake ch +domin ic +tran script +] [# +: ). +wo c +sur rounds +o gil +leaf lets +co well +whe w +tru de +proli fer +succe s +sports man +con dom +po che +k up +imprison ment +{ } +scram bled +å Ľ +ka ine +cell phone +metam or +con i +remn ants +ee z +down pour +afterno on +exerc ising +ber ser +architec ture +wick low +m ns +is p +bo c +n iss +mn wild +stu mble +r si +lu ffy +sil en +dd ad +bul lies +haw ker +bb cc +scu ba +e pp +que ts +for aging +pal let +ha di +cinemato grapher +cat chers +to aster +k hi +lite coin +kid lit +amher st +maur icio +ip ad +mar malade +fe y +don nelly +g to +est as +cere bral +ant grasso +zz led +vir gil +swa pped +ðŁĺħ ðŁĺħ +no dapl +greate st +nhl bruins +fra ser +b mo +ane w +. 
âĿ¤ï¸ı +se gregation +remark ably +mccor mick +lo gger +er as +contrac ting +âłĢ âłĢ +yor ks +uku lele +touch screen +de cked +ben n +south wark +ra vin +nu mis +ðŁ¤ Ļ +ru t +gre co +eth ic +red neck +ar r +t cs +ih ri +ðŁĩ« ðŁĩ· +l k +inher ited +zy k +viadu ct +marty red +hi gu +ss n +be in +street style +fer gie +bank of +æĹ ¥ +stake holder +exempl ary +cre ss +ess a +ero tica +intre pid +gom es +bra un +bethan y +bang tan +pulmon ary +m illing +doctor ate +trump russia +ठ° +s ani +bl att +pla u +depri ved +t le +ful ly +bour n +st ak +lufthan sa +kio sk +far oo +def y +bad an +ðŁĺĺ âĿ¤ï¸ı +rit z +tri sha +ran ds +middle sex +arab s +pro j +sport scenter +repe ats +iv f +bleed blue +as sure +o bs +territ orial +ele n +bever ley +ann ah +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı +z l +for good +science fiction +gla u +son ya +pri th +st weets +mix ers +mari o +ant elope +writing community +went z +den ham +be di +sf o +harley davidson +look book +immuno therapy +or phe +es ville +ed ged +tas k +sb ball +corro sion +kilom eters +co sting +play back +ke ke +di visi +u ter +re location +yel led +pen g +up beat +ser ve +âļ ł +hal en +stir ring +reh man +en v +schu macher +frag ment +alkal ine +sb k +resil i +share point +rol lover +tra sh +counter part +âĻ « +ob itu +à ½ +ãĤ ¹ +mul berry +ðŁİ Ĩ +auton omy +spra ying +nat l +love you +fran ki +nu k +esc ar +can teen +ali baba +de plor +mole cule +pu d +fort night +blon die +sp hin +portra yal +ta che +bu te +consi sting +freep alestine +c sp +im mort +d ns +ðŁĴ¥ ðŁĴ¥ +tour de +coo king +archi val +ga thers +bit t +b anc +pre mature +snow ball +poetry day +lou dly +fug itive +ed ay +em ra +ðŁĩ¸ ðŁĩª +sci en +node js +jur gen +je ong +band ana +un is +fox sports +v andy +pro visions +wee p +tu k +i ko +h oun +zig gy +z r +fil let +bat a +tin k +con e +we want +k ilo +hor ace +sl t +sc t +stay tuned +victor ia +umb ria +att acker +ingham shire +fright ening +no ir +fr at +con tempt +lia ison +ho i +br ink +tr ill +ni agar +kick ass +dun das +not my +rho de +bu mble +no xi +fa g +spec tators +mancrush monday +jin ping +distr act +dais y +wal den +portra it +ar thistory +vol tron +ev el +is c +ac m +r ite +na o +de ported +swe ats +ru fus +lo bo +labor day +gam o +ihri thik +bl it +abdomin al +ãħ¤ãħ¤ ãħ¤ãħ¤ +i it +e q +bu sy +allu arjun +un disclosed +de ton +pro create +ki l +ðŁİĤ ðŁİĤ +mitch ell +ki i +inherit ance +al p +jo burg +pat rolling +compul sory +un signed +ni am +l ga +eshop suk +tr illi +ma w +appreci ating +rock ab +mañ ana +an tal +mal vern +roy o +grand prix +sut ton +go ftheday +dig i +ãħĭãħĭ ãħĭãħĭ +t les +varan asi +erec ted +discip les +cont act +ðŁĺ µ +li d +⬠ĩ +scen tre +radi ator +ing tips +trans itions +thursday motivation +chem ical +separ ati +sal is +mi m +geo graphical +book fest +/ . 
+âľ ĭ +v ae +cur rie +ag garwal +acceler ation +the ses +lg m +u mass +pro portions +nat a +ani ans +ku ch +be acons +ap r +@ # +ðŁĴª ðŁı¾ +nu ke +sher aton +ki o +ma kati +polit ico +mor ale +ì Ļ +econom ically +gg ly +ss en +pa stries +intern ships +vic ente +fanta ken +aveng ers +accu se +slee pover +indic ated +the dream +ster one +ren ders +fro st +ou i +gre gg +d ore +⾨ ⾨⾨ +pu gs +sat y +nu mb +hems worth +tam i +la ssic +schi ff +igle sias +ag awa +] " +re shi +game stop +divor ced +theat er +clau di +un conventional +prophe ts +ac in +twel f +tow ering +t ml +sc lerosis +k wan +ge ts +distur b +na ira +ener g +pir acy +pru itt +noti fied +hen na +bra m +ground water +bl s +opti mis +$ ) +luci e +biz hour +fang irling +gr ills +or l +ver se +c ina +law less +artistson twitter +tele vised +marshmal lows +radio head +bar r +m fc +bre vi +mmor pg +g aya +âĸ « +sub titles +j t +disney land +to bago +nh m +groo ve +fi awec +" / +ba o +scra bble +om ni +ff l +um c +si mba +ali er +ter rell +plu me +mi di +dig nit +co c +bru t +ad ata +alche my +d sm +ðŁĺĨ ðŁĺĨ +win try +spa res +cu er +conclu sions +to ys +od or +fl ann +gar vey +scrip tions +inspec tions +cat ap +ang lo +st louis +heim er +at ay +tr ich +en yc +chil ds +vent il +mont p +guiller mo +circu lare +z ell +mode led +craf tsman +al ina +stimul ation +cashe w +ju das +best of +to ire +susp ends +scol lege +real ising +by tes +bloo ds +as si +ðŁĴ ¿ +o hs +ðŁį ĭ +scallo p +ठµ +gi fting +camo gie +wil kes +o zzy +ðŁ¤ ¤ +ver onic +sav oy +deme tri +baby girl +ðŁĺį ðŁĺŃ +so x +cly de +induc tee +count down +self care +ठľ +vi ka +tor re +phd chat +pe ars +aw h +suff rage +le sn +admir ation +mp p +shark week +schul z +santor ini +clo ver +( * +stras bourg +ex iting +so yu +finger print +che a +ãĢ ľ +vin dic +song writers +so a +prou der +nam a += )) +simple st +delici ously +gil les +u q +mn wx +ep p +sh un +ken nel +fall on +ðŁIJ £ +sin d +tra gically +out es +modern ism +co ke +gy n +spi on +âĺ¹ ï¸ı +le am +compress or +apolog ise +twent yon +fan atics +âĻ » +sco tsman +sa wa +ko u +as er +ภļ +welter weight +phen om +twick enham +stri a +p out +ka z +gi am +cd p +ho y +emplo y +red mond +ภĦภ+sm ere +trance family +proto cols +pie ce +lu iz +iter acy +carl s +united states +har med +phd life +ch aw +foot prints +l é +cho ker +z ana +sli pper +eric sson +insul ting +articho ke +advis ing +acquis itions +op or +mut ations +re ar +ॠģ +pod cast +wi ther +kun g +íĺ ¸ +win slow +di apers +ðŁĵ¸ @ +ec ker +col lar +hu ey +gi ro +mono gram +kas ich +si veness +malay si +arom atic +gre s +gali leo +u ji +rob b +dr m +none theless +as a +: > +lo a +l np +at work +ag t +laksh mi +pipel ines +id al +stre l +re all +chain z +stone wall +san sk +ðŁı ´ +pied mont +hoste ss +ci u +t é +analy ses +wil helm +scott y +rw by +mosqu it +use mb +qu ins +ðŁij İ +tu cker +s conf +speci fications +psychi atry +broo kes +s ils +ol af +de to +co di +cli p +fil th +womancrush wednesday +go to +ang erous +be ale +w tc +paneli st +ne x +lar sen +emili o +tab leau +h itters +conce ived +americ ani +or tega +mar di +Ñ ĥ +pain tball +thir sty +new yorker +etis ation +go ss +we aker +u gh +tro ll +har ga +du al +ght ning +at ine +ðŁĺİ ðŁĺİðŁĺİ +cook out +pyrene es +po ss +authent ication +sports wear +yun ho +kir o +archi pel +shen ko +ren der +nov ation +divin ity +ðŁij £ +su fi +humb ling +ge opol +devote es +wait ress +tr ough +py ro +i ba +bl ing +gra f +epilo ts +bt r +of tball +bas king +domin os +so om +r ath +sher yl +qu el +astronom ical +wel d +track list 
+sig nee +slee pless +com man +ch ron +summ on +pure michigan +cri spr +sli p +la gi +ra q +um u +thal ap +char med +scru mp +quad copter +ski p +peter sen +mun i +ðŁĮ ¾ +mon aghan +tra ys +ick ed +canad aday +te gr +ï¿ ½ +hot ness +heavy metal +ab ar +gop debate +az ul +spider man +sun flowers +ľ ë +web comics +bar d +Ð ² +nichol as +slu sh +ram an +mark ham +ffici al +ff ler +íĬ ¸ +ple ss +anush ka +to to +sk aters +pro wrestling +compet es +ay ala +myster y +thr ills +mp g +independ ently +y ul +imper ative +formid able +tire less +st acking +ton gues +mal tese +pot ts +mat ti +char ting +chill out +super nova +ome o +sky sports +nu tty +ðŁĹĵ ï¸ı +ro han +insp ired +concier ge +ser ra +ma kk +gal at +chi pp +ye v +ì £ +reim bur +op ul +kimber ley +i eee +bre men +ch itec +or in +nak u +bon kers +foo ty +emer gence +ðŁĨ ĺ +sti p +serge i +zo ey +ai me +wou ld +dy es +destin y +vinai grette +dri er +circulare conomy +an archi +ss r +sch el +cin er +gro om +determin ing +gar min +cal ais +incarcer ation +bu kit +no i +chelms ford +mckin ley +chi pped +belong ed +tu mors +str oud +mi i +influen za +wwen xt +tun dra +tele communications +cat sofinstagram +t ages +beat ty +o du +ml kday +oo per +dang le +ak ley +cru mb +anti gua +ti mbers +rou hani +ðŁĴª ðŁĴªðŁĴª +ha fi +... !! +w cs +coo p +sn c +lit res +ãĢ Ĭ +ha z +co z +k ant +green field +cur ti +y ale +flye agles +what soever +wor thing +rou lette +flyeagles fly +un da +a inted +stand ing +lusci ous +h pc +effic acy +ash land +me ghan +ky wx +n pr +bath tub +ac os +h ani +mar cor +man tis +da isi +bo ba +ab bie +mu til +vi al +spy der +po z +g ti +el fie +nigh tw +metro id +anton i +mad die +dh ry +dar lings +ten ds +taek wondo +atlan ta +me ow +chlo e +ãĥ İ +ym es +siber ia +k con +gu es +mar iner +fac il +azz le +[ ... +han nover +bav aria +vir go +te uk +u sps +) # +wall a +sam pson +need less +ver bally +hay ley +bow led +pi us +lam pard +ham string +vol vo +road safety +cho king +sor bet +a hem +healthy food +brai ded +horticul ture +cr ative +che ek +ad do +the force +ko ko +schiz oph +j ie +w ada +twentyon epilots +h bcu +pro ton +pau ls +lou isa +lat am +kyr gy +com pac +sd k +sap i +?? ? 
+liber alism +ep silon +ai den +w usa +spra yed +baske tball +kim ono +blue wave +ali as +ë§ Ī +mug shot +ce c +do gre +ad ora +ðŁĵ· @ +kra kow +intrigu ed +exhau sting +astron omer +ven ison +lady bug +ci v +bra e +us m +bri be +acup uncture +pembro ke +ke ating +chi e +y ad +t si +sm i +see ding +gate shead +lis boa +gy p +canv ass +ðŁĶ´ âļªï¸ı +op i +ni r +soci etal +ly te +ati es +c sm +ar tery +al in +aka poor +abstr acts +âĢ¦ âĢ¦ +teen wolf +ne we +travel gram +sentim ental +per ched +han del +ho ek +f ay +coordin ating +anim ate +man ian +effor t +jer ky +f ck +adri enne +ma bly +tra ding +my el +spi ro +sol a +stor ing +over drive +monday morning +dream team +pul se +bon di +ber nie +pgat our +tri poli +son am +plat t +âļ ¡ +ag roup +îIJ Ĵ +inv ading +v cu +k ell +ñ os +un dead +pod casting +mercede sam +mana fort +cor tex +que so +impecc able +pal mer +wil doz +sport sc +guacam ole +dispen ser +cate gori +stun ts +per il +invit ations +dune din +xi e +achi eves +saf er +pre ds +ph an +knuck les +k ak +igno res +lovemy job +aru ba +ound ation +datac enter +co vert +gr ing +cou ple +ا ر +vol i +mc cle +arti sans +lu do +kal am +arom a +under taker +hu la +wiz kid +gu mb +god frey +bakers field +ker n +engine er +car ve +pal in +guaran tees +pe bbles +b ays +zi eg +fin k +â¬ĩï¸ı â¬ĩï¸ı +down pours +ro chelle +rasp berry +ðŁĺ ® +gra phies +stom p +caf es +ari zed +utt ar +cal vary +dri e +crusad er +bus an +tux edo +si u +seam us +cul tured +blan chard +town house +ge red +butter milk +flu ctu +roger federer +hel i +ðŁ¦ ĥ +u ous +ram esh +mu ppets +email marketing +ye ss +br ice +ri zio +pel o +donnein arte +u rable +inve stin +bump ing +raji v +sav a +thro wer +fore x +o hhhh +th rust +pull man +r fid +sep sis +le ed +fri ght +roun ding +ne b +ph ins +ai sha +utili zing +squ ats +gold smith +j ic +bo ks +vau s +i po +exclu sion +tari ff +po kes +min al +land s +en force +washington dc +or char +g x +mar ys +ey our +aussi e +bak ers +un popular +latin os +lar ge +pu tnam +bol o +wa de +pel o +di zz +ob struction +fla ppy +weare the +depend ence +pajam a +e te +y ann +e wan +disc la +a ay +kar ina +e ic +an trim +w soc +neg atively +kai do +fotogra fia +dh ru +colo ssal +mcle od +k wang +mani pu +ex hilar +us atoday +summer slam +co les +tapro om +unbeat able +de ma +tic ks +k ling +fil s +campaig ners +ภķ +brew ster +audu bon +qu ay +ch s +ki gali +d ler +strength ens +som al +sign ingday +gol ds +pig ment +orche stral +g q +lin kin +ðŁı ĩ +ta w +algar ve +ho v +ear le +gold fish +am ig +ex er +ben in +dru id +ðŁIJ ¸ +she m +quat tro +mer cen +men te +incorpor ating +bon anza +state fair +en de +concep tions +e es +âĻ¥ï¸ı âĻ¥ï¸ı +d son +fire arm +orb ital +we h +multi p +fo b +requi em +p light +thou se +sa id +oc re +remem brance +n old +chi pping +be v +er t +ca thy +sy m +ri ggs +m ley +dialo gues +sl ender +how l +gau teng +wd w +to bi +smo kes +im plo +b pm +ad n +mom basa +cap sul +bloom field +artic ul +cle o +goog led +flu ffy +l ard +en zyme +ve sti +ibra hi +fl ame +e mea +out ages +dispro por +ble ak +an sel +ick er +st louis +stock market +good friday +sau lt +stal led +pro m +ep som +b é +the se +sau ces +me w +lit fest +pre d +re u +kar ak +si enna +ell in +bio technology +ï¸ıâĥ£ - +tac tic +sa in +por k +mon za +ka j +lu sh +compart ment +chang ing +shraddha kapoor +fo al +ar tem +cu ando +can ola +ori ente +me sse +d ited +br c +box er +bbc two +s st +ment day +em ing +de wey +kof i +âŀĸâŀĸ âŀĸâŀĸ +reali zation +smo l +tw ood +san je +flag staff +ber wick +cor set +can 
ary +whistle blower +et ched +com posing +squee zed +bow er +auto desk +ne h +mathi eu +ba ja +Å Ĥ +hy dra +da im +am eri +insi sted +mer lot +gar ros +heart news +gaine sville +cut ler +bo de +ðŁĺī ðŁĺī +lew es +scoun try +g sa +us u +cc m +god awgs +phara oh +cra e +mor ley +hyp noti +f ades +neur ons +fu zz +ing co +high landers +star k +vig ne +pac kets +amar illo +reu ben +insul ts +bas ic +vec tor +n me +ac ruz +tro s +transm itter +ðŁĺ ŀ +interpre t +ðŁĺ ² +pre quel +mc gowan +dis semin +ðŁĴĺ ðŁĴĺ +mascul inity +indie gamedev +ali ve +te t +pe tal +ema iled +ar med +ko o +he er +ba ird +super junior +metro polis +delav in +decl ines +stit utes +Û ģ +p tbo +g lan +cho res +e aling +chri ssy +ste mc +vi an +assassin ated +pron ounce +illeg als +discover y +cav ill +fri fotos +f al +so i +sabot age +t int +p dc +ðŁİīðŁİ Ī +ãĤ Ĭãģ +ji o +endeav or +in sig +commit tees +she arer +me tz +mar rying +h dd +g by +fre t +tri sh +pu l +scrip ted +sa ki +l w +ke ye +shim i +nan aimo +ca h +à « +tem pered +ici an +du gg +dish washer +air field +s rugby +gr inch +y st +r ms +mahat ma +lan kan +disc ar +dige stion +no des +l ls +om ic +gu tter +tis garh +feder ico +election day +bo he +master card +fire ball +âľ Ķï¸ı +oy ster +p ong +do k +en route +m vc +beat the +ali stair +shu b +sh aming +cherno byl +ghi bli +the s +pin ion +d bs +sal ts +ic tion +epi ph +nc pol +in convenience +whit ley +inspec ting +wood ley +wi ener +skil let +no les +m ca +h ina +a sha +willing ness +well ness +tam ed +show time +dis advantaged +ber nat +us n +mission aries +coun selling +arrog ant +quant itative +leg alization +ho dge +energye fficiency +cameron dallas +pos sessions +p bb +harris burg +v g +hindu ism +happy thanksgiving +fi b +re acting +tweeta picture +pol iti +mu ppet +hur rah +pac e +coast guard +guar ded +as am +par ry +fore very +x q +oom f +ke anu +j ind +ri st +customer service +sac red +ðŁĺ º +ton er +occur rence +mat u +val dez +red d +is ak +power rangers +pe asant +raj ini +abra ham +e mil +car do +tr il +hair styles +obsole te +sam pler +direc tive +delavin kisses +ver ton +glo s +sp ay +paler mo +com ets +man ziel +chicag of +ski pped +pic torial +h ant +b mi +a ol +re opens +pad dling +devo s +fra ud +bas eline +que ues +sp ired +sn are +eu ve +descri ptions +daisi es +ca ching +gall eria +tri mmed +stin o +recy cla +ic ular +bir ken +raw lings +fli x +chic as +b gt +lik eli +argy ll +thel ove +ga ston +bl anca +ha k +f one +sailor moon +h aci +ima c +fl yn +de can +bel les +ap ic +zo g +taun ton +con stance +lasag na +ker nel +in ka +har bor +collec tively +calcul ated +av ille +shil pa +pur du +gi mm +fun er +a est +pembroke shire +nighting ale +n unes +hyper tension +hu bert +sli ders +infer tility +comm ended +transat lantic +metr ical +!! 
@ +Å Ł +ss g +bac ca +inver ted +fun factfriday +it ans +albu m +acqu ainted +ri er +whel an +sar ab +mu e +snoo ze +pi ff +agre eing +sp itting +jer maine +n ye +âľı ï¸ı +am bush +ze ph +con greg +univers ity +s app +wann abe +pat rice +ib d +do glo +fri dges +sun d +king ston +ar gon +kam en +hardro ck +ds ley +do lores +ì ° +ota ku +pi ping +be having +âŃIJï¸ıâŃIJï¸ı âŃIJï¸ı +blue bird +an sari +teapo t +fire work +cro p +log ans +ty ped +thick ness +ig ers +c fp +dys functional +contra sting +et ty +aston martin +tx st +dra grace +at tributes +marath on +manu scripts +john stone +ðŁĺ± ðŁĺ± +bo er +ay u +aru gula +poo rest +con du +assu mption +anag h +no h +delav in +sit ter +g ö +mor ow +kick start +com i +gl acial +ghe ad +ba in +ker shaw +en dof +fre ud +om at +i af +hu g +sign up +each other +defin ite +tu bing +shak ira +ðŁijı ðŁı½ +uu uu +sw in +sham bles +ol as +sk ell +brit ain +kn w +clu tter +om y +j ens +hang ed +city scape +scra ps +un locking +dead liest +er no +breast cancer +a it +inspec t +fu ri +ðŁĴ Į +ku d +ju le +or ah +mi ds +m dt +bur gring +r attle +pu sa +stal k +cle ans +iss ance +z ek +worth it +nam eis +musko ka +council man +urban art +bar rac +un solved +tu l +g ita +white board +soy beans +em ent +cont i +saturday motivation +conveni ently +doc king +t ado +âı © +sp ino +puppy love +po f +fabric ated +robb ers +adop ts +ti fied +kk r +indulg ence +notic eable +macqu arie +chap el +sensu al +ki ko +melan oma +lore tta +li ance +ab en +sp lus +ga al +ac ele +lib dems +compar isons +ðŁĮ µ +rhy thms +mer y +en capsul +nap ier +ðŁijĮ ðŁijĮðŁijĮ +ðŁij IJ +plat z +fre sno +re formed +ran bir +el it +the best +bhu shan +vin nie +impro vised +s ittin +re created +e ba +ec ker +ac rob +pon te +cor d +gi ddy +eur usd +fe ver +intu ition +gar i +dum mies +bud weiser +amend ments +te tra +sch nit +ay as +mar ys +ci st +k ani +ker mit +ðŁĺ±ðŁĺ± ðŁĺ± +tin ker +strol ling +di visional +niger i +omin ous +menstru al +kar ab +k hy +bw fc +pan handle +l illi +well er +stra pped +son the +transfer ring +ethe real +sne aks +ru dol +gab les +jac king +cin code +for tune +canadi ens +con for +ab normal +frank lin +tit a +mu la +persi st +cu ties +ki el +ðŁĩ± ðŁĩ +her mann +aw k +fi asco +ko to +we ta +hi ker +budd y +preven tive +mcgra w +game boy +forsy th +top shop +si ob +sad h +in tram +follow art +so aps +dragon ball +ou x +morri son +๠ĥ +lu bric +adul thood +morri sons +âļ łï¸ı +her mo +ta ka +stall one +mis use +team gb +ra gha +con fined +at y +hom ophobic +nw o +sky news +ho ya +ac rosse +wi iu +pur ée +jed dah +ðŁ¤ § +advis ers +ph ine +an is +scrump tious +ë° ķ +c ke +vin y +ter m +s dc +o do +home school +vas c +leop ards +debor ah +illic it +cur ran +as roma +nau ght +mar ig +brand i +em p +ðŁĺį ðŁijĮ +î Į +su spend +lu z +initi ation +sch aft +jensen ackles +craw ler +post doc +des ks +trail blazer +den omin +tri x +no ise +po et +± ï¸ı +s mug +vol atile +proof s +pharmac ist +sardin ia +mash able +kim chi +co ed +schal ke +doo dled +c sw +sh ur +ro x +do k +chris brown +mathemat ician +ab ound +ang elic +rock ford +d ole +yor kers +ms n +g man +xavi er +bor rowing +mark ings +longh orn +k ja +diver ted +mm it +euph oria +ay yy +te a +pa h +ck i +un cut +li ven +ky ung +fan art +mer ing +red ding +amo vie +gri di +c thulhu +schol arly +ju dah +th bewithyou +eu calyp +ðŁIJ ķ +hert fordshire +cour troom +by u +auc tioned +ple ase +mar cia +ê° ĵ +succe eded +el as +arvin d +t lot +saig on +re tt +ra kesh +fd ny +as en +se bring +gladi ators +you know +v lad +gol a +par 
ap +ÑĢ и +sab cnews +one team +oh l +sun e +ri j +cd c +star gate +run down +plat o +ph c +chat ter +ra viol +mn f +mand ala +li et +ภķ +mari a +hun gover +consoli dation +fer rell +tradition al +ilove art +gal ap +ðŁı Į +que zon +espa ña +ðŁĩ¨ðŁĩ Ń +ho bby +steam boat +mali gn +guil lau +pro hi +its me +íĥ Ģ +in scription +al z +mari an +k ade +mm on +adju sting +ne sts +intern ally +ci r +vik ram +mal ala +k ph +fel icia +the real +cap tivity +at is +marcor ubio +kale ido +che v +mano j +le more +gent ri +vi ps +tro pe +" âĢĶ +pair ings +mal nutrition +fr ay +desig nation +brun omars +az e +tor rential +pan zer +ga il +under the +the ological +schizoph re +dazz le +freder ic +mo par +ad illa +so ggy +ra un +medi ocre +colo rec +i fe +p inst +blu ef + ² +world water +gir oud +clar inet +ad olf +tar antino +receip ts +assu mp +ðŁij Ł +coffe es +âľĬ ðŁı¾ +du plex +s of +r x +lin o +timber wolves +pan dit +mo tm +e ga +ay ama +ach s +outsi der +ll en +co er +til ly +cheese burger +ma ds +ple dis +emp ty +national parks +az iz +p mi +jun kies +f ener +sq n +è s +gener ation +cleop atra +bhuban es +mosqu es +ty free +popp ins +tw c +or well +n age +ka whi +hol low +dal ai +¨¨ ¨¨ +ou ro +m health +gi on +az o +vis as +reneg ade +re ic +w sop +ðŁĴļ ðŁĴĽ +e chel +tox icity +mü n +bun k +stimul ating +asth our +\ ' +ep h +ende mic +cn bc +shrin king +peabo dy +michel angelo +can yon +wal e +su mi +si ders +inu it +? . +profession alism +dr acing +plat oon +p ons +out bound +maple leafs +de sol +cen cy +a than +ver ma +ru bbing +ok an +ðŁij ł +mull ins +authent ic +Å į +alman ac +ga ia +bb q +on imo +ke h +ty a +tou ts +y av +re posit +, . +wi ght +se eyou +cal lof +done sia +bar gaining +gr anth +sd su +amphi theater +p su +re watching +wine tasting +peak district +dete cting +thur man +phe e +èª ķ +u mich +re r +sculp ted +go le +name sake +ðŁĶ ģ +serv icing +bau gh +pu gh +pen cil +dar th +munch kin +at orium +ten ers +sun y +rolling stones +mag ing +star rer +i dris +fe instein +ag ron +âĺºï¸ı âĺºï¸ı +supervis ed +chamele on +aggre gate +succe ssive +mo gul +inst yle +pol dark +custom e +ohio state +ha ya +ci des +broker age +angel ou +fifa wwc +de forestation +al ton +pam ph +hu gged +ho bo +change able +ku ber +bur roughs +demon etisation +cape cod +vers atility +or ice +le ila +womenin science +tu a +he dges +embarrass ment +ali fe +so ars +ni ghter +hy mn +gi pp +chas u +tech s +ni all +k illa +hi ka +cam els +valu e + ¢ +sc oops +mah moud +clu sive +adri ana +pac o +oz il +un as +transl ations +whispe rer +s bi +bu xton +bio tics +indi ffe +ken ney +k lar +et ching +barra best +inst ability +se ine +vo tel +blo gged +whis key +my space +t ant +lan dia +give back +illu s +aw ak +ac ab +f bloggers +cloud computing +blat ant +syri ans +band ra +sty n +an em +ke ted +kar thik +barun sob +pin ot +gu bernat +gay e +arti ste +i fied +conven tions +hu an +geni uses +eeee ee +fol ly +somer ville +pride month +ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +chemo therapy +paul s +bak ar +ìĦ¸ë¸ IJ +taiwan ese +fol lo +c ss +re ign +nn nn +fla un +catastro phe +iti es +frag ments +extre mists +ym oun +car men +eze kiel +conne cting +se h +man ta +remodel ing +we ymouth +at oms +ce m +ne well +lu mi +the open +mo c +mili band +g land +z shq +mag gie +mani acs +m sp +ad y +cre ams +le anne +e sta +py g +af finity +pray er +dun bar +ligh troom +ac adi +wyn onna +roman tic +state dept +sick le +wh os +lam o +et our +fin ity +shru b +shar pen +pun dit +ed on +af ore +mar s +jeff ery +ter ps +medal list +kath arine +accu sing +ta z 
+roy d +from home +confron tation +alle gh +ðŁijī ðŁijī +refresh er +ran veer +never land +jo jo +lu crative +en am +ca ver +pa edi +man jaro +flu ids +the ssal +oppre ssed +mu ss +joh anna +Ø ® +cn g +buil dthe +sett les +s ith +fu ego +cl amp +ar ag +pay er +ted x +mand y +inter stellar +fr c +ch and +b cc +mo lo +len til +johan sson +grims by +nature lovers +ðŁļ¨ ðŁļ¨ðŁļ¨ +shin de +x in +international dayof +transiti onal +sat a +cad dy +wo d +if u +ha ys +holl yo +j ang +ir c +co im +grad able +" " +ðŁį ´ +ঠ¾ +a el +n yo +west lake +time out +sof i +phenom ena +cultiv ation +ag no +un armed +so t +con j +gen o +royal navy +nutriti on +fair mont +ti relessly +sn g +re ty +mic a +lu cent +slo ane +droo l +riz al +od ell +critici zed +. '" +la ze +deser ted +co der +pra s +l illian +itiner ary +dav y +an ap +whi pping +hobo ken +kare ena +çľ Ł +vi us +ter n +nan tucket +mis understood +bu laga +st ant +chin ook +z am +reli es +d ss +ed mond +sket chy +m ell +fe x +rec tor +dist ill +day dream +wine maker +ri pley +billion aires +hel ene +ati f +cul prit +bertr and +wou ldnt +ma pped +v ak +gla dly +parliam ent +kidlit art +ware ness +goli ath +âĨ ĵ +view point +tat ted +fu ls +dor sey +ang lers +li ds +ki ya +bow les +be h +b ite +compati bility +ance stral +pro x +beha ved +gubernat orial +ch field +sab an +z h +teen y +shibu ya +holli day +pan cy +âĿĦï¸ı âĿĦï¸ı +seun gri +? , +ðŁĩ¦ ðŁĩ· +im itation +impac tful +any i +gene vie +añ os +bate man +gli der +af ar +ra sheed +effor tless +sh war +dach sh +er un +at os +kin i +ch d +kha ki +k lin +felici dades +bel o +as l +to ppers +fin ley +stac ey +rigor ous +kar ting +le ppard +car michael +be ret +c se +ak hi +mer ingue +ab an +ha ke +ger i +er jee +re sto +comm anders +pr it +fl or +ad ven +ex termin +remain der +å IJ +es g +martin o +lulla by +| @ +mi gn +in store +big bang +cor di +cau ley +ante bellum +dg ate +cro ck +span dex +scaf folding +ore os +ê°ĵ ìĦ¸ë¸IJ +pom ona +ma uro +uni versi +re mi +af ootball +t ant +sm alls +ne h +worl do +tropic al +mor ph +jav elin +gla r +arqu itec +reminis cent +tu bs +spide y +make u +syl la +progressi ves +blo t +shor ten +keep in +ch ak +ang st +super food +decad ent +ston y +neuro logical +ar boretum +ann ak +fe ma +per cu +dis respectful +small biz +lo x +co om +c sc +bs bi +pre valence +him ss +esp an +mo ga +fr ampton +sky map +mas se +levi athan +( ). +noctur nal +car ameli +ang or +amne sia +outsi ders +she alth +rhin o +ant ag +ag io +ðŁĴ° ðŁĴ° +take me +kab addi +c si +m sh +coch rane +thessal oni +sil a +ha us +du sting +obe se +mack lemore +mani sh +len in +m dc +gro wn +shef field +s rs +ke le +car son +ch um +dah lia +can tore +opp o +how ling +cyber crime +sur realism +sc ran +fa iz +thre n +rac ists +r out +pk not +se mana +sin i +mc cull +ma chi +alfon so +y b +sar dar +kend rick +den g +reci pro +on f +doom sday +bri bery +custom iz +art is +c pi +ðŁĻĪ ðŁĻĪ +sla va +let te +en s +âĿ¤ï¸ı ðŁĺĺ +cra yon +ad an +tr c +migr ate +simp son +row ers +king sley +farmers market +shee han +ne phe +bor non +car ton +mic key +all ure +u lu +sli pknot +heb do +gui do +dog celebration +online marketing +acceler ating +) .. 
+origin ated +macar oni +ed tech +out field +mit z +disc us +adverti ser +man or +ha shi +descri p +cap ita +ful bright +recep tor +con n +con ey +spion age +r attle +pre st +u li +blog post +acker ay +) âĢ¦ +red velvet +mat th +inspir ing +b sd +ker ri +po con +mil lar +re pur +accent ure +ä ¹ +ram bo +ragnar ok +dele ting +british museum +pat ory +leip zig +flori an +sci fi +in ers +br ate +yo y +melis sa +ab er +ma sa +po te +mosquit oes +transpl ant +r pa +; )) +bast ille +yl an +joye ux +melo dic +cap tions +atri st +roch dale +gott i +pew die +cuties aturday +who is +aqu aculture +tiv a +sp el +he ss +ha ji +fred die +co per +brand o +v k +photo book +* , +my dayin +micha ela +brune i +sr ini +in te +Ä ± +de ol +d fc +separ ately +bun d +ve sts +to c +me ck +rein forced +constra ints +car roll +sq ft +re ver +cam per +bird man +in action +gener ators +triumph ant +pe sts +o vo +gy pt +al amo +sc aled +suresh pp +sd n +is mo +gi os +) @ +justic eleague +restaur ant +gab i +den gue +next gen +exemp li +ap ex +inspir ational +down side +kid z +u pl +et na +alvar o +fel dman +bar net +m ha +es ch +bloo ded +>>>> >>>> +kan i +ho fficial +casablanc a +bir ds +ty ga +sw amp +o day +new castle +nb ap +ci sion +cho ols +af lo +ne p +mon ton +ak b +super model +down time +th os +sc wx +snoo py +ag greg +yo ke +nor cal +we tt +prolon ged +me tast +beat er +f ta +t lap +disgu sted +y h +voice over +itch y +ip c +ðŁİ ¾ +phe asant +stra its +ram pant +j g +fer til +assu res +fortun es +sal inas +liz ards +kett le +i bs +cyn thi +he g +mc cr +soccer oos +happen ings +cor den +ðŁĺĤ ðŁijĮ +t ches +egre t +wolver ines +congratul ated +ho gg +bott ling +wr i +fer ri +bo sch +af ire +og den +s jo +j dm +sv t +con tex +tol lywood +min k +me se +super sonic +op oulos +å ¸ +âĶ ģ +knuck le +gu ise +gam i +chu cky +z inger +radi al +compla ined +bo da +fe tal +discipl ines +cor ro +ðŁĩ®ðŁĩ ¹ +op ted +filtr ation +ad nan +em cee +mi stre +insom ni +fer gus +tra jec +on don +med tech +tanger ine +madra s +gru e +cab s +z hu +sureshpp rabhu +insul ated +day swild +pp m +band ai +v day +s ff +squ id +lo thing +not dead +expre ssive +cu ll +ala stair +x u +up front +fish ers +en es +um d +dis missal +sti er +sel s +lu st +re active +prote ster +eyel ashes +al im +goo de +gre eng +da ir +com pen +anush ka +proto typing +ma pu +bear ings +ðŁIJ Ł +for me +bsbi botany +timo thy +out skirts +am bed +are tha +wend ell +stre aks +ni m +k pk +sne e +fit ter +quo ta +p ate +win ning +ðŁį Ń +sho pping +ma inst +cul ver +ste vie +mcfad den +counter parts +gren fell +fol som +dor set +tech crunch +⬠ħï¸ı +tip tuesday +us l +tre x +geor gie +ranveer official +lic ks +se wn +k f +' âĢ¦ +jap s +p ate +orth op +fe sta +stra s +mon tal +hammer smith +fore most +wido ws +mad re +ite z +mito chondri +lig ans +z ona +cari bou +m ss +andre i +weather channel +gh c +: ... 
+ta ft +awe ather +al isation +bru tal +bliss ful +nik ola +mal icious +q m +mpg vip +bro die +bl itz +applau d +dri bb +v ague +dog go +transl ating +interpre ted +hat ched +ge tyour +benefici aries +spar ring +caes ars +aw illiams +la hat +bro ke +ti mp +virtu es +rel ying +pie tro +k tn +ici sts +pab lo +lou i +a ag +pn pp +cha st +pul ses +fini sh +usair force +type writer +thomp son +dog s +ut to +ãģ į +sand al +new ly +do ge +z w +wan kers +ne gr +mu cha +determin es +black fish +sk unk +mu ps +instru ment +phy to +daysto go +skin ned +hai der +con ten +ðŁIJ¾ ðŁIJ¾ +we iler +undoub tedly +chair ing +wall is +sh ard +zind abad +adul t +absor ption +pre sto +deplo ying +drum mond +battle front +seag ulls +how dy +juda ism +des de +part ition +âľ Ŀ +no logy +national bestfriend +lesn ar +film fare +co asts +christen sen +ac an +mb u +co pped +ru bble +sw c +fun nier +far ther +where as +nano technology +with stand +pil low +bow ers +to pe +it ly +con fit +ma kar +comfor ts +bo sh +cli pper +bal la +sti k +mil b +safe guard +musi que +eas port +ya z +pad ded +bad er +fore ign +chop in +archi ve +o ka +tran sporting +tml talk +aj it +consequ ence +sc roo +ff o +collabor ated +pug chat +ye mi +jav ed +au burn +o of +ma w +sau cer +miti gate +i les +evangeli st +ter ie +re cl +indic tment +cat a +bright ness +may the +whim sical +un lv +key word +cu min +med way +west world +tra w +im posing +form ity +coul ter +ab z +ny pd +grass i +kel sey +qld pol +clock work +f dr +di anne +âĺ ij +ad h +p ann +bra vely +ae ge +un lawful +ver di +pocaly pse +phar o +kar la +reson ance +ma stiff +la dak +bu u +ma iled +hi i +craw ley +tor rent +mach ado +liby an +effort lessly +fal sely +q vist +ke ef +craf thour +cheri shed +val kyrie +s ari +kal amaz +be he +ðŁĮ Ļ +th im +ro ddy +col trane +but chers +ach im +wk end +awk ward +cab rera +:) ))) +fran c +decl an +con dos +a ja +pandor amusic +char ter +ph ill +mon trose +hatch back +handic app +gre aves +eucalyp tus +ut most +t son +bur ton +mid wives +in cur +ðŁĺį # +moo d +compre ssed +tom a +must ang +mo g +as ana +te stic +sho tel +in sol +cor sair +nh q +ben ny +sm ma +kap ur +in con +jon as +ener gies +don al +as ad +se z +n pa +archi ved +stimul ate +do p +hy d +gri eving +ãĥ Ī +ron a +why te +tree house +ss ell +sand ro +ko bo +ther most +se clu +hi ya +ge ez +mam as +prisc illa +flav oured +fas s +w old +maker space +cospla y +p tv +happy valentinesday +sequo ia +love craft +gu an +d tm +ci i +yoko hama +pos thum +re q +ðŁĶµ âļªï¸ı +galat asar +dol by +hamp tons +disturb ance +stone henge +ok c +disrup ting +month sary +jun gle +head lights +du stin +micro sof +happy mothersday +ko ko +gra zi +te sto +na idu +mal ay +ari al +ru mb +ab oo +har man +tra pe +spo ils +je ho +go dly +lock screen +z un +pi ous +ma gento +l enders +prob able +corpor al +m our +aw al +su a +call me +ton ne +go vin +devast ation +x j +gear box +war lock +per me +it ate +gaza underattack +du val +paras ite +clement e +le th +i va +fro zen +tho les +to bin +cair n +s ill +luc kiest +conver ts +st ale +pan cra +euro pale +wis dom +sch ur +ì ¶ +verti go +bi j +u bc +nu re +righte ousness +mt c +factor y +ver st +revers ed +hur i +hee chul +fab er +ar r +ul ous +ven om +ph at +green ery +bra dy +à ¦ +: (( +never giveup +di sha +mo ta +health care +dun ham +dex po +den zel +bb ins +f ics +wh am +mc g +eli an +wat a +str alia +tel lu +pe sky +spin off +ar moured +re acted +do fficial +te du +sag ar +mor ally +paralle led +fi os +dow ner +dau gh +re do +world cup +tari q +bar ne 
+glaci ers +oc cult +barbar ian +her mosa +!! !) +y ur +inter nation +p ss +sit u +p int +american air +sw am +dopp ler +ðŁĴĻ ðŁĴľ +cincode mayo +le van +hell enic +mc ne +ju di +yu h +st x +qu are +ðŁĺĤ . +sti g +g els +mot ley +hard work +euro zone +e ad +ç¥ Ń +seab ir +ci us +la id +alpac a +presu mably +pewdie pie +boo ted +am ari +tam ine +sol ace +bar row +acade mies +x ian +om ination +dun geons +b ma +de ity +ai k +stab il +hir a +affection ate +ving ne +new port +ãħĭ ãħĭ +thir ds +re tains +aroma therapy +ski er +ni ma +do pe +cr inge +con domin +to or +anim ator +sar aj +seas cape +minim alism +lake shore +calla way +berg man +à¤ Ĺ +whisp ering +stupi d +ri ghtful +requ is +ir n +se va +ut pol +tuber culo +squ ish +de but +govern mental +christ ine +all man +weap on +s ito +bur i +lo lita +leaf y +fu ch +tin ted +mck en +a hahaha +ðŁĩµðŁĩ ¹ +repe al +ne gan +ðŁķ Ĭ +tail gating +game insight +ðŁıŁ ï¸ı +yaku za +z t +ti ring +pro posing +bow lers +tra itors +ak shi +cler gy +cit o +up sets +tu scal +symph onic +sil ently +shu ff +black well +ðŁĺĤ ) +ko be +rober to +ri dg +dc u +mer ino +ft p +east side +. ~ +nb l +mn leg +ts for +frau dul +ca pping +in my +gymna st +ston es +ss in +twe aks +shag gy +oak land +dem sin +sang ria +mm va +hen nessy +down ton +ri ghtly +in it +aga ve +ob last +northe ast +friend ship +dal a +tro phy +ðŁij ½ +mag in +margar itas +ê · +ww fc +fa sh +di ke +cu d +char t +ðŁij ® +refuge es +jop lin +n cs +imp y +firm ware +pas cu +flam in +health tech +bell letstalk +w aka +ol ls +la go +co wan +bombar dier +sh ome +ðŁĻ ħ +mc master +na ve +well s +u ta +tell ers +mis fits +kap il +face off +af firm +a pro +whit epaper +super yacht +speci mens +al located +... , +- __ +ka w +dachsh und +djo ker +s work +qui ere +or um +ðŁIJ ł +som m +c mt +ingh our +skin ny +lgb ti +gi ggles +break away +resear ched +par ity +my al +ms l +re tained +si vity +make inindia +sol ves +defam ation +wal tham +sri racha +road way +concep tu +al in +iw ant +å Ī +del ft +tender loin +ga ins +faul ts +sw ire +st ellen +pol lo +dy ne +bornon thisday +asdf ghj +sq l +sali m +advis es +vo ip +ìĹij ìĨ +un touched +she il +ontari o +uph ill +so bre +de shi +nov ella +du tton +craw fish +ا٠Ĩ +ma a +tw ine +kal in +ðŁĩµðŁĩ Ń +ye ss +brook s +hoo siers +ton ka +umbrel las +ay ers +ate am +acqu iring +su ction +ä n +wi es +tari ans +soci o +mat tb +shepher ds +o so +charity tuesday +s logans +ninj as +al bat +by te +bash ir +trampol ine +mydayin la +i ja +bas el +ror y +gol die +fi rec +un noticed +pecu liar +sch a +ker son +mour ns +liquid ity +qu ipment +hi bs +ar s +aeron au +slide show +sla bs +delici ousness +sk itchen +hta fc +full erton +cre ighton +aer ob +procrastin ation +az ores +white hall +uss occer +medi ation +djoker nole +and me +um en +noxi ous +jo ss +ili fe +anni vers +sudan ese +et res +under mine +whole foods +diso be +kor i +ade le +eli z +can ti +al on +gymna sium +sarko die +meteoro logist +yl de +ste en +stamp collecting +nas al +lo tt +fran ks +ex ol +ack i +good year +animal rights +y les +vio lets +mm es +s thel +ra pping +tu scan +wai ver +tur ner +eat local +northe asthour +anim ations +tom morow +t sh +ff ame +bra e +pe tron +glam our +br yn +d cs +bal es +ðŁĶ ¶ +bro v +bre v +b ons +physi que +car ne +x e +elix ir +vol ved +l oma +ìľ ł +æ ĺ +van u +ri gs +bal ance +va res +bon ita +sprink le +perfec to +di on +le ak +calcu tta +o ba +d ma +c mon +tun er +pneu monia +bo gus +apolo ge +cl ough +bor ne +)) )) +revi ved +o varian +ner f +c legg +fan fest +cho u 
+reali zes +mc n +li gu +leg alize +just saying +for ster +bo sni +k hi +in dom +hei del +en cryp +si ss +ed di +mar bles +brisban e +y ing +pre paid +wal sall +cooper ate +orche str +mar isa +ho wie +che wy +bren ner +andro meda +e gan +sto cki +cav endish +ag an +ban o +de ir +go g +bl k +re thinking +ch ig +rhe u +sni p +p eng +semin ole +m swx +an nex +lyn da +lewisham ilton +cu mul +tb l +dolph in +agu ero +........ .... +pre lude +at our +gr anger +too ting +ro tun +dis ar +home items +da res +**** **** +ðŁij Ĩ +compre h +jin x +as well +iri e +circul ating +ðŁIJ ¥ +over board +cultiv ate +rhe tt +oriente ering +ca k +bal kans +s itt +jas min +britney spears +ro tor +se aling +g bc +oc ci +f as +eman cip +com er +war time +tic kle +son ny +pac es +log g +at rix +sr p +g win +do bbs +uz be +the wanted +dru sh +ex tru +m icky +honore es +dar win +re dux +mm j +ram i +jalape ño +io c +do ver +ju ju +whit ney +s eng +en ly +au ch +archipel ago +vigil ant +man gal +wil dest +parano id +hal i +bb ly +sanc tioned +real ms +con co +u ddin +c sk +play time +libr a +sav ag +oc tane +rec tan +re turn +par rish +mor rha +cc p +c mu +sa iled +se vent +ro sie +pil ing +he w +boar ded +seg ments +neph ro +( . +cr ats +bak es +ðŁį ¸ +back tothe +sibl ing +kirk land +ke o +gu wa +bre ads +ðŁĺľ ðŁĺľ +t q +haras sed +ga u +wil bur +j isoo +ep er +li sam +tri ppin +sh ino +ru kh +beast mode +cho a +inst aweather +rich land +gar i +fe z +cowboy snation +fur suit +k run +a en +sycam ore +se gun +ent ennial +di h +o ax +demsin philly +ðŁĻ Ģ +sn hl +pen nies +pass words +ma kin +ty e +d eng +kni gh +jeep life +hel pline +a for +zz zz +ste amy +pic ker +iter ate +happen ingnow +ki b +bloom berg +martyr dom +bul ly +assor tment +a hora +zo e +no i +illu stri +agar wal +p sc +electr onica +recruit er +gar diner +rad ha +naf ta +dot net +pi ero +geor g +bel s +ðŁĺĤ ðŁĺį +tuberculo sis +run nin +mor is +haul ing +ev oc +bre thren +sha ir +frame works +a stu +ri gid +ku ma +kre me +jin nah +insu rers +ny u +f ere +nol lywood +good vibes +- ... +toi le +sk ril +instaweather pro +cze ch +pa vel +one piece +nike plus +fi let +cav ity +ðŁı½ âĢįâĻĤï¸ı +ðŁİ £ +dra stic +dail ys +siam ese +re bu +oste o +lar k +f re +sh elling +p é +glad ys +ðŁıĢ ðŁıĢ +gusta ve +submer ged +grand stand +att u +won t +f pv +b ley +jon i +ang ames +weigh ted +al ou +ठ¶ +les bians +f j +anni es +am l +dor ia +dav in +be ta +can c +madewith unity +ha j +bad lands +mu l +blu ec +pa wn +cov ington +neuro logy +htt weets +dysle xia +thel ove +ne at +fork lift +autom ate +une ven +monte ss +he in +ha g +rel ics +competiti veness +can elo +mar tens +bullet proof +sk ittles +g ya +pri mo +americ afirst +woo o +abor tions +?? !! 
+ma che +ld ers +rl ly +preli ms +direc t +cour se +swa in +super cell +ec centric +sting ray +ple ts +wil cox +west in +okan agan +kir an +car bo +bomb ings +ra rest +bo h +gaw d +di gg +mo ana +enti rety +en closed +dodge ball +par ton +milky way +at r +thorough bred +re ally +qant as +epiph any +ine e +aero smith +spi eth +ar thro +ell ini +du bu +bra ving +âļ½ âļ½ +re structuring +illumin ate +equ ili +mp i +ash ton +pony tail +ma scots +flat tering +cru m +ast a +à® ° +stranger things +bar nab +ر ÙĬ +make shift +got cha +will am +cho irs +kilom etres +gho sh +eu than +dol ly +un ning +the ar +cre we +w sw +j ace +dis miss +ke an +ho ta +kh at +~ > +thir u +ren dez +hart man +tee ssi +cas ca +z ah +hydr ange +fo d +aw p +mzan si +thick er +nago ya +ne va +sti que +cast el +dam ian +there by +ji ang +ale k +music islife +ra q +calla han +gou ache +somal iland +sean hannity +ra heem +lo se +elo ve +whar ton +rectan gular +illustr ating +har ne +auti sma +scra pped +ell and +decre e +nag pur +ki pp +so re +n md +ma as +gun a +gart ner +bel li +then ight +je on +gendere quality +gi ver +a el +gar ments +ne u +mardi gras +mar sden +ro wer +pollu ted +camer aman +vin od +be asley +cro c +ji u +hollyo aks +anesthe sia +al les +ste ward +lati mes +ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +tic ian +gor ia +come dic +ðŁ¤Ķ ðŁ¤ĶðŁ¤Ķ +nai ve +sli ons +ł Ī +bur glar +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃðŁĺŃ +york shi +se ñ +fan boy +lau rel +inci dence +potom ac +rober ta +presi den +pr yor +os bourne +w ku +te me +pal ae +ðŁ¥ º +re boun +itu de +red dish +k hand +coloni alism +north carolina +ðĿ Ĵ +manne quin +lady bird +ta sty +knowledge able +g shore +ðŁĮ Į +à® © +qu aker +salz burg +med alists +chy na +bridesma id +ma ori +ro p +outra ged +in adequate +truck ers +al ana +ìĿ ¼ +ri x +oooo oooo +command ments +lam beth +aa j +eco friendly +bla z +morecam be +boun cy +rou x +rai ded +mi zed +sh c +gaw x +labor atories +ru bs +rest room +consult ations +ca jun +virgin i +so ir +rev ue +ple in +wag er +ç ¹ +we do +growing up +! ðŁĺĬ +face ted +sin ners +ho vering +ti ene +seas oning +an ja +leg go +il is +fla x +dev o +ash ram +mati sse +ker i +go wer +bo tox +mar shes +unh cr +ts m +opti mus +dun i +stu ffs +so k +order ly +n bad +islam ophobia +raviol i +fab er +cre ds +won ka +in fusion +over weight +daily news +assi mil +acol lege +medalli on +kili manjaro +sti ff +tham es +sun ken +th ard +my dubai +hilari ously +han nel +plu mber +fair view +separ ating +rasc al +qui en +necess ities +confeder ation +ll ll +: ] +weak nesses +bron co +ra ffles +el ot +ãĤ¸ ãĥ +advent calendar +ðŁİ ¹ +stra vel +tun ic +k su +im peach +e spionage +! 
- +di ment +cur rant +bio de +commu ting +by ron +ðŁĴĵ ðŁĴĵ +shad ed +tr uro +cray ons +ar ne +h sc +fre aked +dram ati +fle ek +u cd +marl borough +^ - +cross ings +mal o +black ops +bin ance +cho ked +chen ey +pl o +ge stures +val edic +ryan air +rem ington +v cs +mc kee +ec z +be gs +nail art +mayor of +happy fathersday +war t +pet itions +n ingly +clean energy +bro x +sl alom +exist ent +ab ay +ug liest +tom p +stom a +sel by +goal scorer +ben ji +overwhel mingly +lan s +semiconduc tor +south korea +re scheduled +sk yl +en listed +dow ski +si del +rosen berg +nas ser +white head +pri us +har are +en n +ry der +í Ĥ +mon g +clas ico +transpor ter +po tty +is me +** *** +vic e +sk it +ode ssa +l mp +her n +raci ally +pin oy +paragu ay +obitu ary +go es +bu cha +side walks +angu lar +un constitutional +transiti oning +i bu +gu ys +un packing +oooo oo +black girl +ber gs + ¯ +wordof theday +trump train +thunder bolt +m si +fasci sts +ठ¬ +t sk +collap ses +raje sh +loveis love +migr ating +set back +ðŁĺĬ âĿ¤ï¸ı +t els +safety first +nar rated +jae joong +un answered +lique ur +en nes +dal go +bill ings +salt water +mer maids +lon gs +clap ham +we arec +pic collage +n ach +h ace +pois oned +lo th +ag na +adel rey +guar dia +poli shing +peace keeping +d all +p isa +la pland +process ors +de andre +so bs +p once +dra ins +c be +ðŁİ¥ : +spla sh +meat ball +fon tana +worcester shirehour +ne v +bri sk +b int +ac r +po x +cay enne +skril lex +j fc +hahahaha hahaha +gla s +en gul +tempor al +oni zed +con cre +com pose +vibr ations +plant ers +fer t +criticalrole fanart +t bli +sch allenge +huck abee +munici pal +iam bic +radi os +ne vis +dura bility +mc cla +horse back +inst itutes +ful fill +atta ch +ate ur +ak an +resi sting +illumin ation +hand le +hair care +om ent +macle od +ka iser +g no +bear down +ly f +gl omer +distor tion +z m +san k +roo sters +is now +as ports +ag en +wo ken +st george +ro mper +my le +econom ists +ru to +t will +health and +d ito +ws l +tair p +pra kash +mic heal +h ts +w rights +kat su +fioren tina +defen seman +d itch +var sity +texan scheer +ba ham +sc anned +we il +seduc tive +ðŁijį ðŁı½ +fu e +er win +dav ison +ter ran +moo ds +wool f +re source +@ . +cu sh +ðŁį ° +regre ssion +cur led +la zer +jo anne +ab bott +mo z +down ers +mm mmmm +valent ina +k hair +dream t +cro ok +che k +ste aming +nephe ws +cl eric +as ober +indefin itely +w ye +us news +joy ce +flu shing +wynonna earp +ron do +kis s +hot dog +bar ns +sax ophon +far ley +gas p +decre asing +al way +pe x +l sd +shi ft +p outine +ra zz +rescu ing +ni ko +ho ch +cc l +u aap +n ts +m car +il wx +conqu ering +ket tering +stur dy +delay ing +sto k +vani shed +cath ar +bin gham +in v +ic hiro +he mo +budge ting +[... ] +be ss +sebasti an +slow ed +ðĿ ij +musli m +stun s +acton climate +ve a +se ton +rose tta +oun t +hard in +flu id +ca w +ðŁ¥ Ĥ +yach t +un l +sp hy +provoc ative +or ic +is back +__ _ +nicol as +gy an +loo se +fl in +reb ate +: :: +! 
"@ +com icon +she ff +down stream +chic hester +beach life +mom life +diabe te +ar ra +van e +ok u +ye o +man go +try out +app ell +he irs +arjun a +dd u +na veen +movi c +soci alists +s back +criteri on +soyu z +k her +da z +yol anda +wine oclock +re ina +one w +leon ard +en dez +u bs +support local +facilit ated +carameli zed +b pa +vuel ta +my tho +m ami +spe are +nbap layoffs +fe vre +nick jonas +im print +c so +craig slist +la salle +gi deon +ha doop +dis regard +w ud +tu c +ma gee +acou stics +ta a +qui e +pol a +cr t +dw yer +dis sec +capit ol +men tion +kn oll +he igh +fin ders +plac ements +l se +indi ra +gur i +madhuri dixit +kingdom s +iambic pent +geor gina +je ky +conflic ting +bay an +aga tha +uph old +dr on +vic ar +ex pat +periph eral +pe ssi +fa f +ance stor +? .. +wid get +pun c +comm enced +beav s +air waves +ad dis +po a +de sses +co den +vu e +ru pee +kar in +spo ck +m sy +ภ° +pr ick +fill more +ti fication +thing sto +sar de +em ile +pere ira +n ad +bright ening +arre sting +wo king +usc g +sp ill +raspberry pi +hu go +ite c +is ma +cuff links +optimi zed +oc c +mi wx +en ka +el ited +afford able +sa kh +coron ado +ho h +at ul +ai oli +jim cantore +accoun ted +vin ay +her mit +groo ves +ran ch +r illa +we tter +ou tof +veter in +ni kov +ki an +fair banks +ram apho +n iti +k ko +ru sty +ne stle +tv xq +shahe er +âĿ¤âĿ¤ âĿ¤âĿ¤ +penn ant +gem stones +dem debate +ðŁIJ Ĭ +auton ews +support indiefilm +mach o +ve x +new sat +ne ti +conce ssions +can died +yof the +mac au +den ds +cricke ters +san iti +mari ano +gh at +ar toftheday +¡ ľ +e gos +gen oa +chat bots +bri er +al labout +mon ty +spi ed +r tr +comfor t +sni ppets +real time +gra in +exam ined +en lightening +tt u +god bless +release the +sing ular +ki ans +ha ka +sor ren +defe ct +mar g +equ ities +d orian +su ka +per l +aishwar ya +pul lover +preci sion +fair way +ne ve +rive ting +vill anova +en com +ak o +passion ately +europale ague +siem pre +x vi +enligh tened +c fr +âĺħâĺħ âĺħâĺħ +wast eland +is f +new comers +emergen cy +amphi theatre +- . 
+text books +figur ative +tre mb +pe sc +ab hin +ab bot +ac acia +har ds +por sche +kau ai +el isa +car rick +abo u +elli er +be ch +neu tron +galap agos +ru ben +in nis +how to +nun s +sab ine +i ac +clin ched +no tori +fi ves +cairn gor +per i +gr c +ðŁĴ¯ ðŁĴ¯ +mal m +twelf th +di ff +rout ines +marty n +lin den +synthesi zer +nu mber +game cube +fal kirk +byz antine +queu ing +gr ill +scal able +char red +rou ting +her bali +gri zz +ðŁĺŃðŁĺŃ ðŁĺŃ +tol l +termin als +l pc +ab d +war mups +remo vable +¯ \ +vi go +pap aya +ne ve +lov ingly +jo kers +ib les +sse tt +poten ti +pel e +gi gi +sadi q +leg acy +son o +ru pees +retar ded +ele e +par r +fi ance +ey re +say ers +pend ants +mak nae +al bans +adap ting +p ff +pu berty +ji u +ing rad +hypocr ite +diplom ats +phys ical +rob by +bon sai +ãģ · +f att +catal unya +âľ ĸï¸ı +ro ma +more land +so e +conver sions +stl blues +shol m +gra ssy +pra do +on u +assaul ting +> _ +sett es +dis graceful +aph ra +âļ½ï¸ı âļ½ï¸ı +ठª +kil n +goal tender +s ru +philanthro pist +b als +th n +stu den +sando val +dogre scue +eli ons +asse ssed +lar go +hec tares +sh rm +sa if +cle avage +no ches +n ene +fat alities +cur ing +clean ser +al es +p vp +south bank +pizz eria +marsh als +kni fe +an dover +tbli ghtning +sr sly +ou te +digi mon +timesof india +prome the +le bo +f su +wit z +rever e +man as +mam ba +ch ica +gu an +exhibit or +csr racing +d ere +xx xxx +gu sta +story time +ston ey +organ ics +and u +se am +min ogue +anushka sharma +ab a +ðŁİĻ ï¸ı +ugand an +chro matic +as sn +document aries +sh t +ru paul +loy d +k ats +e us +ite ch +me dusa +pan ty +kel logg +et to +talla de +sha a +do st +p ms +mari ana +je ster +croo ks +ðŁĶ ¬ +min danao +ind hoven +ðŁ¤ ª +le xi +tv n +jan is +co te +ãģ Ĩ +ser rano +iw m +ðŁIJ ¬ +k ke +distribu tors +cap u +counterfe it +camp site +ag gie +ðŁĺ ¼ +chhat tisgarh +~ @ +state u +san di +prevent able +cl s +can ne +mm c +i ver +sa haran +pal is +night out +do s +ap ia +absc bn +manag erial +aro se +mo wx +aro sa +ðŁĮ ³ +under dog +remo ver +astronom ers +lent ils +su scep +smoo ther +pend leton +fau cet +e mory +dal mati +af cb +tic us +exem pt +en rol +d heim +ðŁIJ º +restric tion +star fish +sto w +snor kel +thunder birds +she ad +homo sexual +dy n +as li +andre tti +dou che +dom o +tar mac +slu mber +pr onto +first dayof +mini ature +mari achi +argu s +recomm ending +mobi les +in ce +illustri ous +or c +adver ts +gr its +wea sel +pag oda +over pass +gre ys +maxi mus +arma gh +wood land +sun ni +ðŁĴ ī +ë Ŀ +ti one +soci o +ho s +ðŁ¤Ĺ ðŁ¤Ĺ +wind sor +subsequ ent +munch ies +id h +exclu ding +e mi +cu th +z ai +week days +law suits +barn ard +Ø ª +pe tting +net es +mul ligan +pharmac ists +ra quel +e ton +cran ston +gil ded +cle ary +ce ph +ra a +pam per +lombar di +as in +sher ry +pro d +for te +ari anism +buffalob ills +æľ ¬ +ðŁĶ¥ # +uu u +just ices +car ina +nat in +mas low +dro oling +cog nac +cam ber +el ong +r dr +in en +convic tions +am use +tro ck +harm less +visit ation +gen omic +bl and +beno it +chim p +tuscal oosa +gre asy +x po +gil t +se q +per mitted +christma seve +book s +mu e +old school +human right +be ati +ðŁĶ Ŀ +sh at +sculp ting +h wan +fern andes +sci utto +fu entes +endeav ors +maid stone +un paralleled +shou ted +queen of +mer c +band ic +ve da +sel angor +pi le +ja han +intimid ating +disapp ears +cl ich +za ha +w urst +hi v +fod ils +cor dless +aaaa aa +hy dra +bel inda +e els +bu f +su staining +rugby league +no c +brig itte +( ðŁĵ¸: +tromb one +soo the +smo g +ad p +stab le +ing ley +diagno 
se +ms g +we ss +tic keting +one e +nsw pol +e up +auto psy +adity anath +sun down +river front +si ya +p is +hier archy +dur ango +di jk +ren shaw +he aps +epide mi +david bowie +interne tof +dd i +nation ality +mb ar +air y +win der +w alia +elli ott +c x +bav arian +pl att +an tw +wi wx +sof ter +ne ha +h eller +th and +dani ela +bo ast +degra dation +ðŁĴ¦ ðŁĴ¦ +transform ing +man e +av ut +ðŁĺĪ ðŁĺĪ +vo ter +the e +t ate +pu ff +in door +sop roud +boy ce +boris johnson +wait in +immun ology +ðŁıĨðŁıĨ ðŁıĨ +âĿ Į +street food +liz asober +cavali er +c elia +need le +motor ing +g ato +, ) +ra de +harve st +t ms +jar pad +on ey +air men +v re +impair ment +abhi shek +snoo p +l ant +fam ously +bl ou +s ze +g ander +un touch +tu f +dee jay +col lateral +b ind +ðŁļ © +pin ning +ic n +' ; +the economist +ul tram +worldwater day +ti poff +the i +feed ers +campa ign +sc umb +day weekend +yo m +pe dic +h ough +ps v +pl in +on de +boston marathon +az zy +* _* +con ley +thi ago +hoo o +gal erie +luci d +je tt +gl itz +final fantasy +achiev ers +y ung +peregr ine +op hi +dam es +biom ar +âĺĢï¸ı âĺĢï¸ı +sk c +l ics +fl ank +ar rahman +ho of +uphol stery +t ats +wo z + ¿ +snor ing +ra er +l ju +ap d +pl ating +kan u +im ation +fragr ances +m ra +mor ay +mo tt +im muni +hearti es +bho pal +tim ers +g ata +color way +car nation +win get +si ghs +s ville +optimi st +chate au +olympi ans +ci o +singer songwriter +ny o +fi bers +bur ch +ag ro +mil ne +ig bo +cr amer +ation als +dan ube +pad ma +nor mani +en forced +bre ck +boeh ner +ar den +sur rendered +pros thetic +om a +ha iled +calcul ations +w fa +bi b +fcb live +fon da +west coast +que sts +friend ly +to wie +fit ch +bal ot +star dom +scrat ching +ho sa +thi ka +o ven +stro ke +out post +pharmaceu ticals +hi kari +mu y +af d +fallon tonight +squ at +or u +dra ined +chocol at +ë¯ ¼ +wor ths +ri b +mu j +that s +residen te +it el +boo st +mi gos +mul led +la a +etsy shop +don keys +me k +p tc +flin ders +e hs +ro hit +mu ir +g ad +compos itions +åĨ Ļ +combu stion +i kh +yemen i +wav ed +gar ci +ak os +oo ds +fu sion +se que +s lan +pl ur +kic chasu +shenan do +s ams +worl den +horo witz +with me +mic robes +k ki +ðŁĴĶ ðŁĴĶ +w su +patch work +fre er +y aki +the art +symboli sm +mil er +bt n +ma bu +side kick +motiv ates +sag itt +natur als +serv iced +ps ori +pa ola +qu ig +i badan +gi ggs +ë ³ +sciento logy +si oux +salam at +d res +cad bury +d hawan +ci ón +_ ' +swa pping +maris ka +james bond +explo sives +ay les +af er +s agu +cen sor +tom a +jeff erson +ring ed +par tist +ir responsible +aguil ar +vac ay +equ itable +altrin cham +ac ur +man ish +ger min +schoo led +pu tter +ed ad +nav al +toast y +sol areclipse +dish u +coy ne +ac co +mu ck +mar an +el os +len der +cro ix +worth less +ha ber +gun men +ðŁį ĵ +zen ith +t enders +hur st +hol tz +itali ans +car low +u cd +characteri stic +bun g +av l +u th +sa sia +rs l +red man +neighbor ing +green peace +sti ps +follow party +y gk +en os +omni bus +na issance +chri ssy +secu re +call back +ji hoon +memor y +block er +l anta +daf fodils +bil t +ffer ty +fau st +ie c +nipp les +so g +m nd +jagu ar +bol dly +ab poli +pro position +gun sense +evan sville +cu tters +we go +dou n +do x +stal lions +ka j +shi ppers +j awa +vol o +le ven +pap rika +kov ich +jor di +induc tees +app alling +dial ysis +allevi ate +âĢĶ âĢĶ +pie ter +mid wi +q tr +juli ette +inter mission +haw ks +act ment +one ill +k lin +vam ps +fam ous +cou ld +autom obi +da an +west end +elli p +nh c +mel anch +web series +ton gue +snat ched 
+smy th +tan gible +sl i +e asing +bar stool +over lay +afford ability +ting ed +ter as +ay ush +wanna one +rh ine +dan a +sh ana +kend al +fer tile +w ir +repl eni +lar vae +is ro +con vos +ab brevi +u cc +hun gry +bur rows +ag er +nav i +mat in +du per +cer n +ma don +ķ ï¸ı +é ģ +tu ps +hy att +sh ep +friday night +wis er +hei di +hat ton +p gh +foun tain +wrist bands +ahmadi yya +aeri al +subscri bed +so los +m ace +sla yed +for fe +dul ce +christ mass +arun jaitley +viol ate +ob stru +ni eces +w vu +idy l +fa ze +pre serves +infr inge +premi ers +inter vals +agen cy +( © +stand alone +di mes +bo er +param eters +ge tit +ðŁĺĺðŁĺĺ ðŁĺĺðŁĺĺ +tu lane +for given +scol l +mb ps +smash bros +rob bi +prima vera +ali st +ghost ly +ay at +ye ats +impre ssionist +ear phones +caul field +wai kiki +sal ute +sc ou +mu ay +louis vuitton +bak hta +ado g +inven tions +hur d +forec lo +stream line +thalai var +ch snews +will ard +t sn +euro parl +cru sher +my sore +gro wer +ra ping +pat ti +g den +sm w +muf ti +kid man +ab r +soun ders +skep tical +ðŁĶ İ +sun dar +i me +fer g +feather weight +ar lington +pas qu +ag azine +wearab le +nati c +mccl ure +inter mitt +hor de +six ties +car te +bha v +ze al +experi ential +ador ned +som mer +eno te +hypo thesis +stin ky +pro to +dead lines +vo gel +mus ings +monc ton +gu ter +f le +aci on +voice of +ta sha +inhabit ants +type face +s ba +bts x +ðŁĶ Ĵ +wor x +u hc +jo ko +cell ars +gor o +continu um +... & +weather cee +ha p +sr k +ris ers +lonely planet +un named +co eur +ðŁį Į +the world +ili ke +fa sten +ami go +ri ba +ramapho sa +staf fers +had ley +? ?" +fi ore +sal ut +hu ff +bez os +Ñ ĭ +ra der +kam ala +in line +fill ers +um atic +all in +shat ter +re in +o ku +ch ases +fla gged +baby metal +water stones +ts b +cut out +op hel +aam a +rockab illy +sto lic +jet blue +ich ick +down ton +uzbe kistan +pat na +la q +gr ange +) _/ +subsi di +sc p +newsc ast +it sa +twee tyour +e mor +archae ologists +uni fication +por ta +q x +protec tors +pro hib +charis ma +car tag +ren fre +scul pt +guwa hati +de ma +boo p +unf pa +dex ter +lay la +alleg es +sou ps +never again +l ys +cal c +bar oness +visu alize +ger ber +absor bed +i ers +a han +fon tein +detec tors +verst appen +sv c +formul ated +ac dc +li x +in competent +bh k +lour des +water house +snow ed +appreci ative +sig ma +lizasober ano +pen ned +pay check +tall inn +fanc afe +par isi +av alley +vi g +ru fc +hard ship +so cute +po ise +ì ¹ +roth schild +k ly +???? ???? +l hp +il ay +f hs +am ad +ide als +brad bury +bal boa +nic ot +kid nap +wol ve +tas manian +op t +matthi as +ãĥ³ ãĤ +super markets +mylittle pony +me lee +li ster +gr oun +fe dora +kind ness +en en +bra hms +¯\ _( +ros well +mar lene +ic u +re formation +or ail +he brides +dispar ities +terrac otta +swal lows +re id +influ encing +flu or +den e +tum our +blon des +thunder bird +sh eva +moga dishu +ka b +cre eps +i ving +ene ed +anno y +âĶ Ģ +intri gue +enqu iry +ar aj +tur al +kuber netes +end lessly +divi dends +tor a +ti sh +commemor ates +un ra +tri b +pon ty +ne m +diss ent +brew ingco +ðŁĺ ½ +nor mali +bi of +( ... 
+chil len +ì£ ¼ +mell on +av is +mccor mack +ing ra +enrich ed +custome rexperience +testo sterone +snu g +sett i +ger onimo +inqui rer +bre aches +very thing +bloom ing +mu ra +dispo s +bi de +de va +shade sof +in trin +sh ev +s ven +nayanth ara +gan esha +c ws +ber ta +label led +use um +nick named +ma han +car uso +ap ur +ðŁij Ĩ +w q +orphan age +discar ded +mag nu +lu e +je on +bridge port +pac ing +mercur y +( ðŁĵ¸ +marx ist +amphi bious +transplant ation +stit ching +then burg +gradu al +ãĤ Į +ro ft +ma ils +ine c +guy ana +dopp elg +ver o +re write +head less +harb augh +gate way +car sforsale +sw i +st is +mach t +un de +sura baya +stap leton +nur turing +mil ner +ya o +lma oooo +ko sh +arsen al +k ame +er ry +ar royo +dis misses +ru bbed +rc b +lew d +dil u +and or +vi de +ur in +inter sec +ha ar +al b +year swith +app leton +é al +ul livan +suc cu +monter rey +d mx +artem is +ron nie +farm land +s football +gro tto +anth i +ãĢ ģ +à® Ł +vid ya +jimmy fallon +ൠį +t zer +gravit ational +w thr +u hhh +e hr +tin ker +ti juana +scran ton +ram charan +bar clay +re van +m si +ka p +wr s +we thenorth +tor al +sat u +gro m +fac ep +erick son +z yn +se dge +oo dle +spur sofficial +ds p +sic ilian +soli hull +recei vers +ladak h +hend rick +ther i +presi ding +mc guinness +litt ers +gun nar +gh oul +wi b +n tv +kar o +fro ck +b lau +ampli fy +all is +ul lah +memo irs +kh loe +intercep tions +pet day +lo oney +con fin +ch ay +piyush goyal +frequ encies +ut z +event ual +warm ly +obli vion +an ka +ta it +âĿ¤ï¸ı . +director ial +ru lers +prince s +mu ck +stur ridge +deu ce +abri dged +bagu ette +un cles +pen du +min ding +forre ster +av ila +wall er +wall street +ment or +hin o +high way +crom well +fanart friday +mb i +co yle +a hi +tro ve +spie gel +pay tm +mcin tosh +jan sen +nit i +nash ville +len o +leicester shire +le gos +dic t +ðŁĵ ½ +sp ad +beverly hills +sy rah +separ ates +z ain +un fit +dra gs +tan ia +over flowing +hri thik +haw thorn +z ani +mac far +fi de +to tem +pe ds +fundament ally +cal ico +sin ner +j ä +hil de +ds d +ten ay +ta hit +mil f +lie b +inform ing +up lift +ra el +mortg ages +lec t +ii ii +guillau me +compos ites +old smobile +l end +gar th +com mish +bapti zed +scorpi ons +ru cker +bringback our +alli ance +thalap athy +tal i +sp ans +eri dge +wither spoon +lin da +sky lar +kor n +hom s +Ä į +sil enced +caf fe +ar ty +dist inguish +to wed +pun g +jessic a +ear nest +beau fort +t ama +study abroad +si khs +new bie +nav ratri +mar ble +loun ging +lit ter +dal it +so sa +iz es +gra de +com promising +tr iton +de tta +v j +chau ffe +spec tral +powe red +montess ori +artic ulate +hal ton +al co +ye y +mn twins +acoun ty +ðŁijı ðŁı¾ +âī Ī +mad men +kal a +gru m +chi k +ati s +su me +akh tar +job search +high lighter +bo ath +âĦ ¹ +tar zan +lam bo +âĽĦ ï¸ı +ox fam +dump ster +pretz els +mac os +incl ined +fac tual +adverti sers +shu i +pu ree +ml pfi +anti dote +cap o +pa str +merc ado +but ton +ar min +ag g +lol la +horri bly +er rands +christop he +time snow +monday motiv +li ss +scand als +mc i +dispropor tion +âĺ İ +sur pass +samar itan +so tho +pu rest +fl att +trivi atuesday +delec table +leop old +hermi one +chou dhary +en rich +¡ ¡ +subsi diary +ine qualities +bachel or +auto immune +la kota +i hop +ad jec +the simpsons +sh es +se k +gret chen +up stream +hin akhan +coper nic +x tina +lu g +tough ness +e ad +cli pped +bi us +sl v +fah ren +dee pak +ca u +x an +im mature +dig ni +bo bs +shred ding +but tery +accommod ations +de ven +chun ks +super league +sky bet 
+kil dare +je et +ë į +ce k +wrec ks +pro pane +oh l +tb d +quo i +trum pp +mi mo +reluct ant +ver ne +o ic +ma gh +ar nau +se ver +li dge +stair way +kicchasu deep +ðŁĶ º +mach ining +aama admi +ot i +c da +al it +pan y +inst alls +ac ct +e shop +di em +hard well +fulfill ment +sc afe +qu ack +extrac ts +swee tened +fi ghton +f di +d inger +wal tham +us ur +refe rees +seok jin +gran n +af rin +th n +sch af +par cels +bet is +amar ine +nom an +kh tar +mor itz +cou pling +bar ons +ðŁIJ ¸ +à ¸ +sl p +sad ler +x ander +tri ad +mc millan +kh z +divi ding +ìĹijìĨ Į +dar yl +zed d +le ys +pla ques +flu ori +tipper ary +on nell +di dier +lang ford +im c +the sun +bir dies +ar cha +ye ssss +t di +dar ia +cand ace +al tam +pal aces +ch it +sant am +event ful +book of +ad b +mon stax +cre ole +co el +âĸ ½ +we aren +sten nis +she ath +ati sm +gron ingen +mlpfi m +le pre +wrong ly +rsp ca +rendez vous +acknowle dging +pel vic +solic itor +sla ys +nue stra +lo d +is lander +fer oci +fashion show +ra ss +dge on +adole scents +sma shes +negli gence +grate ful +ved ere +sw oop +ing l +apol ice +vand alism +gan n +jo ao +di supdates +zimbab we +under age +radi ance +w of +bour geo +pla s +cr ani +gh ue +wrec kem +warran ts +re form +jim mie +at wood +ys l +neil himself +l bj +i man +tan to +nois se +ver bs +equip o +al together +mam ent +l ice +dou glass +tier ney +pri med +j hal +furn itu +braz ili +v ill +past els +n ison +u ff +paral ysis +jay e +im po +ðŁij ģ +strate gically +pakistan is +was sup +super bike +thank u +tru elove +sha ikh +israel is +vi p +to g +li en +la ker +grey hounds +cul ars +bian chi +balot elli +ar ran +loo s +str ates +he bron +ar vo +sunder land +the al +tomb stone +sand man +c pac +thanks giving +love him +lat ino +an in +aka if +ĭ ãĤ +tor quay +di est +alli anz +ðŁĺ ķ +golf club +cl lr +wal cott +sch nau +promp ted +nomin ating +len nox +val et +mon ro +may ward +e ph +ðŁĶ Ķ +inter oper +r da +re flex +arm chair +ê° ķ +stri pper +por ti +ph arm +ham za +ni reland +ne ue +h pv +port foli +sun burn +fris bee +be al +bapti ste +x h +ty m +pr ati +o vers +haz rat +deser t +der ry +us ky +em mett +ach arya +)_/ ¯ +shu d +may a +ham ill +ra im +nr c +fitt ings +cur vy +ðŁı ĩ +ster ling +ॠĢ +wal kin +short cuts +mil ly +ast ur +alpha be +pl i +pe z +miss you +rad ford +ml g +ta eyang +notjust lakes +du mps +seren dip +le ur +ra ving +e ster +de priv +absc bn +ðŁijĩ ðŁı» +scar city +o cr +mean ings +cap t +da hl +fer mentation +bri oche +to win +out lander +massi mo +en cro +ðŁ¥ ³ +buil t +po tam +kir i +tm w +monit ored +k ites +peoples vote +gray son +íģ ¬ +afri ka +a dies +i vote +gy ne +g annon +di x +c mc +ou ral +fox andfriends +bel i +ig ne +gl an +katrin akaif +co politics +qual itative +p si +lu cci +disc oura +âĺ ® +kel li +gau tam +carac as +reale st +pu la +in us +hill top +make aw +atten borough +tw y +r arity +peck ham +ma hon +corn elius +clin icians +ton line +tb i +paradi se +ka si +inev it +fresh ness +colling wood +lun atic +defen se +cop d +in fra +wain wright +sains bury +alab am +te ma +lac o +chec ker +releg ated +tren t +stal ks +huff post +bhubanes war +ast ral +share your +prim rose +hi me +cat an +end ment +en dow +cle mens +mal oney +hil ary +game time +den ise +collabor ators +b wo +radic als +gue tta +ici on +au a +snap matic +sat chel +excav ation +base man +s ão +gn ation +fel d +surve y +shah zad +ma st +anirud hofficial +tru cker +ot ago +geo graph +ethe l +âļ¡ï¸ı âļ¡ï¸ı +s ver +mu tt +internetof things +ancho red +wh ouse +bang la +bal main +ç¹ ĭãģ 
+break fa +á Ģ +twi ster +te tris +ca v +stag s +g z +au b +stor med +hel ens +yar mouth +st asy +gustav o +co sc +vin son +up p +sc ricket +assump tions +app e +nu h +u er +pre mise +n aga +e amon +coron ary +na f +north side +el mer +ro tar +out lining +el f +re surg +kat elyn +in can +hyster ia +ce e +am bani +pro lly +Į ãĤĬãģ +ax es +san jose +rem brandt +mag pie +even ly +scor sese +qu aint +f g +b buk +indian football +weare all +spd wy +pis ces +ec g +âĺħâĺħâĺħâĺħ âĺħ +pre orders +: | +ni pple +sal azar +ju me +jail break +min n +bas sett +ze tta +jef free +ad jun +tic on +san diego +drink local +chol era +solic itors +o bo +com post +ni an +wr a +tre ach +ic ic +profession al +del ve +leg ate +histor ia +cro issant +con noisse +nam o +palli ative +chem trails +i ority +global warming +comic art +behavi oural +re sted +li as +cli mates +Ł ãģĦ +rut land +nou rish +menopau se +hot ties +demen ti +ve spa +mel ville +anal ogue +tz man +str ung +im perfect +gl are +cir cling +ros berg +rec o +oc ity +lo ire +em be +do ssier +ne el +nan do +me a +gal vani +fin esse +ag p +berke ley +asi m +âĺº âĺº +quil ted +ish ere +un matched +po tion +for z +at re +selfi es +juli ana +ðŁļ ¶ +âĸ º +mel ton +âłĢâłĢâłĢâłĢ âłĢâłĢâłĢâłĢ +spin rilla +pur cell +ed p +at leti +tony awards +ra ja +pro gno +mol ten +stu ff +p ally +nobel prize +âĻ» ï¸ı +spiritu al +spe ake +sa sha +bri um +tru ss +critici ze +assassinscre ed +yor uba +u lo +fire man +workin progress +ef cc +fla res +ro bot +hi kers +cl l +shado wing +pat sy +leh man +c ns +å ± +guad al +à± į +ra pe +r honda +paralle ls +son ja +langu age +land ings +z ola +cr amps +bur ning +apprais al +jol la +ham m +kas a +gul ly +f go +uly sses +ri be +ðŁĴ Ħ +ib u +eti enne +bri ar +fin ely +comb ating +y ql +go tham +we chat +to paz +primar ies +l se +iz z +hel e +dispon ible +cy stic +bel ichick +th rush +kansas city +ge om +soli di +red bubble +by stand +cambridge shire +par fait +ast le +ow o +ind ore +stom ping +sm elly +ðŁ¤ ĸ +locom o +adm itting +hol me +clock wise +min sk +mc co +for get +ev p +cam ra +ab ella +yo tes +universit yof +mé xico +silver ado +ric ket +crom bie +pu j +eradic ate +deli ght +y go +glam ping +vic a +du ggan +coun ters +cf d +sc our +react js +pu ram +paras ites +in ki +vill en +stel la +li mbo +ang as +k cr +ðŁĴļðŁĴļ ðŁĴļ +vap ori +mum ford +oli gar +à ¼ +al oo +boo ties +ad r +k elli +dru mmers +av ici +nature uk +ron al +in trac +un splash +le che +g oma +el ine +envir o +bi onic +bu eno +mi k +av in +star ling +em powers +cake day +boy cot +ðŁĴļ ðŁĴļ +ðŁĮ¸ ðŁĮ¸ +v ach +m ci +fractu res +ger i +sk ing +exclu ded +lu ce +ja ve +ig gy +evi den +aki stan +a wn +mor als +luci fer +ha ban +tumb ling +sunday motivation +mo sley +captain america +sch icago +the one +mo td +d ts +ðŁIJ ¼ +rep ell +ii i +locu st +geo spatial +mer sey +immer se +desc end +ber nade +j s +boat sales +win der +cran k +sing leton +candid acy +ben a +ðŁı» âĢį +high lander +ol t +k prs +healthy lifestyle +four teen +end the +ith aca +circul ated +r ans +pre valent +ha vas +splend or +roo ster +kalamaz oo +jewell ers +enne dy +rou sey +es y +cann ons +ornam ental +// // +ren don +win ne +mol ding +eid mubarak +coun tess +simon a +ha wa +fo es +du ster +sb u +por tray +mar ries +goo dday +cho co +achi ever +ðŁĺ¹ ðŁĺ¹ +pre neur +tr amp +tom i +n bat +garden chat +farra khan +ever glades +ab ru +sou sa +se ce +homes wee +terre strial +bar it +sri devi +ol u +mel inda +f rick +can dies +ðŁĺŃ ðŁĴķ +qu reshi +family fun +exor cist +cardin al +ny t +dies el +cu 
mulus +capric orn +si ology +lor na +dou gie +an die +super sport +c fl +п ÑĢи +say ang +pe ek +ภĬ +lo be +j em +ing lis +gg led +c sn +amne sty +chu ps +ba es +sau er +ðŁı IJ +mongo lian +en et +back street +dr illed +acce ssing +ce o +b se +ai ken +pur r +wor sen +whe res +war k +testi fying +bu ri +bla st +aw g +ðŁĵ ĭ +re defining +hear ing +u ci +c mp +bon i +tail oring +ta ji +noc chi +em t +stephen king +ne et +compla ins +campaig ner +luci ano +twili ght +ti esto +pas sports +flo yd +cathe dr +na ked +caregi ver +b coz +ade cides +ku ri +ly k +br aries +dren ched +disc lose +ðŁĴª ðŁı½ +le blanc +je tty +gar ty +chip mun +b su +rhyth mic +ic z +fri d +anne x +ame x +solo ist +lanc ers +arro whead +speci fication +simul ated +na is +inver te +bo wing +wor ship +f z +abo ss +sha q +ì¶ ķ +challeng ers +an arch +aamaadmi party +ãħĭãħĭ ãħĭ +suffol k +so corro +sn ell +cla dding +absor bing +shaw a +particip ates +ðŁį Ķ +book stores +bak u +seap ort +ko jima +gab y +pack ard +electr ician +let it +mo wing +fa wad +young jae +hot mail +men ing +u rie +intim acy +con ti +: ") +lifeis good +in ciner +i dri +craz iness +jour nos +fran chi +bott len +al da +ff es +k x +south we +air a +clay ton +sco ti +f j +bri ga +ðŁ¤ĺ ðŁı» +demonstr ators +y z +stor k +na q +casc ades +travel chat +plat a +pad ma +fran ci +at tain +bat girl +lom bard +hoo s +d dos +neon atal +discla imer +r ss +r ant +di sen +tex aste +so cal +frac tal +cam ry +stri fe +sn acking +mu h +sant ander +mor ons +gra f +par ades +hu ston +dru pal +mi ento +kir stel +hy de +vom it +forti fied +sphin x +da v +bir yani +win nings +s baseball +mer ged +lovel ondon +ling ering +dream big +car leton +liveli hood +djan go +astri d +gri ds +down e +bru ised +s ne +scarec row +hel ium +f nc +bi ggs +an ter +restor ative +em pires +ab del +life style +kiwan is +colloqui um +me en +pr ick +anti que +ze b +mi mic +edmon ds +ðŁijĬ ðŁijĬ +q ing +pp el +mc gill +interpre ting +âŀ ķ +rash ad +do ka +narr ator +electro magnetic +ash by +sau ra +iran deal +âģ īï¸ı +krish nan +in di +ff en +bre a +os man +multin ational +chi ppe +recruit ers +aus biz +p ounding +re gen +cur sor +refu sal +mac s +in ak +ax ial +wa ifu +up cycled +hindu stan +cas sini +carly le +scrat ches +re ef +man atee +eat ery +ðŁĵ ¢ +un condition +sen pai +on ther +comic book +pro sciutto +de mar +mi se +ma ge +fre ec +aye sha +al der +android games +ley ton +ho ck +door way +chicagof ire +aali yah +sw elling +bi x +. 
ðŁĺĤ +evan kirstel +torpe do +kon stant +genevie ve +ma ia +ha user +do torg +hide ous +fi k +sp raw +e ek +z appa +wan dered +' ' +ra jan +bam bi +( $) +wid ening +tool box +sa ir +illumin ating +pra ys +out patient +i w +day o +lo b +sw fl +sha des +gu ms +coo kin +ko di +gri ffin +traum ati +ste a +slaugh tered +god bless +air time +pseu do +b sa +hau led +ar if +à¸Ńภĩ +le l +wc po +mil iti +char ters +worl da +ru k +k gs +digital india +is able +idyl lic +esp ino +marie tta +e bo +team canada +ab our +wil ton +rock stars +fav ored +phys ic +wrink le +tb r +d print +ball arat +ad al +z ey +ðŁĺį ðŁĶ¥ +tom lin +mt r +pal sy +fener bah +tight en +phil ia +ir oning +ry u +b ant +enqu ire +ca ir +abur ger +tru n +green berg +chau han +ir ina +sh ani +trend setter +pre tt +zaf ar +alo ve +v ici +pan ic +no o +lu stre +disrup ted +bal lis +son sof +mon si +inst ac +ake st +ëĭ ¤ +kw ame +horror movies +distric t +sau cy +mb an +ar mies +with drawn +med ics +loft us +er oom +be kind +ar ns +all on +un ison +davi ds +cr at +nicot ine +so or +sm x +on co +cospla ying +zombi es +har ms +e ger +ro sy +moon shine +fe in +ce tt +du brov +reg ents +ben itez +ðŁijıðŁı¼ ðŁijıðŁı¼ +ste c +m alia +prioriti ze +ic eland +ft se +v amo +lam ont +homo sexuality +bre es +regu i +cb p +te j +sky sports +deter gent +sha sta +de rel +conserv ancy +colori zed +accol ades +vis o +show your +nan ow +bice ps +us ability +bi m +dailys ketch +pearl jam +stran gest +mega deth +broad casts +bar ren +ar ton +chri ss +confi gu +lu res +is the +e ul +railway ana +global health +gi anni +u aap +s lum +consci ously +ab re +n up +bud get +v ada +e sch +real ness +er ased +th unt +be z +armist ice +ðŁij ¹ +sh run +o led +driver less +ðŁ¤· ðŁı»âĢįâĻĢï¸ı +won dr +sk an +sal aam +mother land +h wang +gen o +gang nam +tw right +endor sing +en ic +ador ation +pau sed +patric ks +do cked +plat te +ff xv +ethnic ity +auto show +side show +after life +re located +orphan ed +food network +dare to +and ra +sla ps +v live +swim s +re imagined +mist le +re vise +real ity +bhar ti +ðŁĴĻ ðŁĴĽ +late st +prou dest +gra sses +lan yard +fresh est +carcin oma +anom aly +zieg ler +sum ner +ly rix +gor g +is d +av el +swild life +me squ +john cena +euro league +sab er +master ful +yar ra +cogn ition +jacob son +abo lic +sir loin +shuk la +moj ito +su pere +st weet +me z +e sa +rudol f +gur a +where you +tt m +win s +trust worthy +ny k +bra den +table top +good food +es on +be k +lingui stic +gra ys +ch ath +h cs +mon i +de ans +cu ssions +ch ell +slo ws +he mi +d app +shar pie +boo sters +a os +str ack +se dona +mu eller +hard wick +or nate +thor a +sal ud +o twol +ch um +mi ho +for age +thel ittle +tear ful +ones elf +min dy +sm g +gmb h +emer ald +ðŁĶ´ âļªï¸ı +tu tti +recep tions +re vising +i brox +tope ka +sal ami +expan se +i books +dob son +cli o +at s +ðŁļ Į +mo ha +is ance +shu tters +moo t +jan ine +marvel comics +jor dani +pos er +kenne th +hy ung +de ja +ase ball +speci ality +eu ston +classic car +had ith +ðŁIJ ī +chas ing +iz o +gros ven +ag lia +thisdayin history +t row +om ile +hu ar +by n +sal ine +div ine +demon ic +ty ran +han dover +revit alization +pa ella +cryp tic +se dg +m end +dun kirk +bre d +wal d +sport scar +a ard +whe aton +da ener +k lan +br t +bakhta war +spi res +schu bert +ro ti +poli sh +o se +ag ame +wonder con +prote stant +bo sa +ðŁĺ Ł +d ü +joy ride +ger trude +âĿ Ŀ +gil a +v h +tw a +tra v +swal lowed +star ve +la in +ent ren +rei ki +su kh +cra ic +az u +web page +kee fe +hypo the +hir sch +hel le +camp ground 
+w amy +tra vi +sha hi +san deep +ru i +han uman +dw p +reposit ory +no or +no ff +un real +p ell +black history +har vick +ma scar +pay ee +pa sha +gastron omy +d ÃŃ +ai g +rosen thal +open day +embelli shed +t tip +sun bathing +go pack +end ome +ï¸ı # +invali d +final four +st fu +squish y +ra sta +mo sch +jam esc +die trich +sel a +mel b +el vi +t dp +sun i +sli t +j ha +bi za +spi ked +l li +l illard +vam pi +syno psis +az har +kendrick lamar +ĮãĤĬãģ ŁãģĦ +heart less +country file +air play +arrog ance +pre e +virtu oso +ãħłãħł ãħłãħł +raj u +le bu +for ward +tu g +dro s +mondaymotiv aton +concep cion +thel o +pad i +looo ol +ÑĢ од +it ss +eth ical +end uro +__ : +expend iture +mon ste +mas king +terri ers +ib is +e mber +cu mple +punctu ation +pi per +ir vin +ade e +yy yyyy +flash backs +cel sius +don nie +bo gota +ben evol +the script +shil pa +pro se +fin dia +ze ke +ne ko +do ves +blues lyrix +fro sh +sowe to +mp lo +al ai +sab i +raq qa +wf tv +stro ller +ian somerhalder +ðŁĶ ª +an on +mo seley +! ?!? +sta king +mol y +car tri +c sg +ast or +transc end +ma er +de ux +cow girl +sas k +pun ter +ma ken +o ates +love tt +grow ler +sag in +v n +ssi ble +officeof rg +y mc +sab ar +faul ty +ap ha +ak on +ðŁij « +snow don +ae w +raise the +ðĿ ĵ +grue some +clement ine +sp ing +lat a +worlden viron +mi mic +can aria +bakhtawar bz +ao a +fal a +ãĤ Ń +avi va +you uuu +thi gh +la dders +gu mbo +tz ky +fu zz +plastic pollution +est ate +strength ened +k ant +dr in +cal vert +transform ational +frigh tened +mac lean +elited angerous +ear thy +t son +to da +j nu +.. , +mic hal +i ban +je ong +is real +sim coe +exclu sives +blue bells +ben e +te u +pil sner +pens ke +athe ists +m pu +cartag ena +ðŁĴĹ ðŁĴĹ +million aires +kk kk +it ar +subscri ptions +remo te +ma fi +hin ton +w cc +ho k +ds b +ab leton +sevent y +pun ks +e indhoven +sh one +mcfar lane +lim popo +empha si +à ¼ +sin fo +pe tre +man grove +ch ino +ber tie +play lists +push awards +p af +deb bie +c do +r ino +ðŁı¾ âĢįâĻĤï¸ı +fol ke +bon nar +th ine +sl an +hal ter +evi e +aw some +vul tures +spar ky +seiz ures +âľ Ķ +ram one +ine ffe +al n +pro ctor +ast ra +the voice +gro te +sci on +dead line +am aya +tain ted +patter ned +exce eding +cross fit +kay lee +drop box +ru shes +tack led +mo by +retro gamer +n cbd +benef itting +shay kh +guild hall +gen try +dream cast +dread ed +bun dled +th aw +revol ving +n pt +kylie jenner +imagin ative +ron i +over came +family time +ds burg +car naval +relation ship +recogni zable +cor oner +ho le +fan fic +emir ates +bur ritos +analy se +thin ner +ne es +galli poli +bl r +cat woman +-- >> +au lt +ada ily +nau ghty +ili o +solit aire +mtv br +jocel yn +arun ach +rep ent +south gate +hy acin +essenti al +fent on +and um +it or +go pal +sl inger +po sei +aw il +wi elding +ra ila +eli as +a sto +à ¤ +tend ency +str ata +ker t +< - +im acele +da es +sti mulus +han ley +fit nes +ec stasy +lim ous +ha iling +ðŁ¤ Ń +chis wick +tar ies +sla v +pul i +moderni zation +black mail +b ingham +h fx ++ + +ðŁĩ®ðŁĩ ³ +ni v +we a +profess or +k off +bol ster +su ave +sequ ences +pepper oni +not te +dre n +ãģ¨ ç¹ĭãģ +hs v +o ga +ap tly +z ad +excel si +rin ka +mol dova +min n +ma bel +conferen cing +bas ing +of er +ob si +hamill himself +care less +brief ed +inhe rent +par ish +dub nation +town sville +sar awak +gee ky +doncaster isgreat +was abi +gu p +phen o +dra inthe +carrie underwood +ble eds +bbc world +ane w +alta f +dul wich +ani ston +w ti +sumat ra +gra fton +bl n +me ster +bode ga +re go +es q +an jo +sump 
tuous +mai sie +ï¿ ½ +wil t +jak ob +el vis +se pul +mu ster +air pollution +president e +happy monday +exten sively +fl ondon +t ls +play ing +pe ed +din ho +var dy +pi ka +n iro +au cus +ðŁį ¦ +nu ll +el ondon +juvent us +imag ines +dis ab +lit o +d ura +work places +promo te +mc caf +wood work +waw x +à® ª +tt ino +shar i +sem per +better together +ðŁijĬ ðŁı» +ze bra +pon dering +en chil +ho m +cosm ic +tan z +mo cked +ec cc +ath ed +abo lish +prop eller +paris agreement +assemb lies +indu stry +fraudul ent +pe sa +chang min +ax x +ðŁĴ µ +irr ational +cu sa +ramad han +octa via +on elove +jac ki +bar ak +taxi der +seri ous +nathan fillion +mc en +ch k +po part +grav ity +copp ola +reading fc +illu sions +j ig +ww x +re sh +ex porting +buzz ard +âĻ ¤ +p cm +lan apar +ko s +arom as +antal ya +ww dc +ven a +phil a +ball in +ðŁij Ħ +quin ta +ma o +f ery +eigh ty +sentim ents +safe guarding +r wa +pu ffs +luc ille +de cath +sl u +nu gent +de ter +braz il +ze iss +super bowl +subsi dy +alter n +hi dalgo +enz ymes +ä ½ +tag ne +hair dresser +adri en +walk out +oppo ses +can tina +bed side +af an +ðŁĶ Ĺ +prophe tic +dan es +un successful +super charged +pk k +exem ption +hart le +secu lar +cli pping +br s +united way +c net +pat chy +ha gan +e en +âļ ľ +var a +sym pathi +never trump +affir mation +om f +ny cfc +ma ja +sur ro +keer th +up scale +sandal wood +mon archy +kno bs +å ĭ +po tholes +hunger games +ter races +na sir +coun sell +welcome to +wa q +se aman +m ita +stun ningly +on theroad +in ability +) !! +bon go +ant v +sp ut +worldenviron mentday +resu sc +y td +fi m +eun hyuk +sa chin +rose anne +cler mont +ape c +am ina +v ening +n antes +al most +sin us +ex as +ty l +ti en +ple ad +lanc s +bur naby +re k +jo om +observ ers +disco graphy +cl g +âĻ ¦ +sn ack +r ti +o ily +crystal li +bru te +web development +topp ings +la f +an is +ad der +reli ving +car lin +battle of +we g +syri an +pon t +n dc +lagh ate +yu ma +sp p +p iti +ro bbing +mart ing +rey kja +raj put +nc ds +kie wicz +âĢ¢ âĢ¢ +vam pire +substan tially +opio ids +nepal i +k line +ar oo +under stand +lit t +u it +thro mbo +sar ies +qu ot +b alling +t tr +s gh +philip p +br ant +ac l +m ello +whit taker +. 
; +defi ant +b gc +repl ying +mir ren +metamor pho +sch wab +bul ge +utili zed +pick ering +par don +d sa +ภĪ +doo ley +cumul ative +Ð » +ur gency +e mir ++ /- +¦ Ī +ot as +âı ³ +station ed +grape vine +ar ac +karan johar +f ancy +sau l +coo gs +lgbt q +ا٠ħ +jav i +u mmer +pl l +den is +dai pur +pu ffin +lewi sham +fand om +co pe +ves matter +s ve +hel pless +deo dor +ostr ich +kaz an +friday the +con dor +v x +sophom ores +rob les +cu tt +cli mbers +ë¦ ¬ +sle g +sn f +mac ys +hydr ating +grou pe +po yn +mou lin +hg tv +lmfa ooo +sulph ur +asdfghj kl +annab elle +hump back +bra ved +viswas am +multi purpose +hu midi +escor ted +barb ican +f ad +cor sa +ðŁ¤ « +pi ppa +here to +can y +ser gi +or cas +o vie +ed ou +s any +glob alization +man cini +food truck +f is +defi brill +sch re +sma fia +love wins +la ut +k aka +hol lande +game on +resurg ence +out side +olympi ad +int an +abstr action +rapi d +pal om +cal le +jas min +attack ers +swag g +mit ra +ky lo +à® ² +her mitage +gor do +e ira +so sfam +roll out +exc ite +sy nod +mer rill +c als +as sa +liveli hoods +ju ve +the black +gopack go +ant lers +alban ian +wool ly +qu iche +puri fication +are th +smar thome +ne k +all blacks +mex icans +is m +ger ms +comple xion +mar ck +u shi +ðŁIJ IJ +char l +ca stic +till erson +giuli ani +biode gradable +mal bec +bo is +ju bil +im es +r ame +gene tic +esp nu +ch ley +so ho +go pher +g sc +buu ren +cu be +bridesma ids +webin ars +to e +mani pur +viol ently +notic ias +ex changing +chi ev +replac eable +muay thai +bu ss +sp il +instal ment +div ya +cait lin +o lim +fil tering +whirl wind +sta red +prior it +pr am +pompe ii +mono logue +k ite +bu ka +âĢ¦ .. +vac cine +bre ro +woz ni +sol ent +re ferr +my rt +gridi ron +galatasar ay +fro ze +clare mont +ðŁ¥ ĥ +victori as +ssel dorf +pa stures +net neutrality +ch or +ðŁij ģ +ಠ¿ +we ho +symp tom +jo sel +in ous +dragon con +power ball +p te +four thofjuly +ec la +ear buds +where abouts +salt life +depriv ation +ch ter +wi ggle +syste m +ps st +ch az +d any +ri mo +oax aca +lanapar rilla +barcel on +melanch oly +way back +ho tro +n si +l illy +kur o +ja han +intellec t +board game +ðŁı Ĭ +sneak peek +k prc +jail s +cand el +zan zi +mor timer +star ch +ra gs +p fa +long live +k art +gir ona +cro cker +christop h +precau tions +war ship +per m +paren t +van gogh +gif ford +allegh eny +ra yn +ut m +sten cil +rec alling +pen ney +z azzle +ìĥ Ŀ +hin ds +aren as +nu ev +law ler +gu in +do this +ðŁij ķ +ì¶ķ íķĺ +we g +ti b +ri din +complex es +turbul ent +pe sos +de marcus +vall arta +sam sun +kis ses +hein rich +deport es +wil ms +ur d +then ext +inki gayo +ho wi +fir sts +carri age +clean liness +mas war +is ch +ax el +si zzle +road house +fr ans +ent ourage +co bble +boo th +benedic t +tal on +fc u +year ofthe +ray on +raider nation +fo yle +ko val +pi anos +l pg +bur mese +man ure +geo caching +cosc ino +b np +fer ra +stro phy +mar ais +ce es +legen dof +kat niss +eno ch +av ed +you know +d prk +ðŁĺ¢ ðŁĺ¢ +sp un +pro st +sor rows +cent red +ke a +gal icia +? 
ðŁ¤Ķ +ÑĢод а +bou chard +ðŁĴĻ ðŁĴľ +yu i +seed lings +jon ah +reco vers +ny rd +board room +su ma +my japs +tun g +sha i +ir gc +eli o +wag ons +ka shi +polic emen +john nie +ale coscino +shop ify +dot ted +de tri +va w +to fficial +in your +chal mers +trac ed +no vi +by es +ari el +nipp on +la pel +gri ez +b gs +fool ing +d ita +vijay sethu +nm wx +as ot +kr anti +hel m +ve di +sic kest +mo chi +k abo +shru bs +he red +b sp +sq m +ham r +dul kar +anth a +nr f +avoid ance +at en +publi x +be arers +nas i +ha p +h ells +ðŁĸ ¥ +ภ· +thelast jedi +oh wx +ðŁį « +wa hoo +there se +rec aps +ss nhq +bird photography +v ay +pet ti +pau lo +bel vedere +( * +gr l +du vet +c pec +sa it +por sch +meas urable +avi ators +fre mantle +bre en +on om +me and +life saving +eu ref +en don +embar as +aira sia +el is +dun kin +star magic +s ill +porto bello +ki efer +ex e +mu ted +ãģ ¦ +we thepeople +logi a +liber al +theforce awakens +min ed +haun ts +freck les +care taker +s india +âķ IJ +dev lin +list on +direction er +oh n +fi garo +em manuel +du bois +cl ones +bru ise +ðŁİĪ ðŁİī +disin fe +der matology +as r +s watch +dis comfort +tam anna +pi day +mack en +k atic +delu sional +shaw nee +gu d +al bino +p ali +din gh +cucu mbers +coffe y +anticip ating +treas ured +web summit +shel tered +sav or +pedago gy +m gs +sh ma +s bu +den ali +cam pos +bubble gum +o ir +le aps +y ler +r one +sansk rit +min t +meat less +futuri st +du de +a vel +prote sted +squ ire +z aki +sz n +har court +cycl one +bour dain +gather ings +d ant +advent urer +parag on +alt man +dd ing +ban erjee +snorkel ing +mother well +mis sy +en der +glo ws +ki wis +chick pea +por o +e fron +app t +u y +speci fied +gab by +e strada +com bos +bour bon +vin i +var un +steph ani +key words +car vings +amit abh +wr ought +tw al +re els +clu bbing +ubi quit +cri t +ambed kar +æ Ļ +prun ing +vaccin ated +boe ing +s ks +lo ona +hypno sis +edel man +pho l +he w +colo sse +mckin sey +u on +to te +sacrific ing +ox i +n ang +e mu +пÑĢи ÑĢода +m th +kers wednesday +argu ed +timel apse +ris king +regul ating +ni gh +likeli hood +cu bic +au ction +rein for +pi stor +no ses +ye l +snu ggles +pe i +jean ette +ta ku +ri th +guy z +ภŀ +y te +ver ted +pay soff +jau regui +hoo ligans +procedu ral +mi b +har dy +el eng +chec kers +all ine +the met +prou dof +keerth yofficial +collabor ator +ni u +infl icted +adv ani +re twee +memor iam +f icial +ti ghter +sal em +re viewers +br ics +ben digo +am ell +tur kish +sush maswar +paul son +pal awan +mol lie +stitch er +s burgh +ir u +hay dn +en ers +aro a +u zzi +saraj evo +hel a +apol lo +nine ty +vac a +sp on +vent u +jel ena +hei fer +avo ids +sp ine +pri ze +mar ist +re creating +me de +woo den +find lay +ro fl +n di +compreh end +yu go +y ü +to work +u fos +son ar +pi ston +recor ding +tent ative +art forsale +pel lets +fre do +ÙĪ ر +mu ses +custom ization +pro found +is ner +ide ally +si am +plan kton +cm dr +man ger +fran ken +customiz able +ठ® +walk away +swi vel +vast ly +no ton +lex a +ex moor +z as +tan te +reduc tions +lol ly +hip sters +benef ited +ë ² +ww www +mascul ine +fi ji +dre y +ph ill +ane ous +nic ol +men dez +disapp ro +ch ner +through s +shen mue +east man +ðŁIJ İ +yu ck +under tale +re ys +go beavs +eng en +c na +mer r +bir k +ãģ¨ç¹ĭãģ ĮãĤĬãģŁãģĦ +âĥ£ @ +yn na +ste ed +offen der +at um +vani shing +presi denti +love them +g nocchi +fri ggin +per il +mad hya +ag ne +dee jay +mar nock +m tb +fold able +@ ___ +stand re +bron x +bow ski +fin ite +cro ckett +b sf +ge tit +seren awilliams +mir o +ignati 
us +sla y +rin se +fon due +sel dom +s more +gan i +dy ce +dmit ry +cru mb +late post +pri mark +oh ana +flor als +do a +remembrance day +d ds +azi one +toon ami +air port +æĿ ± +th ad +fi st +dine sh +dr who +ad words +admi rer +pro je +kyrgy z +à « +manife station +le wan +j ic +thi bau +le ased +van ity +nouri shed +never theless +aug mente +fu elled +che ad +wil shere +ru di +p z +my co +mor ro +herbali fe +hardro ck +de man +dre ality +sp ades +ce vic +bha i +bar on +ultimat efan +hou news +to bi +stru t +ke el +affili ation +the masters +sm al +hu e +este ban +con v +om nic +datab ases +co v +ter ti +st g +snoop dogg +metab ol +leth bridge +ðŁı» âĢįâĻĢï¸ı +year ling +residente vil +nws l +iy aki +griez mann +c ous +ðŁĵĿ : +tor ian +sam i +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ðŁĶ¥ +g are +alli ances +whit field +we ther +refin ing +coy i +kra ken +ðŁĺĺ âĿ¤ +singul arity +lil i +h ns +bol dand +waw rinka +misogy ny +lo vers +c q +b dg +ad ona +gar ter +women of +sc d +recogn ising +mun a +str ou +sign alling +lare do +hell boy +alek sand +un available +pedi atric +as in +mer ia +ri shi +futuri sm +w ye +polari zed +e we +pro pel +in forms +cre ase +~ " +arti ston +like for +heidel berg +er ra +life in +len ny +inter rupt +cohe rent +ca z +vick ers +le veled +f bs +cab ins +bu mmed +apost les +we h +ten don +souven irs +infu ri +pier ce +asse t +m las +go th +di ggin +ann as +yl or +th waite +sw el +pan era +mur derers +croo ked +bs go +ac u +a on +re an +one of +ko hl +bloo dh +pest icide +lost dog +fle xing +ëĤ ĺ +su pra +eter nally +ðŁļ Ļ +pa olo +ol an +mom o +is elle +captain marvel +s lou +mistak enly +akhi lesh +mer t +il inan +bu on +bal kan +mir ro +mill en +der ail +dam on +tit i +bi os +re don +pic ard +par te +ðŁ¤ Ł +Ø º +son ics +fir sth +dd c +veg ans +tur ban +ni gan +lot tie +lyn don +star buck +pink floyd +life styles +am ara +a she +r sc +val a +sm er +cw gc +cli ent +buen as +jag an +coo ps +ðŁijij ðŁijij +speci alizes +snag ged +g lar +ben net +wildlife wednesday +bow den +pi k +art in +empor ium +ar l +re ba +pas ser +disappo ints +additi ve +âľĬ ðŁı½ +bay er +missou la +ha skell +comm ences +ni x +ne man +explo ited +plastic surgery +cc d +aso cial +vo t +sie gel +fro ome +kap am +far a +e ha +pro bes +mw f +meet ing +p bb +ak ins +mistle toe +kingdom hearts +for kids +ec r +bal e +escor ts +adidas originals +k wa +k ts +hallo ffame +ðŁĺį . 
+wag s +pot ted +o wing +honey comb +he fty +uro logy +mer le +b pd +stri pping +re ich +k state +gu ay +yon ge +shak ti +g loom +bat t +son om +n ery +el ba +blan ks +hel le +triple ts +bom bay +ak arta +ab ia +transm itted +rol f +ja is +angular js +fi erc +m ss +trac e +ॠĩ +tom bs +old man +kom bucha +fo l +e health +cere als +are lli +in ari +ðŁĴ © +wo l +liber ties +fa wn +af firm +nun avut +hyster ical +k drama +art es +âĢ¢âĢ¢âĢ¢âĢ¢ âĢ¢âĢ¢âĢ¢âĢ¢ +valent in +man slaughter +gal es +eo in +energi zed +del s +with draws +st les +sar castic +ram esh +incredi bles +lock hart +ya wn +ultimatefan live +oooooooo oooooooo +mu en +guru dev +te er +pe eling +new snow +lingui stics +direc tv +ag end +uni lever +ru ger +han dedly +ero se +li mel +the c +royal ties +fini shers +nr g +m gt +fid get +com ps +bac on +aggre ssively +ab it +ch â +tar de +slu gger +q anda +gre ening +d ats +ensla ved +spec tor +o ye +fre ef +b hand +stop brexit +mis conceptions +cav a +ðŁĺįðŁĺįðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺįðŁĺį +multit asking +hou sel +ferre ira +cen time +ank les +jo dh +hel ly +fro me +out tuesday +nar nia +bal aji +l bloggers +jyo ti +ðŁį ĩ +lan cia +cap ri +y ap +nat ash +down fall +." âĢĶ +à ® +ligam ent +coat ings +ai ded +hi ko +fall ing +encryp ted +yeg food +infringe ment +cu di +ce p +ðŁĺį ðŁĺĤ +tra d +super rugby +ed win +wh iche +vi meo +lay ne +in vigor +he he +dubrov nik +bie ber +u tr +sham an +op ers +ham ill +en ig +di f +ar um +scrap book +min h +diver gence +mckin non +life time +guter res +wil le +ple as +patt y +mic ron +k z +dom aine +ru sher +m ds +ches ney +screw driver +âģ© , +sle dge +hau er +chan a +stam ina +sprink ler +pl n +he ff +bol ton +om on +car rington +accor dion +jor ge +inter ception +in puts +gu ll +tran scription +vanu atu +it ical +eth os +tic h +spac ey +pee king +u mi +ha ger +psycho tic +illi an +illi a +bonnar oo +an ese +pu c +laghate parth +en hall +econom ical +dre dge +% - +u we +tu bular +scoun cil +pe asants +fl er +tumb ler +he p +ford ham +row ley +initi als +ev asion +er nation +plu gins +coch ran +c attle +acid ity +ðŁİĬ ðŁİī +re grann +jump man +ef ace +x ma +patri archy +esco bar +cristi an +tip ton +nu eva +hack ney +back seat +kill arney +aid an +sta dion +simul taneous +ida ho +a je +u th +figu re +clo s +bur k +volun tar +rec ite +macfar lane +cur few +bou do +w gn +sti x +sla p +scrat ched +philli p +jour ne +ex pelled +wa z +u ke +tati ana +ou e +ho pp +dimit ri +ðŁĵ £ +mato logist +electri fying +blu ffs +bill smafia +az cardinals +y aa +x mas +shar a +r ith +g ills +dre s +bar ton +authori zation +imperi alism +home of +to do +foot path +band width +visit spain +moh sin +erup ted +mi ki +insig nia +mike l +ss h +ger a +bank holiday +aw an +t weak +star craft +e al +construc tion +skelet ons +le ep +ine m +bar clay +ship wreck +monsi eur +yo h +ron t +form ative +ser o +le p +horse man +hoo sier +haz mat +cylin ders +cen ti +ðŁĴ¥ðŁĴ¥ ðŁĴ¥ +re em +na ire +mus ically +gras shopper +est onian +termin ology +ro main +blogger rt +tox in +stan ce +cultiv ated +an ast +ðŁIJ į +shi mano +go pher +ene i +recycla ble +gam ification +fight for +c q +avoc ados +ke ys +eli ke +gly cer +shak ur +mobili zation +gal ley +expla in +ex changed +pe th +obe dience +illa ge +en nis +ãĥ ŀ +wi v +walla bies +ma ar +ig ers +fin tech +fin alized +wo j +meaning less +in field +onna ise +e et +bron te +pass ages +ðŁij § +strick land +northern lights +lom ond +h tc +wr ay +shi fter +di alog +ðŁį į +>> >>>> +te atime +ste ch +sic huan +qu ill +fran ca +comple mentary +bar 
rington +marcu s +mal am +goo oo +for sa +elec tra +af s +âĹ Ĩ +tri fe +sn azzy +fo lia +and olan +after dark +wood son +stra de +litt lest +o gun +con wy +co wards +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ +íĬ ¸ +se ul +mur phy +dun ks +kapil shar +jo achim +wom ack +equal ity +aver ages +a ine +ðŁ¦ Ī +tac ular +dis ability +u ked +mid century +bar thol +teas ers +tab ern +nj caa +sp out +op i +ku bball +bl om +so ar +popu lism +meth yl +ðŁijĬ ðŁı¼ +o spre +alo ils +ðŁĵ ĸ +ðŁĮ ļ +x er +sp illing +publ ica +car dam +adi sh +sa cha +p kg +bu da +lyric ist +i bc +gru mp +ho ver +hal ep +anti body +anem one +âĻ¥âĻ¥ âĻ¥âĻ¥ +m cl +litho graph +cc u +s fest +path ic +calli ster +otta wa +gun sn +rut ger +hali but +en vision +differenti ate +ðŁļĢ ðŁļĢ +pir an +lat el +uc n +trou bad +ra ine +fierc ely +learn english +lea se +wex mondays +em it +dray ton +bur rell +scuba diving +hol ler +dr u +clo cked +w ral +ap ro +trans lucent +w bo +patri arch +mo ja +lan nister +fish ery +ne derland +mil dly +mi rai +ma ko +ja p +ðŁĺ©ðŁĺ© ðŁĺ© +pro statec +p anna +ar ama +under taking +tomp kins +ne op +soli ds +sav oury +e ames +cut lery +wood bridge +steam er +ri zzo +wild cat +rat na +lamin ated +kin eni +jal ap +ai des +acknowle dges +?! ?!?! +! ðŁİī +w afc +mag gio +ha ves +dar je +of i +gr il +v asi +bru x +mo hd +fake speare +arn old +r mb +for be +wal leye +ro di +therapeu tics +strate gi +ob ste +mu dder +download able +dd ings +d ca +asi angames +campe on +appropri ation +th century +ram atta +dra ped +bul lion +mu c +one x +se greg +ophel ia +bod ily +âĿ¤ ðŁĺį +wi zar +te ased +ade my +to id +sur a +lazar us +sn ickers +ma se +lo h +bow ed +bibli o +x change +har lan +gho shal +flavor ful +bha gat +alle z +whiche ver +ten stein +disc er +organ iser +mt g +dream liner +t se +hok kaido +mo k +indulg ent +hick man +blin ded +al yn +aaa ah +sp ool +lough borough +inter pret +et v +aristo tle +optimi zing +avici i +madu rai +ju li +naw az +mat chups +ab ide +paint ing +w elling +vel i +octag on +in scribed +po king +plac er +life cycle +kili g +g sp +eli ves +cle ments +na sheed +me sut +incarcer ated +dist illed +wal ang +delic acy +del gado +che z +ch ita +ad ero +tu x +pati l +o do +abh cosmetics +tv c +p bc +in accurate +hardwork paysoff +ball er +quot ation +merchandi sing +ga stri +defen ses +dro gba +bex hill +ban kno +win ona +si eg +p gs +hahah ha +agu chi +su bram +mirac le +de sch +li bre +ba cher +ent ine +bbcra di +lou dest +r ps +pi erc +fr yer +storm trooper +rafael nadal +pas co +exhau stion +epic onetsy +rc tid +kel lie +ga ines +d bz +sm riti +s bridge +lim ited +cla w +technic al +bio graphical +ado red +ภ° +exclu de +ac adia +key boards +fur man +so ca +sur u +ni ps +sw aps +server less +run e +pu ffy +north ampton +nish ings +hen der +cartri dges +gun shot +ðŁĵ ¹ +fil ament +respon dents +pey ton +mountaine er +mer ging +life span +intimid ation +p afc +nl wx +expan sive +pur r +f ck +ca e +at ti +tele thon +so hn +mend el +lo pes +dor i +un broken +te red +tast ings +in active +disin tegr +t assel +share the +pi ano +is lay +air space +z awa +ricci ardo +ming ton +fresh er +cur ry +re vs +pharo ah +h mv +exhilar ating +wh oo +lin kin +kri spy +competen cy +ste wards +ne bu +kat su +ad mins +baz ar +as ar +giving back +s summit +song z +lin us +raj kumar +farm ington +fanta sia +ðŁĺ´ ðŁĺ´ +so bri +lis se +barry more +pri sm +blo b +sen ew +mono xide +exp ire +eigh teen +di pper +xi ao +kil t +hin ch +bbc sport +bam boo +p ter +ex al +ðŁ¦ ĭ +ham lin +expe ditions +star gazing +food security +wy 
lie +ul f +st ingly +on storm +lo eb +bro ome +bn ha +pancre atic +eli ve +!!!!!!!! !!! +ther apper +ortho pedic +avengers endgame +antit rust +ìļ ° +go te +om d +off side +gy llen +win eries +white water +ad l +lu pita +exce eds +consi sted +chew bacca +ash leigh +nhl jets +is san +sh ld +hay at +cran berries +ðŁ¤ĺ ðŁı½ +rock the +spring training +fall out +dairy free +wa j +un decided +so wn +rc n +north wales +htt r +fu mble +d its +comp elled +popu list +min ted +blan chett +. '' +pro pulsion +m illa +au berg +her tz +h ta +u daipur +serendip ity +azte cs +als ace +ðŁIJ ij +lu n +sho es +char li +gar za +ðŁĴ Ł +pro biotics +fox tv +ol is +mi ff +loc alized +diffu ser +si gue +fun ko +rend ous +ðŁĴ ij +jeky ll diff --git a/data/models/huggingface/stable-diffusion-v1-5/tokenizer/special_tokens_map.json b/data/models/huggingface/stable-diffusion-v1-5/tokenizer/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..2c2130b544c0c5a72d5d00da071ba130a9800fb2 --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/tokenizer/special_tokens_map.json @@ -0,0 +1,24 @@ +{ + "bos_token": { + "content": "<|startoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "pad_token": "<|endoftext|>", + "unk_token": { + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + } +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/tokenizer/tokenizer_config.json b/data/models/huggingface/stable-diffusion-v1-5/tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..1bf819b6621086dc92428e2c9c8bbab39211fd55 --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/tokenizer/tokenizer_config.json @@ -0,0 +1,30 @@ +{ + "add_prefix_space": false, + "added_tokens_decoder": { + "49406": { + "content": "<|startoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false, + "special": true + }, + "49407": { + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "bos_token": "<|startoftext|>", + "clean_up_tokenization_spaces": true, + "do_lower_case": true, + "eos_token": "<|endoftext|>", + "errors": "replace", + "model_max_length": 77, + "pad_token": "<|endoftext|>", + "tokenizer_class": "CLIPTokenizer", + "unk_token": "<|endoftext|>" +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/tokenizer/vocab.json b/data/models/huggingface/stable-diffusion-v1-5/tokenizer/vocab.json new file mode 100644 index 0000000000000000000000000000000000000000..469be27c5c010538f845f518c4f5e8574c78f7c8 --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/tokenizer/vocab.json @@ -0,0 +1,49410 @@ +{ + "!": 0, + "!!": 1443, + "!!!": 11194, + "!!!!": 4003, + "!!!!!!!!": 11281, + "!!!!!!!!!!!!!!!!": 30146, + "!!!!!!!!!!!": 49339, + "!!!!!!!!!!": 35579, + "!!!!!!!!!": 28560, + "!!!!!!!!": 21622, + "!!!!!!!": 15203, + "!!!!!!": 9168, + "!!!!!": 5203, + "!!!!": 2360, + "!!!\"": 28048, + "!!!)": 42532, + "!!!": 995, + "!!\"": 20556, + "!!#": 34997, + "!!)": 28352, + "!!": 748, + "!!@": 40705, + "!\"": 2947, + "!\"@": 43819, + "!#": 9670, + "!'": 13222, + "!),": 37904, + "!).": 26225, + "!)": 4571, + "!*": 37737, + "!,": 29325, + "!-": 43499, + "!...": 22121, + 
"!..": 35475, + "!.": 22517, + "!:)": 31671, + "!:": 17545, + "!": 256, + "!?!": 29767, + "!?!?": 47081, + "!?": 6004, + "!@": 15117, + "!]": 34466, + "!âĢ¦": 35068, + "!âĿ¤ï¸ı": 32559, + "!ðŁİī": 49085, + "!ðŁĺĬ": 43434, + "!ðŁĺį": 36438, + "\"": 1, + "\"!": 10377, + "\"\"": 41530, + "\"\"\"": 25539, + "\"\"": 8575, + "\"#": 8345, + "\"'": 31065, + "\"(": 32741, + "\")": 13112, + "\",": 4332, + "\"-": 9375, + "\"....": 37785, + "\"...": 9049, + "\"..": 25403, + "\".": 2811, + "\"/": 39486, + "\":": 7811, + "\";": 37549, + "\"": 257, + "\"?": 11727, + "\"@": 1512, + "\"@_": 20236, + "\"[": 36930, + "\"âĢ¦": 33993, + "\"âĢĶ": 41151, + "#": 2, + "##": 15483, + "#...": 31491, + "#:": 30144, + "#": 258, + "#@": 35062, + "#âĢ¦": 12834, + "#âĢİ": 34262, + "$": 3, + "$$": 24233, + "$$$": 31859, + "$$": 14929, + "$)": 39460, + "$.": 34682, + "$": 259, + "%": 4, + "%!": 35070, + "%),": 37819, + "%)": 16063, + "%,": 14505, + "%-": 48784, + "%.": 12475, + "%;": 33379, + "%": 260, + "&": 5, + "&&": 27791, + "&": 261, + "'": 6, + "'!": 13781, + "'\"": 19479, + "'#": 15319, + "''": 46594, + "''": 8445, + "')": 19175, + "',": 5662, + "'-": 26152, + "'...": 20474, + "'.": 4645, + "':": 7182, + "';": 44517, + "'": 262, + "'?": 17242, + "'@": 26397, + "'d": 1896, + "'ll": 1342, + "'m": 880, + "'re": 982, + "'s": 568, + "'t": 713, + "'ve": 1200, + "'âĢ¦": 42120, + "(": 7, + "(!)": 30253, + "(\"": 18741, + "(#": 6229, + "($)": 46597, + "($": 15186, + "(&": 15042, + "('": 18235, + "((": 22944, + "(((": 33287, + "((": 13796, + "().": 41737, + "()": 8475, + "(*": 48004, + "(*": 39575, + "(+": 12903, + "(-": 20228, + "(...": 45159, + "(.": 43055, + "(:": 8528, + "(;": 23983, + "(": 263, + "(?)": 22885, + "(@": 2181, + "(£": 33987, + "(©": 44886, + "(ðŁĵ·:": 34610, + "(ðŁĵ·": 37999, + "(ðŁĵ¸:": 44422, + "(ðŁĵ¸": 45204, + ")": 8, + ")!!": 47518, + ")!": 7805, + ")\"": 13046, + ")#": 39981, + ")'": 23613, + ")(": 27956, + "))": 13720, + "))))": 42911, + "))))": 34181, + ")))": 18305, + "))": 5167, + "),": 2361, + ")-": 19034, + ")...": 15274, + ")..": 41822, + ").": 1818, + ")/": 26616, + "):": 4143, + ");": 19686, + ")": 264, + ")?": 18765, + ")@": 41928, + ")_/": 45028, + ")_/¯": 45781, + ")âĢ¦": 41844, + "*": 9, + "*)": 30956, + "**": 9825, + "****": 21326, + "********": 42974, + "*****": 43571, + "****": 25167, + "***": 7829, + "**": 4441, + "*,": 41895, + "*-*": 23568, + "*.": 31304, + "*": 265, + "*_*": 44535, + "+": 10, + "+)": 34810, + "++": 47298, + "+++": 35986, + "++": 19056, + "+,": 35885, + "+.": 25238, + "+/-": 47614, + "+": 266, + ",": 11, + ",\"": 3823, + ",#": 11215, + ",&": 26905, + ",'": 10599, + ",)": 44493, + ",,": 21340, + ",,,,": 33225, + ",,,": 14811, + ",,": 8844, + ",-": 29821, + ",...": 20365, + ",.": 41277, + ",": 267, + ",@": 13975, + ",âĢ¦": 14601, + "-": 12, + "-\"": 18646, + "-#": 10151, + "-$": 24946, + "-'": 28010, + "-(": 33345, + "-)": 3535, + "-*": 21527, + "--": 2154, + "----": 5753, + "--------": 11772, + "----------------": 23122, + "----": 30164, + "---->": 35999, + "---": 11079, + "--->": 14518, + "--": 2432, + "-->": 6422, + "-->>": 47252, + "-.-": 32765, + "-...": 43147, + "-.": 44040, + "-": 268, + "->": 5081, + "-@": 10087, + "-_-": 27227, + "-__": 42718, + "-âĢ¦": 30047, + ".": 13, + ".!!": 37805, + ".!": 14030, + ".\"": 18650, + ".\"-": 21234, + ".\"": 1081, + ".\"âĢĶ": 48703, + ".#": 5014, + ".'\"": 41558, + ".''": 49379, + ".'": 5938, + ".(": 22294, + ".)": 5376, + ".*": 26145, + ".,": 5276, + ".-": 12481, + "..": 608, + "..!!": 23707, + "..!": 17994, + "..\"": 
15229, + "..#": 15735, + "..,": 47143, + "...": 3002, + "...!!!": 38351, + "...!!": 39915, + "...!": 16860, + "...\"": 5240, + "...#": 8195, + "...&": 44979, + "...'": 23167, + "...(": 37981, + "...)": 14040, + "...,": 42717, + "....": 2386, + "....\"": 26689, + "....#": 20346, + ".....": 34151, + ".....#": 38867, + "........": 8246, + "................": 24855, + "............": 42965, + "...........": 35008, + "..........": 25526, + ".........": 19881, + "........": 14720, + ".......": 9917, + "......": 5590, + ".....": 3104, + "....": 1390, + "....@": 29790, + "...:": 34570, + "...": 678, + "...?": 16388, + "...@": 12672, + "..": 852, + "..?": 23875, + "..@": 21124, + "./": 31975, + ".:": 15811, + ".;": 47596, + ".": 269, + ".<": 29442, + ".?": 29294, + ".@": 1230, + ".]": 33511, + ".~": 42651, + ".âĢ¦": 18047, + ".âĿ¤ï¸ı": 39085, + ".âłĢ": 30097, + ".ðŁĺĤ": 46580, + "/": 14, + "/#": 13217, + "/$": 36266, + "/-": 19811, + "/.": 39382, + "//": 15348, + "////": 46271, + "///": 22734, + "//": 3502, + "/": 270, + "/@": 8216, + "0": 15, + "0": 271, + "1": 16, + "1": 272, + "2": 17, + "2": 273, + "3": 18, + "3": 274, + "4": 19, + "4": 275, + "5": 20, + "5": 276, + "6": 21, + "6": 277, + "7": 22, + "7": 278, + "8": 23, + "8": 279, + "9": 24, + "9": 280, + ":": 25, + ":\"": 29498, + ":\")": 46432, + ":\"": 12089, + ":#": 26625, + ":$": 33769, + ":'": 8017, + ":'(": 21250, + ":')": 10701, + ":'": 23851, + ":((": 42496, + ":(": 5965, + ":)": 11070, + ":))))": 42339, + ":)))": 21840, + ":))": 10164, + ":).": 39010, + ":)": 1408, + ":*": 12617, + ":-": 13021, + ":-(": 25137, + ":-)": 4223, + ":-": 10323, + ":...": 42140, + "://": 12441, + ":/": 13604, + "::": 33077, + ":::": 43818, + "::": 9788, + ":": 281, + ":>": 39677, + ":@": 14339, + ":]": 43486, + ":|": 45986, + ":âĢ¦": 22365, + ";": 26, + ";))": 41873, + ";)": 3661, + ";-": 35657, + ";-)": 10475, + ";;": 34824, + ";;": 24492, + ";": 282, + "<": 27, + "<-": 47280, + "": 34308, + "<<": 24588, + "<": 283, + "<<": 16482, + "<<<": 35054, + "<|endoftext|>": 49407, + "<|startoftext|>": 49406, + "=": 28, + "=))": 39587, + "=)": 17840, + "=": 284, + "==": 11748, + "====": 21734, + "========": 38952, + "==>": 29688, + "=>": 9714, + ">": 29, + ">.<": 38507, + ">:": 36196, + ">": 285, + "><": 28015, + ">>": 8270, + ">>": 2988, + ">>>": 6395, + ">>>>": 18461, + ">>>>": 18435, + ">>>>>": 32972, + ">>>>>>": 48947, + ">>>>>>>>": 41947, + ">_": 44144, + "?": 30, + "?!": 9785, + "?!!": 25342, + "?!\"": 29315, + "?!": 2835, + "?!?!": 16349, + "?!?!?!": 49084, + "?!?!?": 37619, + "?!?": 11395, + "?\"": 3283, + "?#": 24018, + "?'": 13610, + "?)": 9626, + "?,": 41628, + "?...": 22641, + "?..": 43905, + "?.": 41251, + "?:": 21067, + "?": 286, + "??": 5195, + "??!!": 43219, + "??!": 37341, + "??\"": 44996, + "??": 2197, + "???": 40017, + "???": 3824, + "????": 15936, + "????": 10362, + "?????": 21370, + "??????": 34589, + "????????": 45091, + "?@": 29258, + "?ðŁ¤Ķ": 47928, + "@": 31, + "@#": 39397, + "@.": 43730, + "@/": 28639, + "@": 287, + "@@": 30314, + "@_": 2692, + "@__": 17042, + "@___": 48308, + "A": 32, + "A": 288, + "B": 33, + "B": 289, + "C": 34, + "C": 290, + "D": 35, + "D": 291, + "E": 36, + "E": 292, + "F": 37, + "F": 293, + "G": 38, + "G": 294, + "H": 39, + "H": 295, + "I": 40, + "I": 296, + "J": 41, + "J": 297, + "K": 42, + "K": 298, + "L": 43, + "L": 299, + "M": 44, + "M": 300, + "N": 45, + "N": 301, + "O": 46, + "O": 302, + "P": 47, + "P": 303, + "Q": 48, + "Q": 304, + "R": 49, + "R": 305, + "S": 50, + "S": 306, + "T": 51, + "T": 307, + "U": 52, + 
"U": 308, + "V": 53, + "V": 309, + "W": 54, + "W": 310, + "X": 55, + "X": 311, + "Y": 56, + "Y": 312, + "Z": 57, + "Z": 313, + "[": 58, + "[#": 11115, + "[...": 39975, + "[...]": 43790, + "[": 314, + "[@": 15148, + "[]": 22240, + "\\": 59, + "\\'": 41239, + "\\": 315, + "]": 60, + "]\"": 39434, + "],": 34067, + "].": 26262, + "]:": 21641, + "]": 316, + "][#": 39009, + "][": 29329, + "^": 61, + "^)": 30720, + "^-": 43516, + "^.": 31552, + "^.^": 35791, + "^": 317, + "^^": 34454, + "^^": 9064, + "^_": 14423, + "^_^": 15995, + "_": 62, + "_'": 44701, + "_(": 36951, + "_)": 37393, + "_*": 36237, + "_,": 31417, + "_-": 23193, + "_.": 26841, + "_/": 37647, + "_:": 13109, + "_": 318, + "__": 2355, + "__:": 47043, + "__": 3838, + "___": 43812, + "___": 13530, + "____": 4727, + "____": 25350, + "_____": 38803, + "________": 9549, + "________________": 20115, + "`": 63, + "`": 319, + "a": 64, + "a": 320, + "aa": 1821, + "aa": 3894, + "aaa": 14376, + "aaa": 9583, + "aaaa": 6727, + "aaaa": 19336, + "aaaaa": 31095, + "aaaaaa": 44413, + "aaaaaaaa": 23126, + "aaaah": 49151, + "aaah": 35856, + "aaay": 37846, + "aab": 34108, + "aac": 23251, + "aac": 11346, + "aad": 20464, + "aad": 35894, + "aaf": 37638, + "aaf": 31534, + "aag": 42174, + "aah": 28990, + "aaj": 28727, + "aaj": 43411, + "aak": 37739, + "aal": 22268, + "aal": 30208, + "aali": 27896, + "aaliyah": 46577, + "aam": 12943, + "aam": 22775, + "aama": 45018, + "aamaadmi": 45563, + "aamaadmiparty": 46406, + "aamir": 27456, + "aan": 20705, + "aan": 13426, + "aand": 38054, + "aap": 12023, + "aap": 12052, + "aapl": 34516, + "aar": 4695, + "aar": 13234, + "aard": 46932, + "aaron": 13948, + "aaron": 7709, + "aas": 28542, + "aas": 32205, + "aat": 34018, + "aat": 35004, + "aau": 35426, + "aay": 38281, + "aay": 40249, + "aaz": 26770, + "ab": 596, + "ab": 3937, + "aba": 44204, + "aba": 11102, + "abad": 33444, + "abad": 7155, + "aban": 41662, + "aband": 8595, + "abandon": 28805, + "abandoned": 11227, + "abar": 17860, + "abar": 39805, + "abas": 25402, + "abay": 43542, + "abb": 38954, + "abb": 38297, + "abba": 30870, + "abbas": 37494, + "abbas": 24412, + "abbey": 31927, + "abbey": 10132, + "abbie": 39949, + "abbo": 13536, + "abbot": 44046, + "abbott": 43737, + "abbott": 15649, + "abbrevi": 44843, + "abby": 30586, + "abby": 14694, + "abc": 13137, + "abc": 5334, + "abcnews": 31566, + "abd": 44093, + "abdel": 46511, + "abdomin": 35335, + "abdominal": 39328, + "abdu": 13361, + "abduc": 17884, + "abducted": 31520, + "abduction": 36984, + "abdul": 14227, + "abdul": 15593, + "abdullah": 21317, + "abe": 15856, + "abe": 12734, + "abee": 36037, + "abel": 31938, + "abel": 25318, + "abella": 46156, + "aben": 40865, + "aber": 7828, + "aber": 41867, + "aberdeen": 30539, + "aberdeen": 17236, + "abh": 27484, + "abh": 33649, + "abhcosmetics": 49189, + "abhi": 18113, + "abhin": 44045, + "abhishek": 44502, + "abi": 16867, + "abi": 14161, + "abia": 48604, + "abide": 49163, + "abig": 20863, + "abigail": 25686, + "abil": 21135, + "abilities": 8724, + "ability": 35146, + "ability": 3024, + "abit": 48668, + "ablanc": 33716, + "able": 10102, + "able": 863, + "abled": 10655, + "ableg": 24055, + "ables": 8486, + "ableton": 47169, + "ably": 6748, + "abnormal": 40934, + "abo": 2889, + "abo": 21861, + "aboard": 11661, + "abol": 31768, + "abolic": 46827, + "abolish": 47403, + "aboo": 42433, + "abor": 8416, + "aboriginal": 20422, + "abortion": 12336, + "abortions": 43218, + "aboss": 46401, + "abou": 36455, + "abou": 44053, + "abound": 41037, + "abour": 46637, + "about": 20204, + "about": 781, + 
"abouts": 36339, + "above": 35019, + "above": 4348, + "aboy": 37077, + "abpoli": 44779, + "abq": 38767, + "abr": 44932, + "abra": 10694, + "abra": 35087, + "abraham": 40623, + "abraham": 15869, + "abram": 33255, + "abrams": 29852, + "abre": 22472, + "abre": 46756, + "abri": 28605, + "abridged": 45333, + "abroad": 11253, + "abru": 46295, + "abs": 18431, + "abs": 11109, + "absc": 25389, + "abscbn": 44260, + "abscbn": 45810, + "absen": 32453, + "absence": 19240, + "absent": 30363, + "absol": 4624, + "absolu": 7055, + "absolut": 4666, + "absolute": 7501, + "absolutely": 4703, + "absor": 14303, + "absorb": 35806, + "absorbed": 45059, + "absorbing": 46412, + "absorption": 42210, + "abstr": 7530, + "abstract": 23885, + "abstract": 10197, + "abstractart": 31170, + "abstraction": 47696, + "abstracts": 40065, + "absur": 21639, + "absurd": 29757, + "abt": 9850, + "abu": 9167, + "abu": 11787, + "abud": 20180, + "abudha": 21450, + "abudhabi": 25256, + "abuja": 23371, + "abun": 20544, + "abundance": 23236, + "abundant": 31611, + "abur": 23377, + "aburger": 46660, + "abuse": 7678, + "abused": 23855, + "abuses": 37132, + "abusing": 36558, + "abusive": 26858, + "abv": 34172, + "aby": 16342, + "aby": 31378, + "abyss": 33632, + "abz": 42292, + "ac": 546, + "ac": 2816, + "aca": 9213, + "acab": 41388, + "acacia": 44047, + "acad": 32537, + "acade": 2892, + "academia": 22662, + "academic": 31178, + "academic": 7935, + "academics": 26417, + "academies": 42569, + "academy": 29968, + "academy": 4041, + "acadi": 41455, + "acadia": 49236, + "acam": 26172, + "acan": 42227, + "acan": 26318, + "acap": 32357, + "acar": 22232, + "acare": 16961, + "acc": 26805, + "acc": 9318, + "acca": 30883, + "acce": 8564, + "acceler": 10161, + "accelerate": 23619, + "accelerated": 38513, + "accelerating": 41821, + "acceleration": 39387, + "accelerator": 25261, + "accent": 28110, + "accent": 18931, + "accents": 31738, + "accenture": 41853, + "accep": 4616, + "accept": 16447, + "accept": 9338, + "acceptable": 14209, + "acceptance": 17090, + "accepted": 9159, + "accepting": 12855, + "accepts": 22338, + "access": 7596, + "access": 3822, + "accessi": 10787, + "accessibility": 23407, + "accessible": 13977, + "accessing": 46339, + "accessories": 10220, + "accessory": 20417, + "acci": 4263, + "acci": 33943, + "accident": 6608, + "accidental": 24895, + "accidentally": 11061, + "accidents": 22072, + "acclaimed": 21172, + "acco": 44730, + "accol": 33858, + "accolades": 46731, + "accom": 23658, + "accommo": 34495, + "accommod": 14386, + "accommodate": 34708, + "accommodation": 18066, + "accommodations": 45536, + "accomp": 24985, + "accompan": 14746, + "accompanied": 20715, + "accompany": 34142, + "accompanying": 38179, + "accompli": 10205, + "accomplish": 25542, + "accomplished": 16462, + "accomplishment": 26100, + "accomplishments": 24965, + "accor": 4182, + "accord": 34293, + "accord": 28513, + "according": 4717, + "accordingly": 35535, + "accordion": 48760, + "accoun": 3081, + "account": 18424, + "account": 4684, + "accountability": 19377, + "accountable": 24216, + "accountant": 31026, + "accountants": 37222, + "accounted": 43951, + "accounting": 14805, + "accounts": 9974, + "accra": 31900, + "accred": 17451, + "accreditation": 27015, + "accredited": 27647, + "acct": 45569, + "accu": 5618, + "accumul": 19275, + "accumulation": 37112, + "accur": 6551, + "accuracy": 18423, + "accurate": 8858, + "accurately": 24206, + "accusations": 33615, + "accuse": 39414, + "accused": 9434, + "accuses": 27496, + "accusing": 41474, + "acdc": 45067, + "ace": 2675, 
+ "ace": 804, + "acea": 35219, + "aceae": 38153, + "acele": 40868, + "aceous": 33610, + "acer": 37990, + "acer": 25809, + "aces": 5725, + "acet": 28735, + "acf": 38389, + "ach": 972, + "ach": 987, + "acha": 22686, + "acharya": 45780, + "achat": 32706, + "ache": 27771, + "ache": 7214, + "ached": 17048, + "acher": 38442, + "acher": 17936, + "achers": 25051, + "aches": 14823, + "achi": 3264, + "achi": 9087, + "achiev": 8160, + "achieve": 14798, + "achieve": 8175, + "achieved": 12359, + "achievement": 8245, + "achievements": 16114, + "achiever": 46286, + "achievers": 44544, + "achieves": 40123, + "achieving": 16120, + "achilles": 33327, + "achim": 42335, + "aching": 12864, + "acho": 33130, + "achs": 41195, + "aci": 4359, + "aci": 34100, + "acia": 30163, + "acial": 32422, + "acid": 35474, + "acid": 10085, + "acidity": 48800, + "acids": 27751, + "acies": 20162, + "acin": 39442, + "acing": 9442, + "acio": 26202, + "acion": 44965, + "acion": 24968, + "acional": 26435, + "aciones": 35832, + "acious": 16020, + "acity": 7511, + "ación": 38175, + "ack": 877, + "ack": 725, + "acked": 5698, + "acker": 31201, + "acker": 7940, + "ackeray": 41843, + "acki": 42857, + "acking": 5515, + "ackles": 28503, + "acknow": 13563, + "acknowle": 18100, + "acknowledge": 25209, + "acknowledged": 35913, + "acknowledges": 49083, + "acknowledging": 45645, + "acks": 3858, + "acl": 47593, + "acl": 23073, + "acle": 6504, + "acles": 34164, + "aclu": 37354, + "acm": 39317, + "acmilan": 36500, + "acne": 24195, + "aco": 9463, + "aco": 8800, + "acol": 17431, + "acollege": 43468, + "acom": 17224, + "acom": 22342, + "acon": 11621, + "acon": 11571, + "aconf": 38851, + "acons": 31599, + "acor": 22076, + "acorn": 37537, + "acos": 39943, + "acosta": 31994, + "acou": 8794, + "acoun": 31295, + "acounty": 45449, + "acoustic": 10616, + "acoustics": 43873, + "acp": 19627, + "acqu": 7946, + "acquainted": 40713, + "acqui": 12194, + "acquire": 21576, + "acquired": 15932, + "acquires": 27376, + "acquiring": 42785, + "acquis": 14207, + "acquisition": 16543, + "acquisitions": 39649, + "acr": 43648, + "acre": 26749, + "acre": 9493, + "acres": 11630, + "acro": 21060, + "acrob": 40891, + "acron": 37770, + "across": 2500, + "acrosse": 40979, + "acruz": 40455, + "acry": 10440, + "acrylic": 12252, + "acs": 11782, + "act": 10305, + "act": 1393, + "acted": 10971, + "acti": 4786, + "acting": 6319, + "action": 12493, + "action": 1816, + "actions": 6271, + "activ": 3430, + "activate": 26737, + "activated": 22249, + "activation": 26769, + "active": 19009, + "active": 4046, + "actively": 18645, + "activi": 7230, + "activism": 20117, + "activist": 10850, + "activists": 12649, + "activities": 6514, + "activity": 6206, + "actment": 44807, + "acton": 36167, + "acton": 36697, + "actonclimate": 43797, + "actor": 12181, + "actor": 4035, + "actors": 9255, + "actorslife": 25117, + "actorvijay": 34033, + "actress": 5805, + "actresses": 33639, + "acts": 6816, + "actu": 2375, + "actual": 7488, + "actually": 2955, + "acu": 9204, + "acu": 48475, + "aculture": 38145, + "acup": 30869, + "acup": 27278, + "acupuncture": 40043, + "acur": 44719, + "acura": 30120, + "acus": 33710, + "acute": 19734, + "acy": 18717, + "acy": 2356, + "ad": 594, + "ad": 680, + "ada": 25785, + "ada": 1886, + "adaily": 47254, + "adal": 46646, + "adam": 6037, + "adam": 4944, + "adamlambert": 27659, + "adams": 7942, + "adan": 41802, + "adani": 37499, + "adap": 6341, + "adapt": 22666, + "adaptation": 16566, + "adapted": 26657, + "adapter": 21839, + "adapting": 44120, + "adaptive": 28672, + "adar": 27702, + 
"adar": 32681, + "adas": 23250, + "adata": 39500, + "aday": 31367, + "aday": 10280, + "adays": 24337, + "adb": 45630, + "adc": 38201, + "add": 19408, + "add": 3536, + "addams": 38912, + "added": 4149, + "adder": 47557, + "addi": 36378, + "addic": 5709, + "addict": 14614, + "addicted": 16275, + "addiction": 11751, + "addictive": 29638, + "addicts": 29997, + "adding": 8676, + "addis": 43911, + "addison": 32369, + "additi": 26927, + "addition": 6698, + "additional": 10666, + "additions": 22575, + "additive": 48546, + "addo": 40001, + "address": 5834, + "addressed": 20817, + "addresses": 12702, + "addressing": 10594, + "adds": 9944, + "addy": 24746, + "ade": 2194, + "ade": 1928, + "adecides": 46374, + "aded": 9994, + "adee": 47054, + "adel": 4434, + "adel": 27308, + "adelaide": 38193, + "adelaide": 11611, + "adele": 42843, + "adele": 21220, + "adelrey": 43627, + "ademy": 49123, + "aden": 28669, + "aden": 28688, + "adena": 23648, + "adequ": 18232, + "adequate": 22281, + "ader": 21365, + "adero": 49185, + "aders": 27672, + "ades": 5793, + "adh": 42301, + "adhd": 32649, + "adhe": 21175, + "adhesive": 38429, + "adi": 2486, + "adi": 8779, + "adia": 26874, + "adic": 36780, + "adid": 8086, + "adidas": 22396, + "adidas": 9589, + "adidasoriginals": 48575, + "adies": 45834, + "adifference": 37217, + "adilla": 41167, + "ading": 15000, + "adio": 15060, + "adirond": 36843, + "adish": 49009, + "adity": 28596, + "aditya": 37186, + "adityanath": 44437, + "adjac": 32517, + "adjacent": 33836, + "adjec": 45512, + "adju": 16413, + "adjun": 45995, + "adjust": 13784, + "adjust": 28073, + "adjustable": 20476, + "adjusted": 30515, + "adjusting": 41132, + "adjustment": 36081, + "adjustments": 36331, + "adl": 49351, + "adler": 30222, + "adm": 9892, + "adm": 33604, + "admi": 11666, + "admin": 12528, + "admini": 6434, + "administr": 12174, + "administration": 9502, + "administrative": 22424, + "administrator": 22603, + "administrators": 36123, + "admins": 49297, + "admir": 17031, + "admiral": 21013, + "admiration": 39569, + "admire": 17791, + "admired": 36103, + "admirer": 48344, + "admiring": 29835, + "admission": 11315, + "admissions": 22463, + "admit": 13769, + "admits": 16332, + "admitted": 20427, + "admitting": 46148, + "adn": 40339, + "adnan": 42037, + "ado": 4775, + "ado": 2933, + "adobe": 29256, + "adobe": 16484, + "adog": 44913, + "adol": 33512, + "adole": 22704, + "adolescent": 36793, + "adolescents": 45656, + "adolf": 41179, + "adon": 25907, + "adona": 48419, + "adop": 4183, + "adopt": 16441, + "adopt": 11159, + "adoptable": 36905, + "adoptdont": 19674, + "adoptdontshop": 20089, + "adopted": 12538, + "adopting": 30158, + "adoption": 11544, + "adopts": 40853, + "ador": 4992, + "ador": 9162, + "adora": 40031, + "adorable": 6298, + "adoration": 46781, + "adore": 15502, + "adored": 49233, + "adores": 30290, + "adorned": 44953, + "ados": 20079, + "adox": 32188, + "adp": 44426, + "adr": 46189, + "adren": 24204, + "adrenaline": 35552, + "adri": 5935, + "adrian": 25012, + "adrian": 13163, + "adriana": 41363, + "adrid": 26562, + "adrien": 47469, + "adrienne": 40081, + "ads": 2485, + "adu": 16882, + "adu": 24446, + "adukone": 30511, + "adul": 7222, + "adult": 42209, + "adult": 7115, + "adulthood": 40964, + "adults": 9391, + "adv": 1647, + "adv": 21018, + "advan": 33411, + "advance": 27291, + "advance": 7022, + "advanced": 7465, + "advancement": 35437, + "advances": 15852, + "advancing": 21355, + "advani": 48189, + "advant": 7017, + "advantage": 8573, + "advantaged": 38361, + "advantages": 23506, + "adven": 41670, + 
"advent": 3071, + "advent": 15199, + "adventcalendar": 43492, + "adventur": 29627, + "adventure": 17251, + "adventure": 4377, + "adventurer": 48098, + "adventures": 7941, + "adventurous": 31179, + "adver": 4806, + "adverse": 30348, + "adversity": 32516, + "advert": 19080, + "adverti": 5682, + "advertise": 31473, + "advertised": 38987, + "advertisement": 18713, + "advertiser": 41829, + "advertisers": 45472, + "advertising": 8158, + "adverts": 44306, + "advice": 4973, + "advis": 4634, + "advise": 25962, + "advised": 23196, + "adviser": 20367, + "advisers": 40984, + "advises": 42761, + "advising": 39648, + "advisor": 12380, + "advisors": 23197, + "advisory": 10224, + "advoc": 6657, + "advocacy": 14443, + "advocate": 12044, + "advocates": 17757, + "adwords": 48343, + "ady": 41446, + "ady": 8781, + "ae": 5548, + "ae": 4542, + "aea": 37048, + "aed": 26912, + "aege": 42304, + "ael": 41533, + "ael": 43340, + "aen": 43085, + "aer": 10195, + "aeri": 27685, + "aerial": 44866, + "aerial": 12440, + "aero": 10196, + "aero": 25026, + "aerob": 42824, + "aeron": 37286, + "aeronau": 42816, + "aerop": 27735, + "aerosmith": 43253, + "aerospace": 20530, + "aes": 10617, + "aes": 35677, + "aest": 40694, + "aesthe": 21181, + "aesthetic": 16179, + "aesthetics": 29295, + "aew": 47108, + "af": 702, + "af": 4391, + "afa": 24953, + "afan": 47474, + "afar": 41637, + "afar": 37866, + "afb": 27022, + "afc": 29742, + "afc": 6571, + "afcb": 44276, + "afcon": 30019, + "afd": 44626, + "afe": 30487, + "afe": 13912, + "afer": 44707, + "aff": 8849, + "aff": 14864, + "affair": 13998, + "affairs": 9830, + "affe": 4556, + "affect": 11361, + "affected": 9715, + "affecting": 18448, + "affection": 33780, + "affection": 28381, + "affectionate": 42578, + "affects": 17285, + "affili": 12120, + "affiliate": 18652, + "affiliated": 37540, + "affiliation": 48377, + "affinity": 41451, + "affir": 25343, + "affirm": 42711, + "affirm": 48625, + "affirmation": 47495, + "affl": 34036, + "affleck": 35584, + "afford": 7951, + "afford": 13223, + "affordability": 44828, + "affordable": 43944, + "affordable": 8926, + "afg": 33994, + "afgh": 9029, + "afghan": 15919, + "afghanistan": 9836, + "afi": 24074, + "afi": 31958, + "afil": 27209, + "afire": 42010, + "afirst": 38601, + "afl": 15132, + "afl": 14356, + "aflo": 41959, + "afm": 38385, + "afootball": 41694, + "afor": 43102, + "afore": 41468, + "afp": 18311, + "afraid": 9474, + "afri": 13888, + "afric": 2136, + "africa": 3093, + "african": 17471, + "african": 4736, + "africans": 26534, + "afridi": 37651, + "afrika": 45833, + "afrin": 45586, + "afro": 16267, + "afro": 21795, + "afs": 48960, + "aft": 22693, + "after": 2278, + "after": 953, + "afterdark": 48966, + "afterlife": 46790, + "aftermath": 20958, + "afterno": 22330, + "afternoon": 39035, + "afternoon": 2716, + "afternoons": 31631, + "afterparty": 35305, + "afterwards": 23911, + "ag": 602, + "ag": 5241, + "aga": 1050, + "aga": 4654, + "again": 1495, + "against": 23838, + "against": 1601, + "agame": 46943, + "agan": 42946, + "agan": 9178, + "agar": 13199, + "agar": 17544, + "agarwal": 43117, + "agas": 20430, + "agate": 25454, + "agatha": 43896, + "agave": 42671, + "agawa": 39433, + "agazine": 44942, + "age": 4758, + "age": 805, + "aged": 3889, + "ageing": 25349, + "agen": 10101, + "agen": 43696, + "agencies": 13887, + "agency": 44885, + "agency": 6270, + "agend": 48653, + "agenda": 8728, + "agent": 21210, + "agent": 6576, + "agents": 10199, + "agentsof": 37074, + "agentsofshield": 38801, + "ager": 44847, + "ager": 10443, + "agers": 22123, + "ages": 
2321, + "agg": 45482, + "aggarwal": 39386, + "agger": 27836, + "aggi": 36844, + "aggie": 44244, + "aggie": 37618, + "aggies": 31047, + "aggio": 36685, + "aggrav": 35203, + "aggre": 10426, + "aggreg": 41968, + "aggregate": 41318, + "aggression": 28900, + "aggressive": 16295, + "aggressively": 48667, + "agh": 17917, + "agh": 14402, + "aghan": 31276, + "agi": 24036, + "agi": 17645, + "agic": 37652, + "agile": 16276, + "agility": 32161, + "aging": 4336, + "agio": 41746, + "agirl": 35469, + "agle": 37035, + "agle": 16702, + "agles": 36374, + "agles": 22679, + "aglia": 46912, + "agm": 19162, + "agn": 36474, + "agna": 43626, + "agne": 29374, + "agne": 48303, + "agnes": 26213, + "agno": 41540, + "ago": 6276, + "ago": 1468, + "agomez": 27127, + "agon": 26775, + "agon": 14901, + "agony": 36977, + "agor": 38920, + "agos": 32657, + "agov": 34227, + "agp": 46048, + "agr": 36639, + "agra": 26660, + "agra": 29830, + "agram": 2447, + "agre": 3180, + "agreat": 37594, + "agree": 5953, + "agreed": 12774, + "agreeing": 40720, + "agreement": 8286, + "agreements": 25865, + "agrees": 17854, + "agri": 20527, + "agri": 30326, + "agricul": 7234, + "agricultural": 15440, + "agriculture": 9720, + "agro": 33178, + "agro": 44589, + "agron": 41314, + "agroup": 40099, + "ags": 16926, + "agt": 39681, + "agu": 3922, + "agu": 36544, + "agua": 18482, + "aguchi": 49206, + "ague": 2095, + "aguero": 42964, + "agues": 7000, + "aguil": 27946, + "aguilar": 44715, + "ah": 1772, + "ah": 1288, + "aha": 12082, + "aha": 8429, + "ahah": 38661, + "ahaha": 32423, + "ahahaha": 42620, + "aham": 36036, + "ahan": 45061, + "ahan": 19255, + "ahar": 31038, + "ahar": 38760, + "ahe": 27688, + "ahead": 3158, + "ahem": 39995, + "ahh": 13152, + "ahhh": 14769, + "ahhhh": 21054, + "ahhhhh": 36392, + "ahi": 45349, + "ahi": 24154, + "ahl": 30433, + "ahmad": 32167, + "ahmad": 16902, + "ahmadi": 38656, + "ahmadiyya": 44865, + "ahmed": 19491, + "ahmed": 12081, + "ahmedabad": 26966, + "ahn": 33405, + "aho": 28114, + "aho": 38444, + "ahora": 43113, + "ahouse": 33197, + "ahoy": 38652, + "ahs": 16937, + "ahu": 11908, + "ahu": 16515, + "ai": 2014, + "ai": 2215, + "aia": 27046, + "aib": 34780, + "aic": 29454, + "aid": 13723, + "aid": 5182, + "aida": 33830, + "aidan": 48814, + "aidan": 26945, + "aide": 31558, + "aide": 9746, + "aided": 48707, + "aiden": 40020, + "aides": 49082, + "aids": 11759, + "aig": 27295, + "aig": 46989, + "aii": 22478, + "aik": 42575, + "aiken": 46342, + "ail": 1457, + "ail": 9154, + "ailed": 38919, + "ailing": 29999, + "ails": 27024, + "aim": 6787, + "aim": 11255, + "aime": 39872, + "aimed": 20247, + "aimee": 36318, + "aiming": 21768, + "aimo": 36706, + "aims": 13326, + "ain": 8326, + "ain": 2210, + "aine": 48983, + "aine": 17634, + "ains": 27621, + "aint": 29543, + "aint": 13099, + "ainted": 39933, + "aioli": 43949, + "air": 1281, + "air": 1922, + "aira": 35085, + "aira": 46444, + "airasia": 48020, + "airbnb": 23098, + "airborne": 22755, + "airbus": 15324, + "aircraft": 7706, + "airdrop": 38434, + "aire": 7682, + "aired": 21938, + "aires": 17034, + "airfield": 40525, + "airforce": 23511, + "airing": 20453, + "airline": 14847, + "airlines": 8929, + "airmen": 44499, + "airplane": 16451, + "airplanes": 33319, + "airplay": 47024, + "airpollution": 47362, + "airport": 48337, + "airport": 3259, + "airports": 21543, + "airs": 18539, + "airshow": 27139, + "airsoft": 30134, + "airspace": 49280, + "airstrikes": 37220, + "airtel": 34784, + "airtime": 46617, + "airwaves": 43910, + "airways": 14299, + "airy": 44453, + "ais": 7616, + "ais": 11393, + 
"aise": 30505, + "aish": 21946, + "aisha": 40211, + "aishwar": 29687, + "aishwarya": 44019, + "aisle": 26917, + "ait": 25613, + "ait": 40814, + "aj": 3990, + "aj": 6342, + "aja": 42343, + "aja": 19633, + "ajax": 21933, + "ajay": 22494, + "ajay": 28726, + "ajaydevgn": 35515, + "aje": 48818, + "aje": 33315, + "ajes": 38791, + "aji": 26102, + "aji": 21153, + "ajit": 42261, + "ajith": 24118, + "ajo": 26958, + "aju": 36855, + "ak": 819, + "ak": 1196, + "aka": 19154, + "aka": 3412, + "akaif": 45736, + "akan": 43678, + "akan": 38244, + "akapoor": 40064, + "akarta": 48603, + "akb": 41962, + "akbar": 27180, + "ake": 10558, + "ake": 5776, + "aked": 6115, + "aker": 14245, + "aker": 3074, + "akers": 5788, + "akes": 4764, + "akest": 46679, + "akh": 14821, + "akh": 30660, + "akhan": 28158, + "akhi": 41660, + "akhilesh": 48495, + "akhtar": 45458, + "aki": 18173, + "aki": 6592, + "akin": 24630, + "akin": 13601, + "aking": 1809, + "akins": 48568, + "akira": 34001, + "akis": 27732, + "akistan": 46221, + "akley": 39908, + "ako": 44027, + "ako": 14541, + "akon": 47105, + "akos": 44659, + "akrish": 37434, + "akron": 26115, + "aks": 2953, + "aksh": 28226, + "akshay": 21483, + "akshay": 38914, + "akshaykumar": 23624, + "akshi": 42634, + "aku": 18151, + "aku": 20815, + "aky": 11977, + "al": 526, + "al": 566, + "ala": 12783, + "ala": 3449, + "alab": 6365, + "alabam": 45880, + "alabama": 8422, + "alach": 24622, + "alad": 23074, + "aladdin": 29951, + "alai": 47072, + "alain": 28999, + "alam": 16612, + "alam": 16012, + "alamo": 41922, + "alamo": 34632, + "alan": 9563, + "alan": 5773, + "alana": 43405, + "aland": 34304, + "aland": 6819, + "alar": 34333, + "alarm": 11321, + "alarming": 37209, + "alarms": 31236, + "alarts": 31422, + "alas": 7276, + "alas": 22412, + "alaska": 9562, + "alaskan": 33898, + "alastair": 42062, + "alay": 30289, + "alay": 36450, + "alaya": 36397, + "alb": 45248, + "alba": 25254, + "alban": 10882, + "albania": 29170, + "albanian": 47721, + "albans": 44119, + "albany": 17359, + "albat": 42797, + "albeit": 38984, + "alber": 6413, + "albert": 34174, + "albert": 9507, + "alberta": 11048, + "alberto": 22714, + "albi": 18512, + "albino": 48062, + "albion": 24071, + "albu": 2216, + "album": 40712, + "album": 2431, + "albums": 10705, + "albuquerque": 31079, + "alcat": 35361, + "alche": 37909, + "alchemist": 38913, + "alchemy": 39501, + "alco": 6848, + "alco": 45446, + "alcohol": 9426, + "alcoholic": 25098, + "ald": 4539, + "ald": 2928, + "alda": 46440, + "alde": 33114, + "alden": 17155, + "alden": 27710, + "aldenrichards": 20051, + "alder": 18220, + "alder": 46571, + "aldi": 23204, + "aldo": 9933, + "aldridge": 38084, + "alds": 14285, + "aldu": 6505, + "aldub": 10532, + "aldub": 15247, + "ale": 1440, + "ale": 1336, + "alea": 26518, + "aleague": 38909, + "alec": 29804, + "alec": 19954, + "alecoscino": 47948, + "aled": 4970, + "alee": 24515, + "alej": 23440, + "alejandro": 32950, + "alek": 26906, + "alek": 43310, + "aleksand": 48429, + "alem": 11825, + "aleppo": 19258, + "aler": 25674, + "aler": 27335, + "alert": 4662, + "alerts": 22144, + "ales": 44171, + "ales": 5962, + "aless": 21864, + "alessandro": 37344, + "alestine": 31945, + "alex": 2959, + "alex": 4134, + "alexa": 16273, + "alexand": 10696, + "alexander": 25527, + "alexander": 7563, + "alexandra": 19054, + "alexandre": 35711, + "alexandria": 21171, + "alexis": 35023, + "alexis": 14243, + "aley": 21635, + "alf": 27098, + "alfa": 23482, + "alfar": 38870, + "alfie": 28598, + "alfon": 31947, + "alfonso": 41784, + "alfre": 20982, + "alfred": 16553, + 
"alfredo": 32291, + "algae": 25654, + "algar": 36291, + "algarve": 40290, + "alge": 24336, + "algebra": 33694, + "alger": 18568, + "algeria": 25257, + "algon": 33007, + "algori": 14912, + "algorithm": 23295, + "algorithms": 26039, + "alham": 23352, + "alhamdulil": 35129, + "alhamdulillah": 38982, + "ali": 835, + "ali": 3558, + "alia": 2492, + "aliaa": 36468, + "alian": 3464, + "alias": 40026, + "alibaba": 39231, + "alic": 25265, + "alice": 23759, + "alice": 9192, + "alici": 31630, + "alicia": 20914, + "alie": 8697, + "alien": 22846, + "alien": 9639, + "aliens": 14883, + "alier": 39493, + "alies": 38086, + "alife": 41347, + "alife": 21100, + "alig": 21272, + "alight": 36157, + "align": 31160, + "aligned": 29292, + "alignment": 27267, + "alik": 31141, + "alike": 12665, + "alim": 42075, + "alin": 42746, + "alin": 40063, + "alina": 39529, + "aline": 21799, + "aling": 5169, + "alion": 19049, + "alis": 21308, + "alis": 20114, + "alisa": 38918, + "alisation": 42143, + "alise": 36718, + "alised": 25099, + "alism": 5607, + "alison": 28653, + "alison": 16970, + "alist": 44900, + "alist": 3320, + "alistair": 40551, + "alistic": 22302, + "alists": 5653, + "alit": 45566, + "alities": 27925, + "ality": 1694, + "alive": 40467, + "alive": 4716, + "aliz": 30979, + "alization": 8026, + "alize": 10268, + "alized": 6141, + "alizer": 38922, + "alizes": 26181, + "alizing": 13023, + "alk": 30246, + "alk": 21577, + "alkal": 33450, + "alkaline": 39210, + "all": 813, + "all": 615, + "alla": 13884, + "alla": 14000, + "allabout": 43996, + "allah": 6378, + "allan": 36552, + "allan": 15404, + "allblacks": 47728, + "allday": 35862, + "alle": 4870, + "alle": 29478, + "alled": 7379, + "alleg": 7456, + "allegations": 16992, + "alleged": 12133, + "allegedly": 14177, + "alleges": 45051, + "allegh": 41479, + "allegheny": 47851, + "allegi": 28832, + "allegiance": 30955, + "allen": 16712, + "allen": 6386, + "allenge": 31387, + "aller": 10116, + "aller": 30630, + "allergic": 28809, + "allergies": 28247, + "allergy": 24408, + "allery": 32542, + "alles": 43354, + "allevi": 31682, + "alleviate": 44799, + "alley": 36205, + "alley": 10329, + "allez": 49137, + "alli": 4123, + "alli": 15268, + "alliance": 45404, + "alliance": 8945, + "alliances": 48403, + "allianz": 45740, + "allie": 25040, + "allied": 20045, + "allies": 17277, + "alligator": 28574, + "allin": 45007, + "allin": 22395, + "alline": 48182, + "alling": 2992, + "allis": 45309, + "allison": 34602, + "allison": 16578, + "allman": 42611, + "allo": 8107, + "allo": 18389, + "allocated": 42716, + "allocation": 35139, + "allon": 46693, + "allot": 26363, + "allotment": 33750, + "allow": 5645, + "allow": 6722, + "allowance": 35696, + "allowed": 7885, + "allowing": 12458, + "allows": 9966, + "alloy": 22467, + "alls": 1997, + "allstar": 31247, + "allstar": 22974, + "allstars": 31198, + "allthe": 29253, + "allu": 20157, + "alluarjun": 39333, + "allure": 41814, + "ally": 7461, + "ally": 769, + "alm": 28303, + "alma": 32933, + "alma": 18337, + "alman": 29394, + "almanac": 41268, + "almighty": 21898, + "almond": 15646, + "almonds": 30468, + "almost": 47534, + "almost": 2671, + "aln": 47203, + "alo": 3435, + "alo": 6183, + "aloe": 30728, + "alog": 15813, + "alogue": 9101, + "aloha": 23160, + "aloils": 49002, + "alom": 22236, + "alon": 14097, + "alon": 42846, + "alone": 4702, + "along": 8300, + "along": 2528, + "alongside": 8646, + "alonso": 25704, + "aloo": 46187, + "alore": 14323, + "alot": 16945, + "alou": 43180, + "aloud": 30028, + "alove": 46669, + "alove": 37045, + "alp": 32020, + 
"alp": 39342, + "alpac": 30128, + "alpaca": 42561, + "alph": 6720, + "alpha": 11807, + "alpha": 8624, + "alphabe": 45796, + "alphabet": 22335, + "alphon": 37865, + "alpine": 17055, + "alps": 18191, + "already": 2426, + "alright": 10866, + "als": 23982, + "als": 938, + "alsace": 49388, + "also": 1446, + "alt": 9995, + "alt": 10006, + "alta": 24470, + "alta": 25378, + "altaf": 47342, + "altam": 45624, + "altar": 16385, + "alter": 4949, + "alter": 21393, + "altered": 25201, + "altern": 47463, + "alternate": 15926, + "alternati": 16699, + "alternative": 37327, + "alternative": 8248, + "alternatives": 25041, + "alth": 23463, + "alth": 5863, + "although": 9421, + "alti": 35531, + "alties": 17276, + "altitude": 23241, + "altman": 48100, + "alto": 35053, + "alto": 17518, + "altogether": 45689, + "alton": 41331, + "alton": 36550, + "altrin": 38458, + "altrincham": 44718, + "alty": 5546, + "alu": 4776, + "alu": 27991, + "alum": 5404, + "alum": 10553, + "alumin": 14563, + "alumini": 22908, + "aluminium": 23631, + "aluminum": 15251, + "alumna": 30313, + "alumni": 6646, + "alumnus": 23633, + "alums": 30155, + "alv": 20928, + "alvar": 25196, + "alvarez": 26924, + "alvaro": 41941, + "alves": 38547, + "alvin": 27023, + "alway": 14046, + "alway": 43764, + "always": 24997, + "always": 1466, + "alwx": 32768, + "aly": 6468, + "aly": 12910, + "alyn": 49150, + "alyss": 29490, + "alyssa": 18898, + "alz": 12936, + "alz": 41128, + "alzheim": 15212, + "alzheimer": 21151, + "alzheimers": 34592, + "am": 548, + "am": 687, + "ama": 18206, + "ama": 1696, + "amad": 45095, + "amade": 37366, + "amag": 32049, + "amal": 15315, + "amal": 36753, + "aman": 19890, + "aman": 10110, + "amand": 14560, + "amanda": 10036, + "amar": 6424, + "amar": 19607, + "amara": 48522, + "amari": 42565, + "amarillo": 40449, + "amarine": 45591, + "amarketing": 30788, + "amas": 22716, + "amas": 15667, + "amat": 38664, + "amat": 25455, + "amate": 12453, + "amateur": 14287, + "amaya": 47210, + "amaz": 1185, + "amaze": 24846, + "amazed": 18944, + "amazing": 15949, + "amazing": 1370, + "amazingly": 20368, + "amazon": 13630, + "amazon": 4140, + "amb": 9042, + "amb": 16853, + "amba": 27003, + "ambani": 45967, + "ambas": 5634, + "ambassad": 5758, + "ambassador": 6795, + "ambassadors": 16832, + "ambed": 42089, + "ambedkar": 48131, + "amber": 18292, + "amber": 9986, + "ambi": 11844, + "ambient": 23447, + "ambigu": 35702, + "ambition": 20673, + "ambitions": 34152, + "ambitious": 18666, + "ambro": 17585, + "ambrose": 24253, + "ambu": 34423, + "ambul": 13944, + "ambulance": 15555, + "ambush": 40725, + "amc": 24942, + "amc": 16921, + "amd": 20845, + "ame": 3995, + "ame": 780, + "amed": 5660, + "ameen": 24229, + "amel": 31988, + "amel": 10960, + "ameli": 21599, + "amelia": 21433, + "amell": 48198, + "amen": 18716, + "amen": 12335, + "amend": 12425, + "amendment": 15019, + "amendments": 40901, + "amenities": 30096, + "ament": 27528, + "amer": 17081, + "amer": 16147, + "ameri": 40422, + "americ": 1283, + "america": 2224, + "americafirst": 43216, + "american": 8746, + "american": 2151, + "americana": 26221, + "americanair": 42538, + "americani": 39726, + "americans": 6676, + "americas": 33343, + "americas": 18142, + "ames": 5469, + "ameter": 23393, + "amethy": 30291, + "amethyst": 31485, + "amex": 46390, + "amg": 21324, + "amher": 32311, + "amherst": 39065, + "ami": 6100, + "ami": 3065, + "amic": 25824, + "amic": 21383, + "amid": 18908, + "amid": 11953, + "amide": 30952, + "amidst": 25172, + "amie": 36901, + "amig": 40294, + "amiga": 35329, + "amigo": 44991, + 
"amigos": 28176, + "amii": 35462, + "amiibo": 38871, + "amily": 36732, + "amin": 14337, + "amin": 20235, + "amina": 47531, + "amination": 30355, + "amine": 35823, + "aming": 3507, + "amino": 33464, + "amir": 26029, + "amir": 21973, + "amis": 29829, + "amish": 24958, + "amit": 15083, + "amit": 25255, + "amitabh": 48124, + "amitshah": 32374, + "aml": 43185, + "amma": 29786, + "amman": 29243, + "ammo": 33474, + "ammunition": 35060, + "amn": 24073, + "amne": 14596, + "amnesia": 41741, + "amnesty": 46330, + "amnesty": 21177, + "amo": 4833, + "amo": 11156, + "amodi": 9826, + "amon": 17492, + "amon": 24046, + "among": 12310, + "among": 4265, + "amongst": 12520, + "amoo": 26977, + "amor": 19977, + "amor": 15973, + "amore": 38937, + "amore": 22691, + "amores": 36338, + "amos": 18133, + "amoto": 25492, + "amount": 6403, + "amounts": 16747, + "amour": 29908, + "amovie": 41062, + "amp": 3521, + "amp": 6259, + "amped": 22640, + "amphi": 16379, + "amphibious": 45206, + "amphitheater": 41285, + "amphitheatre": 44039, + "ample": 34162, + "amples": 14536, + "ampli": 15647, + "amplifier": 31743, + "amplify": 45308, + "amps": 19252, + "ampton": 29410, + "ampton": 9347, + "amr": 30916, + "amreading": 16546, + "amrit": 33849, + "ams": 1396, + "amster": 9110, + "amsterdam": 9441, + "amtrak": 27855, + "amu": 11347, + "amu": 32336, + "amur": 35014, + "amura": 35487, + "amus": 36269, + "amuse": 21421, + "amuse": 44367, + "amused": 30212, + "amusement": 32570, + "amusic": 20266, + "amusing": 31789, + "amwriting": 9660, + "amy": 10547, + "amy": 5187, + "an": 514, + "an": 550, + "ana": 6588, + "ana": 1388, + "anab": 34742, + "anada": 27948, + "anag": 12115, + "anagh": 40774, + "anaheim": 23728, + "anak": 34814, + "anak": 38658, + "anal": 2785, + "analo": 34179, + "analog": 19963, + "analogue": 46031, + "analy": 4611, + "analyse": 47246, + "analyses": 39695, + "analysis": 5296, + "analyst": 14198, + "analysts": 28075, + "analytical": 34550, + "analytics": 8558, + "analyze": 28519, + "analyzing": 32107, + "anam": 29525, + "anan": 37215, + "anand": 25073, + "anand": 22083, + "anap": 41566, + "anarch": 46405, + "anarchi": 39879, + "anarchy": 27707, + "anas": 31382, + "anas": 12633, + "anast": 48902, + "anasta": 22915, + "anastasi": 36534, + "anastasia": 37975, + "anat": 10045, + "anath": 31277, + "anatom": 33759, + "anatomy": 15376, + "anc": 1124, + "anc": 17758, + "anca": 14583, + "ance": 7165, + "ance": 884, + "anced": 5071, + "ancer": 17415, + "ancers": 37296, + "ances": 3515, + "ancestor": 43904, + "ancestors": 24405, + "ancestral": 41615, + "ancestry": 30922, + "anch": 9489, + "anche": 34679, + "ancho": 26610, + "anchor": 20030, + "anchor": 13201, + "anchorage": 31950, + "anchored": 45926, + "anchors": 37830, + "anci": 4192, + "ancient": 31495, + "ancient": 5810, + "ancies": 21647, + "ancing": 7797, + "anco": 15459, + "ancy": 16282, + "ancy": 3633, + "and": 672, + "and": 537, + "anda": 2911, + "andalu": 31443, + "andco": 36302, + "ande": 26889, + "ande": 30354, + "ander": 3740, + "ander": 3935, + "anders": 10880, + "andersen": 32661, + "anderson": 26683, + "anderson": 6510, + "andes": 24052, + "andfriends": 36871, + "andhi": 21617, + "andhra": 32452, + "andi": 28870, + "andi": 14354, + "andie": 46318, + "andme": 42831, + "ando": 35950, + "ando": 5986, + "andolan": 48965, + "andon": 36488, + "andor": 45243, + "andover": 44177, + "andr": 22661, + "andra": 46795, + "andra": 21730, + "andre": 2657, + "andre": 9400, + "andrea": 10895, + "andreas": 20444, + "andrei": 42137, + "andres": 25197, + "andretti": 44291, + 
"andrew": 11717, + "andrew": 4847, + "andrews": 14506, + "andri": 37208, + "andro": 4417, + "andro": 17980, + "android": 24284, + "android": 5191, + "androidgames": 46572, + "andromeda": 42942, + "andré": 35609, + "ands": 32257, + "andthe": 22111, + "andu": 44200, + "andum": 47266, + "andy": 9447, + "andy": 2888, + "ane": 5846, + "ane": 3051, + "anec": 33965, + "anem": 41395, + "anemone": 49019, + "aneous": 48273, + "anes": 15381, + "anese": 48778, + "anesthe": 30622, + "anesthesia": 43353, + "anew": 39084, + "anew": 47341, + "anews": 20919, + "aney": 22387, + "anfield": 26993, + "ang": 883, + "ang": 2704, + "anga": 11641, + "angames": 43178, + "angan": 28264, + "angas": 46180, + "ange": 2960, + "ange": 3039, + "angel": 5029, + "angel": 5130, + "angela": 12354, + "angeles": 7382, + "angeli": 15265, + "angelic": 41038, + "angelica": 38582, + "angelina": 28890, + "angelo": 14342, + "angelou": 41328, + "angels": 7809, + "anger": 32737, + "anger": 6788, + "angerous": 39716, + "angers": 29756, + "angh": 34030, + "angi": 28003, + "angi": 24301, + "angie": 18859, + "angle": 21749, + "angle": 6946, + "angled": 32322, + "angler": 22284, + "anglers": 41608, + "angles": 18627, + "anglesey": 31850, + "anglia": 32076, + "anglic": 28322, + "anglican": 33284, + "angling": 36824, + "anglo": 39515, + "anglo": 30408, + "ango": 19090, + "angola": 36636, + "angor": 41740, + "angp": 19992, + "angry": 33910, + "angry": 9054, + "angs": 18441, + "angst": 41714, + "angu": 11209, + "angular": 43584, + "angular": 24981, + "angularjs": 48608, + "angus": 19688, + "ani": 1326, + "ani": 3624, + "ania": 9866, + "anian": 9945, + "anians": 39393, + "anic": 23113, + "anie": 26697, + "anie": 7671, + "anil": 28589, + "anil": 34619, + "anim": 2190, + "animal": 10697, + "animal": 4668, + "animalrights": 42859, + "animals": 4995, + "animate": 40076, + "animated": 13360, + "animation": 10344, + "animations": 42870, + "animator": 42591, + "anime": 23314, + "anime": 6469, + "anin": 45735, + "aning": 30972, + "anir": 27089, + "anirud": 35278, + "anirudhofficial": 45917, + "anis": 40986, + "anis": 47556, + "anism": 20947, + "anist": 16729, + "anistan": 9727, + "aniston": 47344, + "anit": 23683, + "anita": 18544, + "anium": 14794, + "anj": 22443, + "anja": 43440, + "anjali": 38834, + "anjo": 47353, + "ank": 13339, + "ank": 10029, + "anka": 45324, + "ankara": 34309, + "ankle": 14777, + "ankles": 48688, + "ann": 850, + "ann": 5424, + "anna": 13821, + "anna": 2160, + "annab": 22336, + "annabelle": 47661, + "annah": 39166, + "annah": 14327, + "annak": 41720, + "annan": 32166, + "annapolis": 34491, + "annas": 48467, + "anne": 9139, + "anne": 4083, + "anned": 27352, + "anner": 12642, + "annes": 24343, + "annette": 36821, + "annex": 42958, + "annex": 46389, + "anni": 2438, + "anni": 13728, + "annie": 37270, + "annie": 12173, + "annies": 43184, + "annihil": 32734, + "annis": 24742, + "anniv": 31399, + "anniver": 29671, + "annivers": 42836, + "anniversaire": 30882, + "anniversary": 3048, + "anno": 9901, + "anno": 26871, + "annon": 26385, + "annot": 30411, + "announ": 1806, + "announce": 3682, + "announced": 4103, + "announcement": 6932, + "announcements": 23735, + "announcer": 33626, + "announces": 6500, + "announcing": 11593, + "annoy": 45138, + "annoyed": 29863, + "annoying": 15248, + "annu": 21698, + "annual": 2906, + "annually": 23703, + "anny": 34313, + "anny": 5291, + "ano": 5617, + "ano": 2658, + "anom": 21612, + "anomaly": 46811, + "anon": 47079, + "anon": 13667, + "anonym": 38605, + "anonymous": 15036, + "anoo": 25690, + "anor": 
13243, + "anor": 16596, + "anos": 20132, + "another": 29274, + "another": 1380, + "anova": 24116, + "ans": 24586, + "ans": 885, + "ansari": 40748, + "ansel": 40356, + "answ": 3369, + "answe": 14391, + "answer": 4518, + "answered": 14499, + "answering": 18280, + "answers": 8692, + "ant": 1103, + "ant": 773, + "anta": 3023, + "antag": 41745, + "antal": 39355, + "antalya": 47440, + "antan": 32899, + "antarc": 21338, + "antarctic": 27077, + "antarctica": 22587, + "ante": 19311, + "ante": 9769, + "antebellum": 41683, + "antelope": 39177, + "anten": 35517, + "antenna": 26370, + "anter": 46508, + "antes": 14927, + "antgrasso": 39074, + "anth": 3737, + "anth": 29741, + "antha": 47981, + "anthe": 34167, + "anthem": 12504, + "anthi": 45261, + "anthology": 21009, + "anthony": 17477, + "anthony": 6113, + "anthro": 10019, + "anthropo": 18538, + "anthropology": 32407, + "anthus": 37639, + "anti": 3120, + "anti": 3564, + "antibio": 18954, + "antibiotic": 34387, + "antibiotics": 29499, + "antibody": 49018, + "antic": 8260, + "anticip": 11435, + "anticipate": 38280, + "anticipated": 18605, + "anticipating": 48067, + "anticipation": 26983, + "antics": 37126, + "antidote": 45476, + "antifa": 35926, + "antigua": 39910, + "antine": 17641, + "antino": 27818, + "antioxid": 23010, + "antioxidant": 37452, + "antioxidants": 34208, + "antiqu": 21745, + "antique": 46517, + "antique": 9060, + "antiques": 17365, + "antis": 19748, + "antisemitism": 36630, + "antit": 37833, + "antitrust": 49343, + "antlers": 47720, + "antly": 5265, + "anto": 16826, + "anto": 24486, + "antoine": 25188, + "anton": 5497, + "anton": 19644, + "antoni": 39958, + "antonio": 30497, + "antonio": 7842, + "antony": 30707, + "antrim": 40252, + "ants": 1589, + "antv": 47520, + "antw": 44460, + "antwer": 26970, + "antwerp": 33797, + "antz": 25684, + "anu": 8537, + "anu": 17152, + "anup": 29617, + "anus": 27084, + "anush": 22765, + "anushka": 42080, + "anushka": 39822, + "anushkasharma": 44203, + "anwar": 34261, + "anxi": 9021, + "anxiety": 11103, + "anxious": 27793, + "any": 1307, + "any": 1504, + "anya": 11173, + "anybody": 10071, + "anyi": 41632, + "anymore": 7372, + "anyone": 2302, + "anything": 3582, + "anytime": 13924, + "anyway": 8931, + "anyways": 19778, + "anywhere": 8863, + "anz": 14445, + "anz": 19425, + "anza": 14669, + "anzac": 31977, + "ao": 7313, + "ao": 5703, + "aoa": 47119, + "aoc": 31918, + "aofficial": 30840, + "aoki": 33602, + "aol": 40643, + "aon": 30928, + "aon": 48476, + "aor": 32044, + "aos": 46860, + "ap": 688, + "ap": 2728, + "apa": 36954, + "apa": 13537, + "apac": 34320, + "apache": 23921, + "apal": 38017, + "apan": 36562, + "apar": 9161, + "apark": 32528, + "apart": 6474, + "apart": 7803, + "aparthe": 25121, + "apartheid": 26597, + "apartment": 8285, + "apartments": 15791, + "aparty": 26767, + "apat": 31755, + "apathy": 18145, + "apc": 20300, + "apd": 44563, + "ape": 6098, + "ape": 2609, + "apec": 47530, + "aper": 13681, + "aper": 5858, + "apers": 15846, + "apes": 9550, + "apeu": 19040, + "apex": 41935, + "apex": 23712, + "aph": 16341, + "aph": 29491, + "apha": 47104, + "apho": 21758, + "aphra": 44147, + "api": 23342, + "api": 14674, + "apia": 44259, + "apic": 40679, + "aping": 18456, + "apink": 35725, + "apis": 37575, + "apk": 27648, + "apo": 4089, + "apo": 19758, + "apocaly": 13932, + "apocalypse": 17571, + "apocalyptic": 35675, + "apol": 5023, + "apolice": 45663, + "apolis": 9598, + "apollo": 48213, + "apollo": 11554, + "apolo": 31094, + "apolog": 25530, + "apologe": 42908, + "apologi": 14977, + "apologies": 21959, + 
"apologise": 39608, + "apologize": 22879, + "apologizes": 35298, + "apology": 20768, + "apor": 21871, + "apore": 6679, + "apost": 20309, + "apostle": 33051, + "apostles": 48457, + "app": 882, + "app": 2231, + "appa": 4884, + "appa": 13110, + "appalach": 30523, + "appalachian": 36806, + "appalling": 44797, + "appar": 26698, + "apparatus": 37716, + "apparel": 13972, + "apparent": 23963, + "apparently": 5287, + "appe": 3748, + "appe": 45949, + "appeal": 9625, + "appealing": 25909, + "appeals": 22447, + "appear": 5544, + "appear": 9308, + "appearance": 7238, + "appearances": 17214, + "appeared": 11561, + "appearing": 18759, + "appears": 8743, + "appell": 43833, + "appen": 37201, + "appen": 26589, + "apper": 18780, + "appet": 21686, + "appeti": 24179, + "appetite": 24481, + "appetizer": 36065, + "applau": 24713, + "applaud": 42152, + "applause": 22650, + "apple": 8629, + "apple": 3055, + "applemusic": 21390, + "apples": 14032, + "appleton": 45250, + "appli": 15495, + "appliance": 33677, + "appliances": 22134, + "applic": 4235, + "applicable": 37927, + "applicants": 28035, + "application": 7241, + "applications": 7341, + "applied": 12636, + "applies": 24910, + "apply": 4356, + "applying": 17965, + "appo": 5433, + "appoint": 36190, + "appointed": 11087, + "appointment": 10890, + "appointments": 23439, + "appoints": 25132, + "apprais": 36972, + "appraisal": 46108, + "appreci": 3474, + "appreciate": 6263, + "appreciated": 9264, + "appreciates": 36573, + "appreciating": 39352, + "appreciation": 9212, + "appreciationday": 37438, + "appreciative": 45074, + "appren": 10582, + "apprentic": 15662, + "apprentice": 19122, + "apprentice": 17985, + "apprentices": 38252, + "apprenticeship": 26939, + "apprenticeships": 35425, + "appro": 2398, + "approach": 7781, + "approach": 6241, + "approached": 36499, + "approaches": 14962, + "approaching": 12164, + "appropri": 8446, + "appropriate": 10768, + "appropriately": 30383, + "appropriation": 49110, + "approval": 13549, + "approve": 19064, + "approved": 9412, + "approves": 18107, + "approx": 18266, + "approxim": 14201, + "approximately": 16128, + "apps": 7020, + "appstore": 31377, + "appt": 48112, + "appy": 34420, + "apr": 39396, + "apr": 11177, + "apra": 37027, + "apric": 25923, + "apricot": 30815, + "april": 23548, + "april": 2484, + "apro": 42712, + "apro": 49051, + "apron": 29502, + "aps": 8868, + "apse": 31843, + "apt": 17921, + "aptly": 47313, + "apu": 22166, + "apur": 36900, + "apur": 45193, + "aq": 14018, + "aq": 26862, + "aqu": 4458, + "aqua": 18613, + "aquaculture": 41885, + "aquaman": 35098, + "aquari": 37605, + "aquarium": 16814, + "aquarius": 38879, + "aquatic": 22658, + "aque": 35927, + "aque": 37268, + "aqui": 36826, + "aquino": 33796, + "ar": 516, + "ar": 625, + "ara": 24161, + "ara": 3340, + "arab": 5405, + "arab": 12028, + "arabia": 11746, + "arabian": 24663, + "arabic": 16709, + "arabs": 39155, + "arac": 47620, + "arach": 37689, + "arag": 41502, + "araj": 45142, + "arak": 23416, + "aram": 19223, + "aram": 21473, + "arama": 49066, + "aran": 20839, + "aran": 19641, + "aras": 36399, + "arat": 30856, + "arav": 35836, + "arbit": 20267, + "arbitr": 22702, + "arbitration": 34845, + "arbor": 33516, + "arbor": 24878, + "arboretum": 41719, + "arc": 4997, + "arc": 11592, + "arca": 25189, + "arca": 37612, + "arcade": 13331, + "arcadia": 38372, + "arch": 2458, + "arch": 8557, + "archa": 45619, + "archae": 10121, + "archaeological": 26163, + "archaeologists": 45035, + "archaeology": 14868, + "archan": 33359, + "archbishop": 23994, + "arche": 22474, + "archer": 
21824, + "archers": 38407, + "archery": 23935, + "arches": 30771, + "archi": 4479, + "archie": 20557, + "archipel": 39750, + "archipelago": 43025, + "architec": 3359, + "architect": 12192, + "architects": 13290, + "architectural": 15360, + "architecture": 39038, + "architecture": 4920, + "archival": 39249, + "archive": 42257, + "archive": 10548, + "archived": 42379, + "archives": 9411, + "archy": 15643, + "arctic": 29716, + "arctic": 9138, + "ard": 3793, + "ard": 746, + "arden": 44600, + "arden": 27057, + "ardi": 23932, + "ardi": 19837, + "ardo": 35735, + "ardo": 9394, + "ards": 1654, + "ardu": 20906, + "arduino": 25398, + "are": 1076, + "are": 631, + "area": 2445, + "areas": 5429, + "arec": 18136, + "areclipse": 36030, + "ared": 5369, + "arel": 12798, + "arella": 24784, + "arelli": 48619, + "aren": 4033, + "aren": 4318, + "arena": 5463, + "arenas": 47860, + "arent": 37487, + "arer": 14857, + "arers": 33159, + "ares": 12224, + "arest": 11708, + "aret": 22247, + "areth": 47725, + "aretha": 42090, + "areyou": 37607, + "arez": 13108, + "arg": 27285, + "argent": 7812, + "argentina": 9789, + "argentine": 32582, + "argon": 40737, + "argos": 37443, + "argu": 7440, + "arguably": 30899, + "argue": 19788, + "argued": 48153, + "argues": 30045, + "arguing": 26549, + "argument": 16224, + "arguments": 24693, + "argus": 44300, + "argy": 21066, + "argyle": 36179, + "argyll": 40667, + "ari": 1221, + "ari": 3681, + "aria": 8883, + "arial": 42431, + "arian": 29980, + "arian": 6953, + "ariana": 14892, + "arianag": 23025, + "arianagrande": 23321, + "arianism": 44351, + "arians": 19104, + "arias": 22567, + "arie": 18774, + "ariel": 47959, + "ariel": 21025, + "aries": 5213, + "arif": 46621, + "arily": 12993, + "arin": 29564, + "arin": 18612, + "arina": 29271, + "arine": 29586, + "aring": 2142, + "ario": 8862, + "arios": 25392, + "aris": 15227, + "arise": 26490, + "arist": 12110, + "aristo": 25666, + "aristotle": 49156, + "arities": 31069, + "arity": 16608, + "arium": 11809, + "arius": 21482, + "ariz": 6516, + "arized": 40167, + "arizon": 28936, + "arizona": 7106, + "arjun": 24565, + "arjun": 20477, + "arjuna": 43835, + "ark": 11921, + "ark": 12010, + "arkansas": 12227, + "arkham": 36381, + "arl": 48542, + "arlington": 44940, + "arlington": 17865, + "arly": 3637, + "arm": 5671, + "arm": 4793, + "arma": 15887, + "arma": 38716, + "armad": 37897, + "armada": 34938, + "armagh": 44313, + "armani": 31314, + "armb": 37096, + "armchair": 45757, + "armed": 40471, + "armed": 8202, + "armen": 13145, + "armenia": 22008, + "armenian": 24891, + "armies": 46686, + "armin": 45481, + "arming": 19766, + "armist": 38150, + "armistice": 46765, + "armor": 16167, + "armored": 28214, + "armory": 38610, + "armour": 18503, + "armoured": 42514, + "arms": 5706, + "armstrong": 15005, + "army": 13541, + "army": 3133, + "armys": 27311, + "arn": 9348, + "arn": 37597, + "arnau": 45556, + "arne": 43509, + "arney": 35962, + "arnold": 49096, + "arnold": 13609, + "arns": 46692, + "aro": 7514, + "aro": 11551, + "aroa": 48209, + "arom": 16831, + "aroma": 40143, + "aroma": 26390, + "aromas": 47439, + "aromatherapy": 42584, + "aromatic": 39669, + "aron": 30855, + "aron": 28926, + "aroo": 47581, + "arora": 31897, + "arosa": 44264, + "arose": 44262, + "around": 35615, + "around": 1630, + "arqu": 35654, + "arquitec": 41703, + "arr": 39106, + "arr": 42489, + "arra": 32918, + "arra": 43827, + "arrahman": 44554, + "arran": 45722, + "arrang": 16711, + "arrange": 15410, + "arrange": 26311, + "arranged": 22451, + "arrangement": 23822, + "arrangements": 23792, + 
"arranging": 35321, + "array": 17293, + "arre": 4374, + "arrell": 28846, + "arrest": 9320, + "arrested": 5845, + "arresting": 43930, + "arrests": 20683, + "arri": 2115, + "arrival": 9073, + "arrivals": 19583, + "arrive": 8851, + "arrived": 3514, + "arrives": 9905, + "arriving": 10884, + "arro": 15729, + "arrog": 26997, + "arrogance": 47025, + "arrogant": 40582, + "arrow": 30920, + "arrow": 11149, + "arrowhead": 46393, + "arrows": 24768, + "arroyo": 45237, + "ars": 42815, + "ars": 864, + "arse": 22665, + "arsen": 5330, + "arsenal": 45234, + "arsenal": 6084, + "arsene": 32117, + "arson": 29937, + "art": 1486, + "art": 794, + "arta": 12031, + "arte": 13482, + "arte": 12947, + "artem": 40387, + "artemis": 45256, + "arten": 37043, + "arter": 29449, + "artery": 40062, + "artes": 48629, + "artforsale": 48239, + "artgallery": 31982, + "arth": 7146, + "arth": 20265, + "arthistory": 39313, + "arthr": 20807, + "arthritis": 22916, + "arthro": 43255, + "arthur": 35660, + "arthur": 8550, + "arti": 1635, + "arti": 34601, + "artic": 3003, + "articho": 30937, + "artichoke": 39647, + "article": 3550, + "articles": 11939, + "articul": 40343, + "articulate": 45444, + "artif": 8950, + "artifact": 37718, + "artifacts": 30249, + "artificial": 19357, + "artificial": 12040, + "artificialintelligence": 20799, + "artillery": 24465, + "artin": 33168, + "artin": 48540, + "artis": 41794, + "artisan": 36389, + "artisan": 21535, + "artisans": 40140, + "artist": 14326, + "artist": 2456, + "artiste": 41402, + "artistic": 12421, + "artiston": 48443, + "artistry": 38570, + "artists": 4899, + "artistson": 32127, + "artistsontwitter": 39469, + "artlovers": 35617, + "arto": 28464, + "artof": 31751, + "artoftheday": 43990, + "arton": 46744, + "arts": 22040, + "arts": 3812, + "artsy": 31588, + "arturo": 38591, + "artwit": 36713, + "artwork": 4188, + "artworks": 26215, + "arty": 45417, + "arty": 25916, + "aru": 13757, + "aru": 23907, + "aruba": 40131, + "arugula": 40770, + "arum": 48732, + "arun": 16105, + "arun": 31877, + "arunach": 47260, + "arunjaitley": 44874, + "arus": 22644, + "arvin": 16971, + "arvind": 21209, + "arvind": 41079, + "arvindkejriwal": 22971, + "arvo": 45726, + "arwx": 29824, + "ary": 4617, + "ary": 856, + "arya": 23594, + "aryan": 34966, + "as": 587, + "as": 601, + "asa": 39676, + "asa": 11914, + "asad": 42376, + "asaki": 22455, + "asam": 40603, + "asan": 22379, + "asan": 17841, + "asana": 42363, + "asant": 25536, + "asants": 37766, + "asap": 24199, + "asap": 10822, + "asar": 24733, + "asar": 49299, + "asb": 31186, + "asbe": 32113, + "asbestos": 33765, + "asc": 22720, + "asc": 23305, + "ascen": 20767, + "ascension": 35499, + "ascent": 36625, + "asci": 12753, + "asco": 25578, + "asco": 17488, + "ascot": 23723, + "ascri": 15506, + "asd": 36988, + "asda": 29391, + "asdf": 36857, + "asdfghj": 42758, + "asdfghjkl": 47660, + "ase": 8083, + "ase": 894, + "asean": 24472, + "aseball": 46903, + "ased": 2134, + "asen": 41085, + "aser": 39615, + "aser": 7209, + "ases": 3762, + "asf": 25863, + "asg": 34813, + "ash": 2067, + "ash": 2612, + "asha": 40572, + "asha": 13472, + "ashamed": 20633, + "ashby": 46531, + "ashe": 48523, + "ashe": 31752, + "asher": 37585, + "ashes": 12587, + "asheville": 28897, + "ashford": 37796, + "ashi": 15563, + "ashi": 15934, + "ashish": 33145, + "ashland": 39938, + "ashleigh": 49356, + "ashley": 17825, + "ashley": 8957, + "asho": 20273, + "ashok": 38141, + "ashore": 31194, + "ashram": 43445, + "ashton": 43264, + "ashton": 12228, + "ashtra": 18118, + "asi": 3596, + "asi": 12562, + "asia": 5741, + 
"asian": 21737, + "asian": 7128, + "asiangames": 49108, + "asians": 36771, + "asics": 31097, + "aside": 13676, + "asif": 37302, + "asim": 46050, + "asin": 48432, + "asin": 44347, + "asing": 4194, + "asingly": 15803, + "asion": 31753, + "asis": 12398, + "ask": 11027, + "ask": 2765, + "asked": 3993, + "asking": 5914, + "asks": 7953, + "asl": 41650, + "asleep": 10749, + "asley": 28206, + "asli": 44290, + "asm": 13851, + "asma": 38497, + "asmsg": 19839, + "aso": 30343, + "aso": 27932, + "asober": 43749, + "asocial": 48557, + "ason": 1163, + "asone": 31249, + "asons": 4249, + "asos": 37924, + "asot": 47968, + "asp": 17814, + "asp": 36666, + "asparag": 20301, + "asparagus": 20604, + "aspe": 10894, + "aspect": 19681, + "aspects": 18203, + "aspen": 35695, + "aspen": 25712, + "asper": 32991, + "asph": 28019, + "asphalt": 30574, + "aspir": 12669, + "aspirations": 36127, + "aspire": 24836, + "aspiring": 21862, + "asports": 43695, + "asr": 48052, + "asroma": 41000, + "ass": 12664, + "ass": 5301, + "assa": 47715, + "assad": 18699, + "assam": 19930, + "assan": 26352, + "assange": 27565, + "assas": 9603, + "assassin": 14366, + "assassin": 20029, + "assassinated": 40488, + "assassination": 24907, + "assassins": 34918, + "assassinscre": 36428, + "assassinscreed": 46082, + "assau": 7908, + "assaul": 19596, + "assault": 9679, + "assaulted": 30785, + "assaulting": 44143, + "asse": 3166, + "asse": 38600, + "assel": 37582, + "assemb": 5531, + "assemble": 26169, + "assembled": 22627, + "assemblies": 47406, + "assembling": 38670, + "assembly": 34542, + "assembly": 7059, + "assen": 38651, + "asser": 25665, + "asses": 21596, + "assess": 9209, + "assess": 23211, + "assessed": 44160, + "assessing": 31364, + "assessment": 10590, + "assessments": 32753, + "asset": 48463, + "asset": 13039, + "assets": 13170, + "assi": 2907, + "assi": 39540, + "assie": 31624, + "assign": 14190, + "assigned": 25767, + "assignment": 17342, + "assignments": 34257, + "assim": 36394, + "assimil": 43467, + "assist": 26558, + "assist": 10286, + "assistance": 11685, + "assistant": 6799, + "assistants": 31054, + "assisted": 18095, + "assisting": 24243, + "assists": 12675, + "assn": 44208, + "asso": 17617, + "assoc": 18891, + "associ": 3566, + "associate": 11777, + "associated": 11164, + "associates": 17358, + "association": 5578, + "associations": 33209, + "assor": 38604, + "assorted": 36701, + "assortment": 43112, + "asst": 24767, + "assu": 8328, + "assume": 19294, + "assumed": 37661, + "assuming": 29422, + "assump": 41182, + "assumption": 40773, + "assumptions": 45948, + "assurance": 28408, + "assure": 39161, + "assured": 25591, + "assures": 41988, + "assy": 29940, + "assy": 12963, + "ast": 1761, + "ast": 1242, + "asta": 43269, + "aste": 25033, + "aste": 25579, + "aster": 11013, + "aster": 9526, + "asteroid": 32253, + "asters": 33139, + "asth": 16684, + "asthma": 24610, + "asthour": 41238, + "astic": 15876, + "asting": 29984, + "astle": 46141, + "asto": 47275, + "aston": 24760, + "aston": 13879, + "astoni": 21962, + "astonishing": 27110, + "astonmartin": 40760, + "astor": 26391, + "astor": 47086, + "astoria": 34798, + "astounding": 37748, + "astr": 37609, + "astra": 47205, + "astra": 36079, + "astral": 45889, + "astri": 31243, + "astrid": 46499, + "astro": 8563, + "astro": 15318, + "astrology": 28526, + "astron": 7982, + "astronaut": 18376, + "astronauts": 29733, + "astronom": 23264, + "astronomer": 40036, + "astronomers": 44268, + "astronomical": 39775, + "astronomy": 17472, + "astrophotography": 38559, + "astros": 17598, + "asts": 10452, + 
"astu": 43137, + "astur": 45795, + "asu": 13157, + "asu": 16001, + "asun": 36044, + "asure": 3813, + "asus": 27269, + "aswell": 42978, + "asx": 38906, + "asy": 8524, + "asy": 2333, + "asylum": 15638, + "asym": 32539, + "at": 527, + "at": 536, + "ata": 4236, + "atable": 23909, + "atal": 24877, + "atal": 24797, + "atan": 33446, + "atar": 20128, + "atar": 7995, + "atari": 21549, + "atas": 30057, + "atay": 39518, + "atc": 28383, + "atch": 15938, + "atd": 33890, + "ate": 992, + "ate": 671, + "ateam": 42784, + "ateau": 16359, + "atec": 37352, + "atech": 31306, + "ated": 14589, + "ated": 943, + "atedly": 24698, + "atee": 32839, + "ateful": 5419, + "atelier": 29932, + "ately": 3862, + "atem": 17116, + "aten": 47984, + "atene": 30405, + "ateneo": 33904, + "ater": 18597, + "ater": 5877, + "ateral": 18819, + "aters": 22364, + "ates": 20370, + "ates": 1150, + "atest": 1705, + "ateur": 43677, + "atf": 28013, + "ath": 1374, + "ath": 1649, + "atha": 22530, + "atham": 23383, + "athan": 41260, + "athan": 26701, + "athe": 8963, + "athed": 47402, + "atheism": 25823, + "atheist": 22571, + "atheists": 47155, + "athen": 29112, + "athena": 30705, + "athens": 13524, + "ather": 6171, + "ather": 1817, + "athered": 34091, + "athers": 17266, + "athi": 28918, + "athing": 36069, + "athle": 3310, + "athlete": 7388, + "athletes": 7125, + "athletic": 33182, + "athletic": 9028, + "athletics": 7019, + "athlon": 14670, + "athome": 38217, + "athon": 4951, + "aths": 28835, + "athy": 34488, + "athy": 13183, + "ati": 591, + "ati": 6751, + "atia": 10908, + "atic": 20248, + "atic": 2647, + "atically": 13558, + "atics": 15666, + "atie": 30137, + "aties": 40060, + "atif": 41592, + "atiku": 37912, + "atile": 15474, + "atility": 23373, + "atime": 20158, + "atin": 36903, + "atin": 23047, + "atine": 39741, + "ating": 25653, + "ating": 1074, + "atio": 35401, + "ation": 2265, + "ation": 656, + "ational": 14205, + "ational": 3108, + "ationals": 44593, + "ationday": 20082, + "ations": 986, + "atis": 45456, + "atis": 41142, + "atism": 45638, + "ative": 18422, + "ative": 1648, + "atively": 11929, + "atives": 5629, + "ativity": 25166, + "atkins": 27734, + "atkinson": 28908, + "atl": 5411, + "atl": 10629, + "atla": 36043, + "atlan": 6818, + "atlanta": 39964, + "atlanta": 6839, + "atlantic": 28804, + "atlantic": 8189, + "atlantis": 27790, + "atlas": 15775, + "atle": 21170, + "atleast": 33231, + "atleti": 46067, + "atletico": 27501, + "atm": 14127, + "atmo": 8271, + "atmosphere": 10506, + "atmospheric": 24223, + "ato": 7987, + "ato": 4364, + "atoday": 26799, + "atom": 22418, + "atom": 24031, + "atomic": 18996, + "atoms": 41434, + "aton": 31525, + "aton": 10012, + "atop": 17455, + "ator": 10748, + "ator": 1962, + "atore": 28314, + "atorial": 32040, + "atories": 35678, + "atorium": 41306, + "ators": 3389, + "atory": 5920, + "atos": 41643, + "atour": 42967, + "atown": 24000, + "atp": 38105, + "atp": 19817, + "atr": 43247, + "atra": 20227, + "atra": 14401, + "atravel": 36981, + "atre": 46057, + "atri": 13882, + "atri": 38889, + "atric": 32238, + "atric": 13652, + "atrics": 36253, + "atrist": 41879, + "atrium": 29725, + "atrix": 43003, + "atro": 18724, + "atroc": 36197, + "atrocities": 37551, + "atry": 28334, + "ats": 46890, + "ats": 1032, + "atsu": 26531, + "att": 1017, + "att": 7103, + "atta": 7282, + "atta": 9146, + "attach": 43676, + "attach": 35653, + "attached": 11038, + "attachment": 28638, + "attack": 24971, + "attack": 3815, + "attacked": 12366, + "attacker": 39288, + "attackers": 47701, + "attacking": 16813, + "attacks": 7321, + "attain": 
46459, + "attar": 37110, + "attemp": 4933, + "attempt": 7409, + "attempted": 17408, + "attempting": 18195, + "attempts": 15610, + "atten": 4084, + "atten": 32408, + "attenborough": 45860, + "attend": 9841, + "attend": 5802, + "attendance": 11928, + "attendant": 35424, + "attended": 8140, + "attendees": 14648, + "attending": 6696, + "attends": 22248, + "attention": 4936, + "atters": 30675, + "atthe": 21489, + "atti": 49265, + "atti": 16235, + "attic": 26766, + "attire": 21222, + "attitude": 10648, + "attitudes": 27611, + "attle": 14685, + "attle": 5030, + "attn": 25677, + "attor": 8856, + "attorney": 10372, + "attorneys": 29113, + "attrac": 7154, + "attract": 17010, + "attracted": 28493, + "attracting": 31909, + "attraction": 16807, + "attractions": 22307, + "attractive": 12231, + "attracts": 31024, + "attribu": 24624, + "attributed": 37520, + "attributes": 40763, + "attu": 43173, + "atty": 36705, + "atu": 15191, + "atu": 24295, + "atuesday": 34841, + "atul": 1744, + "atul": 43948, + "atum": 48295, + "atur": 14986, + "aturday": 29027, + "ature": 25305, + "ature": 4490, + "atures": 7358, + "atus": 14795, + "atv": 19598, + "atwood": 45680, + "atwork": 39680, + "atx": 34849, + "atx": 20136, + "aty": 40974, + "aty": 33107, + "atz": 30432, + "au": 627, + "au": 2566, + "aua": 45906, + "aub": 45938, + "auberg": 49382, + "aubre": 25899, + "aubrey": 34110, + "auburn": 42269, + "auburn": 14534, + "auc": 24489, + "auch": 43024, + "auck": 14588, + "auckland": 16072, + "auction": 48160, + "auction": 6462, + "auctioned": 41073, + "auctions": 24876, + "aucus": 47374, + "aud": 16107, + "aud": 19711, + "audi": 5091, + "audi": 10277, + "audible": 33227, + "audience": 6863, + "audiences": 22328, + "audio": 13792, + "audio": 5766, + "audiobook": 26282, + "audit": 12505, + "audit": 17625, + "auditi": 37377, + "audition": 18673, + "auditions": 21134, + "auditor": 38050, + "auditorium": 15063, + "audre": 16075, + "audrey": 18812, + "audu": 27934, + "audubon": 40275, + "auer": 33460, + "auf": 28924, + "aug": 15397, + "aug": 5720, + "auga": 22797, + "augh": 28310, + "augh": 14005, + "augmente": 48356, + "augmented": 32708, + "augu": 2610, + "august": 24353, + "august": 3171, + "augusta": 26144, + "augustine": 27397, + "augustus": 36835, + "auk": 19058, + "aul": 20695, + "aul": 34391, + "ault": 47253, + "ault": 10219, + "aun": 10608, + "aun": 38721, + "aunt": 12685, + "auntie": 23783, + "aunty": 29528, + "aur": 8156, + "aur": 17282, + "aura": 27728, + "aure": 36010, + "aureli": 35980, + "auror": 30067, + "aurora": 13500, + "aus": 10624, + "aus": 7630, + "ausa": 37384, + "ausbiz": 46543, + "ausch": 33926, + "auschwitz": 36523, + "ausopen": 27831, + "ausp": 35039, + "auspicious": 38806, + "auspol": 8241, + "aussi": 19762, + "aussie": 40230, + "aussie": 14424, + "aussies": 35727, + "aust": 26301, + "aust": 25418, + "austen": 29885, + "auster": 25030, + "austerity": 26982, + "austin": 12845, + "austin": 5125, + "austinmahone": 34678, + "austr": 2518, + "australi": 13798, + "australia": 3444, + "australian": 23630, + "australian": 6258, + "australians": 31488, + "austri": 8946, + "austria": 11960, + "austrian": 20638, + "ausv": 35206, + "ausvotes": 34661, + "aut": 12343, + "auth": 2381, + "auth": 38247, + "authent": 18158, + "authentic": 41266, + "authentic": 10369, + "authentication": 39746, + "authenticity": 35734, + "autho": 34552, + "author": 14447, + "author": 4358, + "authored": 37928, + "authori": 19207, + "authorities": 12729, + "authority": 10524, + "authorization": 48854, + "authorized": 28463, + "authors": 
10765, + "auti": 8200, + "autism": 36256, + "autism": 11244, + "autisma": 43324, + "autistic": 29360, + "auto": 3917, + "auto": 5668, + "autobiography": 31509, + "autodesk": 40415, + "autograph": 10657, + "autograph": 13722, + "autographed": 16309, + "autographs": 17376, + "autoimmune": 45509, + "autom": 4114, + "automate": 43203, + "automated": 19022, + "automatic": 12126, + "automatically": 20725, + "automation": 12328, + "automobi": 44813, + "automobile": 25258, + "automotive": 12607, + "auton": 13100, + "autonews": 43975, + "autonom": 17870, + "autonomous": 20722, + "autonomy": 39223, + "autopsy": 44436, + "autos": 31118, + "autoshow": 46788, + "auts": 21140, + "autu": 5445, + "autum": 31783, + "autumn": 28940, + "autumn": 6110, + "autumnal": 35481, + "aux": 18154, + "aux": 8909, + "auxiliary": 37778, + "av": 722, + "av": 8484, + "ava": 12385, + "avage": 31505, + "avail": 1651, + "avail": 16686, + "availability": 17551, + "available": 1685, + "aval": 18012, + "avalan": 23970, + "avalanche": 25815, + "avalley": 45082, + "avalon": 30436, + "avan": 27971, + "avan": 33351, + "avant": 24305, + "avar": 33423, + "avatar": 18219, + "ave": 10062, + "ave": 4860, + "avec": 25828, + "aved": 47918, + "avel": 46817, + "avel": 48088, + "aven": 5963, + "aven": 32971, + "aveng": 21935, + "avenger": 24799, + "avengers": 39413, + "avengers": 12016, + "avengersendgame": 49342, + "avent": 22700, + "avenue": 7042, + "aver": 8788, + "aver": 11403, + "average": 6254, + "averaged": 37310, + "averages": 48982, + "averaging": 35266, + "avery": 20313, + "aves": 14023, + "avfc": 21304, + "avg": 19452, + "avgeek": 11114, + "avi": 3324, + "avi": 11297, + "avia": 38710, + "avian": 24115, + "aviation": 27717, + "aviation": 7617, + "aviator": 38921, + "aviators": 48011, + "avici": 46192, + "avicii": 49158, + "avid": 19118, + "avier": 14598, + "avila": 45339, + "aville": 40689, + "avin": 46204, + "avis": 45163, + "avis": 19765, + "aviv": 22130, + "aviva": 47122, + "aviz": 27607, + "avl": 44749, + "avo": 4496, + "avo": 32400, + "avoc": 12291, + "avocado": 14135, + "avocados": 48911, + "avoi": 16797, + "avoid": 30448, + "avoid": 5983, + "avoidance": 47983, + "avoided": 32103, + "avoiding": 22086, + "avoids": 48220, + "avon": 22790, + "avon": 17348, + "avril": 37763, + "avs": 31896, + "avut": 44472, + "avy": 29973, + "aw": 808, + "aw": 5557, + "awa": 4820, + "awa": 6872, + "await": 20769, + "awaited": 20092, + "awaiting": 14872, + "awaits": 15635, + "awak": 9776, + "awak": 41387, + "awake": 14695, + "awaken": 35412, + "awakening": 17017, + "awakens": 23191, + "awal": 42447, + "awal": 35090, + "awan": 48869, + "awan": 20420, + "awar": 5745, + "award": 36310, + "award": 2047, + "awarded": 7368, + "awarding": 37089, + "awards": 34528, + "awards": 2320, + "aware": 4427, + "aware": 7196, + "awareness": 19217, + "awareness": 4823, + "awarenessmonth": 34278, + "awarenessweek": 35294, + "away": 21088, + "away": 1520, + "aways": 12782, + "awaz": 18586, + "awd": 34846, + "awe": 1693, + "awe": 14106, + "aweather": 42142, + "aweather": 28681, + "awec": 38916, + "aweed": 29724, + "awesom": 16727, + "awesome": 30390, + "awesome": 1848, + "awesomeness": 22430, + "awful": 13617, + "awg": 46350, + "awgs": 35275, + "awh": 39566, + "awhile": 19171, + "awi": 15167, + "awil": 47271, + "awilliams": 42163, + "awk": 8888, + "awk": 40943, + "awkward": 42337, + "awkward": 10304, + "awn": 46222, + "awp": 43300, + "aws": 19658, + "awsome": 47196, + "awson": 36286, + "aww": 11568, + "awww": 15634, + "awwww": 26460, + "awx": 28385, + "ax": 3165, + 
"ax": 9203, + "axe": 19861, + "axel": 47889, + "axel": 32131, + "axes": 45970, + "axi": 30672, + "axial": 46550, + "axis": 19614, + "axle": 39003, + "axx": 47411, + "ay": 658, + "ay": 551, + "aya": 5917, + "ayala": 39827, + "ayama": 41194, + "ayan": 37781, + "ayan": 16269, + "ayana": 37400, + "ayas": 40904, + "ayat": 44902, + "ayat": 35720, + "aye": 21661, + "aye": 12446, + "ayer": 24852, + "ayers": 42783, + "ayesha": 46570, + "ayi": 33025, + "ayles": 44706, + "ayne": 35669, + "ayo": 21929, + "ayo": 18708, + "ayr": 23002, + "ayr": 36473, + "ayrshire": 32687, + "ays": 785, + "ayu": 40769, + "ayurve": 27185, + "ayurveda": 38986, + "ayush": 44831, + "ayy": 32514, + "ayyy": 41052, + "az": 854, + "az": 5468, + "aza": 22883, + "azad": 37838, + "azalea": 34087, + "azam": 34727, + "azar": 27911, + "azcardinals": 48846, + "aze": 41157, + "aze": 28485, + "azer": 19169, + "azerbai": 20649, + "azerbaijan": 23888, + "azhar": 47019, + "azi": 23914, + "azi": 18452, + "azine": 29140, + "azione": 48335, + "aziz": 41205, + "aziz": 29630, + "azo": 41227, + "azon": 36854, + "azores": 42826, + "azte": 33270, + "aztec": 34749, + "aztecs": 49387, + "azu": 27701, + "azu": 46963, + "azul": 39807, + "azure": 18514, + "azwx": 30262, + "azy": 24783, + "azz": 9817, + "azz": 26453, + "azza": 22255, + "azzi": 18758, + "azzle": 39974, + "azzo": 26779, + "azzur": 37055, + "azzy": 44534, + "añ": 23716, + "años": 41634, + "b": 65, + "b": 321, + "ba": 932, + "ba": 1792, + "baa": 33004, + "baahu": 34145, + "baahubali": 38663, + "bab": 1202, + "bab": 19039, + "baba": 12631, + "babe": 31177, + "babe": 7716, + "babes": 14253, + "babies": 6635, + "babs": 36217, + "babu": 21623, + "baby": 7268, + "baby": 1794, + "babygirl": 39554, + "babylon": 31928, + "babymetal": 45013, + "babys": 22266, + "babysitting": 34186, + "bac": 2791, + "bac": 25867, + "bacca": 40708, + "bach": 11773, + "bach": 8758, + "bachchan": 17690, + "bachel": 11283, + "bachelor": 45508, + "bachelor": 16766, + "bachelore": 26009, + "bachelorette": 29093, + "bacher": 49211, + "back": 1663, + "back": 893, + "backbone": 35635, + "backdrop": 20802, + "backed": 12721, + "backer": 22183, + "backers": 32934, + "background": 5994, + "backgrounds": 28215, + "backing": 14935, + "backlash": 31519, + "backpack": 14894, + "backpacking": 29524, + "backpacks": 37063, + "backs": 7562, + "backseat": 48812, + "backstage": 9236, + "backstreet": 46337, + "backthe": 26127, + "backto": 18703, + "backtoschool": 28730, + "backtothe": 43059, + "backup": 14415, + "backward": 37964, + "backwards": 21283, + "backyard": 12608, + "bacon": 48666, + "bacon": 7104, + "bacter": 11814, + "bacteria": 16556, + "bacterial": 26101, + "bad": 2564, + "bad": 2103, + "bada": 37475, + "badan": 39149, + "badass": 11616, + "baddest": 38112, + "baden": 36690, + "bader": 42254, + "badge": 11301, + "badger": 32686, + "badger": 22363, + "badgers": 22521, + "badges": 20084, + "badlands": 43192, + "badly": 13684, + "badminton": 21412, + "badoo": 33192, + "bados": 25755, + "bae": 32834, + "bae": 6855, + "baek": 18557, + "baek": 32702, + "baekhyun": 21572, + "baes": 46332, + "baf": 13616, + "baff": 35693, + "bafta": 29199, + "bag": 3408, + "bag": 3365, + "bage": 9698, + "bagel": 28777, + "bagels": 37489, + "baggage": 31402, + "bagged": 34047, + "bagh": 21659, + "bagh": 37271, + "baghdad": 30763, + "bago": 25105, + "bags": 6136, + "bagu": 27749, + "baguette": 45334, + "bah": 8372, + "bah": 16685, + "baha": 29592, + "baham": 43718, + "bahamas": 21224, + "bahan": 28704, + "bahn": 33452, + "bahrain": 12503, + "bai": 6232, 
+ "bai": 23339, + "bail": 22933, + "bail": 16986, + "bailey": 27535, + "bailey": 10180, + "bain": 40784, + "bain": 21593, + "bair": 29059, + "baird": 40474, + "bait": 18010, + "baj": 20713, + "baja": 40418, + "baja": 28374, + "bajo": 32619, + "bak": 4059, + "bak": 23742, + "bakar": 41414, + "bake": 20736, + "bake": 11878, + "baked": 10364, + "baker": 27303, + "baker": 7743, + "bakers": 35293, + "bakers": 40231, + "bakersfield": 40149, + "bakery": 13377, + "bakes": 43057, + "bakhta": 44912, + "bakhtawar": 46937, + "bakhtawarbz": 47118, + "baking": 11467, + "baku": 46417, + "baku": 31852, + "bal": 1398, + "bal": 2282, + "bala": 20291, + "balaji": 48694, + "balance": 42894, + "balance": 6827, + "balanced": 15273, + "balances": 37733, + "balancing": 23541, + "balboa": 45098, + "balcony": 16169, + "bald": 11153, + "bald": 14875, + "baldhead": 29191, + "baldwin": 16242, + "bale": 48573, + "bale": 18873, + "bales": 42879, + "bali": 16432, + "bali": 10900, + "balkan": 48499, + "balkans": 42987, + "ball": 3807, + "ball": 1069, + "balla": 42246, + "ballad": 33472, + "ballarat": 46645, + "ballard": 31750, + "baller": 49194, + "baller": 25655, + "ballerina": 34962, + "ballers": 34173, + "ballet": 10703, + "balli": 29406, + "ballin": 47444, + "ballin": 33057, + "balling": 47588, + "ballis": 46675, + "ballistic": 36667, + "ballo": 8871, + "ballon": 36469, + "balloon": 13634, + "balloons": 18130, + "ballot": 14185, + "ballots": 35051, + "ballpark": 26080, + "ballroom": 15493, + "balls": 6927, + "bally": 17275, + "bally": 29451, + "balm": 24962, + "balmain": 45929, + "balo": 12395, + "baloch": 23173, + "balochistan": 21918, + "balot": 44615, + "balotelli": 45721, + "bals": 44154, + "balsam": 29121, + "balsamic": 32654, + "balt": 24441, + "balti": 8400, + "baltic": 23817, + "baltimore": 38502, + "baltimore": 9582, + "balu": 38093, + "bam": 6383, + "bam": 12686, + "bama": 20021, + "bambam": 34538, + "bambi": 46596, + "bamboo": 49322, + "bamboo": 16748, + "ban": 1159, + "ban": 2777, + "bana": 18428, + "banan": 38410, + "banana": 8922, + "bananas": 19121, + "banc": 39252, + "band": 4613, + "band": 1963, + "banda": 31865, + "bandai": 42054, + "bandana": 39265, + "bandcamp": 32229, + "banded": 37804, + "bandic": 44400, + "bandit": 27639, + "bandits": 33940, + "bandra": 41393, + "bands": 7858, + "bandung": 29512, + "bandwagon": 36432, + "bandwidth": 48859, + "bane": 9597, + "banerjee": 48102, + "banff": 29565, + "bang": 3524, + "bang": 6907, + "bangalore": 14697, + "banger": 24872, + "bangers": 38311, + "banging": 33033, + "bangkok": 12351, + "bangla": 10339, + "bangla": 45928, + "bangladesh": 11245, + "bangle": 37634, + "bangor": 31190, + "bangs": 27992, + "bangtan": 39131, + "bani": 19732, + "banjo": 27014, + "bank": 7061, + "bank": 2723, + "banker": 27316, + "bankers": 30599, + "bankholiday": 48868, + "banking": 9566, + "bankno": 49201, + "bankof": 39120, + "bankrup": 21904, + "bankrupt": 23077, + "bankrupt": 37288, + "bankruptcy": 23978, + "banks": 6367, + "banksy": 33350, + "bann": 5304, + "banned": 12012, + "banner": 9185, + "banners": 23145, + "banning": 26246, + "bannon": 29710, + "bano": 42947, + "banquet": 14254, + "bans": 15146, + "bant": 23301, + "bant": 46657, + "banter": 25535, + "bao": 39487, + "bao": 20408, + "bap": 7415, + "bap": 23754, + "bapti": 15477, + "baptism": 36765, + "baptist": 13274, + "baptiste": 45770, + "baptized": 45400, + "bar": 1040, + "bar": 2411, + "bara": 19345, + "barack": 18670, + "barack": 22481, + "barackobama": 18885, + "barak": 47419, + "barak": 16260, + "barang": 38446, 
+ "barb": 24173, + "barb": 20913, + "barbados": 26992, + "barbar": 7906, + "barbara": 10937, + "barbarian": 42530, + "barbe": 18372, + "barbecue": 23501, + "barber": 19517, + "barber": 12296, + "barbershop": 37707, + "barbican": 47668, + "barbie": 16923, + "barca": 22942, + "barcel": 6134, + "barcelon": 47820, + "barcelona": 6412, + "barclay": 48877, + "barclay": 45276, + "barclays": 29538, + "bard": 39812, + "bard": 17514, + "bare": 16023, + "bare": 14318, + "barefoot": 30327, + "barely": 12684, + "bargain": 15076, + "bargaining": 41282, + "bargains": 34126, + "barge": 28272, + "bari": 21428, + "bari": 28016, + "barista": 31078, + "barit": 46300, + "bark": 32333, + "bark": 16560, + "barker": 20618, + "barking": 32676, + "barkley": 30266, + "barley": 22607, + "barlow": 25483, + "barn": 10490, + "barn": 10942, + "barnab": 43272, + "barnard": 44332, + "barne": 42527, + "barnes": 13102, + "barnet": 41943, + "barnett": 27650, + "barney": 24563, + "barns": 43759, + "barnsley": 37109, + "barnsley": 32153, + "baro": 17422, + "baro": 30817, + "baron": 48371, + "baron": 19349, + "baroness": 45056, + "barons": 45596, + "baroque": 25065, + "barr": 39473, + "barr": 22492, + "barra": 28442, + "barra": 33542, + "barrabest": 41376, + "barrac": 40835, + "barracks": 35822, + "barre": 13840, + "barre": 38257, + "barred": 33261, + "barrel": 11703, + "barrels": 22059, + "barren": 46743, + "barrett": 18701, + "barri": 8660, + "barric": 29189, + "barrie": 27090, + "barrier": 15706, + "barriers": 16321, + "barrington": 48954, + "barron": 34881, + "barrow": 42568, + "barrow": 24983, + "barry": 18028, + "barry": 8461, + "barrymore": 49310, + "bars": 8616, + "barstool": 44826, + "bart": 14838, + "bart": 12870, + "bartender": 33498, + "barthol": 48989, + "bartlett": 37130, + "bartol": 38209, + "barton": 48853, + "barton": 20345, + "baru": 16356, + "barun": 38278, + "barunsob": 41398, + "barça": 32788, + "bas": 1244, + "bas": 11420, + "basa": 26142, + "base": 2776, + "base": 4579, + "baseball": 23479, + "baseball": 3470, + "based": 35196, + "based": 2812, + "basel": 42803, + "basel": 20903, + "baseline": 40648, + "baseman": 45910, + "basement": 14792, + "bases": 20496, + "bash": 20462, + "bash": 10972, + "bashing": 37545, + "bashir": 42799, + "basic": 40452, + "basic": 7696, + "basically": 9125, + "basics": 15825, + "basil": 19225, + "basil": 14936, + "basilica": 27879, + "basin": 16117, + "basing": 47321, + "basis": 12278, + "baske": 3713, + "basket": 10338, + "basketball": 40023, + "basketball": 3835, + "baskets": 27787, + "basking": 39769, + "basque": 37175, + "bass": 22831, + "bass": 5992, + "bassett": 45992, + "bassist": 26496, + "bast": 28092, + "basti": 8559, + "bastille": 41874, + "bat": 2121, + "bat": 6575, + "bata": 39277, + "batb": 33962, + "batch": 9413, + "bate": 25034, + "bate": 28277, + "bateman": 41635, + "bates": 21727, + "batgirl": 46460, + "bath": 6064, + "bath": 5713, + "bathing": 20144, + "bathro": 21201, + "bathroom": 8470, + "bathrooms": 26434, + "baths": 19442, + "bathtub": 39942, + "bathurst": 36365, + "bati": 23362, + "bati": 37589, + "batman": 27811, + "batman": 7223, + "baton": 24331, + "bats": 14984, + "batsman": 35432, + "batt": 2407, + "batt": 48595, + "battalion": 20820, + "batter": 12654, + "batter": 31855, + "battered": 34375, + "batteries": 16666, + "battersea": 35839, + "battery": 7870, + "batting": 17401, + "battle": 7344, + "battle": 3528, + "battled": 37837, + "battlefield": 16055, + "battlefront": 42214, + "battleof": 47560, + "battles": 14213, + "battleship": 35165, + 
"battling": 17268, + "bau": 6055, + "bau": 34840, + "bauer": 22903, + "baugh": 41301, + "baum": 19840, + "bautista": 31881, + "bav": 21075, + "bavaria": 39977, + "bavarian": 44458, + "baw": 19808, + "bax": 21216, + "baxter": 26168, + "bay": 3631, + "bay": 2174, + "baya": 31573, + "bayan": 43895, + "bayarea": 28260, + "bayer": 48548, + "bayer": 29183, + "bayern": 14666, + "baylor": 21721, + "bayou": 33955, + "bays": 40156, + "baz": 10430, + "baz": 25268, + "bazaar": 20070, + "bazar": 49298, + "bb": 1174, + "bb": 3529, + "bba": 27762, + "bball": 15664, + "bbb": 33535, + "bbc": 5123, + "bbc": 5188, + "bbcc": 39052, + "bbce": 33818, + "bbcnews": 29370, + "bbcone": 28259, + "bbcqt": 37343, + "bbcr": 35802, + "bbcra": 17115, + "bbcradi": 49213, + "bbcradio": 22876, + "bbcsport": 49321, + "bbcspringwatch": 37358, + "bbctwo": 40395, + "bbcworld": 47340, + "bbe": 37559, + "bbed": 9077, + "bber": 7933, + "bbers": 36494, + "bbhutto": 28085, + "bbhuttozardari": 28135, + "bbi": 37047, + "bbin": 38553, + "bbing": 9787, + "bbins": 42504, + "bbl": 21961, + "bble": 26570, + "bble": 5924, + "bbled": 37626, + "bbles": 18093, + "bblo": 21231, + "bbloggers": 26614, + "bbly": 43031, + "bbm": 25382, + "bbmas": 22145, + "bbn": 28427, + "bbnaija": 20984, + "bbo": 21892, + "bbq": 41270, + "bbq": 6726, + "bbs": 10002, + "bbuk": 45978, + "bby": 11166, + "bby": 3810, + "bc": 3116, + "bc": 2162, + "bcc": 41509, + "bcci": 36138, + "bce": 36510, + "bcfc": 34359, + "bch": 36684, + "bcn": 25766, + "bcoz": 46373, + "bcpoli": 24389, + "bcs": 24909, + "bcu": 28299, + "bd": 24358, + "bd": 11165, + "bday": 33022, + "bday": 5781, + "bdg": 48418, + "bds": 26732, + "be": 571, + "be": 655, + "bea": 21886, + "bea": 20925, + "beach": 6068, + "beach": 2117, + "beaches": 12183, + "beachlife": 43824, + "beacon": 36883, + "beacon": 18858, + "beacons": 39395, + "bead": 31621, + "bead": 23557, + "beaded": 26661, + "beads": 14099, + "beagle": 30044, + "beak": 36498, + "beal": 45769, + "beale": 39717, + "beam": 35339, + "beam": 13663, + "beams": 23993, + "bean": 16471, + "bean": 5328, + "beanie": 21534, + "beans": 8302, + "bear": 6375, + "bear": 4298, + "bearable": 38608, + "bearcats": 33242, + "beard": 26157, + "beard": 9052, + "bearded": 28459, + "beardown": 43687, + "beards": 33020, + "bearer": 30686, + "bearers": 47986, + "bearing": 18370, + "bearings": 42083, + "bearish": 34829, + "bears": 6182, + "beasley": 43349, + "beast": 20847, + "beast": 6957, + "beastmode": 43076, + "beasts": 21771, + "beat": 3774, + "beat": 3018, + "beaten": 10864, + "beater": 41974, + "beati": 44386, + "beating": 10078, + "beatles": 11961, + "beatport": 31421, + "beatrice": 36922, + "beats": 6289, + "beatthe": 40550, + "beatty": 39903, + "beatz": 33363, + "beau": 1016, + "beau": 14298, + "beaufort": 45423, + "beaumont": 32857, + "beaut": 24559, + "beauti": 1154, + "beauties": 14874, + "beautiful": 13662, + "beautiful": 1215, + "beautifully": 10627, + "beauty": 12881, + "beauty": 2488, + "beav": 23260, + "beaver": 26432, + "beaver": 22874, + "beavers": 34513, + "beavs": 43909, + "bebe": 23331, + "bec": 6899, + "bec": 10773, + "became": 5464, + "because": 32714, + "because": 1631, + "becca": 27088, + "bech": 44055, + "beck": 8256, + "beck": 10396, + "becker": 26918, + "beckett": 27249, + "beckham": 18764, + "becky": 32406, + "becky": 18921, + "become": 2989, + "becomes": 6766, + "becoming": 6208, + "bed": 4152, + "bed": 2722, + "bedding": 31761, + "bedford": 20779, + "bedi": 39181, + "bedro": 18415, + "bedroom": 8411, + "bedrooms": 23996, + "beds": 13914, + 
"bedside": 47473, + "bedtime": 22115, + "bee": 6097, + "bee": 5028, + "beech": 32733, + "beech": 27596, + "beef": 21703, + "beef": 6529, + "beek": 37915, + "been": 33986, + "been": 1025, + "beep": 33432, + "beer": 8885, + "beer": 2544, + "beers": 10907, + "bees": 36249, + "bees": 9100, + "beet": 12582, + "beet": 28621, + "beethoven": 23656, + "beetle": 16534, + "beetles": 36317, + "beetro": 29251, + "beetroot": 31638, + "beets": 36087, + "before": 20898, + "before": 1348, + "beg": 2219, + "beg": 22401, + "began": 8636, + "begg": 36769, + "begging": 25371, + "begin": 19197, + "begin": 4947, + "beginner": 24351, + "beginners": 21930, + "beginning": 5791, + "beginnings": 22581, + "begins": 4635, + "begs": 43531, + "begun": 10514, + "beh": 21971, + "beh": 41612, + "beha": 5737, + "behalf": 11470, + "behave": 28825, + "behaved": 41617, + "behavi": 6149, + "behaving": 40745, + "behavior": 10461, + "behavioral": 25135, + "behaviors": 37741, + "behaviour": 14655, + "behavioural": 46019, + "behe": 42329, + "behin": 2335, + "behind": 2403, + "behindthe": 21104, + "behindthescenes": 26253, + "behold": 15929, + "bei": 38991, + "bei": 23227, + "beige": 26677, + "beij": 11547, + "beijing": 11796, + "bein": 39117, + "bein": 24168, + "being": 13481, + "being": 1265, + "beings": 17998, + "beingsalmankhan": 19637, + "beir": 20176, + "beirut": 22352, + "beit": 26963, + "bek": 46846, + "bek": 26135, + "bekind": 46691, + "bel": 1308, + "bel": 3543, + "bela": 30555, + "belarus": 30849, + "belated": 20256, + "belfast": 35100, + "belfast": 10015, + "belgi": 7001, + "belgian": 15008, + "belgium": 10239, + "belgrade": 30502, + "beli": 1859, + "beli": 45842, + "belichick": 46132, + "belie": 20854, + "beliebers": 27714, + "belief": 14802, + "beliefs": 20575, + "believ": 4972, + "believe": 15819, + "believe": 2649, + "believed": 13380, + "believein": 24294, + "believeinfilm": 37375, + "believer": 26057, + "believers": 28434, + "believes": 12017, + "believing": 19551, + "belinda": 44415, + "belize": 27990, + "bell": 5417, + "bell": 3718, + "bella": 18282, + "bella": 10418, + "bellamy": 34461, + "bellator": 31985, + "belle": 13587, + "belle": 11496, + "belles": 40678, + "bellevue": 32715, + "belli": 43335, + "bellletstalk": 42695, + "bello": 21954, + "bells": 12811, + "bellum": 35493, + "belly": 25901, + "belly": 10404, + "belmont": 25612, + "belo": 8379, + "belo": 41649, + "belong": 16453, + "belong": 13596, + "belonged": 39893, + "belonging": 28193, + "belongs": 14395, + "beloved": 9363, + "below": 3788, + "bels": 43127, + "belt": 36416, + "belt": 7373, + "belts": 21888, + "belvedere": 48003, + "ben": 1465, + "ben": 3518, + "bena": 46249, + "bench": 17770, + "bench": 8771, + "benches": 36349, + "benchmark": 31775, + "bend": 22100, + "bend": 13332, + "bender": 22551, + "bendigo": 48197, + "bending": 33897, + "bene": 12091, + "bene": 47151, + "beneath": 16850, + "bened": 13216, + "benedic": 24402, + "benedict": 47896, + "benedict": 18027, + "benef": 3260, + "benefici": 38593, + "beneficial": 24660, + "beneficiaries": 42160, + "benefit": 6399, + "benefited": 48266, + "benefiting": 29474, + "benefits": 5465, + "benefitting": 47222, + "benevol": 47060, + "benfica": 33873, + "beng": 6962, + "bengal": 17404, + "bengal": 16374, + "bengali": 33774, + "bengals": 23737, + "bengaluru": 21707, + "benghazi": 25967, + "benin": 40296, + "benitez": 46711, + "benjam": 10550, + "benjamin": 38647, + "benjamin": 12131, + "benji": 43548, + "benn": 39097, + "bennet": 48536, + "bennett": 12186, + "benny": 42369, + "benny": 20595, + "beno": 
35268, + "benoit": 44373, + "benson": 19578, + "bent": 9809, + "bent": 18369, + "bentley": 16859, + "benton": 30812, + "benz": 27937, + "benz": 13470, + "ber": 867, + "ber": 1516, + "bera": 32802, + "bere": 17458, + "bered": 9193, + "beren": 33654, + "beret": 41658, + "berg": 12022, + "berg": 3294, + "bergen": 22918, + "berger": 35933, + "berger": 13873, + "bergh": 35120, + "bergman": 42597, + "bergs": 43592, + "berk": 15633, + "berke": 14639, + "berkeley": 46049, + "berkeley": 16667, + "berkshire": 27300, + "berlin": 23532, + "berlin": 5891, + "berman": 21514, + "bermu": 21032, + "bermuda": 24644, + "bern": 9195, + "bern": 18382, + "bernade": 46242, + "bernar": 11962, + "bernard": 14579, + "bernardino": 35328, + "bernardo": 27137, + "bernardo": 28696, + "bernardokath": 29081, + "bernat": 40578, + "berni": 18798, + "bernie": 40093, + "bernie": 10503, + "berniesanders": 23745, + "bernstein": 33936, + "berra": 15089, + "berries": 8319, + "berry": 15334, + "berry": 3488, + "bers": 6408, + "berser": 39037, + "bert": 17340, + "bert": 2358, + "berta": 45187, + "berth": 28317, + "bertie": 47182, + "berto": 34073, + "bertr": 36962, + "bertrand": 41594, + "berts": 30205, + "berty": 35973, + "berwick": 40407, + "bery": 11411, + "bes": 26911, + "bes": 3635, + "beside": 13519, + "besides": 17596, + "bespoke": 15612, + "bess": 43791, + "best": 3419, + "best": 949, + "bestbuy": 29749, + "bestest": 31199, + "bestfan": 23880, + "bestfanarmy": 24590, + "bestfriend": 29832, + "bestfriend": 11856, + "bestfriends": 23555, + "besti": 35210, + "bestie": 17188, + "besties": 27346, + "besto": 28615, + "bestof": 27892, + "bestof": 39533, + "bestseller": 25841, + "bestselling": 28632, + "bet": 1051, + "bet": 4430, + "beta": 43188, + "beta": 9505, + "betes": 10255, + "beth": 9993, + "beth": 4892, + "bethan": 18781, + "bethany": 39130, + "bethany": 27952, + "bethe": 12624, + "bethel": 33410, + "bethesda": 32527, + "bethle": 30760, + "bethlehem": 31827, + "betis": 45590, + "beto": 33721, + "betra": 18436, + "betrayal": 33171, + "betrayed": 35692, + "bets": 17107, + "betsy": 28946, + "bett": 17715, + "bett": 20489, + "betta": 36387, + "bette": 35855, + "better": 10320, + "better": 1539, + "bettertogether": 47392, + "betting": 14319, + "betts": 38637, + "betty": 36175, + "betty": 14350, + "between": 1957, + "beu": 38660, + "bev": 40324, + "bev": 30968, + "bever": 9924, + "beverage": 18694, + "beverages": 28521, + "beverley": 39165, + "beverly": 30906, + "beverly": 16728, + "beverlyhills": 45363, + "beware": 14532, + "bewithyou": 36787, + "bex": 18676, + "bex": 24748, + "bexhill": 49200, + "bey": 3234, + "bey": 6767, + "beyon": 11447, + "beyonce": 16632, + "beyoncé": 19219, + "beyond": 22246, + "beyond": 4432, + "bez": 28592, + "bez": 46764, + "bezos": 45000, + "bf": 19858, + "bf": 7990, + "bfc": 37183, + "bff": 11984, + "bffs": 31462, + "bfi": 34244, + "bg": 16674, + "bg": 11295, + "bgc": 47598, + "bgs": 47963, + "bgt": 40665, + "bh": 9930, + "bh": 13603, + "bha": 6144, + "bha": 33068, + "bhafc": 30779, + "bhagat": 49136, + "bhai": 48370, + "bhai": 20508, + "bhak": 34501, + "bham": 31874, + "bham": 23491, + "bhan": 27356, + "bhand": 48679, + "bhar": 9108, + "bharat": 27454, + "bharat": 17430, + "bharti": 46803, + "bhat": 23784, + "bhatt": 36143, + "bhav": 44950, + "bhi": 28943, + "bhi": 21955, + "bhk": 45070, + "bhm": 38741, + "bho": 19721, + "bhopal": 44573, + "bhp": 29776, + "bhs": 29195, + "bhu": 9172, + "bhuban": 38729, + "bhubanes": 41213, + "bhubaneswar": 45888, + "bhushan": 40884, + "bhutan": 32391, + "bhutto": 
30153, + "bi": 717, + "bi": 3035, + "bia": 3841, + "biaf": 26961, + "biafra": 36355, + "bian": 19531, + "bian": 9027, + "bianca": 25854, + "bianchi": 45720, + "bians": 28141, + "bias": 11268, + "biased": 22178, + "bib": 44607, + "bib": 21022, + "bibi": 31182, + "bibl": 20912, + "bible": 26738, + "bible": 7583, + "bibli": 23465, + "biblical": 22841, + "biblio": 49131, + "bic": 5960, + "bic": 10675, + "bice": 35589, + "biceps": 46735, + "bick": 27238, + "bicy": 9247, + "bicycle": 11652, + "bicycles": 31326, + "bid": 21035, + "bid": 5553, + "bidding": 23237, + "bide": 45178, + "biden": 19451, + "bids": 16148, + "bie": 5561, + "bie": 4173, + "bieber": 48725, + "bieber": 7535, + "bien": 19176, + "bien": 25742, + "biennale": 33776, + "biennial": 36609, + "bier": 27226, + "bier": 23508, + "bies": 7867, + "big": 1915, + "big": 1205, + "bigbaldhead": 30325, + "bigbang": 41680, + "bigbang": 23734, + "bigdata": 9440, + "bige": 37762, + "bigfoot": 37095, + "bigg": 15312, + "bigg": 35399, + "biggboss": 27056, + "bigger": 6806, + "biggest": 19483, + "biggest": 3505, + "biggie": 28392, + "biggs": 46507, + "bigh": 18106, + "bighit": 35508, + "bigo": 14278, + "bigolive": 20735, + "bigotry": 37269, + "bigre": 36330, + "bih": 33471, + "bihar": 22849, + "bij": 42478, + "bik": 30306, + "bike": 11686, + "bike": 3701, + "biker": 36100, + "biker": 23449, + "bikers": 29468, + "bikes": 9227, + "bikin": 12638, + "biking": 19157, + "bikini": 14531, + "bil": 3092, + "bil": 20506, + "bilateral": 25599, + "bilbao": 34802, + "bild": 35512, + "bile": 25943, + "bilingual": 29623, + "bilities": 13582, + "bility": 4694, + "bill": 4444, + "bill": 2886, + "billboard": 10856, + "billboards": 34741, + "billed": 37558, + "billi": 7693, + "billie": 23990, + "billing": 31797, + "billings": 43615, + "billion": 14520, + "billion": 5729, + "billionaire": 19475, + "billionaires": 41590, + "billions": 20742, + "bills": 9810, + "billsmafia": 48845, + "billy": 15626, + "billy": 6814, + "bilt": 44770, + "bilt": 26654, + "bim": 46737, + "bim": 24775, + "bin": 4849, + "bin": 5346, + "binance": 43520, + "binary": 23497, + "bind": 44513, + "binder": 30541, + "binding": 21287, + "bine": 34848, + "bing": 24818, + "bing": 5665, + "binge": 22600, + "bingham": 43785, + "bingham": 47296, + "bingo": 18418, + "bino": 29172, + "bino": 24313, + "bins": 26934, + "bint": 43647, + "bio": 2830, + "bio": 5162, + "biode": 43502, + "biodegradable": 47740, + "biodiversity": 17428, + "biof": 45158, + "biographical": 49232, + "biography": 15423, + "biological": 18821, + "biologist": 35149, + "biology": 9796, + "biom": 13010, + "biomar": 44549, + "biomass": 36746, + "biome": 26218, + "biomed": 29280, + "biomedical": 33117, + "bionic": 46201, + "biop": 15009, + "biopic": 27942, + "bios": 48505, + "biotech": 22514, + "biotechnology": 40375, + "biotic": 33773, + "biotics": 41371, + "bious": 31845, + "bipartisan": 32266, + "bipolar": 37097, + "bique": 27809, + "bir": 921, + "bir": 16284, + "birch": 31569, + "birch": 22907, + "bird": 6908, + "bird": 3329, + "birdie": 29612, + "birdies": 45618, + "birding": 15851, + "birdman": 41915, + "birdphotography": 47999, + "birds": 41951, + "birds": 4337, + "birdwatching": 33497, + "birk": 48289, + "birken": 40661, + "birmin": 37482, + "birmingham": 38580, + "birmingham": 7720, + "birth": 1128, + "birth": 5397, + "birthday": 7381, + "birthday": 1166, + "birthdays": 17954, + "birthplace": 31429, + "biryani": 46489, + "bis": 5064, + "bis": 14461, + "biscu": 11532, + "biscuit": 18731, + "biscuits": 18248, + "bisexual": 36829, + 
"bish": 33690, + "bish": 31461, + "bishop": 20625, + "bishop": 8024, + "bishops": 31579, + "bison": 19741, + "bistro": 21770, + "bit": 3010, + "bit": 2010, + "bitcoin": 30848, + "bitcoin": 6366, + "bite": 41613, + "biting": 23016, + "bits": 7747, + "bitt": 39251, + "bius": 45525, + "bix": 46579, + "biz": 8212, + "biz": 5431, + "biza": 47013, + "bizar": 14886, + "bizarre": 16965, + "bizhour": 39462, + "bizitalk": 34929, + "bj": 4592, + "bj": 18229, + "bjj": 27437, + "bjor": 26525, + "bjp": 37264, + "bjp": 6178, + "bk": 15099, + "bk": 14083, + "bkk": 36433, + "bl": 833, + "bl": 9467, + "bla": 2205, + "bla": 19630, + "blac": 21008, + "black": 2025, + "black": 1449, + "blackand": 12809, + "blackandwhite": 23688, + "blackandwhite": 19506, + "blackandwhitephotography": 27544, + "blackberry": 16470, + "blackbird": 38526, + "blackburn": 23789, + "blackfish": 42193, + "blackfriday": 16445, + "blackgirl": 43591, + "blackhawks": 19203, + "blackhistory": 46982, + "blackhistorymonth": 20135, + "blacklist": 30295, + "blacklivesmatter": 23467, + "blackmail": 47295, + "blackops": 43519, + "blackout": 21733, + "blackpanther": 36592, + "blackpink": 20339, + "blackpool": 21031, + "blacks": 16351, + "blackwell": 42642, + "blad": 36635, + "bladder": 33593, + "blade": 10264, + "blades": 16893, + "blah": 29212, + "blaine": 32457, + "blair": 31824, + "blair": 14749, + "blake": 20229, + "blake": 9579, + "blame": 10695, + "blamed": 32906, + "blames": 27841, + "blaming": 29287, + "blan": 4609, + "blanc": 30936, + "blanc": 13301, + "blanca": 40670, + "blanchard": 40177, + "blanche": 34875, + "blanchett": 49378, + "blanco": 26801, + "bland": 44372, + "bland": 30799, + "blank": 15134, + "blanket": 12878, + "blankets": 24042, + "blanks": 48599, + "blasio": 35553, + "blasphe": 36622, + "blast": 46349, + "blast": 5964, + "blasted": 38976, + "blaster": 36341, + "blasting": 26178, + "blasts": 23067, + "blat": 22048, + "blatant": 41391, + "blatt": 39138, + "blau": 45307, + "blaz": 43413, + "blaze": 15497, + "blazer": 17606, + "blazers": 16984, + "blazing": 25267, + "bldg": 22981, + "ble": 1447, + "ble": 1059, + "bleach": 27034, + "bleak": 40355, + "bled": 12006, + "bleed": 23027, + "bleed": 24791, + "bleedblue": 39160, + "bleeding": 20311, + "bleeds": 47339, + "blen": 25651, + "blend": 10780, + "blended": 25813, + "blender": 25066, + "blending": 34307, + "blends": 28572, + "bler": 31305, + "bler": 11979, + "blers": 26930, + "bles": 5763, + "bless": 9640, + "bless": 5387, + "blessed": 4411, + "blessing": 10729, + "blessings": 11185, + "bleu": 30114, + "blew": 18176, + "bley": 43176, + "bli": 1450, + "bli": 28051, + "blin": 9678, + "blin": 5406, + "blind": 17248, + "blind": 8351, + "blinded": 49149, + "blindness": 38812, + "blinds": 32449, + "bling": 39764, + "bling": 7097, + "blink": 18976, + "bliss": 28531, + "bliss": 12893, + "blissful": 42145, + "blit": 39327, + "blitz": 42151, + "blitz": 17548, + "blizz": 13075, + "blizzard": 16111, + "blk": 42950, + "blk": 22872, + "blm": 30957, + "bln": 47348, + "blo": 1204, + "blo": 25505, + "blob": 49312, + "bloc": 30961, + "block": 4638, + "block": 4593, + "blockade": 33489, + "blockbuster": 19939, + "blockchain": 6653, + "blocked": 9106, + "blocker": 44767, + "blocking": 12652, + "blocks": 10113, + "blog": 16376, + "blog": 2589, + "blogg": 33282, + "blogged": 41380, + "blogger": 21352, + "blogger": 7806, + "bloggerrt": 48898, + "bloggers": 11627, + "blogging": 18090, + "blogpost": 41842, + "blogs": 16682, + "bloke": 24384, + "blom": 48996, + "blon": 7958, + "blond": 32426, + 
"blonde": 10711, + "blondes": 45130, + "blondie": 39236, + "bloo": 2373, + "blood": 9231, + "blood": 3590, + "blooded": 41946, + "bloodh": 48480, + "bloods": 39539, + "bloody": 38568, + "bloody": 9468, + "bloom": 7311, + "bloom": 10257, + "bloomberg": 43109, + "bloomberg": 21238, + "bloomfield": 40342, + "blooming": 45175, + "blooming": 19266, + "bloomington": 34731, + "blooms": 21439, + "bloss": 10017, + "blossom": 14472, + "blossoms": 21916, + "blot": 41710, + "blou": 44506, + "blouse": 23525, + "blow": 15230, + "blow": 10211, + "blower": 25832, + "blowing": 12087, + "blown": 11848, + "blowout": 34857, + "blows": 21063, + "blr": 47250, + "bls": 39458, + "blu": 1263, + "blu": 10273, + "blue": 3829, + "blue": 1746, + "bluebells": 47150, + "blueberries": 29551, + "blueberry": 18251, + "bluebird": 40747, + "bluec": 43194, + "bluef": 41174, + "bluegrass": 26241, + "bluejays": 18684, + "blueprint": 30594, + "blues": 17566, + "blues": 5159, + "blueslyrix": 47068, + "bluet": 13469, + "bluetooth": 14052, + "bluewave": 40025, + "bluff": 27232, + "bluffs": 48844, + "blum": 34818, + "blumen": 38714, + "blun": 34472, + "blunt": 19305, + "blur": 12102, + "blur": 27976, + "bluray": 36818, + "blurred": 38013, + "blurry": 21977, + "blush": 22889, + "blvd": 12578, + "bly": 20930, + "bly": 4426, + "bm": 4773, + "bm": 15916, + "bma": 42573, + "bmc": 27807, + "bmi": 40642, + "bmo": 39083, + "bms": 34074, + "bmw": 26637, + "bmw": 7869, + "bmx": 22535, + "bn": 10496, + "bn": 7992, + "bnb": 20010, + "bnha": 49336, + "bnp": 47910, + "bnw": 35903, + "bo": 647, + "bo": 2525, + "boa": 14732, + "boar": 7837, + "boar": 35473, + "board": 10419, + "board": 1972, + "boarded": 43052, + "boarder": 37414, + "boardgame": 47829, + "boardgames": 32646, + "boarding": 10086, + "boardroom": 47937, + "boards": 7963, + "boardwalk": 29043, + "boast": 44467, + "boasts": 30309, + "boat": 12426, + "boat": 4440, + "boath": 45461, + "boating": 21951, + "boats": 10080, + "boatsales": 46244, + "bob": 8444, + "bob": 4423, + "boba": 39948, + "bobb": 16891, + "bobble": 38796, + "bobblehead": 33451, + "bobby": 17847, + "bobby": 7816, + "bobc": 26153, + "bobcat": 37896, + "bobcats": 27568, + "bobo": 38939, + "bobs": 45533, + "boc": 27307, + "boc": 39042, + "boca": 26094, + "bock": 24961, + "bod": 17904, + "bod": 26340, + "boda": 42030, + "bode": 28452, + "bode": 40429, + "bodega": 47350, + "bodied": 36892, + "bodies": 9799, + "bodily": 49119, + "body": 7132, + "body": 1774, + "bodybuilding": 24538, + "bodyguard": 35565, + "boe": 23476, + "boe": 21773, + "boeh": 38002, + "boehner": 44599, + "boeing": 48135, + "boeing": 11857, + "boer": 44889, + "boer": 40768, + "bog": 23426, + "bog": 28318, + "bogo": 35769, + "bogota": 47059, + "bogus": 42907, + "boh": 43238, + "bohe": 40541, + "bohemi": 21552, + "bohemian": 25753, + "boho": 25444, + "boi": 37129, + "boi": 12673, + "boil": 31332, + "boiled": 23886, + "boiler": 28212, + "boiler": 25615, + "boiling": 32019, + "bois": 47742, + "bois": 21640, + "boise": 23304, + "bok": 26671, + "bok": 15289, + "boko": 30929, + "boks": 40216, + "bol": 2860, + "bol": 8413, + "bola": 12840, + "bold": 26975, + "bold": 8911, + "boldand": 48413, + "boldly": 44778, + "boli": 12722, + "bolic": 27343, + "bolivia": 28628, + "bollah": 36336, + "bolly": 25302, + "bollywood": 32448, + "bollywood": 9604, + "bolo": 40236, + "bolog": 22818, + "bologna": 27513, + "bolster": 47304, + "bolt": 13131, + "bolton": 48757, + "bolton": 16598, + "bolts": 26028, + "bom": 3012, + "bom": 19469, + "bomb": 18091, + "bomb": 6331, + "bombar": 
25544, + "bombardier": 42700, + "bombay": 48602, + "bombay": 23890, + "bombed": 24542, + "bomber": 15436, + "bombers": 21786, + "bombing": 14475, + "bombings": 43236, + "bombs": 14410, + "bombshell": 36340, + "bon": 1871, + "bon": 4216, + "bona": 33342, + "bonanza": 40304, + "bond": 37022, + "bond": 6826, + "bonded": 37390, + "bondi": 40092, + "bonding": 19609, + "bonds": 15786, + "bone": 22502, + "bone": 6195, + "bones": 9476, + "bonfire": 23151, + "bongo": 47519, + "boni": 32269, + "boni": 46356, + "bonita": 42896, + "bonjour": 33176, + "bonkers": 39865, + "bonn": 38969, + "bonnar": 47191, + "bonnaroo": 48777, + "bonne": 25844, + "bonnet": 30636, + "bonnie": 18555, + "bono": 24476, + "bons": 42883, + "bonsai": 44129, + "bonus": 8164, + "bonuses": 35144, + "boo": 824, + "boo": 7317, + "boogie": 22639, + "book": 2828, + "book": 1116, + "bookboost": 31257, + "bookclub": 34438, + "bookday": 26327, + "booked": 12584, + "booker": 21302, + "bookfest": 39381, + "booking": 10145, + "bookings": 18345, + "booklet": 27405, + "bookmark": 33596, + "bookof": 45629, + "bookreview": 27362, + "books": 44382, + "books": 2161, + "bookshelf": 34821, + "bookshop": 24705, + "bookstore": 17999, + "bookstores": 46416, + "bookworm": 20743, + "boom": 9609, + "boom": 7121, + "boomer": 33819, + "boomer": 31766, + "boomers": 37988, + "booming": 33487, + "boon": 24979, + "boon": 35821, + "boone": 23453, + "boop": 45047, + "boost": 44639, + "boost": 6260, + "boosted": 37631, + "booster": 20877, + "boosters": 46859, + "boosting": 28480, + "boosts": 29247, + "boot": 10843, + "boot": 8087, + "bootcamp": 22051, + "booted": 42564, + "booth": 47895, + "booth": 3971, + "booths": 32653, + "booties": 46188, + "bootleg": 38139, + "boots": 7319, + "booze": 24341, + "bop": 19720, + "bor": 1141, + "bor": 15093, + "bora": 24736, + "bord": 36891, + "bordeaux": 22009, + "border": 16304, + "border": 6177, + "borderlands": 38676, + "borders": 13900, + "bore": 14084, + "bore": 24638, + "bored": 8933, + "boredom": 31460, + "boretum": 38902, + "borg": 14770, + "borgh": 17180, + "boring": 12519, + "boris": 31212, + "boris": 15704, + "borisjohnson": 44481, + "born": 17695, + "born": 2683, + "borne": 42910, + "borne": 9328, + "borneo": 33332, + "bornon": 41811, + "bornonthisday": 42757, + "boro": 26796, + "boro": 7974, + "borough": 22761, + "borough": 6203, + "borrow": 22293, + "borrowed": 28224, + "borrowing": 41045, + "borussia": 36764, + "bos": 14885, + "bos": 9644, + "bosa": 46946, + "bosch": 42009, + "bosch": 19466, + "bosco": 36960, + "bose": 23142, + "bosh": 42244, + "bosni": 42924, + "bosnia": 31396, + "boss": 17935, + "boss": 4206, + "bosses": 23906, + "boston": 11540, + "boston": 4399, + "bostonmarathon": 44533, + "bot": 4136, + "bot": 6947, + "botan": 12554, + "botanic": 32560, + "botanical": 21026, + "botany": 22612, + "botd": 34451, + "both": 36575, + "both": 2212, + "bother": 21125, + "bothered": 27997, + "botox": 43449, + "bots": 13721, + "botswana": 27584, + "bott": 3520, + "bott": 37225, + "bottle": 37306, + "bottle": 5392, + "bottled": 29331, + "bottlen": 46439, + "bottles": 9754, + "bottling": 42006, + "bottom": 32314, + "bottom": 5931, + "bottoms": 31524, + "bou": 3728, + "bou": 23165, + "bouchard": 47930, + "boudo": 48827, + "bought": 4142, + "boul": 24830, + "boulder": 18260, + "boule": 17652, + "boulevard": 19504, + "boun": 5993, + "bounce": 14316, + "bouncing": 32060, + "bouncy": 43415, + "bound": 15140, + "bound": 4567, + "boundaries": 18690, + "boundary": 21344, + "bounds": 37469, + "bounty": 21142, + "bouquet": 
20961, + "bour": 2934, + "bour": 35486, + "bourbon": 48118, + "bourbon": 14652, + "bourdain": 48095, + "bourg": 20690, + "bourgeo": 45672, + "bourn": 39143, + "bourne": 13789, + "bourne": 5192, + "bournemouth": 20911, + "bout": 19982, + "bout": 8123, + "bouti": 10926, + "boutique": 12179, + "bow": 2297, + "bow": 4040, + "bowden": 48538, + "bowed": 49130, + "bowel": 36880, + "bowen": 25368, + "bower": 40414, + "bowers": 42238, + "bowie": 13036, + "bowing": 46398, + "bowl": 26719, + "bowl": 3814, + "bowled": 39987, + "bowler": 25528, + "bowlers": 42632, + "bowles": 41611, + "bowling": 10390, + "bowls": 17787, + "bowman": 22052, + "bows": 17000, + "bowser": 38234, + "bowski": 48311, + "box": 2774, + "box": 2063, + "boxed": 24190, + "boxer": 40394, + "boxer": 15363, + "boxers": 31019, + "boxes": 8350, + "boxing": 33669, + "boxing": 5554, + "boy": 2927, + "boy": 1876, + "boyband": 31568, + "boyce": 44480, + "boycot": 46208, + "boycott": 31615, + "boycott": 19559, + "boyd": 18295, + "boyfriend": 7328, + "boyfriends": 36541, + "boyle": 22802, + "boys": 25223, + "boys": 2034, + "boyz": 16152, + "bp": 23410, + "bp": 11558, + "bpa": 43855, + "bpd": 48587, + "bpl": 28901, + "bpm": 40338, + "bps": 37794, + "br": 711, + "br": 7532, + "bra": 1195, + "bra": 5860, + "brac": 6663, + "brace": 8376, + "brace": 9183, + "bracelet": 8969, + "bracelets": 20027, + "braces": 19249, + "brack": 25676, + "bracket": 14780, + "brackets": 36183, + "brad": 4848, + "brad": 9405, + "bradbury": 45097, + "braden": 46842, + "bradford": 15062, + "bradley": 31905, + "bradley": 10952, + "brador": 24062, + "bradshaw": 37556, + "brady": 42494, + "brady": 11117, + "brae": 42874, + "brae": 40040, + "brag": 30110, + "bragg": 38545, + "bragging": 38199, + "brah": 20276, + "brahms": 45114, + "brai": 25048, + "braid": 31067, + "braided": 39997, + "braids": 34221, + "brain": 9454, + "brain": 4812, + "brains": 17129, + "brainstorming": 36607, + "braised": 28363, + "brake": 14937, + "brakes": 23456, + "bral": 31309, + "bram": 14815, + "bram": 39456, + "brampton": 35124, + "bran": 3684, + "bran": 28348, + "brance": 36072, + "brance": 15413, + "branch": 7998, + "branches": 15843, + "brand": 3910, + "brand": 2896, + "branded": 18097, + "brandi": 41003, + "branding": 10841, + "brando": 41892, + "brandon": 20423, + "brandon": 9166, + "brands": 8681, + "brandt": 22552, + "brandy": 26232, + "brane": 32340, + "branson": 28280, + "brant": 28951, + "brant": 47592, + "braries": 46377, + "brary": 24520, + "bras": 22611, + "brasil": 18991, + "brass": 24348, + "brass": 11655, + "brat": 26717, + "brat": 26631, + "brate": 41864, + "braun": 39129, + "braun": 29309, + "brave": 25461, + "brave": 7769, + "braved": 47663, + "bravely": 42303, + "bravery": 25831, + "braves": 14422, + "braving": 43258, + "bravo": 38613, + "bravo": 13006, + "braw": 37871, + "brawl": 26066, + "braxton": 37451, + "bray": 26256, + "bray": 22993, + "braz": 4625, + "brazil": 47459, + "brazil": 6305, + "brazili": 45697, + "brazilian": 12111, + "brb": 25316, + "brc": 40393, + "bre": 887, + "bre": 7782, + "brea": 7318, + "brea": 46538, + "breach": 21363, + "breaches": 45173, + "bread": 18886, + "bread": 5066, + "breads": 43064, + "break": 2206, + "break": 2568, + "breakable": 30691, + "breakaway": 42732, + "breakdown": 14519, + "breaker": 14814, + "breakers": 22270, + "breakfa": 45931, + "breakfast": 30210, + "breakfast": 3290, + "breaking": 14698, + "breaking": 2755, + "breakingbad": 38032, + "breakingnews": 23837, + "breakout": 16752, + "breaks": 7263, + "breakthrough": 18802, + 
"breakup": 38931, + "breast": 12930, + "breast": 9475, + "breastcancer": 40813, + "breastcancer": 30065, + "breastfeeding": 29033, + "breasts": 37637, + "breath": 9508, + "breath": 9576, + "breathe": 11364, + "breathing": 14959, + "breathtaking": 14709, + "brecht": 34622, + "breck": 44598, + "bred": 46929, + "bred": 16008, + "bree": 7892, + "bree": 37138, + "breed": 28030, + "breed": 13791, + "breeders": 37472, + "breeding": 16544, + "breeds": 29021, + "breen": 48013, + "brees": 46721, + "breeze": 13125, + "breezy": 21451, + "breit": 23864, + "breitbart": 37926, + "brek": 35494, + "bremen": 39861, + "bren": 5209, + "brenda": 23786, + "brendan": 35134, + "brendan": 15414, + "brendon": 36756, + "brennan": 22372, + "brenner": 42941, + "brent": 31439, + "brent": 16355, + "brentwood": 33108, + "brero": 47781, + "bres": 32561, + "bret": 38020, + "bret": 32548, + "brethren": 43134, + "breton": 32290, + "brett": 22591, + "brett": 12394, + "brev": 42882, + "brevi": 39475, + "brew": 5048, + "brew": 7253, + "brewco": 33582, + "brewed": 23238, + "brewer": 20756, + "breweries": 35277, + "brewers": 17618, + "brewery": 8850, + "brewing": 8275, + "brewingco": 45155, + "brews": 21663, + "brewster": 40274, + "brex": 22726, + "brexit": 27666, + "brexit": 5801, + "brgy": 35983, + "bri": 1036, + "bri": 18636, + "bria": 35890, + "brian": 9824, + "brian": 4989, + "brianna": 32308, + "briar": 46119, + "bribe": 40042, + "bribery": 41792, + "bric": 27055, + "brice": 40190, + "brick": 13937, + "brick": 9518, + "bricks": 21029, + "brics": 48196, + "brid": 16995, + "bridal": 36875, + "bridal": 14284, + "bride": 18342, + "bride": 8964, + "brides": 18067, + "bridesma": 28356, + "bridesmaid": 43399, + "bridesmaids": 47754, + "bridg": 20623, + "bridge": 8647, + "bridge": 2465, + "bridgeport": 45201, + "bridges": 11811, + "bridget": 27073, + "bridgewater": 38732, + "bridging": 38109, + "brie": 26622, + "brief": 9435, + "brief": 8954, + "briefed": 47326, + "briefing": 12991, + "briefly": 26980, + "briefs": 29557, + "brien": 13504, + "brier": 43995, + "brig": 11081, + "briga": 46448, + "brigade": 16032, + "briggs": 28108, + "brigh": 6710, + "bright": 10383, + "bright": 4852, + "brighten": 18208, + "brightening": 43929, + "brighter": 18507, + "brightest": 26159, + "brightly": 36298, + "brightness": 42280, + "brighton": 28416, + "brighton": 9470, + "brigitte": 44421, + "brill": 27342, + "brill": 28601, + "brilli": 3821, + "brilliance": 28146, + "brilliant": 4106, + "brilliantly": 26803, + "brin": 25620, + "bring": 11596, + "bring": 2430, + "bringback": 28969, + "bringbackour": 45403, + "bringing": 4777, + "brings": 5138, + "brink": 39296, + "brink": 28796, + "brioche": 45818, + "bris": 9385, + "bris": 15783, + "brisban": 30431, + "brisbane": 42932, + "brisbane": 12407, + "brisk": 43646, + "brisket": 31920, + "bristol": 18159, + "bristol": 8010, + "brit": 2318, + "brit": 20066, + "britain": 40802, + "britain": 6272, + "britanni": 31373, + "britannia": 36188, + "brite": 33827, + "briti": 8155, + "british": 8651, + "british": 3504, + "britishmuseum": 41858, + "britney": 37192, + "britney": 21853, + "britneyspears": 42990, + "brits": 21832, + "britt": 10811, + "britt": 25976, + "brittany": 38187, + "brittany": 18818, + "britton": 37422, + "brium": 46079, + "brixton": 30056, + "bro": 927, + "bro": 4410, + "broad": 3491, + "broad": 12623, + "broadband": 21050, + "broadcast": 8967, + "broadcaster": 29005, + "broadcasting": 14403, + "broadcasts": 46742, + "broader": 36029, + "broadway": 34599, + "broadway": 9092, + "broc": 15587, + 
"broccoli": 19094, + "broch": 21419, + "brochure": 25275, + "brock": 14841, + "brock": 16745, + "brodie": 42150, + "brody": 29608, + "broke": 42165, + "broke": 6509, + "broken": 26126, + "broken": 5107, + "broker": 34032, + "broker": 20449, + "brokerage": 41327, + "brokers": 28271, + "brom": 18972, + "brom": 33296, + "bromance": 35353, + "bromley": 35715, + "bron": 4011, + "bron": 10243, + "bronco": 43488, + "bronco": 34370, + "broncos": 12516, + "bronson": 37042, + "bronte": 48936, + "bronx": 48310, + "bronx": 17183, + "brony": 21084, + "bronze": 8459, + "broo": 5204, + "brooch": 21207, + "brook": 4782, + "brook": 7322, + "brooke": 28576, + "brooke": 12549, + "brookes": 39707, + "brooklyn": 23253, + "brooklyn": 6983, + "brooks": 42779, + "brooks": 9991, + "broom": 32046, + "broom": 28008, + "broome": 49335, + "bros": 7776, + "broth": 29994, + "brotha": 33974, + "brother": 12697, + "brother": 3157, + "brotherhood": 19059, + "brothers": 4548, + "brou": 27874, + "brough": 21033, + "brought": 4222, + "brov": 42881, + "brow": 6547, + "brow": 15895, + "broward": 34719, + "brown": 6315, + "brown": 2866, + "browne": 28440, + "brownie": 23045, + "brownies": 22312, + "browning": 32241, + "browns": 14051, + "brows": 14998, + "browse": 19060, + "browser": 19768, + "browsing": 29318, + "brox": 43539, + "brs": 47485, + "brt": 46936, + "bru": 1698, + "bru": 31028, + "bruce": 21223, + "bruce": 7085, + "bruh": 17575, + "bruins": 14736, + "bruise": 48048, + "bruised": 46502, + "brum": 23862, + "brum": 28078, + "brun": 6870, + "brunch": 9113, + "brune": 29057, + "brunei": 41898, + "brunette": 35528, + "bruno": 14568, + "brunomars": 41156, + "brunswick": 24012, + "brush": 27969, + "brush": 8594, + "brushed": 30298, + "brushes": 21550, + "brushing": 35072, + "brussels": 11020, + "brut": 39499, + "brutal": 42144, + "brutal": 14556, + "brutality": 31348, + "brutally": 28132, + "brute": 47552, + "brux": 49093, + "bry": 6587, + "bry": 28228, + "bryan": 16134, + "bryan": 10412, + "bryant": 12256, + "bryce": 19895, + "bryn": 36569, + "bryn": 42877, + "bryson": 38990, + "bs": 11783, + "bs": 1329, + "bsa": 46619, + "bsb": 23070, + "bsbi": 41728, + "bsbibotany": 42086, + "bsc": 32031, + "bsd": 41848, + "bse": 46341, + "bsf": 48314, + "bsgo": 48474, + "bsp": 47977, + "bst": 19698, + "bsu": 46385, + "bt": 3317, + "bt": 4205, + "btc": 10315, + "btcc": 30759, + "btn": 44681, + "bto": 35516, + "btob": 29379, + "btr": 39767, + "bts": 15154, + "bts": 4007, + "btsarmy": 30302, + "btsbbmas": 35297, + "btsx": 44971, + "btv": 38541, + "btw": 9520, + "btwn": 28284, + "bu": 609, + "bu": 5831, + "bub": 27704, + "bub": 33158, + "bubb": 9739, + "bubba": 28149, + "bubble": 28687, + "bubble": 10799, + "bubblegum": 48078, + "bubbles": 17648, + "bubbly": 31034, + "buc": 8207, + "buccane": 32830, + "buccaneers": 38058, + "buch": 22623, + "bucha": 43582, + "buchan": 27237, + "buchanan": 28975, + "bucharest": 37013, + "buck": 6061, + "buck": 11433, + "bucket": 22596, + "bucket": 10498, + "bucketlist": 30778, + "buckets": 27168, + "buckeye": 34549, + "buckeyes": 30741, + "buckingham": 28736, + "buckle": 21948, + "buckley": 25905, + "bucks": 6103, + "bucky": 35916, + "bucs": 20011, + "bud": 2942, + "bud": 10737, + "buda": 18520, + "buda": 49012, + "budapest": 19202, + "budd": 7296, + "buddha": 13981, + "buddhism": 23744, + "buddhist": 18697, + "buddies": 14543, + "budding": 31992, + "buddy": 40948, + "buddy": 6557, + "budge": 32005, + "budget": 46758, + "budget": 5639, + "budgeting": 43789, + "budgets": 36419, + "buds": 14665, + "budweiser": 
40900, + "buen": 15640, + "buena": 30876, + "buenas": 48529, + "bueno": 46202, + "buenos": 26055, + "buf": 44417, + "buff": 5456, + "buff": 21416, + "buffal": 25836, + "buffalo": 31231, + "buffalo": 8054, + "buffalob": 38831, + "buffalobills": 44352, + "buffe": 13724, + "buffer": 33050, + "buffet": 17829, + "buffett": 34081, + "buffs": 28906, + "buffy": 33356, + "bug": 14453, + "bug": 8162, + "bugatti": 35451, + "buggy": 28963, + "bugs": 13850, + "buh": 31406, + "buhari": 14661, + "buick": 22000, + "buil": 1354, + "build": 22739, + "build": 3289, + "builder": 14474, + "builders": 17694, + "building": 21206, + "building": 2307, + "buildings": 8866, + "builds": 16449, + "buildthe": 41497, + "built": 45824, + "built": 3874, + "buk": 28084, + "buk": 24317, + "buka": 47778, + "bukit": 39888, + "bul": 2572, + "bul": 10200, + "bula": 18726, + "bulaga": 41575, + "bular": 32187, + "bulb": 22373, + "bulbs": 24808, + "bulgar": 15424, + "bulgaria": 20295, + "bulgarian": 38693, + "bulge": 47603, + "bulk": 19643, + "bull": 4537, + "bull": 6029, + "bulldo": 37675, + "bulldog": 34828, + "bulldog": 15611, + "bulldogs": 13916, + "bullet": 14340, + "bullet": 12465, + "bulletin": 19638, + "bulletproof": 43212, + "bullets": 22117, + "bullied": 34689, + "bullies": 39050, + "bullion": 49114, + "bullish": 22142, + "bullock": 33198, + "bullpen": 38081, + "bulls": 10313, + "bully": 43111, + "bully": 20190, + "bullying": 13548, + "bum": 27683, + "bum": 14226, + "bumble": 25585, + "bumble": 39303, + "bumblebee": 36911, + "bummed": 48456, + "bump": 9783, + "bump": 15877, + "bumped": 22495, + "bumper": 17881, + "bumping": 40196, + "bumps": 21115, + "bun": 2591, + "bun": 13665, + "bunch": 7796, + "bund": 41905, + "bunde": 18841, + "bundesliga": 21582, + "bundle": 11793, + "bundled": 47228, + "bundles": 29834, + "bundy": 37332, + "bung": 44748, + "bungal": 29549, + "bungalow": 33696, + "bunk": 41236, + "bunker": 23615, + "bunnies": 28998, + "bunny": 34198, + "bunny": 9258, + "buns": 22235, + "bunting": 30695, + "buon": 31350, + "buon": 48498, + "bur": 1039, + "bur": 17362, + "burbank": 34862, + "burberry": 30412, + "burch": 44588, + "burden": 18687, + "bure": 11902, + "bureau": 32098, + "bureau": 15400, + "burg": 19505, + "burg": 3499, + "burge": 20522, + "burger": 22356, + "burger": 6548, + "burgers": 13007, + "burgess": 26211, + "burgh": 18141, + "burgh": 4965, + "burgl": 25554, + "burglar": 43365, + "burglary": 32573, + "burgring": 40823, + "burgundy": 23650, + "buri": 46348, + "buri": 42614, + "burial": 22012, + "buried": 14233, + "burk": 48822, + "burke": 15340, + "burle": 27891, + "burlesque": 33732, + "burlington": 23370, + "burma": 30305, + "burmese": 47906, + "burn": 7934, + "burn": 4285, + "burnaby": 47541, + "burne": 27246, + "burned": 15022, + "burner": 23243, + "burnett": 28558, + "burnham": 36111, + "burning": 46107, + "burning": 8405, + "burnley": 24653, + "burnout": 36078, + "burns": 10234, + "burnt": 15185, + "burr": 30879, + "burrell": 49045, + "burrito": 23473, + "burritos": 47245, + "burroughs": 41337, + "burrows": 44846, + "burst": 13005, + "bursting": 32566, + "bursts": 37026, + "burt": 27162, + "burton": 42354, + "burton": 12704, + "burundi": 33595, + "bury": 12276, + "bury": 3899, + "burys": 32362, + "bus": 1319, + "bus": 2840, + "busan": 40172, + "busc": 35000, + "busch": 20475, + "buses": 12879, + "bush": 11191, + "bush": 6867, + "bushes": 37578, + "busiest": 32764, + "busine": 4598, + "busines": 25364, + "business": 8346, + "business": 1716, + "businesses": 7287, + "businessman": 25635, + 
"buss": 47764, + "bust": 31299, + "bust": 9959, + "busted": 18643, + "buster": 37219, + "buster": 12094, + "busters": 16362, + "busting": 29622, + "busy": 39332, + "busy": 4354, + "but": 2201, + "but": 767, + "butch": 35102, + "butcher": 18732, + "butchers": 42334, + "bute": 39240, + "butes": 14630, + "butler": 35867, + "butler": 10702, + "butt": 12500, + "butt": 31523, + "butte": 31678, + "butter": 5427, + "butter": 6952, + "butterflies": 16232, + "butterfly": 9738, + "buttermilk": 40180, + "butternut": 36867, + "buttery": 45535, + "button": 45480, + "button": 8007, + "buttons": 16188, + "butts": 25309, + "buu": 42313, + "buuren": 47752, + "buxton": 41370, + "buy": 11632, + "buy": 2131, + "buyer": 14682, + "buyers": 14663, + "buying": 6566, + "buys": 15560, + "buzz": 7866, + "buzz": 8706, + "buzzard": 47434, + "buzzer": 38064, + "buzzfeed": 26613, + "buzzing": 18511, + "bv": 18958, + "bv": 35861, + "bvb": 22454, + "bw": 17672, + "bw": 15120, + "bway": 26652, + "bwfc": 40918, + "bwo": 45902, + "bx": 33633, + "by": 1713, + "by": 638, + "bye": 20076, + "bye": 4460, + "byes": 47958, + "byl": 34994, + "byn": 46917, + "byn": 11890, + "byo": 28039, + "bypass": 26530, + "byr": 15534, + "byrd": 30369, + "byrne": 19676, + "byron": 43504, + "byron": 19775, + "bys": 26740, + "bystand": 46138, + "byte": 42798, + "bytes": 39538, + "bythe": 36621, + "byu": 41072, + "byu": 23770, + "byz": 35406, + "byzantine": 44081, + "bz": 13631, + "bé": 40365, + "bü": 38706, + "c": 66, + "c": 322, + "ca": 772, + "ca": 1684, + "caa": 19316, + "cab": 3033, + "cab": 11912, + "cabaret": 26263, + "cabbage": 18407, + "cabe": 32731, + "cabello": 34371, + "caber": 29062, + "cabernet": 33730, + "cabin": 14178, + "cabine": 23354, + "cabinet": 9937, + "cabinets": 33083, + "cabins": 48455, + "cable": 7925, + "cables": 22408, + "cabo": 37318, + "cabo": 28370, + "cabrera": 42338, + "cabs": 42048, + "cac": 8298, + "cac": 23872, + "cacao": 38022, + "cache": 28993, + "caching": 40655, + "cactus": 19794, + "cad": 6297, + "cad": 20166, + "caday": 34187, + "cadbury": 44698, + "caddy": 41521, + "cade": 10497, + "cade": 17306, + "cadet": 22764, + "cadets": 19160, + "cadillac": 18156, + "cae": 49264, + "caer": 28298, + "caes": 15740, + "caesar": 21642, + "caesars": 42162, + "caf": 3471, + "caf": 20867, + "cafc": 30748, + "cafe": 15201, + "cafe": 4979, + "cafes": 40166, + "cafeteria": 32817, + "caffe": 18258, + "caffe": 45416, + "caffeine": 22487, + "café": 15304, + "cag": 15714, + "cage": 11838, + "cages": 37939, + "cah": 40519, + "cahill": 33185, + "cai": 38971, + "cai": 36116, + "cain": 13747, + "caine": 16799, + "cair": 15804, + "cair": 46659, + "cairn": 31264, + "cairn": 42467, + "cairngor": 44067, + "cairns": 32941, + "cairo": 19615, + "cait": 14116, + "caitlin": 47768, + "caitlin": 26809, + "caitlyn": 35763, + "cajun": 43425, + "cajun": 33044, + "cak": 42986, + "cake": 15295, + "cake": 2972, + "cakeday": 46207, + "cakes": 5950, + "cal": 1198, + "cal": 6372, + "cala": 32133, + "calab": 31795, + "calais": 39886, + "calam": 28841, + "calc": 45055, + "calci": 22824, + "calcium": 27815, + "calcu": 15328, + "calcul": 15734, + "calculate": 37656, + "calculated": 40688, + "calculations": 44605, + "calculator": 26093, + "calculus": 35104, + "calcutta": 42901, + "calder": 29372, + "calder": 36817, + "caldwell": 30484, + "cale": 32674, + "caleb": 19619, + "caled": 28421, + "calend": 6057, + "calendar": 7122, + "calendars": 17229, + "calf": 17508, + "calgary": 27415, + "calgary": 10797, + "calhoun": 38929, + "cali": 2857, + "cali": 16337, + 
"caliber": 32820, + "calibr": 32597, + "calico": 45379, + "calif": 30839, + "califor": 3526, + "californi": 21303, + "california": 3729, + "call": 7950, + "call": 1620, + "calla": 20658, + "callahan": 43313, + "callaway": 42596, + "callback": 44764, + "calle": 47699, + "calle": 38144, + "called": 2726, + "caller": 30666, + "calli": 16338, + "callie": 36512, + "calligraphy": 27775, + "calling": 4597, + "callister": 49026, + "callme": 42449, + "callof": 41280, + "calls": 4572, + "callum": 23224, + "calm": 34990, + "calm": 7011, + "calming": 30690, + "calorie": 32679, + "calories": 18029, + "cals": 47714, + "calum": 16405, + "calvary": 40169, + "calvert": 47134, + "calves": 31857, + "calvin": 27642, + "calvin": 17345, + "caly": 10244, + "calyp": 29851, + "cam": 1004, + "cam": 5982, + "camar": 31991, + "camber": 44362, + "cambo": 14662, + "cambodia": 17347, + "cambridge": 24651, + "cambridge": 9334, + "cambridgeshire": 46139, + "camden": 38735, + "camden": 17984, + "came": 1986, + "camel": 27005, + "camel": 21914, + "camels": 41357, + "cameo": 19492, + "camer": 4961, + "camera": 3934, + "cameraman": 43347, + "cameras": 12172, + "camero": 20320, + "cameron": 19634, + "cameron": 8057, + "camerondallas": 40587, + "cameroon": 24061, + "camil": 37745, + "camila": 19919, + "camilla": 38897, + "camille": 26741, + "camino": 28529, + "camo": 28702, + "camo": 19716, + "camogie": 39547, + "camou": 23588, + "camoufla": 23667, + "camouflage": 29049, + "camp": 2854, + "camp": 2877, + "campa": 2793, + "campaig": 9448, + "campaign": 44524, + "campaign": 3193, + "campaigner": 46364, + "campaigners": 40272, + "campaigning": 19594, + "campaigns": 15669, + "campan": 31765, + "campbell": 29094, + "campbell": 8806, + "campe": 16672, + "campeon": 49109, + "campeones": 30105, + "camper": 41914, + "camper": 24522, + "campers": 26619, + "campfire": 32530, + "campground": 46969, + "camping": 9982, + "campo": 27600, + "campos": 48077, + "camps": 12806, + "campsite": 44243, + "campu": 19687, + "campus": 4560, + "campuses": 31895, + "camra": 46155, + "camry": 46472, + "cams": 32590, + "can": 950, + "can": 753, + "cana": 28341, + "canad": 13193, + "canada": 2698, + "canadaday": 39800, + "canadi": 4329, + "canadian": 22160, + "canadian": 5255, + "canadians": 18989, + "canadiens": 40932, + "canal": 28585, + "canal": 9535, + "canals": 38483, + "canaria": 47117, + "canary": 40409, + "canary": 24523, + "canberra": 16719, + "canc": 43189, + "cancel": 12026, + "cancel": 21546, + "canceled": 25874, + "cancell": 28027, + "cancellation": 38765, + "cancelled": 13270, + "cancels": 34089, + "cancer": 12690, + "cancer": 3148, + "cancers": 33201, + "cancun": 34721, + "cand": 4986, + "candace": 45623, + "candel": 47834, + "candi": 6034, + "candice": 30024, + "candid": 7884, + "candid": 19206, + "candidacy": 46248, + "candidate": 6475, + "candidates": 8619, + "candied": 43982, + "candies": 46305, + "candle": 18995, + "candle": 12674, + "candlelight": 34724, + "candles": 15472, + "candy": 20741, + "candy": 6417, + "cane": 23644, + "cane": 14716, + "canelo": 43210, + "canes": 21902, + "cani": 35592, + "canine": 27380, + "cann": 4139, + "cann": 23709, + "cannab": 7577, + "cannabis": 31837, + "cannabis": 8861, + "canne": 44252, + "canned": 27290, + "cannes": 13773, + "canni": 26389, + "canning": 38621, + "cannon": 28771, + "cannon": 15661, + "cannons": 46269, + "cannot": 4785, + "canny": 26986, + "cano": 31668, + "cano": 25937, + "canoe": 23503, + "canola": 40389, + "canon": 17749, + "canon": 9310, + "canopy": 26061, + "cans": 13707, + "cant": 
13395, + "cant": 5784, + "canteen": 39230, + "canter": 19301, + "canterbury": 22271, + "canti": 42845, + "cantina": 47472, + "canton": 37735, + "canton": 25363, + "cantore": 41769, + "cantwait": 33760, + "canu": 20171, + "canucks": 24321, + "canv": 30714, + "canvas": 22441, + "canvas": 7483, + "canvass": 40054, + "canvassing": 33783, + "cany": 47674, + "canyon": 41246, + "canyon": 9755, + "cao": 29207, + "cap": 1289, + "cap": 3938, + "capabilities": 19512, + "capability": 25885, + "capable": 14742, + "capac": 24665, + "capacity": 8970, + "capcom": 28342, + "cape": 10288, + "cape": 6631, + "capecod": 41339, + "capes": 38785, + "capetown": 20059, + "capit": 6889, + "capita": 41833, + "capital": 11198, + "capital": 5439, + "capitalism": 20068, + "capitalist": 37015, + "capitals": 29579, + "capitol": 43880, + "capitol": 11375, + "capo": 45477, + "capp": 16718, + "capped": 24659, + "capping": 42656, + "cappuccino": 37402, + "capri": 48699, + "capri": 30982, + "capric": 28667, + "capricorn": 46314, + "caps": 23185, + "capsu": 15608, + "capsul": 40341, + "capsule": 20627, + "capsules": 32870, + "capt": 45815, + "capt": 17369, + "captain": 14958, + "captain": 4621, + "captainamerica": 46229, + "captainmarvel": 48492, + "captains": 18706, + "caption": 11327, + "captions": 41878, + "captiv": 19776, + "captivating": 30580, + "captive": 29038, + "captivity": 41141, + "capture": 8818, + "captured": 8020, + "captures": 15305, + "capturing": 19548, + "capu": 44241, + "car": 811, + "car": 1615, + "cara": 20016, + "carab": 32251, + "carac": 30029, + "caracas": 45854, + "caramel": 14788, + "carameli": 41739, + "caramelized": 43854, + "carat": 32981, + "carav": 13814, + "caravan": 18566, + "carb": 21379, + "carbo": 43235, + "carbon": 14038, + "carbon": 7549, + "carbs": 29313, + "carcin": 31587, + "carcinoma": 46810, + "card": 10793, + "card": 2601, + "cardam": 49008, + "cardboard": 19845, + "cardi": 6211, + "cardi": 29677, + "cardiac": 21256, + "cardiff": 22488, + "cardiff": 9781, + "cardigan": 30501, + "cardin": 8457, + "cardinal": 46310, + "cardinal": 16472, + "cardinals": 12837, + "cardio": 15003, + "cardio": 23455, + "cardiology": 37276, + "cardiovascular": 29291, + "cardo": 40625, + "cards": 4094, + "care": 2050, + "care": 1776, + "cared": 27675, + "career": 20609, + "career": 3061, + "careers": 10090, + "careful": 11999, + "carefully": 15789, + "caregi": 22042, + "caregiver": 46372, + "caregivers": 35909, + "careless": 47325, + "carers": 26484, + "cares": 10968, + "caretaker": 48037, + "carey": 14895, + "cargo": 12490, + "cari": 18497, + "cari": 37273, + "carib": 9757, + "caribbean": 10368, + "caribou": 42135, + "caric": 25337, + "caricature": 38857, + "carina": 44357, + "caring": 13083, + "carl": 8273, + "carl": 9482, + "carla": 25552, + "carleton": 46496, + "carlin": 47559, + "carlisle": 23276, + "carlo": 17861, + "carlo": 15266, + "carlos": 9538, + "carlow": 44745, + "carls": 39635, + "carlson": 24114, + "carlton": 18934, + "carly": 23166, + "carly": 22689, + "carlyle": 46555, + "carmel": 30757, + "carmel": 25601, + "carmen": 41427, + "carmen": 18834, + "carmichael": 41657, + "carn": 21597, + "carnage": 31385, + "carnation": 44577, + "carnaval": 47238, + "carne": 17053, + "carne": 42885, + "carnegie": 25287, + "carney": 34194, + "carni": 8438, + "carnival": 36708, + "carnival": 10577, + "caro": 30317, + "caro": 29344, + "carol": 4242, + "carol": 11489, + "carole": 31955, + "carolin": 26418, + "carolina": 7027, + "caroline": 31064, + "caroline": 12641, + "carols": 33269, + "carolyn": 25825, + 
"carou": 32224, + "carousel": 36665, + "carp": 26085, + "carpen": 15584, + "carpenter": 18475, + "carpet": 6922, + "carpets": 34612, + "carr": 26951, + "carr": 17136, + "carra": 32332, + "carre": 31114, + "carrera": 32952, + "carri": 4739, + "carriage": 47885, + "carriage": 21087, + "carrick": 44052, + "carrie": 30334, + "carrie": 15848, + "carried": 12960, + "carrier": 12308, + "carriers": 26865, + "carries": 17982, + "carrieunderwood": 47338, + "carrington": 48759, + "carroll": 41911, + "carroll": 14893, + "carrot": 15435, + "carrots": 19299, + "carry": 31863, + "carry": 6998, + "carrying": 9920, + "cars": 3346, + "carsforsale": 45222, + "carson": 41766, + "carson": 13171, + "cart": 27705, + "cart": 13065, + "cartag": 45042, + "cartagena": 47157, + "carte": 44949, + "cartel": 30529, + "carter": 27330, + "carter": 7260, + "cartier": 32951, + "carto": 5487, + "carton": 41812, + "cartoon": 33082, + "cartoon": 7651, + "cartoonist": 30793, + "cartoons": 17673, + "cartri": 47084, + "cartridge": 29432, + "cartridges": 49249, + "carts": 27581, + "cartunesapp": 32888, + "caruso": 45192, + "carve": 40152, + "carved": 15127, + "carver": 28850, + "carving": 19428, + "carvings": 48123, + "cary": 22844, + "cas": 1671, + "cas": 13831, + "casa": 14643, + "casablanc": 36572, + "casablanca": 41950, + "casc": 36714, + "casca": 43296, + "cascade": 29065, + "cascades": 46454, + "case": 17698, + "case": 2068, + "cases": 6888, + "casey": 24899, + "casey": 12836, + "cash": 11050, + "cash": 5131, + "cashback": 36368, + "cashe": 32233, + "cashew": 39531, + "cashi": 29517, + "cashier": 34547, + "cashmere": 34566, + "casi": 38350, + "casino": 10473, + "casio": 32261, + "cask": 26299, + "casm": 35198, + "casper": 35892, + "cass": 22556, + "cassandra": 35289, + "casser": 31093, + "casserole": 36045, + "cassette": 19717, + "cassi": 14942, + "cassidy": 21757, + "cassie": 29323, + "cassini": 46554, + "cast": 2509, + "cast": 1970, + "caste": 32693, + "casted": 33838, + "castel": 43306, + "castell": 31792, + "caster": 32101, + "caster": 8449, + "casters": 29721, + "castic": 47737, + "castillo": 30813, + "casting": 7087, + "castle": 12496, + "castle": 3540, + "castles": 24766, + "castro": 16950, + "casts": 10595, + "casu": 15345, + "casual": 10129, + "casually": 18840, + "casualties": 30244, + "casualty": 31222, + "cat": 1481, + "cat": 2368, + "cata": 42279, + "catal": 12792, + "catalan": 30532, + "catalina": 36576, + "catalo": 34740, + "catalog": 20036, + "catalogue": 20985, + "catalonia": 27039, + "catalunya": 44132, + "cataly": 15894, + "catalyst": 25387, + "catan": 45893, + "catap": 39514, + "catar": 35801, + "catastro": 22736, + "catastrophe": 41422, + "catastrophic": 34448, + "catch": 18901, + "catch": 3042, + "catcher": 15965, + "catchers": 39060, + "catches": 17213, + "catching": 8617, + "catchy": 37114, + "catday": 32243, + "cate": 6357, + "cate": 24510, + "cated": 31823, + "categor": 17006, + "categori": 40117, + "categories": 19971, + "category": 9432, + "cater": 16634, + "cater": 38101, + "catering": 16697, + "caterpillar": 27111, + "catfish": 26077, + "cath": 9196, + "cath": 30811, + "cathar": 43784, + "cathe": 7174, + "cathedr": 46370, + "cathedral": 7865, + "catherine": 35035, + "catherine": 12339, + "catho": 7595, + "cathol": 16315, + "catholic": 20382, + "catholic": 7757, + "catholics": 36808, + "cathy": 40326, + "cathy": 22731, + "cation": 21367, + "cato": 33558, + "cats": 38800, + "cats": 3989, + "catsofinstagram": 39901, + "catsoftwitter": 17273, + "catt": 37339, + "cattle": 48799, + "cattle": 13644, + 
"caturday": 20892, + "catwalk": 36565, + "catwoman": 47251, + "cau": 1121, + "cau": 45529, + "caucus": 18847, + "caught": 4520, + "caul": 23460, + "cauley": 41682, + "caulfield": 44906, + "cauli": 20123, + "cauliflower": 23802, + "cause": 18982, + "cause": 1394, + "caused": 8940, + "causes": 9775, + "causeway": 35034, + "causing": 10779, + "caution": 15656, + "cautious": 36579, + "cav": 4942, + "cav": 45935, + "cava": 48682, + "caval": 24537, + "cavali": 20783, + "cavalier": 44488, + "cavaliers": 30194, + "cavalry": 32467, + "cave": 25441, + "cave": 9654, + "cavendish": 42945, + "caver": 41487, + "caves": 22096, + "cavi": 27360, + "caviar": 31228, + "cavill": 40492, + "cavity": 43156, + "cavs": 16800, + "caw": 38405, + "caw": 43804, + "cawx": 26739, + "cay": 11876, + "cay": 37399, + "cayenne": 43650, + "cayman": 33737, + "caz": 48451, + "cb": 4034, + "cb": 8830, + "cba": 38472, + "cbb": 31487, + "cbc": 14096, + "cbc": 14523, + "cbd": 13176, + "cbe": 43639, + "cbi": 30875, + "cbj": 35608, + "cbn": 26579, + "cbp": 46723, + "cbr": 28762, + "cbs": 16788, + "cbs": 8009, + "cc": 2976, + "cc": 2021, + "cca": 17987, + "ccc": 21856, + "ccd": 48556, + "ccg": 37755, + "cch": 21789, + "cchini": 28467, + "cci": 32942, + "cci": 8196, + "ccl": 43773, + "ccm": 40435, + "cco": 28786, + "ccot": 24950, + "ccp": 43045, + "ccs": 30400, + "cctv": 23097, + "ccu": 49023, + "cd": 4308, + "cd": 4480, + "cda": 45565, + "cdc": 41098, + "cdc": 25779, + "cdn": 8886, + "cdn": 26802, + "cdnpoli": 11645, + "cdo": 47187, + "cdp": 39624, + "cds": 20784, + "cdt": 18455, + "ce": 685, + "ce": 629, + "cea": 28355, + "cean": 34409, + "cean": 37295, + "cease": 32856, + "cease": 25499, + "ceasefire": 38291, + "cebu": 20146, + "cec": 29694, + "cec": 40029, + "cecil": 26987, + "cecil": 27169, + "cecilia": 35440, + "ced": 25634, + "ced": 2323, + "cedar": 24167, + "cedar": 13799, + "cedric": 36608, + "cee": 45966, + "cee": 15015, + "cees": 47914, + "ceil": 27275, + "ceiling": 12374, + "ceilings": 33770, + "cek": 45544, + "cel": 2269, + "cel": 7597, + "cele": 1314, + "celeb": 38862, + "celeb": 19393, + "celebr": 1372, + "celebrate": 31414, + "celebrate": 2694, + "celebrated": 9184, + "celebrates": 7564, + "celebrating": 3382, + "celebration": 4615, + "celebrations": 10825, + "celebratory": 34115, + "celebrities": 17071, + "celebrity": 23981, + "celebrity": 7320, + "celebs": 19803, + "celed": 25741, + "celer": 9621, + "celery": 30990, + "celeste": 29364, + "celesti": 29497, + "celestial": 32669, + "celi": 25567, + "celia": 44489, + "celine": 33644, + "cell": 9316, + "cell": 5533, + "cellar": 24282, + "cellars": 44976, + "cellence": 34687, + "cello": 23013, + "cellphone": 39029, + "cells": 8890, + "cellu": 16791, + "cellular": 23268, + "cels": 24021, + "celsius": 47057, + "celtic": 21897, + "celtic": 10523, + "celticfc": 38612, + "celtics": 16226, + "cem": 41435, + "ceme": 10517, + "cement": 4369, + "cements": 19448, + "cemetery": 11660, + "cen": 1306, + "cen": 30106, + "cena": 21591, + "cence": 24410, + "cency": 41259, + "cene": 30038, + "censor": 24230, + "censor": 44709, + "censored": 30951, + "censorship": 27284, + "census": 23677, + "cent": 1784, + "cent": 3662, + "centenary": 22422, + "centennial": 20895, + "center": 16651, + "center": 2119, + "centered": 24584, + "centers": 14494, + "centi": 48889, + "centime": 48687, + "centr": 2370, + "central": 13448, + "central": 3339, + "centre": 26310, + "centre": 2916, + "centred": 47925, + "centres": 19354, + "centri": 30872, + "centric": 19297, + "centro": 37178, + "cents": 11934, + 
"centu": 16818, + "centuri": 36816, + "centuries": 19014, + "century": 26134, + "century": 4275, + "ceo": 46340, + "ceo": 3559, + "ceos": 28332, + "cep": 2632, + "cep": 48714, + "ceph": 44343, + "cept": 3678, + "ception": 12346, + "cer": 1364, + "cer": 1925, + "cera": 34608, + "ceram": 10677, + "ceramic": 15112, + "ceramics": 22438, + "cere": 3984, + "cere": 22085, + "cereal": 17581, + "cereals": 48618, + "cerebral": 39073, + "ceremon": 15796, + "ceremonial": 33281, + "ceremonies": 21547, + "ceremony": 5193, + "cern": 44851, + "cers": 13638, + "cert": 27522, + "certain": 8526, + "certain": 7883, + "certainly": 10883, + "certainty": 20054, + "certi": 4888, + "certific": 9443, + "certificate": 11786, + "certificates": 25281, + "certification": 14735, + "certified": 9288, + "cerv": 25738, + "cervical": 35953, + "ces": 28715, + "ces": 1604, + "cesar": 37025, + "cesar": 28603, + "cess": 2314, + "cess": 1554, + "cessna": 36596, + "cest": 27245, + "cester": 15769, + "cester": 12718, + "cet": 14960, + "cett": 46708, + "ceu": 37457, + "cevic": 48369, + "cey": 20971, + "cf": 10189, + "cf": 11171, + "cfa": 34521, + "cfb": 32931, + "cfc": 11577, + "cfd": 46171, + "cfl": 46320, + "cfl": 22332, + "cfo": 26937, + "cfp": 40756, + "cfr": 44033, + "cfs": 32835, + "cg": 27118, + "cg": 14740, + "cgc": 38775, + "cgi": 30520, + "ch": 540, + "ch": 634, + "cha": 1587, + "cha": 4541, + "chab": 26670, + "chad": 13095, + "chad": 12923, + "chae": 9460, + "chaf": 38123, + "chag": 27989, + "chai": 31590, + "chai": 18919, + "chain": 13898, + "chain": 3946, + "chained": 34402, + "chains": 14438, + "chainsaw": 37617, + "chainz": 39687, + "chair": 4728, + "chair": 4269, + "chaired": 31664, + "chairing": 42205, + "chairman": 6901, + "chairperson": 31584, + "chairs": 12033, + "chak": 13702, + "chak": 41713, + "chakra": 38304, + "chakra": 33241, + "chal": 7397, + "chal": 30809, + "chale": 38099, + "chalet": 37907, + "chalk": 31362, + "chalk": 17846, + "chall": 2073, + "challeng": 4138, + "challenge": 29462, + "challenge": 2836, + "challenged": 17380, + "challenger": 18228, + "challengers": 46404, + "challenges": 6280, + "challenging": 11754, + "chalmers": 47955, + "cham": 1290, + "cham": 19951, + "chamber": 18983, + "chamber": 7642, + "chamberlain": 32756, + "chambers": 16501, + "chamele": 34759, + "chameleon": 41317, + "champ": 36813, + "champ": 6602, + "champag": 10283, + "champagne": 11007, + "champi": 1680, + "champion": 2643, + "champion": 3950, + "champions": 4227, + "championship": 3429, + "championships": 7047, + "championsleague": 27638, + "champs": 6240, + "chan": 1255, + "chan": 6704, + "chana": 48752, + "chanc": 13931, + "chance": 32940, + "chance": 2594, + "chancellor": 15886, + "chances": 10870, + "chand": 7126, + "chand": 41508, + "chandelier": 30570, + "chandi": 12482, + "chandigarh": 34106, + "chandler": 17595, + "chandra": 27082, + "chandra": 25348, + "chanel": 16951, + "chang": 2233, + "chang": 16461, + "change": 11608, + "change": 1799, + "changeable": 41335, + "changed": 4907, + "changer": 18406, + "changers": 35185, + "changes": 4938, + "changing": 40384, + "changing": 5621, + "changmin": 47410, + "chann": 8804, + "channel": 25837, + "channel": 3847, + "channeling": 28197, + "channels": 13961, + "channing": 37417, + "chant": 18165, + "chant": 13521, + "chanting": 32111, + "chants": 22723, + "chanyeol": 18805, + "chao": 31815, + "chaos": 10853, + "chaotic": 33501, + "chap": 3825, + "chap": 21939, + "chapel": 40859, + "chapel": 10137, + "chaplain": 38348, + "chaplin": 32545, + "chapman": 17968, + "chapp": 
20634, + "chaps": 36823, + "chapter": 6014, + "chapters": 22936, + "char": 1054, + "char": 16017, + "chara": 35668, + "charac": 2792, + "character": 10997, + "character": 4009, + "characterdesign": 38149, + "characteri": 20920, + "characteristic": 44747, + "characteristics": 26037, + "characters": 6564, + "charan": 31851, + "charcoal": 19268, + "chard": 17524, + "chardon": 26599, + "chardonnay": 28161, + "charge": 25032, + "charge": 5948, + "chargeable": 35664, + "charged": 7916, + "charger": 13090, + "chargers": 17352, + "charges": 8962, + "charging": 12514, + "chariot": 38811, + "charis": 24449, + "charisma": 45041, + "charismatic": 37205, + "charitable": 23256, + "charities": 18493, + "charity": 20008, + "charity": 4607, + "charitytuesday": 42794, + "charl": 47736, + "charle": 10217, + "charles": 27983, + "charles": 5127, + "charleston": 15478, + "charley": 38027, + "charli": 21784, + "charli": 49392, + "charlie": 16764, + "charlie": 6393, + "charlotte": 18445, + "charlotte": 7871, + "charlottesville": 32027, + "charlton": 27048, + "charm": 10876, + "charmed": 39790, + "charming": 12177, + "charms": 21944, + "charred": 44085, + "chart": 42685, + "chart": 5053, + "charted": 27939, + "charter": 42345, + "charter": 13569, + "chartered": 31298, + "charters": 46626, + "charting": 39841, + "charts": 10728, + "chas": 10717, + "chas": 29838, + "chase": 21503, + "chase": 3859, + "chased": 30342, + "chaser": 29560, + "chasers": 34158, + "chases": 45011, + "chasing": 46909, + "chasing": 13376, + "chassis": 29188, + "chast": 42176, + "chasu": 41352, + "chat": 5355, + "chat": 2402, + "chatbots": 43994, + "chate": 30377, + "chateau": 44582, + "chateau": 23520, + "chath": 46849, + "chatham": 32030, + "chats": 13263, + "chatt": 21618, + "chattanoo": 28009, + "chattanooga": 29866, + "chatted": 34124, + "chatter": 33473, + "chatter": 41103, + "chatting": 12401, + "chatur": 33839, + "chau": 11263, + "chau": 37536, + "chauffe": 45440, + "chauhan": 46663, + "chav": 28997, + "chavez": 27480, + "chaw": 39639, + "chay": 45317, + "chaz": 47815, + "chc": 36233, + "chd": 41645, + "che": 983, + "che": 3842, + "chea": 39580, + "chead": 48358, + "cheap": 27036, + "cheap": 8678, + "cheape": 26164, + "cheaper": 17776, + "cheapest": 26640, + "cheat": 18180, + "cheated": 34285, + "cheating": 19722, + "chec": 1113, + "check": 7672, + "check": 1217, + "checked": 10387, + "checker": 45883, + "checkers": 48181, + "checking": 7441, + "checklist": 26989, + "checkout": 13101, + "checkpoint": 27531, + "checks": 13737, + "ched": 11341, + "ched": 2146, + "cheddar": 20551, + "chee": 5326, + "chee": 20944, + "cheek": 40000, + "cheek": 21227, + "cheeks": 23019, + "cheeky": 15068, + "cheer": 9733, + "cheer": 6918, + "cheered": 38111, + "cheerful": 28882, + "cheering": 14289, + "cheerleader": 29072, + "cheerleaders": 22343, + "cheerleading": 36366, + "cheers": 6562, + "chees": 15182, + "cheese": 10738, + "cheese": 4108, + "cheeseburger": 41200, + "cheesecake": 17803, + "cheeses": 36076, + "cheesy": 22093, + "cheetah": 27431, + "chef": 12137, + "chef": 4895, + "chefs": 14486, + "chek": 43745, + "chel": 3084, + "chel": 25970, + "chell": 46854, + "chelle": 30141, + "chelms": 34936, + "chelmsford": 39890, + "chelse": 19071, + "chelsea": 6031, + "chelseafc": 25927, + "chelten": 18889, + "cheltenham": 21589, + "chem": 5667, + "chem": 13698, + "chemi": 7179, + "chemical": 39376, + "chemical": 9208, + "chemicals": 17426, + "chemist": 23138, + "chemistry": 8841, + "chemo": 33095, + "chemo": 36348, + "chemotherapy": 41412, + "chemtrails": 
46015, + "chen": 5907, + "chen": 8983, + "cheney": 43522, + "cheng": 32512, + "cheng": 30190, + "chenko": 29073, + "chennai": 28948, + "chennai": 12791, + "cheon": 11498, + "cheque": 28168, + "cher": 3597, + "cher": 3466, + "cheri": 26471, + "cherish": 20053, + "cherished": 42325, + "cherno": 35376, + "chernobyl": 40554, + "chero": 19844, + "cherokee": 22860, + "cherries": 27248, + "cherry": 21470, + "cherry": 7325, + "chers": 5789, + "chery": 38478, + "cheryl": 37784, + "cheryl": 20600, + "ches": 18346, + "ches": 1910, + "chesa": 28349, + "chesapeake": 32909, + "cheshire": 17130, + "chesney": 48747, + "chess": 27170, + "chess": 8397, + "chest": 18217, + "chest": 10563, + "chester": 10466, + "chester": 3343, + "chesterfield": 32975, + "chestnut": 21834, + "chet": 9663, + "chett": 24695, + "chev": 7152, + "chev": 41145, + "chevro": 12850, + "chevrolet": 13240, + "chevron": 33792, + "chevy": 16581, + "chew": 32645, + "chew": 22642, + "chewan": 23689, + "chewbacca": 49355, + "chewing": 31486, + "chewy": 42940, + "chey": 26968, + "chey": 31208, + "cheyenne": 34805, + "chez": 49183, + "chez": 10556, + "chf": 33021, + "chfield": 41619, + "chhat": 34127, + "chhattisgarh": 44246, + "chi": 1337, + "chi": 4039, + "chia": 19147, + "chiang": 33764, + "chibi": 22306, + "chic": 2627, + "chic": 9091, + "chica": 44190, + "chicag": 16778, + "chicago": 15038, + "chicago": 3530, + "chicagof": 40638, + "chicagofire": 46576, + "chicas": 40664, + "chichester": 43823, + "chick": 3170, + "chick": 11238, + "chicken": 26322, + "chicken": 3717, + "chickens": 21658, + "chickpea": 48109, + "chicks": 17810, + "chico": 30379, + "chie": 40046, + "chie": 12388, + "chief": 16830, + "chief": 3455, + "chiefs": 11419, + "chiev": 47761, + "chiff": 27407, + "chiffon": 31817, + "chig": 42952, + "chihu": 22857, + "chihuahu": 25437, + "chihuahua": 30181, + "chik": 45455, + "chil": 1333, + "child": 4392, + "child": 2913, + "childcare": 31133, + "childhood": 34772, + "childhood": 7551, + "childish": 31939, + "childre": 2135, + "children": 11101, + "children": 2153, + "childrens": 31551, + "childrens": 21553, + "childs": 39521, + "chile": 10022, + "chilean": 33186, + "chili": 13033, + "chill": 6498, + "chill": 6382, + "chilled": 23540, + "chillen": 45160, + "chilli": 26787, + "chilli": 17067, + "chillin": 10347, + "chilling": 10179, + "chillout": 39842, + "chills": 25460, + "chilly": 14450, + "chim": 10543, + "chimney": 26821, + "chimp": 44374, + "chin": 6555, + "chin": 8979, + "china": 38943, + "china": 2817, + "chinatown": 28582, + "chine": 4013, + "chinese": 30568, + "chinese": 4271, + "ching": 34621, + "ching": 1439, + "chino": 47181, + "chino": 27440, + "chinook": 41577, + "chinson": 33786, + "chio": 19650, + "chip": 19271, + "chip": 8730, + "chipmun": 46384, + "chipot": 17702, + "chipotle": 19284, + "chipp": 39854, + "chippe": 46541, + "chipped": 39892, + "chipping": 40323, + "chips": 8855, + "chir": 15564, + "chiro": 23413, + "chiroprac": 25987, + "chiropractic": 34437, + "chis": 19920, + "chistan": 20523, + "chiswick": 47290, + "chit": 13515, + "chit": 45626, + "chita": 49184, + "chitec": 39862, + "chive": 29222, + "chives": 34921, + "chk": 47424, + "chl": 38592, + "chley": 47748, + "chlo": 10374, + "chloe": 39966, + "chloe": 13992, + "chlor": 23135, + "chman": 35835, + "chment": 20848, + "chner": 48277, + "cho": 1327, + "cho": 5150, + "choa": 43077, + "choc": 32772, + "choc": 21983, + "choco": 46285, + "choco": 32692, + "chocol": 3443, + "chocolat": 44631, + "chocolate": 29389, + "chocolate": 3820, + "chocolates": 24120, + 
"choi": 23749, + "choic": 35606, + "choice": 23857, + "choice": 4051, + "choices": 11016, + "choir": 9214, + "choirs": 43277, + "choke": 30231, + "choked": 43521, + "choker": 39642, + "choking": 39993, + "chol": 19802, + "cholera": 45999, + "cholester": 26861, + "cholesterol": 27982, + "chom": 25151, + "chon": 20416, + "chon": 21601, + "chondri": 37379, + "chong": 26220, + "choo": 3869, + "choo": 24437, + "chool": 29578, + "chools": 41958, + "choose": 22756, + "choose": 5073, + "chooses": 29923, + "choosing": 13475, + "chop": 10458, + "chop": 16663, + "chopin": 42256, + "chopped": 22580, + "chopper": 24011, + "chopping": 35375, + "chopra": 24258, + "chops": 26321, + "chor": 7567, + "chor": 47795, + "choral": 26684, + "chord": 33005, + "chords": 36152, + "choreo": 17443, + "choreographer": 35952, + "choreography": 32749, + "chores": 40483, + "chori": 25718, + "chorizo": 30802, + "chorus": 20869, + "chos": 26559, + "chose": 11090, + "chosen": 10044, + "chou": 16960, + "chou": 42917, + "choudhary": 45503, + "chow": 20257, + "chow": 21657, + "chowder": 37886, + "chp": 35896, + "chr": 36918, + "chri": 1135, + "chris": 9907, + "chris": 2978, + "chrisbrown": 41035, + "chriss": 46745, + "chrissy": 44762, + "chrissy": 40485, + "christ": 1403, + "christ": 6703, + "christchurch": 27100, + "christen": 31956, + "christensen": 42226, + "christi": 3328, + "christi": 33213, + "christian": 11792, + "christian": 4729, + "christianity": 20000, + "christians": 14842, + "christie": 16084, + "christin": 30189, + "christina": 15925, + "christine": 42610, + "christine": 14712, + "christma": 12039, + "christmas": 18174, + "christmas": 1677, + "christmaseve": 44381, + "christmass": 44873, + "christop": 7917, + "christoph": 47844, + "christophe": 45486, + "christopher": 33349, + "christopher": 9630, + "christy": 28331, + "chro": 13207, + "chromatic": 44207, + "chrome": 24843, + "chrome": 9529, + "chromo": 35809, + "chron": 5577, + "chron": 39781, + "chronic": 10115, + "chronic": 13677, + "chronicle": 20034, + "chronicles": 18905, + "chrono": 29387, + "chronograph": 38397, + "chry": 13508, + "chrysler": 20078, + "chs": 40277, + "chs": 8391, + "chsnews": 44919, + "cht": 11384, + "chter": 47811, + "chu": 3799, + "chu": 13622, + "chubby": 29109, + "chuck": 13211, + "chuck": 9894, + "chuckle": 35733, + "chucky": 42026, + "chuffed": 27233, + "chuk": 25878, + "chuk": 27221, + "chul": 33001, + "chum": 46869, + "chum": 41767, + "chun": 14693, + "chun": 25391, + "chung": 28418, + "chunk": 30275, + "chunks": 45538, + "chunky": 27978, + "chups": 46331, + "chur": 2309, + "church": 14956, + "church": 2735, + "churches": 15539, + "churchill": 17527, + "chus": 36246, + "chut": 28788, + "chutney": 36261, + "chy": 15131, + "chy": 8096, + "chyna": 43398, + "châ": 48669, + "ci": 698, + "ci": 5798, + "cia": 4019, + "cial": 1143, + "cian": 32323, + "ciao": 37677, + "ciara": 31369, + "cible": 28873, + "cic": 14539, + "cic": 21517, + "cid": 27359, + "cide": 34178, + "cider": 13547, + "cides": 41326, + "cie": 19730, + "cier": 24067, + "cies": 6785, + "cif": 35698, + "cigar": 26031, + "cigar": 16525, + "cigare": 13044, + "cigarette": 18548, + "cigarettes": 22750, + "cigars": 20750, + "cii": 42408, + "cil": 9217, + "cil": 2998, + "cilan": 33998, + "cilantro": 34568, + "cili": 18977, + "ciliation": 25294, + "cim": 30021, + "cin": 2396, + "cin": 25367, + "cina": 39467, + "cincin": 13291, + "cincinnati": 14197, + "cinco": 25131, + "cincode": 40930, + "cincodemayo": 42542, + "cincy": 30015, + "cincy": 30286, + "cinde": 20660, + "cinderella": 
21515, + "cindy": 34439, + "cindy": 18532, + "cine": 4015, + "cine": 27451, + "cinema": 38251, + "cinema": 6443, + "cinemas": 14845, + "cinematic": 25602, + "cinemato": 21919, + "cinematographer": 39059, + "cinematography": 33802, + "ciner": 39882, + "cing": 4014, + "cini": 25699, + "cinnam": 12768, + "cinnamon": 13460, + "cino": 18616, + "cio": 44584, + "cio": 9954, + "cion": 22024, + "ciones": 37155, + "cious": 38466, + "cip": 32884, + "cir": 2459, + "cir": 41135, + "circa": 10411, + "circle": 33574, + "circle": 7117, + "circles": 19411, + "circling": 46036, + "circu": 5143, + "circuit": 35583, + "circuit": 9801, + "circuits": 33260, + "circul": 16618, + "circular": 19733, + "circulare": 39525, + "circulareconomy": 39878, + "circulated": 46258, + "circulating": 42980, + "circulation": 27880, + "circum": 13406, + "circumstances": 18786, + "circus": 11833, + "cirque": 36049, + "cis": 9459, + "cis": 23513, + "cisco": 36689, + "cisco": 19290, + "cise": 19657, + "cisely": 33434, + "cision": 41957, + "cism": 24166, + "cist": 40906, + "cit": 4420, + "cit": 31294, + "citadel": 38036, + "citation": 33581, + "cite": 32641, + "cited": 25069, + "cites": 34490, + "citi": 4280, + "citi": 30270, + "cities": 5441, + "citing": 29088, + "citiz": 5816, + "citizen": 11720, + "citizen": 9814, + "citizens": 7949, + "citizenship": 17386, + "cito": 42636, + "citro": 27941, + "citroen": 35805, + "citrus": 17379, + "city": 5002, + "city": 1305, + "cityfc": 28751, + "cityo": 25709, + "cityof": 11595, + "cityscape": 40808, + "ciu": 39693, + "cius": 42559, + "civ": 40039, + "civic": 32240, + "civic": 11888, + "civil": 6923, + "civil": 6450, + "civilian": 21187, + "civilians": 18076, + "civilization": 22503, + "civilwar": 34524, + "ción": 44700, + "cj": 15238, + "cj": 15205, + "ck": 916, + "ck": 868, + "cke": 25224, + "cke": 40989, + "cked": 3441, + "cken": 25566, + "cker": 15509, + "cker": 4744, + "ckers": 37073, + "cket": 5525, + "ckett": 33899, + "ckey": 15029, + "ckey": 3657, + "cki": 36916, + "cki": 41055, + "cking": 4805, + "cko": 28818, + "cks": 2031, + "cky": 26229, + "cky": 3083, + "cl": 969, + "cl": 6482, + "cla": 940, + "cla": 20636, + "clad": 31606, + "cladding": 46411, + "clai": 29459, + "claim": 4290, + "claim": 6607, + "claimed": 9010, + "claiming": 15286, + "claims": 6852, + "clair": 31441, + "clair": 14039, + "claire": 20410, + "claire": 10460, + "clam": 13588, + "clam": 32598, + "clamation": 21793, + "clamp": 41501, + "clams": 38849, + "clan": 29252, + "clan": 14114, + "clancy": 37227, + "clans": 38279, + "clap": 30037, + "clap": 25546, + "clapham": 43619, + "clapton": 37683, + "clar": 3617, + "clara": 19468, + "clare": 18948, + "clare": 15927, + "claremont": 47789, + "clarence": 29320, + "clari": 15175, + "clarify": 37004, + "clarinet": 41178, + "clarity": 21323, + "clark": 13340, + "clark": 7521, + "clarke": 11548, + "clarkson": 25706, + "clas": 32003, + "clash": 38367, + "clash": 9359, + "clashes": 25193, + "clasico": 43567, + "class": 2876, + "class": 1874, + "classes": 6919, + "classi": 2507, + "classic": 9353, + "classic": 2713, + "classical": 22179, + "classical": 11355, + "classicalmusic": 27806, + "classiccar": 46906, + "classiccars": 21064, + "classics": 10634, + "classification": 26612, + "classified": 22056, + "classmate": 37090, + "classmates": 30062, + "classof": 25345, + "classroom": 9001, + "classrooms": 25768, + "classy": 11615, + "clau": 7526, + "claude": 17461, + "claudi": 39439, + "claudia": 21893, + "claudio": 31230, + "claus": 23317, + "clause": 26151, + "clave": 24111, + 
"claw": 49230, + "claw": 19106, + "claws": 29161, + "clay": 10402, + "clay": 8823, + "clays": 26128, + "clayton": 46445, + "clayton": 19413, + "clc": 31380, + "cle": 1321, + "cle": 2537, + "clean": 3572, + "clean": 3772, + "cleaned": 17468, + "cleanenergy": 43538, + "cleaner": 15619, + "cleaners": 33258, + "cleaning": 7210, + "cleanliness": 47886, + "cleans": 40827, + "cleanse": 28717, + "cleanser": 44170, + "cleansing": 25931, + "cleanup": 22353, + "clear": 4631, + "clear": 3143, + "clearance": 17959, + "cleared": 14880, + "clearer": 37031, + "clearing": 15481, + "clearly": 7767, + "clears": 29092, + "clearwater": 32124, + "cleary": 44342, + "cleats": 33486, + "cleavage": 44165, + "cled": 12827, + "clegg": 42915, + "clemens": 45896, + "clement": 22592, + "clement": 24714, + "clemente": 42461, + "clementine": 47112, + "clements": 49175, + "clemson": 38170, + "clemson": 19537, + "clen": 35547, + "cleo": 40344, + "cleop": 36287, + "cleopatra": 41212, + "cler": 11828, + "clergy": 42635, + "cleric": 43748, + "clerk": 22230, + "clermont": 47529, + "cles": 8077, + "cleve": 37599, + "clevel": 7701, + "cleveland": 30716, + "cleveland": 8430, + "clever": 30977, + "clever": 13385, + "clg": 47546, + "cli": 1503, + "clich": 44407, + "click": 16676, + "click": 3585, + "clicked": 29015, + "clicking": 26542, + "clicks": 31250, + "client": 48528, + "client": 7467, + "clients": 8114, + "clif": 13182, + "cliff": 23827, + "cliff": 10625, + "cliffe": 15170, + "clifford": 24226, + "cliffs": 20953, + "clifton": 23878, + "climat": 37283, + "climate": 7854, + "climate": 4589, + "climateaction": 31622, + "climatechange": 11055, + "climates": 46022, + "climax": 37033, + "climb": 7421, + "climb": 10649, + "climbed": 22528, + "climber": 36910, + "climbers": 47648, + "climbing": 9877, + "climbs": 29098, + "clin": 2879, + "clinch": 30404, + "clinched": 44064, + "cline": 37460, + "cling": 37068, + "cling": 4760, + "clinic": 7926, + "clinical": 35133, + "clinical": 9148, + "clinicians": 45866, + "clinics": 23330, + "clint": 37542, + "clint": 21160, + "clinton": 34403, + "clinton": 5820, + "clio": 46889, + "clip": 39712, + "clip": 9289, + "clipped": 45524, + "clipper": 42245, + "clippers": 23319, + "clipping": 47484, + "clips": 16594, + "clique": 34983, + "clive": 36086, + "clive": 21509, + "cll": 46091, + "cllr": 45743, + "cllr": 23034, + "clo": 1194, + "cloak": 36528, + "clock": 19878, + "clock": 6716, + "clocked": 49049, + "clocks": 25895, + "clockwise": 46150, + "clockwork": 42297, + "clon": 24477, + "clone": 22854, + "clones": 48047, + "clooney": 33161, + "clos": 48821, + "close": 10603, + "close": 2660, + "closed": 4552, + "closely": 13478, + "closer": 6377, + "closes": 11354, + "closest": 14975, + "closet": 14221, + "closeup": 35439, + "closing": 7101, + "closure": 13249, + "closures": 22923, + "cloth": 14559, + "clothes": 7080, + "clothing": 7425, + "clou": 4069, + "cloud": 12965, + "cloud": 3887, + "cloudcomputing": 41390, + "clouds": 6244, + "cloudy": 13106, + "clough": 42909, + "clover": 39574, + "clover": 22812, + "clow": 18386, + "clown": 15329, + "clowns": 30820, + "cls": 44251, + "clt": 29651, + "clt": 24236, + "clu": 996, + "club": 9642, + "club": 1736, + "clubbing": 48128, + "clubhouse": 26553, + "clubs": 9437, + "clue": 14994, + "clueless": 35350, + "clues": 23764, + "clusive": 41362, + "cluster": 15595, + "clusters": 33217, + "clut": 28507, + "clutch": 13953, + "clutter": 40804, + "cly": 12037, + "clyde": 39557, + "clyde": 18469, + "cm": 10190, + "cm": 3741, + "cma": 30554, + "cma": 31388, + "cmc": 
45839, + "cmdr": 48250, + "cme": 34946, + "cmo": 24589, + "cmon": 42904, + "cmp": 46355, + "cms": 22520, + "cmt": 42727, + "cmu": 43046, + "cn": 3886, + "cn": 16200, + "cna": 48287, + "cnbc": 41242, + "cnbc": 24371, + "cnblue": 36018, + "cnc": 20571, + "cnet": 47487, + "cnews": 24319, + "cng": 41496, + "cnn": 22405, + "cnn": 8259, + "cns": 46095, + "cny": 31614, + "co": 622, + "co": 1320, + "coa": 29167, + "coach": 3275, + "coach": 2312, + "coached": 30228, + "coachella": 20222, + "coaches": 6924, + "coaching": 7766, + "coal": 10227, + "coal": 7919, + "coalition": 12920, + "coast": 6398, + "coast": 3720, + "coastal": 38246, + "coastal": 10852, + "coaster": 15944, + "coasters": 31548, + "coastguard": 40601, + "coastline": 27959, + "coasts": 42225, + "coat": 28869, + "coat": 7356, + "coated": 23401, + "coates": 36899, + "coating": 25369, + "coatings": 48706, + "coats": 18075, + "cob": 20140, + "cob": 32863, + "cobain": 36866, + "cobalt": 30896, + "cobb": 22719, + "cobble": 47894, + "cobra": 21574, + "coc": 23036, + "coc": 39498, + "coca": 21197, + "cocac": 26393, + "cocacola": 31248, + "cocaine": 20534, + "coch": 18599, + "cochran": 48798, + "cochrane": 41752, + "coco": 11850, + "coco": 13316, + "cocoa": 18074, + "cocon": 8597, + "coconut": 9581, + "cod": 16132, + "cod": 11915, + "code": 11582, + "code": 3217, + "coded": 33703, + "coden": 43914, + "coder": 41561, + "codes": 14566, + "codi": 39711, + "coding": 12647, + "cody": 23222, + "cody": 12666, + "coe": 15386, + "coed": 41028, + "coel": 45633, + "coer": 41198, + "coeur": 44986, + "coffe": 2255, + "coffee": 12898, + "coffee": 2453, + "coffees": 41184, + "coffey": 48066, + "cofficial": 18757, + "coffin": 29907, + "cog": 26362, + "cog": 35960, + "cogn": 12210, + "cognac": 44361, + "cognition": 46825, + "cognitive": 16584, + "cohe": 20669, + "cohen": 13381, + "coherent": 48450, + "cohort": 22782, + "coil": 25307, + "coim": 41528, + "coin": 14651, + "coin": 4170, + "coinci": 14015, + "coincidence": 19807, + "coins": 10530, + "coke": 39602, + "coke": 14035, + "col": 754, + "col": 9371, + "cola": 15444, + "colbert": 31647, + "colby": 32068, + "colchester": 31715, + "cold": 11146, + "cold": 3153, + "colder": 23859, + "coldest": 31438, + "coldplay": 27770, + "cole": 9305, + "cole": 8166, + "coleman": 15774, + "coles": 40265, + "coles": 30398, + "coli": 18877, + "coli": 15910, + "colin": 20989, + "colin": 10238, + "coliseum": 21836, + "coll": 25982, + "coll": 23898, + "colla": 2929, + "collab": 14013, + "collabor": 4437, + "collaborate": 21271, + "collaborated": 42265, + "collaborating": 25545, + "collaboration": 6642, + "collaborations": 36520, + "collaborative": 15841, + "collaborator": 48186, + "collaborators": 45901, + "collage": 11258, + "collagen": 36120, + "collap": 16881, + "collapse": 16520, + "collapsed": 25037, + "collapses": 43601, + "collar": 39662, + "collar": 13497, + "collateral": 44512, + "colle": 1801, + "colleague": 13067, + "colleagues": 8203, + "collec": 1733, + "collect": 10186, + "collected": 11980, + "collecti": 18530, + "collectible": 25680, + "collectibles": 21519, + "collecting": 10325, + "collection": 2548, + "collections": 12760, + "collective": 10162, + "collectively": 40687, + "collector": 13522, + "collectors": 20540, + "collects": 31576, + "colleen": 31020, + "college": 13512, + "college": 2229, + "colleges": 17357, + "collegi": 16311, + "collegiate": 18068, + "colli": 8262, + "collide": 27214, + "collie": 30611, + "collier": 35748, + "collin": 24056, + "collin": 32116, + "colling": 32319, + "collingwood": 45873, 
+ "collins": 8684, + "collision": 15407, + "collo": 25115, + "colloqui": 37243, + "colloquium": 46514, + "collu": 25658, + "collusion": 33864, + "colo": 7300, + "colo": 27288, + "cologne": 22216, + "cology": 19187, + "colom": 8987, + "colombia": 12901, + "colombian": 28701, + "colombo": 33207, + "colon": 8280, + "colon": 29050, + "colonel": 22674, + "coloni": 22667, + "colonial": 16530, + "colonialism": 43385, + "colonies": 38738, + "colony": 18767, + "color": 4036, + "color": 3140, + "colorado": 34580, + "colorado": 6742, + "colorec": 41171, + "colored": 11775, + "colorful": 11444, + "colori": 28764, + "coloring": 17696, + "colorized": 46730, + "colors": 5389, + "colorstv": 28195, + "colorway": 44576, + "colossal": 40258, + "colosse": 48142, + "colossus": 34022, + "colour": 10240, + "colour": 4769, + "coloured": 17111, + "colourful": 15562, + "colouring": 31803, + "colours": 7626, + "cols": 35726, + "colt": 19726, + "colton": 32249, + "coltrane": 42333, + "colts": 16135, + "colum": 4164, + "columb": 31043, + "columbi": 25947, + "columbia": 9410, + "columbus": 11273, + "column": 10593, + "columnist": 28958, + "columns": 29056, + "com": 610, + "com": 2464, + "coma": 19620, + "comb": 3587, + "comb": 16380, + "combat": 35083, + "combat": 9275, + "combating": 46121, + "combe": 14363, + "combin": 25112, + "combination": 11312, + "combinations": 34950, + "combine": 12919, + "combined": 10427, + "combines": 22991, + "combining": 23561, + "combo": 10155, + "combos": 48117, + "combs": 30694, + "combu": 35629, + "combustion": 44654, + "comcast": 30043, + "come": 4225, + "come": 891, + "comeback": 8234, + "comedian": 13848, + "comedians": 33758, + "comedic": 43360, + "comedy": 19346, + "comedy": 4749, + "comer": 42997, + "comer": 20916, + "comers": 34436, + "comes": 2091, + "comet": 21405, + "comets": 40636, + "comey": 22957, + "comfor": 6563, + "comfort": 44000, + "comfort": 7808, + "comfortable": 8652, + "comfortably": 30392, + "comforting": 33835, + "comforts": 42243, + "comfy": 15736, + "comi": 40781, + "comic": 7729, + "comic": 4962, + "comicart": 46018, + "comicbook": 46564, + "comicbooks": 22018, + "comiccon": 18379, + "comicon": 43820, + "comics": 4256, + "comin": 18164, + "coming": 14916, + "coming": 1171, + "comingsoon": 19894, + "comm": 965, + "comm": 11413, + "comman": 39780, + "command": 18391, + "command": 11350, + "commander": 11265, + "commanders": 41667, + "commanding": 36933, + "commandments": 43409, + "commando": 31361, + "commands": 38163, + "comme": 29692, + "commemor": 9495, + "commemorate": 21242, + "commemorates": 45149, + "commemorating": 28734, + "commemoration": 29288, + "commemorative": 24623, + "commen": 15795, + "commence": 25059, + "commenced": 43908, + "commencement": 21666, + "commences": 48551, + "commend": 37555, + "commended": 40702, + "comment": 20035, + "comment": 5761, + "commentary": 14146, + "commentator": 32016, + "commented": 28328, + "commenting": 37292, + "comments": 6606, + "commer": 4028, + "commerce": 8333, + "commerci": 15601, + "commercial": 31802, + "commercial": 6287, + "commercials": 30724, + "commish": 45399, + "commissi": 6000, + "commission": 5292, + "commissioned": 16565, + "commissioner": 10221, + "commissioners": 30702, + "commissioning": 29585, + "commissions": 20668, + "commit": 3041, + "commit": 11797, + "commitment": 7770, + "commitments": 32136, + "commits": 20241, + "committed": 7907, + "committee": 5636, + "committees": 40504, + "committing": 21937, + "commod": 9496, + "commodities": 30350, + "commodity": 29041, + "commodore": 31129, + 
"common": 8414, + "common": 4176, + "commonly": 20344, + "commons": 16653, + "commonwealth": 16569, + "comms": 18832, + "commu": 9561, + "commun": 1515, + "communal": 32809, + "communi": 16164, + "communic": 4784, + "communicate": 19809, + "communication": 7999, + "communications": 10052, + "communion": 28579, + "communism": 35387, + "communist": 18602, + "communities": 6361, + "community": 14784, + "community": 1927, + "commute": 15898, + "commuter": 27782, + "commuters": 30823, + "commuting": 43503, + "como": 16236, + "comp": 2561, + "comp": 11679, + "compac": 40014, + "compact": 13690, + "compan": 1995, + "companies": 5361, + "companion": 14963, + "companions": 37124, + "company": 2634, + "compar": 7580, + "comparable": 27092, + "comparative": 33388, + "compare": 13771, + "compared": 10544, + "compares": 25104, + "comparing": 20564, + "comparison": 14186, + "comparisons": 40870, + "compart": 30072, + "compartment": 40383, + "compass": 19438, + "compassion": 14463, + "compassionate": 30193, + "compati": 17295, + "compatibility": 41614, + "compatible": 21286, + "compe": 5254, + "compelled": 49375, + "compelling": 21766, + "compen": 42079, + "compens": 15172, + "compensation": 18663, + "compet": 2932, + "compete": 10038, + "competed": 27767, + "competen": 31853, + "competence": 31165, + "competency": 49293, + "competent": 28113, + "competes": 39826, + "competing": 13068, + "competit": 15892, + "competiti": 32581, + "competition": 3742, + "competitions": 23259, + "competitive": 10687, + "competitiveness": 43209, + "competitor": 26633, + "competitors": 23638, + "compilation": 20446, + "compiled": 34579, + "compla": 7428, + "complain": 19292, + "complained": 42029, + "complaining": 20812, + "complains": 46363, + "complaint": 20391, + "complaints": 20020, + "comple": 1730, + "complement": 36624, + "complementary": 48953, + "complete": 3263, + "completed": 5976, + "completely": 5989, + "completes": 19321, + "completing": 14949, + "completion": 15915, + "complex": 16099, + "complex": 6324, + "complexes": 47870, + "complexion": 47732, + "complexity": 24815, + "compli": 5270, + "compliance": 14658, + "compliant": 29893, + "complic": 11460, + "complicated": 16621, + "complications": 29936, + "compliment": 25116, + "complimentary": 20948, + "compliments": 25477, + "comply": 36281, + "component": 21284, + "components": 16816, + "compos": 7783, + "compose": 43659, + "composed": 19916, + "composer": 12104, + "composers": 33314, + "composing": 40412, + "composite": 21606, + "composites": 45395, + "composition": 17510, + "compositions": 44652, + "compost": 46002, + "compost": 33307, + "compound": 19980, + "compounds": 33991, + "compre": 8483, + "compreh": 42976, + "comprehen": 12050, + "comprehend": 48230, + "comprehensive": 13854, + "compress": 33353, + "compressed": 42359, + "compression": 25638, + "compressor": 39607, + "compri": 29445, + "compromise": 26611, + "compromised": 38576, + "compromising": 45436, + "comps": 48665, + "compton": 28364, + "compu": 11639, + "compul": 25869, + "compulsory": 39345, + "computing": 12732, + "comra": 25553, + "comrade": 30844, + "comrades": 29282, + "coms": 30493, + "con": 616, + "con": 2457, + "cona": 30605, + "conan": 24750, + "conce": 9145, + "concealed": 35419, + "conceded": 37895, + "conceived": 39725, + "concentr": 11085, + "concentrate": 30846, + "concentrated": 36776, + "concentration": 18565, + "concep": 8389, + "concepcion": 47035, + "concept": 6353, + "conceptart": 31162, + "conception": 30510, + "conceptions": 40307, + "concepts": 16763, + "conceptu": 
42745, + "conceptual": 34070, + "concer": 2228, + "concern": 12928, + "concerned": 12020, + "concerning": 21772, + "concerns": 11134, + "concert": 32180, + "concert": 3066, + "concerto": 24710, + "concerts": 14418, + "concession": 38117, + "concessions": 43981, + "concier": 28859, + "concierge": 39850, + "conclave": 38098, + "conclu": 9627, + "conclude": 37525, + "concluded": 27825, + "concludes": 30634, + "conclusion": 20932, + "conclusions": 39507, + "conco": 43034, + "concor": 19913, + "concord": 26448, + "concordia": 35492, + "concours": 36282, + "concourse": 37793, + "concre": 43658, + "concrete": 9637, + "concussion": 28321, + "condem": 13287, + "condemn": 27212, + "condemned": 35145, + "condemns": 32092, + "conden": 24816, + "conditi": 11170, + "condition": 36978, + "condition": 7336, + "conditional": 24671, + "conditioned": 37014, + "conditioner": 31239, + "conditioning": 18181, + "conditions": 5892, + "condo": 19952, + "condol": 18661, + "condolences": 20836, + "condom": 39021, + "condomin": 42589, + "condoms": 37878, + "condor": 47643, + "condos": 42342, + "condu": 40772, + "conduc": 5379, + "conduct": 11647, + "conducted": 13080, + "conducting": 16787, + "conductor": 22317, + "conducts": 32084, + "cone": 39279, + "cone": 10266, + "cones": 26718, + "coney": 41837, + "conf": 6477, + "confe": 1968, + "confeder": 17104, + "confederate": 24864, + "confederation": 43484, + "conferen": 37961, + "conference": 2230, + "conferences": 22811, + "conferencing": 47320, + "confess": 38860, + "confession": 22572, + "confessions": 29404, + "confetti": 37923, + "confi": 5005, + "confidence": 8510, + "confident": 12365, + "confidential": 28712, + "configu": 46746, + "configur": 26950, + "configuration": 33378, + "confin": 45316, + "confined": 40973, + "confir": 3930, + "confirm": 12130, + "confirmation": 19645, + "confirmed": 6346, + "confirming": 38433, + "confirms": 11803, + "confis": 36285, + "confit": 42241, + "confl": 8173, + "conflic": 19029, + "conflict": 10397, + "conflicting": 43894, + "conflicts": 28713, + "confor": 40933, + "confron": 20033, + "confront": 38382, + "confrontation": 41478, + "confu": 6890, + "confuse": 37503, + "confused": 10946, + "confusing": 24683, + "confusion": 20493, + "cong": 24407, + "conge": 20013, + "congestion": 24432, + "congo": 20334, + "congr": 1227, + "congrats": 1887, + "congratul": 1750, + "congratulate": 16633, + "congratulated": 42004, + "congratulates": 24580, + "congratulating": 30967, + "congratulation": 24751, + "congratulations": 1864, + "congre": 7947, + "congreg": 40727, + "congregation": 32618, + "congress": 12452, + "congress": 4599, + "congressional": 15239, + "congressman": 17145, + "congresswoman": 37317, + "coni": 39031, + "coni": 36651, + "conj": 41543, + "conju": 33821, + "conjunction": 34226, + "conley": 44536, + "conline": 37593, + "conn": 41836, + "conn": 20329, + "conne": 8437, + "connec": 29933, + "connect": 19969, + "connected": 27506, + "connecting": 41429, + "connection": 26840, + "connections": 37161, + "connie": 25739, + "connoisse": 46012, + "connol": 27739, + "connolly": 29537, + "connor": 21984, + "connor": 10218, + "conom": 2664, + "conomy": 22529, + "conor": 29955, + "conor": 19478, + "conqu": 13382, + "conquer": 38585, + "conquer": 19821, + "conquered": 27099, + "conquering": 43778, + "conquest": 35367, + "conrad": 22073, + "cons": 10311, + "consci": 9427, + "conscience": 27310, + "conscious": 14914, + "consciously": 46755, + "consciousness": 17894, + "conse": 34887, + "consecu": 12084, + "consecutive": 12413, + "consen": 
23110, + "consensus": 25071, + "consent": 21922, + "consequ": 13003, + "consequence": 42262, + "consequences": 15682, + "conserv": 4649, + "conservancy": 46729, + "conservation": 37616, + "conservation": 8322, + "conservative": 11421, + "conservatives": 17631, + "conservatory": 32140, + "conserve": 34231, + "consi": 2899, + "consider": 12471, + "consider": 6734, + "considerable": 38256, + "considerably": 38510, + "consideration": 24310, + "considerations": 33700, + "considered": 9487, + "considering": 10761, + "considers": 24691, + "consist": 10410, + "consist": 33735, + "consisted": 49354, + "consistency": 25683, + "consistent": 16439, + "consistently": 23799, + "consisting": 39241, + "consists": 23458, + "consol": 27869, + "consolation": 38888, + "console": 13403, + "consoles": 33136, + "consoli": 21586, + "consolidation": 41111, + "consor": 27108, + "consortium": 29988, + "conspir": 12680, + "conspiracy": 15236, + "const": 3826, + "constable": 29179, + "constan": 38718, + "constance": 40682, + "constant": 32000, + "constant": 13111, + "constantine": 30640, + "constantly": 14336, + "constell": 21913, + "constellation": 25991, + "constitu": 6299, + "constituency": 22464, + "constituents": 32075, + "constitution": 12157, + "constitutional": 16091, + "constra": 28973, + "constraints": 41910, + "constru": 3983, + "construc": 13321, + "construct": 24467, + "constructed": 16876, + "constructing": 33653, + "construction": 48873, + "construction": 4585, + "constructive": 31810, + "consu": 4689, + "consul": 5295, + "consul": 33630, + "consulate": 34341, + "consult": 9438, + "consult": 26727, + "consultancy": 31735, + "consultant": 14196, + "consultants": 27203, + "consultation": 15777, + "consultations": 43424, + "consulting": 15883, + "consume": 28919, + "consumed": 29653, + "consumer": 34408, + "consumer": 10422, + "consumers": 14014, + "consuming": 30607, + "consumption": 14904, + "cont": 2036, + "cont": 21425, + "contact": 39367, + "contact": 3523, + "contacted": 37331, + "contacts": 22789, + "contag": 29259, + "contagious": 33984, + "contain": 9948, + "contain": 15187, + "contained": 23836, + "container": 14913, + "containers": 20448, + "containing": 20281, + "contains": 12844, + "contamin": 24662, + "contaminated": 35773, + "contamination": 31770, + "conte": 15402, + "conte": 26882, + "contempl": 21924, + "contemplating": 33854, + "contempor": 14538, + "contemporary": 16607, + "contemporary": 8859, + "contemporaryart": 20212, + "contempt": 39293, + "conten": 42201, + "contender": 23573, + "contenders": 29711, + "content": 15526, + "content": 4750, + "contentmarketing": 20429, + "contents": 14850, + "contest": 23103, + "contest": 4576, + "contestalert": 27313, + "contestant": 25682, + "contestants": 28062, + "contested": 37845, + "contests": 32210, + "contex": 42015, + "context": 13089, + "conti": 46431, + "conti": 40842, + "contin": 1918, + "continent": 19623, + "continental": 14089, + "continents": 38642, + "conting": 27104, + "contingent": 36467, + "continu": 4688, + "continually": 34086, + "continuation": 38964, + "continue": 3942, + "continued": 10150, + "continues": 4305, + "continuing": 11009, + "continuity": 34035, + "continuous": 17033, + "continuously": 29634, + "continuum": 44978, + "contour": 34733, + "contr": 22871, + "contra": 9880, + "contra": 38620, + "contrac": 7581, + "contracep": 35109, + "contract": 6120, + "contracting": 39091, + "contractor": 21429, + "contractors": 22427, + "contracts": 16563, + "contradic": 27957, + "contrary": 32805, + "contrast": 18501, + 
"contrasting": 40758, + "contribu": 4753, + "contribute": 14112, + "contributed": 19397, + "contributes": 34203, + "contributing": 21762, + "contribution": 11116, + "contributions": 14465, + "contributor": 24553, + "contributors": 32908, + "contro": 2372, + "control": 9963, + "control": 3366, + "controlled": 14140, + "controller": 12929, + "controllers": 30374, + "controlling": 26427, + "controls": 15746, + "controversi": 13674, + "controversial": 14617, + "controversy": 18659, + "conv": 48382, + "conve": 18421, + "conven": 7283, + "conveni": 33278, + "convenience": 17859, + "convenient": 18978, + "conveniently": 40844, + "convention": 6752, + "conventional": 20835, + "conventions": 41404, + "conver": 6336, + "convergence": 35381, + "convers": 4577, + "conversation": 5690, + "conversations": 12326, + "converse": 24149, + "conversion": 15111, + "conversions": 44137, + "convert": 20074, + "converted": 20808, + "converter": 34611, + "convertible": 19608, + "converting": 34674, + "converts": 42470, + "convey": 38342, + "convic": 11150, + "convicted": 18668, + "conviction": 24967, + "convictions": 44366, + "convin": 12889, + "convince": 20351, + "convinced": 17388, + "convincing": 27742, + "convo": 19372, + "convocation": 30674, + "convos": 44842, + "convoy": 30292, + "conway": 21410, + "conwy": 48971, + "cony": 14501, + "coo": 1664, + "coo": 21691, + "coogs": 47624, + "cook": 9726, + "cook": 5977, + "cookbook": 21086, + "cooke": 29979, + "cooked": 11452, + "cooker": 23806, + "cookery": 38779, + "cookie": 9367, + "cookies": 8320, + "cookin": 46610, + "cooking": 39248, + "cooking": 6283, + "cookout": 39743, + "cooks": 24256, + "cool": 5594, + "cool": 2077, + "cooled": 37170, + "cooler": 11078, + "coolest": 10566, + "cooling": 15291, + "coom": 41726, + "coon": 34260, + "coon": 16958, + "coop": 39917, + "coop": 18910, + "cooper": 7264, + "cooper": 8133, + "cooperate": 42936, + "cooperation": 11785, + "cooperative": 24517, + "coops": 48531, + "coordin": 8187, + "coordinate": 38250, + "coordinated": 32540, + "coordinating": 40075, + "coordination": 25611, + "coordinator": 13967, + "coors": 36025, + "cop": 3196, + "cop": 7070, + "copa": 22749, + "copd": 45876, + "cope": 47635, + "cope": 12564, + "copeland": 37604, + "copen": 15637, + "copenhagen": 17390, + "coper": 41891, + "copernic": 45519, + "copied": 36770, + "copies": 9851, + "coping": 30545, + "copolitics": 45846, + "copp": 20937, + "copped": 42229, + "copper": 24741, + "copper": 10333, + "coppola": 47427, + "cops": 10719, + "copter": 28049, + "copy": 11376, + "copy": 4509, + "copying": 38925, + "copyright": 15778, + "cor": 851, + "cor": 18559, + "cora": 34953, + "coral": 31220, + "coral": 12054, + "corbett": 35699, + "corbin": 35578, + "corbyn": 14026, + "cord": 40893, + "cord": 11181, + "corden": 41999, + "cordi": 41681, + "cordless": 44412, + "cords": 22164, + "core": 19622, + "core": 5000, + "cores": 37874, + "corey": 31279, + "corey": 15288, + "corgi": 31320, + "cori": 26508, + "coriander": 37491, + "corin": 17716, + "corinthians": 34471, + "cork": 18148, + "cork": 10376, + "corn": 5202, + "corn": 5894, + "cornelius": 45865, + "cornell": 38689, + "cornell": 20859, + "corner": 18509, + "corner": 5253, + "corners": 19584, + "cornerstone": 36280, + "cornish": 23774, + "cornwall": 37903, + "cornwall": 10777, + "coron": 13210, + "corona": 25564, + "coronado": 43946, + "coronary": 45955, + "coronation": 25014, + "coroner": 47241, + "corp": 29203, + "corp": 10918, + "corpor": 4258, + "corporal": 42445, + "corporate": 33877, + "corporate": 6838, + 
"corporation": 11282, + "corporations": 25482, + "corps": 11330, + "corpse": 29408, + "corpus": 31672, + "correc": 5011, + "correct": 8340, + "corrected": 35628, + "correction": 20843, + "correctional": 38030, + "corrections": 37507, + "correctly": 15359, + "correlation": 29218, + "correspon": 20203, + "correspondent": 29996, + "corri": 12974, + "corridor": 20592, + "corrie": 23961, + "corro": 24936, + "corro": 42033, + "corrosion": 39191, + "corru": 6501, + "corrup": 30429, + "corrupt": 15194, + "corruption": 9141, + "corsa": 47670, + "corsair": 42367, + "corset": 40408, + "cortex": 40109, + "cortez": 30461, + "corvette": 24367, + "cory": 23221, + "cory": 18329, + "cos": 5865, + "cos": 5700, + "cosby": 30324, + "cosc": 45944, + "coscino": 47909, + "cose": 26495, + "cosm": 37486, + "cosme": 9628, + "cosmetic": 23918, + "cosmetics": 12896, + "cosmic": 47398, + "cosmic": 18304, + "cosmo": 12829, + "cosmo": 32072, + "cosmopolitan": 35518, + "cosmos": 22151, + "cospla": 15149, + "cosplay": 42401, + "cosplay": 6435, + "cosplayer": 30215, + "cosplaying": 46701, + "cost": 11360, + "cost": 4713, + "costa": 10480, + "costar": 28659, + "costarica": 31272, + "costco": 31045, + "costello": 30667, + "costing": 39193, + "costly": 30170, + "costs": 7628, + "costu": 5786, + "costume": 7235, + "costumes": 15150, + "cosy": 22848, + "cot": 4718, + "cot": 5871, + "cote": 44234, + "cote": 20751, + "cotland": 32576, + "cotsw": 23303, + "cotswolds": 35546, + "cott": 8211, + "cott": 11349, + "cottage": 12155, + "cottages": 34405, + "cotton": 22218, + "cotton": 7050, + "cou": 1368, + "couch": 12724, + "cougar": 35028, + "cougar": 27042, + "cougars": 20425, + "cough": 35631, + "cough": 18498, + "cougs": 28482, + "coul": 22483, + "could": 44812, + "could": 1510, + "couldn": 4072, + "couldnt": 29042, + "coulter": 42291, + "coun": 939, + "counc": 12927, + "council": 18187, + "council": 3620, + "councill": 15732, + "councillor": 21179, + "councillors": 29695, + "councilman": 40833, + "councils": 29938, + "counsel": 13780, + "counsel": 19814, + "counseling": 25000, + "counsell": 47510, + "counselling": 40581, + "counselor": 26148, + "counselors": 38688, + "count": 6073, + "count": 5887, + "countdown": 39559, + "countdown": 7500, + "counted": 23149, + "counter": 10134, + "counter": 7352, + "counterfe": 33067, + "counterfeit": 44242, + "counterpart": 39216, + "counterparts": 42106, + "counters": 46170, + "countess": 46276, + "counties": 12338, + "counting": 9723, + "countless": 21819, + "countries": 5489, + "country": 7896, + "country": 2157, + "countryfile": 47023, + "countrymusic": 30372, + "countryside": 16303, + "counts": 12264, + "county": 18734, + "county": 2116, + "coup": 9871, + "coup": 16479, + "coupe": 16773, + "couple": 40136, + "couple": 3377, + "coupled": 37153, + "couples": 14752, + "coupling": 45595, + "coupon": 14019, + "coupons": 23945, + "cour": 1391, + "coura": 4436, + "courage": 9828, + "courageous": 25005, + "courier": 27217, + "cours": 21493, + "course": 43225, + "course": 2613, + "courses": 9464, + "court": 16837, + "court": 2908, + "courte": 5088, + "courtesy": 5228, + "courthouse": 22205, + "courtney": 33601, + "courtney": 15990, + "courtroom": 41071, + "courts": 13514, + "courty": 20121, + "courtyard": 21900, + "cous": 48397, + "cousin": 7780, + "cousins": 14073, + "cout": 29118, + "coutinho": 35530, + "couture": 14808, + "cov": 19384, + "cov": 48385, + "cove": 21700, + "cove": 14708, + "coven": 12483, + "covenant": 29647, + "coventry": 18007, + "cover": 13534, + "cover": 2202, + "coverage": 
6810, + "covered": 5603, + "covering": 9462, + "covers": 7745, + "covert": 40134, + "coveted": 36119, + "covington": 43196, + "cow": 5076, + "cow": 9706, + "cowan": 42699, + "coward": 33729, + "cowards": 48972, + "cowboy": 25833, + "cowboy": 13657, + "cowboys": 11864, + "cowboysnation": 43082, + "cowell": 39015, + "cowgirl": 47090, + "coworker": 30727, + "coworkers": 30821, + "coworking": 36034, + "cows": 15204, + "cowx": 23831, + "cox": 25784, + "cox": 11597, + "coy": 12765, + "coy": 15742, + "coyi": 48407, + "coyle": 45348, + "coyne": 44729, + "coyo": 16614, + "coyote": 26586, + "coyotes": 30423, + "coys": 19736, + "coz": 39922, + "coz": 14282, + "cozy": 14873, + "cp": 7905, + "cp": 9130, + "cpa": 30095, + "cpac": 45731, + "cpc": 26125, + "cpd": 23402, + "cpec": 48007, + "cpfc": 27553, + "cpi": 41795, + "cpl": 26852, + "cpr": 25134, + "cps": 27078, + "cpt": 32892, + "cpu": 27700, + "cq": 48910, + "cq": 48417, + "cr": 1075, + "cr": 3483, + "cra": 1184, + "cra": 18362, + "crab": 27382, + "crab": 11574, + "crabs": 30908, + "crack": 11222, + "crack": 10334, + "crackdown": 29527, + "cracked": 19826, + "cracker": 16298, + "crackers": 26200, + "cracking": 13008, + "cracks": 21426, + "cracy": 24749, + "cradle": 29384, + "crae": 40438, + "craf": 10873, + "craft": 7717, + "craft": 3588, + "craftbeer": 12371, + "crafted": 12424, + "crafthour": 42324, + "crafting": 26886, + "crafts": 33276, + "crafts": 13383, + "craftsman": 39528, + "craftsmanship": 36682, + "crafty": 32317, + "craic": 46962, + "craig": 14042, + "craig": 8061, + "craigslist": 43865, + "cram": 29809, + "cramer": 44592, + "cramps": 46106, + "cran": 7761, + "cranberries": 49361, + "cranberry": 23824, + "crane": 14626, + "cranes": 26979, + "crani": 45674, + "crank": 46246, + "crank": 32283, + "cranston": 44340, + "crap": 11899, + "crappy": 30475, + "crash": 37150, + "crash": 5033, + "crashed": 16638, + "crashes": 17013, + "crashing": 24991, + "crat": 46696, + "crate": 24756, + "crater": 22663, + "crates": 30172, + "cratic": 32175, + "crative": 39999, + "crats": 43056, + "crave": 33397, + "craven": 33625, + "craving": 18344, + "cravings": 34476, + "craw": 7400, + "crawfish": 42772, + "crawford": 15918, + "crawl": 20106, + "crawler": 41012, + "crawley": 42316, + "crawling": 37066, + "cray": 24184, + "cray": 27032, + "crayon": 41801, + "crayons": 43508, + "craz": 25776, + "craze": 30637, + "craziest": 32690, + "craziness": 46436, + "crazy": 17540, + "crazy": 3578, + "crc": 25618, + "cre": 798, + "cre": 17762, + "cream": 23184, + "cream": 3867, + "creams": 41447, + "creamy": 17206, + "crease": 48441, + "create": 30949, + "create": 3380, + "created": 4080, + "creates": 10361, + "creati": 6714, + "creating": 5524, + "creation": 38293, + "creation": 6900, + "creations": 17411, + "creative": 15237, + "creative": 4450, + "creatives": 29352, + "creativity": 9636, + "creator": 10173, + "creators": 17981, + "creature": 14317, + "creatures": 13938, + "cred": 7314, + "cred": 22377, + "credenti": 29487, + "credentials": 33422, + "credi": 21097, + "credibility": 34984, + "credible": 32983, + "credit": 21467, + "credit": 3900, + "credited": 32480, + "credits": 10654, + "creds": 43462, + "cree": 33961, + "cree": 36014, + "creed": 18845, + "creek": 26120, + "creek": 5526, + "creep": 8153, + "creep": 26084, + "creeper": 38662, + "creeping": 29697, + "creeps": 45135, + "creepy": 11943, + "creighton": 42823, + "creme": 22681, + "creole": 45632, + "crepe": 38611, + "crescent": 18211, + "cress": 39124, + "crest": 35985, + "crest": 15760, + "crested": 36656, + 
"crete": 8584, + "crew": 21560, + "crew": 3462, + "crewe": 43284, + "crews": 10463, + "cri": 1621, + "cri": 38962, + "crib": 23271, + "cric": 4328, + "cricke": 19098, + "cricket": 21859, + "cricket": 5373, + "cricketer": 28439, + "cricketers": 43986, + "cried": 15290, + "cries": 19769, + "crime": 13872, + "crime": 4896, + "crimea": 28614, + "crimes": 11827, + "crimin": 5874, + "criminal": 30197, + "criminal": 8255, + "criminals": 18783, + "crimson": 19437, + "cringe": 42588, + "cripp": 33588, + "cris": 37818, + "crises": 36403, + "crisis": 5712, + "crisp": 15145, + "crispr": 39784, + "crisps": 35744, + "crispy": 16458, + "criss": 29708, + "cristi": 12699, + "cristian": 48808, + "cristiano": 14807, + "cristina": 33395, + "cristo": 38315, + "crit": 3613, + "crit": 48130, + "criteri": 33627, + "criteria": 24849, + "criterion": 43841, + "criti": 25333, + "critic": 12417, + "critic": 19361, + "critical": 15314, + "critical": 6808, + "critically": 21570, + "criticalrole": 33606, + "criticalrole": 22742, + "criticalrolefanart": 43663, + "critici": 20333, + "criticism": 17405, + "criticize": 46081, + "criticized": 41557, + "critics": 16946, + "critique": 32982, + "critters": 35423, + "crm": 22610, + "cro": 1192, + "cro": 22522, + "croati": 28072, + "croatia": 13323, + "croatian": 34795, + "croc": 43350, + "croche": 35352, + "crochet": 17554, + "crock": 41685, + "crocker": 47843, + "crockett": 48313, + "crocod": 24519, + "crocodile": 24757, + "crocs": 38988, + "croft": 16657, + "croissant": 46011, + "croix": 44735, + "crom": 25082, + "crombie": 46162, + "cromwell": 45345, + "cron": 17361, + "croo": 16443, + "crook": 43744, + "crooked": 48473, + "crooked": 25644, + "crooks": 44226, + "crop": 40751, + "crop": 9955, + "cropped": 31139, + "crops": 16290, + "crore": 18274, + "crores": 37281, + "cros": 16670, + "crosby": 21095, + "cross": 5266, + "cross": 3417, + "crossed": 11731, + "crosses": 20473, + "crossfit": 47214, + "crossfit": 20395, + "crossing": 8673, + "crossings": 43517, + "crossover": 17194, + "crossroads": 27427, + "crossword": 32945, + "crou": 31206, + "crouch": 36506, + "crow": 3138, + "crow": 16019, + "crowd": 12036, + "crowd": 4570, + "crowded": 20182, + "crowdfunding": 17971, + "crowds": 16092, + "crowe": 33560, + "crowley": 32287, + "crown": 22190, + "crown": 6902, + "crowned": 16109, + "crowns": 33229, + "crows": 27134, + "croy": 21676, + "croydon": 27116, + "crs": 28449, + "crt": 43877, + "cru": 1815, + "cru": 29788, + "cruci": 18499, + "crucial": 12396, + "crude": 20677, + "cruel": 16073, + "cruel": 17573, + "cruelty": 20675, + "cruis": 27721, + "cruise": 36425, + "cruise": 6764, + "cruiser": 21394, + "cruises": 19214, + "cruising": 19743, + "crum": 43268, + "crumb": 48327, + "crumb": 39909, + "crumble": 36595, + "crumbs": 35893, + "crun": 17407, + "crunch": 16620, + "crunchy": 31366, + "crusad": 19133, + "crusade": 36846, + "crusader": 40171, + "crusaders": 31319, + "crush": 22296, + "crush": 7610, + "crushed": 18270, + "crusher": 44923, + "crushes": 35844, + "crushing": 20790, + "crust": 23136, + "crusted": 37314, + "cruz": 33689, + "cruz": 8403, + "cry": 2837, + "cry": 6290, + "crying": 6828, + "cryo": 32215, + "cryp": 4865, + "crypt": 37814, + "cryptic": 46925, + "crypto": 8080, + "crypto": 9608, + "cryptocurrencies": 33329, + "cryptocurrency": 12070, + "cryst": 15891, + "crystal": 17387, + "crystal": 6517, + "crystalli": 47551, + "crystals": 18350, + "cs": 11978, + "cs": 2804, + "csa": 26355, + "csc": 41727, + "csc": 37266, + "csd": 36913, + "cse": 41659, + "csg": 47085, + 
"csgo": 28928, + "csi": 41750, + "csi": 28070, + "csk": 43036, + "csm": 40061, + "csn": 46329, + "cso": 43864, + "csp": 39243, + "csr": 32105, + "csr": 24598, + "csrracing": 44193, + "css": 41418, + "css": 19846, + "cst": 17016, + "csu": 35948, + "csu": 31261, + "csw": 41031, + "ct": 3381, + "ct": 1122, + "cta": 28397, + "ctar": 27842, + "ctc": 34123, + "cte": 31410, + "cted": 2910, + "ctf": 35250, + "cthulhu": 41064, + "cting": 7985, + "ction": 17578, + "ction": 1569, + "ctions": 7021, + "ctive": 9313, + "cto": 17445, + "ctor": 8108, + "ctr": 35602, + "ctr": 18481, + "cts": 6936, + "ctto": 25118, + "ctu": 20834, + "cture": 17668, + "ctv": 21213, + "ctv": 27590, + "cu": 729, + "cu": 11224, + "cuando": 40388, + "cub": 16938, + "cub": 19972, + "cuba": 11576, + "cuban": 15536, + "cube": 47753, + "cube": 11353, + "cubes": 31413, + "cubic": 48159, + "cubic": 29614, + "cubs": 9858, + "cuck": 26364, + "cuckoo": 38062, + "cucu": 16705, + "cucumber": 19787, + "cucumbers": 48065, + "cud": 42684, + "cudd": 12820, + "cuddle": 19568, + "cuddles": 24001, + "cuddling": 29696, + "cuddly": 36208, + "cudi": 48713, + "cue": 13424, + "cuer": 39506, + "cues": 35719, + "cuff": 34693, + "cuff": 22414, + "cufflinks": 43938, + "cuffs": 37221, + "cuis": 9938, + "cuisine": 10605, + "cuk": 34838, + "cul": 1877, + "cula": 35935, + "cular": 10940, + "culars": 45719, + "cule": 31066, + "cules": 18984, + "culin": 14772, + "culinary": 16466, + "cull": 21880, + "cull": 42061, + "cullen": 25973, + "culmin": 33778, + "culo": 36305, + "culprit": 41593, + "cult": 11965, + "cultiv": 16781, + "cultivate": 42983, + "cultivated": 48901, + "cultivation": 41539, + "cultur": 20780, + "cultural": 34908, + "cultural": 6753, + "culturally": 36783, + "culture": 20197, + "culture": 3673, + "cultured": 40176, + "cultures": 19552, + "culver": 42103, + "cum": 20142, + "cum": 27119, + "cumb": 10858, + "cumber": 15309, + "cumberbatch": 27541, + "cumberland": 28747, + "cumbri": 32010, + "cumbria": 17953, + "cumin": 42285, + "cumple": 47050, + "cumul": 42961, + "cumulative": 47610, + "cumulus": 46313, + "cun": 12423, + "cun": 29532, + "cunningham": 25321, + "cuomo": 25681, + "cup": 5059, + "cup": 1937, + "cupboard": 32074, + "cupcake": 17025, + "cupcakes": 12747, + "cupid": 34885, + "cuppa": 28077, + "cups": 11463, + "cur": 1092, + "cur": 33073, + "curated": 20341, + "curator": 20753, + "curb": 21931, + "curd": 38881, + "cure": 36758, + "cure": 9088, + "cured": 26248, + "cures": 38204, + "curfew": 48826, + "curi": 12640, + "curing": 44169, + "curiosity": 21583, + "curious": 9865, + "curl": 24306, + "curled": 43734, + "curling": 18543, + "curls": 24340, + "curly": 20795, + "curran": 40999, + "currant": 43501, + "curren": 6142, + "currencies": 23530, + "currency": 7853, + "current": 3653, + "currently": 3792, + "currents": 35450, + "curric": 16201, + "curriculum": 17947, + "currie": 39385, + "curry": 49285, + "curry": 8051, + "curse": 18479, + "cursed": 26408, + "cursor": 46546, + "curt": 38137, + "curtain": 17223, + "curtains": 30223, + "curti": 39925, + "curtis": 13808, + "curve": 15792, + "curved": 25789, + "curves": 22814, + "curvy": 45788, + "cus": 2736, + "cusa": 47414, + "cuse": 37950, + "cush": 43731, + "cushi": 15333, + "cushion": 20853, + "cushions": 34163, + "cussion": 16658, + "cussions": 46853, + "cust": 20900, + "custard": 26516, + "custo": 4376, + "custody": 16176, + "custom": 2662, + "custom": 4996, + "custome": 41323, + "customer": 24035, + "customer": 5102, + "customerexperience": 45167, + "customers": 5528, + "customerservice": 
40611, + "customiz": 41793, + "customizable": 48253, + "customization": 48244, + "customize": 32179, + "customized": 23229, + "customs": 16880, + "cut": 10511, + "cut": 3032, + "cute": 16031, + "cute": 2242, + "cuteness": 19342, + "cuter": 27151, + "cutest": 8032, + "cuth": 44328, + "cutie": 10733, + "cuties": 40939, + "cuties": 23420, + "cutiesaturday": 41883, + "cutler": 40428, + "cutlery": 49073, + "cutout": 45016, + "cuts": 7435, + "cutt": 27338, + "cutt": 47647, + "cutter": 19719, + "cutters": 44783, + "cutting": 7266, + "cuz": 9215, + "cv": 13531, + "cv": 13947, + "cvs": 29603, + "cw": 10652, + "cw": 11065, + "cwc": 19179, + "cwgc": 48527, + "cws": 45186, + "cx": 44457, + "cx": 14283, + "cy": 1470, + "cy": 1678, + "cyber": 5830, + "cyber": 10210, + "cybercrime": 41772, + "cybermonday": 36578, + "cyberpunk": 36896, + "cybersecurity": 10581, + "cyborg": 36650, + "cycl": 9791, + "cycle": 19083, + "cycle": 5072, + "cycled": 31055, + "cycles": 14605, + "cycli": 12201, + "cycling": 26353, + "cycling": 6321, + "cyclist": 20686, + "cyclists": 20303, + "cyclo": 18122, + "cyclone": 48094, + "cyclone": 20917, + "cyclones": 34669, + "cylin": 18569, + "cylinder": 22092, + "cylinders": 48888, + "cymb": 36677, + "cymru": 24005, + "cyn": 14324, + "cynthi": 41994, + "cynthia": 23748, + "cyp": 14809, + "cypress": 25347, + "cypri": 36481, + "cyprus": 15263, + "cyril": 36028, + "cyrus": 14204, + "cystic": 46131, + "cyto": 31864, + "cz": 22898, + "cz": 22921, + "cze": 12152, + "czech": 43151, + "czech": 16141, + "cé": 36454, + "cé": 18317, + "d": 67, + "d": 323, + "da": 925, + "da": 1140, + "daa": 32642, + "daan": 44814, + "dab": 10413, + "dab": 22900, + "dac": 16222, + "dac": 27478, + "daca": 28477, + "dach": 34166, + "dachsh": 41641, + "dachshund": 42720, + "dad": 4346, + "dad": 2639, + "dada": 31325, + "daddy": 29466, + "daddy": 6546, + "dade": 23299, + "dades": 28289, + "dads": 12741, + "dae": 23358, + "dae": 15422, + "daener": 46934, + "daes": 47282, + "daesh": 35047, + "daf": 9972, + "daf": 36704, + "daffodils": 44769, + "daft": 36347, + "dag": 11434, + "dag": 25650, + "dagger": 34251, + "dah": 16976, + "dah": 11776, + "dahl": 45816, + "dahl": 22621, + "dahlia": 41768, + "dai": 13559, + "dai": 10632, + "dail": 14676, + "dailies": 21260, + "daily": 6689, + "daily": 2873, + "dailynews": 43466, + "dailys": 43160, + "dailysketch": 46738, + "daim": 40421, + "dain": 32222, + "dain": 28315, + "daipur": 47631, + "dair": 19998, + "dair": 42078, + "dairy": 25243, + "dairy": 10302, + "dairyfree": 49366, + "dais": 10502, + "daisi": 39947, + "daisies": 40654, + "daisy": 39310, + "daisy": 12865, + "dak": 6999, + "dak": 16095, + "dakar": 31137, + "dakota": 38522, + "dakota": 12358, + "dal": 2476, + "dal": 5601, + "dala": 42675, + "dalai": 41222, + "dalail": 35169, + "dalailama": 35849, + "dale": 11533, + "dale": 4677, + "dalejr": 38207, + "dales": 29031, + "daley": 28544, + "dalgo": 43614, + "dali": 36735, + "dali": 25703, + "dalit": 45432, + "dall": 43631, + "dalla": 16772, + "dallas": 27414, + "dallas": 5759, + "dallascowboys": 33016, + "dalmati": 44275, + "dalton": 21488, + "daly": 24873, + "dam": 1880, + "dam": 4926, + "damage": 6822, + "damaged": 13568, + "damages": 28842, + "damaging": 20610, + "damas": 23345, + "damascus": 25396, + "dame": 10069, + "dames": 44548, + "dami": 17783, + "damian": 43307, + "damian": 25375, + "damien": 25090, + "dammit": 31057, + "damn": 37409, + "damn": 4451, + "damned": 28428, + "damon": 48503, + "damon": 18244, + "damp": 26520, + "dams": 37680, + "dan": 2257, + "dan": 2284, + 
"dana": 44834, + "dana": 13777, + "danao": 38598, + "danc": 3945, + "dance": 10619, + "dance": 2724, + "danced": 32891, + "dancehall": 33300, + "dancer": 11400, + "dancers": 13153, + "dances": 24083, + "dancing": 33280, + "dancing": 6226, + "dand": 12593, + "dandelion": 38903, + "dandy": 31932, + "dane": 19330, + "danes": 47477, + "dang": 4283, + "dang": 14992, + "danger": 20083, + "danger": 11212, + "dangerous": 7350, + "dangerously": 35012, + "dangers": 23726, + "dangle": 39907, + "dani": 3001, + "dani": 17009, + "daniel": 7859, + "daniel": 4981, + "daniela": 44466, + "danielle": 30396, + "danielle": 15292, + "danielpadilla": 34702, + "daniels": 16146, + "danish": 15467, + "dank": 31849, + "dann": 11951, + "danny": 14950, + "danny": 7621, + "dano": 29703, + "dans": 16241, + "dant": 48097, + "dant": 28237, + "dante": 21911, + "danube": 44594, + "dany": 47816, + "dao": 36099, + "dap": 12149, + "dap": 38034, + "daph": 24591, + "daphne": 31687, + "dapl": 34478, + "dapp": 46857, + "dapper": 26071, + "daq": 25381, + "dar": 1377, + "dar": 6242, + "dara": 17064, + "darby": 34366, + "darcy": 32916, + "dare": 14833, + "dare": 9863, + "daredevil": 28849, + "dares": 42973, + "dareto": 46794, + "dari": 16292, + "dari": 14552, + "daria": 45622, + "daries": 18184, + "daring": 28166, + "dario": 33918, + "darius": 32606, + "darje": 49089, + "dark": 5724, + "dark": 3144, + "darker": 18737, + "darkest": 25898, + "darkness": 10521, + "darling": 13048, + "darlings": 39961, + "darlington": 34565, + "darn": 26059, + "darrell": 33522, + "darren": 20263, + "darren": 12275, + "darry": 29200, + "darryl": 35359, + "darshan": 34564, + "dart": 14001, + "dart": 19841, + "darth": 41304, + "darth": 23164, + "dartmoor": 31477, + "dartmouth": 29667, + "darts": 15246, + "darwin": 43013, + "darwin": 20926, + "daryl": 45607, + "daryl": 24532, + "das": 9940, + "das": 7359, + "dash": 13858, + "dash": 10206, + "dashboard": 27679, + "dashi": 12876, + "dashing": 33825, + "dat": 1717, + "dat": 9445, + "data": 14876, + "data": 2281, + "datab": 11941, + "database": 14678, + "databases": 48384, + "datac": 27329, + "datacenter": 40133, + "datasci": 14496, + "datascience": 15748, + "dataviz": 28138, + "date": 34300, + "date": 1524, + "dated": 13564, + "dates": 7228, + "dating": 8534, + "dation": 15311, + "datlantic": 34270, + "dato": 36075, + "dats": 48674, + "dau": 3162, + "dau": 33828, + "daugh": 42523, + "daughter": 3944, + "daughters": 13585, + "daun": 29470, + "dav": 3700, + "dav": 46488, + "davao": 31502, + "dave": 10089, + "dave": 5077, + "daven": 28350, + "davenport": 34624, + "davey": 33391, + "davi": 1732, + "david": 4640, + "david": 2259, + "davidbowie": 44448, + "davido": 35989, + "davids": 46695, + "davidson": 13166, + "davies": 13120, + "davin": 43187, + "davis": 24426, + "davis": 5536, + "davison": 43725, + "davos": 31887, + "davy": 41565, + "daw": 5971, + "daw": 24404, + "dawg": 18660, + "dawgs": 26431, + "dawn": 30590, + "dawn": 7689, + "dawson": 18611, + "dax": 29458, + "day": 1405, + "day": 575, + "daya": 38165, + "daybreak": 33862, + "daycare": 36363, + "daydream": 41587, + "dayin": 20332, + "daylight": 20809, + "dayo": 29856, + "dayo": 46605, + "dayof": 16272, + "dayofthe": 38043, + "days": 1161, + "daysof": 12379, + "daysofcode": 36537, + "daysto": 29886, + "daystogo": 42198, + "dayswild": 42052, + "daytime": 22830, + "dayton": 35729, + "dayton": 20262, + "daytona": 16335, + "dayweekend": 44526, + "dayz": 35949, + "daz": 15449, + "daz": 43844, + "daze": 33591, + "dazz": 17149, + "dazzle": 41164, + "dazzling": 
28821, + "db": 19100, + "db": 8128, + "dbacks": 31175, + "dbs": 40558, + "dbz": 49226, + "dc": 5074, + "dc": 2743, + "dca": 49107, + "dcc": 33747, + "dccomics": 17610, + "dcfc": 35526, + "dci": 35336, + "dcs": 42878, + "dcu": 42647, + "dd": 1353, + "dd": 3766, + "dda": 35202, + "ddad": 39049, + "dday": 32689, + "dday": 26243, + "ddc": 48513, + "ddd": 24183, + "dddd": 35362, + "dden": 5013, + "dder": 9300, + "dders": 24827, + "ddi": 44450, + "ddin": 17175, + "dding": 48101, + "dding": 8974, + "ddings": 49106, + "ddington": 29238, + "ddle": 17633, + "ddle": 8357, + "ddled": 38392, + "ddles": 33901, + "ddleston": 25647, + "ddling": 30981, + "ddlovato": 28244, + "ddos": 46463, + "ddr": 26027, + "dds": 48334, + "ddu": 43836, + "ddy": 14981, + "ddy": 7876, + "de": 561, + "de": 654, + "dea": 18477, + "deacon": 29155, + "dead": 3906, + "dead": 2747, + "deadliest": 40811, + "deadline": 47209, + "deadline": 8458, + "deadlines": 44959, + "deadly": 10756, + "deadpool": 21471, + "deaf": 28229, + "deaf": 18358, + "deal": 7249, + "deal": 2696, + "dealer": 15218, + "dealers": 21697, + "dealership": 32096, + "dealing": 13138, + "deals": 4469, + "dealt": 30101, + "dean": 13807, + "dean": 5828, + "deandre": 43635, + "deans": 46852, + "dear": 15696, + "dear": 3817, + "dearest": 24880, + "dearly": 31880, + "deas": 34715, + "death": 7163, + "death": 2767, + "deaths": 12253, + "deau": 12399, + "deaux": 19883, + "deb": 2987, + "deb": 25687, + "debat": 32082, + "debate": 5196, + "debates": 19239, + "debating": 23472, + "debbie": 47186, + "debbie": 16735, + "debit": 32410, + "debor": 16738, + "deborah": 40997, + "deborah": 22150, + "debra": 33233, + "debris": 19208, + "debt": 8932, + "debts": 38770, + "debu": 9790, + "debun": 33123, + "debut": 42608, + "debut": 4085, + "debuted": 25215, + "debuting": 34817, + "debuts": 17044, + "dec": 3063, + "dec": 4628, + "deca": 33428, + "decad": 29914, + "decade": 11099, + "decadent": 41716, + "decades": 10488, + "decal": 26678, + "decals": 37606, + "decan": 40677, + "decat": 35334, + "decath": 47455, + "decatur": 38540, + "decay": 22703, + "dece": 3534, + "deceased": 30035, + "december": 3864, + "decent": 10698, + "decentr": 28960, + "decentralized": 38485, + "decep": 33529, + "deception": 33046, + "deci": 2262, + "decide": 8447, + "decided": 4939, + "decides": 17269, + "deciding": 22513, + "decision": 5575, + "decisions": 9903, + "decisive": 28690, + "deck": 24885, + "deck": 6943, + "decked": 39096, + "decker": 21449, + "decks": 23968, + "decl": 7091, + "decla": 10739, + "declan": 42341, + "declar": 18040, + "declaration": 19714, + "declare": 19856, + "declared": 13845, + "declares": 23641, + "declaring": 33273, + "decline": 15084, + "declined": 28911, + "declines": 40478, + "declining": 29221, + "deco": 26412, + "deco": 16422, + "decor": 5148, + "decor": 6928, + "decorate": 23651, + "decorated": 15917, + "decorating": 16968, + "decoration": 16029, + "decorations": 19158, + "decorative": 19289, + "decre": 12284, + "decrease": 24703, + "decreased": 33913, + "decreasing": 43763, + "decree": 43327, + "ded": 16744, + "ded": 1241, + "dedic": 4701, + "dedicate": 27610, + "dedicated": 6770, + "dedication": 10188, + "dedly": 36204, + "deduc": 22799, + "dee": 5268, + "dee": 6705, + "deed": 30260, + "deeds": 24516, + "deejay": 48304, + "deejay": 44511, + "deemed": 28102, + "deen": 26456, + "deen": 12912, + "deep": 5462, + "deep": 3383, + "deepak": 45528, + "deeper": 15224, + "deepest": 22245, + "deephouse": 35684, + "deepi": 19371, + "deepika": 34120, + "deepikap": 29903, + 
"deepikapadukone": 30646, + "deeplear": 22181, + "deeplearning": 24362, + "deeply": 11449, + "deer": 19454, + "deer": 8700, + "deere": 32901, + "dees": 12547, + "deets": 35537, + "def": 2044, + "def": 11649, + "defam": 35670, + "defamation": 42741, + "default": 21650, + "defe": 4148, + "defeat": 8477, + "defeated": 8927, + "defeating": 22594, + "defeats": 16317, + "defect": 44013, + "defects": 37485, + "defen": 3619, + "defence": 30307, + "defence": 9659, + "defend": 21970, + "defend": 11397, + "defended": 27161, + "defender": 10618, + "defenders": 20063, + "defending": 13098, + "defends": 20134, + "defense": 45875, + "defense": 6021, + "defenseman": 43714, + "defenses": 49198, + "defensive": 10824, + "defi": 17244, + "defiance": 36186, + "defiant": 47597, + "defibrill": 47684, + "defic": 18022, + "defici": 23387, + "deficiency": 30685, + "deficit": 20156, + "defin": 3188, + "define": 14919, + "defined": 15278, + "defines": 28218, + "defining": 20504, + "definite": 40793, + "definitely": 4824, + "definition": 11405, + "definitive": 25298, + "defl": 31467, + "deforestation": 41330, + "defstar": 36427, + "defy": 39148, + "defying": 38496, + "deg": 38498, + "degra": 28939, + "degradation": 44468, + "degre": 4653, + "degree": 7119, + "degrees": 8000, + "deh": 35582, + "dei": 33833, + "dei": 23279, + "deir": 42948, + "deity": 42574, + "deja": 46902, + "dek": 23901, + "dekalb": 37775, + "del": 1233, + "del": 2003, + "dela": 37986, + "delaney": 31528, + "delav": 23706, + "delavin": 40477, + "delavin": 40776, + "delavinkisses": 40631, + "delaware": 17547, + "delay": 12955, + "delay": 10934, + "delayed": 14567, + "delaying": 43781, + "delays": 11232, + "dele": 7922, + "dele": 33431, + "delec": 38615, + "delectable": 45500, + "deleg": 8046, + "delegate": 27259, + "delegates": 14623, + "delegation": 14632, + "delete": 19204, + "deleted": 16588, + "deleting": 41857, + "delft": 42749, + "delgado": 49182, + "delhi": 26723, + "delhi": 5717, + "deli": 1932, + "deli": 18601, + "delia": 33193, + "deliber": 18316, + "deliberate": 38271, + "deliberately": 35163, + "delic": 13366, + "delicacy": 49181, + "delicate": 18768, + "delici": 19993, + "delicious": 3959, + "deliciously": 39589, + "deliciousness": 42819, + "delight": 46165, + "delight": 13073, + "delighted": 5943, + "delightful": 15513, + "delights": 25330, + "deline": 18797, + "delines": 13562, + "delish": 25093, + "deliver": 19561, + "deliver": 7396, + "delivered": 7278, + "deliveries": 29336, + "delivering": 9943, + "delivers": 11753, + "delivery": 5619, + "dell": 24381, + "dell": 10242, + "della": 22986, + "delle": 35963, + "deloit": 29428, + "deloitte": 38667, + "dels": 48636, + "delta": 32250, + "delta": 8768, + "delu": 18779, + "delusional": 48059, + "delux": 13709, + "deluxe": 14056, + "delve": 46008, + "dely": 15040, + "dem": 3251, + "dem": 7825, + "dema": 40268, + "dema": 45046, + "deman": 48366, + "demand": 13072, + "demand": 5650, + "demanded": 33699, + "demanding": 17099, + "demands": 14241, + "demar": 46566, + "demarcus": 47873, + "demb": 35930, + "demdebate": 43973, + "deme": 25143, + "demean": 37376, + "demen": 12604, + "dementi": 46028, + "dementia": 14047, + "demetri": 39553, + "demi": 32879, + "demi": 14480, + "demise": 28756, + "demo": 2930, + "demo": 7380, + "democr": 3573, + "democracy": 7758, + "democrat": 15431, + "democratic": 9149, + "democrats": 8865, + "demographic": 31308, + "demol": 19382, + "demolished": 26537, + "demolition": 22237, + "demon": 5635, + "demon": 12085, + "demonetisation": 41338, + "demonic": 46920, + 
"demons": 18388, + "demonstr": 8579, + "demonstrate": 22231, + "demonstrated": 29477, + "demonstrates": 24806, + "demonstrating": 22107, + "demonstration": 16722, + "demonstrations": 33964, + "demonstrators": 46450, + "demos": 19304, + "demp": 22490, + "dempsey": 30188, + "dems": 10989, + "demsin": 42664, + "demsinphilly": 43091, + "den": 1177, + "den": 1181, + "dena": 32431, + "denali": 48076, + "dence": 3370, + "dency": 11659, + "dend": 37447, + "dends": 43985, + "dene": 45128, + "dened": 19571, + "deng": 43098, + "deng": 41788, + "dengue": 41932, + "denham": 39180, + "deni": 21995, + "denial": 25716, + "denied": 15780, + "denies": 19565, + "denim": 13606, + "denis": 47630, + "denis": 18750, + "denise": 45900, + "denise": 20899, + "denmark": 13268, + "dennis": 32738, + "dennis": 10534, + "denny": 26808, + "denomin": 41016, + "dens": 16533, + "dense": 19353, + "density": 22431, + "dent": 3593, + "dent": 1258, + "dental": 24635, + "dental": 8382, + "dentally": 10346, + "dented": 21923, + "denti": 4418, + "dential": 5459, + "dentist": 17816, + "dentistry": 25754, + "dently": 28817, + "denton": 23567, + "dents": 1517, + "denver": 27847, + "denver": 8569, + "deny": 18679, + "denying": 32771, + "denzel": 42503, + "deo": 26406, + "deo": 12121, + "deodor": 47639, + "deol": 41902, + "deon": 31466, + "deon": 16079, + "dep": 6079, + "dep": 24370, + "depar": 10794, + "depart": 5343, + "depart": 30649, + "departed": 32541, + "departing": 26902, + "department": 5744, + "departments": 29523, + "departs": 38998, + "departure": 17850, + "depe": 36118, + "depend": 13894, + "depend": 27371, + "dependence": 40243, + "dependent": 23280, + "depending": 23673, + "depends": 20497, + "depic": 11307, + "depicted": 34637, + "depicting": 24970, + "depiction": 31071, + "depicts": 29340, + "deple": 38504, + "deplo": 9356, + "deplor": 39232, + "deploy": 26944, + "deployed": 20009, + "deploying": 42212, + "deployment": 20183, + "depo": 14276, + "depor": 36110, + "deport": 23389, + "deportation": 36617, + "deported": 39320, + "deportes": 47878, + "depos": 21266, + "deposit": 16775, + "deposits": 30740, + "depot": 12589, + "depp": 24941, + "depre": 7107, + "depress": 38869, + "depressed": 23269, + "depressing": 29235, + "depression": 10023, + "depri": 28587, + "depriv": 45809, + "deprivation": 47810, + "deprived": 39140, + "dept": 9201, + "depth": 10350, + "depths": 28855, + "depu": 6912, + "deputies": 24914, + "deputy": 7932, + "der": 839, + "der": 801, + "dera": 20696, + "derail": 48502, + "derby": 13904, + "derby": 7177, + "derbyshire": 22147, + "derdale": 21513, + "dere": 5701, + "dere": 44194, + "dered": 3776, + "derek": 22461, + "derek": 11205, + "derel": 46728, + "derer": 11289, + "derers": 20882, + "deri": 34573, + "derick": 33908, + "dering": 6076, + "deriv": 33458, + "derived": 26461, + "derland": 35488, + "derman": 29740, + "dermatology": 48051, + "dern": 30086, + "dero": 37203, + "dero": 34026, + "derrick": 21798, + "derry": 45777, + "derry": 20535, + "ders": 37307, + "ders": 1923, + "derson": 12677, + "dery": 17172, + "des": 6797, + "des": 1437, + "desai": 35316, + "desc": 13866, + "descen": 32318, + "descend": 26004, + "descend": 46241, + "descendants": 36323, + "descending": 36620, + "descent": 19375, + "desch": 49209, + "descri": 4637, + "describe": 10967, + "described": 14671, + "describes": 13678, + "describing": 24239, + "descrip": 41832, + "description": 13951, + "descriptions": 40653, + "desde": 42218, + "dese": 27195, + "deser": 3659, + "desert": 45776, + "desert": 7301, + "deserted": 41560, + 
"deserve": 7043, + "deserved": 10061, + "deserves": 9079, + "deserving": 26615, + "desh": 25320, + "desh": 7448, + "deshi": 42769, + "desi": 6772, + "desi": 26635, + "desig": 1250, + "design": 8359, + "design": 1681, + "designated": 24119, + "designation": 41155, + "designed": 4486, + "designer": 35640, + "designer": 5728, + "designers": 12720, + "designing": 13467, + "designs": 6747, + "designthinking": 32450, + "desirable": 32368, + "desire": 11858, + "desired": 28631, + "desires": 27598, + "desk": 11937, + "desk": 6550, + "desks": 41014, + "desktop": 14345, + "desmond": 27821, + "desol": 41258, + "desp": 3642, + "despair": 28097, + "desper": 10144, + "desperate": 15072, + "desperately": 21993, + "despic": 32442, + "despicable": 37158, + "despite": 5325, + "dess": 7096, + "dess": 10001, + "dessert": 9753, + "desserts": 22948, + "desses": 43913, + "dest": 6540, + "dest": 4549, + "destin": 4934, + "destination": 32191, + "destination": 9179, + "destinations": 16981, + "destined": 28525, + "destiny": 39875, + "destiny": 10867, + "destro": 8287, + "destroy": 8308, + "destroy": 11930, + "destroyed": 9965, + "destroyer": 25291, + "destroying": 19613, + "destroys": 27634, + "destruc": 22945, + "destruction": 14281, + "destructive": 29591, + "det": 28966, + "det": 15366, + "deta": 1914, + "detached": 26252, + "detail": 7657, + "detailed": 12609, + "detailing": 23163, + "details": 2353, + "detained": 20260, + "dete": 5606, + "detec": 17991, + "detect": 22744, + "detected": 26988, + "detecting": 41290, + "detection": 16220, + "detective": 13672, + "detectives": 27994, + "detector": 27689, + "detectors": 45063, + "detention": 16908, + "deter": 10742, + "deter": 47458, + "detergent": 46726, + "deterior": 28512, + "determin": 8325, + "determination": 17410, + "determine": 16768, + "determined": 14371, + "determines": 42192, + "determining": 39884, + "deth": 38375, + "deto": 39710, + "deton": 39335, + "detour": 31211, + "detox": 22459, + "detri": 47951, + "detro": 6210, + "detroit": 19404, + "detroit": 7073, + "detta": 45438, + "dette": 35750, + "deu": 21457, + "deuce": 45332, + "deus": 37625, + "deut": 14970, + "deutsch": 30389, + "deutsche": 32760, + "deutschland": 36878, + "deux": 47089, + "dev": 2797, + "dev": 3670, + "deva": 45179, + "devan": 37072, + "devast": 12913, + "devastated": 29865, + "devastating": 19280, + "devastation": 42452, + "devel": 1820, + "develop": 1966, + "develop": 7708, + "developed": 8763, + "developer": 10929, + "developers": 13248, + "developing": 8131, + "development": 2855, + "developmental": 29347, + "developments": 17393, + "develops": 29895, + "deven": 45537, + "devgn": 29871, + "devi": 12926, + "devi": 20717, + "deviant": 25593, + "deviantart": 26046, + "device": 8163, + "devices": 9067, + "devil": 8894, + "devil": 8043, + "deville": 34329, + "devils": 11683, + "devin": 31193, + "devin": 20996, + "devine": 33019, + "devlin": 48040, + "devo": 11861, + "devo": 43444, + "devon": 16205, + "devon": 10046, + "devops": 21504, + "devos": 40646, + "devote": 37777, + "devoted": 24561, + "devotees": 39759, + "devotion": 25821, + "devotional": 35456, + "devs": 27374, + "dew": 31952, + "dew": 16358, + "dewey": 40399, + "dex": 10030, + "dex": 13790, + "dexpo": 42502, + "dexter": 45049, + "dexter": 22781, + "dey": 11829, + "dez": 23190, + "dez": 8122, + "df": 12908, + "df": 10468, + "dfc": 41903, + "dfs": 32880, + "dfw": 20439, + "dg": 2394, + "dg": 9742, + "dgate": 41684, + "dge": 4016, + "dge": 1360, + "dged": 11830, + "dgeon": 45655, + "dgers": 8733, + "dges": 5432, + "dging": 
9565, + "dh": 6669, + "dh": 9960, + "dha": 11629, + "dha": 27377, + "dhabi": 22349, + "dhaka": 32877, + "dham": 29635, + "dham": 30838, + "dhan": 12542, + "dhan": 28569, + "dhanush": 26162, + "dhanush": 36200, + "dhanushkraja": 29266, + "dhar": 12397, + "dharma": 30536, + "dhary": 28706, + "dhawan": 44699, + "dhe": 29706, + "dheim": 44280, + "dhi": 31553, + "dhi": 26166, + "dho": 37834, + "dhoni": 25698, + "dhru": 40257, + "dhry": 39960, + "dhs": 26849, + "dhu": 32387, + "di": 570, + "di": 1618, + "dia": 7351, + "dia": 3357, + "diab": 15954, + "diabe": 19167, + "diabete": 43826, + "diabetes": 10319, + "diabetic": 30230, + "diablo": 23931, + "diag": 6851, + "diagno": 7736, + "diagnose": 44429, + "diagnosed": 16979, + "diagnosis": 15715, + "diagnostic": 26351, + "diagnostics": 37723, + "diagram": 22697, + "dial": 18416, + "dial": 11381, + "dialo": 30709, + "dialog": 48945, + "dialogue": 11288, + "dialogues": 40330, + "dialysis": 44798, + "diam": 4347, + "diameter": 27189, + "diamon": 8873, + "diamond": 18535, + "diamond": 6235, + "diamonds": 12687, + "dian": 16021, + "dian": 4998, + "diana": 12803, + "diane": 15855, + "dianne": 42299, + "dians": 21041, + "diaper": 34382, + "diapers": 39659, + "diar": 25932, + "diaries": 15541, + "diary": 10380, + "dias": 22137, + "dias": 29354, + "diaspora": 28390, + "diaz": 17688, + "dic": 1404, + "dic": 6717, + "dicap": 30023, + "dicaprio": 30755, + "dice": 14406, + "dick": 14413, + "dick": 9554, + "dickens": 33421, + "dict": 45360, + "dict": 15159, + "dictat": 26156, + "dictator": 27399, + "dictatorship": 37989, + "dictionary": 19699, + "did": 1861, + "did": 1335, + "diddy": 33527, + "didi": 34396, + "didier": 45614, + "didn": 2376, + "didnt": 13057, + "dido": 31725, + "didyou": 12295, + "didyouknow": 12506, + "die": 3150, + "die": 2082, + "diec": 27729, + "diecast": 37936, + "died": 3622, + "diego": 30940, + "diego": 6306, + "diem": 45571, + "dience": 33686, + "dient": 27231, + "dier": 29702, + "dier": 16394, + "dies": 20104, + "dies": 1862, + "diesel": 46312, + "diesel": 10591, + "diest": 45739, + "diet": 21295, + "diet": 6582, + "dietary": 29009, + "dietrich": 47005, + "diets": 35173, + "dif": 18656, + "dif": 48731, + "diff": 44073, + "diff": 20331, + "diffe": 1967, + "differ": 34620, + "differen": 14903, + "difference": 4731, + "differences": 14003, + "different": 2731, + "differenti": 21729, + "differential": 34027, + "differentiate": 49032, + "differently": 18325, + "diffic": 6140, + "difficult": 7405, + "difficulties": 23468, + "difficulty": 25245, + "diffu": 31603, + "diffuser": 49400, + "dig": 1831, + "dig": 9887, + "dige": 17820, + "digest": 20413, + "digestion": 40533, + "digestive": 32304, + "digg": 43240, + "digger": 35919, + "diggin": 48466, + "digging": 14971, + "digi": 15627, + "digi": 39361, + "digimon": 44181, + "digit": 14899, + "digit": 27472, + "digital": 4704, + "digital": 2794, + "digitalart": 16987, + "digitalhealth": 32190, + "digitalindia": 46630, + "digitally": 27543, + "digitalmarketing": 15299, + "digitaltransformation": 20047, + "digiti": 25935, + "digits": 31710, + "digni": 45532, + "dignit": 39497, + "dignity": 17744, + "digo": 35701, + "digs": 26877, + "dih": 43089, + "dii": 32755, + "dijk": 44444, + "dik": 38854, + "dik": 37747, + "dike": 42683, + "dil": 7643, + "dil": 17942, + "dile": 25428, + "dilemma": 29787, + "dilig": 30664, + "dill": 12318, + "dill": 27206, + "dillon": 21056, + "dilu": 45242, + "dim": 19576, + "dim": 17523, + "dime": 24443, + "dimen": 10935, + "dimension": 20479, + "dimensional": 25252, + 
"dimensions": 25086, + "diment": 43500, + "dimes": 44888, + "dimini": 37459, + "dimit": 22250, + "dimitri": 48840, + "dimp": 38853, + "din": 1462, + "din": 5673, + "dina": 36815, + "dinah": 30903, + "dine": 20951, + "dine": 12989, + "diner": 16963, + "dinesh": 48341, + "ding": 7545, + "ding": 796, + "dinger": 45580, + "dingh": 48064, + "dings": 5473, + "dington": 24804, + "dinho": 47370, + "dini": 20196, + "dining": 8658, + "dinner": 27548, + "dinner": 2571, + "dinners": 33570, + "dino": 9692, + "dino": 14077, + "dinosa": 18955, + "dinosaur": 15095, + "dinosaurs": 20387, + "dio": 3779, + "dio": 1521, + "dioce": 20763, + "diocese": 27091, + "dion": 42899, + "dion": 16250, + "dior": 23655, + "dios": 37563, + "dious": 27417, + "dioxide": 38102, + "dip": 19918, + "dip": 11343, + "dipl": 8490, + "diplo": 38115, + "diplom": 11169, + "diploma": 21251, + "diplomacy": 23798, + "diplomat": 32828, + "diplomatic": 23782, + "diplomats": 44126, + "dipped": 30610, + "dipper": 49317, + "dipping": 33544, + "dips": 37522, + "dir": 4251, + "dir": 8478, + "dire": 38355, + "dire": 25664, + "direc": 1534, + "direct": 43224, + "direct": 6016, + "directed": 8392, + "directing": 21817, + "direction": 15923, + "direction": 5407, + "directional": 38687, + "directioner": 48042, + "directioners": 22055, + "directions": 16440, + "directive": 40630, + "directly": 9701, + "director": 20337, + "director": 2681, + "directorial": 45327, + "directors": 11940, + "directory": 25272, + "directs": 34349, + "directv": 48652, + "dirk": 28171, + "dirt": 31415, + "dirt": 11795, + "dirty": 20127, + "dirty": 7615, + "dis": 1518, + "dis": 6112, + "disa": 3882, + "disab": 47380, + "disabilities": 17350, + "disability": 48986, + "disability": 13261, + "disabled": 13613, + "disadvantaged": 40577, + "disagree": 23199, + "disapp": 5384, + "disappear": 21148, + "disappear": 25173, + "disappearance": 35929, + "disappeared": 23139, + "disappearing": 35819, + "disappears": 44406, + "disappo": 7605, + "disappoint": 25446, + "disappointed": 13794, + "disappointing": 21941, + "disappointment": 23884, + "disappoints": 48545, + "disappro": 48276, + "disar": 42971, + "disaster": 9072, + "disasters": 26976, + "disastrous": 35790, + "disc": 1472, + "disc": 10712, + "discar": 40532, + "discarded": 45197, + "discer": 49140, + "dischar": 22671, + "discharge": 32485, + "disci": 9559, + "discip": 38951, + "discipl": 10467, + "disciples": 39366, + "disciplinary": 20232, + "discipline": 18903, + "disciplines": 42032, + "discla": 40248, + "disclaimer": 46465, + "disclo": 17481, + "disclose": 46379, + "disclosed": 30905, + "disclosure": 26502, + "disco": 2475, + "disco": 11964, + "discography": 47545, + "discomfort": 48054, + "discord": 23582, + "discoun": 18515, + "discount": 7638, + "discounted": 20993, + "discounts": 18186, + "discoura": 45850, + "discourse": 29441, + "discover": 10539, + "discover": 4834, + "discovered": 6986, + "discoveries": 29308, + "discovering": 17967, + "discovers": 29719, + "discovery": 40491, + "discovery": 8027, + "discre": 20616, + "discrimin": 11721, + "discrimination": 14775, + "discs": 29270, + "discu": 1984, + "discus": 41828, + "discuss": 4312, + "discussed": 11300, + "discusses": 8116, + "discussing": 5900, + "discussion": 5060, + "discussions": 13806, + "dise": 4262, + "disease": 5336, + "diseases": 12035, + "disen": 46468, + "disgrace": 29877, + "disgraceful": 44146, + "disgu": 9793, + "disguise": 27803, + "disguised": 37149, + "disgusted": 41977, + "disgusting": 16218, + "dish": 11039, + "dish": 4531, + "disha": 42498, + 
"dishes": 11412, + "dishon": 30777, + "dishu": 44728, + "dishwasher": 40524, + "disin": 19484, + "disinfe": 48050, + "disintegr": 49275, + "disk": 17970, + "dislike": 30796, + "dism": 30836, + "dism": 38821, + "dismant": 36557, + "dismiss": 43287, + "dismissal": 42068, + "dismissed": 30087, + "dismisses": 45238, + "disney": 6729, + "disney": 4696, + "disneyland": 39481, + "disneyland": 13661, + "disneyworld": 28469, + "diso": 26305, + "disobe": 42841, + "dison": 19310, + "disorder": 12635, + "disorders": 17114, + "disp": 11073, + "dispar": 24633, + "disparities": 45122, + "dispat": 28652, + "dispatch": 26306, + "dispen": 19077, + "dispenser": 40116, + "disper": 34499, + "displa": 9326, + "displac": 17718, + "displaced": 22817, + "displacement": 37931, + "display": 4456, + "displayed": 18967, + "displaying": 26468, + "displays": 15648, + "dispo": 13651, + "dispon": 38872, + "disponible": 46130, + "dispos": 45177, + "disposable": 37275, + "disposal": 28231, + "dispro": 32927, + "dispropor": 40354, + "disproportion": 45492, + "disregard": 43869, + "disrespect": 34055, + "disrespectful": 41723, + "disru": 13763, + "disrup": 14641, + "disrupt": 25214, + "disrupted": 46674, + "disrupting": 42419, + "disruption": 19635, + "disruptive": 31554, + "diss": 10766, + "diss": 35688, + "dissec": 43879, + "dissemin": 40463, + "dissent": 45154, + "disser": 25560, + "dissertation": 29448, + "dissi": 25088, + "dissol": 27398, + "dissuper": 33461, + "dist": 5479, + "dist": 12116, + "distance": 7964, + "distances": 37078, + "distant": 18949, + "distill": 41586, + "distilled": 49179, + "distillery": 22200, + "distin": 11892, + "distinct": 25056, + "distinction": 28183, + "distinctive": 25486, + "distingui": 15053, + "distinguish": 45418, + "distinguished": 16513, + "distor": 23781, + "distortion": 43690, + "distr": 11885, + "distract": 39309, + "distracted": 24049, + "distraction": 32039, + "distress": 26866, + "distressed": 37515, + "distri": 5987, + "distribu": 6138, + "distribute": 32313, + "distributed": 16419, + "distributing": 35216, + "distribution": 10484, + "distributor": 28354, + "distributors": 44240, + "distric": 3208, + "district": 46683, + "district": 3506, + "districts": 17565, + "distur": 11732, + "disturb": 33018, + "disturb": 39449, + "disturbance": 42416, + "disturbed": 29967, + "disturbing": 21476, + "disupdates": 45667, + "dit": 5752, + "dit": 2524, + "dita": 47965, + "ditch": 43715, + "ditch": 19291, + "dited": 40392, + "diti": 2363, + "dition": 16452, + "dition": 3015, + "ditional": 4322, + "ditions": 4503, + "dito": 43705, + "dits": 49374, + "dity": 16436, + "dium": 2903, + "div": 5293, + "div": 14869, + "diva": 13605, + "divas": 23534, + "dive": 26042, + "dive": 9058, + "diver": 13119, + "diver": 22094, + "divergence": 48735, + "divergent": 36132, + "divers": 30241, + "divers": 27038, + "diverse": 11464, + "diversi": 24475, + "diversion": 38457, + "diversity": 35634, + "diversity": 6257, + "diverted": 41049, + "dives": 13893, + "divi": 8375, + "divid": 31337, + "divide": 18842, + "divided": 18689, + "dividend": 32067, + "dividends": 45146, + "dividing": 45605, + "divin": 21838, + "divine": 46919, + "divine": 10976, + "diving": 9886, + "divinity": 39754, + "divisi": 39196, + "division": 5378, + "divisional": 40912, + "divisions": 33715, + "divor": 13543, + "divorce": 17060, + "divorced": 39437, + "divya": 47767, + "diwali": 18218, + "dix": 45838, + "dix": 27620, + "dixie": 24484, + "dixit": 28279, + "dixon": 16086, + "diy": 28472, + "diy": 7845, + "diya": 36459, + "diz": 32740, + 
"dized": 36232, + "dizz": 40239, + "dizzy": 35464, + "dj": 3761, + "dj": 3723, + "djan": 35338, + "django": 46498, + "dji": 35284, + "dji": 28379, + "djing": 36113, + "djo": 19432, + "djoker": 42721, + "djokernole": 42830, + "djokovic": 27944, + "djs": 18117, + "dk": 20702, + "dk": 16196, + "dl": 12558, + "dl": 9373, + "dlc": 19079, + "dle": 11057, + "dle": 3287, + "dled": 23494, + "dler": 40279, + "dles": 7890, + "dless": 14997, + "dley": 12808, + "dling": 18221, + "dly": 3069, + "dm": 19070, + "dm": 4667, + "dma": 42903, + "dman": 18826, + "dmc": 28991, + "dmit": 31607, + "dmitry": 48326, + "dms": 19955, + "dmv": 27508, + "dmx": 45255, + "dn": 11552, + "dn": 7459, + "dna": 8790, + "dnb": 35422, + "dnc": 20237, + "dnd": 11678, + "dnr": 37051, + "dns": 39245, + "dnt": 26795, + "do": 639, + "do": 818, + "doa": 48332, + "dob": 29640, + "doba": 35605, + "dobbs": 43006, + "dobson": 46888, + "doc": 3009, + "doc": 7251, + "doch": 25101, + "dock": 17311, + "dock": 8997, + "docked": 46784, + "docker": 31152, + "docking": 40845, + "docks": 24091, + "docs": 15157, + "doctor": 7872, + "doctor": 5547, + "doctoral": 23649, + "doctorate": 39134, + "doctors": 9705, + "doctorwho": 12996, + "doctr": 28497, + "doctrine": 35612, + "docu": 4433, + "document": 29293, + "document": 15121, + "documentaries": 44209, + "documentary": 7881, + "documentation": 31560, + "documented": 22310, + "documenting": 37876, + "documents": 14105, + "dod": 13847, + "dod": 30187, + "dodd": 36748, + "dodge": 31263, + "dodge": 12093, + "dodgeball": 43244, + "dodger": 31641, + "dodgers": 12422, + "dodgy": 37727, + "doe": 13296, + "does": 2397, + "does": 1897, + "doesn": 2503, + "doesnt": 17937, + "dof": 8277, + "doff": 20193, + "dofficial": 42516, + "dog": 4326, + "dog": 1929, + "dogcelebration": 41819, + "dogday": 27475, + "doge": 42187, + "dogg": 20749, + "doggie": 32237, + "doggo": 42155, + "doggy": 26359, + "doglo": 40733, + "dogre": 40030, + "dogrescue": 44158, + "dogs": 42182, + "dogs": 3255, + "dogsoftwitter": 19415, + "doh": 23581, + "doha": 20908, + "doherty": 31774, + "doi": 36361, + "doin": 15412, + "doing": 37408, + "doing": 1960, + "doit": 32272, + "doit": 28109, + "doj": 25700, + "dojo": 35901, + "dok": 40547, + "dok": 41034, + "doka": 46528, + "dol": 2287, + "dol": 19170, + "dola": 38005, + "dolan": 27200, + "dolby": 42414, + "dolce": 30033, + "dolce": 30661, + "dole": 41040, + "doll": 27031, + "doll": 9286, + "dollar": 35092, + "dollar": 7474, + "dollars": 10669, + "dolls": 15090, + "dolly": 43281, + "dolly": 23821, + "dolom": 37137, + "dolores": 40741, + "dolph": 8900, + "dolph": 22257, + "dolphin": 42963, + "dolphin": 16464, + "dolphins": 14002, + "dom": 2164, + "dom": 1919, + "domain": 15492, + "domaine": 48744, + "domains": 36358, + "dome": 8515, + "dome": 9827, + "domen": 37584, + "domest": 21936, + "domestic": 28189, + "domestic": 9043, + "domin": 4361, + "dominance": 30546, + "dominant": 20565, + "dominate": 21431, + "dominated": 23048, + "dominates": 34043, + "dominating": 29303, + "domination": 30919, + "domingo": 24882, + "dominic": 39007, + "dominic": 19095, + "dominican": 22934, + "dominion": 27155, + "domino": 30752, + "dominos": 39770, + "domo": 44293, + "doms": 30126, + "don": 1067, + "don": 847, + "dona": 26789, + "donal": 42375, + "donald": 5990, + "donald": 4335, + "donaldson": 37783, + "donaldtrump": 6652, + "donat": 36384, + "donate": 6429, + "donated": 8705, + "donates": 26960, + "donating": 12621, + "donation": 7924, + "donations": 9928, + "doncaster": 38008, + "doncaster": 25352, + 
"doncasterisgreat": 47333, + "done": 5136, + "done": 1700, + "donegal": 24172, + "donesia": 41281, + "donet": 33724, + "donetsk": 33999, + "dong": 26242, + "dong": 31478, + "dongha": 28365, + "donghae": 28945, + "donia": 24014, + "donkey": 21415, + "donkeys": 44644, + "donna": 9158, + "donne": 30897, + "donnein": 38308, + "donneinarte": 40193, + "donnell": 35118, + "donnelly": 39070, + "donnie": 47058, + "donnie": 30609, + "donny": 37291, + "donny": 32887, + "dono": 14840, + "donor": 18013, + "donors": 17887, + "donovan": 21499, + "dons": 22127, + "dont": 8094, + "dont": 4632, + "donut": 18471, + "donuts": 13970, + "doo": 4543, + "doo": 11643, + "doodle": 9388, + "doodled": 41030, + "doodles": 22156, + "doodling": 37548, + "dooley": 47609, + "doom": 23263, + "doom": 14344, + "doomed": 33251, + "doomsday": 41791, + "doon": 36612, + "doop": 33886, + "door": 7188, + "door": 2489, + "doors": 4228, + "doorstep": 19533, + "doorway": 46575, + "dop": 42381, + "dop": 31722, + "dope": 42587, + "dope": 10094, + "doping": 30285, + "dopp": 21774, + "doppelg": 45216, + "doppler": 42540, + "dor": 2766, + "dor": 8695, + "dora": 18104, + "dorado": 32350, + "dorchester": 32656, + "dore": 39423, + "dores": 34323, + "dorf": 17296, + "dori": 49270, + "doria": 43186, + "dorian": 44016, + "doris": 24285, + "dork": 36206, + "dorm": 24263, + "doro": 15498, + "doro": 37389, + "dorothy": 20805, + "dors": 31240, + "dorset": 42109, + "dorset": 16047, + "dorsey": 41607, + "dortmund": 24290, + "dory": 36135, + "dos": 44258, + "dos": 5474, + "dose": 11497, + "doses": 37873, + "dossier": 46042, + "dost": 44222, + "dot": 7473, + "dot": 7004, + "dota": 23085, + "dotcom": 12443, + "dote": 31202, + "dothis": 47864, + "dotnet": 43124, + "dotorg": 46587, + "dots": 19019, + "dotted": 47950, + "dou": 1756, + "dou": 23608, + "doub": 19631, + "double": 13013, + "double": 3200, + "doubled": 24948, + "doubleheader": 34668, + "doubles": 12539, + "doubling": 36850, + "doubt": 37071, + "doubt": 8671, + "doubts": 30894, + "douche": 44292, + "doug": 20271, + "doug": 10758, + "dough": 15785, + "dough": 14983, + "doughnut": 32555, + "doughnuts": 31124, + "dougie": 46317, + "dougla": 9140, + "douglas": 10065, + "douglass": 45692, + "doun": 44785, + "dov": 38856, + "dova": 26551, + "dove": 27511, + "dove": 18281, + "dover": 43019, + "dover": 14683, + "doves": 47067, + "dow": 8022, + "dow": 10688, + "dowell": 27344, + "down": 1833, + "down": 1136, + "downe": 46501, + "downed": 35814, + "downer": 42522, + "downers": 43739, + "downey": 29429, + "downfall": 48702, + "downhill": 27387, + "downing": 28140, + "download": 35076, + "download": 3794, + "downloadable": 49105, + "downloaded": 22961, + "downloading": 30519, + "downloads": 26481, + "downpour": 39034, + "downpours": 40160, + "downs": 10706, + "downside": 41937, + "downstairs": 28174, + "downstream": 43822, + "downtime": 41964, + "downton": 45023, + "downton": 42668, + "downtown": 18230, + "downtown": 5061, + "downward": 37430, + "dowski": 43556, + "dox": 44786, + "dox": 14510, + "doyle": 17728, + "doyou": 27256, + "doz": 31106, + "dozen": 16401, + "dozens": 17883, + "dp": 23820, + "dp": 6465, + "dprint": 46644, + "dprinting": 16194, + "dprk": 47920, + "dps": 34288, + "dq": 28741, + "dr": 1084, + "dr": 1701, + "dra": 1114, + "dra": 7402, + "drac": 20168, + "dracing": 41253, + "dracula": 25405, + "draf": 37426, + "draft": 30624, + "draft": 5198, + "drafted": 19129, + "drafting": 33528, + "drafts": 29194, + "drag": 8452, + "drag": 12463, + "dragged": 27884, + "dragging": 37069, + "dragon": 
9187, + "dragon": 5471, + "dragonball": 40959, + "dragoncon": 47802, + "dragonfly": 32824, + "dragons": 10203, + "dragrace": 40762, + "drags": 45368, + "drain": 23347, + "drain": 19467, + "drainage": 25953, + "drained": 44630, + "drains": 43638, + "drainthe": 47337, + "drake": 32504, + "drake": 8958, + "dral": 7503, + "dram": 6937, + "dram": 32170, + "drama": 5055, + "dramas": 33467, + "dramati": 43512, + "dramatic": 11240, + "dramatically": 24495, + "drank": 21712, + "draped": 49113, + "drastic": 43159, + "drastically": 35478, + "drau": 18621, + "draw": 17675, + "draw": 4001, + "drawer": 23219, + "drawers": 38975, + "drawing": 36996, + "drawing": 3610, + "drawings": 13397, + "drawn": 8893, + "draws": 12043, + "dray": 25562, + "drayton": 49044, + "drc": 21434, + "dre": 960, + "dre": 14584, + "dread": 17412, + "dread": 31403, + "dreaded": 47227, + "dreadful": 35846, + "dreality": 48367, + "dream": 4595, + "dream": 2984, + "dreambig": 46495, + "dreamcast": 47226, + "dreamed": 27984, + "dreamer": 25692, + "dreamers": 27194, + "dreaming": 11662, + "dreamliner": 49143, + "dreams": 4405, + "dreamt": 43743, + "dreamteam": 40090, + "dreamy": 23517, + "dred": 10903, + "dredge": 48783, + "dren": 29068, + "dren": 47309, + "drenched": 46378, + "dres": 48852, + "dres": 44697, + "dresden": 34836, + "dress": 12622, + "dress": 2595, + "dressage": 36144, + "dressed": 6559, + "dresser": 26346, + "dresses": 8184, + "dressing": 6348, + "drew": 18792, + "drew": 5281, + "drex": 33985, + "drey": 48271, + "dri": 1203, + "dri": 28833, + "drian": 36870, + "dribb": 42153, + "dric": 23448, + "dridge": 22956, + "drie": 40170, + "dried": 16037, + "drier": 39877, + "dries": 33857, + "drif": 33585, + "drift": 18194, + "drifting": 30276, + "drill": 11626, + "drilled": 46338, + "drilling": 18634, + "drills": 24378, + "drin": 3375, + "drin": 47133, + "drink": 14131, + "drink": 3979, + "drinking": 5778, + "drinklocal": 45998, + "drinks": 6732, + "drip": 24050, + "dripping": 38787, + "dris": 35804, + "drive": 11402, + "drive": 2620, + "driven": 9314, + "driver": 27563, + "driver": 4383, + "driverless": 46769, + "drivers": 7384, + "drives": 11441, + "driveway": 26273, + "driving": 37800, + "driving": 4161, + "drizzle": 28240, + "drm": 39674, + "dro": 1494, + "dro": 12442, + "drogba": 49199, + "droid": 38016, + "drome": 9157, + "dron": 43898, + "dron": 23360, + "drone": 33557, + "drone": 9397, + "drones": 14006, + "droo": 30715, + "drool": 41554, + "drooling": 44360, + "drop": 16407, + "drop": 3387, + "dropbox": 47216, + "dropped": 6792, + "dropping": 8339, + "drops": 6437, + "dros": 47033, + "drou": 38558, + "drought": 13935, + "drove": 13753, + "drow": 21159, + "drown": 28571, + "drowned": 34005, + "drowning": 24618, + "drs": 21257, + "dru": 2275, + "dru": 49048, + "drug": 20601, + "drug": 5600, + "drugs": 8021, + "druid": 40297, + "drum": 13353, + "drum": 8698, + "drummer": 13618, + "drummers": 46191, + "drumming": 35480, + "drummond": 42213, + "drums": 11690, + "drun": 15488, + "drunk": 37398, + "drunk": 8232, + "drunken": 28196, + "drupal": 46481, + "drush": 43009, + "drwho": 48342, + "dry": 13544, + "dry": 4501, + "dryer": 24425, + "drying": 23203, + "ds": 3361, + "ds": 646, + "dsa": 47607, + "dsb": 47168, + "dsb": 14257, + "dsburg": 47237, + "dsc": 37240, + "dsd": 45383, + "dsley": 40740, + "dslr": 33740, + "dsm": 39502, + "dson": 40310, + "dsp": 45291, + "dss": 41580, + "dstv": 35027, + "dt": 13104, + "dt": 7427, + "dthe": 13863, + "dtla": 31885, + "dtm": 42407, + "dts": 46233, + "du": 691, + "du": 3686, + "dua": 25244, 
+ "dual": 39739, + "dual": 5347, + "duane": 38946, + "dub": 14526, + "dub": 13144, + "duba": 5485, + "dubai": 32599, + "dubai": 5985, + "dubbed": 27740, + "dublin": 20707, + "dublin": 6145, + "dubnation": 47329, + "dubois": 48046, + "dubrov": 46709, + "dubrovnik": 48724, + "dubs": 27013, + "dubstep": 38303, + "dubu": 43257, + "duc": 979, + "duc": 36446, + "ducati": 28570, + "ducation": 17197, + "duce": 3660, + "duchess": 21713, + "duck": 12708, + "duck": 6910, + "ducks": 11202, + "duct": 26829, + "dude": 48087, + "dude": 5710, + "dudes": 14449, + "dudley": 27324, + "due": 2887, + "duel": 27143, + "dues": 37646, + "duet": 25457, + "duf": 38713, + "duff": 38071, + "duff": 21934, + "duffy": 23599, + "dug": 22743, + "dug": 21000, + "dugg": 40523, + "duggan": 46169, + "dugout": 36831, + "duh": 26716, + "dui": 29693, + "duk": 14160, + "duke": 18402, + "duke": 7732, + "dukes": 27914, + "dul": 6738, + "dulce": 44872, + "dulil": 32565, + "dulkar": 47980, + "dull": 19433, + "dulu": 28865, + "duluth": 32109, + "dulwich": 47343, + "dum": 13400, + "dum": 11564, + "dumb": 15901, + "dumb": 12464, + "dumbass": 38980, + "dummies": 40899, + "dummy": 34246, + "dump": 12655, + "dump": 17146, + "dumped": 23768, + "dumping": 31707, + "dumplings": 35495, + "dumps": 45804, + "dumpster": 45467, + "dun": 2616, + "dun": 18284, + "dunbar": 41453, + "duncan": 31084, + "duncan": 13502, + "dundal": 38185, + "dundas": 39300, + "dundee": 18619, + "dune": 32833, + "dune": 28208, + "dunedin": 40121, + "dunes": 23526, + "dung": 33712, + "dungeon": 28812, + "dungeon": 22931, + "dungeons": 42572, + "dungeonsand": 34970, + "dungeonsanddragons": 35497, + "dunham": 42501, + "duni": 43454, + "dunk": 17222, + "dunkin": 48022, + "dunkin": 36415, + "dunkirk": 46928, + "dunks": 48977, + "dunlop": 34753, + "dunn": 19185, + "dunne": 38538, + "dunno": 24502, + "duo": 8696, + "dup": 36805, + "dup": 10445, + "duper": 44850, + "duplex": 41186, + "duplic": 28992, + "dupont": 35994, + "dur": 4355, + "dur": 23230, + "dura": 28173, + "dura": 47382, + "durability": 43671, + "durable": 22285, + "duran": 28185, + "durango": 44443, + "durant": 24861, + "duras": 27518, + "duration": 31663, + "durban": 24474, + "dure": 19108, + "durga": 38456, + "durham": 26765, + "durham": 14335, + "during": 1590, + "dus": 9931, + "dusa": 28546, + "dusk": 19708, + "dust": 29723, + "dust": 8349, + "dusted": 38274, + "duster": 46280, + "dustin": 42423, + "dustin": 21235, + "dusting": 41756, + "dusty": 22029, + "dut": 32625, + "dutch": 22277, + "dutch": 7991, + "duter": 21624, + "duterte": 22371, + "duties": 19603, + "dutt": 30081, + "dutton": 42771, + "duty": 6458, + "duval": 42459, + "duvet": 48006, + "dux": 28562, + "dv": 4288, + "dv": 26265, + "dvd": 7170, + "dvds": 36655, + "dvn": 29811, + "dvr": 29210, + "dw": 8455, + "dw": 19997, + "dwar": 13487, + "dwarf": 22643, + "dwayne": 31395, + "dwell": 27549, + "dwell": 18755, + "dwelling": 37098, + "dwight": 22473, + "dwp": 46976, + "dwts": 30220, + "dwyer": 43878, + "dx": 22717, + "dx": 15679, + "dy": 1444, + "dy": 907, + "dyce": 48325, + "dye": 37159, + "dye": 15997, + "dyed": 24906, + "dyer": 29495, + "dyes": 39874, + "dying": 5115, + "dyk": 12142, + "dyke": 32632, + "dylan": 21004, + "dylan": 9900, + "dyn": 44289, + "dyn": 30669, + "dynam": 5735, + "dynamic": 10057, + "dynamics": 14329, + "dynamite": 29003, + "dynamo": 28281, + "dynasty": 14593, + "dyne": 42756, + "dyou": 11484, + "dyour": 22525, + "dys": 11022, + "dys": 38384, + "dysfunction": 36865, + "dysfunctional": 40757, + "dysle": 33681, + "dyslexia": 43199, 
+ "dyson": 34475, + "dyssey": 17435, + "dystop": 28276, + "dystopian": 38915, + "dz": 24421, + "dz": 22913, + "dé": 25466, + "dü": 46948, + "dÃŃ": 46988, + "e": 68, + "e": 324, + "ea": 2150, + "ea": 8100, + "eable": 20693, + "each": 31442, + "each": 2416, + "eachother": 40792, + "ead": 42556, + "ead": 45523, + "eae": 27446, + "eag": 3743, + "eager": 21551, + "eagerly": 30094, + "eagle": 20207, + "eagle": 7517, + "eagles": 6920, + "eal": 48872, + "ealing": 40484, + "eames": 49072, + "eamon": 45954, + "ean": 13327, + "ear": 1055, + "ear": 8373, + "earbuds": 47807, + "eared": 9127, + "earl": 30573, + "earl": 14235, + "earle": 40292, + "earlier": 4297, + "earliest": 22097, + "early": 15840, + "early": 2090, + "earn": 33977, + "earn": 8465, + "earned": 8898, + "earnest": 45422, + "earning": 14550, + "earnings": 15912, + "earns": 16760, + "earp": 35296, + "earphones": 44905, + "earring": 28664, + "earrings": 9136, + "ears": 9861, + "eart": 7086, + "earth": 5184, + "earth": 3475, + "earthand": 34229, + "earthandclouds": 34480, + "earthday": 19481, + "earthquake": 10060, + "earthquakes": 32895, + "earthy": 47139, + "earts": 38824, + "eas": 5740, + "ease": 13574, + "easier": 8817, + "easiest": 26314, + "easily": 8197, + "easing": 44825, + "easport": 42251, + "east": 5022, + "east": 2602, + "eastbound": 28827, + "eastbourne": 38455, + "eastenders": 23545, + "easter": 14783, + "easter": 4811, + "eastern": 34522, + "eastern": 6311, + "eastman": 48280, + "easton": 29619, + "eastside": 42650, + "eastwood": 28270, + "easy": 18308, + "easy": 3176, + "eat": 5418, + "eat": 3384, + "eaten": 16750, + "eater": 24060, + "eaters": 37645, + "eatery": 46559, + "eating": 4371, + "eatlocal": 42868, + "eaton": 28462, + "eats": 13188, + "eau": 17608, + "eazy": 36536, + "eb": 12283, + "eb": 8677, + "eba": 40889, + "ebay": 34412, + "ebay": 4099, + "eber": 34020, + "ebo": 46635, + "ebola": 15864, + "ebon": 22013, + "ebony": 30651, + "ebook": 13122, + "ebooks": 25774, + "ec": 747, + "ec": 10879, + "eca": 18465, + "ecar": 34500, + "ecb": 26205, + "ecc": 33128, + "eccc": 47401, + "eccentric": 43228, + "eccle": 27494, + "ece": 2163, + "eces": 5905, + "ecg": 45983, + "ech": 15797, + "ech": 31147, + "echel": 41233, + "echo": 17366, + "echo": 13989, + "echoes": 32564, + "eci": 31936, + "eck": 25866, + "eck": 15969, + "ecker": 39661, + "ecker": 40890, + "ecla": 47806, + "eclec": 25114, + "eclectic": 28382, + "eclip": 30841, + "eclipse": 11505, + "eclub": 38983, + "eco": 5106, + "eco": 10077, + "ecofriendly": 43412, + "ecol": 22706, + "ecological": 25127, + "ecology": 18578, + "ecommerce": 15529, + "econ": 26755, + "econ": 21158, + "econom": 2768, + "economic": 36649, + "economic": 5259, + "economical": 48782, + "economically": 39406, + "economics": 12625, + "economies": 27136, + "economist": 18836, + "economists": 43701, + "economy": 5644, + "ecor": 28962, + "ecosystem": 15788, + "ecosystems": 28725, + "ecoun": 27924, + "ecr": 48572, + "ecraft": 11439, + "ecs": 23485, + "ecstasy": 47286, + "ecstatic": 36244, + "ect": 25168, + "ecu": 13087, + "ecu": 32919, + "ecuador": 19813, + "ecz": 43530, + "ed": 843, + "ed": 538, + "eda": 10804, + "edad": 44724, + "eday": 39258, + "edc": 21245, + "edchat": 14702, + "edd": 35431, + "eddi": 42930, + "eddie": 22748, + "eddie": 9517, + "eddy": 25959, + "ede": 29632, + "eded": 19555, + "edel": 20460, + "edelman": 48139, + "eden": 23621, + "eden": 13741, + "eder": 16249, + "edes": 36247, + "edfringe": 27402, + "edg": 35955, + "edgar": 33543, + "edgar": 17914, + "edge": 16914, + "edge": 5461, + 
"edged": 39188, + "edges": 20938, + "edgy": 35393, + "edi": 8750, + "edi": 27148, + "edible": 19795, + "edic": 25184, + "edics": 30641, + "edin": 6524, + "edinburgh": 27574, + "edinburgh": 8068, + "eding": 5742, + "edison": 25846, + "edit": 8239, + "edit": 8013, + "edited": 13945, + "edith": 28597, + "editing": 10178, + "edition": 3062, + "editions": 21664, + "editor": 7661, + "editorial": 12325, + "editors": 19486, + "edits": 24945, + "edm": 37843, + "edm": 13539, + "edmon": 11275, + "edmond": 41581, + "edmonds": 46520, + "edmonton": 37311, + "edmonton": 15058, + "edmun": 36561, + "edmund": 27567, + "edna": 39002, + "edo": 29145, + "edo": 18096, + "edon": 41467, + "edor": 30184, + "edou": 47678, + "edp": 46066, + "eds": 1941, + "edsheeran": 30386, + "edt": 15071, + "edtech": 41825, + "edtech": 15262, + "edu": 11757, + "edu": 11799, + "eduardo": 30604, + "educ": 2200, + "educate": 17563, + "educated": 21447, + "education": 22358, + "education": 2806, + "educational": 10400, + "educator": 19875, + "educators": 15420, + "edwar": 27586, + "edward": 26184, + "edward": 7450, + "edwards": 12627, + "edwin": 48718, + "edwin": 22471, + "edy": 17072, + "edy": 4144, + "ee": 2644, + "ee": 4708, + "eed": 17513, + "eee": 24632, + "eee": 9361, + "eeee": 11696, + "eeee": 17570, + "eeeee": 26938, + "eeeeee": 41407, + "eek": 46591, + "eel": 27462, + "eels": 44416, + "eem": 27236, + "een": 47490, + "een": 21230, + "eer": 35409, + "eer": 31846, + "eera": 36664, + "eerie": 33846, + "ees": 40308, + "eet": 48935, + "eez": 39033, + "ef": 1490, + "ef": 1829, + "efa": 16999, + "eface": 48804, + "efan": 33556, + "efc": 22065, + "efcc": 46087, + "efer": 26199, + "eff": 20548, + "eff": 21715, + "effe": 2808, + "effec": 3943, + "effect": 5436, + "effective": 6837, + "effectively": 17516, + "effectiveness": 26847, + "effects": 7331, + "effic": 36004, + "efficacy": 39937, + "effici": 6670, + "efficiency": 11823, + "efficient": 11334, + "efficiently": 32915, + "effor": 6356, + "effort": 40078, + "effort": 6255, + "effortless": 41639, + "effortlessly": 42320, + "efforts": 6847, + "efish": 35813, + "efl": 27172, + "efron": 48111, + "efs": 7389, + "eg": 8053, + "eg": 14599, + "ega": 41193, + "egan": 42943, + "eger": 46704, + "eger": 22767, + "egg": 13778, + "egg": 5911, + "eggplant": 34906, + "eggs": 7099, + "ego": 34712, + "ego": 14250, + "egos": 43992, + "egre": 27044, + "egret": 42002, + "egy": 5224, + "egyp": 10250, + "egypt": 7267, + "egyptian": 12428, + "eh": 9277, + "eh": 9135, + "eha": 48563, + "ehealth": 48617, + "ehr": 45271, + "ehs": 44648, + "ei": 4006, + "ei": 18264, + "eic": 40251, + "eid": 28038, + "eid": 13979, + "eidmubarak": 46275, + "eiffel": 29720, + "eigh": 13468, + "eight": 7910, + "eighteen": 49316, + "eighth": 21237, + "eighty": 47449, + "eil": 29457, + "eileen": 31468, + "ein": 29944, + "ein": 24524, + "eindhoven": 47172, + "eing": 7702, + "einstein": 20587, + "eira": 47708, + "eis": 13802, + "eisen": 25273, + "eisenhower": 35562, + "either": 6036, + "ej": 19887, + "ej": 25009, + "ejec": 29771, + "ek": 4212, + "ek": 2092, + "el": 544, + "el": 832, + "ela": 11284, + "ela": 3787, + "elab": 38866, + "elabor": 26034, + "elaborate": 33855, + "elaine": 22523, + "elan": 17763, + "elan": 18399, + "eland": 24930, + "eland": 6275, + "elas": 41078, + "elast": 27479, + "elastic": 30282, + "elba": 48598, + "elbow": 21965, + "eld": 5684, + "elder": 11791, + "elder": 14416, + "elderly": 15455, + "elders": 28617, + "eldest": 33503, + "elding": 28223, + "elds": 13466, + "ele": 2084, + "ele": 9766, + "eleague": 36577, 
+ "eleanor": 18604, + "elearning": 29969, + "elec": 1564, + "elec": 38768, + "elect": 15336, + "elected": 8828, + "election": 19312, + "election": 4247, + "electionday": 40540, + "elections": 6949, + "elector": 16465, + "electoral": 19544, + "electr": 3654, + "electra": 48959, + "electri": 23927, + "electric": 19547, + "electric": 5031, + "electrical": 12176, + "electrician": 46422, + "electricity": 10950, + "electrifying": 48843, + "electro": 11648, + "electro": 23244, + "electromagnetic": 46530, + "electron": 33396, + "electronic": 33865, + "electronic": 9273, + "electronica": 43119, + "electronics": 13081, + "eled": 20357, + "elee": 44112, + "eleg": 8075, + "elegance": 19146, + "elegant": 11124, + "elek": 34559, + "elem": 25406, + "element": 14909, + "elementary": 8143, + "elements": 10925, + "elen": 30654, + "elen": 39164, + "elena": 19421, + "eleng": 48180, + "eleph": 7554, + "elephant": 10299, + "elephants": 16871, + "eler": 24646, + "eless": 15244, + "eless": 30837, + "elets": 19400, + "elev": 7921, + "elevate": 26736, + "elevated": 23967, + "elevation": 23826, + "elevator": 19021, + "eleven": 31617, + "eleven": 17795, + "elf": 45961, + "elf": 11924, + "elfie": 39955, + "elg": 28790, + "elgin": 31868, + "eli": 1018, + "eli": 6292, + "elia": 10956, + "elian": 42508, + "elias": 47274, + "elias": 29902, + "elic": 34743, + "elic": 13492, + "elie": 38677, + "elie": 26501, + "elier": 14634, + "elife": 37429, + "elife": 12719, + "eligibility": 34937, + "eligible": 16978, + "elijah": 26065, + "elike": 48913, + "elim": 9296, + "elimin": 11386, + "eliminate": 19655, + "eliminated": 29075, + "eliminating": 36619, + "elimination": 24176, + "elin": 25353, + "elin": 13458, + "eline": 46199, + "eline": 7153, + "eling": 9990, + "elio": 47943, + "elion": 30682, + "elions": 44159, + "eliot": 33326, + "elis": 23411, + "elis": 48021, + "elisa": 25610, + "elisa": 44051, + "elisabeth": 33127, + "elise": 27124, + "elit": 40882, + "elite": 32277, + "elite": 6553, + "elited": 43943, + "elitedangerous": 47138, + "elites": 35975, + "elius": 35623, + "elive": 49338, + "elive": 23505, + "elives": 49174, + "elix": 32926, + "elixir": 42887, + "eliz": 42844, + "eliza": 6132, + "eliza": 29992, + "elizabeth": 22397, + "elizabeth": 7026, + "elk": 34013, + "elk": 21896, + "ell": 826, + "ell": 812, + "ella": 20692, + "ella": 2957, + "elland": 43326, + "ellar": 38443, + "ellas": 37053, + "elle": 12818, + "elle": 4765, + "elled": 13146, + "ellen": 14007, + "ellen": 12312, + "ellenshow": 34812, + "eller": 20927, + "eller": 4465, + "ellers": 19010, + "elles": 24431, + "elli": 3367, + "elli": 6673, + "ellic": 38905, + "ellie": 16769, + "ellier": 44054, + "ellin": 40374, + "elling": 2220, + "ellington": 34477, + "ellini": 43256, + "elliot": 20761, + "elliott": 44456, + "elliott": 13788, + "ellip": 44816, + "ellis": 11553, + "ellison": 32295, + "ello": 2512, + "ellor": 14594, + "ells": 2433, + "ellu": 35560, + "elly": 8041, + "elly": 20355, + "elm": 25199, + "elm": 22082, + "elman": 33622, + "elmer": 45958, + "elmo": 32150, + "elo": 6170, + "elo": 13490, + "elon": 26381, + "elon": 20406, + "elondon": 47377, + "elong": 44363, + "elonmusk": 37076, + "elope": 23367, + "eloqu": 37795, + "elos": 44733, + "elot": 43490, + "elove": 43319, + "elove": 19165, + "elover": 21732, + "elovers": 33946, + "els": 35958, + "els": 1645, + "elsa": 22050, + "else": 18857, + "else": 3344, + "elsewhere": 22906, + "elson": 19624, + "elt": 18692, + "elton": 20758, + "elu": 14208, + "elusive": 28903, + "elves": 29111, + "elvi": 47008, + "elvis": 
47359, + "elvis": 14498, + "elxn": 37726, + "ely": 12189, + "ely": 1273, + "elyn": 29691, + "elyn": 18126, + "em": 908, + "em": 2270, + "ema": 7002, + "ema": 11131, + "emabiggest": 23101, + "emabiggestfans": 29587, + "email": 33537, + "email": 4462, + "emailed": 40470, + "emailmarketing": 40188, + "emails": 12871, + "eman": 24416, + "eman": 36868, + "emancip": 42996, + "emanuel": 35232, + "emb": 3692, + "embar": 8266, + "embaras": 48019, + "embark": 33953, + "embarra": 11382, + "embarrass": 27183, + "embarrassed": 28217, + "embarrassing": 19653, + "embarrassment": 41346, + "embassy": 13598, + "embe": 46041, + "embed": 19703, + "embedded": 22046, + "embelli": 32144, + "embellished": 46992, + "ember": 47049, + "emblem": 21163, + "embo": 23065, + "embr": 35267, + "embrac": 16928, + "embrace": 12118, + "embraced": 35739, + "embraces": 38404, + "embracing": 22196, + "embro": 12550, + "embroi": 18667, + "embroide": 21530, + "embroidered": 22381, + "embroidery": 20823, + "emc": 20897, + "emc": 31602, + "emcee": 42038, + "eme": 22910, + "eme": 21548, + "emea": 40352, + "emed": 11028, + "emen": 22033, + "ement": 40841, + "ement": 2057, + "ements": 11058, + "emer": 3132, + "emer": 25727, + "emerald": 46878, + "emerald": 16980, + "emerge": 22182, + "emerged": 26425, + "emergen": 24096, + "emergence": 39867, + "emergencies": 35759, + "emergency": 44038, + "emergency": 5897, + "emerges": 30801, + "emerging": 38174, + "emerging": 11113, + "emeritus": 35333, + "emerson": 24147, + "emery": 32678, + "emi": 44327, + "emi": 18525, + "emil": 26794, + "emil": 40624, + "emile": 43926, + "emili": 20709, + "emilia": 34238, + "emilio": 39722, + "emily": 14545, + "emily": 7640, + "emin": 17227, + "emin": 23995, + "eminem": 22129, + "eminent": 33779, + "eming": 40398, + "emir": 13337, + "emir": 47613, + "emirates": 47244, + "emirates": 17867, + "emission": 27761, + "emissions": 14172, + "emit": 49043, + "emma": 18177, + "emma": 7445, + "emmanuel": 48045, + "emmanuel": 20411, + "emmett": 45779, + "emmy": 35625, + "emmy": 17089, + "emmys": 21875, + "emo": 3738, + "emo": 19381, + "emoji": 16327, + "emojis": 27870, + "emon": 34406, + "emor": 45034, + "emory": 44274, + "emotion": 17464, + "emotional": 7357, + "emotionally": 24088, + "emotions": 12904, + "emp": 3831, + "emp": 41004, + "empathy": 22420, + "emper": 12522, + "emperor": 13828, + "empha": 16237, + "emphasi": 47176, + "emphasis": 29588, + "empire": 26212, + "empire": 7614, + "empires": 46510, + "emplo": 3409, + "employ": 37290, + "employ": 39626, + "employe": 5037, + "employed": 26567, + "employee": 36631, + "employee": 9560, + "employees": 7377, + "employer": 21296, + "employers": 17647, + "employment": 10959, + "empor": 27386, + "emporium": 48541, + "empower": 13612, + "empower": 17230, + "empowered": 29087, + "empowering": 20086, + "empowerment": 15747, + "empowers": 46206, + "empress": 26656, + "empty": 41203, + "empty": 7893, + "emra": 39259, + "ems": 2858, + "emt": 46360, + "emu": 48149, + "emu": 29296, + "emul": 23272, + "emy": 31076, + "en": 524, + "en": 576, + "ena": 3452, + "enab": 17308, + "enable": 15642, + "enabled": 23666, + "enables": 23417, + "enabling": 23590, + "enam": 41486, + "enamel": 22746, + "enary": 13132, + "enas": 34536, + "enation": 20860, + "enberg": 15658, + "enburg": 28430, + "enc": 33169, + "enca": 37774, + "encan": 30345, + "encapsul": 40874, + "ence": 6495, + "ence": 954, + "enced": 6549, + "ences": 3777, + "enchan": 17290, + "enchanted": 28258, + "enchanting": 32531, + "enchil": 47396, + "enci": 32207, + "encia": 30068, + 
"encies": 18729, + "encing": 10326, + "enclosed": 43243, + "enclosure": 37419, + "encom": 44026, + "encore": 20549, + "encoun": 17309, + "encounter": 13164, + "encountered": 32492, + "encounters": 25399, + "encoura": 6169, + "encourage": 12090, + "encouraged": 20299, + "encouragement": 24959, + "encourages": 23848, + "encouraging": 15875, + "encro": 45822, + "encry": 28600, + "encryp": 42928, + "encrypted": 48710, + "encryption": 31423, + "ency": 3484, + "encyclo": 32104, + "encyclopedia": 38376, + "end": 945, + "end": 806, + "enda": 6735, + "endale": 20290, + "endange": 13990, + "endangered": 14931, + "ende": 11373, + "ende": 40306, + "endeav": 18134, + "endeavor": 40502, + "endeavors": 44394, + "endeavour": 38035, + "ended": 2622, + "endemic": 41241, + "endent": 16265, + "ender": 48106, + "ender": 12383, + "enders": 7418, + "endez": 43850, + "endgame": 23042, + "endi": 31359, + "ending": 2695, + "endings": 36516, + "endish": 38841, + "endless": 12688, + "endlessly": 45145, + "endment": 45894, + "endo": 13476, + "endo": 15830, + "endocr": 36486, + "endof": 40786, + "endome": 46996, + "endon": 48018, + "endor": 8092, + "endorf": 37249, + "endorse": 28819, + "endorsed": 24307, + "endorsement": 21205, + "endorses": 34603, + "endorsing": 46779, + "endow": 45895, + "endra": 22321, + "ends": 1339, + "endthe": 46256, + "endu": 26032, + "endur": 19557, + "endurance": 21027, + "endure": 32419, + "enduring": 30851, + "enduro": 47042, + "ene": 3297, + "ene": 6049, + "ened": 2494, + "eneed": 45137, + "enegger": 33235, + "enei": 48906, + "enemies": 15824, + "enemy": 10310, + "enen": 45113, + "ener": 2244, + "ener": 13600, + "energ": 39451, + "energetic": 24197, + "energi": 23044, + "energies": 42374, + "energized": 48635, + "energy": 14974, + "energy": 2650, + "energye": 32271, + "energyefficiency": 40586, + "eners": 48208, + "enes": 42066, + "eness": 11806, + "enet": 46336, + "enew": 29672, + "enews": 13442, + "eney": 20706, + "enez": 33110, + "enf": 38167, + "enfield": 27808, + "enfor": 10592, + "enforce": 40224, + "enforced": 44597, + "enforcement": 12460, + "eng": 1035, + "eng": 6730, + "enga": 22297, + "engag": 6793, + "engage": 11089, + "engaged": 11475, + "engagement": 7281, + "engaging": 13060, + "enge": 26279, + "enge": 2742, + "engel": 38265, + "engen": 48286, + "enger": 6618, + "engers": 7533, + "engine": 3355, + "engine": 5857, + "engineer": 40151, + "engineer": 8517, + "engineered": 26580, + "engineering": 5273, + "engineers": 11494, + "engines": 14487, + "england": 20904, + "england": 3595, + "english": 15942, + "english": 3469, + "engra": 17560, + "engraved": 29421, + "engraving": 33309, + "engul": 43655, + "engv": 28401, + "enh": 7449, + "enhall": 48781, + "enham": 24592, + "enhan": 26827, + "enhance": 13993, + "enhanced": 16070, + "enhancement": 35601, + "enhances": 38259, + "enhancing": 25986, + "eni": 4395, + "eni": 17538, + "enic": 46780, + "enic": 28292, + "enig": 19754, + "enig": 48730, + "enight": 32848, + "enight": 20640, + "enigma": 34998, + "ening": 1133, + "enium": 34380, + "enix": 25720, + "enjo": 1498, + "enjoy": 12981, + "enjoy": 2218, + "enjoyable": 17444, + "enjoyed": 5045, + "enjoying": 3603, + "enjoyment": 34905, + "enjoys": 17024, + "enka": 43942, + "enko": 25312, + "enlar": 38136, + "enligh": 21364, + "enlighten": 28200, + "enlightened": 44032, + "enlightening": 44005, + "enlightenment": 29255, + "enlisted": 43555, + "enly": 43023, + "enn": 43563, + "enna": 8095, + "enne": 21176, + "enne": 11518, + "ennedy": 46266, + "ennes": 43613, + "enni": 7049, + "ennial": 
14220, + "ennis": 48923, + "ennis": 26309, + "eno": 9429, + "eno": 12843, + "enoch": 47917, + "enor": 13955, + "enormous": 20129, + "enos": 44759, + "enote": 44955, + "enough": 2744, + "enow": 26876, + "enqu": 28417, + "enqui": 22810, + "enquire": 46658, + "enquiries": 31901, + "enquiry": 45141, + "enri": 18915, + "enrich": 20058, + "enrich": 45504, + "enriched": 45166, + "enrichment": 32903, + "enrique": 25489, + "enrol": 44279, + "enroll": 23739, + "enroll": 30366, + "enrolled": 36853, + "enrollment": 24875, + "enroute": 40548, + "ens": 41799, + "ens": 1323, + "ense": 12657, + "ense": 27658, + "ensemble": 14843, + "ensis": 32842, + "ensla": 37535, + "enslaved": 48675, + "ensure": 7492, + "ensures": 29707, + "ensuring": 19403, + "ent": 724, + "ent": 621, + "enta": 17681, + "ental": 32342, + "ental": 6168, + "entary": 9833, + "entation": 37412, + "ente": 17433, + "ente": 9935, + "ented": 3800, + "entennial": 43088, + "enter": 2963, + "enter": 3819, + "entered": 10679, + "entering": 12580, + "enterpri": 7339, + "enterprise": 9220, + "enterprises": 21219, + "enters": 15287, + "entertain": 5566, + "entertain": 23510, + "entertained": 30631, + "entertainer": 28674, + "entertaining": 13897, + "entertainment": 6166, + "entes": 24213, + "enthr": 36202, + "enthusi": 9631, + "enthusiasm": 20525, + "enthusiast": 27153, + "enthusiastic": 22068, + "enthusiasts": 27514, + "enti": 1938, + "ential": 5194, + "entially": 37695, + "entic": 10340, + "entine": 49212, + "enting": 20526, + "entire": 4709, + "entirely": 13911, + "entirety": 43242, + "entit": 15209, + "entities": 38134, + "entitled": 18680, + "entity": 28455, + "ently": 2922, + "ento": 21917, + "ento": 8762, + "entom": 31676, + "entourage": 47893, + "entr": 7129, + "entrance": 9129, + "entrata": 27304, + "entre": 34188, + "entre": 19600, + "entren": 46959, + "entrepre": 4583, + "entreprene": 4789, + "entrepreneu": 26784, + "entrepreneur": 12119, + "entrepreneur": 8033, + "entrepreneurial": 28261, + "entrepreneurs": 11054, + "entrepreneurship": 12858, + "entries": 13766, + "entry": 5362, + "ents": 870, + "entu": 6650, + "enty": 5657, + "enu": 23430, + "env": 32280, + "env": 39207, + "envel": 20052, + "envelope": 27358, + "envir": 3512, + "enviro": 46200, + "environ": 3599, + "environment": 33039, + "environment": 5501, + "environmental": 7831, + "environmentally": 32855, + "environments": 19577, + "envision": 49031, + "envoy": 29263, + "envy": 21017, + "eny": 20482, + "enya": 36509, + "enyc": 39520, + "enz": 25805, + "enz": 31873, + "enza": 25239, + "enzie": 14839, + "enzo": 31543, + "enzyme": 40348, + "enzymes": 47465, + "eo": 16054, + "eo": 11712, + "eoin": 48634, + "eon": 31915, + "eos": 17805, + "ep": 1178, + "ep": 1117, + "epa": 15866, + "epage": 26931, + "epaper": 33584, + "epcot": 32524, + "eper": 43071, + "eph": 45752, + "eph": 41240, + "ephe": 25129, + "epi": 7219, + "epi": 34641, + "epic": 12683, + "epic": 4991, + "epiconetsy": 49222, + "epide": 17382, + "epidemi": 44447, + "epidemic": 21522, + "epile": 23150, + "epilepsy": 29547, + "epilo": 31291, + "epilots": 39766, + "epiph": 40561, + "epiphany": 43251, + "epis": 24616, + "episcop": 28037, + "episcopal": 31221, + "episo": 2708, + "episode": 2965, + "episodes": 11837, + "epit": 21967, + "epitome": 35114, + "epl": 25950, + "epo": 25810, + "epp": 39054, + "epp": 39593, + "eps": 4090, + "epsilon": 40019, + "epsom": 40364, + "epstein": 34688, + "eq": 39331, + "eq": 33692, + "equ": 2563, + "equal": 17373, + "equal": 10433, + "equality": 48981, + "equality": 9578, + "equally": 18172, + 
"equals": 30278, + "equation": 28591, + "equations": 38225, + "eque": 19518, + "equestrian": 24728, + "equi": 8752, + "equili": 43262, + "equine": 33801, + "equinox": 32652, + "equip": 6526, + "equip": 36979, + "equipment": 6893, + "equipo": 45688, + "equipped": 18331, + "equitable": 44717, + "equities": 44015, + "equity": 11293, + "equivalent": 19489, + "er": 517, + "er": 528, + "era": 30548, + "era": 2072, + "erable": 18801, + "erad": 24194, + "eradic": 36346, + "eradicate": 46164, + "eral": 6222, + "eran": 13069, + "eras": 19325, + "eras": 39090, + "erase": 33893, + "erased": 46762, + "erasmus": 38935, + "erc": 5360, + "erc": 32382, + "erd": 25645, + "erdo": 21112, + "erdogan": 24453, + "ere": 17907, + "ere": 642, + "erec": 21526, + "erected": 39365, + "ered": 9097, + "eres": 15751, + "ergon": 38120, + "ergy": 19550, + "eri": 2769, + "eri": 9509, + "eria": 11634, + "erial": 5409, + "eric": 1206, + "eric": 5396, + "erica": 13208, + "erich": 26070, + "erick": 27434, + "erick": 36959, + "erickson": 45286, + "ericsson": 39645, + "eridge": 45408, + "erie": 7005, + "eries": 9099, + "erik": 22805, + "erik": 16532, + "erika": 25531, + "erin": 17532, + "erin": 11333, + "erina": 25176, + "ering": 1785, + "erit": 23335, + "eritrea": 30738, + "erjee": 41665, + "erly": 14380, + "erm": 31649, + "erman": 17990, + "ern": 6992, + "ern": 12140, + "ernal": 20868, + "ernan": 34617, + "ernation": 48796, + "erne": 33930, + "ernest": 23006, + "ernie": 23636, + "ernity": 14653, + "erno": 40812, + "ernst": 30099, + "ero": 3211, + "ero": 3732, + "erock": 38206, + "eron": 32837, + "eroom": 46690, + "eros": 30597, + "erose": 48657, + "erosion": 30174, + "erotic": 30708, + "erotica": 39126, + "erous": 6384, + "eroy": 36461, + "erp": 28268, + "err": 22479, + "err": 25346, + "erra": 48446, + "errands": 45485, + "error": 12097, + "errors": 21195, + "erry": 45236, + "erry": 24124, + "ers": 4840, + "ers": 612, + "ersfc": 37925, + "ership": 2884, + "erson": 25780, + "erson": 6811, + "ert": 40325, + "ert": 3112, + "erta": 32007, + "erton": 26245, + "erts": 12921, + "eru": 36068, + "erun": 41642, + "erup": 17093, + "erupted": 48862, + "eruption": 33705, + "erville": 37557, + "erwin": 43724, + "ery": 12467, + "ery": 1692, + "erz": 38711, + "es": 957, + "es": 542, + "esa": 46834, + "esa": 12489, + "esanders": 23099, + "esc": 3330, + "esc": 28420, + "escal": 15902, + "escap": 11499, + "escape": 32484, + "escape": 7568, + "escaped": 18707, + "escapes": 29916, + "escaping": 21767, + "escar": 39229, + "escence": 37972, + "esch": 46760, + "esch": 41945, + "esco": 32482, + "escobar": 48807, + "escor": 24360, + "escort": 24976, + "escorted": 47667, + "escorts": 48574, + "escu": 36517, + "esday": 19553, + "ese": 18766, + "ese": 2260, + "esg": 41674, + "esh": 17119, + "esh": 13407, + "esha": 28799, + "eshop": 38451, + "eshop": 45570, + "eshopsuk": 39349, + "esi": 30064, + "esis": 12414, + "esk": 19359, + "esl": 26201, + "eso": 29890, + "eso": 28921, + "esof": 17047, + "eson": 46845, + "esp": 3849, + "esp": 13870, + "espa": 37301, + "espan": 41731, + "españa": 41118, + "especially": 4878, + "esper": 29216, + "espino": 46633, + "espionage": 43498, + "espn": 22917, + "espn": 7540, + "espnu": 47747, + "espo": 34381, + "esports": 16035, + "espresso": 17098, + "esq": 47352, + "esqu": 34616, + "esque": 25877, + "ess": 3118, + "ess": 9764, + "essa": 39125, + "essay": 12751, + "essays": 27328, + "esse": 22305, + "essen": 30489, + "essence": 17830, + "essenti": 11163, + "essential": 47264, + "essential": 6895, + "essentially": 30042, + 
"essentials": 16191, + "essex": 30563, + "essex": 11623, + "est": 2291, + "est": 1509, + "esta": 41449, + "esta": 10135, + "estab": 7010, + "establi": 8412, + "establish": 19709, + "established": 13143, + "establishing": 29420, + "establishment": 20213, + "estas": 39072, + "estate": 47130, + "estate": 6159, + "estates": 26054, + "este": 12968, + "este": 20579, + "esteban": 48381, + "esteem": 31541, + "esteemed": 36293, + "ester": 45808, + "esthe": 18468, + "esther": 24393, + "estim": 8904, + "estimate": 21883, + "estimated": 16665, + "estimates": 21957, + "esto": 31589, + "esto": 23958, + "estonia": 26260, + "estonian": 48895, + "estrada": 48116, + "estre": 31271, + "estu": 26272, + "estuary": 35269, + "esur": 35758, + "esville": 39187, + "esy": 46268, + "et": 1169, + "et": 875, + "eta": 8761, + "etal": 25221, + "etary": 13074, + "etc": 5353, + "etched": 40411, + "etching": 41375, + "ete": 38820, + "ete": 40245, + "eter": 8587, + "eter": 17007, + "eternal": 13732, + "eternally": 48486, + "eternity": 23832, + "eters": 18392, + "etf": 31661, + "eth": 4819, + "eth": 5927, + "ethan": 24245, + "ethan": 15958, + "ethanol": 38166, + "ethe": 21312, + "ethel": 45921, + "ether": 23349, + "ethere": 18705, + "ethereal": 40925, + "ethereum": 19612, + "ethernet": 35026, + "ethi": 10327, + "ethic": 39104, + "ethical": 47041, + "ethical": 17679, + "ethics": 13355, + "ethiop": 10897, + "ethiopia": 13920, + "ethiopian": 24507, + "ethnic": 30522, + "ethnic": 16344, + "ethnicity": 46787, + "ethno": 34225, + "ethos": 48768, + "eti": 11188, + "eti": 30394, + "etienne": 46118, + "eties": 15137, + "etihad": 38489, + "etiquette": 37957, + "etis": 38216, + "etisation": 39733, + "etna": 41940, + "eto": 27829, + "eto": 33837, + "eton": 44339, + "etour": 41462, + "etr": 23012, + "etres": 42838, + "ets": 3442, + "etsy": 13237, + "etsy": 6282, + "etsym": 22902, + "etsymntt": 25416, + "etsyshop": 44643, + "ett": 32729, + "ett": 24998, + "etta": 30466, + "ette": 19981, + "ette": 5212, + "ettes": 35326, + "etto": 44219, + "etty": 40759, + "etu": 36593, + "etv": 49155, + "etv": 20325, + "etwork": 20585, + "ety": 25920, + "ety": 2746, + "etz": 36181, + "etz": 25301, + "eu": 1506, + "eu": 3238, + "eucalyp": 41068, + "eucalyptus": 42351, + "euchar": 38362, + "eugen": 30678, + "eugene": 17760, + "eul": 46749, + "eun": 16431, + "eun": 26219, + "eunhyuk": 47526, + "eup": 44435, + "euph": 21386, + "euphoria": 41051, + "eur": 18343, + "eur": 12018, + "eura": 32605, + "eure": 25311, + "euref": 48017, + "eureka": 31686, + "euro": 2039, + "euro": 8463, + "euroleague": 46821, + "europa": 18290, + "europale": 42473, + "europaleague": 44029, + "europarl": 44922, + "europe": 4198, + "europe": 3848, + "european": 26712, + "european": 4759, + "europeans": 37082, + "euros": 22274, + "eurovision": 17593, + "eurozone": 42555, + "eurusd": 40895, + "eus": 44214, + "euston": 46905, + "euthan": 43280, + "euve": 40652, + "eux": 25019, + "ev": 776, + "ev": 10133, + "eva": 6845, + "evacu": 13187, + "evacuated": 26806, + "evacuation": 27353, + "eval": 25139, + "eval": 9703, + "evalu": 10314, + "evaluate": 27174, + "evaluating": 34541, + "evaluation": 17640, + "evan": 12821, + "evan": 12847, + "evangel": 20518, + "evangeli": 21372, + "evangelical": 36151, + "evangelist": 42275, + "evankirstel": 46581, + "evans": 8836, + "evansville": 44782, + "evapor": 33352, + "evasion": 48795, + "eve": 5732, + "eve": 1866, + "eved": 19820, + "evel": 39315, + "evelyn": 26687, + "evement": 8210, + "even": 6359, + "even": 1427, + "evening": 34487, + "evening": 2285, + 
"evenings": 19994, + "evenly": 45974, + "event": 10612, + "event": 1655, + "eventful": 45628, + "evento": 38155, + "eventprofs": 24980, + "events": 3667, + "eventu": 14055, + "eventual": 45321, + "eventually": 14397, + "ever": 888, + "ever": 1247, + "everest": 21722, + "everett": 25456, + "everglades": 46294, + "evergreen": 23852, + "everlasting": 32849, + "evers": 31914, + "everton": 13315, + "every": 1091, + "every": 1505, + "everybody": 5901, + "everyday": 25049, + "everyday": 5160, + "everyone": 1584, + "everything": 36376, + "everything": 2410, + "everytime": 16911, + "everywhere": 6364, + "eves": 7323, + "evi": 5348, + "evi": 36989, + "evic": 21336, + "eviction": 37111, + "eviden": 46220, + "evidence": 6439, + "evident": 34529, + "evie": 47195, + "evil": 23218, + "evil": 6006, + "eville": 16143, + "eving": 24729, + "evo": 17962, + "evo": 13169, + "evoc": 43133, + "evol": 5350, + "evolu": 7725, + "evolution": 8902, + "evolutionary": 30629, + "evolve": 23406, + "evolved": 22613, + "evolving": 23675, + "evp": 46154, + "evs": 33576, + "ew": 11942, + "ew": 15428, + "ewan": 40247, + "ewe": 48438, + "ewing": 38873, + "ews": 9878, + "ex": 659, + "ex": 4118, + "exac": 5460, + "exact": 12651, + "exactly": 5840, + "exagger": 29766, + "exal": 49324, + "exam": 4428, + "exam": 8785, + "examination": 20970, + "examine": 25728, + "examined": 44004, + "examiner": 29149, + "examines": 28160, + "examining": 30616, + "example": 6228, + "examples": 14790, + "exams": 14028, + "exas": 47536, + "exc": 1302, + "excav": 20733, + "excavation": 45909, + "exce": 10999, + "exceed": 32521, + "exceeded": 36221, + "exceeding": 47213, + "exceeds": 49353, + "excel": 28351, + "excel": 18754, + "excell": 3298, + "excellence": 8171, + "excellency": 36503, + "excellent": 4239, + "excelsi": 47315, + "excep": 8882, + "except": 8541, + "exception": 25018, + "exceptional": 13425, + "exceptionally": 29306, + "excer": 17737, + "excerpt": 20586, + "excess": 22491, + "excessive": 21332, + "exchange": 6616, + "exchanged": 48919, + "exchanges": 29730, + "exchanging": 47760, + "excit": 10510, + "excite": 47711, + "excited": 1889, + "excitement": 11407, + "exciting": 4300, + "exclu": 3114, + "exclude": 49235, + "excluded": 46216, + "excluding": 44326, + "exclusion": 40219, + "exclusive": 3747, + "exclusively": 13565, + "exclusives": 47149, + "excu": 7324, + "excur": 27533, + "excursion": 34869, + "excuse": 9266, + "excuses": 19388, + "exe": 3554, + "exe": 48027, + "exec": 15052, + "execs": 35728, + "execu": 4360, + "execute": 36405, + "executed": 20432, + "execution": 18085, + "executive": 5944, + "executives": 24357, + "exem": 19753, + "exemp": 28602, + "exempl": 36371, + "exemplary": 39123, + "exempli": 41934, + "exempt": 44278, + "exemption": 47481, + "exer": 40295, + "exerc": 5932, + "exercise": 7016, + "exercises": 19669, + "exercising": 39036, + "exeter": 32137, + "exeter": 18837, + "exfoli": 38823, + "exhau": 11154, + "exhaust": 21812, + "exhausted": 21741, + "exhausting": 40035, + "exhaustion": 49221, + "exhi": 3022, + "exhib": 3783, + "exhibit": 24992, + "exhibit": 8209, + "exhibiting": 23889, + "exhibition": 4219, + "exhibitions": 28311, + "exhibitor": 44192, + "exhibitors": 38542, + "exhibits": 30093, + "exhilar": 40262, + "exhilarating": 49289, + "exi": 5297, + "exico": 38712, + "exile": 28566, + "exist": 10899, + "exist": 9645, + "existed": 23198, + "existence": 13832, + "existent": 43541, + "existential": 38752, + "existing": 12886, + "exists": 14608, + "exit": 9374, + "exited": 37581, + "exiting": 39577, + "exits": 
34943, + "exmoor": 48260, + "exo": 15600, + "exo": 5842, + "exodus": 30098, + "exol": 42856, + "exop": 35288, + "exoplan": 37980, + "exor": 24506, + "exorcist": 46309, + "exotic": 15639, + "exp": 9923, + "exp": 19066, + "expan": 7512, + "expand": 10382, + "expand": 13141, + "expanded": 18390, + "expanding": 15755, + "expands": 22223, + "expanse": 46886, + "expansion": 10138, + "expansive": 49261, + "expat": 43900, + "expe": 2560, + "expect": 9802, + "expect": 5716, + "expectation": 34273, + "expectations": 12529, + "expected": 5573, + "expecting": 12525, + "expects": 24536, + "expedition": 16761, + "expeditions": 49327, + "expelled": 48834, + "expen": 7216, + "expend": 29302, + "expenditure": 47044, + "expense": 28473, + "expenses": 21797, + "expensive": 9649, + "exper": 1533, + "experi": 4723, + "experience": 31867, + "experience": 2415, + "experienced": 10417, + "experiences": 8233, + "experiencing": 16643, + "experiential": 44952, + "experim": 6697, + "experiment": 13079, + "experimental": 16539, + "experimenting": 28263, + "experiments": 21077, + "expert": 6284, + "expertise": 16555, + "experts": 6960, + "expi": 26850, + "expir": 35077, + "expire": 49315, + "expired": 30200, + "expires": 34739, + "expl": 3261, + "expla": 3517, + "explain": 48918, + "explain": 7304, + "explained": 14229, + "explaining": 13136, + "explains": 6655, + "explan": 13294, + "explanation": 16577, + "explanations": 34383, + "explic": 21011, + "explicit": 33228, + "explo": 3586, + "explode": 31262, + "exploded": 28947, + "explodes": 38119, + "exploding": 34683, + "exploit": 36953, + "exploited": 48554, + "explor": 11958, + "exploration": 14043, + "explore": 10405, + "explore": 5147, + "explorebc": 38754, + "explorecanada": 36600, + "explored": 25016, + "explorer": 15776, + "explorers": 28491, + "explores": 13996, + "exploring": 7584, + "explosion": 13785, + "explosions": 38646, + "explosive": 18888, + "explosives": 44705, + "expo": 7820, + "expo": 6344, + "expon": 27905, + "export": 14444, + "exporting": 47433, + "exports": 20088, + "expose": 23181, + "exposed": 12180, + "exposes": 33575, + "exposing": 28362, + "exposition": 36943, + "exposure": 11903, + "expre": 6085, + "express": 18553, + "express": 5642, + "expressed": 20777, + "expresses": 31931, + "expressing": 30207, + "expression": 11357, + "expressions": 20314, + "expressive": 42060, + "expressway": 31658, + "exquis": 16575, + "exquisite": 17958, + "ext": 5711, + "ext": 20072, + "exten": 5555, + "extend": 14492, + "extended": 9614, + "extending": 25652, + "extends": 20688, + "extension": 10275, + "extensions": 24525, + "extensive": 16870, + "extensively": 47365, + "extent": 24913, + "exter": 9797, + "exterior": 19352, + "extermin": 41671, + "external": 15028, + "extin": 13553, + "extinct": 24488, + "extinction": 21186, + "extingui": 38567, + "extor": 35620, + "extr": 29082, + "extra": 6416, + "extra": 4231, + "extrac": 18550, + "extract": 18962, + "extraction": 28789, + "extracts": 45576, + "extraordin": 23628, + "extraordinaire": 30909, + "extraordinary": 10982, + "extras": 29817, + "extravag": 22299, + "extravaganza": 29461, + "extre": 3978, + "extreme": 38357, + "extreme": 8331, + "extremely": 6519, + "extremism": 31493, + "extremist": 36383, + "extremists": 41425, + "extru": 43010, + "ey": 1541, + "ey": 1477, + "eyang": 28915, + "eye": 5034, + "eye": 3272, + "eyebrow": 34250, + "eyebrows": 19923, + "eyed": 15512, + "eyeing": 34916, + "eyel": 17075, + "eyelashes": 42074, + "eyeliner": 33354, + "eyeon": 25126, + "eyes": 3095, + "eyeshadow": 35213, + 
"eyewear": 30165, + "eyewitness": 36258, + "eyou": 31996, + "eyour": 40229, + "eyre": 44115, + "ez": 10082, + "ez": 8387, + "eze": 25993, + "eze": 27229, + "ezekiel": 41428, + "ezra": 27552, + "f": 69, + "f": 325, + "fa": 778, + "fa": 2800, + "faa": 27577, + "fab": 2833, + "fab": 5492, + "faber": 43461, + "faber": 42488, + "fabi": 29425, + "fabian": 34539, + "fabio": 31666, + "fabric": 16217, + "fabric": 10033, + "fabricated": 40851, + "fabrication": 33476, + "fabrics": 23159, + "fabulous": 5189, + "fac": 1053, + "fac": 35438, + "facade": 29217, + "face": 2545, + "face": 1710, + "facebook": 36156, + "facebook": 2943, + "faced": 10941, + "faceli": 32023, + "facelift": 36380, + "faceoff": 42710, + "facep": 45285, + "faces": 4905, + "faceted": 43435, + "facetime": 24076, + "facial": 11909, + "facil": 39973, + "facilit": 13567, + "facilitate": 26733, + "facilitated": 43853, + "facilitating": 34796, + "facilities": 10388, + "facility": 8165, + "facing": 7619, + "fact": 17189, + "fact": 3598, + "factfriday": 27953, + "faction": 14629, + "factor": 21082, + "factor": 8124, + "factories": 36492, + "factors": 12733, + "factory": 42483, + "factory": 6072, + "facts": 5085, + "factual": 45471, + "faculty": 9504, + "facup": 25283, + "fad": 12632, + "fad": 47669, + "fade": 20486, + "faded": 26051, + "fades": 40441, + "fading": 32882, + "fadnavis": 38945, + "faf": 31052, + "faf": 43903, + "fag": 25617, + "fag": 39305, + "fah": 25495, + "fah": 35429, + "fahren": 45527, + "fai": 20519, + "fai": 26384, + "fail": 7105, + "fail": 6801, + "failed": 8314, + "failing": 15757, + "fails": 13388, + "failure": 8732, + "failures": 25442, + "faint": 30807, + "fair": 3031, + "fair": 2849, + "fairbanks": 43962, + "faire": 34745, + "faire": 20798, + "fairfax": 29368, + "fairfield": 29664, + "fairgrounds": 38325, + "fairi": 28884, + "fairies": 33590, + "fairly": 14961, + "fairmont": 41547, + "fairness": 29388, + "fairs": 8655, + "fairtrade": 33361, + "fairview": 43479, + "fairway": 44022, + "fairy": 17021, + "fairy": 10444, + "fairytale": 28944, + "fais": 23542, + "faisal": 35459, + "fait": 20567, + "faith": 10653, + "faith": 5080, + "faithful": 15511, + "faiz": 41775, + "fake": 18794, + "fake": 5777, + "faken": 22853, + "fakenews": 26943, + "fakespeare": 49095, + "fal": 2778, + "fal": 40494, + "fala": 47120, + "falcon": 22498, + "falcon": 13571, + "falcons": 13834, + "falk": 34648, + "falkirk": 44080, + "fall": 6489, + "fall": 2359, + "fallen": 8688, + "falling": 48709, + "falling": 7293, + "fallon": 39596, + "fallon": 21281, + "fallontonight": 44627, + "fallout": 49365, + "fallout": 16009, + "falls": 4778, + "falmouth": 38261, + "false": 38948, + "false": 9078, + "falsely": 42321, + "fam": 1058, + "fam": 5128, + "fame": 6573, + "famed": 23302, + "famer": 24554, + "famil": 3395, + "famili": 8488, + "familia": 25622, + "familiar": 10020, + "families": 4612, + "family": 8137, + "family": 1315, + "familyfun": 46308, + "familytime": 47236, + "familytravel": 38222, + "famine": 35847, + "famous": 44811, + "famous": 4096, + "famously": 44505, + "fan": 1675, + "fan": 2261, + "fanart": 41059, + "fanart": 7855, + "fanartfriday": 45346, + "fanatic": 36643, + "fanatics": 39610, + "fanbase": 36921, + "fanboy": 43369, + "fanc": 29017, + "fancafe": 45080, + "fanci": 35908, + "fanclub": 31530, + "fancy": 47622, + "fancy": 6733, + "fand": 19684, + "fandom": 47634, + "fandom": 11534, + "fanfest": 42916, + "fanfic": 47243, + "fang": 14269, + "fang": 27428, + "fangirl": 28813, + "fangirling": 39463, + "fanning": 37282, + "fanny": 30401, + 
"fans": 32454, + "fans": 1840, + "fansign": 25288, + "fant": 4467, + "fanta": 2703, + "fantaken": 39412, + "fantasia": 49306, + "fantastic": 31289, + "fantastic": 2935, + "fantasy": 15124, + "fantasy": 5267, + "fantasyfootball": 35713, + "fao": 31155, + "faq": 28533, + "far": 1578, + "far": 2384, + "fara": 48562, + "farage": 28340, + "farah": 31547, + "fare": 8620, + "fare": 6461, + "fares": 27525, + "farewell": 10734, + "fargo": 18870, + "fari": 26197, + "farley": 43761, + "farm": 9066, + "farm": 3985, + "farmer": 19735, + "farmer": 10474, + "farmers": 29752, + "farmers": 6402, + "farmersmarket": 41808, + "farmhouse": 26293, + "farming": 10399, + "farmington": 49305, + "farmland": 45258, + "farms": 11277, + "farn": 27527, + "faroo": 39147, + "farra": 33657, + "farrakhan": 46293, + "farrell": 24234, + "fart": 34664, + "farther": 42233, + "fas": 4830, + "fas": 42995, + "fasci": 17191, + "fascin": 7327, + "fascinated": 32964, + "fascinating": 8640, + "fascism": 28213, + "fascist": 23870, + "fascists": 43598, + "fash": 42682, + "fashi": 2099, + "fashion": 6976, + "fashion": 2444, + "fashionable": 24597, + "fashionblogger": 31726, + "fashioned": 21563, + "fashioni": 26062, + "fashionista": 30415, + "fashions": 37601, + "fashionshow": 45653, + "fashionweek": 28684, + "fass": 42398, + "fast": 8509, + "fast": 1953, + "fasten": 44990, + "faster": 8835, + "fastest": 9808, + "fasting": 24656, + "fat": 4751, + "fat": 5484, + "fatal": 12124, + "fatalities": 44168, + "fatally": 34069, + "fate": 26315, + "fate": 11734, + "father": 11607, + "father": 3224, + "fathers": 12780, + "fathersday": 16731, + "fati": 13430, + "fatigue": 23747, + "fatima": 28202, + "fats": 30151, + "fatt": 44131, + "fatty": 22953, + "fau": 5571, + "fau": 31381, + "faucet": 44273, + "faul": 16230, + "faulkner": 37840, + "fault": 13862, + "faults": 42752, + "faulty": 47103, + "fauna": 30808, + "faust": 44772, + "faux": 19429, + "fav": 1355, + "fav": 5426, + "fave": 7272, + "faves": 18003, + "favor": 1766, + "favor": 12160, + "favorable": 35392, + "favored": 46640, + "favorite": 35262, + "favorite": 1916, + "favorited": 36926, + "favorites": 10564, + "favors": 36085, + "favour": 3111, + "favour": 20469, + "favourite": 3342, + "favourites": 16585, + "favs": 18879, + "faw": 21800, + "fawad": 46425, + "fawn": 48624, + "fax": 32535, + "fax": 9337, + "fay": 8939, + "fay": 40074, + "faye": 30257, + "fayette": 32043, + "fayette": 19782, + "fayetteville": 37771, + "fayre": 34982, + "faz": 26238, + "faze": 44880, + "fb": 22637, + "fb": 3307, + "fball": 29663, + "fbf": 20004, + "fbi": 10293, + "fbloggers": 41389, + "fbs": 48454, + "fc": 4278, + "fc": 1399, + "fca": 24540, + "fcb": 26639, + "fcb": 25045, + "fcbarcelona": 32174, + "fcbayern": 35033, + "fcblive": 44608, + "fcc": 21240, + "fck": 40080, + "fck": 49263, + "fcofficial": 27805, + "fcs": 32095, + "fcu": 47898, + "fd": 16972, + "fd": 11525, + "fda": 17823, + "fdi": 45579, + "fdn": 18563, + "fdny": 41084, + "fdr": 42298, + "fe": 623, + "fe": 873, + "fear": 8744, + "fear": 5402, + "feared": 31154, + "fearless": 17470, + "fears": 13867, + "fearthe": 33449, + "feasi": 34977, + "feast": 37963, + "feast": 9564, + "feat": 1703, + "feat": 5611, + "feather": 24905, + "feather": 17871, + "feathers": 21138, + "featherweight": 44939, + "feature": 30413, + "feature": 4527, + "featured": 4743, + "features": 4643, + "featuring": 3706, + "feb": 4317, + "febru": 4202, + "february": 4248, + "fect": 31293, + "fed": 22518, + "fed": 7035, + "feder": 4737, + "federal": 6369, + "federation": 15530, + 
"federer": 18246, + "federico": 40539, + "fedex": 32603, + "fedora": 45111, + "feds": 30593, + "fee": 28242, + "fee": 9224, + "feed": 6662, + "feed": 5839, + "feedback": 8683, + "feeder": 24482, + "feeders": 44523, + "feeding": 9879, + "feeds": 21788, + "feel": 2408, + "feel": 2051, + "feelin": 19903, + "feeling": 33087, + "feeling": 3045, + "feelings": 9452, + "feels": 4808, + "feelthe": 22322, + "feelthebern": 27743, + "fees": 11765, + "feet": 4804, + "fei": 23441, + "fei": 34217, + "fein": 46707, + "feinstein": 41313, + "fel": 2081, + "fel": 20304, + "feld": 45913, + "feld": 14219, + "feldman": 41942, + "feli": 7498, + "felic": 25845, + "felici": 23379, + "felicia": 41139, + "felicidades": 41648, + "felicity": 35123, + "feline": 29471, + "felipe": 27681, + "felix": 33455, + "felix": 16514, + "feliz": 26104, + "feliz": 20221, + "fell": 33540, + "fell": 6266, + "fella": 17586, + "fellas": 18787, + "feller": 29226, + "fellow": 12099, + "fellow": 5242, + "fellows": 15766, + "fellowship": 13857, + "felony": 31068, + "felt": 5413, + "fem": 24574, + "fem": 36615, + "fema": 41721, + "female": 22062, + "female": 3970, + "females": 21028, + "femi": 38607, + "femin": 11423, + "femini": 11894, + "feminine": 24911, + "feminism": 18784, + "feminist": 14921, + "feminists": 38809, + "femme": 31331, + "fen": 5509, + "fen": 25024, + "fence": 12679, + "fences": 34312, + "fencing": 23489, + "fender": 17117, + "fener": 41208, + "fenerbah": 46652, + "feng": 33291, + "fennel": 28689, + "fent": 26395, + "fenton": 47265, + "fenway": 29206, + "fer": 1765, + "fer": 2897, + "fera": 37705, + "feral": 29972, + "ferdin": 25541, + "ferdinand": 27591, + "fere": 43144, + "feren": 35652, + "ference": 19984, + "ferg": 44938, + "fergie": 39119, + "fergu": 10988, + "fergus": 42041, + "ferguson": 11904, + "fermentation": 45817, + "fermented": 36886, + "fern": 10747, + "fern": 21685, + "fernandes": 44391, + "fernandez": 23436, + "fernando": 17140, + "ferns": 38277, + "feroci": 45652, + "ferr": 7256, + "ferra": 47911, + "ferrari": 9606, + "ferre": 29626, + "ferred": 10432, + "ferreira": 48686, + "ferrell": 41112, + "ferrer": 38904, + "ferri": 42008, + "ferries": 28489, + "ferris": 27532, + "ferry": 38936, + "ferry": 10278, + "fers": 12378, + "fert": 14925, + "fert": 43662, + "fertil": 41987, + "fertile": 44837, + "fertili": 23912, + "fertility": 23528, + "fertilizer": 36786, + "fery": 47448, + "fes": 32300, + "fest": 17383, + "fest": 2590, + "festa": 42124, + "festi": 1943, + "festiv": 19222, + "festival": 20946, + "festival": 2240, + "festivals": 17834, + "festive": 9533, + "festivities": 21020, + "fet": 21409, + "feta": 31705, + "fetal": 42031, + "fetch": 30271, + "fete": 34629, + "fett": 37979, + "fetus": 26768, + "feu": 24912, + "feu": 32990, + "feud": 27365, + "fever": 40896, + "fever": 9989, + "fevre": 43861, + "few": 1939, + "fewer": 19128, + "fex": 41584, + "fex": 26392, + "fey": 39069, + "fey": 23298, + "fez": 43081, + "ff": 1021, + "ff": 1304, + "ffa": 15355, + "ffame": 42873, + "ffc": 19832, + "ffe": 1138, + "ffe": 8631, + "ffect": 29151, + "ffed": 8448, + "ffee": 26377, + "ffel": 22656, + "ffen": 46537, + "ffer": 27369, + "ffer": 11636, + "ffers": 32163, + "fferty": 44771, + "ffes": 46441, + "ffey": 30138, + "fff": 28106, + "ffi": 19961, + "ffic": 4762, + "ffice": 26044, + "ffici": 3639, + "fficial": 39818, + "fficial": 6463, + "fficiency": 27800, + "fficient": 20424, + "ffin": 12779, + "ffin": 7367, + "ffing": 16592, + "ffins": 17898, + "ffl": 39490, + "ffle": 7749, + "ffler": 39819, + "ffles": 19344, + "ffman": 
15823, + "ffo": 42264, + "ffs": 4424, + "ffxiv": 26569, + "ffxv": 46786, + "ffy": 26404, + "ffy": 7795, + "fg": 45977, + "fg": 6823, + "fgm": 32178, + "fgo": 46113, + "fh": 21649, + "fh": 21010, + "fhs": 45094, + "fi": 701, + "fi": 3589, + "fia": 8827, + "fiable": 34373, + "fianc": 27752, + "fiance": 44114, + "fiancé": 34039, + "fiasco": 40944, + "fiat": 16740, + "fiawec": 39485, + "fib": 40594, + "fiba": 34993, + "fiber": 35074, + "fiber": 12612, + "fibers": 44587, + "fibre": 21401, + "fibro": 21294, + "fibrosis": 36307, + "fic": 1788, + "fic": 2059, + "fica": 26952, + "fically": 14854, + "fication": 4523, + "fications": 12512, + "ficial": 48192, + "fics": 42505, + "fiction": 6218, + "fictional": 25570, + "fid": 34197, + "fid": 23966, + "fidd": 25218, + "fiddle": 35968, + "fide": 45375, + "fidel": 21740, + "fidel": 36837, + "fidelity": 30109, + "fidget": 48664, + "fie": 28487, + "fie": 10348, + "fied": 29642, + "fied": 2853, + "fiel": 1361, + "field": 7571, + "field": 1570, + "fielder": 11046, + "fieldhouse": 37969, + "fielding": 30465, + "fields": 6494, + "fieldwork": 33155, + "fiends": 37869, + "fier": 11167, + "fier": 10598, + "fierc": 48609, + "fierce": 13896, + "fiercely": 49039, + "fiers": 16113, + "fiery": 24557, + "fies": 9537, + "fiesta": 14580, + "fif": 5309, + "fifa": 21976, + "fifa": 8516, + "fifaworldcup": 38819, + "fifawwc": 41329, + "fife": 24374, + "fifteen": 29504, + "fifth": 25515, + "fifth": 8772, + "fifthharmony": 31075, + "fifty": 24456, + "fifty": 15978, + "fig": 4814, + "fig": 20719, + "figaro": 48044, + "figh": 23274, + "fight": 5262, + "fight": 2757, + "fighter": 35884, + "fighter": 6438, + "fighters": 7371, + "fightfor": 48909, + "fightfor": 35740, + "fighting": 38625, + "fighting": 4652, + "fighton": 45578, + "fights": 12132, + "figs": 38882, + "figu": 6390, + "figur": 16948, + "figurative": 44042, + "figure": 48820, + "figure": 5274, + "figured": 15630, + "figures": 8739, + "figurine": 33306, + "figuring": 31513, + "fiji": 48270, + "fiji": 18285, + "fik": 46589, + "fil": 1142, + "fil": 14915, + "fila": 30992, + "filament": 49252, + "file": 12545, + "file": 4512, + "filed": 13864, + "files": 7850, + "filet": 43155, + "fili": 9590, + "filing": 16576, + "filip": 14368, + "filipino": 19153, + "fill": 15904, + "fill": 6277, + "filled": 5589, + "filler": 32816, + "fillers": 45005, + "fillet": 39276, + "filling": 9736, + "fillion": 38048, + "fillmore": 43922, + "fills": 21750, + "filly": 27690, + "film": 5117, + "film": 1860, + "filmed": 15801, + "filmfare": 42224, + "filmfest": 24508, + "filmfestival": 28066, + "filming": 6866, + "filmmaker": 17202, + "filmmakers": 24896, + "filmmaking": 18226, + "films": 5370, + "fils": 40271, + "filter": 7541, + "filtered": 29926, + "filtering": 47770, + "filters": 18385, + "filth": 39713, + "filthy": 26899, + "filtr": 21408, + "filtration": 42036, + "fim": 47525, + "fin": 735, + "fin": 10663, + "fina": 34497, + "final": 11968, + "final": 1755, + "finale": 7844, + "finalfantasy": 44543, + "finalfour": 46999, + "finalist": 12620, + "finalists": 13422, + "finalized": 48930, + "finally": 1992, + "finals": 4536, + "finan": 4807, + "finance": 6117, + "finances": 28767, + "financi": 12846, + "financial": 19783, + "financial": 4930, + "financially": 28124, + "financing": 18375, + "finch": 18523, + "find": 18638, + "find": 1416, + "finder": 15045, + "finders": 43884, + "findia": 47064, + "finding": 37455, + "finding": 6002, + "findings": 16529, + "findlay": 48227, + "findom": 36463, + "finds": 6680, + "findyour": 25936, + "findyourpark": 
38924, + "fine": 12042, + "fine": 3797, + "fineart": 7484, + "fineart": 16005, + "fineartamerica": 7724, + "fined": 20094, + "finely": 46120, + "finer": 36681, + "fines": 25053, + "finesse": 46047, + "finest": 7707, + "fing": 6485, + "fing": 17955, + "finger": 13480, + "finger": 8895, + "fingerprint": 39579, + "fingers": 9690, + "fini": 2405, + "finish": 42178, + "finish": 3958, + "finished": 3078, + "finisher": 38636, + "finishers": 48661, + "finishes": 13078, + "finishing": 7912, + "finite": 48312, + "finity": 41463, + "finity": 21273, + "fink": 40158, + "finland": 10775, + "finley": 41652, + "finn": 28479, + "finn": 16925, + "finna": 35180, + "finnish": 19616, + "fino": 30083, + "fins": 32810, + "fintech": 48929, + "fintech": 8899, + "fion": 27476, + "fiona": 20099, + "fior": 37086, + "fiore": 44997, + "fioren": 33188, + "fiorentina": 43713, + "fios": 42521, + "fir": 770, + "fir": 16233, + "fire": 2951, + "fire": 1769, + "firearm": 40311, + "firearms": 23960, + "fireball": 40543, + "firec": 42806, + "fired": 8846, + "firefighter": 20498, + "firefighters": 12600, + "firefly": 33997, + "firefox": 35372, + "fireman": 46085, + "firen": 34752, + "firenze": 38445, + "fireplace": 23050, + "fires": 8749, + "fireside": 36185, + "firework": 40750, + "fireworks": 10641, + "firing": 15105, + "firm": 16936, + "firm": 7705, + "firmly": 29156, + "firms": 13655, + "firmware": 42691, + "first": 6853, + "first": 874, + "firstdayof": 44297, + "firsth": 48512, + "firsts": 47884, + "firth": 26078, + "fis": 7846, + "fis": 47683, + "fiscal": 20825, + "fischer": 26532, + "fish": 6431, + "fish": 2759, + "fisher": 11175, + "fisher": 9176, + "fisheries": 24612, + "fisherman": 25055, + "fishermen": 28547, + "fishers": 42065, + "fishery": 49057, + "fishes": 35470, + "fishing": 31703, + "fishing": 4935, + "fishy": 35665, + "fist": 48340, + "fist": 17085, + "fit": 2366, + "fit": 2478, + "fitbit": 33768, + "fitch": 44614, + "fitfam": 20662, + "fitnes": 47285, + "fitness": 20044, + "fitness": 4838, + "fits": 6401, + "fitt": 32994, + "fitted": 14863, + "fitter": 42096, + "fitters": 32364, + "fitting": 11769, + "fittings": 45787, + "fitz": 11120, + "fitz": 25913, + "fitzgerald": 20606, + "fitzpatrick": 37141, + "fiu": 38374, + "five": 19508, + "five": 3127, + "fives": 44066, + "fix": 4596, + "fix": 6028, + "fixed": 9393, + "fixes": 25473, + "fixing": 17423, + "fixture": 17317, + "fixtures": 19904, + "fizz": 31242, + "fj": 43183, + "fj": 46447, + "fjor": 31260, + "fk": 12410, + "fl": 1082, + "fl": 2685, + "fla": 1577, + "fla": 20292, + "flag": 11536, + "flag": 4859, + "flagged": 45012, + "flags": 12221, + "flagship": 19779, + "flagstaff": 40406, + "flair": 24938, + "flake": 21221, + "flakes": 20934, + "flam": 10559, + "flame": 40351, + "flame": 13484, + "flamen": 28826, + "flamenco": 37362, + "flames": 13441, + "flamin": 42693, + "flaming": 34782, + "flamingo": 30323, + "flan": 14572, + "flanagan": 28641, + "flanders": 34837, + "flank": 44553, + "flann": 39510, + "flannel": 37807, + "flap": 35253, + "flappy": 40241, + "flare": 21185, + "flares": 46088, + "flash": 6089, + "flash": 5815, + "flashback": 14616, + "flashback": 11988, + "flashbackfriday": 15014, + "flashbacks": 47056, + "flashes": 31259, + "flashing": 31764, + "flashlight": 37256, + "flask": 36194, + "flat": 8986, + "flat": 6313, + "flats": 17228, + "flatt": 45498, + "flattering": 43267, + "flaun": 41421, + "flav": 7191, + "flavo": 28895, + "flavor": 31835, + "flavor": 11818, + "flavored": 29350, + "flavorful": 49135, + "flavors": 16930, + "flavour": 17026, + 
"flavoured": 42397, + "flavours": 21083, + "flaw": 14268, + "flaw": 34978, + "flawed": 35136, + "flawless": 15531, + "flaws": 30492, + "flax": 43443, + "fle": 2428, + "fle": 44964, + "flea": 24883, + "fleck": 28143, + "fled": 26731, + "flee": 19427, + "flee": 30167, + "fleece": 25038, + "fleeing": 30543, + "fleek": 43513, + "fleet": 35922, + "fleet": 9147, + "fleetwood": 28883, + "fleming": 25769, + "fler": 48789, + "flesh": 17495, + "flet": 16102, + "fletcher": 19810, + "fleur": 28593, + "flew": 13768, + "flex": 16426, + "flex": 12038, + "flexi": 10032, + "flexibility": 22547, + "flexible": 14502, + "flexing": 48483, + "fli": 2472, + "flick": 13746, + "flick": 23414, + "flickr": 17755, + "flies": 8070, + "flight": 24701, + "flight": 3795, + "flights": 10515, + "flin": 24730, + "flin": 43816, + "flinders": 44647, + "fling": 22768, + "flint": 28306, + "flint": 18324, + "flip": 20385, + "flip": 11035, + "flipk": 30829, + "flipkart": 33154, + "flipped": 28144, + "flipping": 25881, + "flips": 35089, + "flir": 24330, + "flirt": 38352, + "flirting": 35243, + "flix": 40663, + "flo": 1945, + "flo": 20711, + "float": 16123, + "floating": 12619, + "floats": 33272, + "flock": 36297, + "flock": 21822, + "flondon": 47366, + "floo": 4062, + "flood": 23793, + "flood": 7148, + "flooded": 19706, + "flooding": 10204, + "floods": 16369, + "floor": 23657, + "floor": 4125, + "flooring": 19227, + "floors": 15671, + "flop": 22994, + "floppy": 38267, + "flops": 29146, + "flor": 15784, + "flor": 41669, + "flora": 18906, + "floral": 10732, + "florals": 48331, + "floren": 37706, + "florence": 11617, + "flores": 21537, + "flori": 3482, + "florian": 41861, + "florida": 34264, + "florida": 3966, + "florist": 38403, + "floss": 36453, + "flotus": 35181, + "flour": 18592, + "flouri": 23239, + "flourish": 36038, + "flow": 2180, + "flow": 5608, + "flower": 12772, + "flower": 4055, + "flowering": 19953, + "flowers": 4023, + "flowing": 14922, + "flown": 25659, + "flows": 16715, + "floyd": 46369, + "floyd": 13656, + "flu": 3698, + "flu": 13528, + "fluctu": 40181, + "fluence": 38169, + "fluent": 30025, + "fluff": 31174, + "fluffy": 40346, + "fluffy": 17054, + "fluid": 43803, + "fluid": 16717, + "fluids": 41490, + "fluor": 45127, + "fluore": 26974, + "fluorescent": 35036, + "fluori": 45611, + "flur": 31591, + "flush": 25777, + "flushing": 43754, + "flute": 23746, + "flux": 25249, + "flwx": 30907, + "fly": 5666, + "fly": 3228, + "flye": 30873, + "flyeagles": 39927, + "flyeaglesfly": 39931, + "flyer": 11875, + "flyers": 14181, + "flyfishing": 31800, + "flying": 20782, + "flying": 4610, + "flyn": 40676, + "flynn": 15721, + "flyo": 33506, + "flyover": 38083, + "fm": 13715, + "fm": 3689, + "fman": 25152, + "fml": 26730, + "fmr": 32875, + "fn": 22773, + "fn": 21763, + "fnc": 46506, + "fo": 898, + "fo": 6157, + "foal": 40386, + "foam": 30039, + "foam": 14587, + "foamed": 26711, + "fob": 40315, + "focal": 30934, + "focu": 5827, + "focus": 4353, + "focused": 9319, + "focuses": 20093, + "focusing": 15551, + "fod": 31015, + "fod": 43299, + "fodils": 44411, + "foe": 22952, + "foes": 46279, + "fog": 9417, + "foggy": 19770, + "foil": 17302, + "fol": 1106, + "fol": 48616, + "fold": 35201, + "fold": 11021, + "foldable": 48307, + "folded": 25233, + "folder": 25717, + "folding": 15464, + "folds": 24266, + "foley": 22850, + "foli": 7713, + "folia": 48964, + "foliage": 26350, + "folio": 10772, + "folk": 10665, + "folk": 6032, + "folke": 47190, + "folkl": 27273, + "folklore": 22133, + "folklore": 28620, + "folklorethursday": 23270, + "folks": 5422, 
+ "follo": 41417, + "follow": 1964, + "follow": 1979, + "followart": 40957, + "followback": 33863, + "followed": 6499, + "follower": 17039, + "followers": 4856, + "following": 3473, + "followme": 29668, + "followparty": 44757, + "follows": 11287, + "followthe": 30747, + "folly": 41408, + "folsom": 42108, + "fom": 34540, + "fon": 5017, + "fon": 38318, + "fond": 19964, + "fonda": 44609, + "fondue": 48321, + "fone": 40672, + "font": 37610, + "font": 16248, + "fontaine": 37864, + "fontana": 43643, + "fontein": 45062, + "fonts": 32801, + "foo": 1183, + "foo": 23435, + "food": 4586, + "food": 1559, + "foodand": 38317, + "foodbank": 31926, + "foodie": 30762, + "foodie": 9847, + "foodies": 22416, + "foodnetwork": 46793, + "foods": 7057, + "foodsecurity": 49329, + "foodtruck": 47682, + "fool": 23959, + "fool": 12212, + "fooled": 28761, + "fooling": 47964, + "foolish": 33824, + "fools": 15946, + "foot": 6702, + "foot": 4738, + "footage": 11130, + "footb": 33466, + "football": 9376, + "football": 1882, + "footballer": 20646, + "footballers": 30269, + "footed": 38040, + "footh": 25951, + "foothills": 37020, + "footpath": 48858, + "footprint": 23206, + "footprints": 39640, + "footsteps": 27289, + "footwear": 22772, + "footy": 39866, + "footy": 18922, + "for": 645, + "for": 556, + "forage": 46871, + "foraging": 39056, + "forall": 17824, + "forbe": 49098, + "forbes": 13925, + "forbi": 24754, + "forbidden": 25164, + "force": 12068, + "force": 2869, + "forced": 8201, + "forces": 5381, + "forchange": 35848, + "forcing": 21573, + "ford": 3751, + "ford": 1623, + "fordfc": 28581, + "fordham": 48792, + "fords": 29351, + "fordshire": 14645, + "fore": 1484, + "fore": 1332, + "forec": 34155, + "forecast": 7361, + "forecasting": 38133, + "forecasts": 27696, + "foreclo": 44916, + "forefront": 37679, + "foreground": 35186, + "forehead": 25394, + "foreig": 26497, + "foreign": 42255, + "foreign": 6046, + "foreigners": 38549, + "foreman": 36174, + "foremost": 42128, + "foren": 16526, + "forensic": 23158, + "forensics": 38763, + "forest": 18760, + "forest": 4167, + "forestation": 33939, + "forestry": 26281, + "forests": 14095, + "forever": 14748, + "forever": 3225, + "forevery": 40605, + "forex": 40200, + "forex": 17395, + "forfe": 44871, + "forge": 19232, + "forged": 28105, + "forget": 46153, + "forget": 2678, + "forgets": 35613, + "forgetting": 25452, + "forgi": 22080, + "forgive": 15332, + "forgiven": 44894, + "forgiveness": 23585, + "forgood": 39169, + "forgot": 6483, + "forgotten": 7994, + "fork": 24501, + "fork": 13700, + "forkids": 48571, + "forklift": 43202, + "forks": 28769, + "forlife": 17624, + "form": 1157, + "form": 1907, + "forma": 38829, + "formal": 12978, + "formally": 24867, + "format": 16252, + "format": 11874, + "formation": 2510, + "formations": 37715, + "formative": 48882, + "formats": 32085, + "forme": 42085, + "formed": 6528, + "former": 2276, + "formerly": 20866, + "formid": 38599, + "formidable": 39834, + "forming": 15443, + "formity": 42290, + "forms": 5161, + "formu": 8689, + "formul": 23923, + "formula": 24485, + "formula": 10776, + "formulae": 34586, + "formulated": 45066, + "forre": 38876, + "forrest": 25205, + "forrester": 45338, + "forsa": 48958, + "forsale": 13303, + "forster": 42923, + "forsy": 29629, + "forsyth": 40952, + "fort": 12300, + "fort": 2921, + "forte": 44350, + "forte": 27367, + "forth": 17068, + "forth": 11932, + "forthcoming": 19989, + "forthe": 12521, + "forti": 26984, + "fortified": 46486, + "fortn": 14428, + "fortnight": 39235, + "fortnite": 38734, + "fortnite": 17890, + 
"fortress": 19988, + "fortun": 6950, + "fortunate": 19898, + "fortunately": 34358, + "fortune": 40931, + "fortune": 11451, + "fortunes": 41989, + "forty": 24399, + "forum": 37851, + "forum": 4538, + "forums": 31518, + "forwar": 34364, + "forward": 47031, + "forward": 2342, + "forwards": 38974, + "foryou": 35150, + "forz": 46056, + "forza": 33293, + "forza": 28089, + "fos": 36925, + "fos": 22081, + "foss": 14240, + "foss": 37911, + "fossil": 20419, + "fossil": 15202, + "fossilfriday": 26079, + "fossils": 30652, + "foster": 26778, + "foster": 8139, + "fostering": 35996, + "fosters": 37644, + "foto": 15908, + "foto": 12823, + "fotogra": 23687, + "fotografia": 40256, + "fotos": 26124, + "fou": 14516, + "fought": 10844, + "foul": 19784, + "foun": 3154, + "found": 3454, + "found": 1546, + "foundation": 4058, + "foundations": 25219, + "founded": 12240, + "founder": 5145, + "founders": 14602, + "founding": 15317, + "foundry": 31426, + "fountain": 44863, + "fountain": 13405, + "fountains": 37411, + "four": 5113, + "four": 2721, + "foursquare": 34484, + "fourteen": 46255, + "fourth": 7516, + "fourthofjuly": 47805, + "fow": 17084, + "fowl": 31685, + "fowler": 20980, + "fox": 5007, + "fox": 3240, + "foxandfriends": 45841, + "foxes": 24145, + "foxnews": 18830, + "foxsports": 39267, + "foxtv": 49396, + "foxx": 32993, + "foxy": 27945, + "foy": 30284, + "foyer": 38011, + "foyle": 47902, + "fp": 28058, + "fp": 8941, + "fpl": 27970, + "fpp": 36464, + "fps": 25300, + "fpv": 43175, + "fr": 936, + "fr": 5512, + "fra": 3368, + "fra": 15644, + "frac": 15607, + "fracking": 21894, + "fractal": 46471, + "fraction": 26788, + "fractu": 25847, + "fracture": 28995, + "fractured": 37421, + "fractures": 46213, + "frag": 13093, + "fragile": 23579, + "fragment": 39209, + "fragments": 41424, + "fragr": 15403, + "fragrance": 17874, + "fragrances": 44567, + "fragrant": 37030, + "fram": 27987, + "frame": 11029, + "frame": 6481, + "framed": 13135, + "frames": 15479, + "framework": 13195, + "frameworks": 43136, + "framing": 24539, + "frampton": 41733, + "fran": 2118, + "fran": 18878, + "franc": 3872, + "franc": 42340, + "franca": 48952, + "france": 12045, + "france": 3552, + "frances": 20803, + "francesca": 32327, + "francesco": 25816, + "franch": 11756, + "franchi": 46438, + "franchise": 13664, + "franci": 46458, + "francis": 22187, + "francis": 7660, + "francisco": 6887, + "franco": 17934, + "franco": 17052, + "francois": 29317, + "frank": 5390, + "frank": 5229, + "franken": 20487, + "franken": 48252, + "frankenstein": 26410, + "frankfur": 17442, + "frankfurt": 18598, + "franki": 39227, + "frankie": 38373, + "frankie": 16215, + "franklin": 40935, + "franklin": 9999, + "frankly": 38015, + "franks": 42855, + "frans": 47892, + "franz": 25449, + "franç": 38381, + "fraser": 39082, + "fraser": 16754, + "frat": 15225, + "frat": 39292, + "fraternity": 24433, + "frau": 23063, + "fraud": 40647, + "fraud": 9961, + "fraudul": 42655, + "fraudulent": 47408, + "fray": 41154, + "frazier": 32841, + "frc": 41507, + "fre": 821, + "fre": 43165, + "freak": 20352, + "freak": 13701, + "freaked": 43511, + "freakin": 23900, + "freaking": 11992, + "freaks": 27009, + "freaky": 31583, + "freck": 33328, + "freckles": 48036, + "fred": 9486, + "fred": 6678, + "freddie": 41890, + "freddie": 17014, + "freddy": 24394, + "freder": 10745, + "frederic": 41165, + "frederick": 37103, + "frederick": 18570, + "fredo": 48241, + "free": 2065, + "free": 1139, + "freebie": 35865, + "freebies": 28630, + "freec": 46569, + "freed": 12585, + "freed": 23392, + "freedom": 
17992, + "freedom": 4511, + "freedoms": 32500, + "freef": 48678, + "freel": 14174, + "freelance": 21942, + "freely": 24436, + "freeman": 16450, + "freep": 32499, + "freepalestine": 39242, + "freer": 44676, + "frees": 27455, + "freestyle": 15594, + "freeway": 24927, + "freeze": 14187, + "freezer": 25390, + "freezing": 12499, + "frei": 30183, + "freight": 17023, + "fremantle": 48012, + "fremont": 34578, + "fren": 2919, + "french": 13118, + "french": 3461, + "frenzy": 30084, + "frequ": 9211, + "frequencies": 45319, + "frequency": 18825, + "frequent": 19836, + "frequently": 22434, + "fresco": 31609, + "fresh": 4065, + "fresh": 2975, + "fresher": 49284, + "freshers": 35810, + "freshest": 46809, + "freshly": 16081, + "freshman": 9381, + "freshmen": 21292, + "freshness": 45872, + "freshwater": 24803, + "fresno": 40879, + "fresno": 20995, + "fret": 40510, + "freud": 40787, + "frey": 22136, + "frey": 9082, + "fri": 815, + "fri": 6882, + "friars": 30513, + "fric": 18981, + "frick": 46304, + "friction": 38563, + "frid": 46388, + "frida": 36001, + "friday": 6350, + "friday": 1461, + "fridayfeeling": 11952, + "fridaymotivation": 38544, + "fridaynight": 44858, + "fridayreads": 37736, + "fridays": 15589, + "fridaythe": 47642, + "fridge": 13491, + "fridges": 40734, + "frie": 36999, + "fried": 13743, + "fried": 7310, + "friedman": 29402, + "friedrich": 34171, + "friend": 3017, + "friend": 1625, + "friendly": 44612, + "friendly": 4681, + "friends": 38875, + "friends": 1574, + "friendship": 42674, + "friendship": 7679, + "friendships": 28840, + "fries": 11369, + "frifotos": 40493, + "friger": 20785, + "friggin": 48300, + "frigh": 34831, + "fright": 24277, + "fright": 40207, + "frightened": 47136, + "frightening": 39290, + "fringe": 10640, + "fris": 37252, + "frisbee": 45768, + "frisco": 35945, + "frit": 34614, + "fritz": 29860, + "friyay": 38887, + "frm": 12951, + "fro": 626, + "fro": 26603, + "frock": 45306, + "frog": 26494, + "frog": 11438, + "frogs": 20781, + "from": 8330, + "from": 633, + "frome": 48691, + "fromhome": 41477, + "fromthe": 18756, + "fron": 1847, + "fron": 18036, + "front": 10996, + "front": 2184, + "frontal": 35794, + "frontier": 18253, + "frontiers": 38396, + "frontline": 29589, + "frontman": 36775, + "fronts": 26846, + "froome": 48560, + "frosh": 47069, + "frost": 39420, + "frost": 11619, + "frosted": 35988, + "frosting": 33872, + "frosty": 22760, + "froze": 47788, + "frozen": 42464, + "frozen": 8507, + "frs": 26216, + "fru": 3248, + "fruit": 16771, + "fruit": 5190, + "fruitful": 31494, + "fruits": 13282, + "fruity": 22320, + "frustr": 16046, + "frustrated": 25111, + "frustrating": 31342, + "frustration": 30535, + "fry": 33914, + "fry": 13686, + "fryer": 49217, + "frying": 38516, + "fs": 23699, + "fs": 3854, + "fsa": 33373, + "fsu": 44185, + "fsu": 19317, + "ft": 3391, + "ft": 981, + "fta": 41975, + "ftc": 33752, + "fted": 5612, + "fter": 25063, + "fthe": 22886, + "ftheday": 9823, + "fting": 6174, + "fton": 26605, + "ftp": 42649, + "fts": 3767, + "ftse": 46717, + "ftw": 19298, + "fty": 17494, + "fu": 665, + "fu": 9098, + "fuch": 42617, + "fudge": 24270, + "fue": 43723, + "fuego": 41500, + "fuel": 21113, + "fuel": 5945, + "fueled": 28792, + "fueling": 38793, + "fuelled": 48357, + "fuels": 19365, + "fuentes": 44393, + "fuer": 29645, + "fug": 29227, + "fugitive": 39257, + "fuji": 15573, + "fuji": 21634, + "fujifilm": 24765, + "fuk": 31051, + "fuku": 20728, + "fukushima": 33929, + "ful": 1814, + "ful": 857, + "fulbright": 41834, + "fulfill": 43675, + "fulfill": 27467, + "fulfilled": 29919, + 
"fulfilling": 30621, + "fulfillment": 45573, + "fulham": 25574, + "full": 9407, + "full": 1476, + "fuller": 20225, + "fullerton": 42822, + "fullest": 35603, + "fully": 39142, + "fully": 2401, + "fulness": 10526, + "fuls": 41606, + "fulton": 26725, + "fum": 38393, + "fumble": 49373, + "fun": 1229, + "fun": 1499, + "func": 8679, + "function": 8093, + "functional": 12885, + "functionality": 33316, + "functioning": 25479, + "functions": 18001, + "fund": 19089, + "fund": 4877, + "fundam": 11670, + "fundament": 18852, + "fundamental": 17627, + "fundamentally": 45378, + "fundamentals": 27887, + "funday": 15439, + "funded": 10588, + "funding": 5588, + "fundra": 6201, + "fundraiser": 10049, + "fundraising": 10755, + "funds": 7066, + "funer": 40693, + "funeral": 10606, + "funfact": 31596, + "funfactfriday": 40710, + "fungal": 38838, + "fungi": 27837, + "fungus": 30677, + "funk": 37353, + "funk": 13372, + "funko": 49402, + "funko": 23697, + "funky": 16492, + "funnel": 27862, + "funnier": 42232, + "funniest": 15557, + "funny": 19124, + "funny": 3789, + "funrun": 34185, + "fur": 2395, + "fur": 9686, + "furi": 40816, + "furious": 17522, + "furman": 49238, + "furn": 21348, + "furnace": 31913, + "furnished": 37388, + "furnitu": 45696, + "furniture": 7993, + "furry": 33414, + "furry": 15351, + "fursuit": 25306, + "fursuit": 43083, + "fursuitfriday": 27917, + "further": 5583, + "fury": 14404, + "fus": 18419, + "fuse": 23386, + "fused": 38994, + "fusion": 44661, + "fusion": 9364, + "fuss": 26331, + "fut": 21460, + "fut": 34049, + "futbol": 33014, + "futsal": 20558, + "futu": 33454, + "futur": 38840, + "future": 7959, + "future": 1904, + "futureof": 22599, + "futureofwork": 33202, + "futures": 13488, + "futuri": 19068, + "futurism": 48435, + "futurist": 48086, + "futuristic": 30987, + "fuzz": 47128, + "fuzz": 40443, + "fuzzy": 25876, + "fv": 29795, + "fw": 23934, + "fw": 5277, + "fwd": 27052, + "fx": 17807, + "fx": 9025, + "fy": 8440, + "fy": 2702, + "fyi": 16014, + "fying": 5294, + "fz": 46400, + "fé": 34072, + "g": 70, + "g": 326, + "ga": 1275, + "ga": 1531, + "gaa": 10715, + "gaal": 40867, + "gaard": 24645, + "gab": 3927, + "gab": 37382, + "gabbana": 36272, + "gabby": 48115, + "gabby": 24567, + "gabe": 18916, + "gabi": 41931, + "gable": 33387, + "gables": 40928, + "gabri": 8311, + "gabriel": 31684, + "gabriel": 13244, + "gabrielle": 33572, + "gaby": 46420, + "gac": 32520, + "gad": 7786, + "gad": 44651, + "gadget": 25525, + "gadgets": 22840, + "gado": 29489, + "gae": 22003, + "gael": 35663, + "gaelic": 31173, + "gaf": 21354, + "gaf": 32670, + "gag": 14121, + "gag": 18844, + "gaga": 9782, + "gage": 21081, + "gah": 27750, + "gai": 24214, + "gai": 25153, + "gaia": 41269, + "gail": 41160, + "gail": 27676, + "gain": 21536, + "gain": 6202, + "gaine": 35747, + "gained": 14489, + "gaines": 49225, + "gainesville": 40427, + "gaining": 15260, + "gains": 42751, + "gains": 12107, + "gal": 2001, + "gal": 4488, + "gala": 7211, + "galac": 18864, + "galactic": 25514, + "galap": 41115, + "galapagos": 44057, + "galat": 39853, + "galatasar": 42413, + "galatasaray": 47787, + "galax": 5647, + "galaxies": 32435, + "galaxy": 32130, + "galaxy": 6545, + "gale": 37658, + "gale": 21380, + "galerie": 44539, + "gales": 48633, + "gali": 17546, + "gali": 30552, + "galicia": 47927, + "galileo": 39671, + "gall": 3011, + "gall": 33374, + "galla": 16847, + "gallagher": 19168, + "galleria": 40656, + "galleries": 22304, + "gallery": 36648, + "gallery": 3830, + "galley": 48917, + "galli": 22568, + "gallipoli": 47249, + "gallo": 37350, + "gallo": 
33265, + "gallon": 24615, + "gallons": 29335, + "galloway": 27796, + "galore": 22286, + "gals": 20125, + "galvani": 46046, + "galve": 34328, + "galveston": 36003, + "galway": 38045, + "galway": 17112, + "gam": 1162, + "gam": 34195, + "gama": 35873, + "gambia": 32988, + "gamble": 26121, + "gambling": 20287, + "game": 2882, + "game": 1063, + "gameart": 31490, + "gameboy": 40951, + "gamecube": 44079, + "gameday": 9241, + "gamedev": 7544, + "gameinsight": 42626, + "gameof": 10987, + "gameofthrones": 11822, + "gameon": 47691, + "gameplay": 16794, + "gamer": 12595, + "gamer": 11598, + "gamergate": 25961, + "gamers": 16166, + "gamersunite": 26423, + "games": 18551, + "games": 1955, + "gamescom": 37003, + "gamestop": 39436, + "gametime": 45899, + "gami": 42025, + "gamification": 48908, + "gaming": 28803, + "gaming": 4017, + "gamma": 22180, + "gamo": 39325, + "gan": 1822, + "gan": 1670, + "gand": 8399, + "ganda": 27261, + "gander": 44508, + "gandhi": 12322, + "ganesh": 30362, + "ganesha": 45185, + "gang": 8066, + "gang": 5674, + "ganga": 36275, + "gangnam": 46777, + "gangs": 29844, + "gangsta": 37365, + "gangster": 26514, + "gani": 48324, + "gann": 45665, + "gannon": 45837, + "gano": 25304, + "gao": 26556, + "gaon": 19279, + "gap": 29906, + "gap": 7609, + "gaps": 25296, + "gar": 1099, + "gar": 5824, + "gara": 28710, + "garage": 8474, + "garbage": 13760, + "garci": 44658, + "garcia": 10529, + "gard": 7751, + "gard": 21003, + "garda": 31906, + "garde": 22649, + "garden": 4674, + "garden": 2756, + "gardenchat": 46292, + "gardener": 28554, + "gardeners": 38205, + "gardening": 10483, + "gardens": 6152, + "gardiner": 43121, + "gardner": 18710, + "gare": 5633, + "gare": 48402, + "gareth": 37140, + "gareth": 18175, + "garfield": 26728, + "garh": 16762, + "gari": 40898, + "gari": 43080, + "garis": 37839, + "garland": 23418, + "garlic": 9685, + "garment": 31418, + "garments": 43341, + "garmin": 39885, + "garner": 20340, + "garnet": 37669, + "garo": 30388, + "garrett": 15881, + "garri": 21764, + "garrison": 30108, + "garros": 40425, + "garry": 24398, + "gars": 12055, + "gart": 18380, + "gart": 18751, + "garten": 14684, + "garter": 48420, + "garth": 45398, + "garth": 24469, + "gartner": 43334, + "gartner": 29678, + "garty": 46383, + "garu": 31140, + "garvey": 39511, + "garwal": 38623, + "gary": 10535, + "gary": 4516, + "garza": 49393, + "gas": 5047, + "gas": 2474, + "gases": 36971, + "gasoline": 27691, + "gasp": 43762, + "gaston": 40669, + "gastri": 49197, + "gastro": 23740, + "gastron": 30699, + "gastronomy": 46987, + "gat": 5314, + "gat": 18941, + "gata": 44575, + "gate": 8071, + "gate": 3302, + "gated": 23997, + "gates": 9472, + "gateshead": 40051, + "gateway": 45221, + "gateway": 14943, + "gather": 36345, + "gather": 12602, + "gathered": 14646, + "gathering": 9197, + "gatherings": 48096, + "gathers": 39250, + "gating": 27561, + "gation": 11095, + "gations": 33906, + "gato": 44492, + "gator": 20216, + "gator": 16390, + "gatorade": 36354, + "gators": 17173, + "gatory": 24796, + "gatsby": 32586, + "gatwick": 37122, + "gau": 5919, + "gau": 43068, + "gauge": 18728, + "gaunt": 31862, + "gauntlet": 37163, + "gautam": 45853, + "gautam": 31356, + "gauteng": 40333, + "gav": 8966, + "gave": 3485, + "gavin": 32974, + "gavin": 16389, + "gaw": 15405, + "gawd": 43239, + "gawx": 43420, + "gay": 7460, + "gay": 5627, + "gaya": 39477, + "gaye": 41401, + "gayle": 29998, + "gayo": 36768, + "gays": 28001, + "gaz": 4837, + "gaz": 36475, + "gaza": 38391, + "gaza": 10112, + "gazaunderattack": 42458, + "gaze": 23212, + "gazette": 
20443, + "gazing": 28373, + "gb": 8727, + "gb": 4619, + "gba": 18528, + "gbbo": 34474, + "gbc": 42993, + "gbp": 27391, + "gbr": 31984, + "gby": 40509, + "gc": 8577, + "gc": 6043, + "gcc": 26804, + "gcse": 28763, + "gcu": 34137, + "gd": 13264, + "gd": 14604, + "gdc": 32793, + "gden": 44928, + "gdp": 17100, + "gdpr": 22963, + "ge": 619, + "ge": 710, + "gea": 26790, + "gear": 15532, + "gear": 4802, + "gearbox": 42454, + "geared": 33903, + "gearing": 19027, + "gears": 21147, + "geaux": 36313, + "gecko": 38616, + "ged": 17252, + "ged": 3480, + "geddon": 31720, + "gedly": 13991, + "gee": 9806, + "gee": 9071, + "geek": 17920, + "geek": 7135, + "geeks": 20110, + "geeky": 47332, + "geel": 25906, + "geelong": 34555, + "gees": 38088, + "geese": 26413, + "geez": 42394, + "geh": 30320, + "geist": 38290, + "gel": 7343, + "gel": 5697, + "gelato": 29577, + "gels": 42552, + "gely": 14637, + "gem": 14261, + "gem": 7613, + "gement": 19495, + "gemini": 23086, + "gemma": 23952, + "gems": 14355, + "gemstone": 27747, + "gemstones": 43972, + "gen": 1024, + "gen": 3278, + "gence": 16088, + "gency": 5245, + "gend": 33247, + "gender": 22976, + "gender": 5906, + "gendere": 35824, + "genderequality": 43338, + "gene": 5822, + "gene": 7962, + "genealo": 24142, + "genealogy": 29381, + "gener": 1832, + "general": 20576, + "general": 3658, + "generally": 19256, + "generals": 30296, + "generate": 16896, + "generated": 19450, + "generates": 33938, + "generating": 23882, + "generation": 41211, + "generation": 4883, + "generational": 34506, + "generations": 12247, + "generative": 29472, + "generator": 19399, + "generators": 41917, + "generic": 26978, + "generosity": 23015, + "generous": 12570, + "generously": 35113, + "genes": 19683, + "genesis": 13518, + "genetic": 47746, + "genetic": 13578, + "genetically": 36745, + "genetics": 18276, + "geneva": 14799, + "genevie": 41633, + "genevieve": 46584, + "geni": 22334, + "genic": 15750, + "genie": 24221, + "genital": 32960, + "genius": 8235, + "geniuses": 41406, + "geno": 41544, + "geno": 46776, + "genoa": 43993, + "genoci": 14687, + "genocide": 15903, + "genome": 23991, + "genomic": 44371, + "genomics": 26227, + "genre": 14249, + "genres": 30340, + "gens": 17449, + "gent": 3685, + "gent": 7139, + "gente": 34325, + "gentle": 7262, + "gentle": 13577, + "gentleman": 13293, + "gentlemen": 11692, + "gently": 17187, + "gento": 28320, + "gentri": 41148, + "gentry": 47225, + "gents": 18862, + "genu": 9182, + "genuine": 12184, + "genuinely": 20006, + "genus": 38161, + "geny": 35323, + "geo": 5038, + "geo": 11604, + "geocaching": 47908, + "geof": 20629, + "geoff": 33697, + "geoff": 20386, + "geoffrey": 29520, + "geograph": 45920, + "geographic": 22635, + "geographical": 39380, + "geography": 17101, + "geological": 38380, + "geology": 21578, + "geom": 46135, + "geome": 12958, + "geometric": 22419, + "geometry": 21731, + "geon": 20844, + "geon": 7295, + "geons": 15914, + "geopol": 39758, + "geor": 2549, + "georg": 43126, + "george": 8377, + "george": 3296, + "georges": 25042, + "georgetown": 22970, + "georgie": 42115, + "georgina": 43892, + "geospatial": 46238, + "geothermal": 38413, + "geous": 3068, + "ger": 1291, + "ger": 1502, + "gera": 48867, + "gerald": 29901, + "gerald": 13269, + "gerard": 35979, + "gerard": 20826, + "gerber": 45058, + "gered": 40179, + "geri": 41664, + "geri": 46214, + "gering": 24077, + "germain": 38786, + "german": 14972, + "german": 4710, + "germans": 28400, + "germany": 4464, + "germin": 44721, + "germs": 47731, + "geronimo": 45171, + "gerrard": 26538, + "gerry": 
29825, + "gerry": 23026, + "gers": 3314, + "gertrude": 46950, + "gervais": 36527, + "gery": 32845, + "ges": 3316, + "gest": 11843, + "gest": 2033, + "gesture": 21780, + "gestures": 43524, + "get": 5670, + "get": 779, + "geta": 13155, + "getaway": 16131, + "gether": 27224, + "getic": 20661, + "getin": 25822, + "getit": 44891, + "getit": 48315, + "getoutside": 35644, + "gets": 39448, + "gets": 2127, + "gett": 6647, + "gett": 27965, + "gettable": 15620, + "gette": 29800, + "gettin": 13428, + "getting": 30885, + "getting": 1500, + "getty": 31185, + "getty": 13965, + "gettys": 35189, + "gettysburg": 37062, + "getyour": 42159, + "gey": 29289, + "gf": 28953, + "gf": 10846, + "gfriend": 35245, + "gfs": 37553, + "gg": 1129, + "gg": 3286, + "gga": 26003, + "ggan": 25626, + "gge": 21521, + "gge": 31659, + "gged": 6095, + "gger": 12367, + "gger": 3493, + "ggers": 7480, + "ggg": 20143, + "gggg": 33513, + "ggi": 21662, + "ggin": 17160, + "gging": 4966, + "ggins": 12444, + "ggle": 34981, + "ggle": 11430, + "ggled": 46328, + "ggles": 14703, + "ggling": 16523, + "ggly": 39407, + "ggs": 4797, + "ggy": 24935, + "ggy": 6476, + "gh": 583, + "gh": 790, + "gha": 10010, + "gha": 25183, + "gham": 21456, + "ghan": 18945, + "ghan": 6624, + "ghana": 30330, + "ghana": 9731, + "ghanaian": 34223, + "ghani": 36699, + "ghar": 37334, + "ghar": 36973, + "ghat": 43989, + "ghaz": 37493, + "ghc": 42139, + "ghe": 10754, + "ghe": 28561, + "ghead": 40783, + "ghee": 34794, + "gher": 21542, + "gher": 14796, + "ghet": 18447, + "ghetti": 17485, + "ghetto": 22403, + "ghi": 22436, + "ghi": 22279, + "ghibli": 40555, + "ghj": 38439, + "ghlin": 24131, + "gho": 4307, + "ghorn": 38094, + "ghosh": 43279, + "ghoshal": 49134, + "ghost": 11417, + "ghost": 7108, + "ghostbusters": 25462, + "ghostly": 44901, + "ghosts": 16737, + "ghou": 35843, + "ghoul": 45302, + "ghouse": 38238, + "ghs": 14157, + "ght": 1413, + "ght": 630, + "ghted": 4963, + "ghter": 2427, + "ghters": 12994, + "ghtful": 8334, + "ghting": 3019, + "ghtly": 6993, + "ghtning": 39740, + "ghton": 16353, + "ghts": 1259, + "ghty": 20968, + "ghty": 5866, + "ghu": 25808, + "ghue": 45675, + "ghyun": 25010, + "ghz": 24325, + "gi": 707, + "gi": 4478, + "gia": 8864, + "giac": 35444, + "giam": 39623, + "gian": 17274, + "gian": 12866, + "gianni": 46752, + "giant": 23668, + "giant": 4687, + "giants": 7076, + "giar": 34241, + "gib": 9816, + "gibb": 18964, + "gibbons": 31974, + "gibbs": 26488, + "gibility": 33297, + "gible": 13159, + "gibr": 20206, + "gibraltar": 23988, + "gibson": 37420, + "gibson": 12178, + "gic": 27900, + "gic": 2570, + "gical": 32973, + "gically": 26320, + "gid": 36774, + "gid": 21413, + "giddy": 40894, + "gideon": 43867, + "gidi": 30603, + "gie": 11459, + "gie": 3991, + "gier": 28974, + "gies": 5505, + "gif": 11363, + "gif": 11677, + "gifford": 47850, + "gifs": 37643, + "gift": 20569, + "gift": 2733, + "gifted": 15110, + "giftide": 20152, + "giftideas": 23487, + "gifting": 39546, + "gifts": 5836, + "gig": 26981, + "gig": 7471, + "gigab": 34530, + "gigan": 24104, + "gigantic": 31507, + "giggle": 36426, + "giggles": 42731, + "giggs": 44692, + "gigi": 44106, + "gigi": 26171, + "gigs": 20316, + "gil": 3997, + "gil": 10088, + "gila": 46952, + "gilbert": 14154, + "gilded": 44341, + "giles": 24802, + "gill": 14280, + "gill": 12003, + "gille": 29610, + "gilles": 39590, + "gillespie": 36242, + "gillette": 38603, + "gilli": 13695, + "gillian": 28753, + "gills": 48851, + "gilmore": 27603, + "gilt": 44378, + "gim": 31284, + "gimm": 40692, + "gimme": 21525, + "gin": 3374, + "gin": 4941, + 
"gina": 15604, + "gine": 27482, + "ging": 10829, + "ging": 3905, + "ginger": 16287, + "ginger": 9718, + "gingerbread": 23692, + "gini": 35768, + "gino": 36521, + "gins": 18328, + "gio": 16329, + "gio": 8050, + "gion": 41226, + "gior": 14920, + "giorgio": 33271, + "giorno": 33310, + "gios": 41927, + "gious": 14419, + "giov": 21404, + "giovanni": 26574, + "gipp": 41351, + "gir": 1077, + "gir": 25481, + "gira": 16949, + "giraffe": 22826, + "giri": 31709, + "girl": 3914, + "girl": 1611, + "girlfriend": 8217, + "girlfriends": 30736, + "girlpower": 37433, + "girls": 15480, + "girls": 1917, + "girly": 29605, + "giro": 39664, + "giro": 26454, + "girona": 47842, + "giroud": 41177, + "gis": 16266, + "gis": 12773, + "gist": 21241, + "git": 16060, + "git": 20918, + "gita": 40838, + "github": 31196, + "giu": 17931, + "giuli": 29762, + "giuliani": 47739, + "giuse": 29385, + "giuseppe": 33563, + "give": 4120, + "give": 1781, + "giveaway": 5310, + "giveaways": 18974, + "giveback": 41385, + "given": 33323, + "given": 4302, + "givenchy": 38245, + "giver": 43339, + "gives": 3926, + "giveup": 35485, + "giving": 14673, + "giving": 2339, + "givingback": 49300, + "givingtuesday": 23556, + "giz": 29237, + "gk": 38953, + "gk": 18719, + "gl": 1849, + "gl": 14751, + "gla": 1523, + "gla": 36904, + "glaci": 14924, + "glacial": 40782, + "glacier": 19282, + "glaciers": 42528, + "glad": 20841, + "glad": 4761, + "glades": 37432, + "gladi": 21742, + "gladiator": 38477, + "gladiators": 41087, + "gladly": 41598, + "gladys": 43168, + "glam": 8738, + "glam": 16905, + "glamorous": 22896, + "glamour": 42876, + "glamour": 17499, + "glamping": 46167, + "glan": 40482, + "glan": 45844, + "glance": 26557, + "gland": 41441, + "glar": 48535, + "glar": 41702, + "glare": 46035, + "glas": 29935, + "glas": 43654, + "glasgo": 6757, + "glasgow": 29990, + "glasgow": 7363, + "glass": 16305, + "glass": 3313, + "glasses": 6116, + "glaston": 26848, + "glastonbury": 28233, + "glau": 39171, + "glaze": 28112, + "glazed": 24122, + "gle": 7166, + "gle": 2865, + "glee": 32379, + "glee": 21614, + "glen": 6158, + "glen": 11049, + "glend": 38332, + "glendale": 33043, + "glenn": 32004, + "glenn": 12861, + "gler": 34649, + "gley": 21998, + "gli": 5896, + "gli": 28791, + "glia": 22217, + "glide": 37321, + "glider": 41636, + "glimp": 12888, + "glimpse": 13817, + "glio": 29785, + "glit": 21079, + "glitch": 29563, + "glitter": 16528, + "glitz": 44542, + "glo": 1721, + "glo": 30474, + "glob": 13363, + "global": 6707, + "global": 2779, + "globalgoals": 33211, + "globalhealth": 46751, + "globalization": 47680, + "globally": 17775, + "globalwarming": 46017, + "globe": 19436, + "globe": 9368, + "globes": 38085, + "glock": 38818, + "glomer": 43689, + "gloom": 48594, + "gloomy": 32199, + "glori": 7270, + "gloria": 19244, + "glorious": 9171, + "glory": 36107, + "glory": 7285, + "glos": 40633, + "gloss": 38258, + "gloss": 22014, + "glossy": 29802, + "glou": 15989, + "gloucester": 28133, + "gloucester": 23835, + "gloucestershire": 33789, + "glove": 16078, + "glover": 21594, + "gloves": 12363, + "glow": 30472, + "glow": 10111, + "glowing": 18437, + "glows": 48107, + "glu": 5952, + "glu": 32281, + "glucose": 34642, + "glue": 22103, + "glued": 38135, + "gluten": 15482, + "gluten": 15524, + "glutenfree": 16138, + "gly": 13027, + "glycer": 48914, + "gm": 18743, + "gm": 5918, + "gma": 18155, + "gmail": 11119, + "gman": 41043, + "gman": 36936, + "gmb": 35934, + "gmb": 31799, + "gmbh": 46877, + "gmc": 27257, + "gmo": 23486, + "gms": 36987, + "gmt": 13803, + "gn": 2455, + "gn": 
9831, + "gna": 23009, + "gnation": 45912, + "gne": 25407, + "gni": 5104, + "gnment": 25110, + "gno": 23376, + "gno": 43686, + "gnocchi": 48299, + "gnome": 33643, + "gnon": 20561, + "go": 650, + "go": 861, + "goa": 14399, + "goal": 9003, + "goal": 3321, + "goalie": 20723, + "goalkeeper": 16601, + "goals": 3295, + "goalscorer": 43547, + "goaltender": 44151, + "goat": 34082, + "goat": 9530, + "goats": 18393, + "gob": 29559, + "gobeavs": 48285, + "goblin": 26223, + "goblue": 25232, + "gobucks": 29175, + "gocougs": 34202, + "god": 4190, + "god": 1731, + "godawgs": 40436, + "godbless": 46616, + "godbless": 44007, + "godd": 16589, + "goddamn": 28495, + "goddard": 37827, + "goddess": 10808, + "godfather": 26222, + "godfrey": 40148, + "godis": 38521, + "godly": 42438, + "gods": 33620, + "gods": 10328, + "goducks": 35889, + "godzilla": 23369, + "goe": 22084, + "goers": 27784, + "goes": 43581, + "goes": 2635, + "gof": 17537, + "goff": 34399, + "goftheday": 39360, + "gofund": 34445, + "gofundme": 34686, + "gog": 42949, + "goggles": 31027, + "gogh": 19697, + "gogo": 22688, + "gogreen": 36279, + "gohawks": 34884, + "goi": 24917, + "goin": 13939, + "going": 25787, + "going": 1245, + "goku": 29550, + "gol": 1537, + "gol": 18257, + "gola": 41090, + "gold": 4999, + "gold": 2209, + "goldberg": 25161, + "goldcoast": 34634, + "golden": 10763, + "golden": 3878, + "goldeng": 20650, + "goldenglobes": 26842, + "goldfish": 40293, + "goldie": 42805, + "goldman": 27164, + "golds": 30526, + "golds": 40283, + "goldsmith": 40214, + "gole": 41297, + "golf": 9096, + "golf": 3096, + "golfclub": 45742, + "golfer": 24579, + "golfers": 28441, + "golfing": 31379, + "goli": 29265, + "goliath": 41602, + "gom": 7051, + "goma": 46198, + "gomes": 39128, + "gomez": 16433, + "gon": 1854, + "gon": 3379, + "gona": 34835, + "gone": 35135, + "gone": 3601, + "gong": 28486, + "gonna": 2562, + "gonz": 10587, + "gonzaga": 36241, + "gonzale": 17512, + "gonzales": 31265, + "gonzalez": 18198, + "goo": 1381, + "goo": 17882, + "good": 2185, + "good": 886, + "goodbye": 6968, + "goodday": 46284, + "goode": 42076, + "goodfood": 46844, + "goodfriday": 40360, + "goodie": 29213, + "goodies": 13308, + "goodluck": 19718, + "goodman": 24146, + "goodmorning": 14421, + "goodness": 10531, + "goodnight": 8540, + "goodreads": 31629, + "goods": 9340, + "goodtimes": 22570, + "goodvibes": 43146, + "goodwill": 24902, + "goodwin": 28080, + "goodwood": 30008, + "goody": 35937, + "goodyear": 42858, + "goofy": 26879, + "goog": 18581, + "google": 12195, + "google": 3460, + "googled": 40345, + "googleplay": 37309, + "goon": 15267, + "goons": 30440, + "goooo": 35876, + "goooo": 48957, + "goose": 21445, + "goose": 13822, + "goosebumps": 32254, + "gop": 18942, + "gop": 6250, + "gopack": 46995, + "gopackgo": 47719, + "gopal": 47268, + "gopdebate": 39806, + "gopher": 47750, + "gopher": 48905, + "gophers": 31957, + "gopro": 17511, + "gor": 1747, + "gor": 29827, + "gordo": 47707, + "gordon": 20485, + "gordon": 8244, + "gore": 30311, + "gore": 17872, + "gorg": 46815, + "gorge": 35548, + "gorge": 20038, + "gorgeous": 3241, + "gori": 12461, + "goria": 43359, + "gorilla": 37910, + "gorilla": 21994, + "gorman": 35741, + "goro": 44977, + "gory": 7160, + "gos": 20517, + "gos": 5693, + "gosh": 15395, + "gosling": 35320, + "gosp": 9617, + "gospel": 11313, + "goss": 39734, + "goss": 36924, + "gossi": 15684, + "gossip": 18963, + "got": 10125, + "got": 1005, + "gota": 36693, + "gotcha": 43275, + "gote": 49345, + "goth": 48465, + "goth": 20437, + "gotham": 46123, + "gotham": 18299, + 
"gothic": 15426, + "goti": 9497, + "goto": 39715, + "gots": 35215, + "gott": 5089, + "gott": 36466, + "gotta": 4633, + "gotten": 5889, + "gotti": 41881, + "gotv": 36089, + "gou": 10520, + "gou": 36555, + "gouache": 43314, + "goul": 33187, + "gould": 31087, + "gour": 13580, + "gourmet": 19111, + "gov": 4022, + "gov": 4564, + "gove": 36997, + "govegan": 38886, + "gover": 10471, + "gover": 16759, + "govern": 2351, + "govern": 32404, + "governance": 13386, + "governing": 30946, + "government": 3149, + "governmental": 42609, + "governments": 19582, + "governor": 17459, + "governor": 6630, + "governors": 26881, + "govin": 42451, + "govt": 5345, + "govuk": 28830, + "gow": 21885, + "gow": 33788, + "gowan": 31307, + "gower": 43448, + "gown": 13719, + "gowns": 38029, + "goyal": 35105, + "gp": 19329, + "gp": 5051, + "gpa": 24098, + "gps": 13639, + "gpu": 38561, + "gq": 40286, + "gq": 31324, + "gr": 709, + "gr": 6062, + "gra": 782, + "gra": 15276, + "grab": 4646, + "grabbed": 22856, + "grabbing": 26440, + "grabs": 17076, + "grac": 11323, + "grace": 13225, + "grace": 5142, + "graced": 31894, + "graceful": 25242, + "graces": 38629, + "graci": 11174, + "gracias": 16463, + "gracie": 23235, + "gracing": 37263, + "gracious": 29044, + "grad": 19869, + "grad": 7291, + "gradable": 41529, + "grade": 45435, + "grade": 3394, + "graded": 13823, + "grader": 23930, + "graders": 10930, + "grades": 10838, + "gradient": 36885, + "grading": 19016, + "grads": 17811, + "gradu": 3230, + "gradual": 45210, + "gradually": 32192, + "graduate": 6675, + "graduated": 15128, + "graduates": 12236, + "graduating": 14819, + "graduation": 8060, + "grady": 33980, + "graeme": 30192, + "graf": 46478, + "graf": 39765, + "graff": 10656, + "graffiti": 11676, + "graft": 32698, + "grafton": 47347, + "graham": 19805, + "graham": 7711, + "grail": 37184, + "grain": 44003, + "grain": 12109, + "grains": 25791, + "gral": 25631, + "gram": 2949, + "gram": 2338, + "grammar": 16077, + "grammy": 15388, + "grammys": 18121, + "grams": 6294, + "gran": 3892, + "gran": 14493, + "granada": 31172, + "grand": 3058, + "grand": 2991, + "grandad": 29148, + "grandchildren": 36856, + "granddaughter": 29460, + "grande": 37514, + "grande": 10757, + "grandes": 36382, + "grandfather": 15346, + "grandma": 10525, + "grandmother": 17469, + "grandpa": 14582, + "grandparents": 21311, + "grandprix": 39358, + "grandson": 20766, + "grandstand": 43172, + "grange": 45027, + "grange": 23850, + "granger": 42968, + "granite": 18813, + "grann": 45585, + "granny": 22710, + "granola": 34271, + "grant": 18682, + "grant": 5442, + "granted": 14156, + "granth": 41283, + "grants": 15123, + "grape": 19131, + "grape": 15959, + "grapefruit": 28347, + "grapes": 18580, + "grapevine": 47619, + "graph": 1349, + "graph": 4407, + "graphene": 38387, + "grapher": 14987, + "graphers": 32088, + "graphic": 15653, + "graphic": 4245, + "graphical": 20878, + "graphicdesign": 21907, + "graphics": 9492, + "graphies": 40164, + "graphite": 29447, + "graphs": 24670, + "graphy": 4897, + "grapp": 30843, + "gras": 31517, + "gras": 17584, + "grasp": 34975, + "grass": 11584, + "grass": 5922, + "grasses": 46807, + "grasshopper": 48894, + "grassi": 42294, + "grasso": 34808, + "grassroots": 21991, + "grassy": 44140, + "grat": 9221, + "grate": 32463, + "grateful": 45659, + "grateful": 5730, + "grati": 36402, + "gratis": 33638, + "gratitude": 12614, + "grav": 20663, + "grave": 16606, + "grave": 9981, + "gravel": 27054, + "graves": 17665, + "graveyard": 31176, + "gravit": 26150, + "gravitational": 45268, + "gravity": 
47426, + "gravity": 15160, + "gravy": 21225, + "gray": 12703, + "gray": 7048, + "grays": 46848, + "grayson": 45831, + "grayson": 25471, + "grazi": 42427, + "grazie": 38698, + "grazing": 29889, + "grc": 44069, + "gre": 689, + "gre": 17878, + "grease": 24132, + "greasy": 44376, + "great": 3265, + "great": 830, + "greate": 31930, + "greater": 32725, + "greater": 7033, + "greatest": 39080, + "greatest": 4153, + "greatly": 13978, + "greatness": 14189, + "greats": 21855, + "greaves": 42350, + "greco": 39103, + "gree": 9987, + "gree": 30774, + "greece": 6965, + "greed": 26147, + "greedy": 33301, + "greek": 23844, + "greek": 6842, + "greeks": 35866, + "green": 2762, + "green": 1901, + "greenberg": 46662, + "greene": 16383, + "greener": 31169, + "greenery": 42493, + "greenfield": 39924, + "greeng": 42077, + "greenhouse": 20819, + "greening": 48673, + "greenland": 27345, + "greenpeace": 44755, + "greens": 10235, + "greensboro": 33436, + "greenville": 25156, + "greenway": 35205, + "greenwich": 18658, + "greenwood": 25782, + "greer": 34345, + "greet": 11042, + "greet": 11997, + "greeted": 24546, + "greeting": 17754, + "greetings": 11569, + "greets": 25464, + "greg": 6894, + "greg": 7943, + "gregation": 20131, + "gregg": 39422, + "gregg": 22929, + "gregor": 33856, + "gregor": 16177, + "gregory": 16253, + "gren": 13941, + "gren": 20119, + "grenade": 33679, + "grenfell": 42107, + "gres": 39670, + "gress": 2752, + "gret": 30041, + "greta": 33443, + "gretchen": 45516, + "grette": 38774, + "grew": 10451, + "grey": 9190, + "grey": 5046, + "greyhound": 27363, + "greyhounds": 45718, + "greys": 44311, + "greysanatomy": 36833, + "gri": 2169, + "gri": 18484, + "grid": 29067, + "grid": 9882, + "gridi": 41063, + "gridiron": 47786, + "grids": 46500, + "grief": 21058, + "grier": 22016, + "griev": 36400, + "grieving": 42383, + "griez": 47962, + "griezmann": 48396, + "griff": 17855, + "griff": 35551, + "griffi": 28676, + "griffin": 46612, + "griffin": 13161, + "griffith": 24375, + "griffiths": 34182, + "gril": 49091, + "grill": 44083, + "grill": 9519, + "grille": 34748, + "grilled": 10691, + "grilling": 28324, + "grills": 39464, + "grim": 20383, + "grim": 23635, + "grime": 37101, + "grimes": 25057, + "grimm": 27865, + "grims": 34861, + "grimsby": 41513, + "grin": 11033, + "grin": 28697, + "grinch": 40527, + "grind": 25730, + "grind": 11810, + "grinder": 31733, + "grinding": 21541, + "gring": 40135, + "grip": 15521, + "gripping": 34567, + "grips": 27819, + "gris": 29150, + "grit": 22037, + "grit": 22087, + "grits": 44307, + "gritty": 33704, + "grizz": 14877, + "grizz": 44088, + "grizzlies": 25594, + "grizzly": 29676, + "grl": 48005, + "gro": 1464, + "gro": 12691, + "grocer": 11633, + "groceries": 32409, + "grocery": 13826, + "grom": 45284, + "gron": 22345, + "groningen": 45639, + "groo": 9015, + "groom": 39883, + "groom": 22813, + "grooming": 25575, + "groot": 37708, + "groove": 39484, + "groove": 17680, + "grooves": 43954, + "groovy": 30143, + "gros": 26834, + "gros": 32639, + "gross": 31080, + "gross": 11541, + "grosven": 46911, + "grote": 47207, + "grotto": 45260, + "grou": 1582, + "groun": 45110, + "ground": 9558, + "ground": 2461, + "groundbreaking": 21006, + "grounded": 27799, + "grounds": 8454, + "groundwater": 39457, + "group": 19045, + "group": 1771, + "groupe": 47654, + "groups": 6776, + "grouse": 36327, + "grove": 31756, + "grove": 7463, + "grover": 31345, + "groves": 27306, + "grow": 3179, + "grow": 4559, + "grower": 44925, + "growers": 25689, + "growing": 28429, + "growing": 4425, + "growingup": 43433, + 
"growler": 47096, + "grown": 41762, + "grown": 7120, + "grows": 13352, + "growth": 17925, + "growth": 4026, + "growthhacking": 25963, + "grp": 27321, + "grt": 28557, + "gru": 5957, + "grub": 34019, + "grue": 42047, + "gruesome": 47111, + "grum": 45454, + "grump": 49015, + "grumpy": 23610, + "grun": 16203, + "grunge": 33745, + "gry": 16140, + "gry": 5364, + "gs": 25818, + "gs": 1345, + "gsa": 40433, + "gsc": 47751, + "gshore": 43392, + "gsm": 32181, + "gsp": 49173, + "gst": 22239, + "gt": 16151, + "gt": 4725, + "gta": 14826, + "gta": 15338, + "gtaonline": 27292, + "gtav": 27283, + "gti": 39954, + "gto": 39071, + "gtr": 33407, + "gts": 37338, + "gtx": 35230, + "gu": 700, + "gu": 12916, + "gua": 23751, + "guacam": 37477, + "guacamole": 40115, + "guad": 22966, + "guadal": 46097, + "guadalu": 36994, + "guadalupe": 38360, + "guam": 37325, + "guan": 44191, + "guan": 42406, + "guang": 27019, + "guangzhou": 37857, + "guar": 4119, + "guaran": 9242, + "guarantee": 17421, + "guaranteed": 14731, + "guarantees": 40154, + "guard": 30776, + "guard": 4901, + "guarded": 40602, + "guardi": 12008, + "guardia": 43628, + "guardian": 23713, + "guardian": 9498, + "guardians": 21479, + "guarding": 24966, + "guardiola": 32100, + "guards": 12810, + "guatem": 19423, + "guatemala": 21670, + "guay": 48591, + "guay": 24247, + "gubernat": 41400, + "gubernatorial": 41618, + "gucci": 16779, + "gud": 48061, + "gud": 22378, + "gue": 2030, + "gue": 2917, + "gued": 38893, + "guel": 23146, + "guelph": 27660, + "guer": 10391, + "guern": 29277, + "guernsey": 33982, + "guerra": 38215, + "guerrero": 31967, + "guerrilla": 36715, + "gues": 39971, + "gues": 12601, + "guess": 35506, + "guess": 3135, + "guessed": 28005, + "guesses": 30623, + "guessing": 21891, + "guest": 27349, + "guest": 3781, + "guests": 6212, + "guet": 36797, + "guetta": 45904, + "guez": 12313, + "gug": 31358, + "guggen": 35086, + "guggenheim": 37135, + "gui": 2587, + "gui": 25746, + "guid": 11437, + "guidance": 12508, + "guide": 21845, + "guide": 3555, + "guided": 13194, + "guidelines": 16591, + "guides": 14375, + "guiding": 22759, + "guido": 41818, + "guil": 5008, + "guild": 19755, + "guild": 16597, + "guildford": 34450, + "guildhall": 47224, + "guillau": 41123, + "guillaume": 45394, + "guiller": 33660, + "guillermo": 39524, + "guilt": 26354, + "guilty": 9761, + "guin": 13284, + "guin": 47863, + "guine": 13759, + "guinea": 18537, + "guinness": 16648, + "guire": 18209, + "guise": 42024, + "guit": 3759, + "guitar": 21746, + "guitar": 5084, + "guitarist": 13035, + "guitars": 15023, + "guj": 34935, + "gujar": 12698, + "gujarat": 14714, + "guk": 20280, + "gul": 5530, + "gul": 21350, + "gula": 27426, + "gular": 34969, + "gulf": 22101, + "gulf": 11279, + "gull": 48764, + "gull": 28778, + "gulls": 37501, + "gully": 46112, + "gum": 22041, + "gum": 11235, + "gumb": 40147, + "gumbo": 47126, + "gummy": 34276, + "gums": 46609, + "gun": 2748, + "gun": 3496, + "guna": 43333, + "gundam": 26087, + "gundy": 21162, + "gunman": 32743, + "gunmen": 44738, + "gunn": 27473, + "gunna": 24002, + "gunnar": 45301, + "gunner": 35285, + "gunners": 37788, + "guns": 7591, + "gunsense": 44781, + "gunshot": 49250, + "gunsn": 49028, + "gup": 38632, + "gup": 47335, + "gupta": 15905, + "gur": 3218, + "gur": 30224, + "gura": 46836, + "gurgaon": 33240, + "guri": 43888, + "gurl": 25445, + "gurmee": 35482, + "gurmeetramrahim": 36549, + "guru": 18629, + "guru": 10800, + "gurudev": 48647, + "gus": 8018, + "gust": 24629, + "gusta": 23024, + "gusta": 44196, + "gustav": 32062, + "gustav": 37921, + "gustave": 
43170, + "gustavo": 45943, + "gusto": 37937, + "gusts": 20896, + "gusty": 27589, + "gut": 24780, + "gut": 13486, + "guter": 44963, + "guterres": 48738, + "guth": 31696, + "guthrie": 33164, + "gutier": 32773, + "gutierrez": 33739, + "guts": 25983, + "gutted": 26524, + "gutter": 40537, + "guwa": 43063, + "guwahati": 45045, + "guy": 10008, + "guy": 2149, + "guyana": 45215, + "guyen": 28031, + "guys": 43588, + "guys": 1791, + "guyz": 48170, + "guzman": 37960, + "gv": 15462, + "gv": 17336, + "gw": 7172, + "gw": 15717, + "gwen": 32165, + "gwen": 24182, + "gwin": 43005, + "gwy": 32226, + "gwyne": 36923, + "gx": 40227, + "gy": 2168, + "gy": 1164, + "gya": 43214, + "gyan": 43814, + "gye": 21728, + "gyllen": 49348, + "gym": 9902, + "gym": 5222, + "gymna": 13517, + "gymnasium": 42847, + "gymnast": 42658, + "gymnastics": 20116, + "gyn": 39603, + "gyne": 45836, + "gyp": 40053, + "gypsy": 22354, + "gypt": 41921, + "gz": 45937, + "gz": 35841, + "gö": 40778, + "gü": 31907, + "h": 71, + "h": 327, + "ha": 560, + "ha": 1429, + "haa": 26814, + "haal": 35869, + "haan": 36284, + "haar": 45247, + "haar": 35859, + "haas": 27443, + "haasan": 26601, + "hab": 20573, + "hab": 20002, + "haban": 46225, + "haber": 44737, + "habit": 8491, + "habit": 17215, + "habitat": 11747, + "habitats": 35344, + "habits": 14540, + "habs": 27489, + "hac": 20343, + "hace": 43623, + "haci": 40674, + "hack": 6610, + "hack": 11182, + "hackathon": 25182, + "hacked": 19575, + "hacker": 22376, + "hackers": 21498, + "hacking": 12939, + "hackney": 48811, + "hackney": 24928, + "hacks": 19965, + "had": 10660, + "had": 1100, + "hadi": 39058, + "hadid": 26415, + "hadith": 46907, + "hadley": 44995, + "hadn": 21480, + "hadoop": 43868, + "hae": 30723, + "hae": 27193, + "hafi": 39914, + "hag": 26855, + "hag": 43207, + "hagan": 47489, + "hagen": 14664, + "hager": 48773, + "hagg": 26324, + "hague": 28988, + "hah": 18108, + "hah": 13680, + "haha": 1913, + "haha": 3060, + "hahah": 27253, + "hahah": 15441, + "hahaha": 4722, + "hahahah": 37513, + "hahahah": 20096, + "hahahaha": 8058, + "hahahaha": 9501, + "hahahahah": 33334, + "hahahahaha": 16347, + "hahahahahaha": 26487, + "hahahahahahaha": 43653, + "hahahahahahahaha": 36126, + "hahahha": 49205, + "hahn": 35596, + "hai": 8734, + "hai": 5234, + "haider": 42200, + "haiku": 19542, + "hail": 15272, + "hail": 8634, + "hailed": 44604, + "hailey": 27703, + "hailing": 47288, + "hails": 32571, + "hailstate": 35063, + "hain": 23861, + "hair": 4658, + "hair": 2225, + "haircare": 43682, + "haircut": 14711, + "hairdresser": 47468, + "haired": 27202, + "hairs": 27951, + "hairstyle": 22324, + "hairstyles": 40627, + "hairy": 26513, + "haiti": 17368, + "haitian": 37577, + "haj": 27885, + "haj": 43191, + "haji": 41889, + "hajj": 35576, + "hak": 25142, + "hak": 40671, + "haka": 44011, + "hake": 41663, + "hal": 1296, + "hal": 8708, + "hala": 25918, + "halal": 34216, + "halam": 29061, + "halamadrid": 31132, + "halder": 32201, + "hale": 37038, + "hale": 14701, + "halen": 39204, + "halep": 49017, + "haley": 37330, + "haley": 16839, + "half": 7453, + "half": 2349, + "halftime": 13742, + "halfway": 16736, + "hali": 9860, + "hali": 43030, + "halibut": 49030, + "halifax": 13411, + "hall": 6850, + "hall": 2140, + "halla": 29569, + "halle": 27763, + "halle": 32239, + "hallelujah": 36993, + "halli": 32665, + "hallmark": 31040, + "hallmark": 32053, + "hallmarkchannel": 36840, + "hallo": 3463, + "halloffame": 48578, + "halloween": 28537, + "halloween": 3739, + "halls": 18052, + "hallucin": 35385, + "hallway": 26845, + "halo": 33331, + 
"halo": 11918, + "halsey": 34256, + "halt": 25640, + "halter": 47194, + "halton": 45445, + "ham": 1522, + "ham": 1714, + "hama": 17944, + "hamas": 14818, + "hamburg": 18409, + "hamburger": 33928, + "hamid": 32377, + "hamil": 6725, + "hamill": 45784, + "hamill": 48729, + "hamillhimself": 47324, + "hamilton": 22448, + "hamilton": 7684, + "hamlet": 27722, + "hamlin": 49326, + "hamm": 46110, + "hammer": 15331, + "hammer": 9401, + "hammered": 37251, + "hammers": 35649, + "hammersmith": 42127, + "hammock": 33682, + "hammond": 21761, + "hamont": 18518, + "hamp": 6665, + "hamper": 27692, + "hampshire": 16006, + "hampstead": 37340, + "hampton": 36582, + "hampton": 12285, + "hamptons": 42415, + "hamr": 47979, + "hamradio": 36712, + "hams": 25619, + "hamster": 33313, + "hamstring": 39990, + "hamza": 45762, + "han": 1545, + "han": 3565, + "hana": 16801, + "hand": 1722, + "hand": 2463, + "handbag": 22654, + "handbags": 35667, + "handball": 27988, + "handbook": 25147, + "handcrafted": 22185, + "handed": 10881, + "handedly": 48656, + "handel": 40072, + "handful": 23725, + "handheld": 26812, + "handic": 17812, + "handicap": 27063, + "handicapp": 42349, + "handing": 19196, + "handle": 43681, + "handle": 7245, + "handled": 26824, + "handler": 29097, + "handles": 22124, + "handling": 14071, + "handmade": 18054, + "handmade": 6737, + "handmadehour": 25724, + "handover": 46922, + "hands": 3500, + "handshake": 38418, + "handsome": 7438, + "handwriting": 29986, + "handwritten": 35192, + "handy": 13479, + "hane": 28411, + "hang": 3351, + "hang": 5592, + "hangar": 33439, + "hanged": 40807, + "hanger": 28905, + "hangin": 22670, + "hanging": 4850, + "hangout": 17572, + "hangover": 20755, + "hangs": 21785, + "hani": 39944, + "hani": 18374, + "hank": 35993, + "hank": 17655, + "hanks": 29943, + "hanley": 47284, + "hann": 5584, + "hanna": 10075, + "hannah": 18622, + "hannah": 9142, + "hannel": 43477, + "hanni": 19493, + "hannibal": 25149, + "hannity": 24569, + "hannover": 39976, + "hanoi": 36134, + "hanover": 33246, + "hans": 35172, + "hans": 16628, + "hansen": 19729, + "hanson": 24602, + "hant": 40641, + "hanuk": 32774, + "hanukkah": 34247, + "hanuman": 46975, + "hao": 27184, + "hap": 44981, + "hap": 47988, + "happ": 784, + "happen": 21486, + "happen": 4506, + "happened": 4402, + "happening": 4284, + "happeningnow": 43107, + "happenings": 41998, + "happens": 4988, + "happier": 14118, + "happiest": 13811, + "happily": 17316, + "happiness": 5096, + "happy": 2952, + "happy": 900, + "happybirthday": 9651, + "happybirthday": 12207, + "happydays": 25106, + "happye": 33922, + "happyeaster": 38745, + "happyfathersday": 43534, + "happyfriday": 33340, + "happyhalloween": 28750, + "happyholidays": 32186, + "happyhour": 32036, + "happymonday": 47364, + "happymothersday": 42425, + "happynewyear": 18655, + "happythanksgiving": 40593, + "happyvalentinesday": 42403, + "haps": 9114, + "haq": 32445, + "har": 915, + "har": 5888, + "hara": 10367, + "haram": 35732, + "haram": 22950, + "haran": 27921, + "harare": 43562, + "haras": 26644, + "harass": 16481, + "harassed": 43067, + "harassment": 16641, + "harat": 28984, + "harb": 5856, + "harbaugh": 45220, + "harbor": 40686, + "harbor": 10202, + "harbour": 35430, + "harbour": 10011, + "harcourt": 48093, + "hard": 3312, + "hard": 1626, + "hardcover": 31123, + "harden": 27350, + "harder": 12274, + "hardest": 15258, + "hardin": 43802, + "harding": 24382, + "hardly": 17363, + "hardro": 28126, + "hardrock": 48365, + "hardrock": 40739, + "hards": 44048, + "hardship": 45085, + "hardt": 17922, + 
"hardware": 11957, + "hardwell": 45572, + "hardwick": 46864, + "hardwood": 28167, + "hardwork": 42554, + "hardwork": 27404, + "hardworking": 28095, + "hardworkpaysoff": 49193, + "hardy": 48179, + "hardy": 14113, + "hare": 27903, + "hare": 18464, + "harga": 39738, + "hari": 25472, + "hari": 8981, + "harlan": 49133, + "harle": 29096, + "harlem": 17771, + "harley": 24702, + "harley": 13632, + "harleydavidson": 39183, + "harlow": 34113, + "harm": 16656, + "harm": 14452, + "harman": 42434, + "harmed": 39637, + "harmful": 21725, + "harmless": 44369, + "harmon": 10828, + "harmon": 28729, + "harmony": 10785, + "harms": 46703, + "harne": 43323, + "harness": 23205, + "harold": 16917, + "harp": 27339, + "harper": 31288, + "harper": 12634, + "harri": 6639, + "harrier": 37372, + "harriet": 27154, + "harrington": 34340, + "harris": 25356, + "harris": 6925, + "harrisburg": 40590, + "harrison": 34389, + "harrison": 10540, + "harro": 18939, + "harrogate": 30842, + "harrow": 38807, + "harry": 11094, + "harry": 3600, + "harrypotter": 23375, + "harsh": 30596, + "harsh": 16944, + "hart": 9335, + "hart": 7752, + "hartford": 23434, + "harth": 35619, + "hartle": 47482, + "hartley": 31268, + "hartman": 43294, + "haru": 35099, + "harvard": 28118, + "harvard": 12848, + "harve": 6405, + "harvest": 44495, + "harvest": 8971, + "harvested": 35899, + "harvesting": 26674, + "harvey": 33289, + "harvey": 9586, + "harvick": 46983, + "haryana": 27661, + "has": 13855, + "has": 791, + "hasan": 30049, + "hasbro": 37405, + "hash": 6338, + "hash": 19199, + "hashi": 41831, + "hashmi": 35852, + "hashtag": 34015, + "hashtag": 9238, + "hashtags": 23514, + "haskell": 48550, + "hasn": 9143, + "hass": 9298, + "hassan": 15829, + "hassee": 37117, + "hassel": 32204, + "hassle": 35762, + "hast": 18146, + "hasta": 36623, + "hastings": 22035, + "hat": 3447, + "hat": 3801, + "hatch": 24202, + "hatch": 17809, + "hatchback": 42348, + "hatched": 42158, + "hate": 23546, + "hate": 3753, + "hated": 21298, + "hateful": 36418, + "hater": 36917, + "haters": 14027, + "hates": 14957, + "hatfield": 38448, + "hath": 27894, + "hath": 34416, + "hathaway": 31801, + "hati": 26045, + "hating": 25668, + "hatred": 19046, + "hats": 9812, + "hatt": 8747, + "hatton": 44861, + "hau": 5152, + "hauer": 48751, + "haul": 23743, + "haul": 12332, + "hauled": 46620, + "hauling": 43132, + "haun": 9676, + "haunt": 31039, + "haunted": 14944, + "haunting": 24034, + "haunts": 48035, + "haus": 41755, + "haus": 16478, + "hausen": 33338, + "hauser": 46586, + "haute": 28854, + "hav": 13443, + "hav": 20447, + "havan": 36304, + "havana": 23357, + "havas": 46261, + "have": 18053, + "have": 720, + "haven": 33074, + "haven": 3871, + "havent": 29130, + "haver": 27876, + "haves": 49088, + "havin": 31937, + "having": 1977, + "havoc": 24447, + "haw": 2788, + "haw": 26954, + "hawa": 6067, + "hawa": 46278, + "hawai": 15800, + "hawaii": 32413, + "hawaii": 8265, + "hawaiian": 17734, + "hawan": 27765, + "hawk": 14704, + "hawk": 8218, + "hawke": 38178, + "hawker": 39051, + "hawkeye": 38666, + "hawkeyes": 34266, + "hawking": 33437, + "hawkins": 19740, + "hawks": 44806, + "hawks": 5841, + "hawthorn": 45372, + "hawthorne": 36730, + "hay": 4871, + "hay": 11367, + "haya": 41325, + "hayat": 49360, + "hayden": 19806, + "haydn": 48207, + "haye": 36583, + "hayes": 13555, + "hayley": 39986, + "hayley": 22204, + "haynes": 30496, + "hays": 41524, + "hayward": 29400, + "haz": 5040, + "haz": 39921, + "hazard": 26174, + "hazard": 15178, + "hazardous": 27102, + "hazards": 30639, + "haze": 22785, + "hazel": 19838, 
+ "hazel": 21882, + "hazelnut": 35816, + "hazi": 22740, + "hazmat": 48887, + "hazrat": 45775, + "hazy": 32655, + "hb": 6854, + "hb": 12576, + "hbcu": 40008, + "hbd": 25277, + "hbd": 13594, + "hbo": 15252, + "hc": 15831, + "hc": 7821, + "hcs": 46850, + "hd": 11601, + "hd": 4414, + "hdd": 40508, + "hdmi": 33302, + "hdr": 28065, + "he": 651, + "he": 797, + "hea": 27150, + "hea": 32790, + "head": 1603, + "head": 1375, + "headache": 23849, + "headaches": 38025, + "headband": 28556, + "headed": 6153, + "header": 11077, + "heading": 4409, + "headless": 45219, + "headlights": 42422, + "headline": 10891, + "headliner": 38880, + "headlines": 14706, + "headlining": 26971, + "headphone": 37524, + "headphones": 14906, + "headquarters": 13041, + "heads": 5174, + "headset": 23883, + "headshot": 34890, + "heal": 1231, + "heal": 13833, + "healed": 31456, + "healer": 38328, + "healey": 38985, + "healing": 9295, + "heals": 32384, + "health": 2145, + "health": 1728, + "healthand": 43704, + "healthcare": 42500, + "healthcare": 6023, + "healthier": 18242, + "healthtech": 42694, + "healthy": 10330, + "healthy": 3782, + "healthye": 31532, + "healthyeating": 33761, + "healthyfood": 39996, + "healthylifestyle": 46254, + "healthyliving": 27293, + "healy": 34299, + "heap": 34781, + "heaps": 44446, + "hear": 2749, + "hear": 2584, + "heard": 4063, + "hearing": 46353, + "hearing": 5541, + "hearings": 33175, + "hearn": 36613, + "hears": 25395, + "heart": 4975, + "heart": 1936, + "heartbeat": 29154, + "heartbreak": 29281, + "heartbreaking": 21322, + "heartbroken": 35383, + "hearted": 21679, + "heartfelt": 22904, + "hearth": 31563, + "hearthstone": 34054, + "hearti": 29345, + "hearties": 44572, + "heartland": 31923, + "heartless": 47022, + "heartnews": 40426, + "hearts": 5516, + "heartw": 30002, + "heartwarming": 34080, + "hearty": 26994, + "heat": 12175, + "heat": 4403, + "heated": 17057, + "heater": 23246, + "heath": 12794, + "heath": 11719, + "heather": 20230, + "heather": 12470, + "heathrow": 24171, + "heating": 12478, + "heaton": 34557, + "heats": 36106, + "heatwave": 25726, + "heav": 2409, + "heaven": 15520, + "heaven": 5545, + "heavenly": 19117, + "heavens": 26026, + "heavier": 31253, + "heaviest": 33268, + "heavily": 14123, + "heavy": 12048, + "heavy": 4200, + "heavymetal": 39804, + "heavyweight": 17448, + "heb": 24700, + "heb": 34515, + "hebdo": 41817, + "hebrew": 27298, + "hebrides": 45121, + "hebron": 45725, + "hec": 18932, + "heck": 22985, + "heck": 14427, + "hectares": 44162, + "hectic": 37245, + "hector": 25852, + "hed": 18271, + "hedge": 16229, + "hedge": 20294, + "hedgehog": 21940, + "hedges": 41345, + "hee": 18364, + "hee": 15773, + "heechul": 42487, + "heed": 15118, + "heel": 33646, + "heel": 16861, + "heels": 10909, + "heem": 30061, + "heer": 40473, + "hef": 29473, + "heff": 48756, + "hefty": 48584, + "heg": 41995, + "heh": 25834, + "hehe": 48723, + "hehe": 10658, + "hehehe": 24138, + "hei": 6101, + "hei": 29051, + "heidel": 42927, + "heidelberg": 48445, + "heidi": 44860, + "heidi": 23867, + "heifer": 48219, + "heigh": 43883, + "height": 10788, + "heights": 8418, + "heim": 10931, + "heim": 9768, + "heimer": 39517, + "hein": 15487, + "hein": 43206, + "heine": 28742, + "heineken": 36874, + "heinrich": 47877, + "heinz": 32359, + "heir": 27083, + "heir": 34007, + "heirloom": 34232, + "heirs": 43834, + "heis": 21849, + "heisman": 34537, + "heist": 31035, + "heit": 37255, + "hel": 919, + "hel": 11579, + "hela": 48212, + "held": 4042, + "hele": 46129, + "helen": 17576, + "helen": 11291, + "helena": 23109, + 
"helene": 41591, + "helens": 45940, + "heli": 33874, + "heli": 40183, + "helicop": 10035, + "helicopter": 11956, + "helicopters": 26922, + "helium": 46505, + "helix": 35247, + "hell": 8410, + "hell": 4141, + "hella": 19800, + "hellboy": 48428, + "helle": 48600, + "helle": 46968, + "hellenic": 42544, + "heller": 44464, + "hello": 12887, + "hello": 3306, + "hells": 47989, + "helly": 48690, + "helm": 47970, + "helm": 19520, + "helmet": 11122, + "helmets": 21843, + "help": 8641, + "help": 1318, + "helped": 4845, + "helper": 29321, + "helpers": 36316, + "helpful": 12695, + "helping": 3875, + "helpless": 47638, + "helpline": 43101, + "helps": 5144, + "helsin": 17842, + "helsinki": 19626, + "hem": 20270, + "hem": 11148, + "hemi": 14256, + "hemi": 46856, + "heming": 30819, + "hemingway": 33470, + "hemisphere": 32767, + "hemmings": 34882, + "hemo": 43788, + "hemp": 28225, + "hemp": 18467, + "hems": 32451, + "hemsworth": 39428, + "hen": 2385, + "hen": 8047, + "hence": 23640, + "hend": 11560, + "hender": 49248, + "henderson": 14348, + "hendrick": 45296, + "hendricks": 37588, + "hendrix": 23605, + "henge": 33104, + "henley": 27853, + "henna": 39455, + "hennessy": 42667, + "henri": 19431, + "henri": 21610, + "henrik": 35772, + "henry": 16018, + "henry": 5508, + "hens": 31742, + "henson": 32935, + "hep": 17724, + "hep": 48791, + "hepat": 23767, + "hepatitis": 32169, + "hepburn": 26348, + "her": 1223, + "her": 899, + "hera": 38724, + "heral": 37809, + "herald": 27625, + "herald": 12851, + "herb": 26116, + "herb": 15302, + "herbal": 21868, + "herbali": 44087, + "herbalife": 48364, + "herbert": 19935, + "herbs": 17320, + "hercules": 26539, + "herd": 36142, + "herd": 18589, + "here": 9134, + "here": 763, + "hered": 47976, + "hereford": 35543, + "heres": 13566, + "hereto": 47673, + "heri": 31392, + "herit": 4720, + "heritag": 38273, + "heritage": 20962, + "heritage": 5455, + "herman": 31890, + "herman": 21568, + "hermann": 40942, + "hermes": 34563, + "hermi": 35265, + "hermione": 45502, + "hermit": 43953, + "hermitage": 47706, + "hermo": 40967, + "hermosa": 42531, + "hern": 30571, + "hern": 43576, + "hernandez": 17707, + "hero": 7338, + "hero": 3756, + "heroes": 38010, + "heroes": 5506, + "heroic": 24255, + "heroin": 23841, + "heroine": 27420, + "heron": 22593, + "heros": 37642, + "herr": 38537, + "herrera": 27755, + "herring": 30211, + "hers": 25359, + "herself": 9207, + "hersh": 20379, + "hershey": 29734, + "hert": 26744, + "hertfordshire": 41070, + "herts": 35784, + "herty": 23454, + "hertz": 49383, + "hes": 30553, + "hes": 12784, + "hesit": 23933, + "hesitate": 34967, + "hess": 41888, + "hester": 31105, + "het": 37527, + "het": 19678, + "hetero": 26405, + "heu": 20105, + "heughan": 32298, + "hew": 48141, + "hew": 43051, + "hewitt": 28871, + "hex": 16255, + "hex": 31241, + "hey": 10759, + "hey": 2189, + "hez": 34591, + "hezbollah": 37636, + "hf": 26606, + "hf": 20603, + "hfx": 47297, + "hg": 23986, + "hg": 26237, + "hgtv": 47657, + "hh": 3280, + "hh": 5180, + "hhh": 8281, + "hhhh": 19391, + "hhhh": 13121, + "hhhhh": 24246, + "hhhhhh": 37278, + "hhs": 27006, + "hi": 677, + "hi": 1883, + "hia": 20672, + "hiatus": 27823, + "hib": 15922, + "hiber": 38799, + "hibis": 36226, + "hibiscus": 36460, + "hibition": 24658, + "hibs": 42814, + "hic": 3549, + "hic": 38079, + "hick": 14813, + "hickman": 49148, + "hickory": 29905, + "hicks": 23429, + "hid": 15552, + "hid": 14451, + "hidalgo": 47464, + "hidden": 28305, + "hidden": 7029, + "hiddleston": 31444, + "hide": 17725, + "hide": 9379, + "hideous": 46588, + "hides": 
30800, + "hiding": 11371, + "hie": 15763, + "hier": 23433, + "hier": 29913, + "hierarchy": 44442, + "hifi": 38168, + "hig": 38108, + "higgins": 21783, + "high": 1487, + "high": 1400, + "higher": 5321, + "highered": 27072, + "highest": 5317, + "highland": 32244, + "highland": 16062, + "highlander": 46251, + "highlanders": 40445, + "highlands": 16883, + "highlight": 8264, + "highlighted": 22252, + "highlighter": 45460, + "highlighting": 17344, + "highlights": 6173, + "highly": 5302, + "highness": 38694, + "highs": 15144, + "highschool": 23102, + "highway": 45344, + "highway": 7620, + "highways": 28007, + "higu": 39115, + "hihi": 36240, + "hii": 42315, + "hijab": 31407, + "hika": 41356, + "hikari": 44624, + "hike": 9404, + "hiked": 36471, + "hiker": 40947, + "hikers": 46090, + "hikes": 27076, + "hiking": 9118, + "hiko": 48708, + "hil": 3508, + "hil": 17927, + "hila": 38837, + "hilar": 37337, + "hilari": 7784, + "hilarious": 8358, + "hilariously": 43476, + "hilary": 45898, + "hilary": 25415, + "hilde": 45382, + "hill": 3671, + "hill": 2682, + "hillary": 13257, + "hillary": 7074, + "hillaryclinton": 15357, + "hilli": 32513, + "hills": 24178, + "hills": 5289, + "hillsborough": 32157, + "hillside": 37194, + "hilltop": 45858, + "hilly": 32483, + "hilton": 33621, + "hilton": 14012, + "him": 4128, + "him": 1269, + "himach": 29132, + "himachal": 35461, + "himalay": 17552, + "himalayan": 30318, + "himalayas": 32872, + "hime": 45892, + "himself": 4530, + "himss": 41730, + "hin": 1676, + "hin": 37930, + "hina": 40571, + "hinakhan": 45518, + "hinch": 49320, + "hind": 34460, + "hind": 23293, + "hindi": 14967, + "hinds": 47859, + "hindu": 17587, + "hindu": 12053, + "hinduism": 40592, + "hindus": 25701, + "hindustan": 46553, + "hines": 37462, + "hing": 37968, + "hini": 33564, + "hino": 45343, + "hint": 11868, + "hinton": 47165, + "hints": 20594, + "hio": 32897, + "hip": 11725, + "hip": 6584, + "hipho": 8819, + "hiphop": 26598, + "hiphop": 10914, + "hipp": 13607, + "hippie": 28637, + "hippo": 28398, + "hippo": 36729, + "hips": 30191, + "hipstamatic": 31002, + "hipster": 19987, + "hipsters": 48265, + "hir": 4959, + "hir": 14728, + "hira": 42577, + "hire": 32356, + "hire": 8243, + "hired": 17602, + "hires": 24133, + "hiring": 7835, + "hiro": 17396, + "hiro": 20588, + "hiroshima": 33867, + "hirsch": 46967, + "his": 15211, + "his": 787, + "hism": 23502, + "hispan": 16843, + "hispanic": 22676, + "hist": 21710, + "hist": 13779, + "histo": 33479, + "histor": 2993, + "historia": 46010, + "historian": 20697, + "historians": 35200, + "historic": 30195, + "historic": 5726, + "historical": 34154, + "historical": 8039, + "historically": 30445, + "histories": 34736, + "history": 11142, + "history": 1695, + "historymonth": 19356, + "historyof": 35905, + "hit": 5453, + "hit": 2341, + "hitch": 22937, + "hitch": 36203, + "hitler": 16518, + "hitman": 33290, + "hits": 4712, + "hitter": 23538, + "hitters": 39724, + "hitting": 7957, + "hiv": 44410, + "hiv": 11018, + "hive": 38162, + "hive": 18521, + "hiya": 42393, + "hk": 22648, + "hk": 12307, + "hl": 8297, + "hl": 5956, + "hle": 32389, + "hler": 35418, + "hm": 17913, + "hm": 7631, + "hmm": 13725, + "hmmm": 17032, + "hmmmm": 34598, + "hms": 14625, + "hmu": 21630, + "hmv": 49288, + "hn": 22905, + "hn": 7478, + "hns": 48412, + "ho": 606, + "ho": 2971, + "hoa": 37517, + "hoar": 31628, + "hoax": 33438, + "hob": 18212, + "hobart": 31646, + "hobb": 16175, + "hobbies": 36370, + "hobbit": 23207, + "hobbs": 34343, + "hobby": 41120, + "hobby": 17557, + "hobo": 34613, + "hobo": 41334, + 
"hoboken": 41568, + "hoc": 35880, + "hoch": 43772, + "hock": 34914, + "hock": 46574, + "hockey": 16499, + "hockey": 4111, + "hoco": 34771, + "hod": 31062, + "hodg": 23660, + "hodge": 40585, + "hodges": 35061, + "hodgson": 37044, + "hoe": 32502, + "hoe": 11262, + "hoek": 40073, + "hoes": 21164, + "hof": 20186, + "hof": 12789, + "hofer": 38654, + "hoff": 32860, + "hoff": 22751, + "hofficial": 41949, + "hoffman": 22026, + "hog": 12075, + "hog": 13255, + "hogan": 19757, + "hogg": 42005, + "hogs": 23242, + "hogwarts": 29168, + "hoh": 43947, + "hoi": 39295, + "hok": 26942, + "hok": 47167, + "hokies": 35168, + "hokkaido": 49145, + "hol": 1187, + "hol": 7349, + "hola": 28724, + "hold": 36496, + "hold": 3254, + "holden": 21869, + "holder": 7862, + "holders": 10074, + "holding": 5050, + "holdings": 24832, + "holds": 7286, + "hole": 47242, + "hole": 5341, + "holes": 11266, + "holi": 2093, + "holi": 21926, + "holic": 16348, + "holics": 29782, + "holiday": 13168, + "holiday": 2878, + "holidays": 5372, + "holiness": 37259, + "holistic": 26300, + "holl": 27699, + "holla": 26500, + "holland": 31608, + "holland": 9978, + "hollande": 47690, + "holler": 49047, + "holli": 24019, + "holliday": 41624, + "hollow": 41221, + "hollow": 16691, + "holloway": 29435, + "holly": 12731, + "holly": 11923, + "hollyo": 41525, + "hollyoaks": 43352, + "hollywood": 24655, + "hollywood": 5518, + "holm": 34758, + "holm": 12739, + "holme": 46149, + "holmes": 12756, + "holo": 10317, + "holocau": 14688, + "holocaust": 16476, + "hols": 33344, + "holt": 18868, + "holtz": 44743, + "holy": 13910, + "holy": 4874, + "hom": 906, + "hom": 47397, + "homa": 9557, + "homage": 17746, + "home": 2143, + "home": 1137, + "homebrew": 35046, + "homec": 33869, + "homecoming": 9008, + "homedecor": 15695, + "homedepot": 38707, + "homegrown": 32554, + "homeitems": 42972, + "homeland": 21633, + "homeless": 18403, + "homeless": 9661, + "homelessness": 19851, + "homemade": 7889, + "homeof": 48856, + "homeowner": 37267, + "homeowners": 29882, + "homepage": 29828, + "homer": 29307, + "homer": 16931, + "homers": 38333, + "homes": 19480, + "homes": 5416, + "homeschool": 40994, + "homestead": 32609, + "homeswee": 46298, + "hometown": 12238, + "homework": 12495, + "homicide": 21520, + "homie": 12540, + "homies": 18893, + "homme": 26193, + "homo": 18129, + "homo": 30504, + "homophobia": 37875, + "homophobic": 40975, + "homosexual": 44288, + "homosexuality": 46720, + "homs": 45413, + "hon": 1279, + "hon": 10296, + "honda": 8553, + "honduras": 29715, + "hone": 38640, + "honest": 7814, + "honest": 9602, + "honestly": 9155, + "honesty": 24939, + "honey": 9843, + "honey": 6406, + "honeycomb": 48583, + "honeymoon": 22527, + "hong": 12144, + "hong": 8598, + "hongkong": 16659, + "honi": 17918, + "honolulu": 28096, + "honor": 9206, + "honor": 3402, + "honorable": 19498, + "honorary": 15675, + "honore": 25868, + "honored": 5494, + "honoree": 38993, + "honorees": 43012, + "honoring": 10771, + "honors": 10248, + "honour": 8240, + "honourable": 29855, + "honoured": 11945, + "honouring": 37754, + "honours": 22558, + "hoo": 2300, + "hoo": 7920, + "hood": 18681, + "hood": 3222, + "hooded": 33631, + "hoodie": 13444, + "hoodies": 25974, + "hoods": 16664, + "hoof": 44555, + "hook": 30488, + "hook": 10395, + "hookah": 34214, + "hooked": 18138, + "hookem": 31465, + "hooker": 37891, + "hooking": 35240, + "hooks": 25068, + "hooligans": 48176, + "hoon": 21368, + "hooo": 44538, + "hoop": 31516, + "hoop": 19573, + "hooper": 35221, + "hoops": 9351, + "hoor": 22155, + "hooray": 24940, + 
"hoos": 46462, + "hoosier": 48886, + "hoosiers": 42780, + "hoot": 29164, + "hoover": 25691, + "hop": 10848, + "hop": 5833, + "hope": 5263, + "hope": 1683, + "hoped": 30628, + "hopeful": 21453, + "hopefully": 7602, + "hopeless": 35586, + "hopes": 10018, + "hoping": 7207, + "hopkins": 17821, + "hopp": 48839, + "hopped": 34220, + "hopper": 21748, + "hopping": 27606, + "hoppy": 38359, + "hops": 21137, + "hor": 1407, + "hor": 33847, + "hora": 26013, + "horace": 39282, + "horan": 26857, + "horde": 44947, + "hore": 15380, + "horiz": 8144, + "horizon": 17924, + "horizon": 11920, + "horizons": 29685, + "horizontal": 25775, + "hormon": 27096, + "hormone": 31283, + "hormones": 35162, + "horn": 15771, + "horn": 9607, + "horne": 38143, + "horned": 34526, + "hornet": 28739, + "hornets": 20124, + "horns": 22109, + "horny": 32622, + "horo": 21500, + "horoscope": 38453, + "horowitz": 44669, + "horri": 8656, + "horrible": 13726, + "horribly": 45484, + "horrific": 25314, + "horrifying": 38901, + "horror": 13787, + "horror": 5032, + "horrormovies": 46682, + "horrors": 33321, + "horse": 8562, + "horse": 4558, + "horseback": 43673, + "horseman": 48885, + "horsepower": 36882, + "horser": 23096, + "horseracing": 30693, + "horses": 8809, + "horseshoe": 29242, + "horst": 37182, + "hort": 19482, + "horticul": 27141, + "horticulture": 39998, + "horton": 25945, + "hortons": 38422, + "horus": 29794, + "hos": 44320, + "hos": 25008, + "hosa": 44618, + "hose": 19662, + "hoseok": 38817, + "hosp": 2847, + "hosp": 37853, + "hospice": 20533, + "hospit": 7180, + "hospital": 29399, + "hospital": 3851, + "hospitality": 11657, + "hospitalized": 36915, + "hospitals": 13816, + "host": 17403, + "host": 3953, + "hostage": 26119, + "hoste": 31700, + "hosted": 6017, + "hostel": 27225, + "hostess": 39692, + "hostile": 28074, + "hosting": 4857, + "hosts": 8718, + "hot": 2851, + "hot": 2069, + "hota": 43289, + "hotdog": 43758, + "hotel": 14591, + "hotel": 2738, + "hotels": 8654, + "hotline": 30516, + "hotmail": 46427, + "hotness": 39803, + "hotra": 27109, + "hotro": 47823, + "hotspot": 36606, + "hotspur": 35176, + "hotter": 23591, + "hottest": 8279, + "hottie": 22804, + "hotties": 46027, + "hou": 1011, + "hou": 10122, + "hough": 44529, + "houghton": 36133, + "houn": 39273, + "houn": 33607, + "hound": 33996, + "hound": 13561, + "hounds": 21178, + "hounews": 48373, + "hour": 14930, + "hour": 2232, + "hourly": 30918, + "hours": 2382, + "house": 4107, + "house": 1212, + "housed": 37518, + "household": 12412, + "households": 27167, + "housel": 48685, + "housemusic": 28468, + "houseof": 19928, + "houses": 7791, + "housewives": 38523, + "housing": 32924, + "housing": 5734, + "houston": 16564, + "houston": 5663, + "hov": 40291, + "hove": 29674, + "hoven": 35559, + "hover": 36252, + "hover": 49016, + "hovering": 43437, + "how": 7470, + "how": 829, + "howar": 37672, + "howard": 25447, + "howard": 7632, + "howdy": 42216, + "howe": 8179, + "howe": 24614, + "howell": 25297, + "hower": 32920, + "however": 8467, + "howi": 47883, + "howie": 42939, + "howl": 40332, + "howling": 41771, + "howto": 38191, + "howto": 44060, + "hoy": 39625, + "hoy": 13278, + "hoya": 40978, + "hp": 23753, + "hp": 6371, + "hpa": 30983, + "hpc": 39936, + "hpe": 33787, + "hpv": 45765, + "hq": 33571, + "hq": 4693, + "hr": 4810, + "hr": 4086, + "hra": 21320, + "hra": 17212, + "hrc": 18139, + "hrh": 29103, + "hri": 21068, + "hrithik": 45371, + "hrs": 7157, + "hru": 24127, + "hrw": 25064, + "hs": 9343, + "hs": 2466, + "hsbc": 31508, + "hsc": 43510, + "hse": 34057, + "hsfb": 29539, + 
"hsv": 47311, + "ht": 11123, + "ht": 7801, + "hta": 23452, + "hta": 49384, + "htafc": 42821, + "htc": 48942, + "htc": 17635, + "html": 18231, + "hts": 43710, + "htt": 10620, + "http": 15066, + "https": 30901, + "httr": 49372, + "httweets": 43198, + "hu": 845, + "hu": 5949, + "hua": 22138, + "huan": 41405, + "huang": 32013, + "huar": 46916, + "huawe": 17709, + "huawei": 21128, + "hub": 18775, + "hub": 7028, + "hubb": 23183, + "hubbard": 33288, + "hubble": 30421, + "hubby": 16947, + "hubert": 40699, + "hubs": 29327, + "huck": 22909, + "huckabee": 43666, + "hud": 7169, + "hud": 28563, + "hudder": 22629, + "huddersfield": 24220, + "huddle": 33435, + "hudson": 25873, + "hudson": 11260, + "hue": 48380, + "hue": 21465, + "hues": 38003, + "huey": 39663, + "huff": 18746, + "huff": 44999, + "huffpost": 45887, + "hug": 40790, + "hug": 10359, + "huge": 2699, + "hugely": 24648, + "hugged": 41333, + "hugging": 27058, + "hugh": 8723, + "hugh": 15385, + "hughes": 11418, + "hugo": 43935, + "hugo": 17132, + "hugs": 14248, + "huh": 13348, + "huhu": 32134, + "hui": 29978, + "hul": 7911, + "hula": 40145, + "hulk": 17637, + "hull": 25154, + "hull": 10375, + "hulu": 24666, + "hum": 5823, + "hum": 16283, + "human": 3175, + "human": 2751, + "humane": 20220, + "humanitarian": 14170, + "humanities": 24949, + "humanity": 9420, + "humanright": 44385, + "humanrights": 14148, + "humans": 8324, + "humb": 9988, + "humber": 30602, + "humber": 38063, + "humble": 38703, + "humble": 10889, + "humbled": 19682, + "humbling": 39757, + "humbold": 24739, + "humboldt": 31389, + "hume": 38197, + "humid": 14778, + "humid": 27447, + "humidi": 47666, + "humidity": 15469, + "humil": 27205, + "humili": 25332, + "humility": 28535, + "humming": 26515, + "hummingbird": 33072, + "hummus": 31785, + "humor": 29369, + "humor": 11186, + "humorous": 38173, + "humour": 19161, + "hump": 16673, + "hump": 24529, + "humpback": 47662, + "humpday": 27693, + "humph": 19767, + "humphrey": 31549, + "hun": 1616, + "hun": 10795, + "hundre": 8505, + "hundred": 11898, + "hundreds": 8879, + "hung": 13825, + "hungar": 19420, + "hungarian": 23325, + "hungary": 17232, + "hunger": 25565, + "hunger": 10184, + "hungergames": 47507, + "hungover": 41110, + "hungry": 44845, + "hungry": 8451, + "hunk": 33912, + "hunt": 16498, + "hunt": 5774, + "hunted": 37373, + "hunter": 16531, + "hunter": 6099, + "hunters": 16115, + "hunting": 27830, + "hunting": 7507, + "huntington": 23521, + "hunts": 34041, + "huntsville": 34544, + "hur": 2305, + "hur": 34523, + "hurd": 44915, + "hurdle": 27486, + "hurdles": 25440, + "huri": 42486, + "hurley": 30166, + "hurling": 24738, + "huron": 36147, + "hurrah": 40599, + "hurric": 6543, + "hurrican": 36105, + "hurricane": 24051, + "hurricane": 8782, + "hurricanes": 22357, + "hurry": 10921, + "hurst": 44742, + "hurst": 11760, + "hurt": 7413, + "hurting": 24017, + "hurts": 13059, + "hus": 5111, + "hus": 35853, + "husband": 6179, + "husbands": 33612, + "hush": 28728, + "husk": 19246, + "huskers": 26946, + "huskies": 20988, + "husky": 20421, + "huss": 13733, + "hussain": 17940, + "hussein": 31336, + "hust": 27279, + "hustle": 15709, + "huston": 46480, + "hut": 20924, + "hut": 16503, + "hutch": 31018, + "hutch": 33203, + "hutchinson": 35721, + "hutto": 27662, + "hutton": 38321, + "hv": 17209, + "hv": 18593, + "hvac": 27492, + "hw": 27491, + "hw": 18876, + "hwa": 32352, + "hwan": 44390, + "hwang": 46775, + "hwy": 13812, + "hy": 1441, + "hy": 17827, + "hya": 31600, + "hyacin": 47263, + "hyatt": 44856, + "hyatt": 25146, + "hybri": 9084, + "hybrid": 
10156, + "hyd": 42382, + "hyde": 46484, + "hyde": 16343, + "hyder": 13960, + "hyderabad": 14801, + "hydr": 8031, + "hydra": 44414, + "hydra": 40420, + "hydrange": 43298, + "hydrate": 29628, + "hydrated": 23300, + "hydrating": 47653, + "hydration": 24174, + "hydrau": 26017, + "hydraulic": 26189, + "hydro": 8368, + "hydro": 22595, + "hydrogen": 20974, + "hye": 32724, + "hye": 25792, + "hygi": 16277, + "hygiene": 19591, + "hymn": 41350, + "hyo": 38960, + "hyo": 35078, + "hyp": 16964, + "hype": 30353, + "hype": 11111, + "hyped": 22507, + "hyper": 7997, + "hyper": 22146, + "hypertension": 40698, + "hypno": 23355, + "hypnosis": 48138, + "hypnoti": 40440, + "hypo": 10252, + "hypocr": 30711, + "hypocri": 25606, + "hypocrisy": 26296, + "hypocrite": 44125, + "hypothe": 46966, + "hypothesis": 44956, + "hyster": 24235, + "hysteria": 45965, + "hysterical": 48627, + "hyuk": 20452, + "hyun": 11831, + "hyun": 8589, + "hyundai": 17094, + "hyung": 46901, + "hyung": 16551, + "hz": 32533, + "i": 72, + "i": 328, + "ia": 12486, + "ia": 1073, + "iac": 32838, + "iac": 44063, + "iaf": 40789, + "iah": 35052, + "iain": 30103, + "ial": 11530, + "ial": 1974, + "ials": 20940, + "iam": 3579, + "iam": 11415, + "iambic": 43668, + "iambicpent": 43891, + "iamsrk": 15103, + "ian": 7723, + "ian": 1800, + "ians": 6451, + "iansomerhalder": 47077, + "iart": 18413, + "iartg": 18669, + "ias": 32303, + "ias": 14620, + "ib": 3962, + "ib": 13554, + "iba": 39763, + "ibadan": 44691, + "iban": 47145, + "ibc": 49014, + "ibd": 40732, + "iber": 23814, + "ibi": 12337, + "ibis": 47048, + "ibiza": 13853, + "ible": 37792, + "ibles": 44102, + "ibm": 23415, + "ibm": 13918, + "ibn": 25729, + "ibooks": 46887, + "ibra": 15476, + "ibrahi": 40350, + "ibrahim": 20816, + "ibrox": 46883, + "ibs": 41993, + "ibu": 43587, + "ibu": 46117, + "ic": 535, + "ic": 1029, + "ica": 2576, + "icago": 37492, + "ical": 6082, + "ical": 1110, + "ically": 3161, + "icals": 13999, + "ican": 17653, + "ican": 5246, + "icans": 20511, + "icar": 37211, + "ication": 21629, + "icc": 12945, + "ice": 2739, + "ice": 733, + "iceberg": 33662, + "icec": 13636, + "icecream": 21334, + "iced": 8049, + "icelan": 34114, + "iceland": 46716, + "iceland": 11935, + "icelandic": 34705, + "ices": 1931, + "ich": 5333, + "ich": 1232, + "icha": 31453, + "iche": 28972, + "iche": 21143, + "ichi": 21669, + "ichi": 14647, + "ichick": 45022, + "ichiro": 43787, + "ici": 948, + "ici": 22189, + "icia": 11774, + "icial": 17543, + "icial": 6397, + "ician": 40522, + "ician": 5374, + "icians": 6264, + "iciary": 21329, + "icic": 46006, + "icide": 6558, + "icides": 28253, + "icing": 7676, + "icio": 24207, + "icion": 45905, + "icious": 3325, + "icist": 21165, + "icists": 42171, + "icity": 7243, + "ick": 1168, + "ick": 1068, + "icked": 39799, + "icker": 40357, + "ickers": 30701, + "icki": 35468, + "icking": 6619, + "icks": 3727, + "icky": 11587, + "icn": 44516, + "ico": 13697, + "ico": 3040, + "icom": 17693, + "icom": 29796, + "icon": 13843, + "icon": 5646, + "iconic": 6959, + "icons": 15553, + "icop": 9389, + "icos": 32002, + "ics": 1324, + "ict": 6349, + "icted": 36515, + "iction": 40560, + "icton": 36548, + "icu": 45118, + "icu": 30443, + "icular": 40660, + "icus": 31459, + "icy": 28780, + "icy": 3495, + "icymi": 5315, + "icz": 46387, + "id": 1568, + "id": 1014, + "ida": 11032, + "ida": 11600, + "idad": 22462, + "idaho": 48817, + "idaho": 15165, + "idal": 39684, + "idan": 17929, + "idc": 22386, + "ide": 1909, + "ide": 14104, + "idea": 3612, + "ideal": 8789, + "ideally": 48247, + "ideals": 45096, + "ideas": 4452, 
+ "ident": 7113, + "identi": 6009, + "identical": 25587, + "identification": 23337, + "identified": 15217, + "identifies": 35712, + "identify": 10949, + "identifying": 23589, + "identities": 34292, + "identity": 8892, + "ideology": 25840, + "iders": 8980, + "ides": 31791, + "idf": 28987, + "idge": 35567, + "idh": 44325, + "idi": 9611, + "idi": 14264, + "idio": 15994, + "idiot": 14087, + "idiots": 20856, + "idk": 8972, + "idle": 34754, + "idlib": 36199, + "ido": 6763, + "ido": 29641, + "idol": 24866, + "idol": 8884, + "idols": 21398, + "idr": 10106, + "idri": 46435, + "idris": 41312, + "ids": 6111, + "idu": 28655, + "idy": 33058, + "idyl": 44879, + "idyllic": 46632, + "ie": 6789, + "ie": 1718, + "iec": 44773, + "ied": 10059, + "ieee": 39860, + "iel": 27875, + "iel": 22729, + "ience": 1542, + "ient": 13115, + "ier": 33173, + "ier": 5912, + "iers": 45060, + "ies": 27912, + "ies": 963, + "iest": 10818, + "if": 8063, + "if": 878, + "ifa": 37574, + "ifc": 36524, + "ife": 41172, + "ife": 19590, + "iff": 35753, + "ification": 35755, + "ified": 41403, + "ift": 31143, + "iftar": 35153, + "ifu": 41523, + "ify": 32807, + "ig": 1089, + "ig": 3072, + "iga": 16493, + "igan": 27468, + "igans": 25419, + "igbo": 44591, + "ige": 10806, + "igen": 33070, + "iger": 30758, + "iger": 20685, + "igers": 40755, + "igers": 48928, + "iggy": 46219, + "iggy": 27604, + "igh": 2712, + "igh": 5451, + "ight": 14571, + "ight": 897, + "ighton": 35292, + "igi": 21901, + "igle": 29912, + "iglesias": 39432, + "ign": 7303, + "ign": 2326, + "ignati": 37573, + "ignatius": 48318, + "igne": 45843, + "ignite": 25210, + "ignition": 36115, + "igno": 15375, + "ignor": 7653, + "ignorance": 22735, + "ignorant": 26933, + "ignore": 12304, + "ignored": 20428, + "ignores": 40129, + "ignoring": 23969, + "igor": 33024, + "igs": 31344, + "igu": 21279, + "ih": 12162, + "ih": 34135, + "ihear": 13043, + "iheart": 30332, + "iheartawards": 18811, + "iheartradio": 25934, + "ihop": 45511, + "ihri": 39108, + "ihrithik": 39326, + "ii": 5103, + "ii": 2329, + "iii": 46236, + "iii": 6572, + "iiii": 20133, + "iiii": 45393, + "iiot": 30704, + "iit": 39330, + "iit": 33238, + "ij": 7337, + "ija": 42802, + "ik": 3903, + "ik": 10177, + "ika": 18188, + "ike": 12329, + "ike": 19696, + "ikea": 20528, + "iker": 38653, + "ikh": 44655, + "ikh": 12758, + "iklan": 32028, + "iklan": 29584, + "iko": 35659, + "iko": 39272, + "ikon": 38543, + "ikon": 19156, + "iku": 17780, + "il": 543, + "il": 958, + "ila": 4344, + "ilah": 32211, + "ilan": 13889, + "ilan": 28076, + "iland": 20957, + "ilation": 16180, + "ilay": 45093, + "ild": 22278, + "ild": 17164, + "ile": 18398, + "ile": 989, + "iled": 3358, + "iler": 22446, + "iler": 3615, + "ilers": 8975, + "iles": 42274, + "ili": 2076, + "ili": 19601, + "ilia": 14855, + "ilian": 10272, + "iliary": 32585, + "ilife": 42835, + "ilike": 44989, + "ilinan": 48497, + "iling": 3299, + "ilio": 47256, + "ilion": 12561, + "ilis": 43442, + "ilit": 11178, + "ilities": 5446, + "ility": 1787, + "ilive": 26478, + "ill": 828, + "ill": 660, + "illa": 8877, + "illa": 3043, + "illac": 17218, + "illage": 48922, + "illard": 21920, + "illary": 33667, + "illas": 23404, + "ille": 18213, + "ille": 5559, + "illed": 2527, + "illeg": 35808, + "illegal": 7983, + "illegally": 24466, + "illegals": 40490, + "iller": 23341, + "iller": 2956, + "illers": 30547, + "illery": 14514, + "illes": 20037, + "illi": 1086, + "illi": 25187, + "illia": 48776, + "illiams": 30301, + "illian": 48775, + "illian": 17355, + "illic": 37152, + "illicit": 40998, + "illie": 26083, + "illin": 
35868, + "illing": 2803, + "illini": 28957, + "illino": 8920, + "illinois": 9414, + "illion": 35542, + "illion": 2035, + "illness": 11145, + "illnesses": 33861, + "illo": 34153, + "illo": 7588, + "illon": 20516, + "ills": 1900, + "illu": 3025, + "illumin": 11446, + "illuminate": 43261, + "illuminated": 28814, + "illuminati": 34551, + "illuminating": 46601, + "illumination": 43680, + "illus": 41386, + "illusion": 20318, + "illusions": 47429, + "illustr": 6268, + "illustrate": 37468, + "illustrated": 13151, + "illustrates": 38129, + "illustrating": 43322, + "illustration": 6052, + "illustrations": 17852, + "illustrator": 16649, + "illustri": 43116, + "illustrious": 44304, + "illy": 11707, + "illy": 9532, + "ilm": 36326, + "ilo": 4220, + "ilo": 14835, + "ilove": 7183, + "ilove": 32914, + "iloveart": 41114, + "ilovemy": 28863, + "iloveyou": 28829, + "ils": 1543, + "ilt": 25334, + "ilton": 28494, + "ilu": 27337, + "ilwx": 43777, + "ily": 4881, + "ily": 1026, + "ilya": 33377, + "ilysm": 29228, + "im": 732, + "im": 1496, + "ima": 2414, + "ima": 6432, + "imac": 40675, + "imacele": 47281, + "imag": 2316, + "image": 24101, + "image": 2867, + "imagery": 22828, + "images": 4952, + "imagin": 18178, + "imaginary": 30417, + "imagination": 13783, + "imaginative": 47233, + "imagine": 35752, + "imagine": 4826, + "imagined": 18478, + "imagines": 47379, + "imaging": 14231, + "imagining": 27384, + "imam": 37552, + "imam": 19024, + "iman": 45684, + "iman": 16247, + "imation": 44566, + "imax": 32066, + "imc": 45616, + "imdanielpadilla": 36357, + "imdb": 30407, + "ime": 44937, + "ime": 31151, + "imel": 31594, + "iment": 37157, + "imer": 21802, + "imes": 47744, + "imf": 28403, + "img": 24157, + "imi": 23559, + "imin": 23942, + "imit": 23462, + "imitation": 41630, + "imma": 19487, + "immac": 25085, + "immaculate": 29649, + "immature": 45531, + "immedi": 7366, + "immediate": 14440, + "immediately": 10108, + "immen": 17278, + "immense": 22722, + "immensely": 35013, + "immer": 13954, + "immerse": 46240, + "immersion": 31861, + "immersive": 27521, + "immigr": 5851, + "immigrant": 16474, + "immigrants": 14460, + "immigration": 9588, + "imminent": 27299, + "immort": 39244, + "immortal": 24717, + "immun": 8961, + "immune": 15606, + "immuni": 44571, + "immunity": 26254, + "immuno": 24361, + "immunology": 44483, + "immunotherapy": 39185, + "imo": 26349, + "imo": 13738, + "imp": 3335, + "imp": 31037, + "impac": 7573, + "impact": 33036, + "impact": 3844, + "impacted": 21424, + "impactful": 41631, + "impacting": 29359, + "impacts": 15069, + "impair": 36451, + "impaired": 28028, + "impairment": 44501, + "impala": 36641, + "impe": 23612, + "impeach": 16874, + "impeach": 43497, + "impeachment": 32979, + "impeachtrump": 38006, + "impecc": 34511, + "impeccable": 40111, + "impending": 34486, + "imper": 7727, + "imperative": 39833, + "imperfect": 46034, + "imperi": 30911, + "imperial": 32425, + "imperial": 12361, + "imperialism": 48855, + "imperson": 25551, + "implant": 33106, + "implants": 32202, + "imple": 7423, + "implement": 17966, + "implementation": 15102, + "implemented": 24315, + "implementing": 22862, + "implic": 15269, + "implications": 19229, + "implo": 40337, + "impo": 45704, + "import": 2336, + "import": 16294, + "importance": 6821, + "important": 2829, + "importantly": 21580, + "imported": 28798, + "imports": 25286, + "impose": 35879, + "imposed": 25871, + "imposing": 42289, + "impossible": 9815, + "impre": 3763, + "impress": 20015, + "impressed": 9689, + "impression": 14468, + "impressionism": 36114, + "impressionist": 
44904, + "impressions": 22276, + "impressive": 6634, + "imprint": 43863, + "imprison": 22141, + "imprisoned": 32999, + "imprisonment": 39024, + "impro": 2531, + "impromp": 28100, + "impromptu": 28611, + "improv": 22868, + "improve": 4971, + "improved": 9446, + "improvement": 10790, + "improvements": 16320, + "improves": 18035, + "improving": 10381, + "improvis": 32343, + "improvised": 40886, + "impulse": 29683, + "impy": 42690, + "imran": 19647, + "imran": 19212, + "imrankhan": 25956, + "imrankhanpti": 26688, + "ims": 17800, + "imsa": 37262, + "imv": 35731, + "imvkohli": 37136, + "imwith": 26822, + "imwithher": 32651, + "in": 512, + "in": 530, + "ina": 18026, + "ina": 1366, + "inability": 47517, + "inaccurate": 49192, + "inaction": 41916, + "inactive": 49274, + "inadequate": 43403, + "inak": 46549, + "inal": 19178, + "inals": 26438, + "inan": 26204, + "inappropriate": 26722, + "inari": 48620, + "inary": 11337, + "inas": 36731, + "inas": 12362, + "inated": 38530, + "ination": 4706, + "inau": 10832, + "inaugu": 11309, + "inaugur": 11448, + "inaugural": 11340, + "inaugurated": 29011, + "inauguration": 16805, + "inbound": 24420, + "inbox": 18683, + "inc": 14570, + "inc": 4438, + "incan": 45964, + "incar": 18070, + "incarcer": 26334, + "incarcerated": 49178, + "incarceration": 39887, + "incase": 30463, + "ince": 44303, + "incen": 13259, + "incense": 35059, + "incentive": 29024, + "incentives": 29813, + "inception": 36653, + "inch": 6523, + "incheon": 30645, + "inches": 10809, + "inci": 5747, + "incidence": 43371, + "incident": 10103, + "incidents": 22120, + "incindia": 26161, + "inciner": 46434, + "incl": 27857, + "incl": 13338, + "inclined": 45470, + "inclu": 1738, + "include": 5942, + "included": 7414, + "includes": 6197, + "including": 2814, + "inclusion": 12079, + "inclusive": 13393, + "income": 8044, + "incoming": 15416, + "incomparable": 36027, + "incompetent": 45069, + "incomplete": 34040, + "incon": 42372, + "inconvenience": 40563, + "incorpor": 19335, + "incorporate": 34168, + "incorporated": 29494, + "incorporating": 40303, + "incorrect": 31872, + "incre": 1870, + "increase": 5230, + "increased": 9156, + "increases": 13797, + "increasing": 10270, + "increasingly": 16106, + "incredi": 2883, + "incredible": 22128, + "incredible": 3457, + "incredibleindia": 24680, + "incredibles": 48641, + "incredibly": 9513, + "incu": 38830, + "incub": 24587, + "incubator": 35736, + "incumb": 32246, + "incumbent": 38038, + "incur": 42356, + "ind": 5386, + "ind": 4655, + "inda": 15710, + "inde": 2645, + "indeed": 10031, + "indefin": 29501, + "indefinitely": 43750, + "independ": 4147, + "independence": 23117, + "independence": 7955, + "independenceday": 25971, + "independent": 33844, + "independent": 7088, + "independently": 39831, + "inder": 29225, + "index": 35209, + "index": 9458, + "indhoven": 44229, + "indi": 1098, + "indi": 46536, + "india": 27067, + "india": 1762, + "indian": 7685, + "indian": 3606, + "indiana": 8615, + "indianapolis": 17196, + "indianfootball": 45979, + "indians": 10271, + "indic": 7136, + "indicate": 26679, + "indicated": 39416, + "indicates": 29412, + "indication": 38539, + "indicator": 24776, + "indicators": 30054, + "indicted": 34992, + "indictment": 42278, + "indie": 5260, + "indie": 9383, + "indiedev": 10863, + "indiefilm": 22588, + "indiegame": 17969, + "indiegamedev": 40466, + "indiegames": 35864, + "indiegogo": 38057, + "indies": 23618, + "indiffe": 41372, + "indigen": 8348, + "indigenous": 9303, + "indigo": 21002, + "indira": 43887, + "indirec": 26398, + "indirect": 
35416, + "indivi": 5649, + "individu": 9574, + "individual": 8512, + "individually": 33782, + "individuals": 11990, + "indo": 26303, + "indo": 18297, + "indom": 42926, + "indone": 6180, + "indonesia": 7229, + "indonesian": 19593, + "indoor": 44478, + "indoor": 9546, + "indoors": 22973, + "indore": 46143, + "indu": 2298, + "induc": 7973, + "induced": 24103, + "inducted": 20596, + "inductee": 39558, + "inductees": 44796, + "induction": 18338, + "indul": 19402, + "indulg": 28388, + "indulge": 24851, + "indulgence": 40856, + "indulgent": 49147, + "industri": 5082, + "industrial": 30853, + "industrial": 7520, + "industries": 11700, + "industry": 47407, + "industry": 3318, + "indv": 16942, + "indy": 9821, + "indy": 10098, + "indycar": 20484, + "indyref": 22569, + "ine": 855, + "ine": 715, + "ineau": 38122, + "inec": 45214, + "ined": 2038, + "inee": 43252, + "inee": 7986, + "inees": 13056, + "ineffe": 47202, + "inely": 18234, + "inem": 48876, + "inema": 29232, + "inen": 44365, + "inequalities": 45507, + "inequality": 17372, + "iner": 17438, + "iner": 5155, + "iners": 41863, + "ines": 2137, + "inese": 35966, + "iness": 1463, + "inet": 8121, + "inette": 38911, + "inev": 19527, + "inevit": 45871, + "inevitable": 25004, + "inews": 24300, + "inexpensive": 38614, + "iney": 30254, + "inez": 12700, + "inf": 1529, + "inf": 35241, + "infamous": 18688, + "infan": 17219, + "infant": 19192, + "infantry": 21655, + "infants": 34726, + "infe": 7164, + "infec": 26088, + "infected": 26136, + "infection": 14774, + "infections": 22227, + "infectious": 29157, + "infeld": 25035, + "infer": 16258, + "inferno": 31290, + "infertility": 40701, + "infield": 48933, + "infiltr": 28683, + "infin": 6246, + "infinite": 12748, + "infiniti": 34644, + "infinity": 34863, + "infinity": 12895, + "infl": 7627, + "inflam": 16080, + "inflammation": 24893, + "inflammatory": 26831, + "inflatable": 30135, + "inflation": 17497, + "inflicted": 48188, + "influ": 4835, + "influen": 13229, + "influence": 9199, + "influenced": 21183, + "influencer": 25013, + "influencers": 29891, + "influences": 24926, + "influencing": 45126, + "influential": 17553, + "influenza": 39897, + "info": 5680, + "info": 2222, + "infographic": 10076, + "infographics": 33172, + "infor": 31773, + "inform": 10241, + "inform": 19449, + "informal": 25705, + "informat": 29625, + "informatics": 35685, + "information": 3204, + "informative": 19364, + "informed": 13876, + "informing": 45388, + "informs": 48440, + "infosec": 17863, + "infr": 29718, + "infra": 7312, + "infra": 45877, + "infrared": 22867, + "infrastructure": 9034, + "infringe": 44882, + "infringement": 48712, + "infront": 37668, + "infu": 15048, + "infuri": 48461, + "infused": 21461, + "infusion": 43464, + "ing": 653, + "ing": 519, + "inga": 15233, + "ingco": 40444, + "ingday": 16561, + "ingdon": 38731, + "inge": 11790, + "inge": 7071, + "inged": 30046, + "ingen": 19088, + "ingeni": 36884, + "inger": 33883, + "inger": 3541, + "ingfor": 33430, + "ingh": 9170, + "ingh": 30495, + "ingham": 24497, + "ingham": 4291, + "inghamshire": 39289, + "inghour": 42728, + "inging": 4066, + "ingl": 45662, + "ingle": 22228, + "ingle": 17005, + "ingles": 24490, + "ingley": 44428, + "inglis": 46327, + "ingly": 4796, + "ingnow": 34766, + "ingo": 30175, + "ingo": 9012, + "ingra": 45165, + "ingrad": 44124, + "ingram": 26998, + "ingredi": 9272, + "ingredient": 19799, + "ingredients": 11788, + "ingrid": 33496, + "ings": 895, + "ingthe": 20170, + "ingtips": 39373, + "ington": 11846, + "ington": 2156, + "ingu": 8714, + "ingual": 22795, + 
"ingue": 36838, + "ingui": 12788, + "inguish": 36146, + "inha": 32612, + "inhabit": 36189, + "inhabitants": 44968, + "inhal": 30786, + "inhe": 32617, + "inher": 24611, + "inherent": 47327, + "inherit": 34322, + "inheritance": 39341, + "inherited": 39111, + "inhi": 25557, + "inhibit": 32196, + "inho": 12984, + "ini": 6154, + "ini": 3581, + "inian": 36638, + "inim": 38717, + "inindia": 34021, + "ining": 1389, + "inist": 30976, + "init": 42670, + "initi": 4580, + "initial": 13980, + "initially": 28123, + "initials": 48794, + "initiated": 27756, + "initiation": 41009, + "initiative": 8152, + "initiatives": 16549, + "inity": 22126, + "inj": 5112, + "injec": 13688, + "injection": 21438, + "inju": 5006, + "injured": 7505, + "injuries": 9481, + "injury": 6223, + "injustice": 20541, + "ink": 4547, + "ink": 967, + "inka": 40685, + "inked": 29356, + "inki": 46176, + "inkigayo": 47882, + "inking": 37586, + "inks": 20966, + "inktober": 9387, + "inland": 21943, + "inlet": 35161, + "inline": 45004, + "inlove": 28415, + "inmate": 32341, + "inmates": 28216, + "inmy": 42657, + "inn": 27260, + "inn": 5569, + "inna": 35088, + "inner": 24512, + "inner": 6955, + "inning": 4415, + "innings": 11580, + "innis": 44059, + "inno": 7961, + "innocence": 26383, + "innocent": 11241, + "innov": 2890, + "innovate": 24549, + "innovation": 33063, + "innovation": 4272, + "innovations": 18817, + "innovative": 8494, + "innovator": 34735, + "innovators": 27834, + "ino": 4211, + "ino": 2691, + "inoa": 25649, + "inos": 21828, + "inous": 47801, + "inox": 22698, + "input": 16952, + "inputs": 48763, + "inqu": 10628, + "inqui": 18527, + "inquirer": 45172, + "inquiries": 29469, + "inquiry": 15865, + "inquis": 31171, + "inr": 36325, + "ins": 12786, + "ins": 1041, + "insan": 7875, + "insane": 10260, + "insanely": 27846, + "insanity": 26645, + "inscribed": 49168, + "inscription": 41127, + "insec": 15744, + "insect": 21297, + "insects": 18714, + "insecure": 35112, + "insecurity": 36964, + "inser": 13830, + "insert": 18807, + "insi": 3453, + "inside": 19141, + "inside": 2912, + "insider": 13300, + "insiders": 32171, + "insig": 40503, + "insight": 8795, + "insightful": 20354, + "insights": 8729, + "insignia": 48864, + "insist": 35504, + "insisted": 40423, + "insists": 27255, + "inski": 32630, + "insky": 24607, + "insol": 42366, + "insom": 21755, + "insomni": 42040, + "insomnia": 30598, + "inson": 21007, + "insp": 1597, + "inspec": 7915, + "inspect": 40815, + "inspecting": 40565, + "inspection": 15142, + "inspections": 39513, + "inspector": 20514, + "inspir": 2573, + "inspiration": 4195, + "inspirational": 41936, + "inspirational": 9855, + "inspirations": 35093, + "inspire": 27901, + "inspire": 8583, + "inspired": 39849, + "inspired": 3516, + "inspires": 17245, + "inspiring": 41847, + "inspiring": 5705, + "inspo": 26897, + "inst": 1264, + "inst": 1581, + "insta": 22411, + "insta": 11694, + "instability": 41377, + "instac": 46678, + "instaf": 33800, + "instag": 14612, + "instagood": 23718, + "instagram": 27910, + "instagram": 2659, + "instal": 38805, + "install": 6940, + "install": 11168, + "installation": 9358, + "installations": 27909, + "installed": 8807, + "installing": 18301, + "installment": 25315, + "installs": 45568, + "instalment": 47766, + "instance": 34572, + "instant": 38810, + "instant": 10635, + "instantly": 17703, + "instap": 23758, + "instapic": 34378, + "instaweather": 43078, + "instaweatherpro": 43150, + "inste": 3571, + "instead": 4191, + "instein": 13421, + "instem": 27030, + "instin": 23382, + "instinct": 30544, + 
"institu": 4257, + "institute": 5861, + "institutes": 43674, + "institution": 18823, + "institutional": 27442, + "institutions": 15207, + "instore": 41679, + "instru": 4544, + "instruc": 19648, + "instruction": 19407, + "instructional": 31022, + "instructions": 17040, + "instructor": 16087, + "instructors": 31998, + "instrument": 42196, + "instrument": 15806, + "instrumental": 23041, + "instruments": 14793, + "instyle": 41321, + "insu": 8805, + "insul": 9615, + "insulated": 42051, + "insulation": 28194, + "insulin": 29311, + "insult": 26673, + "insulting": 39646, + "insults": 40451, + "insur": 5024, + "insurance": 5870, + "insured": 31321, + "insurers": 43142, + "insurtech": 28716, + "int": 1828, + "int": 1207, + "inta": 38314, + "intact": 26870, + "intake": 19539, + "intan": 47695, + "inte": 1598, + "inte": 41900, + "intech": 26504, + "inted": 6147, + "integr": 5151, + "integral": 27018, + "integrate": 25735, + "integrated": 12797, + "integrating": 31555, + "integration": 12583, + "integrity": 14791, + "intel": 11778, + "intel": 11426, + "intellec": 13281, + "intellect": 47828, + "intellectu": 31966, + "intellectual": 18069, + "intelli": 5324, + "intellig": 5632, + "intelligence": 6846, + "intelligent": 14063, + "inten": 2967, + "intend": 36674, + "intended": 16812, + "intense": 10258, + "intensi": 22928, + "intensity": 19956, + "intensive": 21049, + "intent": 18881, + "intention": 26786, + "intentional": 29536, + "intentionally": 31215, + "intentions": 26710, + "inter": 1006, + "inter": 10093, + "interact": 21736, + "interacting": 35045, + "interaction": 17650, + "interactions": 22162, + "interactive": 9456, + "intercep": 23676, + "interception": 48762, + "interceptions": 45313, + "interchange": 34222, + "intercontinental": 31983, + "interdisciplinary": 38132, + "intere": 2008, + "interest": 5095, + "interested": 4620, + "interesting": 3628, + "interests": 16425, + "interface": 18753, + "interfaith": 38399, + "interference": 29099, + "interim": 19509, + "interior": 10700, + "interior": 7305, + "interiordesign": 12902, + "interiors": 14836, + "intermedi": 20246, + "intermediate": 24304, + "intermission": 44805, + "intermitt": 44946, + "intern": 9976, + "intern": 14068, + "internal": 11285, + "internally": 41134, + "internation": 42534, + "international": 8566, + "international": 2436, + "internationaldayof": 41518, + "internationally": 24059, + "internationalwomensday": 17682, + "interne": 32713, + "internet": 30180, + "internet": 4757, + "internetof": 44449, + "internetofthings": 45925, + "interns": 19902, + "internship": 16661, + "internships": 39410, + "interoper": 45754, + "interpre": 11162, + "interpret": 49154, + "interpret": 40459, + "interpretation": 20652, + "interpreted": 42157, + "interpreting": 46525, + "interro": 29548, + "interrup": 21609, + "interrupt": 48449, + "interrupted": 30288, + "intersec": 45246, + "intersection": 19210, + "interstate": 21963, + "interstellar": 41506, + "interval": 36032, + "intervals": 44884, + "interven": 18245, + "intervention": 16804, + "interventions": 28848, + "interview": 2885, + "interviewed": 11688, + "interviewing": 16399, + "interviews": 9910, + "intestin": 37938, + "intestinal": 38896, + "inthe": 7486, + "inti": 14459, + "intim": 38832, + "intimacy": 46430, + "intimate": 16382, + "intimid": 24041, + "intimidating": 44405, + "intimidation": 49258, + "inting": 15571, + "intl": 38186, + "intl": 14224, + "intment": 9020, + "intments": 21420, + "into": 35235, + "into": 1095, + "intoler": 28534, + "intolerance": 37808, + "intothe": 38511, + 
"intra": 20922, + "intrac": 46195, + "intram": 40956, + "intre": 29397, + "intrepid": 39127, + "intri": 15421, + "intric": 23763, + "intricate": 29616, + "intrigu": 18856, + "intrigue": 45140, + "intrigued": 40034, + "intriguing": 24334, + "intrin": 45181, + "intro": 2999, + "intro": 13224, + "introduc": 3621, + "introduce": 9813, + "introduced": 10446, + "introduces": 12933, + "introducing": 6256, + "introduction": 11812, + "introductory": 38121, + "intru": 22949, + "ints": 2514, + "intu": 17225, + "intuition": 40897, + "intuitive": 35224, + "inu": 21131, + "inuit": 41250, + "inus": 45857, + "inv": 2279, + "inv": 43786, + "inva": 10084, + "invade": 34609, + "invaded": 32596, + "invaders": 35188, + "invading": 40101, + "invali": 31592, + "invalid": 46998, + "invaluable": 33976, + "invasi": 38100, + "invasion": 13378, + "invasive": 19554, + "inve": 2024, + "inven": 26233, + "invent": 11665, + "invent": 23558, + "invented": 14100, + "invention": 23607, + "inventions": 44914, + "inventor": 22836, + "inventory": 19444, + "inver": 12061, + "inverness": 33080, + "inverte": 46397, + "inverted": 40709, + "invest": 4180, + "invest": 9716, + "invested": 22536, + "investig": 4626, + "investigate": 15703, + "investigated": 29180, + "investigates": 29621, + "investigating": 13713, + "investigation": 8194, + "investigations": 24020, + "investigative": 30233, + "investigator": 30528, + "investigators": 24121, + "investin": 40195, + "investing": 10554, + "investment": 5605, + "investments": 14675, + "investor": 15490, + "investors": 10486, + "invests": 38378, + "invic": 25253, + "invigor": 48722, + "invin": 30252, + "invincible": 38052, + "invisible": 16093, + "invit": 12454, + "invitation": 15032, + "invitational": 14511, + "invitations": 40120, + "invite": 8109, + "invited": 7731, + "invites": 16034, + "inviting": 14349, + "invo": 29417, + "invol": 4000, + "involve": 26325, + "involved": 5320, + "involvement": 19502, + "involves": 22652, + "involving": 14786, + "inwx": 35674, + "iny": 23257, + "inyour": 47954, + "io": 3167, + "io": 3752, + "ioc": 43018, + "iom": 33000, + "iom": 31135, + "ion": 14871, + "ion": 3668, + "ions": 26289, + "ior": 7354, + "ior": 2498, + "iority": 46016, + "iors": 6427, + "ios": 6614, + "iot": 32694, + "iot": 6627, + "iota": 37294, + "ious": 6994, + "iously": 38233, + "iow": 7439, + "iowa": 38847, + "iowa": 8290, + "ip": 1719, + "ip": 8600, + "ipa": 11199, + "ipad": 39067, + "ipad": 7491, + "ipads": 35281, + "ipc": 41981, + "iphone": 26030, + "iphone": 4314, + "iphones": 37561, + "ipl": 13440, + "ipment": 37824, + "ipo": 40218, + "ipo": 24090, + "ipod": 17889, + "ipp": 31706, + "ips": 26910, + "ipsw": 22221, + "ipswich": 24494, + "iq": 15554, + "iq": 19996, + "iqbal": 33553, + "ir": 582, + "ir": 742, + "ira": 4923, + "ira": 5371, + "irah": 35724, + "iran": 19273, + "iran": 5075, + "irandeal": 46533, + "irani": 37984, + "iranian": 14158, + "iraq": 8543, + "iraqi": 18617, + "irc": 41527, + "ird": 2770, + "ire": 3013, + "ire": 1454, + "ired": 32728, + "ired": 2995, + "ireland": 32806, + "ireland": 4157, + "irene": 21600, + "ires": 12435, + "irez": 21581, + "irgc": 47942, + "iri": 2155, + "iri": 13880, + "irical": 33366, + "irie": 42979, + "irina": 46664, + "iring": 10169, + "iris": 16437, + "irish": 9386, + "irish": 4889, + "irl": 34494, + "irl": 8570, + "irling": 26493, + "irls": 24344, + "irma": 22406, + "irn": 42603, + "iro": 23209, + "iro": 7280, + "iron": 7699, + "iron": 5391, + "ironic": 24518, + "ironically": 36779, + "ironing": 46655, + "ironman": 20330, + "irons": 30032, 
+ "irony": 20681, + "irport": 27769, + "irr": 24641, + "irrational": 47413, + "irregular": 38692, + "irrelevant": 34677, + "irresi": 31200, + "irresistible": 35252, + "irresponsible": 44714, + "irri": 21484, + "irrigation": 23761, + "irrit": 24218, + "irs": 6086, + "irst": 32701, + "iru": 48206, + "irvin": 47053, + "irvine": 24201, + "irving": 19738, + "irwin": 23750, + "iry": 7239, + "is": 595, + "is": 533, + "isa": 11034, + "isa": 6536, + "isaac": 37544, + "isaac": 13659, + "isab": 13357, + "isabel": 27466, + "isabella": 26192, + "isabelle": 31072, + "isable": 46631, + "isai": 15365, + "isaiah": 17952, + "isak": 40619, + "isance": 46893, + "isation": 7194, + "isback": 43811, + "isc": 39316, + "isch": 47888, + "isco": 5736, + "iscoming": 26458, + "isd": 46816, + "isd": 12002, + "ise": 7669, + "ise": 1479, + "ised": 2861, + "iselle": 48491, + "iser": 23080, + "iser": 5626, + "isers": 34879, + "ises": 5153, + "isf": 44036, + "isgreat": 34595, + "ish": 6844, + "ish": 1061, + "isha": 28050, + "ishable": 37949, + "ished": 35341, + "ishere": 46053, + "ishi": 26224, + "ishq": 27996, + "ishqba": 32503, + "ishqbaaaz": 36591, + "isi": 7233, + "isi": 17880, + "isil": 34636, + "isin": 37676, + "ising": 3426, + "isis": 7531, + "isk": 30171, + "isl": 31368, + "isla": 22807, + "islam": 6003, + "islam": 8770, + "islamabad": 19959, + "islamic": 31627, + "islamic": 9552, + "islamist": 38798, + "islamophobia": 43459, + "island": 13408, + "island": 2619, + "islander": 45651, + "islanders": 27804, + "islands": 7145, + "islay": 49279, + "isle": 19082, + "isle": 11849, + "isleof": 24718, + "isles": 21816, + "islife": 26433, + "islington": 34945, + "ism": 47730, + "ism": 1935, + "isma": 43937, + "ismail": 36140, + "isme": 43570, + "ismo": 41926, + "isms": 18700, + "isn": 2923, + "isner": 48246, + "isnow": 43694, + "isnt": 19416, + "iso": 2462, + "iso": 12263, + "isol": 11414, + "isolated": 19044, + "isolation": 26400, + "ison": 12949, + "ison": 4553, + "isons": 33318, + "isoo": 35857, + "isp": 31397, + "isp": 39041, + "isra": 3591, + "israel": 20837, + "israel": 4779, + "israeli": 8994, + "israelis": 45713, + "isreal": 47147, + "isro": 44841, + "iss": 11738, + "iss": 4950, + "issa": 38579, + "issa": 7560, + "issan": 49358, + "issance": 40828, + "issant": 38828, + "isse": 18986, + "ission": 37946, + "issu": 2049, + "issue": 3202, + "issued": 9246, + "issues": 4082, + "issuing": 37226, + "ist": 9751, + "ist": 2304, + "istanbul": 12258, + "istandwith": 33820, + "iste": 32563, + "ister": 14555, + "isthe": 46748, + "istic": 29556, + "ists": 8426, + "isu": 17030, + "isu": 23328, + "it": 529, + "it": 585, + "ita": 36920, + "ita": 2864, + "itable": 8915, + "ital": 2306, + "ital": 1660, + "itali": 11644, + "italia": 11025, + "italian": 20264, + "italian": 5175, + "italians": 44744, + "italk": 32894, + "italy": 4052, + "itan": 18383, + "itans": 40711, + "itar": 47161, + "itarian": 11599, + "itary": 17604, + "itas": 31634, + "itas": 13436, + "itate": 42457, + "itated": 36744, + "itation": 5070, + "itative": 22892, + "itc": 36449, + "itch": 2387, + "itch": 8147, + "itchen": 32664, + "itchy": 41980, + "ite": 2732, + "ite": 802, + "iteam": 37828, + "itec": 3099, + "itec": 43936, + "itech": 44215, + "itech": 23040, + "ited": 8603, + "ited": 1108, + "itel": 44638, + "itely": 4605, + "item": 8532, + "items": 6207, + "iter": 7938, + "iter": 19773, + "iteracy": 39634, + "iterate": 43106, + "iteration": 38790, + "ites": 2454, + "itez": 42131, + "itf": 35436, + "itfc": 36519, + "ith": 6133, + "ith": 1757, + "ithaca": 46257, + "iti": 
760, + "iti": 6165, + "itia": 22634, + "itian": 23365, + "itic": 11950, + "itical": 48767, + "itics": 33967, + "ities": 41423, + "ities": 1480, + "itim": 15676, + "itiner": 32803, + "itinerary": 41564, + "iting": 1257, + "ition": 25263, + "ition": 1104, + "itions": 5540, + "itious": 13329, + "itis": 33539, + "itis": 8388, + "itive": 3067, + "itly": 42240, + "ito": 22167, + "ito": 4661, + "iton": 21119, + "itor": 47267, + "itor": 4584, + "itors": 22005, + "itos": 24560, + "its": 7140, + "its": 902, + "itsa": 45032, + "itself": 7290, + "itsme": 41125, + "itss": 47040, + "itt": 1031, + "itt": 11228, + "itta": 21233, + "itte": 31962, + "itted": 24429, + "itten": 30014, + "itten": 4343, + "itter": 11456, + "itters": 13082, + "itti": 28629, + "ittin": 25646, + "itting": 3147, + "ittle": 24208, + "ittle": 21366, + "ittles": 38989, + "itton": 25707, + "itty": 35096, + "itu": 1668, + "itu": 32128, + "itude": 43382, + "itude": 5012, + "itudes": 20459, + "itunes": 7007, + "itup": 35838, + "iture": 25547, + "itus": 24364, + "itutes": 32883, + "itv": 20159, + "itv": 12805, + "ity": 2480, + "ity": 696, + "itya": 32055, + "itz": 14544, + "itz": 7807, + "iu": 14292, + "iu": 15575, + "ium": 10762, + "ius": 6740, + "iv": 6775, + "iv": 9315, + "iva": 42463, + "ivan": 15544, + "ivan": 15689, + "ivanka": 37914, + "ive": 26885, + "ive": 8653, + "ived": 15654, + "iver": 36849, + "iver": 44254, + "ives": 27333, + "ivf": 39159, + "iving": 45136, + "ivory": 16776, + "ivote": 45835, + "ivy": 36939, + "ivy": 16045, + "iw": 13058, + "iw": 46604, + "iwant": 42747, + "iwd": 16815, + "iwm": 44237, + "ix": 13272, + "ix": 8756, + "iy": 13704, + "iya": 18595, + "iyaki": 48395, + "iz": 2845, + "iz": 8407, + "iza": 37704, + "ization": 10847, + "ize": 10885, + "ized": 7690, + "izen": 34776, + "izer": 23895, + "izes": 45434, + "izing": 17354, + "izo": 46910, + "izz": 31779, + "izz": 46128, + "izzy": 28861, + "j": 73, + "j": 329, + "ja": 1586, + "ja": 2641, + "jaan": 25052, + "jab": 8059, + "jab": 9439, + "jac": 2293, + "jac": 30198, + "jace": 43286, + "jack": 2679, + "jack": 3267, + "jacked": 27923, + "jacket": 6164, + "jackets": 14745, + "jacki": 47418, + "jackie": 28023, + "jackie": 11716, + "jacking": 40929, + "jackman": 35723, + "jackpot": 23926, + "jacks": 19649, + "jackson": 12321, + "jackson": 4363, + "jacksonville": 19263, + "jaco": 6840, + "jacob": 14385, + "jacob": 9222, + "jacobs": 17482, + "jacobson": 46826, + "jacqu": 14495, + "jacqueline": 22843, + "jacques": 17799, + "jad": 12976, + "jad": 38691, + "jada": 37416, + "jade": 25123, + "jade": 14513, + "jaden": 37174, + "jadine": 37445, + "jae": 16869, + "jae": 15765, + "jaejoong": 43610, + "jaf": 19362, + "jag": 7984, + "jag": 36236, + "jagan": 48530, + "jagger": 30835, + "jags": 31086, + "jagu": 10096, + "jaguar": 44777, + "jaguar": 14757, + "jaguars": 21854, + "jah": 20067, + "jah": 11084, + "jahan": 44404, + "jahan": 47827, + "jai": 10542, + "jai": 13819, + "jail": 18574, + "jail": 9332, + "jailbreak": 45990, + "jailed": 19456, + "jails": 47833, + "jaime": 24716, + "jain": 21999, + "jaipur": 23593, + "jais": 48607, + "jait": 28910, + "jaitley": 32776, + "jak": 9225, + "jak": 30589, + "jakarta": 15471, + "jake": 13140, + "jake": 7419, + "jakob": 47358, + "jal": 8380, + "jal": 26773, + "jalan": 27270, + "jalap": 49081, + "jalape": 34263, + "jalapeño": 43017, + "jalen": 33548, + "jam": 1434, + "jam": 5201, + "jama": 8977, + "jama": 35366, + "jamaica": 13019, + "jamaican": 25144, + "jamal": 26108, + "jambo": 35599, + "jamboree": 38506, + "jame": 12341, + "james": 
6963, + "james": 2392, + "jamesbond": 44704, + "jamesc": 47004, + "jameson": 31731, + "jami": 15092, + "jamie": 16454, + "jamie": 8078, + "jamiedor": 34310, + "jamiedornan": 34896, + "jammed": 35590, + "jammin": 35223, + "jamming": 25862, + "jammu": 25926, + "jams": 20243, + "jan": 1891, + "jan": 3334, + "jana": 18182, + "jane": 12389, + "jane": 6736, + "janeiro": 31740, + "janet": 29665, + "janet": 15872, + "jang": 41526, + "jang": 22074, + "jani": 22606, + "janice": 36048, + "janine": 46896, + "janis": 44233, + "jann": 35377, + "jans": 22578, + "jansen": 45354, + "janu": 3623, + "january": 3697, + "jap": 2299, + "jap": 49062, + "japan": 4502, + "japan": 3400, + "japanese": 27211, + "japanese": 4925, + "japs": 42121, + "jar": 5120, + "jar": 10837, + "jard": 25778, + "jardin": 37371, + "jare": 17654, + "jared": 35597, + "jared": 12571, + "jaredle": 36739, + "jaredleto": 37106, + "jaro": 35505, + "jarpad": 44497, + "jarre": 23385, + "jarrett": 30531, + "jars": 27583, + "jarvis": 29286, + "jas": 4492, + "jas": 17559, + "jasmin": 42989, + "jasmin": 47700, + "jasmine": 17056, + "jason": 10009, + "jason": 5395, + "jasper": 19827, + "jat": 26106, + "jau": 26932, + "jauregui": 48175, + "jav": 6234, + "java": 12918, + "javascri": 16289, + "javascript": 16423, + "jave": 46218, + "javed": 42268, + "javelin": 41701, + "javi": 47627, + "javier": 23307, + "jaw": 14804, + "jaw": 17307, + "jawa": 44790, + "jaws": 25491, + "jax": 22348, + "jax": 12390, + "jay": 3427, + "jay": 4155, + "jaya": 21960, + "jayanti": 37732, + "jaye": 45703, + "jayne": 35228, + "jays": 12393, + "jaz": 3465, + "jaz": 32874, + "jazeera": 38260, + "jazz": 11488, + "jazz": 4528, + "jazzfest": 36683, + "jazzy": 28191, + "jb": 21915, + "jb": 13637, + "jc": 14991, + "jc": 11517, + "jd": 18289, + "jd": 14125, + "jdm": 42013, + "je": 1013, + "je": 8776, + "jeal": 9964, + "jealous": 11093, + "jealousy": 37654, + "jean": 13943, + "jean": 6473, + "jeanette": 48167, + "jeanne": 29201, + "jeans": 10157, + "jeb": 35101, + "jec": 1347, + "ject": 6070, + "jed": 12166, + "jed": 38748, + "jeddah": 40982, + "jedi": 16681, + "jee": 29250, + "jee": 14870, + "jeep": 16593, + "jeep": 11286, + "jeeplife": 43100, + "jeet": 45542, + "jeet": 30944, + "jef": 10276, + "jeff": 6245, + "jeff": 5550, + "jefferson": 44711, + "jefferson": 13976, + "jeffery": 41470, + "jeffree": 45994, + "jeffrey": 32886, + "jeffrey": 16027, + "jeho": 42437, + "jeky": 43893, + "jekyll": 49405, + "jel": 9794, + "jelena": 48218, + "jelly": 19110, + "jelly": 13762, + "jellyfish": 30988, + "jem": 46326, + "jem": 37530, + "jen": 2554, + "jen": 12997, + "jenkins": 16162, + "jenn": 33921, + "jenn": 29869, + "jenna": 17125, + "jenner": 14260, + "jenni": 6774, + "jennie": 28875, + "jennifer": 19786, + "jennifer": 8613, + "jennings": 21564, + "jenny": 20165, + "jenny": 13414, + "jens": 40806, + "jensen": 35558, + "jensen": 19004, + "jensenackles": 41011, + "jeon": 45200, + "jeon": 43337, + "jeong": 47146, + "jeong": 39264, + "jeopar": 22988, + "jeopardy": 29613, + "jer": 2310, + "jer": 35307, + "jere": 5614, + "jeremi": 22362, + "jeremiah": 27301, + "jeremy": 14656, + "jeremy": 8127, + "jeremycorbyn": 37484, + "jeric": 25084, + "jericho": 28892, + "jerk": 23917, + "jerky": 40079, + "jermaine": 40722, + "jerome": 19876, + "jerry": 18163, + "jerry": 9164, + "jersey": 21921, + "jersey": 4471, + "jerseys": 15518, + "jerus": 12257, + "jerusalem": 12557, + "jes": 7686, + "jes": 35826, + "jess": 5313, + "jess": 13758, + "jesse": 23112, + "jesse": 11770, + "jessi": 24373, + "jessic": 14881, + 
"jessica": 45421, + "jessica": 8178, + "jessie": 19424, + "jester": 44225, + "jesu": 19777, + "jesuit": 33234, + "jesus": 4070, + "jet": 11515, + "jet": 6565, + "jetblue": 45021, + "jeter": 38450, + "jets": 38584, + "jets": 10025, + "jett": 44541, + "jetty": 46382, + "jew": 27450, + "jewel": 4880, + "jewel": 17591, + "jewell": 9777, + "jewellers": 46265, + "jewellery": 11192, + "jewelry": 28018, + "jewelry": 6039, + "jewels": 20205, + "jewish": 29594, + "jewish": 9104, + "jews": 14200, + "jf": 31130, + "jf": 33718, + "jfc": 43652, + "jfk": 18486, + "jg": 41986, + "jg": 35138, + "jh": 24858, + "jh": 21485, + "jha": 47012, + "jha": 38092, + "jhal": 45695, + "jhar": 31546, + "jharkhand": 39001, + "jhb": 34631, + "ji": 3252, + "ji": 2697, + "jia": 32907, + "jian": 33427, + "jiang": 43309, + "jiang": 25762, + "jic": 48350, + "jic": 40215, + "jid": 24403, + "jie": 40005, + "jig": 15136, + "jig": 47430, + "jigsaw": 32987, + "jiha": 23194, + "jihad": 29637, + "jihoon": 44765, + "jil": 36225, + "jill": 24136, + "jill": 15254, + "jillian": 37820, + "jim": 3190, + "jim": 4550, + "jima": 20679, + "jimcantore": 43950, + "jimenez": 35947, + "jimi": 30565, + "jimin": 16286, + "jimmie": 45679, + "jimmy": 12215, + "jimmy": 6817, + "jimmyfallon": 45265, + "jin": 7927, + "jin": 8485, + "jind": 40609, + "jing": 34933, + "jing": 28607, + "jingle": 28699, + "jinnah": 43141, + "jinping": 39308, + "jinx": 42977, + "jinyoung": 38051, + "jio": 40501, + "jis": 25988, + "jis": 23515, + "jisoo": 43070, + "jit": 11947, + "jit": 20308, + "jitsu": 24530, + "jiu": 43351, + "jiu": 44123, + "jj": 12502, + "jj": 12790, + "jk": 20189, + "jk": 9702, + "jkt": 21494, + "jl": 25027, + "jl": 22911, + "jlo": 31017, + "jm": 24044, + "jm": 18657, + "jn": 24576, + "jn": 21717, + "jnr": 37145, + "jnu": 47142, + "jo": 683, + "jo": 3804, + "joachim": 48979, + "joan": 28064, + "joan": 12710, + "joann": 35484, + "joanna": 25357, + "joanne": 43736, + "joanne": 25092, + "joao": 45666, + "joaqu": 25140, + "joaquin": 30745, + "job": 13114, + "job": 2075, + "jobs": 3735, + "jobsearch": 45459, + "joburg": 39343, + "jocel": 36879, + "jocelyn": 47259, + "jock": 34485, + "jockey": 20126, + "jodh": 48689, + "jodi": 36812, + "jodi": 26888, + "jodie": 33100, + "jody": 32959, + "joe": 9309, + "joe": 3305, + "joel": 19819, + "joel": 11429, + "joes": 34756, + "joey": 16281, + "joey": 10455, + "jog": 37967, + "jog": 31691, + "jogging": 37922, + "joh": 1201, + "johan": 17416, + "johan": 27789, + "johann": 31180, + "johanna": 41494, + "johannes": 37779, + "johannesburg": 28377, + "johansson": 41512, + "johar": 34871, + "john": 2004, + "john": 1742, + "johncena": 46820, + "johnnie": 47947, + "johnny": 14464, + "johnny": 6904, + "johns": 14515, + "johnson": 26036, + "johnson": 4010, + "johnston": 19791, + "johnstone": 40766, + "johor": 34750, + "join": 14737, + "join": 1384, + "joined": 4954, + "joining": 5118, + "joins": 5681, + "joint": 6640, + "jointhe": 30422, + "jointly": 37471, + "joints": 27204, + "jojo": 41484, + "jojo": 22075, + "joke": 7198, + "joker": 18200, + "jokers": 44101, + "jokes": 11336, + "joking": 26112, + "joko": 44975, + "jol": 9174, + "jol": 36470, + "jolie": 31633, + "jolla": 46109, + "jolly": 21516, + "jom": 32152, + "jon": 3026, + "jon": 6139, + "jona": 6629, + "jonah": 47934, + "jonah": 27556, + "jonas": 42373, + "jonas": 13650, + "jonathan": 19026, + "jonathan": 7762, + "jone": 33934, + "jones": 19091, + "jones": 3538, + "jong": 20214, + "jong": 14726, + "jonghyun": 29023, + "jongin": 36957, + "joni": 43177, + "jonny": 28454, + 
"jonny": 21895, + "joo": 25807, + "joo": 27680, + "joom": 47543, + "joon": 18547, + "joong": 26544, + "jop": 30486, + "joplin": 42688, + "jor": 2482, + "jor": 31595, + "jordan": 14644, + "jordan": 4388, + "jordani": 46898, + "jordi": 44795, + "jorge": 48761, + "jorge": 18225, + "jos": 20560, + "jos": 19661, + "jose": 4647, + "jose": 7075, + "josef": 36584, + "josel": 47800, + "joseph": 14163, + "joseph": 6478, + "josephine": 34866, + "josh": 9998, + "josh": 5679, + "joshi": 24786, + "joshu": 9112, + "joshua": 11852, + "josi": 33583, + "josie": 33167, + "joss": 42834, + "josé": 27922, + "jou": 19921, + "jou": 32029, + "jour": 2078, + "jour": 17142, + "journ": 4563, + "journal": 6626, + "journalism": 10123, + "journalist": 9914, + "journalists": 12249, + "journals": 24391, + "journe": 48833, + "journey": 32156, + "journey": 3749, + "journeys": 23329, + "journo": 37034, + "journos": 46437, + "jovi": 33866, + "joy": 6308, + "joy": 4273, + "joyce": 43753, + "joyce": 15275, + "joye": 34052, + "joyeux": 41876, + "joyful": 24139, + "joyous": 32245, + "joyride": 46949, + "joys": 22996, + "jp": 18249, + "jp": 10557, + "jpg": 36950, + "jpn": 36212, + "jr": 13973, + "jr": 3605, + "js": 46243, + "js": 8006, + "jst": 26523, + "jt": 39480, + "jt": 18119, + "ju": 669, + "ju": 9970, + "jual": 38720, + "juan": 17148, + "juan": 9274, + "juana": 9081, + "jubi": 15485, + "jubil": 47743, + "jubilee": 16907, + "juco": 31570, + "jud": 8363, + "juda": 32478, + "judah": 41066, + "judaism": 42217, + "judas": 39532, + "judd": 29770, + "judg": 20012, + "judge": 16824, + "judge": 5656, + "judged": 33453, + "judgement": 25246, + "judges": 12575, + "judging": 16570, + "judgment": 24191, + "judi": 42546, + "judice": 28032, + "judicial": 19579, + "judiciary": 24545, + "judith": 24047, + "judo": 27011, + "judy": 34663, + "judy": 16510, + "jug": 27619, + "jugg": 38628, + "juic": 38761, + "juice": 37954, + "juice": 6916, + "juices": 36757, + "juicy": 17623, + "juju": 43020, + "juke": 32519, + "jukebox": 36411, + "jul": 34662, + "jul": 15975, + "jule": 40819, + "jules": 21996, + "juli": 3614, + "juli": 49160, + "julia": 10207, + "julian": 25459, + "julian": 12643, + "juliana": 46059, + "julie": 22534, + "julie": 10505, + "julien": 32595, + "juliet": 20641, + "juliette": 44804, + "julio": 24888, + "julius": 20870, + "july": 2272, + "jum": 20791, + "jumbo": 24678, + "jume": 45989, + "jump": 5519, + "jump": 6423, + "jumped": 16901, + "jumper": 16558, + "jumpers": 36485, + "jumping": 11476, + "jumpman": 48803, + "jumps": 18911, + "jumpsuit": 31044, + "jun": 1637, + "jun": 7719, + "junction": 11320, + "june": 23188, + "june": 2345, + "jung": 13086, + "jung": 13031, + "jungkook": 20040, + "jungle": 42421, + "jungle": 10865, + "juni": 4029, + "junior": 21167, + "junior": 5027, + "juniors": 16811, + "juniper": 33829, + "junk": 16000, + "junkie": 27613, + "junkies": 41207, + "juno": 28845, + "junto": 34282, + "jupit": 15270, + "jupiter": 16212, + "jur": 15896, + "jura": 14715, + "jurassic": 28844, + "jurassic": 21255, + "jurgen": 39263, + "juris": 37010, + "jurisdic": 37714, + "jury": 12931, + "jus": 14999, + "just": 1770, + "just": 761, + "justi": 14700, + "justic": 30399, + "justice": 16904, + "justice": 3604, + "justicefor": 25812, + "justiceleague": 41929, + "justices": 44356, + "justified": 34546, + "justify": 28192, + "justin": 7537, + "justin": 4394, + "justinbieber": 12501, + "justine": 34418, + "justintrudeau": 32184, + "justsaying": 42922, + "juve": 47717, + "juve": 23092, + "juven": 12944, + "juvenile": 19333, + "juvent": 
13908, + "juventus": 47378, + "juventus": 16208, + "jux": 33552, + "juxta": 34964, + "jv": 37932, + "jv": 11805, + "jw": 30221, + "jw": 24215, + "jy": 20979, + "jyo": 27378, + "jyoti": 48696, + "jä": 45381, + "k": 74, + "k": 330, + "ka": 1595, + "ka": 1525, + "kaa": 34496, + "kab": 6554, + "kab": 45134, + "kabaddi": 41749, + "kabir": 38619, + "kabo": 47974, + "kabul": 26160, + "kac": 21693, + "kach": 14341, + "kad": 10901, + "kade": 41130, + "kaduna": 38053, + "kae": 22542, + "kaeper": 30070, + "kaepernick": 30713, + "kaf": 19870, + "kag": 13666, + "kag": 31003, + "kah": 16068, + "kah": 15463, + "kahn": 35397, + "kai": 12752, + "kai": 9601, + "kaido": 40255, + "kail": 23623, + "kaine": 39028, + "kair": 33027, + "kaiser": 43685, + "kaiser": 29960, + "kait": 19326, + "kaitlyn": 34948, + "kaj": 44788, + "kaj": 40381, + "kak": 10401, + "kak": 40128, + "kaka": 47689, + "kaku": 30900, + "kal": 4187, + "kal": 18712, + "kala": 45453, + "kala": 33105, + "kalam": 40142, + "kalamaz": 42328, + "kalamazoo": 46264, + "kalb": 34483, + "kale": 17162, + "kale": 16625, + "kaleido": 41144, + "kali": 17844, + "kali": 26964, + "kalin": 42776, + "kalyan": 23825, + "kam": 4104, + "kam": 26011, + "kamal": 31371, + "kamal": 28619, + "kamala": 45003, + "kame": 45235, + "kamen": 40738, + "kami": 28707, + "kamloops": 36602, + "kamp": 35179, + "kamp": 29522, + "kampala": 37134, + "kan": 2532, + "kan": 8101, + "kana": 35178, + "kand": 17478, + "kane": 32218, + "kane": 9765, + "kang": 12226, + "kang": 20789, + "kangar": 20622, + "kangaroo": 25513, + "kani": 40907, + "kani": 41948, + "kann": 18533, + "kannada": 30053, + "kano": 28201, + "kans": 34012, + "kansas": 25507, + "kansas": 6539, + "kansascity": 46134, + "kant": 39923, + "kant": 47132, + "kanth": 24427, + "kanu": 44565, + "kany": 13590, + "kanye": 29680, + "kanye": 14965, + "kanyewest": 31943, + "kap": 6804, + "kap": 45279, + "kapam": 48561, + "kapil": 32337, + "kapil": 42709, + "kapilshar": 48978, + "kaplan": 37401, + "kapoor": 9117, + "kapp": 36717, + "kappa": 20239, + "kapur": 42371, + "kar": 1813, + "kar": 5933, + "kara": 12552, + "karab": 40916, + "karachi": 13671, + "karak": 40372, + "karan": 20077, + "karan": 20931, + "karanjohar": 47621, + "karao": 16262, + "karaoke": 16640, + "karate": 21211, + "kardashi": 13619, + "kardashian": 14578, + "kare": 14310, + "kare": 38354, + "kareem": 38885, + "kareena": 41569, + "karen": 17719, + "karen": 10349, + "kari": 15339, + "kari": 15161, + "karim": 33477, + "karin": 43917, + "karina": 40250, + "karl": 20967, + "karl": 13134, + "karla": 42309, + "karma": 17658, + "karnat": 13994, + "karnataka": 15515, + "karo": 45305, + "kart": 47841, + "kart": 21310, + "karthik": 41397, + "karti": 23053, + "kartikeyan": 32584, + "karting": 41655, + "kas": 6119, + "kas": 14372, + "kasa": 46111, + "kash": 6954, + "kash": 21371, + "kashi": 47945, + "kashmir": 20251, + "kashmir": 10783, + "kashmiri": 35331, + "kasi": 45870, + "kasi": 32819, + "kasich": 39666, + "kat": 2844, + "kat": 9341, + "kata": 14558, + "kate": 11620, + "kate": 6699, + "katelyn": 45963, + "kath": 7386, + "kath": 19745, + "katharine": 41473, + "katherine": 17687, + "kathle": 18721, + "kathleen": 21709, + "kathmandu": 34456, + "kathniel": 36159, + "kathr": 14905, + "kathryn": 33142, + "kathryn": 19999, + "kathy": 34775, + "kathy": 18795, + "kati": 6515, + "kati": 29928, + "katic": 48058, + "katie": 24117, + "katie": 9076, + "katniss": 47916, + "kato": 27573, + "katrin": 31282, + "katrina": 21397, + "katrinakaif": 45845, + "kats": 44213, + "katsu": 49296, + "katsu": 
43712, + "katy": 17609, + "katy": 14435, + "katyperry": 28309, + "katz": 30790, + "kau": 9299, + "kau": 36895, + "kauai": 44050, + "kaufman": 37188, + "kaur": 30518, + "kav": 10228, + "kavan": 18576, + "kavanaugh": 20252, + "kaw": 10842, + "kaw": 42719, + "kawa": 33244, + "kawaii": 26891, + "kawasaki": 28227, + "kawhi": 41220, + "kay": 4673, + "kay": 9862, + "kaya": 22752, + "kayak": 27043, + "kayaking": 28977, + "kaye": 33003, + "kayla": 17139, + "kaylee": 47215, + "kayo": 37021, + "kaz": 8812, + "kaz": 39622, + "kazakh": 25451, + "kazakhstan": 26720, + "kazan": 47641, + "kb": 27381, + "kb": 19960, + "kbs": 27418, + "kc": 10869, + "kc": 8638, + "kca": 14347, + "kcon": 39970, + "kcr": 46181, + "kd": 21826, + "kd": 15597, + "kday": 31074, + "kdrama": 48628, + "ke": 643, + "ke": 618, + "kea": 47926, + "kean": 43288, + "keane": 28635, + "keanu": 40608, + "kear": 21562, + "kearney": 36435, + "keating": 40045, + "keaton": 29975, + "kebab": 36497, + "ked": 11730, + "ked": 1243, + "kee": 9724, + "kee": 6760, + "keef": 42323, + "keefe": 46965, + "keegan": 31122, + "keel": 48376, + "keen": 17714, + "keen": 13218, + "keenan": 36276, + "keep": 2924, + "keep": 1726, + "keeper": 7650, + "keepers": 16130, + "keepin": 41712, + "keeping": 38371, + "keeping": 4873, + "keepit": 28044, + "keeps": 6333, + "keer": 27412, + "keerth": 47500, + "keerthyofficial": 48185, + "kees": 10791, + "keg": 32785, + "keh": 41272, + "keh": 36983, + "kei": 18735, + "kei": 24835, + "keith": 18762, + "keith": 8252, + "kej": 15674, + "kejri": 16617, + "kejriwal": 17334, + "keke": 39195, + "kel": 2825, + "kel": 7553, + "kele": 41765, + "kell": 16082, + "kell": 40103, + "keller": 21407, + "kelley": 23776, + "kelli": 45852, + "kelli": 46190, + "kellie": 49224, + "kellogg": 44218, + "kelly": 13417, + "kelly": 5220, + "kelown": 31708, + "kelowna": 32963, + "kelsey": 42295, + "kelsey": 23018, + "kelvin": 32859, + "kem": 31013, + "kem": 17349, + "kemp": 18302, + "kemp": 25325, + "ken": 1838, + "ken": 1702, + "kend": 7497, + "kendal": 44836, + "kendall": 34607, + "kendall": 16238, + "kendra": 36074, + "kendrick": 41787, + "kendrick": 21953, + "kendricklamar": 47020, + "kenne": 6209, + "kennedy": 38631, + "kennedy": 9004, + "kennel": 39595, + "kenneth": 46900, + "kenneth": 17839, + "kenney": 41373, + "kenny": 20185, + "kenny": 9595, + "kens": 29765, + "kensing": 21505, + "kensington": 24988, + "kent": 13875, + "kent": 8214, + "kentu": 9045, + "kentucky": 32230, + "kentucky": 10014, + "keny": 17374, + "kenya": 6181, + "kenyan": 22624, + "kenyans": 36263, + "kenyatta": 31012, + "kenzie": 38087, + "keo": 43062, + "kept": 7737, + "ker": 2352, + "ker": 1485, + "keral": 35122, + "kerala": 11881, + "kered": 26690, + "kerel": 32232, + "keri": 43447, + "kermit": 40908, + "kern": 40150, + "kernel": 40684, + "kerr": 20491, + "kerri": 41849, + "kerry": 24795, + "kerry": 13097, + "kers": 30347, + "kers": 2880, + "kershaw": 40785, + "kerson": 42810, + "kerswednesday": 48152, + "kert": 47279, + "kes": 38398, + "kes": 1115, + "kesh": 19751, + "kesha": 36526, + "kest": 15080, + "ket": 2715, + "ket": 1236, + "ketball": 38240, + "ketch": 22590, + "ketch": 35371, + "ketchup": 26724, + "kete": 25404, + "keted": 41396, + "keting": 15951, + "keto": 27485, + "keto": 28754, + "kets": 1632, + "kett": 23124, + "kett": 10312, + "kettering": 43779, + "kettle": 41992, + "kettle": 24303, + "kev": 22758, + "kev": 29419, + "kevin": 9419, + "kevin": 4685, + "kew": 38014, + "kew": 31409, + "kex": 30251, + "key": 2891, + "key": 1458, + "keyan": 27617, + "keyboard": 13017, + 
"keyboards": 49237, + "keychain": 31050, + "keye": 40516, + "keye": 20635, + "keyes": 18336, + "keynes": 32462, + "keynote": 7556, + "keys": 48912, + "keys": 6355, + "keystone": 30688, + "keyword": 42284, + "keywords": 48122, + "kf": 33308, + "kf": 42119, + "kfc": 22032, + "kg": 36772, + "kg": 7817, + "kgs": 46629, + "kh": 2166, + "kh": 7452, + "kha": 7333, + "kha": 18929, + "khair": 43742, + "khaki": 41646, + "khal": 13070, + "khaled": 29343, + "khali": 11324, + "khalid": 27166, + "khalifa": 21389, + "khalil": 36229, + "kham": 24892, + "khan": 13318, + "khan": 3873, + "khand": 43384, + "khand": 31110, + "khanna": 29931, + "khar": 18340, + "khar": 28578, + "khart": 37458, + "khat": 43290, + "khe": 26360, + "kher": 43843, + "khi": 39062, + "khi": 42925, + "khil": 34101, + "khloe": 45312, + "kho": 14022, + "kho": 28774, + "khou": 30656, + "khs": 21239, + "khtar": 45593, + "khu": 14041, + "khur": 32083, + "khy": 40917, + "khz": 45604, + "ki": 848, + "ki": 2608, + "kia": 8712, + "kian": 43961, + "kian": 25708, + "kians": 44010, + "kib": 43108, + "kiba": 37207, + "kic": 24003, + "kic": 27633, + "kicchasu": 44665, + "kicchasudeep": 45560, + "kick": 4102, + "kick": 4289, + "kickass": 39299, + "kickboxing": 36041, + "kicked": 12479, + "kicker": 26338, + "kickin": 34597, + "kicking": 7802, + "kickoff": 10245, + "kicks": 6989, + "kickstart": 40780, + "kickstarter": 13228, + "kid": 3948, + "kid": 3551, + "kidd": 24082, + "kidding": 14535, + "kiddo": 36360, + "kiddos": 29205, + "kidlit": 39064, + "kidlit": 33515, + "kidlitart": 41600, + "kidman": 44931, + "kidnap": 45100, + "kidnapp": 16183, + "kidnapped": 24737, + "kidnapping": 32361, + "kidney": 37835, + "kidney": 14610, + "kids": 15561, + "kids": 1911, + "kidz": 41938, + "kie": 8544, + "kie": 3094, + "kiefer": 48026, + "kiel": 40940, + "kiel": 25509, + "kien": 28782, + "kier": 20403, + "kier": 35575, + "kieran": 29231, + "kies": 36601, + "kies": 4993, + "kiest": 29755, + "kiev": 24585, + "kiewicz": 47574, + "kigali": 40278, + "kii": 39340, + "kik": 36176, + "kiki": 23962, + "kiko": 40861, + "kil": 4912, + "kil": 39337, + "kildare": 45541, + "kili": 24386, + "kilig": 49172, + "kilimanjaro": 43470, + "kilkenny": 33805, + "kill": 6163, + "kill": 4367, + "killa": 41355, + "killarney": 48813, + "killed": 3733, + "killer": 28230, + "killer": 6613, + "killers": 17614, + "killin": 25903, + "killing": 37977, + "killing": 5923, + "killings": 24918, + "kills": 9795, + "kiln": 44150, + "kilo": 39281, + "kilom": 26285, + "kilometers": 39192, + "kilometres": 43278, + "kilt": 49319, + "kim": 4639, + "kim": 4606, + "kimber": 16796, + "kimberley": 39859, + "kimberly": 27465, + "kimchi": 41027, + "kimi": 31536, + "kimkardashian": 35400, + "kimmel": 27820, + "kimono": 40024, + "kin": 1442, + "kin": 2667, + "kina": 28518, + "kind": 7204, + "kind": 3044, + "kinda": 6612, + "kinder": 12711, + "kinder": 24159, + "kindergarten": 16749, + "kindle": 24704, + "kindle": 10746, + "kindleunlimited": 32164, + "kindly": 13952, + "kindness": 45112, + "kindness": 10614, + "kinds": 14879, + "kine": 17607, + "kineni": 49080, + "kinetic": 37699, + "king": 2365, + "king": 674, + "kingdom": 21870, + "kingdom": 7364, + "kingdomhearts": 48570, + "kingdoms": 43890, + "kingfisher": 34330, + "kingjames": 33153, + "kingly": 33642, + "kingof": 27878, + "kings": 18590, + "kings": 4232, + "kingsley": 41807, + "kingston": 40736, + "kingston": 15393, + "kini": 41644, + "kinky": 37006, + "kinney": 37233, + "kino": 39000, + "kins": 31060, + "kins": 4386, + "kinson": 12095, + "kio": 28210, + "kio": 
39401, + "kiosk": 39146, + "kip": 27636, + "kip": 15986, + "kipp": 43329, + "kir": 3476, + "kir": 32949, + "kira": 33038, + "kiran": 43234, + "kiran": 36603, + "kirby": 17065, + "kiri": 34170, + "kiri": 45826, + "kirk": 10639, + "kirk": 11508, + "kirkland": 43061, + "kiro": 39749, + "kirstel": 46483, + "kirsten": 31813, + "kirsty": 37787, + "kis": 3199, + "kis": 22796, + "kish": 25662, + "kiss": 43757, + "kiss": 5946, + "kissed": 22561, + "kisses": 47876, + "kisses": 11220, + "kissing": 18637, + "kistan": 29580, + "kit": 4566, + "kit": 4274, + "kita": 29961, + "kitch": 3850, + "kitchen": 18131, + "kitchen": 4485, + "kitchener": 34428, + "kitchens": 28301, + "kite": 47777, + "kite": 19867, + "kites": 45829, + "kits": 13730, + "kitt": 10840, + "kitten": 13063, + "kittens": 17216, + "kitties": 36013, + "kitty": 25067, + "kitty": 8417, + "kiwan": 38709, + "kiwanis": 46513, + "kiwi": 22440, + "kiwis": 48108, + "kiya": 41610, + "kj": 27385, + "kj": 28238, + "kja": 41048, + "kjv": 37387, + "kk": 4390, + "kk": 10849, + "kka": 19002, + "kke": 44239, + "kker": 32399, + "kki": 44672, + "kkk": 20073, + "kkkk": 15834, + "kkkk": 47160, + "kkkkkkkk": 31042, + "kko": 43965, + "kkr": 40855, + "kl": 8498, + "kl": 14134, + "kla": 11249, + "klan": 46935, + "klar": 41374, + "klaus": 31788, + "kle": 7612, + "kle": 7432, + "klein": 33475, + "klein": 17579, + "kley": 18594, + "kli": 31640, + "klin": 44809, + "klin": 41647, + "kline": 47580, + "kling": 40270, + "klm": 38859, + "klo": 15296, + "klopp": 26446, + "kltu": 25978, + "klu": 21852, + "kly": 45090, + "km": 29954, + "km": 4590, + "kman": 33312, + "kms": 24996, + "kn": 4825, + "kn": 23693, + "knapp": 33945, + "kne": 6358, + "knee": 9897, + "knees": 19115, + "kner": 31578, + "knew": 5009, + "kni": 6312, + "knick": 33286, + "knicks": 17657, + "knife": 44176, + "knife": 8960, + "knigh": 43099, + "knight": 17949, + "knight": 7355, + "knights": 10385, + "knit": 18745, + "knit": 14313, + "knitted": 28151, + "knitting": 18863, + "knives": 20910, + "kno": 1482, + "kno": 25362, + "knob": 29736, + "knobs": 47504, + "knock": 14195, + "knock": 11583, + "knocked": 15325, + "knocking": 20380, + "knockout": 22602, + "knocks": 24296, + "knoll": 43882, + "knot": 18412, + "knots": 32428, + "know": 4179, + "know": 1038, + "knowing": 9267, + "knowledge": 27864, + "knowledge": 5510, + "knowledgeable": 43391, + "knowles": 32631, + "known": 3102, + "knows": 4309, + "knowyour": 30773, + "knox": 18630, + "knox": 21833, + "knoxville": 23232, + "knu": 14812, + "knuck": 21333, + "knuckle": 42023, + "knuckles": 40127, + "knw": 40803, + "ko": 1313, + "ko": 2448, + "koala": 36654, + "kobe": 42644, + "kobe": 14470, + "kobo": 42390, + "koch": 25331, + "kochi": 36710, + "kodak": 30425, + "kodi": 46611, + "kof": 17528, + "koff": 47303, + "kofi": 40400, + "koh": 13379, + "koh": 31216, + "kohl": 48479, + "kohli": 17549, + "koi": 28150, + "kojima": 46419, + "kok": 32045, + "kok": 11225, + "koko": 42426, + "koko": 40003, + "kol": 7142, + "kol": 31023, + "kolkata": 18011, + "kom": 6686, + "kom": 24181, + "kombat": 29670, + "kombucha": 48615, + "komo": 31820, + "kon": 5743, + "kon": 29519, + "kona": 30203, + "kong": 31784, + "kong": 6506, + "konstant": 46583, + "koo": 12225, + "koo": 40472, + "kook": 16003, + "kool": 36755, + "kool": 26444, + "kop": 16623, + "kop": 38999, + "kor": 6428, + "kor": 24175, + "kore": 3919, + "korea": 5915, + "korean": 31949, + "korean": 8034, + "kori": 42842, + "korn": 45412, + "korn": 31492, + "kors": 34535, + "kos": 47438, + "kos": 22951, + "kosh": 45233, + "kosher": 
36502, + "koso": 23892, + "kosovo": 28343, + "kot": 23323, + "kot": 20701, + "kota": 21735, + "koto": 40945, + "koto": 29977, + "kou": 18502, + "kou": 39614, + "kour": 34134, + "kov": 17733, + "kov": 15156, + "kova": 26185, + "koval": 47903, + "kovic": 16886, + "kovich": 44794, + "kovsky": 33384, + "kow": 29764, + "kow": 23919, + "kowski": 17649, + "koz": 29598, + "kp": 16174, + "kp": 16894, + "kpa": 38759, + "kph": 41138, + "kpk": 42094, + "kpmg": 38243, + "kpop": 29534, + "kpop": 15859, + "kprc": 47832, + "kprs": 46253, + "kr": 7309, + "kr": 14107, + "kra": 5762, + "kraft": 28057, + "kraja": 29016, + "kraken": 48408, + "krakow": 40033, + "kram": 19075, + "kramer": 27495, + "kran": 33243, + "kranti": 47969, + "krat": 30470, + "kre": 8362, + "kreme": 43140, + "kremlin": 33979, + "kri": 3679, + "kris": 35251, + "kris": 12261, + "krish": 11487, + "krishna": 15863, + "krishnan": 46535, + "krispy": 49292, + "krist": 16490, + "kristen": 28881, + "kristen": 16644, + "kristi": 26895, + "kristin": 35408, + "kristin": 26785, + "kristina": 33180, + "krit": 36265, + "kro": 16193, + "kroger": 36344, + "kron": 25999, + "kru": 10609, + "kruger": 32948, + "krun": 43084, + "kry": 13995, + "krystal": 36554, + "ks": 10470, + "ks": 662, + "ksa": 25439, + "ksh": 36594, + "kst": 17420, + "kstate": 48590, + "ksu": 43496, + "kswx": 36180, + "kt": 17238, + "kt": 7792, + "ktm": 33989, + "ktn": 42170, + "kton": 37848, + "kts": 48577, + "ktv": 36444, + "ku": 1836, + "ku": 4827, + "kuala": 30336, + "kubball": 48995, + "kuber": 41336, + "kubernetes": 45144, + "kubrick": 37032, + "kuch": 39394, + "kud": 40818, + "kudos": 14481, + "kul": 11325, + "kul": 31514, + "kum": 18086, + "kum": 28148, + "kuma": 43139, + "kuma": 33920, + "kumar": 22329, + "kumar": 7674, + "kumb": 31391, + "kun": 6849, + "kun": 21842, + "kung": 39656, + "kung": 22347, + "kunst": 37881, + "kup": 39023, + "kups": 27240, + "kur": 4862, + "kurdi": 23504, + "kurdish": 21644, + "kurdistan": 24459, + "kurds": 20888, + "kuri": 46375, + "kuro": 28239, + "kuro": 47826, + "kurt": 31903, + "kurt": 14527, + "kus": 27618, + "kus": 27505, + "kush": 22264, + "kush": 24594, + "kushner": 36716, + "kut": 17283, + "kut": 36965, + "kuwait": 19679, + "kuya": 34815, + "kuz": 33253, + "kv": 27594, + "kv": 34249, + "kw": 10072, + "kw": 18339, + "kwa": 32784, + "kwa": 48576, + "kwame": 46681, + "kwan": 37100, + "kwan": 39447, + "kwang": 40260, + "kwe": 26050, + "kwi": 35327, + "kwon": 36369, + "kx": 28190, + "kx": 46442, + "ky": 2018, + "ky": 2383, + "kya": 29142, + "kyc": 37758, + "kyiv": 36422, + "kyle": 15847, + "kyle": 7539, + "kylie": 28282, + "kylie": 17983, + "kyliejenner": 47232, + "kylo": 47704, + "kyo": 13150, + "kyo": 6281, + "kyoto": 23223, + "kyr": 26329, + "kyrgy": 40013, + "kyrgyz": 48346, + "kyrie": 21857, + "kyu": 28296, + "kyu": 25490, + "kyuhyun": 37229, + "kyung": 41058, + "kyungsoo": 30280, + "kywx": 39940, + "kz": 48743, + "kz": 36848, + "kzn": 38264, + "kö": 32437, + "l": 75, + "l": 331, + "la": 572, + "la": 1210, + "laa": 44642, + "lab": 3537, + "lab": 4352, + "labe": 25749, + "label": 12235, + "label": 9093, + "labeled": 32720, + "labeling": 36825, + "labelled": 45188, + "labels": 17413, + "lable": 31879, + "labor": 11201, + "labor": 7878, + "laboratories": 43421, + "laboratory": 17664, + "laborday": 39324, + "labou": 32700, + "labour": 19586, + "labour": 6019, + "labourdoorstep": 37008, + "labout": 35961, + "labra": 37067, + "labrador": 25409, + "labs": 12021, + "laby": 29131, + "labyrin": 31782, + "labyrinth": 35594, + "lac": 4477, + "lac": 16189, 
+ "lace": 30012, + "lace": 5421, + "laced": 36800, + "laces": 23281, + "lacey": 31754, + "lach": 30558, + "lack": 24915, + "lack": 8069, + "lacking": 30080, + "lacks": 34388, + "laco": 45882, + "lacrosse": 12915, + "lacy": 38645, + "lad": 15991, + "lad": 10707, + "ladak": 42312, + "ladakh": 45295, + "ladder": 16637, + "ladders": 47125, + "lade": 26447, + "laden": 28634, + "ladi": 12934, + "ladies": 28932, + "ladies": 3431, + "lads": 9803, + "lady": 7275, + "lady": 2909, + "ladybird": 43389, + "ladybug": 40038, + "ladygaga": 21232, + "laf": 47555, + "lafayette": 22683, + "lag": 30932, + "lag": 20394, + "laga": 30161, + "lage": 24369, + "lager": 36811, + "lager": 22989, + "lagh": 37237, + "laghate": 47565, + "laghateparth": 48780, + "lagi": 39786, + "lago": 42698, + "lago": 31476, + "lagoon": 22753, + "lagos": 12728, + "lagun": 18500, + "laguna": 23609, + "lah": 27315, + "lah": 4299, + "lahat": 42164, + "lahore": 16733, + "lai": 23947, + "laid": 42560, + "laid": 11160, + "lain": 46958, + "lain": 17151, + "laine": 35860, + "lair": 31981, + "lais": 34923, + "lak": 12890, + "lak": 26793, + "lake": 6441, + "lake": 2553, + "lakedistrict": 26437, + "lakel": 26133, + "lakeland": 34306, + "laker": 45717, + "lakers": 13570, + "lakes": 9265, + "lakeshore": 42595, + "lakeside": 30915, + "lakewood": 36417, + "lakh": 21487, + "lakhs": 37985, + "lakings": 34289, + "lakota": 45510, + "laksh": 24937, + "lakshmi": 39682, + "lal": 12301, + "lal": 19430, + "lala": 33661, + "lali": 21726, + "laliga": 32383, + "lam": 2022, + "lam": 5704, + "lama": 26049, + "lamar": 28678, + "lamar": 17284, + "lamb": 19863, + "lamb": 10034, + "lambda": 36687, + "lambert": 14574, + "lambeth": 43410, + "lambo": 45464, + "lamborgh": 18709, + "lamborghini": 19462, + "lambs": 30361, + "lame": 23192, + "lamin": 22337, + "laminated": 49079, + "lamo": 41461, + "lamont": 46719, + "lamp": 26700, + "lamp": 10725, + "lampard": 39989, + "lamps": 23424, + "lan": 1193, + "lan": 4872, + "lana": 15406, + "lanapar": 47437, + "lanaparrilla": 47819, + "lanc": 11872, + "lanca": 15694, + "lancashire": 20939, + "lancaster": 16446, + "lance": 26025, + "lance": 11609, + "lancer": 38195, + "lancers": 46392, + "lancia": 48698, + "lancs": 47540, + "land": 1567, + "land": 973, + "lande": 36556, + "landed": 9873, + "lander": 37247, + "lander": 9666, + "landers": 20019, + "landfall": 38465, + "landfill": 34947, + "landia": 41384, + "landing": 8292, + "landings": 46104, + "landlord": 28938, + "landlords": 35283, + "landmark": 15208, + "landmarks": 30393, + "lando": 25463, + "lando": 7065, + "landon": 32748, + "landrover": 38125, + "landry": 36137, + "lands": 40223, + "lands": 2961, + "landsc": 4384, + "landscape": 21123, + "landscape": 5727, + "landscapephotography": 28125, + "landscapes": 15344, + "landscaping": 25642, + "landslide": 31954, + "lane": 25534, + "lane": 3980, + "lanes": 10345, + "laney": 38552, + "lang": 7969, + "lang": 8578, + "lange": 32021, + "langford": 45615, + "langley": 28595, + "langu": 4095, + "language": 46103, + "language": 4781, + "languages": 13527, + "lani": 22964, + "lanka": 16221, + "lankan": 40531, + "lannister": 49056, + "lans": 43550, + "lansing": 30805, + "lant": 44504, + "lanta": 44768, + "lantern": 17185, + "lanterns": 33676, + "lantic": 32601, + "lantic": 27678, + "lants": 38425, + "lanyard": 46808, + "lao": 32475, + "lao": 29521, + "laos": 34353, + "lap": 7213, + "lap": 8639, + "lapd": 32557, + "lapel": 47961, + "lapland": 43633, + "laps": 18711, + "lapse": 33365, + "laptop": 10464, + "laptops": 32189, + "laq": 45026, + 
"lar": 1592, + "lar": 1652, + "lara": 19435, + "lard": 40347, + "lare": 22415, + "laredo": 48427, + "large": 40234, + "large": 3638, + "largely": 21418, + "larger": 12567, + "largest": 4960, + "largo": 44161, + "lari": 34676, + "lark": 43164, + "lark": 23536, + "larkin": 34769, + "larry": 18642, + "larry": 8242, + "lars": 8669, + "larsen": 39721, + "larson": 27973, + "larvae": 44840, + "las": 8295, + "las": 2552, + "lasag": 31210, + "lasagna": 40683, + "lasalle": 43866, + "laser": 25607, + "laser": 9885, + "lasers": 37060, + "lash": 31995, + "lash": 18480, + "lashes": 21015, + "lass": 24203, + "lass": 18263, + "lassic": 39430, + "last": 10600, + "last": 952, + "lasted": 25711, + "lasting": 13434, + "lastnight": 30159, + "lasts": 20141, + "lasvegas": 17789, + "lat": 1591, + "lat": 28437, + "lata": 47114, + "latam": 40012, + "late": 13267, + "late": 2325, + "latel": 49035, + "lately": 11824, + "latepost": 48328, + "later": 24109, + "later": 2941, + "lateral": 26646, + "latest": 46805, + "latest": 2053, + "latex": 27520, + "lati": 16357, + "latimes": 43356, + "latin": 16695, + "latin": 9888, + "latina": 27936, + "latino": 45734, + "latino": 19470, + "latinos": 40233, + "lation": 6191, + "latitude": 37392, + "lative": 15719, + "lator": 9291, + "lators": 28278, + "latt": 33561, + "latte": 17697, + "latter": 26198, + "latvia": 30034, + "lau": 1853, + "lau": 23090, + "lauderdale": 24352, + "laugh": 4969, + "laugh": 6332, + "laughed": 16746, + "laughing": 8301, + "laughs": 14322, + "laughter": 10722, + "laun": 2944, + "launch": 31168, + "launch": 2904, + "launched": 6125, + "launcher": 35782, + "launches": 7023, + "launching": 8565, + "laundering": 34079, + "laundry": 14797, + "laur": 15256, + "laura": 17091, + "laura": 7763, + "laure": 16932, + "laureate": 25675, + "laurel": 43370, + "laurel": 19942, + "lauren": 10456, + "lauren": 7634, + "laurence": 29353, + "laurent": 23226, + "laurie": 20326, + "laus": 38895, + "laus": 28111, + "lause": 22269, + "laut": 47688, + "lav": 13767, + "lav": 26919, + "lava": 16765, + "laven": 15047, + "lavender": 16033, + "laver": 28188, + "lavish": 35443, + "law": 2874, + "law": 2606, + "lawful": 33845, + "lawler": 47862, + "lawless": 39468, + "lawmaker": 37169, + "lawmakers": 21190, + "lawn": 31675, + "lawn": 11024, + "lawrence": 32221, + "lawrence": 8820, + "laws": 7306, + "lawson": 22152, + "lawsuit": 14346, + "lawsuits": 44331, + "lawyer": 10552, + "lawyers": 14232, + "lax": 17750, + "lax": 10024, + "lay": 7205, + "lay": 6360, + "laye": 25995, + "layer": 12411, + "layered": 28520, + "layers": 15900, + "laying": 12333, + "layla": 45050, + "layne": 48721, + "layo": 21738, + "layoffs": 29019, + "layout": 17314, + "lays": 19546, + "layton": 38061, + "laz": 18806, + "lazar": 33075, + "lazarus": 49126, + "laze": 41559, + "lazer": 43735, + "lazio": 33010, + "lazy": 32614, + "lazy": 10753, + "lb": 21958, + "lb": 7422, + "lbc": 37694, + "lbj": 45683, + "lbloggers": 48695, + "lbs": 8912, + "lc": 9584, + "lc": 7225, + "lcd": 21356, + "lcfc": 25339, + "lcs": 32279, + "ld": 1431, + "ld": 730, + "lder": 6945, + "lders": 43221, + "ldn": 37050, + "ldn": 2517, + "ldnont": 25827, + "ldnt": 21690, + "ldr": 37279, + "lds": 31235, + "le": 534, + "le": 579, + "lea": 2246, + "lea": 13324, + "leach": 35527, + "lead": 1328, + "lead": 2784, + "leader": 14806, + "leader": 3236, + "leaderboard": 34519, + "leaders": 3546, + "leadership": 36876, + "leadership": 3652, + "leading": 3833, + "leads": 5335, + "leaf": 9377, + "leaf": 7232, + "leaflet": 38289, + "leaflets": 39014, + "leafs": 16688, 
+ "leafy": 42616, + "leagu": 13317, + "league": 16635, + "league": 2313, + "leagueof": 26022, + "leagueoflegends": 31737, + "leagues": 19888, + "leah": 24350, + "leah": 19308, + "leak": 42900, + "leak": 15489, + "leaked": 14353, + "leaking": 34097, + "leaks": 15657, + "leam": 39606, + "lean": 12447, + "lean": 8208, + "leaning": 24411, + "leanne": 41448, + "leans": 9357, + "leap": 29129, + "leap": 15392, + "leaps": 48080, + "lear": 1146, + "lear": 27663, + "learn": 16959, + "learn": 1768, + "learned": 6048, + "learnenglish": 49040, + "learner": 33547, + "learners": 19572, + "learning": 22632, + "learning": 2378, + "learns": 17569, + "learnt": 18959, + "leary": 36051, + "lease": 49041, + "lease": 14394, + "leased": 48352, + "leash": 36192, + "leasing": 29160, + "least": 3651, + "leather": 21417, + "leather": 5862, + "leau": 26498, + "leav": 3198, + "leave": 37512, + "leave": 3258, + "leaves": 5579, + "leaving": 5216, + "leban": 9360, + "lebanese": 23819, + "lebanon": 11695, + "leblanc": 46381, + "lebo": 44184, + "lebron": 11971, + "lebu": 47030, + "lec": 944, + "lec": 35374, + "leche": 46197, + "lect": 45392, + "lection": 18252, + "lections": 30995, + "lecture": 6617, + "lecturer": 23795, + "lectures": 21118, + "led": 8767, + "led": 912, + "ledge": 23647, + "ledge": 4815, + "ledger": 26817, + "leds": 36763, + "lee": 6224, + "lee": 2592, + "leed": 16483, + "leed": 40206, + "leeds": 38900, + "leeds": 7420, + "leek": 34585, + "leeminho": 37831, + "leen": 35311, + "leen": 15940, + "leep": 48875, + "leep": 10191, + "lees": 29324, + "lees": 34056, + "lef": 9152, + "left": 33949, + "left": 1823, + "leftist": 35143, + "lefto": 17437, + "leftover": 26414, + "leftovers": 28481, + "lefty": 33935, + "leg": 1211, + "leg": 4924, + "lega": 38674, + "legacy": 44108, + "legacy": 6447, + "legal": 17743, + "legal": 3998, + "legalization": 40584, + "legalize": 42921, + "legally": 14152, + "legate": 46009, + "lege": 8065, + "legen": 6105, + "legend": 5480, + "legend": 3539, + "legendary": 6053, + "legendof": 47915, + "legends": 6396, + "leges": 15356, + "legg": 18474, + "legg": 32511, + "legged": 25830, + "leggings": 22895, + "leggo": 43441, + "legi": 11183, + "legion": 35503, + "legion": 14525, + "legis": 7200, + "legislat": 16486, + "legislation": 14143, + "legislative": 16755, + "legislators": 31572, + "legislature": 22309, + "legit": 12563, + "legitim": 17656, + "legitimate": 24491, + "lego": 28117, + "lego": 7849, + "legos": 45359, + "legs": 7072, + "leh": 19105, + "leh": 29298, + "lehead": 28090, + "lehigh": 34527, + "lehman": 46094, + "lei": 15828, + "lei": 21830, + "leia": 32723, + "leic": 35073, + "leica": 30206, + "leice": 10026, + "leicester": 28795, + "leicester": 11510, + "leicestershire": 45358, + "leigh": 14849, + "leigh": 9292, + "leighton": 30782, + "leila": 41342, + "lein": 20026, + "lein": 28551, + "leinster": 32242, + "leip": 36401, + "leipzig": 41860, + "leis": 13133, + "leisure": 15849, + "leit": 35446, + "leith": 34141, + "lek": 26626, + "lek": 36535, + "lel": 46623, + "lele": 26075, + "lem": 10213, + "lem": 8428, + "leman": 24478, + "lemans": 26694, + "lement": 9693, + "lements": 15833, + "lemme": 23318, + "lemon": 12272, + "lemon": 7184, + "lemonade": 18884, + "lemons": 29576, + "lemore": 41147, + "len": 3687, + "len": 2159, + "lena": 22038, + "lend": 45397, + "lend": 24987, + "lender": 44734, + "lenders": 42443, + "lending": 20209, + "lene": 17628, + "leness": 36551, + "leng": 7861, + "length": 10130, + "lengths": 31858, + "lengthy": 32624, + "lenin": 41760, + "lennon": 18360, + 
"lennox": 45748, + "lenny": 48448, + "lenny": 30124, + "leno": 45357, + "lenovo": 25886, + "lens": 8666, + "lenses": 21264, + "lent": 20943, + "lent": 22605, + "lentil": 41511, + "lentils": 44269, + "leo": 24008, + "leo": 8312, + "leon": 6581, + "leon": 9763, + "leonard": 43849, + "leonard": 13142, + "leonardo": 20282, + "leone": 22864, + "leop": 11234, + "leopard": 15931, + "leopards": 40996, + "leopold": 45501, + "lep": 48884, + "leppard": 41656, + "lepre": 45641, + "ler": 5587, + "ler": 1803, + "lero": 15067, + "lerosis": 35455, + "leroy": 32441, + "lers": 6247, + "lery": 38184, + "les": 4339, + "les": 840, + "lesbian": 17419, + "lesbians": 43182, + "lesh": 32282, + "lesley": 25506, + "lesli": 13649, + "leslie": 16244, + "lesn": 39568, + "lesnar": 42223, + "less": 3242, + "less": 1285, + "lesser": 20369, + "lessly": 13103, + "lessness": 24847, + "lesson": 7714, + "lessons": 7199, + "lest": 24372, + "lest": 6794, + "lester": 23157, + "lester": 24023, + "lestwe": 29726, + "lestweforget": 30273, + "let": 1898, + "let": 1094, + "leta": 34319, + "lete": 34078, + "letes": 6815, + "leth": 30022, + "leth": 42462, + "lethal": 21905, + "lethbridge": 48390, + "leti": 34176, + "letics": 14504, + "letit": 46423, + "leto": 32203, + "leton": 37674, + "leton": 7462, + "lets": 10448, + "lets": 3243, + "letsgo": 16967, + "letsgo": 29789, + "letstalk": 35591, + "lett": 22428, + "lett": 9778, + "lette": 41798, + "lette": 10301, + "letter": 15567, + "letter": 4861, + "lettering": 26382, + "letterman": 38447, + "letters": 9181, + "letting": 9510, + "letto": 35449, + "lettu": 17933, + "lettuce": 18573, + "leu": 15691, + "leuke": 31031, + "leukemia": 32097, + "leum": 21571, + "leur": 45806, + "lev": 17022, + "lev": 29950, + "levan": 42543, + "leve": 36271, + "level": 21682, + "level": 2931, + "leveled": 48453, + "levels": 6295, + "leven": 44792, + "leven": 34729, + "lever": 20178, + "lever": 23094, + "leverage": 24030, + "leveraging": 37948, + "levi": 25630, + "levi": 19113, + "leviathan": 41736, + "levin": 36949, + "levine": 26594, + "levit": 22715, + "levy": 17147, + "lew": 5063, + "lew": 25329, + "lewan": 48349, + "lewd": 45241, + "lewes": 40431, + "lewi": 19589, + "lewis": 22043, + "lewis": 6020, + "lewisham": 37385, + "lewisham": 47633, + "lewishamilton": 42960, + "lewood": 37951, + "lex": 6586, + "lex": 9658, + "lexa": 48259, + "lexi": 44231, + "lexi": 24679, + "lexington": 22308, + "lexus": 20694, + "ley": 2565, + "ley": 1066, + "leye": 37061, + "leys": 45609, + "leys": 14834, + "leyton": 46573, + "lez": 26442, + "lf": 33960, + "lf": 22078, + "lfc": 37826, + "lfc": 8267, + "lfw": 28514, + "lg": 4546, + "lg": 11368, + "lga": 39348, + "lgb": 25401, + "lgbt": 11743, + "lgbt": 9592, + "lgbti": 42730, + "lgbtq": 47625, + "lgbtq": 14939, + "lgm": 39389, + "lh": 27794, + "lh": 31159, + "lhp": 45092, + "lhs": 33170, + "li": 554, + "li": 4250, + "lia": 26118, + "lia": 6964, + "liability": 29139, + "liaison": 39294, + "liam": 5258, + "liam": 7167, + "lian": 18058, + "liance": 40864, + "liar": 16334, + "liars": 23863, + "lias": 46021, + "lib": 10249, + "lib": 13345, + "libby": 36832, + "libdems": 40869, + "liber": 3425, + "liberal": 48032, + "liberal": 9985, + "liberalism": 40018, + "liberals": 15981, + "liberated": 38690, + "liberation": 19507, + "liberia": 32208, + "libertarian": 35067, + "liberties": 48623, + "liberty": 23397, + "liberty": 8480, + "libr": 2856, + "libra": 43038, + "librarian": 25148, + "librarians": 37806, + "libraries": 14277, + "library": 25713, + "library": 3519, + "libre": 49210, + "libre": 
31681, + "libs": 26401, + "liby": 36390, + "libya": 16417, + "libyan": 42319, + "lic": 2508, + "lic": 3376, + "lice": 45691, + "licen": 6706, + "licence": 20550, + "license": 10337, + "licensed": 18752, + "licenses": 36414, + "licensing": 24219, + "lich": 23979, + "lich": 25875, + "lick": 29197, + "lick": 17541, + "licking": 33013, + "licks": 42117, + "lics": 44552, + "lid": 39369, + "lid": 17678, + "lidge": 45558, + "lido": 35683, + "lids": 41609, + "lie": 6570, + "lie": 2538, + "lieb": 45387, + "liebe": 37749, + "lied": 6486, + "lief": 38428, + "lien": 45716, + "lier": 3626, + "liers": 19303, + "lies": 37236, + "lies": 3205, + "liest": 14020, + "liet": 41107, + "lieu": 20401, + "lieu": 35313, + "lieutenant": 22538, + "lif": 16456, + "life": 2666, + "life": 970, + "lifeat": 27801, + "lifeboat": 37404, + "lifecycle": 49171, + "lifein": 48447, + "lifeis": 24824, + "lifeisgood": 46433, + "lifel": 15025, + "lifeline": 38438, + "lifelong": 21358, + "lifeof": 36061, + "lifesaving": 48016, + "lifespan": 49257, + "lifestyle": 46512, + "lifestyle": 7037, + "lifestyles": 48521, + "lifetime": 48737, + "lifetime": 9107, + "liff": 34404, + "liffe": 38942, + "lift": 33146, + "lift": 6779, + "lifted": 16783, + "lifter": 38555, + "lifting": 10857, + "lifts": 18291, + "lig": 19915, + "lig": 38493, + "liga": 16802, + "ligam": 31077, + "ligament": 48705, + "ligan": 27962, + "ligans": 42133, + "ligh": 7510, + "light": 3885, + "light": 1395, + "lighted": 18404, + "lighten": 32717, + "lightening": 28170, + "lighter": 14102, + "lighthouse": 13717, + "lighting": 5799, + "lightly": 26878, + "lightning": 7756, + "lightroom": 41454, + "lights": 3073, + "lightweight": 16278, + "ligu": 42920, + "ligue": 29196, + "lik": 4831, + "lik": 18495, + "like": 9175, + "like": 789, + "liked": 7112, + "likefor": 48444, + "likeli": 40666, + "likelihood": 48158, + "likely": 5256, + "liken": 36084, + "likes": 4724, + "liking": 16810, + "lil": 6012, + "lil": 4461, + "lilac": 33647, + "lili": 26686, + "lili": 48411, + "lilies": 38110, + "lillard": 47016, + "lille": 38705, + "lilli": 40920, + "lillian": 41563, + "lilly": 47825, + "lilly": 21815, + "lily": 23803, + "lily": 10647, + "lim": 2377, + "lim": 17204, + "lima": 17589, + "limb": 27061, + "limb": 32363, + "limbo": 46179, + "limbs": 34886, + "lime": 17385, + "lime": 11193, + "limel": 48658, + "limer": 16915, + "limerick": 19501, + "limestone": 27272, + "limit": 18933, + "limit": 9973, + "limitations": 32730, + "limited": 49229, + "limited": 3472, + "limiting": 35812, + "limitless": 35833, + "limits": 11966, + "limo": 33166, + "limous": 47287, + "limpopo": 47175, + "lin": 1254, + "lin": 2424, + "lina": 26110, + "lincol": 6239, + "lincoln": 16957, + "lincoln": 7454, + "lincolnshire": 29014, + "lind": 6492, + "linda": 45410, + "linda": 10760, + "linden": 44076, + "linden": 34832, + "lindo": 38467, + "lindsay": 29846, + "lindsay": 16858, + "lindsey": 29475, + "lindsey": 18128, + "line": 3674, + "line": 1148, + "linear": 19816, + "linebacker": 29848, + "lined": 11842, + "lineman": 31501, + "linen": 20032, + "liner": 11618, + "liners": 24463, + "lines": 3418, + "liness": 28633, + "lineup": 7316, + "lineups": 33589, + "ling": 4851, + "ling": 1358, + "linger": 29593, + "lingerie": 18473, + "lingering": 46494, + "lings": 11390, + "lington": 27673, + "lington": 9002, + "lingu": 34449, + "lingui": 29942, + "linguistic": 46847, + "linguistics": 48651, + "lining": 11589, + "link": 18433, + "link": 2468, + "linke": 15088, + "linked": 11059, + "linkedin": 16302, + "linkin": 40287, + "linkin": 
49291, + "linking": 23296, + "links": 8113, + "linn": 37431, + "lino": 41189, + "lino": 34995, + "lins": 6567, + "linson": 15401, + "linton": 36479, + "linus": 49303, + "linux": 14061, + "lio": 19395, + "lion": 8872, + "lion": 5567, + "lionel": 19441, + "lions": 7093, + "lip": 8630, + "lip": 8546, + "lipo": 38795, + "lipp": 38074, + "lips": 8847, + "lipse": 10351, + "lipstick": 15618, + "liqu": 6310, + "lique": 32680, + "liqueur": 43612, + "liqui": 33817, + "liquid": 18366, + "liquid": 10158, + "liquidity": 42812, + "liquor": 17828, + "lis": 7297, + "lis": 12749, + "lisa": 25236, + "lisa": 7424, + "lisam": 43072, + "lisboa": 40052, + "lisbon": 17708, + "lish": 12658, + "lish": 2354, + "lished": 22620, + "lisle": 21529, + "lism": 34390, + "liss": 45489, + "liss": 35433, + "lisse": 49309, + "list": 1734, + "list": 1998, + "lista": 37812, + "listed": 6457, + "listen": 17454, + "listen": 2672, + "listened": 15347, + "listener": 34819, + "listeners": 26901, + "listening": 3656, + "listens": 25912, + "lister": 45109, + "listing": 8145, + "listings": 21987, + "liston": 48041, + "lists": 12281, + "lit": 2213, + "lit": 4350, + "lita": 30100, + "lite": 29273, + "lite": 13694, + "litecoin": 39063, + "liter": 3085, + "liter": 34904, + "literacy": 12841, + "literal": 24269, + "literally": 4719, + "literary": 13586, + "literature": 11072, + "litfest": 40369, + "lith": 37005, + "lithium": 22794, + "litho": 31088, + "lithograph": 49022, + "lithu": 21045, + "lithuania": 27068, + "liti": 24292, + "litigation": 31769, + "lito": 47381, + "litre": 25786, + "litres": 39919, + "litt": 1216, + "litt": 47583, + "litter": 45431, + "litter": 17118, + "litters": 45300, + "little": 7024, + "little": 1274, + "littlemix": 29731, + "littlest": 48969, + "litur": 36830, + "litz": 30357, + "liu": 20466, + "liv": 13895, + "liv": 19901, + "livan": 12785, + "live": 3215, + "live": 1064, + "lived": 8867, + "livel": 17973, + "liveli": 26566, + "livelihood": 46497, + "livelihoods": 47716, + "lively": 19663, + "liveme": 35396, + "livemusic": 15688, + "liven": 41057, + "liveon": 22815, + "livepd": 38742, + "livepd": 31899, + "liver": 4755, + "liver": 12639, + "liverpool": 29778, + "liverpool": 5366, + "livery": 23248, + "lives": 3247, + "livesmatter": 20348, + "livestock": 22079, + "livestream": 16844, + "livetweet": 38546, + "livin": 28061, + "living": 10965, + "living": 2815, + "livingston": 30551, + "lix": 45068, + "liz": 8632, + "liz": 12242, + "liza": 28787, + "lizard": 17221, + "lizards": 41991, + "lizasober": 44487, + "lizasoberano": 45076, + "lizz": 34430, + "lizzie": 29530, + "lizzy": 32306, + "lj": 34211, + "lj": 32273, + "lju": 44562, + "lk": 39110, + "lk": 26596, + "lka": 21881, + "ll": 1657, + "ll": 865, + "lla": 15419, + "llama": 36679, + "llan": 17281, + "llan": 38728, + "lland": 31150, + "llc": 17161, + "lle": 26550, + "lle": 29732, + "llen": 41197, + "ller": 7722, + "llers": 26426, + "lli": 47015, + "lli": 13368, + "llis": 25518, + "lll": 27177, + "llll": 34874, + "llll": 43485, + "llo": 19293, + "lloy": 10092, + "lloyd": 33339, + "lloyd": 12400, + "llp": 28042, + "lls": 40535, + "lly": 26379, + "lm": 6981, + "lm": 15282, + "lma": 4493, + "lmao": 5121, + "lmaoo": 32623, + "lmaooo": 33362, + "lmaoooo": 45232, + "lmfa": 8928, + "lmfao": 11068, + "lmfaooo": 47658, + "lmp": 43575, + "lms": 30381, + "ln": 31644, + "ln": 18654, + "lng": 22339, + "lnp": 39679, + "lo": 549, + "lo": 2982, + "loa": 39678, + "load": 4515, + "load": 2834, + "loaded": 6756, + "loader": 28492, + "loading": 9975, + "loads": 8691, + "loaf": 26467, 
+ "loaf": 18273, + "loan": 28431, + "loan": 8176, + "loans": 14206, + "lob": 11197, + "lob": 46606, + "lobal": 34574, + "lobb": 27698, + "lobby": 12449, + "lobbying": 36047, + "lobe": 46325, + "lobes": 24148, + "lobo": 39323, + "lobos": 36586, + "lobster": 13793, + "loc": 1378, + "loc": 25826, + "local": 9202, + "local": 2029, + "localized": 49399, + "locally": 15603, + "locals": 15041, + "locate": 20490, + "located": 5677, + "location": 4372, + "locations": 9580, + "loch": 20188, + "loch": 14101, + "lock": 7201, + "lock": 4381, + "lockdown": 35636, + "locke": 29698, + "locked": 8371, + "locker": 14053, + "lockhart": 48642, + "lockheed": 36637, + "locking": 19978, + "locks": 13212, + "lockscreen": 42439, + "loco": 25555, + "locom": 22798, + "locomo": 46147, + "locomotive": 30439, + "locu": 33635, + "locust": 46237, + "lod": 45650, + "lodge": 10504, + "loe": 30113, + "loe": 25484, + "loeb": 49334, + "lof": 15011, + "loff": 31008, + "loft": 35707, + "loft": 20049, + "loftus": 46689, + "log": 3239, + "log": 7383, + "logan": 20655, + "logan": 10569, + "logans": 40752, + "logg": 43002, + "logged": 31457, + "logger": 39089, + "logging": 24444, + "logi": 3177, + "logia": 48031, + "logic": 10670, + "logical": 4791, + "logically": 24782, + "logie": 33445, + "logies": 7378, + "login": 31121, + "logist": 7407, + "logistics": 14755, + "logists": 12233, + "logne": 19911, + "logo": 31480, + "logo": 5750, + "logos": 24879, + "logs": 22745, + "logue": 27785, + "logy": 22721, + "logy": 1659, + "loh": 49129, + "loh": 37983, + "loi": 35128, + "loid": 31408, + "loin": 21760, + "loire": 46040, + "lois": 27040, + "lok": 19908, + "lok": 23575, + "loki": 24435, + "lol": 10721, + "lol": 1824, + "lola": 19065, + "lolita": 42615, + "lolla": 45483, + "lolli": 27906, + "lollipop": 34605, + "lolly": 48264, + "lolo": 16895, + "lolo": 37481, + "lolol": 25280, + "lololol": 34738, + "lolz": 35260, + "lom": 9279, + "loma": 42889, + "lombar": 25493, + "lombard": 46461, + "lombardi": 44346, + "lomond": 48941, + "lon": 1235, + "lon": 6507, + "london": 6835, + "london": 1789, + "londonmarathon": 35018, + "lone": 22220, + "lone": 13576, + "lonel": 28872, + "loneliness": 30310, + "lonely": 34509, + "lonely": 12368, + "lonelyplanet": 44984, + "long": 4792, + "long": 1538, + "longe": 25793, + "longer": 5349, + "longest": 10731, + "longevity": 35354, + "longh": 20286, + "longhorn": 41047, + "longhorns": 38295, + "longing": 38482, + "longlive": 47840, + "longs": 43618, + "longtime": 19685, + "loo": 731, + "loo": 11804, + "look": 8874, + "look": 1012, + "lookalike": 38307, + "lookbook": 39184, + "looked": 4913, + "lookin": 11254, + "looking": 36898, + "looking": 1312, + "lookout": 18330, + "looks": 1606, + "lool": 33125, + "loom": 37440, + "loom": 17199, + "looming": 35384, + "looms": 30550, + "loon": 28222, + "loona": 48137, + "looney": 45315, + "looo": 20902, + "loool": 36016, + "looool": 47038, + "looooo": 31484, + "loop": 19606, + "loop": 10408, + "loops": 21625, + "loos": 45723, + "loose": 43815, + "loose": 9786, + "loot": 21518, + "lop": 36734, + "lop": 17066, + "lopes": 49269, + "lopez": 12982, + "lor": 2179, + "lor": 11335, + "lord": 18896, + "lord": 3486, + "lorde": 35483, + "lords": 14969, + "lore": 12880, + "lore": 27218, + "loren": 13602, + "loren": 33398, + "lorenzo": 21342, + "lores": 34510, + "loretta": 40863, + "lori": 20164, + "lori": 23095, + "lorna": 46316, + "lorraine": 27602, + "lorry": 31354, + "los": 32217, + "los": 3087, + "losange": 14037, + "losangeles": 14638, + "lose": 43318, + "lose": 5354, + "loser": 
18168, + "losers": 23201, + "loses": 14263, + "losing": 7918, + "loss": 34761, + "loss": 4327, + "losses": 16909, + "lost": 14258, + "lost": 2624, + "lostdog": 48482, + "lot": 5132, + "lot": 1954, + "loth": 43625, + "lothian": 31360, + "lothing": 42058, + "lotion": 25260, + "lotr": 34165, + "lots": 2958, + "lott": 42854, + "lotta": 29125, + "lotte": 16535, + "lotte": 7274, + "lottery": 16975, + "lottie": 48517, + "lotto": 28265, + "lotus": 13824, + "lou": 2207, + "lou": 9745, + "loubout": 38369, + "loud": 22884, + "loud": 7464, + "louder": 25904, + "loudest": 49214, + "loudly": 39256, + "lough": 21927, + "lough": 28045, + "loughborough": 49153, + "loui": 42173, + "louie": 25790, + "louis": 8916, + "louis": 4459, + "louisa": 40011, + "louise": 32275, + "louise": 13076, + "louisi": 12187, + "louisiana": 12946, + "louisville": 13860, + "louisvuitton": 44911, + "loun": 6466, + "lounge": 7141, + "lounging": 45430, + "lour": 29383, + "lourdes": 45071, + "louvre": 36995, + "lov": 8923, + "lov": 21229, + "lova": 37394, + "lovable": 38565, + "lovato": 18960, + "love": 2618, + "love": 793, + "lovecraft": 42405, + "loved": 3249, + "lovefl": 38884, + "loveher": 38306, + "lovehim": 45733, + "loveis": 30931, + "loveisland": 30970, + "loveislove": 43603, + "loveit": 24764, + "lovel": 8999, + "lovelies": 31412, + "lovelondon": 46493, + "lovely": 33250, + "lovely": 2165, + "lovemy": 20041, + "lovemyjob": 40130, + "loven": 33754, + "lover": 28508, + "lover": 7168, + "lovers": 48416, + "lovers": 5973, + "loves": 37773, + "loves": 3925, + "lovethe": 33040, + "lovethem": 48298, + "lovett": 47095, + "lovewins": 47687, + "loveyou": 39226, + "loveyou": 25964, + "loveyour": 26462, + "lovin": 33442, + "lovin": 16354, + "loving": 29568, + "loving": 3721, + "lovingly": 44100, + "low": 1049, + "low": 1042, + "loway": 16104, + "lowe": 17910, + "lowed": 22733, + "lowell": 24458, + "lower": 32578, + "lower": 4909, + "lowered": 34968, + "lowering": 35261, + "lowers": 36398, + "lowes": 38515, + "lowest": 12098, + "lowing": 8283, + "lowkey": 29481, + "lowry": 27444, + "lows": 4406, + "lox": 41725, + "loy": 4519, + "loy": 23929, + "loyal": 13032, + "loyalty": 14686, + "loyd": 44212, + "loyed": 29279, + "loyment": 18307, + "loyola": 32569, + "lp": 22282, + "lp": 6392, + "lpc": 44092, + "lpg": 47905, + "lpga": 34295, + "lps": 32094, + "lr": 20572, + "lr": 7041, + "lrt": 32996, + "ls": 19051, + "ls": 1268, + "lsd": 43766, + "lse": 46127, + "lse": 43886, + "lsu": 35428, + "lsu": 15672, + "lt": 13642, + "lt": 3333, + "ltc": 27664, + "ltd": 6802, + "lte": 25202, + "lton": 14237, + "lu": 664, + "lu": 9657, + "lub": 22469, + "lub": 11836, + "lubbock": 37660, + "lubric": 40963, + "luc": 7013, + "luc": 28014, + "luca": 21053, + "lucas": 23425, + "lucas": 10225, + "lucci": 45849, + "luce": 46217, + "lucent": 41552, + "lucer": 36042, + "luch": 36646, + "lucha": 38449, + "luci": 8787, + "lucia": 22290, + "luciano": 46365, + "lucid": 44540, + "lucie": 39461, + "lucifer": 46224, + "lucifer": 27687, + "lucille": 47454, + "lucin": 27523, + "luck": 9647, + "luck": 2820, + "luckiest": 42469, + "luckily": 20100, + "lucknow": 29407, + "lucky": 20495, + "lucky": 4133, + "lucrative": 41485, + "lucy": 17262, + "lucy": 10120, + "lud": 14288, + "lude": 28755, + "ludo": 40141, + "ludwig": 30633, + "lue": 45199, + "luf": 25264, + "lufc": 17818, + "luffy": 39047, + "lufthan": 37769, + "lufthansa": 39145, + "lug": 45521, + "lugg": 19673, + "luggage": 20138, + "luhan": 20975, + "luigi": 28444, + "luis": 25231, + "luis": 11339, + "luiz": 39633, + "lujah": 
31639, + "luk": 21652, + "luka": 34878, + "lukaku": 37177, + "lukas": 37941, + "luke": 11970, + "luke": 5652, + "lul": 20861, + "lulla": 37019, + "lullaby": 41676, + "lulu": 32052, + "lulu": 26935, + "lum": 18112, + "lum": 5997, + "lumb": 36231, + "lumber": 27421, + "lumber": 34692, + "lumi": 41437, + "lumia": 31912, + "lumin": 15867, + "luminous": 37913, + "lump": 38704, + "lumpur": 34411, + "lun": 3221, + "lun": 49390, + "luna": 14425, + "lunar": 16043, + "lunatic": 45874, + "lunch": 10954, + "lunch": 2772, + "luncheon": 15104, + "lunches": 29705, + "lunchtime": 14330, + "lund": 30975, + "lund": 20181, + "lunes": 35648, + "lung": 38479, + "lung": 16271, + "lungs": 27366, + "lup": 27413, + "lupita": 49352, + "lupus": 36017, + "lur": 14439, + "lure": 31376, + "lures": 46747, + "lurking": 29941, + "lus": 7158, + "lusci": 38004, + "luscious": 39935, + "lush": 40382, + "lush": 16263, + "lust": 42071, + "lust": 12662, + "lustre": 46673, + "luther": 21848, + "luther": 17208, + "lutheran": 27341, + "luton": 28288, + "luv": 24726, + "luv": 8502, + "lux": 3439, + "lux": 16704, + "luxe": 26373, + "luxemb": 21314, + "luxembour": 22712, + "luxembourg": 23949, + "luxu": 16112, + "luxurious": 17292, + "luxury": 12083, + "luxury": 5247, + "luxurytravel": 29010, + "luz": 41008, + "lv": 10862, + "lv": 11184, + "lvl": 31256, + "lw": 40515, + "lw": 35115, + "lx": 30789, + "ly": 1251, + "ly": 597, + "lydia": 24316, + "lyf": 43688, + "lyfe": 30787, + "lyft": 32944, + "lying": 7175, + "lyk": 46376, + "lyle": 36828, + "lym": 20087, + "lyme": 31167, + "lymph": 30073, + "lymphoma": 37648, + "lyn": 3957, + "lyn": 5054, + "lynch": 31586, + "lynch": 13560, + "lynd": 33416, + "lynda": 42959, + "lyndon": 48518, + "lynn": 25303, + "lynn": 10667, + "lynne": 26900, + "lynx": 28941, + "lyon": 17176, + "lyons": 29453, + "lyric": 24366, + "lyric": 21291, + "lyrical": 33358, + "lyricist": 49013, + "lyrics": 9551, + "lyrix": 46814, + "lys": 45054, + "lyte": 40059, + "lywood": 4012, + "lz": 30818, + "lé": 39641, + "m": 76, + "m": 332, + "ma": 577, + "ma": 1226, + "maa": 42774, + "maa": 21555, + "maan": 33668, + "maar": 48927, + "maas": 43332, + "mab": 35639, + "mabel": 47319, + "mable": 23001, + "mably": 40082, + "mabu": 44682, + "mac": 1961, + "mac": 4945, + "macar": 21558, + "macaroni": 41824, + "macarthur": 36785, + "macau": 43984, + "macau": 33370, + "macbeth": 36321, + "macbook": 20617, + "macdonald": 20315, + "mace": 44869, + "maced": 21102, + "macedonia": 27071, + "macfar": 45374, + "macfarlane": 48825, + "mach": 2637, + "mach": 35091, + "machado": 42318, + "mache": 43220, + "macher": 29330, + "machi": 41783, + "machin": 17972, + "machine": 11539, + "machine": 4169, + "machinelearning": 13621, + "machinery": 21858, + "machines": 11108, + "machining": 45562, + "macho": 43977, + "macht": 45225, + "macin": 36533, + "mack": 8590, + "mack": 12145, + "mackay": 32497, + "macken": 48057, + "mackenzie": 22351, + "mackerel": 35002, + "mackin": 26010, + "macklemore": 41758, + "macle": 33843, + "maclean": 47137, + "macleod": 43684, + "macmillan": 36364, + "macmillan": 35191, + "macon": 35818, + "macos": 45469, + "macqu": 38365, + "macquarie": 40858, + "macro": 20891, + "macro": 16626, + "macron": 24859, + "macs": 46548, + "macy": 17113, + "macys": 47652, + "mad": 2740, + "mad": 3843, + "mada": 37799, + "madagas": 24758, + "madagascar": 25744, + "madam": 33634, + "madam": 27538, + "madame": 23507, + "madd": 31717, + "madden": 19093, + "maddie": 39959, + "maddie": 18875, + "maddow": 32644, + "maddy": 31734, + "made": 5388, + "made": 
1105, + "madein": 13670, + "madeira": 33810, + "madel": 34532, + "madele": 29831, + "madeleine": 33264, + "madeline": 33905, + "madewith": 28627, + "madewithunity": 43190, + "madhu": 23000, + "madhuri": 38346, + "madhuridixit": 43889, + "madhya": 48302, + "madi": 6527, + "madi": 27282, + "madison": 24798, + "madison": 8791, + "madmen": 45452, + "madness": 8755, + "madon": 44852, + "madonna": 14137, + "madra": 27416, + "madras": 42046, + "madre": 42130, + "madri": 5529, + "madrid": 5909, + "mads": 41201, + "madu": 34913, + "madurai": 49159, + "maduro": 32912, + "mae": 16898, + "mae": 17339, + "maer": 47088, + "maestro": 24140, + "mafi": 47164, + "mafia": 14890, + "mag": 1191, + "mag": 4508, + "maga": 8694, + "magaz": 2974, + "magazine": 3113, + "magazines": 22253, + "magdal": 29673, + "mage": 46568, + "mage": 10923, + "magee": 43872, + "magenta": 38091, + "magento": 42442, + "mages": 31059, + "maggi": 29611, + "maggie": 41443, + "maggie": 14524, + "maggio": 49087, + "magh": 45555, + "magi": 19270, + "magic": 13061, + "magic": 3778, + "magical": 36408, + "magical": 7823, + "magician": 26368, + "magin": 42678, + "maging": 41310, + "magn": 10290, + "magna": 34076, + "magne": 9921, + "magnesium": 36379, + "magnet": 18240, + "magnetic": 13838, + "magnets": 33030, + "magni": 24297, + "magnific": 9725, + "magnificent": 10724, + "magnitude": 22955, + "magno": 21184, + "magnolia": 27123, + "magnu": 45198, + "magnum": 23496, + "magnus": 26275, + "magpie": 45973, + "mags": 31021, + "maguire": 26470, + "mah": 7206, + "mah": 10801, + "maha": 12237, + "maha": 33983, + "mahal": 22301, + "mahan": 45191, + "mahar": 11635, + "maharaj": 38488, + "maharashtra": 19328, + "mahat": 32434, + "mahatma": 40530, + "mahe": 15756, + "maher": 29826, + "mahesh": 33448, + "mahesh": 22095, + "mahi": 32529, + "mahi": 38659, + "mahin": 24113, + "mahindra": 31285, + "mahmoud": 41361, + "mahog": 30804, + "mahogany": 33084, + "mahon": 45864, + "mahon": 20371, + "mahone": 26634, + "mai": 7138, + "mai": 14595, + "maia": 46585, + "maid": 23148, + "maid": 10226, + "maidan": 37346, + "maiden": 37011, + "maiden": 13809, + "maids": 27305, + "maidstone": 44395, + "mail": 10478, + "mail": 2614, + "mailbox": 31482, + "mailed": 42314, + "mailing": 26680, + "mailonline": 26021, + "mails": 45213, + "main": 3904, + "main": 2623, + "maine": 18639, + "maine": 7836, + "mained": 15609, + "mainedcm": 15845, + "mainland": 27629, + "mainly": 15280, + "mains": 33656, + "mainst": 42102, + "mainstream": 18034, + "maintain": 12954, + "maintained": 26665, + "maintaining": 21964, + "maintains": 38335, + "mainten": 9399, + "maintenance": 9610, + "mais": 28153, + "maisie": 47355, + "maison": 37065, + "maison": 27626, + "mait": 26387, + "maize": 35386, + "maj": 2948, + "maj": 28723, + "maja": 47498, + "maje": 9852, + "majestic": 15335, + "majesty": 21188, + "major": 8008, + "major": 3350, + "majority": 10508, + "majors": 23597, + "mak": 11271, + "mak": 19253, + "makar": 42242, + "makati": 39402, + "make": 3232, + "make": 1078, + "makeaw": 45859, + "makeinindia": 42739, + "makeit": 26308, + "maken": 47093, + "makeover": 17926, + "maker": 15196, + "maker": 4836, + "makers": 6577, + "makerspace": 42400, + "makes": 2088, + "makeshift": 43274, + "makeu": 41707, + "makeup": 26402, + "makeup": 5853, + "makeyourown": 34090, + "makeyourownlane": 34823, + "maki": 34514, + "makin": 43096, + "makin": 22407, + "making": 17976, + "making": 1665, + "makk": 39852, + "maknae": 44118, + "mako": 49061, + "mal": 1662, + "mal": 3796, + "mala": 28290, + "malade": 36928, + 
"malaga": 35395, + "malala": 41137, + "malam": 48956, + "malaria": 24929, + "malawi": 23405, + "malay": 5323, + "malay": 42430, + "malayalam": 34860, + "malaysi": 39668, + "malaysia": 8146, + "malaysian": 21136, + "malbec": 47741, + "malcol": 12645, + "malcolm": 14139, + "maldives": 16795, + "male": 11326, + "male": 2801, + "males": 14426, + "malhotra": 28866, + "mali": 6701, + "mali": 22669, + "malia": 46714, + "malibu": 21723, + "malicious": 42147, + "malign": 41122, + "malik": 11394, + "mall": 10984, + "mall": 6220, + "mallorca": 28082, + "mallory": 38968, + "malls": 36447, + "malm": 44071, + "malnutrition": 41153, + "malo": 43518, + "malone": 19852, + "maloney": 45897, + "mals": 25370, + "malt": 21688, + "malta": 16989, + "maltese": 39838, + "malvern": 39356, + "malware": 24153, + "mam": 4404, + "mam": 17778, + "mama": 7133, + "mamamoo": 36012, + "mamas": 42395, + "mamba": 44189, + "mament": 45690, + "mami": 43858, + "mamma": 34893, + "mammal": 33385, + "mammals": 31987, + "mammoth": 28022, + "man": 723, + "man": 786, + "mana": 29467, + "mana": 15837, + "manafort": 40108, + "manag": 1830, + "manage": 9770, + "managed": 7928, + "management": 3319, + "manager": 3898, + "managerial": 44261, + "managers": 12853, + "manages": 29699, + "managing": 10892, + "manas": 44188, + "manatee": 46558, + "mance": 2324, + "manchester": 24424, + "manchester": 4651, + "mancini": 47681, + "mancity": 31538, + "mancrush": 36945, + "mancrushmonday": 39307, + "mand": 4325, + "mand": 27244, + "mandala": 41106, + "mandarin": 26455, + "mandate": 26228, + "mandatory": 19934, + "mandel": 34960, + "mandela": 16280, + "mandi": 38961, + "mandir": 35815, + "mando": 34006, + "mands": 12340, + "mandu": 31440, + "mandy": 41505, + "mandy": 24302, + "mane": 44471, + "mane": 16044, + "maneu": 33216, + "mang": 25616, + "mang": 31096, + "manga": 11873, + "mangal": 43027, + "manger": 48251, + "mango": 43831, + "mango": 13962, + "mangrove": 47180, + "manhatt": 10152, + "manhattan": 10961, + "mani": 5654, + "mani": 10718, + "mania": 8435, + "maniac": 31814, + "maniacs": 41444, + "manian": 40077, + "manic": 23017, + "manic": 37825, + "manicure": 33637, + "manife": 14379, + "manifest": 34422, + "manifestation": 48348, + "manifesto": 20907, + "manil": 38827, + "manila": 10969, + "manipu": 40261, + "manipul": 19237, + "manipulation": 30277, + "manipur": 47757, + "manish": 41759, + "manish": 44720, + "manit": 15693, + "manitoba": 20342, + "manjaro": 41489, + "mankind": 24155, + "manly": 25194, + "mann": 19396, + "mann": 4783, + "manne": 30160, + "manned": 26139, + "mannequin": 43388, + "manner": 20700, + "manners": 31693, + "manning": 15996, + "manny": 37054, + "manny": 20933, + "mano": 15753, + "mano": 24016, + "manoj": 41146, + "manor": 41830, + "manor": 13614, + "mans": 28422, + "mans": 7746, + "mansfield": 25543, + "manship": 15460, + "mansion": 13404, + "manslaughter": 48632, + "manson": 26715, + "mant": 25122, + "mant": 27037, + "manta": 41431, + "mantis": 39946, + "mantle": 22159, + "mantra": 25162, + "manu": 3404, + "manu": 25799, + "manual": 12268, + "manuel": 29171, + "manuel": 9567, + "manufac": 5105, + "manufacture": 27741, + "manufactured": 24010, + "manufacturer": 15668, + "manufacturers": 18763, + "manufacturing": 8386, + "manure": 47907, + "manus": 28181, + "manuscript": 24365, + "manuscripts": 40765, + "manutd": 20994, + "many": 28484, + "many": 1346, + "manziel": 40637, + "mao": 47447, + "mao": 25605, + "maori": 43400, + "map": 25180, + "map": 3923, + "maple": 21980, + "maple": 10570, + "mapleleafs": 41257, + 
"mapoli": 28768, + "mapp": 36894, + "mapped": 41596, + "mapping": 15231, + "maps": 8765, + "mapu": 42082, + "mar": 675, + "mar": 3091, + "mara": 15655, + "marais": 47913, + "maran": 44732, + "marath": 16274, + "marathi": 34102, + "marathon": 40764, + "marathon": 5910, + "marau": 38475, + "marbella": 36182, + "marble": 45429, + "marble": 13071, + "marbles": 42931, + "marc": 14054, + "marc": 9075, + "marca": 38242, + "marcel": 17726, + "marcel": 24652, + "marcelo": 35939, + "march": 10638, + "march": 2227, + "marche": 36173, + "marched": 37976, + "marches": 38249, + "marchfor": 31721, + "marching": 15082, + "marchmadness": 28555, + "marci": 36698, + "marcia": 41075, + "marck": 47733, + "marco": 24719, + "marco": 10924, + "marcor": 39945, + "marcorubio": 41143, + "marcos": 21696, + "marcu": 20760, + "marcus": 48955, + "marcus": 9895, + "mardi": 39728, + "mardi": 29229, + "mardigras": 43343, + "mare": 26512, + "mare": 8870, + "mares": 19724, + "marg": 44014, + "margar": 16838, + "margare": 10232, + "margaret": 12185, + "margarita": 25958, + "margaritas": 42679, + "margate": 37428, + "margin": 19464, + "margin": 21357, + "marginal": 38320, + "margins": 33763, + "margot": 37144, + "mari": 2603, + "mari": 19322, + "maria": 41109, + "maria": 6595, + "mariachi": 44299, + "mariah": 31214, + "mariah": 24789, + "mariahcarey": 36538, + "marian": 41129, + "marian": 24677, + "mariana": 44224, + "marianne": 32214, + "mariano": 43988, + "marie": 20657, + "marie": 7864, + "marietta": 46634, + "marig": 41002, + "marijuana": 9864, + "maril": 14611, + "marilyn": 38959, + "marilyn": 18489, + "marin": 8910, + "marin": 23992, + "marina": 12060, + "marinated": 33406, + "marine": 20674, + "marine": 5746, + "mariner": 39972, + "mariners": 19086, + "marines": 15018, + "marino": 30878, + "mario": 39176, + "mario": 7600, + "marion": 37765, + "marion": 18397, + "maris": 21512, + "maris": 33093, + "marisa": 42938, + "mariska": 44703, + "marissa": 31219, + "marist": 48223, + "mariti": 13124, + "maritime": 14331, + "marj": 38639, + "mark": 3805, + "mark": 2110, + "marke": 2399, + "marked": 12360, + "marker": 18170, + "markers": 23664, + "market": 11614, + "market": 2196, + "marketer": 33482, + "marketers": 23682, + "marketing": 19535, + "marketing": 2905, + "marketplace": 18241, + "markets": 7292, + "markham": 39817, + "marking": 14705, + "markings": 41046, + "markle": 32672, + "marko": 38338, + "marks": 5466, + "markus": 33725, + "marl": 24922, + "marlborough": 43515, + "marlene": 45117, + "marley": 16504, + "marlin": 34275, + "marlins": 23309, + "marlon": 32995, + "marmalade": 39068, + "marnock": 48305, + "maro": 27029, + "maroon": 20501, + "marqu": 20704, + "marque": 13012, + "marquee": 27725, + "marquette": 37624, + "marquez": 27317, + "marquis": 33530, + "marr": 32871, + "marrake": 37125, + "marrakech": 39006, + "marri": 3839, + "marriage": 38047, + "marriage": 7040, + "marriages": 38190, + "married": 6791, + "marries": 46283, + "marriott": 19211, + "marrow": 31030, + "marry": 13288, + "marrying": 40507, + "mars": 41469, + "mars": 7496, + "marsden": 43344, + "marse": 26577, + "marseille": 30365, + "marsh": 9237, + "marsh": 13505, + "marsha": 21491, + "marshal": 26608, + "marshall": 30939, + "marshall": 9811, + "marshals": 44175, + "marshes": 43450, + "marshmal": 21069, + "marshmallow": 28530, + "marshmallows": 39471, + "mart": 2348, + "mart": 7772, + "marta": 32858, + "martens": 43211, + "marth": 34493, + "martha": 16427, + "marti": 20577, + "martial": 17088, + "martialarts": 35895, + "martian": 30214, + "martin": 
6929, + "martin": 3690, + "martina": 34393, + "martinez": 13913, + "marting": 47570, + "martini": 22199, + "martino": 41675, + "martins": 30569, + "marty": 9926, + "marty": 17169, + "martyn": 44075, + "martyr": 36155, + "martyr": 26067, + "martyrdom": 43110, + "martyred": 39114, + "martyrs": 24707, + "maru": 37413, + "maru": 31838, + "marvel": 13835, + "marvel": 5996, + "marvelcomics": 46897, + "marvell": 26576, + "marvellous": 28402, + "marvelous": 25487, + "marvin": 19675, + "marx": 30559, + "marx": 26001, + "marxist": 45205, + "mary": 5146, + "mary": 2676, + "maryam": 33636, + "maryam": 36393, + "maryland": 11379, + "marys": 40905, + "marys": 40228, + "mas": 5226, + "mas": 1412, + "masa": 24995, + "masa": 41868, + "masala": 31483, + "masc": 23564, + "mascar": 46984, + "mascara": 31635, + "mascot": 13983, + "mascots": 43266, + "mascul": 25589, + "masculine": 48269, + "masculinity": 40465, + "mase": 49128, + "maser": 25798, + "maserati": 30442, + "mash": 12317, + "mash": 15680, + "mashable": 41026, + "mashed": 27395, + "mashup": 27079, + "masi": 35965, + "masjid": 31420, + "mask": 19262, + "mask": 8306, + "masked": 25757, + "masking": 47046, + "masks": 19055, + "maslow": 44359, + "mason": 17424, + "mason": 9699, + "masonic": 36491, + "masonry": 30764, + "masons": 37195, + "masqu": 26593, + "masquer": 29604, + "masquerade": 36944, + "mass": 4636, + "mass": 4854, + "massach": 14484, + "massachuse": 14577, + "massachusetts": 14756, + "massacre": 14696, + "massage": 13055, + "masse": 41735, + "masses": 22978, + "massey": 29868, + "massi": 17239, + "massimo": 45821, + "massive": 4818, + "massively": 34297, + "mast": 45916, + "mast": 27920, + "master": 4534, + "master": 3498, + "mastercard": 40542, + "masterchef": 34809, + "masterclass": 17529, + "mastered": 32616, + "masterful": 46823, + "mastering": 28326, + "mastermind": 34029, + "masterpiece": 12066, + "masterpieces": 37596, + "masters": 6913, + "mastery": 34800, + "mastiff": 42311, + "maswar": 47887, + "mat": 905, + "mat": 9063, + "mata": 17270, + "match": 7733, + "match": 2439, + "matcha": 32433, + "matchday": 15947, + "matched": 17792, + "matches": 8609, + "matching": 11840, + "matchup": 19355, + "matchups": 49162, + "mate": 6137, + "mate": 2936, + "mated": 33813, + "mateo": 34991, + "mater": 23724, + "materi": 7084, + "material": 7118, + "materials": 8161, + "maternal": 26131, + "maternity": 23894, + "mates": 5817, + "math": 13277, + "math": 6025, + "mathe": 8725, + "mathemat": 11901, + "mathematical": 25609, + "mathematician": 41036, + "mathematics": 20113, + "mathew": 36333, + "mathews": 37120, + "mathi": 23014, + "mathieu": 40417, + "maths": 14763, + "mati": 12716, + "mati": 32268, + "matic": 36859, + "matic": 7900, + "matically": 38282, + "matics": 23634, + "matil": 26751, + "matilda": 36308, + "matin": 44849, + "matinee": 38525, + "mating": 34346, + "mation": 11701, + "matisse": 43446, + "mato": 13127, + "matologist": 48842, + "matology": 27940, + "matory": 25519, + "matri": 27041, + "matrix": 18078, + "mats": 22259, + "matsu": 30242, + "matt": 7972, + "matt": 3972, + "mattb": 42791, + "matte": 31237, + "matte": 19771, + "mattel": 35365, + "matteo": 33120, + "matter": 30471, + "matter": 3828, + "matters": 5708, + "matth": 41846, + "matthe": 5116, + "matthew": 17588, + "matthew": 7008, + "matthews": 16739, + "matthi": 29853, + "matthias": 45104, + "matti": 39840, + "mattress": 23438, + "matty": 31233, + "matty": 29176, + "matu": 40616, + "matur": 22897, + "mature": 14417, + "maturity": 28047, + "mau": 8134, + "mau": 23033, + "maui": 
20463, + "maul": 30725, + "maur": 10574, + "maure": 25191, + "maureen": 31723, + "maurice": 20200, + "mauricio": 39066, + "mauriti": 28406, + "mauritius": 29305, + "mauro": 41691, + "mav": 25697, + "maver": 16700, + "maverick": 27425, + "mavericks": 30092, + "mavs": 30665, + "maw": 39351, + "maw": 42271, + "mawards": 37682, + "max": 4898, + "max": 3902, + "maxi": 8554, + "maxi": 23266, + "maxim": 19892, + "maxim": 38574, + "maximize": 28673, + "maximum": 13162, + "maximus": 44312, + "maxine": 38468, + "maxwell": 19611, + "maxx": 37466, + "may": 1686, + "may": 1270, + "maya": 45783, + "maya": 12987, + "mayan": 37952, + "maybe": 3746, + "mayday": 29957, + "mayer": 21196, + "mayfair": 35171, + "mayfield": 33933, + "mayhem": 21502, + "maymay": 26600, + "maymay": 33853, + "maymayentrata": 30480, + "maynard": 32487, + "mayne": 35771, + "mayo": 22449, + "mayo": 11280, + "mayor": 15429, + "mayor": 4676, + "mayoral": 28983, + "mayorof": 43533, + "mayors": 28501, + "mays": 35445, + "maythe": 42281, + "mayward": 45751, + "mayward": 23519, + "mayweather": 22774, + "maz": 9177, + "maz": 36215, + "mazda": 18506, + "maze": 21988, + "mazz": 29439, + "mañ": 37059, + "mañana": 39354, + "mb": 758, + "mb": 3996, + "mba": 8329, + "mban": 46685, + "mbar": 44452, + "mbb": 10736, + "mbc": 20137, + "mbe": 38395, + "mbe": 27004, + "mber": 5467, + "mber": 1034, + "mberg": 26372, + "mbers": 5443, + "mbi": 45347, + "mble": 20310, + "mble": 4756, + "mbles": 28693, + "mbling": 28604, + "mbo": 25733, + "mbo": 11319, + "mbps": 44896, + "mbs": 10370, + "mbta": 38979, + "mbu": 42228, + "mbuhari": 36752, + "mc": 1278, + "mc": 4126, + "mca": 40570, + "mca": 14635, + "mcal": 28663, + "mcar": 43776, + "mcbride": 35080, + "mcc": 21192, + "mccabe": 37628, + "mccaf": 47385, + "mccain": 20397, + "mccall": 34844, + "mccann": 27140, + "mccar": 9570, + "mccarthy": 16974, + "mccartney": 19958, + "mccl": 24709, + "mccla": 43672, + "mccle": 40139, + "mcclure": 44945, + "mcco": 46152, + "mccon": 32638, + "mccor": 23057, + "mccormack": 45164, + "mccormick": 39088, + "mccoy": 20218, + "mccr": 41996, + "mccre": 25393, + "mccul": 38833, + "mccull": 41782, + "mcd": 28930, + "mcder": 27355, + "mcdermott": 34504, + "mcdon": 12171, + "mcdonald": 10741, + "mcdonalds": 17674, + "mcdonnell": 34360, + "mcdowell": 34119, + "mce": 26864, + "mcel": 28752, + "mcen": 47423, + "mcfad": 36976, + "mcfadden": 42105, + "mcfar": 29020, + "mcfarlane": 47174, + "mcfc": 16416, + "mcfly": 38211, + "mcg": 42507, + "mcg": 27995, + "mcgee": 29223, + "mcgill": 46524, + "mcgill": 35511, + "mcgin": 29596, + "mcgowan": 40462, + "mcgr": 25169, + "mcgra": 29367, + "mcgrath": 28759, + "mcgraw": 40950, + "mcgregor": 19642, + "mcgu": 34294, + "mcguinness": 45299, + "mcguire": 32635, + "mci": 46212, + "mci": 45491, + "mcil": 30481, + "mcin": 18770, + "mcintosh": 45353, + "mcintyre": 33369, + "mck": 6781, + "mckay": 33611, + "mcke": 27424, + "mckee": 43529, + "mcken": 42619, + "mckenna": 24924, + "mckenzie": 25502, + "mckin": 15437, + "mckinley": 39891, + "mckinney": 33554, + "mckinnon": 48736, + "mckinsey": 48143, + "mcl": 49021, + "mcla": 12565, + "mclaren": 37381, + "mclaren": 16789, + "mclau": 32285, + "mclaughlin": 35346, + "mcle": 25299, + "mclean": 28666, + "mcleod": 40259, + "mcm": 12251, + "mcmahon": 24026, + "mcmaster": 42703, + "mcmillan": 45603, + "mcn": 42919, + "mcnam": 32682, + "mcnamara": 37506, + "mcne": 42545, + "mco": 33723, + "mcqueen": 22544, + "mcr": 29884, + "mcr": 16966, + "mcs": 27020, + "mcu": 30403, + "md": 8637, + "md": 4732, + "mdc": 38773, + "mdc": 
41761, + "mds": 48746, + "mdt": 40822, + "me": 613, + "me": 614, + "mea": 46045, + "mea": 17711, + "mead": 12134, + "mead": 21567, + "meade": 37218, + "meado": 16402, + "meadow": 25213, + "meadow": 17195, + "meadows": 17178, + "meal": 29662, + "meal": 5478, + "meals": 11229, + "mean": 4189, + "mean": 3450, + "meand": 48015, + "meaning": 14586, + "meaning": 8342, + "meaningful": 17480, + "meaningless": 48932, + "meanings": 45814, + "means": 3494, + "meant": 8674, + "meantime": 27499, + "meanwhile": 9650, + "meas": 5867, + "measles": 38230, + "measurable": 48010, + "measure": 15261, + "measure": 10579, + "measured": 23154, + "measurement": 20973, + "measurements": 29894, + "measures": 11936, + "measuring": 18064, + "meat": 10805, + "meat": 6480, + "meatball": 43642, + "meatballs": 29233, + "meath": 37920, + "meatless": 48085, + "meats": 29558, + "mec": 27432, + "mecca": 36095, + "mech": 38305, + "mechan": 6715, + "mechanic": 24582, + "mechanical": 14467, + "mechanics": 20536, + "mechanism": 22576, + "mechanisms": 28610, + "meck": 41908, + "med": 1948, + "med": 2177, + "meda": 33614, + "medal": 29714, + "medal": 6974, + "medalist": 21040, + "medalists": 43397, + "medalli": 31349, + "medallion": 43469, + "medallist": 41472, + "medals": 14710, + "mede": 48225, + "meded": 27627, + "medi": 1436, + "media": 22064, + "media": 1895, + "mediac": 37490, + "median": 30491, + "mediation": 42829, + "medic": 3602, + "medic": 35441, + "medicaid": 25421, + "medical": 18432, + "medical": 4116, + "medicare": 23710, + "medication": 23771, + "medications": 37181, + "medicinal": 28772, + "medicine": 5616, + "medicines": 26541, + "medics": 46688, + "medieval": 38956, + "medieval": 10789, + "medina": 27281, + "mediocre": 41170, + "medit": 19130, + "meditate": 38039, + "meditation": 10827, + "mediter": 14194, + "mediterran": 14358, + "mediterranean": 15327, + "medium": 8675, + "medley": 24793, + "meds": 25075, + "medtech": 42044, + "medusa": 44216, + "medway": 42286, + "mee": 1725, + "mee": 14075, + "meek": 28935, + "meen": 37940, + "meen": 46515, + "meer": 26714, + "meer": 27555, + "meet": 5714, + "meet": 1633, + "meeting": 48566, + "meeting": 2071, + "meetings": 9980, + "meets": 5972, + "meetthe": 27575, + "meetup": 15430, + "meg": 11500, + "meg": 16186, + "mega": 15979, + "mega": 9068, + "megab": 38103, + "megadeth": 46741, + "megal": 37650, + "megam": 26073, + "megan": 19127, + "megan": 11503, + "megap": 33624, + "megat": 35581, + "megh": 31192, + "meghan": 39939, + "meghan": 18261, + "meh": 10512, + "meh": 22211, + "mehta": 25031, + "mei": 22564, + "mei": 25198, + "meier": 29812, + "mein": 28857, + "mein": 21466, + "meister": 28407, + "mek": 44645, + "mel": 1902, + "mel": 6834, + "mela": 35032, + "melan": 22261, + "melanch": 44818, + "melancholy": 47821, + "melani": 34031, + "melania": 32796, + "melanie": 22153, + "melanoma": 40862, + "melb": 47007, + "melb": 28980, + "melbourne": 28387, + "melbourne": 6995, + "melee": 45108, + "meli": 28885, + "melinda": 46303, + "melis": 18913, + "melissa": 41866, + "melissa": 13030, + "mell": 22531, + "mell": 41583, + "mello": 47594, + "mellon": 45162, + "mellow": 32034, + "melo": 10354, + "melo": 22374, + "melodic": 41877, + "melodies": 38412, + "melody": 19119, + "melon": 12146, + "melrose": 36296, + "melt": 22209, + "melt": 15957, + "meltdown": 30613, + "melted": 23037, + "melting": 19247, + "melton": 46062, + "melts": 31446, + "melville": 46030, + "melvin": 31544, + "mely": 6373, + "mem": 4937, + "mem": 34944, + "memb": 2114, + "member": 29566, + "member": 1640, + 
"members": 2567, + "membership": 11562, + "membrane": 34088, + "meme": 35157, + "meme": 9169, + "memes": 12828, + "memo": 15967, + "memo": 19334, + "memoir": 20532, + "memoirs": 45311, + "memor": 1858, + "memorab": 26271, + "memorabilia": 27488, + "memorable": 13172, + "memorial": 16285, + "memorial": 4642, + "memorialday": 21598, + "memoriam": 48191, + "memories": 4304, + "memory": 44766, + "memory": 5137, + "memph": 10285, + "memphis": 38432, + "memphis": 11298, + "men": 1552, + "men": 1656, + "mena": 23052, + "menace": 29949, + "mend": 8151, + "mend": 46927, + "mendel": 49268, + "mendes": 18060, + "mendez": 48275, + "mendo": 19327, + "mendoza": 23680, + "meng": 37102, + "meng": 37450, + "mening": 46428, + "menon": 38255, + "menopau": 34974, + "menopause": 46026, + "mens": 16924, + "mens": 10495, + "mensfashion": 27578, + "menstru": 28345, + "menstrual": 40915, + "menswear": 18803, + "ment": 1585, + "ment": 777, + "mental": 8611, + "mental": 3448, + "mentalhealth": 20593, + "mentalhealth": 13022, + "mentality": 26647, + "mentally": 14307, + "mentary": 4468, + "mentation": 9512, + "mentday": 40397, + "mente": 40302, + "mente": 36396, + "mented": 9249, + "menting": 14471, + "mention": 43881, + "mention": 6762, + "mentioned": 11948, + "mentioning": 34290, + "mentions": 12334, + "mento": 30582, + "mentor": 45342, + "mentor": 11642, + "mentoring": 19610, + "mentors": 20945, + "mentorship": 33878, + "ments": 1827, + "menu": 6225, + "menus": 33534, + "meo": 30792, + "meow": 39965, + "meow": 17246, + "mep": 27095, + "mer": 1316, + "mer": 2452, + "mera": 20028, + "merc": 34357, + "merc": 44399, + "mercado": 45479, + "merce": 8409, + "mercede": 34959, + "mercedes": 26403, + "mercedes": 10685, + "mercedesam": 40107, + "mercedesbenz": 32347, + "mercen": 40301, + "mercer": 21632, + "merch": 11504, + "merchandi": 14954, + "merchandise": 16808, + "merchandising": 49196, + "merchant": 19563, + "merchants": 34427, + "merci": 23364, + "merci": 29378, + "mercur": 11471, + "mercury": 45203, + "mercury": 12653, + "mercy": 33249, + "mercy": 10815, + "mere": 29657, + "mere": 10342, + "mered": 24657, + "mered": 32297, + "meredith": 25103, + "merely": 28718, + "merge": 30406, + "merged": 46492, + "merger": 24744, + "merging": 49256, + "meri": 17993, + "meri": 36109, + "meria": 48433, + "meric": 27097, + "merica": 30561, + "meridi": 37901, + "meridian": 31195, + "mering": 41060, + "meringue": 41661, + "merino": 42648, + "merit": 20830, + "merkel": 24715, + "merle": 48586, + "merlin": 26517, + "merlot": 40424, + "mermaid": 16064, + "mermaids": 43617, + "mero": 19097, + "merr": 48288, + "merri": 21462, + "merrill": 47713, + "merritt": 36462, + "merry": 14167, + "merry": 5779, + "merrychristmas": 19672, + "mers": 4199, + "mersal": 36711, + "mersey": 25248, + "mersey": 46239, + "merseyside": 35382, + "mert": 48496, + "merton": 35315, + "mery": 40873, + "meryl": 35787, + "mes": 28432, + "mes": 3029, + "mesa": 18956, + "mese": 42018, + "mesh": 15030, + "mesm": 18695, + "mesmer": 38435, + "mesmeri": 25985, + "mesmerizing": 35637, + "meso": 25537, + "mesqu": 46819, + "mess": 2490, + "mess": 8188, + "message": 3918, + "messages": 9390, + "messaging": 23234, + "messe": 40391, + "messed": 23580, + "messenger": 17389, + "messi": 19394, + "messi": 11252, + "messiah": 28737, + "messing": 23144, + "messy": 15987, + "mest": 23780, + "mester": 47349, + "mesut": 49177, + "met": 5249, + "met": 2340, + "meta": 14803, + "meta": 22701, + "metab": 16150, + "metabol": 48389, + "metaboli": 25573, + "metabolic": 34311, + "metabolism": 
27824, + "metal": 8935, + "metal": 4044, + "metall": 19084, + "metallic": 17257, + "metallica": 24079, + "metals": 21375, + "metam": 28862, + "metamor": 39030, + "metamorpho": 47601, + "metaph": 24189, + "metaphor": 34233, + "metast": 41973, + "mete": 11226, + "meteor": 26429, + "meteor": 26823, + "meteoro": 25948, + "meteorologist": 42849, + "meter": 10104, + "meters": 13247, + "metgala": 30089, + "meth": 21867, + "meth": 26177, + "methane": 37565, + "metho": 5770, + "method": 10284, + "methodist": 25165, + "methodo": 28488, + "methodology": 37316, + "methods": 12200, + "methyl": 48999, + "metmuseum": 28207, + "meto": 25679, + "metoo": 24722, + "metr": 15086, + "metre": 27889, + "metres": 19798, + "metric": 19950, + "metrical": 40704, + "metrics": 24396, + "metro": 7257, + "metro": 6784, + "metroid": 39957, + "metropolis": 40476, + "metropolitan": 19013, + "metry": 20039, + "mets": 9633, + "mett": 28081, + "metz": 40506, + "meu": 34520, + "mew": 40368, + "mex": 3213, + "mex": 18387, + "mexic": 31728, + "mexican": 37442, + "mexican": 8186, + "mexicans": 47729, + "mexico": 31834, + "mexico": 4604, + "mey": 28584, + "mey": 27777, + "meyer": 13963, + "meyers": 32326, + "mez": 30615, + "mez": 46833, + "mezz": 38771, + "mf": 18199, + "mf": 11067, + "mfa": 24107, + "mfc": 39474, + "mfg": 21912, + "mfw": 27309, + "mg": 10003, + "mg": 8014, + "mga": 23954, + "mgm": 27572, + "mgmt": 22288, + "mgr": 31500, + "mgs": 48073, + "mgt": 48663, + "mh": 9962, + "mh": 10834, + "mha": 41944, + "mhealth": 41225, + "mhs": 28815, + "mhz": 31550, + "mi": 714, + "mi": 2251, + "mia": 5852, + "miam": 31053, + "miami": 15106, + "miami": 4891, + "mian": 24792, + "miaw": 36046, + "mib": 48178, + "mic": 1213, + "mic": 3816, + "mica": 41551, + "micah": 33870, + "mice": 19030, + "mich": 25628, + "mich": 23029, + "micha": 2083, + "michael": 6051, + "michael": 2511, + "michaela": 41897, + "michaeljackson": 33532, + "michaels": 23868, + "michal": 47144, + "miche": 37966, + "micheal": 43709, + "michel": 5158, + "michel": 17153, + "michelangelo": 41245, + "michele": 20642, + "michelin": 26330, + "michelle": 19028, + "michelle": 8625, + "michi": 5658, + "michigan": 32344, + "michigan": 6296, + "mick": 15171, + "mick": 12592, + "mickey": 41813, + "mickey": 13053, + "micky": 43011, + "micro": 3160, + "micro": 11374, + "microbes": 44671, + "microbi": 19496, + "microbial": 30335, + "microbiology": 35348, + "microbiome": 35148, + "micron": 48742, + "microphone": 24643, + "micropoetry": 35997, + "microscope": 29114, + "microscopy": 38431, + "microsof": 42424, + "microsoft": 38650, + "microsoft": 7254, + "microwave": 24240, + "mics": 16554, + "mid": 2192, + "mid": 4734, + "midcentury": 48988, + "midd": 2983, + "midday": 23390, + "middle": 9849, + "middle": 3694, + "middleeast": 32783, + "middles": 29769, + "middlesbrough": 32436, + "middlesex": 39154, + "middleton": 23627, + "middleweight": 35829, + "midfield": 28116, + "midfielder": 13423, + "midget": 30734, + "midi": 39496, + "midi": 27326, + "midland": 24822, + "midlands": 18062, + "midnight": 35746, + "midnight": 6302, + "mids": 40821, + "midst": 24752, + "midsummer": 35234, + "midterm": 34365, + "midterms": 32015, + "midtown": 26069, + "midway": 26536, + "midweek": 29120, + "midwest": 16627, + "midwi": 44802, + "midwife": 37681, + "midwives": 42355, + "mie": 20865, + "mie": 10555, + "miento": 46482, + "mier": 36490, + "mies": 8840, + "miff": 49398, + "mig": 28743, + "might": 2727, + "mighty": 26632, + "mighty": 7815, + "mign": 41678, + "migos": 44640, + "migr": 3736, + "migra": 
28186, + "migraine": 35360, + "migrant": 18902, + "migrants": 15814, + "migrate": 41804, + "migrating": 43604, + "migration": 11891, + "migu": 12279, + "miguel": 33672, + "miguel": 14436, + "miho": 46870, + "mii": 39896, + "mik": 15096, + "mik": 46203, + "mika": 28609, + "mika": 25185, + "mike": 5884, + "mike": 3178, + "mikel": 48865, + "mikequind": 33508, + "mikequindazzi": 33551, + "mikey": 34934, + "mikey": 23368, + "mikha": 30999, + "mikhail": 38327, + "miki": 48863, + "miko": 35413, + "miku": 37703, + "mil": 1469, + "mil": 12826, + "mila": 26183, + "milan": 30380, + "milan": 8552, + "milano": 18585, + "milb": 42248, + "mild": 16085, + "mildly": 49059, + "mile": 7833, + "mile": 6243, + "mileage": 30579, + "miler": 44680, + "miles": 3446, + "milestone": 13485, + "milestones": 34025, + "miley": 25336, + "miley": 14321, + "mileycyrus": 28528, + "milf": 45386, + "milford": 35840, + "mili": 16698, + "miliband": 41440, + "milit": 3715, + "militant": 33629, + "militants": 23974, + "military": 24498, + "military": 4323, + "militi": 46625, + "militia": 32114, + "milk": 13409, + "milk": 5205, + "milkshake": 29066, + "milky": 37320, + "milky": 21120, + "milkyway": 43246, + "mill": 4221, + "mill": 6637, + "milla": 49381, + "millan": 34930, + "millan": 22188, + "millar": 41851, + "mille": 34066, + "millen": 48501, + "millenni": 10406, + "millennial": 28357, + "millennials": 18804, + "millennium": 21116, + "miller": 21699, + "miller": 5733, + "milli": 5340, + "millie": 29283, + "milling": 39133, + "million": 13154, + "million": 2506, + "millionaire": 25179, + "millionaires": 47159, + "millions": 8492, + "mills": 10331, + "millwall": 35902, + "milly": 45794, + "milne": 44590, + "milner": 45230, + "milo": 24548, + "milton": 39004, + "milton": 17360, + "milwau": 13452, + "milwaukee": 14259, + "mim": 39379, + "mimi": 27086, + "mimic": 47116, + "mimic": 46519, + "mimo": 45551, + "min": 771, + "min": 3331, + "mina": 15281, + "minaj": 25136, + "minal": 40222, + "minat": 33275, + "mince": 32396, + "mind": 5890, + "mind": 2575, + "mindanao": 44228, + "minded": 21330, + "mindful": 28457, + "mindfulness": 15707, + "minding": 45337, + "minds": 9244, + "mindset": 14217, + "mindy": 46875, + "mindy": 38551, + "mine": 20149, + "mine": 3347, + "minecraft": 15678, + "mined": 48034, + "minent": 12533, + "miner": 14109, + "miner": 26572, + "mineral": 17692, + "minerals": 21169, + "miners": 22119, + "mines": 16211, + "ming": 10868, + "ming": 2107, + "mingham": 7590, + "mingle": 38437, + "mingly": 36909, + "mington": 49283, + "mington": 23119, + "minh": 48734, + "minho": 21318, + "mini": 1810, + "mini": 3954, + "miniature": 44298, + "miniature": 16377, + "miniatures": 38816, + "minic": 31522, + "minim": 10005, + "minimal": 18458, + "minimalism": 42594, + "minimalist": 26641, + "minimize": 38697, + "minimum": 12244, + "minindia": 28458, + "mining": 8473, + "minion": 28622, + "minions": 27035, + "minis": 33409, + "minis": 35976, + "minister": 25688, + "minister": 3569, + "ministerial": 33008, + "ministers": 16406, + "ministries": 27895, + "ministry": 8742, + "mink": 42017, + "minn": 45991, + "minn": 47318, + "minne": 7083, + "minneapolis": 16977, + "minneso": 9380, + "minnesota": 9968, + "minnie": 24493, + "mino": 22791, + "minogue": 44202, + "minor": 8522, + "minorities": 28119, + "minority": 16210, + "minors": 36789, + "mins": 6196, + "minsk": 46151, + "minster": 11189, + "mint": 48084, + "mint": 7506, + "minted": 49377, + "minton": 20050, + "minu": 29064, + "minus": 15358, + "minute": 28931, + "minute": 4497, + 
"minutes": 3056, + "mio": 26366, + "mir": 2750, + "mir": 6585, + "mira": 21665, + "mira": 22762, + "mirac": 13685, + "miracle": 49208, + "miracle": 11543, + "miracles": 23478, + "miraculous": 38671, + "mirage": 28679, + "mirai": 49060, + "mirand": 32367, + "miranda": 17590, + "mire": 38140, + "mire": 30140, + "miri": 22273, + "miriam": 30950, + "miro": 34851, + "miro": 48317, + "mirren": 47600, + "mirro": 48500, + "mirror": 29823, + "mirror": 7220, + "mirrors": 21823, + "mirza": 36440, + "mis": 866, + "mis": 11239, + "mischief": 33896, + "misconceptions": 48681, + "misconduct": 30601, + "mise": 46567, + "mise": 17267, + "miser": 33394, + "miserable": 26196, + "misery": 28360, + "mises": 24390, + "misfits": 42708, + "mish": 15494, + "mish": 20981, + "misha": 35434, + "mishra": 33042, + "misleading": 30862, + "mism": 15948, + "miso": 27657, + "miso": 33441, + "misogy": 31315, + "misogyny": 48415, + "miss": 6984, + "miss": 1526, + "missal": 38337, + "missed": 3955, + "misses": 15844, + "missi": 3008, + "missile": 14411, + "missiles": 27868, + "missin": 36209, + "missing": 23509, + "missing": 3423, + "mission": 12738, + "mission": 2406, + "missionaries": 40580, + "missionary": 27915, + "missions": 6990, + "mississ": 26483, + "mississauga": 28393, + "mississi": 11687, + "mississippi": 12232, + "missou": 30710, + "missoula": 48549, + "missouri": 11835, + "missuni": 26347, + "missuniverse": 28766, + "missy": 48105, + "missy": 31515, + "missyou": 45799, + "mist": 12610, + "mist": 11946, + "mistak": 20478, + "mistake": 11303, + "mistaken": 29182, + "mistakenly": 48494, + "mistakes": 12824, + "mister": 26949, + "mister": 18895, + "mistle": 46800, + "mistletoe": 48569, + "mistre": 42039, + "mistress": 24349, + "mists": 28636, + "misty": 18799, + "misunderstood": 41574, + "misuse": 40970, + "mit": 3303, + "mit": 4551, + "mita": 47514, + "mitage": 27964, + "mitch": 6969, + "mitch": 14150, + "mitchell": 39339, + "mitchell": 9007, + "mite": 26929, + "mith": 21752, + "mith": 17948, + "miti": 17857, + "mitigate": 42273, + "mitigation": 35514, + "mito": 38254, + "mitochondri": 42132, + "mitra": 47703, + "mits": 24086, + "mitsu": 17905, + "mitsubi": 21604, + "mitsubishi": 23030, + "mitt": 17321, + "mitt": 21341, + "mitted": 10307, + "mitting": 27938, + "mitz": 41827, + "mium": 35891, + "miwx": 43941, + "mix": 3210, + "mix": 3285, + "mixed": 29376, + "mixed": 6780, + "mixer": 17200, + "mixers": 39175, + "mixes": 19061, + "mixing": 15588, + "mixtape": 11044, + "mixture": 28286, + "miy": 25695, + "miya": 36257, + "miz": 20881, + "miz": 30795, + "mize": 19076, + "mized": 43418, + "mizing": 38715, + "mizz": 19985, + "mizzou": 26165, + "mj": 13117, + "mj": 14733, + "mk": 11581, + "mk": 8937, + "mke": 36642, + "mkt": 24814, + "ml": 3627, + "ml": 5780, + "mla": 16723, + "mlas": 48464, + "mlb": 21039, + "mlb": 7482, + "mley": 40329, + "mlg": 45801, + "mlin": 24556, + "mlk": 17941, + "mlkday": 39905, + "mlm": 37611, + "mln": 18971, + "mlp": 23620, + "mlpfi": 45475, + "mlpfim": 45640, + "mls": 13077, + "mm": 1028, + "mm": 2848, + "mma": 34140, + "mma": 6096, + "mmc": 44253, + "mme": 13105, + "mmed": 19570, + "mmer": 35717, + "mmer": 7508, + "mmers": 28128, + "mmes": 42862, + "mmi": 34147, + "mming": 21038, + "mming": 16507, + "mmings": 31357, + "mmit": 41050, + "mmj": 43015, + "mmm": 37908, + "mmm": 7641, + "mmmm": 36312, + "mmmm": 13180, + "mmmmm": 21808, + "mmmmmm": 43740, + "mmo": 30418, + "mmon": 41131, + "mmor": 36657, + "mmorpg": 39476, + "mms": 37803, + "mmva": 42666, + "mmy": 28837, + "mmy": 8722, + "mn": 5086, 
+ "mn": 4057, + "mna": 34877, + "mnd": 44776, + "mnet": 34129, + "mnf": 41105, + "mnl": 32980, + "mnleg": 42653, + "mns": 39040, + "mnt": 21477, + "mntwins": 45448, + "mnwild": 39044, + "mnwx": 39592, + "mo": 617, + "mo": 2080, + "moa": 33174, + "moana": 43241, + "mob": 2818, + "mob": 12754, + "mobi": 9451, + "mobil": 26343, + "mobil": 29815, + "mobile": 12935, + "mobile": 3451, + "mobiles": 44302, + "mobili": 20770, + "mobility": 12546, + "mobilization": 48916, + "moby": 47219, + "moc": 41439, + "moc": 36992, + "mocha": 28425, + "mochi": 47973, + "mock": 15641, + "mock": 12759, + "mocked": 47400, + "mocking": 28692, + "mocking": 37870, + "mocks": 35142, + "mod": 6362, + "mod": 10893, + "moda": 25814, + "modal": 33157, + "mode": 20402, + "mode": 6493, + "model": 4591, + "model": 2863, + "modeled": 39527, + "modeling": 13706, + "modelling": 19946, + "models": 6176, + "moder": 2894, + "moderate": 16435, + "moderated": 27928, + "moderating": 34242, + "moderator": 32659, + "modern": 11706, + "modern": 4077, + "modernart": 34417, + "moderni": 24328, + "modernism": 39601, + "modernist": 36773, + "modernization": 47294, + "modes": 30454, + "modest": 25436, + "modi": 9047, + "modi": 7774, + "modification": 37630, + "modified": 17964, + "modo": 36820, + "mods": 23843, + "modu": 9036, + "modular": 22437, + "module": 16757, + "modules": 30575, + "moe": 38655, + "moe": 17938, + "mof": 30798, + "moff": 27160, + "mog": 42362, + "moga": 41732, + "mogadishu": 45133, + "mogul": 41320, + "moh": 18979, + "moh": 35388, + "moha": 46892, + "moham": 7923, + "mohamed": 18472, + "mohammad": 19926, + "mohammed": 16168, + "mohan": 26521, + "mohan": 23586, + "mohawk": 34942, + "mohd": 49094, + "mohsin": 48861, + "moi": 20691, + "moi": 21825, + "moil": 30349, + "moines": 32091, + "moist": 19831, + "moist": 33263, + "moisture": 20412, + "moisturi": 25942, + "moj": 34505, + "moja": 49055, + "mojito": 46830, + "mojo": 25204, + "mok": 49146, + "mol": 4246, + "mol": 31582, + "mold": 21846, + "molding": 46274, + "moldova": 47317, + "mole": 9927, + "mole": 23529, + "molecular": 19370, + "molecule": 39233, + "molecules": 35643, + "molina": 34201, + "mollie": 48203, + "molly": 24368, + "molly": 12573, + "molo": 41510, + "mology": 32255, + "molten": 46071, + "moly": 47083, + "mom": 1614, + "mom": 2543, + "moma": 33605, + "mombasa": 40340, + "moment": 12197, + "moment": 2495, + "momento": 30078, + "moments": 5251, + "momentum": 15722, + "momlife": 43825, + "momma": 14508, + "mommy": 12456, + "momo": 48490, + "momo": 25980, + "moms": 28446, + "moms": 10042, + "momsdemand": 33744, + "mon": 749, + "mon": 2173, + "mona": 19143, + "monaco": 14938, + "monaghan": 39797, + "monarch": 27235, + "monarch": 22619, + "monarchs": 36750, + "monarchy": 47503, + "monaster": 19422, + "monastery": 21850, + "monc": 34847, + "moncton": 44962, + "mond": 14522, + "mond": 4475, + "monday": 6205, + "monday": 2098, + "mondaymorning": 40089, + "mondaymotiv": 45488, + "mondaymotivation": 8198, + "mondaymotivaton": 47034, + "mondays": 13815, + "monde": 29339, + "mondo": 36207, + "monds": 20317, + "mone": 25990, + "monet": 24499, + "monetary": 26394, + "moneti": 38056, + "money": 12743, + "money": 2327, + "mong": 43566, + "monger": 38928, + "mongers": 27670, + "mongo": 20680, + "mongolia": 27144, + "mongolian": 46335, + "moni": 46851, + "monia": 31161, + "monic": 30893, + "monica": 13540, + "monit": 9014, + "monitor": 10198, + "monitored": 45828, + "monitoring": 11030, + "monitors": 30478, + "monk": 30557, + "monk": 16424, + "monkey": 29597, + "monkey": 
9465, + "monkeys": 15781, + "monks": 29090, + "monmouth": 36929, + "mono": 8220, + "mono": 22537, + "monochrome": 25576, + "monogram": 39665, + "monologue": 47776, + "monopoly": 25241, + "monoxide": 49314, + "monro": 45750, + "monroe": 13625, + "mons": 19885, + "monsanto": 37592, + "monsi": 46677, + "monsieur": 48879, + "monsoon": 18872, + "monsta": 30718, + "monstax": 45631, + "monste": 47045, + "monster": 14454, + "monster": 6060, + "monsters": 11546, + "mont": 5186, + "mont": 5382, + "montag": 37202, + "montage": 32325, + "montal": 42126, + "montan": 28405, + "montana": 11436, + "monte": 8711, + "monte": 14667, + "montene": 28538, + "montenegro": 30378, + "monter": 36673, + "monterey": 23388, + "monterrey": 45254, + "montess": 43205, + "montessori": 45443, + "montgom": 13852, + "montgomery": 14951, + "month": 7680, + "month": 1924, + "monthly": 8764, + "months": 3109, + "monthsary": 42420, + "monton": 41961, + "montp": 39523, + "montre": 8434, + "montreal": 9262, + "montrose": 42347, + "monty": 43997, + "monty": 24038, + "monu": 9748, + "monument": 12019, + "monumental": 31297, + "monuments": 26916, + "mony": 4117, + "monza": 40380, + "moo": 4953, + "moo": 24626, + "mood": 42358, + "mood": 5394, + "moods": 43727, + "moody": 17170, + "moom": 36887, + "moon": 6334, + "moon": 3293, + "mooney": 37942, + "moonlight": 20001, + "moons": 29887, + "moonshine": 46706, + "moor": 14817, + "moor": 11877, + "moore": 28613, + "moore": 6708, + "moors": 32577, + "moose": 37562, + "moose": 17338, + "moot": 46895, + "mop": 33900, + "mopar": 41166, + "mor": 657, + "mor": 18614, + "mora": 29262, + "moral": 11246, + "morale": 39404, + "morales": 27117, + "morality": 34133, + "morally": 42519, + "morals": 46223, + "moran": 21557, + "moray": 44569, + "more": 5434, + "more": 750, + "morecam": 37305, + "morecambe": 43414, + "mored": 20195, + "moreland": 44135, + "moreno": 24826, + "morethan": 30889, + "morg": 34284, + "morgan": 15432, + "morgan": 6075, + "morgen": 35106, + "mori": 25710, + "mori": 29514, + "moris": 43131, + "moritz": 45594, + "morley": 40439, + "mormon": 27715, + "morn": 22393, + "mornin": 28327, + "morning": 10769, + "morning": 1119, + "mornings": 12106, + "moro": 31613, + "moroc": 11996, + "moroccan": 27546, + "morocco": 15228, + "moron": 31875, + "morons": 46477, + "morow": 40779, + "morph": 23915, + "morph": 41700, + "morphe": 38978, + "morpho": 38622, + "morrha": 43044, + "morri": 9876, + "morris": 22560, + "morris": 9090, + "morrison": 40961, + "morrison": 14094, + "morrisons": 40965, + "morrissey": 30040, + "morro": 48363, + "morrow": 21611, + "mors": 13064, + "morse": 25282, + "mort": 24257, + "mort": 30583, + "mortal": 31883, + "mortal": 14680, + "mortality": 20347, + "mortar": 27258, + "mortg": 12069, + "mortgage": 13988, + "mortgages": 45391, + "mortimer": 47836, + "morton": 20698, + "morty": 37391, + "mory": 22633, + "mos": 28658, + "mos": 9593, + "mosa": 14164, + "mosa": 23809, + "mosaic": 17506, + "mosch": 47003, + "mosco": 9840, + "moscow": 10371, + "moseley": 47080, + "moses": 18451, + "mosley": 46228, + "mosqu": 15215, + "mosque": 12694, + "mosques": 41214, + "mosquit": 39699, + "mosquito": 25083, + "mosquitoes": 41870, + "moss": 25107, + "moss": 12815, + "most": 7034, + "most": 1096, + "mostly": 8829, + "mosul": 29165, + "mot": 16352, + "mot": 15452, + "mota": 42499, + "motd": 46232, + "motel": 26191, + "moth": 33208, + "moth": 11736, + "mother": 7455, + "mother": 3050, + "motherhood": 32274, + "motherland": 46774, + "mothers": 10546, + "mothersday": 15583, + "motherwell": 
48104, + "moths": 29086, + "moti": 38210, + "motif": 35373, + "motion": 32139, + "motion": 7860, + "motiv": 3183, + "motivate": 26771, + "motivated": 16521, + "motivates": 44684, + "motivating": 37720, + "motivation": 26117, + "motivation": 4193, + "motivational": 32832, + "motivational": 20472, + "motivationmonday": 28703, + "motive": 36669, + "motley": 42553, + "motm": 41192, + "moto": 10646, + "moto": 11431, + "motocross": 34562, + "motogp": 16615, + "motor": 3975, + "motor": 7659, + "motorbike": 33341, + "motorcycle": 10297, + "motorcycles": 24869, + "motoring": 44491, + "motorists": 32766, + "motorola": 33738, + "motors": 14989, + "motorsport": 18371, + "motorsports": 24264, + "motorway": 31808, + "motown": 32685, + "mott": 44570, + "mott": 21708, + "motto": 23338, + "mou": 2809, + "mou": 25289, + "moud": 37698, + "moul": 25725, + "mould": 36743, + "moulin": 47656, + "moun": 2023, + "mound": 21414, + "mount": 20553, + "mount": 5532, + "mountain": 14547, + "mountain": 3965, + "mountaine": 24841, + "mountaineer": 49255, + "mountains": 5873, + "mounted": 17897, + "mounting": 29910, + "mounts": 36767, + "mour": 9053, + "mour": 42446, + "moured": 29555, + "mourinho": 18536, + "mourn": 33592, + "mourning": 24169, + "mourns": 42811, + "mous": 24837, + "mous": 17425, + "mouse": 33032, + "mouse": 9301, + "mousse": 31869, + "moustache": 32795, + "mouth": 15152, + "mouth": 4932, + "mouths": 38518, + "mov": 23950, + "move": 16624, + "move": 2783, + "moved": 6997, + "movember": 23474, + "movement": 5208, + "movements": 19665, + "mover": 37673, + "movers": 33957, + "moves": 6880, + "movi": 1707, + "movic": 43838, + "movie": 11247, + "movie": 2016, + "movies": 4772, + "moving": 32160, + "moving": 3584, + "mow": 31006, + "mow": 36329, + "mower": 30895, + "mowing": 46424, + "mowx": 44263, + "moy": 27276, + "moy": 34205, + "moyes": 37119, + "moz": 14761, + "moz": 43738, + "mozam": 26648, + "mozambique": 28831, + "mozart": 22132, + "mozz": 26317, + "mozzarella": 27845, + "mp": 1037, + "mp": 1246, + "mpa": 30749, + "mpc": 38560, + "mpd": 33814, + "mped": 28134, + "mper": 22803, + "mpg": 39830, + "mpg": 37454, + "mpgvip": 42149, + "mph": 5306, + "mpi": 43263, + "mping": 27999, + "mple": 21139, + "mplo": 47071, + "mpls": 34298, + "mpo": 33674, + "mpp": 39570, + "mps": 5504, + "mption": 9717, + "mpton": 27448, + "mpu": 47156, + "mpus": 25864, + "mpy": 17192, + "mq": 19103, + "mqm": 24687, + "mr": 3139, + "mr": 1982, + "mra": 44568, + "mrc": 25897, + "mri": 24773, + "mrs": 25003, + "mrs": 4255, + "mrt": 30256, + "mru": 22370, + "mrw": 15303, + "ms": 3525, + "ms": 988, + "msa": 36306, + "msc": 31826, + "msc": 20529, + "msd": 25804, + "msd": 36407, + "msdhoni": 32850, + "msf": 36239, + "msg": 44430, + "msg": 10928, + "msh": 41751, + "msi": 43597, + "msi": 45278, + "msk": 38501, + "msl": 42736, + "msm": 22210, + "msn": 18824, + "msn": 41042, + "msnbc": 20245, + "mson": 27773, + "mson": 12298, + "msp": 41445, + "msp": 22318, + "mss": 42136, + "mss": 48610, + "mst": 26335, + "msu": 26763, + "msu": 17298, + "mswx": 42957, + "msy": 43919, + "mt": 4252, + "mt": 3284, + "mta": 28691, + "mtb": 48306, + "mtb": 18747, + "mtc": 42482, + "mtg": 49142, + "mtg": 13648, + "mth": 48151, + "mtl": 22135, + "mtn": 26041, + "mtn": 18953, + "mtr": 46650, + "mts": 38751, + "mtv": 8099, + "mtv": 12555, + "mtvbr": 47258, + "mtvhottest": 16751, + "mtvstars": 19948, + "mu": 670, + "mu": 6411, + "mua": 21395, + "muay": 44910, + "muaythai": 47763, + "mubarak": 17957, + "muc": 49115, + "much": 14300, + "much": 1238, + "mucha": 42191, + 
"muchas": 26278, + "mucho": 19864, + "muck": 44731, + "muck": 45330, + "mud": 17491, + "mud": 11673, + "mudder": 49104, + "muddy": 21524, + "mue": 44383, + "mue": 40717, + "mueller": 46863, + "mueller": 14719, + "muen": 48646, + "muer": 33840, + "muf": 33852, + "mufc": 9013, + "muffin": 22696, + "muffins": 25922, + "mufti": 44930, + "mug": 16339, + "mug": 9722, + "mugabe": 36441, + "mughal": 37508, + "mugs": 22852, + "mugshot": 40028, + "muh": 36335, + "muh": 46475, + "muham": 10043, + "muhammad": 12259, + "muir": 44650, + "muir": 24745, + "muj": 44635, + "muk": 17327, + "muk": 32600, + "mukher": 34575, + "mukherjee": 37862, + "mul": 1899, + "mul": 43193, + "mula": 40937, + "mulator": 17463, + "mulberry": 39221, + "mule": 28695, + "mull": 17313, + "mull": 35310, + "mulled": 44641, + "mullen": 30797, + "muller": 33956, + "mullet": 35010, + "mulligan": 44336, + "mullins": 41265, + "mult": 34219, + "multi": 3947, + "multi": 6400, + "multic": 21683, + "multicul": 28004, + "multicultural": 34667, + "multil": 27975, + "multimedia": 27977, + "multin": 38996, + "multinational": 46540, + "multip": 40314, + "multiplayer": 27460, + "multiple": 6470, + "multipurpose": 47665, + "multit": 27814, + "multitasking": 48684, + "mulus": 26180, + "mum": 15565, + "mum": 4030, + "mumb": 5850, + "mumbai": 24279, + "mumbai": 6971, + "mumford": 46184, + "mummy": 16301, + "mums": 17868, + "mun": 2617, + "mun": 21059, + "muna": 48424, + "munch": 23587, + "munch": 33299, + "munchies": 44324, + "munchkin": 41305, + "mund": 14244, + "mundo": 20990, + "muni": 27327, + "muni": 39795, + "munich": 13526, + "munici": 12159, + "municipal": 43667, + "municipal": 16600, + "municipality": 29987, + "munition": 32668, + "munro": 36501, + "munster": 27201, + "mup": 21966, + "muppet": 40598, + "muppets": 40187, + "mups": 42195, + "mur": 2144, + "mur": 18293, + "mura": 45176, + "mural": 12315, + "murals": 31499, + "murder": 28136, + "murder": 5787, + "murdered": 13158, + "murderer": 26956, + "murderers": 48472, + "murdering": 36055, + "murders": 22409, + "murdoch": 29037, + "murphy": 48976, + "murphy": 8914, + "murray": 31978, + "murray": 7513, + "murs": 38783, + "mus": 2198, + "mus": 8103, + "musa": 30540, + "musc": 5696, + "muscat": 33322, + "muscle": 27323, + "muscle": 9269, + "muscles": 16786, + "muscular": 30606, + "muse": 2369, + "muse": 15686, + "museo": 36457, + "muses": 48243, + "museu": 27087, + "museum": 15602, + "museum": 2786, + "museums": 15542, + "museumweek": 37996, + "mush": 7635, + "mushroom": 13011, + "mushrooms": 14730, + "musi": 15628, + "music": 4110, + "music": 1179, + "musica": 26668, + "musical": 36002, + "musical": 5173, + "musically": 48893, + "musicals": 36974, + "musichistory": 37890, + "musician": 11179, + "musicians": 12498, + "musicislife": 43311, + "musicmonday": 35887, + "musicvideo": 26764, + "musik": 32986, + "musings": 44961, + "musique": 42250, + "musk": 32143, + "musk": 19063, + "muskete": 32775, + "musketeers": 37993, + "musko": 34987, + "muskoka": 40832, + "musli": 4958, + "muslim": 43795, + "muslim": 7060, + "muslims": 10513, + "muss": 41493, + "mussels": 33393, + "must": 6783, + "must": 2048, + "mustache": 23451, + "mustaf": 23596, + "mustafa": 29000, + "mustang": 42361, + "mustang": 13309, + "mustangs": 22500, + "mustard": 15794, + "muster": 47361, + "mustread": 28978, + "mut": 12598, + "mut": 22839, + "mutant": 28384, + "mutation": 38626, + "mutations": 39651, + "mute": 31252, + "muted": 48028, + "muth": 34280, + "mutil": 39950, + "mutt": 45924, + "mutu": 17574, + "mutual": 15055, + 
"mutuals": 31158, + "muy": 44625, + "mv": 10580, + "mv": 8269, + "mvc": 40549, + "mvp": 8905, + "mw": 16725, + "mw": 11206, + "mwc": 24289, + "mwf": 48565, + "mx": 21947, + "mx": 9575, + "my": 1152, + "my": 607, + "mya": 31401, + "myal": 42735, + "myan": 13761, + "myanmar": 14764, + "myart": 38826, + "myco": 48362, + "mydayin": 41896, + "mydayinla": 42801, + "mydubai": 43475, + "mye": 27551, + "myel": 40084, + "myers": 15993, + "myjaps": 47939, + "myle": 43700, + "myles": 25511, + "mylife": 30537, + "mylittle": 37757, + "mylittlepony": 45107, + "myo": 16206, + "myr": 20272, + "myra": 35694, + "myri": 34972, + "myrt": 47785, + "myrtle": 27768, + "mys": 11724, + "myself": 3245, + "mysore": 44924, + "myspace": 41382, + "myster": 4669, + "mysteries": 20605, + "mysterious": 12650, + "mystery": 39828, + "mystery": 6711, + "mysti": 28711, + "mystic": 36264, + "mystic": 23722, + "mystical": 34122, + "myth": 20322, + "myth": 13878, + "mythical": 34377, + "mytho": 43857, + "mythology": 22496, + "myths": 18675, + "mz": 29509, + "mz": 33400, + "mzan": 36322, + "mzansi": 43301, + "má": 36842, + "mé": 21890, + "méxico": 46159, + "mü": 28142, + "mün": 41235, + "n": 77, + "n": 333, + "na": 1097, + "na": 1272, + "naa": 37738, + "naacp": 32176, + "nab": 6951, + "nab": 19440, + "nabe": 35111, + "naby": 24800, + "nac": 14557, + "nac": 18950, + "nach": 12168, + "nach": 43622, + "nacho": 35647, + "nachos": 32847, + "nacht": 37261, + "nacional": 38782, + "nad": 6204, + "nad": 43928, + "nada": 31683, + "nadal": 20814, + "nade": 24908, + "nadi": 30512, + "nadia": 27487, + "nadine": 23356, + "nadu": 20936, + "nae": 19374, + "naf": 16161, + "naf": 45956, + "nafta": 43123, + "nag": 6694, + "nag": 23902, + "naga": 45953, + "naga": 38997, + "nagar": 17490, + "nage": 41219, + "nago": 38349, + "nagoya": 43303, + "nagpur": 43328, + "nah": 26421, + "nah": 11129, + "nahi": 35244, + "nai": 6230, + "nai": 10692, + "naia": 31340, + "naidu": 42429, + "naija": 16326, + "naik": 34424, + "nail": 19459, + "nail": 9059, + "nailart": 43532, + "nailed": 19035, + "nails": 8469, + "nair": 27107, + "naira": 39450, + "naire": 48892, + "nairobi": 17756, + "nais": 46396, + "naissance": 44761, + "naive": 43362, + "naj": 30985, + "naji": 32589, + "nak": 9248, + "nak": 25550, + "naked": 46371, + "naked": 11478, + "naku": 39864, + "nal": 14132, + "nal": 3119, + "nale": 27198, + "nall": 32869, + "nally": 26158, + "nam": 1410, + "nam": 12344, + "nama": 39586, + "naman": 27635, + "namaste": 35549, + "name": 18160, + "name": 1981, + "named": 3194, + "nameis": 40831, + "nament": 3916, + "naments": 16540, + "names": 6130, + "namesake": 41298, + "nami": 20393, + "namibia": 23731, + "naming": 19367, + "namjoon": 31986, + "namm": 35524, + "namo": 46013, + "namo": 24854, + "nan": 4375, + "nan": 7750, + "nana": 18761, + "nanaimo": 40518, + "nancy": 21511, + "nancy": 11425, + "nand": 20435, + "nandez": 12764, + "nando": 46044, + "nang": 48148, + "nani": 27980, + "nanny": 31104, + "nano": 15835, + "nano": 22006, + "nanop": 34177, + "nanotechnology": 42235, + "nanow": 46734, + "nant": 22526, + "nantes": 47533, + "nantucket": 41573, + "nao": 39319, + "naom": 34955, + "naomi": 20173, + "nap": 6568, + "nap": 11012, + "napa": 20545, + "napier": 40875, + "napkin": 38930, + "naples": 23560, + "napo": 18715, + "napol": 20122, + "napoleon": 24969, + "napoli": 22445, + "napp": 11359, + "napping": 37657, + "naps": 31317, + "naq": 46453, + "nar": 2977, + "nar": 20145, + "nara": 33823, + "narcis": 25229, + "narcissi": 35442, + "narco": 38461, + "nard": 18216, + "nare": 
34853, + "naren": 8468, + "narendr": 9807, + "narendra": 25848, + "narendramodi": 9853, + "narnia": 48693, + "narr": 11845, + "narrated": 43609, + "narrative": 15933, + "narratives": 35117, + "narrator": 46529, + "narrow": 24006, + "narrow": 16652, + "narrowly": 29747, + "naruto": 22732, + "nas": 3090, + "nas": 15250, + "nasa": 6841, + "nasal": 42853, + "nascar": 25723, + "nascar": 7868, + "nasdaq": 26629, + "nash": 6771, + "nash": 13620, + "nasheed": 49176, + "nashgrier": 33372, + "nashville": 45356, + "nashville": 8585, + "nasi": 47987, + "nasir": 47509, + "nassau": 34048, + "nasser": 43559, + "nasty": 32930, + "nasty": 8709, + "nat": 1276, + "nat": 11310, + "nata": 39392, + "natal": 28516, + "natali": 20296, + "natalia": 32978, + "natalie": 36634, + "natalie": 13595, + "natash": 48701, + "natasha": 23093, + "nate": 26643, + "nate": 7587, + "natgeo": 33009, + "natgeo": 25046, + "nath": 22203, + "nath": 19843, + "nathan": 13028, + "nathan": 9711, + "nathanfillion": 47422, + "nathaniel": 32667, + "nati": 1060, + "nati": 13384, + "natic": 44944, + "natin": 44358, + "nation": 2317, + "nation": 2670, + "national": 3126, + "national": 1362, + "nationalbestfriend": 42222, + "nationaldogday": 32227, + "nationalism": 29867, + "nationalist": 25058, + "nationality": 44451, + "nationally": 15130, + "nationalpark": 33060, + "nationalparks": 41204, + "nationals": 10784, + "nationaltrust": 34051, + "nations": 7654, + "nationwide": 13795, + "native": 20639, + "native": 4562, + "natives": 36060, + "nativity": 33988, + "natl": 39225, + "natl": 34465, + "nato": 13139, + "nats": 21106, + "natu": 2775, + "natur": 6800, + "natural": 13198, + "natural": 3288, + "naturally": 12995, + "naturals": 44686, + "nature": 9382, + "nature": 2625, + "naturelovers": 41514, + "naturephotography": 22533, + "natures": 15616, + "natureuk": 46193, + "nau": 5955, + "nau": 32878, + "naught": 41001, + "naughty": 47255, + "naughty": 15101, + "nautical": 31660, + "nav": 3413, + "nav": 25308, + "navajo": 35523, + "naval": 44725, + "naval": 13273, + "navar": 24848, + "navarro": 37104, + "nave": 42704, + "naveen": 43837, + "naver": 32534, + "navi": 16159, + "navi": 44848, + "navig": 12507, + "navigate": 24400, + "navigating": 33134, + "navigation": 20148, + "navigator": 38910, + "navis": 36377, + "navratri": 45428, + "navy": 28414, + "navy": 5598, + "naw": 16259, + "naw": 30500, + "nawaz": 49161, + "nawaz": 19523, + "nax": 38299, + "nay": 11704, + "nay": 16182, + "naya": 38917, + "nayanth": 38157, + "nayanthara": 45184, + "naz": 6363, + "naz": 35534, + "nazi": 12972, + "nazis": 21778, + "nb": 6459, + "nb": 6813, + "nba": 22524, + "nba": 5139, + "nbad": 43458, + "nbaf": 30127, + "nbafinals": 33803, + "nbap": 41956, + "nbaplayoffs": 43860, + "nbat": 46291, + "nbc": 9352, + "nbc": 8799, + "nbd": 24526, + "nbl": 42652, + "nc": 5021, + "nc": 4911, + "nca": 6921, + "ncaa": 9418, + "ncbd": 47221, + "ncc": 33195, + "ncc": 36686, + "ncds": 47573, + "ncfc": 31274, + "ncis": 33617, + "ncpol": 40562, + "ncr": 38474, + "ncs": 42689, + "nct": 27723, + "nct": 20319, + "ncwx": 36166, + "nd": 5625, + "nd": 1764, + "nda": 32862, + "ndc": 47564, + "ndi": 48229, + "ndp": 19257, + "nds": 31347, + "ndtv": 26261, + "ne": 557, + "ne": 1422, + "nea": 24068, + "neal": 33652, + "neal": 16730, + "near": 11296, + "near": 2252, + "nearby": 13314, + "nearest": 18985, + "nearing": 26571, + "nearly": 4816, + "nears": 37710, + "neat": 43201, + "neat": 15465, + "neath": 18315, + "neau": 31559, + "neb": 40209, + "nebra": 13371, + "nebraska": 14565, + "nebu": 49295, + 
"nebula": 22532, + "nec": 25109, + "nec": 22992, + "necess": 6961, + "necessarily": 25853, + "necessary": 8955, + "necessities": 43483, + "necessity": 33163, + "neck": 6066, + "neck": 6906, + "necklace": 7385, + "necklaces": 32276, + "necks": 29701, + "nectar": 33683, + "ned": 16030, + "ned": 1369, + "nederland": 49058, + "nee": 20494, + "nee": 10601, + "need": 3229, + "need": 1262, + "needed": 4049, + "needing": 22894, + "needle": 44490, + "needle": 19886, + "needles": 27250, + "needless": 39984, + "needs": 2536, + "needy": 30150, + "neel": 33092, + "neel": 46043, + "neer": 34245, + "nees": 47248, + "neet": 46362, + "neg": 5513, + "negan": 42623, + "negative": 8869, + "negatively": 40254, + "negativity": 34658, + "neglec": 18827, + "neglect": 33680, + "neglected": 31893, + "negli": 32594, + "negligence": 45658, + "negoti": 10216, + "negotiate": 32969, + "negotiating": 35510, + "negotiation": 36504, + "negotiations": 20433, + "negr": 42190, + "negro": 26554, + "neh": 40416, + "neh": 41697, + "neha": 44463, + "nehru": 30316, + "nei": 9366, + "neigh": 4061, + "neighb": 6534, + "neighbor": 7759, + "neighbor": 14485, + "neighborhood": 9471, + "neighborhoods": 26713, + "neighboring": 44754, + "neighbors": 13037, + "neighbour": 15858, + "neighbour": 23719, + "neighbourhood": 20312, + "neighbours": 17594, + "neil": 13591, + "neil": 8030, + "neilhimself": 45682, + "neill": 19324, + "neither": 14398, + "nek": 47727, + "neko": 47066, + "nel": 5476, + "nel": 2693, + "nell": 27081, + "nell": 8117, + "nelly": 21166, + "nels": 19296, + "nelson": 24774, + "nelson": 8586, + "nem": 45153, + "neman": 48553, + "neme": 30993, + "nemesis": 37811, + "nemo": 30441, + "nen": 17817, + "nen": 15451, + "nene": 44167, + "neo": 14562, + "neo": 11017, + "neon": 21043, + "neon": 13919, + "neonatal": 46464, + "neop": 49069, + "nep": 20739, + "nep": 41960, + "nepal": 25597, + "nepal": 10066, + "nepali": 47579, + "neph": 27926, + "nephe": 41810, + "nephew": 11689, + "nephews": 43747, + "nephro": 43054, + "neptune": 30566, + "ner": 2064, + "ner": 998, + "nerd": 24452, + "nerd": 12273, + "nerds": 22609, + "nerdy": 33124, + "nered": 17583, + "nerf": 42914, + "nering": 20226, + "nero": 29048, + "ners": 2129, + "nerve": 18571, + "nerves": 27813, + "nervous": 13928, + "nery": 48597, + "nes": 5457, + "nes": 4980, + "nesburg": 27159, + "nese": 32220, + "ness": 7187, + "ness": 1294, + "nesses": 20107, + "nessy": 32939, + "nest": 20302, + "nest": 8719, + "nesting": 28860, + "nestle": 43967, + "nestled": 38107, + "nests": 41133, + "net": 1851, + "net": 2315, + "netany": 23137, + "netanyahu": 23583, + "netball": 19761, + "netes": 44335, + "netfli": 6304, + "netflix": 35325, + "netflix": 6600, + "nether": 9946, + "netherlands": 11060, + "neti": 43980, + "netneutrality": 47794, + "nets": 8582, + "nett": 23403, + "nett": 6975, + "nette": 13271, + "network": 23285, + "network": 3304, + "networking": 9818, + "networks": 10004, + "neu": 3855, + "neu": 43342, + "neue": 45764, + "neur": 19001, + "neur": 31976, + "neural": 26388, + "neuro": 7401, + "neuro": 36000, + "neurological": 41718, + "neurology": 43197, + "neurons": 40442, + "neuroscience": 23381, + "neutr": 17207, + "neutral": 17011, + "neutrality": 26511, + "neutron": 44056, + "nev": 10236, + "nev": 43645, + "neva": 43304, + "nevada": 13499, + "neve": 44099, + "neve": 44023, + "never": 6746, + "never": 1426, + "neveragain": 45053, + "neverforget": 19242, + "nevergiveup": 42497, + "neverland": 41483, + "nevertheless": 48355, + "nevertrump": 47494, + "neville": 19269, + "nevis": 43670, 
+ "new": 1218, + "new": 686, + "newark": 20240, + "newbie": 45427, + "newborn": 18320, + "newbury": 34169, + "newcastle": 41955, + "newcastle": 9302, + "newcomer": 30648, + "newcomers": 44037, + "newe": 40068, + "newell": 41436, + "newer": 33099, + "newest": 4990, + "newfound": 25250, + "newfoundland": 28079, + "newh": 18546, + "newin": 31911, + "newjersey": 32621, + "newly": 42186, + "newly": 7056, + "newman": 15815, + "newmarket": 38617, + "newmexico": 35238, + "newmusic": 32510, + "newmusic": 17201, + "newor": 25969, + "neworleans": 31205, + "newport": 42580, + "newport": 14846, + "newprofile": 14633, + "newprofilepic": 14754, + "newrelease": 34793, + "news": 6216, + "news": 1120, + "newsat": 43979, + "newsc": 28656, + "newscast": 45031, + "newsle": 10727, + "newsletter": 11069, + "newsnow": 48650, + "newsp": 7109, + "newspaper": 8786, + "newspapers": 22423, + "newsroom": 23200, + "newt": 37224, + "newton": 33122, + "newton": 12606, + "newtown": 31747, + "newyear": 22161, + "newyear": 12999, + "newyearseve": 37587, + "newyork": 18140, + "newyork": 10454, + "newyorkcity": 30460, + "newyorker": 39732, + "newzealand": 21117, + "nex": 6897, + "nex": 39720, + "next": 12434, + "next": 1131, + "nextgen": 41933, + "nexus": 19053, + "ney": 3857, + "ney": 1438, + "neymar": 21878, + "neys": 12616, + "nez": 27388, + "nf": 15195, + "nf": 25643, + "nfamily": 20098, + "nfc": 23695, + "nffc": 27893, + "nfl": 11219, + "nfl": 4691, + "nfldraft": 25002, + "ng": 10352, + "ng": 5215, + "nga": 35477, + "ngc": 29046, + "ngo": 38740, + "ngo": 24821, + "ngos": 34627, + "nguyen": 29947, + "nh": 3760, + "nh": 10803, + "nhc": 44817, + "nhl": 12290, + "nhl": 8167, + "nhlbruins": 39081, + "nhljets": 49357, + "nhm": 39483, + "nhpolitics": 36125, + "nhq": 42368, + "nhra": 30052, + "nhs": 23282, + "nhs": 7695, + "ni": 697, + "ni": 3256, + "nia": 3098, + "niag": 18071, + "niagar": 39298, + "niagara": 18965, + "niall": 41354, + "niall": 8327, + "niallo": 22855, + "niallofficial": 23084, + "niam": 39347, + "nian": 46003, + "nib": 31049, + "nic": 2109, + "nic": 6651, + "nica": 29040, + "nicar": 25119, + "nicaragua": 28423, + "nice": 28386, + "nice": 1805, + "nicely": 12303, + "nicer": 29488, + "nicest": 22967, + "niche": 25279, + "nichol": 7668, + "nicholas": 39814, + "nicholas": 13148, + "nicholls": 38846, + "nichols": 22730, + "nicholson": 28745, + "nick": 4209, + "nick": 4253, + "nickel": 22034, + "nickelo": 28668, + "nickelodeon": 33279, + "nicki": 17738, + "nickimin": 27390, + "nickiminaj": 27593, + "nickjonas": 43862, + "nickname": 24731, + "nicknamed": 45190, + "nicks": 15049, + "nicky": 28893, + "nicky": 22091, + "nico": 20850, + "nico": 17779, + "nicol": 9919, + "nicol": 48274, + "nicola": 21791, + "nicolas": 43813, + "nicolas": 18918, + "nicole": 21246, + "nicole": 10000, + "nicot": 45099, + "nicotine": 46697, + "nie": 9524, + "nie": 3501, + "niece": 12795, + "nieces": 44877, + "niel": 19109, + "niel": 26837, + "niels": 37154, + "nielsen": 28372, + "nier": 13014, + "nies": 10586, + "niest": 15007, + "nieu": 29781, + "nific": 4748, + "nifty": 25604, + "nig": 27933, + "nig": 28099, + "nigan": 48516, + "nigel": 33919, + "nigel": 15153, + "niger": 4524, + "niger": 29920, + "nigeri": 40913, + "nigeria": 6106, + "nigerian": 12167, + "nigerians": 25358, + "nigh": 13525, + "nigh": 48157, + "night": 3870, + "night": 930, + "nightclub": 20418, + "nighter": 41349, + "nighting": 36211, + "nightingale": 40696, + "nightlife": 28823, + "nightly": 28868, + "nightmare": 12867, + "nightmares": 24032, + "nightout": 44257, + 
"nights": 4296, + "nighttime": 38147, + "nightw": 39956, + "nih": 25783, + "nik": 5126, + "nik": 13705, + "nike": 16300, + "nike": 5783, + "nikeplus": 43154, + "niki": 36136, + "nikita": 37118, + "nikk": 38596, + "nikki": 23156, + "nikki": 16689, + "niko": 43771, + "nikol": 27430, + "nikola": 42146, + "nikon": 25488, + "nikon": 13849, + "nikov": 43960, + "nil": 16852, + "nil": 35030, + "nile": 24252, + "nim": 30402, + "nim": 42093, + "nima": 42586, + "nin": 5794, + "nin": 14145, + "nina": 13891, + "nine": 16213, + "nine": 7330, + "ninety": 48214, + "ning": 6050, + "ning": 762, + "ningham": 23395, + "ningly": 43537, + "nings": 4588, + "nington": 26214, + "ninj": 23225, + "ninja": 11969, + "ninjas": 42796, + "nino": 25633, + "ninten": 6184, + "nintendo": 13969, + "nintendo": 7886, + "nintendoswitch": 16404, + "ninth": 22770, + "nip": 33889, + "nip": 22333, + "nipp": 24634, + "nipple": 45987, + "nipples": 44774, + "nippon": 47960, + "nips": 49241, + "nir": 15503, + "nir": 40057, + "nireland": 45763, + "niro": 47373, + "nirvana": 28300, + "nis": 5609, + "nis": 3786, + "nish": 19834, + "nish": 13256, + "nished": 24141, + "nishi": 32386, + "nishings": 49247, + "nison": 45700, + "niss": 39043, + "nissan": 37635, + "nissan": 11082, + "nist": 17782, + "nister": 36640, + "nit": 4087, + "nit": 19011, + "nite": 8427, + "niti": 43964, + "niti": 45355, + "nitin": 37529, + "nitro": 30726, + "nitrogen": 30706, + "niture": 7840, + "nity": 12707, + "niu": 48187, + "niv": 47300, + "niversary": 29643, + "nix": 48552, + "nix": 32278, + "nixon": 20671, + "nj": 8343, + "nj": 6672, + "njcaa": 48992, + "njpw": 38992, + "nk": 22708, + "nk": 17456, + "nko": 36353, + "nl": 12057, + "nl": 7655, + "nli": 37502, + "nlp": 35680, + "nlwx": 49260, + "nm": 15956, + "nm": 11370, + "nmd": 43331, + "nme": 40454, + "nmwx": 47967, + "nn": 8947, + "nn": 12925, + "nnn": 26277, + "nnnn": 41420, + "no": 578, + "no": 871, + "noaa": 27557, + "noah": 28806, + "noah": 11519, + "nobel": 33742, + "nobel": 15605, + "nobelprize": 46074, + "noble": 29430, + "noble": 12051, + "nobody": 7009, + "noc": 16988, + "noc": 44420, + "nocchi": 46359, + "noch": 38672, + "noche": 29689, + "noches": 44166, + "nock": 16993, + "noctur": 26291, + "nocturnal": 41738, + "nod": 18648, + "nodapl": 39079, + "node": 31434, + "node": 24871, + "nodejs": 39262, + "nodes": 40534, + "noel": 38406, + "noel": 17496, + "nof": 29505, + "noff": 46979, + "nofilter": 16418, + "nog": 31157, + "noh": 40775, + "noi": 43115, + "noi": 39889, + "noida": 33404, + "noir": 39291, + "noir": 12953, + "nois": 22057, + "noise": 41018, + "noise": 9307, + "noises": 31575, + "noisse": 45686, + "noisy": 33495, + "nokia": 17731, + "nol": 8055, + "nola": 13289, + "nolan": 17323, + "nold": 40322, + "nole": 34654, + "noles": 40569, + "nollywood": 43145, + "nology": 42221, + "nom": 2981, + "nom": 12799, + "nomad": 27849, + "noman": 45592, + "nomin": 5643, + "nominate": 17122, + "nominated": 8710, + "nominating": 45747, + "nomination": 14136, + "nominations": 17124, + "nominee": 14122, + "nominees": 17873, + "nomnom": 26962, + "nomore": 35126, + "noms": 35706, + "non": 4282, + "non": 3353, + "none": 29644, + "none": 8906, + "nonetheless": 39675, + "nonfiction": 31654, + "nonprofit": 19315, + "nonprofits": 37935, + "nonsense": 19136, + "nonstop": 30300, + "nont": 25207, + "noo": 6759, + "noo": 46672, + "noodle": 19521, + "noodles": 15782, + "nook": 30088, + "noon": 37693, + "noon": 2347, + "noor": 46978, + "noor": 31323, + "nope": 15625, + "nor": 1062, + "nor": 6190, + "nora": 25890, + "norcal": 
41970, + "nord": 19261, + "nord": 36067, + "nordic": 36439, + "nordic": 20734, + "nordstrom": 38562, + "norfolk": 30232, + "norfolk": 12202, + "norm": 10990, + "norm": 22457, + "norma": 35757, + "normal": 28748, + "normal": 5967, + "normali": 45157, + "normally": 15870, + "norman": 22027, + "norman": 11338, + "normandy": 23840, + "normani": 44596, + "norms": 33011, + "norris": 21814, + "norse": 36559, + "norte": 35638, + "north": 3468, + "north": 2188, + "northampton": 49246, + "northampton": 26175, + "northan": 37081, + "northbound": 24228, + "northcarolina": 43386, + "northe": 24675, + "northeast": 42673, + "northeast": 13009, + "northeastern": 28297, + "northeasthour": 42869, + "norther": 26908, + "northern": 17210, + "northern": 5049, + "northernlights": 48940, + "northkorea": 38495, + "northside": 45957, + "northumber": 22295, + "northumberland": 22922, + "northwales": 49371, + "northwest": 12894, + "northwestern": 23685, + "norton": 18032, + "norway": 8780, + "norwe": 14414, + "norwegian": 15971, + "norwich": 37629, + "norwich": 15812, + "norwood": 37889, + "nos": 13420, + "nose": 24192, + "nose": 8231, + "noses": 48163, + "nostal": 12076, + "nostalgia": 16622, + "nostalgic": 24468, + "not": 2534, + "not": 783, + "notable": 22023, + "notch": 19476, + "notdead": 42059, + "note": 10910, + "note": 3246, + "notebook": 16365, + "notebooks": 37623, + "noted": 22501, + "notes": 5795, + "nothin": 24291, + "nothing": 28412, + "nothing": 2586, + "noti": 10686, + "notic": 6915, + "notice": 6683, + "noticeable": 40857, + "noticed": 9324, + "notices": 33459, + "noticias": 47759, + "noticing": 37571, + "notification": 22512, + "notifications": 23169, + "notified": 39454, + "noting": 38649, + "notion": 37856, + "notjust": 33212, + "notjustlakes": 45803, + "notmy": 39301, + "noto": 29878, + "noton": 48258, + "notor": 21711, + "notori": 44065, + "notorious": 22489, + "notre": 24397, + "notre": 15306, + "notredame": 34077, + "notsorry": 34361, + "nott": 9333, + "nott": 34989, + "notte": 47308, + "nottingham": 12852, + "notts": 25598, + "nou": 8751, + "nou": 30953, + "noun": 33663, + "nouri": 23796, + "nourish": 46025, + "nourished": 48354, + "nous": 29485, + "nouveau": 29948, + "nouvel": 34215, + "nov": 2264, + "nov": 4293, + "nova": 11236, + "novak": 26465, + "novasco": 33785, + "novascotia": 34744, + "novation": 39753, + "nove": 30507, + "novel": 15044, + "novel": 6080, + "novelist": 27314, + "novella": 42770, + "novels": 16040, + "novelty": 37750, + "november": 3680, + "nover": 37465, + "novi": 47957, + "novice": 33743, + "novo": 27504, + "novo": 36581, + "now": 2040, + "now": 692, + "nowadays": 26155, + "nowhere": 14108, + "nowplaying": 3708, + "nowwatching": 30852, + "nox": 27406, + "noxi": 39304, + "noxious": 42833, + "noy": 32787, + "np": 18205, + "np": 6314, + "npa": 42378, + "npc": 33966, + "npr": 39941, + "npr": 24078, + "nps": 22025, + "npt": 47231, + "nr": 6574, + "nr": 9713, + "nra": 17286, + "nrc": 45786, + "nrf": 47982, + "nrg": 48662, + "nrl": 27142, + "nrl": 18127, + "ns": 12405, + "ns": 1373, + "nsa": 23004, + "nsc": 32792, + "nsd": 36659, + "nsf": 34180, + "nsfw": 19847, + "nsi": 47824, + "nsw": 21301, + "nsw": 11693, + "nswpol": 44434, + "nt": 10902, + "nt": 3207, + "ntr": 30845, + "nts": 43775, + "ntt": 22859, + "ntv": 24807, + "ntv": 45304, + "nu": 1156, + "nu": 9444, + "nucle": 25693, + "nuclear": 34136, + "nuclear": 7279, + "nude": 16630, + "nudes": 32122, + "nue": 22834, + "nuestra": 45649, + "nuestro": 38590, + "nuev": 47861, + "nueva": 48810, + "nuevo": 30265, + "nufc": 
15720, + "nuff": 37324, + "nug": 13471, + "nugent": 47457, + "nugget": 25448, + "nuggets": 18970, + "nuh": 45950, + "nuit": 38815, + "nuk": 39228, + "nuke": 39399, + "nul": 29358, + "null": 47376, + "num": 17896, + "num": 30534, + "numb": 34639, + "numb": 39427, + "number": 44078, + "number": 2842, + "numbered": 25975, + "numbers": 6121, + "numer": 11442, + "numerous": 17082, + "numis": 39100, + "nun": 12511, + "nun": 28540, + "nunavut": 48626, + "nunes": 40697, + "nuns": 44061, + "nup": 46757, + "nur": 3920, + "nur": 33493, + "nure": 42480, + "nurse": 37547, + "nurse": 10058, + "nursery": 15540, + "nurses": 12938, + "nursing": 11126, + "nurture": 38865, + "nurturing": 45229, + "nus": 25157, + "nus": 18239, + "nut": 10358, + "nut": 6491, + "nutcracker": 36733, + "nutella": 27312, + "nutr": 6198, + "nutri": 15470, + "nutrient": 32900, + "nutrients": 24668, + "nutriti": 17978, + "nutrition": 41546, + "nutrition": 7989, + "nutritional": 26457, + "nutritious": 30387, + "nuts": 8644, + "nutshell": 26659, + "nutty": 39846, + "nv": 17217, + "nv": 16985, + "nvi": 22847, + "nvidia": 27325, + "nw": 7826, + "nw": 7030, + "nwa": 34237, + "nwo": 40976, + "nws": 23333, + "nws": 30998, + "nwsl": 48394, + "nwt": 25029, + "nx": 18810, + "nx": 16997, + "nxt": 35037, + "nxt": 17804, + "ny": 1383, + "ny": 1350, + "nya": 24165, + "nyc": 13304, + "nyc": 2832, + "nycc": 27187, + "nycfc": 47497, + "nye": 40723, + "nye": 13416, + "nyfw": 21089, + "nyk": 46841, + "nylon": 25915, + "nyo": 41534, + "nyo": 44586, + "nypd": 42293, + "nypd": 18279, + "nyr": 32538, + "nyrd": 47936, + "nys": 36375, + "nys": 23423, + "nyse": 32650, + "nyt": 46311, + "nyt": 12816, + "nytimes": 13772, + "nyu": 43143, + "nyu": 31355, + "nz": 10142, + "nz": 7082, + "o": 78, + "o": 334, + "oa": 11994, + "oahu": 37790, + "oak": 6010, + "oak": 7221, + "oakland": 42663, + "oakland": 12077, + "oakley": 27810, + "oaks": 16734, + "oakville": 38500, + "oasis": 18185, + "oat": 20095, + "oat": 34132, + "oates": 47094, + "oath": 20108, + "oatmeal": 26374, + "oats": 24150, + "oax": 43090, + "oaxaca": 47818, + "ob": 1411, + "ob": 14908, + "oba": 42902, + "oba": 15147, + "obam": 13174, + "obama": 4276, + "obamacare": 18005, + "obe": 11897, + "obe": 29117, + "obedience": 48921, + "ober": 15284, + "obese": 41757, + "obesity": 19499, + "obey": 26926, + "obi": 21454, + "obi": 18414, + "obile": 20513, + "obitu": 39218, + "obituary": 43580, + "objec": 7970, + "object": 14115, + "objective": 23663, + "objectives": 30238, + "objects": 13770, + "obl": 31452, + "oblast": 42672, + "obli": 11416, + "obligation": 34473, + "obligations": 38232, + "obligatory": 35020, + "oblivion": 45323, + "obo": 46001, + "obo": 26618, + "obrien": 31946, + "obs": 39162, + "obsc": 20392, + "obscure": 33337, + "obse": 8433, + "observ": 9050, + "observation": 20250, + "observations": 27409, + "observatory": 21236, + "observe": 23217, + "observed": 21267, + "observer": 22077, + "observers": 47544, + "observing": 28359, + "obsessed": 9744, + "obsession": 15718, + "obsi": 47323, + "obsole": 35561, + "obsolete": 40628, + "obst": 29398, + "obstac": 24075, + "obstacle": 29751, + "obstacles": 24480, + "obste": 49103, + "obstru": 44876, + "obstruc": 38762, + "obstruction": 40240, + "obtain": 26555, + "obtained": 29322, + "obvious": 13959, + "obviously": 10068, + "oc": 1566, + "oc": 6603, + "oca": 31120, + "ocal": 38148, + "occ": 43940, + "occa": 8530, + "occasion": 12280, + "occasional": 33059, + "occasionally": 32479, + "occasions": 26154, + "occer": 20804, + "occi": 42994, + "occu": 7863, + 
"occult": 42529, + "occup": 11152, + "occupation": 18624, + "occupational": 30644, + "occupied": 17271, + "occupy": 22453, + "occupy": 24210, + "occur": 11264, + "occur": 21813, + "occurred": 19850, + "occurrence": 40615, + "occurring": 31335, + "occurs": 26563, + "ocd": 35904, + "oce": 3509, + "ocean": 12941, + "ocean": 4918, + "oceans": 16792, + "och": 29334, + "och": 32011, + "oche": 33045, + "oci": 9891, + "ocity": 46039, + "ock": 33579, + "ock": 21313, + "ocks": 22410, + "oclock": 36274, + "oco": 32553, + "ocon": 33090, + "ocr": 45813, + "ocre": 40320, + "ocs": 27297, + "oct": 4565, + "octa": 23444, + "octag": 37768, + "octagon": 49167, + "octane": 43040, + "octavia": 47416, + "octo": 31032, + "october": 3481, + "octopus": 22327, + "ocu": 22709, + "oculus": 30082, + "od": 4886, + "od": 9719, + "oda": 24777, + "oday": 41954, + "odd": 15525, + "odd": 11387, + "oddly": 34213, + "odds": 11555, + "ode": 19125, + "ode": 19639, + "odell": 41556, + "odessa": 43574, + "odi": 12223, + "odi": 18853, + "odin": 35175, + "odisha": 15737, + "odo": 49188, + "odo": 40993, + "odor": 39509, + "odu": 35095, + "odu": 39904, + "odyssey": 19991, + "oe": 24251, + "oe": 11667, + "oec": 24288, + "oecd": 30816, + "oem": 29650, + "oes": 3643, + "of": 684, + "of": 539, + "ofa": 29774, + "ofc": 19877, + "ofe": 30000, + "ofer": 47322, + "off": 892, + "off": 1007, + "offe": 8261, + "offee": 34059, + "offen": 7231, + "offence": 34594, + "offences": 33972, + "offended": 30765, + "offender": 48294, + "offenders": 35878, + "offense": 15253, + "offensive": 11037, + "offer": 20607, + "offer": 3271, + "offered": 9395, + "offering": 6896, + "offerings": 24535, + "offers": 4679, + "offic": 3276, + "office": 18033, + "office": 2171, + "officeof": 38750, + "officeofrg": 47100, + "officer": 4683, + "officers": 6335, + "offices": 10933, + "offici": 1401, + "official": 5768, + "official": 1868, + "officially": 4226, + "officials": 7658, + "officiel": 26548, + "offl": 16851, + "offline": 22724, + "offro": 32198, + "offroad": 37173, + "offs": 23987, + "offseason": 25485, + "offset": 28843, + "offshore": 15496, + "offside": 49347, + "offspring": 38635, + "offthe": 38189, + "ofi": 36692, + "ofi": 49090, + "oficial": 18061, + "oft": 16693, + "oftball": 39768, + "often": 4864, + "ofthe": 7592, + "oftheday": 6988, + "oftheweek": 20654, + "oftheyear": 33975, + "og": 11542, + "og": 8555, + "oga": 47312, + "ogden": 42011, + "ogil": 39013, + "ography": 22399, + "ogue": 24761, + "ogun": 48970, + "oh": 5648, + "oh": 1779, + "ohana": 48330, + "ohh": 23076, + "ohhh": 27697, + "ohhhh": 40201, + "ohi": 5207, + "ohio": 18951, + "ohio": 6155, + "ohiostate": 41324, + "ohl": 45547, + "ohl": 41095, + "ohmy": 29758, + "ohn": 48043, + "ohs": 39542, + "ohwx": 47993, + "oi": 27357, + "oi": 13934, + "oic": 45554, + "oid": 14758, + "oids": 21847, + "oil": 11973, + "oil": 2870, + "oiland": 32316, + "oilandgas": 34130, + "oilers": 21627, + "oilpainting": 34279, + "oils": 17886, + "oily": 47550, + "oir": 48079, + "oir": 37113, + "ois": 23262, + "oit": 18453, + "oitnb": 34865, + "oj": 30986, + "oj": 34553, + "ok": 1944, + "ok": 2481, + "oka": 42258, + "oka": 19092, + "okan": 41263, + "okanagan": 43233, + "okay": 4917, + "okc": 42418, + "okc": 18357, + "oke": 26636, + "oke": 23598, + "oki": 20390, + "okin": 30687, + "okinawa": 35877, + "okla": 9431, + "oklahoma": 10170, + "oko": 26892, + "oko": 26095, + "okstate": 36356, + "oktoberfest": 32026, + "oku": 45010, + "oku": 43829, + "okwx": 27336, + "ol": 562, + "ol": 2985, + "ola": 20499, + "ola": 3373, + "olaf": 
39709, + "olan": 48489, + "olan": 24227, + "oland": 26452, + "olas": 40800, + "old": 4931, + "old": 896, + "olde": 37731, + "older": 7700, + "oldest": 9285, + "oldham": 29929, + "oldie": 35280, + "oldies": 36278, + "oldman": 48614, + "olds": 8580, + "oldschool": 44384, + "oldschool": 25133, + "oldsmobile": 45396, + "ole": 9089, + "ole": 1947, + "oled": 46768, + "oler": 24069, + "oles": 16962, + "olf": 16346, + "olga": 34779, + "oli": 3811, + "oli": 8810, + "olic": 31341, + "oligar": 46185, + "olim": 47769, + "olin": 37823, + "olin": 18283, + "olina": 34711, + "oline": 17441, + "oling": 38033, + "olini": 36040, + "olis": 49397, + "olithic": 35574, + "olive": 22486, + "olive": 9898, + "oliver": 22882, + "oliver": 9261, + "olives": 27149, + "olivi": 20773, + "olivia": 11697, + "olivier": 23891, + "oll": 32270, + "oll": 15510, + "olla": 31908, + "ollie": 24434, + "olls": 42697, + "olly": 23998, + "olo": 14628, + "olo": 7606, + "ological": 12345, + "ologist": 23442, + "ologists": 30912, + "ology": 4627, + "olor": 29245, + "olph": 25077, + "ols": 2236, + "olsen": 26307, + "olson": 28046, + "olt": 46252, + "olu": 16502, + "olu": 46302, + "olulu": 27645, + "oly": 20323, + "oly": 24823, + "olym": 3594, + "olympi": 13597, + "olympia": 23965, + "olympiad": 47694, + "olympian": 25420, + "olympians": 44583, + "olympic": 26099, + "olympic": 6388, + "olympics": 7629, + "olympus": 30960, + "om": 547, + "om": 3932, + "oma": 44603, + "oma": 5358, + "omaha": 16509, + "oman": 22088, + "oman": 10871, + "omar": 19488, + "omar": 13367, + "omars": 37099, + "omas": 36023, + "omat": 40788, + "omb": 34447, + "ombe": 35967, + "omd": 49346, + "ome": 3693, + "ome": 5832, + "omed": 16835, + "omega": 13465, + "omelette": 38789, + "omen": 9969, + "omen": 25469, + "oment": 43683, + "omeo": 39844, + "omer": 24087, + "omer": 17902, + "omes": 25736, + "ometer": 20060, + "ometric": 38702, + "omez": 12541, + "omf": 47496, + "omfg": 12523, + "omg": 35233, + "omg": 3186, + "omi": 24097, + "omi": 10341, + "omic": 40536, + "omic": 12793, + "omics": 15138, + "omile": 46915, + "omin": 16457, + "omination": 42571, + "oming": 10796, + "ominous": 40914, + "omni": 18793, + "omni": 39489, + "omnibus": 44760, + "omnic": 48383, + "omo": 14478, + "omo": 11066, + "omon": 48758, + "omor": 29431, + "oms": 3770, + "omusic": 38965, + "omy": 40805, + "omy": 6884, + "on": 521, + "on": 525, + "ona": 2687, + "onair": 29511, + "onal": 918, + "onboard": 21689, + "once": 16331, + "once": 2654, + "onceupon": 28122, + "onceuponatime": 33505, + "onco": 46700, + "oncology": 24593, + "ond": 27918, + "ond": 2636, + "onda": 32643, + "onday": 29864, + "onde": 44532, + "ondo": 29529, + "ondon": 42043, + "ondon": 11851, + "one": 1980, + "one": 637, + "onec": 27746, + "oned": 28012, + "oned": 4698, + "onedirection": 16245, + "onee": 44433, + "oneill": 44808, + "onelove": 47417, + "onent": 12147, + "onents": 11709, + "oneof": 48478, + "onep": 20440, + "onepiece": 43153, + "oneplus": 25981, + "oner": 30055, + "oner": 6071, + "oners": 12324, + "ones": 20757, + "ones": 1575, + "oneself": 46874, + "onesie": 33237, + "oness": 25379, + "onet": 36058, + "oneteam": 41094, + "onetsy": 33392, + "onew": 43848, + "onews": 18696, + "onex": 49116, + "oney": 44498, + "oney": 9408, + "onf": 41790, + "onfox": 29874, + "ong": 2787, + "ong": 846, + "onga": 30259, + "ongchang": 35071, + "ongi": 21754, + "ongo": 31226, + "ongoing": 10393, + "ongs": 12143, + "oni": 4385, + "oni": 8048, + "onia": 8001, + "onial": 27599, + "onian": 21090, + "onic": 15838, + "onic": 3711, + "onica": 14631, 
+ "onics": 9779, + "onie": 35249, + "onies": 22601, + "onimo": 41271, + "oning": 5197, + "onion": 10985, + "onions": 15255, + "onist": 10099, + "onists": 19659, + "onix": 27370, + "onized": 43657, + "onlin": 31103, + "online": 12940, + "online": 2027, + "onlinemarketing": 41820, + "onlineshopping": 38587, + "only": 11646, + "only": 1033, + "onlyin": 32947, + "onna": 25438, + "onna": 35458, + "onnaise": 48934, + "onne": 23466, + "onnell": 45613, + "ono": 28165, + "ono": 14388, + "onom": 48014, + "onomy": 36873, + "onpoli": 20708, + "ons": 26076, + "ons": 708, + "onsale": 36324, + "onset": 30527, + "onsite": 37336, + "onstage": 21821, + "onstorm": 49333, + "ont": 34303, + "ont": 11157, + "ontari": 6739, + "ontario": 42766, + "ontario": 7436, + "onte": 34723, + "onthe": 12241, + "onther": 46563, + "ontheroad": 47516, + "onthisday": 6862, + "onto": 11745, + "onto": 3141, + "ontology": 37364, + "ontour": 32155, + "onu": 44142, + "onward": 34827, + "onwards": 20682, + "ony": 9490, + "ony": 2926, + "onym": 11483, + "onymous": 13038, + "onyx": 31353, + "oo": 574, + "oo": 2822, + "ood": 16429, + "ood": 738, + "oodle": 45289, + "oods": 44660, + "oof": 42270, + "ooh": 16806, + "ook": 22326, + "ook": 8394, + "ooks": 31082, + "ool": 37702, + "ool": 929, + "oom": 22786, + "oom": 15002, + "oomf": 40607, + "oon": 35651, + "oon": 7100, + "ooo": 9571, + "oooh": 28927, + "oooo": 4002, + "oooo": 13643, + "ooooo": 12532, + "oooooo": 43590, + "oooooo": 20372, + "ooooooo": 30859, + "oooooooo": 15473, + "oooooooo": 43408, + "oooooooooooooooo": 48645, + "oop": 7326, + "ooper": 39906, + "oops": 9116, + "oor": 35239, + "oos": 9896, + "oosa": 30834, + "oose": 38941, + "oot": 17667, + "ootball": 28914, + "ootd": 16547, + "ooth": 12682, + "oott": 34316, + "ooza": 22809, + "op": 676, + "op": 3691, + "opa": 28949, + "opal": 28982, + "opar": 18167, + "opath": 33079, + "opathic": 37521, + "opathy": 28466, + "opau": 27239, + "opd": 38288, + "ope": 31694, + "ope": 11440, + "opec": 33138, + "opel": 36952, + "open": 3647, + "open": 1488, + "openaccess": 26591, + "opend": 28069, + "opendata": 35709, + "openday": 46991, + "opened": 5303, + "opener": 8998, + "openhouse": 36091, + "opening": 33728, + "opening": 2516, + "openingday": 36359, + "openings": 27643, + "openly": 23005, + "opens": 4801, + "opensource": 29930, + "oper": 2796, + "oper": 37533, + "opera": 8056, + "operate": 19306, + "operated": 23031, + "operates": 38675, + "operating": 12218, + "operation": 27173, + "operation": 7639, + "operational": 18237, + "operations": 8106, + "operative": 28380, + "operator": 15972, + "operators": 19267, + "opers": 48728, + "opes": 37258, + "oph": 6796, + "opha": 38634, + "ophel": 45017, + "ophelia": 49118, + "ophi": 44547, + "ophile": 35915, + "opho": 12900, + "ophobia": 21111, + "ophobic": 29934, + "ophon": 25120, + "ophone": 26345, + "ophthal": 33135, + "ophy": 28539, + "opi": 40056, + "opi": 48994, + "opin": 7636, + "opini": 14825, + "opinion": 7843, + "opinions": 16192, + "opio": 17371, + "opioid": 22833, + "opioids": 47578, + "opla": 36270, + "ople": 25663, + "opol": 15173, + "opoly": 23729, + "opor": 39650, + "opoulos": 42020, + "opp": 2020, + "opp": 21024, + "oppa": 23637, + "oppo": 7399, + "oppo": 41770, + "opponent": 17002, + "opponents": 19664, + "oppor": 2914, + "opportun": 2939, + "opportunities": 5978, + "opportunity": 4004, + "oppos": 10091, + "oppose": 23617, + "opposed": 22509, + "opposes": 47471, + "opposing": 24376, + "opposite": 12872, + "opposition": 11062, + "oppre": 17341, + "oppressed": 41492, + "oppression": 
30650, + "opra": 28291, + "oprah": 22562, + "opry": 35340, + "ops": 3054, + "opt": 45103, + "opt": 27188, + "opted": 42035, + "opti": 6580, + "optic": 25190, + "optic": 24755, + "optical": 16822, + "optics": 27165, + "optim": 22331, + "optimal": 25235, + "optimi": 9737, + "optimis": 39459, + "optimism": 25226, + "optimist": 44581, + "optimistic": 23104, + "optimization": 25125, + "optimize": 30456, + "optimized": 43939, + "optimizing": 49157, + "optimum": 35974, + "optimus": 43453, + "option": 8464, + "optional": 25411, + "options": 7063, + "optome": 35533, + "opul": 39858, + "opus": 33295, + "opy": 21835, + "or": 523, + "or": 541, + "ora": 4301, + "orac": 24673, + "oracle": 37308, + "oracle": 15966, + "orah": 40820, + "orail": 45120, + "oral": 32490, + "oral": 6007, + "orama": 33619, + "oran": 32209, + "oran": 28395, + "orang": 22116, + "orange": 13957, + "orange": 4287, + "oranges": 32417, + "orangu": 36112, + "orb": 28894, + "orb": 36958, + "orbit": 19713, + "orbital": 40312, + "orc": 44305, + "orca": 18631, + "orcas": 47676, + "orch": 11893, + "orchar": 40226, + "orchard": 19530, + "orche": 8004, + "orchestr": 42937, + "orchestra": 9573, + "orchestral": 40285, + "orchi": 23696, + "orchid": 18678, + "orchids": 28376, + "ord": 26903, + "ord": 11502, + "orda": 33462, + "ordained": 38302, + "order": 24613, + "order": 2191, + "ordered": 8335, + "ordering": 19588, + "orderly": 43457, + "orders": 6187, + "ordin": 4378, + "ordinance": 38583, + "ordinary": 8012, + "ore": 3580, + "ore": 1423, + "orean": 36696, + "ored": 5133, + "oregon": 21759, + "oregon": 8035, + "oren": 21645, + "oreo": 21873, + "oreos": 41688, + "ores": 17328, + "org": 3401, + "org": 5593, + "organ": 3338, + "organ": 13213, + "organi": 3636, + "organic": 24080, + "organic": 5980, + "organics": 44199, + "organis": 13204, + "organisation": 15868, + "organisations": 20651, + "organise": 36073, + "organised": 13191, + "organiser": 49141, + "organisers": 35778, + "organising": 22787, + "organisms": 37041, + "organiz": 11107, + "organization": 8064, + "organizational": 29510, + "organizations": 13453, + "organize": 19973, + "organized": 10681, + "organizer": 23905, + "organizers": 27191, + "organizing": 15779, + "organs": 29872, + "orgs": 29500, + "ori": 1540, + "ori": 8693, + "oria": 11474, + "orial": 8648, + "orian": 21193, + "oric": 43810, + "orice": 41341, + "orie": 18815, + "orient": 13149, + "orient": 30770, + "oriental": 23056, + "orientation": 16873, + "oriente": 40390, + "oriented": 24596, + "orienteering": 42985, + "ories": 5934, + "orig": 2273, + "orig": 38463, + "origami": 31832, + "origin": 2555, + "origin": 12372, + "original": 18496, + "original": 3117, + "originally": 12849, + "originals": 16953, + "originated": 41823, + "origins": 16291, + "orin": 39863, + "oring": 3006, + "orio": 24308, + "orioles": 21430, + "orion": 21765, + "oris": 37064, + "orities": 7903, + "ority": 5556, + "orium": 12015, + "ork": 22202, + "ork": 37235, + "orkney": 34254, + "orl": 39465, + "orlando": 32247, + "orlando": 7827, + "orleans": 11127, + "orm": 38464, + "orn": 25412, + "orn": 8130, + "ornam": 36122, + "ornament": 23409, + "ornamental": 46270, + "ornaments": 28968, + "ornate": 46865, + "orni": 27713, + "ornithology": 38275, + "orns": 19340, + "oro": 9848, + "oro": 14573, + "orous": 19286, + "orph": 17318, + "orphan": 22718, + "orphan": 28994, + "orphanage": 45196, + "orphaned": 46792, + "orphans": 36588, + "orphe": 39186, + "orr": 32977, + "ors": 1127, + "orship": 20846, + "ort": 1019, + "ortega": 39727, + "orth": 22584, + "orth": 
24461, + "ortho": 11366, + "orthodon": 37730, + "orthodox": 19008, + "orthop": 42123, + "orthopedic": 49341, + "ortiz": 23544, + "orton": 37238, + "oru": 44629, + "oru": 31281, + "orum": 42724, + "orwell": 41218, + "ory": 16983, + "ory": 1985, + "os": 2211, + "os": 1299, + "osa": 16340, + "osa": 17237, + "osaka": 21347, + "osborne": 22402, + "osbourne": 43376, + "osc": 5092, + "oscar": 21157, + "oscar": 8191, + "oscars": 11098, + "osce": 37303, + "oscill": 38272, + "ose": 46942, + "ose": 22541, + "osh": 30717, + "osh": 35011, + "osha": 33907, + "oshi": 34770, + "osi": 25247, + "osi": 17636, + "osis": 13903, + "osity": 12730, + "oslo": 20547, + "osm": 31626, + "osman": 46539, + "oso": 42793, + "oso": 21285, + "osp": 24387, + "ospre": 49001, + "osprey": 37893, + "oss": 29362, + "oss": 34640, + "ost": 23701, + "ost": 18749, + "oste": 20632, + "osteo": 43163, + "oster": 31781, + "ostr": 33673, + "ostrich": 47640, + "osu": 29480, + "osu": 19818, + "oswald": 38471, + "ot": 1863, + "ot": 2062, + "ota": 17509, + "ota": 8741, + "otago": 45919, + "otaku": 40743, + "otas": 47616, + "otc": 37934, + "otd": 5683, + "ote": 28511, + "ote": 19744, + "otes": 27280, + "oth": 33262, + "oth": 33519, + "other": 9758, + "other": 1010, + "others": 3326, + "otherwise": 12376, + "oti": 19567, + "oti": 45564, + "otic": 9671, + "otis": 28246, + "otive": 10877, + "oto": 23946, + "oto": 23399, + "otp": 29822, + "otr": 38685, + "ots": 5769, + "ott": 10167, + "ott": 7936, + "otta": 7623, + "otta": 20941, + "ottawa": 49027, + "ottawa": 9019, + "otte": 35214, + "otter": 34710, + "otter": 22456, + "otters": 38883, + "otti": 36721, + "ottnews": 33995, + "otto": 17730, + "ottoman": 27503, + "otw": 35259, + "otwol": 46868, + "ou": 520, + "ou": 6544, + "ouat": 32954, + "ouch": 13493, + "oud": 1359, + "oue": 48838, + "ouf": 34618, + "ough": 4204, + "ough": 991, + "ought": 2253, + "oughton": 36860, + "oui": 39421, + "ouk": 21796, + "oul": 20253, + "oul": 8081, + "ould": 859, + "oulos": 32808, + "oun": 636, + "oun": 20960, + "ounce": 15027, + "ounces": 30299, + "ound": 2013, + "ound": 853, + "oundation": 40132, + "ounded": 9634, + "ounding": 11944, + "ounds": 2753, + "oung": 35875, + "oung": 25341, + "ounge": 29427, + "ount": 43801, + "ount": 4172, + "ounts": 10963, + "oup": 32815, + "our": 727, + "our": 581, + "oura": 29806, + "oura": 36352, + "ourable": 24126, + "ourage": 34525, + "oural": 45840, + "oured": 6956, + "ouri": 12696, + "ouring": 12000, + "ourism": 25496, + "ourke": 26480, + "ourlives": 37541, + "ouro": 41224, + "ours": 1491, + "ourse": 15415, + "ourselves": 10124, + "ourt": 22960, + "oury": 29484, + "ous": 1987, + "ous": 879, + "ouse": 32048, + "ouse": 7603, + "ouses": 33666, + "ously": 2501, + "ousness": 10689, + "ousy": 28302, + "out": 1130, + "out": 620, + "outa": 35187, + "outage": 27320, + "outages": 40353, + "outback": 28532, + "outbound": 41256, + "outbreak": 20103, + "outcome": 16552, + "outcomes": 14016, + "outdated": 38313, + "outdoor": 19184, + "outdoor": 6368, + "outdoors": 10469, + "oute": 44180, + "outed": 34435, + "outer": 30499, + "outer": 14188, + "outes": 39600, + "outfield": 41826, + "outfit": 6525, + "outfits": 16366, + "outfitters": 37725, + "outfy": 34920, + "outgoing": 27302, + "outh": 16933, + "outh": 8111, + "outine": 35452, + "outing": 11251, + "outlander": 45820, + "outlander": 17095, + "outlaw": 37498, + "outlaw": 27340, + "outlaws": 30935, + "outlet": 16855, + "outlets": 20822, + "outline": 26894, + "outlines": 29159, + "outlining": 45960, + "outlook": 12983, + "outof": 43958, + 
"outpatient": 46603, + "outpost": 44622, + "output": 17255, + "outra": 14262, + "outrage": 23577, + "outraged": 43402, + "outrageous": 29342, + "outre": 14373, + "outreach": 15297, + "outright": 38200, + "outs": 5790, + "outsi": 22515, + "outside": 47693, + "outside": 2782, + "outsider": 41196, + "outsiders": 41742, + "outskirts": 42088, + "outsourcing": 34543, + "outstanding": 6387, + "outta": 15807, + "outtuesday": 48692, + "outw": 34650, + "oux": 40960, + "oux": 14228, + "ov": 6420, + "ov": 8479, + "ova": 12762, + "oval": 15039, + "ovarian": 42913, + "ovation": 24333, + "ove": 8649, + "ove": 15456, + "oven": 44620, + "oven": 12579, + "over": 1658, + "over": 962, + "overall": 6914, + "overboard": 42982, + "overcame": 47235, + "overcast": 36942, + "overcome": 14365, + "overcoming": 29348, + "overdose": 27017, + "overdrive": 40088, + "overdue": 30240, + "overflow": 32885, + "overflowing": 45370, + "overhaul": 31531, + "overhead": 20321, + "overland": 38808, + "overlay": 44827, + "overload": 24327, + "overlook": 35767, + "overlooked": 27632, + "overlooking": 17319, + "overly": 28820, + "overnight": 9913, + "overpass": 44310, + "overrated": 38214, + "overs": 45774, + "overs": 17329, + "overseas": 15100, + "oversight": 32494, + "oversized": 31557, + "overtime": 19347, + "overturned": 31048, + "overview": 14789, + "overwatch": 18124, + "overweight": 43465, + "overwhel": 12204, + "overwhelmed": 23459, + "overwhelming": 20306, + "overwhelmingly": 43549, + "ovi": 32508, + "ovic": 22417, + "ovich": 27623, + "ovie": 47677, + "ovo": 41920, + "ovo": 18065, + "ovski": 26167, + "ow": 2032, + "ow": 2250, + "owa": 32770, + "owe": 19073, + "owed": 37641, + "owen": 24838, + "owen": 12056, + "owens": 20664, + "owes": 35069, + "owing": 48582, + "owl": 34332, + "owl": 9899, + "owls": 18247, + "own": 3845, + "own": 1758, + "owned": 8536, + "owner": 5019, + "owners": 7712, + "ownership": 16583, + "owning": 24661, + "owns": 17533, + "owo": 46142, + "ows": 27423, + "owski": 22573, + "ox": 3282, + "ox": 12071, + "oxfam": 45466, + "oxford": 28588, + "oxford": 8824, + "oxfordshire": 37855, + "oxi": 33731, + "oxi": 48147, + "oxid": 17701, + "oxide": 28235, + "oxo": 37088, + "oxy": 12432, + "oxygen": 16214, + "oy": 6638, + "oy": 12437, + "oya": 38894, + "oye": 48677, + "oyster": 40545, + "oyster": 17253, + "oysters": 22672, + "oz": 10584, + "oz": 6044, + "ozar": 31848, + "ozil": 41365, + "ozone": 37052, + "ozzy": 39549, + "p": 79, + "p": 335, + "pa": 765, + "pa": 2217, + "paa": 32812, + "pab": 9354, + "pablo": 42172, + "pablo": 14473, + "pac": 2332, + "pac": 7608, + "pace": 40600, + "pace": 9450, + "paced": 32611, + "pacers": 23976, + "paces": 43001, + "paci": 5699, + "pacific": 19723, + "pacific": 6654, + "pacing": 45202, + "pack": 2711, + "pack": 3420, + "package": 7053, + "packaged": 29656, + "packages": 14305, + "packaging": 11658, + "packard": 46421, + "packed": 5883, + "packer": 28209, + "packers": 14294, + "packet": 25022, + "packets": 40448, + "packing": 9829, + "packs": 11086, + "paco": 41364, + "pacqui": 28456, + "pacquiao": 30485, + "pact": 27182, + "pad": 3798, + "pad": 7601, + "padded": 42253, + "paddington": 33162, + "paddle": 38276, + "paddle": 20811, + "paddling": 40645, + "paddock": 29590, + "paddy": 33103, + "paddy": 19855, + "padi": 47037, + "padilla": 22380, + "padma": 44595, + "padma": 46457, + "padre": 38343, + "padres": 22829, + "pads": 17353, + "paedi": 41488, + "paella": 46924, + "paf": 47185, + "pafc": 49259, + "pag": 4151, + "pag": 30525, + "pagan": 27854, + "page": 14996, + "page": 2504, + 
"pageant": 22139, + "pages": 8082, + "pagoda": 44309, + "pah": 41054, + "pah": 26884, + "pai": 20624, + "pai": 21198, + "paid": 5057, + "paige": 33659, + "paige": 16022, + "paign": 31796, + "pain": 2141, + "pain": 4495, + "paine": 38069, + "painful": 16361, + "pains": 25639, + "paint": 7948, + "paint": 5185, + "paintball": 39730, + "painted": 6433, + "painter": 10888, + "painters": 35703, + "painting": 49164, + "painting": 3086, + "paintings": 9956, + "paints": 21672, + "pair": 19848, + "pair": 4038, + "paired": 12433, + "pairing": 16313, + "pairings": 41152, + "pairs": 9950, + "pais": 16878, + "paisley": 22954, + "pajam": 24110, + "pajama": 40244, + "pajamas": 37231, + "pak": 13186, + "pak": 9094, + "paki": 3438, + "pakistan": 10713, + "pakistan": 3994, + "pakistani": 14050, + "pakistanis": 45707, + "pakv": 38196, + "pal": 1850, + "pal": 3611, + "pala": 17895, + "palace": 6381, + "palaces": 45625, + "palad": 28371, + "palae": 43379, + "palais": 35673, + "palate": 34666, + "palawan": 48202, + "palazzo": 36006, + "pale": 4768, + "pale": 12518, + "paleo": 36741, + "paleo": 22198, + "paler": 38028, + "palermo": 40635, + "palestin": 9449, + "palestine": 11682, + "palestinian": 11764, + "palestinians": 21874, + "palette": 13901, + "pali": 48063, + "palin": 40153, + "palis": 44256, + "pality": 27296, + "pall": 35817, + "palla": 21208, + "palladium": 37888, + "pallet": 39057, + "palli": 28954, + "palliative": 46014, + "pally": 46073, + "palm": 19651, + "palm": 8612, + "palma": 29888, + "palmer": 40112, + "palmer": 13633, + "palms": 27059, + "palo": 31562, + "palom": 47698, + "palooza": 25861, + "pals": 11043, + "palsy": 46651, + "pam": 8228, + "pam": 18513, + "pamela": 26991, + "pamp": 37653, + "pamper": 44345, + "pamph": 41332, + "pan": 1072, + "pan": 7437, + "panam": 24606, + "panama": 15522, + "panas": 26207, + "panasonic": 29750, + "pancake": 18723, + "pancakes": 15308, + "panch": 27251, + "pancra": 42472, + "pancre": 27708, + "pancreatic": 49337, + "pancy": 41625, + "pand": 5631, + "panda": 12952, + "pandas": 35119, + "pande": 38419, + "pandey": 34895, + "pandit": 41191, + "pandor": 30250, + "pandora": 17727, + "pandoramusic": 42344, + "pane": 27470, + "panel": 3724, + "paneli": 19410, + "panelist": 39719, + "panelists": 24619, + "panels": 12735, + "panera": 48471, + "pang": 16756, + "pang": 23672, + "panhandle": 40919, + "pani": 36092, + "panic": 46671, + "panic": 14124, + "panini": 30410, + "pann": 42302, + "panna": 49065, + "pano": 36165, + "panor": 12962, + "panorama": 19763, + "panoramic": 22563, + "pans": 35204, + "pant": 22550, + "panther": 22825, + "panther": 13262, + "panthers": 10494, + "panties": 32515, + "panto": 28776, + "pantry": 25608, + "pants": 5003, + "panty": 44217, + "pany": 45567, + "panzer": 41159, + "pao": 33790, + "paola": 44689, + "paolo": 48488, + "paolo": 21133, + "pap": 1884, + "pap": 30756, + "papa": 12211, + "papar": 32782, + "paparazzi": 37842, + "papaya": 44098, + "paper": 8680, + "paper": 2802, + "paperback": 17928, + "papers": 8204, + "paperwork": 35785, + "papi": 35177, + "papp": 26361, + "paprika": 44793, + "papua": 32629, + "par": 699, + "par": 9163, + "para": 18355, + "para": 8976, + "parach": 23147, + "parachute": 30122, + "parad": 37143, + "parade": 5809, + "parades": 46479, + "paradi": 6658, + "paradig": 27786, + "paradigm": 33485, + "paradise": 45869, + "paradise": 7247, + "paradox": 33109, + "parag": 11866, + "paragon": 48099, + "paragra": 24903, + "paragraph": 28499, + "paragu": 38021, + "paraguay": 43579, + "paral": 15143, + "paralle": 13184, + 
"parallel": 18201, + "paralleled": 42520, + "parallels": 46101, + "paraly": 30255, + "paralym": 18727, + "paralympic": 30806, + "paralympics": 37162, + "paralysis": 45702, + "param": 12250, + "parame": 27106, + "paramedic": 34630, + "paramedics": 35991, + "parameters": 44890, + "paramore": 34401, + "paramount": 26642, + "parano": 30283, + "paranoid": 43029, + "paranor": 16940, + "paranormal": 19047, + "parap": 41091, + "paras": 15198, + "parasite": 42460, + "parasites": 46175, + "parc": 30914, + "parcel": 30367, + "parcels": 45589, + "pard": 18773, + "pardon": 47606, + "pardon": 26565, + "pare": 18202, + "pared": 5498, + "paren": 3106, + "parent": 47848, + "parent": 10183, + "parental": 28339, + "parenthood": 23887, + "parenting": 14529, + "parents": 3731, + "pares": 12420, + "parfait": 46140, + "pari": 17961, + "pari": 27979, + "paris": 13982, + "paris": 3445, + "parisagreement": 47405, + "parish": 47328, + "parish": 13020, + "parisi": 45081, + "parisian": 38512, + "parity": 42734, + "park": 4985, + "park": 1452, + "parked": 16487, + "parker": 31119, + "parker": 8365, + "parkin": 34868, + "parking": 5984, + "parkinson": 28129, + "parkland": 31287, + "parkrun": 25747, + "parks": 6873, + "parkway": 19882, + "parl": 30373, + "parl": 29897, + "parliam": 5941, + "parliament": 41599, + "parliament": 7151, + "parliamentary": 17912, + "parlor": 38253, + "parlour": 37829, + "parma": 36077, + "parme": 26295, + "parmesan": 27274, + "paro": 17429, + "parody": 24318, + "parole": 32158, + "parr": 44113, + "parrish": 43043, + "parrot": 23565, + "parry": 40604, + "parsley": 30077, + "parsons": 22505, + "part": 1872, + "part": 1551, + "parte": 48508, + "parth": 34790, + "parti": 10509, + "partial": 18957, + "partially": 21269, + "partic": 2871, + "partici": 9540, + "particip": 4400, + "participant": 27674, + "participants": 10237, + "participate": 9433, + "participated": 14252, + "participates": 46414, + "participating": 11535, + "participation": 13529, + "particle": 27716, + "particles": 27012, + "particul": 11408, + "particular": 14098, + "particularly": 12170, + "parties": 9032, + "parting": 32844, + "partisan": 20772, + "partist": 44713, + "partition": 42219, + "partly": 21459, + "partner": 5210, + "partner": 4568, + "partnered": 21402, + "partnering": 21182, + "partners": 5568, + "partnership": 6123, + "partnerships": 17418, + "parton": 43245, + "partridge": 34872, + "parts": 5149, + "party": 12877, + "party": 1580, + "partying": 25702, + "pas": 1341, + "pas": 9525, + "pasadena": 25892, + "pascal": 28626, + "pasco": 49220, + "pascu": 42692, + "pash": 23936, + "pasha": 46986, + "paso": 18542, + "pasqu": 44941, + "pass": 5016, + "pass": 3511, + "passage": 16477, + "passages": 48937, + "passed": 4957, + "passenger": 12311, + "passengers": 12781, + "passer": 48544, + "passes": 7633, + "passi": 32471, + "passing": 6589, + "passion": 8822, + "passion": 5332, + "passionate": 10947, + "passionately": 44028, + "passions": 38441, + "passive": 23171, + "passover": 38426, + "passport": 14739, + "passports": 46368, + "password": 20258, + "passwords": 43095, + "past": 7315, + "past": 2729, + "pasta": 10441, + "paste": 34765, + "paste": 17038, + "pastel": 19457, + "pastels": 45699, + "pastor": 19792, + "pastor": 9664, + "pastoral": 37191, + "pastors": 30959, + "pastr": 45478, + "pastries": 39409, + "pastry": 18582, + "pasture": 34764, + "pastures": 47793, + "pat": 1300, + "pat": 7036, + "patag": 29862, + "patagonia": 32786, + "patch": 29284, + "patch": 8721, + "patches": 22104, + "patchwork": 44675, + "patchy": 
47488, + "pate": 42122, + "pate": 42098, + "patel": 14168, + "patent": 14692, + "patented": 37277, + "patents": 33911, + "paterson": 36560, + "path": 7408, + "path": 5035, + "pathetic": 18222, + "pathfinder": 35415, + "pathi": 34976, + "pathi": 27347, + "pathic": 49025, + "patho": 18534, + "pathology": 23290, + "paths": 16333, + "pathway": 23488, + "pathways": 24690, + "pathy": 13330, + "pati": 2799, + "pati": 26708, + "patience": 13575, + "patient": 30139, + "patient": 6262, + "patiently": 22980, + "patients": 5543, + "patil": 49187, + "patio": 14304, + "pational": 30627, + "patna": 45025, + "patory": 41859, + "patreon": 17165, + "patri": 4771, + "patriarch": 49054, + "patriarchy": 48806, + "patric": 12569, + "patrice": 40731, + "patricia": 18143, + "patrick": 12078, + "patrick": 5286, + "patricks": 46783, + "patriot": 28896, + "patriot": 15692, + "patrioti": 35520, + "patriotic": 20217, + "patriotism": 35807, + "patriots": 8707, + "patro": 31650, + "patrol": 10073, + "patrolling": 39344, + "patrols": 35978, + "patron": 26658, + "patron": 17683, + "patrons": 28308, + "pats": 24874, + "patsy": 46093, + "patt": 12637, + "patter": 4982, + "pattern": 7447, + "patterned": 47212, + "patterns": 11637, + "patterson": 21384, + "patti": 44927, + "patti": 26123, + "pattinson": 32474, + "patton": 29026, + "patty": 48741, + "patty": 18321, + "pau": 1834, + "pau": 35970, + "paul": 6035, + "paul": 2597, + "paula": 37363, + "paula": 16777, + "pauline": 30438, + "paulo": 48002, + "paulo": 21628, + "pauls": 41413, + "pauls": 40010, + "paulson": 48201, + "pause": 19439, + "paused": 46782, + "pav": 6661, + "pave": 37107, + "paved": 27898, + "pavel": 43152, + "pavement": 27669, + "pavilion": 13374, + "paving": 28651, + "paw": 14009, + "paw": 16016, + "pawan": 29754, + "pawankalyan": 33702, + "pawn": 43195, + "paws": 16714, + "pax": 20007, + "pax": 19033, + "paxton": 38347, + "pay": 2642, + "pay": 3345, + "payback": 36413, + "paycheck": 45078, + "payday": 26957, + "payee": 46985, + "payer": 41503, + "paying": 8341, + "payment": 10596, + "payments": 11832, + "payne": 12775, + "paypal": 21442, + "payroll": 31610, + "pays": 10845, + "paysoff": 48174, + "paytm": 45352, + "payton": 27348, + "paz": 22267, + "pb": 20112, + "pb": 10981, + "pba": 28205, + "pbb": 48567, + "pbb": 40589, + "pbc": 49191, + "pbl": 35166, + "pbr": 32998, + "pbs": 17908, + "pc": 6782, + "pc": 3808, + "pca": 35705, + "pcb": 26235, + "pcc": 36059, + "pci": 38957, + "pcm": 47436, + "pcr": 35704, + "pcs": 11917, + "pcso": 31963, + "pct": 22168, + "pd": 4387, + "pd": 4675, + "pdates": 16842, + "pdc": 40498, + "pdf": 15181, + "pdp": 24601, + "pdt": 21743, + "pdx": 25470, + "pdx": 16153, + "pe": 661, + "pe": 956, + "pea": 13915, + "peabo": 34083, + "peabody": 41244, + "peac": 34615, + "peace": 6249, + "peace": 3021, + "peaceful": 9461, + "peacefully": 30530, + "peacekeeping": 43630, + "peach": 10522, + "peach": 11538, + "peaches": 27216, + "peak": 18572, + "peak": 6026, + "peakdistrict": 41289, + "peake": 24810, + "peaked": 36391, + "peaks": 14067, + "pean": 11563, + "peanu": 25843, + "peanut": 12491, + "peanuts": 26503, + "pear": 4910, + "pear": 18820, + "pearce": 25996, + "pearl": 21806, + "pearl": 8560, + "pearljam": 46739, + "pearls": 19581, + "pears": 39565, + "pearson": 20461, + "peas": 15937, + "peasant": 40621, + "peasants": 48788, + "peat": 26914, + "pebble": 28056, + "pebbles": 40155, + "pec": 32447, + "pec": 17611, + "pecan": 32177, + "peck": 25186, + "peck": 29234, + "pecker": 30169, + "peckham": 45863, + "pecu": 34200, + "peculiar": 
42808, + "ped": 13197, + "ped": 2966, + "pedago": 34590, + "pedagogy": 48072, + "pedal": 32943, + "pedal": 19621, + "pedals": 38535, + "pede": 12862, + "pede": 19560, + "pedestri": 30027, + "pedestrian": 18256, + "pedestrians": 33895, + "pedi": 12967, + "pedia": 11733, + "pediatric": 48431, + "pediatric": 22071, + "pedic": 35319, + "pedic": 44528, + "pedro": 29963, + "pedro": 15114, + "peds": 45377, + "pee": 12988, + "pee": 11196, + "peed": 47369, + "peek": 46323, + "peek": 7569, + "peeking": 48771, + "peel": 34386, + "peel": 17158, + "peeled": 33533, + "peeling": 48649, + "peep": 25425, + "peep": 16857, + "peeps": 11681, + "peer": 32416, + "peer": 14432, + "peers": 21626, + "pees": 31830, + "peg": 32182, + "peg": 11207, + "pegas": 30018, + "pegasus": 37822, + "peggy": 24271, + "pei": 48166, + "pei": 12917, + "pel": 4286, + "pel": 7006, + "pele": 44105, + "pelican": 34131, + "pelicans": 29363, + "pell": 46981, + "pelle": 31267, + "pelled": 32506, + "pellegr": 38529, + "pellets": 48240, + "pelo": 40192, + "pelo": 40238, + "pelosi": 22169, + "pelvic": 45646, + "pemb": 19880, + "pembro": 24084, + "pembroke": 36702, + "pembroke": 40044, + "pembrokeshire": 40695, + "pen": 1501, + "pen": 5356, + "pena": 35788, + "penalties": 25417, + "penalty": 11491, + "penang": 29545, + "penc": 20065, + "pence": 18002, + "pencil": 41303, + "pencil": 11200, + "pencils": 21909, + "pend": 3052, + "pendant": 12415, + "pendants": 44117, + "pending": 12770, + "pendleton": 44272, + "pendu": 45336, + "penelope": 36703, + "penetr": 26058, + "peng": 42955, + "peng": 39200, + "pengu": 8854, + "penguin": 28249, + "penguin": 14952, + "penguins": 16557, + "peninsu": 13464, + "peninsula": 14070, + "penn": 7760, + "penn": 11128, + "pennant": 43971, + "penned": 45077, + "penney": 47856, + "pennies": 43094, + "pennsylvania": 13673, + "penny": 20400, + "penny": 11388, + "pens": 13307, + "pens": 13310, + "pensac": 30925, + "pensacola": 33573, + "pension": 32840, + "pension": 17764, + "pensions": 29773, + "penske": 47154, + "pent": 10699, + "pent": 22725, + "pentagon": 23133, + "pente": 33165, + "penthouse": 32673, + "penultimate": 36553, + "peop": 1030, + "people": 10573, + "people": 1047, + "peoples": 28241, + "peoples": 14627, + "peopleschoice": 32418, + "peoplesvote": 45830, + "peoria": 36985, + "pep": 12761, + "pep": 14898, + "pepe": 24778, + "pepp": 34425, + "pepper": 14861, + "pepper": 8253, + "peppermint": 30321, + "pepperoni": 47307, + "peppers": 14650, + "pepsi": 21307, + "per": 703, + "per": 1284, + "pera": 26294, + "perce": 24135, + "perceived": 38436, + "percent": 16328, + "percent": 9017, + "percentage": 19477, + "percep": 28017, + "perception": 20591, + "perceptions": 38138, + "perch": 34281, + "perched": 40071, + "percu": 41722, + "percussion": 23980, + "percy": 23940, + "pere": 8665, + "pere": 36300, + "pered": 24509, + "peregr": 37479, + "peregrine": 44546, + "pereira": 43927, + "peren": 24564, + "perenni": 26996, + "perennial": 34038, + "perez": 15107, + "perf": 22816, + "perfe": 1624, + "perfec": 6599, + "perfect": 17261, + "perfect": 1878, + "perfection": 9646, + "perfectly": 8037, + "perfecto": 42898, + "perfor": 2311, + "perform": 3866, + "perform": 5940, + "performan": 8973, + "performance": 2714, + "performances": 9553, + "performed": 9997, + "performer": 17061, + "performers": 18476, + "performing": 5170, + "performs": 13839, + "perfu": 14214, + "perfume": 17525, + "perhaps": 9297, + "peri": 12618, + "peri": 44068, + "perience": 19302, + "peril": 40119, + "peril": 48301, + "perimeter": 38499, + "pering": 
29746, + "perio": 5101, + "period": 6131, + "periodic": 36476, + "periods": 24401, + "periph": 35308, + "peripheral": 43901, + "peris": 19461, + "periscope": 21668, + "perk": 33424, + "perkins": 20057, + "perks": 17660, + "perl": 44018, + "perm": 47847, + "perman": 9018, + "permanent": 11144, + "permanently": 25584, + "perme": 42456, + "permission": 15822, + "permit": 21950, + "permits": 33267, + "permitted": 44380, + "pero": 23551, + "perpe": 15749, + "perpetr": 33376, + "perpetu": 30132, + "perpetual": 32018, + "perrie": 32691, + "perry": 28478, + "perry": 7899, + "pers": 3688, + "pers": 10710, + "perse": 27498, + "persecu": 22878, + "persecution": 32009, + "perseverance": 29820, + "persi": 11509, + "persian": 19859, + "persist": 19412, + "persist": 40938, + "persistence": 34588, + "persistent": 29028, + "person": 3510, + "person": 2533, + "persona": 18401, + "personal": 10114, + "personal": 4121, + "personalised": 24186, + "personalities": 27888, + "personality": 10386, + "personalized": 17845, + "personally": 13885, + "personnel": 14546, + "persons": 14592, + "perspec": 17997, + "perspective": 8996, + "perspectives": 18777, + "persu": 20972, + "pert": 36970, + "pert": 16306, + "perth": 19067, + "perth": 11011, + "peru": 20612, + "peru": 12964, + "peruvian": 30822, + "pes": 38368, + "pes": 2598, + "pesa": 47409, + "pesc": 44044, + "pesh": 33184, + "peshaw": 28524, + "peshawar": 29230, + "pesky": 42512, + "pesos": 47872, + "pessi": 43902, + "pest": 20130, + "pest": 9425, + "pesticide": 48481, + "pesticides": 37868, + "pesto": 26186, + "pests": 41919, + "pet": 2167, + "pet": 3703, + "peta": 28785, + "petal": 38430, + "petal": 40469, + "petals": 26064, + "petday": 45314, + "pete": 14479, + "pete": 8571, + "peter": 5093, + "peter": 3696, + "peterborough": 26012, + "peters": 16336, + "petersburg": 21052, + "petersen": 39794, + "peterson": 16877, + "peth": 48920, + "petit": 36437, + "petit": 21276, + "petite": 27213, + "petition": 10975, + "petitions": 43536, + "petr": 29808, + "petra": 31300, + "petre": 47179, + "petri": 31831, + "petro": 8716, + "petrol": 18149, + "petroleum": 22063, + "petron": 42875, + "pets": 7663, + "pett": 27051, + "petti": 48001, + "petting": 44334, + "petty": 17324, + "peu": 21411, + "peuge": 22893, + "peugeot": 24129, + "pew": 21608, + "pew": 30783, + "pewdie": 41882, + "pewdiepie": 42563, + "pex": 43765, + "pey": 14966, + "pey": 30933, + "peyton": 49254, + "peyton": 20307, + "pez": 45798, + "pez": 10482, + "pf": 16680, + "pf": 12572, + "pfa": 47839, + "pfc": 35007, + "pff": 44121, + "pfi": 29810, + "pfw": 31229, + "pg": 12476, + "pg": 5211, + "pga": 13351, + "pgat": 36514, + "pgatour": 40094, + "pgh": 44862, + "pgh": 30031, + "pgs": 49204, + "ph": 745, + "ph": 2042, + "pha": 4443, + "pha": 26255, + "phal": 19962, + "phan": 8731, + "phan": 40126, + "phant": 36998, + "phantom": 37688, + "phantom": 14490, + "phar": 5570, + "phara": 35792, + "pharaoh": 40437, + "pharm": 45761, + "pharma": 17831, + "pharmac": 8193, + "pharmaceu": 19490, + "pharmaceutical": 25217, + "pharmaceuticals": 44623, + "pharmacist": 41024, + "pharmacists": 44337, + "pharmacy": 15293, + "pharo": 42308, + "pharoah": 49287, + "pharrell": 31316, + "phase": 8304, + "phases": 35337, + "phat": 42492, + "phc": 41102, + "phd": 20875, + "phd": 8472, + "phdchat": 39564, + "phdlife": 39638, + "phe": 4787, + "phe": 19853, + "pheasant": 41983, + "phee": 41292, + "phel": 23711, + "phelps": 27128, + "phen": 7718, + "pheno": 47336, + "phenom": 31673, + "phenom": 39618, + "phenomen": 11304, + "phenomena": 41538, + 
"phenomenal": 15035, + "phenomenon": 24464, + "pher": 9194, + "pher": 19828, + "phers": 29531, + "pherson": 36421, + "phew": 10295, + "phi": 2239, + "phi": 12220, + "phia": 9228, + "phic": 3977, + "phie": 30237, + "phies": 17062, + "phil": 2821, + "phil": 6199, + "phila": 47443, + "philadel": 9428, + "philadelphia": 9749, + "philanthro": 16587, + "philanthropist": 44153, + "philanthropy": 25047, + "philately": 33695, + "phile": 36543, + "philharmon": 25228, + "philharmonic": 31699, + "phili": 4277, + "philia": 46654, + "philip": 20748, + "philip": 11074, + "philipp": 5623, + "philipp": 47591, + "philippe": 20942, + "philippine": 17629, + "philippines": 8149, + "philips": 25175, + "phill": 42346, + "phill": 48272, + "philli": 6456, + "phillies": 18748, + "phillip": 48832, + "phillip": 19323, + "phillips": 11041, + "philly": 19545, + "philly": 7785, + "philos": 8395, + "philosop": 20349, + "philosoph": 10187, + "philosopher": 25220, + "philosophical": 32628, + "philosophy": 12213, + "phils": 38573, + "phin": 33816, + "phine": 40985, + "phins": 40210, + "phish": 36897, + "phishing": 36546, + "phl": 25603, + "pho": 816, + "pho": 22707, + "phobia": 28749, + "phoe": 22673, + "phoebe": 27582, + "phoeni": 6778, + "phoenix": 20615, + "phoenix": 7793, + "phol": 48140, + "phon": 19602, + "phon": 31115, + "phone": 15486, + "phone": 1951, + "phones": 6351, + "phony": 31925, + "phora": 31363, + "phosp": 22638, + "photo": 1153, + "photo": 1125, + "photobomb": 37075, + "photobook": 41894, + "photog": 28115, + "photogenic": 36108, + "photogra": 36754, + "photograph": 1688, + "photograph": 8853, + "photographed": 11573, + "photographer": 5748, + "photographers": 17141, + "photographic": 22053, + "photographing": 30074, + "photographs": 15759, + "photography": 33183, + "photography": 2108, + "photom": 32223, + "photoo": 11106, + "photooftheday": 11933, + "photos": 2479, + "photoshoot": 11121, + "photoshop": 12419, + "photoshopped": 35738, + "phouse": 27848, + "php": 17370, + "phra": 12777, + "phrase": 18809, + "phrases": 35264, + "phs": 16495, + "phu": 21274, + "phuket": 34028, + "phx": 35466, + "phx": 29507, + "phy": 6484, + "phy": 4292, + "phyl": 35600, + "phyllis": 37844, + "phys": 3734, + "phys": 37894, + "physi": 13782, + "physic": 46641, + "physical": 44127, + "physical": 6671, + "physically": 18105, + "physician": 21055, + "physicians": 26702, + "physicist": 29052, + "physics": 9369, + "physio": 29574, + "physio": 29177, + "physiology": 32349, + "physique": 42884, + "phyto": 42197, + "pi": 741, + "pi": 5357, + "pia": 8918, + "pian": 24637, + "pianist": 21048, + "piano": 49278, + "piano": 7894, + "pianos": 47904, + "piazza": 28496, + "pic": 901, + "pic": 1282, + "pical": 5482, + "picard": 48507, + "picasso": 21481, + "piccad": 33876, + "piccadilly": 37287, + "piccollage": 43621, + "pick": 6379, + "pick": 3142, + "picked": 6018, + "picker": 43105, + "pickering": 47605, + "picket": 33559, + "picking": 9545, + "pickle": 24570, + "pickled": 21705, + "pickles": 25001, + "picks": 8551, + "pickup": 15382, + "pickups": 33383, + "picnic": 12007, + "pico": 23363, + "picoftheday": 18319, + "pics": 2559, + "pict": 18778, + "pictorial": 40640, + "picture": 11663, + "picture": 1674, + "pictured": 7647, + "pictures": 3646, + "picturesque": 24894, + "pid": 5225, + "piday": 48056, + "pie": 12065, + "pie": 5319, + "piece": 39632, + "piece": 2754, + "pieces": 6194, + "pied": 24686, + "pied": 12713, + "piedmont": 39691, + "pier": 5641, + "pier": 11348, + "pierc": 49216, + "pierce": 48462, + "pierce": 16782, + "pierced": 
32799, + "piercing": 22557, + "piero": 43125, + "pierre": 34670, + "pierre": 11985, + "piers": 29030, + "pies": 6898, + "pieter": 44801, + "pietro": 42169, + "piff": 40719, + "pig": 12009, + "pig": 9619, + "pigeon": 18008, + "pigeons": 32910, + "piggy": 28245, + "pigment": 40284, + "pigs": 16228, + "pik": 48539, + "pika": 47372, + "pikach": 27268, + "pikachu": 28107, + "pike": 33457, + "pike": 14011, + "pil": 2893, + "pil": 20645, + "pilates": 29518, + "pile": 44403, + "pile": 13930, + "piled": 26873, + "piles": 31968, + "pilgri": 13966, + "pilgrim": 32662, + "pilgrimage": 24335, + "pilgrims": 31370, + "piling": 43050, + "pilip": 27234, + "pilipinas": 32392, + "pill": 14830, + "pill": 19226, + "pillar": 17322, + "pillars": 22054, + "pillow": 42237, + "pillow": 12182, + "pillows": 26499, + "pills": 23964, + "pilo": 37526, + "pilot": 31619, + "pilot": 6687, + "pilots": 15586, + "pilsner": 47153, + "pim": 15285, + "pim": 35472, + "pimp": 35789, + "pin": 2629, + "pin": 5164, + "pinball": 31679, + "pinch": 26114, + "pine": 9398, + "pine": 7374, + "pineapple": 14831, + "pines": 20338, + "ping": 23720, + "ping": 2089, + "pinion": 40557, + "pink": 11151, + "pink": 3360, + "pinkfloyd": 48520, + "pinky": 29803, + "pinn": 31448, + "pinnacle": 32754, + "pinned": 12165, + "pinning": 44515, + "pino": 36633, + "pinot": 41399, + "pinot": 21146, + "pinoy": 43578, + "pinoy": 35258, + "pins": 14619, + "pinst": 41173, + "pint": 42537, + "pint": 13584, + "pinterest": 15379, + "pinto": 35992, + "pints": 27935, + "pinup": 37349, + "pio": 22108, + "pion": 36728, + "pion": 29190, + "pione": 7975, + "pioneer": 34892, + "pioneer": 12459, + "pioneering": 25933, + "pioneers": 22383, + "pious": 42441, + "pip": 30854, + "pipe": 29333, + "pipe": 10459, + "pipel": 12387, + "pipeline": 14151, + "pipelines": 39683, + "piper": 47052, + "piper": 16293, + "pipes": 16991, + "piping": 40744, + "pippa": 47672, + "pir": 4351, + "pir": 38899, + "piracy": 39452, + "piran": 49034, + "pirate": 38680, + "pirate": 13592, + "pirates": 10442, + "pire": 16613, + "pires": 14988, + "pis": 9230, + "pis": 44441, + "pisa": 43632, + "pisces": 45982, + "piss": 20818, + "pissed": 17989, + "pist": 15556, + "pist": 32826, + "pistachi": 29760, + "pistachio": 36320, + "pistol": 20480, + "piston": 48236, + "pistons": 27242, + "pistor": 48162, + "pit": 2946, + "pit": 7476, + "pita": 27070, + "pitbull": 25295, + "pitch": 8992, + "pitch": 5872, + "pitched": 28447, + "pitcher": 13445, + "pitchers": 27835, + "pitches": 21005, + "pitching": 16455, + "piti": 47568, + "pits": 24144, + "pitt": 7607, + "pitt": 15599, + "pitts": 9531, + "pittsburgh": 10453, + "pity": 24380, + "pius": 39988, + "pivo": 18009, + "pivot": 31805, + "pivotal": 31432, + "pix": 6185, + "pix": 13088, + "pixar": 27493, + "pixel": 14384, + "pixel": 13241, + "pixelart": 18516, + "pixels": 34099, + "pixie": 35573, + "piyu": 30772, + "piyush": 36191, + "piyushgoyal": 45318, + "pizz": 3897, + "pizza": 4474, + "pizzas": 30647, + "pizzeria": 44174, + "pj": 12524, + "pj": 17179, + "pjnet": 22011, + "pjs": 36009, + "pk": 10149, + "pk": 10991, + "pkg": 49011, + "pkk": 47480, + "pknot": 41779, + "pkwy": 36827, + "pl": 712, + "pl": 5678, + "pla": 841, + "pla": 19945, + "plac": 2331, + "place": 14884, + "place": 1445, + "placed": 9729, + "placement": 16724, + "placements": 43885, + "placer": 49170, + "places": 4448, + "placing": 18531, + "plague": 25360, + "plaid": 23291, + "plain": 22776, + "plain": 10709, + "plains": 16345, + "plan": 1740, + "plan": 2970, + "pland": 24801, + "plane": 22728, + 
"plane": 5363, + "planes": 12581, + "planet": 16833, + "planet": 5172, + "planetary": 28361, + "planets": 22315, + "plank": 30991, + "plankton": 48249, + "plann": 6409, + "planned": 8169, + "planner": 18083, + "planners": 33664, + "planning": 4446, + "plano": 34063, + "plans": 4181, + "plant": 8521, + "plant": 3912, + "plantation": 20014, + "plantbased": 33720, + "planted": 14286, + "planter": 34453, + "planters": 43661, + "planting": 13922, + "plants": 5829, + "plaque": 16097, + "plaques": 45610, + "plar": 26754, + "plas": 45673, + "plasma": 24999, + "plaster": 31980, + "plastic": 15645, + "plastic": 6102, + "plasticpollution": 47129, + "plastics": 20999, + "plasticsurgery": 48555, + "plat": 3172, + "plata": 46456, + "plate": 28744, + "plate": 5135, + "plateau": 29301, + "plated": 21161, + "plates": 11485, + "platform": 5549, + "platforms": 13551, + "platin": 10267, + "plating": 44564, + "platinum": 10979, + "plato": 41101, + "platoon": 41254, + "platt": 44459, + "platt": 40097, + "platte": 46785, + "platter": 29071, + "platz": 40878, + "plau": 39139, + "play": 1222, + "play": 1453, + "playa": 23756, + "playable": 33885, + "playback": 39194, + "playbook": 34856, + "playboy": 24383, + "played": 3432, + "player": 24503, + "player": 2477, + "players": 3030, + "playful": 23871, + "playground": 15861, + "playhouse": 23254, + "playin": 24674, + "playing": 47368, + "playing": 1629, + "playlist": 9180, + "playlists": 47183, + "playo": 5804, + "playoff": 9655, + "playoffs": 9548, + "plays": 5134, + "playstation": 11332, + "playtime": 43037, + "playwright": 32070, + "plaza": 8943, + "plc": 16827, + "ple": 926, + "ple": 1619, + "plea": 21956, + "plead": 47539, + "pleads": 31425, + "plear": 21362, + "pleas": 8481, + "pleas": 48740, + "pleasant": 12271, + "please": 41074, + "please": 1474, + "pleased": 6107, + "pleasing": 32893, + "pleasure": 5854, + "pleasures": 29513, + "pledge": 11507, + "pledged": 36799, + "pledges": 26746, + "pledis": 41202, + "plein": 43429, + "plenary": 19891, + "plenty": 7524, + "pler": 17677, + "ples": 6248, + "pless": 39821, + "pless": 17059, + "plets": 43230, + "plex": 23765, + "plex": 15241, + "pley": 19543, + "pli": 30001, + "pli": 45797, + "plic": 5806, + "plicity": 19823, + "plight": 40317, + "plin": 44531, + "plin": 32335, + "pline": 25376, + "pling": 12899, + "plings": 31184, + "pll": 47629, + "pll": 25266, + "pln": 48755, + "plo": 1778, + "plo": 43523, + "plor": 34695, + "plot": 9918, + "plots": 25672, + "plotting": 30751, + "plough": 33811, + "plow": 38363, + "pls": 5572, + "plu": 2052, + "plug": 12628, + "plugged": 23261, + "plugin": 31278, + "plugins": 48797, + "plugs": 28083, + "plum": 26267, + "plum": 16202, + "plumb": 21769, + "plumber": 43478, + "plumbing": 24647, + "plume": 39495, + "plun": 15122, + "plunge": 26506, + "plur": 44664, + "plus": 3097, + "plush": 18926, + "pluto": 26380, + "ply": 17249, + "ply": 28705, + "plying": 36071, + "plym": 11907, + "plymouth": 13786, + "plz": 10538, + "pm": 13699, + "pm": 990, + "pmi": 41206, + "pmln": 23208, + "pmo": 18782, + "pmoindia": 20374, + "pms": 44223, + "pn": 14431, + "pn": 13774, + "pnc": 37148, + "pne": 30966, + "pneu": 28714, + "pneumonia": 42906, + "png": 20992, + "pnp": 25972, + "pnpp": 42175, + "pnw": 31521, + "po": 628, + "po": 3057, + "poa": 43912, + "poached": 27665, + "poaching": 35140, + "poc": 13232, + "poc": 27780, + "pocaly": 37987, + "pocalypse": 42307, + "poche": 38336, + "poche": 39022, + "pocket": 29147, + "pocket": 8504, + "pockets": 19566, + "pocon": 41850, + "pod": 3583, + "pod": 7446, + 
"podcast": 39654, + "podcast": 4294, + "podcasting": 40106, + "podcasts": 19392, + "pode": 33368, + "poder": 24960, + "podernfamily": 26620, + "podi": 32853, + "podium": 14093, + "pods": 18776, + "poe": 4746, + "poe": 19254, + "poem": 9436, + "poems": 15577, + "poet": 41019, + "poet": 9872, + "poetic": 26365, + "poetry": 20192, + "poetry": 6038, + "poetryday": 39255, + "poets": 19804, + "pof": 40850, + "poff": 28236, + "pogba": 25998, + "poign": 29682, + "poignant": 32138, + "poin": 9074, + "point": 13280, + "point": 2301, + "pointe": 24631, + "pointed": 20703, + "pointer": 29883, + "pointers": 36760, + "pointing": 19233, + "pointless": 33586, + "points": 3396, + "pois": 17008, + "poise": 45087, + "poised": 27354, + "poison": 30722, + "poison": 17074, + "poisoned": 43624, + "poisoning": 25750, + "poisonous": 37131, + "pok": 15387, + "poke": 6892, + "poke": 23186, + "pokemon": 16239, + "pokemon": 9528, + "pokemongo": 23985, + "poker": 30735, + "poker": 11865, + "pokes": 40221, + "poking": 49169, + "poké": 20656, + "pokémon": 22066, + "pol": 977, + "pol": 7649, + "pola": 43876, + "poland": 9834, + "polar": 21432, + "polar": 12214, + "polari": 27919, + "polaris": 37965, + "polarized": 48437, + "polaro": 25237, + "polaroid": 30427, + "poldark": 41322, + "pole": 26682, + "pole": 8170, + "poles": 22585, + "poli": 9675, + "poli": 5414, + "polic": 16126, + "police": 15535, + "police": 2120, + "policeman": 37713, + "policemen": 47946, + "polici": 10819, + "policies": 10993, + "policing": 20969, + "policy": 30173, + "policy": 4660, + "polio": 30533, + "polis": 16133, + "polish": 46941, + "polish": 9632, + "polished": 21478, + "polishing": 43629, + "polit": 2247, + "politan": 15337, + "polite": 31497, + "politi": 40597, + "politic": 33333, + "political": 37744, + "political": 4197, + "politically": 24323, + "politician": 15960, + "politicians": 12914, + "politico": 39403, + "politics": 4929, + "polk": 33317, + "polka": 29476, + "poll": 7032, + "pollen": 27651, + "pollin": 19152, + "pollinators": 36599, + "polling": 18024, + "pollo": 42755, + "pollock": 37614, + "polls": 11813, + "pollu": 8370, + "polluted": 43346, + "pollution": 10384, + "polly": 31204, + "polo": 35928, + "polo": 10229, + "poly": 6833, + "poly": 18367, + "polye": 31730, + "polyester": 38514, + "polym": 23626, + "polymer": 29993, + "polyne": 38892, + "polyvore": 24771, + "pom": 7548, + "pom": 24280, + "pome": 27963, + "pomegran": 29326, + "pomegranate": 32415, + "pomer": 35156, + "pomona": 41690, + "pompe": 18352, + "pompeii": 47775, + "pompeo": 34351, + "pompey": 35079, + "pon": 3809, + "pon": 22391, + "ponce": 43637, + "pond": 10750, + "ponder": 36863, + "pondering": 47395, + "ponds": 31033, + "pone": 32183, + "pong": 40546, + "pong": 17710, + "ponies": 34157, + "pons": 41255, + "pont": 47563, + "pont": 22997, + "ponte": 40892, + "ponti": 15527, + "pontiac": 25373, + "pontifex": 33566, + "ponty": 45152, + "pony": 24438, + "pony": 12678, + "ponytail": 43265, + "poo": 6601, + "poo": 14389, + "pooch": 37037, + "poodle": 34961, + "pooh": 27103, + "pooja": 35676, + "pool": 12484, + "pool": 2831, + "poole": 26290, + "pools": 18736, + "poolside": 35509, + "poon": 33799, + "poon": 36178, + "poop": 23310, + "poor": 14528, + "poor": 3665, + "poorest": 40771, + "poorly": 21101, + "pop": 6530, + "pop": 2852, + "popart": 47425, + "popcorn": 15034, + "pope": 16994, + "pope": 9283, + "popefrancis": 37254, + "poplar": 38726, + "popo": 38835, + "popo": 35572, + "popp": 13156, + "popped": 14934, + "poppies": 30385, + "poppin": 28536, + "popping": 
18152, + "poppins": 41216, + "poppy": 32194, + "poppy": 15447, + "pops": 11705, + "popsic": 38481, + "popu": 3785, + "popul": 6593, + "popular": 15854, + "popular": 4368, + "popularity": 19235, + "populated": 38420, + "population": 8423, + "populations": 23797, + "populism": 48998, + "populist": 49376, + "popup": 33053, + "por": 817, + "por": 7697, + "pora": 23537, + "porcel": 19409, + "porcelain": 20451, + "porch": 17154, + "pore": 28267, + "pork": 40379, + "pork": 7897, + "poro": 48110, + "porridge": 34924, + "porsch": 48009, + "porsche": 44049, + "porsche": 8783, + "port": 1641, + "port": 1418, + "porta": 45037, + "portable": 11949, + "portage": 32087, + "portal": 14982, + "porte": 28654, + "ported": 16879, + "porter": 28319, + "porter": 10318, + "porters": 15670, + "portfoli": 45766, + "portfolio": 11938, + "porth": 37425, + "porti": 45760, + "porting": 26052, + "portion": 13739, + "portions": 22914, + "portland": 38366, + "portland": 8880, + "portman": 34755, + "porto": 24853, + "porto": 18947, + "portobello": 48025, + "portra": 4175, + "portrait": 39312, + "portrait": 5352, + "portraits": 14203, + "portray": 46282, + "portrayal": 39238, + "portrayed": 36093, + "ports": 7734, + "portsm": 17063, + "portsmouth": 19074, + "portu": 7159, + "portugal": 9503, + "portugue": 17498, + "portuguese": 18019, + "pos": 1780, + "pos": 11839, + "pose": 25478, + "pose": 4230, + "posed": 5206, + "posei": 47270, + "poser": 46899, + "poses": 9773, + "posey": 34852, + "posh": 26748, + "posing": 10518, + "posit": 28793, + "positi": 7895, + "position": 4657, + "positioned": 34482, + "positioning": 30657, + "positions": 12188, + "positive": 21811, + "positive": 4844, + "positively": 24688, + "positivity": 19966, + "poss": 39745, + "posse": 17414, + "posse": 28413, + "possess": 36810, + "possessed": 36220, + "possession": 16154, + "possessions": 40588, + "possi": 2521, + "possibilities": 17932, + "possibility": 18517, + "possible": 3134, + "possibly": 8601, + "possum": 38575, + "post": 3489, + "post": 1549, + "postage": 27570, + "postal": 21687, + "postcard": 14785, + "postcards": 23922, + "postdoc": 41013, + "posted": 4752, + "poster": 22881, + "poster": 3574, + "posters": 9673, + "postgame": 34873, + "postgraduate": 31997, + "posthum": 42410, + "posting": 7559, + "postman": 38285, + "postpon": 23247, + "postponed": 25097, + "posts": 7824, + "postseason": 24521, + "posture": 29681, + "posure": 35539, + "pot": 3547, + "pot": 5168, + "potam": 45825, + "potassi": 36889, + "potassium": 37147, + "potat": 5975, + "potato": 8527, + "potatoes": 11567, + "potd": 28765, + "pote": 41869, + "poten": 4454, + "potent": 26082, + "potenti": 44104, + "potential": 5100, + "potentially": 16508, + "potholes": 47506, + "potion": 46055, + "potom": 38848, + "potomac": 43372, + "pots": 19234, + "pott": 28698, + "potted": 48581, + "potter": 24975, + "potter": 9026, + "pottery": 18396, + "potts": 39839, + "potty": 43569, + "potus": 8740, + "pou": 9423, + "pouch": 26811, + "poul": 22485, + "poultry": 31005, + "poun": 33719, + "pound": 33809, + "pound": 10674, + "pounding": 46544, + "pounds": 10752, + "pour": 33112, + "pour": 8180, + "poured": 26621, + "pouring": 16098, + "pours": 26005, + "pout": 39621, + "poutine": 43768, + "pov": 25731, + "pover": 8432, + "pover": 29464, + "poverty": 9095, + "pow": 1317, + "pow": 17745, + "powder": 32427, + "powder": 9674, + "powe": 36955, + "powell": 13305, + "power": 2789, + "power": 1807, + "powerball": 47803, + "powered": 45442, + "powered": 7332, + "powerful": 4875, + "powerhouse": 22858, + 
"powering": 16231, + "powerof": 31961, + "powerpoint": 38940, + "powerrangers": 40620, + "powers": 9422, + "pox": 43649, + "poy": 34737, + "poyn": 47655, + "poz": 39953, + "pp": 604, + "pp": 4186, + "ppa": 10416, + "ppard": 23391, + "ppc": 27778, + "ppe": 24573, + "ppe": 11867, + "pped": 1873, + "ppel": 46523, + "ppen": 30663, + "pper": 6719, + "pper": 2440, + "ppers": 5232, + "ppery": 27833, + "ppet": 20744, + "ppets": 25849, + "ppg": 27433, + "ppi": 9594, + "ppie": 33795, + "ppin": 8076, + "pping": 22214, + "pping": 1682, + "ppings": 35687, + "ppl": 6758, + "pple": 12302, + "ppm": 42053, + "ppo": 10215, + "ppor": 37613, + "ppp": 14017, + "pps": 10683, + "ppv": 38864, + "ppy": 30360, + "ppy": 3860, + "pr": 766, + "pr": 4150, + "pra": 1865, + "pra": 19285, + "prab": 17901, + "prabhas": 29959, + "prabhu": 31529, + "prac": 2243, + "practi": 29995, + "practic": 5495, + "practical": 10792, + "practically": 25588, + "practice": 3349, + "practiced": 36749, + "practices": 9040, + "practicing": 12750, + "practise": 38938, + "practising": 36478, + "practiti": 19909, + "practitioner": 32591, + "practitioners": 29045, + "prada": 29456, + "pradesh": 15384, + "prado": 44141, + "prag": 31025, + "prague": 14940, + "prairi": 12629, + "prairie": 14753, + "praise": 10013, + "praised": 27649, + "praises": 23049, + "praising": 36961, + "prakash": 43708, + "prakash": 25366, + "pram": 47774, + "pran": 20048, + "prank": 23654, + "pras": 41562, + "prasad": 29562, + "prat": 23069, + "prati": 45773, + "pratt": 37863, + "pratt": 23396, + "prawn": 33102, + "prawns": 34903, + "pray": 12671, + "pray": 6041, + "prayed": 34665, + "prayer": 41452, + "prayer": 6583, + "prayers": 8393, + "prayfor": 18443, + "praying": 11550, + "prays": 46602, + "prc": 28781, + "pre": 679, + "pre": 2900, + "preach": 22545, + "preacher": 29357, + "preaching": 23642, + "precau": 36532, + "precautions": 47845, + "prece": 15361, + "preci": 5470, + "precin": 27908, + "precinct": 32587, + "precious": 8226, + "precipit": 27463, + "precipitation": 33399, + "precise": 24457, + "precisely": 34954, + "precision": 44021, + "precision": 15621, + "pred": 40370, + "predat": 13364, + "predator": 20653, + "predators": 25569, + "prede": 38454, + "predecess": 38963, + "predic": 4876, + "predict": 16900, + "predictable": 25344, + "predicted": 18702, + "predicting": 30414, + "prediction": 16296, + "predictions": 15125, + "predictive": 29798, + "predicts": 25960, + "preds": 40125, + "pree": 47026, + "preet": 30131, + "prefe": 14542, + "prefecture": 32890, + "prefer": 33426, + "prefer": 11450, + "preference": 35057, + "preferences": 38118, + "preferred": 18772, + "prefers": 38528, + "pregame": 18575, + "pregn": 7190, + "pregnancy": 12769, + "pregnant": 11195, + "prehistoric": 32750, + "prejudice": 28337, + "preli": 15523, + "prelimin": 19990, + "preliminary": 20997, + "prelims": 43223, + "prelude": 42966, + "prem": 32090, + "prem": 21724, + "premature": 39253, + "premi": 2413, + "premier": 16996, + "premier": 5539, + "premiere": 5367, + "premiered": 27652, + "premieres": 19907, + "premiering": 32615, + "premierleague": 22608, + "premiers": 44883, + "premiership": 23665, + "premiosm": 38460, + "premiosmtvmiaw": 38630, + "premise": 45952, + "premises": 27266, + "premium": 8011, + "pren": 20801, + "preneur": 46288, + "preorder": 16703, + "preorders": 45985, + "prep": 6430, + "prep": 7277, + "prepa": 26270, + "prepaid": 42934, + "prepar": 4968, + "preparation": 11651, + "preparations": 19135, + "prepare": 7014, + "prepared": 7677, + "preparedness": 29492, + 
"prepares": 16375, + "preparing": 7365, + "prepped": 34379, + "prepping": 16459, + "preps": 14765, + "prequel": 40461, + "pres": 1385, + "pres": 8529, + "presale": 27135, + "presby": 30447, + "presbyter": 33959, + "presbyterian": 35370, + "preschool": 24354, + "prescott": 29392, + "prescri": 14851, + "prescribed": 36968, + "prescription": 23061, + "preseason": 13813, + "presen": 16742, + "presence": 8848, + "present": 2344, + "present": 2881, + "presentation": 4594, + "presentations": 16998, + "presented": 4587, + "presenter": 18587, + "presenters": 32759, + "presenting": 5339, + "presents": 4215, + "preserv": 17616, + "preservation": 21074, + "preserve": 15570, + "preserved": 23161, + "preserves": 44881, + "preserving": 32315, + "presi": 1697, + "presiden": 43374, + "presidency": 18077, + "president": 19900, + "president": 1940, + "presidente": 47363, + "presidenti": 48297, + "presidential": 8503, + "presidents": 16726, + "presiding": 45298, + "presley": 30013, + "press": 4124, + "press": 2124, + "pressed": 20080, + "presser": 27826, + "presses": 33748, + "pressing": 20893, + "pressure": 6083, + "pressures": 38487, + "prest": 41840, + "presti": 12245, + "prestige": 29328, + "prestigious": 15888, + "presto": 42211, + "preston": 37335, + "preston": 15179, + "presu": 21667, + "presumably": 42562, + "pret": 9652, + "preten": 15871, + "pretend": 18111, + "pretending": 21306, + "pretoria": 36080, + "prett": 46667, + "prettier": 31745, + "prettiest": 22866, + "pretty": 18286, + "pretty": 2111, + "pretz": 24890, + "pretzel": 36707, + "pretzels": 45468, + "prev": 20274, + "prevail": 31637, + "prevalence": 41729, + "prevalent": 46260, + "preven": 29382, + "prevent": 26436, + "prevent": 7968, + "preventable": 44250, + "prevented": 35356, + "preventing": 21756, + "prevention": 9500, + "preventive": 40949, + "prevents": 31746, + "preview": 4449, + "previews": 20279, + "previous": 9252, + "previously": 13359, + "prey": 17131, + "prez": 17956, + "pri": 955, + "pri": 23400, + "pric": 24275, + "price": 13254, + "price": 2827, + "priced": 16934, + "priceless": 15743, + "prices": 5954, + "pricing": 14800, + "prick": 43921, + "prick": 46516, + "pride": 15323, + "pride": 3436, + "pridemonth": 41410, + "prie": 22477, + "priest": 38756, + "priest": 14222, + "priests": 30005, + "prim": 22004, + "prima": 35611, + "prima": 33277, + "primal": 36604, + "primar": 21579, + "primaries": 46126, + "primarily": 29465, + "primark": 48329, + "primary": 35024, + "primary": 5814, + "primavera": 44899, + "prime": 14162, + "prime": 5183, + "primed": 45694, + "primer": 22388, + "primetime": 29763, + "primitive": 37467, + "primo": 43215, + "primrose": 45891, + "prin": 1588, + "prince": 9457, + "prince": 4735, + "princes": 45329, + "princes": 30136, + "princess": 24123, + "princess": 5079, + "princesses": 34161, + "princeton": 22433, + "princi": 5129, + "principal": 33599, + "principal": 8860, + "principals": 27524, + "principle": 19595, + "principles": 13755, + "print": 17851, + "print": 3557, + "printable": 29648, + "printed": 7978, + "printer": 14521, + "printers": 27881, + "printing": 7369, + "printmaking": 38669, + "prints": 7704, + "prior": 20328, + "prior": 10572, + "priorit": 47773, + "prioriti": 28822, + "priorities": 15232, + "prioritize": 46715, + "priority": 12451, + "priory": 38665, + "prisc": 32468, + "priscilla": 42396, + "prise": 23343, + "prism": 49311, + "prism": 34356, + "prison": 9281, + "prison": 6622, + "prisoner": 21427, + "prisoners": 17460, + "prisons": 26607, + "pristine": 30618, + "prit": 41668, + 
"prit": 37523, + "prith": 39173, + "prius": 43561, + "priv": 3270, + "privacy": 10437, + "private": 20362, + "private": 4439, + "privately": 32970, + "privati": 27379, + "privi": 8367, + "privileg": 18015, + "privilege": 11537, + "privileged": 18166, + "prix": 10875, + "priya": 31275, + "priyan": 16488, + "priyanka": 31959, + "priyankach": 30030, + "priyankachopra": 30264, + "prize": 48222, + "prize": 4521, + "prized": 38769, + "prizes": 9268, + "prk": 37094, + "pro": 644, + "pro": 2630, + "proactive": 33364, + "prob": 17706, + "prob": 24007, + "probab": 3907, + "probability": 32637, + "probable": 42444, + "probably": 4047, + "probation": 36531, + "probe": 14359, + "probes": 48564, + "probiotics": 49395, + "proble": 2719, + "problem": 4324, + "problematic": 33767, + "problems": 4671, + "probs": 16330, + "probz": 34243, + "proc": 38417, + "proce": 4076, + "procedu": 18204, + "procedural": 48177, + "procedure": 20163, + "procedures": 21109, + "proceed": 26664, + "proceed": 33894, + "proceedings": 26953, + "proceeds": 11882, + "process": 17291, + "process": 4078, + "processed": 23816, + "processes": 15169, + "processing": 11737, + "procession": 26288, + "processor": 22838, + "processors": 43634, + "proclaimed": 34489, + "proclamation": 32065, + "procra": 25361, + "procrastin": 25586, + "procrastination": 42825, + "procreate": 39336, + "proctor": 47204, + "procu": 21001, + "procurement": 23733, + "prod": 44349, + "prod": 11991, + "prodi": 27759, + "prodigy": 31973, + "produ": 27852, + "produc": 1471, + "produce": 7529, + "produced": 7479, + "producer": 7064, + "producers": 13883, + "produces": 19940, + "producing": 13579, + "product": 32602, + "product": 4306, + "production": 4146, + "productions": 14166, + "productive": 9697, + "productivity": 12800, + "products": 3964, + "prof": 15043, + "prof": 5488, + "profe": 2611, + "profess": 5486, + "professi": 3705, + "profession": 8104, + "profession": 19671, + "professional": 46007, + "professional": 4774, + "professionalism": 41252, + "professionally": 33892, + "professionals": 10165, + "professor": 47302, + "professor": 6092, + "professors": 27758, + "profici": 34685, + "profile": 14291, + "profile": 6444, + "profiles": 22070, + "profiling": 37123, + "profit": 16941, + "profit": 7909, + "profitable": 25465, + "profits": 13410, + "profound": 48245, + "profound": 22998, + "profs": 19260, + "prog": 22219, + "progno": 46070, + "program": 4162, + "program": 2737, + "programme": 6322, + "programmer": 37001, + "programmes": 20468, + "programming": 10831, + "programs": 7345, + "progre": 7069, + "progress": 4421, + "progressi": 23297, + "progressing": 32346, + "progression": 24772, + "progressive": 12208, + "progressives": 41709, + "prohi": 41124, + "prohib": 45040, + "prohibition": 34440, + "proj": 39156, + "proje": 48345, + "projec": 1610, + "project": 15911, + "project": 1965, + "projected": 22873, + "projection": 22384, + "projections": 34638, + "projector": 27816, + "projects": 5090, + "proli": 19710, + "prolife": 32126, + "prolifer": 39018, + "prolific": 27839, + "prolly": 45968, + "prolon": 35379, + "prolonged": 41972, + "prom": 40363, + "prom": 7944, + "prome": 34355, + "promen": 33578, + "promenade": 35522, + "promethe": 44183, + "promin": 35217, + "prominent": 19172, + "promis": 3963, + "promise": 6745, + "promised": 11516, + "promises": 12064, + "promising": 14183, + "promo": 3037, + "promo": 6755, + "promos": 35044, + "promote": 47384, + "promote": 8003, + "promoted": 16395, + "promoter": 33081, + "promotes": 20169, + "promoting": 9695, + 
"promotion": 9259, + "promotional": 17619, + "promotions": 19142, + "promp": 11671, + "prompt": 20198, + "prompted": 45746, + "prompts": 33490, + "proms": 37759, + "pron": 13285, + "prone": 30964, + "pronoun": 23022, + "pronounce": 40489, + "pronounced": 34109, + "pronto": 44296, + "proof": 17020, + "proof": 5248, + "proofing": 35679, + "proofs": 41023, + "prop": 19123, + "prop": 16254, + "propag": 12151, + "propaganda": 14718, + "propane": 45546, + "propel": 48439, + "propeller": 47404, + "proper": 3577, + "proper": 8205, + "properly": 12560, + "properties": 10922, + "property": 26486, + "property": 5043, + "prophe": 9662, + "prophecy": 32501, + "prophet": 15549, + "prophetic": 47476, + "prophets": 39441, + "propor": 35016, + "proportion": 35775, + "proportions": 39391, + "propos": 9455, + "proposal": 12139, + "proposals": 20568, + "propose": 28471, + "proposed": 10615, + "proposes": 27133, + "proposing": 42631, + "proposition": 44780, + "propri": 28243, + "props": 15249, + "propulsion": 49380, + "pros": 33925, + "pros": 14147, + "prosciutto": 46565, + "prose": 47063, + "prose": 28675, + "prosecco": 28839, + "prosecu": 12136, + "prosecution": 30902, + "prosecutor": 23736, + "prosecutors": 31656, + "prosp": 24242, + "prospec": 12693, + "prospect": 11211, + "prospective": 28034, + "prospects": 15372, + "prosper": 16121, + "prosper": 33526, + "prosperity": 17203, + "prosperous": 28252, + "prost": 47923, + "prostate": 28808, + "prostatec": 49064, + "prosthetic": 44602, + "prostitu": 37333, + "protag": 28950, + "protagonist": 38183, + "prote": 1845, + "protec": 5640, + "protect": 25563, + "protect": 4817, + "protected": 12266, + "protecting": 11710, + "protection": 6238, + "protections": 33772, + "protective": 17028, + "protector": 20441, + "protectors": 45039, + "protects": 21889, + "protein": 8088, + "proteins": 28661, + "protest": 6279, + "protestant": 46945, + "protested": 48089, + "protester": 42073, + "protesters": 12660, + "protesting": 18788, + "protestors": 27822, + "protests": 12450, + "proto": 8672, + "proto": 44958, + "protocol": 19938, + "protocols": 39631, + "proton": 40009, + "prototype": 16675, + "prototyping": 42081, + "prou": 5739, + "proud": 11080, + "proud": 1679, + "prouder": 39585, + "proudest": 46806, + "proudly": 11203, + "proudof": 48184, + "proudtobe": 35043, + "prov": 23772, + "prov": 35021, + "prove": 10107, + "proved": 16473, + "proven": 35405, + "proven": 14569, + "provence": 28067, + "prover": 18312, + "proverb": 34419, + "proverbs": 27016, + "proves": 16119, + "provi": 2289, + "provide": 4832, + "provided": 9046, + "providence": 19331, + "provider": 14409, + "providers": 17120, + "provides": 7161, + "providing": 7250, + "provin": 12074, + "province": 8978, + "provinces": 35050, + "provincial": 16002, + "proving": 18055, + "provision": 30148, + "provisional": 36008, + "provisions": 39269, + "provo": 15367, + "provoc": 31618, + "provocative": 43809, + "provoking": 25510, + "provost": 36627, + "prow": 38737, + "prowrestling": 39825, + "prox": 41616, + "proxim": 31436, + "proximity": 38298, + "proxy": 31680, + "prs": 23879, + "pru": 12961, + "pruitt": 39453, + "prun": 29029, + "pruning": 48133, + "pry": 31965, + "pryor": 43375, + "ps": 3982, + "ps": 814, + "psa": 14031, + "psal": 13859, + "psalm": 17995, + "psalms": 35003, + "psb": 37017, + "psc": 43118, + "psd": 28810, + "pse": 19737, + "pse": 5423, + "pseu": 24919, + "pseudo": 46618, + "psg": 17123, + "psi": 45848, + "psi": 24533, + "psic": 29299, + "psis": 33041, + "psl": 21373, + "psn": 36781, + "pso": 27045, + 
"pson": 7487, + "psori": 44688, + "psp": 32769, + "pss": 35718, + "pss": 42535, + "psst": 47814, + "pst": 12692, + "psu": 41286, + "psu": 28338, + "psv": 44530, + "psy": 3576, + "psy": 11056, + "psych": 31041, + "psych": 20509, + "psyched": 19932, + "psyched": 35199, + "psychedelic": 23292, + "psychi": 18147, + "psychiatric": 30578, + "psychiatry": 39706, + "psychic": 24916, + "psycho": 6472, + "psycho": 22154, + "psychological": 18153, + "psychologist": 32827, + "psychology": 12352, + "psychop": 30112, + "psychotic": 48774, + "pt": 11139, + "pt": 1459, + "pta": 11586, + "ptbo": 40481, + "ptc": 44646, + "pte": 47804, + "pter": 49323, + "pti": 29375, + "pti": 10491, + "ptic": 20670, + "ption": 3479, + "ptions": 24963, + "pto": 31372, + "pto": 34092, + "pton": 19780, + "pts": 5886, + "ptsd": 23973, + "ptv": 42402, + "pu": 755, + "pu": 11780, + "pub": 20720, + "pub": 6301, + "puberty": 44122, + "pubg": 31496, + "publ": 3434, + "publi": 1617, + "public": 3592, + "public": 2122, + "publica": 49007, + "publication": 13538, + "publications": 27334, + "publichealth": 35872, + "publicity": 20831, + "publicly": 18554, + "publish": 19032, + "published": 4311, + "publisher": 20455, + "publishers": 25222, + "publishes": 35633, + "publishing": 10994, + "publix": 47985, + "pubs": 21099, + "puc": 48779, + "puck": 17550, + "pud": 39234, + "pudding": 14025, + "puddle": 33545, + "pue": 20161, + "pueblo": 33076, + "puer": 8968, + "puerto": 12289, + "puertor": 22757, + "puertorico": 26356, + "puff": 44477, + "puff": 17184, + "puffin": 47632, + "puffs": 47453, + "puffy": 49245, + "pug": 20950, + "pug": 17739, + "pugchat": 42266, + "pugh": 41302, + "puglia": 38345, + "pugs": 39425, + "puj": 46163, + "puja": 33753, + "puk": 31811, + "pul": 2469, + "pul": 40512, + "pula": 45856, + "puli": 47293, + "pulit": 27745, + "pulitzer": 31419, + "pull": 20155, + "pull": 6857, + "pulled": 8525, + "pulling": 12897, + "pullman": 40203, + "pullover": 44020, + "pulls": 16041, + "pulmon": 32613, + "pulmonary": 39132, + "pulp": 25410, + "pulse": 40091, + "pulse": 12485, + "pulses": 42177, + "pulsion": 35398, + "pum": 37497, + "puma": 20858, + "pump": 5179, + "pump": 9173, + "pumped": 12796, + "pumping": 25150, + "pumpkin": 36386, + "pumpkin": 8842, + "pumpkins": 23787, + "pumps": 18540, + "pun": 2707, + "pun": 19929, + "punc": 43907, + "punch": 29332, + "punch": 10730, + "punched": 31689, + "punches": 35279, + "punching": 33468, + "punctu": 31565, + "punctuation": 47051, + "pundit": 41466, + "pune": 32593, + "pune": 14488, + "pung": 45420, + "puni": 11479, + "punish": 34569, + "punished": 31598, + "punisher": 38509, + "punishment": 19099, + "punjab": 19405, + "punjab": 12883, + "punjabi": 25430, + "punk": 28933, + "punk": 7246, + "punks": 47171, + "puns": 35231, + "punt": 32699, + "punta": 34112, + "punter": 47092, + "pup": 11926, + "pup": 11302, + "pupil": 27265, + "pupils": 13628, + "pupp": 7116, + "puppet": 18439, + "puppets": 28475, + "puppies": 14820, + "puppy": 25431, + "puppy": 6829, + "puppylove": 40849, + "pups": 20778, + "pur": 1727, + "pur": 6265, + "pura": 25596, + "puram": 46174, + "purcell": 46065, + "purch": 8384, + "purchase": 5481, + "purchased": 13399, + "purchases": 21887, + "purchasing": 20718, + "purdu": 40691, + "purdue": 22280, + "pure": 14202, + "pure": 5979, + "puree": 45474, + "purely": 32459, + "puremichigan": 39783, + "purest": 45497, + "purge": 33514, + "puri": 16910, + "puri": 21974, + "purification": 47724, + "purity": 29780, + "purple": 17837, + "purple": 5496, + "purpose": 33492, + "purpose": 7391, 
+ "purposes": 22020, + "purr": 49262, + "purr": 46343, + "purse": 16480, + "pursue": 19463, + "pursuing": 26424, + "pursuit": 16469, + "purée": 40981, + "pus": 13841, + "pusa": 40825, + "push": 16028, + "push": 6831, + "pushaw": 35407, + "pushaward": 35448, + "pushawards": 47184, + "pushed": 16155, + "pushes": 23828, + "pushing": 11549, + "put": 29535, + "put": 1983, + "putin": 10693, + "putnam": 40235, + "puts": 7898, + "putt": 30279, + "putter": 44723, + "putting": 5154, + "puzz": 19760, + "puzzle": 12875, + "puzzles": 27986, + "pv": 14517, + "pv": 13495, + "pvc": 26959, + "pvp": 44172, + "pvt": 29898, + "pw": 19419, + "pw": 16067, + "pwc": 22965, + "px": 24790, + "px": 10262, + "pxrtg": 36262, + "py": 4005, + "py": 7504, + "pye": 31099, + "pyeongchang": 36066, + "pyg": 41450, + "pyram": 14405, + "pyramid": 18725, + "pyramids": 36877, + "pyrene": 36740, + "pyrenees": 39744, + "pyro": 39762, + "python": 13370, + "pz": 48361, + "pé": 43167, + "q": 80, + "q": 336, + "qa": 24944, + "qa": 16360, + "qad": 27844, + "qadri": 35672, + "qaeda": 31246, + "qanda": 48672, + "qanon": 19182, + "qant": 35404, + "qantas": 43250, + "qatar": 32804, + "qatar": 10872, + "qb": 8073, + "qbs": 38188, + "qc": 17406, + "qe": 30974, + "qf": 27215, + "qi": 25054, + "qi": 11256, + "qing": 46522, + "qing": 34339, + "ql": 28366, + "qld": 23039, + "qld": 13765, + "qldpol": 42296, + "qm": 42148, + "qotd": 24504, + "qpr": 24788, + "qq": 31960, + "qr": 18193, + "qs": 14364, + "qt": 15013, + "qtr": 44803, + "qu": 666, + "qu": 28646, + "qua": 20363, + "quack": 45575, + "quad": 11656, + "quad": 13419, + "quadcopter": 39792, + "quadru": 35831, + "quaid": 34265, + "quail": 34392, + "quaint": 45976, + "quake": 8421, + "quaker": 43395, + "quakes": 24572, + "qual": 9979, + "qual": 32405, + "qualcomm": 38683, + "quali": 4574, + "qualification": 21508, + "qualifications": 35225, + "qualified": 11927, + "qualifier": 18733, + "qualifiers": 21388, + "qualifies": 35820, + "qualify": 17019, + "qualifying": 11895, + "qualitative": 45847, + "qualities": 20488, + "quality": 28545, + "quality": 3027, + "quan": 11669, + "quan": 27490, + "quand": 28198, + "quant": 15050, + "quanti": 31540, + "quantitative": 40583, + "quantities": 33917, + "quantity": 26920, + "quantum": 15320, + "quar": 3856, + "quare": 42549, + "quarry": 27601, + "quart": 7851, + "quarter": 8816, + "quarter": 6632, + "quarterback": 16545, + "quarterfinal": 37992, + "quarterfinals": 28971, + "quarterly": 23350, + "quarters": 10146, + "quartet": 18056, + "quartz": 17752, + "quat": 25715, + "quattro": 40300, + "quay": 40276, + "quay": 17304, + "que": 1147, + "que": 2319, + "quebec": 15373, + "queen": 6407, + "queen": 2997, + "queenof": 44398, + "queens": 22943, + "queens": 9330, + "queensland": 15168, + "queer": 38874, + "queer": 18161, + "quel": 39774, + "quel": 21879, + "quen": 23876, + "quen": 38324, + "quent": 23808, + "quentin": 27530, + "quer": 17378, + "quer": 26859, + "quered": 23210, + "queries": 32958, + "querque": 30338, + "query": 27464, + "ques": 25328, + "ques": 7715, + "queso": 40110, + "quest": 31653, + "quest": 4846, + "questi": 2391, + "question": 18961, + "question": 4382, + "questionable": 30733, + "questioned": 31847, + "questioning": 24887, + "questions": 3883, + "quests": 44611, + "quet": 8513, + "quets": 39055, + "quetta": 38326, + "quette": 18993, + "queu": 32705, + "queue": 18549, + "queues": 40649, + "queuing": 44082, + "quez": 18677, + "quezon": 41117, + "qui": 1912, + "qui": 18046, + "quic": 26474, + "quiche": 47723, + "quick": 5969, + "quick": 
3712, + "quicker": 29211, + "quickest": 37734, + "quickly": 7787, + "quid": 30732, + "quie": 43875, + "quien": 43482, + "quiere": 42723, + "quiero": 32567, + "quiet": 17853, + "quiet": 7557, + "quietly": 22208, + "quig": 44690, + "quil": 12305, + "quill": 48951, + "quilt": 23977, + "quilted": 46052, + "quin": 8607, + "quin": 17167, + "quincy": 27640, + "quind": 32339, + "quinn": 12306, + "quinoa": 26703, + "quins": 39701, + "quint": 26898, + "quinta": 47446, + "quinte": 22098, + "quintess": 37538, + "quintet": 35125, + "quipment": 42813, + "quir": 15943, + "quirky": 25044, + "quis": 15064, + "quist": 25128, + "quit": 19358, + "quit": 11140, + "quite": 4135, + "quito": 35828, + "quits": 32505, + "quitting": 33871, + "quity": 33133, + "quiz": 31197, + "quiz": 8344, + "quizz": 35041, + "quo": 3046, + "quo": 28127, + "quoi": 45549, + "quot": 5452, + "quot": 47587, + "quota": 42097, + "quotation": 49195, + "quote": 15446, + "quote": 4020, + "quoted": 27706, + "quoteoftheday": 19975, + "quotes": 5808, + "quoting": 31651, + "qur": 37782, + "quran": 19690, + "qureshi": 46307, + "qvist": 42322, + "qx": 45038, + "r": 81, + "r": 337, + "ra": 559, + "ra": 1735, + "raa": 44344, + "rab": 14816, + "rab": 33224, + "rabb": 6875, + "rabbi": 20959, + "rabbit": 10274, + "rabbits": 27028, + "rabhu": 25806, + "rable": 10182, + "rac": 1773, + "rac": 30462, + "raccoon": 29516, + "race": 10978, + "race": 2471, + "racec": 18814, + "racecourse": 25036, + "raced": 36021, + "racer": 16798, + "racers": 33603, + "races": 8605, + "raceway": 24650, + "rach": 6876, + "rach": 33429, + "racha": 21952, + "racha": 35022, + "rachael": 29095, + "rachel": 13511, + "rachel": 8029, + "raci": 33381, + "racial": 13801, + "racially": 43577, + "racing": 23306, + "racing": 3699, + "racism": 11276, + "racist": 9684, + "racists": 41777, + "rack": 24600, + "rack": 12034, + "racket": 37691, + "racks": 21191, + "rad": 4473, + "rad": 8238, + "rada": 30437, + "radar": 9672, + "radcliffe": 33096, + "rade": 44494, + "rade": 17911, + "rader": 45002, + "radford": 45800, + "radha": 43122, + "radi": 5772, + "radial": 42028, + "radiance": 45670, + "radiant": 25614, + "radiation": 18210, + "radiator": 39372, + "radic": 18082, + "radical": 13712, + "radicals": 45903, + "radio": 7176, + "radio": 2638, + "radioactive": 34704, + "radiodisney": 36483, + "radiohead": 39472, + "radiology": 29684, + "radios": 43669, + "radish": 37789, + "radius": 37570, + "rado": 29784, + "rae": 21646, + "rae": 15051, + "rael": 45390, + "raer": 44561, + "raf": 11495, + "raf": 11490, + "rafa": 14352, + "rafa": 24850, + "rafael": 38221, + "rafael": 19216, + "rafaelnadal": 49219, + "raff": 34900, + "raffic": 32928, + "raffle": 13752, + "raffles": 43489, + "rafi": 35304, + "raft": 9233, + "rafting": 36309, + "rag": 13958, + "rag": 20687, + "rage": 8593, + "rages": 34253, + "ragh": 35642, + "ragha": 40972, + "raging": 25015, + "ragn": 24125, + "ragnar": 34385, + "ragnarok": 41856, + "ragon": 34768, + "rags": 47838, + "rah": 12277, + "rah": 8766, + "raheem": 43317, + "rahim": 24152, + "rahman": 19680, + "rahu": 13129, + "rahul": 37239, + "rahul": 17440, + "rahulg": 27510, + "rahulgandhi": 28293, + "rai": 9165, + "rai": 9638, + "raid": 6877, + "raided": 43417, + "raider": 27368, + "raider": 21455, + "raidernation": 47901, + "raiders": 11817, + "raids": 26655, + "rail": 4573, + "rail": 6879, + "raila": 47273, + "railminindia": 35557, + "railroad": 17080, + "rails": 23427, + "railway": 27614, + "railway": 7856, + "railwayana": 46750, + "railways": 20765, + "raim": 45785, + "rain": 
3128, + "rain": 2443, + "raina": 30564, + "rainbow": 24562, + "rainbow": 6286, + "rainbows": 30483, + "raine": 49038, + "raine": 6871, + "rained": 32310, + "rainf": 15024, + "rainfall": 15350, + "rainforest": 22823, + "rainier": 37850, + "raining": 13964, + "rains": 14272, + "rainy": 10222, + "rais": 14729, + "raise": 24249, + "raise": 5078, + "raised": 6027, + "raiser": 33555, + "raises": 13297, + "raisethe": 47109, + "raisin": 36864, + "raising": 6883, + "raj": 5958, + "raj": 10813, + "raja": 46069, + "raja": 19150, + "rajan": 46595, + "rajas": 16185, + "rajasthan": 18017, + "raje": 21899, + "rajesh": 43602, + "raji": 27569, + "rajini": 29600, + "rajini": 40622, + "rajinikanth": 32922, + "rajiv": 40197, + "rajkumar": 49304, + "rajput": 47572, + "raju": 47029, + "rak": 13523, + "rak": 26287, + "rake": 26825, + "rake": 32712, + "rakesh": 41083, + "ral": 8062, + "ral": 1406, + "rale": 14192, + "raleigh": 18207, + "rall": 23249, + "rallies": 25230, + "rally": 18882, + "rally": 5041, + "rallying": 36836, + "ralph": 25290, + "ralph": 12234, + "ram": 1976, + "ram": 2007, + "rama": 22112, + "ramad": 12736, + "ramadan": 15547, + "ramadhan": 47415, + "raman": 39816, + "ramapho": 43963, + "ramaphosa": 44993, + "ramatta": 49112, + "rambo": 41855, + "ramcharan": 45275, + "rame": 47745, + "ramen": 18892, + "ramesh": 48640, + "ramesh": 40186, + "rami": 43016, + "ramirez": 23877, + "ramon": 27958, + "ramone": 47201, + "ramos": 21046, + "ramp": 14271, + "rampage": 32077, + "rampant": 41985, + "ramps": 35257, + "rams": 10292, + "ramsay": 26259, + "ramsey": 19215, + "ran": 1433, + "ran": 4031, + "rana": 22143, + "ranbir": 40881, + "rance": 29034, + "ranch": 43955, + "ranch": 10659, + "rancho": 26258, + "rand": 5628, + "rand": 18718, + "randall": 23639, + "rande": 21469, + "randolph": 29899, + "random": 11396, + "random": 6160, + "randomly": 17272, + "rands": 39153, + "randy": 29479, + "randy": 13279, + "rane": 28852, + "rang": 4043, + "rang": 24377, + "range": 13627, + "range": 3818, + "ranger": 31472, + "ranger": 13593, + "rangers": 7664, + "ranges": 25685, + "ranging": 25946, + "rani": 29264, + "rani": 22631, + "rank": 11501, + "ranked": 8307, + "rankin": 37539, + "ranking": 12347, + "rankings": 12596, + "ranks": 14469, + "rano": 18608, + "rans": 46259, + "ransom": 28523, + "ransom": 34646, + "ransomware": 33815, + "rant": 46467, + "rant": 9819, + "rants": 34014, + "ranveer": 32402, + "ranveer": 41482, + "ranveerofficial": 42116, + "rao": 16913, + "rap": 7773, + "rap": 7348, + "rape": 46099, + "rape": 10070, + "raped": 23700, + "rapha": 22754, + "raphael": 30091, + "rapi": 8610, + "rapid": 47697, + "rapid": 12205, + "rapidly": 16710, + "rapids": 18848, + "raping": 44926, + "rapist": 33360, + "rapp": 19283, + "rapper": 11860, + "rappers": 30315, + "rapping": 42864, + "raps": 37887, + "raptor": 26762, + "raptors": 17035, + "raq": 39787, + "raq": 43312, + "raqqa": 47074, + "raquel": 44338, + "rar": 26819, + "rar": 24605, + "rard": 21012, + "rare": 18992, + "rare": 3865, + "rarely": 17315, + "rarest": 43237, + "rarity": 45862, + "ras": 23492, + "ras": 8224, + "rasc": 30085, + "rascal": 43481, + "rash": 14917, + "rash": 30608, + "rashad": 46527, + "rasheed": 41638, + "rashi": 19426, + "rashid": 26757, + "rasp": 10487, + "raspberries": 37742, + "raspberry": 40162, + "raspberry": 13615, + "raspberrypi": 43934, + "rass": 45654, + "rasta": 47002, + "rat": 3806, + "rat": 8985, + "rata": 28568, + "ratchet": 25078, + "rate": 5068, + "rated": 8183, + "rates": 6864, + "rath": 18268, + "rath": 39772, + "rather": 5252, 
+ "rati": 11486, + "rating": 10567, + "ratings": 14176, + "ratio": 15893, + "ration": 27002, + "ration": 35662, + "rational": 33086, + "ratna": 49078, + "ratri": 32288, + "rats": 19043, + "ratt": 20737, + "ratt": 34785, + "rattle": 40824, + "rattle": 41839, + "rau": 27744, + "raul": 30218, + "raun": 41169, + "rav": 14367, + "rav": 23606, + "rave": 38784, + "rave": 17601, + "ravel": 27927, + "raven": 10269, + "raven": 16803, + "ravens": 17946, + "ravi": 22947, + "ravi": 19538, + "ravin": 39099, + "raving": 45807, + "raviol": 41104, + "ravioli": 43460, + "raw": 10166, + "raw": 6323, + "rawlings": 40662, + "rax": 38520, + "ray": 5312, + "ray": 3077, + "raya": 29991, + "raymond": 16683, + "rayn": 47852, + "rayon": 47900, + "rays": 11064, + "raz": 9700, + "raz": 19087, + "raza": 37724, + "razer": 33832, + "razor": 24934, + "razor": 21300, + "razz": 43769, + "rb": 12740, + "rb": 7477, + "rbc": 37500, + "rbi": 15687, + "rbs": 29102, + "rc": 7575, + "rc": 7457, + "rca": 33942, + "rcb": 45240, + "rcmp": 31489, + "rcn": 49370, + "rctid": 49223, + "rd": 13501, + "rd": 1973, + "rda": 45755, + "rdr": 44364, + "rds": 32378, + "re": 515, + "re": 810, + "rea": 11521, + "reach": 4483, + "reach": 4279, + "reached": 6878, + "reaches": 14462, + "reaching": 11358, + "react": 36566, + "react": 15065, + "reacted": 42515, + "reacting": 40595, + "reaction": 7189, + "reactions": 18438, + "reactive": 42072, + "reactjs": 46173, + "reactor": 32037, + "reacts": 23115, + "read": 933, + "read": 1199, + "reader": 9884, + "readers": 10335, + "readiness": 28131, + "reading": 17556, + "reading": 2337, + "readingfc": 47428, + "readings": 23361, + "reads": 6597, + "ready": 17351, + "ready": 1112, + "reagan": 17767, + "real": 2017, + "real": 1532, + "realdonaldtrump": 7025, + "reale": 5930, + "realest": 45855, + "realestate": 32937, + "realestate": 6569, + "reali": 4185, + "realis": 38114, + "realise": 14773, + "realised": 17945, + "realising": 39537, + "realism": 20024, + "realist": 30248, + "realistic": 16157, + "realities": 32443, + "reality": 46802, + "reality": 5004, + "realization": 40402, + "realize": 7538, + "realized": 10489, + "realizes": 42918, + "realizing": 23284, + "reall": 39686, + "really": 43249, + "really": 1414, + "realm": 23083, + "realmadrid": 27866, + "realms": 43033, + "realness": 46761, + "realtime": 44002, + "realtime": 38203, + "realtor": 18038, + "realtors": 31759, + "realty": 20471, + "ream": 37242, + "ream": 15219, + "rean": 48477, + "reap": 31334, + "reaper": 29922, + "rear": 39652, + "rear": 10223, + "reas": 9121, + "reason": 12882, + "reason": 3893, + "reasonable": 18558, + "reasonably": 38589, + "reasoning": 30341, + "reasons": 5686, + "reau": 32398, + "reb": 12370, + "reb": 18796, + "reba": 48543, + "rebate": 43817, + "rebe": 25227, + "rebec": 10774, + "rebecca": 12892, + "rebel": 8185, + "rebel": 12248, + "rebellion": 22170, + "rebels": 13623, + "rebirth": 33303, + "reboot": 22385, + "reborn": 30229, + "reboun": 43381, + "rebound": 31280, + "rebounds": 19190, + "rebs": 28164, + "rebu": 43162, + "rebuild": 20022, + "rebuilding": 30880, + "rebuilt": 33137, + "rec": 1020, + "rec": 11243, + "recall": 15151, + "recalled": 32142, + "recalling": 47855, + "recalls": 24740, + "recap": 29816, + "recap": 8337, + "recaps": 47997, + "recard": 35536, + "rece": 1890, + "recei": 2148, + "receip": 38503, + "receipt": 30479, + "receipts": 41181, + "receive": 4800, + "received": 4178, + "receiver": 17659, + "receivers": 45294, + "receives": 10027, + "receiving": 7252, + "recent": 3969, + "recently": 4482, + 
"recep": 17450, + "reception": 8364, + "receptions": 46881, + "receptor": 41835, + "recess": 38182, + "recession": 27176, + "recharge": 29396, + "rechargeable": 37516, + "reci": 2037, + "recipe": 28923, + "recipe": 4614, + "recipeoftheday": 38727, + "recipes": 9243, + "recipi": 10136, + "recipient": 13703, + "recipients": 18940, + "recipro": 41789, + "recital": 23457, + "recite": 48824, + "reck": 11715, + "reckless": 26284, + "reckon": 23854, + "recl": 42277, + "reclaim": 35969, + "reclaimed": 32648, + "reco": 2535, + "reco": 46038, + "recogn": 6343, + "recogni": 5329, + "recognise": 19824, + "recognised": 20986, + "recognising": 48423, + "recognition": 9415, + "recognizable": 47240, + "recognize": 10905, + "recognized": 9929, + "recognizes": 26909, + "recognizing": 19666, + "recomm": 4540, + "recommend": 11628, + "recommend": 8942, + "recommendation": 20118, + "recommendations": 16516, + "recommended": 11100, + "recommending": 44301, + "recommends": 22940, + "recon": 15371, + "recon": 28996, + "reconciliation": 26451, + "reconstruction": 24955, + "recor": 1723, + "record": 21328, + "record": 2717, + "recorded": 9392, + "recorder": 26747, + "recording": 48237, + "recording": 6942, + "recordings": 19715, + "records": 4529, + "recover": 16785, + "recovered": 16444, + "recovering": 19005, + "recovers": 47935, + "recovery": 6591, + "recre": 22148, + "recreate": 29775, + "recreated": 40888, + "recreating": 48224, + "recreation": 17331, + "recreational": 24329, + "recru": 4745, + "recruit": 9011, + "recruit": 15585, + "recruited": 36518, + "recruiter": 43120, + "recruiters": 46542, + "recruiting": 10533, + "recruitment": 10541, + "recruits": 22647, + "recs": 33069, + "rectan": 43041, + "rectangular": 43321, + "rector": 41585, + "recu": 26798, + "recur": 19983, + "recurring": 35912, + "recy": 6790, + "recycla": 40659, + "recyclable": 48907, + "recycle": 19366, + "recycled": 16829, + "recycling": 12566, + "red": 1893, + "red": 736, + "redbubble": 46137, + "redbull": 29483, + "redbull": 29219, + "redcarpet": 32259, + "redcross": 30659, + "redd": 22149, + "redd": 40618, + "redding": 41061, + "reddish": 43383, + "reddit": 15226, + "reddy": 23028, + "rede": 10913, + "redeem": 37449, + "redefining": 46352, + "redemption": 20233, + "redesign": 24188, + "redesigned": 33111, + "redevelopment": 30322, + "redhead": 36267, + "redi": 7976, + "redman": 44753, + "redmond": 39627, + "rednation": 28180, + "rednationrising": 28262, + "redneck": 39105, + "redness": 22626, + "redo": 42524, + "redon": 48506, + "redro": 37722, + "reds": 11221, + "redskins": 19023, + "redsox": 19144, + "reduc": 5015, + "reduce": 6604, + "reduced": 10821, + "reduces": 20539, + "reducing": 13836, + "reduction": 12219, + "reductions": 48263, + "redux": 43014, + "redvelvet": 41845, + "redwings": 31058, + "redwood": 31748, + "ree": 9282, + "ree": 5813, + "reebok": 26734, + "reece": 30457, + "reed": 26209, + "reed": 10435, + "reedus": 32865, + "reef": 46557, + "reef": 15624, + "reefs": 34459, + "reel": 34467, + "reel": 17166, + "reels": 48127, + "reem": 48891, + "reen": 21638, + "reen": 23679, + "rees": 18314, + "reese": 20929, + "reeves": 23060, + "ref": 4067, + "ref": 9591, + "refe": 5624, + "refer": 18425, + "refer": 22325, + "referee": 20398, + "referees": 45583, + "referen": 13535, + "reference": 10214, + "references": 24009, + "referendum": 16732, + "referr": 47784, + "referral": 30219, + "referred": 22969, + "referring": 29797, + "refers": 30069, + "refill": 37859, + "refin": 13455, + "refined": 26098, + "refinery": 31393, + 
"refining": 48406, + "reflec": 4608, + "reflect": 13373, + "reflected": 28732, + "reflecting": 19700, + "reflection": 11884, + "reflections": 16647, + "reflective": 27008, + "reflects": 15821, + "reflex": 45756, + "reflex": 36050, + "reform": 45678, + "reform": 8875, + "reformation": 45119, + "reformed": 40880, + "reforms": 19274, + "refr": 34850, + "refre": 11995, + "refresh": 17836, + "refresh": 23288, + "refreshed": 35925, + "refresher": 41481, + "refreshing": 14159, + "refreshments": 31127, + "refriger": 21076, + "refrigerator": 36662, + "refs": 35595, + "refu": 3545, + "refuge": 5638, + "refuge": 17432, + "refugee": 11556, + "refugees": 42687, + "refugees": 8316, + "refund": 28899, + "refur": 15519, + "refurbi": 18259, + "refurbished": 26190, + "refurbishment": 35803, + "refusal": 46547, + "refuse": 16412, + "refused": 17190, + "refuses": 20085, + "refusing": 26704, + "reg": 5472, + "reg": 12353, + "regain": 37510, + "regal": 31512, + "regal": 25028, + "regan": 34062, + "regar": 5881, + "regard": 21801, + "regarded": 32017, + "regarding": 8493, + "regardless": 17220, + "regards": 23079, + "regatta": 26316, + "regen": 46545, + "regency": 29341, + "regeneration": 29257, + "regent": 30455, + "regents": 46710, + "regg": 12757, + "reggae": 37821, + "reggae": 15214, + "reggie": 21872, + "regi": 1608, + "regime": 11378, + "regiment": 18603, + "regin": 23287, + "regina": 16841, + "region": 16542, + "region": 4341, + "regional": 5552, + "regionals": 26043, + "regions": 14530, + "regis": 28094, + "register": 3967, + "registered": 10254, + "registering": 33510, + "registr": 29193, + "registration": 7302, + "registrations": 38423, + "registry": 30020, + "rego": 47351, + "regram": 30329, + "regrann": 48802, + "regre": 8627, + "regression": 43733, + "regret": 14374, + "regrets": 23231, + "regu": 3411, + "regui": 46722, + "regul": 11847, + "regular": 14882, + "regular": 6307, + "regularly": 17263, + "regulat": 14575, + "regulate": 33494, + "regulated": 31384, + "regulating": 48156, + "regulation": 14267, + "regulations": 16654, + "regulator": 30364, + "regulators": 35837, + "regulatory": 17717, + "reh": 21492, + "reha": 10193, + "rehab": 16973, + "rehabil": 17930, + "rehabilitation": 21042, + "rehear": 7273, + "rehearsal": 11482, + "rehearsals": 17977, + "rehearsing": 23125, + "rehman": 39206, + "rei": 15343, + "rei": 26033, + "reic": 41230, + "reich": 48589, + "reich": 28929, + "reid": 45125, + "reid": 11744, + "reig": 13092, + "reign": 41419, + "reign": 14827, + "reigning": 28409, + "reigns": 21217, + "reiki": 46960, + "reilly": 28120, + "reim": 35421, + "reimagined": 46799, + "reimbur": 39857, + "rein": 9240, + "rein": 45009, + "reina": 43847, + "reinde": 23810, + "reindeer": 25072, + "reinfor": 48161, + "reinforced": 41909, + "reinst": 33969, + "reinvent": 38171, + "reissue": 34042, + "reiter": 35394, + "rejec": 9958, + "reject": 22435, + "rejected": 17505, + "rejection": 32264, + "rejects": 23155, + "rejo": 20150, + "rejoice": 24712, + "rejuven": 26332, + "rek": 47542, + "rek": 19201, + "rel": 1825, + "rel": 5233, + "rela": 4362, + "reland": 15220, + "relat": 27192, + "relatable": 31010, + "relate": 17520, + "related": 5880, + "relates": 36064, + "relating": 27373, + "relation": 4561, + "relation": 16207, + "relations": 10100, + "relationship": 47239, + "relationship": 5837, + "relationships": 10610, + "relative": 17265, + "relatively": 18351, + "relatives": 21981, + "relax": 6777, + "relax": 9035, + "relaxation": 22194, + "relaxed": 18999, + "relaxing": 10256, + "relay": 12403, + "relays": 
28404, + "rele": 1602, + "release": 29100, + "release": 2706, + "released": 3410, + "releases": 7393, + "releasethe": 44008, + "releasing": 10321, + "releg": 23378, + "relegated": 45884, + "relegation": 35040, + "relent": 22213, + "relentless": 27207, + "relessly": 33927, + "relev": 9349, + "relevance": 31400, + "relevant": 10568, + "reli": 2674, + "reliability": 27220, + "reliable": 13714, + "reliance": 27727, + "relic": 27802, + "relics": 43208, + "relief": 7518, + "relies": 41579, + "relieve": 28623, + "relieved": 36597, + "religi": 4940, + "religion": 8803, + "religions": 31189, + "religious": 8289, + "relish": 35550, + "relive": 23939, + "reliving": 47558, + "rell": 28802, + "rell": 7127, + "rella": 9952, + "relle": 31390, + "reloaded": 38908, + "relocated": 46791, + "relocation": 39198, + "rels": 23320, + "relu": 32058, + "reluct": 32549, + "reluctant": 45552, + "rely": 4158, + "relying": 42168, + "rem": 15098, + "rem": 21637, + "rema": 4569, + "remain": 29144, + "remain": 6415, + "remainder": 41672, + "remained": 23714, + "remaining": 11392, + "remains": 6807, + "remake": 16234, + "remark": 11136, + "remarkable": 12404, + "remarkably": 39087, + "remarks": 15001, + "remastered": 24932, + "rematch": 26473, + "rembrandt": 45972, + "reme": 20071, + "remedi": 18442, + "remedies": 25581, + "remedy": 25794, + "remem": 7966, + "rememb": 7062, + "remember": 22045, + "remember": 2195, + "remembered": 11763, + "remembering": 8135, + "remembers": 12551, + "remembrance": 40321, + "remembrance": 15860, + "remembranceday": 48333, + "rement": 7173, + "rements": 12667, + "remi": 41693, + "remin": 3216, + "remind": 9868, + "reminded": 12309, + "reminder": 5565, + "reminders": 34121, + "reminding": 19976, + "reminds": 8303, + "remington": 43527, + "reminis": 17723, + "reminiscent": 41704, + "reminiscing": 32552, + "remix": 8519, + "remixes": 31011, + "remn": 29127, + "remnants": 39032, + "remo": 4064, + "remo": 33259, + "remodel": 34159, + "remodel": 37495, + "remodeling": 41432, + "remote": 47163, + "remote": 9687, + "remotely": 32375, + "removable": 44095, + "removal": 13679, + "remove": 9709, + "removed": 10289, + "remover": 44267, + "removes": 29018, + "removing": 18504, + "remy": 30434, + "ren": 737, + "ren": 2596, + "rena": 12591, + "renais": 15409, + "renaissance": 16007, + "renal": 36096, + "renamed": 31535, + "renault": 17600, + "rence": 19245, + "rence": 1553, + "rences": 8545, + "rend": 33932, + "rend": 22851, + "render": 39752, + "render": 13024, + "rendered": 23652, + "rendering": 21339, + "renders": 39419, + "rendez": 43293, + "rendezvous": 45644, + "rendition": 28891, + "rendon": 46272, + "rendous": 49403, + "rends": 38842, + "rene": 15438, + "rene": 12597, + "renee": 23480, + "reneg": 29909, + "renegade": 41229, + "renergy": 37151, + "renew": 6645, + "renew": 22015, + "renewable": 31269, + "renewable": 15941, + "renewableenergy": 33357, + "renewables": 21619, + "renewal": 21270, + "renewed": 20524, + "renfre": 45043, + "reng": 36795, + "reno": 11520, + "reno": 12831, + "renov": 9984, + "renovated": 23839, + "renovation": 17121, + "renovations": 31311, + "renowned": 14727, + "rens": 18183, + "renshaw": 44445, + "rent": 17377, + "rent": 1609, + "rental": 12193, + "rentals": 24105, + "rented": 35932, + "rential": 31692, + "renting": 37662, + "rently": 2615, + "rents": 31109, + "reo": 15963, + "reo": 26854, + "reon": 15761, + "reopen": 26883, + "reopened": 32868, + "reopening": 36663, + "reopens": 40644, + "rep": 4229, + "rep": 6487, + "repair": 8419, + "repaired": 32953, + "repairing": 
38534, + "repairs": 16297, + "repar": 34065, + "repe": 5785, + "repeal": 42622, + "repeal": 23938, + "repeat": 10192, + "repeated": 27904, + "repeatedly": 26630, + "repeating": 33834, + "repeats": 39158, + "repell": 46235, + "repent": 47261, + "reper": 29085, + "repet": 38533, + "repl": 13047, + "replac": 6069, + "replace": 9466, + "replaceable": 47762, + "replaced": 13200, + "replacement": 10835, + "replaces": 27781, + "replacing": 18647, + "replay": 16875, + "repleni": 44839, + "replic": 21651, + "replica": 18125, + "replied": 24238, + "replies": 18808, + "reply": 8965, + "replying": 47599, + "repor": 2628, + "report": 2417, + "reported": 7598, + "reportedly": 10953, + "reporter": 11019, + "reporters": 18454, + "reporting": 9218, + "reports": 4908, + "reposit": 41276, + "repository": 46977, + "repost": 33147, + "repost": 7217, + "repostapp": 38388, + "reposting": 20223, + "reppin": 19163, + "repping": 22574, + "repre": 3397, + "represent": 8293, + "represent": 8406, + "representation": 13520, + "representative": 13175, + "representatives": 15591, + "represented": 12299, + "representing": 7561, + "represents": 14433, + "repri": 31854, + "reproduction": 35714, + "reproductive": 25522, + "reps": 14265, + "reptile": 36938, + "reptiles": 38679, + "republic": 6376, + "republic": 7185, + "republican": 9842, + "republicans": 12384, + "repur": 41852, + "req": 42411, + "requ": 10664, + "reque": 9539, + "request": 7813, + "requested": 16199, + "requesting": 33245, + "requests": 17087, + "requi": 4863, + "requiem": 40316, + "require": 14437, + "required": 8500, + "requirement": 27146, + "requirements": 12860, + "requires": 13396, + "requiring": 33425, + "requis": 42602, + "rer": 41295, + "rer": 3407, + "rera": 14301, + "rero": 21860, + "rers": 18869, + "res": 4466, + "res": 934, + "resc": 3956, + "rescheduled": 43553, + "rescu": 8618, + "rescue": 28567, + "rescue": 5718, + "rescued": 11919, + "rescues": 32439, + "rescuing": 43770, + "rese": 13000, + "resear": 6090, + "research": 25694, + "research": 2379, + "researched": 42733, + "researcher": 18334, + "researchers": 9522, + "researching": 24544, + "reseller": 35391, + "resemb": 16916, + "resemblance": 26856, + "resemble": 37230, + "resembles": 35417, + "reser": 16420, + "reserv": 11906, + "reservation": 20289, + "reservations": 19307, + "reserve": 6911, + "reserved": 19796, + "reserves": 19705, + "reservoir": 20574, + "reset": 26250, + "resh": 47432, + "reshi": 39435, + "resi": 2152, + "residen": 22311, + "residence": 11672, + "residences": 38855, + "residency": 18545, + "resident": 9016, + "residente": 44637, + "residentevil": 48393, + "residential": 11002, + "residents": 6008, + "resign": 23584, + "resignation": 24779, + "resigned": 31014, + "resigns": 29738, + "resil": 10932, + "resili": 39212, + "resilience": 15271, + "resilient": 24694, + "resin": 24156, + "resist": 37345, + "resist": 9587, + "resistance": 7392, + "resistant": 17542, + "resisting": 43679, + "resolution": 9977, + "resolutions": 26816, + "resolve": 20787, + "resolved": 28807, + "reson": 18092, + "resonance": 42310, + "resort": 6594, + "resorts": 18839, + "resource": 43729, + "resource": 9760, + "resources": 6723, + "respec": 7466, + "respect": 31411, + "respect": 4916, + "respected": 19126, + "respectful": 24379, + "respecting": 36172, + "respective": 25817, + "respectively": 28794, + "respects": 23553, + "respir": 20771, + "respiratory": 24483, + "respon": 2421, + "respond": 12355, + "responded": 21121, + "respondents": 49253, + "responders": 25155, + "responding": 18037, + 
"responds": 17436, + "response": 5399, + "responses": 19006, + "responsi": 5490, + "responsibilities": 30375, + "responsibility": 11272, + "responsible": 8936, + "responsibly": 33675, + "responsive": 21544, + "ress": 34651, + "ress": 13629, + "resso": 15133, + "rest": 10974, + "rest": 2539, + "restart": 37378, + "restaur": 3775, + "restaurant": 41930, + "restaurant": 4489, + "restaurants": 11714, + "rested": 46020, + "resting": 18044, + "restless": 36724, + "restling": 30076, + "resto": 11118, + "resto": 41666, + "restock": 34060, + "restocked": 36966, + "restor": 8984, + "restoration": 11989, + "restorative": 46509, + "restore": 14008, + "restored": 14238, + "restoring": 24406, + "restra": 25424, + "restric": 11036, + "restricted": 27197, + "restriction": 44282, + "restrictions": 19884, + "restroom": 43423, + "restructuring": 43260, + "rests": 33775, + "resu": 10095, + "resul": 2655, + "result": 5659, + "resulted": 26449, + "resulting": 24581, + "results": 3790, + "resume": 15077, + "resumes": 30268, + "resur": 14865, + "resurg": 45962, + "resurgence": 47692, + "resurrec": 18487, + "resurrection": 25811, + "resusc": 47523, + "ret": 20500, + "ret": 10048, + "reta": 20153, + "retail": 14910, + "retail": 6455, + "retailer": 22549, + "retailers": 19418, + "retain": 24430, + "retained": 42737, + "retaining": 35571, + "retains": 42583, + "retali": 33101, + "retar": 29964, + "retarded": 44111, + "retention": 26247, + "rethink": 29078, + "rethinking": 42951, + "reti": 4721, + "retin": 31270, + "retina": 36919, + "retire": 18846, + "retired": 11477, + "retirement": 9205, + "retires": 29060, + "retiring": 21200, + "retrac": 32735, + "retreat": 11210, + "retri": 16918, + "retriever": 28394, + "retro": 6535, + "retro": 7755, + "retrogamer": 47220, + "retrogaming": 11316, + "retrospective": 27105, + "rett": 41082, + "rett": 8425, + "rette": 33066, + "return": 43042, + "return": 3458, + "returned": 10476, + "returning": 9290, + "returns": 5020, + "retwee": 48190, + "retweet": 3195, + "retweeted": 12705, + "retweeting": 32345, + "retweets": 10160, + "rety": 41550, + "reu": 20255, + "reu": 40371, + "reuben": 40450, + "reunion": 10247, + "reunite": 26179, + "reunited": 13516, + "reusable": 30395, + "reuse": 26535, + "reut": 15210, + "reuters": 15569, + "rev": 8424, + "rev": 11789, + "revamp": 29819, + "revamped": 36420, + "revan": 45277, + "reve": 3115, + "reveal": 8052, + "revealed": 7171, + "revealing": 21321, + "reveals": 6621, + "revel": 14133, + "revelation": 24053, + "revelations": 36163, + "reven": 10171, + "revenge": 12717, + "revenue": 10637, + "revenues": 33348, + "rever": 14829, + "rever": 41913, + "revere": 44187, + "reverend": 34407, + "revers": 20726, + "reversal": 33367, + "reverse": 12812, + "reversed": 42485, + "reversi": 31601, + "reversible": 34212, + "revi": 8317, + "review": 2268, + "reviewed": 16678, + "reviewer": 36409, + "reviewers": 48195, + "reviewing": 20458, + "reviews": 7227, + "revise": 46801, + "revised": 22806, + "revising": 46882, + "revision": 20335, + "revisit": 26568, + "revisited": 34302, + "revisiting": 33144, + "revit": 26367, + "revitalization": 46923, + "revival": 14142, + "revive": 26450, + "revived": 42912, + "revo": 28660, + "revol": 13447, + "revolt": 31697, + "revolu": 4900, + "revolution": 17699, + "revolution": 6644, + "revolutionary": 14734, + "revolver": 38747, + "revolving": 47230, + "revs": 49286, + "revue": 43428, + "rew": 37564, + "rewar": 15857, + "reward": 11223, + "rewarded": 27163, + "rewarding": 23351, + "rewards": 15235, + "rewatch": 35610, + 
"rewatching": 41287, + "rewind": 26867, + "rewrite": 45218, + "rex": 13002, + "rex": 10904, + "rexperience": 33924, + "rey": 9681, + "rey": 4517, + "reyes": 18255, + "reykja": 47571, + "reyn": 11998, + "reynolds": 14309, + "reys": 48284, + "rez": 27597, + "rez": 15192, + "reza": 35888, + "rf": 35529, + "rf": 16368, + "rfc": 19003, + "rfid": 40204, + "rg": 33055, + "rg": 14897, + "rgb": 36128, + "rgv": 33685, + "rh": 8745, + "rh": 22404, + "rha": 19473, + "rhapso": 32532, + "rhapsody": 35774, + "rhe": 9186, + "rhea": 28612, + "rhetor": 24359, + "rhetoric": 29985, + "rhett": 42984, + "rheu": 42953, + "rhi": 21212, + "rhin": 12269, + "rhine": 22863, + "rhine": 44833, + "rhinestone": 30450, + "rhino": 41744, + "rhino": 20056, + "rhinos": 30671, + "rho": 7637, + "rhode": 39302, + "rhode": 27907, + "rhodes": 17785, + "rhon": 25882, + "rhonda": 46100, + "rhp": 27199, + "rhs": 24551, + "rhu": 23897, + "rhubarb": 30213, + "rhy": 7740, + "rhyme": 37356, + "rhymes": 33143, + "rhys": 28647, + "rhyth": 27069, + "rhythm": 16172, + "rhythmic": 46386, + "rhythms": 40872, + "ri": 553, + "ri": 2574, + "ria": 3650, + "rial": 15200, + "rian": 7788, + "rib": 44634, + "rib": 18298, + "riba": 44992, + "ribb": 10081, + "ribbon": 12114, + "ribbons": 35271, + "ribe": 46115, + "ribs": 17519, + "ric": 920, + "ric": 4798, + "rica": 14230, + "rical": 18109, + "rican": 30958, + "ricardo": 23140, + "ricci": 35783, + "ricciardo": 49282, + "rice": 36362, + "rice": 4741, + "rich": 5223, + "rich": 4021, + "richar": 9350, + "richard": 9080, + "richard": 4470, + "richards": 11372, + "richardson": 15984, + "riche": 23286, + "richer": 34138, + "riches": 37093, + "richest": 25572, + "richi": 38934, + "richie": 19797, + "richland": 43079, + "richmond": 34143, + "richmond": 11292, + "richter": 37591, + "rick": 6237, + "rick": 3064, + "ricket": 46161, + "ricket": 23671, + "ricks": 23111, + "ricky": 19188, + "ricky": 12814, + "rico": 37962, + "rico": 11362, + "ricotta": 38473, + "rics": 7353, + "ricul": 6980, + "rid": 18103, + "rid": 9874, + "ridd": 21990, + "ridden": 32025, + "riddle": 31839, + "ride": 15816, + "ride": 2994, + "rider": 31056, + "rider": 9707, + "riders": 10826, + "rides": 11308, + "ridg": 42646, + "ridge": 16580, + "ridge": 6352, + "ridic": 9624, + "ridiculous": 12659, + "ridiculously": 25661, + "ridin": 47869, + "riding": 6765, + "ridley": 27883, + "rie": 14824, + "rie": 5322, + "ried": 7552, + "riel": 26696, + "rien": 35237, + "rier": 40714, + "rier": 13336, + "ries": 28179, + "ries": 3059, + "riesling": 36372, + "rif": 7044, + "riff": 30359, + "rifle": 15354, + "rifles": 25678, + "rift": 26681, + "rig": 18462, + "rig": 13871, + "riga": 36626, + "rigged": 35897, + "rigging": 38160, + "riggs": 40328, + "righ": 15391, + "right": 13341, + "right": 1155, + "righte": 20762, + "righteous": 28169, + "righteousness": 42481, + "rightful": 42601, + "rightly": 42669, + "rights": 3336, + "rigid": 43138, + "rigor": 36788, + "rigorous": 41654, + "rigs": 42893, + "rihanna": 13744, + "rij": 41097, + "rik": 31136, + "rik": 27832, + "rika": 28580, + "ril": 12270, + "ril": 2388, + "riley": 35056, + "riley": 12260, + "rill": 23705, + "rilla": 43956, + "rilla": 18685, + "rim": 28147, + "rim": 12199, + "rime": 27064, + "rimin": 11527, + "rimo": 47817, + "rims": 34327, + "rin": 5859, + "rin": 11739, + "rina": 12869, + "rine": 24952, + "ring": 8318, + "ring": 2540, + "ringed": 44712, + "ringer": 35761, + "ringing": 26035, + "ringo": 38845, + "rings": 5751, + "rington": 12455, + "rink": 21497, + "rinka": 47316, + "rino": 47188, + "rinse": 
48320, + "rio": 15681, + "rio": 5782, + "rion": 31623, + "rion": 34046, + "rios": 32814, + "riot": 32636, + "riot": 14218, + "riots": 24844, + "rious": 6340, + "rip": 10353, + "rip": 4243, + "ripe": 22832, + "ripley": 41589, + "ripp": 25276, + "ripped": 17815, + "ripper": 35347, + "ripping": 29126, + "ripple": 24825, + "rips": 30182, + "rir": 36792, + "ris": 6108, + "ris": 1999, + "rise": 13641, + "rise": 3151, + "risen": 23653, + "risers": 44983, + "rises": 13362, + "riseup": 35760, + "rish": 18378, + "rish": 18927, + "rishi": 48434, + "rising": 30452, + "rising": 5448, + "risis": 37998, + "risk": 27967, + "risk": 4213, + "risking": 48155, + "risks": 12474, + "risky": 27630, + "risotto": 31471, + "rist": 40610, + "rit": 5156, + "rit": 17333, + "rita": 16178, + "ritchie": 30997, + "rite": 39318, + "rite": 18429, + "rites": 36160, + "rith": 48169, + "rith": 48850, + "riti": 32904, + "rito": 19379, + "ritos": 33507, + "ritt": 26092, + "ritter": 34854, + "ritu": 13391, + "ritual": 19712, + "rituals": 31145, + "ritz": 39151, + "ritz": 25627, + "rium": 33884, + "riv": 25113, + "rival": 13412, + "rival": 15629, + "rivalry": 19511, + "rivals": 15135, + "rive": 27588, + "rive": 34917, + "river": 5239, + "river": 2473, + "rivera": 18275, + "riverdale": 28304, + "riverfront": 44439, + "rivers": 10723, + "riverside": 15809, + "riveting": 44024, + "riviera": 25851, + "rix": 43407, + "rix": 9483, + "riya": 36908, + "riyad": 31564, + "riyadh": 33577, + "riz": 18426, + "riz": 35411, + "rizal": 41555, + "rizio": 40191, + "rizz": 34826, + "rizzo": 49076, + "rj": 26016, + "rj": 20949, + "rk": 38725, + "rk": 21422, + "rl": 18041, + "rl": 14590, + "rlly": 43222, + "rly": 25954, + "rm": 20202, + "rm": 8431, + "rmb": 49097, + "rms": 40529, + "rn": 13206, + "rn": 7666, + "rna": 24566, + "rnb": 31556, + "rnc": 35309, + "rnli": 29748, + "ro": 532, + "ro": 2795, + "roa": 8313, + "roach": 31073, + "road": 4370, + "road": 1759, + "roadhouse": 47891, + "roadmap": 30111, + "roads": 6189, + "roadsafety": 39992, + "roadshow": 21168, + "roadside": 26928, + "roadster": 28920, + "roadto": 24681, + "roadtrip": 15094, + "roadway": 42744, + "roam": 34045, + "roaming": 29240, + "roano": 34184, + "roanoke": 36587, + "roar": 34193, + "roar": 18483, + "roaring": 26428, + "roast": 11404, + "roasted": 10479, + "roasting": 32228, + "rob": 2668, + "rob": 6442, + "robb": 14059, + "robb": 39673, + "robbed": 24163, + "robber": 35545, + "robbers": 40852, + "robbery": 16393, + "robbi": 44898, + "robbie": 37200, + "robbie": 15970, + "robbing": 47569, + "robbins": 23461, + "robby": 44128, + "robe": 23116, + "rober": 4532, + "robert": 8811, + "robert": 3929, + "roberta": 43373, + "roberto": 42645, + "roberto": 16227, + "roberts": 10366, + "robertson": 17643, + "robes": 29304, + "robi": 16743, + "robin": 6681, + "robin": 7988, + "robins": 35502, + "robinson": 8523, + "robles": 47646, + "roblo": 27481, + "roblox": 37798, + "robo": 4672, + "robo": 36057, + "robot": 46089, + "robot": 8797, + "robotic": 23975, + "robotics": 13546, + "robots": 13473, + "robson": 31113, + "robust": 22780, + "robyn": 34533, + "roc": 3268, + "roc": 13776, + "rocco": 30009, + "roch": 23788, + "rochdale": 41880, + "roche": 31776, + "rochelle": 40161, + "rochester": 18057, + "rock": 2640, + "rock": 2172, + "rockab": 39353, + "rockabilly": 45019, + "rocke": 19914, + "rocked": 16116, + "rockefeller": 35476, + "rocker": 29008, + "rockers": 32338, + "rocket": 25435, + "rocket": 8383, + "rockets": 13292, + "rockford": 41039, + "rockies": 20621, + "rockin": 12073, + "rocking": 
7081, + "rockn": 24442, + "rocknroll": 27840, + "rocks": 6135, + "rockstar": 23603, + "rockstar": 18000, + "rockstargames": 27516, + "rockstars": 46639, + "rockthe": 49363, + "rockwell": 34747, + "rocky": 33481, + "rocky": 9648, + "rod": 9712, + "rod": 8291, + "roddy": 42332, + "rode": 18449, + "rodeo": 18250, + "rodgers": 17612, + "rodi": 49100, + "rodney": 21753, + "rodri": 11053, + "rodrigo": 33944, + "rodriguez": 14057, + "rods": 28618, + "roe": 27671, + "roe": 9996, + "rof": 33029, + "rofl": 48228, + "roft": 45212, + "rog": 34269, + "rog": 34017, + "rogen": 23380, + "roger": 13929, + "roger": 7735, + "rogerfederer": 40182, + "rogers": 10661, + "rogue": 32575, + "rogue": 15162, + "roh": 14933, + "roh": 29840, + "rohan": 39848, + "rohing": 23600, + "rohingya": 26146, + "rohit": 44649, + "rohit": 24299, + "roi": 21877, + "rok": 36807, + "rol": 3393, + "rol": 7818, + "roland": 33713, + "roland": 19569, + "role": 18485, + "role": 3414, + "roles": 11871, + "rolex": 21093, + "rolf": 48606, + "roll": 4711, + "roll": 3341, + "rolled": 11982, + "roller": 21034, + "roller": 12342, + "rollercoaster": 38248, + "rollers": 36941, + "rollin": 27545, + "rolling": 24250, + "rolling": 6347, + "rollingstones": 41309, + "rollins": 27724, + "rollout": 47710, + "rollover": 39214, + "rolls": 8614, + "rolltide": 28101, + "rom": 11377, + "rom": 19205, + "roma": 44134, + "roma": 11631, + "romain": 48897, + "roman": 4416, + "roman": 7370, + "romance": 7215, + "romania": 15884, + "romanian": 30866, + "romano": 38409, + "romans": 23066, + "romantic": 41457, + "romantic": 8821, + "rome": 9406, + "rome": 5243, + "romeo": 14429, + "romero": 23694, + "romney": 19287, + "romo": 32248, + "romper": 43699, + "ron": 2393, + "ron": 3372, + "rona": 42385, + "ronal": 46194, + "ronald": 15683, + "ronaldo": 13463, + "ronan": 34971, + "rond": 31935, + "ronda": 37436, + "rondo": 43756, + "rone": 48082, + "rone": 32763, + "roni": 47234, + "ronnie": 45257, + "ronnie": 16421, + "rons": 19536, + "ront": 48881, + "roo": 1249, + "roo": 31227, + "rood": 38007, + "roof": 9120, + "roof": 6449, + "roofing": 24415, + "roofs": 34635, + "rooftop": 16319, + "rook": 35918, + "rookie": 9771, + "rookies": 31917, + "room": 8845, + "room": 1530, + "roomie": 36851, + "roommate": 19825, + "roommates": 37323, + "rooms": 6328, + "rooney": 17712, + "roos": 32938, + "roosevel": 17644, + "roosevelt": 18488, + "rooster": 46263, + "rooster": 30926, + "roosters": 43693, + "root": 25930, + "root": 9728, + "rooted": 30428, + "rooting": 25523, + "roots": 8084, + "rop": 43401, + "rope": 9953, + "ropes": 30506, + "ror": 8668, + "ror": 2843, + "rors": 12072, + "rory": 42804, + "rory": 17813, + "ros": 5288, + "ros": 6930, + "rosa": 14393, + "rosal": 30397, + "rosario": 33640, + "rosary": 33098, + "rosberg": 46037, + "rose": 6146, + "rose": 3568, + "roseanne": 47528, + "rosel": 33616, + "rosemary": 19472, + "rosen": 13214, + "rosen": 36424, + "rosenberg": 43558, + "rosenthal": 46990, + "roses": 9061, + "rosetta": 43800, + "rosewood": 38686, + "rosie": 43049, + "rosie": 16888, + "ross": 8801, + "ross": 2158, + "rosse": 11602, + "rossi": 24817, + "rosso": 33023, + "roster": 12487, + "roswell": 45116, + "rosy": 46705, + "rosé": 28006, + "rot": 10055, + "rot": 9643, + "rotar": 45959, + "rotary": 14654, + "rotating": 32265, + "rotation": 18089, + "rotc": 32252, + "roth": 17741, + "roth": 19139, + "rother": 23174, + "rotherham": 37687, + "rothschild": 45089, + "roti": 46940, + "roto": 34698, + "rotor": 42991, + "rots": 16642, + "rott": 34806, + "rotten": 24324, + 
"rotter": 22614, + "rotterdam": 23422, + "rotun": 42970, + "rou": 2964, + "rou": 34783, + "roud": 28375, + "rouge": 16209, + "rough": 11699, + "rough": 8511, + "roughly": 21910, + "roughs": 37598, + "rouhani": 39912, + "roulette": 39930, + "roun": 5602, + "round": 9403, + "round": 2522, + "roundabout": 29953, + "rounded": 26973, + "rounder": 37024, + "rounding": 40208, + "rounds": 11242, + "roundtable": 19386, + "roundup": 17503, + "roup": 29220, + "rourke": 38753, + "rous": 33645, + "rous": 34531, + "rousey": 46267, + "rout": 7502, + "rout": 41778, + "route": 5261, + "router": 29962, + "routes": 14923, + "routine": 12319, + "routines": 44074, + "routing": 44086, + "roux": 43416, + "rov": 23971, + "rove": 30130, + "rover": 12776, + "rovers": 16373, + "row": 5275, + "row": 1044, + "rowan": 26240, + "rowdy": 32141, + "rowe": 28323, + "rowed": 22615, + "rower": 43345, + "rowers": 41806, + "rowing": 12807, + "rowland": 33037, + "rowley": 48793, + "rowling": 29371, + "rown": 22287, + "rown": 25060, + "rows": 9409, + "rox": 14111, + "rox": 41033, + "roxy": 28093, + "roy": 2128, + "roy": 6354, + "royal": 6691, + "royal": 3853, + "royale": 20630, + "royalnavy": 41545, + "royals": 13335, + "royalties": 48660, + "royalty": 18296, + "royalwedding": 27461, + "royce": 18444, + "royd": 41476, + "royo": 39357, + "roz": 28989, + "roz": 37250, + "rp": 17305, + "rp": 8174, + "rpa": 41872, + "rpg": 12445, + "rpm": 23715, + "rps": 49215, + "rr": 5311, + "rr": 9126, + "rrp": 36967, + "rrr": 18267, + "rrrr": 25561, + "rrrr": 34444, + "rs": 6978, + "rs": 1724, + "rsa": 29437, + "rsc": 48524, + "rsd": 34426, + "rsi": 39046, + "rsl": 44752, + "rsp": 16381, + "rspb": 38508, + "rspb": 36727, + "rspca": 45643, + "rss": 46466, + "rss": 22350, + "rstats": 38700, + "rsvp": 9774, + "rt": 8959, + "rt": 8991, + "rtc": 31648, + "rte": 33822, + "rte": 23322, + "rtg": 22028, + "rti": 47549, + "rtr": 43999, + "rts": 8496, + "rtw": 34673, + "ru": 681, + "ru": 13735, + "rub": 15862, + "rub": 22586, + "rubb": 19597, + "rubbed": 45239, + "rubber": 31131, + "rubber": 11331, + "rubbing": 41262, + "rubbish": 21108, + "rubble": 42230, + "ruben": 44058, + "ruben": 29722, + "rubi": 27856, + "rubin": 34128, + "rubio": 24244, + "rubs": 43422, + "ruby": 24552, + "ruby": 11493, + "ruck": 27449, + "rucker": 45402, + "rud": 35256, + "rudd": 31836, + "rude": 16548, + "rudi": 48360, + "rudol": 40927, + "rudolf": 46835, + "rudolph": 30119, + "rudy": 38226, + "rudy": 22131, + "rue": 38024, + "rue": 19276, + "rufc": 45084, + "ruff": 28177, + "ruff": 30304, + "rufus": 39322, + "rug": 4217, + "rug": 19220, + "rugby": 15091, + "rugby": 4964, + "rugbyleague": 44419, + "ruger": 48655, + "rugged": 25225, + "rugs": 29946, + "rui": 46974, + "ruin": 16256, + "ruined": 17231, + "ruining": 29952, + "ruins": 16094, + "ruiz": 27873, + "ruk": 46628, + "rukh": 43075, + "rukh": 27631, + "rule": 31643, + "rule": 6175, + "ruled": 16324, + "ruler": 26286, + "rulers": 45328, + "rules": 5272, + "ruling": 14690, + "rum": 9223, + "rum": 11233, + "rumb": 42432, + "rumble": 18900, + "rumi": 31428, + "rumor": 22254, + "rumored": 36694, + "rumors": 16160, + "rumour": 34296, + "rumours": 20716, + "rump": 29366, + "run": 1639, + "run": 1934, + "runaway": 28851, + "runchat": 25838, + "rundown": 41100, + "rune": 33882, + "rune": 49244, + "runner": 37370, + "runner": 7913, + "runners": 10571, + "runnin": 43130, + "running": 24451, + "running": 2761, + "runoff": 38564, + "runs": 5586, + "runway": 13927, + "rup": 7996, + "rup": 14980, + "rupaul": 44211, + "rupee": 43916, + 
"rupees": 44110, + "rupert": 25625, + "rupt": 23055, + "ruption": 35403, + "rural": 28801, + "rural": 8737, + "rus": 35811, + "rus": 5998, + "rush": 12148, + "rush": 6973, + "rushed": 28104, + "rusher": 48745, + "rushes": 47217, + "rushing": 20284, + "russ": 6285, + "russ": 20764, + "russell": 26122, + "russell": 8150, + "russi": 2600, + "russia": 4018, + "russian": 30731, + "russian": 4868, + "russians": 25413, + "russo": 30679, + "rust": 28682, + "rust": 14212, + "rustic": 19822, + "rusty": 43966, + "rusty": 22646, + "rut": 14973, + "rut": 39102, + "rutger": 49029, + "rutgers": 28934, + "ruth": 15798, + "ruth": 12029, + "ruther": 26676, + "rutherford": 31070, + "ruthless": 36063, + "rutland": 46024, + "ruto": 43702, + "ruz": 23275, + "rv": 17135, + "rv": 17951, + "rva": 24278, + "rw": 9085, + "rw": 22926, + "rwa": 47452, + "rwand": 31758, + "rwanda": 15427, + "rwby": 39698, + "rwc": 32321, + "rx": 41188, + "rx": 15945, + "ry": 1511, + "ry": 913, + "ryan": 8682, + "ryan": 4053, + "ryanair": 43526, + "ryder": 43564, + "ryder": 21805, + "rye": 24015, + "rye": 17409, + "rying": 7838, + "ryn": 37728, + "ryo": 24460, + "rys": 21654, + "ryu": 46656, + "ryu": 34604, + "ré": 29106, + "s": 82, + "s": 338, + "sa": 774, + "sa": 1344, + "saa": 13429, + "saab": 27158, + "saad": 36530, + "saas": 25761, + "saat": 33151, + "sab": 3233, + "sab": 23213, + "saba": 38344, + "sabah": 32854, + "saban": 41620, + "sabar": 47102, + "sabbath": 26008, + "sabc": 30010, + "sabcnews": 41093, + "saber": 46822, + "saber": 25624, + "sabha": 23431, + "sabi": 47073, + "sabine": 44062, + "sable": 19224, + "sabot": 30700, + "sabotage": 40496, + "sabre": 35110, + "sabres": 29620, + "sabrin": 37029, + "sabrina": 24994, + "sac": 3632, + "sac": 12905, + "sach": 30168, + "sacha": 49010, + "sachin": 47527, + "sachin": 30297, + "sachs": 31451, + "sack": 28964, + "sack": 14979, + "sacked": 27519, + "sacks": 26441, + "sacram": 13334, + "sacramento": 16065, + "sacred": 40612, + "sacred": 12477, + "sacri": 15283, + "sacrif": 12117, + "sacrific": 16919, + "sacrifice": 12556, + "sacrificed": 31116, + "sacrifices": 28858, + "sacrificing": 48146, + "sad": 2810, + "sad": 3719, + "saddened": 27720, + "saddest": 34925, + "saddle": 30469, + "saddle": 20283, + "sade": 27429, + "sadh": 40955, + "sadi": 22207, + "sadie": 30333, + "sadiq": 44107, + "sadler": 45600, + "sadly": 11603, + "sadness": 20399, + "sae": 38633, + "sae": 34883, + "saeed": 29745, + "saf": 2125, + "saf": 25760, + "safar": 23443, + "safari": 14091, + "safarilive": 34816, + "safc": 27998, + "safe": 2901, + "safe": 2996, + "safeguard": 42249, + "safeguarding": 47451, + "safely": 11513, + "safer": 40124, + "safer": 15504, + "safest": 38973, + "safety": 19050, + "safety": 3406, + "safetyfirst": 43608, + "saffron": 27529, + "sag": 6609, + "sag": 30048, + "saga": 15758, + "sagan": 37193, + "sagar": 42518, + "sage": 25800, + "sage": 7509, + "sages": 25979, + "sagin": 47097, + "sagitt": 44685, + "sagu": 44708, + "sah": 30943, + "sah": 26342, + "saha": 36062, + "sahara": 24599, + "saharan": 44255, + "sahi": 24608, + "sahib": 34150, + "sai": 16048, + "sai": 10886, + "said": 40319, + "said": 1946, + "saif": 44164, + "saig": 36328, + "saigon": 41081, + "sail": 7528, + "sail": 12156, + "sailed": 43047, + "sailing": 11003, + "sailor": 28002, + "sailor": 16076, + "sailormoon": 40673, + "sailors": 25355, + "sails": 27526, + "sain": 21226, + "sain": 40378, + "sains": 24860, + "sainsbury": 45879, + "sainsburys": 36934, + "saint": 11274, + "saint": 5599, + "saints": 8769, + "saintsfc": 31102, + 
"sair": 46600, + "sair": 30971, + "saire": 28087, + "saison": 33256, + "sait": 48008, + "saj": 33580, + "sak": 11511, + "sak": 35900, + "saka": 33609, + "sake": 12874, + "sakh": 43945, + "saki": 40514, + "saku": 37550, + "sakura": 24162, + "sal": 980, + "sal": 6126, + "sala": 17300, + "salaam": 46773, + "salad": 6188, + "salads": 30948, + "salah": 22516, + "salam": 19007, + "salam": 33963, + "salamat": 44696, + "salami": 46885, + "salaries": 33132, + "salary": 16312, + "salazar": 45988, + "sale": 17786, + "sale": 1690, + "saleh": 38353, + "salem": 48194, + "salem": 16884, + "sales": 13347, + "sales": 3765, + "salesforce": 22680, + "salesman": 37633, + "salford": 25629, + "sali": 15411, + "salim": 42760, + "salinas": 41990, + "saline": 46918, + "salis": 20667, + "salis": 39378, + "salisbury": 24763, + "sall": 27122, + "sall": 20883, + "salle": 23738, + "sally": 29542, + "sally": 13349, + "salman": 13754, + "salman": 16219, + "salmankhan": 15177, + "salmon": 37040, + "salmon": 9137, + "salom": 38268, + "salon": 33916, + "salon": 11105, + "saloon": 26038, + "sals": 16307, + "salsa": 16442, + "salt": 12763, + "salt": 6611, + "salted": 26313, + "saltlife": 47809, + "salts": 40559, + "saltwater": 43616, + "salty": 20678, + "salu": 31711, + "salud": 46867, + "salut": 44998, + "salute": 44908, + "salute": 9747, + "salutes": 32762, + "salv": 8299, + "salvador": 20874, + "salvage": 33131, + "salvation": 19534, + "salvatore": 38772, + "salz": 33594, + "salzburg": 43396, + "sam": 1644, + "sam": 3730, + "sama": 19272, + "samanth": 11465, + "samantha": 15466, + "samanthap": 38266, + "samanthaprabhu": 38643, + "samar": 21820, + "samaritan": 45495, + "samba": 37190, + "same": 23062, + "same": 2208, + "samheughan": 36255, + "sami": 48400, + "sami": 24322, + "sammy": 31091, + "sammy": 16758, + "samo": 30006, + "samoa": 34932, + "samp": 31225, + "sample": 9542, + "sampler": 40629, + "samples": 13387, + "sampling": 19522, + "sampson": 39983, + "sams": 44667, + "samson": 34659, + "samsun": 47875, + "samsung": 35369, + "samsung": 8115, + "samu": 7646, + "samuel": 30612, + "samuel": 12787, + "samurai": 21739, + "san": 1489, + "san": 2223, + "sana": 19434, + "sanantonio": 34714, + "sanat": 29091, + "sanatomy": 36052, + "sanc": 7398, + "sance": 15930, + "sanchez": 13971, + "sanctioned": 43032, + "sanctions": 17790, + "sanctu": 12712, + "sanctuary": 14044, + "sand": 2147, + "sand": 5094, + "sandal": 36445, + "sandal": 42185, + "sandals": 20731, + "sandalwood": 47502, + "sandeep": 46973, + "sander": 34111, + "sanders": 10429, + "sanderson": 36198, + "sandi": 44249, + "sandiego": 45997, + "sandiego": 15793, + "sandman": 45730, + "sando": 35921, + "sandoval": 44157, + "sandra": 33733, + "sandra": 13415, + "sandro": 42389, + "sands": 5936, + "sandstone": 36796, + "sandwich": 17050, + "sandwich": 8687, + "sandwiches": 19667, + "sandy": 29679, + "sandy": 10355, + "sane": 23419, + "sanford": 32330, + "sanfrancisco": 20254, + "sang": 13235, + "sang": 11684, + "sange": 12466, + "sangria": 42665, + "sani": 39137, + "sani": 34492, + "sanitary": 33842, + "sanitation": 25414, + "saniti": 43987, + "sanity": 30517, + "sanjay": 31712, + "sanjay": 25796, + "sanje": 40405, + "sanjose": 45971, + "sank": 43692, + "sano": 34053, + "sans": 16982, + "sansk": 39689, + "sanskrit": 48083, + "sant": 8356, + "sant": 23120, + "santa": 22175, + "santa": 4555, + "santac": 28876, + "santam": 45627, + "santana": 27033, + "santander": 46476, + "santi": 13856, + "santiago": 16568, + "santo": 29631, + "santo": 18400, + "santor": 28448, + "santorini": 
39573, + "santos": 16582, + "sany": 47679, + "sao": 28026, + "sap": 8089, + "sap": 11591, + "sapi": 40016, + "sapp": 13427, + "sapp": 40729, + "sapphire": 22044, + "sar": 1808, + "sar": 9424, + "sara": 37196, + "sara": 10063, + "sarab": 40716, + "sarac": 35722, + "sarah": 9086, + "sarah": 5327, + "saraj": 42592, + "sarajevo": 48211, + "saras": 20373, + "sarasota": 31990, + "sarato": 24845, + "saratoga": 29496, + "sarawak": 47331, + "sarcasm": 37246, + "sarcastic": 48639, + "sardar": 41786, + "sarde": 43925, + "sardin": 27383, + "sardinia": 41025, + "sare": 13051, + "saree": 30860, + "sargent": 34864, + "sari": 42327, + "sari": 20261, + "saries": 47586, + "sarkar": 30673, + "sarko": 33658, + "sarkodie": 42848, + "sarmy": 20954, + "sart": 33006, + "sary": 15398, + "sas": 3960, + "sas": 5235, + "sash": 35656, + "sasha": 46078, + "sasha": 20894, + "sasia": 44751, + "sask": 47091, + "sask": 30416, + "saskat": 17102, + "saskatchewan": 23899, + "saskatoon": 31128, + "sass": 31351, + "sassy": 20827, + "sat": 1382, + "sat": 3279, + "sata": 41520, + "satan": 19446, + "satanic": 38224, + "satchel": 45908, + "sate": 35749, + "satell": 9031, + "satellite": 10316, + "satellites": 28483, + "sath": 29675, + "sathletics": 30154, + "sati": 7038, + "satin": 21803, + "sation": 23674, + "sations": 31232, + "satire": 29875, + "satis": 9906, + "satisf": 22941, + "satisfaction": 19925, + "satisfied": 18101, + "satisfy": 29444, + "satisfying": 23755, + "sato": 34376, + "satu": 45283, + "satur": 1634, + "saturated": 32466, + "saturday": 12537, + "saturday": 1748, + "saturdaymorning": 29053, + "saturdaymotivation": 40843, + "saturdays": 18930, + "saturn": 17312, + "saty": 39426, + "sau": 2096, + "sau": 19455, + "sauce": 5520, + "saucer": 42272, + "sauces": 40367, + "saucy": 46684, + "saudi": 24511, + "saudi": 8548, + "saudiarabia": 28680, + "sauer": 46333, + "saul": 47623, + "saul": 23252, + "sault": 40361, + "sauna": 35460, + "saunders": 23794, + "saur": 13227, + "saura": 46532, + "saurus": 22118, + "saus": 36121, + "sausage": 11855, + "sausages": 31593, + "sauté": 36290, + "sautéed": 38517, + "sauvi": 30116, + "sauvignon": 32745, + "sav": 2248, + "sav": 26533, + "sava": 40198, + "savag": 43039, + "savage": 11859, + "savannah": 18662, + "save": 5895, + "save": 2673, + "saved": 7137, + "saveour": 33390, + "saver": 20987, + "savers": 31416, + "saves": 12907, + "savethe": 18031, + "savi": 14721, + "saving": 28498, + "saving": 6979, + "savings": 10651, + "savior": 24762, + "saviour": 35800, + "savor": 48071, + "savory": 32992, + "savoury": 49071, + "savoy": 39552, + "savvy": 29278, + "saw": 12429, + "saw": 2425, + "sawa": 39613, + "sawards": 29012, + "sawyer": 27726, + "sax": 14169, + "sax": 23766, + "saxon": 31856, + "saxophon": 43760, + "saxophone": 32296, + "say": 3047, + "say": 1451, + "saya": 35170, + "sayang": 46322, + "sayers": 44116, + "sayin": 23662, + "saying": 4455, + "says": 1563, + "saz": 35577, + "sb": 5576, + "sb": 4977, + "sba": 44970, + "sback": 43840, + "sband": 27539, + "sbaseball": 46491, + "sbball": 39190, + "sbc": 31404, + "sberg": 20358, + "sbi": 41369, + "sbk": 39211, + "sboro": 18909, + "sbridge": 49228, + "sbs": 18883, + "sbu": 48075, + "sbu": 46281, + "sburg": 7390, + "sburgh": 48205, + "sbury": 14081, + "sby": 26519, + "sby": 10287, + "sc": 663, + "sc": 3219, + "sca": 11001, + "scab": 31716, + "scaf": 28981, + "scafe": 45574, + "scaffolding": 41687, + "scal": 10859, + "scala": 37997, + "scalable": 44084, + "scale": 37817, + "scale": 5879, + "scaled": 41923, + "scales": 22891, + "scaling": 
29116, + "scallo": 19936, + "scallop": 39544, + "scallops": 31430, + "scalp": 38898, + "scam": 17620, + "scam": 13215, + "scamp": 28451, + "scams": 34395, + "scan": 10650, + "scan": 11261, + "scanada": 27121, + "scand": 8110, + "scandal": 35420, + "scandal": 11622, + "scandals": 45490, + "scandin": 32014, + "scandinavian": 35661, + "scanned": 43719, + "scanner": 24185, + "scanning": 24092, + "scans": 31251, + "scap": 35883, + "scape": 36005, + "scape": 12314, + "scapes": 31933, + "scar": 4171, + "scar": 18088, + "scarborough": 24254, + "scarce": 38572, + "scarcity": 45812, + "scare": 33536, + "scare": 15920, + "scarec": 38814, + "scarecrow": 46504, + "scared": 9870, + "scares": 34096, + "scarf": 13365, + "scari": 27050, + "scariest": 37213, + "scarlet": 20389, + "scarlett": 28325, + "scars": 20747, + "scarves": 29249, + "scary": 9250, + "scat": 13899, + "scattered": 22090, + "scavenger": 36778, + "scc": 19458, + "scd": 48422, + "scen": 2204, + "scenario": 20456, + "scenarios": 31346, + "scence": 33418, + "scene": 3562, + "scenery": 16025, + "scenes": 5415, + "scenic": 15394, + "scent": 36277, + "scent": 7683, + "scented": 27190, + "scenter": 23059, + "scentre": 39371, + "scents": 26336, + "scep": 24439, + "scfc": 38578, + "sch": 844, + "sch": 7542, + "scha": 42809, + "schaf": 45588, + "schaft": 41010, + "schal": 35568, + "schalke": 41029, + "schallenge": 43665, + "schan": 31328, + "schar": 15085, + "schat": 31842, + "schau": 35830, + "sche": 3038, + "sche": 7289, + "schedu": 4207, + "schedule": 5521, + "scheduled": 10986, + "schedules": 28986, + "scheduling": 32216, + "scheer": 26776, + "schel": 39881, + "schel": 38569, + "schem": 17720, + "scheme": 9024, + "schemes": 22958, + "schen": 22738, + "scher": 21925, + "scher": 21299, + "schi": 13731, + "schi": 24984, + "schicago": 46230, + "schiff": 39431, + "schild": 32148, + "schiz": 33230, + "schizoph": 40004, + "schizophre": 41163, + "schle": 32022, + "schmid": 17375, + "schmidt": 18463, + "schnau": 45745, + "schnei": 19941, + "schneider": 22972, + "schnit": 40903, + "scho": 2493, + "schoice": 23860, + "schol": 4498, + "scholar": 7192, + "scholar": 12830, + "scholarly": 41065, + "scholars": 13818, + "scholarship": 9070, + "scholarships": 17866, + "scholastic": 35743, + "schoo": 20721, + "school": 6063, + "school": 1228, + "schooled": 44722, + "schoolers": 31455, + "schooling": 28608, + "schools": 3513, + "schre": 47685, + "schri": 25453, + "schro": 32381, + "schu": 11318, + "schubert": 46939, + "schul": 14945, + "schultz": 30308, + "schulz": 39572, + "schumacher": 39208, + "schumer": 25313, + "schur": 42475, + "schwab": 47602, + "schwar": 13985, + "schwartz": 30617, + "schwarz": 27074, + "schwarzenegger": 33860, + "schwe": 25324, + "sci": 2267, + "sci": 8309, + "sciart": 31704, + "scicom": 28606, + "scicomm": 29573, + "scien": 39261, + "science": 10201, + "science": 2497, + "sciencefiction": 39170, + "sciences": 11481, + "scienti": 4338, + "scientific": 9750, + "scientist": 11083, + "scientists": 8045, + "sciento": 36193, + "scientology": 44694, + "scifi": 41862, + "scifi": 12230, + "scion": 47208, + "sciss": 25667, + "scissors": 30867, + "sciutto": 44392, + "sclerosis": 39446, + "sclub": 20017, + "sco": 1065, + "sco": 4763, + "scoe": 31164, + "scol": 13599, + "scoll": 44895, + "scollege": 39536, + "scom": 26407, + "scon": 17163, + "scon": 29272, + "scones": 36443, + "sconf": 39704, + "scoo": 14199, + "scooby": 34469, + "scoop": 13829, + "scoops": 41360, + "scope": 7979, + "scopes": 30328, + "scopic": 23869, + "scopy": 20018, + "scor": 8442, 
+ "score": 12067, + "score": 4431, + "scoreboard": 30104, + "scorecard": 38128, + "scored": 6143, + "scoreless": 33469, + "scorer": 16572, + "scorers": 26699, + "scores": 7039, + "scoring": 9198, + "scorpi": 15445, + "scorpio": 34331, + "scorpion": 28461, + "scorpions": 45401, + "scorsese": 45975, + "scot": 2496, + "scot": 9271, + "scotch": 16687, + "scoti": 46446, + "scotia": 27859, + "scotland": 29174, + "scotland": 4203, + "scots": 17260, + "scotsman": 39612, + "scott": 7775, + "scott": 3664, + "scotti": 6227, + "scottish": 18039, + "scottish": 7442, + "scottsdale": 27817, + "scotty": 39697, + "scotty": 26836, + "scotus": 21720, + "scou": 44909, + "scoun": 16110, + "scouncil": 48787, + "scountry": 40432, + "scour": 46172, + "scout": 32213, + "scout": 10786, + "scouting": 19072, + "scouts": 14837, + "scow": 27929, + "scowboys": 31386, + "scp": 45030, + "scr": 36131, + "scra": 11187, + "scrabble": 39488, + "scram": 17289, + "scramble": 32688, + "scrambled": 39026, + "scran": 41774, + "scranton": 45274, + "scrap": 27950, + "scrap": 21695, + "scrapbook": 48733, + "scrapped": 43325, + "scraps": 40809, + "scrat": 9572, + "scratch": 13258, + "scratched": 48831, + "scratches": 46556, + "scratching": 44617, + "scre": 1795, + "scream": 31645, + "scream": 13239, + "screamed": 35427, + "screaming": 12891, + "screams": 23989, + "screen": 5351, + "screen": 3750, + "screened": 31450, + "screening": 6688, + "screenings": 27655, + "screenplay": 30058, + "screens": 12689, + "screenshot": 20637, + "screenshot": 12646, + "screenshots": 26783, + "screenshotsaturday": 21406, + "screenwriter": 37293, + "screenwriting": 35465, + "screw": 25529, + "screw": 14225, + "screwdriver": 48748, + "screwed": 30592, + "screws": 38292, + "scri": 2139, + "scrib": 34259, + "scribe": 36228, + "scribed": 38334, + "scricket": 45947, + "scrim": 21978, + "scrimmage": 25216, + "scrip": 11955, + "script": 8374, + "scripted": 40513, + "scription": 26604, + "scriptions": 39512, + "scripts": 20109, + "scripture": 27186, + "scro": 30768, + "scroll": 24160, + "scrolling": 28889, + "scrolls": 38113, + "scroo": 42263, + "scru": 7589, + "scrub": 23432, + "scrubs": 37919, + "scrum": 29047, + "scrump": 39791, + "scrumptious": 40987, + "scrutiny": 34305, + "scs": 26853, + "sct": 39284, + "scu": 8181, + "scu": 32135, + "scuba": 39053, + "scuba": 20559, + "scubadiving": 49046, + "scue": 25955, + "scul": 4948, + "scully": 36598, + "sculp": 6093, + "sculpt": 45044, + "sculpted": 41296, + "sculpting": 44389, + "sculptor": 29409, + "sculpture": 8757, + "sculptures": 20378, + "scum": 29655, + "scumb": 44525, + "scup": 21506, + "scur": 32742, + "scwx": 41966, + "scy": 27471, + "sd": 3080, + "sd": 4159, + "sda": 25548, + "sdale": 12327, + "sday": 5902, + "sday": 1376, + "sdays": 14491, + "sdc": 40992, + "sdcc": 13246, + "sden": 17241, + "sdf": 34681, + "sdg": 20177, + "sdgs": 16261, + "sdk": 40015, + "sdlive": 34561, + "sdn": 41925, + "sdsu": 41284, + "se": 567, + "se": 611, + "sea": 5970, + "sea": 2102, + "seab": 15728, + "seabir": 42558, + "seac": 35626, + "seaf": 9336, + "seafood": 12472, + "seag": 15730, + "seagu": 38076, + "seagull": 38858, + "seagulls": 42215, + "seahawks": 15341, + "seal": 21381, + "seal": 10159, + "sealed": 13358, + "sealing": 42992, + "seals": 18179, + "seam": 13710, + "seam": 44201, + "seaman": 47513, + "seamless": 29373, + "seamus": 40175, + "sean": 11406, + "sean": 6077, + "seanhannity": 43316, + "seap": 29983, + "seaport": 46418, + "sear": 1612, + "search": 23129, + "search": 1920, + "searched": 28961, + "searches": 
26378, + "searching": 10626, + "seared": 29727, + "sears": 26693, + "seas": 7329, + "seas": 9556, + "seascape": 42593, + "seaside": 18867, + "season": 19288, + "season": 1367, + "seasonal": 14215, + "seasoned": 28399, + "seasoning": 43439, + "seasons": 8635, + "seat": 19670, + "seat": 4922, + "seated": 23953, + "seater": 37543, + "seating": 16240, + "seats": 6944, + "seattle": 24388, + "seattle": 6274, + "seau": 32263, + "seaw": 32658, + "seaweed": 30204, + "seaworld": 27422, + "seb": 35766, + "seb": 25171, + "sebasti": 10324, + "sebastian": 43792, + "sebastian": 13181, + "sebring": 41086, + "sec": 2875, + "sec": 5338, + "seca": 37847, + "secco": 27394, + "sece": 46297, + "seclu": 42392, + "secon": 1846, + "second": 9329, + "second": 2241, + "secondary": 13107, + "seconds": 6541, + "secre": 2460, + "secret": 20710, + "secret": 4145, + "secretari": 29515, + "secretariat": 31767, + "secretary": 6552, + "secretly": 21400, + "secrets": 9735, + "secs": 28665, + "sect": 15772, + "section": 34986, + "section": 4853, + "sectional": 21876, + "sections": 20061, + "sector": 6579, + "sectors": 22173, + "secu": 4894, + "secular": 47483, + "secular": 27560, + "secur": 2557, + "secure": 44763, + "secure": 7515, + "secured": 16848, + "secures": 31567, + "securing": 24759, + "securities": 25080, + "security": 31245, + "security": 2741, + "sed": 14034, + "sed": 1252, + "sedan": 24237, + "sedg": 46926, + "sedge": 45288, + "sedi": 29269, + "sedly": 31771, + "sedona": 46862, + "seduc": 19933, + "seductive": 43721, + "see": 1751, + "see": 862, + "seed": 14064, + "seed": 6488, + "seeded": 33688, + "seeding": 40050, + "seedlings": 47933, + "seeds": 9128, + "seeing": 3214, + "seek": 8839, + "seeker": 28011, + "seekers": 20732, + "seeking": 8592, + "seeks": 12594, + "seem": 20043, + "seem": 7523, + "seemed": 17240, + "seemingly": 25917, + "seems": 4453, + "seen": 36273, + "seen": 2041, + "seer": 32486, + "sees": 7594, + "seeyou": 41279, + "sef": 27453, + "seg": 10551, + "sega": 16122, + "segment": 15615, + "segments": 43053, + "segreg": 49117, + "segregation": 39086, + "segu": 33156, + "segun": 43087, + "seh": 27536, + "seh": 41430, + "sehun": 17705, + "sei": 13130, + "sei": 15907, + "sein": 24669, + "seine": 41378, + "seinfeld": 33706, + "seis": 25559, + "seismic": 38459, + "seiz": 22171, + "seize": 26624, + "seized": 15826, + "seizure": 36804, + "seizures": 47199, + "sek": 45515, + "sek": 25880, + "sel": 1000, + "sel": 4098, + "sela": 47006, + "selamat": 37692, + "selangor": 44402, + "selby": 43546, + "selca": 38606, + "selcaday": 35924, + "seldom": 48322, + "sele": 29137, + "selec": 3014, + "select": 8690, + "selected": 6881, + "selecting": 32696, + "selection": 6724, + "selections": 24099, + "selective": 28686, + "selects": 32902, + "selen": 19970, + "selena": 14677, + "selenagomez": 27653, + "seley": 30556, + "self": 10139, + "self": 1322, + "selfcare": 39560, + "selfi": 3007, + "selfie": 26735, + "selfie": 3666, + "selfies": 46058, + "selfies": 10050, + "selfish": 26907, + "selfless": 34236, + "sell": 10279, + "sell": 5119, + "seller": 11779, + "sellers": 16562, + "selling": 4396, + "sells": 14306, + "selma": 36652, + "sels": 42070, + "selves": 4505, + "sely": 8402, + "sem": 8645, + "sem": 17106, + "sema": 31816, + "seman": 29119, + "seman": 28378, + "semana": 41780, + "semb": 36054, + "seme": 10855, + "sement": 10714, + "sements": 31449, + "semester": 11905, + "semi": 11023, + "semi": 6684, + "semic": 26967, + "semicon": 34315, + "semiconduc": 35646, + "semiconductor": 43551, + "semifinal": 22935, + 
"semifinals": 21863, + "semin": 5595, + "seminar": 7269, + "seminars": 34870, + "seminary": 31655, + "seminole": 42956, + "semis": 24013, + "semit": 22628, + "semite": 23721, + "semitic": 34894, + "semitism": 25911, + "semper": 47391, + "sen": 1057, + "sen": 2249, + "sena": 21584, + "senate": 30703, + "senate": 6843, + "senator": 20871, + "senator": 8495, + "senators": 16889, + "send": 27684, + "send": 3625, + "sending": 6985, + "sends": 10817, + "sene": 25269, + "seneca": 33419, + "senegal": 28255, + "senew": 49313, + "seng": 43022, + "seng": 29971, + "senior": 19865, + "senior": 3415, + "seniors": 8138, + "senna": 36195, + "senpai": 46562, + "sens": 5218, + "sens": 22837, + "sensation": 19383, + "sensational": 23051, + "sense": 29162, + "sense": 4747, + "sensei": 36158, + "senses": 21809, + "sensi": 38802, + "sensible": 30635, + "sensing": 29236, + "sensiti": 20531, + "sensitive": 13734, + "sensitivity": 27788, + "sensor": 15330, + "sensors": 20356, + "sensory": 21831, + "sensu": 28157, + "sensual": 40860, + "sent": 6200, + "sent": 3676, + "sentence": 12737, + "sentenced": 17773, + "sentences": 25858, + "sentencing": 34394, + "senti": 19042, + "sentim": 25102, + "sentiment": 25949, + "sentimental": 40070, + "sentiments": 47450, + "sentin": 20042, + "sentinel": 23123, + "senting": 3924, + "seo": 24743, + "seo": 8622, + "seok": 34697, + "seok": 22482, + "seokjin": 45584, + "seoul": 13253, + "sep": 3212, + "sep": 10434, + "separ": 6859, + "separate": 13886, + "separated": 22163, + "separately": 41904, + "separates": 45365, + "separati": 39377, + "separating": 43480, + "separation": 22007, + "sephora": 38414, + "sepsis": 40205, + "sept": 5380, + "septe": 3672, + "september": 3707, + "septic": 34690, + "sepul": 47360, + "seq": 44379, + "sequ": 5491, + "seque": 44662, + "sequel": 15701, + "sequence": 18833, + "sequences": 47306, + "sequencing": 33484, + "sequo": 32781, + "sequoia": 42404, + "ser": 803, + "ser": 2771, + "sera": 28250, + "serbia": 19038, + "serbian": 33687, + "sere": 35770, + "seren": 7880, + "serena": 19519, + "serenawilliams": 48316, + "serendip": 45805, + "serendipity": 49386, + "serene": 28269, + "serenity": 24187, + "serge": 13477, + "serge": 35700, + "sergeant": 22049, + "sergei": 39870, + "sergey": 35390, + "sergi": 47675, + "sergio": 18359, + "seri": 2763, + "seri": 37509, + "serial": 14216, + "serie": 19752, + "seriea": 32660, + "series": 1857, + "serious": 47421, + "serious": 4770, + "seriously": 4885, + "sermon": 24884, + "sero": 48883, + "serpent": 37084, + "serpent": 35364, + "serra": 39851, + "serrano": 44236, + "sers": 13509, + "serum": 25385, + "serv": 1297, + "serv": 24571, + "servant": 20810, + "servants": 29652, + "serve": 39202, + "serve": 2838, + "served": 4740, + "server": 36458, + "server": 8398, + "serverless": 49243, + "servers": 22262, + "serves": 9915, + "servic": 27115, + "service": 21496, + "service": 2086, + "serviced": 44687, + "services": 3100, + "servicing": 41300, + "serving": 5722, + "sery": 14279, + "ses": 23708, + "ses": 1386, + "sesame": 21706, + "sese": 37128, + "sesh": 24274, + "session": 2550, + "sessions": 6327, + "set": 7965, + "set": 1167, + "setback": 43605, + "seth": 20005, + "seth": 11870, + "sethu": 38933, + "setlist": 33141, + "seton": 43799, + "sets": 4650, + "sett": 4984, + "sett": 17567, + "sette": 14613, + "setter": 23153, + "settes": 44145, + "setti": 45170, + "setting": 5264, + "settings": 18628, + "settle": 15075, + "settled": 18310, + "settlement": 16494, + "settlements": 36605, + "settlers": 35671, + "settles": 41498, + 
"settling": 22036, + "setup": 11092, + "seu": 31539, + "seul": 48975, + "seum": 18838, + "seun": 24209, + "seung": 32393, + "seung": 33711, + "seungri": 41627, + "seuss": 34441, + "sev": 26585, + "sev": 37600, + "seva": 42604, + "seve": 21458, + "seve": 22468, + "sevel": 17439, + "seven": 7874, + "seven": 5757, + "sevens": 29911, + "sevent": 43048, + "seventeen": 19337, + "seventh": 17568, + "seventy": 47170, + "sever": 3250, + "sever": 45557, + "several": 5560, + "severance": 26194, + "severe": 6215, + "severely": 24417, + "severn": 34626, + "severy": 34207, + "sevilla": 24947, + "seville": 34988, + "sew": 28640, + "sewage": 32777, + "sewer": 28294, + "sewing": 15974, + "sewn": 42118, + "sex": 3548, + "sex": 5937, + "sexi": 20562, + "sexiest": 25426, + "sexism": 32059, + "sexist": 33047, + "sexu": 14741, + "sexual": 6749, + "sexuality": 21244, + "sexually": 23032, + "sexy": 21019, + "sexy": 38127, + "sey": 6317, + "sey": 2258, + "seychel": 36809, + "seychelles": 38519, + "seye": 35604, + "seym": 22657, + "seymour": 25850, + "seys": 15081, + "sez": 42377, + "señ": 43368, + "sf": 4435, + "sf": 4915, + "sfa": 32675, + "sfam": 37649, + "sfb": 27930, + "sfc": 14129, + "sfest": 49024, + "sff": 42056, + "sfgiants": 20923, + "sfield": 11801, + "sfo": 39182, + "sfootball": 45259, + "sfor": 9115, + "sford": 28917, + "sforsale": 28888, + "sfw": 18073, + "sfx": 37995, + "sg": 9599, + "sg": 7611, + "sga": 33049, + "sgate": 27558, + "sgh": 47590, + "sgo": 5393, + "sgo": 21044, + "sgt": 13748, + "sh": 552, + "sh": 849, + "sha": 1514, + "sha": 3337, + "shaa": 44221, + "shab": 8323, + "shabbat": 38042, + "shabby": 28838, + "shack": 23866, + "shack": 18785, + "shad": 3182, + "shad": 23874, + "shade": 34554, + "shade": 10097, + "shaded": 43506, + "shades": 46608, + "shades": 9270, + "shadesof": 45180, + "shading": 37348, + "shado": 9325, + "shadow": 15243, + "shadow": 7068, + "shadowhun": 19931, + "shadowhunters": 24834, + "shadowing": 46092, + "shadows": 12971, + "shady": 22158, + "shaf": 12032, + "shaft": 21545, + "shag": 22439, + "shaggy": 42662, + "shah": 13203, + "shah": 8439, + "shahe": 23643, + "shaheed": 30060, + "shaheer": 43969, + "shahi": 46972, + "shahid": 25696, + "shahid": 27138, + "shahidkapoor": 29892, + "shahzad": 45915, + "shai": 47941, + "shaikh": 45712, + "shail": 37603, + "shair": 43135, + "shak": 8385, + "shake": 8206, + "shake": 8251, + "shaken": 38237, + "shaker": 26210, + "shakers": 38411, + "shakes": 19668, + "shakespe": 9890, + "shakespeare": 22499, + "shakespeare": 12488, + "shakespearesunday": 32320, + "shaking": 19101, + "shakira": 40795, + "shakti": 48593, + "shakti": 32458, + "shakur": 48915, + "shal": 15056, + "shal": 28175, + "shale": 32864, + "shall": 4742, + "shallow": 23730, + "shalom": 31339, + "sham": 6453, + "sham": 9005, + "shaman": 48727, + "shambles": 40799, + "shame": 14776, + "shame": 7593, + "shameful": 28283, + "shameless": 25380, + "shaming": 40553, + "shampoo": 23944, + "shamrock": 34199, + "shan": 5171, + "shan": 8834, + "shana": 44835, + "shand": 29101, + "shane": 26863, + "shane": 11572, + "shang": 11141, + "shanghai": 12742, + "shani": 46665, + "shank": 24685, + "shankar": 24108, + "shann": 9932, + "shannon": 22842, + "shannon": 13581, + "shant": 36610, + "shap": 5581, + "shape": 26925, + "shape": 6448, + "shaped": 10127, + "shapes": 15377, + "shaping": 18632, + "shapiro": 32110, + "shaq": 46402, + "shaq": 26843, + "shar": 1669, + "shar": 36542, + "shara": 48849, + "sharapo": 36489, + "sharapova": 36671, + "shard": 42207, + "share": 7585, + "share": 1978, 
+ "shared": 5368, + "shareholder": 38241, + "shareholders": 34778, + "sharepoint": 39213, + "shares": 4974, + "sharethe": 49277, + "shareyour": 45890, + "shari": 27738, + "shari": 47390, + "sharia": 37244, + "sharif": 15501, + "sharing": 3567, + "sharjah": 33420, + "shark": 15836, + "shark": 7980, + "sharks": 10047, + "sharkweek": 39571, + "sharma": 10105, + "sharon": 28722, + "sharon": 14138, + "sharp": 17126, + "sharp": 8157, + "sharpe": 34374, + "sharpen": 41465, + "sharpie": 46858, + "sharply": 37185, + "shasta": 46727, + "shat": 12169, + "shat": 44388, + "shatter": 45008, + "shattered": 26820, + "shau": 13750, + "shaun": 23446, + "shaun": 16669, + "shav": 11410, + "shave": 17735, + "shaved": 25571, + "shaving": 24261, + "shaw": 6122, + "shaw": 6805, + "shawa": 46413, + "shawl": 35132, + "shawn": 16677, + "shawn": 10970, + "shawnee": 48060, + "shawnmendes": 27277, + "shawty": 38026, + "shay": 10778, + "shay": 18361, + "shaykh": 47223, + "shaz": 18618, + "shazam": 29063, + "shc": 43419, + "shd": 37729, + "she": 1729, + "she": 1043, + "shea": 20407, + "shead": 44287, + "shead": 20434, + "shealth": 41743, + "shealth": 22197, + "shear": 27974, + "shear": 32108, + "shearer": 40505, + "sheath": 45637, + "shed": 16586, + "shed": 1492, + "shedding": 33608, + "sheds": 25921, + "shee": 23450, + "shee": 34321, + "sheed": 26105, + "sheehan": 41809, + "sheen": 25025, + "sheep": 23604, + "sheep": 9629, + "sheer": 17577, + "sheeran": 18561, + "sheet": 7298, + "sheets": 12744, + "shef": 8237, + "sheff": 38844, + "sheff": 43821, + "sheffiel": 26940, + "sheffield": 41763, + "sheffield": 10420, + "sheffieldissuper": 33628, + "sheh": 31667, + "sheikh": 15031, + "sheil": 42765, + "sheila": 25734, + "shek": 33285, + "shel": 3159, + "shelby": 36906, + "shelby": 16885, + "sheldon": 25079, + "shelf": 10955, + "shell": 23374, + "shell": 6648, + "shelley": 22497, + "shelling": 43166, + "shells": 19265, + "shelly": 37461, + "shelter": 8599, + "sheltered": 48070, + "shelters": 24312, + "shelton": 24471, + "shelves": 16225, + "shem": 40299, + "shen": 10154, + "shen": 31098, + "shenan": 20965, + "shenando": 44666, + "shenanigans": 26590, + "shenko": 39751, + "shenmue": 48279, + "shenzhen": 38970, + "shep": 33757, + "shep": 44857, + "shepard": 26810, + "shepher": 11008, + "shepherd": 13242, + "shepherds": 42792, + "sheppard": 37304, + "sher": 3570, + "sher": 4510, + "sheraton": 39400, + "shere": 21507, + "sheri": 9235, + "sheridan": 27085, + "sheriff": 10309, + "sherlock": 17294, + "sherman": 17822, + "sherry": 44348, + "sherry": 24689, + "shers": 14141, + "sherwood": 24527, + "sheryl": 39773, + "shes": 45514, + "shes": 2502, + "shet": 15850, + "shetland": 29595, + "shetty": 25533, + "shev": 45182, + "sheva": 45132, + "shh": 35025, + "shhh": 36932, + "shi": 823, + "shi": 3533, + "shia": 23791, + "shibu": 36177, + "shibuya": 41623, + "shie": 26638, + "shiel": 33413, + "shield": 8670, + "shields": 19085, + "shies": 35312, + "shif": 35317, + "shift": 43767, + "shift": 6905, + "shifted": 34429, + "shifter": 48944, + "shifting": 21992, + "shifts": 23957, + "shik": 36980, + "shil": 14370, + "shill": 32121, + "shill": 30090, + "shilpa": 47062, + "shilpa": 40690, + "shim": 11986, + "shim": 32780, + "shima": 14382, + "shimano": 48904, + "shimi": 40517, + "shimmer": 38792, + "shin": 5664, + "shin": 11784, + "shinde": 41516, + "shine": 17582, + "shine": 3780, + "shinee": 19660, + "shines": 16015, + "shing": 38641, + "shing": 1743, + "shining": 10485, + "shino": 43074, + "shiny": 12190, + "ship": 7645, + "ship": 1158, + 
"shipment": 28553, + "shipp": 34709, + "shipped": 15279, + "shippers": 44789, + "shipping": 5721, + "ships": 3262, + "shipwreck": 48878, + "shipy": 26828, + "shipyard": 31273, + "shir": 1956, + "shiraz": 35618, + "shire": 11975, + "shire": 2968, + "shirehour": 32456, + "shirley": 18189, + "shiro": 26048, + "shirt": 27576, + "shirt": 2523, + "shirtless": 28959, + "shirts": 5803, + "shistory": 34979, + "shiv": 18042, + "shiv": 37121, + "shiva": 33881, + "shiva": 21174, + "shka": 38944, + "shld": 49359, + "shma": 48074, + "shment": 8802, + "shments": 18822, + "sho": 719, + "sho": 13756, + "shock": 19617, + "shock": 8736, + "shocked": 15787, + "shocker": 37971, + "shockey": 22258, + "shocking": 13394, + "shocks": 31886, + "shoe": 16308, + "shoe": 7342, + "shoes": 49391, + "shoes": 4079, + "shol": 21472, + "sholm": 44139, + "shome": 42701, + "shon": 19526, + "shon": 37621, + "shone": 47173, + "shoo": 1975, + "shook": 20730, + "shoops": 29956, + "shoot": 12531, + "shoot": 3704, + "shooter": 13645, + "shooters": 31902, + "shooting": 3992, + "shootings": 26753, + "shootout": 20666, + "shoots": 14144, + "shop": 5738, + "shop": 1557, + "shopify": 47949, + "shoplocal": 21775, + "shopp": 38486, + "shoppe": 38236, + "shopped": 28088, + "shopper": 24346, + "shoppers": 22316, + "shopping": 42101, + "shopping": 4266, + "shops": 6467, + "shopsmall": 35942, + "shor": 3209, + "shore": 14717, + "shore": 5928, + "shored": 33140, + "shoreditch": 35042, + "shoreline": 34807, + "shores": 18102, + "short": 6803, + "short": 3005, + "shortage": 19910, + "shortages": 38730, + "shortcuts": 45793, + "shorten": 41711, + "shorter": 20350, + "shortest": 33717, + "shortfilm": 37204, + "shorth": 37397, + "shortlist": 28163, + "shortlisted": 20631, + "shortly": 11967, + "shorts": 9680, + "shorty": 33502, + "shot": 9805, + "shot": 2000, + "shotel": 42365, + "shotgun": 21643, + "shots": 5342, + "shou": 3890, + "shoul": 29847, + "should": 14947, + "should": 1535, + "shoulder": 8476, + "shoulders": 18738, + "shouldn": 9416, + "shour": 20025, + "shouse": 28671, + "shout": 7335, + "shout": 5214, + "shouted": 44397, + "shouting": 26464, + "shoutout": 8274, + "shouts": 26709, + "shovel": 31778, + "show": 2133, + "show": 1080, + "showbiz": 34156, + "showcas": 14290, + "showcase": 7265, + "showcased": 35786, + "showcases": 26266, + "showcasing": 17036, + "showdown": 15576, + "showed": 7150, + "shower": 7777, + "showers": 9893, + "showing": 3649, + "shown": 8506, + "showroom": 16821, + "shows": 2665, + "showtime": 40576, + "showtime": 15442, + "showyour": 46733, + "shp": 38341, + "shq": 21145, + "shr": 10118, + "shra": 21360, + "shradd": 28172, + "shraddha": 35208, + "shraddhakapoor": 40385, + "shre": 12101, + "shred": 19756, + "shred": 33017, + "shredded": 31772, + "shredding": 45534, + "shree": 37410, + "shrek": 35009, + "shrews": 26411, + "shrewsbury": 30921, + "shri": 8838, + "shri": 11424, + "shrimp": 12727, + "shrin": 24865, + "shrine": 16156, + "shrink": 34957, + "shrinking": 41243, + "shrm": 44163, + "shro": 15259, + "shroff": 32081, + "shrop": 22630, + "shropshire": 26344, + "shru": 14911, + "shrub": 41464, + "shrubs": 47975, + "shrun": 46767, + "shs": 16184, + "sht": 44210, + "shti": 38927, + "shu": 2872, + "shu": 17651, + "shua": 33771, + "shub": 40552, + "shud": 45782, + "shuff": 42641, + "shuffle": 21681, + "shui": 45473, + "shuk": 29927, + "shukla": 46829, + "shul": 30721, + "shum": 37383, + "shun": 24479, + "shun": 39594, + "shur": 41032, + "shut": 8702, + "shut": 8282, + "shutdown": 16051, + "shutout": 24385, + "shuts": 
28313, + "shutt": 31866, + "shutter": 36235, + "shutter": 33902, + "shutters": 46894, + "shutting": 31383, + "shuttle": 15842, + "shwar": 41640, + "shy": 22678, + "shy": 9682, + "si": 564, + "si": 2990, + "sia": 2357, + "siam": 29686, + "siam": 48248, + "siamese": 43161, + "sian": 28510, + "sian": 6221, + "sians": 26583, + "sias": 28645, + "siber": 22206, + "siberia": 39969, + "siberian": 34058, + "sibl": 14338, + "sible": 14507, + "sibling": 43060, + "sibling": 23779, + "siblings": 17156, + "sic": 8278, + "sic": 1118, + "sica": 34125, + "sical": 33875, + "sichuan": 48950, + "sicilian": 45292, + "sicily": 23179, + "sick": 11143, + "sick": 5359, + "sickest": 47972, + "sickle": 41459, + "sickness": 28898, + "sics": 26297, + "sid": 10117, + "sid": 15119, + "sidd": 19842, + "siddi": 35227, + "side": 5869, + "side": 1145, + "sided": 21061, + "sidekick": 44683, + "sidel": 43557, + "sideline": 32056, + "sidelines": 31046, + "sider": 30581, + "siders": 41249, + "sides": 7578, + "sideshow": 46789, + "sidewalk": 23278, + "sidewalks": 43583, + "sideways": 35593, + "siding": 38758, + "sidney": 22598, + "sie": 8533, + "sie": 5685, + "sieg": 49203, + "siege": 18460, + "siegel": 48559, + "siem": 18434, + "siemens": 30147, + "siempre": 44030, + "siena": 33336, + "sienna": 40373, + "sier": 10028, + "sier": 7444, + "sierra": 13552, + "siers": 35923, + "sies": 16367, + "siest": 18323, + "sif": 29300, + "sig": 872, + "sig": 19145, + "sigh": 36303, + "sigh": 15505, + "sighs": 44579, + "sight": 16897, + "sight": 6329, + "sighted": 33034, + "sighting": 17507, + "sightings": 30004, + "sights": 17364, + "sightseeing": 34210, + "sigma": 45075, + "sigma": 15697, + "sign": 5538, + "sign": 2292, + "signage": 21156, + "signal": 10781, + "signaling": 38492, + "signalling": 48426, + "signals": 17150, + "signation": 24347, + "signature": 9189, + "signatures": 21865, + "signed": 3163, + "signee": 39778, + "signi": 34023, + "signific": 6374, + "significance": 23769, + "significant": 8735, + "significantly": 16187, + "signing": 4401, + "signingday": 40282, + "signings": 27731, + "signs": 4659, + "signup": 40791, + "sigue": 49401, + "sii": 36672, + "sik": 19974, + "sik": 22413, + "sika": 31144, + "sikh": 21829, + "sikhs": 45426, + "sil": 1556, + "sil": 8315, + "sila": 41754, + "sile": 37620, + "silen": 39048, + "silence": 8462, + "silenced": 45415, + "silent": 30352, + "silent": 8487, + "silently": 42640, + "silhou": 20589, + "silhouette": 26149, + "silic": 23830, + "silicon": 32412, + "silicon": 17888, + "silicone": 28221, + "silk": 25891, + "silk": 9743, + "silky": 29554, + "sill": 42468, + "sill": 48024, + "silly": 11883, + "silon": 31841, + "sils": 39708, + "silva": 16489, + "silve": 37697, + "silver": 7525, + "silver": 3467, + "silverado": 46160, + "silverstone": 29666, + "silvia": 37289, + "sim": 5026, + "sim": 10740, + "sima": 35871, + "simba": 39492, + "simcoe": 47148, + "sime": 28329, + "simi": 38073, + "simil": 7202, + "similar": 8547, + "similarities": 34716, + "simm": 13001, + "simmons": 14699, + "simo": 37171, + "simon": 8796, + "simon": 6668, + "simona": 46277, + "simone": 19062, + "simons": 33097, + "simp": 2542, + "simple": 19018, + "simple": 4129, + "simpler": 35489, + "simplest": 39588, + "simpli": 16868, + "simplicity": 21262, + "simplified": 36647, + "simplify": 35479, + "simply": 25637, + "simply": 6151, + "simpson": 41805, + "simpson": 11750, + "simpsons": 21092, + "sims": 14021, + "simul": 9845, + "simulated": 46395, + "simulation": 18610, + "simulator": 20821, + "simultaneous": 48816, + 
"simultaneously": 28575, + "sin": 1303, + "sin": 3421, + "sina": 19541, + "sinai": 33226, + "sinatra": 27262, + "sinc": 30464, + "since": 1855, + "sincere": 24513, + "sincere": 24886, + "sincerely": 25673, + "sinclair": 23100, + "sind": 39598, + "sind": 30877, + "sindh": 20754, + "sindia": 48038, + "sine": 22741, + "sine": 33793, + "sinfo": 47178, + "sing": 1387, + "sing": 1197, + "singapo": 27861, + "singapore": 28879, + "singapore": 6754, + "singer": 33880, + "singer": 5108, + "singers": 15613, + "singersongwriter": 44585, + "singh": 19445, + "singh": 5715, + "singing": 5864, + "single": 19524, + "single": 2688, + "singles": 12025, + "singleton": 46247, + "singly": 16619, + "sings": 13635, + "singul": 34003, + "singular": 44009, + "singularity": 48410, + "sinha": 29416, + "sini": 41781, + "sini": 26319, + "sinister": 31313, + "sink": 37232, + "sink": 14551, + "sinking": 27949, + "sinks": 32710, + "sinn": 36315, + "sinner": 45380, + "sinners": 43436, + "sino": 29759, + "sins": 9345, + "sinthe": 30737, + "sinu": 37351, + "sinus": 47535, + "sio": 10807, + "siob": 40954, + "siology": 46315, + "sion": 5676, + "sion": 1015, + "sional": 14533, + "sionally": 30754, + "sions": 4060, + "sioux": 44695, + "sioux": 24954, + "sip": 16096, + "sipping": 28527, + "sir": 10708, + "sir": 3846, + "sire": 28450, + "siren": 33026, + "sirens": 35907, + "siri": 13986, + "siri": 18394, + "sirius": 23574, + "sirius": 34999, + "siriusxm": 29833, + "sirloin": 46828, + "sis": 18132, + "sis": 2580, + "sisd": 27132, + "sisi": 37892, + "siss": 42929, + "sissy": 27564, + "sist": 20520, + "sista": 37448, + "sister": 17417, + "sister": 3677, + "sisterhood": 37313, + "sisters": 6404, + "sit": 7387, + "sit": 4037, + "sitcom": 30426, + "site": 26792, + "site": 1988, + "sites": 7236, + "sith": 41499, + "sito": 42613, + "sits": 12726, + "sitt": 42988, + "sitter": 40777, + "sittin": 40887, + "sitting": 4919, + "situ": 5562, + "situ": 42536, + "situated": 22030, + "situation": 7144, + "situations": 19096, + "sity": 38177, + "sity": 5477, + "siu": 40174, + "sium": 8090, + "sius": 27595, + "siva": 20991, + "sivan": 36931, + "sive": 23572, + "sive": 1875, + "sively": 10343, + "siveness": 39667, + "sives": 23896, + "sivity": 42738, + "siwon": 29055, + "six": 5968, + "six": 4093, + "sixers": 25941, + "sixteen": 28677, + "sixth": 12909, + "sixties": 44948, + "sixty": 32588, + "siya": 44440, + "size": 38377, + "size": 3235, + "sized": 9832, + "sizes": 10253, + "sizing": 28330, + "sizz": 23778, + "sizzle": 47890, + "sizzling": 35799, + "sj": 7536, + "sj": 16010, + "sjo": 42012, + "sk": 909, + "sk": 2058, + "ska": 7495, + "skag": 31948, + "skan": 46772, + "skar": 27587, + "skar": 26835, + "skate": 13740, + "skate": 12745, + "skateboard": 31777, + "skateboarding": 31352, + "skater": 30337, + "skaters": 39824, + "skates": 31479, + "skc": 44551, + "ske": 6261, + "ske": 25516, + "skel": 36564, + "skelet": 27075, + "skeletal": 37369, + "skeleton": 20062, + "skeletons": 48874, + "skell": 40801, + "skep": 27772, + "skeptical": 44934, + "sker": 37640, + "sker": 33600, + "sket": 3744, + "sketch": 11767, + "sketch": 5269, + "sketchbook": 18899, + "sketched": 38581, + "sketches": 17622, + "sketching": 23228, + "sketchy": 41582, + "skey": 37453, + "ski": 3327, + "ski": 3428, + "skid": 36574, + "skid": 32099, + "skier": 42585, + "skies": 7244, + "skiing": 14400, + "skil": 24543, + "skill": 15598, + "skill": 10604, + "skilled": 17535, + "skillet": 40568, + "skills": 4113, + "skim": 33191, + "skin": 5821, + "skin": 3575, + "skincare": 12648, + "skine": 
37300, + "sking": 46215, + "skinned": 42199, + "skinner": 30261, + "skinny": 42729, + "skinny": 15457, + "skins": 11594, + "skip": 39793, + "skip": 14296, + "skipped": 40639, + "skipper": 22226, + "skipping": 34867, + "skir": 8919, + "skirt": 12386, + "skirts": 24840, + "skis": 32843, + "skit": 43573, + "skitchen": 42820, + "skittles": 43213, + "sko": 15141, + "sko": 23493, + "skoda": 38668, + "skool": 26743, + "skril": 43149, + "skrillex": 43651, + "sks": 48136, + "sku": 10836, + "skul": 17561, + "skull": 34068, + "skull": 12092, + "skulls": 31804, + "skunk": 42194, + "sky": 3075, + "sky": 2390, + "skybet": 45540, + "skye": 21475, + "skyl": 43554, + "skylar": 45411, + "skyline": 14606, + "skymap": 41734, + "skynews": 40977, + "skype": 17069, + "skyrim": 33693, + "skysports": 39845, + "skysports": 46725, + "skywalker": 32936, + "sl": 2621, + "sl": 7489, + "sla": 2725, + "sla": 26707, + "slab": 24241, + "slabs": 42818, + "slack": 37108, + "slack": 30142, + "slade": 33546, + "slain": 35972, + "slalom": 43540, + "slam": 14891, + "slam": 10131, + "slammed": 29772, + "slams": 18907, + "slan": 44663, + "slan": 47193, + "sland": 11294, + "slang": 33655, + "slap": 48830, + "slap": 21751, + "slapped": 38861, + "slaps": 46796, + "slash": 19749, + "slat": 38966, + "slate": 17919, + "slated": 36094, + "slater": 25968, + "slaugh": 26782, + "slaughter": 19815, + "slaughtered": 46615, + "slav": 47292, + "slava": 41797, + "slave": 14029, + "slavery": 15754, + "slaves": 23833, + "slaw": 28178, + "slay": 48319, + "slay": 19380, + "slayed": 44870, + "slayer": 21605, + "slaying": 27812, + "slays": 45648, + "slc": 21972, + "sle": 1709, + "sleague": 23336, + "sled": 28438, + "sledge": 48750, + "slee": 17642, + "slee": 38977, + "sleek": 23187, + "sleep": 4656, + "sleep": 3840, + "sleeper": 28709, + "sleeping": 6982, + "sleepless": 39779, + "sleepover": 39415, + "sleeps": 16610, + "sleepy": 32572, + "sleepy": 14497, + "sleet": 36948, + "sleeve": 35270, + "sleeve": 10536, + "sleeveless": 38049, + "sleeves": 19691, + "sleg": 47650, + "sleigh": 30865, + "slender": 40331, + "slept": 20388, + "sler": 14066, + "sley": 17198, + "sley": 6496, + "sli": 1811, + "sli": 44824, + "slic": 19692, + "slice": 13431, + "sliced": 28121, + "slices": 28424, + "slick": 18341, + "slide": 27828, + "slide": 8837, + "slider": 37861, + "sliders": 40700, + "slides": 15939, + "slideshow": 42817, + "sliding": 21468, + "slife": 15448, + "sliga": 21080, + "slight": 14297, + "slightly": 8456, + "sligo": 30424, + "slike": 38744, + "slim": 35226, + "slim": 12364, + "slime": 29107, + "sling": 28021, + "sling": 32607, + "slinger": 47269, + "slions": 43363, + "slip": 39785, + "slip": 12105, + "slipknot": 41816, + "slipped": 30344, + "slipper": 39644, + "slippers": 26509, + "slippery": 30814, + "slipping": 36301, + "slips": 30632, + "slist": 33749, + "slit": 47011, + "slive": 31652, + "slo": 4303, + "slo": 36083, + "sloan": 29110, + "sloane": 41553, + "slogan": 23398, + "slogans": 42795, + "slope": 22769, + "slopes": 24066, + "sloppy": 36154, + "slot": 14500, + "sloth": 30007, + "slots": 19238, + "slou": 48493, + "slovak": 23315, + "slovakia": 25994, + "sloven": 17018, + "slovenia": 21037, + "slow": 6674, + "slow": 5444, + "slowdown": 38421, + "slowed": 43793, + "slower": 29181, + "slowing": 29839, + "slowly": 9568, + "slows": 46855, + "slp": 45599, + "slr": 21325, + "sls": 33651, + "slt": 39283, + "sltd": 36388, + "slu": 7224, + "slu": 47456, + "slug": 34190, + "slugger": 48671, + "slum": 46754, + "slumber": 44295, + "slump": 35588, + "slur": 30476, 
+ "slush": 39815, + "slv": 45526, + "sly": 28145, + "sly": 21062, + "sm": 978, + "sm": 2764, + "sma": 4357, + "sma": 11854, + "smack": 21280, + "smack": 30026, + "smackdown": 26138, + "smafia": 47686, + "smag": 32212, + "smal": 48379, + "small": 5244, + "small": 2442, + "smallbiz": 41724, + "smallbiz": 18987, + "smallbusiness": 21316, + "smalle": 18490, + "smaller": 12431, + "smallest": 18686, + "smalls": 41696, + "sman": 9612, + "smar": 3201, + "smart": 5383, + "smart": 4115, + "smartcities": 34822, + "smartcity": 33973, + "smarter": 18990, + "smartest": 37092, + "smarthome": 47726, + "smartphone": 11290, + "smartphones": 22212, + "smartwatch": 35798, + "smash": 17258, + "smash": 10332, + "smashbros": 44897, + "smashed": 18410, + "smashes": 45657, + "smashing": 19632, + "smatter": 16537, + "smb": 30446, + "smc": 31375, + "smc": 28312, + "smd": 34582, + "sme": 11758, + "sme": 15650, + "smear": 37546, + "smel": 28476, + "smell": 9688, + "smelling": 32493, + "smells": 14668, + "smelly": 46145, + "smen": 15961, + "smer": 48526, + "smere": 39629, + "smes": 26141, + "smg": 46876, + "smh": 9623, + "smi": 5655, + "smi": 40049, + "smil": 33937, + "smile": 27641, + "smile": 3490, + "smiled": 34362, + "smiles": 8726, + "smiley": 22925, + "smiling": 9200, + "smir": 24667, + "smith": 10527, + "smith": 2915, + "smiths": 27872, + "smithson": 25372, + "smithsonian": 31209, + "smm": 19510, + "smma": 42370, + "smo": 2513, + "smo": 13437, + "smobile": 38923, + "smog": 44425, + "smoke": 20381, + "smoke": 6664, + "smoked": 11161, + "smoker": 32348, + "smokers": 29571, + "smokes": 40336, + "smokey": 23670, + "smokin": 32825, + "smoking": 9038, + "smoky": 25549, + "smol": 29939, + "smol": 40403, + "smoo": 5430, + "smooth": 10958, + "smooth": 8990, + "smoother": 44271, + "smoothie": 16668, + "smoothies": 34458, + "smoothly": 32380, + "smore": 48323, + "smp": 32260, + "smriti": 49227, + "sms": 10409, + "smt": 26672, + "smtown": 26072, + "smu": 10878, + "smu": 30458, + "smug": 41021, + "smugg": 28130, + "smuggling": 34146, + "smur": 24708, + "smusic": 19191, + "smw": 44929, + "smx": 46699, + "smy": 14381, + "smyth": 44822, + "sn": 1672, + "sn": 5844, + "sna": 4032, + "snack": 47548, + "snack": 10039, + "snacking": 46474, + "snacks": 12349, + "snag": 34789, + "snag": 28043, + "snagged": 48534, + "snail": 23132, + "snails": 34928, + "snake": 30133, + "snake": 8798, + "snakes": 19605, + "snap": 4578, + "snap": 7404, + "snapback": 31234, + "snapchat": 7799, + "snapmatic": 45907, + "snapp": 10185, + "snapped": 15543, + "snapper": 31677, + "snapping": 31581, + "snaps": 16890, + "snapshot": 18243, + "snar": 30810, + "snare": 40651, + "snat": 18457, + "snatch": 35302, + "snatched": 44821, + "snation": 14362, + "snazzy": 48963, + "snc": 39918, + "sne": 3791, + "sne": 46503, + "sneak": 27871, + "sneak": 6917, + "sneaker": 31698, + "sneaker": 24781, + "sneakers": 17397, + "sneaking": 34633, + "sneakpeek": 47831, + "sneaks": 40926, + "sneaky": 21293, + "snee": 42095, + "snell": 46410, + "sner": 31424, + "snes": 26667, + "snews": 18623, + "snf": 47651, + "sng": 41549, + "snhl": 43093, + "sni": 7186, + "sni": 35570, + "snickers": 49127, + "sniff": 37841, + "snip": 42954, + "sniper": 22157, + "snippet": 37531, + "snippets": 44001, + "snl": 16011, + "sno": 8567, + "sno": 17802, + "snoo": 11352, + "snooker": 25657, + "snoop": 44503, + "snoop": 27754, + "snoopdogg": 48388, + "snoopy": 41967, + "snooze": 40718, + "snor": 16590, + "snoring": 44560, + "snorkel": 44285, + "snorkeling": 48103, + "snow": 3880, + "snow": 2583, + 
"snowball": 39254, + "snowboard": 33403, + "snowboarding": 32397, + "snowday": 37982, + "snowden": 32154, + "snowdon": 47107, + "snowdonia": 36088, + "snowed": 45073, + "snowfall": 21714, + "snowflake": 33447, + "snowflakes": 38618, + "snowing": 21443, + "snowman": 22668, + "snowstorm": 38777, + "snowy": 14191, + "snp": 15301, + "sns": 36343, + "snsd": 27961, + "snt": 34834, + "snu": 9694, + "snuck": 36522, + "snug": 45169, + "snuggle": 31327, + "snuggles": 48165, + "sny": 17526, + "snyder": 22106, + "snz": 37678, + "so": 759, + "so": 706, + "soa": 39584, + "soak": 24839, + "soaked": 26592, + "soaking": 26750, + "soap": 26086, + "soap": 11088, + "soaps": 40958, + "soar": 48997, + "soar": 22241, + "soaring": 27968, + "soars": 41348, + "sob": 24900, + "sob": 35507, + "sobbing": 36691, + "sober": 30969, + "sober": 24487, + "sobre": 42768, + "sobri": 49308, + "sobs": 43636, + "soc": 3253, + "soc": 7741, + "soca": 49239, + "socal": 46470, + "socal": 20450, + "soccer": 16268, + "soccer": 4233, + "socceroos": 41997, + "socent": 30831, + "sochi": 21014, + "soci": 1720, + "social": 4803, + "social": 2346, + "socialism": 23372, + "socialist": 18450, + "socialists": 43839, + "socially": 24555, + "socialmedi": 23813, + "socialmedia": 9600, + "socialmediamarketing": 31790, + "societal": 40058, + "societies": 25855, + "society": 3757, + "socio": 44319, + "socio": 42790, + "sociology": 32373, + "sock": 29801, + "sock": 18277, + "socket": 28657, + "socks": 8774, + "socorro": 46409, + "socute": 45086, + "sod": 31435, + "soda": 13533, + "sodium": 29070, + "soe": 44136, + "soe": 25498, + "soever": 34024, + "sof": 1571, + "sof": 41187, + "sofa": 15723, + "soff": 35290, + "soff": 30684, + "sofficial": 20563, + "sofi": 41537, + "sofia": 18914, + "sofinstagram": 17301, + "soft": 12778, + "soft": 3773, + "softball": 8369, + "softer": 44462, + "softhe": 23127, + "softly": 34958, + "software": 35941, + "software": 5847, + "softwitter": 11311, + "sog": 44775, + "soggy": 41168, + "sohn": 49267, + "soho": 47749, + "soho": 17592, + "soi": 40495, + "soil": 33417, + "soil": 9216, + "soils": 34891, + "soir": 43427, + "sok": 43456, + "sol": 1175, + "sol": 9941, + "sola": 40086, + "solace": 42567, + "solar": 16990, + "solar": 5199, + "solareclipse": 44727, + "sold": 33116, + "sold": 3939, + "soldi": 5098, + "soldier": 9355, + "soldiers": 7547, + "sole": 10519, + "sole": 8576, + "soleil": 33148, + "solely": 27913, + "solent": 47783, + "soles": 22682, + "soli": 3911, + "solic": 19369, + "solicitor": 45647, + "solicitors": 46000, + "solid": 30626, + "solid": 6148, + "solidar": 10415, + "solidarity": 10983, + "solidi": 46136, + "solids": 49070, + "solihull": 45293, + "solit": 37039, + "solitaire": 47257, + "solitary": 33094, + "solitude": 33199, + "solo": 17626, + "solo": 5797, + "soloist": 46391, + "solom": 15768, + "solomon": 19785, + "solos": 44868, + "solst": 20298, + "solstice": 21359, + "solu": 2487, + "solution": 4575, + "solutions": 5140, + "solve": 8917, + "solved": 13451, + "solves": 42740, + "solving": 15581, + "som": 734, + "som": 10672, + "soma": 36170, + "somal": 40281, + "somali": 26231, + "somalia": 17051, + "somaliland": 43315, + "some": 1132, + "some": 836, + "somebody": 8305, + "someday": 17127, + "somehow": 11735, + "someone": 2100, + "somer": 9656, + "somerhalder": 33990, + "somerset": 14926, + "somerville": 41409, + "somes": 38124, + "somethin": 33541, + "something": 28316, + "something": 2006, + "sometime": 21464, + "sometimes": 4237, + "somewhat": 17864, + "somewhere": 8119, + "somm": 42726, + "somme": 
30625, + "sommer": 44954, + "somos": 24951, + "son": 1176, + "son": 825, + "sona": 21249, + "sonam": 40096, + "sonar": 48235, + "sonata": 37009, + "sone": 29599, + "song": 6868, + "song": 2295, + "songs": 4641, + "songwriter": 13034, + "songwriters": 39583, + "songwriting": 33567, + "songz": 49302, + "soni": 34899, + "soni": 35911, + "sonia": 20409, + "sonic": 23785, + "sonic": 9132, + "sonics": 48511, + "sonja": 46102, + "sonline": 23412, + "sonny": 43000, + "sonny": 20880, + "sono": 44109, + "sonom": 48596, + "sonoma": 26269, + "sons": 5502, + "sonsof": 46676, + "sont": 31063, + "sonthe": 40923, + "sony": 16042, + "sony": 8748, + "sonya": 39172, + "soo": 5517, + "soo": 8602, + "soom": 39771, + "soon": 27559, + "soon": 1745, + "sooner": 18968, + "sooners": 30449, + "sooo": 11526, + "soooo": 13658, + "sooooo": 21199, + "soooooo": 34859, + "soor": 46698, + "soothe": 44424, + "soothing": 27730, + "sop": 3974, + "sop": 19194, + "soph": 34963, + "sophi": 6192, + "sophia": 16790, + "sophie": 38648, + "sophie": 12357, + "sophistic": 17646, + "sophisticated": 20833, + "sophom": 13696, + "sophomore": 15242, + "sophomores": 47645, + "soprano": 28880, + "soproud": 44479, + "sor": 1852, + "sor": 16872, + "sora": 38719, + "sorbet": 39994, + "sore": 43330, + "sore": 15454, + "sored": 6731, + "soren": 38907, + "sorg": 28152, + "sori": 38588, + "sorority": 30059, + "soros": 33248, + "sorren": 44012, + "sorrow": 28020, + "sorrows": 47924, + "sorry": 25745, + "sorry": 3675, + "sorrynotsorry": 37105, + "sort": 8450, + "sorta": 34700, + "sorted": 13221, + "sorting": 19198, + "sorts": 12577, + "sory": 16257, + "sos": 25145, + "sos": 5792, + "sosa": 45433, + "sosfam": 47709, + "sot": 41542, + "sot": 34116, + "sothe": 32145, + "sotho": 45496, + "soto": 27947, + "sotto": 26047, + "sotu": 32286, + "sou": 1101, + "sou": 24293, + "sought": 18874, + "soul": 8701, + "soul": 3755, + "soulful": 30196, + "soulmate": 38130, + "souls": 10951, + "soun": 19474, + "sound": 5236, + "sound": 3608, + "soundcheck": 31394, + "soundcloud": 15190, + "sounded": 28287, + "sounders": 44933, + "sounding": 21351, + "sounds": 5694, + "soundtrack": 11389, + "soup": 7077, + "soups": 45052, + "sour": 2235, + "sour": 12049, + "source": 23698, + "source": 3634, + "sourced": 23340, + "sources": 5124, + "sourcing": 19574, + "sourdough": 29921, + "souri": 11674, + "sous": 32093, + "sousa": 46296, + "sout": 38156, + "sout": 32732, + "south": 2938, + "south": 2045, + "southafrica": 15184, + "southampton": 15767, + "southbank": 44173, + "southbound": 22932, + "southeast": 13942, + "southeastern": 26813, + "southend": 25583, + "souther": 33330, + "southern": 17704, + "southern": 5036, + "southgate": 47262, + "southkorea": 43552, + "southport": 37446, + "southside": 36436, + "southsudan": 30419, + "southwark": 39098, + "southwe": 46443, + "southwest": 13320, + "southwestern": 30157, + "souven": 20210, + "souvenir": 24811, + "souvenirs": 48460, + "souza": 29424, + "sov": 29737, + "sover": 31876, + "sovere": 17736, + "sovereign": 29418, + "sovereign": 26337, + "sovereignty": 31701, + "soviet": 14274, + "sow": 33089, + "sowe": 36130, + "soweto": 47070, + "sown": 49369, + "sox": 39556, + "sox": 8657, + "soy": 16524, + "soy": 15010, + "soybean": 34606, + "soybeans": 40840, + "soyu": 39578, + "soyuz": 43842, + "sp": 588, + "sp": 4393, + "spa": 7852, + "spa": 6692, + "spac": 10336, + "space": 7857, + "space": 2138, + "spacecraft": 25940, + "spaces": 9006, + "spaceship": 34317, + "spacex": 22511, + "spacey": 48770, + "spacious": 24769, + "spad": 45362, + 
"spade": 32562, + "spades": 48368, + "spaghetti": 18440, + "spain": 5083, + "spal": 26018, + "spam": 29712, + "spam": 14624, + "span": 4270, + "span": 14537, + "spandex": 41686, + "spani": 16721, + "spaniel": 35435, + "spanish": 29966, + "spanish": 6013, + "spann": 25323, + "spanning": 38638, + "spans": 45407, + "spaper": 34548, + "spar": 3378, + "spar": 34576, + "spare": 12615, + "spares": 39505, + "spark": 9555, + "spark": 11047, + "sparked": 32647, + "sparkle": 18287, + "sparkles": 36410, + "sparkling": 17893, + "sparkly": 30542, + "sparks": 15046, + "sparky": 47198, + "sparring": 42161, + "sparrow": 22888, + "spart": 10143, + "sparta": 38401, + "spartan": 26582, + "spartan": 24225, + "spartans": 20457, + "sparty": 36477, + "spas": 31714, + "spati": 19200, + "spatial": 22022, + "spaw": 31605, + "spawn": 29166, + "spay": 40634, + "spc": 20492, + "spca": 37018, + "spd": 37717, + "spd": 28307, + "spdwy": 45981, + "spe": 876, + "spe": 36676, + "speak": 20599, + "speak": 4208, + "speake": 46077, + "speaker": 25764, + "speaker": 4914, + "speakers": 7675, + "speaking": 3714, + "speaks": 5661, + "spear": 23277, + "spear": 30420, + "speare": 43859, + "spears": 20242, + "spec": 1711, + "spec": 18596, + "speci": 1969, + "special": 11422, + "special": 1689, + "specialist": 10630, + "specialists": 21719, + "speciality": 46904, + "specialized": 23265, + "specializes": 48533, + "specially": 4513, + "specials": 11983, + "specialty": 18262, + "species": 6330, + "specific": 10528, + "specifically": 17174, + "specification": 46394, + "specifications": 39705, + "specified": 48114, + "specimen": 30263, + "specimens": 42715, + "specs": 24093, + "spect": 3416, + "spectac": 7242, + "spectacle": 34342, + "spectacular": 8404, + "spectator": 32372, + "spectators": 39306, + "spective": 6633, + "spector": 48676, + "spectral": 45441, + "spectre": 35998, + "spectro": 27646, + "spectrum": 13532, + "specul": 19209, + "speculation": 30898, + "sped": 38813, + "spee": 4050, + "speech": 19556, + "speech": 4902, + "speeches": 25208, + "speechless": 23152, + "speed": 6860, + "speed": 4163, + "speeding": 27264, + "speeds": 22017, + "speedway": 11480, + "speedy": 21603, + "spel": 41887, + "spell": 22784, + "spell": 11230, + "spelled": 24339, + "spelling": 15614, + "spells": 25335, + "spelt": 38316, + "spen": 5087, + "spence": 33324, + "spencer": 27509, + "spencer": 10678, + "spend": 4664, + "spending": 5961, + "spends": 22508, + "spent": 4429, + "speople": 33035, + "sper": 8213, + "sper": 15313, + "sperm": 35781, + "sperson": 22687, + "spf": 34973, + "spg": 34623, + "sph": 28909, + "sph": 24684, + "sphe": 33691, + "spher": 18349, + "sphere": 6987, + "spheres": 37478, + "spheric": 21744, + "sphin": 39237, + "sphinx": 46487, + "spho": 20442, + "sphoto": 38594, + "sphy": 43808, + "spi": 3174, + "spi": 37080, + "spic": 17264, + "spice": 29761, + "spice": 10141, + "spiced": 24267, + "spicer": 37627, + "spices": 21194, + "spicy": 10915, + "spide": 36801, + "spider": 11963, + "spider": 7622, + "spiderman": 39808, + "spiderman": 18427, + "spiders": 23141, + "spidey": 41706, + "spie": 28573, + "spie": 28746, + "spied": 43998, + "spiegel": 45351, + "spiel": 28435, + "spiel": 37690, + "spielberg": 37569, + "spies": 25374, + "spieth": 43254, + "spike": 35306, + "spike": 15310, + "spiked": 47014, + "spikes": 29582, + "spil": 47765, + "spill": 43933, + "spill": 18006, + "spilled": 33206, + "spilling": 49006, + "spills": 35796, + "spin": 6288, + "spin": 9226, + "spinach": 14747, + "spinal": 23925, + "spine": 48221, + "spine": 19646, + 
"sping": 47113, + "spinner": 29924, + "spinning": 13987, + "spino": 40848, + "spinoff": 42513, + "spinrilla": 46064, + "spins": 27243, + "spion": 39604, + "spionage": 41838, + "spir": 3745, + "spiral": 19873, + "spiration": 38126, + "spire": 27439, + "spired": 40650, + "spires": 46938, + "spiri": 4024, + "spirit": 18224, + "spirit": 4071, + "spirited": 34701, + "spirits": 13192, + "spiritu": 7237, + "spiritual": 46076, + "spiritual": 9473, + "spirituality": 22165, + "spiro": 40085, + "spit": 18115, + "spit": 23177, + "spite": 26060, + "spitfire": 31126, + "spitting": 40721, + "spl": 2470, + "spl": 33052, + "spla": 4809, + "splac": 16059, + "splace": 38743, + "splash": 43641, + "splash": 11879, + "splat": 15733, + "splatoon": 22565, + "splay": 3169, + "splen": 18552, + "splend": 29861, + "splendid": 21016, + "splendor": 46262, + "splin": 38090, + "split": 25443, + "split": 9109, + "splits": 34897, + "splitting": 37210, + "splus": 40866, + "spn": 35467, + "spn": 19414, + "spnfamily": 38566, + "spo": 1261, + "spo": 21085, + "spock": 43918, + "spoil": 25600, + "spoiled": 21399, + "spoiler": 16512, + "spoilers": 18326, + "spoils": 42436, + "spoilt": 35358, + "spokane": 24528, + "spoke": 13890, + "spoke": 6518, + "spoken": 12979, + "spokesman": 31632, + "spokesperson": 26234, + "spol": 22476, + "spol": 8132, + "spoli": 34301, + "spolice": 37406, + "spon": 1715, + "spon": 48216, + "sponge": 22861, + "sponge": 24345, + "spongebob": 25089, + "spons": 5597, + "sponsor": 10424, + "sponsor": 7574, + "sponsored": 7197, + "sponsoring": 16181, + "sponsors": 11005, + "sponsorship": 17632, + "spontaneous": 32465, + "spoo": 11248, + "spooky": 15369, + "spool": 49152, + "spoon": 27001, + "spoon": 14024, + "spoons": 29661, + "spor": 1475, + "spor": 33746, + "sport": 4379, + "sport": 2364, + "sporting": 32620, + "sporting": 8944, + "sports": 6436, + "sports": 2054, + "sportsc": 40114, + "sportscar": 46931, + "sportscenter": 39157, + "sportsman": 39020, + "sportsmanship": 34858, + "sportsnet": 34144, + "sportswear": 39747, + "sporty": 33346, + "spot": 3223, + "spot": 3049, + "spotify": 7193, + "spotlight": 7901, + "spots": 7670, + "spotted": 4533, + "spotter": 30742, + "spotting": 15885, + "spouse": 24724, + "spout": 48993, + "spp": 47567, + "spr": 1536, + "spr": 19417, + "spra": 12966, + "spraw": 46590, + "spray": 37885, + "spray": 10449, + "sprayed": 40022, + "spraying": 39224, + "spre": 18740, + "spread": 20620, + "spread": 5284, + "spreading": 11821, + "spreads": 27579, + "spree": 21851, + "spri": 35498, + "spride": 26685, + "spring": 5166, + "spring": 2420, + "springbreak": 37753, + "springer": 30117, + "springfield": 16599, + "springs": 7308, + "springst": 32132, + "springsteen": 28367, + "springtime": 28285, + "springtraining": 49364, + "springwatch": 29239, + "sprink": 15817, + "sprinkle": 42897, + "sprinkler": 48754, + "sprinkles": 37326, + "sprint": 29248, + "sprint": 10751, + "sprinter": 36947, + "sprints": 36404, + "sprite": 32544, + "spro": 13902, + "spro": 37403, + "sproject": 37802, + "sproud": 37686, + "sprout": 35863, + "sprouts": 25756, + "spru": 17041, + "spruce": 23812, + "sprung": 32968, + "sps": 13869, + "spu": 23566, + "spun": 47922, + "spun": 32852, + "spur": 15206, + "spur": 20361, + "spurs": 10916, + "spursofficial": 45290, + "sput": 47521, + "spx": 20584, + "spy": 13861, + "spy": 6656, + "spyder": 39952, + "spying": 36227, + "sq": 9370, + "sq": 11590, + "sqft": 41912, + "sql": 42759, + "sql": 18938, + "sqm": 47978, + "sqn": 41209, + "squ": 1653, + "squad": 13892, + "squad": 4234, + 
"squadron": 18579, + "squads": 36590, + "square": 19314, + "square": 3999, + "squared": 32967, + "squares": 26972, + "squash": 13312, + "squat": 44628, + "squat": 30680, + "squats": 40213, + "sque": 9721, + "sque": 8097, + "squee": 14420, + "squeeze": 21684, + "squeezed": 40413, + "squid": 42057, + "squid": 22553, + "squir": 9683, + "squire": 48090, + "squirrel": 14004, + "squirrels": 26623, + "squish": 42607, + "squishy": 47001, + "sr": 3437, + "sr": 5428, + "srbachchan": 32353, + "src": 23445, + "sre": 17748, + "sri": 11051, + "sri": 9276, + "sridevi": 46301, + "srilan": 15559, + "srilanka": 16922, + "srin": 26818, + "srinagar": 33671, + "srini": 41899, + "sriracha": 42743, + "sris": 27851, + "srisri": 32966, + "srk": 44982, + "srk": 11216, + "srl": 33808, + "srp": 43004, + "srs": 41764, + "srsly": 44179, + "srt": 28139, + "sru": 44152, + "srugby": 40526, + "ss": 690, + "ss": 632, + "ssa": 6088, + "ssal": 31330, + "ssal": 35936, + "ssb": 37511, + "ssc": 21692, + "ssc": 20364, + "ssd": 23107, + "sse": 9030, + "sse": 8938, + "ssed": 38755, + "ssed": 1804, + "ssel": 17402, + "ssel": 19373, + "sseldorf": 47792, + "ssell": 42388, + "ssels": 8355, + "ssen": 39408, + "ssen": 22645, + "sser": 20445, + "sses": 1802, + "ssett": 44103, + "ssf": 33239, + "ssg": 40707, + "ssh": 48866, + "ssi": 834, + "ssi": 14953, + "ssia": 22238, + "ssian": 31218, + "ssible": 47099, + "ssic": 27774, + "ssic": 17077, + "ssie": 7572, + "ssier": 26422, + "ssil": 15026, + "ssin": 42660, + "ssing": 2112, + "ssion": 16050, + "ssion": 1627, + "ssional": 13727, + "ssionism": 24787, + "ssionist": 27682, + "ssions": 4137, + "ssive": 2734, + "ssively": 28060, + "ssl": 32195, + "ssler": 30287, + "ssly": 24904, + "ssn": 39116, + "ssnhq": 47998, + "sso": 25900, + "sso": 7914, + "ssoccer": 32546, + "sson": 36124, + "sson": 7271, + "ssor": 35152, + "ssp": 31101, + "ssr": 39880, + "sss": 11176, + "ssss": 30676, + "ssss": 15880, + "sssss": 24298, + "sst": 40396, + "ssu": 35351, + "ssummit": 49301, + "ssus": 31286, + "ssw": 36937, + "ssy": 22519, + "ssy": 8661, + "st": 522, + "st": 545, + "sta": 1363, + "sta": 2745, + "stab": 7726, + "stab": 29974, + "stabbed": 24534, + "stabbing": 25474, + "stabil": 42576, + "stabili": 23903, + "stability": 16716, + "stable": 44427, + "stable": 10492, + "stables": 34218, + "stac": 10175, + "stacey": 41653, + "stacey": 24262, + "stache": 23616, + "stack": 24723, + "stack": 11257, + "stacked": 24990, + "stacking": 39836, + "stacks": 24734, + "stacy": 26628, + "stad": 15832, + "stad": 16485, + "stade": 38198, + "stadi": 26587, + "stadion": 48815, + "stadium": 3390, + "stadiums": 38852, + "stadt": 22713, + "staf": 2367, + "staff": 31188, + "staff": 2813, + "staffer": 38494, + "staffers": 44994, + "staffing": 32932, + "stafford": 25006, + "staffordshire": 29198, + "staffs": 36098, + "stag": 12088, + "stag": 20277, + "stage": 23182, + "stage": 2170, + "staged": 19906, + "stages": 12297, + "staggering": 37315, + "staging": 27026, + "stagram": 19503, + "stags": 45936, + "stain": 3933, + "stain": 14603, + "stained": 13751, + "staining": 32523, + "stainless": 12320, + "stains": 32008, + "stair": 7240, + "stair": 17662, + "staircase": 22777, + "stairs": 9577, + "stairway": 45559, + "stak": 39144, + "stake": 15955, + "stake": 7937, + "stakeholder": 39122, + "stakeholders": 22968, + "stakes": 7519, + "staking": 47082, + "stal": 3861, + "stal": 5535, + "stale": 42471, + "stalert": 25450, + "stalin": 28346, + "stalk": 40826, + "stalk": 14878, + "stalker": 26777, + "stalking": 24721, + "stalks": 45886, + "stall": 
24636, + "stall": 12058, + "stalled": 40362, + "stallion": 28273, + "stallions": 44787, + "stallone": 40969, + "stalls": 25427, + "stam": 4663, + "stamatic": 30904, + "stamford": 27843, + "stamina": 48753, + "stamp": 28694, + "stamp": 12771, + "stampcollecting": 42852, + "stamped": 38356, + "stampede": 25384, + "stamps": 13827, + "stan": 2203, + "stan": 2434, + "stana": 33311, + "stanbul": 11231, + "stance": 48900, + "stance": 3542, + "stances": 15054, + "stand": 1819, + "stand": 2087, + "standalone": 44887, + "standard": 35780, + "standard": 5807, + "standardi": 30247, + "standards": 9022, + "standby": 36184, + "standing": 39934, + "standing": 2862, + "standings": 19835, + "standoff": 31821, + "standout": 23131, + "standre": 48309, + "stands": 6446, + "standup": 35108, + "standup": 24964, + "standwith": 19540, + "stanford": 36219, + "stanford": 15087, + "stang": 12536, + "stani": 38228, + "stanis": 37711, + "stanley": 19048, + "stanley": 10079, + "stanleycup": 28662, + "stans": 26564, + "stant": 41576, + "stant": 4906, + "stanton": 25400, + "stap": 10438, + "staple": 22695, + "staples": 23646, + "stapleton": 45228, + "star": 993, + "star": 1565, + "starbuck": 48519, + "starbucks": 9499, + "starch": 47837, + "starcraft": 48871, + "stardom": 44616, + "stardust": 34337, + "stare": 18094, + "stared": 47772, + "stares": 37916, + "starfish": 44283, + "stargate": 41099, + "stargazing": 49328, + "staring": 13800, + "stark": 40446, + "stark": 15353, + "starlight": 32197, + "starling": 46205, + "starmagic": 48023, + "starplus": 37815, + "starr": 19186, + "starred": 24180, + "starrer": 41311, + "starring": 6660, + "starry": 30963, + "stars": 2895, + "starship": 37166, + "start": 17466, + "start": 1572, + "started": 2760, + "starter": 7800, + "starters": 22222, + "starting": 2530, + "startrek": 30642, + "startrek": 15349, + "starts": 3105, + "startu": 6996, + "startup": 18049, + "startup": 5882, + "startups": 9056, + "starve": 46957, + "starving": 30473, + "starwar": 17287, + "starwars": 26239, + "starwars": 7887, + "starz": 25928, + "stas": 19866, + "stash": 27711, + "stasy": 45942, + "stat": 3004, + "stat": 15216, + "state": 3492, + "state": 1295, + "statec": 33931, + "stated": 19629, + "statedept": 41458, + "statefair": 40305, + "statement": 5401, + "statements": 19513, + "staten": 38263, + "stateof": 35195, + "states": 22125, + "states": 4218, + "statesman": 35301, + "stateu": 44248, + "statewide": 29561, + "stati": 9622, + "static": 16363, + "stating": 35147, + "station": 13498, + "station": 2631, + "stationary": 29493, + "stationed": 47618, + "stationery": 33851, + "stations": 10051, + "statistical": 29349, + "statistics": 14165, + "stats": 7294, + "statu": 32481, + "statue": 8222, + "statues": 24363, + "status": 6414, + "stau": 28550, + "staur": 3709, + "stav": 20285, + "stax": 32235, + "stay": 4714, + "stay": 2277, + "stayed": 13805, + "staying": 8993, + "stays": 13311, + "staytuned": 39285, + "stc": 29859, + "std": 30477, + "ste": 795, + "ste": 2686, + "stea": 46614, + "stead": 16101, + "stead": 11031, + "steadily": 35049, + "steady": 12937, + "steak": 26955, + "steak": 8913, + "steakhouse": 35031, + "steaks": 30655, + "steal": 37070, + "steal": 10181, + "stealing": 14242, + "steals": 20224, + "stealth": 25327, + "steam": 10962, + "steam": 6972, + "steamboat": 41121, + "steamed": 29007, + "steamer": 49075, + "steaming": 43746, + "steampunk": 24130, + "steamy": 43104, + "stec": 46713, + "stech": 48949, + "stech": 32455, + "sted": 20426, + "sted": 1356, + "stee": 31793, + "steed": 48293, + 
"steel": 6938, + "steel": 4726, + "steele": 19460, + "steelers": 14430, + "steen": 42851, + "steen": 18625, + "steep": 28648, + "steep": 20714, + "steer": 27612, + "steering": 19833, + "stef": 29158, + "stefan": 15004, + "stefan": 18829, + "stefani": 38319, + "stefano": 30719, + "steff": 30075, + "stein": 13653, + "stein": 5818, + "steiner": 36314, + "stel": 9102, + "stel": 10798, + "stell": 22355, + "stella": 46178, + "stella": 17869, + "stellar": 13810, + "stellen": 42754, + "stem": 24342, + "stem": 6761, + "stemc": 40486, + "stems": 31503, + "sten": 7652, + "sten": 7877, + "stencil": 47854, + "stennis": 45636, + "step": 15572, + "step": 3348, + "steph": 3522, + "steph": 16251, + "stephan": 37312, + "stephani": 48121, + "stephanie": 14361, + "stephen": 10421, + "stephen": 6078, + "stephenking": 46361, + "stephens": 22256, + "stephenson": 37280, + "stepped": 18384, + "stepping": 15906, + "steps": 5408, + "ster": 1022, + "ster": 881, + "stere": 9229, + "stered": 6935, + "stereo": 15992, + "stereo": 17400, + "stereotypes": 27890, + "steria": 38804, + "stering": 14175, + "sterling": 45790, + "sterling": 9378, + "stern": 36254, + "stern": 2945, + "steroids": 37670, + "sterone": 39418, + "sters": 2132, + "stery": 24232, + "stest": 8556, + "stev": 11640, + "steve": 7412, + "steve": 3803, + "steven": 10973, + "steven": 8016, + "stevens": 13877, + "stevenson": 25091, + "stevie": 42104, + "stevie": 18969, + "stew": 17906, + "stewar": 28453, + "steward": 34980, + "steward": 43355, + "stewards": 49294, + "stewardship": 36720, + "stewart": 8120, + "stfu": 47000, + "stg": 48387, + "stgeorge": 43698, + "sth": 13456, + "sth": 34004, + "sthe": 16491, + "sthel": 42863, + "sti": 860, + "sti": 12439, + "stia": 26492, + "stible": 25835, + "stic": 5868, + "stic": 1561, + "stical": 16660, + "stically": 19041, + "stick": 5483, + "stick": 4987, + "sticker": 11270, + "stickers": 11613, + "sticking": 21021, + "sticks": 10016, + "sticky": 18887, + "stics": 5449, + "stie": 38164, + "stie": 11000, + "stier": 42069, + "sties": 16428, + "stiff": 43471, + "stiff": 21441, + "stig": 4088, + "stig": 42551, + "stigate": 15390, + "stigma": 20619, + "stik": 42247, + "stil": 21790, + "stil": 37519, + "stiles": 33028, + "still": 13209, + "still": 1170, + "stills": 20259, + "stim": 18269, + "stime": 24711, + "stimul": 16434, + "stimulate": 42380, + "stimulating": 41237, + "stimulation": 39530, + "stimulus": 47283, + "stin": 2588, + "stin": 4025, + "stina": 22359, + "stine": 7098, + "sting": 19868, + "sting": 1271, + "stingly": 49332, + "stingray": 43229, + "stink": 38213, + "stinky": 44957, + "stino": 40658, + "stint": 33531, + "stion": 10812, + "stip": 39869, + "stips": 44756, + "stique": 43305, + "stir": 12416, + "stir": 19564, + "stirling": 23128, + "stirring": 39205, + "stis": 45224, + "stit": 14110, + "stitch": 30003, + "stitch": 14771, + "stitched": 36540, + "stitcher": 48204, + "stitches": 32360, + "stitching": 45208, + "stitu": 14585, + "stitutes": 40479, + "stive": 22426, + "stix": 48829, + "stjohn": 36153, + "stl": 14179, + "stl": 12527, + "stlblues": 44138, + "stlcards": 28644, + "stle": 7698, + "stles": 48638, + "stlouis": 40358, + "stlouis": 39516, + "stm": 28333, + "stn": 27175, + "sto": 928, + "sto": 5723, + "stock": 5899, + "stock": 3206, + "stocked": 23552, + "stockholm": 16024, + "stocki": 42944, + "stocking": 17335, + "stockings": 28040, + "stockmarket": 40359, + "stockport": 35569, + "stocks": 9321, + "stockton": 26130, + "stoday": 22392, + "stok": 43782, + "stoke": 31338, + "stoke": 13550, + "stoked": 13160, 
+ "stokes": 27512, + "stol": 11401, + "stol": 6700, + "stole": 10995, + "stolen": 8704, + "stolic": 45020, + "stom": 2343, + "stom": 38068, + "stoma": 43545, + "stomach": 14722, + "stomp": 40165, + "stomping": 46144, + "ston": 4101, + "ston": 1839, + "stone": 7694, + "stone": 2441, + "stoned": 36248, + "stonehenge": 42417, + "stoner": 35131, + "stoner": 29115, + "stones": 42659, + "stones": 6885, + "stonewall": 39688, + "stoney": 44198, + "stony": 41717, + "stony": 35691, + "stoo": 24505, + "stood": 9151, + "stool": 34413, + "stool": 22314, + "stop": 6005, + "stop": 1691, + "stopbrexit": 48680, + "stopp": 15738, + "stopped": 6015, + "stopper": 32147, + "stoppers": 34457, + "stopping": 10735, + "stops": 9822, + "stopthe": 26463, + "stor": 809, + "stor": 17740, + "storage": 6824, + "store": 17769, + "store": 2183, + "stored": 28257, + "stores": 6370, + "storey": 24025, + "storians": 34628, + "stories": 3784, + "storing": 40087, + "stork": 46452, + "storm": 7434, + "storm": 2819, + "stormed": 45939, + "stormhour": 12161, + "storming": 24842, + "storms": 6464, + "stormtrooper": 49218, + "stormy": 20075, + "stors": 7178, + "story": 6512, + "story": 1134, + "storyline": 37079, + "storymonth": 23717, + "storyteller": 35882, + "storytelling": 14457, + "storytime": 44197, + "stos": 19281, + "stou": 37168, + "stour": 37361, + "stour": 21928, + "stout": 16550, + "stove": 21423, + "stow": 44284, + "stow": 17046, + "stowe": 34196, + "stown": 28071, + "stown": 7939, + "stp": 30576, + "stpatrick": 21343, + "stpatricksday": 22747, + "str": 807, + "str": 15913, + "stra": 1894, + "stra": 6253, + "strack": 46861, + "strada": 31134, + "strade": 48968, + "straigh": 31016, + "straight": 22114, + "straight": 4241, + "strain": 16887, + "strains": 38067, + "strait": 22946, + "straits": 41984, + "stral": 23289, + "stralia": 42510, + "stran": 18411, + "strand": 18214, + "strand": 17826, + "stranded": 22975, + "strang": 11138, + "strange": 33380, + "strange": 7288, + "strangely": 37566, + "stranger": 35541, + "stranger": 14149, + "strangers": 20684, + "strangerthings": 43271, + "strangest": 46740, + "strap": 13946, + "strapped": 40922, + "straps": 31213, + "stras": 36814, + "stras": 42125, + "strasbourg": 39576, + "strat": 11345, + "strat": 32925, + "strata": 47278, + "strate": 3532, + "strate": 28758, + "strategi": 49102, + "strategic": 10246, + "strategically": 45706, + "strategies": 9942, + "strategist": 37180, + "strategy": 5637, + "strates": 45724, + "stratford": 23955, + "strath": 21997, + "stration": 3156, + "strato": 28878, + "strauss": 32033, + "strava": 34625, + "stravel": 43494, + "straw": 7430, + "straw": 16438, + "strawberries": 17796, + "strawberry": 10233, + "straws": 33048, + "stray": 30784, + "stray": 15712, + "stre": 1079, + "stre": 19652, + "stread": 27797, + "streak": 11749, + "streaks": 42092, + "stream": 8659, + "stream": 3322, + "streamed": 26280, + "streamer": 25178, + "streamers": 19937, + "streaming": 6278, + "streamline": 44917, + "streams": 13545, + "stree": 35082, + "stree": 32438, + "streep": 38701, + "street": 4839, + "street": 2012, + "streetart": 12948, + "streetcar": 34268, + "streetfood": 44486, + "streetphotography": 20786, + "streets": 6058, + "streetstyle": 39118, + "streetwear": 37298, + "strel": 39685, + "stren": 4349, + "streng": 4472, + "strength": 15475, + "strength": 5959, + "strengthen": 16318, + "strengthened": 47131, + "strengthening": 23475, + "strengthens": 40280, + "strengths": 29268, + "stress": 17297, + "stress": 5843, + "stressed": 16497, + "stresses": 32112, + 
"stressful": 24268, + "stressing": 35917, + "stret": 12265, + "stretch": 10064, + "stretched": 29393, + "stretches": 32231, + "stretching": 24423, + "stri": 1493, + "stri": 27795, + "stria": 39620, + "strial": 30217, + "strian": 12924, + "stric": 2607, + "strick": 25181, + "strickland": 48939, + "strict": 21585, + "strictly": 16475, + "stride": 36024, + "strides": 37355, + "stries": 18171, + "strife": 46473, + "strike": 20774, + "strike": 5767, + "striker": 12448, + "strikers": 33465, + "strikes": 9280, + "striking": 13392, + "string": 25512, + "string": 9696, + "strings": 15699, + "strip": 9317, + "stripe": 19368, + "striped": 22192, + "stripes": 14239, + "stripped": 26602, + "stripper": 45759, + "stripping": 48588, + "strips": 19000, + "strive": 22140, + "striving": 37671, + "stro": 3121, + "stro": 6186, + "stroke": 44621, + "stroke": 10403, + "strokes": 26595, + "strol": 30123, + "stroll": 15924, + "stroller": 47076, + "strolling": 40911, + "strom": 14707, + "stron": 4165, + "strong": 10436, + "strong": 2389, + "stronger": 27760, + "stronger": 9245, + "strongertogether": 38532, + "strongest": 16171, + "strongh": 38678, + "strongly": 15507, + "strophy": 47912, + "strou": 48425, + "stroud": 39895, + "strous": 23752, + "stru": 1666, + "struc": 3311, + "struck": 10861, + "struction": 12497, + "structural": 16899, + "structure": 5285, + "structured": 27147, + "structures": 14171, + "structuring": 37496, + "strugg": 5176, + "struggle": 8443, + "struggled": 32921, + "struggles": 17446, + "struggling": 12135, + "struly": 34118, + "strum": 37632, + "strung": 46033, + "strust": 23920, + "strut": 48375, + "stry": 17325, + "stry": 2245, + "sts": 1088, + "stu": 858, + "stu": 23531, + "stuart": 32054, + "stuart": 11723, + "stub": 27066, + "stubborn": 38955, + "stuck": 6596, + "stud": 22368, + "stud": 13319, + "studded": 29153, + "studen": 44156, + "student": 14681, + "student": 2556, + "students": 1712, + "studi": 5691, + "studied": 21369, + "studies": 6426, + "studio": 17798, + "studio": 3155, + "studios": 6231, + "studs": 27571, + "study": 21051, + "study": 3123, + "studyabroad": 45425, + "studying": 8826, + "stuff": 46072, + "stuff": 3487, + "stuffed": 11781, + "stuffing": 31612, + "stuffs": 43455, + "stuk": 32424, + "stumb": 16784, + "stumble": 39045, + "stumbled": 21776, + "stump": 32064, + "stun": 3088, + "stun": 37959, + "stunned": 34034, + "stunner": 29965, + "stunning": 3769, + "stunningly": 47515, + "stuns": 43796, + "stunt": 19905, + "stunts": 40118, + "stupi": 18975, + "stupid": 42600, + "stupid": 8085, + "stupidity": 33766, + "stur": 10676, + "sturdy": 43780, + "stures": 27223, + "sturgeon": 31580, + "sturi": 21747, + "sturridge": 45331, + "stutt": 30444, + "stuttgart": 32219, + "stv": 27060, + "stv": 9708, + "stweet": 46832, + "stweets": 39174, + "stx": 42548, + "sty": 1421, + "sty": 2920, + "style": 12356, + "style": 1844, + "styled": 17974, + "styles": 6948, + "styli": 38577, + "styling": 14597, + "stylish": 10378, + "stylist": 15928, + "styn": 41394, + "su": 605, + "su": 2937, + "sua": 42448, + "suarez": 21437, + "suave": 47305, + "sub": 1783, + "sub": 7765, + "subaru": 21319, + "subjec": 16090, + "subject": 10300, + "subjects": 22099, + "subli": 16350, + "sublime": 22367, + "submarine": 19968, + "submer": 27156, + "submerged": 43171, + "submission": 16571, + "submissions": 21566, + "submit": 10423, + "submitted": 15189, + "submitting": 38788, + "subram": 49207, + "subs": 16398, + "subscri": 5838, + "subscribe": 9839, + "subscribed": 44867, + "subscriber": 36292, + "subscribers": 
17337, + "subscription": 17979, + "subscriptions": 47162, + "subsequ": 33598, + "subsequent": 44323, + "subsi": 14856, + "subsidi": 45029, + "subsidiary": 45506, + "subsidies": 37685, + "subsidy": 47462, + "substan": 17487, + "substance": 19309, + "substances": 36834, + "substantial": 27171, + "substantially": 47577, + "substitu": 18529, + "substitute": 25340, + "subtitles": 39479, + "subtle": 16536, + "subur": 12517, + "suburb": 37664, + "suburban": 23570, + "suburbs": 25317, + "subway": 12196, + "suc": 1869, + "succe": 7981, + "succeed": 13556, + "succeeded": 41077, + "succes": 39019, + "success": 3695, + "success": 3034, + "successes": 29436, + "successful": 4670, + "successfully": 9934, + "succession": 38491, + "successive": 41319, + "successor": 34774, + "succu": 45253, + "succul": 25671, + "succulent": 35236, + "such": 2046, + "suction": 42786, + "sud": 8067, + "sud": 33714, + "sudan": 31149, + "sudan": 13474, + "sudanese": 42837, + "sudbury": 32488, + "sudden": 10833, + "sudden": 15433, + "suddenly": 11076, + "sue": 14045, + "sue": 6641, + "sued": 22225, + "suede": 21036, + "sues": 17105, + "suf": 21204, + "suf": 22579, + "sufc": 37091, + "suff": 4866, + "suffe": 13510, + "suffer": 13557, + "suffered": 14766, + "suffering": 10140, + "suffers": 22389, + "sufficient": 28410, + "suffol": 13775, + "suffolk": 46408, + "suffolk": 15685, + "suffra": 34596, + "suffrage": 39567, + "sufi": 39756, + "sug": 3189, + "suga": 28757, + "sugar": 12418, + "sugar": 5574, + "sugge": 6345, + "suggest": 13356, + "suggested": 18790, + "suggesti": 15033, + "suggesting": 29792, + "suggestion": 23741, + "suggestions": 16052, + "suggests": 13333, + "suho": 32744, + "sui": 24972, + "suici": 16372, + "suicidal": 37165, + "suicide": 31310, + "suicide": 8247, + "suing": 18309, + "suisse": 35964, + "suit": 11887, + "suit": 3940, + "suitable": 17476, + "suitcase": 27792, + "suite": 9346, + "suited": 25919, + "suites": 21523, + "suits": 9949, + "suk": 24820, + "suk": 6886, + "suka": 44017, + "suke": 25590, + "sukh": 46961, + "suki": 32704, + "sul": 1767, + "sul": 19879, + "sula": 34713, + "sula": 26143, + "sullivan": 14477, + "sully": 37752, + "sulph": 37234, + "sulphur": 47659, + "sultan": 35650, + "sultan": 17049, + "sum": 7054, + "sum": 8257, + "suma": 47938, + "sumat": 32640, + "sumatra": 47346, + "sume": 45457, + "sumi": 41248, + "summ": 1309, + "summar": 34657, + "summari": 31993, + "summary": 13435, + "summed": 34912, + "summer": 5500, + "summer": 1673, + "summers": 18254, + "summerslam": 40264, + "summertime": 19025, + "summit": 30011, + "summit": 3768, + "summon": 27622, + "summon": 39782, + "sumner": 46813, + "sumo": 33734, + "sump": 34252, + "sumptuous": 47354, + "sums": 13325, + "sun": 968, + "sun": 2176, + "sunbathing": 46994, + "sunburn": 45767, + "sund": 40735, + "sundae": 38078, + "sundance": 24128, + "sundar": 44936, + "sunday": 6649, + "sunday": 1706, + "sundayfunday": 21565, + "sundaymorning": 24809, + "sundaymotivation": 46227, + "sundays": 15827, + "sundaywith": 26469, + "sundaywithmarsha": 26662, + "sunder": 15097, + "sunderland": 45727, + "sunderland": 18851, + "sundown": 44438, + "sune": 41096, + "sunflower": 21559, + "sunflowers": 39809, + "sung": 16903, + "sung": 6047, + "sunglasses": 12906, + "suni": 17663, + "suni": 47010, + "sunil": 32861, + "sunite": 21382, + "sunited": 35276, + "sunk": 37534, + "sunken": 43473, + "sunlight": 17996, + "sunni": 44315, + "sunny": 15632, + "sunny": 5438, + "sunrise": 5610, + "suns": 18322, + "sunscreen": 29355, + "sunset": 37880, + "sunset": 3424, + 
"sunsets": 17721, + "sunshine": 32761, + "sunshine": 5385, + "suny": 41308, + "sup": 19078, + "sup": 8249, + "supdates": 24177, + "super": 1642, + "super": 1994, + "superb": 8930, + "superbike": 45709, + "superbowl": 47461, + "superbowl": 16467, + "supercar": 27021, + "supercars": 32185, + "supercell": 43227, + "supercharged": 47479, + "supere": 46831, + "superfood": 41715, + "supergirl": 25771, + "superhero": 14049, + "superheroes": 23334, + "superint": 17615, + "superintendent": 19020, + "superior": 13205, + "superjunior": 40475, + "superleague": 45539, + "superman": 11237, + "supermarket": 19897, + "supermarkets": 45106, + "supermodel": 41963, + "supermoon": 36571, + "supernatural": 15484, + "supernova": 39843, + "superrugby": 48717, + "supersonic": 42019, + "supersport": 46319, + "superst": 38202, + "superstar": 32551, + "superstar": 10472, + "superstars": 25797, + "supervis": 12709, + "supervised": 41316, + "supervision": 36234, + "supervisor": 20366, + "supervisors": 37958, + "superyacht": 42714, + "supp": 1023, + "supper": 15727, + "supple": 31431, + "supplement": 19924, + "supplements": 21265, + "supplied": 24106, + "supplier": 18043, + "suppliers": 24196, + "supplies": 9384, + "supply": 25074, + "supply": 6389, + "supplychain": 31224, + "supplying": 32739, + "suppo": 6941, + "suppor": 2104, + "support": 12062, + "support": 1425, + "supported": 8038, + "supporter": 12992, + "supporters": 7403, + "supportindiefilm": 43976, + "supporting": 3976, + "supportive": 18313, + "supportlocal": 43852, + "supports": 8336, + "supportsmall": 30941, + "supportsmallstreamers": 36097, + "suppose": 18924, + "supposed": 9119, + "supposedly": 32302, + "suppre": 20542, + "suppression": 36508, + "supra": 48485, + "supre": 5875, + "supremac": 28643, + "supremacist": 39005, + "supremacy": 28913, + "supreme": 35222, + "supreme": 7468, + "supt": 23625, + "sur": 1090, + "sur": 7123, + "sura": 33412, + "sura": 49125, + "surabaya": 45227, + "surance": 22184, + "surat": 30201, + "sure": 14320, + "sure": 1650, + "sured": 36869, + "surely": 11409, + "sures": 12725, + "suresh": 32118, + "suresh": 31464, + "sureshpp": 41924, + "sureshpprabhu": 42050, + "surf": 10176, + "surf": 10322, + "surface": 7744, + "surfaces": 20746, + "surfer": 24925, + "surfers": 34842, + "surfing": 15762, + "surg": 13045, + "surge": 17457, + "surgeon": 16039, + "surgeons": 26000, + "surger": 5122, + "surgeries": 34940, + "surgery": 5344, + "surgical": 16386, + "suri": 14130, + "suri": 33952, + "suring": 16817, + "suriya": 17832, + "surpass": 45494, + "surpassed": 25648, + "surplus": 29413, + "surpri": 3244, + "surprise": 5099, + "surprised": 8949, + "surprises": 16920, + "surprising": 14964, + "surprisingly": 17367, + "surreal": 18408, + "surrealism": 41773, + "surrender": 20964, + "surrendered": 44601, + "surrey": 26489, + "surrey": 14315, + "surro": 47499, + "surroun": 8250, + "surround": 26543, + "surround": 22999, + "surrounded": 13589, + "surrounding": 12544, + "surroundings": 26915, + "surrounds": 39012, + "suru": 49240, + "surve": 8952, + "surveill": 15408, + "surveillance": 15578, + "survey": 45914, + "survey": 6809, + "surveying": 33085, + "surveys": 25096, + "survi": 3440, + "surviv": 12922, + "survival": 10172, + "survive": 10431, + "survived": 13483, + "survives": 30927, + "surviving": 18609, + "survivor": 31934, + "survivor": 10944, + "survivors": 13711, + "surya": 37767, + "sus": 8091, + "sus": 3036, + "susa": 20546, + "susan": 19922, + "susan": 10168, + "suscep": 44270, + "sush": 22298, + "sushi": 11729, + "sushmaswar": 
48200, + "susie": 32284, + "susp": 7971, + "suspec": 10298, + "suspect": 9065, + "suspected": 15579, + "suspects": 18265, + "suspen": 10578, + "suspend": 41007, + "suspended": 13126, + "suspends": 39535, + "suspense": 21556, + "suspension": 15417, + "suspici": 25714, + "suspicion": 34910, + "suspicious": 19862, + "sussex": 31244, + "sussex": 13266, + "sustain": 4644, + "sustain": 28156, + "sustainability": 9635, + "sustainable": 23645, + "sustainable": 7078, + "sustained": 22699, + "sustaining": 44418, + "sut": 23984, + "sut": 28956, + "sutherland": 27592, + "sutton": 39359, + "sutton": 18564, + "suv": 15985, + "suz": 9957, + "suzanne": 24617, + "suzu": 36289, + "suzuki": 16892, + "suzy": 26552, + "sv": 6508, + "sv": 17083, + "svc": 45065, + "sve": 47637, + "sven": 37786, + "sven": 45183, + "sver": 45923, + "sville": 44580, + "sville": 6741, + "svp": 28465, + "svt": 42014, + "svu": 32123, + "sw": 1220, + "sw": 4457, + "swa": 4707, + "swa": 31916, + "swach": 20862, + "swachhb": 31898, + "swachhbharat": 36927, + "swag": 8852, + "swag": 8177, + "swagg": 47702, + "swagger": 35797, + "swain": 43226, + "swal": 13433, + "swallow": 28979, + "swallowed": 46956, + "swallows": 45124, + "swam": 42539, + "swami": 25021, + "swamp": 41953, + "swamp": 16595, + "swamy": 28445, + "swan": 8215, + "swan": 12530, + "swana": 24699, + "swans": 19516, + "swansea": 16567, + "swanson": 34797, + "swap": 15234, + "swapped": 39077, + "swapping": 44702, + "swaps": 49242, + "swar": 11680, + "swarm": 31577, + "swarovski": 28515, + "swat": 32547, + "swat": 26482, + "swatch": 48053, + "sway": 26443, + "sway": 26617, + "swc": 42231, + "swe": 2350, + "swe": 38070, + "swear": 7406, + "swearing": 32627, + "sweat": 10282, + "sweat": 12663, + "sweater": 11455, + "sweaters": 31303, + "sweating": 33215, + "sweats": 39321, + "sweatshirt": 22442, + "sweaty": 28419, + "sweden": 8760, + "swedish": 11585, + "swee": 1812, + "sweek": 30017, + "sweeney": 27286, + "sweep": 23220, + "sweep": 13669, + "sweeping": 25719, + "sweeps": 26887, + "sweepstakes": 25992, + "sweet": 10957, + "sweet": 2418, + "sweetened": 45577, + "sweeter": 32873, + "sweetest": 15180, + "sweethe": 16316, + "sweetheart": 18079, + "sweetie": 24450, + "sweetness": 29713, + "sweets": 18045, + "swel": 48470, + "swell": 35538, + "swell": 21490, + "swelling": 46578, + "swept": 23311, + "swer": 30514, + "swfc": 30227, + "swfl": 46607, + "swi": 3881, + "swi": 45223, + "swick": 17159, + "swif": 28548, + "swift": 34843, + "swift": 8229, + "swild": 33909, + "swild": 38696, + "swildlife": 46818, + "swim": 4928, + "swim": 7681, + "swimmer": 25475, + "swimmers": 27776, + "swimming": 7411, + "swims": 46798, + "swimsuit": 25504, + "swimwear": 31889, + "swin": 14554, + "swin": 40798, + "swindon": 29540, + "swine": 31166, + "swing": 25292, + "swing": 7429, + "swinging": 26760, + "swings": 29141, + "swipe": 31828, + "swire": 42753, + "swirl": 35795, + "swis": 23611, + "swish": 38571, + "swiss": 37917, + "swiss": 9287, + "swit": 3726, + "switch": 22480, + "switch": 5893, + "switched": 22869, + "switches": 33569, + "switching": 21155, + "swith": 17299, + "switzer": 9835, + "switzerland": 9912, + "swivel": 48256, + "swo": 38673, + "swol": 29575, + "swollen": 36129, + "swoo": 29744, + "swood": 24158, + "swoon": 37028, + "swoop": 45661, + "sword": 33294, + "sword": 11356, + "swords": 27181, + "swork": 42722, + "sworld": 33305, + "sworn": 21130, + "sworth": 13322, + "swt": 38878, + "swx": 20597, + "sx": 9402, + "sx": 17806, + "sxsw": 13369, + "sy": 974, + "sy": 2126, + "sya": 35017, + "sycam": 
34911, + "sycamore": 43086, + "syd": 4525, + "syd": 22504, + "sydney": 15878, + "sydney": 5278, + "syed": 27624, + "syfy": 32047, + "sykes": 27287, + "syl": 6452, + "sylla": 41708, + "sylvania": 12011, + "sylve": 28369, + "sylvester": 37214, + "sylvia": 25670, + "sym": 3645, + "sym": 40327, + "symb": 22987, + "symbol": 13085, + "symboli": 22019, + "symbolic": 33177, + "symbolism": 44679, + "symbols": 25476, + "symmetry": 31427, + "symp": 11468, + "sympathi": 47493, + "sympathy": 32477, + "symph": 9544, + "symphonic": 42639, + "symphony": 11180, + "sympo": 9730, + "symposium": 9971, + "symptom": 47799, + "symptoms": 12956, + "syn": 3758, + "syn": 36090, + "synago": 30945, + "synagogue": 33518, + "sync": 20081, + "synchron": 23943, + "syndic": 21098, + "syndicate": 28779, + "syndrome": 10927, + "syner": 22283, + "synergy": 32012, + "syno": 31533, + "synod": 47712, + "synopsis": 47018, + "synth": 33841, + "synth": 24462, + "synthe": 22604, + "synthesi": 33565, + "synthesis": 21602, + "synthesizer": 44077, + "synthetic": 19917, + "syou": 26742, + "syour": 21718, + "syrac": 17279, + "syracuse": 19640, + "syrah": 45364, + "syri": 18917, + "syria": 5563, + "syrian": 47562, + "syrian": 10041, + "syrians": 41392, + "syrup": 16611, + "sys": 26726, + "syste": 1933, + "system": 47813, + "system": 2422, + "systematic": 28586, + "systemic": 33807, + "systems": 4828, + "sz": 13438, + "sz": 15879, + "sze": 44507, + "szn": 48092, + "são": 45911, + "sé": 37879, + "t": 83, + "t": 339, + "ta": 648, + "ta": 1397, + "taa": 43874, + "tab": 2648, + "tab": 14724, + "tabby": 36145, + "tabern": 48991, + "tability": 15770, + "table": 12108, + "table": 2175, + "tableau": 39723, + "tables": 7822, + "tablet": 12494, + "tabletop": 46843, + "tabletop": 25773, + "tablets": 20436, + "tably": 24440, + "taboo": 38400, + "tabs": 29163, + "tac": 3145, + "tac": 22653, + "tache": 39239, + "tack": 6339, + "tack": 34446, + "tackle": 10294, + "tackled": 47218, + "tackles": 18021, + "tackling": 19628, + "taco": 31924, + "taco": 12436, + "tacoma": 25397, + "tacos": 14090, + "tactic": 40377, + "tactical": 17137, + "tactics": 16410, + "tacular": 48985, + "tad": 15890, + "tad": 19860, + "tado": 40846, + "tae": 15257, + "tae": 15580, + "taehyung": 24642, + "taek": 30753, + "taekwondo": 39963, + "taemin": 30600, + "taeyang": 45802, + "taeyeon": 27389, + "taf": 29660, + "taft": 42141, + "tag": 3456, + "tag": 3640, + "tage": 2669, + "tages": 39902, + "tagged": 12969, + "tagging": 25138, + "tagne": 47467, + "tags": 11606, + "tah": 14822, + "tah": 7090, + "tahit": 45385, + "tahoe": 26140, + "tai": 6511, + "tai": 13040, + "taiji": 30185, + "tail": 7156, + "tail": 4132, + "tailed": 20626, + "tailgate": 23168, + "tailgating": 42625, + "tailo": 27230, + "tailor": 29870, + "tailored": 28275, + "tailoring": 46357, + "tails": 16066, + "tain": 2841, + "tain": 1908, + "taine": 21214, + "taine": 32299, + "tained": 10212, + "taining": 7565, + "tainment": 30063, + "tains": 3952, + "tainted": 47211, + "taipei": 24356, + "tair": 29143, + "tairp": 43707, + "tait": 45325, + "taiwan": 36319, + "taiwan": 12626, + "taiwanese": 41416, + "taj": 28937, + "taj": 24805, + "taji": 46358, + "tak": 15070, + "tak": 14458, + "taka": 24070, + "taka": 40968, + "take": 5052, + "take": 1172, + "takeaway": 25737, + "takeaways": 32080, + "takeme": 41748, + "taken": 2807, + "takeoff": 32789, + "takeover": 11863, + "taker": 17939, + "takers": 30775, + "takes": 2633, + "takin": 30890, + "taking": 2019, + "taku": 48168, + "tal": 976, + "tal": 2066, + "tala": 29845, + "talaga": 
35349, + "talbot": 30585, + "tale": 33971, + "tale": 7798, + "talent": 30435, + "talent": 5114, + "talented": 5331, + "talents": 16136, + "tales": 9469, + "tali": 12122, + "tali": 45406, + "taliban": 20788, + "talis": 36480, + "tality": 15631, + "talk": 12462, + "talk": 1841, + "talked": 10153, + "talkin": 26040, + "talking": 31463, + "talking": 2578, + "talks": 3237, + "tall": 11664, + "tall": 7771, + "talla": 21528, + "tallade": 44220, + "tallahassee": 37832, + "taller": 23470, + "tallest": 19774, + "tallinn": 45079, + "tally": 16323, + "talon": 47897, + "tam": 2661, + "tam": 12246, + "tama": 45424, + "tamanna": 48055, + "tamar": 22901, + "tamara": 35697, + "tame": 38557, + "tame": 32778, + "tamed": 40575, + "tami": 39429, + "tamil": 23046, + "tamil": 14033, + "tamilnadu": 32371, + "tamine": 42566, + "tammy": 28396, + "tampa": 10906, + "tampab": 37852, + "tamu": 34105, + "tan": 2123, + "tan": 5039, + "tana": 21396, + "tand": 20244, + "tandem": 33756, + "tane": 13344, + "tane": 24923, + "taneous": 22275, + "taneously": 24422, + "tang": 10425, + "tang": 20794, + "tanger": 31844, + "tangerine": 42045, + "tangible": 44823, + "tangle": 36568, + "tangled": 33587, + "tango": 24089, + "tani": 31374, + "tani": 32985, + "tania": 45369, + "tank": 29858, + "tank": 6172, + "tanker": 25020, + "tanks": 14223, + "tann": 19174, + "tanner": 22001, + "tanning": 27985, + "tans": 27332, + "tant": 41383, + "tant": 41695, + "tante": 48262, + "tanto": 45685, + "tany": 34410, + "tanya": 26800, + "tanz": 47399, + "tanzania": 15711, + "tao": 29084, + "tao": 18923, + "tap": 17923, + "tap": 7888, + "tapas": 27361, + "tape": 18332, + "tape": 5749, + "taped": 33219, + "tapes": 17903, + "tapestry": 33525, + "taping": 24355, + "tapp": 27644, + "tapp": 27764, + "tapped": 26649, + "tapping": 27882, + "tapro": 34415, + "taproom": 40266, + "taps": 23267, + "tar": 2002, + "tar": 6977, + "tara": 15264, + "tarak": 37813, + "taran": 32370, + "tarantino": 41180, + "tarde": 48670, + "tardis": 35410, + "tares": 34587, + "targe": 9620, + "target": 38556, + "target": 5400, + "targeted": 14968, + "targeting": 15818, + "targets": 12468, + "tari": 4238, + "tari": 38012, + "tarian": 11762, + "tarians": 42789, + "taries": 47291, + "tariff": 40220, + "tariffs": 28335, + "tariq": 42526, + "tarmac": 44294, + "taro": 26264, + "tarot": 23702, + "tart": 16707, + "tart": 14120, + "tartan": 35064, + "tarts": 29799, + "tary": 31729, + "tary": 5065, + "tarzan": 45463, + "tas": 6538, + "tas": 10163, + "tash": 35272, + "tasha": 44967, + "task": 39189, + "task": 10549, + "tasks": 19453, + "tasmania": 22429, + "tasmanian": 45102, + "tassel": 49276, + "tast": 10839, + "taste": 14314, + "taste": 5219, + "tasted": 22827, + "tasteof": 38097, + "taster": 29743, + "tastes": 13736, + "tastic": 21337, + "tasting": 7656, + "tastings": 49273, + "tasty": 43390, + "tasty": 8568, + "tat": 2652, + "tat": 21592, + "tata": 19300, + "tate": 44476, + "tate": 13295, + "tath": 27566, + "tati": 31433, + "tatiana": 48837, + "tation": 5280, + "tations": 32324, + "tator": 18791, + "tators": 37206, + "tats": 44557, + "tatt": 9232, + "tatted": 41605, + "tattoo": 15980, + "tattoo": 6325, + "tattooed": 28541, + "tattoos": 14900, + "tatum": 26103, + "tau": 6620, + "tau": 20510, + "taught": 9306, + "taun": 23910, + "taunton": 40681, + "taurus": 32881, + "taver": 37776, + "tavern": 18644, + "taw": 33868, + "taw": 40289, + "tawa": 29035, + "tawards": 14351, + "tax": 4581, + "tax": 3879, + "taxation": 36847, + "taxes": 11462, + "taxi": 25160, + "taxi": 11380, + "taxider": 47420, + 
"taxis": 34009, + "taxpay": 17986, + "taxpayer": 30978, + "taxpayers": 25503, + "tay": 6542, + "tay": 15073, + "taya": 38484, + "tayl": 3913, + "taylor": 9044, + "taylor": 3961, + "taylorswift": 18936, + "tayo": 33941, + "taz": 41475, + "taz": 31870, + "tb": 1990, + "tb": 7490, + "tba": 34363, + "tball": 8390, + "tball": 1467, + "tbc": 31807, + "tbd": 45548, + "tbh": 13238, + "tbi": 45868, + "tbl": 42962, + "tbli": 43664, + "tblightning": 44178, + "tbo": 34255, + "tbr": 46643, + "tbs": 37368, + "tbt": 2950, + "tc": 6820, + "tc": 5454, + "tca": 35116, + "tch": 10744, + "tch": 4048, + "tches": 42001, + "tcm": 21501, + "tcm": 26588, + "tcmparty": 24338, + "tcot": 8995, + "tcs": 39107, + "tcu": 26791, + "td": 20578, + "td": 3192, + "tdf": 21844, + "tdi": 45621, + "tdp": 47009, + "tds": 20238, + "tdsb": 29836, + "te": 600, + "te": 756, + "tea": 41053, + "tea": 3274, + "teach": 2043, + "teach": 6865, + "teacher": 18051, + "teacher": 4008, + "teachers": 5069, + "teaches": 17110, + "teaching": 5141, + "teachings": 32119, + "teal": 22821, + "team": 2085, + "team": 1027, + "teamcanada": 46636, + "teamed": 20590, + "teamgb": 40971, + "teaming": 24392, + "teammate": 17900, + "teammates": 13921, + "teams": 3891, + "teamsisd": 34703, + "teamusa": 28625, + "teamwork": 14657, + "teaparty": 33065, + "teapo": 35745, + "teapot": 40749, + "tear": 15802, + "tear": 11862, + "tearful": 46873, + "tearing": 24785, + "tears": 7688, + "teas": 23003, + "teas": 29314, + "tease": 25163, + "teased": 49122, + "teaser": 8982, + "teasers": 48990, + "teases": 28509, + "teasing": 36507, + "teat": 26376, + "teatime": 48948, + "teatro": 35756, + "teau": 24931, + "tebow": 37797, + "tec": 17381, + "tec": 11612, + "tech": 1782, + "tech": 2061, + "techcrunch": 42110, + "techn": 6252, + "technews": 31787, + "technic": 16639, + "technic": 37666, + "technical": 49231, + "technical": 7582, + "technically": 23180, + "technician": 22540, + "technicians": 35513, + "techno": 2599, + "techno": 17564, + "technological": 23068, + "technologies": 10040, + "technology": 3089, + "techs": 41353, + "ted": 4841, + "ted": 775, + "tedcruz": 27517, + "teddy": 25758, + "teddy": 11798, + "tedly": 8539, + "tedu": 42517, + "tedx": 17950, + "tedx": 41504, + "tee": 12676, + "tee": 3385, + "teed": 13692, + "teen": 5398, + "teen": 4697, + "teenage": 14069, + "teenager": 19338, + "teenagers": 25989, + "teenchoice": 28203, + "teens": 12375, + "teenth": 20249, + "teenwolf": 40067, + "teeny": 41622, + "teer": 48648, + "tees": 9641, + "teessi": 43295, + "teeth": 8225, + "tega": 29508, + "tegr": 39801, + "teh": 18720, + "teh": 29601, + "tehran": 26399, + "tein": 33223, + "tej": 46724, + "tek": 17489, + "tek": 18294, + "tekken": 29843, + "tel": 4978, + "tel": 2226, + "telang": 23469, + "telangana": 26386, + "tele": 3103, + "tele": 32851, + "telecom": 21057, + "telecommunications": 39900, + "telegram": 26780, + "telegraph": 14713, + "telephone": 17243, + "telescope": 19037, + "telethon": 49266, + "televised": 39470, + "television": 8608, + "telford": 38323, + "tell": 16069, + "tell": 2330, + "teller": 20415, + "tellers": 42707, + "telling": 5507, + "tells": 5217, + "tellu": 42511, + "telly": 31475, + "tels": 43607, + "telugu": 22927, + "tely": 5630, + "tem": 2404, + "tem": 17536, + "tema": 45881, + "teme": 43378, + "temp": 2684, + "temp": 11097, + "tempe": 36723, + "temper": 5981, + "temper": 35521, + "temperature": 9543, + "temperatures": 11575, + "tempered": 40521, + "tempest": 36053, + "templ": 16679, + "template": 18591, + "templates": 30498, + "temple": 21841, 
+ "temple": 5620, + "temples": 24024, + "tempo": 19625, + "tempor": 4858, + "temporal": 43656, + "temporarily": 23189, + "temporary": 6513, + "temps": 11668, + "tempt": 28460, + "temptation": 30118, + "tempted": 26226, + "tempting": 34876, + "ten": 1149, + "ten": 2581, + "tenant": 16954, + "tenants": 26023, + "tenay": 45384, + "tenberg": 31329, + "tend": 17630, + "tend": 21252, + "tendency": 47277, + "tender": 23020, + "tender": 9838, + "tenderloin": 42750, + "tenders": 44741, + "tending": 35084, + "tendon": 48459, + "tends": 39962, + "tene": 24868, + "tened": 13682, + "tener": 29054, + "teneri": 28000, + "tenerife": 29401, + "teners": 41307, + "teness": 18018, + "teng": 34016, + "teng": 28474, + "tennant": 29310, + "tennes": 9514, + "tennessee": 10053, + "tennis": 31504, + "tennis": 5298, + "tenor": 30521, + "tens": 14062, + "tense": 23518, + "tension": 15221, + "tensions": 24224, + "tenstein": 49139, + "tent": 18505, + "tent": 10782, + "tentative": 48238, + "tenth": 27483, + "tention": 12191, + "tents": 30730, + "tenure": 30739, + "teo": 18665, + "tep": 31806, + "tequ": 17502, + "tequila": 18510, + "ter": 704, + "ter": 652, + "tera": 15155, + "teras": 44830, + "tere": 11329, + "tered": 49272, + "tered": 4389, + "terence": 33806, + "teresa": 19081, + "teri": 30917, + "teria": 22685, + "terie": 42276, + "tering": 7929, + "term": 40991, + "term": 4780, + "termin": 4766, + "terminal": 11816, + "terminals": 44091, + "terminator": 29609, + "terminology": 48896, + "terms": 8663, + "tern": 41572, + "tern": 12959, + "terns": 25251, + "tero": 20727, + "tero": 24697, + "terps": 41471, + "terr": 3921, + "terra": 22366, + "terra": 18816, + "terrac": 28549, + "terrace": 13820, + "terraces": 47508, + "terracotta": 45123, + "terrain": 20184, + "terran": 43726, + "terre": 33888, + "terre": 27537, + "terrell": 39494, + "terrence": 38746, + "terrestrial": 46299, + "terri": 4504, + "terri": 36722, + "terrible": 9741, + "terribly": 34558, + "terrier": 14455, + "terriers": 47047, + "terrific": 13837, + "terrified": 28204, + "terrifying": 18526, + "territ": 10720, + "territorial": 39163, + "territories": 32846, + "territory": 13936, + "terror": 9596, + "terror": 9327, + "terrori": 6836, + "terrorism": 10583, + "terrorist": 10575, + "terrorists": 12835, + "terry": 19378, + "terry": 8561, + "ters": 24102, + "ters": 1737, + "terti": 48386, + "tery": 4184, + "tes": 8019, + "tes": 3609, + "tesco": 15434, + "tese": 33320, + "tesla": 12254, + "tess": 21807, + "tess": 20840, + "tessa": 32063, + "test": 7738, + "test": 1628, + "testam": 23477, + "testament": 24609, + "tested": 10576, + "tester": 32707, + "testi": 18373, + "testic": 42364, + "testify": 33088, + "testifying": 46347, + "testim": 12553, + "testimonial": 28834, + "testimony": 18672, + "testing": 4967, + "testo": 42428, + "testosterone": 45168, + "tests": 8715, + "tet": 40468, + "tet": 13275, + "tetra": 40902, + "tetris": 45934, + "teu": 47152, + "teuk": 39979, + "teur": 27120, + "tex": 2056, + "tex": 11728, + "texan": 35287, + "texan": 38386, + "texans": 17580, + "texanscheer": 43717, + "texas": 15713, + "texas": 3403, + "texaste": 46469, + "text": 18169, + "text": 4160, + "textbook": 25952, + "textbooks": 44041, + "texted": 29004, + "textile": 19789, + "textiles": 24326, + "texting": 18600, + "texts": 12767, + "texture": 16505, + "textured": 32168, + "textures": 28063, + "tey": 32395, + "tez": 22664, + "tf": 18828, + "tf": 5001, + "tfc": 30186, + "tfl": 29918, + "tford": 22493, + "tful": 17108, + "tfw": 16741, + "tg": 7665, + "tg": 11981, + "tgif": 14483, 
+ "th": 513, + "th": 640, + "tha": 18470, + "tha": 4715, + "thab": 38219, + "thad": 48339, + "thai": 28054, + "thai": 8825, + "thail": 7258, + "thailand": 7469, + "thak": 22801, + "thakur": 38427, + "thal": 7967, + "thal": 12323, + "thala": 17784, + "thalai": 25206, + "thalaivar": 44918, + "thalap": 39789, + "thalapathy": 45405, + "thalapathy": 23324, + "thall": 36007, + "tham": 11761, + "tham": 8896, + "thames": 43472, + "thames": 15321, + "than": 792, + "than": 1126, + "thand": 44465, + "thane": 21463, + "thang": 24870, + "thani": 31322, + "thank": 2790, + "thank": 1144, + "thanked": 32079, + "thankful": 38839, + "thankful": 6217, + "thankfully": 22089, + "thanking": 21989, + "thanks": 5672, + "thanks": 1085, + "thanksgiving": 45732, + "thanksgiving": 6167, + "thanku": 45710, + "thankyou": 18050, + "thankyou": 9911, + "thanniversary": 35564, + "thanos": 36709, + "thanx": 25095, + "thar": 14396, + "thar": 38843, + "thard": 43474, + "that": 6303, + "that": 682, + "thatcher": 32496, + "thats": 44636, + "thats": 9254, + "thaw": 26081, + "thaw": 47229, + "thbewithyou": 41067, + "thc": 20091, + "thcentury": 49111, + "thd": 28219, + "thday": 37801, + "the": 599, + "the": 518, + "thea": 15935, + "thea": 25429, + "thead": 25259, + "theal": 45728, + "thealth": 31398, + "thear": 43283, + "theart": 44678, + "theast": 8378, + "theastern": 17877, + "theat": 2263, + "theater": 39438, + "theater": 6128, + "theaters": 14689, + "theatre": 19857, + "theatre": 3292, + "theatres": 21680, + "theatrical": 26833, + "theband": 27695, + "thebeatles": 35645, + "thebest": 40883, + "thebest": 25856, + "thebig": 24732, + "theblack": 47718, + "thec": 48659, + "thed": 31405, + "thedaily": 33550, + "theday": 4408, + "thedream": 39417, + "thee": 44475, + "thee": 15108, + "theeconomist": 44518, + "theellenshow": 35342, + "thefilm": 31665, + "theflash": 25434, + "theforce": 40002, + "theforceawakens": 48033, + "theft": 13286, + "thefuture": 34287, + "thegame": 24428, + "thegood": 28594, + "thegreat": 28721, + "thei": 44522, + "their": 911, + "theirs": 29297, + "thel": 5403, + "thelast": 23495, + "thelastjedi": 47992, + "theless": 27712, + "theli": 15277, + "thelittle": 46872, + "thelo": 47036, + "thelove": 40668, + "thelove": 43200, + "them": 5435, + "them": 1180, + "themasters": 48378, + "theme": 38524, + "theme": 5849, + "themed": 10126, + "themes": 17849, + "themet": 48183, + "themovie": 27062, + "themselves": 6503, + "then": 5929, + "then": 1594, + "thenburg": 45209, + "thene": 17012, + "thenew": 24212, + "thenext": 47881, + "thenight": 43336, + "theno": 37172, + "thenorth": 34338, + "theo": 17043, + "theo": 18084, + "theod": 26653, + "theodore": 30743, + "theological": 41162, + "theology": 24095, + "theon": 34653, + "theone": 46231, + "theopen": 41438, + "theore": 22690, + "theoretical": 35585, + "theori": 34804, + "theories": 23937, + "theory": 7143, + "thepeople": 33597, + "thepersonal": 29981, + "thepersonalnetwork": 30016, + "thephoto": 18303, + "thephotohour": 18607, + "ther": 1160, + "ther": 743, + "therap": 4499, + "therapeu": 19332, + "therapeutic": 23240, + "therapeutics": 49101, + "therapies": 30179, + "therapist": 20608, + "therapists": 34763, + "therapper": 49340, + "therapy": 5257, + "there": 5283, + "there": 997, + "thereal": 8074, + "thereal": 41140, + "thereby": 43308, + "thered": 10208, + "therefore": 16865, + "theres": 18494, + "theresa": 14126, + "therese": 47996, + "theresistance": 22845, + "theri": 28967, + "theri": 45297, + "therine": 26807, + "therine": 9239, + "thering": 7891, + "therland": 
25351, + "thermal": 13689, + "thermo": 22303, + "thermom": 31138, + "thermometer": 38172, + "thermost": 42391, + "thern": 10919, + "thern": 3137, + "thero": 13165, + "theroad": 29807, + "therock": 30036, + "theroy": 38146, + "thers": 1959, + "thes": 40556, + "thes": 6460, + "thescript": 47061, + "these": 40366, + "these": 1071, + "theses": 39388, + "thesimpsons": 45513, + "thesims": 34192, + "thesis": 10673, + "thessal": 41491, + "thessaloni": 41753, + "thest": 35343, + "thesun": 45617, + "theta": 27694, + "thetic": 7954, + "thetimes": 36039, + "thevamp": 33701, + "thevoice": 47206, + "thevoice": 30258, + "thewalkingdead": 18087, + "thewanted": 43008, + "theworld": 44988, + "theworld": 17475, + "thex": 35990, + "they": 15174, + "they": 889, + "theyre": 28266, + "thfc": 17729, + "thi": 2362, + "thi": 9111, + "thia": 17943, + "thiago": 44537, + "thian": 23214, + "thians": 28187, + "thibau": 48351, + "thic": 26107, + "thic": 11794, + "thick": 18417, + "thick": 11006, + "thicker": 43302, + "thickness": 40754, + "thief": 18508, + "thier": 25595, + "thierry": 32929, + "thieves": 17899, + "thigh": 47124, + "thigh": 22877, + "thighs": 30847, + "thik": 20512, + "thika": 44619, + "thill": 31266, + "thim": 42331, + "thin": 2178, + "thin": 7847, + "thine": 47192, + "thing": 7499, + "thing": 946, + "things": 30670, + "things": 1739, + "thingsto": 43924, + "thingy": 36888, + "think": 9820, + "think": 1331, + "thinkbig": 26015, + "thinkbigsundaywithmarsha": 26666, + "thinker": 34577, + "thinkers": 32779, + "thinkin": 34443, + "thinking": 3291, + "thinks": 6109, + "thinner": 47247, + "thir": 6030, + "third": 32102, + "third": 3981, + "thirds": 42582, + "thirst": 23563, + "thirsty": 39731, + "thirsty": 17521, + "thirteen": 34209, + "thirty": 20813, + "thiru": 43292, + "this": 4340, + "this": 589, + "thisday": 6532, + "thisdayin": 33641, + "thisdayinhistory": 46913, + "thisi": 7299, + "thisis": 14887, + "thismorning": 36245, + "thistle": 29039, + "thistory": 28904, + "thium": 21804, + "thletics": 17765, + "thm": 10407, + "thman": 30079, + "thms": 19874, + "thn": 44155, + "thn": 45587, + "thnx": 25480, + "tho": 1325, + "tho": 5025, + "thof": 18943, + "thofjuly": 21613, + "thol": 29319, + "thole": 31029, + "tholes": 42465, + "thology": 9881, + "thom": 2585, + "thom": 24094, + "thomas": 12574, + "thomas": 3888, + "thome": 21289, + "thomp": 37274, + "thompson": 42181, + "thompson": 8535, + "thomson": 24151, + "thon": 38776, + "thon": 8924, + "thong": 37058, + "thood": 15623, + "thor": 4130, + "thor": 13691, + "thora": 46866, + "thorn": 12957, + "thorn": 18466, + "thorne": 18025, + "thorns": 33650, + "thornton": 23592, + "thorough": 15294, + "thorough": 34788, + "thoroughbred": 43248, + "thoroughly": 19750, + "thorpe": 18099, + "thos": 41965, + "those": 1753, + "thot": 33736, + "thou": 1513, + "thou": 17781, + "though": 2846, + "thought": 23948, + "thought": 2449, + "thoughtful": 19592, + "thoughts": 3618, + "thour": 27125, + "thousand": 9344, + "thousands": 7089, + "thouse": 40318, + "thouse": 7819, + "thoven": 23078, + "thr": 1111, + "thr": 19138, + "thra": 17761, + "thra": 32797, + "thrash": 38262, + "thre": 1607, + "thread": 31108, + "thread": 8815, + "threads": 24957, + "threat": 7527, + "threat": 7212, + "threaten": 26097, + "threatened": 16391, + "threatening": 16400, + "threatens": 20555, + "threats": 12766, + "three": 21615, + "three": 2097, + "thren": 41776, + "thresh": 29779, + "threshold": 33791, + "threw": 12746, + "thri": 8713, + "thrift": 27779, + "thrill": 21023, + "thrilled": 7879, + "thriller": 
9653, + "thrilling": 20101, + "thrills": 39829, + "thrive": 17669, + "thriving": 22677, + "thro": 2101, + "thro": 28624, + "throat": 16371, + "thrombo": 47585, + "throne": 15999, + "thrones": 8072, + "throp": 34939, + "throttle": 37139, + "through": 6091, + "through": 1417, + "throughout": 6721, + "throughs": 48278, + "throw": 3315, + "throw": 6293, + "throwback": 6001, + "throwback": 5058, + "throwbackthursday": 6326, + "thrower": 40199, + "throwing": 9734, + "thrown": 15079, + "throws": 14723, + "thru": 23856, + "thru": 6162, + "thrush": 46133, + "thrust": 40202, + "ths": 2079, + "tht": 23554, + "thu": 3837, + "thu": 14153, + "thub": 25660, + "thug": 37212, + "thug": 18137, + "thugs": 27686, + "thul": 28368, + "thulhu": 37560, + "thum": 14679, + "thumb": 19514, + "thumb": 18674, + "thumbnail": 32365, + "thumbs": 17599, + "thun": 32267, + "thunder": 6161, + "thunder": 8951, + "thunderbird": 45131, + "thunderbirds": 44286, + "thunderbolt": 43596, + "thunderstorm": 12005, + "thunderstorms": 19525, + "thunt": 46763, + "thur": 1837, + "thur": 21704, + "thurman": 41291, + "thurs": 9908, + "thursday": 11218, + "thursday": 2221, + "thursdaymotivation": 39375, + "thursdays": 21444, + "thursdaythoughts": 14866, + "thurst": 33970, + "thus": 12457, + "thusi": 9488, + "thwaite": 48469, + "thweeksary": 30871, + "thx": 5913, + "thy": 7804, + "thy": 3362, + "thyme": 29805, + "thyro": 25174, + "thyroid": 32558, + "ti": 555, + "ti": 2605, + "tia": 6709, + "tial": 2826, + "tially": 14503, + "tian": 23011, + "tian": 8125, + "tians": 35182, + "tiara": 38322, + "tib": 47868, + "tibet": 19927, + "tibet": 22234, + "tibetan": 24057, + "tible": 11453, + "tic": 890, + "tic": 1550, + "tica": 9669, + "tical": 34191, + "tical": 4342, + "tically": 13375, + "ticals": 30861, + "tice": 3122, + "tich": 48769, + "tician": 43358, + "ticism": 26491, + "tick": 24640, + "tick": 15617, + "ticket": 25740, + "ticket": 4500, + "ticketing": 44432, + "tickets": 2015, + "ticking": 35842, + "tickle": 42999, + "ticks": 40269, + "tico": 17670, + "ticon": 45996, + "tics": 2419, + "ticul": 15538, + "ticus": 44277, + "tid": 26002, + "tid": 23727, + "tidal": 21949, + "tide": 15698, + "tide": 9105, + "tides": 25524, + "tidy": 23858, + "tie": 14072, + "tie": 3422, + "tied": 9889, + "tiem": 34762, + "tien": 47538, + "tiene": 43438, + "tier": 14390, + "tier": 6598, + "tierney": 45693, + "tiers": 24604, + "ties": 25556, + "ties": 2499, + "tiest": 18300, + "tiesto": 46367, + "tif": 23216, + "tiff": 11112, + "tiff": 20699, + "tiffany": 30467, + "tiffany": 14446, + "tification": 43923, + "tified": 40854, + "tiful": 29123, + "tify": 6677, + "tig": 31999, + "tiger": 11954, + "tiger": 6531, + "tigers": 6934, + "tigh": 31365, + "tight": 25763, + "tight": 9123, + "tighten": 46653, + "tighter": 48193, + "tightly": 37568, + "tights": 29581, + "tijuana": 45273, + "tik": 24986, + "tik": 32403, + "tiki": 30107, + "til": 6124, + "til": 1763, + "tile": 26217, + "tile": 8227, + "tiles": 10607, + "tility": 38180, + "till": 17462, + "till": 4267, + "tilla": 26063, + "tillerson": 47738, + "tilly": 41199, + "tilt": 23601, + "tim": 1292, + "tim": 3863, + "timate": 4754, + "timb": 26627, + "timber": 14441, + "timber": 16246, + "timberlake": 28274, + "timbers": 39911, + "timberwolves": 41190, + "time": 3764, + "time": 788, + "timed": 32727, + "timehop": 19944, + "timel": 23549, + "timelapse": 48154, + "timeless": 15558, + "timeline": 11492, + "timely": 19250, + "timeout": 41536, + "timer": 19725, + "timers": 44574, + "times": 26445, + "times": 1661, + "timesnow": 
45487, + "timesof": 32522, + "timesofindia": 44182, + "timetable": 31971, + "timeto": 29187, + "timing": 13624, + "timm": 22444, + "timmy": 33252, + "timo": 13390, + "timo": 33777, + "timothy": 42087, + "timothy": 18560, + "timp": 42166, + "tin": 1310, + "tin": 5420, + "tina": 9257, + "tinder": 24287, + "tine": 22341, + "ting": 7451, + "ting": 694, + "tinged": 44829, + "tings": 35332, + "tini": 26839, + "tink": 39278, + "tinker": 45272, + "tinker": 40910, + "tino": 20538, + "tins": 37359, + "tint": 40497, + "tinted": 42618, + "tiny": 21716, + "tiny": 5591, + "tio": 27562, + "tion": 2274, + "tion": 740, + "tional": 22460, + "tional": 2986, + "tionality": 24514, + "tionally": 12409, + "tionary": 8381, + "tione": 44318, + "tioned": 9083, + "tioning": 15528, + "tionist": 25732, + "tions": 1371, + "tious": 14255, + "tip": 15383, + "tip": 4623, + "tipoff": 44521, + "tipp": 32294, + "tipped": 31878, + "tipper": 38095, + "tipperary": 45612, + "tipping": 27827, + "tips": 3173, + "tipton": 48809, + "tiptuesday": 42112, + "tique": 37772, + "tir": 25467, + "tir": 38462, + "tire": 29128, + "tire": 9362, + "tired": 6533, + "tireless": 39835, + "tirelessly": 41548, + "tires": 15533, + "tiring": 42630, + "tiru": 36033, + "tis": 7839, + "tis": 7394, + "tise": 13745, + "tisgarh": 40538, + "tish": 45148, + "tish": 28784, + "tism": 27113, + "tiss": 28155, + "tissue": 15368, + "tissues": 32172, + "tist": 7902, + "tista": 25580, + "tists": 25944, + "tit": 1991, + "tit": 13202, + "tita": 40936, + "titan": 13496, + "titan": 15516, + "titanic": 20729, + "titanium": 24409, + "titans": 13066, + "titi": 17434, + "titi": 48504, + "title": 28033, + "title": 3644, + "titled": 9939, + "titles": 9780, + "tito": 26838, + "titus": 36102, + "tium": 21975, + "tiv": 1835, + "tiva": 41886, + "tive": 14640, + "tive": 1420, + "tively": 9883, + "tiveness": 20955, + "tives": 7570, + "tivity": 9859, + "tivo": 32162, + "tix": 5835, + "tiz": 19376, + "tj": 18890, + "tj": 18988, + "tk": 22344, + "tk": 20676, + "tko": 37347, + "tks": 38739, + "tl": 14325, + "tl": 8190, + "tland": 30697, + "tlap": 41976, + "tlc": 22047, + "tle": 39141, + "tle": 5825, + "tles": 39363, + "tless": 17427, + "tlot": 41080, + "tls": 47367, + "tly": 37483, + "tly": 1646, + "tm": 9430, + "tm": 7789, + "tman": 20796, + "tmc": 35263, + "tment": 26485, + "tml": 39445, + "tmltalk": 42260, + "tmnt": 32444, + "tmobile": 34901, + "tmr": 35906, + "tmrw": 16496, + "tms": 44496, + "tmund": 23801, + "tmw": 45827, + "tmz": 37248, + "tn": 3827, + "tn": 7248, + "tna": 21150, + "tnam": 8079, + "tner": 34922, + "tness": 35212, + "tney": 9523, + "tng": 35898, + "tnt": 20659, + "tnx": 38220, + "to": 580, + "to": 531, + "toa": 17916, + "toad": 26096, + "toast": 24654, + "toast": 10920, + "toasted": 23533, + "toaster": 39061, + "toasty": 44726, + "tob": 24260, + "tobac": 12611, + "tobacco": 13905, + "tobago": 39482, + "tobe": 17534, + "tobe": 28740, + "tober": 18162, + "tober": 2925, + "toberfest": 26249, + "tobi": 40335, + "tobi": 48374, + "tobias": 32464, + "tobin": 42466, + "toby": 29659, + "toby": 18333, + "toc": 41907, + "toc": 30643, + "tock": 25274, + "tod": 38239, + "tod": 33568, + "toda": 47141, + "todas": 36150, + "today": 11800, + "today": 721, + "todayin": 32957, + "todays": 13513, + "todayshow": 29739, + "todd": 10398, + "todd": 9951, + "toddler": 17772, + "toddlers": 36719, + "toddy": 38926, + "todo": 48857, + "todo": 23087, + "todos": 33355, + "toe": 47756, + "toe": 11344, + "toes": 16511, + "tof": 6659, + "toff": 27319, + "toffee": 34880, + "tofficial": 47953, + 
"tofthe": 23678, + "toftheday": 20566, + "tofu": 24692, + "tog": 45715, + "toge": 1903, + "together": 17858, + "together": 1952, + "togo": 26729, + "tography": 33968, + "toh": 26851, + "toi": 7472, + "toi": 26941, + "toid": 49124, + "toile": 43148, + "toilet": 11071, + "toilets": 24027, + "toire": 39534, + "tok": 16690, + "tok": 27010, + "token": 32634, + "token": 17134, + "tokens": 23562, + "tokyo": 35038, + "tokyo": 6667, + "tol": 4678, + "tol": 32962, + "told": 3527, + "tole": 15677, + "toledo": 19812, + "toler": 12150, + "tolerance": 20377, + "tolerant": 38536, + "tolerate": 35556, + "tolkien": 32989, + "toll": 44090, + "toll": 14155, + "tollywood": 42016, + "tology": 34799, + "tom": 999, + "tom": 2435, + "toma": 42360, + "toma": 44710, + "tomas": 35944, + "tomas": 27178, + "tomat": 12041, + "tomato": 9867, + "tomatoes": 13004, + "tomb": 37187, + "tomb": 15582, + "tombs": 48613, + "tombstone": 45729, + "tome": 24137, + "tome": 24283, + "tomi": 46290, + "tomlin": 46649, + "tomlinson": 17484, + "tommorow": 42871, + "tommy": 16573, + "tommy": 8876, + "tomo": 31223, + "tomo": 34434, + "tomor": 1277, + "tomorrow": 19728, + "tomorrow": 1293, + "tomorrowland": 34951, + "tomorrows": 32258, + "tomorrowspaper": 35005, + "tomorrowspaperstoday": 35190, + "tomp": 43544, + "tompkins": 49068, + "toms": 10545, + "tomy": 18730, + "ton": 838, + "ton": 917, + "tona": 13459, + "tone": 32366, + "tone": 8408, + "toned": 29426, + "toner": 40614, + "tones": 14744, + "tong": 21510, + "tonga": 37882, + "tongue": 44820, + "tongue": 13626, + "tongues": 39837, + "toni": 17766, + "toni": 17171, + "tonic": 17808, + "tonics": 34647, + "tonight": 1009, + "tonights": 23312, + "tonite": 13449, + "tonka": 42781, + "tonline": 45867, + "tonne": 42450, + "tonnes": 24813, + "tons": 7555, + "tony": 9150, + "tony": 4767, + "tonyawards": 46068, + "too": 1843, + "too": 1256, + "took": 2280, + "tool": 13718, + "tool": 5999, + "toolbox": 46599, + "toolkit": 29849, + "tools": 5771, + "toom": 27550, + "toon": 24664, + "toon": 19701, + "toonami": 48336, + "toons": 35345, + "toor": 42590, + "tooth": 15316, + "tooth": 12030, + "toothbrush": 36841, + "toothpaste": 37322, + "tooting": 42969, + "top": 5534, + "top": 1253, + "topaz": 46125, + "tope": 32149, + "tope": 42239, + "topeka": 46884, + "topia": 29618, + "topic": 8720, + "topical": 37464, + "topics": 11916, + "topless": 37415, + "topo": 23008, + "topoli": 30152, + "topp": 19529, + "topped": 12588, + "topper": 31780, + "toppers": 41651, + "topping": 21071, + "toppings": 47554, + "topps": 20201, + "tops": 8154, + "topshop": 40953, + "topus": 21495, + "tor": 937, + "tor": 1208, + "tora": 45147, + "torah": 37945, + "toral": 45282, + "torch": 31921, + "torch": 15820, + "tore": 38066, + "tore": 19385, + "tored": 38046, + "torg": 33214, + "tori": 17689, + "tori": 17539, + "toria": 23732, + "torial": 28029, + "torian": 48399, + "tories": 14193, + "torino": 29178, + "torio": 34235, + "torn": 8572, + "torn": 18023, + "tornad": 24676, + "tornado": 9062, + "tornadoes": 28254, + "toro": 17892, + "toron": 37407, + "toronto": 16866, + "toronto": 4514, + "torpe": 34093, + "torpedo": 46582, + "torquay": 45738, + "torque": 31940, + "torre": 39563, + "torre": 38009, + "torrent": 42317, + "torrential": 41158, + "torres": 16049, + "tors": 2546, + "tortilla": 32683, + "torto": 24170, + "tortoise": 30178, + "torture": 16013, + "tortured": 29900, + "tory": 29390, + "tory": 4214, + "tos": 6094, + "tosc": 37719, + "tose": 38154, + "tosh": 17109, + "toshi": 31744, + "toss": 19656, + "tossed": 31296, + "tot": 
4618, + "tot": 23659, + "total": 13507, + "total": 4445, + "totally": 5440, + "totals": 25772, + "tote": 48145, + "tote": 19031, + "totem": 45376, + "totes": 37199, + "tothe": 12222, + "toto": 39823, + "tots": 24978, + "totten": 14360, + "tottenham": 14889, + "tou": 1879, + "tou": 29261, + "touch": 9480, + "touch": 4526, + "touchdown": 18664, + "touchdowns": 37905, + "touched": 13190, + "touches": 14832, + "touching": 14088, + "touchscreen": 39095, + "tough": 12063, + "tough": 5499, + "tougher": 33722, + "toughest": 23773, + "toughness": 45522, + "toulou": 27145, + "toulouse": 30267, + "tour": 2710, + "tour": 1760, + "tourde": 39247, + "toured": 27654, + "touri": 4224, + "touring": 11853, + "tourism": 23661, + "tourism": 6556, + "tourist": 12123, + "tourists": 15546, + "tournament": 4097, + "tournaments": 23058, + "tourney": 12603, + "tours": 8948, + "tous": 37424, + "tout": 22300, + "touts": 41274, + "tov": 28970, + "tow": 11557, + "tow": 18653, + "toward": 8508, + "towards": 4447, + "towed": 45419, + "towel": 15953, + "towels": 26578, + "tower": 26669, + "tower": 4730, + "towering": 39444, + "towers": 12701, + "towie": 44613, + "towin": 45819, + "towing": 36963, + "town": 4068, + "town": 1605, + "townfc": 33981, + "townhall": 33408, + "townhouse": 40178, + "towns": 14173, + "townsend": 26826, + "township": 14622, + "townsville": 47330, + "towork": 48233, + "tox": 7742, + "tox": 16145, + "toxic": 27436, + "toxic": 12348, + "toxicity": 41234, + "toxin": 48899, + "toxins": 36618, + "toy": 14387, + "toy": 5988, + "toya": 37602, + "toyo": 7644, + "toyota": 8908, + "toys": 39508, + "toys": 7162, + "tp": 23760, + "tp": 15188, + "tpp": 29411, + "tps": 35246, + "tq": 43066, + "tr": 635, + "tr": 6337, + "tra": 752, + "tra": 2483, + "trac": 2266, + "trace": 48611, + "trace": 14767, + "traced": 47956, + "traces": 30913, + "tracey": 25558, + "tracing": 27897, + "track": 10887, + "track": 2700, + "tracked": 27049, + "tracker": 18123, + "tracking": 10428, + "tracklist": 39777, + "tracks": 7579, + "tract": 4690, + "traction": 10644, + "tractor": 14607, + "tractors": 37854, + "tracy": 32984, + "tracy": 15508, + "trad": 48716, + "trad": 38037, + "trade": 10457, + "trade": 3629, + "traded": 18860, + "trademark": 25011, + "trader": 17700, + "traders": 19112, + "trades": 18519, + "trading": 40083, + "trading": 6520, + "tradio": 20689, + "tradition": 20838, + "tradition": 8784, + "traditional": 41113, + "traditional": 5604, + "traditionally": 35532, + "traditions": 18016, + "traf": 3227, + "trafal": 32461, + "trafalgar": 36969, + "traff": 31571, + "traffic": 12080, + "traffic": 3399, + "trafficking": 15983, + "trafford": 22912, + "trage": 12430, + "tragedy": 14082, + "tragic": 14828, + "tragically": 39599, + "trail": 11523, + "trail": 4921, + "trailblazer": 41015, + "trailblazers": 35954, + "trailer": 4700, + "trailers": 24862, + "trailing": 37427, + "trails": 10633, + "train": 9122, + "train": 3231, + "trained": 10874, + "trainee": 25795, + "trainees": 30382, + "trainer": 9767, + "trainers": 18871, + "training": 34508, + "training": 2199, + "trains": 9541, + "trait": 35160, + "traitor": 31760, + "traitors": 42633, + "traits": 25748, + "trajec": 42042, + "trak": 24065, + "tral": 14609, + "tram": 9800, + "tram": 17500, + "tramp": 46289, + "trampol": 32905, + "trampoline": 42800, + "tramrahim": 35220, + "tran": 1357, + "tran": 22031, + "trance": 30584, + "trance": 18671, + "trancefamily": 39630, + "trane": 35779, + "tranqu": 18912, + "tranquil": 35764, + "tranquility": 36688, + "trans": 1826, + "trans": 8126, + 
"transaction": 24881, + "transactions": 21653, + "transat": 37872, + "transatlantic": 40703, + "transc": 21073, + "transcend": 47087, + "transcript": 39008, + "transcription": 48765, + "transfer": 22659, + "transfer": 7134, + "transferred": 29700, + "transferring": 40924, + "transfers": 21621, + "transform": 8142, + "transform": 12288, + "transformation": 34204, + "transformation": 7832, + "transformational": 47135, + "transformationtuesday": 36511, + "transformative": 38106, + "transformed": 17453, + "transformer": 38235, + "transformers": 17843, + "transforming": 44470, + "transforming": 19251, + "transforms": 30312, + "transgender": 17732, + "transi": 32236, + "transit": 10174, + "transiti": 22939, + "transition": 11391, + "transitional": 41519, + "transitioning": 43586, + "transitions": 39374, + "transl": 12243, + "translate": 22655, + "translated": 20752, + "translates": 36334, + "translating": 42156, + "translation": 12153, + "translations": 41367, + "translator": 36230, + "translucent": 49052, + "transm": 18861, + "transmission": 16103, + "transmitted": 48605, + "transmitter": 40457, + "transp": 11726, + "transpa": 18524, + "transparen": 16108, + "transparency": 16828, + "transparent": 19017, + "transpl": 16038, + "transplant": 41871, + "transplant": 18771, + "transplantation": 45207, + "transpor": 19406, + "transport": 10231, + "transport": 7362, + "transportation": 10911, + "transported": 29089, + "transporter": 43568, + "transporting": 42259, + "trap": 36224, + "trap": 9677, + "trape": 42435, + "trapped": 15592, + "traps": 28517, + "tras": 30638, + "trash": 39215, + "trash": 9798, + "traum": 22263, + "trauma": 13846, + "traumati": 46613, + "traumatic": 29958, + "trav": 7586, + "trav": 46955, + "trave": 35357, + "travel": 2824, + "travel": 1949, + "travelblog": 35957, + "travelblogger": 25494, + "travelchat": 46455, + "traveled": 20384, + "traveler": 17794, + "travelers": 20644, + "travelgram": 40069, + "traveling": 9365, + "travelled": 23428, + "traveller": 22546, + "travellers": 29583, + "travelling": 11190, + "travelphotography": 22808, + "travelpics": 32293, + "travels": 11472, + "traveltips": 36260, + "traveltuesday": 16713, + "traverse": 35058, + "travi": 46971, + "travis": 27441, + "travis": 12287, + "traw": 42288, + "trax": 34421, + "tray": 38470, + "tray": 14621, + "trays": 39798, + "trc": 41803, + "tre": 975, + "tre": 6033, + "treach": 46005, + "tread": 26182, + "tread": 35658, + "treadmill": 37780, + "treas": 8591, + "treason": 28103, + "treasure": 9922, + "treasured": 48068, + "treasurer": 26985, + "treasures": 16500, + "treasury": 20956, + "treat": 3968, + "treat": 3901, + "treated": 9772, + "treating": 13842, + "treatment": 4869, + "treatments": 15839, + "treats": 8878, + "treaty": 19967, + "treble": 33194, + "trecht": 33812, + "tree": 13354, + "tree": 2677, + "treehouse": 42387, + "trees": 4682, + "trek": 13236, + "trek": 8136, + "trekking": 25293, + "trell": 35159, + "tremb": 44043, + "tremend": 14659, + "tremendous": 15988, + "tren": 2579, + "trench": 23846, + "trenches": 38723, + "trend": 19986, + "trend": 6643, + "trending": 6087, + "trends": 7015, + "trendsetter": 46666, + "trendy": 23072, + "trent": 45885, + "trent": 15548, + "trenton": 37470, + "tres": 23569, + "tress": 4733, + "tresses": 24273, + "trevor": 23437, + "trevor": 13219, + "trex": 42114, + "trey": 36670, + "trey": 16939, + "tri": 924, + "tri": 9618, + "triad": 45602, + "trial": 5991, + "trials": 10992, + "triangle": 14615, + "triathlon": 18080, + "trib": 45151, + "tribal": 16629, + "tribe": 19943, 
+ "tribe": 11365, + "tribeca": 35184, + "tribes": 26546, + "tribu": 3028, + "tribun": 14311, + "tribunal": 32911, + "tribune": 18556, + "tribute": 5493, + "tributes": 15537, + "tric": 9511, + "tric": 4081, + "trich": 39519, + "trick": 17177, + "trick": 8172, + "tricks": 13177, + "tricky": 22319, + "trics": 31437, + "trident": 35491, + "tridge": 18722, + "tried": 4554, + "tries": 4315, + "trife": 48962, + "trigge": 30509, + "trigger": 16158, + "triggered": 30924, + "triggers": 37319, + "tright": 29915, + "tril": 40626, + "trill": 39297, + "trilli": 39350, + "trillion": 20160, + "trilo": 15183, + "trilogy": 16862, + "trim": 14182, + "trimmed": 40657, + "trin": 6628, + "trinidad": 26244, + "trinity": 30744, + "trinity": 12267, + "trio": 10263, + "trip": 23421, + "trip": 2529, + "tripad": 37189, + "tripadvisor": 38708, + "triple": 16519, + "triple": 7673, + "triplets": 48601, + "tripod": 36141, + "tripoli": 40095, + "trippin": 43073, + "tripping": 35229, + "trippy": 35137, + "trips": 12292, + "tris": 29690, + "trish": 40511, + "trish": 37179, + "trisha": 39152, + "tristan": 25497, + "trit": 37087, + "triton": 45437, + "triu": 14782, + "trium": 21065, + "triumph": 26507, + "triumph": 15307, + "triumphant": 41918, + "trivi": 21228, + "trivia": 10642, + "triviatuesday": 45499, + "trix": 41017, + "tro": 1046, + "tro": 3332, + "trock": 44368, + "trojan": 30653, + "trojans": 25310, + "trol": 10306, + "troll": 39737, + "troll": 17103, + "trolley": 25124, + "trolling": 28552, + "trolls": 20890, + "tromb": 32390, + "trombone": 44423, + "tron": 19057, + "tron": 10684, + "tronic": 34258, + "tronics": 34397, + "troom": 23691, + "troop": 12492, + "troop": 24054, + "trooper": 18327, + "troopers": 23576, + "troops": 10109, + "trop": 31585, + "trope": 41150, + "trophies": 20998, + "trophy": 42676, + "trophy": 6502, + "tropic": 21794, + "tropic": 36736, + "tropical": 41699, + "tropical": 8686, + "tropics": 36940, + "tros": 40456, + "trose": 36022, + "trot": 30453, + "trotter": 38287, + "trou": 5181, + "troubad": 49037, + "trouble": 25669, + "trouble": 7848, + "troubled": 25568, + "troubles": 27254, + "trough": 39761, + "troupe": 34803, + "trous": 19727, + "trousers": 23172, + "trout": 14853, + "trove": 45350, + "trow": 46914, + "troy": 26283, + "troy": 12819, + "trs": 24770, + "tru": 931, + "tru": 25326, + "truck": 14781, + "truck": 4629, + "trucker": 45918, + "truckers": 43404, + "trucking": 26208, + "trucks": 9569, + "trude": 39017, + "trudeau": 15752, + "true": 13096, + "true": 2328, + "truec": 37583, + "truelove": 45711, + "truffle": 23064, + "truffles": 37057, + "truly": 4545, + "trum": 11766, + "trum": 11399, + "truman": 29414, + "trump": 9124, + "trump": 1797, + "trumpet": 23681, + "trumpp": 45550, + "trumprussia": 39135, + "trumps": 29793, + "trumptrain": 43595, + "trun": 16163, + "trun": 46661, + "trunk": 18347, + "trunks": 38531, + "truro": 43507, + "truss": 46080, + "trust": 17691, + "trust": 3876, + "truste": 17356, + "trusted": 16538, + "trustee": 30803, + "trustees": 28853, + "trusting": 33221, + "trusts": 27507, + "trustworthy": 46840, + "trusty": 37955, + "truth": 21335, + "truth": 4319, + "truths": 27179, + "trx": 31620, + "try": 4487, + "try": 1209, + "tryin": 31085, + "trying": 2551, + "tryna": 15702, + "tryout": 43832, + "tryouts": 28053, + "ts": 2290, + "ts": 590, + "tsa": 25977, + "tsal": 20438, + "tsb": 45015, + "tsc": 37437, + "tsch": 38778, + "tsd": 20611, + "tse": 49144, + "tsfor": 42654, + "tsford": 32823, + "tsh": 42872, + "tshirt": 14907, + "tshirts": 29377, + "tsi": 40048, + 
"tsi": 37867, + "tsk": 43600, + "tsla": 35681, + "tsm": 43452, + "tsman": 20046, + "tsn": 44921, + "tsn": 26896, + "tson": 42353, + "tson": 47140, + "tsp": 34230, + "tsu": 13950, + "tsu": 20175, + "tsun": 19155, + "tsunami": 24286, + "tsville": 29080, + "tt": 971, + "tt": 1402, + "tta": 2646, + "ttc": 27668, + "tte": 23105, + "tte": 3070, + "tted": 15163, + "tten": 11351, + "tten": 17479, + "tter": 18691, + "tter": 5165, + "tters": 6318, + "ttes": 9293, + "tti": 5237, + "ttin": 36589, + "tting": 1188, + "ttino": 47389, + "ttip": 46993, + "ttle": 9253, + "ttm": 46838, + "tto": 8759, + "tto": 8105, + "tton": 10562, + "ttot": 12480, + "ttp": 30828, + "ttr": 47589, + "tts": 11570, + "ttt": 17256, + "tttt": 33119, + "ttu": 44006, + "ttv": 24281, + "tty": 11457, + "tty": 1856, + "tu": 764, + "tu": 5760, + "tua": 41344, + "tual": 4799, + "tuan": 37297, + "tub": 34907, + "tub": 15450, + "tube": 38229, + "tube": 3308, + "tuber": 30371, + "tuberculo": 42606, + "tuberculosis": 43129, + "tubes": 22870, + "tubing": 40794, + "tubs": 41705, + "tubular": 48786, + "tuc": 14456, + "tuc": 43871, + "tuck": 22398, + "tucked": 26923, + "tucker": 39703, + "tucker": 15726, + "tucket": 32677, + "tucson": 17250, + "tudor": 24547, + "tue": 17515, + "tues": 2283, + "tues": 12113, + "tuesday": 10209, + "tuesday": 2519, + "tuesdaymotivation": 25432, + "tuesdays": 23195, + "tuesdaythoughts": 17988, + "tuf": 44510, + "tuff": 38868, + "tug": 47032, + "tug": 27902, + "tuition": 21129, + "tuk": 39271, + "tuk": 14993, + "tul": 9069, + "tul": 40837, + "tula": 36332, + "tulane": 44893, + "tulip": 28389, + "tulips": 30886, + "tulsa": 18850, + "tum": 12932, + "tum": 8843, + "tumb": 8831, + "tumble": 38284, + "tumbler": 48790, + "tumbling": 46226, + "tumblr": 11841, + "tummy": 26053, + "tumor": 22616, + "tumors": 39894, + "tumour": 45129, + "tun": 1415, + "tun": 21349, + "tuna": 15037, + "tundra": 39899, + "tune": 11427, + "tune": 3300, + "tuned": 5898, + "tunein": 16809, + "tuner": 42905, + "tunes": 31688, + "tunes": 10810, + "tunesapp": 32550, + "tung": 47940, + "tung": 31092, + "tuni": 16270, + "tunic": 43495, + "tuning": 19585, + "tunisia": 23346, + "tunnel": 11096, + "tunnels": 29814, + "tuous": 28738, + "tup": 37956, + "tup": 4507, + "tupac": 31506, + "tups": 44855, + "tur": 985, + "tur": 17182, + "tura": 16127, + "tural": 45143, + "tural": 4261, + "turb": 18973, + "turban": 48515, + "turbine": 26880, + "turbines": 38863, + "turbo": 23578, + "turbo": 13668, + "turbul": 31100, + "turbulent": 47871, + "ture": 4321, + "ture": 941, + "tured": 3987, + "turer": 11993, + "turers": 16956, + "tures": 2400, + "turf": 36762, + "turf": 12510, + "turi": 11896, + "turin": 36251, + "turing": 5812, + "turismo": 30202, + "turk": 8254, + "turk": 32507, + "turkey": 35977, + "turkey": 4790, + "turkeys": 37991, + "turkish": 48199, + "turkish": 9278, + "turks": 34344, + "turmeric": 34044, + "turmoil": 37751, + "turn": 5522, + "turn": 2105, + "turnaround": 32719, + "turnbull": 27863, + "turned": 3771, + "turner": 42867, + "turner": 8777, + "turning": 4976, + "turno": 21377, + "turnout": 11654, + "turnover": 30794, + "turnpike": 38301, + "turns": 3185, + "turnt": 28887, + "turntable": 37953, + "turnup": 30591, + "turo": 29224, + "turquo": 19390, + "turquoise": 19899, + "turt": 13716, + "turtle": 35943, + "turtle": 10912, + "turtles": 17862, + "tus": 24828, + "tus": 7079, + "tusc": 17909, + "tuscal": 42638, + "tuscaloosa": 44375, + "tuscan": 42865, + "tuscany": 20885, + "tuss": 31741, + "tut": 35121, + "tutor": 10054, + "tutor": 27858, + 
"tutorial": 12857, + "tutorials": 30973, + "tutoring": 37532, + "tutti": 46880, + "tutu": 35845, + "tux": 28720, + "tux": 49186, + "tuxedo": 40173, + "tv": 3197, + "tv": 1583, + "tvc": 49190, + "tvd": 25889, + "tvmiaw": 38554, + "tvn": 44232, + "tvs": 27114, + "tvtime": 19947, + "tvxq": 43968, + "tw": 966, + "tw": 12842, + "twa": 46954, + "twain": 30689, + "twal": 48126, + "tware": 5707, + "twc": 41217, + "twd": 29440, + "twd": 19343, + "twdfamily": 38218, + "twe": 18365, + "tweak": 48870, + "tweaks": 42661, + "twee": 1330, + "tweed": 26904, + "tweeps": 14928, + "tweet": 11826, + "tweet": 1842, + "tweeta": 32024, + "tweetapicture": 40596, + "tweeted": 7841, + "tweeter": 32876, + "tweeters": 31713, + "tweeting": 8901, + "tweets": 3560, + "tweetyour": 45033, + "twel": 14476, + "twelf": 39443, + "twelfth": 44072, + "twell": 38722, + "twell": 30162, + "twelve": 19694, + "twent": 27027, + "twenti": 35167, + "twenty": 13016, + "twentyon": 39609, + "twentyonepilots": 40007, + "twer": 13923, + "twerk": 28506, + "twi": 5537, + "twice": 6970, + "twick": 34326, + "twickenham": 39619, + "twil": 12804, + "twili": 35754, + "twilight": 46366, + "twilight": 14512, + "twill": 43703, + "twin": 9342, + "twin": 6769, + "twine": 42775, + "twinkle": 36545, + "twinning": 30156, + "twinpeaks": 32042, + "twins": 8040, + "twist": 10589, + "twisted": 18233, + "twister": 45933, + "twists": 34149, + "twit": 1643, + "twit": 18704, + "twitart": 27709, + "twitch": 13251, + "twitch": 9153, + "twitter": 7546, + "twitter": 1989, + "twitterkurds": 32722, + "twitterstorians": 35389, + "two": 17211, + "two": 1237, + "twol": 31964, + "twood": 40404, + "twood": 13245, + "twp": 33283, + "twright": 46778, + "twt": 6825, + "twx": 26830, + "twy": 45861, + "tx": 6636, + "tx": 5200, + "txhsfb": 34757, + "txlege": 26995, + "txst": 40761, + "txt": 24595, + "txwx": 22995, + "ty": 1260, + "ty": 744, + "tya": 41273, + "tycoon": 36803, + "tye": 43097, + "tyfree": 41215, + "tyga": 41952, + "tying": 22559, + "tyl": 47537, + "tyler": 14787, + "tyler": 7058, + "tym": 45772, + "tyne": 27000, + "tyne": 29729, + "tyour": 16823, + "type": 15673, + "type": 3877, + "typed": 40753, + "typeface": 44969, + "types": 7543, + "typewriter": 42180, + "typho": 17486, + "typhoon": 21110, + "typic": 21648, + "typical": 9854, + "typically": 23175, + "typing": 20102, + "typo": 18831, + "typo": 29076, + "typography": 24332, + "tyr": 15590, + "tyran": 46921, + "tyranny": 35402, + "tyre": 38330, + "tyre": 16864, + "tyres": 21376, + "tyrone": 30226, + "tyson": 16616, + "tz": 7710, + "tz": 4983, + "tzer": 45267, + "tzky": 47127, + "tzman": 46032, + "tzu": 34354, + "té": 27208, + "té": 39694, + "u": 84, + "u": 340, + "ua": 34075, + "ua": 8441, + "uaap": 46753, + "uaap": 43774, + "uab": 35587, + "uae": 9752, + "ual": 1921, + "ually": 10767, + "uan": 33062, + "uas": 38339, + "uav": 30303, + "ub": 18430, + "ub": 13494, + "uba": 29768, + "ubc": 42479, + "ubc": 29455, + "ube": 30892, + "uber": 25896, + "uber": 10668, + "ubi": 26758, + "ubio": 32867, + "ubiquit": 48129, + "ubis": 28248, + "ubisoft": 32051, + "ubs": 43851, + "ubun": 28184, + "ubuntu": 30791, + "uc": 4903, + "uc": 12438, + "uca": 30942, + "ucc": 44844, + "ucc": 29138, + "ucci": 30746, + "uccino": 30409, + "ucd": 44746, + "ucd": 43514, + "ucf": 24414, + "uch": 19465, + "uch": 22394, + "uchi": 37473, + "uci": 46354, + "uci": 28925, + "uck": 34189, + "ucl": 12013, + "ucl": 13647, + "ucla": 37667, + "ucla": 17259, + "ucn": 49036, + "uconn": 30549, + "ud": 6560, + "ud": 5765, + "uda": 22800, + "udaipur": 49385, + 
"uddin": 43035, + "ude": 37016, + "ude": 35194, + "ue": 16696, + "ue": 1190, + "uefa": 19189, + "uel": 24231, + "uer": 45951, + "ues": 2526, + "uf": 17777, + "uf": 19230, + "ufc": 20396, + "ufc": 6490, + "uff": 45701, + "ufo": 19443, + "ufos": 48234, + "ug": 3754, + "ug": 16061, + "uga": 16056, + "ugand": 25965, + "uganda": 11125, + "ugandan": 44206, + "ugby": 30658, + "ugh": 39736, + "ugh": 12755, + "ugliest": 43543, + "ugly": 36070, + "ugly": 8159, + "ugu": 18144, + "uh": 17661, + "uh": 9219, + "uhc": 44974, + "uhh": 35938, + "uhhh": 45270, + "uhm": 35614, + "uhur": 29434, + "uhuru": 35690, + "ui": 17326, + "ui": 11458, + "uil": 29395, + "uit": 30696, + "uit": 47584, + "uj": 33266, + "uji": 39672, + "uk": 2294, + "uk": 1432, + "uka": 23294, + "uke": 48836, + "uke": 28577, + "uked": 48987, + "uki": 37435, + "uki": 9009, + "ukin": 34996, + "ukip": 20360, + "uklabour": 36902, + "ukmfg": 38764, + "uko": 33562, + "ukone": 24682, + "ukrain": 15468, + "ukraine": 7768, + "ukrainian": 16927, + "ukrunchat": 34481, + "uku": 29541, + "uku": 36082, + "ukulele": 39094, + "ul": 914, + "ul": 6625, + "ula": 34104, + "ula": 9506, + "ular": 4927, + "ulary": 21701, + "ulate": 20467, + "ulation": 32896, + "ule": 35616, + "ules": 26274, + "ulf": 49331, + "uli": 41841, + "uli": 22174, + "ull": 33254, + "ulla": 30577, + "ullah": 45310, + "ullivan": 45252, + "ulls": 37418, + "ulo": 46084, + "ulo": 36738, + "ulous": 42490, + "ulous": 4281, + "ulously": 20167, + "ulster": 29709, + "ulster": 24639, + "ult": 4380, + "ulti": 11925, + "ulties": 21884, + "ultimat": 16522, + "ultimate": 34684, + "ultimate": 5377, + "ultimatefan": 48372, + "ultimatefanlive": 48644, + "ultimately": 23023, + "ultr": 25636, + "ultra": 11398, + "ultra": 8118, + "ultram": 44519, + "ultrasound": 29717, + "ulture": 22272, + "ulty": 8036, + "ulu": 41815, + "ulu": 15659, + "ulum": 17235, + "uly": 33220, + "ulysses": 46114, + "um": 1622, + "um": 1008, + "uma": 29982, + "uma": 9256, + "uman": 27112, + "umar": 25656, + "umass": 39390, + "umatic": 45006, + "umb": 7493, + "umber": 19195, + "umbrel": 34773, + "umbrella": 17143, + "umbrellas": 42782, + "umbria": 39287, + "umc": 39491, + "umd": 42067, + "ume": 38480, + "umen": 42832, + "uments": 25924, + "umer": 23539, + "umes": 21403, + "umi": 48772, + "umi": 15458, + "umich": 41294, + "umin": 31542, + "umm": 26129, + "umm": 21215, + "ummer": 47628, + "ummm": 33665, + "umni": 31739, + "ump": 22224, + "umpire": 36214, + "ums": 8643, + "umu": 39788, + "un": 569, + "un": 2271, + "una": 6385, + "unable": 17793, + "unacceptable": 25234, + "unanim": 20800, + "unanimous": 33520, + "unanimously": 31798, + "unanswered": 43611, + "unarmed": 41541, + "unas": 41366, + "unavailable": 48430, + "unaware": 33347, + "unbeat": 37056, + "unbeatable": 40267, + "unbeaten": 19228, + "unbeliev": 11383, + "unbelievable": 13306, + "unbelievably": 33781, + "unborn": 37257, + "unboxing": 32866, + "unbreakable": 32956, + "unbroken": 49271, + "unc": 24921, + "unc": 15322, + "uncanny": 32556, + "uncertain": 30384, + "uncertainty": 23956, + "unch": 1527, + "unchanged": 34272, + "uncharted": 34560, + "unci": 25521, + "unciation": 34117, + "uncle": 31537, + "uncle": 8002, + "unclear": 32955, + "uncles": 45335, + "uncomfortable": 22470, + "uncommon": 34888, + "uncondition": 46561, + "unconditional": 31112, + "unconscious": 34791, + "unconstitutional": 43585, + "unconventional": 39440, + "uncover": 33031, + "uncovered": 28234, + "uncture": 38736, + "uncut": 41056, + "und": 9762, + "und": 9732, + "unda": 39932, + "undant": 25377, + 
"unday": 29338, + "unde": 45226, + "undead": 40105, + "undecided": 49368, + "undefeated": 15326, + "undeni": 38424, + "under": 1473, + "under": 1798, + "underage": 45669, + "underattack": 35075, + "undercover": 21595, + "underdog": 44266, + "undere": 21675, + "underestim": 23348, + "underestimate": 31794, + "undergo": 31545, + "undergoing": 26419, + "undergrad": 38331, + "undergraduate": 24320, + "underground": 9396, + "undering": 30826, + "underlying": 31812, + "undermine": 42839, + "underneath": 20857, + "underrated": 19494, + "unders": 20376, + "understand": 47582, + "understand": 4600, + "understanding": 7522, + "understands": 21607, + "understatement": 38296, + "understood": 17303, + "undertaker": 40144, + "undertaking": 49067, + "undertale": 48283, + "underthe": 41161, + "underwater": 14760, + "underway": 6273, + "underwear": 21154, + "underwood": 21474, + "underworld": 34760, + "undi": 23845, + "undisclosed": 39334, + "undo": 35454, + "undocumented": 35414, + "undoub": 38836, + "undoubtedly": 42204, + "undp": 26691, + "une": 4522, + "une": 10966, + "unearth": 32716, + "unearthed": 36632, + "unemp": 15139, + "unemployed": 32721, + "unemployment": 19350, + "unes": 6394, + "unesco": 16216, + "uneven": 43204, + "unex": 9484, + "unexpe": 10802, + "unexpec": 31829, + "unexpected": 12293, + "unexpectedly": 35622, + "unf": 29285, + "unfair": 22193, + "unfinished": 26526, + "unfit": 45367, + "unfold": 38681, + "unfollow": 38797, + "unfor": 14010, + "unforgettable": 16173, + "unfortun": 10194, + "unfortunate": 22361, + "unfortunately": 12863, + "unfpa": 45048, + "ung": 10439, + "ung": 4334, + "unga": 19151, + "ungsoo": 25582, + "unh": 25365, + "unhappy": 26528, + "unhcr": 43451, + "unhealthy": 30994, + "uni": 1107, + "uni": 5926, + "unic": 7648, + "unicef": 38286, + "unicef": 19259, + "unicorn": 15660, + "unicorns": 35183, + "unidenti": 33707, + "unidentified": 35563, + "unification": 45036, + "unified": 20876, + "uniform": 11075, + "uniforms": 17838, + "unil": 32388, + "unilever": 48654, + "uniof": 21218, + "union": 14210, + "union": 3503, + "unions": 18353, + "unis": 30482, + "unis": 39266, + "unisex": 27609, + "unison": 46694, + "unit": 28522, + "unit": 5695, + "unite": 15078, + "unite": 11305, + "uniteblue": 20935, + "united": 10898, + "united": 2690, + "unitedstates": 39636, + "unitedway": 47486, + "unites": 32061, + "uniting": 31318, + "units": 10394, + "unity": 38300, + "unity": 8581, + "univ": 36680, + "univ": 14896, + "univer": 15574, + "univers": 5855, + "universal": 19148, + "universal": 8754, + "universe": 6104, + "universi": 41692, + "universit": 26019, + "universities": 16408, + "university": 40728, + "university": 2182, + "universityof": 46158, + "unk": 5542, + "unknown": 8685, + "unl": 43807, + "unlawful": 42305, + "unle": 19677, + "unlea": 23893, + "unleash": 26706, + "unleashed": 27955, + "unless": 10602, + "unlike": 16694, + "unlikely": 18904, + "unlimited": 11015, + "unlock": 18649, + "unlocked": 16770, + "unlocking": 40810, + "unlucky": 35029, + "unlv": 42283, + "unmanned": 36751, + "unmatched": 46054, + "unn": 38364, + "unnamed": 44985, + "unnecessary": 24100, + "unner": 31481, + "unning": 43282, + "unnoticed": 42807, + "uno": 32446, + "uno": 17078, + "unofficial": 22506, + "unpacking": 43589, + "unpaid": 32811, + "unparalleled": 44396, + "unplugged": 31724, + "unpopular": 40232, + "unprece": 23054, + "unprecedented": 23344, + "unpredictable": 38684, + "unra": 45150, + "unreal": 46980, + "unreal": 15636, + "unrelated": 38644, + "unreleased": 29654, + "unrest": 36452, + 
"uns": 25908, + "unsafe": 32071, + "unsc": 36395, + "unseen": 19069, + "unsigned": 39346, + "unsolved": 40836, + "unsplash": 46196, + "unstable": 34730, + "unstopp": 22105, + "unstoppable": 23484, + "unsuccessful": 47478, + "unsung": 33015, + "unsure": 26396, + "unt": 19654, + "unt": 6537, + "until": 1942, + "untitled": 21309, + "unto": 19801, + "untold": 32206, + "untouch": 44509, + "untouched": 42764, + "unused": 29636, + "unusual": 12613, + "unusually": 36465, + "unve": 6685, + "unveil": 20483, + "unveiled": 13572, + "unveiling": 20327, + "unveils": 15057, + "unwanted": 25285, + "unwind": 34064, + "unya": 37142, + "uo": 30874, + "uo": 36162, + "uof": 11155, + "uoft": 37329, + "uon": 48144, + "uous": 40185, + "up": 1083, + "up": 705, + "upa": 31727, + "upbeat": 39201, + "upcoming": 4196, + "upcycled": 46552, + "upd": 3226, + "update": 2491, + "updated": 5974, + "updates": 4904, + "updating": 22792, + "uper": 38082, + "uper": 33056, + "upfront": 42064, + "upgrade": 10365, + "upgraded": 18577, + "upgrades": 21253, + "upgrading": 34368, + "uph": 14128, + "uphill": 42767, + "uphol": 26195, + "uphold": 43897, + "upholstery": 44556, + "upl": 41939, + "uplift": 45389, + "uplifting": 29546, + "upload": 13968, + "uploaded": 16793, + "uploading": 30145, + "upon": 23524, + "upon": 5067, + "upp": 19549, + "upp": 45946, + "upper": 22465, + "upper": 7067, + "upri": 15982, + "upright": 29818, + "uprising": 26006, + "upro": 28922, + "ups": 6926, + "upscale": 47501, + "upset": 11214, + "upsets": 42637, + "upside": 15362, + "upstairs": 21387, + "upstate": 33335, + "upstream": 45517, + "upthe": 31510, + "upto": 26575, + "upton": 31910, + "uptown": 23807, + "upward": 32526, + "upwards": 34915, + "uq": 39591, + "ur": 565, + "ur": 1775, + "ura": 29337, + "ura": 3544, + "urable": 40194, + "ural": 23547, + "ural": 33948, + "uran": 16197, + "uranium": 29850, + "urban": 7931, + "urban": 5800, + "urbanart": 40834, + "urd": 47880, + "urday": 19742, + "urdu": 29976, + "ure": 5514, + "ure": 726, + "ured": 4210, + "urer": 20864, + "ures": 2288, + "urg": 35995, + "urge": 14852, + "urged": 23790, + "urgency": 47612, + "urgent": 13693, + "urgently": 34534, + "urges": 16692, + "urging": 27748, + "uri": 11052, + "uri": 8699, + "urie": 46429, + "urin": 45245, + "urine": 28864, + "uring": 1351, + "url": 23464, + "urn": 38075, + "uro": 17343, + "uro": 5925, + "urology": 48585, + "urope": 14918, + "urs": 4794, + "urself": 31942, + "urst": 19181, + "urstruly": 34751, + "urstrulymahesh": 35314, + "ursula": 38390, + "urt": 24309, + "uru": 16322, + "uru": 11768, + "uruguay": 27931, + "urus": 14246, + "urve": 24583, + "ury": 8642, + "ury": 2106, + "us": 904, + "us": 718, + "usa": 9491, + "usa": 2547, + "usability": 46736, + "usable": 22890, + "usaf": 25017, + "usage": 19137, + "usaid": 34507, + "usair": 36742, + "usairforce": 42179, + "usarmy": 19132, + "usatoday": 40263, + "usav": 36056, + "usb": 10281, + "usc": 13346, + "usc": 14995, + "uscg": 43932, + "usd": 7485, + "usda": 25829, + "use": 4419, + "use": 1483, + "used": 32289, + "used": 2026, + "useful": 9784, + "useless": 20154, + "usemb": 39700, + "user": 21248, + "user": 7031, + "username": 28162, + "users": 7433, + "uses": 5282, + "useum": 45189, + "usf": 32385, + "usf": 28942, + "usgs": 35103, + "ush": 12001, + "ush": 18335, + "usher": 27411, + "ushi": 47734, + "usi": 25540, + "usic": 34909, + "usic": 16753, + "using": 1996, + "usky": 45778, + "usl": 42113, + "usm": 40041, + "usmc": 21678, + "usmnt": 30662, + "usn": 40579, + "usnavy": 24500, + "usnews": 43752, + "uso": 
21539, + "usopen": 21782, + "usp": 26651, + "usps": 39980, + "usrc": 33274, + "uss": 11545, + "uss": 9260, + "ussia": 29553, + "ussoccer": 42828, + "ussr": 32697, + "ust": 35501, + "ust": 24725, + "usu": 4254, + "usu": 40434, + "usual": 6129, + "usually": 8296, + "usur": 45582, + "uswnt": 35255, + "ut": 1419, + "ut": 3641, + "uta": 42706, + "uta": 25925, + "utah": 27474, + "utah": 9312, + "utc": 18196, + "utd": 10493, + "ute": 16856, + "ute": 3130, + "uten": 32089, + "uter": 39197, + "utes": 2850, + "uth": 48819, + "uth": 44750, + "uti": 24568, + "util": 28824, + "utili": 17015, + "utilities": 27210, + "utility": 14941, + "utilize": 36861, + "utilized": 47604, + "utilizing": 40212, + "utm": 47853, + "utmost": 42352, + "uto": 18866, + "uto": 13683, + "utopia": 34433, + "utpol": 42605, + "utr": 48726, + "utrecht": 37216, + "uts": 11740, + "utsa": 37528, + "utt": 17096, + "uttar": 40168, + "uttarak": 33755, + "uttarakhand": 35655, + "utter": 18769, + "utter": 24558, + "utterly": 21353, + "utto": 42183, + "utv": 36351, + "utz": 45320, + "uu": 5702, + "uu": 14553, + "uuu": 44355, + "uuu": 27656, + "uuuu": 16720, + "uuuu": 40797, + "uv": 23777, + "uv": 15977, + "uva": 23908, + "uw": 13933, + "uw": 19166, + "uwe": 48785, + "uwu": 35544, + "ux": 9251, + "ux": 6213, + "uy": 31929, + "uy": 48113, + "uz": 19398, + "uz": 36991, + "uzbe": 43007, + "uzbekistan": 45024, + "uzzi": 48210, + "v": 85, + "v": 341, + "va": 4648, + "va": 1892, + "vaa": 37488, + "vable": 23088, + "vac": 3125, + "vac": 34085, + "vaca": 48215, + "vacancies": 26333, + "vacancy": 21247, + "vacant": 25262, + "vacation": 28336, + "vacation": 6561, + "vacations": 29002, + "vacay": 44716, + "vacc": 13342, + "vaccin": 19164, + "vaccinated": 48134, + "vaccination": 32518, + "vaccine": 47780, + "vaccine": 17493, + "vaccines": 25860, + "vach": 46211, + "vacu": 16058, + "vacuum": 18420, + "vad": 11880, + "vada": 46759, + "vader": 21908, + "vae": 39384, + "vag": 13015, + "vague": 42154, + "vah": 26921, + "vai": 26893, + "vai": 36802, + "vail": 21189, + "vain": 25538, + "vais": 28719, + "vaj": 34206, + "vak": 16288, + "vak": 41597, + "val": 1214, + "val": 1560, + "vala": 48525, + "valdez": 40617, + "vale": 35554, + "vale": 10820, + "valedic": 43525, + "valen": 12630, + "valence": 30225, + "valenci": 34183, + "valencia": 16559, + "valent": 3655, + "valent": 15300, + "valentin": 48631, + "valentina": 43741, + "valentine": 11208, + "valentine": 5876, + "valentines": 10259, + "valentinesday": 12369, + "valentino": 29624, + "valeri": 31951, + "valerie": 25592, + "valet": 45749, + "vali": 8230, + "valiant": 33804, + "valid": 15126, + "validation": 32536, + "valkyrie": 42326, + "vall": 23523, + "vall": 35295, + "vallarta": 47874, + "valle": 24857, + "valle": 29105, + "valley": 18354, + "valley": 3136, + "valleys": 28649, + "valor": 30930, + "vals": 7431, + "valu": 6291, + "valuable": 10056, + "valuation": 25894, + "value": 41358, + "value": 4602, + "valued": 17801, + "values": 8857, + "valve": 17001, + "valves": 33517, + "vam": 9983, + "vamo": 46718, + "vamos": 30346, + "vamp": 10680, + "vampi": 47017, + "vampire": 47576, + "vampire": 13220, + "vampires": 30868, + "vamps": 44810, + "van": 2446, + "van": 2451, + "vana": 20543, + "vanc": 6320, + "vance": 31447, + "vancou": 6750, + "vancouver": 31904, + "vancouver": 7208, + "vand": 11691, + "vandalism": 45664, + "vander": 16264, + "vanderbilt": 33524, + "vandy": 39268, + "vane": 43828, + "vaness": 13328, + "vanessa": 16836, + "vangogh": 47849, + "vanguard": 27916, + "vani": 15396, + "vani": 26459, + 
"vania": 10998, + "vanilla": 11974, + "vanished": 43783, + "vanishing": 48296, + "vanity": 48353, + "vanity": 22938, + "vans": 11711, + "vant": 26298, + "vantage": 31749, + "vanu": 42892, + "vanuatu": 48766, + "vap": 10462, + "vape": 25423, + "vape": 20219, + "vaping": 29403, + "vapor": 37167, + "vapor": 30729, + "vapori": 46183, + "var": 3187, + "var": 12998, + "vara": 47492, + "varan": 36585, + "varanasi": 39364, + "vard": 21866, + "vard": 8773, + "vardy": 47371, + "vare": 38159, + "vares": 42895, + "vargas": 32752, + "vari": 3354, + "variable": 26416, + "varian": 34334, + "variant": 20293, + "variants": 38312, + "variation": 26420, + "variations": 29025, + "varied": 32334, + "varies": 32543, + "varieties": 23805, + "variety": 8396, + "various": 7395, + "varsity": 43716, + "varsity": 8574, + "varun": 48120, + "varun": 22069, + "vary": 18855, + "varying": 36456, + "vas": 5669, + "vas": 5995, + "vasc": 40995, + "vascular": 19218, + "vase": 20431, + "vasi": 49092, + "vast": 24413, + "vast": 16414, + "vastly": 48257, + "vat": 11588, + "vat": 18363, + "vatican": 21030, + "vation": 37884, + "vau": 6391, + "vaugh": 25158, + "vaughan": 21392, + "vaughn": 29013, + "vaul": 27469, + "vault": 15240, + "vaus": 40217, + "vaux": 27403, + "vauxhall": 29173, + "vaw": 47952, + "vay": 48000, + "vaz": 38142, + "vb": 29365, + "vb": 8778, + "vball": 38329, + "vc": 28670, + "vc": 7952, + "vcs": 43528, + "vcu": 40102, + "vd": 9515, + "vday": 42055, + "ve": 673, + "ve": 563, + "vea": 43798, + "veal": 36616, + "veau": 24419, + "vec": 19912, + "vector": 40453, + "vector": 21533, + "ved": 19515, + "ved": 1102, + "veda": 44401, + "vedere": 45660, + "vedi": 47971, + "vee": 35708, + "vee": 17073, + "veen": 22432, + "veer": 21243, + "veer": 22058, + "veg": 9048, + "veg": 16460, + "vega": 22930, + "vegan": 15705, + "vegan": 5615, + "vegans": 48514, + "vegas": 20288, + "vegas": 4413, + "vege": 6219, + "vegetable": 15725, + "vegetables": 14119, + "vegetarian": 14600, + "vegetation": 33947, + "veggie": 19401, + "veggies": 16767, + "vehic": 3973, + "vehicle": 5299, + "vehicles": 8361, + "veil": 23516, + "vein": 29169, + "veins": 28867, + "veit": 30620, + "vel": 942, + "vel": 1287, + "vela": 34898, + "veld": 34011, + "veled": 15370, + "veli": 49166, + "veling": 37970, + "vell": 21173, + "vell": 32997, + "velo": 14357, + "velo": 33850, + "velocity": 23811, + "vels": 5109, + "velve": 37849, + "velvet": 11063, + "vely": 1708, + "vember": 3477, + "vement": 3129, + "vements": 11104, + "ven": 1240, + "ven": 1638, + "vena": 47442, + "vend": 10851, + "vending": 29202, + "vendor": 21261, + "vendors": 20353, + "vene": 5365, + "veness": 10516, + "venetian": 34336, + "venezia": 34139, + "venezu": 10939, + "venezuela": 12839, + "venezuelan": 34699, + "veng": 31526, + "venge": 27757, + "vengeance": 32057, + "veni": 31142, + "venice": 11010, + "vening": 47532, + "venison": 40037, + "venom": 42491, + "venom": 21588, + "vens": 20884, + "vent": 4373, + "vent": 5687, + "ventil": 39522, + "ventilation": 35066, + "venting": 15731, + "vention": 4122, + "vents": 12833, + "ventu": 48217, + "ventura": 20921, + "venture": 37046, + "venture": 12543, + "ventures": 20829, + "venue": 5097, + "venues": 18120, + "venus": 14691, + "ver": 624, + "ver": 667, + "vera": 13350, + "verage": 3725, + "verb": 34952, + "verbal": 26522, + "verbally": 39985, + "verbs": 45687, + "verde": 16935, + "verdi": 42306, + "verdict": 18030, + "vere": 11135, + "vere": 34707, + "vered": 2868, + "verge": 23913, + "veri": 11638, + "verification": 33521, + "verified": 22555, + 
"verify": 34722, + "vering": 4630, + "veriz": 19707, + "verizon": 21532, + "verma": 41261, + "vermont": 19241, + "vern": 2214, + "vern": 12586, + "verne": 45553, + "vernon": 18348, + "vero": 45217, + "vero": 38208, + "verona": 31819, + "veronic": 39551, + "veronica": 24039, + "vers": 1219, + "vers": 2094, + "versa": 35765, + "versace": 25422, + "versail": 29857, + "versailles": 32129, + "versary": 2940, + "versatile": 18110, + "versatility": 41340, + "verse": 39466, + "verse": 3131, + "verses": 30769, + "versi": 8934, + "version": 3273, + "versions": 16190, + "versity": 1906, + "verst": 42484, + "verstappen": 45064, + "versus": 14548, + "versy": 18522, + "vert": 11742, + "verte": 35158, + "verted": 48173, + "verti": 30459, + "vertical": 14293, + "vertigo": 42477, + "verton": 40632, + "verts": 37265, + "very": 11698, + "very": 1070, + "veryday": 37944, + "verything": 45174, + "ves": 9616, + "ves": 1003, + "vesmatter": 47636, + "vespa": 46029, + "vessel": 16387, + "vessels": 22822, + "vest": 31657, + "vest": 12473, + "vesti": 40349, + "vests": 41906, + "vet": 12294, + "vet": 5951, + "veter": 4330, + "veteran": 20797, + "veteran": 8814, + "veterans": 7092, + "veteransday": 26409, + "veterin": 43959, + "veterinary": 25458, + "veto": 36570, + "vets": 13113, + "vette": 17045, + "vettel": 28700, + "vevo": 35141, + "vex": 36187, + "vex": 43978, + "vey": 34792, + "vey": 3884, + "vez": 35987, + "vez": 17226, + "vf": 25966, + "vfl": 33726, + "vfx": 30149, + "vg": 40591, + "vg": 22346, + "vh": 46953, + "vh": 23847, + "vhs": 21932, + "vi": 603, + "vi": 4259, + "via": 1048, + "viable": 25752, + "viadu": 37012, + "viaduct": 39113, + "vial": 39951, + "vian": 40487, + "vian": 16124, + "vibe": 37974, + "vibe": 12813, + "vibes": 7764, + "vibr": 9527, + "vibrant": 14270, + "vibration": 37456, + "vibrations": 43660, + "vic": 1555, + "vic": 4412, + "vica": 46168, + "vicar": 43899, + "vice": 43572, + "vice": 6931, + "vicente": 39411, + "vices": 8332, + "vich": 24143, + "vici": 46670, + "vicious": 25177, + "vick": 15116, + "vick": 29704, + "vickers": 48452, + "vicki": 34927, + "vicky": 37176, + "vicky": 25788, + "victi": 6861, + "victim": 9133, + "victims": 7131, + "victor": 2423, + "victor": 10690, + "victori": 17555, + "victoria": 39286, + "victoria": 6127, + "victorian": 12350, + "victorias": 47791, + "victories": 24577, + "victorious": 24033, + "victory": 36668, + "victory": 4127, + "vid": 17233, + "vid": 9284, + "vida": 19015, + "vidal": 36678, + "vide": 1334, + "vide": 45244, + "video": 9478, + "video": 1455, + "videogame": 35097, + "videogames": 21149, + "videos": 6081, + "vids": 23035, + "vidy": 29639, + "vidya": 45264, + "vie": 922, + "vie": 8538, + "vien": 36493, + "vienna": 12670, + "vier": 15352, + "vier": 11987, + "viera": 21114, + "viernes": 33826, + "vies": 22458, + "viest": 31979, + "viet": 17558, + "viet": 13128, + "vietnam": 19558, + "vietnam": 8623, + "vietnamese": 22382, + "view": 12004, + "view": 1093, + "viewed": 7226, + "viewer": 15061, + "viewers": 14275, + "viewing": 7124, + "viewpoint": 41604, + "views": 2758, + "vig": 8549, + "vig": 45083, + "vigil": 21538, + "vigil": 19896, + "vigilant": 43026, + "vigne": 40447, + "vigne": 34581, + "vigo": 44097, + "vigor": 26781, + "vii": 17759, + "viii": 20414, + "vijay": 12014, + "vijay": 10823, + "vijaysethu": 47966, + "vik": 10764, + "vik": 17181, + "vika": 39562, + "vikas": 37116, + "viking": 26663, + "viking": 15897, + "vikings": 11713, + "vikram": 41136, + "vikram": 24314, + "viktor": 36101, + "vil": 1338, + "vil": 3000, + "vila": 37505, + 
"vile": 27247, + "vill": 10481, + "vill": 45698, + "villa": 3203, + "villa": 7754, + "village": 34584, + "village": 4331, + "villagers": 34283, + "villages": 17621, + "villain": 15425, + "villains": 25271, + "villanova": 44025, + "villar": 35164, + "villas": 28907, + "ville": 11110, + "ville": 1930, + "villen": 46177, + "villi": 36907, + "vimeo": 48720, + "vin": 1379, + "vin": 2558, + "vina": 35682, + "vinai": 37396, + "vinaigrette": 39876, + "vinay": 43952, + "vince": 32429, + "vince": 6236, + "vincen": 33402, + "vincent": 29069, + "vincent": 10357, + "vinci": 30199, + "vind": 20275, + "vindic": 39582, + "vine": 8471, + "vine": 7721, + "vinegar": 23834, + "vines": 21268, + "vineyard": 16527, + "vineyards": 23082, + "ving": 5375, + "ving": 903, + "vingne": 42579, + "vings": 22510, + "vini": 48119, + "vinnie": 40885, + "vinny": 36794, + "vino": 14509, + "vinod": 43348, + "vins": 34820, + "vinson": 45945, + "vintag": 10936, + "vintage": 13654, + "vintage": 3266, + "viny": 40990, + "vinyl": 22835, + "vinyl": 5754, + "vio": 11913, + "vio": 20324, + "viol": 3164, + "viola": 27438, + "violate": 44875, + "violated": 38192, + "violating": 37554, + "violation": 22919, + "violations": 21969, + "violence": 5450, + "violent": 11565, + "violently": 47758, + "violet": 16118, + "violets": 42861, + "violin": 17058, + "violinist": 36299, + "vion": 35496, + "vious": 6418, + "viously": 7149, + "vip": 45714, + "vip": 7111, + "viper": 27401, + "vips": 41149, + "vir": 1790, + "vir": 25319, + "vira": 35910, + "viral": 11653, + "virat": 32473, + "virgil": 39076, + "virgin": 5651, + "virgin": 12103, + "virgini": 43426, + "virginia": 6728, + "virgo": 39978, + "viro": 32301, + "viron": 38309, + "virtu": 7977, + "virtual": 18059, + "virtual": 7790, + "virtually": 22475, + "virtualreality": 32608, + "virtue": 26860, + "virtues": 42167, + "virtuoso": 47027, + "virus": 11808, + "viruses": 34830, + "vis": 1301, + "vis": 5337, + "visa": 12802, + "visas": 41228, + "vise": 24977, + "vised": 14810, + "vish": 12024, + "vish": 29124, + "vishal": 33648, + "vishnu": 37816, + "visi": 1409, + "visibility": 15921, + "visible": 36658, + "visible": 8626, + "vising": 37439, + "vision": 11147, + "vision": 2515, + "visional": 24627, + "visionary": 22959, + "visions": 13804, + "visit": 3388, + "visit": 1600, + "visitation": 44370, + "visited": 5580, + "visiting": 4680, + "visitor": 13881, + "visitors": 9160, + "visits": 8489, + "visitscotland": 28760, + "visitspain": 48860, + "vism": 15514, + "viso": 46732, + "visor": 24217, + "vist": 21436, + "vista": 13865, + "visu": 7739, + "visual": 17004, + "visual": 7195, + "visualization": 28500, + "visualize": 45057, + "visually": 25743, + "visuals": 21315, + "viswas": 36513, + "viswasam": 47664, + "vit": 4056, + "vit": 35580, + "vita": 15700, + "vital": 32525, + "vital": 10585, + "vitality": 36385, + "vitam": 9856, + "vitamin": 13675, + "vitamins": 22582, + "vito": 36725, + "vity": 4893, + "vitz": 26188, + "vius": 41571, + "viv": 21827, + "viv": 35363, + "viva": 17399, + "vival": 35920, + "vive": 18980, + "vive": 24004, + "vivek": 36243, + "vivi": 11625, + "vivian": 30129, + "vivid": 22984, + "vivo": 28091, + "vivo": 25888, + "vix": 28976, + "vix": 34811, + "vixen": 38757, + "vixx": 32106, + "viz": 28251, + "viz": 31786, + "vj": 45439, + "vj": 30827, + "vk": 41893, + "vl": 37580, + "vl": 36442, + "vla": 23686, + "vlad": 41089, + "vladi": 19320, + "vladimir": 21702, + "vlive": 46797, + "vlog": 18894, + "vm": 16204, + "vm": 20269, + "vma": 35666, + "vmas": 30236, + "vmware": 29615, + "vn": 47098, 
+ "vn": 25076, + "vo": 947, + "vo": 3951, + "voc": 4105, + "voc": 20855, + "vocab": 21346, + "vocabulary": 23804, + "vocal": 34037, + "vocal": 13147, + "vocali": 19134, + "vocalist": 22102, + "vocals": 17666, + "vocation": 20521, + "vocational": 33751, + "vod": 11820, + "vod": 35854, + "vodaf": 28436, + "vodafone": 38695, + "vodka": 13646, + "vogel": 44960, + "vogue": 24418, + "vogue": 13178, + "voic": 29185, + "voice": 13179, + "voice": 3386, + "voiced": 34352, + "voiceof": 44966, + "voiceover": 41979, + "voices": 9144, + "void": 21561, + "voip": 42762, + "voir": 16036, + "vol": 1343, + "vol": 7945, + "volatile": 41022, + "volatility": 32355, + "volcan": 9916, + "volcanic": 24072, + "volcano": 14581, + "volcanoes": 38055, + "voli": 40138, + "volk": 13432, + "volkswag": 14407, + "volkswagen": 15342, + "volley": 7130, + "volley": 34656, + "volleyball": 7458, + "volo": 44791, + "vols": 20404, + "volt": 26430, + "volta": 29879, + "volta": 33480, + "voltage": 23118, + "voltron": 39314, + "volu": 3563, + "volume": 8284, + "volumes": 22651, + "volun": 3356, + "voluntar": 48823, + "voluntary": 23815, + "volunte": 3556, + "volunteer": 32331, + "volunteer": 7114, + "volunteered": 34000, + "volunteering": 14902, + "volunteers": 5939, + "volution": 24043, + "volved": 42888, + "volvo": 39991, + "volvo": 16906, + "vom": 24198, + "vomit": 46485, + "von": 11269, + "von": 8497, + "voo": 19497, + "voodoo": 26869, + "voor": 34291, + "voor": 34464, + "vor": 8338, + "vor": 5308, + "vore": 18215, + "vortex": 30071, + "vos": 16863, + "vot": 48558, + "vote": 6830, + "vote": 2187, + "voted": 6454, + "votel": 41379, + "voter": 44474, + "voter": 14065, + "voters": 8925, + "votes": 6693, + "voting": 5756, + "vou": 11045, + "voucher": 18190, + "vouchers": 23384, + "vous": 10636, + "vow": 34787, + "vows": 21677, + "vox": 29215, + "vox": 22692, + "voy": 10622, + "voy": 15021, + "voyage": 16299, + "voyager": 29669, + "vp": 32758, + "vp": 3896, + "vpn": 38212, + "vr": 16840, + "vr": 5921, + "vre": 44500, + "vre": 17501, + "vs": 11385, + "vs": 1547, + "vsco": 26752, + "vsco": 32822, + "vscocam": 34694, + "vsky": 37791, + "vss": 31919, + "vt": 31732, + "vt": 10291, + "vu": 8664, + "vu": 13230, + "vue": 43915, + "vue": 19313, + "vuel": 31312, + "vuelta": 43856, + "vuitton": 26705, + "vul": 6856, + "vulcan": 34767, + "vulner": 11213, + "vulnerability": 28797, + "vulnerable": 14332, + "vulture": 34593, + "vultures": 47197, + "vv": 19264, + "vv": 35686, + "vw": 28650, + "vw": 13250, + "vx": 47644, + "vy": 11566, + "vy": 5157, + "w": 86, + "w": 342, + "wa": 869, + "wa": 2663, + "waa": 35874, + "wab": 19893, + "wab": 36852, + "wac": 27445, + "wac": 37947, + "wack": 22880, + "wack": 38270, + "wacky": 34318, + "waco": 36035, + "wad": 11133, + "wad": 30451, + "wada": 40006, + "wade": 40237, + "wade": 14180, + "wadi": 37253, + "waf": 17638, + "wafc": 49086, + "waff": 13940, + "waffle": 20375, + "waffles": 24205, + "wag": 5764, + "wag": 19177, + "wage": 10716, + "wager": 43430, + "wages": 19114, + "wagner": 18081, + "wagon": 13260, + "wagons": 47944, + "wags": 48580, + "wah": 24812, + "wah": 18014, + "wahl": 27500, + "wahlberg": 35151, + "wahoo": 47995, + "wai": 11469, + "wai": 21569, + "waifu": 46551, + "waikiki": 44907, + "wain": 28358, + "wain": 20120, + "wainwright": 45878, + "waist": 36946, + "waist": 18459, + "wait": 10021, + "wait": 1885, + "waite": 24272, + "waited": 18492, + "waiter": 32946, + "waitin": 44482, + "waiting": 2680, + "waitress": 39760, + "waitrose": 37164, + "waits": 21361, + "waiver": 42866, + "waj": 49367, + 
"wak": 11172, + "wak": 36015, + "waka": 42696, + "wake": 10501, + "wake": 5731, + "wakefield": 26358, + "wakes": 29108, + "wakeup": 26328, + "wakeup": 35380, + "wakeupamerica": 37474, + "waking": 13025, + "wal": 1056, + "wal": 6903, + "wala": 16468, + "walang": 49180, + "walcott": 45744, + "wald": 46930, + "wald": 15724, + "walden": 39311, + "waldo": 32440, + "waldorf": 38227, + "wale": 41247, + "wale": 20336, + "wales": 25383, + "wales": 5110, + "walgreens": 38490, + "wali": 37576, + "wali": 14768, + "walia": 44455, + "walk": 8588, + "walk": 2374, + "walkaway": 48255, + "walked": 8667, + "walker": 24735, + "walker": 6150, + "walkers": 23366, + "walkin": 45792, + "walking": 12644, + "walking": 3941, + "walkingdead": 14948, + "walkout": 47470, + "walks": 8192, + "walkway": 36614, + "wall": 4316, + "wall": 2569, + "walla": 26007, + "walla": 39982, + "wallabies": 48926, + "wallace": 12535, + "wallart": 36223, + "walled": 36567, + "waller": 45340, + "wallet": 12154, + "wallets": 38550, + "walleye": 49099, + "wallis": 42206, + "wallpaper": 10560, + "wallpapers": 29841, + "walls": 8258, + "wallstreet": 45341, + "wally": 26024, + "walmart": 11972, + "walnut": 16310, + "walnuts": 38294, + "walsall": 42935, + "walsh": 12856, + "walt": 23535, + "walt": 14312, + "waltdisneyworld": 36505, + "walter": 31156, + "walter": 10645, + "walters": 25532, + "waltham": 42742, + "waltham": 45581, + "walton": 19485, + "waltz": 35982, + "wam": 20503, + "wamy": 46970, + "wan": 2060, + "wan": 4557, + "wana": 30830, + "wand": 14636, + "wand": 28559, + "wanda": 25070, + "wander": 12985, + "wander": 24473, + "wandered": 46593, + "wanderers": 27540, + "wandering": 22597, + "wanderlust": 16129, + "wane": 27459, + "wang": 19731, + "wang": 11900, + "wani": 21674, + "wankers": 42189, + "wann": 23622, + "wanna": 35940, + "wanna": 3836, + "wannabe": 40730, + "wannaone": 44832, + "want": 18356, + "want": 1280, + "wanted": 3146, + "wanting": 12801, + "wants": 3107, + "wap": 27393, + "wap": 30368, + "waq": 47512, + "war": 984, + "war": 2238, + "wara": 21631, + "warbler": 33891, + "warcraft": 13660, + "ward": 7728, + "ward": 1460, + "warden": 27798, + "wardly": 30780, + "wardro": 14247, + "wardrobe": 15020, + "wards": 2593, + "ware": 7416, + "ware": 4476, + "wareagle": 35716, + "warehouse": 13054, + "wareness": 41601, + "wareness": 35870, + "wares": 30692, + "warfare": 15739, + "warhammer": 26832, + "warhol": 27554, + "wari": 20977, + "wark": 46346, + "wark": 15164, + "warlock": 42455, + "warm": 14725, + "warm": 3616, + "warmed": 36695, + "warmer": 14328, + "warmest": 30910, + "warming": 8606, + "warmly": 45322, + "warmongers": 33205, + "warms": 32917, + "warmth": 19636, + "warmup": 29904, + "warmups": 44094, + "warn": 19360, + "warned": 16409, + "warner": 28564, + "warner": 13402, + "warning": 4994, + "warnings": 18098, + "warns": 14086, + "waron": 38947, + "warp": 32411, + "warped": 32125, + "warran": 17392, + "warrant": 22554, + "warrants": 45677, + "warranty": 23999, + "warren": 23143, + "warren": 9234, + "warri": 4109, + "warrington": 31203, + "warrior": 18998, + "warrior": 8148, + "warriors": 6421, + "wars": 3931, + "warsaw": 21072, + "warship": 47846, + "wart": 43535, + "wart": 7346, + "wartime": 42998, + "warts": 21781, + "warwick": 23081, + "warwick": 22215, + "warwickshire": 36766, + "wary": 36213, + "was": 3398, + "was": 739, + "wasabi": 47334, + "wash": 3363, + "wash": 7810, + "washed": 14092, + "washer": 24085, + "washes": 38950, + "washing": 13029, + "washington": 16774, + "washington": 4365, + "washingtondc": 40225, 
+ "washingtonpost": 28426, + "wasn": 5044, + "wasnt": 29607, + "wasp": 24889, + "wasps": 35300, + "wassup": 45708, + "wast": 28886, + "waste": 18157, + "waste": 6065, + "wasted": 18278, + "wasteland": 44035, + "wastewater": 34463, + "wasting": 25577, + "wat": 800, + "wat": 10621, + "wata": 42509, + "watch": 7046, + "watch": 1239, + "watchdog": 35303, + "watched": 5775, + "watcher": 35971, + "watchers": 28443, + "watches": 9521, + "watchin": 32432, + "watching": 2113, + "water": 2505, + "water": 1573, + "watercolor": 14211, + "watercolour": 18377, + "waterfall": 16403, + "waterfalls": 26692, + "waterford": 24448, + "waterfront": 16605, + "waterhouse": 45072, + "watering": 19871, + "waterloo": 17465, + "watermelon": 19889, + "waterproof": 17613, + "waters": 7753, + "watershed": 33204, + "waterstones": 45014, + "waterways": 37395, + "watford": 23162, + "watfordfc": 37328, + "wati": 27966, + "watkins": 22539, + "watson": 35490, + "watson": 9294, + "watt": 22899, + "watt": 15805, + "wattpad": 32351, + "watts": 14750, + "wau": 9479, + "wav": 6054, + "wave": 17530, + "wave": 4535, + "waved": 44657, + "waver": 25997, + "waves": 7882, + "waving": 26545, + "wavy": 31941, + "waw": 22039, + "wawrinka": 48414, + "wawx": 47387, + "wax": 18789, + "wax": 11910, + "waxing": 38781, + "way": 3079, + "way": 923, + "wayback": 47822, + "wayne": 23632, + "wayne": 7003, + "ways": 1248, + "waz": 20889, + "waz": 48835, + "wb": 10726, + "wb": 12377, + "wba": 22675, + "wbb": 14482, + "wbc": 26745, + "wbo": 49053, + "wbz": 35471, + "wc": 4842, + "wc": 5755, + "wcc": 47166, + "wcc": 34926, + "wcpo": 46624, + "wcs": 39916, + "wcvb": 32709, + "wcw": 9041, + "wd": 15998, + "wd": 7494, + "wdw": 40334, + "we": 598, + "we": 649, + "wea": 37146, + "wea": 47301, + "weak": 12128, + "weak": 10128, + "weaker": 39735, + "weakness": 21448, + "weaknesses": 43487, + "weal": 14759, + "wealth": 33150, + "wealth": 7904, + "wealthy": 22617, + "weap": 6156, + "weapon": 42612, + "weapon": 10537, + "weapons": 10007, + "wear": 12206, + "wear": 2839, + "wearab": 22983, + "wearable": 44943, + "wearable": 24973, + "wearables": 30319, + "weare": 4264, + "weare": 27867, + "weareall": 45980, + "wearec": 43620, + "wearen": 45635, + "weareone": 16149, + "weareoneexo": 16448, + "wearethe": 40242, + "wearing": 3309, + "wears": 11869, + "weary": 38766, + "weasel": 44308, + "weather": 8808, + "weather": 2237, + "weathercee": 44980, + "weatherchannel": 42138, + "weav": 22260, + "weave": 22450, + "weaver": 20297, + "weaving": 27131, + "web": 2055, + "web": 4601, + "webb": 15708, + "webber": 34248, + "webcam": 24211, + "webcam": 22589, + "webcamtoy": 27719, + "webcast": 28256, + "webcomic": 34286, + "webcomics": 39811, + "webdesign": 20470, + "webdev": 37000, + "webdevelopment": 47553, + "weber": 20179, + "webin": 8460, + "webinar": 8921, + "webinars": 47755, + "webpage": 46964, + "webs": 32829, + "webseries": 44819, + "website": 3364, + "websites": 19278, + "webster": 19471, + "websummit": 48069, + "wec": 33152, + "wechat": 46124, + "wed": 1687, + "wed": 3478, + "wedd": 7576, + "wedding": 11204, + "wedding": 3101, + "weddings": 15964, + "wedge": 21446, + "wedges": 33179, + "wedne": 2380, + "wednesday": 9311, + "wednesday": 2689, + "wednesdaymotivation": 37860, + "wednesdays": 24943, + "wednesdaywisdom": 11445, + "wedo": 43432, + "weds": 19107, + "wee": 716, + "wee": 8288, + "weed": 36935, + "weed": 8015, + "weeds": 26326, + "week": 1286, + "week": 994, + "weekday": 29244, + "weekdays": 44330, + "weekend": 17205, + "weekend": 1456, + "weekender": 36547, + 
"weekends": 14564, + "weekly": 34652, + "weekly": 5885, + "weeknd": 29925, + "weeks": 2898, + "weeksary": 24628, + "ween": 17517, + "ween": 1599, + "weep": 39270, + "weeping": 36629, + "weer": 32491, + "weet": 17742, + "weets": 13454, + "wef": 23313, + "weg": 47867, + "weg": 47561, + "wego": 44784, + "wego": 28220, + "weh": 48458, + "weh": 40313, + "weho": 47798, + "wei": 6958, + "wei": 20952, + "weibo": 20613, + "weigh": 10565, + "weigh": 17346, + "weighed": 33210, + "weighing": 24455, + "weighs": 20481, + "weight": 12723, + "weight": 3868, + "weighted": 43179, + "weightlifting": 36164, + "weightloss": 20359, + "weights": 21374, + "weil": 43720, + "weiler": 42203, + "wein": 29134, + "wein": 37684, + "weiner": 38822, + "weinstein": 34367, + "weir": 11299, + "weir": 25517, + "weird": 27981, + "weird": 5613, + "weirdest": 29482, + "weirdo": 32476, + "weis": 26251, + "weiser": 34833, + "weiss": 24794, + "wel": 1267, + "wel": 8042, + "welch": 25820, + "welcom": 11578, + "welcome": 18318, + "welcome": 1881, + "welcomed": 12590, + "welcomes": 9304, + "welcometo": 47511, + "welcoming": 8775, + "weld": 39776, + "welding": 24956, + "welfare": 12129, + "well": 3277, + "well": 1123, + "wellbeing": 14273, + "weller": 40921, + "welling": 49165, + "wellington": 15389, + "wellness": 40574, + "wellness": 9904, + "wells": 42705, + "wells": 9804, + "welove": 13573, + "welp": 28391, + "wels": 20852, + "welsh": 19173, + "welsh": 10977, + "welt": 38595, + "welter": 37115, + "welterweight": 39617, + "wemb": 15213, + "wembley": 16579, + "wen": 6590, + "wen": 11278, + "wend": 15166, + "wendell": 42091, + "wendy": 31616, + "wendy": 14074, + "wenger": 21105, + "went": 18633, + "went": 2437, + "wentworth": 36423, + "wentz": 39179, + "wer": 6316, + "wer": 2980, + "were": 15461, + "were": 1365, + "wered": 6605, + "weren": 13611, + "werewolf": 32001, + "werk": 30176, + "werner": 29917, + "wers": 7110, + "wes": 18620, + "wes": 14738, + "wesle": 29606, + "wesley": 17332, + "wesleyan": 32509, + "wesome": 33292, + "wess": 44431, + "west": 2973, + "west": 1593, + "westbound": 29208, + "westbrook": 26948, + "westchester": 36675, + "westcoast": 44610, + "westend": 44815, + "wester": 9846, + "western": 17079, + "western": 4463, + "westfield": 32309, + "westh": 36798, + "westin": 43232, + "westlake": 41535, + "westminster": 15158, + "weston": 22771, + "westside": 33762, + "westwood": 26371, + "westworld": 42287, + "wet": 12406, + "wet": 6682, + "weta": 40946, + "wethenorth": 45281, + "wethepeople": 48030, + "wether": 33794, + "wether": 48405, + "wetland": 37357, + "wetlands": 26547, + "wett": 41971, + "wetter": 43957, + "wewant": 39280, + "wewill": 37241, + "wex": 17234, + "wexford": 29876, + "wexmondays": 49042, + "wey": 30376, + "wey": 19781, + "weymouth": 41433, + "wf": 14576, + "wf": 22313, + "wfa": 44606, + "wfc": 36431, + "wfp": 35193, + "wftv": 47075, + "wg": 21091, + "wg": 25857, + "wga": 32354, + "wgn": 48828, + "wh": 573, + "wh": 13844, + "wha": 18994, + "wha": 25884, + "whal": 38967, + "whale": 37083, + "whale": 11650, + "whales": 17722, + "wham": 42506, + "whar": 15517, + "wharf": 22452, + "wharton": 43320, + "what": 4268, + "what": 768, + "whatcha": 37160, + "whate": 6695, + "whatever": 6743, + "whati": 23500, + "whats": 9263, + "whats": 13084, + "whatsapp": 10119, + "whatsoever": 39928, + "whatson": 35632, + "whatyou": 30508, + "whe": 2009, + "whead": 34583, + "wheat": 20505, + "wheat": 10303, + "wheaton": 46933, + "wheel": 7360, + "wheel": 6744, + "wheelchair": 17713, + "wheeler": 18405, + "wheeling": 34839, + 
"wheels": 8025, + "whel": 9792, + "whelan": 40715, + "when": 8753, + "when": 827, + "whenever": 10500, + "where": 7052, + "where": 1234, + "whereabouts": 47808, + "whereas": 42234, + "wheres": 46345, + "wherever": 14103, + "whereyou": 46837, + "whether": 5903, + "whew": 39016, + "whey": 34556, + "whi": 4295, + "whi": 33129, + "which": 1448, + "whiche": 48719, + "whichever": 49138, + "whil": 8499, + "while": 1519, + "whilst": 8596, + "whim": 27766, + "whimsical": 42282, + "whip": 14412, + "whipped": 22323, + "whipping": 41567, + "whir": 20873, + "whirl": 30962, + "whirlwind": 47771, + "whis": 6024, + "whiskey": 41381, + "whiskey": 11610, + "whisky": 37567, + "whisky": 12599, + "whisp": 21986, + "whispe": 30356, + "whisper": 27616, + "whisperer": 41368, + "whispering": 42599, + "whispers": 29133, + "whist": 13640, + "whistle": 23972, + "whistle": 19746, + "whistleblower": 40410, + "whistler": 29633, + "whit": 4398, + "whit": 31498, + "whitaker": 35851, + "whitby": 30858, + "white": 4699, + "white": 1579, + "whiteboard": 40839, + "whitec": 24575, + "whitehall": 42827, + "whitehead": 43560, + "whitehouse": 20776, + "whitening": 35540, + "whitepaper": 42713, + "whites": 35886, + "whites": 18835, + "whitesox": 28816, + "whitewater": 49350, + "whitfield": 48404, + "whitley": 40564, + "whitman": 32394, + "whitney": 43021, + "whitney": 18048, + "whitt": 33784, + "whittaker": 47595, + "whl": 25801, + "who": 2969, + "who": 822, + "whoa": 16943, + "whoever": 11137, + "whois": 41884, + "whole": 10360, + "whole": 2954, + "wholefoods": 42840, + "wholesale": 18306, + "wholesome": 35959, + "whom": 38158, + "whom": 12873, + "whoo": 20003, + "whoo": 49290, + "whoop": 22060, + "whoops": 28433, + "whopping": 34384, + "whore": 31690, + "whos": 41460, + "whos": 27130, + "whose": 6933, + "whouse": 45927, + "whs": 26292, + "wht": 32470, + "whufc": 31695, + "whun": 18272, + "why": 11040, + "why": 1182, + "whyte": 42386, + "wi": 820, + "wi": 5585, + "wib": 45303, + "wic": 7834, + "wich": 9759, + "wich": 5238, + "wichita": 22566, + "wick": 6798, + "wick": 6479, + "wicked": 32579, + "wicked": 12825, + "wicker": 38096, + "wicket": 19180, + "wickets": 22110, + "wicklow": 39039, + "wicz": 30121, + "wid": 11886, + "wid": 20886, + "wide": 19341, + "wide": 3184, + "widely": 16195, + "widening": 46598, + "wider": 21263, + "widesp": 20598, + "widespread": 21258, + "widget": 43906, + "wido": 28068, + "widow": 19949, + "widows": 42129, + "width": 23571, + "wie": 21378, + "wie": 9131, + "wielding": 47272, + "wien": 38131, + "wiener": 40567, + "wies": 42788, + "wif": 37572, + "wife": 3607, + "wifey": 35282, + "wifi": 11026, + "wig": 23690, + "wig": 12216, + "wigan": 23130, + "wiggins": 32329, + "wiggle": 47812, + "wight": 41278, + "wight": 15545, + "wigs": 31207, + "wii": 8005, + "wiiu": 40980, + "wiki": 10373, + "wiki": 24265, + "wikileaks": 28731, + "wikipedia": 15176, + "wil": 1352, + "wil": 20581, + "wilbur": 43069, + "wilcox": 43231, + "wild": 2780, + "wild": 3220, + "wildatlantic": 35500, + "wildatlanticway": 35776, + "wildcard": 37360, + "wildcat": 49077, + "wildcat": 25870, + "wildcats": 15909, + "wilde": 23498, + "wilder": 14343, + "wilder": 23499, + "wilderness": 16506, + "wildest": 43028, + "wildfire": 22788, + "wildfires": 29184, + "wildflower": 27628, + "wildflower": 33181, + "wildflowerhour": 31302, + "wildflowers": 29136, + "wildlife": 13298, + "wildlife": 5250, + "wildlifephotography": 32307, + "wildlifewednesday": 48537, + "wildly": 35981, + "wildoz": 40113, + "wiley": 32747, + "wilhelm": 39696, + "wilkes": 
39548, + "wilkins": 36986, + "wilkinson": 26797, + "will": 5062, + "will": 751, + "willam": 43276, + "willard": 44920, + "wille": 48739, + "willem": 38044, + "willi": 2256, + "william": 8420, + "william": 4705, + "williams": 38452, + "williams": 4075, + "williamsburg": 30683, + "williamson": 20793, + "willie": 13907, + "willing": 34160, + "willing": 11718, + "willingness": 40573, + "willis": 18491, + "willow": 33887, + "willow": 15665, + "wills": 26913, + "willy": 34502, + "willy": 19599, + "wilmington": 28052, + "wilms": 47879, + "wilshere": 48359, + "wilson": 23629, + "wilson": 5622, + "wilt": 23394, + "wilt": 47357, + "wilton": 46638, + "wiltshire": 28025, + "wim": 8662, + "wim": 27580, + "wimble": 11752, + "wimbledon": 12229, + "win": 831, + "win": 1225, + "winchester": 20647, + "wind": 6812, + "wind": 3630, + "winder": 44454, + "winder": 46245, + "winding": 22390, + "windmill": 34084, + "windo": 3110, + "window": 26675, + "window": 4879, + "windows": 5437, + "winds": 12668, + "winds": 7012, + "windshield": 33002, + "windsor": 44322, + "windsor": 12884, + "windy": 13446, + "wine": 7375, + "wine": 2604, + "winelover": 26357, + "winemaker": 41588, + "wineoclock": 43846, + "wineries": 49349, + "winery": 15500, + "wines": 8263, + "winetasting": 41288, + "winewednesday": 35447, + "wing": 8141, + "wing": 1340, + "winged": 24993, + "winger": 22727, + "winget": 44578, + "wings": 5178, + "wink": 34455, + "wink": 25859, + "winkle": 36430, + "winn": 38104, + "winne": 46273, + "winner": 32961, + "winner": 2520, + "winners": 4320, + "winni": 13018, + "winnie": 29022, + "winning": 42099, + "winning": 2577, + "winnings": 46490, + "winnipeg": 14369, + "winona": 49202, + "wins": 46839, + "wins": 2718, + "winslow": 39658, + "winston": 14848, + "winter": 7340, + "winter": 2541, + "winters": 21587, + "wintry": 39504, + "wip": 10447, + "wipe": 26761, + "wiped": 31822, + "wipes": 33463, + "wir": 16849, + "wir": 44838, + "wire": 7558, + "wire": 7794, + "wired": 18935, + "wireless": 9103, + "wires": 24311, + "wiring": 36434, + "wirral": 34675, + "wis": 3392, + "wis": 20405, + "wiscon": 9857, + "wisconsin": 10265, + "wisdom": 42474, + "wisdom": 5425, + "wise": 19116, + "wise": 5558, + "wisely": 26173, + "wiser": 44859, + "wish": 11328, + "wish": 2412, + "wished": 25883, + "wishes": 6045, + "wishing": 5307, + "wishlist": 31969, + "wit": 584, + "wit": 8531, + "witch": 20139, + "witch": 10083, + "witchcraft": 35065, + "witcher": 33684, + "witches": 21673, + "with": 1435, + "with": 593, + "withdra": 24696, + "withdraw": 31670, + "withdrawal": 25765, + "withdrawn": 46687, + "withdraws": 48637, + "wither": 39655, + "witherspoon": 45409, + "within": 4154, + "withme": 44670, + "without": 32836, + "without": 2193, + "withstand": 42236, + "withthe": 36872, + "withus": 30572, + "withyou": 30351, + "witne": 12096, + "witness": 8793, + "witnessed": 20187, + "witnesses": 22778, + "witnessing": 33618, + "wits": 30938, + "witt": 38194, + "witt": 17168, + "witter": 31597, + "witty": 29970, + "witz": 44186, + "witz": 13265, + "wiv": 48925, + "wives": 14378, + "wiwx": 44461, + "wiz": 7730, + "wiz": 23178, + "wizar": 49121, + "wizard": 30490, + "wizard": 14295, + "wizards": 19140, + "wizkid": 40146, + "wj": 19739, + "wj": 35453, + "wk": 11512, + "wk": 11528, + "wkend": 42336, + "wknd": 20851, + "wks": 25508, + "wku": 43377, + "wl": 13299, + "wl": 9613, + "wm": 20268, + "wm": 15790, + "wn": 1186, + "wn": 757, + "wnba": 32358, + "wned": 8628, + "wns": 12950, + "wnt": 22484, + "wny": 24833, + "wo": 1613, + "wo": 11132, + "woah": 
17751, + "wob": 35984, + "woc": 39011, + "wod": 41522, + "woes": 27860, + "wof": 45671, + "woj": 48931, + "wok": 28912, + "woke": 9331, + "woken": 43697, + "woking": 43931, + "wol": 2798, + "wol": 48622, + "wold": 42399, + "wolf": 9453, + "wolf": 5916, + "wolfe": 24989, + "wolff": 34369, + "wolfgang": 34061, + "wolfpack": 30887, + "wolve": 45101, + "wolver": 14334, + "wolverhampton": 34518, + "wolverine": 23353, + "wolverines": 42003, + "wolves": 9372, + "wom": 1087, + "womack": 48980, + "woman": 15716, + "woman": 2308, + "womanc": 35630, + "womancrush": 37721, + "womancrushwednesday": 39714, + "womanin": 30562, + "womaninbiz": 36482, + "womb": 37023, + "women": 3648, + "women": 1507, + "womenin": 13062, + "womeninscience": 41343, + "womeninstem": 29380, + "womenintech": 31470, + "womenof": 48421, + "womens": 12822, + "womens": 14408, + "womensart": 38548, + "womensday": 13956, + "womenshi": 22887, + "womenshistorymonth": 24982, + "womensmarch": 30102, + "won": 1528, + "won": 1749, + "wonder": 2070, + "wonder": 3936, + "wondercon": 46944, + "wondered": 15550, + "wonderful": 2582, + "wonderfully": 23245, + "wondering": 8360, + "wonderland": 13874, + "wonders": 14048, + "wonderwoman": 31000, + "wondo": 38402, + "wondr": 46771, + "wong": 17876, + "wonka": 43463, + "wont": 43174, + "wont": 15952, + "woo": 1867, + "woo": 9322, + "wood": 3269, + "wood": 1704, + "woodbridge": 49074, + "wooden": 48226, + "wooden": 9057, + "woodland": 44314, + "woodland": 17447, + "woodlands": 32430, + "woodley": 40566, + "woodpecker": 32684, + "woods": 6267, + "woodson": 48967, + "woodstock": 29486, + "woodward": 27419, + "woodwork": 47386, + "woodworking": 29267, + "woody": 38627, + "woody": 17144, + "woof": 34234, + "woof": 24028, + "woohoo": 20172, + "wook": 29192, + "wool": 9967, + "wool": 13283, + "woolf": 43728, + "woolly": 47722, + "woon": 33126, + "wooo": 43217, + "woop": 31884, + "woot": 22466, + "wor": 641, + "worcester": 22172, + "worcester": 19580, + "worcestershire": 38440, + "worcestershirehour": 43644, + "word": 8272, + "word": 2653, + "wordof": 33500, + "wordoftheday": 43594, + "wordpress": 15193, + "words": 31007, + "words": 2709, + "wore": 8953, + "work": 1636, + "work": 951, + "workday": 29735, + "worked": 5410, + "worker": 8098, + "workers": 4795, + "workflow": 28502, + "workforce": 14672, + "workin": 31825, + "workin": 26323, + "working": 20806, + "working": 1699, + "workinprogress": 46086, + "workout": 6773, + "workouts": 22779, + "workplace": 11959, + "workplaces": 47383, + "works": 2322, + "workshop": 3832, + "workshops": 12262, + "workspace": 34470, + "worl": 5221, + "world": 2334, + "world": 1002, + "worlda": 46627, + "worldbank": 36759, + "worldbookday": 31191, + "worldcup": 42525, + "worldcup": 8650, + "worlden": 44668, + "worldenviron": 47115, + "worldenvironmentday": 47522, + "worldly": 36268, + "worldo": 41698, + "worldof": 22636, + "worldre": 33951, + "worlds": 7691, + "worldseries": 26695, + "worldtour": 23202, + "worldwater": 41176, + "worldwaterday": 44520, + "worldwide": 6214, + "worm": 33709, + "worm": 10945, + "worms": 20231, + "worn": 9037, + "worried": 11911, + "worries": 17684, + "worry": 7534, + "worrying": 24058, + "worse": 8236, + "worsen": 46344, + "worshi": 31840, + "worship": 46399, + "worship": 9023, + "worst": 5719, + "wort": 30209, + "worth": 10671, + "worth": 2450, + "worthing": 39929, + "worthit": 40830, + "worthless": 44736, + "worths": 44633, + "worthwhile": 36295, + "worthy": 8881, + "worx": 44973, + "wot": 24863, + "wou": 5279, + "would": 39873, + "would": 
1311, + "wouldn": 5878, + "wouldnt": 41595, + "wound": 19231, + "wounded": 14859, + "wounds": 21290, + "woven": 19830, + "wow": 22191, + "wow": 2781, + "woz": 44558, + "wozni": 47782, + "wp": 15378, + "wp": 13302, + "wpg": 35048, + "wps": 33386, + "wq": 45195, + "wr": 1189, + "wr": 8028, + "wra": 3852, + "wra": 46004, + "wral": 49050, + "wrangler": 30923, + "wrap": 7094, + "wrapped": 9875, + "wrapping": 15223, + "wraps": 18236, + "wrath": 29783, + "wray": 48943, + "wrc": 16004, + "wre": 3168, + "wreath": 23091, + "wrec": 20879, + "wreck": 28775, + "wreck": 15017, + "wrecked": 32695, + "wreckem": 45676, + "wrecking": 36956, + "wrecks": 45545, + "wren": 20191, + "wren": 31970, + "wrench": 30980, + "wrest": 4177, + "wrestle": 17097, + "wrestle": 28086, + "wrestlemania": 18849, + "wrestler": 19790, + "wrestlers": 25902, + "wrestling": 31292, + "wrestling": 5904, + "wrexham": 34479, + "wri": 7667, + "wri": 42007, + "wright": 28616, + "wright": 6991, + "wrights": 43711, + "wrigley": 33538, + "wrink": 22201, + "wrinkle": 46642, + "wrinkles": 35525, + "wrist": 19243, + "wrist": 16139, + "wristband": 36890, + "wristbands": 44864, + "writ": 2902, + "write": 28874, + "write": 4946, + "writer": 27886, + "writer": 4422, + "writers": 18742, + "writers": 7307, + "writerslife": 25007, + "writes": 8023, + "writing": 16053, + "writing": 2979, + "writingcommunity": 39178, + "writings": 36259, + "written": 5231, + "wro": 5447, + "wrong": 18381, + "wrong": 3669, + "wrongly": 45642, + "wrote": 5796, + "wrought": 48125, + "wrs": 45280, + "ws": 6300, + "ws": 799, + "wsb": 30681, + "wsbtv": 38394, + "wsj": 19764, + "wski": 12548, + "wsl": 43706, + "wsoc": 40253, + "wson": 33954, + "wsop": 41231, + "wsu": 44674, + "wsu": 32913, + "wsw": 43285, + "wt": 15873, + "wt": 12255, + "wta": 25984, + "wtc": 39718, + "wtf": 6891, + "wth": 23021, + "wthr": 45269, + "wti": 47345, + "wto": 36406, + "wts": 32159, + "wu": 9710, + "wu": 9837, + "wud": 43870, + "wul": 35154, + "wunder": 36661, + "wur": 24040, + "wurst": 44409, + "wusa": 40021, + "wut": 28590, + "wv": 18920, + "wv": 14743, + "wvu": 44878, + "wvu": 25879, + "ww": 3181, + "ww": 4491, + "wwc": 26505, + "wwdc": 47441, + "wwe": 12112, + "wwe": 5290, + "wwen": 23308, + "wwenetwork": 37228, + "wwenxt": 39898, + "wwer": 32038, + "wwf": 23332, + "wwfc": 42681, + "wwg": 35322, + "wwi": 20194, + "wwii": 10261, + "www": 26074, + "www": 9667, + "wwwbigbaldhead": 30761, + "wwww": 34224, + "wwww": 25200, + "wwwww": 48268, + "wwx": 47431, + "wx": 18192, + "wx": 3561, + "wy": 4665, + "wy": 7625, + "wyatt": 21660, + "wyd": 33113, + "wye": 48436, + "wye": 43751, + "wylie": 49330, + "wyn": 11802, + "wyn": 17504, + "wynn": 36117, + "wynne": 35951, + "wynonna": 41456, + "wynonnaearp": 43755, + "wyoming": 18693, + "x": 87, + "x": 343, + "xa": 24831, + "xan": 45530, + "xander": 45601, + "xavi": 36342, + "xavier": 41044, + "xavier": 18567, + "xb": 33678, + "xbox": 18063, + "xbox": 7748, + "xboxone": 27410, + "xc": 12515, + "xchange": 49132, + "xd": 6380, + "xe": 42886, + "xe": 19183, + "xen": 15568, + "xer": 49005, + "xf": 35274, + "xfactor": 25211, + "xfinity": 35107, + "xford": 34732, + "xh": 45771, + "xham": 25284, + "xi": 2467, + "xi": 7376, + "xia": 19854, + "xia": 20724, + "xian": 42570, + "xiao": 49318, + "xiaomi": 27477, + "xico": 38469, + "xide": 17398, + "xie": 40122, + "xie": 15976, + "xii": 36525, + "xiii": 28199, + "xim": 11217, + "xin": 27053, + "xin": 41517, + "xing": 14383, + "xion": 24164, + "xis": 35793, + "xit": 5316, + "xiumin": 36563, + "xiv": 16125, + "xj": 42453, + 
"xl": 36529, + "xl": 8833, + "xley": 38223, + "xm": 18626, + "xma": 48805, + "xmas": 48848, + "xmas": 6425, + "xmen": 28708, + "xn": 25388, + "xo": 26936, + "xo": 9000, + "xon": 29186, + "xon": 8482, + "xox": 11531, + "xox": 34050, + "xoxo": 13313, + "xp": 15651, + "xper": 32200, + "xperia": 37615, + "xpo": 44377, + "xpress": 31809, + "xq": 40606, + "xr": 26276, + "xrp": 26965, + "xs": 16397, + "xt": 1052, + "xtina": 45520, + "xton": 32666, + "xton": 10597, + "xtra": 26969, + "xtre": 27025, + "xtreme": 33483, + "xu": 42063, + "xu": 37198, + "xv": 17768, + "xvi": 44031, + "xx": 5675, + "xx": 3553, + "xxl": 29777, + "xxx": 33923, + "xxx": 8352, + "xxxx": 32035, + "xxxx": 22819, + "xxxxx": 44195, + "xy": 20023, + "xy": 11443, + "y": 88, + "y": 344, + "ya": 5018, + "ya": 1430, + "yaa": 48847, + "yaa": 34498, + "yaan": 34680, + "yab": 27737, + "yach": 9039, + "yacht": 43806, + "yacht": 12859, + "yachts": 29260, + "yad": 13276, + "yad": 40047, + "yadav": 26650, + "yaf": 38019, + "yag": 35081, + "yah": 16170, + "yah": 12381, + "yaho": 37929, + "yahoo": 38152, + "yahoo": 16846, + "yak": 11014, + "yak": 29074, + "yaki": 44677, + "yaku": 29572, + "yakuza": 42628, + "yal": 16198, + "yal": 13418, + "yale": 39926, + "yale": 17157, + "yall": 9210, + "yam": 6666, + "yam": 19318, + "yama": 23512, + "yamaha": 18854, + "yan": 3949, + "yan": 4788, + "yana": 18698, + "yand": 38609, + "yang": 23818, + "yang": 12605, + "yani": 26439, + "yankee": 21554, + "yankees": 11889, + "yann": 40246, + "yann": 38657, + "yao": 45231, + "yap": 48700, + "yap": 34468, + "yar": 6786, + "yar": 23071, + "yard": 20234, + "yard": 4313, + "yards": 7550, + "yarmouth": 45941, + "yarn": 19702, + "yarra": 46824, + "yas": 8168, + "yas": 20570, + "yash": 30216, + "yash": 37836, + "yasi": 37700, + "yasss": 23873, + "yat": 29443, + "yat": 34965, + "yates": 27677, + "yatra": 38932, + "yav": 41275, + "yaw": 31989, + "yawn": 48643, + "yay": 20614, + "yay": 6712, + "yaya": 37608, + "yaz": 19348, + "yaz": 42252, + "yb": 41785, + "yb": 27615, + "yc": 11931, + "ycle": 38089, + "yd": 29896, + "yd": 9534, + "yday": 15899, + "yds": 24819, + "ye": 693, + "ye": 4582, + "yea": 13687, + "yeah": 29405, + "yeah": 3908, + "year": 5163, + "year": 935, + "yearbook": 21636, + "yearling": 48392, + "yearly": 24541, + "yearof": 31944, + "yearofthe": 47899, + "years": 30864, + "years": 1151, + "yearsof": 14932, + "yearswith": 45249, + "yeast": 25819, + "yeats": 44903, + "yed": 28137, + "yed": 3301, + "yee": 18114, + "yee": 23108, + "yeezy": 24901, + "yeg": 16854, + "yeg": 11976, + "yegfood": 48711, + "yeh": 21331, + "yel": 3323, + "yel": 48164, + "yell": 30824, + "yelled": 39199, + "yelling": 26581, + "yellow": 12059, + "yellow": 4481, + "yellowstone": 29241, + "yelp": 31674, + "yemen": 29276, + "yemen": 12513, + "yemeni": 44656, + "yemi": 42267, + "yen": 29602, + "yen": 17960, + "yeo": 32292, + "yeo": 43830, + "yeol": 15808, + "yeon": 16602, + "yep": 10964, + "yer": 15491, + "yer": 2371, + "yers": 3722, + "yes": 21620, + "yes": 1958, + "yess": 42778, + "yess": 40189, + "yesss": 36210, + "yessss": 45620, + "yester": 1905, + "yesterday": 1926, + "yesterdays": 36238, + "yesung": 38527, + "yet": 2296, + "yeti": 34228, + "yev": 39855, + "yew": 34660, + "yey": 45447, + "yg": 16396, + "ygk": 44758, + "ygo": 46166, + "yh": 41978, + "yi": 5826, + "yi": 14762, + "yield": 16825, + "yields": 24856, + "yikes": 25094, + "yin": 26476, + "yin": 23543, + "ying": 42933, + "ying": 910, + "yixing": 32120, + "yk": 30965, + "yl": 2656, + "yl": 4045, + "ylan": 41875, + "ylde": 42850, + 
"yle": 32305, + "yle": 10770, + "ylene": 34239, + "yler": 48081, + "yles": 42860, + "ylon": 22375, + "ylor": 48468, + "ym": 1786, + "ym": 19587, + "yman": 29077, + "ymc": 47101, + "ymca": 22369, + "yment": 8199, + "ymes": 39968, + "ymi": 5271, + "ymm": 37133, + "ymoun": 41426, + "ymouth": 36429, + "yn": 2823, + "yn": 4100, + "yne": 18238, + "ynes": 18020, + "ynn": 10499, + "ynna": 48292, + "ynwa": 27372, + "yo": 586, + "yo": 3497, + "yoda": 31922, + "yof": 5966, + "yofficial": 21818, + "yofthe": 43983, + "yog": 34985, + "yog": 36539, + "yoga": 25872, + "yoga": 5523, + "yogh": 32626, + "yoghurt": 33491, + "yogi": 22766, + "yogur": 16137, + "yogurt": 16819, + "yoh": 48880, + "yoke": 41969, + "yoko": 25929, + "yoko": 32256, + "yokohama": 42409, + "yol": 19387, + "yol": 35218, + "yolanda": 43845, + "yolo": 20905, + "yom": 34718, + "yom": 44527, + "yon": 10147, + "yon": 7604, + "yong": 27960, + "yong": 20887, + "yonge": 48592, + "yoo": 25842, + "yoo": 20775, + "yoon": 30863, + "yoon": 22113, + "yoona": 32736, + "yoongi": 24037, + "yor": 2028, + "yor": 21132, + "york": 5318, + "york": 2705, + "yorker": 23865, + "yorkers": 41041, + "yorks": 39093, + "yorkshi": 43367, + "yorkshire": 27007, + "yorkshire": 8633, + "yoruba": 46083, + "yos": 35607, + "yosemite": 25893, + "yoshi": 22920, + "yoshi": 25354, + "yot": 22875, + "yotes": 46157, + "yotpo": 26113, + "you": 1562, + "you": 592, + "youare": 33879, + "youcan": 32498, + "youknow": 47919, + "youknow": 41088, + "youn": 1596, + "young": 6939, + "young": 1888, + "younger": 10414, + "youngest": 12316, + "youngjae": 46426, + "youngster": 35881, + "youngsters": 28098, + "younow": 33831, + "your": 2130, + "your": 695, + "youre": 28344, + "youre": 19695, + "yourown": 28583, + "yours": 3834, + "yourself": 3053, + "yourselves": 19747, + "youth": 10743, + "youth": 3281, + "youthful": 37480, + "youths": 23614, + "youts": 22737, + "youtu": 13868, + "youtube": 31258, + "youtube": 3895, + "youtuber": 24720, + "youtubers": 36822, + "youu": 35055, + "youuu": 35324, + "youuuu": 47123, + "yoy": 41865, + "yp": 38370, + "yp": 34734, + "ypg": 37386, + "yql": 46122, + "yqr": 36881, + "yr": 18395, + "yr": 4333, + "yrs": 4822, + "ys": 1971, + "ys": 961, + "yser": 33121, + "ysis": 4843, + "ysl": 45681, + "ysm": 23842, + "yst": 40528, + "yt": 36777, + "yt": 14779, + "ytd": 47524, + "yte": 48172, + "yu": 3371, + "yu": 8887, + "yuan": 26236, + "yuck": 48282, + "yugo": 48231, + "yuh": 42547, + "yui": 47932, + "yuk": 17037, + "yuk": 24063, + "yuki": 34010, + "yukon": 27094, + "yul": 39832, + "yum": 6869, + "yum": 7259, + "yuma": 47566, + "yummy": 7687, + "yun": 14976, + "yun": 18288, + "yung": 44545, + "yung": 17676, + "yunho": 39748, + "yup": 13231, + "yur": 42533, + "yuri": 23823, + "yusuf": 33222, + "yuv": 36784, + "yves": 33698, + "yvon": 23327, + "yvonne": 32583, + "yvr": 29058, + "yw": 33741, + "yx": 35624, + "yxe": 34240, + "yy": 3433, + "yy": 8321, + "yya": 37444, + "yyc": 27542, + "yyc": 11741, + "yyj": 26203, + "yyy": 11514, + "yyyy": 38749, + "yyyy": 16955, + "yyyyy": 26089, + "yyyyyy": 47055, + "yz": 37579, + "yz": 46451, + "yü": 48232, + "z": 89, + "z": 345, + "za": 3710, + "za": 2186, + "zab": 22982, + "zable": 37002, + "zac": 25501, + "zac": 19159, + "zach": 13401, + "zach": 11815, + "zachary": 32401, + "zack": 30567, + "zack": 19120, + "zad": 47314, + "zad": 27838, + "zada": 34889, + "zaf": 21837, + "zafar": 46668, + "zag": 26091, + "zag": 29346, + "zagre": 34107, + "zagreb": 35355, + "zah": 23258, + "zah": 43297, + "zaha": 44408, + "zai": 44329, + "zai": 27065, + 
"zain": 34400, + "zain": 45366, + "zak": 13050, + "zak": 20738, + "zaki": 48091, + "zal": 20552, + "zal": 33298, + "zam": 7218, + "zam": 41578, + "zambia": 21671, + "zan": 7284, + "zan": 17835, + "zana": 39643, + "zand": 37712, + "zane": 34786, + "zani": 45373, + "zania": 15059, + "zano": 27637, + "zanzi": 47835, + "zap": 24134, + "zapp": 33504, + "zappa": 46592, + "zar": 5458, + "zar": 16392, + "zara": 24454, + "zardari": 20174, + "zas": 48261, + "zation": 3683, + "zawa": 49281, + "zay": 7102, + "zayed": 36726, + "zayn": 22292, + "zayn": 10308, + "zaynmalik": 25278, + "zazzle": 47857, + "ze": 2254, + "ze": 1298, + "zeal": 44951, + "zealand": 7618, + "zeb": 46518, + "zebra": 47394, + "zebra": 22548, + "zed": 21047, + "zed": 1993, + "zedd": 45608, + "zee": 25468, + "zee": 14080, + "zeiss": 47460, + "zeit": 37898, + "zeit": 37906, + "zek": 40829, + "zeke": 47065, + "zel": 10389, + "zel": 12027, + "zelda": 17138, + "zell": 39526, + "zen": 8518, + "zen": 3928, + "zend": 33478, + "zendaya": 35956, + "zenith": 44740, + "zens": 15298, + "zeph": 40726, + "zepp": 22977, + "zeppelin": 25408, + "zer": 6118, + "zer": 3716, + "zero": 14867, + "zero": 5848, + "zers": 9547, + "zes": 4073, + "zest": 37709, + "zet": 34098, + "zeta": 30954, + "zetta": 45993, + "zeus": 32800, + "zey": 46647, + "zh": 33389, + "zh": 41621, + "zhang": 21127, + "zhen": 37374, + "zhen": 33236, + "zhou": 17384, + "zhu": 42049, + "zi": 2651, + "zi": 5819, + "zia": 13764, + "zid": 30235, + "zidane": 34643, + "zie": 29316, + "zie": 8956, + "zieg": 40157, + "ziegler": 46812, + "ziel": 32151, + "zier": 15399, + "zies": 38001, + "ziest": 28159, + "zig": 15950, + "zig": 21345, + "ziggy": 39274, + "zik": 30125, + "zika": 28783, + "zil": 25039, + "zil": 33190, + "zilla": 17879, + "zim": 8112, + "zim": 22577, + "zimbab": 12373, + "zimbabwe": 45668, + "zimbabwe": 13583, + "zimmer": 27452, + "zimmer": 35211, + "zimmerman": 38231, + "zin": 14085, + "zin": 21278, + "zinc": 27458, + "zind": 26206, + "zindabad": 42208, + "zine": 16100, + "zing": 25062, + "zing": 3152, + "zinger": 42027, + "zio": 13906, + "zion": 31763, + "zion": 20963, + "zione": 36161, + "zionist": 33078, + "zip": 26479, + "zip": 16083, + "zipper": 33670, + "zir": 31892, + "zl": 39168, + "zlat": 32489, + "zlatan": 37877, + "zm": 43691, + "zman": 24248, + "zn": 18004, + "zo": 4397, + "zo": 5056, + "zodi": 22660, + "zodiac": 27753, + "zoe": 43114, + "zoe": 16662, + "zoey": 39871, + "zog": 40680, + "zol": 25939, + "zola": 46105, + "zom": 6623, + "zombi": 29452, + "zombie": 11819, + "zombies": 46702, + "zombies": 16517, + "zon": 15109, + "zon": 14618, + "zona": 42134, + "zone": 37197, + "zone": 4442, + "zones": 17247, + "zoning": 36790, + "zoo": 8182, + "zoo": 7147, + "zoom": 32671, + "zoom": 13909, + "zor": 17605, + "zou": 38072, + "zr": 39275, + "zs": 35248, + "zshq": 41442, + "zt": 42629, + "zu": 4091, + "zu": 14184, + "zucchini": 29873, + "zucker": 26890, + "zuckerberg": 30066, + "zul": 31146, + "zulu": 32821, + "zum": 35094, + "zuma": 23326, + "zumba": 32976, + "zun": 42440, + "zur": 17128, + "zurich": 21288, + "zw": 42188, + "zx": 31604, + "zy": 6615, + "zy": 2303, + "zyk": 39112, + "zyme": 36472, + "zyn": 45287, + "zz": 1544, + "zz": 4943, + "zza": 14642, + "zzi": 13974, + "zzie": 18635, + "zzle": 7873, + "zzled": 39075, + "zzo": 14036, + "zzy": 21275, + "zzy": 8353, + "zzz": 20055, + "zzzz": 35742, + "zzzz": 43103, + "{": 90, + "{": 346, + "{}": 39025, + "|": 91, + "|#": 31183, + "|": 347, + "|@": 41677, + "||": 7566, + "}": 92, + "}": 348, + "~": 93, + "~!": 31181, + "~\"": 
48442, + "~": 349, + "~>": 43291, + "~@": 44247, + "~~": 11461, + "~~": 16671, + "~~~": 32472, + "~~~~": 28295, + "¡": 94, + "¡": 350, + "¡ï¸ı": 15113, + "¡ï¸ı": 4174, + "¡ľ": 43991, + "¢": 95, + "¢": 351, + "£": 96, + "£": 352, + "£ï¸ı": 18446, + "¤": 97, + "¤": 353, + "¥": 98, + "¥": 354, + "¦": 99, + "¦": 355, + "¦Ī": 47615, + "§": 100, + "§": 356, + "¨": 101, + "¨": 357, + "©": 102, + "©": 358, + "ª": 103, + "ª": 359, + "«": 104, + "«": 360, + "¬": 105, + "¬": 361, + "¬ë": 31736, + "®": 106, + "®": 362, + "¯": 107, + "¯": 363, + "°": 108, + "°:": 21787, + "°": 364, + "°ï¸ı": 34777, + "±": 109, + "±": 365, + "±ï¸ı": 41020, + "²": 110, + "²": 366, + "³": 111, + "³": 367, + "³ï¸ı": 22195, + "³ï¸ı": 24706, + "´": 112, + "´": 368, + "µ": 113, + "µ": 369, + "µï¸ı": 27605, + "¶": 114, + "¶": 370, + "·": 115, + "·": 371, + "¸": 116, + "¸": 372, + "¸ë": 19693, + "¹": 117, + "¹": 373, + "º": 118, + "º": 374, + "»": 119, + "»": 375, + "¼": 120, + "¼": 376, + "½": 121, + "½": 377, + "½ï¸ı": 31333, + "¾": 122, + "¾": 378, + "¿": 123, + "¿": 379, + "À": 124, + "À": 380, + "Á": 125, + "Á": 381, + "Â": 126, + "Â": 382, + "¡": 26868, + "¡": 10830, + "¡¡": 45505, + "¢": 41359, + "£": 31117, + "£": 1950, + "Â¥": 20199, + "¨": 19957, + "¨¨": 23089, + "¨¨¨¨": 41223, + "©": 31148, + "©": 5811, + "«": 14434, + "®": 30857, + "®": 8436, + "¯": 38682, + "¯": 43593, + "¯\\": 44096, + "¯\\_(": 45115, + "°": 21305, + "°": 6858, + "²": 41175, + "´": 30560, + "´": 12559, + "·": 14844, + "º": 28059, + "»": 31642, + "»": 7599, + "½": 33613, + "¿": 44559, + "¿": 17133, + "ÂŃ": 22618, + "Ã": 127, + "Ã": 383, + "á": 7261, + "á": 22229, + "án": 38340, + "án": 21385, + "â": 26170, + "ã": 19339, + "ão": 21141, + "ä": 10896, + "ä": 47276, + "än": 42787, + "Ã¥": 23176, + "æ": 42495, + "ç": 10067, + "ça": 22711, + "è": 12138, + "è": 37761, + "ère": 30272, + "ès": 41210, + "é": 3459, + "é": 4166, + "éal": 45251, + "ée": 13489, + "és": 20507, + "ê": 27515, + "ë": 29526, + "ë": 40520, + "î": 48704, + "ï": 35689, + "ñ": 6445, + "ña": 17753, + "ño": 16574, + "ños": 40104, + "ó": 8891, + "ó": 27733, + "ón": 13926, + "ô": 26815, + "ö": 7255, + "ö": 37423, + "ör": 31762, + "ø": 17483, + "ø": 45598, + "ú": 17963, + "ú": 36019, + "ü": 6522, + "ü": 47177, + "ür": 26132, + "ÃĹ": 16165, + "Ãł": 36149, + "Ãł": 21259, + "ÃŃ": 8366, + "ÃŃ": 23928, + "ÃŃa": 16609, + "ÃŃn": 33623, + "Ä": 128, + "Ä": 384, + "ı": 18562, + "ı": 41901, + "Äģ": 23134, + "Äĩ": 31719, + "Äį": 45414, + "ÄŁ": 26540, + "Å": 129, + "Å": 385, + "Å¡": 35621, + "ÅĤ": 40419, + "Åį": 41267, + "ÅŁ": 21254, + "ÅŁ": 40706, + "Æ": 130, + "Æ": 386, + "Ç": 131, + "Ç": 387, + "È": 132, + "È": 388, + "É": 133, + "É": 389, + "Ê": 134, + "Ê": 390, + "Ë": 135, + "Ë": 391, + "Ì": 136, + "Ì": 392, + "Ìĩ": 16384, + "Í": 137, + "Í": 393, + "Î": 138, + "Î": 394, + "Ï": 139, + "Ï": 395, + "Ïī": 38065, + "Ð": 140, + "Ð": 396, + "а": 16912, + "а": 27080, + "аÐ": 31090, + "в": 39813, + "е": 22176, + "и": 16701, + "иÐ": 29503, + "к": 27152, + "л": 47611, + "м": 38018, + "н": 22705, + "о": 13506, + "о": 29386, + "оÐ": 20978, + "од": 38416, + "оÑĤ": 28599, + "п": 26302, + "пÑĢи": 46321, + "пÑĢиÑĢода": 48150, + "Ñ": 141, + "Ñ": 397, + "ÑĢ": 16370, + "ÑĢи": 41092, + "ÑĢод": 47039, + "ÑĢода": 47929, + "Ñģ": 23669, + "ÑĤ": 17875, + "Ñĥ": 39729, + "ÑĦ": 27993, + "ÑĦоÑĤ": 35155, + "ÑĦоÑĤо": 38981, + "Ñĭ": 45001, + "Ò": 142, + "Ò": 398, + "Ó": 143, + "Ó": 399, + "Ô": 144, + "Ô": 400, + "Õ": 145, + "Õ": 401, + "Ö": 146, + "Ö": 402, + "×": 147, + "×": 403, + "Ø": 148, + "Ø": 404, + "ا": 6042, + "ا": 22625, + 
"اØ": 13189, + "ار": 40137, + "اÙ": 8453, + "اÙĦ": 12973, + "اÙħ": 47626, + "اÙĨ": 42773, + "اÙĨ": 33200, + "ب": 16378, + "ب": 35330, + "Ø©": 20915, + "ت": 18197, + "ت": 44333, + "ج": 26375, + "Ø®": 41495, + "د": 19872, + "د": 35566, + "ر": 10948, + "ر": 24933, + "رÙĬ": 43273, + "ز": 36169, + "س": 17856, + "Ø´": 28770, + "ص": 27271, + "Ø·": 32050, + "ع": 18843, + "غ": 48510, + "ØŃ": 25722, + "Ù": 149, + "Ù": 405, + "Ùģ": 24112, + "ÙĤ": 27585, + "Ùĥ": 33499, + "ÙĦ": 14251, + "ÙĦ": 37899, + "Ùħ": 12986, + "Ùħ": 29945, + "ÙĨ": 16655, + "ÙĨ": 25386, + "Ùĩ": 34274, + "Ùĩ": 31343, + "ÙĪ": 12203, + "ÙĪ": 38310, + "ÙĪر": 48242, + "ÙĬ": 12046, + "ÙĬ": 23853, + "Ú": 150, + "Ú": 406, + "Ú©": 26475, + "Û": 151, + "Û": 407, + "Ûģ": 40480, + "ÛĮ": 21452, + "ÛĮ": 32703, + "Ü": 152, + "Ü": 408, + "Ý": 153, + "Ý": 409, + "Þ": 154, + "Þ": 410, + "ß": 155, + "ß": 411, + "à": 156, + "à": 412, + "à¤": 3124, + "त": 27263, + "द": 29552, + "न": 26090, + "प": 44149, + "ब": 43599, + "म": 48254, + "म": 26774, + "य": 37299, + "र": 39136, + "र": 19052, + "ल": 30881, + "व": 39545, + "श": 43181, + "स": 28505, + "ह": 29446, + "ा": 37973, + "ा": 13343, + "ि": 26721, + "à¤Ĥ": 30833, + "à¤ķ": 22067, + "à¤Ĺ": 42598, + "à¤ľ": 39561, + "à¥": 7410, + "à¥Ģ": 45791, + "à¥Ģ": 25751, + "à¥ģ": 39653, + "à¥ĩ": 48612, + "à¥ĩ": 25130, + "à¥ĭ": 34452, + "à¥į": 19389, + "à¦": 11322, + "া": 41532, + "à§": 26339, + "à¨": 15741, + "à©": 32086, + "àª": 22990, + "à«": 48347, + "à¬": 32791, + "à®": 6022, + "த": 34691, + "ன": 43394, + "ப": 47388, + "à®®": 35463, + "à®°": 43270, + "ல": 47705, + "ா": 32831, + "ி": 27126, + "à®ķ": 36168, + "à®Ł": 45263, + "à¯": 11259, + "à¯ģ": 33115, + "à¯į": 16631, + "à°": 12100, + "à±": 23550, + "à±į": 46098, + "à²": 9992, + "ಿ": 47797, + "à³": 20745, + "à³į": 36148, + "à´": 15418, + "àµ": 27392, + "àµį": 45266, + "à¶": 29881, + "à·": 30766, + "à¸": 1777, + "ม": 26137, + "ม": 29570, + "ย": 27241, + "ย": 33091, + "ร": 32225, + "ร": 27331, + "ล": 34696, + "ล": 32746, + "ว": 26990, + "ว": 30245, + "ส": 37883, + "ส": 35737, + "ห": 33064, + "ะ": 43920, + "ะ": 49234, + "ั": 14978, + "า": 11529, + "า": 38476, + "าà¸": 12330, + "ิ": 17092, + "ี": 22421, + "ี": 20278, + "ีà¹Ī": 31511, + "ื": 47991, + "ุ": 30524, + "ู": 35273, + "à¸ģ": 30767, + "à¸ģà¸": 31474, + "à¸Ħ": 31757, + "à¸Ħà¸": 39628, + "à¸ĩ": 24603, + "à¸ĩ": 33382, + "à¸Ī": 47608, + "à¸Ĭ": 46324, + "à¸Ķ": 31107, + "à¸Ķ": 38825, + "à¸ķ": 40273, + "à¸ķ": 41108, + "à¸Ĺ": 36171, + "à¸Ļ": 17474, + "à¸Ļ": 17639, + "à¸Ļà¸": 23121, + "à¸ļ": 33859, + "à¸ļ": 39616, + "à¸ŀ": 48171, + "à¸Ń": 13398, + "à¸Ń": 32818, + "à¸Ńà¸": 14649, + "à¸Ńà¸ĩ": 46622, + "à¹": 4484, + "à¹Ģ": 13729, + "à¹Ģà¸": 14076, + "à¹ģà¸": 23916, + "à¹Ĥ": 33118, + "à¹ĥ": 40962, + "à¹Ħà¸": 31718, + "à¹ĩ": 38699, + "à¹Ī": 11722, + "à¹ī": 13123, + "à¹Į": 28353, + "à¼": 46186, + "à½": 39219, + "á": 157, + "á": 413, + "á´": 19036, + "áµ": 17330, + "áĢ": 45932, + "áĥ": 24829, + "áĥ¦": 32193, + "â": 158, + "â": 414, + "â¤": 25087, + "⤵ï¸ı": 36026, + "â¬": 7930, + "â¬ħï¸ı": 42111, + "â¬Ĩ": 27718, + "â¬Ĩï¸ı": 32798, + "â¬ĩ": 10917, + "â¬ĩ": 39370, + "â¬ĩï¸ı": 25621, + "â¬ĩï¸ı": 13984, + "â¬ĩï¸ıâ¬ĩï¸ı": 40159, + "âĢ": 728, + "âĢ¢": 9485, + "âĢ¢": 2701, + "âĢ¢âĢ¢": 15006, + "âĢ¢âĢ¢": 47575, + "âĢ¢âĢ¢âĢ¢âĢ¢": 27502, + "âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢": 48630, + "âĢ¦": 7095, + "âĢ¦\"": 20215, + "âĢ¦..": 47779, + "âĢ¦.": 18615, + "âĢ¦/": 29842, + "âĢ¦": 959, + "âĢ¦âĢ¦": 40066, + "âĢ²": 32633, + "âĢ³": 25061, + "âĢ¼": 6578, + "âĢ¼ï¸ı": 15622, + "âĢ¼ï¸ı": 8310, + "âĢ¼ï¸ıâĢ¼ï¸ı": 33218, + "âĢĭ": 17086, + "âĢĭ": 9844, + "âĢį": 
4244, + "âĢįâĻ": 5177, + "âĢįâĻĢï¸ı": 18897, + "âĢįâĻĢï¸ı": 9605, + "âĢįâĻĤ": 8832, + "âĢįâĻĤï¸ı": 21779, + "âĢįâĻĤï¸ı": 10613, + "âĢİ": 31001, + "âĢIJ": 34512, + "âĢĵ": 21070, + "âĢĵ": 1224, + "âĢĶ": 6718, + "âĢĶ": 2005, + "âĢĶ>": 26341, + "âĢĶ@": 28470, + "âĢĶâĢĶ": 10037, + "âĢĶâĢĶ": 44800, + "âĢĶâĢĶâĢĶâĢĶ": 17797, + "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 34432, + "âĢķ": 14236, + "âģ": 1667, + "âģ£": 31089, + "âģ£": 16845, + "âģ¦": 2773, + "âģ¦": 34855, + "âģ¦@": 2859, + "âģ¦âģ¦@": 27783, + "âģ©": 20097, + "âģ©,": 48749, + "âģ©.": 35777, + "âģ©": 2918, + "âģīï¸ı": 46534, + "âģł": 23881, + "âģł": 13503, + "âģłâģł": 33488, + "âĤ": 5227, + "âĤ¬": 34919, + "âĤ¬": 6309, + "âĤ¹": 21777, + "âĥ": 2805, + "âĥ£": 11250, + "âĥ£": 3076, + "âĥ£@": 48291, + "âĦ": 8604, + "âĦ¢": 29438, + "âĦ¢": 11675, + "âĦ¹": 45462, + "âĨ": 6059, + "âĨĴ": 7481, + "âĨĵ": 41603, + "âĩ": 27228, + "âĪ": 17788, + "âī": 22684, + "âīĪ": 45451, + "âĮ": 17848, + "âĮļ": 31301, + "âĮļï¸ı": 35931, + "âı": 7960, + "âı©": 40847, + "âı°": 12714, + "âı±": 33149, + "âı³": 47617, + "âĵ": 27400, + "âĶ": 13389, + "âĶĢ": 45139, + "âĶģ": 42022, + "âķ": 17027, + "âķIJ": 48039, + "âĸ": 4168, + "âĸª": 21203, + "âĸª": 36628, + "âĸªï¸ı": 24974, + "âĸ«": 39478, + "âĸ¬": 33798, + "âĸ¬âĸ¬": 36975, + "âĸ¶": 12509, + "âĸ¶": 21126, + "âĸ¶ï¸ı": 14442, + "âĸº": 46061, + "âĸº": 12086, + "âĸ½": 45634, + "âĸł": 36791, + "âĹ": 9323, + "âĹĨ": 48961, + "âĹı": 26999, + "âĺ": 1741, + "âĺ®": 45851, + "âĺ¹": 28811, + "âĺ¹ï¸ı": 39605, + "âĺº": 5010, + "âĺº": 8703, + "âĺºâĺº": 46051, + "âĺºï¸ı": 11506, + "âĺºï¸ı": 7779, + "âĺºï¸ıâĺºï¸ı": 41315, + "âĺ¼": 38877, + "âĺĢ": 32146, + "âĺĢ": 22242, + "âĺĢï¸ı": 12817, + "âĺĢï¸ı": 8219, + "âĺĢï¸ıâĺĢï¸ı": 44550, + "âĺģ": 25195, + "âĺģï¸ı": 35197, + "âĺĥ": 38972, + "âĺħ": 9339, + "âĺħ": 10643, + "âĺħâĺħ": 12681, + "âĺħâĺħ": 36644, + "âĺħâĺħâĺħâĺħ": 34431, + "âĺħâĺħâĺħâĺħ": 44034, + "âĺħâĺħâĺħâĺħâĺħ": 45984, + "âĺĨ": 23941, + "âĺĨ": 13439, + "âĺİ": 24045, + "âĺİ": 45493, + "âĺİï¸ı": 27219, + "âĺij": 20983, + "âĺij": 42300, + "âĺijï¸ı": 22291, + "âĺĶï¸ı": 31238, + "âĺķ": 11454, + "âĺķ": 26561, + "âĺķï¸ı": 25839, + "âĺķï¸ı": 15499, + "âĺĺ": 23483, + "âĺĺï¸ı": 31454, + "âĺĿ": 21982, + "âĺĿï¸ı": 38891, + "âĺŀ": 31255, + "âĺłï¸ı": 34672, + "âĻ": 1548, + "âĻ¡": 11091, + "âĻ¡": 6251, + "âĻ¡âĻ¡": 22360, + "âĻ¡âĻ¡": 34267, + "âĻ¡âĻ¡âĻ¡": 36611, + "âĻ¤": 47435, + "âĻ¥": 4622, + "âĻ¥": 3405, + "âĻ¥âĻ¥": 12975, + "âĻ¥âĻ¥": 19604, + "âĻ¥âĻ¥âĻ¥": 23255, + "âĻ¥âĻ¥âĻ¥âĻ¥": 49020, + "âĻ¥ï¸ı": 17774, + "âĻ¥ï¸ı": 10561, + "âĻ¥ï¸ıâĻ¥ï¸ı": 40309, + "âĻ¦": 32376, + "âĻ¦": 47547, + "âĻ©": 30339, + "âĻ©âĻ«": 31636, + "âĻª": 27364, + "âĻª": 12382, + "âĻ«": 39217, + "âĻ«": 10814, + "âĻ¬": 24753, + "âĻ»": 39611, + "âĻ»ï¸ı": 46075, + "âļ": 2234, + "âļ¡": 40098, + "âļ¡": 20712, + "âļ¡ï¸ı": 19500, + "âļ¡ï¸ı": 11605, + "âļ¡ï¸ıâļ¡ï¸ı": 45922, + "âļª": 11922, + "âļª": 36373, + "âļªï¸ı": 22251, + "âļªï¸ı": 17885, + "âļ«": 15374, + "âļ«ï¸ı": 26529, + "âļ«ï¸ı": 24649, + "âļ½": 4867, + "âļ½": 13173, + "âļ½âļ½": 43259, + "âļ½ï¸ı": 11342, + "âļ½ï¸ı": 6768, + "âļ½ï¸ıâļ½ï¸ı": 30358, + "âļ½ï¸ıâļ½ï¸ı": 44148, + "âļ¾": 11314, + "âļ¾": 34717, + "âļ¾ï¸ı": 24727, + "âļ¾ï¸ı": 14858, + "âļĵ": 23522, + "âļĵï¸ı": 35299, + "âļĶï¸ı": 29361, + "âļľ": 47491, + "âļł": 39203, + "âļłï¸ı": 40966, + "âļłï¸ı": 15596, + "âĽ": 7956, + "âĽ³ï¸ı": 29204, + "âĽĦ": 30668, + "âĽĦï¸ı": 45465, + "âľ": 1508, + "⾨": 7181, + "⾨": 3531, + "⾨⾨": 35174, + "⾨⾨": 21985, + "⾨⾨⾨": 39424, + "âľĤ": 38602, + "âľħ": 29544, + "âľħ": 5564, + "âľĪ": 10682, + "âľĪ": 30712, + "âľĪï¸ı": 26176, + "âľĪï¸ı": 13413, + "âľĬ": 
12392, + "âľĬ": 17819, + "âľĬðŁı½": 48547, + "âľĬðŁı¾": 41185, + "âľĭ": 39383, + "âľĭ": 30239, + "âľĮ": 6419, + "âľĮ": 12656, + "âľĮï¸ı": 21906, + "âľĮï¸ı": 12239, + "âľĮðŁı»": 30538, + "âľĮðŁı¼": 30588, + "âľį": 20872, + "âľįï¸ı": 30888, + "âľı": 32574, + "âľıï¸ı": 40724, + "âľĵ": 36700, + "âľĶ": 47200, + "âľĶ": 13749, + "âľĶï¸ı": 40544, + "âľĶï¸ı": 9191, + "âľĸï¸ı": 44133, + "âľĿ": 42220, + "âĿ": 1045, + "âĿ£": 37007, + "âĿ£": 25623, + "âĿ£ï¸ı": 25240, + "âĿ¤": 1266, + "âĿ¤": 2720, + "âĿ¤âĿ¤": 9033, + "âĿ¤âĿ¤": 14058, + "âĿ¤âĿ¤âĿ¤": 16708, + "âĿ¤âĿ¤âĿ¤âĿ¤": 37918, + "âĿ¤âĿ¤âĿ¤âĿ¤": 43970, + "âĿ¤ï¸ı": 2626, + "âĿ¤ï¸ı#": 30281, + "âĿ¤ï¸ı.": 45326, + "âĿ¤ï¸ı": 1752, + "âĿ¤ï¸ı@": 31187, + "âĿ¤ï¸ıâĿ¤ï¸ı": 6713, + "âĿ¤ï¸ıâĿ¤ï¸ı": 10363, + "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 12282, + "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 39167, + "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 29880, + "âĿ¤ï¸ıðŁĴĻ": 37380, + "âĿ¤ï¸ıðŁĺį": 37272, + "âĿ¤ï¸ıðŁĺĺ": 41800, + "âĿ¤ðŁĺį": 49120, + "âĿ¥": 36914, + "âĿĦ": 8501, + "âĿĦ": 30494, + "âĿĦï¸ı": 16834, + "âĿĦï¸ı": 12402, + "âĿĦï¸ıâĿĦï¸ı": 41626, + "âĿĮ": 44485, + "âĿĮ": 17975, + "âĿĵ": 29791, + "âĿĹ": 12868, + "âĿĹ": 29079, + "âĿĹï¸ı": 28642, + "âĿĹï¸ı": 17391, + "âĿĿ": 46951, + "âŀ": 3257, + "âŀ¡": 12854, + "âŀ¡ï¸ı": 31860, + "âŀ¡ï¸ı": 4956, + "âŀ¤": 18651, + "âŀķ": 46526, + "âŀĸ": 21327, + "âŀĸ": 34902, + "âŀĸâŀĸ": 23316, + "âŀĸâŀĸâŀĸâŀĸ": 40401, + "âŀľ": 23775, + "âł": 5689, + "âłĢ": 9691, + "âłĢ": 8621, + "âłĢâłĢ": 11466, + "âłĢâłĢ": 39092, + "âłĢâłĢâłĢâłĢ": 20976, + "âłĢâłĢâłĢâłĢâłĢâłĢâłĢâłĢ": 46063, + "âŃ": 5527, + "âŃIJ": 6410, + "âŃIJ": 19012, + "âŃIJâŃIJ": 32663, + "âŃIJï¸ı": 12427, + "âŃIJï¸ı": 10251, + "âŃIJï¸ıâŃIJï¸ı": 18640, + "âŃIJï¸ıâŃIJï¸ıâŃIJï¸ı": 40746, + "ã": 159, + "ã": 415, + "ãĢ": 4092, + "ãĢģ": 45262, + "ãĢĤ": 38060, + "ãĢĤ": 38000, + "ãĢĬ": 39920, + "ãĢĭ": 32898, + "ãĢĮ": 18116, + "ãĢį": 19149, + "ãĢİ": 26947, + "ãĢı": 30293, + "ãĢIJ": 12534, + "ãĢij": 12990, + "ãĢľ": 39581, + "ãģ": 4813, + "ãģ¦": 48029, + "ãģ¨": 34671, + "ãģ¨ç¹ĭãģ": 47310, + "ãģ¨ç¹ĭãģĮãĤĬãģŁãģĦ": 48290, + "ãģª": 29104, + "ãģ®": 21575, + "ãģ·": 44130, + "ãģĦ": 33523, + "ãģĦ": 38850, + "ãģĨ": 44235, + "ãģį": 42184, + "ãĤ": 3909, + "ãĤ¢": 26560, + "ãĤ¤": 19319, + "ãĤ¤ãĥ": 36294, + "ãĤ«": 37367, + "ãĤ¯": 31574, + "ãĤ·": 37665, + "ãĤ¸": 32234, + "ãĤ¸ãĥ": 43491, + "ãĤ¹": 22694, + "ãĤ¹": 39220, + "ãĤ¹ãĥ": 32421, + "ãĤ¿": 34941, + "ãĤĬãģ": 40500, + "ãĤĮ": 45211, + "ãĤŃ": 47121, + "ãĥ": 2429, + "ãĥ©": 23007, + "ãĥª": 32115, + "ãĥ«": 33257, + "ãĥ¬": 32965, + "ãĥ³": 17671, + "ãĥ³": 26875, + "ãĥ³ãĤ": 45105, + "ãĥ³ãĥ": 25914, + "ãĥ»": 8415, + "ãĥ»": 11158, + "ãĥ»ãĥ»": 13949, + "ãĥ»ãĥ»ãĥ»": 14234, + "ãĥ¼": 13457, + "ãĥ¼": 30391, + "ãĥ¼ãĥ": 18584, + "ãĥĥ": 28902, + "ãĥĦ": 32173, + "ãĥĪ": 42384, + "ãĥİ": 39967, + "ãĥķãĤ": 33371, + "ãĥŀ": 48924, + "ãĥŃ": 35827, + "ãħ": 5947, + "ãħ¤": 21096, + "ãħ¤ãħ¤": 22583, + "ãħ¤ãħ¤ãħ¤ãħ¤": 39329, + "ãħĭ": 13052, + "ãħĭ": 25108, + "ãħĭãħĭ": 16604, + "ãħĭãħĭ": 42581, + "ãħĭãħĭãħĭ": 46407, + "ãħĭãħĭãħĭãħĭ": 39362, + "ãħł": 16089, + "ãħł": 25781, + "ãħłãħł": 22021, + "ãħłãħł": 34398, + "ãħłãħłãħłãħł": 47028, + "ä": 160, + "ä": 416, + "ä¸": 19759, + "ä¹": 41854, + "äº": 21078, + "人": 36839, + "ä»": 37743, + "ä½": 47466, + "å": 161, + "å": 417, + "å¤": 23170, + "å¥": 29290, + "å®": 27047, + "å°": 34720, + "å±": 46096, + "å¸": 42021, + "å¹": 38780, + "åħ": 34314, + "åĨ": 27972, + "åĨĻ": 44653, + "åĪ": 42748, + "åĭ": 47505, + "åı": 34517, + "åIJ": 41673, + "åĽ": 39027, + "åľ": 37746, + "åŃ": 35751, + "æ": 162, + "æ": 418, + "æĸ": 29032, + "æĹ": 22265, + "æĹ¥": 39121, + "æĹ¥": 37156, + "æĺ": 42891, + 
"æĻ": 48132, + "æľ": 19277, + "æľ¬": 44353, + "æĿ": 27667, + "æĿ±": 48338, + "ç": 163, + "ç": 419, + "ç¥": 26369, + "ç¥Ń": 42557, + "çµ": 37810, + "ç¹": 43431, + "ç¹ĭãģ": 45930, + "çĶ": 20211, + "çĶŁ": 33375, + "çľ": 33440, + "羣": 41570, + "è": 164, + "è": 420, + "èª": 34002, + "èªķ": 41293, + "é": 165, + "é": 421, + "éģ": 44854, + "éĩ": 38283, + "ê": 166, + "ê": 422, + "ê°": 21122, + "ê°ĵ": 41076, + "ê°ĵìĦ¸ë¸IJ": 41689, + "ê°ķ": 45758, + "ê²": 35555, + "ê³": 36216, + "êµ": 31871, + "ê·": 42680, + "ê¸": 32495, + "ê¹": 24531, + "ê¹Ģ": 25203, + "ë": 167, + "ë": 423, + "ë¦": 24621, + "리": 47649, + "ë§": 28024, + "ë§Ī": 40027, + "ëª": 36311, + "ë¯": 19528, + "민": 34442, + "민": 44632, + "ë°": 15810, + "ë°©": 23273, + "ë°©íĥ": 25081, + "ë°©íĥĦ": 25641, + "ë°©íĥĦìĨĮëħĦëĭ": 26068, + "ë°©íĥĦìĨĮëħĦëĭ¨": 27129, + "ë°ķ": 40988, + "ë²": 48267, + "ë³": 44693, + "ë¹": 24193, + "ëĤ": 27252, + "ëĤĺ": 48484, + "ëĭ": 13094, + "ëĭ¤": 46680, + "ëĭĪ": 33708, + "ëį": 45543, + "ëı": 31972, + "ëĵ": 30850, + "ëĿ": 44317, + "ì": 168, + "ì": 424, + "ì£": 39856, + "주": 45161, + "ì¤": 31153, + "ì§": 16279, + "ì§Ģ": 28836, + "ì§Ħ": 38890, + "ì°": 40742, + "ì¶": 42476, + "ì¶ķ": 46403, + "ì¶ķíķĺ": 47866, + "ì¹": 45088, + "ìĤ": 31061, + "ìĥ": 30587, + "ìĥĿ": 47858, + "ìĦ": 15074, + "ìĦ¸ë": 29254, + "ìĦ¸ë¸": 29658, + "ìĦ¸ë¸IJ": 41415, + "ìĨ": 15115, + "ìĨĮë": 20515, + "ìĨĮëħ": 21391, + "ìĨĮëħĦëĭ": 25887, + "ìĪ": 32757, + "ìĬ": 12125, + "ìĬ¤": 20305, + "ìĬ¤": 23829, + "ìĭ": 23924, + "ìķ": 16071, + "ìķĦ": 23233, + "ìĸ": 31625, + "ìĹ": 13252, + "ìĹIJ": 37622, + "ìĹij": 31036, + "ìĹijìĨ": 42763, + "ìĹijìĨĮ": 45606, + "ìĺ": 21144, + "ìĻ": 39405, + "ìļ": 18541, + "ìļ°": 38415, + "ìļ°": 49344, + "ìĽ": 22543, + "ìĽIJ": 36495, + "ìľ": 20909, + "ìľł": 42890, + "ìĿ": 8276, + "ìĿ´": 12286, + "ìĿ´": 34746, + "ìĿ´ì": 37590, + "ìĿ¼": 43406, + "ìŀ": 20849, + "ìł": 20580, + "ìłķ": 34725, + "í": 169, + "í": 425, + "íģ": 35641, + "íģ¬": 45832, + "íĤ": 43565, + "íĥ": 15012, + "íĥĢ": 41126, + "íĥľ": 37663, + "íĬ": 23215, + "íĬ¸": 48974, + "íĬ¸": 39820, + "íĭ": 34350, + "íĶ": 29450, + "íķ": 15197, + "íķ´": 35286, + "íķĺ": 33992, + "íĺ": 15962, + "íĺ¸": 39657, + "íĺĦ": 34645, + "íĻ": 31882, + "î": 170, + "î": 426, + "îĢ": 36288, + "îĦ": 35368, + "îĮ": 41006, + "îIJ": 16929, + "îIJĴ": 40100, + "ï": 171, + "ï": 427, + "ï¸": 842, + "ï¸İ": 24029, + "ï¸ı": 1392, + "ï¸ı#": 46997, + "ï¸ı:": 32604, + "ï¸ı": 1001, + "ï¸ı@": 34600, + "ï¸ıâĥ£": 17394, + "ï¸ıâĥ£-": 40376, + "ï¸ıâĥ£": 4603, + "ï¿": 27850, + "�": 47356, + "�": 39802, + "ð": 172, + "ð": 428, + "ðĿ": 6874, + "ðĿIJ": 15889, + "ðĿij": 43794, + "ðĿĴ": 43387, + "ðĿĵ": 47110, + "ðĿĹ": 18865, + "ðĿĺ": 26109, + "ðĿĻ": 29415, + "ðŁ": 558, + "ðŁ¤": 1793, + "ðŁ¤£": 9665, + "ðŁ¤£": 9909, + "ðŁ¤£ðŁ¤£": 16430, + "ðŁ¤£ðŁ¤£": 31009, + "ðŁ¤£ðŁ¤£ðŁ¤£": 32262, + "ðŁ¤¤": 39550, + "ðŁ¤¤": 26759, + "ðŁ¤¦": 17186, + "ðŁ¤§": 40983, + "ðŁ¤©": 27351, + "ðŁ¤©": 16074, + "ðŁ¤ª": 44230, + "ðŁ¤ª": 24920, + "ðŁ¤«": 47671, + "ðŁ¤¯": 37595, + "ðŁ¤·": 13185, + "ðŁ¤·ðŁı»âĢįâĻĢï¸ı": 46770, + "ðŁ¤ij": 34801, + "ðŁ¤ĵ": 36580, + "ðŁ¤ĵ": 18928, + "ðŁ¤Ķ": 12706, + "ðŁ¤Ķ": 6497, + "ðŁ¤ĶðŁ¤Ķ": 28490, + "ðŁ¤ĶðŁ¤ĶðŁ¤Ķ": 43361, + "ðŁ¤ĸ": 46146, + "ðŁ¤Ĺ": 16646, + "ðŁ¤Ĺ": 10465, + "ðŁ¤ĹðŁ¤Ĺ": 44321, + "ðŁ¤ĺ": 10623, + "ðŁ¤ĺ": 17288, + "ðŁ¤ĺðŁı»": 46449, + "ðŁ¤ĺðŁı»": 30891, + "ðŁ¤ĺðŁı¼": 31458, + "ðŁ¤ĺðŁı½": 49362, + "ðŁ¤Ļ": 23800, + "ðŁ¤Ļ": 39101, + "ðŁ¤Ŀ": 35242, + "ðŁ¤ŀ": 29463, + "ðŁ¤ŀ": 38597, + "ðŁ¤Ł": 48509, + "ðŁ¤ł": 36737, + "ðŁ¤Ń": 47289, + "ðŁ¥": 4156, + "ðŁ¥°": 29246, + "ðŁ¥°": 17597, + "ðŁ¥³": 45823, + "ðŁ¥³": 28055, + "ðŁ¥º": 
43380, + "ðŁ¥º": 36858, + "ðŁ¥Ĥ": 43805, + "ðŁ¥Ĥ": 25212, + "ðŁ¥ĥ": 47790, + "ðŁ¥ĩ": 34372, + "ðŁ¥ĩ": 20069, + "ðŁ¥Ī": 35858, + "ðŁ¥ī": 36782, + "ðŁ¥Ĭ": 29275, + "ðŁ¦": 6040, + "ðŁ¦ģ": 36367, + "ðŁ¦ģ": 26056, + "ðŁ¦ĥ": 40184, + "ðŁ¦Ħ": 37659, + "ðŁ¦ħ": 28800, + "ðŁ¦Ī": 48984, + "ðŁ¦ĭ": 49325, + "ðŁ¦ĭ": 28985, + "ðŁ§": 8792, + "ðŁ§¡": 30996, + "ðŁ§¡": 24578, + "ðŁ§IJ": 33549, + "ðŁħ": 22010, + "ðŁĨ": 9536, + "ðŁĨķ": 34956, + "ðŁĨĺ": 39868, + "ðŁĨļ": 16325, + "ðŁĩ": 1173, + "ðŁĩ¦": 12469, + "ðŁĩ¦": 28565, + "ðŁĩ¦ðŁĩ": 33196, + "ðŁĩ¦ðŁĩ·": 41629, + "ðŁĩ¦ðŁĩº": 25192, + "ðŁĩ§": 14660, + "ðŁĩ§ðŁĩ": 37342, + "ðŁĩ§ðŁĩª": 38794, + "ðŁĩ§ðŁĩ·": 28182, + "ðŁĩ¨": 8889, + "ðŁĩ¨ðŁĩ": 8989, + "ðŁĩ¨ðŁĩ¦": 34324, + "ðŁĩ¨ðŁĩ¦": 16364, + "ðŁĩ¨ðŁĩ³": 36819, + "ðŁĩ¨ðŁĩŃ": 41119, + "ðŁĩ©": 15222, + "ðŁĩ©ðŁĩ": 36350, + "ðŁĩ©ðŁĩª": 21531, + "ðŁĩª": 11428, + "ðŁĩª": 12331, + "ðŁĩªðŁĩ": 13917, + "ðŁĩªðŁĩ¸": 22177, + "ðŁĩªðŁĩº": 34655, + "ðŁĩ«": 12977, + "ðŁĩ«ðŁĩ·": 39109, + "ðŁĩ«ðŁĩ·": 16223, + "ðŁĩ¬": 8129, + "ðŁĩ¬ðŁĩ": 8354, + "ðŁĩ¬ðŁĩ§": 23762, + "ðŁĩ¬ðŁĩ§": 11559, + "ðŁĩ®": 8268, + "ðŁĩ®ðŁĩ": 8347, + "ðŁĩ®ðŁĩª": 34148, + "ðŁĩ®ðŁĩ³": 47299, + "ðŁĩ®ðŁĩ³": 23602, + "ðŁĩ®ðŁĩ¹": 42034, + "ðŁĩ®ðŁĩ¹": 17070, + "ðŁĩ¯": 20090, + "ðŁĩ¯ðŁĩ": 22924, + "ðŁĩ¯ðŁĩµ": 26527, + "ðŁĩ°": 28232, + "ðŁĩ±": 29533, + "ðŁĩ±ðŁĩ": 40941, + "ðŁĩ²": 16411, + "ðŁĩ²ðŁĩ": 17562, + "ðŁĩ²ðŁĩ½": 32073, + "ðŁĩ³": 16645, + "ðŁĩ³ðŁĩ": 17747, + "ðŁĩ³ðŁĩ±": 36747, + "ðŁĩµ": 12127, + "ðŁĩµðŁĩ": 13608, + "ðŁĩµðŁĩ°": 37764, + "ðŁĩµðŁĩ¹": 42621, + "ðŁĩµðŁĩŃ": 42777, + "ðŁĩ·": 16026, + "ðŁĩ·": 9869, + "ðŁĩ·ðŁĩº": 37902, + "ðŁĩ¸": 19447, + "ðŁĩ¸ðŁĩ": 33325, + "ðŁĩ¸ðŁĩª": 39260, + "ðŁĩ¹": 21810, + "ðŁĩ¹ðŁĩ": 36250, + "ðŁĩº": 4054, + "ðŁĩº": 17467, + "ðŁĩºðŁĩ": 4131, + "ðŁĩºðŁĩ¸": 8907, + "ðŁĩºðŁĩ¸": 5688, + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 18739, + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 41411, + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 43357, + "ðŁĩ¿": 25520, + "ðŁĩ¿ðŁĩ¦": 36982, + "ðŁĩŃ": 30370, + "ðŁĮ": 1576, + "ðŁĮ±": 35318, + "ðŁĮ±": 20665, + "ðŁĮ²": 34071, + "ðŁĮ²": 28154, + "ðŁĮ³": 44265, + "ðŁĮ³": 28543, + "ðŁĮ´": 20643, + "ðŁĮ´": 15968, + "ðŁĮµ": 40871, + "ðŁĮ·": 32328, + "ðŁĮ·": 24259, + "ðŁĮ¸": 16314, + "ðŁĮ¸": 10980, + "ðŁĮ¸ðŁĮ¸": 46210, + "ðŁĮ¹": 14990, + "ðŁĮ¹": 10662, + "ðŁĮ¹ðŁĮ¹": 37933, + "ðŁĮº": 27608, + "ðŁĮº": 19829, + "ðŁĮ»": 27196, + "ðŁĮ»": 19772, + "ðŁĮ¼": 36484, + "ðŁĮ¼": 26312, + "ðŁĮ¾": 39796, + "ðŁĮ¿": 27736, + "ðŁĮ¿": 18588, + "ðŁĮĢ": 34348, + "ðŁĮħ": 27547, + "ðŁĮĪ": 23038, + "ðŁĮĪ": 13042, + "ðŁĮĬ": 20465, + "ðŁĮĬ": 14302, + "ðŁĮĮ": 43393, + "ðŁĮį": 34931, + "ðŁĮį": 18641, + "ðŁĮİ": 31125, + "ðŁĮİ": 16969, + "ðŁĮı": 31527, + "ðŁĮIJ": 33071, + "ðŁĮĻ": 42330, + "ðŁĮĻ": 23283, + "ðŁĮļ": 49004, + "ðŁĮļ": 27877, + "ðŁĮŀ": 21152, + "ðŁĮŀ": 12980, + "ðŁĮŁ": 13196, + "ðŁĮŁ": 8542, + "ðŁĮŁðŁĮŁ": 26014, + "ðŁį": 2011, + "ðŁį¦": 47375, + "ðŁį¦": 32032, + "ðŁį©": 38379, + "ðŁįª": 38958, + "ðŁį«": 47994, + "ðŁį«": 33401, + "ðŁį°": 43732, + "ðŁį°": 30051, + "ðŁį³": 37441, + "ðŁį´": 41531, + "ðŁį´": 25338, + "ðŁį·": 24445, + "ðŁį·": 18072, + "ðŁį¸": 43058, + "ðŁį¸": 31217, + "ðŁį¹": 35598, + "ðŁįº": 31081, + "ðŁįº": 21590, + "ðŁį»": 22793, + "ðŁį»": 13167, + "ðŁį¾": 27294, + "ðŁį¾": 21656, + "ðŁįĢ": 22865, + "ðŁįĢ": 15764, + "ðŁįģ": 29837, + "ðŁįģ": 23075, + "ðŁįĤ": 35015, + "ðŁįĤ": 25721, + "ðŁįĥ": 27157, + "ðŁįĥ": 20147, + "ðŁįĩ": 48697, + "ðŁįĬ": 35001, + "ðŁįĬ": 28036, + "ðŁįĭ": 39543, + "ðŁįĮ": 44987, + "ðŁįį": 48946, + "ðŁįİ": 32069, + "ðŁįij": 32889, + "ðŁįĴ": 33160, + "ðŁįĵ": 44739, + "ðŁįĵ": 33456, + "ðŁįĶ": 46415, + "ðŁįĶ": 36031, + "ðŁįķ": 31469, + "ðŁįķ": 23904, + "ðŁįŃ": 
42100, + "ðŁİ": 1165, + "ðŁİ£": 43158, + "ðŁİ¤": 23490, + "ðŁİ¤": 15690, + "ðŁİ¥": 22186, + "ðŁİ¥:": 43640, + "ðŁİ¥": 13233, + "ðŁİ§": 31254, + "ðŁİ§": 14266, + "ðŁİ¨": 31953, + "ðŁİ¨": 13461, + "ðŁİ©": 37701, + "ðŁİ«": 30331, + "ðŁİ¬": 36020, + "ðŁİ¬": 18150, + "ðŁİ®": 29312, + "ðŁİ¯": 23114, + "ðŁİµ": 27435, + "ðŁİµ": 14946, + "ðŁİ¶": 11755, + "ðŁİ¶": 6011, + "ðŁİ¶ðŁİ¶": 36283, + "ðŁİ¸": 29135, + "ðŁİ¸": 22122, + "ðŁİ¹": 43493, + "ðŁİ¼": 34949, + "ðŁİ¼": 23757, + "ðŁİ¾": 41982, + "ðŁİ¾": 24222, + "ðŁİĢ": 34347, + "ðŁİĢ": 20151, + "ðŁİģ": 18368, + "ðŁİģ": 13462, + "ðŁİĤ": 13026, + "ðŁİĤ": 10392, + "ðŁİĤðŁİĤ": 39338, + "ðŁİĥ": 22622, + "ðŁİĥ": 16780, + "ðŁİĦ": 12942, + "ðŁİĦ": 11267, + "ðŁİħ": 17685, + "ðŁİħ": 24276, + "ðŁİĨ": 39222, + "ðŁİĪ": 16142, + "ðŁİĪ": 14448, + "ðŁİĪðŁİī": 48049, + "ðŁİī": 4310, + "ðŁİī:": 17310, + "ðŁİī": 3986, + "ðŁİīðŁİ": 11473, + "ðŁİīðŁİĪ": 40499, + "ðŁİīðŁİĪ": 34008, + "ðŁİīðŁİī": 25159, + "ðŁİīðŁİī": 13450, + "ðŁİīðŁİīðŁİī": 20828, + "ðŁİīðŁİĬ": 31662, + "ðŁİīðŁİĬ": 30781, + "ðŁİĬ": 22763, + "ðŁİĬ": 22425, + "ðŁİĬðŁİī": 48801, + "ðŁİĵ": 28916, + "ðŁİĵ": 18744, + "ðŁİĻ": 29001, + "ðŁİĻ": 29753, + "ðŁİĻï¸ı": 44205, + "ðŁİŁ": 19248, + "ðŁİŁ": 21107, + "ðŁİŁï¸ı": 30243, + "ðŁİŃ": 28856, + "ðŁı": 1109, + "ðŁı¡": 27318, + "ðŁı³ï¸ı": 26844, + "ðŁı³ï¸ıâĢį": 27093, + "ðŁı³ï¸ıâĢįðŁĮĪ": 32610, + "ðŁı´": 39690, + "ðŁı´": 19704, + "ðŁı»": 5042, + "ðŁı»": 3702, + "ðŁı»âĢį": 46250, + "ðŁı»âĢįâĻĢï¸ı": 48391, + "ðŁı»âĢįâĻĢï¸ı": 23595, + "ðŁı»âĢįâĻĤï¸ı": 30984, + "ðŁı¼": 6193, + "ðŁı¼": 4027, + "ðŁı¼âĢįâĻĢï¸ı": 28955, + "ðŁı½": 8514, + "ðŁı½": 6114, + "ðŁı½âĢįâĻĢï¸ı": 37036, + "ðŁı½âĢįâĻĤï¸ı": 43157, + "ðŁı¾": 10230, + "ðŁı¾": 7778, + "ðŁı¾âĢįâĻĤï¸ı": 47189, + "ðŁı¿": 29854, + "ðŁı¿": 21094, + "ðŁıĢ": 13708, + "ðŁıĢ": 8813, + "ðŁıĢðŁıĢ": 43169, + "ðŁıģ": 29423, + "ðŁıģ": 17473, + "ðŁıĥ": 16820, + "ðŁıĥ": 32751, + "ðŁıħ": 25500, + "ðŁıĨ": 9585, + "ðŁıĨ": 5596, + "ðŁıĨðŁıĨ": 18946, + "ðŁıĨðŁıĨ": 38269, + "ðŁıĨðŁıĨðŁıĨ": 44484, + "ðŁıĩ": 45789, + "ðŁıĩ": 40288, + "ðŁıĪ": 16144, + "ðŁıĪ": 10477, + "ðŁıī": 26020, + "ðŁıĬ": 33061, + "ðŁıĬ": 47830, + "ðŁıĮ": 41116, + "ðŁıı": 32460, + "ðŁıIJ": 46334, + "ðŁıIJ": 29433, + "ðŁıĴ": 37756, + "ðŁıŁ": 35914, + "ðŁıŁ": 26472, + "ðŁıŁï¸ı": 42627, + "ðŁıł": 33727, + "ðŁIJ": 2074, + "ðŁIJ¢": 37049, + "ðŁIJ£": 39597, + "ðŁIJ¥": 42981, + "ðŁIJ¦": 37260, + "ðŁIJ¬": 44238, + "ðŁIJ¯": 34825, + "ðŁIJ¯": 26111, + "ðŁIJ°": 35378, + "ðŁIJ°": 25050, + "ðŁIJ±": 35710, + "ðŁIJ±": 22979, + "ðŁIJ´": 33509, + "ðŁIJ¶": 14466, + "ðŁIJ¶": 10631, + "ðŁIJ·": 38408, + "ðŁIJ¸": 45597, + "ðŁIJ¸": 40298, + "ðŁIJº": 44281, + "ðŁIJº": 31445, + "ðŁIJ»": 30750, + "ðŁIJ»": 25322, + "ðŁIJ¼": 46234, + "ðŁIJ¾": 16057, + "ðŁIJ¾": 11317, + "ðŁIJ¾ðŁIJ¾": 42202, + "ðŁIJī": 46908, + "ðŁIJĬ": 43974, + "ðŁIJį": 48903, + "ðŁIJį": 30177, + "ðŁIJİ": 48281, + "ðŁIJİ": 32726, + "ðŁIJIJ": 47735, + "ðŁIJIJ": 27954, + "ðŁIJij": 49389, + "ðŁIJķ": 41069, + "ðŁIJĺ": 38733, + "ðŁIJĿ": 30619, + "ðŁIJĿ": 20111, + "ðŁIJŁ": 42084, + "ðŁIJŁ": 29989, + "ðŁIJł": 42725, + "ðŁij": 964, + "ðŁij£": 39755, + "ðŁij§": 48938, + "ðŁij¨": 18966, + "ðŁij¨âĢį": 25023, + "ðŁij©": 18800, + "ðŁij©âĢį": 26304, + "ðŁij«": 47106, + "ðŁij«": 35457, + "ðŁij®": 42686, + "ðŁij¯": 25910, + "ðŁij¯": 20582, + "ðŁij¶": 26187, + "ðŁij¶": 33189, + "ðŁij¸": 26268, + "ðŁij¸": 36645, + "ðŁij¹": 46766, + "ðŁij»": 24625, + "ðŁij»": 16243, + "ðŁij¼": 25270, + "ðŁij¼": 31083, + "ðŁij½": 42677, + "ðŁij½": 26257, + "ðŁijĢ": 11524, + "ðŁijĢ": 5908, + "ðŁijĢðŁijĢ": 31561, + "ðŁijģ": 47796, + "ðŁijģ": 45705, + "ðŁijĦ": 47445, + "ðŁijħ": 
31833, + "ðŁijħ": 24672, + "ðŁijĨ": 42975, + "ðŁijĨ": 45194, + "ðŁijĩ": 7662, + "ðŁijĩ": 7475, + "ðŁijĩðŁı»": 45811, + "ðŁijĩðŁı»": 32813, + "ðŁijĩðŁı¼": 37504, + "ðŁijĩðŁijĩ": 17915, + "ðŁijĩðŁijĩ": 31891, + "ðŁijĩðŁijĩðŁijĩ": 35627, + "ðŁijĪ": 32794, + "ðŁijĪ": 20832, + "ðŁijī": 9477, + "ðŁijī": 3988, + "ðŁijīðŁı»": 23481, + "ðŁijīðŁı¼": 27534, + "ðŁijīðŁı½": 38059, + "ðŁijīðŁijī": 41480, + "ðŁijĬ": 8897, + "ðŁijĬ": 9704, + "ðŁijĬðŁı»": 47393, + "ðŁijĬðŁı»": 29152, + "ðŁijĬðŁı¼": 49000, + "ðŁijĬðŁı¼": 30115, + "ðŁijĬðŁijĬ": 46521, + "ðŁijĭ": 19351, + "ðŁijĭ": 17686, + "ðŁijĮ": 4890, + "ðŁijĮ": 4494, + "ðŁijĮðŁı»": 31818, + "ðŁijĮðŁı»": 18606, + "ðŁijĮðŁı¼": 37655, + "ðŁijĮðŁı¼": 20031, + "ðŁijĮðŁı½": 35834, + "ðŁijĮðŁijĮ": 36139, + "ðŁijĮðŁijĮ": 21435, + "ðŁijĮðŁijĮðŁijĮ": 40876, + "ðŁijį": 4686, + "ðŁijį": 4201, + "ðŁijįðŁı»": 25803, + "ðŁijįðŁı»": 15129, + "ðŁijįðŁı¼": 37285, + "ðŁijįðŁı¼": 19689, + "ðŁijįðŁı½": 43722, + "ðŁijįðŁijį": 33012, + "ðŁijįðŁijį": 18997, + "ðŁijįðŁijįðŁijį": 37284, + "ðŁijİ": 39702, + "ðŁijİ": 32568, + "ðŁijı": 3802, + "ðŁijı": 4829, + "ðŁijıðŁı»": 19236, + "ðŁijıðŁı»": 17029, + "ðŁijıðŁı»ðŁijıðŁı»": 35254, + "ðŁijıðŁı¼": 24496, + "ðŁijıðŁı¼": 19979, + "ðŁijıðŁı¼ðŁijıðŁı¼": 46712, + "ðŁijıðŁı½": 40796, + "ðŁijıðŁı½": 33978, + "ðŁijıðŁı¾": 45450, + "ðŁijıðŁijı": 10356, + "ðŁijıðŁijı": 16706, + "ðŁijıðŁijıðŁijı": 17254, + "ðŁijIJ": 40877, + "ðŁijij": 14955, + "ðŁijij": 8717, + "ðŁijijðŁijij": 48532, + "ðŁijķ": 47865, + "ðŁijŁ": 41183, + "ðŁijł": 41264, + "ðŁijŃ": 34175, + "ðŁijŃ": 27943, + "ðŁĴ": 837, + "ðŁĴ¡": 24081, + "ðŁĴ£": 36862, + "ðŁĴ£": 29006, + "ðŁĴ¤": 34706, + "ðŁĴ¤": 25632, + "ðŁĴ¥": 12209, + "ðŁĴ¥": 7347, + "ðŁĴ¥ðŁĴ¥": 27396, + "ðŁĴ¥ðŁĴ¥": 39246, + "ðŁĴ¥ðŁĴ¥ðŁĴ¥": 48890, + "ðŁĴ¦": 21180, + "ðŁĴ¦": 14060, + "ðŁĴ¦ðŁĴ¦": 44469, + "ðŁĴ§": 34095, + "ðŁĴ¨": 27408, + "ðŁĴ¨": 17891, + "ðŁĴ©": 48621, + "ðŁĴ©": 28847, + "ðŁĴª": 5475, + "ðŁĴª": 6440, + "ðŁĴªðŁı»": 31669, + "ðŁĴªðŁı»": 21903, + "ðŁĴªðŁı¼": 32041, + "ðŁĴªðŁı¼": 20759, + "ðŁĴªðŁı½": 46380, + "ðŁĴªðŁı½": 31111, + "ðŁĴªðŁı¾": 39398, + "ðŁĴªðŁĴª": 24747, + "ðŁĴªðŁĴªðŁĴª": 39913, + "ðŁĴ«": 25770, + "ðŁĴ«": 12526, + "ðŁĴ¬": 30947, + "ðŁĴ¯": 10611, + "ðŁĴ¯": 7018, + "ðŁĴ¯ðŁĴ¯": 30234, + "ðŁĴ¯ðŁĴ¯": 44070, + "ðŁĴ°": 20454, + "ðŁĴ°": 14078, + "ðŁĴ°ðŁĴ°": 41747, + "ðŁĴµ": 47412, + "ðŁĴµ": 38041, + "ðŁĴ¸": 37696, + "ðŁĴ¸": 25957, + "ðŁĴ»": 33433, + "ðŁĴ»": 18135, + "ðŁĴ¿": 39541, + "ðŁĴĢ": 14888, + "ðŁĴĢ": 12158, + "ðŁĴĢðŁĴĢ": 30884, + "ðŁĴģ": 13997, + "ðŁĴģ": 14392, + "ðŁĴĥ": 9947, + "ðŁĴĥ": 14333, + "ðŁĴĥðŁı»": 38624, + "ðŁĴĥðŁĴĥ": 28041, + "ðŁĴĦ": 46116, + "ðŁĴĦ": 34571, + "ðŁĴħ": 27457, + "ðŁĴħ": 32414, + "ðŁĴī": 44316, + "ðŁĴī": 30503, + "ðŁĴĭ": 12217, + "ðŁĴĭ": 7417, + "ðŁĴĭðŁĴĭ": 29214, + "ðŁĴĮ": 40817, + "ðŁĴį": 35850, + "ðŁĴį": 24898, + "ðŁĴİ": 25938, + "ðŁĴİ": 15874, + "ðŁĴIJ": 27375, + "ðŁĴIJ": 20554, + "ðŁĴij": 49404, + "ðŁĴĵ": 20628, + "ðŁĴĵ": 12568, + "ðŁĴĵðŁĴĵ": 43505, + "ðŁĴĶ": 18880, + "ðŁĴĶ": 10704, + "ðŁĴĶðŁĴĶ": 44673, + "ðŁĴķ": 5412, + "ðŁĴķ": 3082, + "ðŁĴķðŁĴķ": 23106, + "ðŁĴķðŁĴķ": 14117, + "ðŁĴķðŁĴķðŁĴķ": 26772, + "ðŁĴĸ": 8466, + "ðŁĴĸ": 5582, + "ðŁĴĸðŁĴĸ": 19562, + "ðŁĴĸðŁĴĸ": 30595, + "ðŁĴĸðŁĴĸðŁĴĸ": 33915, + "ðŁĴĹ": 10148, + "ðŁĴĹ": 6690, + "ðŁĴĹðŁĴĹ": 47158, + "ðŁĴĹðŁĴĹ": 24064, + "ðŁĴĹðŁĴĹðŁĴĹ": 36990, + "ðŁĴĺ": 18223, + "ðŁĴĺ": 10816, + "ðŁĴĺðŁĴĺ": 40464, + "ðŁĴĻ": 5305, + "ðŁĴĻ": 4074, + "ðŁĴĻðŁĴĻ": 17833, + "ðŁĴĻðŁĴĻ": 27101, + "ðŁĴĻðŁĴĻðŁĴĻ": 30698, + "ðŁĴĻðŁĴĽ": 46804, + "ðŁĴĻðŁĴĽ": 26230, + "ðŁĴĻðŁĴľ": 47931, + "ðŁĴĻðŁĴľ": 42541, + "ðŁĴļ": 8102, + "ðŁĴļ": 6521, + 
"ðŁĴļðŁĴļ": 27497, + "ðŁĴļðŁĴļ": 46209, + "ðŁĴļðŁĴļðŁĴļ": 46182, + "ðŁĴļðŁĴĽ": 41232, + "ðŁĴĽ": 8221, + "ðŁĴĽ": 6233, + "ðŁĴĽðŁĴĻ": 36337, + "ðŁĴĽðŁĴļ": 37994, + "ðŁĴĽðŁĴĽ": 32420, + "ðŁĴľ": 6832, + "ðŁĴľ": 4882, + "ðŁĴľðŁĴľ": 17280, + "ðŁĴľðŁĴľ": 28211, + "ðŁĴľðŁĴľðŁĴľ": 31004, + "ðŁĴĿ": 36761, + "ðŁĴĿ": 22002, + "ðŁĴŀ": 14862, + "ðŁĴŀ": 8988, + "ðŁĴŀðŁĴŀ": 36448, + "ðŁĴŁ": 49394, + "ðŁĴŁ": 28828, + "ðŁĴŃ": 33848, + "ðŁĵ": 1497, + "ðŁĵ¢": 46560, + "ðŁĵ¢": 20901, + "ðŁĵ£": 48841, + "ðŁĵ£": 21282, + "ðŁĵ°:": 28952, + "ðŁĵ°": 14985, + "ðŁĵ±": 36104, + "ðŁĵ±": 20824, + "ðŁĵ²": 19363, + "ðŁĵ·": 6966, + "ðŁĵ·:": 8294, + "ðŁĵ·": 5551, + "ðŁĵ·@": 40032, + "ðŁĵ¸": 8401, + "ðŁĵ¸:": 10379, + "ðŁĵ¸": 6074, + "ðŁĵ¸@": 39660, + "ðŁĵ¹": 49251, + "ðŁĵº": 21792, + "ðŁĵº:": 29728, + "ðŁĵº": 10450, + "ðŁĵ»": 32711, + "ðŁĵ»": 15882, + "ðŁĵ½": 45361, + "ðŁĵħ": 21277, + "ðŁĵĨ": 23471, + "ðŁĵĪ": 23359, + "ðŁĵĬ": 22244, + "ðŁĵĭ": 46351, + "ðŁĵĮ": 22289, + "ðŁĵį": 25043, + "ðŁĵį:": 36845, + "ðŁĵį": 8903, + "ðŁĵĸ": 49003, + "ðŁĵĸ": 23043, + "ðŁĵļ": 25433, + "ðŁĵļ": 15566, + "ðŁĵĿ": 31888, + "ðŁĵĿ:": 48398, + "ðŁĵĿ": 15853, + "ðŁĵŀ": 24022, + "ðŁĶ": 1428, + "ðŁĶ¥": 3191, + "ðŁĶ¥#": 44354, + "ðŁĶ¥": 3016, + "ðŁĶ¥ðŁĶ¥": 5692, + "ðŁĶ¥ðŁĶ¥": 11771, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥": 11004, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 23408, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 30989, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 48401, + "ðŁĶ¥ðŁĶĹ": 35130, + "ðŁĶª": 47078, + "ðŁĶª": 34545, + "ðŁĶ«": 38116, + "ðŁĶ«": 20583, + "ðŁĶ¬": 44227, + "ðŁĶ®": 38077, + "ðŁĶ´": 12408, + "ðŁĶ´": 10854, + "ðŁĶ´âļªï¸ı": 46879, + "ðŁĶ´âļªï¸ı": 40055, + "ðŁĶµ": 17531, + "ðŁĶµ": 17193, + "ðŁĶµâļªï¸ı": 42412, + "ðŁĶ¶": 42880, + "ðŁĶ¶": 36222, + "ðŁĶ·": 37740, + "ðŁĶ¸": 24200, + "ðŁĶ¹": 19995, + "ðŁĶº": 45561, + "ðŁĶģ": 41299, + "ðŁĶĬ": 32580, + "ðŁĶĬ": 20502, + "ðŁĶİ": 44935, + "ðŁĶij": 35127, + "ðŁĶĴ": 44972, + "ðŁĶĶ": 45753, + "ðŁĶĹ": 47475, + "ðŁĶĹ": 14561, + "ðŁĶĺ": 38995, + "ðŁĶľ": 36011, + "ðŁĶĿ": 44387, + "ðŁĶĿ": 29506, + "ðŁķ": 7692, + "ðŁķº": 33958, + "ðŁķĬ": 42624, + "ðŁķĬ": 37760, + "ðŁĸ": 6269, + "ðŁĸ¤": 17603, + "ðŁĸ¤": 10860, + "ðŁĸ¥": 47990, + "ðŁĹ": 7045, + "ðŁĹ£": 33232, + "ðŁĹ£": 18583, + "ðŁĹ£ï¸ı": 37476, + "ðŁĹĵ": 34335, + "ðŁĹĵ": 28773, + "ðŁĹĵï¸ı": 39847, + "ðŁĺ": 668, + "ðŁĺ¡": 21968, + "ðŁĺ¡": 17452, + "ðŁĺ¡ðŁĺ¡": 37223, + "ðŁĺ¢": 14308, + "ðŁĺ¢": 9925, + "ðŁĺ¢ðŁĺ¢": 32923, + "ðŁĺ¢ðŁĺ¢": 47921, + "ðŁĺ£": 32718, + "ðŁĺ¤": 26872, + "ðŁĺ¤": 20740, + "ðŁĺ¥": 38383, + "ðŁĺ¥": 23951, + "ðŁĺ¨": 38080, + "ðŁĺ©": 9051, + "ðŁĺ©": 9494, + "ðŁĺ©ðŁĺ©": 22820, + "ðŁĺ©ðŁĺ©": 38031, + "ðŁĺ©ðŁĺ©ðŁĺ©": 49063, + "ðŁĺª": 38181, + "ðŁĺª": 22243, + "ðŁĺ«": 25141, + "ðŁĺ«": 22340, + "ðŁĺ¬": 23704, + "ðŁĺ¬": 14549, + "ðŁĺ®": 40163, + "ðŁĺ®": 21616, + "ðŁĺ¯": 37858, + "ðŁĺ°": 34728, + "ðŁĺ±": 10938, + "ðŁĺ±": 9055, + "ðŁĺ±ðŁĺ±": 22061, + "ðŁĺ±ðŁĺ±": 40767, + "ðŁĺ±ðŁĺ±ðŁĺ±": 40909, + "ðŁĺ²": 40460, + "ðŁĺ²": 24620, + "ðŁĺ³": 12047, + "ðŁĺ³": 8223, + "ðŁĺ³ðŁĺ³": 32592, + "ðŁĺ´": 23527, + "ðŁĺ´": 16415, + "ðŁĺ´ðŁĺ´": 49307, + "ðŁĺµ": 39368, + "ðŁĺ¶": 35207, + "ðŁĺ·": 37943, + "ðŁĺ·": 25759, + "ðŁĺ¸": 36912, + "ðŁĺ¹": 26477, + "ðŁĺ¹": 26573, + "ðŁĺ¹ðŁĺ¹": 46287, + "ðŁĺº": 40613, + "ðŁĺ»": 15453, + "ðŁĺ»": 12911, + "ðŁĺ»ðŁĺ»": 34414, + "ðŁĺ¼": 44245, + "ðŁĺ½": 45156, + "ðŁĺĢ": 12832, + "ðŁĺĢ": 7334, + "ðŁĺĢðŁĺĢ": 34503, + "ðŁĺģ": 6967, + "ðŁĺģ": 4821, + "ðŁĺģðŁĺģ": 37900, + "ðŁĺģðŁĺģ": 19213, + "ðŁĺģðŁĺģðŁĺģ": 29083, + "ðŁĺĤ": 1424, + "ðŁĺĤ)": 42643, + "ðŁĺĤ.": 42550, + "ðŁĺĤ": 1558, + "ðŁĺĤâĿ¤ï¸ı": 36412, + "ðŁĺĤðŁijĮ": 42000, + "ðŁĺĤðŁĺĤ": 2286, + "ðŁĺĤðŁĺĤ": 4112, + "ðŁĺĤðŁĺĤðŁĺĤ": 22233, + "ðŁĺĤðŁĺĤðŁĺĤ": 4887, + 
"ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 9936, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 11522, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 19295, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 33415, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 48973, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 28504, + "ðŁĺĤðŁĺį": 43128, + "ðŁĺĤðŁĺŃ": 28965, + "ðŁĺĤðŁĺŃ": 25802, + "ðŁĺĥ": 14079, + "ðŁĺĥ": 8520, + "ðŁĺĥðŁĺĥ": 38358, + "ðŁĺĦ": 12141, + "ðŁĺĦ": 7624, + "ðŁĺĦðŁĺĦ": 32312, + "ðŁĺħ": 15245, + "ðŁĺħ": 9188, + "ðŁĺħðŁĺħ": 39078, + "ðŁĺĨ": 16541, + "ðŁĺĨ": 10943, + "ðŁĺĨðŁĺĨ": 39503, + "ðŁĺĩ": 21694, + "ðŁĺĩ": 13091, + "ðŁĺĪ": 14377, + "ðŁĺĪ": 9756, + "ðŁĺĪðŁĺĪ": 44473, + "ðŁĺī": 9740, + "ðŁĺī": 4955, + "ðŁĺīðŁĺī": 40430, + "ðŁĺĬ": 4692, + "ðŁĺĬ": 3020, + "ðŁĺĬâĿ¤ï¸ı": 43606, + "ðŁĺĬðŁĺĬ": 12838, + "ðŁĺĬðŁĺĬ": 20842, + "ðŁĺĬðŁĺĬðŁĺĬ": 28685, + "ðŁĺĬðŁĺĬðŁĺĬðŁĺĬ": 35519, + "ðŁĺĭ": 12391, + "ðŁĺĭ": 7203, + "ðŁĺĭðŁĺĭ": 33304, + "ðŁĺĮ": 19221, + "ðŁĺĮ": 12163, + "ðŁĺį": 1796, + "ðŁĺį#": 42357, + "ðŁĺį.": 48579, + "ðŁĺį": 1754, + "ðŁĺįâĿ¤": 29122, + "ðŁĺįâĿ¤ï¸ı": 21945, + "ðŁĺįðŁijĮ": 41005, + "ðŁĺįðŁĴķ": 35946, + "ðŁĺįðŁĶ¥": 46648, + "ðŁĺįðŁĺĤ": 48715, + "ðŁĺįðŁĺį": 3663, + "ðŁĺįðŁĺį": 6471, + "ðŁĺįðŁĺįðŁĺį": 30614, + "ðŁĺįðŁĺįðŁĺį": 7703, + "ðŁĺįðŁĺįðŁĺįðŁĺį": 16603, + "ðŁĺįðŁĺįðŁĺįðŁĺį": 18925, + "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 32078, + "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 48683, + "ðŁĺįðŁĺĺ": 29646, + "ðŁĺįðŁĺĺ": 19849, + "ðŁĺįðŁĺŃ": 39555, + "ðŁĺİ": 7426, + "ðŁĺİ": 4345, + "ðŁĺİðŁĺİ": 24048, + "ðŁĺİðŁĺİðŁĺİ": 39742, + "ðŁĺı": 11624, + "ðŁĺı": 6909, + "ðŁĺıðŁĺı": 38151, + "ðŁĺIJ": 38586, + "ðŁĺIJ": 19618, + "ðŁĺij": 32469, + "ðŁĺij": 18937, + "ðŁĺĴ": 20792, + "ðŁĺĴ": 11702, + "ðŁĺĵ": 28733, + "ðŁĺĶ": 19532, + "ðŁĺĶ": 11432, + "ðŁĺķ": 45741, + "ðŁĺķ": 20602, + "ðŁĺĸ": 35006, + "ðŁĺĺ": 4240, + "ðŁĺĺ": 3352, + "ðŁĺĺâĿ¤": 48409, + "ðŁĺĺâĿ¤ï¸ı": 39150, + "ðŁĺĺðŁĺį": 38176, + "ðŁĺĺðŁĺĺ": 15663, + "ðŁĺĺðŁĺĺ": 10507, + "ðŁĺĺðŁĺĺðŁĺĺ": 20208, + "ðŁĺĺðŁĺĺðŁĺĺðŁĺĺ": 44892, + "ðŁĺĻ": 36201, + "ðŁĺĻ": 29209, + "ðŁĺļ": 24897, + "ðŁĺļ": 19102, + "ðŁĺĽ": 24550, + "ðŁĺĽ": 15745, + "ðŁĺľ": 13226, + "ðŁĺľ": 7830, + "ðŁĺľðŁĺľ": 43065, + "ðŁĺĿ": 20064, + "ðŁĺĿ": 12970, + "ðŁĺŀ": 40458, + "ðŁĺŀ": 21103, + "ðŁĺŁ": 46947, + "ðŁĺł": 34094, + "ðŁĺŃ": 2962, + "ðŁĺŃ": 3915, + "ðŁĺŃâĿ¤ï¸ı": 29567, + "ðŁĺŃðŁĴķ": 46306, + "ðŁĺŃðŁĺĤ": 38505, + "ðŁĺŃðŁĺį": 36893, + "ðŁĺŃðŁĺŃ": 5300, + "ðŁĺŃðŁĺŃ": 11834, + "ðŁĺŃðŁĺŃðŁĺŃ": 44089, + "ðŁĺŃðŁĺŃðŁĺŃ": 13116, + "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 19793, + "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 27322, + "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 43366, + "ðŁĻ": 1478, + "ðŁĻĢ": 43092, + "ðŁĻĤ": 32006, + "ðŁĻĤ": 14860, + "ðŁĻĥ": 27222, + "ðŁĻĥ": 15652, + "ðŁĻĦ": 20648, + "ðŁĻĦ": 13049, + "ðŁĻħ": 42702, + "ðŁĻĨ": 30050, + "ðŁĻĨ": 35730, + "ðŁĻĪ": 12661, + "ðŁĻĪ": 9516, + "ðŁĻĪðŁĻĪ": 41796, + "ðŁĻĬ": 23684, + "ðŁĻĬ": 16636, + "ðŁĻĭ": 19193, + "ðŁĻĭ": 30274, + "ðŁĻĮ": 4366, + "ðŁĻĮ": 4855, + "ðŁĻĮðŁı»": 26756, + "ðŁĻĮðŁı»": 15799, + "ðŁĻĮðŁı¼": 26584, + "ðŁĻĮðŁı¼": 15364, + "ðŁĻĮðŁı½": 36660, + "ðŁĻĮðŁı½": 22962, + "ðŁĻĮðŁı¾": 38023, + "ðŁĻĮðŁı¾": 26466, + "ðŁĻĮðŁĻĮ": 21202, + "ðŁĻĮðŁĻĮ": 30430, + "ðŁĻĮðŁĻĮðŁĻĮ": 37127, + "ðŁĻı": 4260, + "ðŁĻı": 5503, + "ðŁĻıðŁı»": 25100, + "ðŁĻıðŁı»": 16650, + "ðŁĻıðŁı¼": 31163, + "ðŁĻıðŁı¼": 18952, + "ðŁĻıðŁı½": 34103, + "ðŁĻıðŁı½": 21540, + "ðŁĻıðŁı¾": 34277, + "ðŁĻıðŁı¾": 21979, + "ðŁĻıðŁĻı": 18227, + "ðŁĻıðŁĻı": 26510, + "ðŁĻıðŁĻıðŁĻı": 31702, + "ðŁļ": 2730, + "ðŁļ¨": 12198, + "ðŁļ¨": 6056, + "ðŁļ¨ðŁļ¨": 36487, + "ðŁļ¨ðŁļ¨": 21440, + "ðŁļ¨ðŁļ¨ðŁļ¨": 41515, + "ðŁļ©": 44514, + "ðŁļ«": 35291, + "ðŁļ²": 37085, + "ðŁļ´": 30825, + "ðŁļ¶": 46060, + "ðŁļĢ": 22400, + "ðŁļĢ": 13542, + "ðŁļĢðŁļĢ": 49033, + "ðŁļĤ": 
38949, + "ðŁļĮ": 46891, + "ðŁļĹ": 33054, + "ðŁļĹ": 22783, + "ðŁļĺ": 35825, + "ðŁļĻ": 48487, + "ðŁĽ": 11306, + "ñ": 173, + "ñ": 429, + "ò": 174, + "ò": 430, + "ó": 175, + "ó": 431, + "ô": 176, + "ô": 432, + "õ": 177, + "õ": 433, + "ö": 178, + "ö": 434, + "÷": 179, + "÷": 435, + "ø": 180, + "ø": 436, + "ù": 181, + "ù": 437, + "ú": 182, + "ú": 438, + "û": 183, + "û": 439, + "ü": 184, + "ü": 440, + "ý": 185, + "ý": 441, + "þ": 186, + "þ": 442, + "ÿ": 187, + "ÿ": 443, + "Ā": 188, + "Ā": 444, + "ā": 189, + "ā": 445, + "Ă": 190, + "Ă": 446, + "ă": 191, + "ă": 447, + "Ą": 192, + "Ą": 448, + "ą": 193, + "ą": 449, + "Ć": 194, + "Ć": 450, + "ć": 195, + "ć": 451, + "Ĉ": 196, + "Ĉ": 452, + "ĉ": 197, + "ĉ": 453, + "Ċ": 198, + "Ċ": 454, + "ċ": 199, + "ċ": 455, + "Č": 200, + "Č": 456, + "č": 201, + "č": 457, + "Ď": 202, + "Ď": 458, + "ď": 203, + "ď": 459, + "Đ": 204, + "Đ": 460, + "đ": 205, + "đ": 461, + "Ē": 206, + "Ē": 462, + "ē": 207, + "ē": 463, + "Ĕ": 208, + "Ĕ": 464, + "ĕ": 209, + "ĕ": 465, + "Ė": 210, + "Ė": 466, + "ė": 211, + "ė": 467, + "Ę": 212, + "Ę": 468, + "ę": 213, + "ę": 469, + "Ě": 214, + "Ě": 470, + "ě": 215, + "ě": 471, + "Ĝ": 216, + "Ĝ": 472, + "ĝ": 217, + "ĝ": 473, + "Ğ": 218, + "Ğ": 474, + "ğ": 219, + "ğ": 475, + "Ġ": 220, + "Ġ": 476, + "ġ": 221, + "ġ": 477, + "Ģ": 222, + "Ģ": 478, + "Ģï¸ı": 9668, + "Ģï¸ı": 5511, + "ģ": 223, + "ģ": 479, + "ģà¸": 15016, + "Ĥ": 224, + "Ĥ": 480, + "Ĥâĸ": 29036, + "ĤâĸĤâĸ": 30832, + "ĥ": 225, + "ĥ": 481, + "Ħ": 226, + "Ħ": 482, + "Ħà¸": 20537, + "Ħë": 34462, + "Ħëĭ": 25170, + "ħ": 227, + "ħ": 483, + "ħï¸ı": 33950, + "Ĩ": 228, + "Ĩ": 484, + "ĩ": 229, + "ĩ": 485, + "Ī": 230, + "Ī": 486, + "ī": 231, + "ī": 487, + "īï¸ı": 37463, + "Ĭ": 232, + "Ĭ": 488, + "Ĭãģ": 30294, + "ĭ": 233, + "ĭ": 489, + "ĭãģ": 36218, + "ĭãĤ": 45737, + "Į": 234, + "Į": 490, + "ĮãĤĬãģ": 45969, + "ĮãĤĬãģŁãģĦ": 47021, + "Įë": 17003, + "į": 235, + "į": 491, + "İ": 236, + "İ": 492, + "ı": 237, + "ı": 493, + "IJ": 238, + "IJ": 494, + "ij": 239, + "ij": 495, + "Ĵ": 240, + "Ĵ": 496, + "ĵ": 241, + "ĵ": 497, + "Ķ": 242, + "Ķ": 498, + "Ķë": 37978, + "Ķï¸ı": 24395, + "Ķï¸ı": 7443, + "ķ": 243, + "ķ": 499, + "ķãĤ": 26609, + "ķï¸ı": 44853, + "ĸ": 244, + "ĸ": 500, + "ĸï¸ı": 28877, + "Ĺ": 245, + "Ĺ": 501, + "ĺ": 246, + "ĺ": 502, + "Ļ": 247, + "Ļ": 503, + "ļ": 248, + "ļ": 504, + "Ľ": 249, + "Ľ": 505, + "ľ": 250, + "ľ": 506, + "ľë": 39810, + "Ŀ": 251, + "Ŀ": 507, + "ŀ": 252, + "ŀ": 508, + "Ł": 253, + "Ł": 509, + "ŁãģĦ": 46023, + "ł": 254, + "ł": 510, + "łï¸ı": 27899, + "łï¸ı": 12715, + "łĪ": 43364, + "Ń": 255, + "Ń": 511 +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/unet/config.json b/data/models/huggingface/stable-diffusion-v1-5/unet/config.json new file mode 100644 index 0000000000000000000000000000000000000000..0aa4a3b12d29fdc01a7133a2a928959e6a0f485a --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/unet/config.json @@ -0,0 +1,68 @@ +{ + "_class_name": "UNet2DConditionModel", + "_diffusers_version": "0.23.0", + "_name_or_path": "/home/user/.cache/huggingface/hub/models--runwayml--stable-diffusion-v1-5/snapshots/1d0c4ebf6ff58a5caecab40fa1406526bca4b5b9/unet", + "act_fn": "silu", + "addition_embed_type": null, + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": null, + "attention_head_dim": 8, + "attention_type": "default", + "block_out_channels": [ + 320, + 640, + 1280, + 1280 + ], + "center_input_sample": false, + "class_embed_type": null, + "class_embeddings_concat": false, + "conv_in_kernel": 3, + "conv_out_kernel": 3, + "cross_attention_dim": 768, + 
"cross_attention_norm": null, + "down_block_types": [ + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "dropout": 0.0, + "dual_cross_attention": false, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_only_cross_attention": null, + "mid_block_scale_factor": 1, + "mid_block_type": "UNetMidBlock2DCrossAttn", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "out_channels": 4, + "projection_class_embeddings_input_dim": null, + "resnet_out_scale_factor": 1.0, + "resnet_skip_time_act": false, + "resnet_time_scale_shift": "default", + "reverse_transformer_layers_per_block": null, + "sample_size": 64, + "time_cond_proj_dim": null, + "time_embedding_act_fn": null, + "time_embedding_dim": null, + "time_embedding_type": "positional", + "timestep_post_act": null, + "transformer_layers_per_block": 1, + "up_block_types": [ + "UpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D" + ], + "upcast_attention": false, + "use_linear_projection": false +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/unet/diffusion_pytorch_model.safetensors b/data/models/huggingface/stable-diffusion-v1-5/unet/diffusion_pytorch_model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b46f099a7851d878c46c2c3a3da52c2e4c0165bc --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/unet/diffusion_pytorch_model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d27cd69d4a0aa32105087a619f32a51bc087e133be93fe23da92f3c0bcc07d79 +size 3438167536 diff --git a/data/models/huggingface/stable-diffusion-v1-5/vae/config.json b/data/models/huggingface/stable-diffusion-v1-5/vae/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b5bd9e0c6ba901a7bdbaff26db8363193b66da1e --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/vae/config.json @@ -0,0 +1,32 @@ +{ + "_class_name": "AutoencoderKL", + "_diffusers_version": "0.23.0", + "_name_or_path": "/home/user/.cache/huggingface/hub/models--runwayml--stable-diffusion-v1-5/snapshots/1d0c4ebf6ff58a5caecab40fa1406526bca4b5b9/vae", + "act_fn": "silu", + "block_out_channels": [ + 128, + 256, + 512, + 512 + ], + "down_block_types": [ + "DownEncoderBlock2D", + "DownEncoderBlock2D", + "DownEncoderBlock2D", + "DownEncoderBlock2D" + ], + "force_upcast": true, + "in_channels": 3, + "latent_channels": 4, + "layers_per_block": 2, + "norm_num_groups": 32, + "out_channels": 3, + "sample_size": 512, + "scaling_factor": 0.18215, + "up_block_types": [ + "UpDecoderBlock2D", + "UpDecoderBlock2D", + "UpDecoderBlock2D", + "UpDecoderBlock2D" + ] +} diff --git a/data/models/huggingface/stable-diffusion-v1-5/vae/diffusion_pytorch_model.safetensors b/data/models/huggingface/stable-diffusion-v1-5/vae/diffusion_pytorch_model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2094758bacefe775536294dc7c67f86ae54dcf8b --- /dev/null +++ b/data/models/huggingface/stable-diffusion-v1-5/vae/diffusion_pytorch_model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b4d2b5932bb4151e54e694fd31ccf51fca908223c9485bd56cd0e1d83ad94c49 +size 334643268 diff --git a/data/models/huggingface/xxmix9realistic_v40.safetensors 
b/data/models/huggingface/xxmix9realistic_v40.safetensors new file mode 100644
index 0000000000000000000000000000000000000000..76e539d5fe89fdbddf0297eb26a0fa8ca7f04684
--- /dev/null
+++ b/data/models/huggingface/xxmix9realistic_v40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18ed2b6c48fda400330e5dec9e6f4d714ef664869ea8d4021f12ba699b31da06
+size 2400040320
diff --git a/data/models/motion-module/mm_sd_v15_v2.ckpt b/data/models/motion-module/mm_sd_v15_v2.ckpt
new file mode 100644
index 0000000000000000000000000000000000000000..e52e8a28920f998f42b8fa2bc0a277f42c6eb176
--- /dev/null
+++ b/data/models/motion-module/mm_sd_v15_v2.ckpt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69ed0f5fef82b110aca51bcab73b21104242bc65d6ab4b8b2a2a94d31cad1bf0
+size 1817888431
diff --git a/example.md b/example.md
new file mode 100644
index 0000000000000000000000000000000000000000..b37ca780cc8a8924a91f82f66f0e3ecb2e8b307a
--- /dev/null
+++ b/example.md
@@ -0,0 +1,144 @@
+### Example
+
+- region prompt (txt2img / no controlnet)
+- region 0 ... 1girl, upper body, etc.
+- region 1 ... ((car)), street, road, no human, etc.
+- background ... town, outdoors, etc.
+- IP-Adapter input for background / region 0 / region 1
+
+
+- `animatediff generate -c config/prompts/region_txt2img.json -W 512 -H 768 -L 32 -C 16` (512x768, 32 frames, context window of 16; a Python equivalent is sketched below)
+- region 0 mask / region 1 mask / txt2img
+ + + +
+
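+The `animatediff generate` invocation above maps directly onto the `generate` function's parameters, so the same run can be reproduced from Python in the same way the repo's own `scripts/test_persistent.py` does:
+
+```python
+from pathlib import Path
+
+from animatediff.cli import generate
+
+# Same run as the CLI invocation above: -W 512 -H 768 -L 32 -C 16
+out_dir = generate(
+    config_path=Path("config/prompts/region_txt2img.json"),
+    width=512,
+    height=768,
+    length=32,
+    context=16,
+)
+print(f"outputs saved to {out_dir}")
+```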
+
+- Apply a different LoRA to each region:
+- [abdiel](https://civitai.com/models/159943/abdiel-shin-megami-tensei-v-v) for region 0
+- [amanozoko](https://civitai.com/models/159933/amanozoko-shin-megami-tensei-v-v) for region 1
+- no LoRA for the background
+
+```json
+  # new lora_map format
+  "lora_map": {
+    # Specify each lora as a path relative to /animatediff-cli/data
+    "share/Lora/zs_Abdiel.safetensors": {  # settings for the abdiel lora
+      "region" : ["0"],  # target regions; more than one region may be listed
+      "scale" : {
+        # "frame_no" : scale format
+        "0": 0.75  # lora scale, in the same frame-keyed format as prompt_map; for example, a lora can be set to take effect only from frame 30 onward
+      }
+    },
+    "share/Lora/zs_Amanazoko.safetensors": {  # settings for the amanozoko lora
+      "region" : ["1"],  # target region
+      "scale" : {
+        "0": 0.75
+      }
+    }
+  },
+```
+- more examples [here](https://github.com/s9roll7/animatediff-cli-prompt-travel/issues/147)
+
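+The `#` comments above are annotations only; JSON has no comment syntax, so they must be stripped before the config will parse. As a comment-free illustration of the frame-keyed `scale` mentioned above (assuming scale values follow the same frame-number convention as prompt_map), this keeps the lora off until frame 30:
+
+```json
+  "lora_map": {
+    "share/Lora/zs_Abdiel.safetensors": {
+      "region": ["0"],
+      "scale": { "0": 0.0, "30": 0.75 }
+    }
+  },
+```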
+
+
+
+- img2img (driven by the `img2img_map` block of the prompt config; see the sketch below)
+- The result could be improved further with controlnet, but this sample does not use it.
+- source / denoising_strength 0.7 / denoising_strength 0.85
+
+
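+A minimal `img2img_map` sketch for the comparison above; the field names are assumptions based on this fork's sample configs, so verify them against a generated `prompt.json`:
+
+```json
+  "img2img_map": {
+    "enable": true,
+    "init_img_dir": "init_imgs/sample",
+    "denoising_strength": 0.7
+  },
+```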
+
+- [A command for stylization with regions has been added](https://github.com/s9roll7/animatediff-cli-prompt-travel#video-stylization-with-region).
+- (You can also create the json manually without using the stylize command.)
+- region prompt
+- region division into person shapes
+- source / img2img / txt2img
+
+
+- source / region division into person shapes / inpaint
+
+
+
+
+
+
+- [A command for stylization with a mask has been added](https://github.com/s9roll7/animatediff-cli-prompt-travel#video-stylization-with-mask).
+- more examples [here](https://github.com/s9roll7/animatediff-cli-prompt-travel/issues/111)
+
+
+
+
+- [A command to automate video stylization has been added](https://github.com/s9roll7/animatediff-cli-prompt-travel#video-stylization).
+- Original / first generation result / second generation (for upscaling) result
+- It took 4 minutes to generate the first one and about 5 minutes to generate the second one (on an RTX 4090).
+- more examples [here](https://github.com/s9roll7/animatediff-cli-prompt-travel/issues/29)
+
+
+
+
+- controlnet_openpose + controlnet_softedge (enabled via the `controlnet_map` block; see the sketch below)
+- input frames for controlnet (frames 0, 16, 32)
+
+
+- result
+
+
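+A trimmed `controlnet_map` sketch for the openpose + softedge pair above; the field names follow this fork's sample configs and should be treated as illustrative rather than exhaustive:
+
+```json
+  "controlnet_map": {
+    "input_image_dir": "controlnet_image/example",
+    "controlnet_openpose": {
+      "enable": true,
+      "use_preprocessor": true,
+      "controlnet_conditioning_scale": 1.0
+    },
+    "controlnet_softedge": {
+      "enable": true,
+      "use_preprocessor": true,
+      "controlnet_conditioning_scale": 1.0
+    }
+  },
+```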
+ +- In the latest version, generation can now be controlled more precisely through prompts. +- sample 1 +```json + "prompt_fixed_ratio": 0.8, + "head_prompt": "1girl, wizard, circlet, earrings, jewelry, purple hair,", + "prompt_map": { + "0": "(standing,full_body),blue_sky, town", + "8": "(sitting,full_body),rain, town", + "16": "(standing,full_body),blue_sky, woods", + "24": "(upper_body), beach", + "32": "(upper_body, smile)", + "40": "(upper_body, angry)", + "48": "(upper_body, smile, from_above)", + "56": "(upper_body, angry, from_side)", + "64": "(upper_body, smile, from_below)", + "72": "(upper_body, angry, from_behind, looking at viewer)", + "80": "face,looking at viewer", + "88": "face,looking at viewer, closed_eyes", + "96": "face,looking at viewer, open eyes, open_mouth", + "104": "face,looking at viewer, closed_eyes, closed_mouth", + "112": "face,looking at viewer, open eyes,eyes, open_mouth, tongue, smile, laughing", + "120": "face,looking at viewer, eating, bowl,chopsticks,holding,food" + }, +``` +
+
+ +- sample 2 +```json + "prompt_fixed_ratio": 1.0, + "head_prompt": "1girl, wizard, circlet, earrings, jewelry, purple hair,", + "prompt_map": { + "0": "", + "8": "((fire magic spell, fire background))", + "16": "((ice magic spell, ice background))", + "24": "((thunder magic spell, thunder background))", + "32": "((skull magic spell, skull background))", + "40": "((wind magic spell, wind background))", + "48": "((stone magic spell, stone background))", + "56": "((holy magic spell, holy background))", + "64": "((star magic spell, star background))", + "72": "((plant magic spell, plant background))", + "80": "((meteor magic spell, meteor background))" + }, +``` +
+
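+As a rough mental model of how the two samples above drive prompt travel: each `prompt_map` key is a frame number, frames between two keys blend the neighbouring keyframe prompts, and `prompt_fixed_ratio` controls how much of each interval stays pinned to the earlier prompt before blending begins (so `1.0`, as in sample 2, effectively gives hard switches). The sketch below only illustrates that keyframe lookup; it is not the actual implementation:
+
+```python
+# Illustrative keyframe lookup for prompt travel (not the real code).
+def blend_weights(prompt_map: dict[str, str], frame: int, fixed_ratio: float) -> dict[str, float]:
+    keys = sorted(int(k) for k in prompt_map)
+    prev = max((k for k in keys if k <= frame), default=keys[0])
+    nxt = min((k for k in keys if k > frame), default=prev)
+    if nxt == prev:  # at or past the last keyframe: hold its prompt
+        return {prompt_map[str(prev)]: 1.0}
+    t = (frame - prev) / (nxt - prev)  # progress through this interval
+    if t <= fixed_ratio:  # still inside the "fixed" part of the interval
+        return {prompt_map[str(prev)]: 1.0}
+    w = (t - fixed_ratio) / (1.0 - fixed_ratio)  # blend-in weight of the next prompt
+    return {prompt_map[str(prev)]: 1.0 - w, prompt_map[str(nxt)]: w}
+```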
+ diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..cb30c7d3d50411723ad2b3d7b302f8d9ccb8e4a5 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,27 @@ +[build-system] +build-backend = "setuptools.build_meta" +requires = ["setuptools>=46.4.0", "wheel", "setuptools_scm[toml]>=6.2"] + +[tool.setuptools_scm] +write_to = "src/animatediff/_version.py" + +[tool.black] +line-length = 110 +target-version = ['py310'] +ignore = ['F841', 'F401', 'E501'] +preview = true + +[tool.ruff] +line-length = 110 +target-version = 'py310' +ignore = ['F841', 'F401', 'E501'] + +[tool.ruff.isort] +combine-as-imports = true +force-wrap-aliases = true +known-local-folder = ["src"] +known-first-party = ["animatediff"] + +[tool.pyright] +include = ['src/**'] +exclude = ['/usr/lib/**'] diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..a77930056f7d847084852575287ffb4c2642cdfe --- /dev/null +++ b/requirements.txt @@ -0,0 +1,165 @@ +absl-py==2.1.0 +accelerate==0.30.1 +aiofiles==23.2.1 +aiohttp==3.9.5 +aiosignal==1.3.1 +altair==5.3.0 +analytics-python==1.4.post1 +-e git+https://github.com/TheNetherWatcher/Vid2Vid-using-Text-prompt.git@213aa1fc330895557d619514bc778d0985a3112a#egg=animatediff +annotated-types==0.7.0 +antlr4-python3-runtime==4.9.3 +anyio==4.4.0 +async-timeout==4.0.3 +attrs==23.2.0 +backoff==1.10.0 +bcrypt==4.1.3 +beautifulsoup4==4.12.3 +blinker==1.8.2 +certifi==2024.2.2 +cffi==1.16.0 +charset-normalizer==3.3.2 +click==8.1.7 +cmake==3.29.3 +colorama==0.4.6 +coloredlogs==15.0.1 +contourpy==1.2.1 +controlnet-aux==0.0.9 +cryptography==42.0.7 +cycler==0.12.1 +diffusers==0.23.0 +dnspython==2.6.1 +einops==0.8.0 +email_validator==2.1.1 +exceptiongroup==1.2.1 +fastapi==0.111.0 +fastapi-cli==0.0.4 +ffmpeg-python==0.2.0 +ffmpy==0.3.2 +filelock==3.14.0 +Flask==3.0.3 +Flask-CacheBuster==1.0.0 +Flask-Cors==4.0.1 +Flask-Login==0.6.3 +flatbuffers==24.3.25 +fonttools==4.53.0 +frozenlist==1.4.1 +fsspec==2024.5.0 +future==1.0.0 +gdown==5.2.0 +gradio==3.0 +gradio_client==0.7.0 +h11==0.14.0 +httpcore==1.0.5 +httptools==0.6.1 +httpx==0.27.0 +huggingface-hub==0.17.3 +humanfriendly==10.0 +idna==3.7 +imageio==2.34.1 +importlib_metadata==7.1.0 +importlib_resources==6.4.0 +itsdangerous==2.2.0 +jax==0.4.28 +jaxlib==0.4.28 +Jinja2==3.1.4 +jsonschema==4.22.0 +jsonschema-specifications==2023.12.1 +kiwisolver==1.4.5 +lazy_loader==0.4 +linkify-it-py==2.0.3 +markdown-it-py==3.0.0 +markdown2==2.4.13 +MarkupSafe==2.1.5 +matplotlib==3.9.0 +mdit-py-plugins==0.4.1 +mdurl==0.1.2 +mediapipe==0.10.14 +ml-dtypes==0.4.0 +monotonic==1.6 +mpmath==1.3.0 +multidict==6.0.5 +networkx==3.3 +ninja==1.11.1.1 +numpy==1.26.4 +nvidia-cublas-cu12==12.1.3.1 +nvidia-cuda-cupti-cu12==12.1.105 +nvidia-cuda-nvrtc-cu12==12.1.105 +nvidia-cuda-runtime-cu12==12.1.105 +nvidia-cudnn-cu12==8.9.2.26 +nvidia-cufft-cu12==11.0.2.54 +nvidia-curand-cu12==10.3.2.106 +nvidia-cusolver-cu12==11.4.5.107 +nvidia-cusparse-cu12==12.1.0.106 +nvidia-nccl-cu12==2.18.1 +nvidia-nvjitlink-cu12==12.5.40 +nvidia-nvtx-cu12==12.1.105 +omegaconf==2.3.0 +onnxruntime-gpu==1.18.0 +opencv-contrib-python==4.9.0.80 +opencv-python==4.9.0.80 +opencv-python-headless==4.9.0.80 +opt-einsum==3.3.0 +orjson==3.10.3 +packaging==24.0 +pandas==2.2.2 +paramiko==3.4.0 +Pillow==9.5.0 +protobuf==4.25.3 +psutil==5.9.8 +pycparser==2.22 +pycryptodome==3.20.0 +pydantic==1.10.15 +pydantic_core==2.18.3 +pydub==0.25.1 +Pygments==2.18.0 +PyNaCl==1.5.0 +pyparsing==3.1.2 +PySocks==1.7.1 
+python-dateutil==2.9.0.post0 +python-dotenv==1.0.1 +python-multipart==0.0.9 +pytz==2024.1 +PyYAML==6.0.1 +referencing==0.35.1 +regex==2024.5.15 +requests==2.32.3 +rich==13.7.1 +rpds-py==0.18.1 +ruff==0.4.7 +safetensors==0.4.3 +scikit-image==0.23.2 +scipy==1.13.1 +semantic-version==2.10.0 +sentencepiece==0.2.0 +shellingham==1.5.4 +six==1.16.0 +sniffio==1.3.1 +sounddevice==0.4.7 +soupsieve==2.5 +starlette==0.37.2 +sympy==1.12.1 +tifffile==2024.5.22 +timm==0.6.7 +tokenizers==0.14.1 +tomlkit==0.12.0 +toolz==0.12.1 +torch==2.1.2 +torchaudio==2.1.2 +torchvision==0.16.2 +tqdm==4.66.4 +transformers==4.34.1 +triton==2.1.0 +typer==0.12.3 +typing_extensions==4.12.0 +tzdata==2024.1 +uc-micro-py==1.0.3 +ujson==5.10.0 +urllib3==2.2.1 +uvicorn==0.30.1 +uvloop==0.19.0 +watchfiles==0.22.0 +websockets==11.0.3 +Werkzeug==3.0.3 +xformers==0.0.23.post1 +yarl==1.9.4 +zipp==3.19.1 \ No newline at end of file diff --git a/scripts/download/01-Motion-Modules.sh b/scripts/download/01-Motion-Modules.sh new file mode 100644 index 0000000000000000000000000000000000000000..4b1404252806386bc59bdcd767213c6d6924ae02 --- /dev/null +++ b/scripts/download/01-Motion-Modules.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +echo "Attempting download of Motion Module models from Google Drive." +echo "If this fails, please download them manually from the links in the error messages/README." + +gdown 1RqkQuGPaCO5sGZ6V6KZ-jUWmsRu48Kdq -O models/motion-module/ || true +gdown 1ql0g_Ys4UCz2RnokYlBjyOYPbttbIpbu -O models/motion-module/ || true + +echo "Motion module download script complete." +echo "If you see errors above, please download the models manually from the links in the error messages/README." +exit 0 diff --git a/scripts/download/02-All-SD-Models.sh b/scripts/download/02-All-SD-Models.sh new file mode 100644 index 0000000000000000000000000000000000000000..7226b028ad85fc85ccb7c561a4aad09f3ecd8d9f --- /dev/null +++ b/scripts/download/02-All-SD-Models.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +set -euo pipefail + +repo_dir=$(git rev-parse --show-toplevel) +if [[ ! -d "${repo_dir}" ]]; then + echo "Could not find the repo root. Checking for ./data/models/sd" + repo_dir="." +fi + +models_dir=$(realpath "${repo_dir}/data/models/sd") +if [[ ! -d "${models_dir}" ]]; then + echo "Could not find repo root or models directory." + echo "Either create ./data/models/sd or run this script from a checked-out git repo." + exit 1 +fi + +model_urls=( + https://civitai.com/api/download/models/78775 # ToonYou + https://civitai.com/api/download/models/72396 # Lyriel + https://civitai.com/api/download/models/71009 # RcnzCartoon + https://civitai.com/api/download/models/79068 # MajicMix + https://civitai.com/api/download/models/29460 # RealisticVision + https://civitai.com/api/download/models/97261 # Tusun (1/2) + https://civitai.com/api/download/models/50705 # Tusun (2/2) + https://civitai.com/api/download/models/90115 # FilmVelvia (1/2) + https://civitai.com/api/download/models/92475 # FilmVelvia (2/2) + https://civitai.com/api/download/models/102828 # GhibliBackground (1/2) + https://civitai.com/api/download/models/57618 # GhibliBackground (2/2) +) + +echo "Downloading model files to ${models_dir}..." 
+
+# Create the models directory if it doesn't exist
+mkdir -p "${models_dir}"
+
+# Download the models
+for url in "${model_urls[@]}"; do
+    curl -JLO --output-dir "${models_dir}" "${url}" || true
+done
diff --git a/scripts/download/03-BaseSD.py b/scripts/download/03-BaseSD.py
new file mode 100644
index 0000000000000000000000000000000000000000..26c9fcd4b611c731c3b766e1a06b2e9cda0e5c9e
--- /dev/null
+++ b/scripts/download/03-BaseSD.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python3
+from diffusers.pipelines import StableDiffusionPipeline
+
+from animatediff import get_dir
+
+out_dir = get_dir("data/models/huggingface/stable-diffusion-v1-5")
+
+pipeline = StableDiffusionPipeline.from_pretrained(
+    "runwayml/stable-diffusion-v1-5",
+    use_safetensors=True,
+    safety_checker=None, requires_safety_checker=False,  # pass directly; wrapping these in a kwargs= dict would be silently ignored
+)
+pipeline.save_pretrained(
+    save_directory=str(out_dir),
+    safe_serialization=True,
+)
diff --git a/scripts/download/11-ToonYou.sh b/scripts/download/11-ToonYou.sh
new file mode 100644
index 0000000000000000000000000000000000000000..43ad97fd438037cf54c6b0a8fc1f9a2fb711c263
--- /dev/null
+++ b/scripts/download/11-ToonYou.sh
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+wget https://civitai.com/api/download/models/78775 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate
diff --git a/scripts/download/12-Lyriel.sh b/scripts/download/12-Lyriel.sh
new file mode 100644
index 0000000000000000000000000000000000000000..8c5f53dc13835b2a72cd63cad6d82cf8ea6492af
--- /dev/null
+++ b/scripts/download/12-Lyriel.sh
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+wget https://civitai.com/api/download/models/72396 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate
diff --git a/scripts/download/13-RcnzCartoon.sh b/scripts/download/13-RcnzCartoon.sh
new file mode 100644
index 0000000000000000000000000000000000000000..b0529dceda610b111f06fe3e2bc3cb0686fa6782
--- /dev/null
+++ b/scripts/download/13-RcnzCartoon.sh
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+wget https://civitai.com/api/download/models/71009 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate
diff --git a/scripts/download/14-MajicMix.sh b/scripts/download/14-MajicMix.sh
new file mode 100644
index 0000000000000000000000000000000000000000..794afc28243da7220896945558f5071837e0a120
--- /dev/null
+++ b/scripts/download/14-MajicMix.sh
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+wget https://civitai.com/api/download/models/79068 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate
diff --git a/scripts/download/15-RealisticVision.sh b/scripts/download/15-RealisticVision.sh
new file mode 100644
index 0000000000000000000000000000000000000000..da1729af6844e780958e8851146d74fa81d83e89
--- /dev/null
+++ b/scripts/download/15-RealisticVision.sh
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+wget https://civitai.com/api/download/models/29460 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate
diff --git a/scripts/download/16-Tusun.sh b/scripts/download/16-Tusun.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3874a94b78d23d85fb2a26200d20274f962cc4db
--- /dev/null
+++ b/scripts/download/16-Tusun.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+wget https://civitai.com/api/download/models/97261 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate
+wget https://civitai.com/api/download/models/50705 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate
diff --git a/scripts/download/17-FilmVelvia.sh b/scripts/download/17-FilmVelvia.sh
new file
mode 100644 index 0000000000000000000000000000000000000000..1bca1c6e6e67868d83d6718add275e82734f4dc0 --- /dev/null +++ b/scripts/download/17-FilmVelvia.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +wget https://civitai.com/api/download/models/90115 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate +wget https://civitai.com/api/download/models/92475 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate diff --git a/scripts/download/18-GhibliBackground.sh b/scripts/download/18-GhibliBackground.sh new file mode 100644 index 0000000000000000000000000000000000000000..131d0aec1e4e6101bb32b62f0493e342b9ba1a35 --- /dev/null +++ b/scripts/download/18-GhibliBackground.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +wget https://civitai.com/api/download/models/102828 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate +wget https://civitai.com/api/download/models/57618 -P models/DreamBooth_LoRA/ --content-disposition --no-check-certificate diff --git a/scripts/download/sd-models.aria2 b/scripts/download/sd-models.aria2 new file mode 100644 index 0000000000000000000000000000000000000000..161e19245677a35cba990deb76fe8339ff9512b7 --- /dev/null +++ b/scripts/download/sd-models.aria2 @@ -0,0 +1,22 @@ +https://civitai.com/api/download/models/78775 + out=models/sd/toonyou_beta3.safetensors +https://civitai.com/api/download/models/72396 + out=models/sd/lyriel_v16.safetensors +https://civitai.com/api/download/models/71009 + out=models/sd/rcnzCartoon3d_v10.safetensors +https://civitai.com/api/download/models/79068 + out=majicmixRealistic_v5Preview.safetensors +https://civitai.com/api/download/models/29460 + out=models/sd/realisticVisionV40_v20Novae.safetensors +https://civitai.com/api/download/models/97261 + out=models/sd/TUSUN.safetensors +https://civitai.com/api/download/models/50705 + out=models/sd/leosamsMoonfilm_reality20.safetensors +https://civitai.com/api/download/models/90115 + out=models/sd/FilmVelvia2.safetensors +https://civitai.com/api/download/models/92475 + out=models/sd/leosamsMoonfilm_filmGrain10.safetensors +https://civitai.com/api/download/models/102828 + out=models/sd/Pyramid\ lora_Ghibli_n3.safetensors +https://civitai.com/api/download/models/57618 + out=models/sd/CounterfeitV30_v30.safetensors diff --git a/scripts/test_persistent.py b/scripts/test_persistent.py new file mode 100644 index 0000000000000000000000000000000000000000..9b49d091934957ffba08e986e864fa407e9295dc --- /dev/null +++ b/scripts/test_persistent.py @@ -0,0 +1,37 @@ +from rich import print + +from animatediff import get_dir +from animatediff.cli import generate, logger + +config_dir = get_dir("config") + +config_path = config_dir.joinpath("prompts/test.json") +width = 512 +height = 512 +length = 32 +context = 16 +stride = 4 + +logger.warn("Running first-round generation test, this should load the full model.\n\n") +out_dir = generate( + config_path=config_path, + width=width, + height=height, + length=length, + context=context, + stride=stride, +) +logger.warn(f"Generated animation to {out_dir}") + +logger.warn("\n\nRunning second-round generation test, this should reuse the already loaded model.\n\n") +out_dir = generate( + config_path=config_path, + width=width, + height=height, + length=length, + context=context, + stride=stride, +) +logger.warn(f"Generated animation to {out_dir}") + +logger.error("If the second round didn't talk about reloading the model, it worked! 
yay!") diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..f779eccb523e090a825cc709e67c90890cfd0314 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,92 @@ +[metadata] +name = animatediff +author = Andi Powers-Holmes +email = aholmes@omnom.net +maintainer = Andi Powers-Holmes +maintainer_email = aholmes@omnom.net +license_files = LICENSE.md + +[options] +python_requires = >=3.10 +packages = find: +package_dir = + =src +py_modules = + animatediff +include_package_data = True +install_requires = + accelerate >= 0.20.3 + colorama >= 0.4.3, < 0.5.0 + cmake >= 3.25.0 + diffusers == 0.23.0 + einops >= 0.6.1 + gdown >= 4.6.6 + ninja >= 1.11.0 + numpy >= 1.22.4 + omegaconf >= 2.3.0 + pillow >= 9.4.0, < 10.0.0 + pydantic >= 1.10.0, < 2.0.0 + rich >= 13.0.0, < 14.0.0 + safetensors >= 0.3.1 + sentencepiece >= 0.1.99 + shellingham >= 1.5.0, < 2.0.0 + torch >= 2.1.0, < 2.2.0 + torchaudio + torchvision + transformers >= 4.30.2, < 4.35.0 + typer >= 0.9.0, < 1.0.0 + controlnet_aux + matplotlib + ffmpeg-python >= 0.2.0 + mediapipe + xformers >= 0.0.22.post7 + opencv-python + +[options.packages.find] +where = src + +[options.package_data] + * = *.txt, *.md + +[options.extras_require] +dev = + black >= 22.3.0 + ruff >= 0.0.234 + setuptools-scm >= 7.0.0 + pre-commit >= 3.3.0 + ipython +rife = + ffmpeg-python >= 0.2.0 +stylize = + ffmpeg-python >= 0.2.0 + onnxruntime-gpu + pandas + opencv-python +dwpose = + onnxruntime-gpu +stylize_mask = + ffmpeg-python >= 0.2.0 + pandas + segment-anything-hq == 0.3 + groundingdino-py == 0.4.0 + gitpython + rembg[gpu] + onnxruntime-gpu + +[options.entry_points] +console_scripts = + animatediff = animatediff.cli:cli + +[flake8] +max-line-length = 110 +ignore = + # these are annoying during development but should be enabled later + F401 # module imported but unused + F841 # local variable is assigned to but never used + # black automatically fixes this + E501 # line too long + # black breaks these two rules: + E203 # whitespace before : + W503 # line break before binary operator +extend-exclude = + .venv diff --git a/setup.py b/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..b908cbe55cb344569d32de1dfc10ca7323828dc5 --- /dev/null +++ b/setup.py @@ -0,0 +1,3 @@ +import setuptools + +setuptools.setup() diff --git a/src/animatediff/__init__.py b/src/animatediff/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..042e97a682db80c56e479d108c284c496d0bf6af --- /dev/null +++ b/src/animatediff/__init__.py @@ -0,0 +1,67 @@ +try: + from ._version import ( + version as __version__, + version_tuple, + ) +except ImportError: + __version__ = "unknown (no version information available)" + version_tuple = (0, 0, "unknown", "noinfo") + +from functools import lru_cache +from os import getenv +from pathlib import Path +from warnings import filterwarnings + +from rich.console import Console +from tqdm import TqdmExperimentalWarning + +PACKAGE = __package__.replace("_", "-") +PACKAGE_ROOT = Path(__file__).parent.parent + +HF_HOME = Path(getenv("HF_HOME", Path.home() / ".cache" / "huggingface")) +HF_HUB_CACHE = Path(getenv("HUGGINGFACE_HUB_CACHE", HF_HOME.joinpath("hub"))) + +HF_LIB_NAME = "animatediff-cli" +HF_LIB_VER = __version__ +HF_MODULE_REPO = "neggles/animatediff-modules" + +console = Console(highlight=True) +err_console = Console(stderr=True) + +# shhh torch, don't worry about it it's fine +filterwarnings("ignore", category=UserWarning, message="TypedStorage is deprecated") +# you too tqdm 
+filterwarnings("ignore", category=TqdmExperimentalWarning) + + +@lru_cache(maxsize=4) +def get_dir(dirname: str = "data") -> Path: + if PACKAGE_ROOT.name == "src": + # we're installed in editable mode from within the repo + dirpath = PACKAGE_ROOT.parent.joinpath(dirname) + else: + # we're installed normally, so we just use the current working directory + dirpath = Path.cwd().joinpath(dirname) + dirpath.mkdir(parents=True, exist_ok=True) + return dirpath.absolute() + + +__all__ = [ + "__version__", + "version_tuple", + "PACKAGE", + "PACKAGE_ROOT", + "HF_HOME", + "HF_HUB_CACHE", + "console", + "err_console", + "get_dir", + "models", + "pipelines", + "rife", + "utils", + "cli", + "generate", + "schedulers", + "settings", +] diff --git a/src/animatediff/__main__.py b/src/animatediff/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..bf4d0abdb53959b6f3b62562c2ecf0a4dbbb3d45 --- /dev/null +++ b/src/animatediff/__main__.py @@ -0,0 +1,4 @@ +from animatediff.cli import cli + +if __name__ == "__main__": + cli() diff --git a/src/animatediff/cli.py b/src/animatediff/cli.py new file mode 100644 index 0000000000000000000000000000000000000000..67fbdc68ee2ac94221eaa20637d058fedba54d9c --- /dev/null +++ b/src/animatediff/cli.py @@ -0,0 +1,1186 @@ +import glob +import logging +import os.path +from datetime import datetime +from pathlib import Path +from typing import Annotated, Optional + +if False: + if 'PYTORCH_CUDA_ALLOC_CONF' in os.environ: + os.environ['PYTORCH_CUDA_ALLOC_CONF'] = ",backend:cudaMallocAsync" + else: + os.environ['PYTORCH_CUDA_ALLOC_CONF'] = "backend:cudaMallocAsync" + + #"garbage_collection_threshold:0.6" + # max_split_size_mb:1024" + # "backend:cudaMallocAsync" + # roundup_power2_divisions:4 + print(f"{os.environ['PYTORCH_CUDA_ALLOC_CONF']=}") + +if False: + os.environ['PYTORCH_NO_CUDA_MEMORY_CACHING']="1" + + +import torch +import typer +from diffusers import DiffusionPipeline +from diffusers.utils.logging import \ + set_verbosity_error as set_diffusers_verbosity_error +from rich.logging import RichHandler + +from animatediff import __version__, console, get_dir +from animatediff.generate import (controlnet_preprocess, create_pipeline, + create_us_pipeline, img2img_preprocess, + ip_adapter_preprocess, + load_controlnet_models, prompt_preprocess, + region_preprocess, run_inference, + run_upscale, save_output, + unload_controlnet_models, + wild_card_conversion) +from animatediff.pipelines import AnimationPipeline, load_text_embeddings +from animatediff.settings import (CKPT_EXTENSIONS, InferenceConfig, + ModelConfig, get_infer_config, + get_model_config) +from animatediff.utils.civitai2config import generate_config_from_civitai_info +from animatediff.utils.model import (checkpoint_to_pipeline, + fix_checkpoint_if_needed, get_base_model) +from animatediff.utils.pipeline import get_context_params, send_to_device +from animatediff.utils.util import (extract_frames, is_sdxl_checkpoint, + is_v2_motion_module, path_from_cwd, + save_frames, save_imgs, save_video, + set_tensor_interpolation_method, show_gpu) +from animatediff.utils.wild_card import replace_wild_card + +cli: typer.Typer = typer.Typer( + context_settings=dict(help_option_names=["-h", "--help"]), + rich_markup_mode="rich", + no_args_is_help=True, + pretty_exceptions_show_locals=False, +) +data_dir = get_dir("data") +checkpoint_dir = data_dir.joinpath("models/sd") +pipeline_dir = data_dir.joinpath("models/huggingface") + + +try: + import google.colab + IN_COLAB = True +except: + IN_COLAB = False + +if 
IN_COLAB: + import sys + logging.basicConfig( + level=logging.INFO, + stream=sys.stdout, + format="%(message)s", + datefmt="%H:%M:%S", + force=True, + ) +else: + logging.basicConfig( + level=logging.INFO, + format="%(message)s", + handlers=[ + RichHandler(console=console, rich_tracebacks=True), + ], + datefmt="%H:%M:%S", + force=True, + ) + +logger = logging.getLogger(__name__) + + +from importlib.metadata import version as meta_version + +from packaging import version + +diffuser_ver = meta_version('diffusers') + +logger.info(f"{diffuser_ver=}") + +if version.parse(diffuser_ver) < version.parse('0.23.0'): + logger.error(f"The version of diffusers is out of date") + logger.error(f"python -m pip install diffusers==0.23.0") + raise ImportError("Please update diffusers to 0.23.0") + +try: + from animatediff.rife import app as rife_app + + cli.add_typer(rife_app, name="rife") +except ImportError: + logger.debug("RIFE not available, skipping...", exc_info=True) + rife_app = None + + +from animatediff.stylize import stylize + +cli.add_typer(stylize, name="stylize") + + + + +# mildly cursed globals to allow for reuse of the pipeline if we're being called as a module +g_pipeline: Optional[DiffusionPipeline] = None +last_model_path: Optional[Path] = None + + +def version_callback(value: bool): + if value: + console.print(f"AnimateDiff v{__version__}") + raise typer.Exit() + +def get_random(): + import sys + + import numpy as np + return int(np.random.randint(sys.maxsize, dtype=np.int64)) + + +@cli.command() +def generate( + config_path: Annotated[ + Path, + typer.Option( + "--config-path", + "-c", + path_type=Path, + exists=True, + readable=True, + dir_okay=False, + help="Path to a prompt configuration JSON file", + ), + ] = Path("config/prompts/01-ToonYou.json"), + width: Annotated[ + int, + typer.Option( + "--width", + "-W", + min=64, + max=3840, + help="Width of generated frames", + rich_help_panel="Generation", + ), + ] = 512, + height: Annotated[ + int, + typer.Option( + "--height", + "-H", + min=64, + max=2160, + help="Height of generated frames", + rich_help_panel="Generation", + ), + ] = 512, + length: Annotated[ + int, + typer.Option( + "--length", + "-L", + min=1, + max=9999, + help="Number of frames to generate", + rich_help_panel="Generation", + ), + ] = 16, + context: Annotated[ + Optional[int], + typer.Option( + "--context", + "-C", + min=1, + max=32, + help="Number of frames to condition on (default: 16)", + show_default=False, + rich_help_panel="Generation", + ), + ] = 16, + overlap: Annotated[ + Optional[int], + typer.Option( + "--overlap", + "-O", + min=0, + max=12, + help="Number of frames to overlap in context (default: context//4)", + show_default=False, + rich_help_panel="Generation", + ), + ] = None, + stride: Annotated[ + Optional[int], + typer.Option( + "--stride", + "-S", + min=0, + max=8, + help="Max motion stride as a power of 2 (default: 0)", + show_default=False, + rich_help_panel="Generation", + ), + ] = None, + repeats: Annotated[ + int, + typer.Option( + "--repeats", + "-r", + min=1, + max=99, + help="Number of times to repeat the prompt (default: 1)", + show_default=False, + rich_help_panel="Generation", + ), + ] = 1, + device: Annotated[ + str, + typer.Option( + "--device", "-d", help="Device to run on (cpu, cuda, cuda:id)", rich_help_panel="Advanced" + ), + ] = "cuda", + use_xformers: Annotated[ + bool, + typer.Option( + "--xformers", + "-x", + is_flag=True, + help="Use XFormers instead of SDP Attention", + rich_help_panel="Advanced", + ), + ] = False, + 
force_half_vae: Annotated[ + bool, + typer.Option( + "--half-vae", + is_flag=True, + help="Force VAE to use fp16 (not recommended)", + rich_help_panel="Advanced", + ), + ] = False, + out_dir: Annotated[ + Path, + typer.Option( + "--out-dir", + "-o", + path_type=Path, + file_okay=False, + help="Directory for output folders (frames, gifs, etc)", + rich_help_panel="Output", + ), + ] = Path("output/"), + no_frames: Annotated[ + bool, + typer.Option( + "--no-frames", + "-N", + is_flag=True, + help="Don't save frames, only the animation", + rich_help_panel="Output", + ), + ] = False, + save_merged: Annotated[ + bool, + typer.Option( + "--save-merged", + "-m", + is_flag=True, + help="Save a merged animation of all prompts", + rich_help_panel="Output", + ), + ] = False, + version: Annotated[ + Optional[bool], + typer.Option( + "--version", + "-v", + callback=version_callback, + is_eager=True, + is_flag=True, + help="Show version", + ), + ] = None, +): + """ + Do the thing. Make the animation happen. Waow. + """ + + # be quiet, diffusers. we care not for your safety checker + set_diffusers_verbosity_error() + + #torch.set_flush_denormal(True) + + config_path = config_path.absolute() + logger.info(f"Using generation config: {path_from_cwd(config_path)}") + model_config: ModelConfig = get_model_config(config_path) + + is_sdxl = is_sdxl_checkpoint(data_dir.joinpath(model_config.path)) + + if is_sdxl: + is_v2 = False + else: + is_v2 = is_v2_motion_module(data_dir.joinpath(model_config.motion_module)) + + infer_config: InferenceConfig = get_infer_config(is_v2, is_sdxl) + + set_tensor_interpolation_method( model_config.tensor_interpolation_slerp ) + + # set sane defaults for context, overlap, and stride if not supplied + context, overlap, stride = get_context_params(length, context, overlap, stride) + + if (not is_v2) and (not is_sdxl) and (context > 24): + logger.warning( "For motion module v1, the maximum value of context is 24. 
Set to 24" ) + context = 24 + + # turn the device string into a torch.device + device: torch.device = torch.device(device) + + model_name_or_path = Path("runwayml/stable-diffusion-v1-5") if not is_sdxl else Path("stabilityai/stable-diffusion-xl-base-1.0") + + # Get the base model if we don't have it already + logger.info(f"Using base model: {model_name_or_path}") + base_model_path: Path = get_base_model(model_name_or_path, local_dir=get_dir("data/models/huggingface"), is_sdxl=is_sdxl) + + # get a timestamp for the output directory + time_str = datetime.now().strftime("%Y-%m-%dT%H-%M-%S") + # make the output directory + save_dir = out_dir.joinpath(f"{time_str}-{model_config.save_name}") + save_dir.mkdir(parents=True, exist_ok=True) + logger.info(f"Will save outputs to ./{path_from_cwd(save_dir)}") + + controlnet_image_map, controlnet_type_map, controlnet_ref_map, controlnet_no_shrink = controlnet_preprocess(model_config.controlnet_map, width, height, length, save_dir, device, is_sdxl) + img2img_map = img2img_preprocess(model_config.img2img_map, width, height, length, save_dir) + + # beware the pipeline + global g_pipeline + global last_model_path + pipeline_already_loaded = False + if g_pipeline is None or last_model_path != model_config.path.resolve(): + g_pipeline = create_pipeline( + base_model=base_model_path, + model_config=model_config, + infer_config=infer_config, + use_xformers=use_xformers, + video_length=length, + is_sdxl=is_sdxl + ) + last_model_path = model_config.path.resolve() + else: + logger.info("Pipeline already loaded, skipping initialization") + # reload TIs; create_pipeline does this for us, but they may have changed + # since load time if we're being called from another package + #load_text_embeddings(g_pipeline, is_sdxl=is_sdxl) + pipeline_already_loaded = True + + load_controlnet_models(pipe=g_pipeline, model_config=model_config, is_sdxl=is_sdxl) + +# if g_pipeline.device == device: + if pipeline_already_loaded: + logger.info("Pipeline already on the correct device, skipping device transfer") + else: + + g_pipeline = send_to_device( + g_pipeline, device, freeze=True, force_half=force_half_vae, compile=model_config.compile, is_sdxl=is_sdxl + ) + + torch.cuda.empty_cache() + + apply_lcm_lora = False + if model_config.lcm_map: + if "enable" in model_config.lcm_map: + apply_lcm_lora = model_config.lcm_map["enable"] + + # save raw config to output directory + save_config_path = save_dir.joinpath("raw_prompt.json") + save_config_path.write_text(model_config.json(indent=4), encoding="utf-8") + + # fix seed + for i, s in enumerate(model_config.seed): + if s == -1: + model_config.seed[i] = get_random() + + # wildcard conversion + wild_card_conversion(model_config) + + is_init_img_exist = img2img_map != None + region_condi_list, region_list, ip_adapter_config_map, region2index = region_preprocess(model_config, width, height, length, save_dir, is_init_img_exist, is_sdxl) + + if controlnet_type_map: + for c in controlnet_type_map: + tmp_r = [region2index[r] for r in controlnet_type_map[c]["control_region_list"]] + controlnet_type_map[c]["control_region_list"] = [r for r in tmp_r if r != -1] + logger.info(f"{c=} / {controlnet_type_map[c]['control_region_list']}") + + # save config to output directory + logger.info("Saving prompt config to output directory") + save_config_path = save_dir.joinpath("prompt.json") + save_config_path.write_text(model_config.json(indent=4), encoding="utf-8") + + num_negatives = len(model_config.n_prompt) + num_seeds = len(model_config.seed) + gen_total 
= repeats # total number of generations + + logger.info("Initialization complete!") + logger.info(f"Generating {gen_total} animations") + outputs = [] + + gen_num = 0 # global generation index + + # repeat the prompts if we're doing multiple runs + for _ in range(repeats): + if model_config.prompt_map: + # get the index of the prompt, negative, and seed + idx = gen_num + logger.info(f"Running generation {gen_num + 1} of {gen_total}") + + # allow for reusing the same negative prompt(s) and seed(s) for multiple prompts + n_prompt = model_config.n_prompt[idx % num_negatives] + seed = model_config.seed[idx % num_seeds] + + logger.info(f"Generation seed: {seed}") + + + output = run_inference( + pipeline=g_pipeline, + n_prompt=n_prompt, + seed=seed, + steps=model_config.steps, + guidance_scale=model_config.guidance_scale, + unet_batch_size=model_config.unet_batch_size, + width=width, + height=height, + duration=length, + idx=gen_num, + out_dir=save_dir, + context_schedule=model_config.context_schedule, + context_frames=context, + context_overlap=overlap, + context_stride=stride, + clip_skip=model_config.clip_skip, + controlnet_map=model_config.controlnet_map, + controlnet_image_map=controlnet_image_map, + controlnet_type_map=controlnet_type_map, + controlnet_ref_map=controlnet_ref_map, + controlnet_no_shrink=controlnet_no_shrink, + no_frames=no_frames, + img2img_map=img2img_map, + ip_adapter_config_map=ip_adapter_config_map, + region_list=region_list, + region_condi_list=region_condi_list, + output_map = model_config.output, + is_single_prompt_mode=model_config.is_single_prompt_mode, + is_sdxl=is_sdxl, + apply_lcm_lora=apply_lcm_lora, + gradual_latent_map=model_config.gradual_latent_hires_fix_map + ) + outputs.append(output) + torch.cuda.empty_cache() + + # increment the generation number + gen_num += 1 + + unload_controlnet_models(pipe=g_pipeline) + + + logger.info("Generation complete!") + if save_merged: + logger.info("Output merged output video...") + merged_output = torch.concat(outputs, dim=0) + save_video(merged_output, save_dir.joinpath("final.gif")) + + logger.info("Done, exiting...") + cli.info + + return save_dir + +@cli.command() +def tile_upscale( + frames_dir: Annotated[ + Path, + typer.Argument(path_type=Path, file_okay=False, exists=True, help="Path to source frames directory"), + ] = ..., + model_name_or_path: Annotated[ + Path, + typer.Option( + ..., + "--model-path", + "-m", + path_type=Path, + help="Base model to use (path or HF repo ID). You probably don't need to change this.", + ), + ] = Path("runwayml/stable-diffusion-v1-5"), + config_path: Annotated[ + Path, + typer.Option( + "--config-path", + "-c", + path_type=Path, + exists=True, + readable=True, + dir_okay=False, + help="Path to a prompt configuration JSON file. 
default is frames_dir/../prompt.json", + ), + ] = None, + width: Annotated[ + int, + typer.Option( + "--width", + "-W", + min=-1, + max=3840, + help="Width of generated frames", + rich_help_panel="Generation", + ), + ] = -1, + height: Annotated[ + int, + typer.Option( + "--height", + "-H", + min=-1, + max=2160, + help="Height of generated frames", + rich_help_panel="Generation", + ), + ] = -1, + device: Annotated[ + str, + typer.Option( + "--device", "-d", help="Device to run on (cpu, cuda, cuda:id)", rich_help_panel="Advanced" + ), + ] = "cuda", + use_xformers: Annotated[ + bool, + typer.Option( + "--xformers", + "-x", + is_flag=True, + help="Use XFormers instead of SDP Attention", + rich_help_panel="Advanced", + ), + ] = False, + force_half_vae: Annotated[ + bool, + typer.Option( + "--half-vae", + is_flag=True, + help="Force VAE to use fp16 (not recommended)", + rich_help_panel="Advanced", + ), + ] = False, + out_dir: Annotated[ + Path, + typer.Option( + "--out-dir", + "-o", + path_type=Path, + file_okay=False, + help="Directory for output folders (frames, gifs, etc)", + rich_help_panel="Output", + ), + ] = Path("upscaled/"), + no_frames: Annotated[ + bool, + typer.Option( + "--no-frames", + "-N", + is_flag=True, + help="Don't save frames, only the animation", + rich_help_panel="Output", + ), + ] = False, +): + """Upscale frames using controlnet tile""" + # be quiet, diffusers. we care not for your safety checker + set_diffusers_verbosity_error() + + if width < 0 and height < 0: + raise ValueError(f"invalid width,height: {width},{height} \n At least one of them must be specified.") + + if not config_path: + tmp = frames_dir.parent.joinpath("prompt.json") + if tmp.is_file(): + config_path = tmp + + config_path = config_path.absolute() + logger.info(f"Using generation config: {path_from_cwd(config_path)}") + model_config: ModelConfig = get_model_config(config_path) + + is_sdxl = is_sdxl_checkpoint(data_dir.joinpath(model_config.path)) + if is_sdxl: + raise ValueError("Currently SDXL model is not available for this command.") + + infer_config: InferenceConfig = get_infer_config(is_v2_motion_module(data_dir.joinpath(model_config.motion_module)), is_sdxl) + frames_dir = frames_dir.absolute() + + set_tensor_interpolation_method( model_config.tensor_interpolation_slerp ) + + # turn the device string into a torch.device + device: torch.device = torch.device(device) + + # get a timestamp for the output directory + time_str = datetime.now().strftime("%Y-%m-%dT%H-%M-%S") + # make the output directory + save_dir = out_dir.joinpath(f"{time_str}-{model_config.save_name}") + save_dir.mkdir(parents=True, exist_ok=True) + logger.info(f"Will save outputs to ./{path_from_cwd(save_dir)}") + + + if "controlnet_tile" not in model_config.upscale_config: + model_config.upscale_config["controlnet_tile"] = { + "enable": True, + "controlnet_conditioning_scale": 1.0, + "guess_mode": False, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + } + + use_controlnet_ref = False + use_controlnet_tile = False + use_controlnet_line_anime = False + use_controlnet_ip2p = False + + if model_config.upscale_config: + use_controlnet_ref = model_config.upscale_config["controlnet_ref"]["enable"] if "controlnet_ref" in model_config.upscale_config else False + use_controlnet_tile = model_config.upscale_config["controlnet_tile"]["enable"] if "controlnet_tile" in model_config.upscale_config else False + use_controlnet_line_anime = model_config.upscale_config["controlnet_line_anime"]["enable"] if "controlnet_line_anime" 
in model_config.upscale_config else False
+        use_controlnet_ip2p = model_config.upscale_config["controlnet_ip2p"]["enable"] if "controlnet_ip2p" in model_config.upscale_config else False
+
+    if not (use_controlnet_tile or use_controlnet_line_anime or use_controlnet_ip2p):
+        raise ValueError(f"At least one of controlnet_tile, controlnet_line_anime, or controlnet_ip2p must be enabled. {use_controlnet_tile=}, {use_controlnet_line_anime=}, {use_controlnet_ip2p=}")
+
+    # beware the pipeline
+    us_pipeline = create_us_pipeline(
+        model_config=model_config,
+        infer_config=infer_config,
+        use_xformers=use_xformers,
+        use_controlnet_ref=use_controlnet_ref,
+        use_controlnet_tile=use_controlnet_tile,
+        use_controlnet_line_anime=use_controlnet_line_anime,
+        use_controlnet_ip2p=use_controlnet_ip2p,
+    )
+
+    if us_pipeline.device == device:
+        logger.info("Pipeline already on the correct device, skipping device transfer")
+    else:
+        us_pipeline = send_to_device(
+            us_pipeline, device, freeze=True, force_half=force_half_vae, compile=model_config.compile
+        )
+
+    model_config.result = {"original_frames": str(frames_dir)}
+
+    # save config to output directory
+    logger.info("Saving prompt config to output directory")
+    save_config_path = save_dir.joinpath("prompt.json")
+    save_config_path.write_text(model_config.json(indent=4), encoding="utf-8")
+
+    num_prompts = 1
+    num_negatives = len(model_config.n_prompt)
+    num_seeds = len(model_config.seed)
+
+    logger.info("Initialization complete!")
+
+    gen_num = 0  # global generation index
+
+    org_images = sorted(glob.glob(os.path.join(frames_dir, "[0-9]*.png"), recursive=False))
+    length = len(org_images)
+
+    if model_config.prompt_map:
+        # get the index of the prompt, negative, and seed
+        idx = gen_num % num_prompts
+        logger.info(f"Running generation {gen_num + 1} of 1 (prompt {idx + 1})")
+
+        # allow for reusing the same negative prompt(s) and seed(s) for multiple prompts
+        n_prompt = model_config.n_prompt[idx % num_negatives]
+        seed = model_config.seed[idx % num_seeds]
+
+        if seed == -1:
+            seed = get_random()
+        logger.info(f"Generation seed: {seed}")
+
+        # build the frame -> prompt map, wrapping each entry with the optional
+        # head and tail prompts
+        prompt_map = {}
+        for k in model_config.prompt_map.keys():
+            if int(k) < length:
+                pr = model_config.prompt_map[k]
+                if model_config.head_prompt:
+                    pr = model_config.head_prompt + "," + pr
+                if model_config.tail_prompt:
+                    pr = pr + "," + model_config.tail_prompt
+
+                prompt_map[int(k)] = pr
+
+        if model_config.upscale_config:
+            upscaled_output = run_upscale(
+                org_imgs=org_images,
+                pipeline=us_pipeline,
+                prompt_map=prompt_map,
+                n_prompt=n_prompt,
+                seed=seed,
+                steps=model_config.steps,
+                guidance_scale=model_config.guidance_scale,
+                clip_skip=model_config.clip_skip,
+                us_width=width,
+                us_height=height,
+                idx=gen_num,
+                out_dir=save_dir,
+                upscale_config=model_config.upscale_config,
+                use_controlnet_ref=use_controlnet_ref,
+                use_controlnet_tile=use_controlnet_tile,
+                use_controlnet_line_anime=use_controlnet_line_anime,
+                use_controlnet_ip2p=use_controlnet_ip2p,
+                no_frames=no_frames,
+                output_map=model_config.output,
+            )
+            torch.cuda.empty_cache()
+
+        # increment the generation number
+        gen_num += 1
+
+    logger.info("Generation complete!")
+
+    logger.info("Done, exiting...")
+
+    return save_dir
+
+@cli.command()
+def civitai2config(
+    lora_dir: Annotated[
+        Path,
+        typer.Argument(path_type=Path, file_okay=False, exists=True, help="Path to loras directory"),
+    ] = ...,
+    config_org: Annotated[
+        Path,
+        typer.Option(
+            "--config-org",
+            "-c",
+            path_type=Path,
+            dir_okay=False,
+            exists=True,
+            help="Path to original config file",
+        ),
+    ] = Path("config/prompts/prompt_travel.json"),
+    out_dir: Annotated[
+        Optional[Path],
+        typer.Option(
+            "--out-dir",
+            "-o",
+            path_type=Path,
+            file_okay=False,
+            help="Target directory for generated configs",
+        ),
+    ] = Path("config/prompts/converted/"),
+    lora_weight: Annotated[
+        float,
+        typer.Option(
+            "--lora_weight",
+            "-l",
+            min=0.0,
+            max=3.0,
+            help="Lora weight",
+        ),
+    ] = 0.75,
+):
+    """Generate config file from *.civitai.info"""
+
+    out_dir.mkdir(parents=True, exist_ok=True)
+
+    logger.info(f"Generating config files from: {lora_dir}")
+    generate_config_from_civitai_info(lora_dir, config_org, out_dir, lora_weight)
+    logger.info(f"Saved at: {out_dir.absolute()}")
+
+
+@cli.command()
+def convert(
+    checkpoint: Annotated[
+        Path,
+        typer.Option(
+            "--checkpoint",
+            "-i",
+            path_type=Path,
+            dir_okay=False,
+            exists=True,
+            help="Path to a model checkpoint file",
+        ),
+    ] = ...,
+    out_dir: Annotated[
+        Optional[Path],
+        typer.Option(
+            "--out-dir",
+            "-o",
+            path_type=Path,
+            file_okay=False,
+            help="Target directory for converted model",
+        ),
+    ] = None,
+):
+    """Convert a StableDiffusion checkpoint into a Diffusers pipeline"""
+    logger.info(f"Converting checkpoint: {checkpoint}")
+    _, pipeline_dir = checkpoint_to_pipeline(checkpoint, target_dir=out_dir)
+    logger.info(f"Converted to HuggingFace pipeline at {pipeline_dir}")
+
+
+@cli.command()
+def fix_checkpoint(
+    checkpoint: Annotated[
+        Path,
+        typer.Argument(path_type=Path, dir_okay=False, exists=True, help="Path to a model checkpoint file"),
+    ] = ...,
+    debug: Annotated[
+        bool,
+        typer.Option(
+            "--debug",
+            "-d",
+            is_flag=True,
+            rich_help_panel="Debug",
+        ),
+    ] = False,
+):
+    """Fix checkpoint with error "AttributeError: 'Attention' object has no attribute 'to_to_k'" on loading"""
+    set_diffusers_verbosity_error()
+
+    logger.info(f"Converting checkpoint: {checkpoint}")
+    fix_checkpoint_if_needed(checkpoint, debug)
+
+
+@cli.command()
+def merge(
+    checkpoint: Annotated[
+        Path,
+        typer.Option(
+            "--checkpoint",
+            "-i",
+            path_type=Path,
+            dir_okay=False,
+            exists=True,
+            help="Path to a model checkpoint file",
+        ),
+    ] = ...,
+    out_dir: Annotated[
+        Optional[Path],
+        typer.Option(
+            "--out-dir",
+            "-o",
+            path_type=Path,
+            file_okay=False,
+            help="Target directory for converted model",
+        ),
+    ] = None,
+):
+    """Convert a StableDiffusion checkpoint into an AnimationPipeline"""
+    raise NotImplementedError("Sorry, haven't implemented this yet!")
+
+    # NOTE: unreachable until the above is implemented
+    # if we have a checkpoint, convert it to HF automagically
+    if checkpoint.is_file() and checkpoint.suffix in CKPT_EXTENSIONS:
+        logger.info(f"Loading model from checkpoint: {checkpoint}")
+        # check if we've already converted this model
+        model_dir = pipeline_dir.joinpath(checkpoint.stem)
+        if model_dir.joinpath("model_index.json").exists():
+            # we have, so just use that
+            logger.info(f"Found converted model in {model_dir}, will not convert")
+            logger.info("Delete the output directory to re-run conversion.")
+        else:
+            # we haven't, so convert it
+            logger.info("Converting checkpoint to HuggingFace pipeline...")
+            g_pipeline, model_dir = checkpoint_to_pipeline(checkpoint)
+            logger.info("Done!")
+
+
+@cli.command(no_args_is_help=True)
+def refine(
+    frames_dir: Annotated[
+        Path,
+        typer.Argument(path_type=Path, file_okay=False, exists=True, help="Path to source frames directory"),
+    ] = ...,
+    config_path: Annotated[
+        Path,
+        typer.Option(
+            "--config-path",
+            "-c",
+            path_type=Path,
+            exists=True,
+            readable=True,
+            dir_okay=False,
+            help="Path to a prompt configuration JSON file. default is frames_dir/../prompt.json",
+        ),
+    ] = None,
+    interpolation_multiplier: Annotated[
+        int,
+        typer.Option(
+            "--interpolation-multiplier",
+            "-M",
+            min=1,
+            max=10,
+            help="Interpolate the source frames with RIFE before generation (interpolating after generation is usually sufficient).",
+            rich_help_panel="Generation",
+        ),
+    ] = 1,
+    tile_conditioning_scale: Annotated[
+        float,
+        typer.Option(
+            "--tile",
+            "-t",
+            min=0,
+            max=1.0,
+            help="controlnet_tile conditioning scale",
+            rich_help_panel="Generation",
+        ),
+    ] = 0.75,
+    width: Annotated[
+        int,
+        typer.Option(
+            "--width",
+            "-W",
+            min=-1,
+            max=3840,
+            help="Width of generated frames",
+            rich_help_panel="Generation",
+        ),
+    ] = -1,
+    height: Annotated[
+        int,
+        typer.Option(
+            "--height",
+            "-H",
+            min=-1,
+            max=2160,
+            help="Height of generated frames",
+            rich_help_panel="Generation",
+        ),
+    ] = -1,
+    length: Annotated[
+        int,
+        typer.Option(
+            "--length",
+            "-L",
+            min=-1,
+            max=9999,
+            help="Number of frames to generate. -1 means using all frames in frames_dir.",
+            rich_help_panel="Generation",
+        ),
+    ] = -1,
+    context: Annotated[
+        Optional[int],
+        typer.Option(
+            "--context",
+            "-C",
+            min=1,
+            max=32,
+            help="Number of frames to condition on (default: 16)",
+            show_default=False,
+            rich_help_panel="Generation",
+        ),
+    ] = 16,
+    overlap: Annotated[
+        Optional[int],
+        typer.Option(
+            "--overlap",
+            "-O",
+            min=1,
+            max=12,
+            help="Number of frames to overlap in context (default: context//4)",
+            show_default=False,
+            rich_help_panel="Generation",
+        ),
+    ] = None,
+    stride: Annotated[
+        Optional[int],
+        typer.Option(
+            "--stride",
+            "-S",
+            min=0,
+            max=8,
+            help="Max motion stride as a power of 2 (default: 0)",
+            show_default=False,
+            rich_help_panel="Generation",
+        ),
+    ] = None,
+    repeats: Annotated[
+        int,
+        typer.Option(
+            "--repeats",
+            "-r",
+            min=1,
+            max=99,
+            help="Number of times to repeat the refine (default: 1)",
+            show_default=False,
+            rich_help_panel="Generation",
+        ),
+    ] = 1,
+    device: Annotated[
+        str,
+        typer.Option(
+            "--device", "-d", help="Device to run on (cpu, cuda, cuda:id)", rich_help_panel="Advanced"
+        ),
+    ] = "cuda",
+    use_xformers: Annotated[
+        bool,
+        typer.Option(
+            "--xformers",
+            "-x",
+            is_flag=True,
+            help="Use XFormers instead of SDP Attention",
+            rich_help_panel="Advanced",
+        ),
+    ] = False,
+    force_half_vae: Annotated[
+        bool,
+        typer.Option(
+            "--half-vae",
+            is_flag=True,
+            help="Force VAE to use fp16 (not recommended)",
+            rich_help_panel="Advanced",
+        ),
+    ] = False,
+    out_dir: Annotated[
+        Path,
+        typer.Option(
+            "--out-dir",
+            "-o",
+            path_type=Path,
+            file_okay=False,
+            help="Directory for output folders (frames, gifs, etc)",
+            rich_help_panel="Output",
+        ),
+    ] = Path("refine/"),
+):
+    """Create upscaled or improved video using pre-generated frames"""
+    import shutil
+
+    from PIL import Image
+
+    from animatediff.rife.rife import rife_interpolate
+
+    if not config_path:
+        tmp = frames_dir.parent.joinpath("prompt.json")
+        if tmp.is_file():
+            config_path = tmp
+        else:
+            raise ValueError("config_path not specified and no default prompt.json found next to frames_dir.")
+
+    org_frames = sorted(glob.glob(os.path.join(frames_dir, "[0-9]*.png"), recursive=False))
+    W, H = Image.open(org_frames[0]).size
+
+    # fill in any missing dimension from the source frames, preserving aspect
+    # ratio and snapping to a multiple of 8
+    if width == -1 and height == -1:
+        width = W
+        height = H
+    elif width == -1:
+        width = int(height * W / H) // 8 * 8
+    elif height == -1:
+        height = int(width * H / W) // 8 * 8
+
+    if length == -1:
+        length = len(org_frames)
+    else:
+        length = min(length, len(org_frames))
+
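+    # From here on, each repeat pass: (optionally) RIFE-interpolate the source
+    # frames, feed them to controlnet_tile as reference images, re-generate at
+    # the requested resolution, then use the new frames as the next pass's source.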
+    config_path = config_path.absolute()
+    logger.info(f"Using generation config: {path_from_cwd(config_path)}")
+    model_config: ModelConfig = get_model_config(config_path)
+
+    # get a timestamp for the output directory
+    time_str = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
+    # make the output directory
+    save_dir = out_dir.joinpath(f"{time_str}-{model_config.save_name}")
+    save_dir.mkdir(parents=True, exist_ok=True)
+    logger.info(f"Will save outputs to ./{path_from_cwd(save_dir)}")
+
+    seeds = [get_random() for _ in range(repeats)]
+
+    rife_img_dir = None
+
+    for repeat_count in range(repeats):
+
+        if interpolation_multiplier > 1:
+            rife_img_dir = save_dir.joinpath(f"{repeat_count:02d}_rife_frame")
+            rife_img_dir.mkdir(parents=True, exist_ok=True)
+
+            rife_interpolate(frames_dir, rife_img_dir, interpolation_multiplier)
+            length *= interpolation_multiplier
+
+            # keep the output fps and prompt keyframes aligned with the new frame count
+            if model_config.output:
+                model_config.output["fps"] *= interpolation_multiplier
+            if model_config.prompt_map:
+                model_config.prompt_map = {
+                    str(int(i) * interpolation_multiplier): model_config.prompt_map[i]
+                    for i in model_config.prompt_map
+                }
+
+            frames_dir = rife_img_dir
+
+        controlnet_img_dir = save_dir.joinpath(f"{repeat_count:02d}_controlnet_image")
+
+        for c in [
+            "controlnet_canny", "controlnet_depth", "controlnet_inpaint", "controlnet_ip2p",
+            "controlnet_lineart", "controlnet_lineart_anime", "controlnet_mlsd", "controlnet_normalbae",
+            "controlnet_openpose", "controlnet_scribble", "controlnet_seg", "controlnet_shuffle",
+            "controlnet_softedge", "controlnet_tile",
+        ]:
+            c_dir = controlnet_img_dir.joinpath(c)
+            c_dir.mkdir(parents=True, exist_ok=True)
+
+        # use the source frames as controlnet_tile reference images
+        shutil.copytree(frames_dir, controlnet_img_dir.joinpath("controlnet_tile"), dirs_exist_ok=True)
+
+        model_config.controlnet_map["input_image_dir"] = os.path.relpath(controlnet_img_dir.absolute(), data_dir)
+        model_config.controlnet_map["is_loop"] = False
+
+        if "controlnet_tile" in model_config.controlnet_map:
+            model_config.controlnet_map["controlnet_tile"]["enable"] = True
+            model_config.controlnet_map["controlnet_tile"]["control_scale_list"] = []
+            model_config.controlnet_map["controlnet_tile"]["controlnet_conditioning_scale"] = tile_conditioning_scale
+        else:
+            model_config.controlnet_map["controlnet_tile"] = {
+                "enable": True,
+                "use_preprocessor": True,
+                "guess_mode": False,
+                "controlnet_conditioning_scale": tile_conditioning_scale,
+                "control_guidance_start": 0.0,
+                "control_guidance_end": 1.0,
+                "control_scale_list": [],
+            }
+
+        model_config.seed = [seeds[repeat_count]]
+
+        config_path = save_dir.joinpath(f"{repeat_count:02d}_prompt.json")
+        config_path.write_text(model_config.json(indent=4), encoding="utf-8")
+
+        generated_dir = generate(
+            config_path=config_path,
+            width=width,
+            height=height,
+            length=length,
+            context=context,
+            overlap=overlap,
+            stride=stride,
+            device=device,
+            use_xformers=use_xformers,
+            force_half_vae=force_half_vae,
+            out_dir=save_dir,
+        )
+
+        # only interpolate on the first pass
+        interpolation_multiplier = 1
+
+        torch.cuda.empty_cache()
+
+        generated_dir = generated_dir.rename(generated_dir.parent / f"{time_str}_{repeat_count:02d}")
+
+        # the next pass refines the frames we just generated
+        frames_dir = glob.glob(os.path.join(generated_dir, "00-[0-9]*"), recursive=False)[0]
+
+    if rife_img_dir:
+        frames = sorted(glob.glob(os.path.join(rife_img_dir, "[0-9]*.png"), recursive=False))
+        out_images = [Image.open(f) for f in frames]
+
+        out_file = save_dir.joinpath("rife_only_for_comparison")
+        save_output(out_images, rife_img_dir, out_file, model_config.output, True, save_frames=None, save_video=None)
+
+    logger.info(f"Refined 
results are output to {generated_dir}") + diff --git a/src/animatediff/dwpose/__init__.py b/src/animatediff/dwpose/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..28df14c4fcb615c3df72d3212405e8cc7a913eb0 --- /dev/null +++ b/src/animatediff/dwpose/__init__.py @@ -0,0 +1,91 @@ +# https://github.com/IDEA-Research/DWPose +# Openpose +# Original from CMU https://github.com/CMU-Perceptual-Computing-Lab/openpose +# 2nd Edited by https://github.com/Hzzone/pytorch-openpose +# 3rd Edited by ControlNet +# 4th Edited by ControlNet (added face and correct hands) + +import os + +os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE" +import cv2 +import numpy as np +import torch +from controlnet_aux.util import HWC3, resize_image +from PIL import Image + +from . import util +from .wholebody import Wholebody + + +def draw_pose(pose, H, W): + bodies = pose['bodies'] + faces = pose['faces'] + hands = pose['hands'] + candidate = bodies['candidate'] + subset = bodies['subset'] + canvas = np.zeros(shape=(H, W, 3), dtype=np.uint8) + + canvas = util.draw_bodypose(canvas, candidate, subset) + + canvas = util.draw_handpose(canvas, hands) + + canvas = util.draw_facepose(canvas, faces) + + return canvas + + +class DWposeDetector: + def __init__(self): + pass + + def to(self, device): + self.pose_estimation = Wholebody(device) + return self + + def __call__(self, input_image, detect_resolution=512, image_resolution=512, output_type="pil", **kwargs): + input_image = cv2.cvtColor(np.array(input_image, dtype=np.uint8), cv2.COLOR_RGB2BGR) + + input_image = HWC3(input_image) + input_image = resize_image(input_image, detect_resolution) + H, W, C = input_image.shape + with torch.no_grad(): + candidate, subset = self.pose_estimation(input_image) + nums, keys, locs = candidate.shape + candidate[..., 0] /= float(W) + candidate[..., 1] /= float(H) + body = candidate[:,:18].copy() + body = body.reshape(nums*18, locs) + score = subset[:,:18] + for i in range(len(score)): + for j in range(len(score[i])): + if score[i][j] > 0.3: + score[i][j] = int(18*i+j) + else: + score[i][j] = -1 + + un_visible = subset<0.3 + candidate[un_visible] = -1 + + foot = candidate[:,18:24] + + faces = candidate[:,24:92] + + hands = candidate[:,92:113] + hands = np.vstack([hands, candidate[:,113:]]) + + bodies = dict(candidate=body, subset=score) + pose = dict(bodies=bodies, hands=hands, faces=faces) + + detected_map = draw_pose(pose, H, W) + detected_map = HWC3(detected_map) + + img = resize_image(input_image, image_resolution) + H, W, C = img.shape + + detected_map = cv2.resize(detected_map, (W, H), interpolation=cv2.INTER_LINEAR) + + if output_type == "pil": + detected_map = Image.fromarray(detected_map) + + return detected_map diff --git a/src/animatediff/dwpose/onnxdet.py b/src/animatediff/dwpose/onnxdet.py new file mode 100644 index 0000000000000000000000000000000000000000..f755e374b43d181111c9ff3151ead62153981cfd --- /dev/null +++ b/src/animatediff/dwpose/onnxdet.py @@ -0,0 +1,126 @@ +# https://github.com/IDEA-Research/DWPose +import cv2 +import numpy as np +import onnxruntime + + +def nms(boxes, scores, nms_thr): + """Single class NMS implemented in Numpy.""" + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + y2 = boxes[:, 3] + + areas = (x2 - x1 + 1) * (y2 - y1 + 1) + order = scores.argsort()[::-1] + + keep = [] + while order.size > 0: + i = order[0] + keep.append(i) + xx1 = np.maximum(x1[i], x1[order[1:]]) + yy1 = np.maximum(y1[i], y1[order[1:]]) + xx2 = np.minimum(x2[i], x2[order[1:]]) + yy2 = np.minimum(y2[i], 
y2[order[1:]]) + + w = np.maximum(0.0, xx2 - xx1 + 1) + h = np.maximum(0.0, yy2 - yy1 + 1) + inter = w * h + ovr = inter / (areas[i] + areas[order[1:]] - inter) + + inds = np.where(ovr <= nms_thr)[0] + order = order[inds + 1] + + return keep + +def multiclass_nms(boxes, scores, nms_thr, score_thr): + """Multiclass NMS implemented in Numpy. Class-aware version.""" + final_dets = [] + num_classes = scores.shape[1] + for cls_ind in range(num_classes): + cls_scores = scores[:, cls_ind] + valid_score_mask = cls_scores > score_thr + if valid_score_mask.sum() == 0: + continue + else: + valid_scores = cls_scores[valid_score_mask] + valid_boxes = boxes[valid_score_mask] + keep = nms(valid_boxes, valid_scores, nms_thr) + if len(keep) > 0: + cls_inds = np.ones((len(keep), 1)) * cls_ind + dets = np.concatenate( + [valid_boxes[keep], valid_scores[keep, None], cls_inds], 1 + ) + final_dets.append(dets) + if len(final_dets) == 0: + return None + return np.concatenate(final_dets, 0) + +def demo_postprocess(outputs, img_size, p6=False): + grids = [] + expanded_strides = [] + strides = [8, 16, 32] if not p6 else [8, 16, 32, 64] + + hsizes = [img_size[0] // stride for stride in strides] + wsizes = [img_size[1] // stride for stride in strides] + + for hsize, wsize, stride in zip(hsizes, wsizes, strides): + xv, yv = np.meshgrid(np.arange(wsize), np.arange(hsize)) + grid = np.stack((xv, yv), 2).reshape(1, -1, 2) + grids.append(grid) + shape = grid.shape[:2] + expanded_strides.append(np.full((*shape, 1), stride)) + + grids = np.concatenate(grids, 1) + expanded_strides = np.concatenate(expanded_strides, 1) + outputs[..., :2] = (outputs[..., :2] + grids) * expanded_strides + outputs[..., 2:4] = np.exp(outputs[..., 2:4]) * expanded_strides + + return outputs + +def preprocess(img, input_size, swap=(2, 0, 1)): + if len(img.shape) == 3: + padded_img = np.ones((input_size[0], input_size[1], 3), dtype=np.uint8) * 114 + else: + padded_img = np.ones(input_size, dtype=np.uint8) * 114 + + r = min(input_size[0] / img.shape[0], input_size[1] / img.shape[1]) + resized_img = cv2.resize( + img, + (int(img.shape[1] * r), int(img.shape[0] * r)), + interpolation=cv2.INTER_LINEAR, + ).astype(np.uint8) + padded_img[: int(img.shape[0] * r), : int(img.shape[1] * r)] = resized_img + + padded_img = padded_img.transpose(swap) + padded_img = np.ascontiguousarray(padded_img, dtype=np.float32) + return padded_img, r + +def inference_detector(session, oriImg): + input_shape = (640,640) + img, ratio = preprocess(oriImg, input_shape) + + ort_inputs = {session.get_inputs()[0].name: img[None, :, :, :]} + output = session.run(None, ort_inputs) + predictions = demo_postprocess(output[0], input_shape)[0] + + boxes = predictions[:, :4] + scores = predictions[:, 4:5] * predictions[:, 5:] + + boxes_xyxy = np.ones_like(boxes) + boxes_xyxy[:, 0] = boxes[:, 0] - boxes[:, 2]/2. + boxes_xyxy[:, 1] = boxes[:, 1] - boxes[:, 3]/2. + boxes_xyxy[:, 2] = boxes[:, 0] + boxes[:, 2]/2. + boxes_xyxy[:, 3] = boxes[:, 1] + boxes[:, 3]/2. 
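+    # boxes are now in corner format (x1, y1, x2, y2); dividing by the preprocess
+    # resize ratio maps them back to original-image coordinates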
+ boxes_xyxy /= ratio + dets = multiclass_nms(boxes_xyxy, scores, nms_thr=0.45, score_thr=0.1) + if dets is not None: + final_boxes, final_scores, final_cls_inds = dets[:, :4], dets[:, 4], dets[:, 5] + isscore = final_scores>0.3 + iscat = final_cls_inds == 0 + isbbox = [ i and j for (i, j) in zip(isscore, iscat)] + final_boxes = final_boxes[isbbox] + else: + return [] + + return final_boxes diff --git a/src/animatediff/dwpose/onnxpose.py b/src/animatediff/dwpose/onnxpose.py new file mode 100644 index 0000000000000000000000000000000000000000..cb080e75e8d6beba5e74b843a1e475e8152ed01e --- /dev/null +++ b/src/animatediff/dwpose/onnxpose.py @@ -0,0 +1,361 @@ +# https://github.com/IDEA-Research/DWPose +from typing import List, Tuple + +import cv2 +import numpy as np +import onnxruntime as ort + +def preprocess( + img: np.ndarray, out_bbox, input_size: Tuple[int, int] = (192, 256) +) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """Do preprocessing for RTMPose model inference. + + Args: + img (np.ndarray): Input image in shape. + input_size (tuple): Input image size in shape (w, h). + + Returns: + tuple: + - resized_img (np.ndarray): Preprocessed image. + - center (np.ndarray): Center of image. + - scale (np.ndarray): Scale of image. + """ + # get shape of image + img_shape = img.shape[:2] + out_img, out_center, out_scale = [], [], [] + if len(out_bbox) == 0: + out_bbox = [[0, 0, img_shape[1], img_shape[0]]] + for i in range(len(out_bbox)): + x0 = out_bbox[i][0] + y0 = out_bbox[i][1] + x1 = out_bbox[i][2] + y1 = out_bbox[i][3] + bbox = np.array([x0, y0, x1, y1]) + + # get center and scale + center, scale = bbox_xyxy2cs(bbox, padding=1.25) + + # do affine transformation + resized_img, scale = top_down_affine(input_size, scale, center, img) + + # normalize image + mean = np.array([123.675, 116.28, 103.53]) + std = np.array([58.395, 57.12, 57.375]) + resized_img = (resized_img - mean) / std + + out_img.append(resized_img) + out_center.append(center) + out_scale.append(scale) + + return out_img, out_center, out_scale + + +def inference(sess: ort.InferenceSession, img: np.ndarray) -> np.ndarray: + """Inference RTMPose model. + + Args: + sess (ort.InferenceSession): ONNXRuntime session. + img (np.ndarray): Input image in shape. + + Returns: + outputs (np.ndarray): Output of RTMPose model. + """ + all_out = [] + # build input + for i in range(len(img)): + input = [img[i].transpose(2, 0, 1)] + + # build output + sess_input = {sess.get_inputs()[0].name: input} + sess_output = [] + for out in sess.get_outputs(): + sess_output.append(out.name) + + # run model + outputs = sess.run(sess_output, sess_input) + all_out.append(outputs) + + return all_out + + +def postprocess(outputs: List[np.ndarray], + model_input_size: Tuple[int, int], + center: Tuple[int, int], + scale: Tuple[int, int], + simcc_split_ratio: float = 2.0 + ) -> Tuple[np.ndarray, np.ndarray]: + """Postprocess for RTMPose model output. + + Args: + outputs (np.ndarray): Output of RTMPose model. + model_input_size (tuple): RTMPose model Input image size. + center (tuple): Center of bbox in shape (x, y). + scale (tuple): Scale of bbox in shape (w, h). + simcc_split_ratio (float): Split ratio of simcc. + + Returns: + tuple: + - keypoints (np.ndarray): Rescaled keypoints. + - scores (np.ndarray): Model predict scores. 
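+          (keypoints are mapped back to original-image coordinates using each
+          box's center and scale)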
+ """ + all_key = [] + all_score = [] + for i in range(len(outputs)): + # use simcc to decode + simcc_x, simcc_y = outputs[i] + keypoints, scores = decode(simcc_x, simcc_y, simcc_split_ratio) + + # rescale keypoints + keypoints = keypoints / model_input_size * scale[i] + center[i] - scale[i] / 2 + all_key.append(keypoints[0]) + all_score.append(scores[0]) + + return np.array(all_key), np.array(all_score) + + +def bbox_xyxy2cs(bbox: np.ndarray, + padding: float = 1.) -> Tuple[np.ndarray, np.ndarray]: + """Transform the bbox format from (x,y,w,h) into (center, scale) + + Args: + bbox (ndarray): Bounding box(es) in shape (4,) or (n, 4), formatted + as (left, top, right, bottom) + padding (float): BBox padding factor that will be multilied to scale. + Default: 1.0 + + Returns: + tuple: A tuple containing center and scale. + - np.ndarray[float32]: Center (x, y) of the bbox in shape (2,) or + (n, 2) + - np.ndarray[float32]: Scale (w, h) of the bbox in shape (2,) or + (n, 2) + """ + # convert single bbox from (4, ) to (1, 4) + dim = bbox.ndim + if dim == 1: + bbox = bbox[None, :] + + # get bbox center and scale + x1, y1, x2, y2 = np.hsplit(bbox, [1, 2, 3]) + center = np.hstack([x1 + x2, y1 + y2]) * 0.5 + scale = np.hstack([x2 - x1, y2 - y1]) * padding + + if dim == 1: + center = center[0] + scale = scale[0] + + return center, scale + + +def _fix_aspect_ratio(bbox_scale: np.ndarray, + aspect_ratio: float) -> np.ndarray: + """Extend the scale to match the given aspect ratio. + + Args: + scale (np.ndarray): The image scale (w, h) in shape (2, ) + aspect_ratio (float): The ratio of ``w/h`` + + Returns: + np.ndarray: The reshaped image scale in (2, ) + """ + w, h = np.hsplit(bbox_scale, [1]) + bbox_scale = np.where(w > h * aspect_ratio, + np.hstack([w, w / aspect_ratio]), + np.hstack([h * aspect_ratio, h])) + return bbox_scale + + +def _rotate_point(pt: np.ndarray, angle_rad: float) -> np.ndarray: + """Rotate a point by an angle. + + Args: + pt (np.ndarray): 2D point coordinates (x, y) in shape (2, ) + angle_rad (float): rotation angle in radian + + Returns: + np.ndarray: Rotated point in shape (2, ) + """ + sn, cs = np.sin(angle_rad), np.cos(angle_rad) + rot_mat = np.array([[cs, -sn], [sn, cs]]) + return rot_mat @ pt + + +def _get_3rd_point(a: np.ndarray, b: np.ndarray) -> np.ndarray: + """To calculate the affine matrix, three pairs of points are required. This + function is used to get the 3rd point, given 2D points a & b. + + The 3rd point is defined by rotating vector `a - b` by 90 degrees + anticlockwise, using b as the rotation center. + + Args: + a (np.ndarray): The 1st point (x,y) in shape (2, ) + b (np.ndarray): The 2nd point (x,y) in shape (2, ) + + Returns: + np.ndarray: The 3rd point. + """ + direction = a - b + c = b + np.r_[-direction[1], direction[0]] + return c + + +def get_warp_matrix(center: np.ndarray, + scale: np.ndarray, + rot: float, + output_size: Tuple[int, int], + shift: Tuple[float, float] = (0., 0.), + inv: bool = False) -> np.ndarray: + """Calculate the affine transformation matrix that can warp the bbox area + in the input image to the output size. + + Args: + center (np.ndarray[2, ]): Center of the bounding box (x, y). + scale (np.ndarray[2, ]): Scale of the bounding box + wrt [width, height]. + rot (float): Rotation angle (degree). + output_size (np.ndarray[2, ] | list(2,)): Size of the + destination heatmaps. + shift (0-100%): Shift translation ratio wrt the width/height. + Default (0., 0.). + inv (bool): Option to inverse the affine transform direction. 
+ (inv=False: src->dst or inv=True: dst->src) + + Returns: + np.ndarray: A 2x3 transformation matrix + """ + shift = np.array(shift) + src_w = scale[0] + dst_w = output_size[0] + dst_h = output_size[1] + + # compute transformation matrix + rot_rad = np.deg2rad(rot) + src_dir = _rotate_point(np.array([0., src_w * -0.5]), rot_rad) + dst_dir = np.array([0., dst_w * -0.5]) + + # get four corners of the src rectangle in the original image + src = np.zeros((3, 2), dtype=np.float32) + src[0, :] = center + scale * shift + src[1, :] = center + src_dir + scale * shift + src[2, :] = _get_3rd_point(src[0, :], src[1, :]) + + # get four corners of the dst rectangle in the input image + dst = np.zeros((3, 2), dtype=np.float32) + dst[0, :] = [dst_w * 0.5, dst_h * 0.5] + dst[1, :] = np.array([dst_w * 0.5, dst_h * 0.5]) + dst_dir + dst[2, :] = _get_3rd_point(dst[0, :], dst[1, :]) + + if inv: + warp_mat = cv2.getAffineTransform(np.float32(dst), np.float32(src)) + else: + warp_mat = cv2.getAffineTransform(np.float32(src), np.float32(dst)) + + return warp_mat + + +def top_down_affine(input_size: dict, bbox_scale: dict, bbox_center: dict, + img: np.ndarray) -> Tuple[np.ndarray, np.ndarray]: + """Get the bbox image as the model input by affine transform. + + Args: + input_size (dict): The input size of the model. + bbox_scale (dict): The bbox scale of the img. + bbox_center (dict): The bbox center of the img. + img (np.ndarray): The original image. + + Returns: + tuple: A tuple containing center and scale. + - np.ndarray[float32]: img after affine transform. + - np.ndarray[float32]: bbox scale after affine transform. + """ + w, h = input_size + warp_size = (int(w), int(h)) + + # reshape bbox to fixed aspect ratio + bbox_scale = _fix_aspect_ratio(bbox_scale, aspect_ratio=w / h) + + # get the affine matrix + center = bbox_center + scale = bbox_scale + rot = 0 + warp_mat = get_warp_matrix(center, scale, rot, output_size=(w, h)) + + # do affine transform + img = cv2.warpAffine(img, warp_mat, warp_size, flags=cv2.INTER_LINEAR) + + return img, bbox_scale + + +def get_simcc_maximum(simcc_x: np.ndarray, + simcc_y: np.ndarray) -> Tuple[np.ndarray, np.ndarray]: + """Get maximum response location and value from simcc representations. + + Note: + instance number: N + num_keypoints: K + heatmap height: H + heatmap width: W + + Args: + simcc_x (np.ndarray): x-axis SimCC in shape (K, Wx) or (N, K, Wx) + simcc_y (np.ndarray): y-axis SimCC in shape (K, Wy) or (N, K, Wy) + + Returns: + tuple: + - locs (np.ndarray): locations of maximum heatmap responses in shape + (K, 2) or (N, K, 2) + - vals (np.ndarray): values of maximum heatmap responses in shape + (K,) or (N, K) + """ + N, K, Wx = simcc_x.shape + simcc_x = simcc_x.reshape(N * K, -1) + simcc_y = simcc_y.reshape(N * K, -1) + + # get maximum value locations + x_locs = np.argmax(simcc_x, axis=1) + y_locs = np.argmax(simcc_y, axis=1) + locs = np.stack((x_locs, y_locs), axis=-1).astype(np.float32) + max_val_x = np.amax(simcc_x, axis=1) + max_val_y = np.amax(simcc_y, axis=1) + + # get maximum value across x and y axis + mask = max_val_x > max_val_y + max_val_x[mask] = max_val_y[mask] + vals = max_val_x + locs[vals <= 0.] = -1 + + # reshape + locs = locs.reshape(N, K, 2) + vals = vals.reshape(N, K) + + return locs, vals + + +def decode(simcc_x: np.ndarray, simcc_y: np.ndarray, + simcc_split_ratio) -> Tuple[np.ndarray, np.ndarray]: + """Modulate simcc distribution with Gaussian. + + Args: + simcc_x (np.ndarray[K, Wx]): model predicted simcc in x. 
+ simcc_y (np.ndarray[K, Wy]): model predicted simcc in y. + simcc_split_ratio (int): The split ratio of simcc. + + Returns: + tuple: A tuple containing center and scale. + - np.ndarray[float32]: keypoints in shape (K, 2) or (n, K, 2) + - np.ndarray[float32]: scores in shape (K,) or (n, K) + """ + keypoints, scores = get_simcc_maximum(simcc_x, simcc_y) + keypoints /= simcc_split_ratio + + return keypoints, scores + + +def inference_pose(session, out_bbox, oriImg): + h, w = session.get_inputs()[0].shape[2:] + model_input_size = (w, h) + resized_img, center, scale = preprocess(oriImg, out_bbox, model_input_size) + outputs = inference(session, resized_img) + keypoints, scores = postprocess(outputs, model_input_size, center, scale) + + return keypoints, scores \ No newline at end of file diff --git a/src/animatediff/dwpose/util.py b/src/animatediff/dwpose/util.py new file mode 100644 index 0000000000000000000000000000000000000000..21e37d609b08591dfa15dd81b3c35839256267fe --- /dev/null +++ b/src/animatediff/dwpose/util.py @@ -0,0 +1,298 @@ +# https://github.com/IDEA-Research/DWPose +import math +import numpy as np +import matplotlib +import cv2 + + +eps = 0.01 + + +def smart_resize(x, s): + Ht, Wt = s + if x.ndim == 2: + Ho, Wo = x.shape + Co = 1 + else: + Ho, Wo, Co = x.shape + if Co == 3 or Co == 1: + k = float(Ht + Wt) / float(Ho + Wo) + return cv2.resize(x, (int(Wt), int(Ht)), interpolation=cv2.INTER_AREA if k < 1 else cv2.INTER_LANCZOS4) + else: + return np.stack([smart_resize(x[:, :, i], s) for i in range(Co)], axis=2) + + +def smart_resize_k(x, fx, fy): + if x.ndim == 2: + Ho, Wo = x.shape + Co = 1 + else: + Ho, Wo, Co = x.shape + Ht, Wt = Ho * fy, Wo * fx + if Co == 3 or Co == 1: + k = float(Ht + Wt) / float(Ho + Wo) + return cv2.resize(x, (int(Wt), int(Ht)), interpolation=cv2.INTER_AREA if k < 1 else cv2.INTER_LANCZOS4) + else: + return np.stack([smart_resize_k(x[:, :, i], fx, fy) for i in range(Co)], axis=2) + + +def padRightDownCorner(img, stride, padValue): + h = img.shape[0] + w = img.shape[1] + + pad = 4 * [None] + pad[0] = 0 # up + pad[1] = 0 # left + pad[2] = 0 if (h % stride == 0) else stride - (h % stride) # down + pad[3] = 0 if (w % stride == 0) else stride - (w % stride) # right + + img_padded = img + pad_up = np.tile(img_padded[0:1, :, :]*0 + padValue, (pad[0], 1, 1)) + img_padded = np.concatenate((pad_up, img_padded), axis=0) + pad_left = np.tile(img_padded[:, 0:1, :]*0 + padValue, (1, pad[1], 1)) + img_padded = np.concatenate((pad_left, img_padded), axis=1) + pad_down = np.tile(img_padded[-2:-1, :, :]*0 + padValue, (pad[2], 1, 1)) + img_padded = np.concatenate((img_padded, pad_down), axis=0) + pad_right = np.tile(img_padded[:, -2:-1, :]*0 + padValue, (1, pad[3], 1)) + img_padded = np.concatenate((img_padded, pad_right), axis=1) + + return img_padded, pad + + +def transfer(model, model_weights): + transfered_model_weights = {} + for weights_name in model.state_dict().keys(): + transfered_model_weights[weights_name] = model_weights['.'.join(weights_name.split('.')[1:])] + return transfered_model_weights + + +def draw_bodypose(canvas, candidate, subset): + H, W, C = canvas.shape + candidate = np.array(candidate) + subset = np.array(subset) + + stickwidth = 4 + + limbSeq = [[2, 3], [2, 6], [3, 4], [4, 5], [6, 7], [7, 8], [2, 9], [9, 10], \ + [10, 11], [2, 12], [12, 13], [13, 14], [2, 1], [1, 15], [15, 17], \ + [1, 16], [16, 18], [3, 17], [6, 18]] + + colors = [[255, 0, 0], [255, 85, 0], [255, 170, 0], [255, 255, 0], [170, 255, 0], [85, 255, 0], [0, 255, 0], \ + [0, 255, 85], 
[0, 255, 170], [0, 255, 255], [0, 170, 255], [0, 85, 255], [0, 0, 255], [85, 0, 255], \ + [170, 0, 255], [255, 0, 255], [255, 0, 170], [255, 0, 85]] + + for i in range(17): + for n in range(len(subset)): + index = subset[n][np.array(limbSeq[i]) - 1] + if -1 in index: + continue + Y = candidate[index.astype(int), 0] * float(W) + X = candidate[index.astype(int), 1] * float(H) + mX = np.mean(X) + mY = np.mean(Y) + length = ((X[0] - X[1]) ** 2 + (Y[0] - Y[1]) ** 2) ** 0.5 + angle = math.degrees(math.atan2(X[0] - X[1], Y[0] - Y[1])) + polygon = cv2.ellipse2Poly((int(mY), int(mX)), (int(length / 2), stickwidth), int(angle), 0, 360, 1) + cv2.fillConvexPoly(canvas, polygon, colors[i]) + + canvas = (canvas * 0.6).astype(np.uint8) + + for i in range(18): + for n in range(len(subset)): + index = int(subset[n][i]) + if index == -1: + continue + x, y = candidate[index][0:2] + x = int(x * W) + y = int(y * H) + cv2.circle(canvas, (int(x), int(y)), 4, colors[i], thickness=-1) + + return canvas + + +def draw_handpose(canvas, all_hand_peaks): + H, W, C = canvas.shape + + edges = [[0, 1], [1, 2], [2, 3], [3, 4], [0, 5], [5, 6], [6, 7], [7, 8], [0, 9], [9, 10], \ + [10, 11], [11, 12], [0, 13], [13, 14], [14, 15], [15, 16], [0, 17], [17, 18], [18, 19], [19, 20]] + + for peaks in all_hand_peaks: + peaks = np.array(peaks) + + for ie, e in enumerate(edges): + x1, y1 = peaks[e[0]] + x2, y2 = peaks[e[1]] + x1 = int(x1 * W) + y1 = int(y1 * H) + x2 = int(x2 * W) + y2 = int(y2 * H) + if x1 > eps and y1 > eps and x2 > eps and y2 > eps: + cv2.line(canvas, (x1, y1), (x2, y2), matplotlib.colors.hsv_to_rgb([ie / float(len(edges)), 1.0, 1.0]) * 255, thickness=2) + + for i, keyponit in enumerate(peaks): + x, y = keyponit + x = int(x * W) + y = int(y * H) + if x > eps and y > eps: + cv2.circle(canvas, (x, y), 4, (0, 0, 255), thickness=-1) + return canvas + + +def draw_facepose(canvas, all_lmks): + H, W, C = canvas.shape + for lmks in all_lmks: + lmks = np.array(lmks) + for lmk in lmks: + x, y = lmk + x = int(x * W) + y = int(y * H) + if x > eps and y > eps: + cv2.circle(canvas, (x, y), 3, (255, 255, 255), thickness=-1) + return canvas + + +# detect hand according to body pose keypoints +# please refer to https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/hand/handDetector.cpp +def handDetect(candidate, subset, oriImg): + # right hand: wrist 4, elbow 3, shoulder 2 + # left hand: wrist 7, elbow 6, shoulder 5 + ratioWristElbow = 0.33 + detect_result = [] + image_height, image_width = oriImg.shape[0:2] + for person in subset.astype(int): + # if any of three not detected + has_left = np.sum(person[[5, 6, 7]] == -1) == 0 + has_right = np.sum(person[[2, 3, 4]] == -1) == 0 + if not (has_left or has_right): + continue + hands = [] + #left hand + if has_left: + left_shoulder_index, left_elbow_index, left_wrist_index = person[[5, 6, 7]] + x1, y1 = candidate[left_shoulder_index][:2] + x2, y2 = candidate[left_elbow_index][:2] + x3, y3 = candidate[left_wrist_index][:2] + hands.append([x1, y1, x2, y2, x3, y3, True]) + # right hand + if has_right: + right_shoulder_index, right_elbow_index, right_wrist_index = person[[2, 3, 4]] + x1, y1 = candidate[right_shoulder_index][:2] + x2, y2 = candidate[right_elbow_index][:2] + x3, y3 = candidate[right_wrist_index][:2] + hands.append([x1, y1, x2, y2, x3, y3, False]) + + for x1, y1, x2, y2, x3, y3, is_left in hands: + # pos_hand = pos_wrist + ratio * (pos_wrist - pos_elbox) = (1 + ratio) * pos_wrist - ratio * pos_elbox + # handRectangle.x = posePtr[wrist*3] + 
ratioWristElbow * (posePtr[wrist*3] - posePtr[elbow*3]); + # handRectangle.y = posePtr[wrist*3+1] + ratioWristElbow * (posePtr[wrist*3+1] - posePtr[elbow*3+1]); + # const auto distanceWristElbow = getDistance(poseKeypoints, person, wrist, elbow); + # const auto distanceElbowShoulder = getDistance(poseKeypoints, person, elbow, shoulder); + # handRectangle.width = 1.5f * fastMax(distanceWristElbow, 0.9f * distanceElbowShoulder); + x = x3 + ratioWristElbow * (x3 - x2) + y = y3 + ratioWristElbow * (y3 - y2) + distanceWristElbow = math.sqrt((x3 - x2) ** 2 + (y3 - y2) ** 2) + distanceElbowShoulder = math.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) + width = 1.5 * max(distanceWristElbow, 0.9 * distanceElbowShoulder) + # x-y refers to the center --> offset to topLeft point + # handRectangle.x -= handRectangle.width / 2.f; + # handRectangle.y -= handRectangle.height / 2.f; + x -= width / 2 + y -= width / 2 # width = height + # overflow the image + if x < 0: x = 0 + if y < 0: y = 0 + width1 = width + width2 = width + if x + width > image_width: width1 = image_width - x + if y + width > image_height: width2 = image_height - y + width = min(width1, width2) + # the max hand box value is 20 pixels + if width >= 20: + detect_result.append([int(x), int(y), int(width), is_left]) + + ''' + return value: [[x, y, w, True if left hand else False]]. + width=height since the network require squared input. + x, y is the coordinate of top left + ''' + return detect_result + + +# Written by Lvmin +def faceDetect(candidate, subset, oriImg): + # left right eye ear 14 15 16 17 + detect_result = [] + image_height, image_width = oriImg.shape[0:2] + for person in subset.astype(int): + has_head = person[0] > -1 + if not has_head: + continue + + has_left_eye = person[14] > -1 + has_right_eye = person[15] > -1 + has_left_ear = person[16] > -1 + has_right_ear = person[17] > -1 + + if not (has_left_eye or has_right_eye or has_left_ear or has_right_ear): + continue + + head, left_eye, right_eye, left_ear, right_ear = person[[0, 14, 15, 16, 17]] + + width = 0.0 + x0, y0 = candidate[head][:2] + + if has_left_eye: + x1, y1 = candidate[left_eye][:2] + d = max(abs(x0 - x1), abs(y0 - y1)) + width = max(width, d * 3.0) + + if has_right_eye: + x1, y1 = candidate[right_eye][:2] + d = max(abs(x0 - x1), abs(y0 - y1)) + width = max(width, d * 3.0) + + if has_left_ear: + x1, y1 = candidate[left_ear][:2] + d = max(abs(x0 - x1), abs(y0 - y1)) + width = max(width, d * 1.5) + + if has_right_ear: + x1, y1 = candidate[right_ear][:2] + d = max(abs(x0 - x1), abs(y0 - y1)) + width = max(width, d * 1.5) + + x, y = x0, y0 + + x -= width + y -= width + + if x < 0: + x = 0 + + if y < 0: + y = 0 + + width1 = width * 2 + width2 = width * 2 + + if x + width > image_width: + width1 = image_width - x + + if y + width > image_height: + width2 = image_height - y + + width = min(width1, width2) + + if width >= 20: + detect_result.append([int(x), int(y), int(width)]) + + return detect_result + + +# get max index of 2d array +def npmax(array): + arrayindex = array.argmax(1) + arrayvalue = array.max(1) + i = arrayvalue.argmax() + j = arrayindex[i] + return i, j diff --git a/src/animatediff/dwpose/wholebody.py b/src/animatediff/dwpose/wholebody.py new file mode 100644 index 0000000000000000000000000000000000000000..4de6d10e3cff9634b80063e0d6862606731f75c4 --- /dev/null +++ b/src/animatediff/dwpose/wholebody.py @@ -0,0 +1,49 @@ +# https://github.com/IDEA-Research/DWPose +import cv2 +import numpy as np +import onnxruntime as ort + +from .onnxdet import inference_detector 
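+# inference_detector: YOLOX person boxes; inference_pose: RTMPose keypoints per box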
+from .onnxpose import inference_pose + + +class Wholebody: + def __init__(self, device='cuda:0'): + providers = ['CPUExecutionProvider' + ] if device == 'cpu' else ['CUDAExecutionProvider'] + onnx_det = 'data/models/DWPose/yolox_l.onnx' + onnx_pose = 'data/models/DWPose/dw-ll_ucoco_384.onnx' + + self.session_det = ort.InferenceSession(path_or_bytes=onnx_det, providers=providers) + self.session_pose = ort.InferenceSession(path_or_bytes=onnx_pose, providers=providers) + + def __call__(self, oriImg): + det_result = inference_detector(self.session_det, oriImg) + keypoints, scores = inference_pose(self.session_pose, det_result, oriImg) + + keypoints_info = np.concatenate( + (keypoints, scores[..., None]), axis=-1) + # compute neck joint + neck = np.mean(keypoints_info[:, [5, 6]], axis=1) + # neck score when visualizing pred + neck[:, 2:4] = np.logical_and( + keypoints_info[:, 5, 2:4] > 0.3, + keypoints_info[:, 6, 2:4] > 0.3).astype(int) + new_keypoints_info = np.insert( + keypoints_info, 17, neck, axis=1) + mmpose_idx = [ + 17, 6, 8, 10, 7, 9, 12, 14, 16, 13, 15, 2, 1, 4, 3 + ] + openpose_idx = [ + 1, 2, 3, 4, 6, 7, 8, 9, 10, 12, 13, 14, 15, 16, 17 + ] + new_keypoints_info[:, openpose_idx] = \ + new_keypoints_info[:, mmpose_idx] + keypoints_info = new_keypoints_info + + keypoints, scores = keypoints_info[ + ..., :2], keypoints_info[..., 2] + + return keypoints, scores + + diff --git a/src/animatediff/generate.py b/src/animatediff/generate.py new file mode 100644 index 0000000000000000000000000000000000000000..a7e223b8c3ebded7c082b6807b81c997c9b95605 --- /dev/null +++ b/src/animatediff/generate.py @@ -0,0 +1,1786 @@ +import glob +import logging +import os +import re +from functools import partial +from itertools import chain +from os import PathLike +from pathlib import Path +from typing import Any, Callable, Dict, List, Union + +import numpy as np +import torch +from controlnet_aux import LineartAnimeDetector +from controlnet_aux.processor import MODELS +from controlnet_aux.processor import Processor as ControlnetPreProcessor +from controlnet_aux.util import HWC3, ade_palette +from controlnet_aux.util import resize_image as aux_resize_image +from diffusers import (AutoencoderKL, ControlNetModel, DiffusionPipeline, + EulerDiscreteScheduler, + StableDiffusionControlNetImg2ImgPipeline, + StableDiffusionPipeline, StableDiffusionXLPipeline) +from PIL import Image +from torchvision.datasets.folder import IMG_EXTENSIONS +from tqdm.rich import tqdm +from transformers import (AutoImageProcessor, CLIPImageProcessor, + CLIPTextConfig, CLIPTextModel, + CLIPTextModelWithProjection, CLIPTokenizer, + UperNetForSemanticSegmentation) + +from animatediff import get_dir +from animatediff.dwpose import DWposeDetector +from animatediff.models.clip import CLIPSkipTextModel +from animatediff.models.unet import UNet3DConditionModel +from animatediff.pipelines import AnimationPipeline, load_text_embeddings +from animatediff.pipelines.lora import load_lcm_lora, load_lora_map +from animatediff.pipelines.pipeline_controlnet_img2img_reference import \ + StableDiffusionControlNetImg2ImgReferencePipeline +from animatediff.schedulers import DiffusionScheduler, get_scheduler +from animatediff.settings import InferenceConfig, ModelConfig +from animatediff.utils.control_net_lllite import (ControlNetLLLite, + load_controlnet_lllite) +from animatediff.utils.convert_from_ckpt import convert_ldm_vae_checkpoint +from animatediff.utils.convert_lora_safetensor_to_diffusers import convert_lora +from animatediff.utils.model import 
(ensure_motion_modules, + get_checkpoint_weights, + get_checkpoint_weights_sdxl) +from animatediff.utils.util import (get_resized_image, get_resized_image2, + get_resized_images, + get_tensor_interpolation_method, + prepare_dwpose, prepare_extra_controlnet, + prepare_ip_adapter, + prepare_ip_adapter_sdxl, prepare_lcm_lora, + prepare_lllite, prepare_motion_module, + save_frames, save_imgs, save_video) + +controlnet_address_table={ + "controlnet_tile" : ['lllyasviel/control_v11f1e_sd15_tile'], + "controlnet_lineart_anime" : ['lllyasviel/control_v11p_sd15s2_lineart_anime'], + "controlnet_ip2p" : ['lllyasviel/control_v11e_sd15_ip2p'], + "controlnet_openpose" : ['lllyasviel/control_v11p_sd15_openpose'], + "controlnet_softedge" : ['lllyasviel/control_v11p_sd15_softedge'], + "controlnet_shuffle" : ['lllyasviel/control_v11e_sd15_shuffle'], + "controlnet_depth" : ['lllyasviel/control_v11f1p_sd15_depth'], + "controlnet_canny" : ['lllyasviel/control_v11p_sd15_canny'], + "controlnet_inpaint" : ['lllyasviel/control_v11p_sd15_inpaint'], + "controlnet_lineart" : ['lllyasviel/control_v11p_sd15_lineart'], + "controlnet_mlsd" : ['lllyasviel/control_v11p_sd15_mlsd'], + "controlnet_normalbae" : ['lllyasviel/control_v11p_sd15_normalbae'], + "controlnet_scribble" : ['lllyasviel/control_v11p_sd15_scribble'], + "controlnet_seg" : ['lllyasviel/control_v11p_sd15_seg'], + "qr_code_monster_v1" : ['monster-labs/control_v1p_sd15_qrcode_monster'], + "qr_code_monster_v2" : ['monster-labs/control_v1p_sd15_qrcode_monster', 'v2'], + "controlnet_mediapipe_face" : ['CrucibleAI/ControlNetMediaPipeFace', "diffusion_sd15"], + "animatediff_controlnet" : [None, "data/models/controlnet/animatediff_controlnet/controlnet_checkpoint.ckpt"] +} + +# Edit this table if you want to change to another controlnet checkpoint +controlnet_address_table_sdxl={ +# "controlnet_openpose" : ['thibaud/controlnet-openpose-sdxl-1.0'], +# "controlnet_softedge" : ['SargeZT/controlnet-sd-xl-1.0-softedge-dexined'], +# "controlnet_depth" : ['diffusers/controlnet-depth-sdxl-1.0-small'], +# "controlnet_canny" : ['diffusers/controlnet-canny-sdxl-1.0-small'], +# "controlnet_seg" : ['SargeZT/sdxl-controlnet-seg'], + "qr_code_monster_v1" : ['monster-labs/control_v1p_sdxl_qrcode_monster'], +} + +# Edit this table if you want to change to another lllite checkpoint +lllite_address_table_sdxl={ + "controlnet_tile" : ['models/lllite/bdsqlsz_controlllite_xl_tile_anime_β.safetensors'], + "controlnet_lineart_anime" : ['models/lllite/bdsqlsz_controlllite_xl_lineart_anime_denoise.safetensors'], +# "controlnet_ip2p" : ('lllyasviel/control_v11e_sd15_ip2p'), + "controlnet_openpose" : ['models/lllite/bdsqlsz_controlllite_xl_dw_openpose.safetensors'], +# "controlnet_openpose" : ['models/lllite/controllllite_v01032064e_sdxl_pose_anime.safetensors'], + "controlnet_softedge" : ['models/lllite/bdsqlsz_controlllite_xl_softedge.safetensors'], + "controlnet_shuffle" : ['models/lllite/bdsqlsz_controlllite_xl_t2i-adapter_color_shuffle.safetensors'], + "controlnet_depth" : ['models/lllite/bdsqlsz_controlllite_xl_depth.safetensors'], + "controlnet_canny" : ['models/lllite/bdsqlsz_controlllite_xl_canny.safetensors'], +# "controlnet_canny" : ['models/lllite/controllllite_v01032064e_sdxl_canny.safetensors'], +# "controlnet_inpaint" : ('lllyasviel/control_v11p_sd15_inpaint'), +# "controlnet_lineart" : ('lllyasviel/control_v11p_sd15_lineart'), + "controlnet_mlsd" : ['models/lllite/bdsqlsz_controlllite_xl_mlsd_V2.safetensors'], + "controlnet_normalbae" : 
['models/lllite/bdsqlsz_controlllite_xl_normal.safetensors'], + "controlnet_scribble" : ['models/lllite/bdsqlsz_controlllite_xl_sketch.safetensors'], + "controlnet_seg" : ['models/lllite/bdsqlsz_controlllite_xl_segment_animeface_V2.safetensors'], +# "qr_code_monster_v1" : ['monster-labs/control_v1p_sdxl_qrcode_monster'], +# "qr_code_monster_v2" : ('monster-labs/control_v1p_sd15_qrcode_monster', 'v2'), +# "controlnet_mediapipe_face" : ('CrucibleAI/ControlNetMediaPipeFace', "diffusion_sd15"), +} + + + + + +try: + import onnxruntime + onnxruntime_installed = True +except: + onnxruntime_installed = False + + + + +logger = logging.getLogger(__name__) + +data_dir = get_dir("data") +default_base_path = data_dir.joinpath("models/huggingface/stable-diffusion-v1-5") + +re_clean_prompt = re.compile(r"[^\w\-, ]") + +controlnet_preprocessor = {} + +def load_safetensors_lora(text_encoder, unet, lora_path, alpha=0.75, is_animatediff=True): + from safetensors.torch import load_file + + from animatediff.utils.lora_diffusers import (LoRANetwork, + create_network_from_weights) + + sd = load_file(lora_path) + + print(f"create LoRA network") + lora_network: LoRANetwork = create_network_from_weights(text_encoder, unet, sd, multiplier=alpha, is_animatediff=is_animatediff) + print(f"load LoRA network weights") + lora_network.load_state_dict(sd, False) + #lora_network.merge_to(alpha) + lora_network.apply_to(alpha) + return lora_network + +def load_safetensors_lora2(text_encoder, unet, lora_path, alpha=0.75, is_animatediff=True): + from safetensors.torch import load_file + + from animatediff.utils.lora_diffusers import (LoRANetwork, + create_network_from_weights) + + sd = load_file(lora_path) + + print(f"create LoRA network") + lora_network: LoRANetwork = create_network_from_weights(text_encoder, unet, sd, multiplier=alpha, is_animatediff=is_animatediff) + print(f"load LoRA network weights") + lora_network.load_state_dict(sd, False) + lora_network.merge_to(alpha) + + +def load_tensors(path:Path,framework="pt",device="cpu"): + tensors = {} + if path.suffix == ".safetensors": + from safetensors import safe_open + with safe_open(path, framework=framework, device=device) as f: + for k in f.keys(): + tensors[k] = f.get_tensor(k) # loads the full tensor given a key + else: + from torch import load + tensors = load(path, device) + if "state_dict" in tensors: + tensors = tensors["state_dict"] + return tensors + +def load_motion_lora(unet, lora_path:Path, alpha=1.0): + state_dict = load_tensors(lora_path) + + # directly update weight in diffusers model + for key in state_dict: + # only process lora down key + if "up." 
in key: continue + + up_key = key.replace(".down.", ".up.") + model_key = key.replace("processor.", "").replace("_lora", "").replace("down.", "").replace("up.", "") + model_key = model_key.replace("to_out.", "to_out.0.") + layer_infos = model_key.split(".")[:-1] + + curr_layer = unet + try: + while len(layer_infos) > 0: + temp_name = layer_infos.pop(0) + curr_layer = curr_layer.__getattr__(temp_name) + except: + logger.info(f"{model_key} not found") + continue + + + weight_down = state_dict[key] + weight_up = state_dict[up_key] + curr_layer.weight.data += alpha * torch.mm(weight_up, weight_down).to(curr_layer.weight.data.device) + + +class SegPreProcessor: + + def __init__(self): + self.image_processor = AutoImageProcessor.from_pretrained("openmmlab/upernet-convnext-small") + self.processor = UperNetForSemanticSegmentation.from_pretrained("openmmlab/upernet-convnext-small") + + def __call__(self, input_image, detect_resolution=512, image_resolution=512, output_type="pil", **kwargs): + + input_array = np.array(input_image, dtype=np.uint8) + input_array = HWC3(input_array) + input_array = aux_resize_image(input_array, detect_resolution) + + pixel_values = self.image_processor(input_array, return_tensors="pt").pixel_values + + with torch.no_grad(): + outputs = self.processor(pixel_values.to(self.processor.device)) + + outputs.loss = outputs.loss.to("cpu") if outputs.loss is not None else outputs.loss + outputs.logits = outputs.logits.to("cpu") if outputs.logits is not None else outputs.logits + outputs.hidden_states = outputs.hidden_states.to("cpu") if outputs.hidden_states is not None else outputs.hidden_states + outputs.attentions = outputs.attentions.to("cpu") if outputs.attentions is not None else outputs.attentions + + seg = self.image_processor.post_process_semantic_segmentation(outputs, target_sizes=[input_image.size[::-1]])[0] + color_seg = np.zeros((seg.shape[0], seg.shape[1], 3), dtype=np.uint8) # height, width, 3 + + for label, color in enumerate(ade_palette()): + color_seg[seg == label, :] = color + + color_seg = color_seg.astype(np.uint8) + color_seg = aux_resize_image(color_seg, image_resolution) + color_seg = Image.fromarray(color_seg) + + return color_seg + +class NullPreProcessor: + def __call__(self, input_image, **kwargs): + return input_image + +class BlurPreProcessor: + def __call__(self, input_image, sigma=5.0, **kwargs): + import cv2 + + input_array = np.array(input_image, dtype=np.uint8) + input_array = HWC3(input_array) + + dst = cv2.GaussianBlur(input_array, (0, 0), sigma) + + return Image.fromarray(dst) + +class TileResamplePreProcessor: + + def resize(self, input_image, resolution): + import cv2 + + H, W, C = input_image.shape + H = float(H) + W = float(W) + k = float(resolution) / min(H, W) + H *= k + W *= k + img = cv2.resize(input_image, (int(W), int(H)), interpolation=cv2.INTER_LANCZOS4 if k > 1 else cv2.INTER_AREA) + return img + + def __call__(self, input_image, down_sampling_rate = 1.0, **kwargs): + + input_array = np.array(input_image, dtype=np.uint8) + input_array = HWC3(input_array) + + H, W, C = input_array.shape + + target_res = min(H,W) / down_sampling_rate + + dst = self.resize(input_array, target_res) + + return Image.fromarray(dst) + + + +def is_valid_controlnet_type(type_str, is_sdxl): + if not is_sdxl: + return type_str in controlnet_address_table + else: + return (type_str in controlnet_address_table_sdxl) or (type_str in lllite_address_table_sdxl) + +def load_controlnet_from_file(file_path, torch_dtype): + from safetensors.torch import load_file 
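+    # local import: safetensors is only needed when actually loading weights here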
+ + prepare_extra_controlnet() + + file_path = Path(file_path) + + if file_path.exists() and file_path.is_file(): + if file_path.suffix.lower() in [".pth", ".pt", ".ckpt"]: + controlnet_state_dict = torch.load(file_path, map_location="cpu", weights_only=True) + elif file_path.suffix.lower() == ".safetensors": + controlnet_state_dict = load_file(file_path, device="cpu") + else: + raise RuntimeError( + f"unknown file format for controlnet weights: {file_path.suffix}" + ) + else: + raise FileNotFoundError(f"no controlnet weights found in {file_path}") + + if file_path.parent.name == "animatediff_controlnet": + model = ControlNetModel(cross_attention_dim=768) + else: + model = ControlNetModel() + + missing, _ = model.load_state_dict(controlnet_state_dict["state_dict"], strict=False) + if len(missing) > 0: + logger.info(f"ControlNetModel has missing keys: {missing}") + + return model.to(dtype=torch_dtype) + +def create_controlnet_model(pipe, type_str, is_sdxl): + if not is_sdxl: + if type_str in controlnet_address_table: + addr = controlnet_address_table[type_str] + if addr[0] != None: + if len(addr) == 1: + return ControlNetModel.from_pretrained(addr[0], torch_dtype=torch.float16) + else: + return ControlNetModel.from_pretrained(addr[0], subfolder=addr[1], torch_dtype=torch.float16) + else: + return load_controlnet_from_file(addr[1],torch_dtype=torch.float16) + else: + raise ValueError(f"unknown controlnet type {type_str}") + else: + + if type_str in controlnet_address_table_sdxl: + addr = controlnet_address_table_sdxl[type_str] + if len(addr) == 1: + return ControlNetModel.from_pretrained(addr[0], torch_dtype=torch.float16) + else: + return ControlNetModel.from_pretrained(addr[0], subfolder=addr[1], torch_dtype=torch.float16) + elif type_str in lllite_address_table_sdxl: + addr = lllite_address_table_sdxl[type_str] + model_path = data_dir.joinpath(addr[0]) + return load_controlnet_lllite(model_path, pipe, torch_dtype=torch.float16) + else: + raise ValueError(f"unknown controlnet type {type_str}") + + + +default_preprocessor_table={ + "controlnet_lineart_anime":"lineart_anime", + "controlnet_openpose": "openpose_full" if onnxruntime_installed==False else "dwpose", + "controlnet_softedge":"softedge_hedsafe", + "controlnet_shuffle":"shuffle", + "controlnet_depth":"depth_midas", + "controlnet_canny":"canny", + "controlnet_lineart":"lineart_realistic", + "controlnet_mlsd":"mlsd", + "controlnet_normalbae":"normal_bae", + "controlnet_scribble":"scribble_pidsafe", + "controlnet_seg":"upernet_seg", + "controlnet_mediapipe_face":"mediapipe_face", + "qr_code_monster_v1":"depth_midas", + "qr_code_monster_v2":"depth_midas", +} + +def create_preprocessor_from_name(pre_type): + if pre_type == "dwpose": + prepare_dwpose() + return DWposeDetector() + elif pre_type == "upernet_seg": + return SegPreProcessor() + elif pre_type == "blur": + return BlurPreProcessor() + elif pre_type == "tile_resample": + return TileResamplePreProcessor() + elif pre_type == "none": + return NullPreProcessor() + elif pre_type in MODELS: + return ControlnetPreProcessor(pre_type) + else: + raise ValueError(f"unknown controlnet preprocessor type {pre_type}") + + +def create_default_preprocessor(type_str): + if type_str in default_preprocessor_table: + pre_type = default_preprocessor_table[type_str] + else: + pre_type = "none" + + return create_preprocessor_from_name(pre_type) + + +def get_preprocessor(type_str, device_str, preprocessor_map): + if type_str not in controlnet_preprocessor: + if preprocessor_map: + 
controlnet_preprocessor[type_str] = create_preprocessor_from_name(preprocessor_map["type"]) + + if type_str not in controlnet_preprocessor: + controlnet_preprocessor[type_str] = create_default_preprocessor(type_str) + + if hasattr(controlnet_preprocessor[type_str], "processor"): + if hasattr(controlnet_preprocessor[type_str].processor, "to"): + if device_str: + controlnet_preprocessor[type_str].processor.to(device_str) + elif hasattr(controlnet_preprocessor[type_str], "to"): + if device_str: + controlnet_preprocessor[type_str].to(device_str) + + + return controlnet_preprocessor[type_str] + +def clear_controlnet_preprocessor(type_str = None): + global controlnet_preprocessor + if type_str == None: + for t in controlnet_preprocessor: + controlnet_preprocessor[t] = None + controlnet_preprocessor={} + torch.cuda.empty_cache() + else: + controlnet_preprocessor[type_str] = None + torch.cuda.empty_cache() + + +def get_preprocessed_img(type_str, img, use_preprocessor, device_str, preprocessor_map): + if use_preprocessor: + param = {} + if preprocessor_map: + param = preprocessor_map["param"] if "param" in preprocessor_map else {} + return get_preprocessor(type_str, device_str, preprocessor_map)(img, **param) + else: + return img + + +def create_pipeline_sdxl( + base_model: Union[str, PathLike] = default_base_path, + model_config: ModelConfig = ..., + infer_config: InferenceConfig = ..., + use_xformers: bool = True, + video_length: int = 16, + motion_module_path = ..., +): + from animatediff.pipelines.sdxl_animation import AnimationPipeline + from animatediff.sdxl_models.unet import UNet3DConditionModel + + logger.info("Loading tokenizer...") + tokenizer: CLIPTokenizer = CLIPTokenizer.from_pretrained(base_model, subfolder="tokenizer") + logger.info("Loading text encoder...") + text_encoder: CLIPTextModel = CLIPTextModel.from_pretrained(base_model, subfolder="text_encoder", torch_dtype=torch.float16) + logger.info("Loading VAE...") + vae: AutoencoderKL = AutoencoderKL.from_pretrained(base_model, subfolder="vae") + logger.info("Loading tokenizer two...") + tokenizer_two = CLIPTokenizer.from_pretrained(base_model, subfolder="tokenizer_2") + logger.info("Loading text encoder two...") + text_encoder_two = CLIPTextModelWithProjection.from_pretrained(base_model, subfolder="text_encoder_2", torch_dtype=torch.float16) + + + logger.info("Loading UNet...") + unet: UNet3DConditionModel = UNet3DConditionModel.from_pretrained_2d( + pretrained_model_path=base_model, + motion_module_path=motion_module_path, + subfolder="unet", + unet_additional_kwargs=infer_config.unet_additional_kwargs, + ) + + # set up scheduler + sched_kwargs = infer_config.noise_scheduler_kwargs + scheduler = get_scheduler(model_config.scheduler, sched_kwargs) + logger.info(f'Using scheduler "{model_config.scheduler}" ({scheduler.__class__.__name__})') + + if model_config.gradual_latent_hires_fix_map: + if "enable" in model_config.gradual_latent_hires_fix_map: + if model_config.gradual_latent_hires_fix_map["enable"]: + if model_config.scheduler not in (DiffusionScheduler.euler_a, DiffusionScheduler.lcm): + logger.warn("gradual_latent_hires_fix enable") + logger.warn(f"{model_config.scheduler=}") + logger.warn("If you are forced to exit with an error, change to euler_a or lcm") + + + + # Load the checkpoint weights into the pipeline + if model_config.path is not None: + model_path = data_dir.joinpath(model_config.path) + logger.info(f"Loading weights from {model_path}") + if model_path.is_file(): + logger.debug("Loading from single checkpoint 
file") + unet_state_dict, tenc_state_dict, tenc2_state_dict, vae_state_dict = get_checkpoint_weights_sdxl(model_path) + elif model_path.is_dir(): + logger.debug("Loading from Diffusers model directory") + temp_pipeline = StableDiffusionXLPipeline.from_pretrained(model_path) + unet_state_dict, tenc_state_dict, tenc2_state_dict, vae_state_dict = ( + temp_pipeline.unet.state_dict(), + temp_pipeline.text_encoder.state_dict(), + temp_pipeline.text_encoder_2.state_dict(), + temp_pipeline.vae.state_dict(), + ) + del temp_pipeline + else: + raise FileNotFoundError(f"model_path {model_path} is not a file or directory") + + # Load into the unet, TE, and VAE + logger.info("Merging weights into UNet...") + _, unet_unex = unet.load_state_dict(unet_state_dict, strict=False) + if len(unet_unex) > 0: + raise ValueError(f"UNet has unexpected keys: {unet_unex}") + tenc_missing, _ = text_encoder.load_state_dict(tenc_state_dict, strict=False) + if len(tenc_missing) > 0: + raise ValueError(f"TextEncoder has missing keys: {tenc_missing}") + tenc2_missing, _ = text_encoder_two.load_state_dict(tenc2_state_dict, strict=False) + if len(tenc2_missing) > 0: + raise ValueError(f"TextEncoder2 has missing keys: {tenc2_missing}") + vae_missing, _ = vae.load_state_dict(vae_state_dict, strict=False) + if len(vae_missing) > 0: + raise ValueError(f"VAE has missing keys: {vae_missing}") + else: + logger.info("Using base model weights (no checkpoint/LoRA)") + + if model_config.vae_path: + vae_path = data_dir.joinpath(model_config.vae_path) + logger.info(f"Loading vae from {vae_path}") + + if vae_path.is_dir(): + vae = AutoencoderKL.from_pretrained(vae_path) + else: + tensors = load_tensors(vae_path) + tensors = convert_ldm_vae_checkpoint(tensors, vae.config) + vae.load_state_dict(tensors) + + unet.to(torch.float16) + text_encoder.to(torch.float16) + text_encoder_two.to(torch.float16) + + del unet_state_dict + del tenc_state_dict + del tenc2_state_dict + del vae_state_dict + + # enable xformers if available + if use_xformers: + logger.info("Enabling xformers memory-efficient attention") + unet.enable_xformers_memory_efficient_attention() + + # motion lora + for l in model_config.motion_lora_map: + lora_path = data_dir.joinpath(l) + logger.info(f"loading motion lora {lora_path=}") + if lora_path.is_file(): + logger.info(f"Loading motion lora {lora_path}") + logger.info(f"alpha = {model_config.motion_lora_map[l]}") + load_motion_lora(unet, lora_path, alpha=model_config.motion_lora_map[l]) + else: + raise ValueError(f"{lora_path=} not found") + + logger.info("Creating AnimationPipeline...") + pipeline = AnimationPipeline( + vae=vae, + text_encoder=text_encoder, + text_encoder_2=text_encoder_two, + tokenizer=tokenizer, + tokenizer_2=tokenizer_two, + unet=unet, + scheduler=scheduler, + controlnet_map=None, + ) + + del vae + del text_encoder + del text_encoder_two + del tokenizer + del tokenizer_two + del unet + + torch.cuda.empty_cache() + + pipeline.lcm = None + if model_config.lcm_map: + if model_config.lcm_map["enable"]: + prepare_lcm_lora() + load_lcm_lora(pipeline, model_config.lcm_map, is_sdxl=True) + + load_lora_map(pipeline, model_config.lora_map, video_length, is_sdxl=True) + + pipeline.unet = pipeline.unet.half() + pipeline.text_encoder = pipeline.text_encoder.half() + pipeline.text_encoder_2 = pipeline.text_encoder_2.half() + + # Load TI embeddings + pipeline.text_encoder = pipeline.text_encoder.to("cuda") + pipeline.text_encoder_2 = pipeline.text_encoder_2.to("cuda") + + load_text_embeddings(pipeline, is_sdxl=True) + + 
pipeline.text_encoder = pipeline.text_encoder.to("cpu") + pipeline.text_encoder_2 = pipeline.text_encoder_2.to("cpu") + + return pipeline + + +def create_pipeline( + base_model: Union[str, PathLike] = default_base_path, + model_config: ModelConfig = ..., + infer_config: InferenceConfig = ..., + use_xformers: bool = True, + video_length: int = 16, + is_sdxl:bool = False, +) -> DiffusionPipeline: + """Create an AnimationPipeline from a pretrained model. + Uses the base_model argument to load or download the pretrained reference pipeline model.""" + + # make sure motion_module is a Path and exists + logger.info("Checking motion module...") + motion_module = data_dir.joinpath(model_config.motion_module) + if not (motion_module.exists() and motion_module.is_file()): + prepare_motion_module() + if not (motion_module.exists() and motion_module.is_file()): + # check for safetensors version + motion_module = motion_module.with_suffix(".safetensors") + if not (motion_module.exists() and motion_module.is_file()): + # download from HuggingFace Hub if not found + ensure_motion_modules() + if not (motion_module.exists() and motion_module.is_file()): + # this should never happen, but just in case... + raise FileNotFoundError(f"Motion module {motion_module} does not exist or is not a file!") + + if is_sdxl: + return create_pipeline_sdxl( + base_model=base_model, + model_config=model_config, + infer_config=infer_config, + use_xformers=use_xformers, + video_length=video_length, + motion_module_path=motion_module, + ) + + logger.info("Loading tokenizer...") + tokenizer: CLIPTokenizer = CLIPTokenizer.from_pretrained(base_model, subfolder="tokenizer") + logger.info("Loading text encoder...") + text_encoder: CLIPSkipTextModel = CLIPSkipTextModel.from_pretrained(base_model, subfolder="text_encoder") + logger.info("Loading VAE...") + vae: AutoencoderKL = AutoencoderKL.from_pretrained(base_model, subfolder="vae") + logger.info("Loading UNet...") + unet: UNet3DConditionModel = UNet3DConditionModel.from_pretrained_2d( + pretrained_model_path=base_model, + motion_module_path=motion_module, + subfolder="unet", + unet_additional_kwargs=infer_config.unet_additional_kwargs, + ) + feature_extractor = CLIPImageProcessor.from_pretrained(base_model, subfolder="feature_extractor") + + # set up scheduler + if model_config.gradual_latent_hires_fix_map: + if "enable" in model_config.gradual_latent_hires_fix_map: + if model_config.gradual_latent_hires_fix_map["enable"]: + if model_config.scheduler not in (DiffusionScheduler.euler_a, DiffusionScheduler.lcm): + logger.warn("gradual_latent_hires_fix enable") + logger.warn(f"{model_config.scheduler=}") + logger.warn("If you are forced to exit with an error, change to euler_a or lcm") + + sched_kwargs = infer_config.noise_scheduler_kwargs + scheduler = get_scheduler(model_config.scheduler, sched_kwargs) + logger.info(f'Using scheduler "{model_config.scheduler}" ({scheduler.__class__.__name__})') + + # Load the checkpoint weights into the pipeline + if model_config.path is not None: + model_path = data_dir.joinpath(model_config.path) + logger.info(f"Loading weights from {model_path}") + if model_path.is_file(): + logger.debug("Loading from single checkpoint file") + unet_state_dict, tenc_state_dict, vae_state_dict = get_checkpoint_weights(model_path) + elif model_path.is_dir(): + logger.debug("Loading from Diffusers model directory") + temp_pipeline = StableDiffusionPipeline.from_pretrained(model_path) + unet_state_dict, tenc_state_dict, vae_state_dict = ( + 
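+                # the temporary Diffusers pipeline is used only to harvest its
+                # state dicts and is deleted immediately afterwards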
temp_pipeline.unet.state_dict(),
+                temp_pipeline.text_encoder.state_dict(),
+                temp_pipeline.vae.state_dict(),
+            )
+            del temp_pipeline
+        else:
+            raise FileNotFoundError(f"model_path {model_path} is not a file or directory")
+
+        # Load into the unet, TE, and VAE
+        logger.info("Merging weights into UNet...")
+        _, unet_unex = unet.load_state_dict(unet_state_dict, strict=False)
+        if len(unet_unex) > 0:
+            raise ValueError(f"UNet has unexpected keys: {unet_unex}")
+        tenc_missing, _ = text_encoder.load_state_dict(tenc_state_dict, strict=False)
+        if len(tenc_missing) > 0:
+            raise ValueError(f"TextEncoder has missing keys: {tenc_missing}")
+        vae_missing, _ = vae.load_state_dict(vae_state_dict, strict=False)
+        if len(vae_missing) > 0:
+            raise ValueError(f"VAE has missing keys: {vae_missing}")
+    else:
+        logger.info("Using base model weights (no checkpoint/LoRA)")
+
+    if model_config.vae_path:
+        vae_path = data_dir.joinpath(model_config.vae_path)
+        logger.info(f"Loading vae from {vae_path}")
+
+        if vae_path.is_dir():
+            vae = AutoencoderKL.from_pretrained(vae_path)
+        else:
+            tensors = load_tensors(vae_path)
+            tensors = convert_ldm_vae_checkpoint(tensors, vae.config)
+            vae.load_state_dict(tensors)
+
+    # enable xformers if available
+    if use_xformers:
+        logger.info("Enabling xformers memory-efficient attention")
+        unet.enable_xformers_memory_efficient_attention()
+
+    # NOTE: a per-file lora loop (load_safetensors_lora) used to sit here behind an
+    # `if False:` guard; that dead code has been dropped, as lora weights are applied
+    # through load_lora_map() further below.
+
+    # motion lora
+    for l in model_config.motion_lora_map:
+        lora_path = data_dir.joinpath(l)
+        if lora_path.is_file():
+            logger.info(f"Loading motion lora {lora_path}")
+            logger.info(f"alpha = {model_config.motion_lora_map[l]}")
+            load_motion_lora(unet, lora_path, alpha=model_config.motion_lora_map[l])
+        else:
+            raise ValueError(f"{lora_path=} not found")
+
+    logger.info("Creating AnimationPipeline...")
+    pipeline = AnimationPipeline(
+        vae=vae,
+        text_encoder=text_encoder,
+        tokenizer=tokenizer,
+        unet=unet,
+        scheduler=scheduler,
+        feature_extractor=feature_extractor,
+        controlnet_map=None,
+    )
+
+    pipeline.lcm = None
+    if model_config.lcm_map:
+        if model_config.lcm_map["enable"]:
+            prepare_lcm_lora()
+            load_lcm_lora(pipeline, model_config.lcm_map, is_sdxl=False)
+
+    load_lora_map(pipeline, model_config.lora_map, video_length)
+
+    # Load TI embeddings
+    pipeline.unet = pipeline.unet.half()
+    pipeline.text_encoder = pipeline.text_encoder.half()
+
+    pipeline.text_encoder = pipeline.text_encoder.to("cuda")
+
+    load_text_embeddings(pipeline)
+
+    pipeline.text_encoder = pipeline.text_encoder.to("cpu")
+
+    return pipeline
+
+def load_controlnet_models(pipe: DiffusionPipeline, model_config: ModelConfig = ..., is_sdxl: bool = False):
+    # controlnet
+
+    if is_sdxl:
+        prepare_lllite()
+
+    controlnet_map = {}
+    if model_config.controlnet_map:
+        c_image_dir = data_dir.joinpath(model_config.controlnet_map["input_image_dir"])
+
+        for c in model_config.controlnet_map:
+            item = model_config.controlnet_map[c]
+            if type(item) is dict:
+                if item["enable"] == True:
+                    if is_valid_controlnet_type(c, is_sdxl):
+                        img_dir = c_image_dir.joinpath(c)
+                        cond_imgs = sorted(glob.glob(os.path.join(img_dir, "[0-9]*.png"), recursive=False))
+                        if len(cond_imgs) > 0:
+                            logger.info(f"loading {c=} model")
+                            controlnet_map[c] =
create_controlnet_model(pipe, c , is_sdxl) + else: + logger.info(f"invalid controlnet type for {'sdxl' if is_sdxl else 'sd15'} : {c}") + + if not controlnet_map: + controlnet_map = None + + pipe.controlnet_map = controlnet_map + +def unload_controlnet_models(pipe: AnimationPipeline): + from animatediff.utils.util import show_gpu + + if pipe.controlnet_map: + for c in pipe.controlnet_map: + controlnet = pipe.controlnet_map[c] + if isinstance(controlnet, ControlNetLLLite): + controlnet.unapply_to() + del controlnet + + #show_gpu("before uload controlnet") + pipe.controlnet_map = None + torch.cuda.empty_cache() + #show_gpu("after unload controlnet") + + +def create_us_pipeline( + model_config: ModelConfig = ..., + infer_config: InferenceConfig = ..., + use_xformers: bool = True, + use_controlnet_ref: bool = False, + use_controlnet_tile: bool = False, + use_controlnet_line_anime: bool = False, + use_controlnet_ip2p: bool = False, +) -> DiffusionPipeline: + + # set up scheduler + sched_kwargs = infer_config.noise_scheduler_kwargs + scheduler = get_scheduler(model_config.scheduler, sched_kwargs) + logger.info(f'Using scheduler "{model_config.scheduler}" ({scheduler.__class__.__name__})') + + controlnet = [] + if use_controlnet_tile: + controlnet.append( ControlNetModel.from_pretrained('lllyasviel/control_v11f1e_sd15_tile') ) + if use_controlnet_line_anime: + controlnet.append( ControlNetModel.from_pretrained('lllyasviel/control_v11p_sd15s2_lineart_anime') ) + if use_controlnet_ip2p: + controlnet.append( ControlNetModel.from_pretrained('lllyasviel/control_v11e_sd15_ip2p') ) + + if len(controlnet) == 1: + controlnet = controlnet[0] + elif len(controlnet) == 0: + controlnet = None + + # Load the checkpoint weights into the pipeline + pipeline:DiffusionPipeline + + if model_config.path is not None: + model_path = data_dir.joinpath(model_config.path) + logger.info(f"Loading weights from {model_path}") + if model_path.is_file(): + + def is_empty_dir(path): + import os + return len(os.listdir(path)) == 0 + + save_path = data_dir.joinpath("models/huggingface/" + model_path.stem + "_" + str(model_path.stat().st_size)) + save_path.mkdir(exist_ok=True) + if save_path.is_dir() and is_empty_dir(save_path): + # StableDiffusionControlNetImg2ImgPipeline.from_single_file does not exist in version 18.2 + logger.debug("Loading from single checkpoint file") + tmp_pipeline = StableDiffusionPipeline.from_single_file( + pretrained_model_link_or_path=str(model_path.absolute()) + ) + tmp_pipeline.save_pretrained(save_path, safe_serialization=True) + del tmp_pipeline + + if use_controlnet_ref: + pipeline = StableDiffusionControlNetImg2ImgReferencePipeline.from_pretrained( + save_path, + controlnet=controlnet, + local_files_only=False, + load_safety_checker=False, + safety_checker=None, + ) + else: + pipeline = StableDiffusionControlNetImg2ImgPipeline.from_pretrained( + save_path, + controlnet=controlnet, + local_files_only=False, + load_safety_checker=False, + safety_checker=None, + ) + + elif model_path.is_dir(): + logger.debug("Loading from Diffusers model directory") + if use_controlnet_ref: + pipeline = StableDiffusionControlNetImg2ImgReferencePipeline.from_pretrained( + model_path, + controlnet=controlnet, + local_files_only=True, + load_safety_checker=False, + safety_checker=None, + ) + else: + pipeline = StableDiffusionControlNetImg2ImgPipeline.from_pretrained( + model_path, + controlnet=controlnet, + local_files_only=True, + load_safety_checker=False, + safety_checker=None, + ) + else: + raise 
FileNotFoundError(f"model_path {model_path} is not a file or directory") + else: + raise ValueError("model_config.path is invalid") + + pipeline.scheduler = scheduler + + # enable xformers if available + if use_xformers: + logger.info("Enabling xformers memory-efficient attention") + pipeline.enable_xformers_memory_efficient_attention() + + # lora + for l in model_config.lora_map: + lora_path = data_dir.joinpath(l) + if lora_path.is_file(): + alpha = model_config.lora_map[l] + if isinstance(alpha, dict): + alpha = 0.75 + + logger.info(f"Loading lora {lora_path}") + logger.info(f"alpha = {alpha}") + load_safetensors_lora2(pipeline.text_encoder, pipeline.unet, lora_path, alpha=alpha,is_animatediff=False) + + # Load TI embeddings + pipeline.unet = pipeline.unet.half() + pipeline.text_encoder = pipeline.text_encoder.half() + + pipeline.text_encoder = pipeline.text_encoder.to("cuda") + + load_text_embeddings(pipeline) + + pipeline.text_encoder = pipeline.text_encoder.to("cpu") + + return pipeline + + +def seed_everything(seed): + import random + + import numpy as np + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + np.random.seed(seed % (2**32)) + random.seed(seed) + +def controlnet_preprocess( + controlnet_map: Dict[str, Any] = None, + width: int = 512, + height: int = 512, + duration: int = 16, + out_dir: PathLike = ..., + device_str:str=None, + is_sdxl:bool = False, + ): + + if not controlnet_map: + return None, None, None, None + + out_dir = Path(out_dir) # ensure out_dir is a Path + + # { 0 : { "type_str" : IMAGE, "type_str2" : IMAGE } } + controlnet_image_map={} + + controlnet_type_map={} + + c_image_dir = data_dir.joinpath( controlnet_map["input_image_dir"] ) + save_detectmap = controlnet_map["save_detectmap"] if "save_detectmap" in controlnet_map else True + + preprocess_on_gpu = controlnet_map["preprocess_on_gpu"] if "preprocess_on_gpu" in controlnet_map else True + device_str = device_str if preprocess_on_gpu else None + + for c in controlnet_map: + if c == "controlnet_ref": + continue + + item = controlnet_map[c] + + processed = False + + if type(item) is dict: + if item["enable"] == True: + + if is_valid_controlnet_type(c, is_sdxl): + preprocessor_map = item["preprocessor"] if "preprocessor" in item else {} + + img_dir = c_image_dir.joinpath( c ) + cond_imgs = sorted(glob.glob( os.path.join(img_dir, "[0-9]*.png"), recursive=False)) + if len(cond_imgs) > 0: + + controlnet_type_map[c] = { + "controlnet_conditioning_scale" : item["controlnet_conditioning_scale"], + "control_guidance_start" : item["control_guidance_start"], + "control_guidance_end" : item["control_guidance_end"], + "control_scale_list" : item["control_scale_list"], + "guess_mode" : item["guess_mode"] if "guess_mode" in item else False, + "control_region_list" : item["control_region_list"] if "control_region_list" in item else [] + } + + use_preprocessor = item["use_preprocessor"] if "use_preprocessor" in item else True + + for img_path in tqdm(cond_imgs, desc=f"Preprocessing images ({c})"): + frame_no = int(Path(img_path).stem) + if frame_no < duration: + if frame_no not in controlnet_image_map: + controlnet_image_map[frame_no] = {} + controlnet_image_map[frame_no][c] = get_preprocessed_img( c, get_resized_image2(img_path, 512) , use_preprocessor, device_str, preprocessor_map) + processed = True + else: + logger.info(f"invalid controlnet type for {'sdxl' if is_sdxl else 'sd15'} : {c}") + + + if save_detectmap and processed: + det_dir = out_dir.joinpath(f"{0:02d}_detectmap/{c}") + det_dir.mkdir(parents=True, 
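+                        # detect maps are written under <out_dir>/00_detectmap/<type>/<frame:08d>.png
+                        # so the preprocessed conditioning images can be inspected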
exist_ok=True) + for frame_no in tqdm(controlnet_image_map, desc=f"Saving Preprocessed images ({c})"): + save_path = det_dir.joinpath(f"{frame_no:08d}.png") + if c in controlnet_image_map[frame_no]: + controlnet_image_map[frame_no][c].save(save_path) + + clear_controlnet_preprocessor(c) + + clear_controlnet_preprocessor() + + controlnet_ref_map = None + + if "controlnet_ref" in controlnet_map: + r = controlnet_map["controlnet_ref"] + if r["enable"] == True: + org_name = data_dir.joinpath( r["ref_image"]).stem +# ref_image = get_resized_image( data_dir.joinpath( r["ref_image"] ) , width, height) + ref_image = get_resized_image2( data_dir.joinpath( r["ref_image"] ) , 512) + + if ref_image is not None: + controlnet_ref_map = { + "ref_image" : ref_image, + "style_fidelity" : r["style_fidelity"], + "attention_auto_machine_weight" : r["attention_auto_machine_weight"], + "gn_auto_machine_weight" : r["gn_auto_machine_weight"], + "reference_attn" : r["reference_attn"], + "reference_adain" : r["reference_adain"], + "scale_pattern" : r["scale_pattern"] + } + + if save_detectmap: + det_dir = out_dir.joinpath(f"{0:02d}_detectmap/controlnet_ref") + det_dir.mkdir(parents=True, exist_ok=True) + save_path = det_dir.joinpath(f"{org_name}.png") + ref_image.save(save_path) + + controlnet_no_shrink = ["controlnet_tile","animatediff_controlnet","controlnet_canny","controlnet_normalbae","controlnet_depth","controlnet_lineart","controlnet_lineart_anime","controlnet_scribble","controlnet_seg","controlnet_softedge","controlnet_mlsd"] + if "no_shrink_list" in controlnet_map: + controlnet_no_shrink = controlnet_map["no_shrink_list"] + + return controlnet_image_map, controlnet_type_map, controlnet_ref_map, controlnet_no_shrink + + +def ip_adapter_preprocess( + ip_adapter_config_map: Dict[str, Any] = None, + width: int = 512, + height: int = 512, + duration: int = 16, + out_dir: PathLike = ..., + is_sdxl: bool = False, + ): + + ip_adapter_map={} + + processed = False + + if ip_adapter_config_map: + if ip_adapter_config_map["enable"] == True: + resized_to_square = ip_adapter_config_map["resized_to_square"] if "resized_to_square" in ip_adapter_config_map else False + image_dir = data_dir.joinpath( ip_adapter_config_map["input_image_dir"] ) + imgs = sorted(chain.from_iterable([glob.glob(os.path.join(image_dir, f"[0-9]*{ext}")) for ext in IMG_EXTENSIONS])) + if len(imgs) > 0: + prepare_ip_adapter_sdxl() if is_sdxl else prepare_ip_adapter() + ip_adapter_map["images"] = {} + for img_path in tqdm(imgs, desc=f"Preprocessing images (ip_adapter)"): + frame_no = int(Path(img_path).stem) + if frame_no < duration: + if resized_to_square: + ip_adapter_map["images"][frame_no] = get_resized_image(img_path, 256, 256) + else: + ip_adapter_map["images"][frame_no] = get_resized_image2(img_path, 256) + processed = True + + if processed: + ip_adapter_config_map["prompt_fixed_ratio"] = max(min(1.0, ip_adapter_config_map["prompt_fixed_ratio"]),0) + + prompt_fixed_ratio = ip_adapter_config_map["prompt_fixed_ratio"] + prompt_map = ip_adapter_map["images"] + prompt_map = dict(sorted(prompt_map.items())) + key_list = list(prompt_map.keys()) + for k0,k1 in zip(key_list,key_list[1:]+[duration]): + k05 = k0 + round((k1-k0) * prompt_fixed_ratio) + if k05 == k1: + k05 -= 1 + if k05 != k0: + prompt_map[k05] = prompt_map[k0] + ip_adapter_map["images"] = prompt_map + + if (ip_adapter_config_map["save_input_image"] == True) and processed: + det_dir = out_dir.joinpath(f"{0:02d}_ip_adapter/") + det_dir.mkdir(parents=True, exist_ok=True) + for frame_no in 
tqdm(ip_adapter_map["images"], desc=f"Saving Preprocessed images (ip_adapter)"): + save_path = det_dir.joinpath(f"{frame_no:08d}.png") + ip_adapter_map["images"][frame_no].save(save_path) + + return ip_adapter_map if processed else None + +def prompt_preprocess( + prompt_config_map: Dict[str, Any], + head_prompt: str, + tail_prompt: str, + prompt_fixed_ratio: float, + video_length: int, +): + prompt_map = {} + for k in prompt_config_map.keys(): + if int(k) < video_length: + pr = prompt_config_map[k] + if head_prompt: + pr = head_prompt + "," + pr + if tail_prompt: + pr = pr + "," + tail_prompt + + prompt_map[int(k)]=pr + + prompt_map = dict(sorted(prompt_map.items())) + key_list = list(prompt_map.keys()) + for k0,k1 in zip(key_list,key_list[1:]+[video_length]): + k05 = k0 + round((k1-k0) * prompt_fixed_ratio) + if k05 == k1: + k05 -= 1 + if k05 != k0: + prompt_map[k05] = prompt_map[k0] + + return prompt_map + + +def region_preprocess( + model_config: ModelConfig = ..., + width: int = 512, + height: int = 512, + duration: int = 16, + out_dir: PathLike = ..., + is_init_img_exist: bool = False, + is_sdxl:bool = False, + ): + + is_bg_init_img = False + if is_init_img_exist: + if model_config.region_map: + if "background" in model_config.region_map: + is_bg_init_img = model_config.region_map["background"]["is_init_img"] + + + region_condi_list=[] + region2index={} + + condi_index = 0 + + prev_ip_map = None + + if not is_bg_init_img: + ip_map = ip_adapter_preprocess( + model_config.ip_adapter_map, + width, + height, + duration, + out_dir, + is_sdxl + ) + + if ip_map: + prev_ip_map = ip_map + + condition_map = { + "prompt_map": prompt_preprocess( + model_config.prompt_map, + model_config.head_prompt, + model_config.tail_prompt, + model_config.prompt_fixed_ratio, + duration + ), + "ip_adapter_map": ip_map + } + + region_condi_list.append( condition_map ) + + bg_src = condi_index + condi_index += 1 + else: + bg_src = -1 + + region_list=[ + { + "mask_images": None, + "src" : bg_src, + "crop_generation_rate" : 0 + } + ] + region2index["background"]=bg_src + + if model_config.region_map: + for r in model_config.region_map: + if r == "background": + continue + if model_config.region_map[r]["enable"] != True: + continue + region_dir = out_dir.joinpath(f"region_{int(r):05d}/") + region_dir.mkdir(parents=True, exist_ok=True) + + mask_map = mask_preprocess( + model_config.region_map[r], + width, + height, + duration, + region_dir + ) + + if not mask_map: + continue + + if model_config.region_map[r]["is_init_img"] == False: + ip_map = ip_adapter_preprocess( + model_config.region_map[r]["condition"]["ip_adapter_map"], + width, + height, + duration, + region_dir, + is_sdxl + ) + + if ip_map: + prev_ip_map = ip_map + + condition_map={ + "prompt_map": prompt_preprocess( + model_config.region_map[r]["condition"]["prompt_map"], + model_config.region_map[r]["condition"]["head_prompt"], + model_config.region_map[r]["condition"]["tail_prompt"], + model_config.region_map[r]["condition"]["prompt_fixed_ratio"], + duration + ), + "ip_adapter_map": ip_map + } + + region_condi_list.append( condition_map ) + + src = condi_index + condi_index += 1 + else: + if is_init_img_exist == False: + logger.warn("'is_init_img' : true / BUT init_img is not exist -> ignore region") + continue + src = -1 + + region_list.append( + { + "mask_images": mask_map, + "src" : src, + "crop_generation_rate" : model_config.region_map[r]["crop_generation_rate"] if "crop_generation_rate" in model_config.region_map[r] else 0 + } + ) + 
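+            # Region bookkeeping: "src" indexes into region_condi_list, and -1 means
+            # the region is filled from the init image rather than its own condition.
+            # A (hypothetical) region_map entry such as
+            #     "1": { "enable": true, "mask_dir": "mask/1", "is_init_img": false,
+            #            "condition": { "prompt_map": { "0": "1girl" } } }
+            # contributes one region_list entry and, unless is_init_img, one condition.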
region2index[r]=src + + ip_adapter_config_map = None + + if prev_ip_map is not None: + ip_adapter_config_map={} + ip_adapter_config_map["scale"] = model_config.ip_adapter_map["scale"] + ip_adapter_config_map["is_plus"] = model_config.ip_adapter_map["is_plus"] + ip_adapter_config_map["is_plus_face"] = model_config.ip_adapter_map["is_plus_face"] if "is_plus_face" in model_config.ip_adapter_map else False + ip_adapter_config_map["is_light"] = model_config.ip_adapter_map["is_light"] if "is_light" in model_config.ip_adapter_map else False + ip_adapter_config_map["is_full_face"] = model_config.ip_adapter_map["is_full_face"] if "is_full_face" in model_config.ip_adapter_map else False + for c in region_condi_list: + if c["ip_adapter_map"] == None: + logger.info(f"fill map") + c["ip_adapter_map"] = prev_ip_map + + + + + #for c in region_condi_list: + # logger.info(f"{c['prompt_map']=}") + + + if not region_condi_list: + raise ValueError("erro! There is not a single valid region") + + return region_condi_list, region_list, ip_adapter_config_map, region2index + +def img2img_preprocess( + img2img_config_map: Dict[str, Any] = None, + width: int = 512, + height: int = 512, + duration: int = 16, + out_dir: PathLike = ..., + ): + + img2img_map={} + + processed = False + + if img2img_config_map: + if img2img_config_map["enable"] == True: + image_dir = data_dir.joinpath( img2img_config_map["init_img_dir"] ) + imgs = sorted(glob.glob( os.path.join(image_dir, "[0-9]*.png"), recursive=False)) + if len(imgs) > 0: + img2img_map["images"] = {} + img2img_map["denoising_strength"] = img2img_config_map["denoising_strength"] + for img_path in tqdm(imgs, desc=f"Preprocessing images (img2img)"): + frame_no = int(Path(img_path).stem) + if frame_no < duration: + img2img_map["images"][frame_no] = get_resized_image(img_path, width, height) + processed = True + + if (img2img_config_map["save_init_image"] == True) and processed: + det_dir = out_dir.joinpath(f"{0:02d}_img2img_init_img/") + det_dir.mkdir(parents=True, exist_ok=True) + for frame_no in tqdm(img2img_map["images"], desc=f"Saving Preprocessed images (img2img)"): + save_path = det_dir.joinpath(f"{frame_no:08d}.png") + img2img_map["images"][frame_no].save(save_path) + + return img2img_map if processed else None + +def mask_preprocess( + region_config_map: Dict[str, Any] = None, + width: int = 512, + height: int = 512, + duration: int = 16, + out_dir: PathLike = ..., + ): + + mask_map={} + + processed = False + size = None + mode = None + + if region_config_map: + image_dir = data_dir.joinpath( region_config_map["mask_dir"] ) + imgs = sorted(glob.glob( os.path.join(image_dir, "[0-9]*.png"), recursive=False)) + if len(imgs) > 0: + for img_path in tqdm(imgs, desc=f"Preprocessing images (mask)"): + frame_no = int(Path(img_path).stem) + if frame_no < duration: + mask_map[frame_no] = get_resized_image(img_path, width, height) + if size is None: + size = mask_map[frame_no].size + mode = mask_map[frame_no].mode + + processed = True + + if processed: + if 0 in mask_map: + prev_img = mask_map[0] + else: + prev_img = Image.new(mode, size, color=0) + + for i in range(duration): + if i in mask_map: + prev_img = mask_map[i] + else: + mask_map[i] = prev_img + + if (region_config_map["save_mask"] == True) and processed: + det_dir = out_dir.joinpath(f"mask/") + det_dir.mkdir(parents=True, exist_ok=True) + for frame_no in tqdm(mask_map, desc=f"Saving Preprocessed images (mask)"): + save_path = det_dir.joinpath(f"{frame_no:08d}.png") + mask_map[frame_no].save(save_path) + + return 
mask_map if processed else None + +def wild_card_conversion(model_config: ModelConfig = ...,): + from animatediff.utils.wild_card import replace_wild_card + + wild_card_dir = get_dir("wildcards") + for k in model_config.prompt_map.keys(): + model_config.prompt_map[k] = replace_wild_card(model_config.prompt_map[k], wild_card_dir) + + if model_config.head_prompt: + model_config.head_prompt = replace_wild_card(model_config.head_prompt, wild_card_dir) + if model_config.tail_prompt: + model_config.tail_prompt = replace_wild_card(model_config.tail_prompt, wild_card_dir) + + model_config.prompt_fixed_ratio = max(min(1.0, model_config.prompt_fixed_ratio),0) + + if model_config.region_map: + for r in model_config.region_map: + if r == "background": + continue + + if "condition" in model_config.region_map[r]: + c = model_config.region_map[r]["condition"] + for k in c["prompt_map"].keys(): + c["prompt_map"][k] = replace_wild_card(c["prompt_map"][k], wild_card_dir) + + if "head_prompt" in c: + c["head_prompt"] = replace_wild_card(c["head_prompt"], wild_card_dir) + if "tail_prompt" in c: + c["tail_prompt"] = replace_wild_card(c["tail_prompt"], wild_card_dir) + if "prompt_fixed_ratio" in c: + c["prompt_fixed_ratio"] = max(min(1.0, c["prompt_fixed_ratio"]),0) + +def save_output( + pipeline_output, + frame_dir:str, + out_file:str, + output_map : Dict[str,Any] = {}, + no_frames : bool = False, + save_frames=save_frames, + save_video=None, +): + + output_format = "gif" + output_fps = 8 + if output_map: + output_format = output_map["format"] if "format" in output_map else output_format + output_fps = output_map["fps"] if "fps" in output_map else output_fps + if output_format == "mp4": + output_format = "h264" + + if output_format == "gif": + out_file = out_file.with_suffix(".gif") + if no_frames is not True: + if save_frames: + save_frames(pipeline_output,frame_dir) + + # generate the output filename and save the video + if save_video: + save_video(pipeline_output, out_file, output_fps) + else: + pipeline_output[0].save( + fp=out_file, format="GIF", append_images=pipeline_output[1:], save_all=True, duration=(1 / output_fps * 1000), loop=0 + ) + + else: + + if save_frames: + save_frames(pipeline_output,frame_dir) + + from animatediff.rife.ffmpeg import (FfmpegEncoder, VideoCodec, + codec_extn) + + out_file = out_file.with_suffix( f".{codec_extn(output_format)}" ) + + logger.info("Creating ffmpeg encoder...") + encoder = FfmpegEncoder( + frames_dir=frame_dir, + out_file=out_file, + codec=output_format, + in_fps=output_fps, + out_fps=output_fps, + lossless=False, + param= output_map["encode_param"] if "encode_param" in output_map else {} + ) + logger.info("Encoding interpolated frames with ffmpeg...") + result = encoder.encode() + logger.debug(f"ffmpeg result: {result}") + + + +def run_inference( + pipeline: DiffusionPipeline, + n_prompt: str = ..., + seed: int = -1, + steps: int = 25, + guidance_scale: float = 7.5, + unet_batch_size: int = 1, + width: int = 512, + height: int = 512, + duration: int = 16, + idx: int = 0, + out_dir: PathLike = ..., + context_frames: int = -1, + context_stride: int = 3, + context_overlap: int = 4, + context_schedule: str = "uniform", + clip_skip: int = 1, + controlnet_map: Dict[str, Any] = None, + controlnet_image_map: Dict[str,Any] = None, + controlnet_type_map: Dict[str,Any] = None, + controlnet_ref_map: Dict[str,Any] = None, + controlnet_no_shrink:List[str]=None, + no_frames :bool = False, + img2img_map: Dict[str,Any] = None, + ip_adapter_config_map: Dict[str,Any] = None, + 
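+    # region_list / region_condi_list come from region_preprocess(): each condition
+    # carries its own prompt_map and ip_adapter_map, and each region holds its mask
+    # images plus the index ("src") of the condition it samples from.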
region_list: List[Any] = None, + region_condi_list: List[Any] = None, + output_map: Dict[str,Any] = None, + is_single_prompt_mode: bool = False, + is_sdxl:bool=False, + apply_lcm_lora:bool=False, + gradual_latent_map: Dict[str,Any] = None, +): + out_dir = Path(out_dir) # ensure out_dir is a Path + + # Trim and clean up the prompt for filename use + prompt_map = region_condi_list[0]["prompt_map"] + prompt_tags = [re_clean_prompt.sub("", tag).strip().replace(" ", "-") for tag in prompt_map[list(prompt_map.keys())[0]].split(",")] + prompt_str = "_".join((prompt_tags[:6]))[:50] + frame_dir = out_dir.joinpath(f"{idx:02d}-{seed}") + out_file = out_dir.joinpath(f"{idx:02d}_{seed}_{prompt_str}") + + def preview_callback(i: int, video: torch.Tensor, save_fn: Callable[[torch.Tensor], None], out_file: str) -> None: + save_fn(video, out_file=Path(f"{out_file}_preview@{i}")) + + save_fn = partial( + save_output, + frame_dir=frame_dir, + output_map=output_map, + no_frames=no_frames, + save_frames=partial(save_frames, show_progress=False), + save_video=save_video + ) + callback = partial(preview_callback, save_fn=save_fn, out_file=out_file) + + seed_everything(seed) + + logger.info(f"{len( region_condi_list )=}") + logger.info(f"{len( region_list )=}") + + pipeline_output = pipeline( + negative_prompt=n_prompt, + num_inference_steps=steps, + guidance_scale=guidance_scale, + unet_batch_size=unet_batch_size, + width=width, + height=height, + video_length=duration, + return_dict=False, + context_frames=context_frames, + context_stride=context_stride + 1, + context_overlap=context_overlap, + context_schedule=context_schedule, + clip_skip=clip_skip, + controlnet_type_map=controlnet_type_map, + controlnet_image_map=controlnet_image_map, + controlnet_ref_map=controlnet_ref_map, + controlnet_no_shrink=controlnet_no_shrink, + controlnet_max_samples_on_vram=controlnet_map["max_samples_on_vram"] if "max_samples_on_vram" in controlnet_map else 999, + controlnet_max_models_on_vram=controlnet_map["max_models_on_vram"] if "max_models_on_vram" in controlnet_map else 99, + controlnet_is_loop = controlnet_map["is_loop"] if "is_loop" in controlnet_map else True, + img2img_map=img2img_map, + ip_adapter_config_map=ip_adapter_config_map, + region_list=region_list, + region_condi_list=region_condi_list, + interpolation_factor=1, + is_single_prompt_mode=is_single_prompt_mode, + apply_lcm_lora=apply_lcm_lora, + gradual_latent_map=gradual_latent_map, + callback=callback, + callback_steps=output_map.get("preview_steps"), + ) + logger.info("Generation complete, saving...") + + save_fn(pipeline_output, out_file=out_file) + + logger.info(f"Saved sample to {out_file}") + return pipeline_output + + +def run_upscale( + org_imgs: List[str], + pipeline: DiffusionPipeline, + prompt_map: Dict[int, str] = None, + n_prompt: str = ..., + seed: int = -1, + steps: int = 25, + strength: float = 0.5, + guidance_scale: float = 7.5, + clip_skip: int = 1, + us_width: int = 512, + us_height: int = 512, + idx: int = 0, + out_dir: PathLike = ..., + upscale_config:Dict[str, Any]=None, + use_controlnet_ref: bool = False, + use_controlnet_tile: bool = False, + use_controlnet_line_anime: bool = False, + use_controlnet_ip2p: bool = False, + no_frames:bool = False, + output_map: Dict[str,Any] = None, +): + from animatediff.utils.lpw_stable_diffusion import lpw_encode_prompt + + pipeline.set_progress_bar_config(disable=True) + + images = get_resized_images(org_imgs, us_width, us_height) + + steps = steps if "steps" not in upscale_config else 
upscale_config["steps"] + scheduler = scheduler if "scheduler" not in upscale_config else upscale_config["scheduler"] + guidance_scale = guidance_scale if "guidance_scale" not in upscale_config else upscale_config["guidance_scale"] + clip_skip = clip_skip if "clip_skip" not in upscale_config else upscale_config["clip_skip"] + strength = strength if "strength" not in upscale_config else upscale_config["strength"] + + controlnet_conditioning_scale = [] + guess_mode = [] + control_guidance_start = [] + control_guidance_end = [] + + # for controlnet tile + if use_controlnet_tile: + controlnet_conditioning_scale.append(upscale_config["controlnet_tile"]["controlnet_conditioning_scale"]) + guess_mode.append(upscale_config["controlnet_tile"]["guess_mode"]) + control_guidance_start.append(upscale_config["controlnet_tile"]["control_guidance_start"]) + control_guidance_end.append(upscale_config["controlnet_tile"]["control_guidance_end"]) + + # for controlnet line_anime + if use_controlnet_line_anime: + controlnet_conditioning_scale.append(upscale_config["controlnet_line_anime"]["controlnet_conditioning_scale"]) + guess_mode.append(upscale_config["controlnet_line_anime"]["guess_mode"]) + control_guidance_start.append(upscale_config["controlnet_line_anime"]["control_guidance_start"]) + control_guidance_end.append(upscale_config["controlnet_line_anime"]["control_guidance_end"]) + + # for controlnet ip2p + if use_controlnet_ip2p: + controlnet_conditioning_scale.append(upscale_config["controlnet_ip2p"]["controlnet_conditioning_scale"]) + guess_mode.append(upscale_config["controlnet_ip2p"]["guess_mode"]) + control_guidance_start.append(upscale_config["controlnet_ip2p"]["control_guidance_start"]) + control_guidance_end.append(upscale_config["controlnet_ip2p"]["control_guidance_end"]) + + # for controlnet ref + ref_image = None + if use_controlnet_ref: + if not upscale_config["controlnet_ref"]["use_frame_as_ref_image"] and not upscale_config["controlnet_ref"]["use_1st_frame_as_ref_image"]: + ref_image = get_resized_images([ data_dir.joinpath( upscale_config["controlnet_ref"]["ref_image"] ) ], us_width, us_height)[0] + + + generator = torch.manual_seed(seed) + + seed_everything(seed) + + prompt_embeds_map = {} + prompt_map = dict(sorted(prompt_map.items())) + negative = None + + do_classifier_free_guidance=guidance_scale > 1.0 + + prompt_list = [prompt_map[key_frame] for key_frame in prompt_map.keys()] + + prompt_embeds,neg_embeds = lpw_encode_prompt( + pipe=pipeline, + prompt=prompt_list, + do_classifier_free_guidance=do_classifier_free_guidance, + negative_prompt=n_prompt, + ) + + if do_classifier_free_guidance: + negative = neg_embeds.chunk(neg_embeds.shape[0], 0) + positive = prompt_embeds.chunk(prompt_embeds.shape[0], 0) + else: + negative = [None] + positive = prompt_embeds.chunk(prompt_embeds.shape[0], 0) + + for i, key_frame in enumerate(prompt_map): + prompt_embeds_map[key_frame] = positive[i] + + key_first =list(prompt_map.keys())[0] + key_last =list(prompt_map.keys())[-1] + + def get_current_prompt_embeds( + center_frame: int = 0, + video_length : int = 0 + ): + + key_prev = key_last + key_next = key_first + + for p in prompt_map.keys(): + if p > center_frame: + key_next = p + break + key_prev = p + + dist_prev = center_frame - key_prev + if dist_prev < 0: + dist_prev += video_length + dist_next = key_next - center_frame + if dist_next < 0: + dist_next += video_length + + if key_prev == key_next or dist_prev + dist_next == 0: + return prompt_embeds_map[key_prev] + + rate = dist_prev / (dist_prev + 
dist_next) + + return get_tensor_interpolation_method()(prompt_embeds_map[key_prev],prompt_embeds_map[key_next], rate) + + + line_anime_processor = LineartAnimeDetector.from_pretrained("lllyasviel/Annotators") + + + out_images=[] + + logger.info(f"{use_controlnet_tile=}") + logger.info(f"{use_controlnet_line_anime=}") + logger.info(f"{use_controlnet_ip2p=}") + + logger.info(f"{controlnet_conditioning_scale=}") + logger.info(f"{guess_mode=}") + logger.info(f"{control_guidance_start=}") + logger.info(f"{control_guidance_end=}") + + + for i, org_image in enumerate(tqdm(images, desc=f"Upscaling...")): + + cur_positive = get_current_prompt_embeds(i, len(images)) + +# logger.info(f"w {condition_image.size[0]}") +# logger.info(f"h {condition_image.size[1]}") + condition_image = [] + + if use_controlnet_tile: + condition_image.append( org_image ) + if use_controlnet_line_anime: + condition_image.append( line_anime_processor(org_image) ) + if use_controlnet_ip2p: + condition_image.append( org_image ) + + if not use_controlnet_ref: + out_image = pipeline( + prompt_embeds=cur_positive, + negative_prompt_embeds=negative[0], + image=org_image, + control_image=condition_image, + width=org_image.size[0], + height=org_image.size[1], + strength=strength, + num_inference_steps=steps, + guidance_scale=guidance_scale, + generator=generator, + + controlnet_conditioning_scale= controlnet_conditioning_scale if len(controlnet_conditioning_scale) > 1 else controlnet_conditioning_scale[0], + guess_mode= guess_mode[0], + control_guidance_start= control_guidance_start if len(control_guidance_start) > 1 else control_guidance_start[0], + control_guidance_end= control_guidance_end if len(control_guidance_end) > 1 else control_guidance_end[0], + + ).images[0] + else: + + if upscale_config["controlnet_ref"]["use_1st_frame_as_ref_image"]: + if i == 0: + ref_image = org_image + elif upscale_config["controlnet_ref"]["use_frame_as_ref_image"]: + ref_image = org_image + + out_image = pipeline( + prompt_embeds=cur_positive, + negative_prompt_embeds=negative[0], + image=org_image, + control_image=condition_image, + width=org_image.size[0], + height=org_image.size[1], + strength=strength, + num_inference_steps=steps, + guidance_scale=guidance_scale, + generator=generator, + + controlnet_conditioning_scale= controlnet_conditioning_scale if len(controlnet_conditioning_scale) > 1 else controlnet_conditioning_scale[0], + guess_mode= guess_mode[0], + # control_guidance_start= control_guidance_start, + # control_guidance_end= control_guidance_end, + + ### for controlnet ref + ref_image=ref_image, + attention_auto_machine_weight = upscale_config["controlnet_ref"]["attention_auto_machine_weight"], + gn_auto_machine_weight = upscale_config["controlnet_ref"]["gn_auto_machine_weight"], + style_fidelity = upscale_config["controlnet_ref"]["style_fidelity"], + reference_attn= upscale_config["controlnet_ref"]["reference_attn"], + reference_adain= upscale_config["controlnet_ref"]["reference_adain"], + + ).images[0] + + out_images.append(out_image) + + # Trim and clean up the prompt for filename use + prompt_tags = [re_clean_prompt.sub("", tag).strip().replace(" ", "-") for tag in prompt_map[list(prompt_map.keys())[0]].split(",")] + prompt_str = "_".join((prompt_tags[:6]))[:50] + + # generate the output filename and save the video + out_file = out_dir.joinpath(f"{idx:02d}_{seed}_{prompt_str}") + + frame_dir = out_dir.joinpath(f"{idx:02d}-{seed}-upscaled") + + save_output( out_images, frame_dir, out_file, output_map, no_frames, save_imgs, None ) + 
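+    # save_output() above writes the upscaled frames to frame_dir and encodes them
+    # according to output_map; e.g. a (hypothetical) {"format": "h264", "fps": 8}
+    # goes through the ffmpeg encoder, while "gif" is written directly via PIL.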
+ logger.info(f"Saved sample to {out_file}") + + return out_images diff --git a/src/animatediff/ip_adapter/__init__.py b/src/animatediff/ip_adapter/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b364c9f385d2c528f743b4ce69ef33ec179250a0 --- /dev/null +++ b/src/animatediff/ip_adapter/__init__.py @@ -0,0 +1,10 @@ +from .ip_adapter import (IPAdapter, IPAdapterFull, IPAdapterPlus, + IPAdapterPlusXL, IPAdapterXL) + +__all__ = [ + "IPAdapter", + "IPAdapterPlus", + "IPAdapterPlusXL", + "IPAdapterXL", + "IPAdapterFull", +] diff --git a/src/animatediff/ip_adapter/attention_processor.py b/src/animatediff/ip_adapter/attention_processor.py new file mode 100644 index 0000000000000000000000000000000000000000..4754be00e0e7cde21ff07fdc10199196ccf4812a --- /dev/null +++ b/src/animatediff/ip_adapter/attention_processor.py @@ -0,0 +1,390 @@ +# modified from https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class AttnProcessor(nn.Module): + r""" + Default processor for performing attention-related computations. + """ + def __init__( + self, + hidden_size=None, + cross_attention_dim=None, + ): + super().__init__() + + def __call__( + self, + attn, + hidden_states, + encoder_hidden_states=None, + attention_mask=None, + temb=None, + ): + residual = hidden_states + + if attn.spatial_norm is not None: + hidden_states = attn.spatial_norm(hidden_states, temb) + + input_ndim = hidden_states.ndim + + if input_ndim == 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) + + batch_size, sequence_length, _ = ( + hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + ) + attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size) + + if attn.group_norm is not None: + hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2) + + query = attn.to_q(hidden_states) + + if encoder_hidden_states is None: + encoder_hidden_states = hidden_states + elif attn.norm_cross: + encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states) + + key = attn.to_k(encoder_hidden_states) + value = attn.to_v(encoder_hidden_states) + + query = attn.head_to_batch_dim(query) + key = attn.head_to_batch_dim(key) + value = attn.head_to_batch_dim(value) + + attention_probs = attn.get_attention_scores(query, key, attention_mask) + hidden_states = torch.bmm(attention_probs, value) + hidden_states = attn.batch_to_head_dim(hidden_states) + + # linear proj + hidden_states = attn.to_out[0](hidden_states) + # dropout + hidden_states = attn.to_out[1](hidden_states) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + + if attn.residual_connection: + hidden_states = hidden_states + residual + + hidden_states = hidden_states / attn.rescale_output_factor + + return hidden_states + + +class IPAttnProcessor(nn.Module): + r""" + Attention processor for IP-Adapater. + Args: + hidden_size (`int`): + The hidden size of the attention layer. + cross_attention_dim (`int`): + The number of channels in the `encoder_hidden_states`. + text_context_len (`int`, defaults to 77): + The context length of the text features. + scale (`float`, defaults to 1.0): + the weight scale of image prompt. 
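+        Note:
+            `encoder_hidden_states` is expected to be the text tokens concatenated
+            with the image-prompt tokens along the sequence axis, i.e. roughly
+            `torch.cat([text_embeds, ip_tokens], dim=1)` with
+            `text_embeds.shape[1] == text_context_len`; the processor splits them
+            back apart at `text_context_len` in `__call__`.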
+ """ + + def __init__(self, hidden_size, cross_attention_dim=None, text_context_len=77, scale=1.0): + super().__init__() + + self.hidden_size = hidden_size + self.cross_attention_dim = cross_attention_dim + self.text_context_len = text_context_len + self.scale = scale + + self.to_k_ip = nn.Linear(cross_attention_dim or hidden_size, hidden_size, bias=False) + self.to_v_ip = nn.Linear(cross_attention_dim or hidden_size, hidden_size, bias=False) + + def __call__( + self, + attn, + hidden_states, + encoder_hidden_states=None, + attention_mask=None, + temb=None, + ): + residual = hidden_states + + if attn.spatial_norm is not None: + hidden_states = attn.spatial_norm(hidden_states, temb) + + input_ndim = hidden_states.ndim + + if input_ndim == 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) + + batch_size, sequence_length, _ = ( + hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + ) + attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size) + + if attn.group_norm is not None: + hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2) + + query = attn.to_q(hidden_states) + + if encoder_hidden_states is None: + encoder_hidden_states = hidden_states + elif attn.norm_cross: + encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states) + + # split hidden states + encoder_hidden_states, ip_hidden_states = encoder_hidden_states[:, :self.text_context_len, :], encoder_hidden_states[:, self.text_context_len:, :] + + key = attn.to_k(encoder_hidden_states) + value = attn.to_v(encoder_hidden_states) + + query = attn.head_to_batch_dim(query) + key = attn.head_to_batch_dim(key) + value = attn.head_to_batch_dim(value) + + attention_probs = attn.get_attention_scores(query, key, attention_mask) + hidden_states = torch.bmm(attention_probs, value) + hidden_states = attn.batch_to_head_dim(hidden_states) + + # for ip-adapter + ip_key = self.to_k_ip(ip_hidden_states) + ip_value = self.to_v_ip(ip_hidden_states) + + ip_key = attn.head_to_batch_dim(ip_key) + ip_value = attn.head_to_batch_dim(ip_value) + + ip_attention_probs = attn.get_attention_scores(query, ip_key, None) + ip_hidden_states = torch.bmm(ip_attention_probs, ip_value) + ip_hidden_states = attn.batch_to_head_dim(ip_hidden_states) + + hidden_states = hidden_states + self.scale * ip_hidden_states + + # linear proj + hidden_states = attn.to_out[0](hidden_states) + # dropout + hidden_states = attn.to_out[1](hidden_states) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + + if attn.residual_connection: + hidden_states = hidden_states + residual + + hidden_states = hidden_states / attn.rescale_output_factor + + return hidden_states + + +class AttnProcessor2_0(torch.nn.Module): + r""" + Processor for implementing scaled dot-product attention (enabled by default if you're using PyTorch 2.0). 
+ """ + def __init__( + self, + hidden_size=None, + cross_attention_dim=None, + ): + super().__init__() + if not hasattr(F, "scaled_dot_product_attention"): + raise ImportError("AttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.") + + def __call__( + self, + attn, + hidden_states, + encoder_hidden_states=None, + attention_mask=None, + temb=None, + ): + residual = hidden_states + + if attn.spatial_norm is not None: + hidden_states = attn.spatial_norm(hidden_states, temb) + + input_ndim = hidden_states.ndim + + if input_ndim == 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) + + batch_size, sequence_length, _ = ( + hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + ) + + if attention_mask is not None: + attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size) + # scaled_dot_product_attention expects attention_mask shape to be + # (batch, heads, source_length, target_length) + attention_mask = attention_mask.view(batch_size, attn.heads, -1, attention_mask.shape[-1]) + + if attn.group_norm is not None: + hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2) + + query = attn.to_q(hidden_states) + + if encoder_hidden_states is None: + encoder_hidden_states = hidden_states + elif attn.norm_cross: + encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states) + + key = attn.to_k(encoder_hidden_states) + value = attn.to_v(encoder_hidden_states) + + inner_dim = key.shape[-1] + head_dim = inner_dim // attn.heads + + query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + # the output of sdp = (batch, num_heads, seq_len, head_dim) + # TODO: add support for attn.scale when we move to Torch 2.1 + hidden_states = F.scaled_dot_product_attention( + query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False + ) + + hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) + hidden_states = hidden_states.to(query.dtype) + + # linear proj + hidden_states = attn.to_out[0](hidden_states) + # dropout + hidden_states = attn.to_out[1](hidden_states) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + + if attn.residual_connection: + hidden_states = hidden_states + residual + + hidden_states = hidden_states / attn.rescale_output_factor + + return hidden_states + + +class IPAttnProcessor2_0(torch.nn.Module): + r""" + Attention processor for IP-Adapater for PyTorch 2.0. + Args: + hidden_size (`int`): + The hidden size of the attention layer. + cross_attention_dim (`int`): + The number of channels in the `encoder_hidden_states`. + text_context_len (`int`, defaults to 77): + The context length of the text features. + scale (`float`, defaults to 1.0): + the weight scale of image prompt. 
+ """ + + def __init__(self, hidden_size, cross_attention_dim=None, text_context_len=77, scale=1.0): + super().__init__() + + if not hasattr(F, "scaled_dot_product_attention"): + raise ImportError("AttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.") + + self.hidden_size = hidden_size + self.cross_attention_dim = cross_attention_dim + self.text_context_len = text_context_len + self.scale = scale + + self.to_k_ip = nn.Linear(cross_attention_dim or hidden_size, hidden_size, bias=False) + self.to_v_ip = nn.Linear(cross_attention_dim or hidden_size, hidden_size, bias=False) + + def __call__( + self, + attn, + hidden_states, + encoder_hidden_states=None, + attention_mask=None, + temb=None, + ): + residual = hidden_states + + if attn.spatial_norm is not None: + hidden_states = attn.spatial_norm(hidden_states, temb) + + input_ndim = hidden_states.ndim + + if input_ndim == 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) + + batch_size, sequence_length, _ = ( + hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + ) + + if attention_mask is not None: + attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size) + # scaled_dot_product_attention expects attention_mask shape to be + # (batch, heads, source_length, target_length) + attention_mask = attention_mask.view(batch_size, attn.heads, -1, attention_mask.shape[-1]) + + if attn.group_norm is not None: + hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2) + + query = attn.to_q(hidden_states) + + if encoder_hidden_states is None: + encoder_hidden_states = hidden_states + elif attn.norm_cross: + encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states) + + # split hidden states + encoder_hidden_states, ip_hidden_states = encoder_hidden_states[:, :self.text_context_len, :], encoder_hidden_states[:, self.text_context_len:, :] + + key = attn.to_k(encoder_hidden_states) + value = attn.to_v(encoder_hidden_states) + + inner_dim = key.shape[-1] + head_dim = inner_dim // attn.heads + + query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + # the output of sdp = (batch, num_heads, seq_len, head_dim) + # TODO: add support for attn.scale when we move to Torch 2.1 + hidden_states = F.scaled_dot_product_attention( + query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False + ) + + hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) + hidden_states = hidden_states.to(query.dtype) + + # for ip-adapter + ip_key = self.to_k_ip(ip_hidden_states) + ip_value = self.to_v_ip(ip_hidden_states) + + ip_key = ip_key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + ip_value = ip_value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + # the output of sdp = (batch, num_heads, seq_len, head_dim) + # TODO: add support for attn.scale when we move to Torch 2.1 + ip_hidden_states = F.scaled_dot_product_attention( + query, ip_key, ip_value, attn_mask=None, dropout_p=0.0, is_causal=False + ) + + ip_hidden_states = ip_hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) + ip_hidden_states = ip_hidden_states.to(query.dtype) + + hidden_states = hidden_states + self.scale * 
ip_hidden_states + + # linear proj + hidden_states = attn.to_out[0](hidden_states) + # dropout + hidden_states = attn.to_out[1](hidden_states) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + + if attn.residual_connection: + hidden_states = hidden_states + residual + + hidden_states = hidden_states / attn.rescale_output_factor + + return hidden_states diff --git a/src/animatediff/ip_adapter/ip_adapter.py b/src/animatediff/ip_adapter/ip_adapter.py new file mode 100644 index 0000000000000000000000000000000000000000..c63d578c52fde9b64f471105fa191b3cb9632f6a --- /dev/null +++ b/src/animatediff/ip_adapter/ip_adapter.py @@ -0,0 +1,389 @@ +import os +from typing import List + +import torch +from diffusers import StableDiffusionPipeline +from PIL import Image +from safetensors import safe_open +from transformers import CLIPImageProcessor, CLIPVisionModelWithProjection + +from .utils import is_torch2_available + +if is_torch2_available(): + from .attention_processor import AttnProcessor2_0 as AttnProcessor + from .attention_processor import IPAttnProcessor2_0 as IPAttnProcessor +else: + from .attention_processor import IPAttnProcessor, AttnProcessor + +import logging + +from .resampler import Resampler + +logger = logging.getLogger(__name__) + +class ImageProjModel(torch.nn.Module): + """Projection Model""" + def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024, clip_extra_context_tokens=4): + super().__init__() + + self.cross_attention_dim = cross_attention_dim + self.clip_extra_context_tokens = clip_extra_context_tokens + self.proj = torch.nn.Linear(clip_embeddings_dim, self.clip_extra_context_tokens * cross_attention_dim) + self.norm = torch.nn.LayerNorm(cross_attention_dim) + + def forward(self, image_embeds): + embeds = image_embeds + clip_extra_context_tokens = self.proj(embeds).reshape(-1, self.clip_extra_context_tokens, self.cross_attention_dim) + clip_extra_context_tokens = self.norm(clip_extra_context_tokens) + return clip_extra_context_tokens + + +class MLPProjModel(torch.nn.Module): + """SD model with image prompt""" + def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024): + super().__init__() + + self.proj = torch.nn.Sequential( + torch.nn.Linear(clip_embeddings_dim, clip_embeddings_dim), + torch.nn.GELU(), + torch.nn.Linear(clip_embeddings_dim, cross_attention_dim), + torch.nn.LayerNorm(cross_attention_dim) + ) + + def forward(self, image_embeds): + clip_extra_context_tokens = self.proj(image_embeds) + return clip_extra_context_tokens + + +class IPAdapter: + + def __init__(self, sd_pipe, image_encoder_path, ip_ckpt, device, num_tokens=4): + + self.device = device + self.image_encoder_path = image_encoder_path + self.ip_ckpt = ip_ckpt + self.num_tokens = num_tokens + + self.pipe = sd_pipe + self.set_ip_adapter() + + # load image encoder + self.image_encoder = CLIPVisionModelWithProjection.from_pretrained(self.image_encoder_path).to(self.device, dtype=torch.float16) + self.clip_image_processor = CLIPImageProcessor() + # image proj model + self.image_proj_model = self.init_proj() + + self.load_ip_adapter() + + def init_proj(self): + image_proj_model = ImageProjModel( + cross_attention_dim=self.pipe.unet.config.cross_attention_dim, + clip_embeddings_dim=self.image_encoder.config.projection_dim, + clip_extra_context_tokens=self.num_tokens, + ).to(self.device, dtype=torch.float16) + return image_proj_model + + def set_ip_adapter(self): + unet = self.pipe.unet + attn_procs = {} + for name 
in unet.attn_processors.keys(): + cross_attention_dim = None if name.endswith("attn1.processor") else unet.config.cross_attention_dim + if name.startswith("mid_block"): + hidden_size = unet.config.block_out_channels[-1] + elif name.startswith("up_blocks"): + block_id = int(name[len("up_blocks.")]) + hidden_size = list(reversed(unet.config.block_out_channels))[block_id] + elif name.startswith("down_blocks"): + block_id = int(name[len("down_blocks.")]) + hidden_size = unet.config.block_out_channels[block_id] + if cross_attention_dim is None: + attn_procs[name] = AttnProcessor() + else: + attn_procs[name] = IPAttnProcessor(hidden_size=hidden_size, cross_attention_dim=cross_attention_dim, + scale=1.0).to(self.device, dtype=torch.float16) + unet.set_attn_processor(attn_procs) + + + def load_ip_adapter(self): + if os.path.splitext(self.ip_ckpt)[-1] == ".safetensors": + state_dict = {"image_proj": {}, "ip_adapter": {}} + with safe_open(self.ip_ckpt, framework="pt", device="cpu") as f: + for key in f.keys(): + if key.startswith("image_proj."): + state_dict["image_proj"][key.replace("image_proj.", "")] = f.get_tensor(key) + elif key.startswith("ip_adapter."): + state_dict["ip_adapter"][key.replace("ip_adapter.", "")] = f.get_tensor(key) + else: + state_dict = torch.load(self.ip_ckpt, map_location="cpu") + self.image_proj_model.load_state_dict(state_dict["image_proj"]) + ip_layers = torch.nn.ModuleList(self.pipe.unet.attn_processors.values()) + ip_layers.load_state_dict(state_dict["ip_adapter"]) + + @torch.inference_mode() + def get_image_embeds(self, pil_image): + if isinstance(pil_image, Image.Image): + pil_image = [pil_image] + clip_image = self.clip_image_processor(images=pil_image, return_tensors="pt").pixel_values + clip_image_embeds = self.image_encoder(clip_image.to(self.device, dtype=torch.float16)).image_embeds + image_prompt_embeds = self.image_proj_model(clip_image_embeds) + uncond_image_prompt_embeds = self.image_proj_model(torch.zeros_like(clip_image_embeds)) + return image_prompt_embeds, uncond_image_prompt_embeds + + def set_scale(self, scale): + for attn_processor in self.pipe.unet.attn_processors.values(): + if isinstance(attn_processor, IPAttnProcessor): + attn_processor.scale = scale + + def set_text_length(self, text_length): + for attn_processor in self.pipe.unet.attn_processors.values(): + if isinstance(attn_processor, IPAttnProcessor): + attn_processor.text_context_len = text_length + + def unload(self): + unet = self.pipe.unet + attn_procs = {} + for name in unet.attn_processors.keys(): + attn_procs[name] = AttnProcessor() + unet.set_attn_processor(attn_procs) + + def delete_encoder(self): + del self.image_encoder + del self.clip_image_processor + del self.image_proj_model + torch.cuda.empty_cache() + + def generate( + self, + pil_image, + prompt=None, + negative_prompt=None, + scale=1.0, + num_samples=4, + seed=-1, + guidance_scale=7.5, + num_inference_steps=30, + **kwargs, + ): + self.set_scale(scale) + + if isinstance(pil_image, Image.Image): + num_prompts = 1 + else: + num_prompts = len(pil_image) + + if prompt is None: + prompt = "best quality, high quality" + if negative_prompt is None: + negative_prompt = "monochrome, lowres, bad anatomy, worst quality, low quality" + + if not isinstance(prompt, List): + prompt = [prompt] * num_prompts + if not isinstance(negative_prompt, List): + negative_prompt = [negative_prompt] * num_prompts + + image_prompt_embeds, uncond_image_prompt_embeds = self.get_image_embeds(pil_image) + bs_embed, seq_len, _ = image_prompt_embeds.shape + 
image_prompt_embeds = image_prompt_embeds.repeat(1, num_samples, 1) + image_prompt_embeds = image_prompt_embeds.view(bs_embed * num_samples, seq_len, -1) + uncond_image_prompt_embeds = uncond_image_prompt_embeds.repeat(1, num_samples, 1) + uncond_image_prompt_embeds = uncond_image_prompt_embeds.view(bs_embed * num_samples, seq_len, -1) + + with torch.inference_mode(): + prompt_embeds = self.pipe._encode_prompt( + prompt, device=self.device, num_images_per_prompt=num_samples, do_classifier_free_guidance=True, negative_prompt=negative_prompt) + negative_prompt_embeds_, prompt_embeds_ = prompt_embeds.chunk(2) + prompt_embeds = torch.cat([prompt_embeds_, image_prompt_embeds], dim=1) + negative_prompt_embeds = torch.cat([negative_prompt_embeds_, uncond_image_prompt_embeds], dim=1) + + generator = torch.Generator(self.device).manual_seed(seed) if seed is not None else None + images = self.pipe( + prompt_embeds=prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + guidance_scale=guidance_scale, + num_inference_steps=num_inference_steps, + generator=generator, + **kwargs, + ).images + + return images + + +class IPAdapterXL(IPAdapter): + """SDXL""" + + def generate( + self, + pil_image, + prompt=None, + negative_prompt=None, + scale=1.0, + num_samples=4, + seed=-1, + num_inference_steps=30, + **kwargs, + ): + self.set_scale(scale) + + if isinstance(pil_image, Image.Image): + num_prompts = 1 + else: + num_prompts = len(pil_image) + + if prompt is None: + prompt = "best quality, high quality" + if negative_prompt is None: + negative_prompt = "monochrome, lowres, bad anatomy, worst quality, low quality" + + if not isinstance(prompt, List): + prompt = [prompt] * num_prompts + if not isinstance(negative_prompt, List): + negative_prompt = [negative_prompt] * num_prompts + + image_prompt_embeds, uncond_image_prompt_embeds = self.get_image_embeds(pil_image) + bs_embed, seq_len, _ = image_prompt_embeds.shape + image_prompt_embeds = image_prompt_embeds.repeat(1, num_samples, 1) + image_prompt_embeds = image_prompt_embeds.view(bs_embed * num_samples, seq_len, -1) + uncond_image_prompt_embeds = uncond_image_prompt_embeds.repeat(1, num_samples, 1) + uncond_image_prompt_embeds = uncond_image_prompt_embeds.view(bs_embed * num_samples, seq_len, -1) + + with torch.inference_mode(): + prompt_embeds, negative_prompt_embeds, pooled_prompt_embeds, negative_pooled_prompt_embeds = self.pipe.encode_prompt( + prompt, num_images_per_prompt=num_samples, do_classifier_free_guidance=True, negative_prompt=negative_prompt) + prompt_embeds = torch.cat([prompt_embeds, image_prompt_embeds], dim=1) + negative_prompt_embeds = torch.cat([negative_prompt_embeds, uncond_image_prompt_embeds], dim=1) + + generator = torch.Generator(self.device).manual_seed(seed) if seed is not None else None + images = self.pipe( + prompt_embeds=prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + num_inference_steps=num_inference_steps, + generator=generator, + **kwargs, + ).images + + return images + + +class IPAdapterPlus(IPAdapter): + """IP-Adapter with fine-grained features""" + + def init_proj(self): + image_proj_model = Resampler( + dim=self.pipe.unet.config.cross_attention_dim, + depth=4, + dim_head=64, + heads=12, + num_queries=self.num_tokens, + embedding_dim=self.image_encoder.config.hidden_size, + output_dim=self.pipe.unet.config.cross_attention_dim, + ff_mult=4 + ).to(self.device, dtype=torch.float16) + return 
image_proj_model + + @torch.inference_mode() + def get_image_embeds(self, pil_image): + if isinstance(pil_image, Image.Image): + pil_image = [pil_image] + clip_image = self.clip_image_processor(images=pil_image, return_tensors="pt").pixel_values + clip_image = clip_image.to(self.device, dtype=torch.float16) + clip_image_embeds = self.image_encoder(clip_image, output_hidden_states=True).hidden_states[-2] + image_prompt_embeds = self.image_proj_model(clip_image_embeds) + uncond_clip_image_embeds = self.image_encoder(torch.zeros_like(clip_image), output_hidden_states=True).hidden_states[-2] + uncond_image_prompt_embeds = self.image_proj_model(uncond_clip_image_embeds) + return image_prompt_embeds, uncond_image_prompt_embeds + + +class IPAdapterFull(IPAdapterPlus): + """IP-Adapter with full features""" + + def init_proj(self): + image_proj_model = MLPProjModel( + cross_attention_dim=self.pipe.unet.config.cross_attention_dim, + clip_embeddings_dim=self.image_encoder.config.hidden_size, + ).to(self.device, dtype=torch.float16) + return image_proj_model + + +class IPAdapterPlusXL(IPAdapter): + """SDXL""" + + def init_proj(self): + image_proj_model = Resampler( + dim=1280, + depth=4, + dim_head=64, + heads=20, + num_queries=self.num_tokens, + embedding_dim=self.image_encoder.config.hidden_size, + output_dim=self.pipe.unet.config.cross_attention_dim, + ff_mult=4 + ).to(self.device, dtype=torch.float16) + return image_proj_model + + @torch.inference_mode() + def get_image_embeds(self, pil_image): + if isinstance(pil_image, Image.Image): + pil_image = [pil_image] + clip_image = self.clip_image_processor(images=pil_image, return_tensors="pt").pixel_values + clip_image = clip_image.to(self.device, dtype=torch.float16) + clip_image_embeds = self.image_encoder(clip_image, output_hidden_states=True).hidden_states[-2] + image_prompt_embeds = self.image_proj_model(clip_image_embeds) + uncond_clip_image_embeds = self.image_encoder(torch.zeros_like(clip_image), output_hidden_states=True).hidden_states[-2] + uncond_image_prompt_embeds = self.image_proj_model(uncond_clip_image_embeds) + return image_prompt_embeds, uncond_image_prompt_embeds + + def generate( + self, + pil_image, + prompt=None, + negative_prompt=None, + scale=1.0, + num_samples=4, + seed=-1, + num_inference_steps=30, + **kwargs, + ): + self.set_scale(scale) + + if isinstance(pil_image, Image.Image): + num_prompts = 1 + else: + num_prompts = len(pil_image) + + if prompt is None: + prompt = "best quality, high quality" + if negative_prompt is None: + negative_prompt = "monochrome, lowres, bad anatomy, worst quality, low quality" + + if not isinstance(prompt, List): + prompt = [prompt] * num_prompts + if not isinstance(negative_prompt, List): + negative_prompt = [negative_prompt] * num_prompts + + image_prompt_embeds, uncond_image_prompt_embeds = self.get_image_embeds(pil_image) + bs_embed, seq_len, _ = image_prompt_embeds.shape + image_prompt_embeds = image_prompt_embeds.repeat(1, num_samples, 1) + image_prompt_embeds = image_prompt_embeds.view(bs_embed * num_samples, seq_len, -1) + uncond_image_prompt_embeds = uncond_image_prompt_embeds.repeat(1, num_samples, 1) + uncond_image_prompt_embeds = uncond_image_prompt_embeds.view(bs_embed * num_samples, seq_len, -1) + + with torch.inference_mode(): + prompt_embeds, negative_prompt_embeds, pooled_prompt_embeds, negative_pooled_prompt_embeds = self.pipe.encode_prompt( + prompt, num_images_per_prompt=num_samples, do_classifier_free_guidance=True, negative_prompt=negative_prompt) + prompt_embeds = 
torch.cat([prompt_embeds, image_prompt_embeds], dim=1) + negative_prompt_embeds = torch.cat([negative_prompt_embeds, uncond_image_prompt_embeds], dim=1) + + generator = torch.Generator(self.device).manual_seed(seed) if seed is not None else None + images = self.pipe( + prompt_embeds=prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + num_inference_steps=num_inference_steps, + generator=generator, + **kwargs, + ).images + + return images \ No newline at end of file diff --git a/src/animatediff/ip_adapter/resampler.py b/src/animatediff/ip_adapter/resampler.py new file mode 100644 index 0000000000000000000000000000000000000000..509969819384a8998d37f38894d3161b72e3aa1d --- /dev/null +++ b/src/animatediff/ip_adapter/resampler.py @@ -0,0 +1,158 @@ +# modified from https://github.com/mlfoundations/open_flamingo/blob/main/open_flamingo/src/helpers.py +# and https://github.com/lucidrains/imagen-pytorch/blob/main/imagen_pytorch/imagen_pytorch.py +import math + +import torch +import torch.nn as nn +from einops import rearrange +from einops.layers.torch import Rearrange + + +# FFN +def FeedForward(dim, mult=4): + inner_dim = int(dim * mult) + return nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, inner_dim, bias=False), + nn.GELU(), + nn.Linear(inner_dim, dim, bias=False), + ) + + +def reshape_tensor(x, heads): + bs, length, width = x.shape + #(bs, length, width) --> (bs, length, n_heads, dim_per_head) + x = x.view(bs, length, heads, -1) + # (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head) + x = x.transpose(1, 2) + # (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head) + x = x.reshape(bs, heads, length, -1) + return x + + +class PerceiverAttention(nn.Module): + def __init__(self, *, dim, dim_head=64, heads=8): + super().__init__() + self.scale = dim_head**-0.5 + self.dim_head = dim_head + self.heads = heads + inner_dim = dim_head * heads + + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False) + self.to_out = nn.Linear(inner_dim, dim, bias=False) + + + def forward(self, x, latents): + """ + Args: + x (torch.Tensor): image features + shape (b, n1, D) + latent (torch.Tensor): latent features + shape (b, n2, D) + """ + x = self.norm1(x) + latents = self.norm2(latents) + + b, l, _ = latents.shape + + q = self.to_q(latents) + kv_input = torch.cat((x, latents), dim=-2) + k, v = self.to_kv(kv_input).chunk(2, dim=-1) + + q = reshape_tensor(q, self.heads) + k = reshape_tensor(k, self.heads) + v = reshape_tensor(v, self.heads) + + # attention + scale = 1 / math.sqrt(math.sqrt(self.dim_head)) + weight = (q * scale) @ (k * scale).transpose(-2, -1) # More stable with f16 than dividing afterwards + weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype) + out = weight @ v + + out = out.permute(0, 2, 1, 3).reshape(b, l, -1) + + return self.to_out(out) + + +class Resampler(nn.Module): + def __init__( + self, + dim=1024, + depth=8, + dim_head=64, + heads=16, + num_queries=8, + embedding_dim=768, + output_dim=1024, + ff_mult=4, + max_seq_len: int = 257, # CLIP tokens + CLS token + apply_pos_emb: bool = False, + num_latents_mean_pooled: int = 0, # number of latents derived from mean pooled representation of the sequence + ): + super().__init__() + self.pos_emb = nn.Embedding(max_seq_len, embedding_dim) if 
apply_pos_emb else None + + self.latents = nn.Parameter(torch.randn(1, num_queries, dim) / dim**0.5) + + self.proj_in = nn.Linear(embedding_dim, dim) + + self.proj_out = nn.Linear(dim, output_dim) + self.norm_out = nn.LayerNorm(output_dim) + + self.to_latents_from_mean_pooled_seq = ( + nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, dim * num_latents_mean_pooled), + Rearrange("b (n d) -> b n d", n=num_latents_mean_pooled), + ) + if num_latents_mean_pooled > 0 + else None + ) + + self.layers = nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + nn.ModuleList( + [ + PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ] + ) + ) + + def forward(self, x): + if self.pos_emb is not None: + n, device = x.shape[1], x.device + pos_emb = self.pos_emb(torch.arange(n, device=device)) + x = x + pos_emb + + latents = self.latents.repeat(x.size(0), 1, 1) + + x = self.proj_in(x) + + if self.to_latents_from_mean_pooled_seq: + meanpooled_seq = masked_mean(x, dim=1, mask=torch.ones(x.shape[:2], device=x.device, dtype=torch.bool)) + meanpooled_latents = self.to_latents_from_mean_pooled_seq(meanpooled_seq) + latents = torch.cat((meanpooled_latents, latents), dim=-2) + + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + + latents = self.proj_out(latents) + return self.norm_out(latents) + + +def masked_mean(t, *, dim, mask=None): + if mask is None: + return t.mean(dim=dim) + + denom = mask.sum(dim=dim, keepdim=True) + mask = rearrange(mask, "b n -> b n 1") + masked_t = t.masked_fill(~mask, 0.0) + + return masked_t.sum(dim=dim) / denom.clamp(min=1e-5) diff --git a/src/animatediff/ip_adapter/utils.py b/src/animatediff/ip_adapter/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..cf02e561680dd62d53c9bab3e0171c7eb7702b07 --- /dev/null +++ b/src/animatediff/ip_adapter/utils.py @@ -0,0 +1,367 @@ +import inspect +import warnings +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +import numpy as np +import PIL.Image +import torch +import torch.nn.functional as F +from diffusers.models import ControlNetModel +from diffusers.pipelines.controlnet.multicontrolnet import MultiControlNetModel +from diffusers.pipelines.stable_diffusion import StableDiffusionPipelineOutput +from diffusers.utils.torch_utils import is_compiled_module + + +def is_torch2_available(): + return hasattr(F, "scaled_dot_product_attention") + + +@torch.no_grad() +def generate( + self, + prompt: Union[str, List[str]] = None, + image: Union[ + torch.FloatTensor, + PIL.Image.Image, + np.ndarray, + List[torch.FloatTensor], + List[PIL.Image.Image], + List[np.ndarray], + ] = None, + height: Optional[int] = None, + width: Optional[int] = None, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + negative_prompt: Optional[Union[str, List[str]]] = None, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, + generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None, + latents: Optional[torch.FloatTensor] = None, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + output_type: Optional[str] = "pil", + return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: int = 1, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + controlnet_conditioning_scale: Union[float, List[float]] = 1.0, + guess_mode: bool = False, + 
control_guidance_start: Union[float, List[float]] = 0.0,
+    control_guidance_end: Union[float, List[float]] = 1.0,
+):
+    r"""
+    Function invoked when calling the pipeline for generation.
+
+    Args:
+        prompt (`str` or `List[str]`, *optional*):
+            The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`
+            instead.
+        image (`torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, `List[torch.FloatTensor]`, `List[PIL.Image.Image]`, `List[np.ndarray]`,
+            `List[List[torch.FloatTensor]]`, `List[List[np.ndarray]]` or `List[List[PIL.Image.Image]]`):
+            The ControlNet input condition. ControlNet uses this input condition to generate guidance for the
+            UNet. If the type is specified as `torch.FloatTensor`, it is passed to ControlNet as is.
+            `PIL.Image.Image` can also be accepted as an image. The dimensions of the output image default to
+            `image`'s dimensions. If height and/or width are passed, `image` is resized according to them. If
+            multiple ControlNets are specified in init, images must be passed as a list such that each element
+            of the list can be correctly batched for input to a single controlnet.
+        height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
+            The height in pixels of the generated image.
+        width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
+            The width in pixels of the generated image.
+        num_inference_steps (`int`, *optional*, defaults to 50):
+            The number of denoising steps. More denoising steps usually lead to a higher quality image at the
+            expense of slower inference.
+        guidance_scale (`float`, *optional*, defaults to 7.5):
+            Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
+            `guidance_scale` is defined as `w` of equation 2. of [Imagen
+            Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
+            1`. A higher guidance scale encourages generating images that are closely linked to the text `prompt`,
+            usually at the expense of lower image quality.
+        negative_prompt (`str` or `List[str]`, *optional*):
+            The prompt or prompts not to guide the image generation. If not defined, one has to pass
+            `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is
+            less than `1`).
+        num_images_per_prompt (`int`, *optional*, defaults to 1):
+            The number of images to generate per prompt.
+        eta (`float`, *optional*, defaults to 0.0):
+            Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
+            [`schedulers.DDIMScheduler`], will be ignored for others.
+        generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
+            One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
+            to make generation deterministic.
+        latents (`torch.FloatTensor`, *optional*):
+            Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image
+            generation. Can be used to tweak the same generation with different prompts. If not provided, a latents
+            tensor will be generated by sampling using the supplied random `generator`.
+        prompt_embeds (`torch.FloatTensor`, *optional*):
+            Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
+            provided, text embeddings will be generated from `prompt` input argument.
+        negative_prompt_embeds (`torch.FloatTensor`, *optional*):
+            Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+            weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
+            argument.
+        output_type (`str`, *optional*, defaults to `"pil"`):
+            The output format of the generated image. Choose between
+            [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
+        return_dict (`bool`, *optional*, defaults to `True`):
+            Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a
+            plain tuple.
+        callback (`Callable`, *optional*):
+            A function that will be called every `callback_steps` steps during inference. The function will be
+            called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.
+        callback_steps (`int`, *optional*, defaults to 1):
+            The frequency at which the `callback` function will be called. If not specified, the callback will be
+            called at every step.
+        cross_attention_kwargs (`dict`, *optional*):
+            A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
+            `self.processor` in
+            [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
+        controlnet_conditioning_scale (`float` or `List[float]`, *optional*, defaults to 1.0):
+            The outputs of the controlnet are multiplied by `controlnet_conditioning_scale` before they are added
+            to the residual in the original unet. If multiple ControlNets are specified in init, you can set the
+            corresponding scale as a list.
+        guess_mode (`bool`, *optional*, defaults to `False`):
+            In this mode, the ControlNet encoder will try its best to recognize the content of the input image
+            even if you remove all prompts. A `guidance_scale` between 3.0 and 5.0 is recommended.
+        control_guidance_start (`float` or `List[float]`, *optional*, defaults to 0.0):
+            The percentage of total steps at which the controlnet starts applying.
+        control_guidance_end (`float` or `List[float]`, *optional*, defaults to 1.0):
+            The percentage of total steps at which the controlnet stops applying.
+
+    Examples:
+
+    Returns:
+        [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:
+            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a
+            `tuple`. When returning a tuple, the first element is a list with the generated images, and the second
+            element is a list of `bool`s denoting whether the corresponding generated image likely represents
+            "not-safe-for-work" (nsfw) content, according to the `safety_checker`.
+    """
+    controlnet = self.controlnet._orig_mod if is_compiled_module(self.controlnet) else self.controlnet
+
+    # align format for control guidance
+    if not isinstance(control_guidance_start, list) and isinstance(control_guidance_end, list):
+        control_guidance_start = len(control_guidance_end) * [control_guidance_start]
+    elif not isinstance(control_guidance_end, list) and isinstance(control_guidance_start, list):
+        control_guidance_end = len(control_guidance_start) * [control_guidance_end]
+    elif not isinstance(control_guidance_start, list) and not isinstance(control_guidance_end, list):
+        mult = len(controlnet.nets) if isinstance(controlnet, MultiControlNetModel) else 1
+        control_guidance_start, control_guidance_end = mult * [control_guidance_start], mult * [
+            control_guidance_end
+        ]
+
+    # 1. 
Check inputs. Raise error if not correct + self.check_inputs( + prompt, + image, + callback_steps, + negative_prompt, + prompt_embeds, + negative_prompt_embeds, + controlnet_conditioning_scale, + control_guidance_start, + control_guidance_end, + ) + + # 2. Define call parameters + if prompt is not None and isinstance(prompt, str): + batch_size = 1 + elif prompt is not None and isinstance(prompt, list): + batch_size = len(prompt) + else: + batch_size = prompt_embeds.shape[0] + + device = self._execution_device + # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) + # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` + # corresponds to doing no classifier free guidance. + do_classifier_free_guidance = guidance_scale > 1.0 + + if isinstance(controlnet, MultiControlNetModel) and isinstance(controlnet_conditioning_scale, float): + controlnet_conditioning_scale = [controlnet_conditioning_scale] * len(controlnet.nets) + + global_pool_conditions = ( + controlnet.config.global_pool_conditions + if isinstance(controlnet, ControlNetModel) + else controlnet.nets[0].config.global_pool_conditions + ) + guess_mode = guess_mode or global_pool_conditions + + # 3. Encode input prompt + text_encoder_lora_scale = ( + cross_attention_kwargs.get("scale", None) if cross_attention_kwargs is not None else None + ) + prompt_embeds = self._encode_prompt( + prompt, + device, + num_images_per_prompt, + do_classifier_free_guidance, + negative_prompt, + prompt_embeds=prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + lora_scale=text_encoder_lora_scale, + ) + + # 4. Prepare image + if isinstance(controlnet, ControlNetModel): + image = self.prepare_image( + image=image, + width=width, + height=height, + batch_size=batch_size * num_images_per_prompt, + num_images_per_prompt=num_images_per_prompt, + device=device, + dtype=controlnet.dtype, + do_classifier_free_guidance=do_classifier_free_guidance, + guess_mode=guess_mode, + ) + height, width = image.shape[-2:] + elif isinstance(controlnet, MultiControlNetModel): + images = [] + + for image_ in image: + image_ = self.prepare_image( + image=image_, + width=width, + height=height, + batch_size=batch_size * num_images_per_prompt, + num_images_per_prompt=num_images_per_prompt, + device=device, + dtype=controlnet.dtype, + do_classifier_free_guidance=do_classifier_free_guidance, + guess_mode=guess_mode, + ) + + images.append(image_) + + image = images + height, width = image[0].shape[-2:] + else: + assert False + + # 5. Prepare timesteps + self.scheduler.set_timesteps(num_inference_steps, device=device) + timesteps = self.scheduler.timesteps + + # 6. Prepare latent variables + num_channels_latents = self.unet.config.in_channels + latents = self.prepare_latents( + batch_size * num_images_per_prompt, + num_channels_latents, + height, + width, + prompt_embeds.dtype, + device, + generator, + latents, + ) + + # 7. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline + extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) + + # 7.1 Create tensor stating which controlnets to keep + controlnet_keep = [] + for i in range(len(timesteps)): + keeps = [ + 1.0 - float(i / len(timesteps) < s or (i + 1) / len(timesteps) > e) + for s, e in zip(control_guidance_start, control_guidance_end) + ] + controlnet_keep.append(keeps[0] if isinstance(controlnet, ControlNetModel) else keeps) + + # 8. 
Denoising loop
+    num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order
+    with self.progress_bar(total=num_inference_steps) as progress_bar:
+        for i, t in enumerate(timesteps):
+            # expand the latents if we are doing classifier free guidance
+            latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents
+            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)
+
+            # controlnet(s) inference
+            if guess_mode and do_classifier_free_guidance:
+                # Infer ControlNet only for the conditional batch.
+                control_model_input = latents
+                control_model_input = self.scheduler.scale_model_input(control_model_input, t)
+                # pass only the first 77 (text) tokens to the ControlNet, dropping the
+                # image-prompt tokens the IP-Adapter appends after them
+                controlnet_prompt_embeds = prompt_embeds[:, :77, :].chunk(2)[1]
+            else:
+                control_model_input = latent_model_input
+                controlnet_prompt_embeds = prompt_embeds[:, :77, :]
+
+            if isinstance(controlnet_keep[i], list):
+                cond_scale = [c * s for c, s in zip(controlnet_conditioning_scale, controlnet_keep[i])]
+            else:
+                controlnet_cond_scale = controlnet_conditioning_scale
+                if isinstance(controlnet_cond_scale, list):
+                    controlnet_cond_scale = controlnet_cond_scale[0]
+                cond_scale = controlnet_cond_scale * controlnet_keep[i]
+
+            down_block_res_samples, mid_block_res_sample = self.controlnet(
+                control_model_input,
+                t,
+                encoder_hidden_states=controlnet_prompt_embeds,
+                controlnet_cond=image,
+                conditioning_scale=cond_scale,
+                guess_mode=guess_mode,
+                return_dict=False,
+            )
+
+            if guess_mode and do_classifier_free_guidance:
+                # ControlNet was inferred only for the conditional batch.
+                # To apply the output of ControlNet to both the unconditional and conditional batches,
+                # add 0 to the unconditional batch to keep it unchanged.
+                down_block_res_samples = [torch.cat([torch.zeros_like(d), d]) for d in down_block_res_samples]
+                mid_block_res_sample = torch.cat([torch.zeros_like(mid_block_res_sample), mid_block_res_sample])
+
+            # predict the noise residual
+            noise_pred = self.unet(
+                latent_model_input,
+                t,
+                encoder_hidden_states=prompt_embeds,
+                cross_attention_kwargs=cross_attention_kwargs,
+                down_block_additional_residuals=down_block_res_samples,
+                mid_block_additional_residual=mid_block_res_sample,
+                return_dict=False,
+            )[0]
+
+            # perform guidance
+            if do_classifier_free_guidance:
+                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
+                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)
+
+            # compute the previous noisy sample x_t -> x_t-1
+            latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]
+
+            # call the callback, if provided
+            if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):
+                progress_bar.update()
+                if callback is not None and i % callback_steps == 0:
+                    callback(i, t, latents)
+
+    # If we do sequential model offloading, let's offload unet and controlnet
+    # manually for max memory savings
+    if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None:
+        self.unet.to("cpu")
+        self.controlnet.to("cpu")
+        torch.cuda.empty_cache()
+
+    if not output_type == "latent":
+        image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]
+        image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)
+    else:
+        image = latents
+        has_nsfw_concept = None
+
+    if has_nsfw_concept is None:
+        do_denormalize = [True] * image.shape[0]
+    else:
+        do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]
+
+    image = 
self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize) + + # Offload last model to CPU + if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None: + self.final_offload_hook.offload() + + if not return_dict: + return (image, has_nsfw_concept) + + return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept) \ No newline at end of file diff --git a/src/animatediff/models/__init__.py b/src/animatediff/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/animatediff/models/attention.py b/src/animatediff/models/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..83cace31a49a7b7776167e9a0d2b137aac4090d6 --- /dev/null +++ b/src/animatediff/models/attention.py @@ -0,0 +1,326 @@ +# Adapted from https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention.py + +import logging +from dataclasses import dataclass +from typing import Any, Dict, Optional + +import torch +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.models import ModelMixin +from diffusers.models.attention import AdaLayerNorm, Attention, FeedForward +from diffusers.utils import BaseOutput +from diffusers.utils.torch_utils import maybe_allow_in_graph +from einops import rearrange, repeat +from torch import Tensor, nn + +logger = logging.getLogger(__name__) + +@dataclass +class Transformer3DModelOutput(BaseOutput): + sample: torch.FloatTensor + + +@maybe_allow_in_graph +class Transformer3DModel(ModelMixin, ConfigMixin): + @register_to_config + def __init__( + self, + num_attention_heads: int = 16, + attention_head_dim: int = 88, + in_channels: Optional[int] = None, + num_layers: int = 1, + dropout: float = 0.0, + norm_num_groups: int = 32, + cross_attention_dim: Optional[int] = None, + attention_bias: bool = False, + activation_fn: str = "geglu", + num_embeds_ada_norm: Optional[int] = None, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + unet_use_cross_frame_attention=None, + unet_use_temporal_attention=None, + ): + super().__init__() + self.use_linear_projection = use_linear_projection + self.num_attention_heads = num_attention_heads + self.attention_head_dim = attention_head_dim + inner_dim = num_attention_heads * attention_head_dim + + # Define input layers + self.in_channels = in_channels + + self.norm = torch.nn.GroupNorm( + num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True + ) + if use_linear_projection: + self.proj_in = nn.Linear(in_channels, inner_dim) + else: + self.proj_in = nn.Conv2d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0) + + # Define transformers blocks + self.transformer_blocks = nn.ModuleList( + [ + BasicTransformerBlock( + inner_dim, + num_attention_heads, + attention_head_dim, + dropout=dropout, + cross_attention_dim=cross_attention_dim, + activation_fn=activation_fn, + num_embeds_ada_norm=num_embeds_ada_norm, + attention_bias=attention_bias, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + unet_use_cross_frame_attention=unet_use_cross_frame_attention, + unet_use_temporal_attention=unet_use_temporal_attention, + ) + for d in range(num_layers) + ] + ) + + # 4. 
Define output layers + if use_linear_projection: + self.proj_out = nn.Linear(in_channels, inner_dim) + else: + self.proj_out = nn.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0) + + def forward( + self, + hidden_states: torch.Tensor, + encoder_hidden_states: Optional[torch.Tensor] = None, + timestep: Optional[torch.LongTensor] = None, + cross_attention_kwargs: Dict[str, Any] = None, + attention_mask: Optional[torch.Tensor] = None, + encoder_attention_mask: Optional[torch.Tensor] = None, + return_dict: bool = True, + ) -> tuple[Tensor] | Transformer3DModelOutput: + # validate input dim + if hidden_states.dim() != 5: + raise ValueError(f"Expected hidden_states to have ndim=5, but got ndim={hidden_states.dim()}.") + + # ensure attention_mask is a bias, and give it a singleton query_tokens dimension. + # we may have done this conversion already, e.g. if we came here via UNet2DConditionModel#forward. + # we can tell by counting dims; if ndim == 2: it's a mask rather than a bias. + # expects mask of shape: + # [batch, key_tokens] + # adds singleton query_tokens dimension: + # [batch, 1, key_tokens] + # this helps to broadcast it as a bias over attention scores, which will be in one of the following shapes: + # [batch, heads, query_tokens, key_tokens] (e.g. torch sdp attn) + # [batch * heads, query_tokens, key_tokens] (e.g. xformers or classic attn) + if attention_mask is not None and attention_mask.ndim == 2: + # assume that mask is expressed as: + # (1 = keep, 0 = discard) + # convert mask into a bias that can be added to attention scores: + # (keep = +0, discard = -10000.0) + attention_mask = (1 - attention_mask.to(hidden_states.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + # convert encoder_attention_mask to a bias the same way we do for attention_mask + if encoder_attention_mask is not None and encoder_attention_mask.ndim == 2: + encoder_attention_mask = (1 - encoder_attention_mask.to(hidden_states.dtype)) * -10000.0 + encoder_attention_mask = encoder_attention_mask.unsqueeze(1) + + # shenanigans for motion module + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + + if encoder_hidden_states.shape[0] < video_length: + encoder_hidden_states = repeat(encoder_hidden_states, "b n c -> (b f) n c", f=video_length) + + # 1. Input + batch, _, height, width = hidden_states.shape + residual = hidden_states + + hidden_states = self.norm(hidden_states) + if not self.use_linear_projection: + hidden_states = self.proj_in(hidden_states) + inner_dim = hidden_states.shape[1] + hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim) + else: + inner_dim = hidden_states.shape[1] + hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim) + hidden_states = self.proj_in(hidden_states) + + # 2. Blocks + for block in self.transformer_blocks: + hidden_states = block( + hidden_states, + attention_mask=attention_mask, + encoder_hidden_states=encoder_hidden_states, + timestep=timestep, + video_length=video_length, + encoder_attention_mask=encoder_attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + + # 3. 
Output + if not self.use_linear_projection: + hidden_states = ( + hidden_states.reshape(batch, height, width, inner_dim).permute(0, 3, 1, 2).contiguous() + ) + hidden_states = self.proj_out(hidden_states) + else: + hidden_states = self.proj_out(hidden_states) + hidden_states = ( + hidden_states.reshape(batch, height, width, inner_dim).permute(0, 3, 1, 2).contiguous() + ) + + output = hidden_states + residual + + output = rearrange(output, "(b f) c h w -> b c f h w", f=video_length) + if not return_dict: + return (output,) + + return Transformer3DModelOutput(sample=output) + + +@maybe_allow_in_graph +class BasicTransformerBlock(nn.Module): + def __init__( + self, + dim: int, + num_attention_heads: int, + attention_head_dim: int, + dropout: float = 0.0, + cross_attention_dim: Optional[int] = None, + activation_fn: str = "geglu", + num_embeds_ada_norm: Optional[int] = None, + attention_bias: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + norm_elementwise_affine: bool = True, + unet_use_cross_frame_attention: bool = False, + unet_use_temporal_attention: bool = False, + final_dropout: bool = False, + ) -> None: + super().__init__() + self.only_cross_attention = only_cross_attention + self.use_ada_layer_norm = num_embeds_ada_norm is not None + self.unet_use_cross_frame_attention = unet_use_cross_frame_attention + self.unet_use_temporal_attention = unet_use_temporal_attention + + # Define 3 blocks. Each block has its own normalization layer. + # 1. Self-Attn / SC-Attn + if self.use_ada_layer_norm: + self.norm1 = AdaLayerNorm(dim, num_embeds_ada_norm) + else: + self.norm1 = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine) + + if unet_use_cross_frame_attention: + # this isn't actually implemented anywhere in the AnimateDiff codebase or in Diffusers... + raise NotImplementedError("SC-Attn is not implemented yet.") + else: + self.attn1 = Attention( + query_dim=dim, + cross_attention_dim=cross_attention_dim if only_cross_attention else None, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + upcast_attention=upcast_attention, + ) + + # 2. Cross-Attn + if cross_attention_dim is not None: + self.norm2 = ( + AdaLayerNorm(dim, num_embeds_ada_norm) + if self.use_ada_layer_norm + else nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine) + ) + self.attn2 = Attention( + query_dim=dim, + cross_attention_dim=cross_attention_dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + upcast_attention=upcast_attention, + ) # is self-attn if encoder_hidden_states is none + else: + self.norm2 = None + self.attn2 = None + + # 3. Feed-forward + self.norm3 = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine) + self.ff = FeedForward(dim, dropout=dropout, activation_fn=activation_fn, final_dropout=final_dropout) + + # 4. 
Temporal Attn
+        assert unet_use_temporal_attention is not None
+        if unet_use_temporal_attention:
+            self.attn_temp = Attention(
+                query_dim=dim,
+                heads=num_attention_heads,
+                dim_head=attention_head_dim,
+                dropout=dropout,
+                bias=attention_bias,
+                upcast_attention=upcast_attention,
+            )
+            nn.init.zeros_(self.attn_temp.to_out[0].weight.data)
+            # note: this must be norm_temp (not norm1); forward() normalizes the
+            # temporal branch with self.norm_temp before applying attn_temp
+            if self.use_ada_layer_norm:
+                self.norm_temp = AdaLayerNorm(dim, num_embeds_ada_norm)
+            else:
+                self.norm_temp = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine)
+
+    def forward(
+        self,
+        hidden_states: torch.FloatTensor,
+        attention_mask: Optional[torch.FloatTensor] = None,
+        encoder_hidden_states: Optional[torch.FloatTensor] = None,
+        encoder_attention_mask: Optional[torch.FloatTensor] = None,
+        timestep: Optional[torch.LongTensor] = None,
+        cross_attention_kwargs: Dict[str, Any] = None,
+        video_length=None,
+    ):
+        # SparseCausal-Attention
+        # Notice that normalization is always applied before the real computation in the following blocks.
+        # 1. Self-Attention
+        if self.use_ada_layer_norm:
+            norm_hidden_states = self.norm1(hidden_states, timestep)
+        else:
+            norm_hidden_states = self.norm1(hidden_states)
+
+        cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {}
+        if self.unet_use_cross_frame_attention:
+            cross_attention_kwargs["video_length"] = video_length
+
+        attn_output = self.attn1(
+            norm_hidden_states,
+            encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None,
+            attention_mask=attention_mask,
+            **cross_attention_kwargs,
+        )
+
+        hidden_states = attn_output + hidden_states
+
+        # 2. Cross-Attention
+        if self.attn2 is not None:
+            norm_hidden_states = (
+                self.norm2(hidden_states, timestep) if self.use_ada_layer_norm else self.norm2(hidden_states)
+            )
+
+            attn_output = self.attn2(
+                norm_hidden_states,
+                encoder_hidden_states=encoder_hidden_states,
+                attention_mask=encoder_attention_mask,
+                **cross_attention_kwargs,
+            )
+            hidden_states = attn_output + hidden_states
+
+        # 3. Feed-forward
+        hidden_states = self.ff(self.norm3(hidden_states)) + hidden_states
+
+        # 4. Temporal-Attention
+        if self.unet_use_temporal_attention:
+            d = hidden_states.shape[1]
+            hidden_states = rearrange(hidden_states, "(b f) d c -> (b d) f c", f=video_length)
+            norm_hidden_states = (
+                self.norm_temp(hidden_states, timestep)
+                if self.use_ada_layer_norm
+                else self.norm_temp(hidden_states)
+            )
+            hidden_states = self.attn_temp(norm_hidden_states) + hidden_states
+            hidden_states = rearrange(hidden_states, "(b d) f c -> (b f) d c", d=d)
+
+        return hidden_states
diff --git a/src/animatediff/models/clip.py b/src/animatediff/models/clip.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d91a0bf08b447d03669b79c3e7415d256c814da
--- /dev/null
+++ b/src/animatediff/models/clip.py
@@ -0,0 +1,161 @@
+from typing import Optional, Tuple, Union
+
+import torch
+from torch import Tensor, nn
+from transformers.modeling_outputs import BaseModelOutput, BaseModelOutputWithPooling
+from transformers.models.clip import CLIPPreTrainedModel, CLIPTextConfig, CLIPTextModel
+from transformers.models.clip.modeling_clip import (
+    CLIP_TEXT_INPUTS_DOCSTRING,
+    CLIPTextTransformer,
+    _expand_mask,
+    _make_causal_mask,
+)
+from transformers.utils import add_start_docstrings_to_model_forward, replace_return_docstrings
+
+CLIP_SKIP_TEXT_INPUTS_DOCSTRING = (
+    CLIP_TEXT_INPUTS_DOCSTRING
+    + r"""
+        clip_skip (`int`, *optional*, defaults to 1):
+            Skip the final N layers of the CLIP text encoder. 
Some Diffusion models were trained + using the hidden states from the 2nd-last layer of the CLIP text encoder (ie clip_skip=2), + so we reproduce that behavior here for use with those models. +""" +) + + +class CLIPSkipTextTransformer(CLIPTextTransformer): + @add_start_docstrings_to_model_forward(CLIP_SKIP_TEXT_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=CLIPTextConfig) + def forward( + self, + input_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + clip_skip: int = 1, + ) -> Union[Tuple, BaseModelOutputWithPooling]: + r""" + Returns: + + """ + output_attentions = ( + output_attentions if output_attentions is not None else self.config.output_attentions + ) + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + if input_ids is None: + raise ValueError("You have to specify input_ids") + + input_shape = input_ids.size() + input_ids = input_ids.view(-1, input_shape[-1]) + + hidden_states = self.embeddings(input_ids=input_ids, position_ids=position_ids) + + # CLIP's text model uses causal mask, prepare it here. + # https://github.com/openai/CLIP/blob/cfcffb90e69f37bf2ff1e988237a0fbe41f33c04/clip/model.py#L324 + causal_attention_mask = _make_causal_mask( + input_shape, hidden_states.dtype, device=hidden_states.device + ) + # expand attention_mask + if attention_mask is not None: + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + attention_mask = _expand_mask(attention_mask, hidden_states.dtype) + + encoder_outputs: BaseModelOutput = self.encoder( + inputs_embeds=hidden_states, + attention_mask=attention_mask, + causal_attention_mask=causal_attention_mask, + output_attentions=output_attentions, + output_hidden_states=True, + return_dict=True, + ) + + # take the hidden state from the Nth-to-last layer of the encoder, where N = clip_skip + # clip_skip=1 means take the hidden state from the last layer as with CLIPTextTransformer + last_hidden_state = encoder_outputs.hidden_states[-clip_skip] + last_hidden_state = self.final_layer_norm(last_hidden_state) + + # text_embeds.shape = [batch_size, sequence_length, transformer.width] + # take features from the eot embedding (eot_token is the highest number in each sequence) + # casting to torch.int for onnx compatibility: argmax doesn't support int64 inputs with opset 14 + pooled_output = last_hidden_state[ + torch.arange(last_hidden_state.shape[0], device=last_hidden_state.device), + input_ids.to(dtype=torch.int, device=last_hidden_state.device).argmax(dim=-1), + ] + + if not return_dict: + return (last_hidden_state, pooled_output) + encoder_outputs[1:] + + return BaseModelOutputWithPooling( + last_hidden_state=last_hidden_state, + pooler_output=pooled_output, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + ) + + def _build_causal_attention_mask(self, bsz, seq_len, dtype): + # lazily create causal attention mask, with full attention between the vision tokens + # pytorch uses additive attention mask; fill with -inf + mask = torch.empty(bsz, seq_len, seq_len, dtype=dtype) + mask.fill_(torch.tensor(torch.finfo(dtype).min)) + mask.triu_(1) # zero out the lower diagonal + mask = 
mask.unsqueeze(1) # expand mask + return mask + + +class CLIPSkipTextModel(CLIPTextModel): + config_class = CLIPTextConfig + + _no_split_modules = ["CLIPEncoderLayer"] + + def __init__(self, config: CLIPTextConfig): + super().__init__(config) + self.text_model = CLIPSkipTextTransformer(config) + # Initialize weights and apply final processing + self.post_init() + + @add_start_docstrings_to_model_forward(CLIP_SKIP_TEXT_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=CLIPTextConfig) + def forward( + self, + input_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + clip_skip: int = 1, + ) -> Union[Tuple, BaseModelOutputWithPooling]: + r""" + Returns: + + Examples: + + ```python + >>> from transformers import AutoTokenizer, CLIPSkipTextModel + + >>> model = CLIPSkipTextModel.from_pretrained("openai/clip-vit-base-patch32") + >>> tokenizer = AutoTokenizer.from_pretrained("openai/clip-vit-base-patch32") + + >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="pt") + + >>> outputs = model(**inputs) + >>> last_hidden_state = outputs.last_hidden_state + >>> pooled_output = outputs.pooler_output # pooled (EOS token) states + ```""" + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + return self.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + clip_skip=clip_skip, + ) diff --git a/src/animatediff/models/motion_module.py b/src/animatediff/models/motion_module.py new file mode 100644 index 0000000000000000000000000000000000000000..355bd94ee7a3fc1225ac21e415e3625864c80cba --- /dev/null +++ b/src/animatediff/models/motion_module.py @@ -0,0 +1,304 @@ +import logging +import math +from dataclasses import dataclass +from typing import Optional + +import torch +import torch.nn.functional as F +from diffusers.models.attention import Attention, FeedForward +from diffusers.utils import BaseOutput +from diffusers.utils.torch_utils import maybe_allow_in_graph +from einops import rearrange, repeat +from torch import Tensor, nn + +logger = logging.getLogger(__name__) + +def zero_module(module): + # Zero out the parameters of a module and return it. 
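+    # (zero-initialized output projections make a residual branch start as a no-op,
+    # so a freshly added motion module leaves the base UNet's behavior unchanged)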
+ for p in module.parameters(): + p.detach().zero_() + return module + + +@dataclass +class TemporalTransformer3DModelOutput(BaseOutput): + sample: torch.FloatTensor + + +def get_motion_module(in_channels, motion_module_type: str, motion_module_kwargs: dict): + if motion_module_type == "Vanilla": + return VanillaTemporalModule( + in_channels=in_channels, + **motion_module_kwargs, + ) + else: + raise ValueError + + +class VanillaTemporalModule(nn.Module): + def __init__( + self, + in_channels, + num_attention_heads=8, + num_transformer_block=2, + attention_block_types=("Temporal_Self", "Temporal_Self"), + cross_frame_attention_mode=None, + temporal_position_encoding=False, + temporal_position_encoding_max_len=24, + temporal_attention_dim_div=1, + zero_initialize=True, + ): + super().__init__() + + self.temporal_transformer = TemporalTransformer3DModel( + in_channels=in_channels, + num_attention_heads=num_attention_heads, + attention_head_dim=in_channels // num_attention_heads // temporal_attention_dim_div, + num_layers=num_transformer_block, + attention_block_types=attention_block_types, + cross_frame_attention_mode=cross_frame_attention_mode, + temporal_position_encoding=temporal_position_encoding, + temporal_position_encoding_max_len=temporal_position_encoding_max_len, + ) + + if zero_initialize: + self.temporal_transformer.proj_out = zero_module(self.temporal_transformer.proj_out) + + def forward(self, input_tensor, temb, encoder_hidden_states, attention_mask=None, anchor_frame_idx=None): + hidden_states = input_tensor + hidden_states = self.temporal_transformer(hidden_states, encoder_hidden_states, attention_mask) + + output = hidden_states + return output + + +@maybe_allow_in_graph +class TemporalTransformer3DModel(nn.Module): + def __init__( + self, + in_channels, + num_attention_heads, + attention_head_dim, + num_layers, + attention_block_types=( + "Temporal_Self", + "Temporal_Self", + ), + dropout=0.0, + norm_num_groups=32, + cross_attention_dim=768, + activation_fn="geglu", + attention_bias=False, + upcast_attention=False, + cross_frame_attention_mode=None, + temporal_position_encoding=False, + temporal_position_encoding_max_len=24, + ): + super().__init__() + + inner_dim = num_attention_heads * attention_head_dim + + self.norm = torch.nn.GroupNorm( + num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True + ) + self.proj_in = nn.Linear(in_channels, inner_dim) + + self.transformer_blocks = nn.ModuleList( + [ + TemporalTransformerBlock( + dim=inner_dim, + num_attention_heads=num_attention_heads, + attention_head_dim=attention_head_dim, + attention_block_types=attention_block_types, + dropout=dropout, + norm_num_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + activation_fn=activation_fn, + attention_bias=attention_bias, + upcast_attention=upcast_attention, + cross_frame_attention_mode=cross_frame_attention_mode, + temporal_position_encoding=temporal_position_encoding, + temporal_position_encoding_max_len=temporal_position_encoding_max_len, + ) + for d in range(num_layers) + ] + ) + self.proj_out = nn.Linear(inner_dim, in_channels) + + def forward( + self, + hidden_states: Tensor, + encoder_hidden_states: Optional[Tensor] = None, + attention_mask: Optional[Tensor] = None, + ): + assert ( + hidden_states.dim() == 5 + ), f"Expected hidden_states to have ndim=5, but got ndim={hidden_states.dim()}." 
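+        # fold the frame axis into the batch axis so the norm/projection layers
+        # below operate per-frame; the final rearrange restores (b, c, f, h, w)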
+ video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + + batch, channel, height, weight = hidden_states.shape + residual = hidden_states + + hidden_states = self.norm(hidden_states) + inner_dim = hidden_states.shape[1] + hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * weight, inner_dim) + hidden_states = self.proj_in(hidden_states) + + # Transformer Blocks + for block in self.transformer_blocks: + hidden_states = block( + hidden_states, encoder_hidden_states=encoder_hidden_states, video_length=video_length + ) + + # output + hidden_states = self.proj_out(hidden_states) + hidden_states = ( + hidden_states.reshape(batch, height, weight, inner_dim).permute(0, 3, 1, 2).contiguous() + ) + + output = hidden_states + residual + output = rearrange(output, "(b f) c h w -> b c f h w", f=video_length) + + return output + + +@maybe_allow_in_graph +class TemporalTransformerBlock(nn.Module): + def __init__( + self, + dim: int, + num_attention_heads: int, + attention_head_dim: int, + attention_block_types=( + "Temporal_Self", + "Temporal_Self", + ), + dropout=0.0, + norm_num_groups: int = 32, + cross_attention_dim: int = 768, + activation_fn: str = "geglu", + attention_bias: bool = False, + upcast_attention: bool = False, + cross_frame_attention_mode=None, + temporal_position_encoding: bool = False, + temporal_position_encoding_max_len: int = 24, + ): + super().__init__() + + attention_blocks = [] + norms = [] + + for block_name in attention_block_types: + attention_blocks.append( + VersatileAttention( + attention_mode=block_name.split("_")[0], + cross_attention_dim=cross_attention_dim if block_name.endswith("_Cross") else None, + query_dim=dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + upcast_attention=upcast_attention, + cross_frame_attention_mode=cross_frame_attention_mode, + temporal_position_encoding=temporal_position_encoding, + temporal_position_encoding_max_len=temporal_position_encoding_max_len, + ) + ) + norms.append(nn.LayerNorm(dim)) + + self.attention_blocks = nn.ModuleList(attention_blocks) + self.norms = nn.ModuleList(norms) + + self.ff = FeedForward(dim, dropout=dropout, activation_fn=activation_fn) + self.ff_norm = nn.LayerNorm(dim) + + def forward(self, hidden_states, encoder_hidden_states=None, attention_mask=None, video_length=None): + for attention_block, norm in zip(self.attention_blocks, self.norms): + norm_hidden_states = norm(hidden_states) + hidden_states = ( + attention_block( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states + if attention_block.is_cross_attention + else None, + video_length=video_length, + ) + + hidden_states + ) + + hidden_states = self.ff(self.ff_norm(hidden_states)) + hidden_states + + output = hidden_states + return output + + +class PositionalEncoding(nn.Module): + def __init__(self, d_model, dropout: float = 0.0, max_len: int = 24): + super().__init__() + self.dropout: nn.Module = nn.Dropout(p=dropout) + position = torch.arange(max_len).unsqueeze(1) + div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model)) + pe: Tensor = torch.zeros(1, max_len, d_model) + pe[0, :, 0::2] = torch.sin(position * div_term) + pe[0, :, 1::2] = torch.cos(position * div_term) + self.register_buffer("pe", pe) + + def forward(self, x: Tensor): + x = x + self.pe[:, : x.size(1)] + return self.dropout(x) + + +@maybe_allow_in_graph +class VersatileAttention(Attention): + def __init__( + self, + 
attention_mode: Optional[str] = None,
+        cross_frame_attention_mode: Optional[str] = None,
+        temporal_position_encoding: bool = False,
+        temporal_position_encoding_max_len: int = 24,
+        *args,
+        **kwargs,
+    ):
+        super().__init__(*args, **kwargs)
+        if attention_mode.lower() != "temporal":
+            raise ValueError(f"Attention mode {attention_mode} is not supported.")
+
+        self.attention_mode = attention_mode
+        self.is_cross_attention = kwargs["cross_attention_dim"] is not None
+
+        self.pos_encoder = (
+            PositionalEncoding(kwargs["query_dim"], dropout=0.0, max_len=temporal_position_encoding_max_len)
+            if (temporal_position_encoding and attention_mode == "Temporal")
+            else None
+        )
+
+    def extra_repr(self):
+        return f"(Module Info) Attention_Mode: {self.attention_mode}, Is_Cross_Attention: {self.is_cross_attention}"
+
+    def forward(
+        self, hidden_states: Tensor, encoder_hidden_states=None, attention_mask=None, video_length=None
+    ):
+        if self.attention_mode == "Temporal":
+            d = hidden_states.shape[1]
+            hidden_states = rearrange(hidden_states, "(b f) d c -> (b d) f c", f=video_length)
+
+            if self.pos_encoder is not None:
+                hidden_states = self.pos_encoder(hidden_states)
+
+            if encoder_hidden_states is not None and encoder_hidden_states.shape[0] < d:
+                encoder_hidden_states = repeat(encoder_hidden_states, "b n c -> (b d) n c", d=d)
+        else:
+            raise NotImplementedError
+
+        # the attention processor handles the actual attention computation
+        hidden_states = self.processor(self, hidden_states, encoder_hidden_states, attention_mask)
+
+        if self.attention_mode == "Temporal":
+            hidden_states = rearrange(hidden_states, "(b d) f c -> (b f) d c", d=d)
+
+        return hidden_states
diff --git a/src/animatediff/models/resnet.py b/src/animatediff/models/resnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..febecf282babef0be1ec662db805d1dfc688d601
--- /dev/null
+++ b/src/animatediff/models/resnet.py
@@ -0,0 +1,228 @@
+# Adapted from https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/resnet.py
+
+from typing import Optional
+
+import torch
+import torch.nn.functional as F
+from diffusers.models.lora import LoRACompatibleConv, LoRACompatibleLinear
+from einops import rearrange
+from torch import Tensor, nn
+
+
+#class InflatedConv3d(nn.Conv2d):
+class InflatedConv3d(LoRACompatibleConv):
+    def forward(self, x: Tensor) -> Tensor:
+        frames = x.shape[2]
+
+        x = rearrange(x, "b c f h w -> (b f) c h w")
+        x = F.conv2d(x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups)
+        x = rearrange(x, "(b f) c h w -> b c f h w", f=frames)
+        return x
+
+class InflatedGroupNorm(nn.GroupNorm):
+    def forward(self, x):
+        video_length = x.shape[2]
+
+        x = rearrange(x, "b c f h w -> (b f) c h w")
+        x = super().forward(x)
+        x = rearrange(x, "(b f) c h w -> b c f h w", f=video_length)
+
+        return x
+
+class Upsample3D(nn.Module):
+    def __init__(
+        self,
+        channels: int,
+        use_conv: bool = False,
+        use_conv_transpose: bool = False,
+        out_channels: Optional[int] = None,
+        name="conv",
+    ):
+        super().__init__()
+        self.channels = channels
+        self.out_channels = out_channels or channels
+        self.use_conv = use_conv
+        self.use_conv_transpose = use_conv_transpose
+        self.name = name
+
+        if use_conv_transpose:
+            raise NotImplementedError
+        elif use_conv:
+            self.conv = InflatedConv3d(self.channels, self.out_channels, 3, padding=1)
+
+    def forward(self, hidden_states: Tensor, output_size=None):
+        assert hidden_states.shape[1] == self.channels
+
+        if 
self.use_conv_transpose:
+            raise NotImplementedError
+
+        # Cast to float32 as the 'upsample_nearest2d_out_frame' op does not support bfloat16
+        dtype = hidden_states.dtype
+        if dtype == torch.bfloat16:
+            hidden_states = hidden_states.to(torch.float32)
+
+        # upsample_nearest_nhwc fails with large batch sizes. see https://github.com/huggingface/diffusers/issues/984
+        if hidden_states.shape[0] >= 64:
+            hidden_states = hidden_states.contiguous()
+
+        # if `output_size` is passed we force the interpolation output
+        # size and do not make use of `scale_factor=2`
+        if output_size is None:
+            hidden_states = F.interpolate(hidden_states, scale_factor=[1.0, 2.0, 2.0], mode="nearest")
+        else:
+            hidden_states = F.interpolate(hidden_states, size=output_size, mode="nearest")
+
+        # If the input was bfloat16, cast back to bfloat16
+        if dtype == torch.bfloat16:
+            hidden_states = hidden_states.to(dtype)
+
+        hidden_states = self.conv(hidden_states)
+
+        return hidden_states
+
+
+class Downsample3D(nn.Module):
+    def __init__(
+        self,
+        channels: int,
+        use_conv: bool = False,
+        out_channels: Optional[int] = None,
+        padding: int = 1,
+        name="conv",
+    ):
+        super().__init__()
+        self.channels = channels
+        self.out_channels = out_channels or channels
+        self.use_conv = use_conv
+        self.padding = padding
+        stride = 2
+        self.name = name
+
+        if use_conv:
+            self.conv = InflatedConv3d(self.channels, self.out_channels, 3, stride=stride, padding=padding)
+        else:
+            raise NotImplementedError
+
+    def forward(self, hidden_states):
+        assert hidden_states.shape[1] == self.channels
+        if self.use_conv and self.padding == 0:
+            raise NotImplementedError
+
+        hidden_states = self.conv(hidden_states)
+
+        return hidden_states
+
+
+class ResnetBlock3D(nn.Module):
+    def __init__(
+        self,
+        *,
+        in_channels,
+        out_channels=None,
+        conv_shortcut=False,
+        dropout=0.0,
+        temb_channels=512,
+        groups=32,
+        groups_out=None,
+        pre_norm=True,
+        eps=1e-6,
+        non_linearity="swish",
+        time_embedding_norm="default",
+        output_scale_factor=1.0,
+        use_in_shortcut=None,
+        use_inflated_groupnorm=None,
+    ):
+        super().__init__()
+        self.pre_norm = True  # the pre-norm path is always used here
+        self.in_channels = in_channels
+        out_channels = in_channels if out_channels is None else out_channels
+        self.out_channels = out_channels
+        self.use_conv_shortcut = conv_shortcut
+        self.time_embedding_norm = time_embedding_norm
+        self.output_scale_factor = output_scale_factor
+
+        if groups_out is None:
+            groups_out = groups
+
+        assert use_inflated_groupnorm is not None, "use_inflated_groupnorm must be explicitly set"
+        if use_inflated_groupnorm:
+            self.norm1 = InflatedGroupNorm(num_groups=groups, num_channels=in_channels, eps=eps, affine=True)
+        else:
+            self.norm1 = nn.GroupNorm(num_groups=groups, num_channels=in_channels, eps=eps, affine=True)
+
+        self.conv1 = InflatedConv3d(in_channels, out_channels, kernel_size=3, stride=1, padding=1)
+
+        if temb_channels is not None:
+            if self.time_embedding_norm == "default":
+                time_emb_proj_out_channels = out_channels
+            elif self.time_embedding_norm == "scale_shift":
+                time_emb_proj_out_channels = out_channels * 2
+            else:
+                raise ValueError(f"unknown time_embedding_norm: {self.time_embedding_norm}")
+
+            # self.time_emb_proj = nn.Linear(temb_channels, time_emb_proj_out_channels)
+            self.time_emb_proj = LoRACompatibleLinear(temb_channels, time_emb_proj_out_channels)
+        else:
+            self.time_emb_proj = None
+
+        if use_inflated_groupnorm:
+            self.norm2 = InflatedGroupNorm(num_groups=groups_out, num_channels=out_channels, eps=eps, affine=True)
+        else:
+            
self.norm2 = nn.GroupNorm(num_groups=groups_out, num_channels=out_channels, eps=eps, affine=True) + + self.dropout = nn.Dropout(dropout) + self.conv2 = InflatedConv3d(out_channels, out_channels, kernel_size=3, stride=1, padding=1) + + if non_linearity == "swish": + self.nonlinearity = lambda x: F.silu(x) + elif non_linearity == "mish": + self.nonlinearity = Mish() + elif non_linearity == "silu": + self.nonlinearity = nn.SiLU() + + self.use_in_shortcut = ( + self.in_channels != self.out_channels if use_in_shortcut is None else use_in_shortcut + ) + + self.conv_shortcut = None + if self.use_in_shortcut: + self.conv_shortcut = InflatedConv3d(in_channels, out_channels, kernel_size=1, stride=1, padding=0) + + def forward(self, input_tensor, temb): + hidden_states = input_tensor + + hidden_states = self.norm1(hidden_states) + hidden_states = self.nonlinearity(hidden_states) + + hidden_states = self.conv1(hidden_states) + + if temb is not None: + temb = self.time_emb_proj(self.nonlinearity(temb))[:, :, None, None, None] + + if temb is not None and self.time_embedding_norm == "default": + hidden_states = hidden_states + temb + + hidden_states = self.norm2(hidden_states) + + if temb is not None and self.time_embedding_norm == "scale_shift": + scale, shift = torch.chunk(temb, 2, dim=1) + hidden_states = hidden_states * (1 + scale) + shift + + hidden_states = self.nonlinearity(hidden_states) + + hidden_states = self.dropout(hidden_states) + hidden_states = self.conv2(hidden_states) + + if self.conv_shortcut is not None: + input_tensor = self.conv_shortcut(input_tensor) + + output_tensor = (input_tensor + hidden_states) / self.output_scale_factor + + return output_tensor + + +class Mish(nn.Module): + def forward(self, hidden_states): + return hidden_states * torch.tanh(torch.nn.functional.softplus(hidden_states)) diff --git a/src/animatediff/models/unet.py b/src/animatediff/models/unet.py new file mode 100644 index 0000000000000000000000000000000000000000..2a9d3208eb007ecd01db36a7a3ece93b6b83dedb --- /dev/null +++ b/src/animatediff/models/unet.py @@ -0,0 +1,643 @@ +# Adapted from https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/unet_2d_condition.py + +from dataclasses import dataclass +from os import PathLike +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +import torch.utils.checkpoint +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.models import ModelMixin +from diffusers.models.attention_processor import AttentionProcessor +from diffusers.models.embeddings import TimestepEmbedding, Timesteps +from diffusers.utils import (SAFETENSORS_WEIGHTS_NAME, WEIGHTS_NAME, + BaseOutput, logging) +from safetensors.torch import load_file +from torch import Tensor, nn + +from .resnet import InflatedConv3d, InflatedGroupNorm +from .unet_blocks import (CrossAttnDownBlock3D, CrossAttnUpBlock3D, + DownBlock3D, UNetMidBlock3DCrossAttn, UpBlock3D, + get_down_block, get_up_block) + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +@dataclass +class UNet3DConditionOutput(BaseOutput): + sample: torch.FloatTensor + + +class UNet3DConditionModel(ModelMixin, ConfigMixin): + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + sample_size: Optional[int] = None, + in_channels: int = 4, + out_channels: int = 4, + center_input_sample: bool = False, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str] = ( + 
"CrossAttnDownBlock3D", + "CrossAttnDownBlock3D", + "CrossAttnDownBlock3D", + "DownBlock3D", + ), + mid_block_type: str = "UNetMidBlock3DCrossAttn", + up_block_types: Tuple[str] = ( + "UpBlock3D", + "CrossAttnUpBlock3D", + "CrossAttnUpBlock3D", + "CrossAttnUpBlock3D", + ), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int] = (320, 640, 1280, 1280), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: int = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + attention_head_dim: Union[int, Tuple[int]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int]]] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + use_inflated_groupnorm=False, + # Additional + use_motion_module=False, + motion_module_resolutions=(1, 2, 4, 8), + motion_module_mid_block=False, + motion_module_decoder_only=False, + motion_module_type=None, + motion_module_kwargs={}, + unet_use_cross_frame_attention=None, + unet_use_temporal_attention=None, + ): + super().__init__() + + self.sample_size = sample_size + time_embed_dim = block_out_channels[0] * 4 + + # input + self.conv_in = InflatedConv3d(in_channels, block_out_channels[0], kernel_size=3, padding=(1, 1)) + + # time + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + + self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + + # class embedding + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + else: + self.class_embedding = None + + self.down_blocks = nn.ModuleList([]) + self.mid_block = None + self.up_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + res = 2**i + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + in_channels=input_channel, + out_channels=output_channel, + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim[i], + downsample_padding=downsample_padding, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + unet_use_cross_frame_attention=unet_use_cross_frame_attention, + unet_use_temporal_attention=unet_use_temporal_attention, + use_inflated_groupnorm=use_inflated_groupnorm, + 
use_motion_module=use_motion_module + and (res in motion_module_resolutions) + and (not motion_module_decoder_only), + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + self.down_blocks.append(down_block) + + # mid + if mid_block_type == "UNetMidBlock3DCrossAttn": + self.mid_block = UNetMidBlock3DCrossAttn( + in_channels=block_out_channels[-1], + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim[-1], + resnet_groups=norm_num_groups, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + unet_use_cross_frame_attention=unet_use_cross_frame_attention, + unet_use_temporal_attention=unet_use_temporal_attention, + use_inflated_groupnorm=use_inflated_groupnorm, + use_motion_module=use_motion_module and motion_module_mid_block, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + else: + raise ValueError(f"unknown mid_block_type : {mid_block_type}") + + # count how many layers upsample the videos + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_attention_head_dim = list(reversed(attention_head_dim)) + only_cross_attention = list(reversed(only_cross_attention)) + output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + res = 2 ** (3 - i) + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=reversed_attention_head_dim[i], + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + unet_use_cross_frame_attention=unet_use_cross_frame_attention, + unet_use_temporal_attention=unet_use_temporal_attention, + use_inflated_groupnorm=use_inflated_groupnorm, + use_motion_module=use_motion_module and (res in motion_module_resolutions), + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + if use_inflated_groupnorm: + self.conv_norm_out = InflatedGroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) + else: + self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) + + self.conv_act = nn.SiLU() + self.conv_out = InflatedConv3d(block_out_channels[0], out_channels, kernel_size=3, padding=1) + + def set_attention_slice(self, slice_size): + r""" + Enable sliced 
attention computation.
+
+        When this option is enabled, the attention module splits the input tensor into slices and computes
+        attention in several steps. This is useful for saving some memory in exchange for a small decrease
+        in speed.
+
+        Args:
+            slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`):
+                When `"auto"`, halves the input to the attention heads, so attention will be computed in two steps. If
+                `"max"`, the maximum amount of memory will be saved by running only one slice at a time. If a number is
+                provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim`
+                must be a multiple of `slice_size`.
+        """
+        sliceable_head_dims = []
+
+        def fn_recursive_retrieve_sliceable_dims(module: nn.Module):
+            if hasattr(module, "set_attention_slice"):
+                sliceable_head_dims.append(module.sliceable_head_dim)
+
+            for child in module.children():
+                fn_recursive_retrieve_sliceable_dims(child)
+
+        # retrieve number of attention layers
+        for module in self.children():
+            fn_recursive_retrieve_sliceable_dims(module)
+
+        num_sliceable_layers = len(sliceable_head_dims)
+
+        if slice_size == "auto":
+            # half the attention head size is usually a good trade-off between
+            # speed and memory
+            slice_size = [dim // 2 for dim in sliceable_head_dims]
+        elif slice_size == "max":
+            # make smallest slice possible
+            slice_size = num_sliceable_layers * [1]
+
+        slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size
+
+        if len(slice_size) != len(sliceable_head_dims):
+            raise ValueError(
+                f"You have provided {len(slice_size)} slice sizes, but {self.config} has {len(sliceable_head_dims)} different"
+                f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}."
+            )
+
+        for i in range(len(slice_size)):
+            size = slice_size[i]
+            dim = sliceable_head_dims[i]
+            if size is not None and size > dim:
+                raise ValueError(f"size {size} has to be smaller or equal to {dim}.")
+
+        # Recursively walk through all the children.
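+        # Each module that exposes `set_attention_slice` pops its size off the
+        # end of the shared list, which is why the list is reversed before
+        # being passed down.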
+        def fn_recursive_set_attention_slice(module: nn.Module, slice_size: List[int]):
+            if hasattr(module, "set_attention_slice"):
+                module.set_attention_slice(slice_size.pop())
+
+            for child in module.children():
+                fn_recursive_set_attention_slice(child, slice_size)
+
+        reversed_slice_size = list(reversed(slice_size))
+        for module in self.children():
+            fn_recursive_set_attention_slice(module, reversed_slice_size)
+
+    def _set_gradient_checkpointing(self, module, value=False):
+        if isinstance(module, (CrossAttnDownBlock3D, DownBlock3D, CrossAttnUpBlock3D, UpBlock3D)):
+            module.gradient_checkpointing = value
+
+    def forward(
+        self,
+        sample: torch.FloatTensor,
+        timestep: Union[Tensor, float, int],
+        encoder_hidden_states: Tensor,
+        class_labels: Optional[Tensor] = None,
+        attention_mask: Optional[Tensor] = None,
+        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
+        added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None,
+        down_block_additional_residuals: Optional[Tuple[torch.Tensor]] = None,
+        mid_block_additional_residual: Optional[torch.Tensor] = None,
+        encoder_attention_mask: Optional[torch.Tensor] = None,
+        return_dict: bool = True,
+    ) -> Union[UNet3DConditionOutput, Tuple]:
+        r"""
+        Args:
+            sample (`torch.FloatTensor`): (batch, channel, num_frames, height, width) noisy inputs tensor
+            timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps
+            encoder_hidden_states (`torch.FloatTensor`): (batch, sequence_length, feature_dim) encoder hidden states
+            return_dict (`bool`, *optional*, defaults to `True`):
+                Whether or not to return a [`UNet3DConditionOutput`] instead of a plain tuple.
+
+        Returns:
+            [`UNet3DConditionOutput`] or `tuple`:
+            [`UNet3DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. When
+            returning a tuple, the first element is the sample tensor.
+        """
+        # By default samples have to be at least a multiple of the overall upsampling factor.
+        # The overall upsampling factor is equal to 2 ** (# num of upsampling layers).
+        # However, the upsampling interpolation output size can be forced to fit any upsampling size
+        # on the fly if necessary.
+        default_overall_up_factor = 2**self.num_upsamplers
+
+        # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor`
+        forward_upsample_size = False
+        upsample_size = None
+
+        if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]):
+            logger.debug("Forward upsample size to force interpolation output size.")
+            forward_upsample_size = True
+
+        # ensure attention_mask is a bias, and give it a singleton query_tokens dimension
+        # expects mask of shape:
+        #   [batch, key_tokens]
+        # adds singleton query_tokens dimension:
+        #   [batch, 1, key_tokens]
+        # this helps to broadcast it as a bias over attention scores, which will be in one of the following shapes:
+        #   [batch, heads, query_tokens, key_tokens] (e.g. torch sdp attn)
+        #   [batch * heads, query_tokens, key_tokens] (e.g. 
xformers or classic attn) + if attention_mask is not None: + # assume that mask is expressed as: + # (1 = keep, 0 = discard) + # convert mask into a bias that can be added to attention scores: + # (keep = +0, discard = -10000.0) + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + # convert encoder_attention_mask to a bias the same way we do for attention_mask + if encoder_attention_mask is not None: + encoder_attention_mask = (1 - encoder_attention_mask.to(sample.dtype)) * -10000.0 + encoder_attention_mask = encoder_attention_mask.unsqueeze(1) + + # 0. center input if necessary + if self.config.center_input_sample: + sample = 2 * sample - 1.0 + + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # `Timesteps` does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=sample.dtype) + + emb = self.time_embedding(t_emb) + + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + # `Timesteps` does not contain any weights and will always return f32 tensors + # there might be better ways to encapsulate this. + class_labels = class_labels.to(dtype=sample.dtype) + + class_emb = self.class_embedding(class_labels).to(dtype=sample.dtype) + + if self.config.class_embeddings_concat: + emb = torch.cat([emb, class_emb], dim=-1) + else: + emb = emb + class_emb + + # 2. pre-process + sample = self.conv_in(sample) + + # 3. down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + sample, res_samples = downsample_block( + hidden_states=sample, temb=emb, encoder_hidden_states=encoder_hidden_states + ) + + down_block_res_samples = down_block_res_samples + res_samples + + if down_block_additional_residuals is not None: + new_down_block_res_samples = () + + for down_block_res_sample, down_block_additional_residual in zip( + down_block_res_samples, down_block_additional_residuals + ): + down_block_res_sample = down_block_res_sample + down_block_additional_residual + new_down_block_res_samples = new_down_block_res_samples + (down_block_res_sample,) + + down_block_res_samples = new_down_block_res_samples + + # 4. 
mid + if self.mid_block is not None: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + + + if mid_block_additional_residual is not None: + sample = sample + mid_block_additional_residual + + # up + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block and forward_upsample_size: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + upsample_size=upsample_size, + attention_mask=attention_mask, + ) + else: + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + encoder_hidden_states=encoder_hidden_states, + ) + + # post-process + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + + if not return_dict: + return (sample,) + + return UNet3DConditionOutput(sample=sample) + + @classmethod + def from_pretrained_2d( + cls: "UNet3DConditionModel", + pretrained_model_path: PathLike, + motion_module_path: PathLike, + subfolder: Optional[str] = None, + unet_additional_kwargs: Optional[dict] = None, + ): + pretrained_model_path = Path(pretrained_model_path) + motion_module_path = Path(motion_module_path) + if subfolder is not None: + pretrained_model_path = pretrained_model_path.joinpath(subfolder) + + logger.debug(f"Loading temporal unet weights into {pretrained_model_path}") + + config_file = pretrained_model_path / "config.json" + if not (config_file.exists() and config_file.is_file()): + raise RuntimeError(f"{config_file} does not exist or is not a file") + + unet_config = cls.load_config(config_file) + unet_config["_class_name"] = cls.__name__ + unet_config["down_block_types"] = [ + "CrossAttnDownBlock3D", + "CrossAttnDownBlock3D", + "CrossAttnDownBlock3D", + "DownBlock3D", + ] + unet_config["up_block_types"] = [ + "UpBlock3D", + "CrossAttnUpBlock3D", + "CrossAttnUpBlock3D", + "CrossAttnUpBlock3D", + ] + unet_config["mid_block_type"] = "UNetMidBlock3DCrossAttn" + + model: nn.Module = cls.from_config(unet_config, **unet_additional_kwargs) + + # load the vanilla weights + if pretrained_model_path.joinpath(SAFETENSORS_WEIGHTS_NAME).exists(): + logger.debug(f"loading safeTensors weights from {pretrained_model_path} ...") + state_dict = load_file(pretrained_model_path.joinpath(SAFETENSORS_WEIGHTS_NAME), device="cpu") + + elif pretrained_model_path.joinpath(WEIGHTS_NAME).exists(): + logger.debug(f"loading weights from {pretrained_model_path} ...") + state_dict = torch.load( + pretrained_model_path.joinpath(WEIGHTS_NAME), map_location="cpu", weights_only=True + ) + else: + raise FileNotFoundError(f"no weights file found in {pretrained_model_path}") + + # load the motion module weights + if motion_module_path.exists() and motion_module_path.is_file(): + if motion_module_path.suffix.lower() in [".pth", ".pt", ".ckpt"]: + motion_state_dict = torch.load(motion_module_path, map_location="cpu", 
weights_only=True) + elif motion_module_path.suffix.lower() == ".safetensors": + motion_state_dict = load_file(motion_module_path, device="cpu") + else: + raise RuntimeError( + f"unknown file format for motion module weights: {motion_module_path.suffix}" + ) + else: + raise FileNotFoundError(f"no motion module weights found in {motion_module_path}") + + # merge the state dicts + state_dict.update(motion_state_dict) + + # load the weights into the model + m, u = model.load_state_dict(state_dict, strict=False) + logger.debug(f"### missing keys: {len(m)}; \n### unexpected keys: {len(u)};") + + params = [p.numel() if "temporal" in n else 0 for n, p in model.named_parameters()] + logger.info(f"Loaded {sum(params) / 1e6}M-parameter motion module") + + return model + + + @property + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. + """ + # set recursively + processors = {} + + def fn_recursive_add_processors( + name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor] + ): + if hasattr(module, "set_processor"): + processors[f"{name}.processor"] = module.processor + + for sub_name, child in module.named_children(): + if "temporal_transformer" not in sub_name: + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + if "temporal_transformer" not in name: + fn_recursive_add_processors(name, module, processors) + + return processors + + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." 
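+                # Note: a valid mapping uses the keys reported by `attn_processors`, e.g.
+                # (hypothetical):
+                #   unet.set_attn_processor({name: AttnProcessor2_0() for name in unet.attn_processors})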
+ ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + if "temporal_transformer" not in sub_name: + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + if "temporal_transformer" not in name: + fn_recursive_attn_processor(name, module, processor) + diff --git a/src/animatediff/models/unet_blocks.py b/src/animatediff/models/unet_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..6f2ee01eacff950b6701bdb94d961fe36ebe2ca7 --- /dev/null +++ b/src/animatediff/models/unet_blocks.py @@ -0,0 +1,843 @@ +# Adapted from https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/unet_2d_blocks.py + +from typing import Any, Dict, Optional, Tuple, Union + +import torch +from torch import nn + +from animatediff.models.attention import Transformer3DModel +from animatediff.models.motion_module import get_motion_module +from animatediff.models.resnet import Downsample3D, ResnetBlock3D, Upsample3D + + +def get_down_block( + down_block_type, + num_layers, + in_channels, + out_channels, + temb_channels, + add_downsample, + resnet_eps, + resnet_act_fn, + attn_num_head_channels, + resnet_groups=None, + cross_attention_dim=None, + downsample_padding=None, + dual_cross_attention=False, + use_linear_projection=False, + only_cross_attention=False, + upcast_attention=False, + resnet_time_scale_shift="default", + unet_use_cross_frame_attention=None, + unet_use_temporal_attention=None, + use_inflated_groupnorm=None, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, +): + down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type + if down_block_type == "DownBlock3D": + return DownBlock3D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + use_inflated_groupnorm=use_inflated_groupnorm, + use_motion_module=use_motion_module, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + elif down_block_type == "CrossAttnDownBlock3D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock3D") + return CrossAttnDownBlock3D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attn_num_head_channels, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + unet_use_cross_frame_attention=unet_use_cross_frame_attention, + unet_use_temporal_attention=unet_use_temporal_attention, + use_inflated_groupnorm=use_inflated_groupnorm, + use_motion_module=use_motion_module, + 
motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + raise ValueError(f"{down_block_type} does not exist.") + + +def get_up_block( + up_block_type, + num_layers, + in_channels, + out_channels, + prev_output_channel, + temb_channels, + add_upsample, + resnet_eps, + resnet_act_fn, + attn_num_head_channels, + resnet_groups=None, + cross_attention_dim=None, + dual_cross_attention=False, + use_linear_projection=False, + only_cross_attention=False, + upcast_attention=False, + resnet_time_scale_shift="default", + unet_use_cross_frame_attention=None, + unet_use_temporal_attention=None, + use_inflated_groupnorm=None, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, +): + up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type + if up_block_type == "UpBlock3D": + return UpBlock3D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + use_inflated_groupnorm=use_inflated_groupnorm, + use_motion_module=use_motion_module, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + elif up_block_type == "CrossAttnUpBlock3D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlock3D") + return CrossAttnUpBlock3D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attn_num_head_channels, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + unet_use_cross_frame_attention=unet_use_cross_frame_attention, + unet_use_temporal_attention=unet_use_temporal_attention, + use_inflated_groupnorm=use_inflated_groupnorm, + use_motion_module=use_motion_module, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + raise ValueError(f"{up_block_type} does not exist.") + + +class UNetMidBlock3DCrossAttn(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + output_scale_factor=1.0, + cross_attention_dim=1280, + dual_cross_attention=False, + use_linear_projection=False, + upcast_attention=False, + unet_use_cross_frame_attention=None, + unet_use_temporal_attention=None, + use_inflated_groupnorm=None, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + + self.has_cross_attention = True + self.attn_num_head_channels = attn_num_head_channels + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + # there is always at least one resnet + resnets = [ + ResnetBlock3D( + in_channels=in_channels, + out_channels=in_channels, + 
temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_inflated_groupnorm=use_inflated_groupnorm, + ) + ] + attentions = [] + motion_modules = [] + + for _ in range(num_layers): + if dual_cross_attention: + raise NotImplementedError + attentions.append( + Transformer3DModel( + attn_num_head_channels, + in_channels // attn_num_head_channels, + in_channels=in_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + unet_use_cross_frame_attention=unet_use_cross_frame_attention, + unet_use_temporal_attention=unet_use_temporal_attention, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=in_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + if use_motion_module + else None + ) + resnets.append( + ResnetBlock3D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_inflated_groupnorm=use_inflated_groupnorm, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet, motion_module in zip(self.attentions, self.resnets[1:], self.motion_modules): + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + if motion_module is not None: + hidden_states = motion_module( + hidden_states, + temb, + encoder_hidden_states=encoder_hidden_states, + ) + hidden_states = resnet(hidden_states, temb) + + return hidden_states + + +class CrossAttnDownBlock3D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + cross_attention_dim=1280, + output_scale_factor=1.0, + downsample_padding=1, + add_downsample=True, + dual_cross_attention=False, + use_linear_projection=False, + only_cross_attention=False, + upcast_attention=False, + unet_use_cross_frame_attention=None, + unet_use_temporal_attention=None, + use_inflated_groupnorm=None, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + resnets = [] + attentions = [] + motion_modules = [] + + self.has_cross_attention = True + self.attn_num_head_channels = 
attn_num_head_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock3D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_inflated_groupnorm=use_inflated_groupnorm, + ) + ) + if dual_cross_attention: + raise NotImplementedError + attentions.append( + Transformer3DModel( + num_attention_heads=attn_num_head_channels, + attention_head_dim=out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=transformer_layers_per_block, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + unet_use_cross_frame_attention=unet_use_cross_frame_attention, + unet_use_temporal_attention=unet_use_temporal_attention, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=out_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + if use_motion_module + else None + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample3D( + out_channels, + use_conv=True, + out_channels=out_channels, + padding=downsample_padding, + name="op", + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + output_states = () + + for resnet, attn, motion_module in zip(self.resnets, self.attentions, self.motion_modules): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(attn, return_dict=False), + hidden_states, + encoder_hidden_states, + )[0] + if motion_module is not None: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(motion_module), + hidden_states.requires_grad_(), + temb, + encoder_hidden_states, + ) + + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + # add motion module + hidden_states = ( + motion_module(hidden_states, temb, encoder_hidden_states=encoder_hidden_states) + if motion_module is not None + else hidden_states + ) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in 
self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class DownBlock3D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor=1.0, + add_downsample=True, + downsample_padding=1, + use_inflated_groupnorm=None, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + resnets = [] + motion_modules = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock3D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_inflated_groupnorm=use_inflated_groupnorm, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=out_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + if use_motion_module + else None + ) + + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample3D( + out_channels, + use_conv=True, + out_channels=out_channels, + padding=downsample_padding, + name="op", + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward(self, hidden_states, temb=None, encoder_hidden_states=None): + output_states = () + + for resnet, motion_module in zip(self.resnets, self.motion_modules): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + if motion_module is not None: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(motion_module), + hidden_states.requires_grad_(), + temb, + encoder_hidden_states, + ) + else: + hidden_states = resnet(hidden_states, temb) + + # add motion module + if motion_module: + hidden_states = motion_module( + hidden_states, temb, encoder_hidden_states=encoder_hidden_states + ) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class CrossAttnUpBlock3D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + cross_attention_dim=1280, + output_scale_factor=1.0, + add_upsample=True, + dual_cross_attention=False, + use_linear_projection=False, + only_cross_attention=False, + upcast_attention=False, + 
unet_use_cross_frame_attention=None, + unet_use_temporal_attention=None, + use_inflated_groupnorm=None, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + resnets = [] + attentions = [] + motion_modules = [] + + self.has_cross_attention = True + self.attn_num_head_channels = attn_num_head_channels + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock3D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_inflated_groupnorm=use_inflated_groupnorm, + ) + ) + if dual_cross_attention: + raise NotImplementedError + attentions.append( + Transformer3DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=transformer_layers_per_block, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + unet_use_cross_frame_attention=unet_use_cross_frame_attention, + unet_use_temporal_attention=unet_use_temporal_attention, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=out_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + if use_motion_module + else None + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + if add_upsample: + self.upsamplers = nn.ModuleList( + [Upsample3D(out_channels, use_conv=True, out_channels=out_channels)] + ) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ): + for resnet, attn, motion_module in zip(self.resnets, self.attentions, self.motion_modules): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(attn, return_dict=False), + hidden_states, + encoder_hidden_states, + )[0] + if motion_module is not None: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(motion_module), + hidden_states.requires_grad_(), + temb, + 
encoder_hidden_states, + ) + + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + + # add motion module + if motion_module: + hidden_states = motion_module( + hidden_states, temb, encoder_hidden_states=encoder_hidden_states + ) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + +class UpBlock3D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor=1.0, + add_upsample=True, + use_inflated_groupnorm=None, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + resnets = [] + motion_modules = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock3D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_inflated_groupnorm=use_inflated_groupnorm, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=out_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + if use_motion_module + else None + ) + + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + if add_upsample: + self.upsamplers = nn.ModuleList( + [Upsample3D(out_channels, use_conv=True, out_channels=out_channels)] + ) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states, + res_hidden_states_tuple, + temb=None, + upsample_size=None, + encoder_hidden_states=None, + ): + for resnet, motion_module in zip(self.resnets, self.motion_modules): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + if motion_module is not None: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(motion_module), + hidden_states.requires_grad_(), + temb, + encoder_hidden_states, + ) + else: + hidden_states = resnet(hidden_states, temb) + if motion_module: + hidden_states = motion_module( + hidden_states, temb, encoder_hidden_states=encoder_hidden_states + ) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states diff 
--git a/src/animatediff/pipelines/__init__.py b/src/animatediff/pipelines/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5cb74e5d3aeff6d09d36b56538e83a4935196eee --- /dev/null +++ b/src/animatediff/pipelines/__init__.py @@ -0,0 +1,14 @@ +from .animation import AnimationPipeline, AnimationPipelineOutput +from .context import get_context_scheduler, get_total_steps, ordered_halving, uniform +from .ti import get_text_embeddings, load_text_embeddings + +__all__ = [ + "AnimationPipeline", + "AnimationPipelineOutput", + "get_context_scheduler", + "get_total_steps", + "ordered_halving", + "uniform", + "get_text_embeddings", + "load_text_embeddings", +] diff --git a/src/animatediff/pipelines/animation.py b/src/animatediff/pipelines/animation.py new file mode 100644 index 0000000000000000000000000000000000000000..a513fd994c423e9c2c5333f230440d76bc81ca3c --- /dev/null +++ b/src/animatediff/pipelines/animation.py @@ -0,0 +1,3488 @@ +# Adapted from https://github.com/showlab/Tune-A-Video/blob/main/tuneavideo/pipelines/pipeline_tuneavideo.py + +import inspect +import itertools +import logging +from dataclasses import dataclass +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +import numpy as np +import torch +from diffusers import LCMScheduler +from diffusers.configuration_utils import FrozenDict +from diffusers.image_processor import VaeImageProcessor +from diffusers.loaders import LoraLoaderMixin, TextualInversionLoaderMixin +from diffusers.models import AutoencoderKL, ControlNetModel +from diffusers.pipelines.pipeline_utils import DiffusionPipeline +from diffusers.schedulers import (DDIMScheduler, DPMSolverMultistepScheduler, + EulerAncestralDiscreteScheduler, + EulerDiscreteScheduler, LMSDiscreteScheduler, + PNDMScheduler) +from diffusers.utils import (BaseOutput, deprecate, is_accelerate_available, + is_accelerate_version) +from diffusers.utils.torch_utils import is_compiled_module, randn_tensor +from einops import rearrange +from packaging import version +from tqdm.rich import tqdm +from transformers import CLIPImageProcessor, CLIPTokenizer + +from animatediff.ip_adapter import IPAdapter, IPAdapterFull, IPAdapterPlus +from animatediff.models.attention import BasicTransformerBlock +from animatediff.models.clip import CLIPSkipTextModel +from animatediff.models.unet import (UNet3DConditionModel, + UNetMidBlock3DCrossAttn) +from animatediff.models.unet_blocks import (CrossAttnDownBlock3D, + CrossAttnUpBlock3D, DownBlock3D, + UpBlock3D) +from animatediff.pipelines.context import (get_context_scheduler, + get_total_steps) +from animatediff.utils.model import nop_train +from animatediff.utils.pipeline import get_memory_format +from animatediff.utils.util import (end_profile, + get_tensor_interpolation_method, show_gpu, + start_profile, stopwatch_record, + stopwatch_start, stopwatch_stop) + +logger = logging.getLogger(__name__) + + + +C_REF_MODE = "write" + + +def torch_dfs(model: torch.nn.Module): + result = [model] + for child in model.children(): + result += torch_dfs(child) + return result + + +class PromptEncoder: + def __init__( + self, + pipe, + device, + latents_device, + num_videos_per_prompt, + do_classifier_free_guidance, + region_condi_list, + negative_prompt, + is_signle_prompt_mode, + clip_skip, + multi_uncond_mode + ): + self.pipe = pipe + self.is_single_prompt_mode=is_signle_prompt_mode + self.do_classifier_free_guidance = do_classifier_free_guidance + + uncond_num = 0 + if do_classifier_free_guidance: + if multi_uncond_mode: + 
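+                # multi_uncond_mode keeps one unconditional embedding per region instead of a
+                # single shared one (see how uncond_num pads prompt_embeds_region_list below)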
uncond_num = len(region_condi_list) + else: + uncond_num = 1 + + ### text + + prompt_nums = [] + prompt_map_list = [] + prompt_list = [] + + for condi in region_condi_list: + _prompt_map = condi["prompt_map"] + prompt_map_list.append(_prompt_map) + _prompt_map = dict(sorted(_prompt_map.items())) + _prompt_list = [_prompt_map[key_frame] for key_frame in _prompt_map.keys()] + prompt_nums.append( len(_prompt_list) ) + prompt_list += _prompt_list + + prompt_embeds = pipe._encode_prompt( + prompt_list, + device, + num_videos_per_prompt, + do_classifier_free_guidance, + negative_prompt, + prompt_embeds=None, + negative_prompt_embeds=None, + clip_skip=clip_skip, + ).to(device = latents_device) + + self.prompt_embeds_dtype = prompt_embeds.dtype + + + if do_classifier_free_guidance: + negative, positive = prompt_embeds.chunk(2, 0) + negative = negative.chunk(negative.shape[0], 0) + positive = positive.chunk(positive.shape[0], 0) + else: + positive = prompt_embeds + positive = positive.chunk(positive.shape[0], 0) + + if pipe.ip_adapter: + pipe.ip_adapter.set_text_length(positive[0].shape[1]) + + + prompt_embeds_region_list = [] + + if do_classifier_free_guidance: + prompt_embeds_region_list = [ + { + 0:negative[0] + } + ] * uncond_num + prompt_embeds_region_list + + pos_index = 0 + for prompt_map, num in zip(prompt_map_list, prompt_nums): + prompt_embeds_map={} + pos = positive[pos_index:pos_index+num] + + for i, key_frame in enumerate(prompt_map): + prompt_embeds_map[key_frame] = pos[i] + + prompt_embeds_region_list.append( prompt_embeds_map ) + pos_index += num + + if do_classifier_free_guidance: + prompt_map_list = [ + { + 0:negative_prompt + } + ] * uncond_num + prompt_map_list + + self.prompt_map_list = prompt_map_list + self.prompt_embeds_region_list = prompt_embeds_region_list + + ### image + if pipe.ip_adapter: + + ip_im_nums = [] + ip_im_map_list = [] + ip_im_list = [] + + for condi in region_condi_list: + _ip_im_map = condi["ip_adapter_map"]["images"] + ip_im_map_list.append(_ip_im_map) + _ip_im_map = dict(sorted(_ip_im_map.items())) + _ip_im_list = [_ip_im_map[key_frame] for key_frame in _ip_im_map.keys()] + ip_im_nums.append( len(_ip_im_list) ) + ip_im_list += _ip_im_list + + positive, negative = pipe.ip_adapter.get_image_embeds(ip_im_list) + + positive = positive.to(device=latents_device) + negative = negative.to(device=latents_device) + + bs_embed, seq_len, _ = positive.shape + positive = positive.repeat(1, 1, 1) + positive = positive.view(bs_embed * 1, seq_len, -1) + + bs_embed, seq_len, _ = negative.shape + negative = negative.repeat(1, 1, 1) + negative = negative.view(bs_embed * 1, seq_len, -1) + + if do_classifier_free_guidance: + negative = negative.chunk(negative.shape[0], 0) + positive = positive.chunk(positive.shape[0], 0) + else: + positive = positive.chunk(positive.shape[0], 0) + + im_prompt_embeds_region_list = [] + + if do_classifier_free_guidance: + im_prompt_embeds_region_list = [ + { + 0:negative[0] + } + ] * uncond_num + im_prompt_embeds_region_list + + pos_index = 0 + for ip_im_map, num in zip(ip_im_map_list, ip_im_nums): + im_prompt_embeds_map={} + pos = positive[pos_index:pos_index+num] + + for i, key_frame in enumerate(ip_im_map): + im_prompt_embeds_map[key_frame] = pos[i] + + im_prompt_embeds_region_list.append( im_prompt_embeds_map ) + pos_index += num + + + if do_classifier_free_guidance: + ip_im_map_list = [ + { + 0:None + } + ] * uncond_num + ip_im_map_list + + + self.ip_im_map_list = ip_im_map_list + self.im_prompt_embeds_region_list = 
im_prompt_embeds_region_list
+
+
+    def _get_current_prompt_embeds_from_text(
+        self,
+        prompt_map,
+        prompt_embeds_map,
+        center_frame = None,
+        video_length : int = 0
+    ):
+        # find the key frames on either side of center_frame, treating the prompt map
+        # as cyclic over video_length, then blend their embeddings by relative distance
+        key_prev = list(prompt_map.keys())[-1]
+        key_next = list(prompt_map.keys())[0]
+
+        for p in prompt_map.keys():
+            if p > center_frame:
+                key_next = p
+                break
+            key_prev = p
+
+        dist_prev = center_frame - key_prev
+        if dist_prev < 0:
+            dist_prev += video_length
+        dist_next = key_next - center_frame
+        if dist_next < 0:
+            dist_next += video_length
+
+        if key_prev == key_next or dist_prev + dist_next == 0:
+            return prompt_embeds_map[key_prev]
+
+        rate = dist_prev / (dist_prev + dist_next)
+
+        return get_tensor_interpolation_method()( prompt_embeds_map[key_prev], prompt_embeds_map[key_next], rate )
+
+    def get_current_prompt_embeds_from_text(
+        self,
+        center_frame = None,
+        video_length : int = 0
+    ):
+        outputs = ()
+        for prompt_map, prompt_embeds_map in zip(self.prompt_map_list, self.prompt_embeds_region_list):
+            embs = self._get_current_prompt_embeds_from_text(
+                prompt_map,
+                prompt_embeds_map,
+                center_frame,
+                video_length)
+            outputs += (embs,)
+
+        return outputs
+
+    def _get_current_prompt_embeds_from_image(
+        self,
+        ip_im_map,
+        im_prompt_embeds_map,
+        center_frame = None,
+        video_length : int = 0
+    ):
+        # same cyclic key-frame lookup as the text variant, over IP-Adapter image embeddings
+        key_prev = list(ip_im_map.keys())[-1]
+        key_next = list(ip_im_map.keys())[0]
+
+        for p in ip_im_map.keys():
+            if p > center_frame:
+                key_next = p
+                break
+            key_prev = p
+
+        dist_prev = center_frame - key_prev
+        if dist_prev < 0:
+            dist_prev += video_length
+        dist_next = key_next - center_frame
+        if dist_next < 0:
+            dist_next += video_length
+
+        if key_prev == key_next or dist_prev + dist_next == 0:
+            return im_prompt_embeds_map[key_prev]
+
+        rate = dist_prev / (dist_prev + dist_next)
+
+        return get_tensor_interpolation_method()( im_prompt_embeds_map[key_prev], im_prompt_embeds_map[key_next], rate)
+
+    def get_current_prompt_embeds_from_image(
+        self,
+        center_frame = None,
+        video_length : int = 0
+    ):
+        outputs = ()
+        for prompt_map, prompt_embeds_map in zip(self.ip_im_map_list, self.im_prompt_embeds_region_list):
+            embs = self._get_current_prompt_embeds_from_image(
+                prompt_map,
+                prompt_embeds_map,
+                center_frame,
+                video_length)
+            outputs += (embs,)
+
+        return outputs
+
+    def get_current_prompt_embeds_single(
+        self,
+        context: List[int] = None,
+        video_length : int = 0
+    ):
+        center_frame = context[len(context)//2]
+        text_emb = self.get_current_prompt_embeds_from_text(center_frame, video_length)
+        text_emb = torch.cat(text_emb)
+        if self.pipe.ip_adapter:
+            image_emb = self.get_current_prompt_embeds_from_image(center_frame, video_length)
+            image_emb = torch.cat(image_emb)
+            return torch.cat([text_emb,image_emb], dim=1)
+        else:
+            return text_emb
+
+    def get_current_prompt_embeds_multi(
+        self,
+        context: List[int] = None,
+        video_length : int = 0
+    ):
+
+        emb_list = []
+        for c in context:
+            t = self.get_current_prompt_embeds_from_text(c, video_length)
+            for i, emb in enumerate(t):
+                if i >= len(emb_list):
+                    emb_list.append([])
+                emb_list[i].append(emb)
+
+        text_emb = []
+        for emb in emb_list:
+            emb = torch.cat(emb)
+            text_emb.append(emb)
+        text_emb = torch.cat(text_emb)
+
+        if self.pipe.ip_adapter is None:
+            return text_emb
+
+        emb_list = []
+        for c in context:
+            t = self.get_current_prompt_embeds_from_image(c, video_length)
+            for i, emb in enumerate(t):
+                if i >= len(emb_list):
+                    emb_list.append([])
+                emb_list[i].append(emb)
+
+        image_emb = []
+        for emb in emb_list:
+            emb = torch.cat(emb)
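+            # at this point each region's per-frame image embeddings have been stacked
+            # into a single [len(context), seq_len, dim] tensor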
+            image_emb.append(emb)
+        image_emb = torch.cat(image_emb)
+
+        return torch.cat([text_emb,image_emb], dim=1)
+
+    def get_current_prompt_embeds(
+        self,
+        context: List[int] = None,
+        video_length : int = 0
+    ):
+        return self.get_current_prompt_embeds_single(context,video_length) if self.is_single_prompt_mode else self.get_current_prompt_embeds_multi(context,video_length)
+
+    def get_prompt_embeds_dtype(self):
+        return self.prompt_embeds_dtype
+
+    def get_condi_size(self):
+        return len(self.prompt_embeds_region_list)
+
+
+class RegionMask:
+    def __init__(
+        self,
+        region_list,
+        batch_size,
+        num_channels_latents,
+        video_length,
+        height,
+        width,
+        vae_scale_factor,
+        dtype,
+        device,
+        multi_uncond_mode
+    ):
+        shape = (
+            batch_size,
+            num_channels_latents,
+            video_length,
+            height // vae_scale_factor,
+            width // vae_scale_factor,
+        )
+
+        def get_area(m:torch.Tensor):
+            area = torch.where(m == 1)
+            if len(area[0]) == 0 or len(area[1]) == 0:
+                return (0,0,0,0)
+
+            ymin = min(area[0])
+            ymax = max(area[0])
+            xmin = min(area[1])
+            xmax = max(area[1])
+            h = ymax+1 - ymin
+            w = xmax+1 - xmin
+
+            mod_h = (h + 7) // 8 * 8
+            diff_h = mod_h - h
+            ymin -= diff_h
+            if ymin < 0:
+                ymin = 0
+            h = mod_h
+
+            mod_w = (w + 7) // 8 * 8
+            diff_w = mod_w - w
+            xmin -= diff_w
+            if xmin < 0:
+                xmin = 0
+            w = mod_w
+
+            return (int(xmin), int(ymin), int(w), int(h))
+
+
+        for r in region_list:
+            mask_latents = torch.zeros(shape)
+            cur = r["mask_images"]
+            area_info = None
+            if cur:
+                area_info = [ (0,0,0,0) for l in range(video_length)]
+
+                for frame_no in cur:
+                    mask = cur[frame_no]
+                    mask = np.array(mask.convert("L"))[None, None, :]
+                    mask = mask.astype(np.float32) / 255.0
+                    mask[mask < 0.5] = 0
+                    mask[mask >= 0.5] = 1
+                    mask = torch.from_numpy(mask)
+                    mask = torch.nn.functional.interpolate(
+                        mask, size=(height // vae_scale_factor, width // vae_scale_factor)
+                    )
+                    area_info[frame_no] = get_area(mask[0][0])
+
+                    mask_latents[:,:,frame_no,:,:] = mask
+            else:
+                mask_latents = torch.ones(shape)
+
+            w = mask_latents.shape[4]
+            h = mask_latents.shape[3]
+
+            r["mask_latents"] = mask_latents.to(device=device, dtype=dtype, non_blocking=True)
+            r["mask_images"] = None
+            r["area"] = area_info
+            r["latent_size"] = (w, h)
+
+        self.region_list = region_list
+
+        self.multi_uncond_mode = multi_uncond_mode
+
+        self.cond2region = {}
+        for i, r in enumerate(self.region_list):
+            if r["src"] != -1:
+                self.cond2region[r["src"]] = i
+
+
+    def get_mask(
+        self,
+        region_index,
+    ):
+        return self.region_list[region_index]["mask_latents"]
+
+    def get_region_from_layer(
+        self,
+        cond_layer,
+        cond_nums,
+    ):
+        if self.multi_uncond_mode:
+            cond_layer = cond_layer if cond_layer < cond_nums//2 else cond_layer - cond_nums//2
+        else:
+            if cond_layer == 0:
+                return -1  # uncond for all layers
+
+            cond_layer -= 1
+
+        if cond_layer not in self.cond2region:
+            logger.warning(f"unknown {cond_layer=}")
+            return -1
+
+        return self.cond2region[cond_layer]
+
+
+    def get_area(
+        self,
+        cond_layer,
+        cond_nums,
+        context,
+    ):
+
+        if self.multi_uncond_mode:
+            cond_layer = cond_layer if cond_layer < cond_nums//2 else cond_layer - cond_nums//2
+        else:
+            if cond_layer == 0:
+                return None,None
+
+            cond_layer -= 1
+
+
+        if cond_layer not in self.cond2region:
+            return None,None
+
+        region_index = self.cond2region[cond_layer]
+
+        if region_index == -1:
+            return None,None
+
+        _,_,w,h = self.region_list[region_index]["area"][context[0]]
+
+        l_w, l_h = self.region_list[region_index]["latent_size"]
+
+        xy_list = []
+        for c in context:
+            x,y,_,_ = self.region_list[region_index]["area"][c]
+
+            if x + w > l_w:
+                x -= (x+w - l_w)
+            if y + h > l_h:
+                y -= (y+h - l_h)
+
+            xy_list.append( (x,y) )
+
+
+        if self.region_list[region_index]["area"]:
+            return (w,h), xy_list
+        else:
+            return None,None
+
+    def get_crop_generation_rate(
+        self,
+        cond_layer,
+        cond_nums,
+    ):
+
+        if self.multi_uncond_mode:
+            cond_layer = cond_layer if cond_layer < cond_nums//2 else cond_layer - cond_nums//2
+        else:
+            if cond_layer == 0:
+                return 0
+
+            cond_layer -= 1
+
+
+        if cond_layer not in self.cond2region:
+            return 0
+
+        region_index = self.cond2region[cond_layer]
+
+        if region_index == -1:
+            return 0
+
+        return self.region_list[region_index]["crop_generation_rate"]
+
+
+@dataclass
+class AnimationPipelineOutput(BaseOutput):
+    videos: Union[torch.Tensor, np.ndarray]
+
+
+class AnimationPipeline(DiffusionPipeline, TextualInversionLoaderMixin):
+    _optional_components = ["feature_extractor"]
+
+    vae: AutoencoderKL
+    text_encoder: CLIPSkipTextModel
+    tokenizer: CLIPTokenizer
+    unet: UNet3DConditionModel
+    feature_extractor: CLIPImageProcessor
+    scheduler: Union[
+        DDIMScheduler,
+        DPMSolverMultistepScheduler,
+        EulerAncestralDiscreteScheduler,
+        EulerDiscreteScheduler,
+        LMSDiscreteScheduler,
+        PNDMScheduler,
+    ]
+    controlnet_map: Dict[str, ControlNetModel]
+    ip_adapter: IPAdapter = None
+
+    model_cpu_offload_seq = "text_encoder->unet->vae"
+
+    def __init__(
+        self,
+        vae: AutoencoderKL,
+        text_encoder: CLIPSkipTextModel,
+        tokenizer: CLIPTokenizer,
+        unet: UNet3DConditionModel,
+        scheduler: Union[
+            DDIMScheduler,
+            PNDMScheduler,
+            LMSDiscreteScheduler,
+            EulerDiscreteScheduler,
+            EulerAncestralDiscreteScheduler,
+            DPMSolverMultistepScheduler,
+        ],
+        feature_extractor: CLIPImageProcessor,
+        controlnet_map: Dict[str, ControlNetModel] = None,
+    ):
+        super().__init__()
+
+        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+            deprecation_message = (
+                f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
+                f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
+                "to update the config accordingly as leaving `steps_offset` might lead to incorrect results"
+                " in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,"
+                " it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`"
+                " file"
+            )
+            deprecate("steps_offset!=1", "1.0.0", deprecation_message, standard_warn=False)
+            new_config = dict(scheduler.config)
+            new_config["steps_offset"] = 1
+            scheduler._internal_dict = FrozenDict(new_config)
+
+        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+            deprecation_message = (
+                f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
+                " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
+                " config accordingly as not setting `clip_sample` in the config might lead to incorrect results in"
+                " future versions.
If you have downloaded this checkpoint from the Hugging Face Hub, it would be very" + " nice if you could open a Pull request for the `scheduler/scheduler_config.json` file" + ) + deprecate("clip_sample not set", "1.0.0", deprecation_message, standard_warn=False) + new_config = dict(scheduler.config) + new_config["clip_sample"] = False + scheduler._internal_dict = FrozenDict(new_config) + + is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse( + version.parse(unet.config._diffusers_version).base_version + ) < version.parse("0.9.0.dev0") + is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64 + if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64: + deprecation_message = ( + "The configuration file of the unet has set the default `sample_size` to smaller than" + " 64 which seems highly unlikely. If your checkpoint is a fine-tuned version of any of the" + " following: \n- CompVis/stable-diffusion-v1-4 \n- CompVis/stable-diffusion-v1-3 \n-" + " CompVis/stable-diffusion-v1-2 \n- CompVis/stable-diffusion-v1-1 \n- runwayml/stable-diffusion-v1-5" + " \n- runwayml/stable-diffusion-inpainting \n you should change 'sample_size' to 64 in the" + " configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`" + " in the config might lead to incorrect results in future versions. If you have downloaded this" + " checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for" + " the `unet/config.json` file" + ) + deprecate("sample_size<64", "1.0.0", deprecation_message, standard_warn=False) + new_config = dict(unet.config) + new_config["sample_size"] = 64 + unet._internal_dict = FrozenDict(new_config) + + self.register_modules( + vae=vae, + text_encoder=text_encoder, + tokenizer=tokenizer, + unet=unet, + scheduler=scheduler, + feature_extractor=feature_extractor, + ) + self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1) + self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor) + self.control_image_processor = VaeImageProcessor( + vae_scale_factor=self.vae_scale_factor, do_convert_rgb=True, do_normalize=False + ) + self.controlnet_map = controlnet_map + + + def enable_vae_slicing(self): + r""" + Enable sliced VAE decoding. + + When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several + steps. This is useful to save some memory and allow larger batch sizes. + """ + self.vae.enable_slicing() + + def disable_vae_slicing(self): + r""" + Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to + computing decoding in one step. + """ + self.vae.disable_slicing() + + def enable_vae_tiling(self): + r""" + Enable tiled VAE decoding. + + When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in + several steps. This is useful to save a large amount of memory and to allow the processing of larger images. + """ + self.vae.enable_tiling() + + def disable_vae_tiling(self): + r""" + Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to + computing decoding in one step. + """ + self.vae.disable_tiling() + + def __enable_model_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. 
Compared
+        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`
+        method is called, and the model remains on the GPU until the next model runs. Memory savings are lower than with
+        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.
+        """
+        if is_accelerate_available() and is_accelerate_version(">=", "0.17.0.dev0"):
+            from accelerate import cpu_offload_with_hook
+        else:
+            raise ImportError("`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.")
+
+        device = torch.device(f"cuda:{gpu_id}")
+
+        if self.device.type != "cpu":
+            self.to("cpu", silence_dtype_warnings=True)
+            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)
+
+        hook = None
+        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:
+            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)
+
+        if self.safety_checker is not None:
+            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)
+
+        # the controlnet hook has to be offloaded manually, as it alternates with the unet
+        cpu_offload_with_hook(self.controlnet, device)
+
+        # We'll offload the last model manually.
+        self.final_offload_hook = hook
+
+    @property
+    def _execution_device(self):
+        r"""
+        Returns the device on which the pipeline's models will be executed. After calling
+        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module
+        hooks.
+        """
+        if not hasattr(self.unet, "_hf_hook"):
+            return self.device
+        for module in self.unet.modules():
+            if (
+                hasattr(module, "_hf_hook")
+                and hasattr(module._hf_hook, "execution_device")
+                and module._hf_hook.execution_device is not None
+            ):
+                return torch.device(module._hf_hook.execution_device)
+        return self.device
+
+    def _encode_prompt(
+        self,
+        prompt,
+        device,
+        num_videos_per_prompt: int = 1,
+        do_classifier_free_guidance: bool = False,
+        negative_prompt=None,
+        max_embeddings_multiples=3,
+        prompt_embeds: Optional[torch.FloatTensor] = None,
+        negative_prompt_embeds: Optional[torch.FloatTensor] = None,
+        clip_skip: int = 1,
+    ):
+        r"""
+        Encodes the prompt into text encoder hidden states.
+
+        Args:
+            prompt (`str` or `List[str]`):
+                prompt to be encoded
+            device: (`torch.device`):
+                torch device
+            num_videos_per_prompt (`int`):
+                number of videos that should be generated per prompt
+            do_classifier_free_guidance (`bool`):
+                whether to use classifier free guidance or not
+            negative_prompt (`str` or `List[str]`):
+                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored
+                if `guidance_scale` is less than `1`).
+            max_embeddings_multiples (`int`, *optional*, defaults to `3`):
+                The max multiple length of prompt embeddings compared to the max output length of the text encoder.
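+
+        Example (illustrative sketch only; assumes an already-constructed `pipe` on a CUDA device):
+
+            embeds = pipe._encode_prompt(
+                ["a cat walking, masterpiece"],
+                device=torch.device("cuda"),
+                num_videos_per_prompt=1,
+                do_classifier_free_guidance=True,
+                negative_prompt=["low quality"],
+            )
+            # with guidance enabled, the result stacks [negative, positive] along dim 0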
+ """ + from ..utils.lpw_stable_diffusion import get_weighted_text_embeddings + + if prompt is not None and isinstance(prompt, str): + batch_size = 1 + elif prompt is not None and isinstance(prompt, list): + batch_size = len(prompt) + else: + batch_size = prompt_embeds.shape[0] + + if negative_prompt_embeds is None: + if negative_prompt is None: + negative_prompt = [""] * batch_size + elif isinstance(negative_prompt, str): + negative_prompt = [negative_prompt] * batch_size + if batch_size != len(negative_prompt): + raise ValueError( + f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:" + f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches" + " the batch size of `prompt`." + ) + if prompt_embeds is None or negative_prompt_embeds is None: + if isinstance(self, TextualInversionLoaderMixin): + prompt = self.maybe_convert_prompt(prompt, self.tokenizer) + if do_classifier_free_guidance and negative_prompt_embeds is None: + negative_prompt = self.maybe_convert_prompt(negative_prompt, self.tokenizer) + + prompt_embeds1, negative_prompt_embeds1 = get_weighted_text_embeddings( + pipe=self, + prompt=prompt, + uncond_prompt=negative_prompt if do_classifier_free_guidance else None, + max_embeddings_multiples=max_embeddings_multiples, + clip_skip=clip_skip + ) + if prompt_embeds is None: + prompt_embeds = prompt_embeds1 + if negative_prompt_embeds is None: + negative_prompt_embeds = negative_prompt_embeds1 + + bs_embed, seq_len, _ = prompt_embeds.shape + # duplicate text embeddings for each generation per prompt, using mps friendly method + prompt_embeds = prompt_embeds.repeat(1, num_videos_per_prompt, 1) + prompt_embeds = prompt_embeds.view(bs_embed * num_videos_per_prompt, seq_len, -1) + + if do_classifier_free_guidance: + bs_embed, seq_len, _ = negative_prompt_embeds.shape + negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_videos_per_prompt, 1) + negative_prompt_embeds = negative_prompt_embeds.view(bs_embed * num_videos_per_prompt, seq_len, -1) + prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds]) + + return prompt_embeds + + def __encode_prompt( + self, + prompt, + device, + num_videos_per_prompt: int = 1, + do_classifier_free_guidance: bool = False, + negative_prompt=None, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + lora_scale: Optional[float] = None, + clip_skip: int = 1, + ): + # set lora scale so that monkey patched LoRA + # function of text encoder can correctly access it + if lora_scale is not None and isinstance(self, LoraLoaderMixin): + self._lora_scale = lora_scale + + batch_size = len(prompt) if isinstance(prompt, list) else 1 + + if prompt_embeds is None: + # textual inversion: procecss multi-vector tokens if necessary + if isinstance(self, TextualInversionLoaderMixin): + prompt = self.maybe_convert_prompt(prompt, self.tokenizer) + + text_inputs = self.tokenizer( + prompt, + padding="max_length", + max_length=self.tokenizer.model_max_length, + truncation=True, + return_tensors="pt", + ) + text_input_ids = text_inputs.input_ids + untruncated_ids = self.tokenizer(prompt, padding="longest", return_tensors="pt").input_ids + + if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal( + text_input_ids, untruncated_ids + ): + removed_text = self.tokenizer.batch_decode( + untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1] + ) + logger.warning( + "The following part of your input was 
truncated because CLIP can only handle sequences up to" + f" {self.tokenizer.model_max_length} tokens: {removed_text}" + ) + + if ( + hasattr(self.text_encoder.config, "use_attention_mask") + and self.text_encoder.config.use_attention_mask + ): + attention_mask = text_inputs.attention_mask.to(device) + else: + attention_mask = None + + prompt_embeds = self.text_encoder( + text_input_ids.to(device), + attention_mask=attention_mask, + clip_skip=clip_skip, + ) + prompt_embeds = prompt_embeds[0] + + bs_embed, seq_len, _ = prompt_embeds.shape + # duplicate text embeddings for each generation per prompt, using mps friendly method + prompt_embeds = prompt_embeds.repeat(1, num_videos_per_prompt, 1) + prompt_embeds = prompt_embeds.view(bs_embed * num_videos_per_prompt, seq_len, -1) + + # get unconditional embeddings for classifier free guidance + if do_classifier_free_guidance and negative_prompt_embeds is None: + uncond_tokens: List[str] + if negative_prompt is None: + uncond_tokens = [""] * batch_size + elif prompt is not None and type(prompt) is not type(negative_prompt): + raise TypeError( + f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=" + f" {type(prompt)}." + ) + elif isinstance(negative_prompt, str): + uncond_tokens = [negative_prompt] + elif batch_size != len(negative_prompt): + raise ValueError( + f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:" + f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches" + " the batch size of `prompt`." + ) + else: + uncond_tokens = negative_prompt + + # textual inversion: procecss multi-vector tokens if necessary + if isinstance(self, TextualInversionLoaderMixin): + uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer) + + max_length = prompt_embeds.shape[1] + uncond_input = self.tokenizer( + uncond_tokens, + padding="max_length", + max_length=max_length, + truncation=True, + return_tensors="pt", + ) + uncond_input_ids = uncond_input.input_ids + + if ( + hasattr(self.text_encoder.config, "use_attention_mask") + and self.text_encoder.config.use_attention_mask + ): + attention_mask = uncond_input.attention_mask.to(device) + else: + attention_mask = None + + negative_prompt_embeds = self.text_encoder( + uncond_input_ids.to(device), + attention_mask=attention_mask, + clip_skip=clip_skip, + ) + negative_prompt_embeds = negative_prompt_embeds[0] + + if do_classifier_free_guidance: + # duplicate unconditional embeddings for each generation per prompt, using mps friendly method + seq_len = negative_prompt_embeds.shape[1] + + negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device) + + negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_videos_per_prompt, 1) + negative_prompt_embeds = negative_prompt_embeds.view( + batch_size * num_videos_per_prompt, seq_len, -1 + ) + + # For classifier free guidance, we need to do two forward passes. 
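+            # e.g. with batch_size=1, num_videos_per_prompt=1 and a typical SD1.5 text
+            # encoder (77 tokens, hidden size 768; illustrative values only), the two
+            # tensors are each [1, 77, 768] and the concatenation below yields [2, 77, 768],
+            # so a single UNet forward covers both the unconditional and conditional branch.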
+            # Here we concatenate the unconditional and text embeddings into a single batch
+            # to avoid doing two forward passes
+            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])
+
+        return prompt_embeds
+
+    def interpolate_latents(self, latents: torch.Tensor, interpolation_factor: int, device):
+        if interpolation_factor < 2:
+            return latents
+
+        new_latents = torch.zeros(
+            (latents.shape[0],latents.shape[1],((latents.shape[2]-1) * interpolation_factor)+1, latents.shape[3],latents.shape[4]),
+            device=latents.device,
+            dtype=latents.dtype,
+        )
+
+        org_video_length = latents.shape[2]
+        rate = [i/interpolation_factor for i in range(interpolation_factor)][1:]
+
+        new_index = 0
+
+        v0 = None
+        v1 = None
+
+        for i0,i1 in zip( range( org_video_length ),range( org_video_length )[1:] ):
+            v0 = latents[:,:,i0,:,:]
+            v1 = latents[:,:,i1,:,:]
+
+            new_latents[:,:,new_index,:,:] = v0
+            new_index += 1
+
+            for f in rate:
+                v = get_tensor_interpolation_method()(v0.to(device=device),v1.to(device=device),f)
+                new_latents[:,:,new_index,:,:] = v.to(latents.device)
+                new_index += 1
+
+        new_latents[:,:,new_index,:,:] = v1
+        new_index += 1
+
+        return new_latents
+
+
+
+    def decode_latents(self, latents: torch.Tensor):
+        video_length = latents.shape[2]
+        latents = 1 / self.vae.config.scaling_factor * latents
+        latents = rearrange(latents, "b c f h w -> (b f) c h w")
+        # video = self.vae.decode(latents).sample
+        video = []
+        for frame_idx in range(latents.shape[0]):
+            video.append(
+#                self.vae.decode(latents[frame_idx : frame_idx + 1].to(self.vae.device, self.vae.dtype)).sample.cpu()
+                self.vae.decode(latents[frame_idx : frame_idx + 1].to("cuda", self.vae.dtype)).sample.cpu()
+            )
+        video = torch.cat(video)
+        video = rearrange(video, "(b f) c h w -> b c f h w", f=video_length)
+        video = (video / 2 + 0.5).clamp(0, 1)
+        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16
+        video = video.float().numpy()
+        return video
+
+    def prepare_extra_step_kwargs(self, generator, eta):
+        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature
+        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.
+        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502
+        # and should be between [0, 1]
+
+        accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys())
+        extra_step_kwargs = {}
+        if accepts_eta:
+            extra_step_kwargs["eta"] = eta
+
+        # check if the scheduler accepts generator
+        accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys())
+        if accepts_generator:
+            extra_step_kwargs["generator"] = generator
+        return extra_step_kwargs
+
+    def check_inputs(
+        self,
+        prompt,
+        height,
+        width,
+        callback_steps,
+        negative_prompt=None,
+        prompt_embeds=None,
+        negative_prompt_embeds=None,
+    ):
+        if height % 8 != 0 or width % 8 != 0:
+            raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.")
+
+        if callback_steps is not None:
+            if not isinstance(callback_steps, list):
+                raise ValueError("`callback_steps` has to be a list of positive integers.")
+            for callback_step in callback_steps:
+                if not isinstance(callback_step, int) or callback_step <= 0:
+                    raise ValueError("`callback_steps` has to be a list of positive integers.")
+
+        if prompt is not None and prompt_embeds is not None:
+            raise ValueError(
+                f"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}.
Please make sure to" + " only forward one of the two." + ) + elif prompt is None and prompt_embeds is None: + raise ValueError( + "Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined." + ) + elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)): + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + + if negative_prompt is not None and negative_prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:" + f" {negative_prompt_embeds}. Please make sure to only forward one of the two." + ) + + if prompt_embeds is not None and negative_prompt_embeds is not None: + if prompt_embeds.shape != negative_prompt_embeds.shape: + raise ValueError( + "`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but" + f" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`" + f" {negative_prompt_embeds.shape}." + ) + + def prepare_image( + self, + image, + width, + height, + batch_size, + num_images_per_prompt, + device, + dtype, + do_classifier_free_guidance=False, + guess_mode=False, + ): + image = self.control_image_processor.preprocess(image, height=height, width=width).to(dtype=torch.float32) + image_batch_size = image.shape[0] + + if image_batch_size == 1: + repeat_by = batch_size + else: + # image batch size is the same as prompt batch size + repeat_by = num_images_per_prompt + + image = image.repeat_interleave(repeat_by, dim=0) + + image = image.to(device=device, dtype=dtype) + + #if do_classifier_free_guidance and not guess_mode: + # image = torch.cat([image] * 2) + + return image + + def prepare_ref_image( + self, + image, + width, + height, + batch_size, + num_images_per_prompt, + device, + dtype, + do_classifier_free_guidance=False, + guess_mode=False, + ): + image = self.image_processor.preprocess(image, height=height, width=width).to(dtype=torch.float32) + image_batch_size = image.shape[0] + + if image_batch_size == 1: + repeat_by = batch_size + else: + # image batch size is the same as prompt batch size + repeat_by = num_images_per_prompt + + image = image.repeat_interleave(repeat_by, dim=0) + + image = image.to(device=device, dtype=dtype) + + if do_classifier_free_guidance and not guess_mode: + image = torch.cat([image] * 2) + + return image + + def prepare_latents( + self, + batch_size, + num_channels_latents, + video_length, + height, + width, + dtype, + device, + generator, + img2img_map, + timestep, + latents=None, + is_strength_max=True, + return_noise=True, + return_image_latents=True, + ): + shape = ( + batch_size, + num_channels_latents, + video_length, + height // self.vae_scale_factor, + width // self.vae_scale_factor, + ) + if isinstance(generator, list) and len(generator) != batch_size: + raise ValueError( + f"You have passed a list of generators of length {len(generator)}, but requested an effective batch" + f" size of {batch_size}. Make sure the batch size matches the length of the generators." 
+            )
+
+        image_latents = None
+
+        if img2img_map:
+            image_latents = torch.zeros(shape, device=device, dtype=dtype)
+            for frame_no in img2img_map["images"]:
+                img = img2img_map["images"][frame_no]
+                img = self.image_processor.preprocess(img)
+                img = img.to(device="cuda", dtype=self.vae.dtype)
+                img = self.vae.encode(img).latent_dist.sample(generator)
+                img = self.vae.config.scaling_factor * img
+                img = torch.cat([img], dim=0)
+                image_latents[:,:,frame_no,:,:] = img.to(device=device, dtype=dtype)
+
+        else:
+            is_strength_max = True
+
+
+        if latents is None:
+            noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)
+            latents = noise if is_strength_max else self.scheduler.add_noise(image_latents, noise, timestep)
+            latents = latents * self.scheduler.init_noise_sigma if is_strength_max else latents
+        else:
+            noise = latents.to(device)
+            latents = noise * self.scheduler.init_noise_sigma
+
+        outputs = (latents.to(device, dtype),)
+
+        if return_noise:
+            outputs += (noise.to(device, dtype),)
+
+        if return_image_latents:
+            if image_latents is not None:
+                outputs += (image_latents.to(device, dtype),)
+            else:
+                outputs += (None,)
+
+
+        return outputs
+
+
+    # from diffusers/examples/community/stable_diffusion_controlnet_reference.py
+    def prepare_ref_latents(self, refimage, batch_size, dtype, device, generator, do_classifier_free_guidance):
+        refimage = refimage.to(device=device, dtype=self.vae.dtype)
+
+        # encode the mask image into latent space so we can concatenate it to the latents
+        if isinstance(generator, list):
+            ref_image_latents = [
+                self.vae.encode(refimage[i : i + 1]).latent_dist.sample(generator=generator[i])
+                for i in range(batch_size)
+            ]
+            ref_image_latents = torch.cat(ref_image_latents, dim=0)
+        else:
+            ref_image_latents = self.vae.encode(refimage).latent_dist.sample(generator=generator)
+        ref_image_latents = self.vae.config.scaling_factor * ref_image_latents
+
+        ref_image_latents = ref_image_latents.to(device=device, dtype=dtype)
+
+        # duplicate mask and ref_image_latents for each generation per prompt, using mps friendly method
+        if ref_image_latents.shape[0] < batch_size:
+            if not batch_size % ref_image_latents.shape[0] == 0:
+                raise ValueError(
+                    "The passed images and the required batch size don't match. Images are supposed to be duplicated"
+                    f" to a total batch size of {batch_size}, but {ref_image_latents.shape[0]} images were passed."
+                    " Make sure the number of images that you pass is divisible by the total requested batch size."
+                )
+            ref_image_latents = ref_image_latents.repeat(batch_size // ref_image_latents.shape[0], 1, 1, 1)
+
+        ref_image_latents = torch.cat([ref_image_latents] * 2) if do_classifier_free_guidance else ref_image_latents
+
+        # aligning device to prevent device errors when concatenating it with the latent model input
+        ref_image_latents = ref_image_latents.to(device=device, dtype=dtype)
+        return ref_image_latents
+
+    # from diffusers/examples/community/stable_diffusion_controlnet_reference.py
+    def prepare_controlnet_ref_only_without_motion(
+        self,
+        ref_image_latents,
+        batch_size,
+        num_images_per_prompt,
+        do_classifier_free_guidance,
+        attention_auto_machine_weight,
+        gn_auto_machine_weight,
+        style_fidelity,
+        reference_attn,
+        reference_adain,
+        _scale_pattern,
+        region_num
+    ):
+        global C_REF_MODE
+        # 9.
Modify self attention and group norm + C_REF_MODE = "write" + uc_mask = ( + torch.Tensor([1] * batch_size * num_images_per_prompt + [0] * batch_size * num_images_per_prompt * (region_num-1)) + .type_as(ref_image_latents) + .bool() + ) + + _scale_pattern = _scale_pattern * (batch_size // len(_scale_pattern) + 1) + _scale_pattern = _scale_pattern[:batch_size] + _rev_pattern = [1-i for i in _scale_pattern] + + scale_pattern_double = torch.tensor(_scale_pattern*region_num).to(self.device, dtype=self.unet.dtype) + rev_pattern_double = torch.tensor(_rev_pattern*region_num).to(self.device, dtype=self.unet.dtype) + scale_pattern = torch.tensor(_scale_pattern).to(self.device, dtype=self.unet.dtype) + rev_pattern = torch.tensor(_rev_pattern).to(self.device, dtype=self.unet.dtype) + + + def hacked_basic_transformer_inner_forward( + self, + hidden_states: torch.FloatTensor, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + timestep: Optional[torch.LongTensor] = None, + cross_attention_kwargs: Dict[str, Any] = None, + video_length=None, + ): + if self.use_ada_layer_norm: + norm_hidden_states = self.norm1(hidden_states, timestep) + else: + norm_hidden_states = self.norm1(hidden_states) + + # 1. Self-Attention + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if self.unet_use_cross_frame_attention: + cross_attention_kwargs["video_length"] = video_length + + if self.only_cross_attention: + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + else: + if C_REF_MODE == "write": + self.bank.append(norm_hidden_states.detach().clone()) + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + if C_REF_MODE == "read": + if attention_auto_machine_weight > self.attn_weight: + attn_output_uc = self.attn1( + norm_hidden_states, + encoder_hidden_states=torch.cat([norm_hidden_states] + self.bank, dim=1), + # attention_mask=attention_mask, + **cross_attention_kwargs, + ) + + if style_fidelity > 0: + attn_output_c = attn_output_uc.clone() + + if do_classifier_free_guidance: + attn_output_c[uc_mask] = self.attn1( + norm_hidden_states[uc_mask], + encoder_hidden_states=norm_hidden_states[uc_mask], + **cross_attention_kwargs, + ) + + attn_output = style_fidelity * attn_output_c + (1.0 - style_fidelity) * attn_output_uc + + else: + attn_output = attn_output_uc + + attn_org = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + + attn_output = scale_pattern_double[:,None,None] * attn_output + rev_pattern_double[:,None,None] * attn_org + + else: + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + + self.bank.clear() + + hidden_states = attn_output + hidden_states + + if self.attn2 is not None: + norm_hidden_states = ( + self.norm2(hidden_states, timestep) if self.use_ada_layer_norm else self.norm2(hidden_states) + ) + + # 2. 
Cross-Attention + attn_output = self.attn2( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=encoder_attention_mask, + **cross_attention_kwargs, + ) + hidden_states = attn_output + hidden_states + + # 3. Feed-forward + hidden_states = self.ff(self.norm3(hidden_states)) + hidden_states + + # 4. Temporal-Attention + if self.unet_use_temporal_attention: + d = hidden_states.shape[1] + hidden_states = rearrange(hidden_states, "(b f) d c -> (b d) f c", f=video_length) + norm_hidden_states = ( + self.norm_temp(hidden_states, timestep) + if self.use_ada_layer_norm + else self.norm_temp(hidden_states) + ) + hidden_states = self.attn_temp(norm_hidden_states) + hidden_states + hidden_states = rearrange(hidden_states, "(b d) f c -> (b f) d c", d=d) + + return hidden_states + + def hacked_mid_forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + + eps = 1e-6 + + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet, motion_module in zip(self.attentions, self.resnets[1:], self.motion_modules): + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + + x = hidden_states + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(x, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append(mean) + self.var_bank.append(var) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(x, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank) / float(len(self.mean_bank)) + var_acc = sum(self.var_bank) / float(len(self.var_bank)) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + x_uc = (((x - mean) / std) * std_acc) + mean_acc + x_c = x_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + + f = x.shape[2] + x_c = rearrange(x_c, "b c f h w -> (b f) c h w") + x = rearrange(x, "b c f h w -> (b f) c h w") + + x_c[uc_mask] = x[uc_mask] + + x_c = rearrange(x_c, "(b f) c h w -> b c f h w", f=f) + x = rearrange(x, "(b f) c h w -> b c f h w", f=f) + + mod_x = style_fidelity * x_c + (1.0 - style_fidelity) * x_uc + + x = scale_pattern[None,None,:,None,None] * mod_x + rev_pattern[None,None,:,None,None] * x + + self.mean_bank = [] + self.var_bank = [] + + hidden_states = x + + if motion_module is not None: + hidden_states = motion_module( + hidden_states, + temb, + encoder_hidden_states=encoder_hidden_states, + ) + + hidden_states = resnet(hidden_states, temb) + + return hidden_states + + + def hack_CrossAttnDownBlock3D_forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ): + eps = 1e-6 + + # TODO(Patrick, William) - attention mask is not used + output_states = () + + for i, 
(resnet, attn, motion_module) in enumerate(zip(self.resnets, self.attentions, self.motion_modules)): + hidden_states = resnet(hidden_states, temb) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + + f = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states_c = rearrange(hidden_states_c, "b c f h w -> (b f) c h w") + + hidden_states_c[uc_mask] = hidden_states[uc_mask] + + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=f) + hidden_states_c = rearrange(hidden_states_c, "(b f) c h w -> b c f h w", f=f) + + mod_hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + hidden_states = scale_pattern[None,None,:,None,None] * mod_hidden_states + rev_pattern[None,None,:,None,None] * hidden_states + + # add motion module + hidden_states = ( + motion_module(hidden_states, temb, encoder_hidden_states=encoder_hidden_states) + if motion_module is not None + else hidden_states + ) + + output_states = output_states + (hidden_states,) + + if C_REF_MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + def hacked_DownBlock3D_forward(self, hidden_states, temb=None, encoder_hidden_states=None): + eps = 1e-6 + + output_states = () + + for i, (resnet, motion_module) in enumerate(zip(self.resnets, self.motion_modules)): + hidden_states = resnet(hidden_states, temb) + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + + f = hidden_states.shape[2] + hidden_states = 
rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states_c = rearrange(hidden_states_c, "b c f h w -> (b f) c h w") + + hidden_states_c[uc_mask] = hidden_states[uc_mask] + + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=f) + hidden_states_c = rearrange(hidden_states_c, "(b f) c h w -> b c f h w", f=f) + + mod_hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + hidden_states = scale_pattern[None,None,:,None,None] * mod_hidden_states + rev_pattern[None,None,:,None,None] * hidden_states + + # add motion module + if motion_module: + hidden_states = motion_module( + hidden_states, temb, encoder_hidden_states=encoder_hidden_states + ) + + output_states = output_states + (hidden_states,) + + if C_REF_MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + def hacked_CrossAttnUpBlock3D_forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ): + eps = 1e-6 + # TODO(Patrick, William) - attention mask is not used + for i, (resnet, attn, motion_module) in enumerate(zip(self.resnets, self.attentions, self.motion_modules)): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + + f = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states_c = rearrange(hidden_states_c, "b c f h w -> (b f) c h w") + + hidden_states_c[uc_mask] = hidden_states[uc_mask] + + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=f) + hidden_states_c = rearrange(hidden_states_c, "(b f) c h w -> b c f h w", f=f) + + mod_hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + hidden_states 
= scale_pattern[None,None,:,None,None] * mod_hidden_states + rev_pattern[None,None,:,None,None] * hidden_states + + # add motion module + if motion_module: + hidden_states = motion_module( + hidden_states, temb, encoder_hidden_states=encoder_hidden_states + ) + + + if C_REF_MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + def hacked_UpBlock3D_forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None, encoder_hidden_states=None): + eps = 1e-6 + for i, (resnet,motion_module) in enumerate(zip(self.resnets, self.motion_modules)): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + hidden_states = resnet(hidden_states, temb) + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + f = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states_c = rearrange(hidden_states_c, "b c f h w -> (b f) c h w") + + hidden_states_c[uc_mask] = hidden_states[uc_mask] + + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=f) + hidden_states_c = rearrange(hidden_states_c, "(b f) c h w -> b c f h w", f=f) + + mod_hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + hidden_states = scale_pattern[None,None,:,None,None] * mod_hidden_states + rev_pattern[None,None,:,None,None] * hidden_states + + if motion_module: + hidden_states = motion_module( + hidden_states, temb, encoder_hidden_states=encoder_hidden_states + ) + + + + if C_REF_MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + if reference_attn: + attn_modules = [module for module in torch_dfs(self.unet) if isinstance(module, BasicTransformerBlock)] + attn_modules = sorted(attn_modules, key=lambda x: -x.norm1.normalized_shape[0]) + + for i, module in enumerate(attn_modules): + module._original_inner_forward = module.forward + module.forward = hacked_basic_transformer_inner_forward.__get__(module, BasicTransformerBlock) + module.bank = [] + module.attn_weight = float(i) / float(len(attn_modules)) + + attn_modules = None + torch.cuda.empty_cache() + + if reference_adain: + gn_modules = [self.unet.mid_block] + self.unet.mid_block.gn_weight = 0 + + down_blocks = self.unet.down_blocks + for w, module in enumerate(down_blocks): + module.gn_weight = 1.0 - float(w) / 
float(len(down_blocks)) + gn_modules.append(module) + + up_blocks = self.unet.up_blocks + for w, module in enumerate(up_blocks): + module.gn_weight = float(w) / float(len(up_blocks)) + gn_modules.append(module) + + for i, module in enumerate(gn_modules): + if getattr(module, "original_forward", None) is None: + module.original_forward = module.forward + if i == 0: + # mid_block + module.forward = hacked_mid_forward.__get__(module, UNetMidBlock3DCrossAttn) + elif isinstance(module, CrossAttnDownBlock3D): + module.forward = hack_CrossAttnDownBlock3D_forward.__get__(module, CrossAttnDownBlock3D) + elif isinstance(module, DownBlock3D): + module.forward = hacked_DownBlock3D_forward.__get__(module, DownBlock3D) + elif isinstance(module, CrossAttnUpBlock3D): + module.forward = hacked_CrossAttnUpBlock3D_forward.__get__(module, CrossAttnUpBlock3D) + elif isinstance(module, UpBlock3D): + module.forward = hacked_UpBlock3D_forward.__get__(module, UpBlock3D) + module.mean_bank = [] + module.var_bank = [] + module.gn_weight *= 2 + + gn_modules = None + torch.cuda.empty_cache() + + + # from diffusers/examples/community/stable_diffusion_controlnet_reference.py + def prepare_controlnet_ref_only( + self, + ref_image_latents, + batch_size, + num_images_per_prompt, + do_classifier_free_guidance, + attention_auto_machine_weight, + gn_auto_machine_weight, + style_fidelity, + reference_attn, + reference_adain, + _scale_pattern, + ): + global C_REF_MODE + # 9. Modify self attention and group norm + C_REF_MODE = "write" + uc_mask = ( + torch.Tensor([1] * batch_size * num_images_per_prompt + [0] * batch_size * num_images_per_prompt) + .type_as(ref_image_latents) + .bool() + ) + + _scale_pattern = _scale_pattern * (batch_size // len(_scale_pattern) + 1) + _scale_pattern = _scale_pattern[:batch_size] + _rev_pattern = [1-i for i in _scale_pattern] + + scale_pattern_double = torch.tensor(_scale_pattern*2).to(self.device, dtype=self.unet.dtype) + rev_pattern_double = torch.tensor(_rev_pattern*2).to(self.device, dtype=self.unet.dtype) + scale_pattern = torch.tensor(_scale_pattern).to(self.device, dtype=self.unet.dtype) + rev_pattern = torch.tensor(_rev_pattern).to(self.device, dtype=self.unet.dtype) + + + + def hacked_basic_transformer_inner_forward( + self, + hidden_states: torch.FloatTensor, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + timestep: Optional[torch.LongTensor] = None, + cross_attention_kwargs: Dict[str, Any] = None, + video_length=None, + ): + if self.use_ada_layer_norm: + norm_hidden_states = self.norm1(hidden_states, timestep) + else: + norm_hidden_states = self.norm1(hidden_states) + + # 1. 
Self-Attention + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if self.unet_use_cross_frame_attention: + cross_attention_kwargs["video_length"] = video_length + + if self.only_cross_attention: + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + else: + if C_REF_MODE == "write": + self.bank.append(norm_hidden_states.detach().clone()) + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + if C_REF_MODE == "read": + if attention_auto_machine_weight > self.attn_weight: + attn_output_uc = self.attn1( + norm_hidden_states, + encoder_hidden_states=torch.cat([norm_hidden_states] + self.bank, dim=1), + # attention_mask=attention_mask, + **cross_attention_kwargs, + ) + + if style_fidelity > 0: + attn_output_c = attn_output_uc.clone() + + if do_classifier_free_guidance: + attn_output_c[uc_mask] = self.attn1( + norm_hidden_states[uc_mask], + encoder_hidden_states=norm_hidden_states[uc_mask], + **cross_attention_kwargs, + ) + + attn_output = style_fidelity * attn_output_c + (1.0 - style_fidelity) * attn_output_uc + + else: + attn_output = attn_output_uc + + attn_org = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + + attn_output = scale_pattern_double[:,None,None] * attn_output + rev_pattern_double[:,None,None] * attn_org + + else: + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + + self.bank.clear() + + + hidden_states = attn_output + hidden_states + + if self.attn2 is not None: + norm_hidden_states = ( + self.norm2(hidden_states, timestep) if self.use_ada_layer_norm else self.norm2(hidden_states) + ) + + # 2. Cross-Attention + attn_output = self.attn2( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=encoder_attention_mask, + **cross_attention_kwargs, + ) + hidden_states = attn_output + hidden_states + + # 3. Feed-forward + hidden_states = self.ff(self.norm3(hidden_states)) + hidden_states + + # 4. 
Temporal-Attention + if self.unet_use_temporal_attention: + d = hidden_states.shape[1] + hidden_states = rearrange(hidden_states, "(b f) d c -> (b d) f c", f=video_length) + norm_hidden_states = ( + self.norm_temp(hidden_states, timestep) + if self.use_ada_layer_norm + else self.norm_temp(hidden_states) + ) + hidden_states = self.attn_temp(norm_hidden_states) + hidden_states + hidden_states = rearrange(hidden_states, "(b d) f c -> (b f) d c", d=d) + + return hidden_states + + def hacked_mid_forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + + eps = 1e-6 + + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet, motion_module in zip(self.attentions, self.resnets[1:], self.motion_modules): + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + if motion_module is not None: + hidden_states = motion_module( + hidden_states, + temb, + encoder_hidden_states=encoder_hidden_states, + ) + hidden_states = resnet(hidden_states, temb) + + x = hidden_states + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(x, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append(mean) + self.var_bank.append(var) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(x, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank) / float(len(self.mean_bank)) + var_acc = sum(self.var_bank) / float(len(self.var_bank)) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + x_uc = (((x - mean) / std) * std_acc) + mean_acc + x_c = x_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + + f = x.shape[2] + x_c = rearrange(x_c, "b c f h w -> (b f) c h w") + x = rearrange(x, "b c f h w -> (b f) c h w") + + x_c[uc_mask] = x[uc_mask] + + x_c = rearrange(x_c, "(b f) c h w -> b c f h w", f=f) + x = rearrange(x, "(b f) c h w -> b c f h w", f=f) + + mod_x = style_fidelity * x_c + (1.0 - style_fidelity) * x_uc + + x = scale_pattern[None,None,:,None,None] * mod_x + rev_pattern[None,None,:,None,None] * x + + self.mean_bank = [] + self.var_bank = [] + + hidden_states = x + + return hidden_states + + def hack_CrossAttnDownBlock3D_forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ): + eps = 1e-6 + + # TODO(Patrick, William) - attention mask is not used + output_states = () + + for i, (resnet, attn, motion_module) in enumerate(zip(self.resnets, self.attentions, self.motion_modules)): + hidden_states = resnet(hidden_states, temb) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + 
encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + # add motion module + hidden_states = ( + motion_module(hidden_states, temb, encoder_hidden_states=encoder_hidden_states) + if motion_module is not None + else hidden_states + ) + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + + f = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states_c = rearrange(hidden_states_c, "b c f h w -> (b f) c h w") + + hidden_states_c[uc_mask] = hidden_states[uc_mask] + + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=f) + hidden_states_c = rearrange(hidden_states_c, "(b f) c h w -> b c f h w", f=f) + + mod_hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + hidden_states = scale_pattern[None,None,:,None,None] * mod_hidden_states + rev_pattern[None,None,:,None,None] * hidden_states + + output_states = output_states + (hidden_states,) + + if C_REF_MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + def hacked_DownBlock3D_forward(self, hidden_states, temb=None, encoder_hidden_states=None): + eps = 1e-6 + + output_states = () + + for i, (resnet, motion_module) in enumerate(zip(self.resnets, self.motion_modules)): + hidden_states = resnet(hidden_states, temb) + + # add motion module + if motion_module: + hidden_states = motion_module( + hidden_states, temb, encoder_hidden_states=encoder_hidden_states + ) + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + + f = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states_c = rearrange(hidden_states_c, "b c f h w -> (b f) c h w") + + hidden_states_c[uc_mask] = hidden_states[uc_mask] + + 
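+                        # under classifier-free guidance, the unconditional half of the batch keeps its original (pre-AdaIN) activations, so style_fidelity only pulls the conditional half toward the reference statistics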
hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=f) + hidden_states_c = rearrange(hidden_states_c, "(b f) c h w -> b c f h w", f=f) + + mod_hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + hidden_states = scale_pattern[None,None,:,None,None] * mod_hidden_states + rev_pattern[None,None,:,None,None] * hidden_states + + output_states = output_states + (hidden_states,) + + if C_REF_MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + def hacked_CrossAttnUpBlock3D_forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ): + eps = 1e-6 + # TODO(Patrick, William) - attention mask is not used + for i, (resnet, attn, motion_module) in enumerate(zip(self.resnets, self.attentions, self.motion_modules)): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + + # add motion module + if motion_module: + hidden_states = motion_module( + hidden_states, temb, encoder_hidden_states=encoder_hidden_states + ) + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + + f = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states_c = rearrange(hidden_states_c, "b c f h w -> (b f) c h w") + + hidden_states_c[uc_mask] = hidden_states[uc_mask] + + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=f) + hidden_states_c = rearrange(hidden_states_c, "(b f) c h w -> b c f h w", f=f) + + mod_hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + hidden_states = scale_pattern[None,None,:,None,None] * mod_hidden_states + rev_pattern[None,None,:,None,None] * hidden_states + + + if C_REF_MODE == "read": + self.mean_bank = [] + self.var_bank = [] + 
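+            # the banks hold statistics from a single reference "write" pass, so they are emptied after every "read" pass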
+ if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + def hacked_UpBlock3D_forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None, encoder_hidden_states=None): + eps = 1e-6 + for i, (resnet,motion_module) in enumerate(zip(self.resnets, self.motion_modules)): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + hidden_states = resnet(hidden_states, temb) + + if motion_module: + hidden_states = motion_module( + hidden_states, temb, encoder_hidden_states=encoder_hidden_states + ) + + if C_REF_MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if C_REF_MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(3, 4), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + f = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states_c = rearrange(hidden_states_c, "b c f h w -> (b f) c h w") + + hidden_states_c[uc_mask] = hidden_states[uc_mask] + + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=f) + hidden_states_c = rearrange(hidden_states_c, "(b f) c h w -> b c f h w", f=f) + + mod_hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + hidden_states = scale_pattern[None,None,:,None,None] * mod_hidden_states + rev_pattern[None,None,:,None,None] * hidden_states + + if C_REF_MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + if reference_attn: + attn_modules = [module for module in torch_dfs(self.unet) if isinstance(module, BasicTransformerBlock)] + attn_modules = sorted(attn_modules, key=lambda x: -x.norm1.normalized_shape[0]) + + for i, module in enumerate(attn_modules): + module._original_inner_forward = module.forward + module.forward = hacked_basic_transformer_inner_forward.__get__(module, BasicTransformerBlock) + module.bank = [] + module.attn_weight = float(i) / float(len(attn_modules)) + + attn_modules = None + torch.cuda.empty_cache() + + if reference_adain: + gn_modules = [self.unet.mid_block] + self.unet.mid_block.gn_weight = 0 + + down_blocks = self.unet.down_blocks + for w, module in enumerate(down_blocks): + module.gn_weight = 1.0 - float(w) / float(len(down_blocks)) + gn_modules.append(module) + + up_blocks = self.unet.up_blocks + for w, module in enumerate(up_blocks): + module.gn_weight = float(w) / float(len(up_blocks)) + gn_modules.append(module) + + for i, module in enumerate(gn_modules): + if getattr(module, "original_forward", None) is None: + module.original_forward = 
module.forward + if i == 0: + # mid_block + module.forward = hacked_mid_forward.__get__(module, UNetMidBlock3DCrossAttn) + elif isinstance(module, CrossAttnDownBlock3D): + module.forward = hack_CrossAttnDownBlock3D_forward.__get__(module, CrossAttnDownBlock3D) + elif isinstance(module, DownBlock3D): + module.forward = hacked_DownBlock3D_forward.__get__(module, DownBlock3D) + elif isinstance(module, CrossAttnUpBlock3D): + module.forward = hacked_CrossAttnUpBlock3D_forward.__get__(module, CrossAttnUpBlock3D) + elif isinstance(module, UpBlock3D): + module.forward = hacked_UpBlock3D_forward.__get__(module, UpBlock3D) + module.mean_bank = [] + module.var_bank = [] + module.gn_weight *= 2 + + gn_modules = None + torch.cuda.empty_cache() + + + def unload_controlnet_ref_only( + self, + reference_attn, + reference_adain, + ): + if reference_attn: + attn_modules = [module for module in torch_dfs(self.unet) if isinstance(module, BasicTransformerBlock)] + attn_modules = sorted(attn_modules, key=lambda x: -x.norm1.normalized_shape[0]) + + for i, module in enumerate(attn_modules): + module.forward = module._original_inner_forward + module.bank = [] + + attn_modules = None + torch.cuda.empty_cache() + + if reference_adain: + gn_modules = [self.unet.mid_block] + self.unet.mid_block.gn_weight = 0 + + down_blocks = self.unet.down_blocks + for w, module in enumerate(down_blocks): + module.gn_weight = 1.0 - float(w) / float(len(down_blocks)) + gn_modules.append(module) + + up_blocks = self.unet.up_blocks + for w, module in enumerate(up_blocks): + module.gn_weight = float(w) / float(len(up_blocks)) + gn_modules.append(module) + + for i, module in enumerate(gn_modules): + module.forward = module.original_forward + module.mean_bank = [] + module.var_bank = [] + module.gn_weight *= 2 + + gn_modules = None + torch.cuda.empty_cache() + + def get_img2img_timesteps(self, num_inference_steps, strength, device): + strength = min(1, max(0,strength)) + # get the original timestep using init_timestep + init_timestep = min(int(num_inference_steps * strength), num_inference_steps) + + t_start = max(num_inference_steps - init_timestep, 0) + timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :] + + return timesteps, num_inference_steps - t_start + + @torch.no_grad() + def __call__( + self, + height: Optional[int] = None, + width: Optional[int] = None, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + unet_batch_size: int = 1, + negative_prompt: Optional[Union[str, List[str]]] = None, + video_length: Optional[int] = None, + num_videos_per_prompt: Optional[int] = 1, + eta: float = 0.0, + generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None, + latents: Optional[torch.FloatTensor] = None, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + output_type: Optional[str] = "tensor", + return_dict: bool = True, + callback: Optional[Callable[[int, torch.FloatTensor], None]] = None, + callback_steps: Optional[List[int]] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + context_frames: int = -1, + context_stride: int = 3, + context_overlap: int = 4, + context_schedule: str = "uniform", + clip_skip: int = 1, + controlnet_type_map: Dict[str, Dict[str,float]] = None, + controlnet_image_map: Dict[int, Dict[str,Any]] = None, + controlnet_ref_map: Dict[str, Any] = None, + controlnet_no_shrink:List[str]=None, + controlnet_max_samples_on_vram: int = 999, + controlnet_max_models_on_vram: int=99, + 
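+        # the two *_on_vram arguments above cap how many cached ControlNet samples and loaded ControlNet models stay resident on the GPU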
controlnet_is_loop: bool=True, + img2img_map: Dict[str, Any] = None, + ip_adapter_config_map: Dict[str,Any] = None, + region_list: List[Any] = None, + region_condi_list: List[Any] = None, + interpolation_factor = 1, + is_single_prompt_mode = False, + apply_lcm_lora= False, + gradual_latent_map=None, + **kwargs, + ): + import gc + + global C_REF_MODE + + gradual_latent = False + if gradual_latent_map: + gradual_latent = gradual_latent_map["enable"] + + logger.info(f"{apply_lcm_lora=}") + if apply_lcm_lora: + self.scheduler = LCMScheduler.from_config(self.scheduler.config) + + controlnet_image_map_org = controlnet_image_map + + controlnet_max_models_on_vram = max(controlnet_max_models_on_vram,0) + + # Default height and width to unet + height = height or self.unet.config.sample_size * self.vae_scale_factor + width = width or self.unet.config.sample_size * self.vae_scale_factor + + sequential_mode = video_length is not None and video_length > context_frames + + multi_uncond_mode = self.lora_map is not None + + controlnet_for_region = False + if controlnet_type_map: + for c in controlnet_type_map: + reg_list = controlnet_type_map[c]["control_region_list"] + if reg_list: + controlnet_for_region = True + break + + if controlnet_for_region or multi_uncond_mode: + controlnet_for_region = True + multi_uncond_mode = True + unet_batch_size = 1 + + logger.info(f"{controlnet_for_region=}") + logger.info(f"{multi_uncond_mode=}") + logger.info(f"{unet_batch_size=}") + + # 1. Check inputs. Raise error if not correct + self.check_inputs( + "dummy string", height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds + ) + + # Define call parameters + batch_size = 1 + + device = self._execution_device + latents_device = torch.device("cpu") if sequential_mode else device + + + if ip_adapter_config_map: + if self.ip_adapter is None: + img_enc_path = "data/models/ip_adapter/models/image_encoder/" + if ip_adapter_config_map["is_full_face"]: + self.ip_adapter = IPAdapterFull(self, img_enc_path, "data/models/ip_adapter/models/ip-adapter-full-face_sd15.bin", device, 257) + elif ip_adapter_config_map["is_light"]: + self.ip_adapter = IPAdapter(self, img_enc_path, "data/models/ip_adapter/models/ip-adapter_sd15_light.bin", device, 4) + elif ip_adapter_config_map["is_plus_face"]: + self.ip_adapter = IPAdapterPlus(self, img_enc_path, "data/models/ip_adapter/models/ip-adapter-plus-face_sd15.bin", device, 16) + elif ip_adapter_config_map["is_plus"]: + self.ip_adapter = IPAdapterPlus(self, img_enc_path, "data/models/ip_adapter/models/ip-adapter-plus_sd15.bin", device, 16) + else: + self.ip_adapter = IPAdapter(self, img_enc_path, "data/models/ip_adapter/models/ip-adapter_sd15.bin", device, 4) + self.ip_adapter.set_scale( ip_adapter_config_map["scale"] ) + + # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) + # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` + # corresponds to doing no classifier free guidance. + do_classifier_free_guidance = guidance_scale > 1.0 + + # 3. 
Encode input prompt + text_encoder_lora_scale = ( + cross_attention_kwargs.get("scale", None) if cross_attention_kwargs is not None else None + ) + + + prompt_encoder = PromptEncoder( + self, + device, + device,#latents_device, + num_videos_per_prompt, + do_classifier_free_guidance, + region_condi_list, + negative_prompt, + is_single_prompt_mode, + clip_skip, + multi_uncond_mode + ) + + if self.ip_adapter: + self.ip_adapter.delete_encoder() + + if controlnet_ref_map is not None: + if unet_batch_size < prompt_encoder.get_condi_size(): + raise ValueError(f"controlnet_ref is not available in this configuration. {unet_batch_size=} < {prompt_encoder.get_condi_size()}") + if multi_uncond_mode: + raise ValueError(f"controlnet_ref is not available in this configuration. {multi_uncond_mode=}") + + + logger.info(f"{prompt_encoder.get_condi_size()=}") + + + # 3.5 Prepare controlnet variables + + if self.controlnet_map: + for i, type_str in enumerate(self.controlnet_map): + if i < controlnet_max_models_on_vram: + self.controlnet_map[type_str].to(device=device, non_blocking=True) + + + + # controlnet_image_map + # { 0 : { "type_str" : IMAGE, "type_str2" : IMAGE } } + # { "type_str" : { 0 : IMAGE, 15 : IMAGE } } + controlnet_image_map= None + + if controlnet_image_map_org: + controlnet_image_map= {key: {} for key in controlnet_type_map} + for key_frame_no in controlnet_image_map_org: + for t, img in controlnet_image_map_org[key_frame_no].items(): + tmp = self.prepare_image( + image=img, + width=width, + height=height, + batch_size=1 * 1, + num_images_per_prompt=1, + #device=device, + device=latents_device, + dtype=self.controlnet_map[t].dtype, + do_classifier_free_guidance=False, + guess_mode=False, + ) + controlnet_image_map[t][key_frame_no] = torch.cat([tmp] * prompt_encoder.get_condi_size()) + + del controlnet_image_map_org + torch.cuda.empty_cache() + + # { "0_type_str" : { "scales" = [0.1, 0.3, 0.5, 1.0, 0.5, 0.3, 0.1], "frames"=[125, 126, 127, 0, 1, 2, 3] }} + controlnet_scale_map = {} + controlnet_affected_list = np.zeros(video_length,dtype = int) + + is_v2v = True + + if controlnet_image_map: + for type_str in controlnet_image_map: + for key_frame_no in controlnet_image_map[type_str]: + scale_list = controlnet_type_map[type_str]["control_scale_list"] + if len(scale_list) > 0: + is_v2v = False + scale_list = scale_list[0: context_frames] + scale_len = len(scale_list) + + if controlnet_is_loop: + frames = [ i%video_length for i in range(key_frame_no-scale_len, key_frame_no+scale_len+1)] + + controlnet_scale_map[str(key_frame_no) + "_" + type_str] = { + "scales" : scale_list[::-1] + [1.0] + scale_list, + "frames" : frames, + } + else: + frames = [ i for i in range(max(0, key_frame_no-scale_len), min(key_frame_no+scale_len+1, video_length))] + + controlnet_scale_map[str(key_frame_no) + "_" + type_str] = { + "scales" : scale_list[:key_frame_no][::-1] + [1.0] + scale_list[:video_length-key_frame_no-1], + "frames" : frames, + } + + controlnet_affected_list[frames] = 1 + + def controlnet_is_affected( frame_index:int): + return controlnet_affected_list[frame_index] + + def get_controlnet_scale( + type: str, + cur_step: int, + step_length: int, + ): + s = controlnet_type_map[type]["control_guidance_start"] + e = controlnet_type_map[type]["control_guidance_end"] + keep = 1.0 - float(cur_step / len(timesteps) < s or (cur_step + 1) / step_length > e) + + scale = controlnet_type_map[type]["controlnet_conditioning_scale"] + + return keep * scale + + def get_controlnet_variable( + type_str: str, + cur_step: 
int, + step_length: int, + target_frames: List[int], + ): + cont_vars = [] + + if not controlnet_image_map: + return None + + if type_str not in controlnet_image_map: + return None + + for fr, img in controlnet_image_map[type_str].items(): + + if fr in target_frames: + cont_vars.append( { + "frame_no" : fr, + "image" : img, + "cond_scale" : get_controlnet_scale(type_str, cur_step, step_length), + "guess_mode" : controlnet_type_map[type_str]["guess_mode"] + } ) + + return cont_vars + + # 3.9. Preprocess reference image + c_ref_enable = controlnet_ref_map is not None + + if c_ref_enable: + ref_image = controlnet_ref_map["ref_image"] + + ref_image = self.prepare_ref_image( + image=ref_image, + width=width, + height=height, + batch_size=1 * 1, + num_images_per_prompt=1, + device=device, + dtype=prompt_encoder.get_prompt_embeds_dtype(), + ) + + # 4. Prepare timesteps + self.scheduler.set_timesteps(num_inference_steps, device=latents_device) + if img2img_map: + timesteps, num_inference_steps = self.get_img2img_timesteps(num_inference_steps, img2img_map["denoising_strength"], latents_device) + latent_timestep = timesteps[:1].repeat(batch_size * 1) + else: + timesteps = self.scheduler.timesteps + latent_timestep = None + + is_strength_max = True + if img2img_map: + is_strength_max = img2img_map["denoising_strength"] == 1.0 + + # 5. Prepare latent variables + num_channels_latents = self.unet.config.in_channels + latents_outputs = self.prepare_latents( + batch_size * num_videos_per_prompt, + num_channels_latents, + video_length, + height, + width, + prompt_encoder.get_prompt_embeds_dtype(), + latents_device, # keep latents on cpu for sequential mode + generator, + img2img_map, + latent_timestep, + latents, + is_strength_max, + True, + True, + ) + + latents, noise, image_latents = latents_outputs + + del img2img_map + torch.cuda.empty_cache() + gc.collect() + + # 5.5 Prepare region mask + region_mask = RegionMask( + region_list, + batch_size, + num_channels_latents, + video_length, + height, + width, + self.vae_scale_factor, + prompt_encoder.get_prompt_embeds_dtype(), + latents_device, + multi_uncond_mode + ) + + torch.cuda.empty_cache() + + # 5.9. Prepare reference latent variables + if c_ref_enable: + ref_image_latents = self.prepare_ref_latents( + ref_image, + context_frames * 1, + prompt_encoder.get_prompt_embeds_dtype(), + device, + generator, + do_classifier_free_guidance=False, + ) + ref_image_latents = torch.cat([ref_image_latents] * prompt_encoder.get_condi_size()) + ref_image_latents = rearrange(ref_image_latents, "(b f) c h w -> b c f h w", f=context_frames) + + # 5.99. Modify self attention and group norm +# self.prepare_controlnet_ref_only( + self.prepare_controlnet_ref_only_without_motion( + ref_image_latents=ref_image_latents, + batch_size=context_frames, + num_images_per_prompt=1, + do_classifier_free_guidance=do_classifier_free_guidance, + attention_auto_machine_weight=controlnet_ref_map["attention_auto_machine_weight"], + gn_auto_machine_weight=controlnet_ref_map["gn_auto_machine_weight"], + style_fidelity=controlnet_ref_map["style_fidelity"], + reference_attn=controlnet_ref_map["reference_attn"], + reference_adain=controlnet_ref_map["reference_adain"], + _scale_pattern=controlnet_ref_map["scale_pattern"], + region_num = prompt_encoder.get_condi_size() + ) + + # 6. Prepare extra step kwargs. 
TODO: Logic should ideally just be moved out of the pipeline + extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) + + # 6.5 - Infinite context loop shenanigans + context_scheduler = get_context_scheduler(context_schedule) + total_steps = get_total_steps( + context_scheduler, + timesteps, + num_inference_steps, + latents.shape[2], + context_frames, + context_stride, + context_overlap, + ) + + lat_height, lat_width = latents.shape[-2:] + + def gradual_latent_scale(progress): + if gradual_latent: + cur = 0.5 + for s in gradual_latent_map["scale"]: + v = gradual_latent_map["scale"][s] + if float(s) > progress: + return cur + cur = v + return cur + else: + return 1.0 + def gradual_latent_size(progress): + if gradual_latent: + current_ratio = gradual_latent_scale(progress) + h = int(lat_height * current_ratio) + w = int(lat_width * current_ratio) + return (h,w) + else: + return (lat_height, lat_width) + + def unsharp_mask(img): + imgf = img.float() + k = 0.05 # strength + kernel = torch.FloatTensor([[0, -k, 0], + [-k, 1+4*k, -k], + [0, -k, 0]]) + + conv_kernel = torch.eye(4)[..., None, None] * kernel[None, None, ...] + imgf = torch.nn.functional.conv2d(imgf, conv_kernel.to(img.device), padding=1) + return imgf.to(img.dtype) + + def resize_tensor(ten, size, do_unsharp_mask=False): + ten = rearrange(ten, "b c f h w -> (b f) c h w") + ten = torch.nn.functional.interpolate( + ten.float(), size=size, mode="bicubic", align_corners=False + ).to(ten.dtype) + if do_unsharp_mask: + ten = unsharp_mask(ten) + return rearrange(ten, "(b f) c h w -> b c f h w", f=video_length) + + if gradual_latent: + latents = resize_tensor(latents, gradual_latent_size(0)) + reverse_steps = gradual_latent_map["reverse_steps"] + noise_add_count = gradual_latent_map["noise_add_count"] + total_steps = ((total_steps/num_inference_steps) * (reverse_steps* (len(gradual_latent_map["scale"].keys()) - 1) )) + total_steps + total_steps = int(total_steps) + + prev_gradient_latent_size = gradual_latent_size(0) + + + shrink_controlnet = True + no_shrink_type = controlnet_no_shrink + + if controlnet_type_map: + for nt in no_shrink_type: + if nt in controlnet_type_map: + controlnet_type_map[nt] = controlnet_type_map.pop(nt) + + def need_region_blend(cur_step, total_steps): + if cur_step + 1 == total_steps: + return True + if multi_uncond_mode == False: + return True + return cur_step % 2 == 1 + + # 7. 
Denoising loop + num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order + with self.progress_bar(total=total_steps) as progress_bar: + i = 0 + real_i = 0 +# for i, t in enumerate(timesteps): + while i < len(timesteps): + t = timesteps[i] + stopwatch_start() + + cur_gradient_latent_size = gradual_latent_size((real_i+1) / len(timesteps)) + + if self.lcm: + self.lcm.apply(i, len(timesteps)) + + noise_pred = torch.zeros( + (prompt_encoder.get_condi_size(), *latents.shape[1:]), + device=latents.device, + dtype=latents.dtype, + ) + counter = torch.zeros( + (1, 1, latents.shape[2], 1, 1), device=latents.device, dtype=latents.dtype + ) + + # { "0_type_str" : (down_samples, mid_sample) } + controlnet_result={} + + def scale_5d_tensor(ten, h, w, f): + ten = rearrange(ten, "b c f h w -> (b f) c h w") + ten = torch.nn.functional.interpolate( + ten, size=(h, w), mode="bicubic", align_corners=False + ) + return rearrange(ten, "(b f) c h w -> b c f h w", f=f) + + def get_controlnet_result(context: List[int] = None, layer:int = -1): + #logger.info(f"get_controlnet_result called {context=}") + + if controlnet_image_map is None: + return None, None + + hit = False + for n in context: + if controlnet_is_affected(n): + hit=True + break + if hit == False: + return None, None + + + def is_control_layer(type_str, layer): + if layer == -1: + return True + region_list = controlnet_type_map[type_str]["control_region_list"] + if not region_list: + return True + r = region_mask.get_region_from_layer(layer, prompt_encoder.get_condi_size()) + if r == -1: + return False + return r in region_list + + + def to_device(sample, target_device): + down_samples = [ + v.to(device = target_device, non_blocking=True) if v.device != target_device else v + for v in sample[0] ] + mid_sample = sample[1].to(device = target_device, non_blocking=True) if sample[1].device != target_device else sample[1] + return (down_samples, mid_sample) + + _down_block_res_samples=[] + + first_down = list(list(controlnet_result.values())[0].values())[0][0] + first_mid = list(list(controlnet_result.values())[0].values())[0][1] + + shape0 = first_mid.shape[0] if layer == -1 else 1 + for ii in range(len(first_down)): + _down_block_res_samples.append( + torch.zeros( + (shape0, first_down[ii].shape[1], len(context) ,*first_down[ii].shape[3:]), + device=device, + dtype=first_down[ii].dtype, + )) + _mid_block_res_samples = torch.zeros( + (shape0, first_mid.shape[1], len(context) ,*first_mid.shape[3:]), + device=device, + dtype=first_mid.dtype, + ) + + + def merge_result(fr, type_str): + nonlocal _mid_block_res_samples, _down_block_res_samples + result = str(fr) + "_" + type_str + + val = controlnet_result[fr][type_str] + + if layer == -1: + cur_down = [ + v.to(device = device, dtype=first_down[0].dtype, non_blocking=True) if v.device != device else v + for v in val[0] + ] + cur_mid =val[1].to(device = device, dtype=first_mid.dtype, non_blocking=True) if val[1].device != device else val[1] + else: + cur_down = [ + v[layer].to(device = device, dtype=first_down[0].dtype, non_blocking=True) if v.device != device else v[layer] + for v in val[0] + ] + cur_mid =val[1][layer].to(device = device, dtype=first_mid.dtype, non_blocking=True) if val[1].device != device else val[1][layer] + + loc = list(set(context) & set(controlnet_scale_map[result]["frames"])) + scales = [] + + for o in loc: + for j, f in enumerate(controlnet_scale_map[result]["frames"]): + if o == f: + scales.append(controlnet_scale_map[result]["scales"][j]) + break + loc_index=[] 
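+                    # map each overlapping frame number back to its index inside the current context window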
+ + for o in loc: + for j, f in enumerate( context ): + if o==f: + loc_index.append(j) + break + + mod = torch.tensor(scales).to(device, dtype=cur_mid.dtype) + + ''' + for ii in range(len(_down_block_res_samples)): + logger.info(f"{type_str=} / {cur_down[ii].shape=}") + logger.info(f"{type_str=} / {_down_block_res_samples[ii].shape=}") + logger.info(f"{type_str=} / {cur_mid.shape=}") + logger.info(f"{type_str=} / {_mid_block_res_samples.shape=}") + ''' + + add = cur_mid * mod[None,None,:,None,None] + _mid_block_res_samples[:, :, loc_index, :, :] = _mid_block_res_samples[:, :, loc_index, :, :] + add + + for ii in range(len(cur_down)): + add = cur_down[ii] * mod[None,None,:,None,None] + _down_block_res_samples[ii][:, :, loc_index, :, :] = _down_block_res_samples[ii][:, :, loc_index, :, :] + add + + + + + hit = False + + no_shrink_list = [] + + for fr in controlnet_result: + for type_str in controlnet_result[fr]: + if not is_control_layer(type_str, layer): + continue + + hit = True + + if shrink_controlnet and (type_str in no_shrink_type): + no_shrink_list.append(type_str) + continue + + merge_result(fr, type_str) + + cur_d_height, cur_d_width = _down_block_res_samples[0].shape[-2:] + cur_lat_height, cur_lat_width = latents.shape[-2:] + if cur_lat_height != cur_d_height: + #logger.info(f"{cur_lat_height=} / {cur_d_height=}") + for ii, rate in zip(range(len(_down_block_res_samples)), (1,1,1,2,2,2,4,4,4,8,8,8)): + new_h = (cur_lat_height + rate-1) // rate + new_w = (cur_lat_width + rate-1) // rate + #logger.info(f"b {_down_block_res_samples[ii].shape=}") + _down_block_res_samples[ii] = scale_5d_tensor(_down_block_res_samples[ii], new_h, new_w, context_frames) + #logger.info(f"a {_down_block_res_samples[ii].shape=}") + _mid_block_res_samples = scale_5d_tensor(_mid_block_res_samples, (cur_lat_height + rate - 1)// 8, (cur_lat_width + rate - 1)// 8, context_frames) + + + for fr in controlnet_result: + for type_str in controlnet_result[fr]: + if type_str not in no_shrink_list: + continue + merge_result(fr, type_str) + + + if not hit: + return None, None + + return _down_block_res_samples, _mid_block_res_samples + + def process_controlnet( target_frames: List[int] = None ): + #logger.info(f"process_controlnet called {target_frames=}") + nonlocal controlnet_result + + controlnet_samples_on_vram = 0 + + loc = list(set(target_frames) & set(controlnet_result.keys())) + + controlnet_result = {key: controlnet_result[key] for key in loc} + + target_frames = list(set(target_frames) - set(loc)) + #logger.info(f"-> {target_frames=}") + if len(target_frames) == 0: + return + + def sample_to_device( sample ): + nonlocal controlnet_samples_on_vram + + if controlnet_max_samples_on_vram <= controlnet_samples_on_vram: + if sample[0][0].device != torch.device("cpu"): + down_samples = [ v.to(device = torch.device("cpu"), non_blocking=True) for v in sample[0] ] + mid_sample = sample[1].to(device = torch.device("cpu"), non_blocking=True) + else: + down_samples = sample[0] + mid_sample = sample[1] + + else: + if sample[0][0].device != device: + down_samples = [ v.to(device = device, non_blocking=True) for v in sample[0] ] + mid_sample = sample[1].to(device = device, non_blocking=True) + else: + down_samples = sample[0] + mid_sample = sample[1] + + controlnet_samples_on_vram += 1 + return down_samples, mid_sample + + + for fr in controlnet_result: + for type_str in controlnet_result[fr]: + controlnet_result[fr][type_str] = sample_to_device(controlnet_result[fr][type_str]) + + for type_str in controlnet_type_map: + 
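+                # run each ControlNet once per still-uncached key frame and cache the resulting residuals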
cont_vars = get_controlnet_variable(type_str, i, len(timesteps), target_frames) + if not cont_vars: + continue + + org_device = self.controlnet_map[type_str].device + if org_device != device: + self.controlnet_map[type_str] = self.controlnet_map[type_str].to(device=device, non_blocking=True) + + for cont_var in cont_vars: + frame_no = cont_var["frame_no"] + + if latents.shape[0] == 1: + latent_model_input = ( + latents[:, :, [frame_no]] + .to(device) + .repeat( prompt_encoder.get_condi_size(), 1, 1, 1, 1) + ) + else: + latent_model_input=[] + for s0_index in list(range(latents.shape[0])) + list(range(latents.shape[0])): + latent_model_input.append( latents[[s0_index], :, [frame_no]].to(device).unsqueeze(dim=2) ) + latent_model_input = torch.cat(latent_model_input) + + if shrink_controlnet and (type_str not in no_shrink_type): + cur_lat_height, cur_lat_width = latent_model_input.shape[-2:] + cur = min(cur_lat_height, cur_lat_width) + if cur > 64: # 512 / 8 = 64 + if cur_lat_height > cur_lat_width: + shr_lat_height = 64 * cur_lat_height / cur_lat_width + shr_lat_width = 64 + else: + shr_lat_height = 64 + shr_lat_width = 64 * cur_lat_width / cur_lat_height + shr_lat_height = int(shr_lat_height // 8 * 8) + shr_lat_width = int(shr_lat_width // 8 * 8) + #logger.info(f"b {latent_model_input.shape=}") + latent_model_input = scale_5d_tensor(latent_model_input, shr_lat_height, shr_lat_width, 1) + #logger.info(f"a {latent_model_input.shape=}") + + + control_model_input = self.scheduler.scale_model_input(latent_model_input, t)[:, :, 0] + controlnet_prompt_embeds = prompt_encoder.get_current_prompt_embeds([frame_no], latents.shape[2]) + + + if False: + controlnet_prompt_embeds = controlnet_prompt_embeds.to(device=device, non_blocking=True) + cont_var_img = cont_var["image"].to(device=device, non_blocking=True) + + __down_list=[] + __mid_list=[] + for layer_index in range(0, control_model_input.shape[0], unet_batch_size): + + __control_model_input = control_model_input[layer_index:layer_index+unet_batch_size] + __controlnet_prompt_embeds = controlnet_prompt_embeds[layer_index :(layer_index + unet_batch_size)] + __cont_var_img = cont_var_img[layer_index:layer_index+unet_batch_size] + + __down_samples, __mid_sample = self.controlnet_map[type_str]( + __control_model_input, + t, + encoder_hidden_states=__controlnet_prompt_embeds, + controlnet_cond=__cont_var_img, + conditioning_scale=cont_var["cond_scale"], + guess_mode=cont_var["guess_mode"], + return_dict=False, + ) + __down_list.append(__down_samples) + __mid_list.append(__mid_sample) + + down_samples=[] + for d_no in range(len(__down_list[0])): + down_samples.append( + torch.cat([ + v[d_no] for v in __down_list + ]) + ) + mid_sample = torch.cat(__mid_list) + + else: + cont_var_img = cont_var["image"].to(device=device) + + cur_lat_height, cur_lat_width = latent_model_input.shape[-2:] + cur_img_height, cur_img_width = cont_var_img.shape[-2:] + if (cur_lat_height*8 != cur_img_height) or (cur_lat_width*8 != cur_img_width): + cont_var_img = torch.nn.functional.interpolate( + cont_var_img.float(), size=(cur_lat_height*8, cur_lat_width*8), mode="bicubic", align_corners=False + ).to(cont_var_img.dtype) + + down_samples, mid_sample = self.controlnet_map[type_str]( + control_model_input, + t, + encoder_hidden_states=controlnet_prompt_embeds.to(device=device), + controlnet_cond=cont_var_img, + conditioning_scale=cont_var["cond_scale"], + guess_mode=cont_var["guess_mode"], + return_dict=False, + ) + + for ii in range(len(down_samples)): + down_samples[ii] = 
rearrange(down_samples[ii], "(b f) c h w -> b c f h w", f=1) + mid_sample = rearrange(mid_sample, "(b f) c h w -> b c f h w", f=1) + + if frame_no not in controlnet_result: + controlnet_result[frame_no] = {} + + ''' + for ii in range(len(down_samples)): + logger.info(f"{type_str=} / {down_samples[ii].shape=}") + logger.info(f"{type_str=} / {mid_sample.shape=}") + ''' + + controlnet_result[frame_no][type_str] = sample_to_device((down_samples, mid_sample)) + + if org_device != device: + self.controlnet_map[type_str] = self.controlnet_map[type_str].to(device=org_device, non_blocking=True) + + #logger.info(f"STEP start") + stopwatch_record("STEP start") + + for context in context_scheduler( + i, num_inference_steps, latents.shape[2], context_frames, context_stride, context_overlap + ): + + stopwatch_record("lora_map UNapply start") + if self.lora_map: + self.lora_map.unapply() + stopwatch_record("lora_map UNapply end") + + if controlnet_image_map: + if is_v2v: + controlnet_target = context + else: + controlnet_target = list(range(context[0]-context_frames, context[0])) + context + list(range(context[-1]+1, context[-1]+1+context_frames)) + controlnet_target = [f%video_length for f in controlnet_target] + controlnet_target = list(set(controlnet_target)) + + process_controlnet(controlnet_target) + + # expand the latents + if latents.shape[0] == 1: + latent_model_input = ( + latents[:, :, context] + .to(device) + .repeat(prompt_encoder.get_condi_size(), 1, 1, 1, 1) + ) + else: + latent_model_input=[] + for s0_index in list(range(latents.shape[0])) + list(range(latents.shape[0])): + latent_model_input.append( latents[s0_index:s0_index+1, :, context].to(device) ) + latent_model_input = torch.cat(latent_model_input) + + + latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) + + cur_prompt = prompt_encoder.get_current_prompt_embeds(context, latents.shape[2]).to(device=device) + + if controlnet_for_region: + down_block_res_samples,mid_block_res_sample = (None,None) + else: + down_block_res_samples,mid_block_res_sample = get_controlnet_result(context) + + + if c_ref_enable: + # ref only part + ref_noise = randn_tensor( + ref_image_latents.shape, generator=generator, device=device, dtype=ref_image_latents.dtype + ) + + ref_xt = self.scheduler.add_noise( + ref_image_latents, + ref_noise, + t.reshape( + 1, + ), + ) + ref_xt = self.scheduler.scale_model_input(ref_xt, t) + + stopwatch_record("C_REF_MODE write start") + + C_REF_MODE = "write" + self.unet( + ref_xt, + t, + encoder_hidden_states=cur_prompt, + cross_attention_kwargs=cross_attention_kwargs, + return_dict=False, + ) + + stopwatch_record("C_REF_MODE write end") + + C_REF_MODE = "read" + + # predict the noise residual + + stopwatch_record("normal unet start") + + __pred = [] + + for layer_index in range(0, latent_model_input.shape[0], unet_batch_size): + + if self.lora_map: + self.lora_map.apply(layer_index, latent_model_input.shape[0], context[len(context)//2]) + + if controlnet_for_region: + __do,__mid = get_controlnet_result(context, layer_index) + else: + __do = [] + if down_block_res_samples is not None: + for do in down_block_res_samples: + __do.append(do[layer_index:layer_index+unet_batch_size]) + else: + __do = None + + __mid = None + if mid_block_res_sample is not None: + __mid = mid_block_res_sample[layer_index:layer_index+unet_batch_size] + + + __lat = latent_model_input[layer_index:layer_index+unet_batch_size] + __cur_prompt = cur_prompt[layer_index * context_frames:(layer_index + unet_batch_size)*context_frames] 
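+                        # prompt embeddings are laid out per frame, so this slice is scaled by context_frames to stay aligned with the latent sub-batch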
+ + stopwatch_record("self.unet start") + pred_layer = self.unet( + __lat.to(self.unet.device, self.unet.dtype), + t, + encoder_hidden_states=__cur_prompt, + cross_attention_kwargs=cross_attention_kwargs, + down_block_additional_residuals=__do, + mid_block_additional_residual=__mid, + return_dict=False, + )[0] + stopwatch_record("self.unet end") + + wh = None + + if i < len(timesteps) * region_mask.get_crop_generation_rate(layer_index, latent_model_input.shape[0]): + wh, xy_list = region_mask.get_area(layer_index, latent_model_input.shape[0], context) + if wh: + a_w, a_h = wh + __lat_list = [] + for c_index, xy in enumerate( xy_list ): + a_x, a_y = xy + __lat_list.append( __lat[:,:,[c_index],a_y:a_y+a_h, a_x:a_x+a_w ] ) + + __lat = torch.cat(__lat_list, dim=2) + + if __do is not None: + __tmp_do = [] + for _d, rate in zip(__do, (1,1,1,2,2,2,4,4,4,8,8,8)): + _inner_do_list = [] + for c_index, xy in enumerate( xy_list ): + a_x, a_y = xy + _inner_do_list.append(_d[:,:,[c_index],(a_y + rate-1)//rate:((a_y+a_h)+ rate-1)//rate, (a_x+ rate-1)//rate:((a_x+a_w)+ rate-1)//rate ] ) + + __tmp_do.append( torch.cat(_inner_do_list, dim=2) ) + __do = __tmp_do + + if __mid is not None: + rate = 8 + _mid_list = [] + for c_index, xy in enumerate( xy_list ): + a_x, a_y = xy + _mid_list.append( __mid[:,:,[c_index],(a_y+ rate-1)//rate:((a_y+a_h)+ rate-1)//rate, (a_x+ rate-1)//rate:((a_x+a_w)+ rate-1)//rate ] ) + __mid = torch.cat(_mid_list, dim=2) + + stopwatch_record("crop self.unet start") + crop_pred_layer = self.unet( + __lat.to(self.unet.device, self.unet.dtype), + t, + encoder_hidden_states=__cur_prompt, + cross_attention_kwargs=cross_attention_kwargs, + down_block_additional_residuals=__do, + mid_block_additional_residual=__mid, + return_dict=False, + )[0] + stopwatch_record("crop self.unet end") + + if wh: + a_w, a_h = wh + for c_index, xy in enumerate( xy_list ): + a_x, a_y = xy + pred_layer[:,:,[c_index],a_y:a_y+a_h, a_x:a_x+a_w] = crop_pred_layer[:,:,[c_index],:,:] + + __pred.append( pred_layer ) + + __do = None + __mid = None + down_block_res_samples = None + mid_block_res_sample = None + + + pred = torch.cat(__pred) + + + stopwatch_record("normal unet end") + + pred = pred.to(dtype=latents.dtype, device=latents.device) + noise_pred[:, :, context] = noise_pred[:, :, context] + pred + counter[:, :, context] = counter[:, :, context] + 1 + progress_bar.update() + + # perform guidance + noise_size = prompt_encoder.get_condi_size() + if do_classifier_free_guidance: + noise_pred = (noise_pred / counter) + noise_list = list(noise_pred.chunk( noise_size )) + + if multi_uncond_mode: + uc_noise_list = noise_list[:len(noise_list)//2] + noise_list = noise_list[len(noise_list)//2:] + for n in range(len(noise_list)): + noise_list[n] = uc_noise_list[n] + guidance_scale * (noise_list[n] - uc_noise_list[n]) + else: + noise_pred_uncond = noise_list.pop(0) + for n in range(len(noise_list)): + noise_list[n] = noise_pred_uncond + guidance_scale * (noise_list[n] - noise_pred_uncond) + noise_size = len(noise_list) + noise_pred = torch.cat(noise_list) + + # call the callback, if provided + if (i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0)) and ( + callback is not None and (callback_steps is not None and i in callback_steps) + ): + denoised = latents - noise_pred + denoised = self.interpolate_latents(denoised, interpolation_factor, device) + video = torch.from_numpy(self.decode_latents(denoised)) + callback(i, video) + + if gradual_latent: + if prev_gradient_latent_size != 
cur_gradient_latent_size: + noise_pred = resize_tensor(noise_pred, cur_gradient_latent_size, True) + latents = resize_tensor(latents, cur_gradient_latent_size, True) + + + # compute the previous noisy sample x_t -> x_t-1 + latents = self.scheduler.step( + model_output=noise_pred, + timestep=t, + sample=latents, + **extra_step_kwargs, + return_dict=False, + )[0] + + if need_region_blend(i, len(timesteps)): + latents_list = latents.chunk( noise_size ) + + tmp_latent = torch.zeros( + latents_list[0].shape, device=latents.device, dtype=latents.dtype + ) + + for r_no in range(len(region_list)): + mask = region_mask.get_mask( r_no ) + if gradual_latent: + mask = resize_tensor(mask, cur_gradient_latent_size) + src = region_list[r_no]["src"] + if src == -1: + init_latents_proper = image_latents[:1] + + if i < len(timesteps) - 1: + noise_timestep = timesteps[i + 1] + init_latents_proper = self.scheduler.add_noise( + init_latents_proper, noise, torch.tensor([noise_timestep]) + ) + + if gradual_latent: + lat = resize_tensor(init_latents_proper, cur_gradient_latent_size) + else: + lat = init_latents_proper + + else: + lat = latents_list[src] + + tmp_latent = tmp_latent * (1-mask) + lat * mask + + latents = tmp_latent + + init_latents_proper = None + lat = None + latents_list = None + tmp_latent = None + + i+=1 + real_i = max(i, real_i) + if gradual_latent: + if prev_gradient_latent_size != cur_gradient_latent_size: + reverse = min(i, reverse_steps) + self.scheduler._step_index -= reverse + _noise = resize_tensor(noise, cur_gradient_latent_size) + for count in range(i, i+noise_add_count): + count = min(count,len(timesteps)-1) + latents = self.scheduler.add_noise( + latents, _noise, torch.tensor([timesteps[count]]) + ) + i -= reverse + torch.cuda.empty_cache() + gc.collect() + + prev_gradient_latent_size = cur_gradient_latent_size + + stopwatch_stop("LOOP end") + + controlnet_result = None + torch.cuda.empty_cache() + gc.collect() + + if c_ref_enable: + self.unload_controlnet_ref_only( + reference_attn=controlnet_ref_map["reference_attn"], + reference_adain=controlnet_ref_map["reference_adain"], + ) + + if self.ip_adapter: + show_gpu("before unload ip_adapter") + self.ip_adapter.unload() + self.ip_adapter = None + torch.cuda.empty_cache() + show_gpu("after unload ip_adapter") + + latents = self.interpolate_latents(latents,interpolation_factor, device) + + # Return latents if requested (this will never be a dict) + if not output_type == "latent": + video = self.decode_latents(latents) + else: + video = latents + + # Convert to tensor + if output_type == "tensor": + video = torch.from_numpy(video) + + # Offload last model to CPU + if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None: + self.final_offload_hook.offload() + + if not return_dict: + return video + + return AnimationPipelineOutput(videos=video) + + def progress_bar(self, iterable=None, total=None): + if not hasattr(self, "_progress_bar_config"): + self._progress_bar_config = {} + elif not isinstance(self._progress_bar_config, dict): + raise ValueError( + f"`self._progress_bar_config` should be of type `dict`, but is {type(self._progress_bar_config)}." 
+ ) + + if iterable is not None: + return tqdm(iterable, **self._progress_bar_config) + elif total is not None: + return tqdm(total=total, **self._progress_bar_config) + else: + raise ValueError("Either `total` or `iterable` has to be defined.") + + def freeze(self): + logger.debug("Freezing pipeline...") + _ = self.unet.eval() + self.unet = self.unet.requires_grad_(False) + self.unet.train = nop_train + + _ = self.text_encoder.eval() + self.text_encoder = self.text_encoder.requires_grad_(False) + self.text_encoder.train = nop_train + + _ = self.vae.eval() + self.vae = self.vae.requires_grad_(False) + self.vae.train = nop_train diff --git a/src/animatediff/pipelines/context.py b/src/animatediff/pipelines/context.py new file mode 100644 index 0000000000000000000000000000000000000000..f6569955c3fe36e05ba3086f9cfee5a7c50ec4db --- /dev/null +++ b/src/animatediff/pipelines/context.py @@ -0,0 +1,114 @@ +from typing import Callable, Optional + +import numpy as np + + +# Whatever this is, it's utterly cursed. +def ordered_halving(val): + bin_str = f"{val:064b}" + bin_flip = bin_str[::-1] + as_int = int(bin_flip, 2) + + return as_int / (1 << 64) + + +# I have absolutely no idea how this works and I don't like that. +def uniform( + step: int = ..., + num_steps: Optional[int] = None, + num_frames: int = ..., + context_size: Optional[int] = None, + context_stride: int = 3, + context_overlap: int = 4, + closed_loop: bool = True, +): + if num_frames <= context_size: + yield list(range(num_frames)) + return + + context_stride = min(context_stride, int(np.ceil(np.log2(num_frames / context_size))) + 1) + + for context_step in 1 << np.arange(context_stride): + pad = int(round(num_frames * ordered_halving(step))) + for j in range( + int(ordered_halving(step) * context_step) + pad, + num_frames + pad + (0 if closed_loop else -context_overlap), + (context_size * context_step - context_overlap), + ): + yield [e % num_frames for e in range(j, j + context_size * context_step, context_step)] + + +def shuffle( + step: int = ..., + num_steps: Optional[int] = None, + num_frames: int = ..., + context_size: Optional[int] = None, + context_stride: int = 3, + context_overlap: int = 4, + closed_loop: bool = True, +): + import random + c = list(range(num_frames)) + c = random.sample(c, len(c)) + + if len(c) % context_size: + c += c[0:context_size - len(c) % context_size] + + c = random.sample(c, len(c)) + + for i in range(0, len(c), context_size): + yield c[i:i+context_size] + + +def composite( + step: int = ..., + num_steps: Optional[int] = None, + num_frames: int = ..., + context_size: Optional[int] = None, + context_stride: int = 3, + context_overlap: int = 4, + closed_loop: bool = True, +): + if (step/num_steps) < 0.1: + return shuffle(step,num_steps,num_frames,context_size,context_stride,context_overlap,closed_loop) + else: + return uniform(step,num_steps,num_frames,context_size,context_stride,context_overlap,closed_loop) + + +def get_context_scheduler(name: str) -> Callable: + match name: + case "uniform": + return uniform + case "shuffle": + return shuffle + case "composite": + return composite + case _: + raise ValueError(f"Unknown context_overlap policy {name}") + + +def get_total_steps( + scheduler, + timesteps: list[int], + num_steps: Optional[int] = None, + num_frames: int = ..., + context_size: Optional[int] = None, + context_stride: int = 3, + context_overlap: int = 4, + closed_loop: bool = True, +): + return sum( + len( + list( + scheduler( + i, + num_steps, + num_frames, + context_size, + context_stride, + 
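+                        # Illustrative only (values assumed): each item `uniform` yields is a
+                        # strided, wrapping window of `context_size` frame indices, so every
+                        # frame is denoised inside several overlapping temporal contexts.
+                        # num_frames=16 with context_size=16 gives a single window
+                        # [0, 1, ..., 15]; num_frames=32 with context_size=16 and
+                        # context_overlap=4 walks windows forward 12 frames at a time
+                        # (context_size * context_step - context_overlap) and wraps indices
+                        # modulo num_frames when closed_loop is True.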
context_overlap,
+                    )
+                )
+            )
+        for i in range(len(timesteps))
+    )
diff --git a/src/animatediff/pipelines/lora.py b/src/animatediff/pipelines/lora.py
new file mode 100644
index 0000000000000000000000000000000000000000..3df57d475379df5edaa862fed2b50aa4700062f9
--- /dev/null
+++ b/src/animatediff/pipelines/lora.py
@@ -0,0 +1,243 @@
+import logging
+
+from safetensors.torch import load_file
+
+from animatediff import get_dir
+from animatediff.utils.lora_diffusers import (LoRANetwork,
+                                              create_network_from_weights)
+
+logger = logging.getLogger(__name__)
+
+data_dir = get_dir("data")
+
+
+def merge_safetensors_lora(text_encoder, unet, lora_path, alpha=0.75, is_animatediff=True):
+    sd = load_file(lora_path)
+
+    # dump the loaded tensors only when debug logging is on
+    if logger.isEnabledFor(logging.DEBUG):
+        for key, tensor in sd.items():
+            logger.debug(f"{key} {tensor.shape}")
+
+    logger.info("create LoRA network")
+    lora_network: LoRANetwork = create_network_from_weights(text_encoder, unet, sd, multiplier=alpha, is_animatediff=is_animatediff)
+    logger.info("load LoRA network weights")
+    lora_network.load_state_dict(sd, False)
+    lora_network.merge_to(alpha)
+
+
+def load_lora_map(pipe, lora_map_config, video_length, is_sdxl=False):
+    new_map = {}
+    for item in lora_map_config:
+        lora_path = data_dir.joinpath(item)
+        if isinstance(lora_map_config[item], (float, int)):
+            # a bare number means a fixed scale: merge the LoRA weights in directly
+            te_en = [pipe.text_encoder, pipe.text_encoder_2] if is_sdxl else pipe.text_encoder
+            merge_safetensors_lora(te_en, pipe.unet, lora_path, lora_map_config[item], not is_sdxl)
+        else:
+            new_map[lora_path] = lora_map_config[item]
+
+    lora_map = LoraMap(pipe, new_map, video_length, is_sdxl)
+    pipe.lora_map = lora_map if lora_map.is_valid else None
+
+
+def load_lcm_lora(pipe, lcm_map, is_sdxl=False, is_merge=False):
+    if is_sdxl:
+        lora_path = data_dir.joinpath("models/lcm_lora/sdxl/pytorch_lora_weights.safetensors")
+    else:
+        lora_path = data_dir.joinpath("models/lcm_lora/sd15/pytorch_lora_weights.safetensors")
+    logger.info(f"{lora_path=}")
+
+    if is_merge:
+        te_en = [pipe.text_encoder, pipe.text_encoder_2] if is_sdxl else pipe.text_encoder
+        merge_safetensors_lora(te_en, pipe.unet, lora_path, 1.0, not is_sdxl)
+        pipe.lcm = None
+        return
+
+    lcm = LcmLora(pipe, is_sdxl, lora_path, lcm_map)
+    pipe.lcm = lcm if lcm.is_valid else None
+
+
+class LcmLora:
+    def __init__(
+        self,
+        pipe,
+        is_sdxl,
+        lora_path,
+        lcm_map,
+    ):
+        self.is_valid = False
+
+        sd = load_file(lora_path)
+        if not sd:
+            return
+
+        te_en = [pipe.text_encoder, pipe.text_encoder_2] if is_sdxl else pipe.text_encoder
+        lora_network: LoRANetwork = create_network_from_weights(te_en, pipe.unet, sd, multiplier=1.0, is_animatediff=not is_sdxl)
+        lora_network.load_state_dict(sd, False)
+        lora_network.apply_to(1.0)
+        self.network = lora_network
+
+        self.is_valid = True
+
+        self.start_scale = lcm_map["start_scale"]
+        self.end_scale = lcm_map["end_scale"]
+        self.gradient_start = lcm_map["gradient_start"]
+        self.gradient_end = lcm_map["gradient_end"]
+
+    def to(
+        self,
+        device,
+        dtype,
+    ):
+        self.network.to(device=device, dtype=dtype)
+
+    def apply(
+        self,
+        step,
+        total_steps,
+    ):
+        # ramp the LCM-LoRA scale from start_scale to end_scale across
+        # [gradient_start, gradient_end] of the denoising schedule
+        step += 1
+        progress = step / total_steps
+
+        if progress < self.gradient_start:
+            scale = self.start_scale
+        elif progress > self.gradient_end:
+            scale = self.end_scale
+        else:
+            if (self.gradient_end - self.gradient_start) < 1e-4:
+                progress = 0
+            else:
+                progress = (progress - self.gradient_start) / (self.gradient_end - self.gradient_start)
+            scale = (self.end_scale - self.start_scale) * progress
+            scale += self.start_scale
+
+        self.network.active(scale)
+
+    def unapply(
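+        # Worked example of the ramp in `apply` above (values assumed): with
+        # start_scale=1.0, end_scale=0.5, gradient_start=0.2, gradient_end=0.8,
+        # step 10 of 20 gives progress = 11/20 = 0.55, remapped to
+        # (0.55 - 0.2) / 0.6 ≈ 0.583, so scale = 1.0 + 0.583 * (0.5 - 1.0) ≈ 0.71.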
+ self, + ): + self.network.deactive( ) + + + +class LoraMap: + def __init__( + self, + pipe, + lora_map, + video_length, + is_sdxl, + ): + self.networks = [] + + def create_schedule(scales, length): + scales = { int(i):scales[i] for i in scales } + keys = sorted(scales.keys()) + + if len(keys) == 1: + return { i:scales[keys[0]] for i in range(length) } + keys = keys + [keys[0]] + + schedule={} + + def calc(rate,start_v,end_v): + return start_v + (rate * rate)*(end_v - start_v) + + for key_prev,key_next in zip(keys[:-1],keys[1:]): + v1 = scales[key_prev] + v2 = scales[key_next] + if key_prev > key_next: + key_next += length + for i in range(key_prev,key_next): + dist = i-key_prev + if i >= length: + i -= length + schedule[i] = calc( dist/(key_next-key_prev), v1, v2 ) + return schedule + + for lora_path in lora_map: + sd = load_file(lora_path) + if not sd: + continue + te_en = [pipe.text_encoder, pipe.text_encoder_2] if is_sdxl else pipe.text_encoder + lora_network: LoRANetwork = create_network_from_weights(te_en, pipe.unet, sd, multiplier=0.75, is_animatediff=not is_sdxl) + lora_network.load_state_dict(sd, False) + lora_network.apply_to(0.75) + + self.networks.append( + { + "network":lora_network, + "region":lora_map[lora_path]["region"], + "schedule": create_schedule(lora_map[lora_path]["scale"], video_length ) + } + ) + + def region_convert(i): + if i == "background": + return 0 + else: + return int(i) + 1 + + for net in self.networks: + net["region"] = [ region_convert(i) for i in net["region"] ] + +# for n in self.networks: +# logger.info(f"{n['region']=}") +# logger.info(f"{n['schedule']=}") + + if self.networks: + self.is_valid = True + else: + self.is_valid = False + + def to( + self, + device, + dtype, + ): + for net in self.networks: + net["network"].to(device=device, dtype=dtype) + + def apply( + self, + cond_index, + cond_nums, + frame_no, + ): + ''' + neg 0 (bg) + neg 1 + neg 2 + pos 0 (bg) + pos 1 + pos 2 + ''' + + region_index = cond_index if cond_index < cond_nums//2 else cond_index - cond_nums//2 +# logger.info(f"{cond_index=}") +# logger.info(f"{cond_nums=}") +# logger.info(f"{region_index=}") + + + for i,net in enumerate(self.networks): + if region_index in net["region"]: + scale = net["schedule"][frame_no] + if scale > 0: + net["network"].active( scale ) +# logger.info(f"{i=} active {scale=}") + else: + net["network"].deactive( ) +# logger.info(f"{i=} DEactive") + + else: + net["network"].deactive( ) + # logger.info(f"{i=} DEactive") + + def unapply( + self, + ): + + for net in self.networks: + net["network"].deactive( ) + diff --git a/src/animatediff/pipelines/pipeline_controlnet_img2img_reference.py b/src/animatediff/pipelines/pipeline_controlnet_img2img_reference.py new file mode 100644 index 0000000000000000000000000000000000000000..e3227633ebf7d1ffd585c57f16a8e45cb61c1fd7 --- /dev/null +++ b/src/animatediff/pipelines/pipeline_controlnet_img2img_reference.py @@ -0,0 +1,1595 @@ +# https://github.com/huggingface/diffusers/blob/e831749e11f9b66de36cbbadf5820b9eb8f16ea8/src/diffusers/pipelines/controlnet/pipeline_controlnet_img2img_reference.py + +# Copyright 2023 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
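+# (A worked example of `LoraMap.create_schedule` from lora.py above, values
+# assumed: scales {"0": 0.0, "8": 1.0} over a 16-frame video put frame 4
+# halfway to the second keyframe, so rate = 4/8 = 0.5 and, with the quadratic
+# easing calc(rate, v1, v2) = v1 + rate**2 * (v2 - v1), its scale is
+# 0.0 + 0.25 * (1.0 - 0.0) = 0.25; frames 8..15 ease back toward frame 0's
+# value because the keyframe list wraps around.)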
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import inspect +import warnings +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +import numpy as np +import PIL.Image +import torch +import torch.nn.functional as F +from diffusers.image_processor import VaeImageProcessor +from diffusers.loaders import LoraLoaderMixin, TextualInversionLoaderMixin +from diffusers.models import (AutoencoderKL, ControlNetModel, + UNet2DConditionModel) +from diffusers.models.attention import BasicTransformerBlock +from diffusers.models.unet_2d_blocks import (CrossAttnDownBlock2D, + CrossAttnUpBlock2D, DownBlock2D, + UpBlock2D) +from diffusers.pipelines.controlnet.multicontrolnet import MultiControlNetModel +from diffusers.pipelines.pipeline_utils import DiffusionPipeline +from diffusers.pipelines.stable_diffusion import StableDiffusionPipelineOutput +from diffusers.pipelines.stable_diffusion.safety_checker import \ + StableDiffusionSafetyChecker +from diffusers.schedulers import KarrasDiffusionSchedulers +from diffusers.utils import (deprecate, is_accelerate_available, + is_accelerate_version, logging, + replace_example_docstring) +from diffusers.utils.torch_utils import is_compiled_module, randn_tensor +from transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +EXAMPLE_DOC_STRING = """ + Examples: + ```py + >>> import sys + >>> import cv2 + >>> import torch + >>> import numpy as np + >>> from PIL import Image + >>> from diffusers import EulerAncestralDiscreteScheduler, ControlNetModel, StableDiffusionControlNetImg2ImgReferencePipeline + >>> from diffusers.utils import load_image + + >>> input_image = load_image("https://hf.co/datasets/huggingface/documentation-images/resolve/main/diffusers/input_image_vermeer.png") + + >>> # get canny image + >>> image = cv2.Canny(np.array(input_image), 100, 200) + >>> image = image[:, :, None] + >>> image = np.concatenate([image, image, image], axis=2) + >>> canny_image = Image.fromarray(image) + + >>> controlnet = [] + >>> controlnet.append(ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny", torch_dtype=torch.float16)) + >>> model = "runwayml/stable-diffusion-v1-5" + >>> pipe = StableDiffusionControlNetImg2ImgReferencePipeline.from_pretrained( + >>> model, + >>> controlnet=controlnet, + >>> safety_checker=None, + >>> torch_dtype=torch.float16 + >>> ).to('cuda:0') + + >>> pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config) + >>> pipe.enable_xformers_memory_efficient_attention() + >>> pipe.enable_model_cpu_offload() + + >>> result_img = pipe( + >>> prompt="oil painting", + >>> num_inference_steps=20, + >>> image=input_image, + >>> strength=0.8, + >>> control_image=[canny_image], + >>> controlnet_conditioning_scale = [0.01], + >>> ref_image=input_image, + >>> attention_auto_machine_weight = 0.3, + >>> gn_auto_machine_weight = 0.3, + >>> style_fidelity = 1, + >>> reference_attn=True, + >>> reference_adain=True + >>> ).images[0] + + >>> result_img.show() + ``` +""" + + +def prepare_image(image): + if isinstance(image, torch.Tensor): + # Batch single image + 
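+        # (a bare (C, H, W) tensor gets a leading batch dim below so the rest of
+        # the function can assume (B, C, H, W); tensor inputs are assumed to be
+        # pre-normalized, while PIL/ndarray inputs are scaled to [-1, 1] later)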
if image.ndim == 3: + image = image.unsqueeze(0) + + image = image.to(dtype=torch.float32) + else: + # preprocess image + if isinstance(image, (PIL.Image.Image, np.ndarray)): + image = [image] + + if isinstance(image, list) and isinstance(image[0], PIL.Image.Image): + image = [np.array(i.convert("RGB"))[None, :] for i in image] + image = np.concatenate(image, axis=0) + elif isinstance(image, list) and isinstance(image[0], np.ndarray): + image = np.concatenate([i[None, :] for i in image], axis=0) + + image = image.transpose(0, 3, 1, 2) + image = torch.from_numpy(image).to(dtype=torch.float32) / 127.5 - 1.0 + + return image + +def torch_dfs(model: torch.nn.Module): + result = [model] + for child in model.children(): + result += torch_dfs(child) + return result + +class StableDiffusionControlNetImg2ImgReferencePipeline(DiffusionPipeline, TextualInversionLoaderMixin, LoraLoaderMixin): + r""" + Pipeline for text-to-image generation using Stable Diffusion with ControlNet guidance. + + This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the + library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) + + In addition the pipeline inherits the following loading methods: + - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`] + + Args: + vae ([`AutoencoderKL`]): + Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations. + text_encoder ([`CLIPTextModel`]): + Frozen text-encoder. Stable Diffusion uses the text portion of + [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically + the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant. + tokenizer (`CLIPTokenizer`): + Tokenizer of class + [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer). + unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents. + controlnet ([`ControlNetModel`] or `List[ControlNetModel]`): + Provides additional conditioning to the unet during the denoising process. If you set multiple ControlNets + as a list, the outputs from each ControlNet are added together to create one combined additional + conditioning. + scheduler ([`SchedulerMixin`]): + A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of + [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. + safety_checker ([`StableDiffusionSafetyChecker`]): + Classification module that estimates whether generated images could be considered offensive or harmful. + Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details. + feature_extractor ([`CLIPImageProcessor`]): + Model that extracts features from generated images to be used as inputs for the `safety_checker`. 
+ """ + _optional_components = ["safety_checker", "feature_extractor"] + + def __init__( + self, + vae: AutoencoderKL, + text_encoder: CLIPTextModel, + tokenizer: CLIPTokenizer, + unet: UNet2DConditionModel, + controlnet: Union[ControlNetModel, List[ControlNetModel], Tuple[ControlNetModel], MultiControlNetModel], + scheduler: KarrasDiffusionSchedulers, + safety_checker: StableDiffusionSafetyChecker, + feature_extractor: CLIPImageProcessor, + requires_safety_checker: bool = True, + ): + super().__init__() + + if safety_checker is None and requires_safety_checker: + logger.warning( + f"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure" + " that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered" + " results in services or applications open to the public. Both the diffusers team and Hugging Face" + " strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling" + " it only for use-cases that involve analyzing network behavior or auditing its results. For more" + " information, please have a look at https://github.com/huggingface/diffusers/pull/254 ." + ) + + if safety_checker is not None and feature_extractor is None: + raise ValueError( + "Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety" + " checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead." + ) + + if isinstance(controlnet, (list, tuple)): + controlnet = MultiControlNetModel(controlnet) + + self.register_modules( + vae=vae, + text_encoder=text_encoder, + tokenizer=tokenizer, + unet=unet, + controlnet=controlnet, + scheduler=scheduler, + safety_checker=safety_checker, + feature_extractor=feature_extractor, + ) + self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1) + self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor, do_convert_rgb=True) + self.control_image_processor = VaeImageProcessor( + vae_scale_factor=self.vae_scale_factor, do_convert_rgb=True, do_normalize=False + ) + self.register_to_config(requires_safety_checker=requires_safety_checker) + + def prepare_ref_latents(self, refimage, batch_size, dtype, device, generator, do_classifier_free_guidance): + refimage = refimage.to(device=device, dtype=dtype) + + # encode the mask image into latents space so we can concatenate it to the latents + if isinstance(generator, list): + ref_image_latents = [ + self.vae.encode(refimage[i : i + 1]).latent_dist.sample(generator=generator[i]) + for i in range(batch_size) + ] + ref_image_latents = torch.cat(ref_image_latents, dim=0) + else: + ref_image_latents = self.vae.encode(refimage).latent_dist.sample(generator=generator) + ref_image_latents = self.vae.config.scaling_factor * ref_image_latents + + # duplicate mask and ref_image_latents for each generation per prompt, using mps friendly method + if ref_image_latents.shape[0] < batch_size: + if not batch_size % ref_image_latents.shape[0] == 0: + raise ValueError( + "The passed images and the required batch size don't match. Images are supposed to be duplicated" + f" to a total batch size of {batch_size}, but {ref_image_latents.shape[0]} images were passed." + " Make sure the number of images that you pass is divisible by the total requested batch size." 
+ ) + ref_image_latents = ref_image_latents.repeat(batch_size // ref_image_latents.shape[0], 1, 1, 1) + + ref_image_latents = torch.cat([ref_image_latents] * 2) if do_classifier_free_guidance else ref_image_latents + + # aligning device to prevent device errors when concating it with the latent model input + ref_image_latents = ref_image_latents.to(device=device, dtype=dtype) + return ref_image_latents + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing + def enable_vae_slicing(self): + r""" + Enable sliced VAE decoding. + + When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several + steps. This is useful to save some memory and allow larger batch sizes. + """ + self.vae.enable_slicing() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing + def disable_vae_slicing(self): + r""" + Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to + computing decoding in one step. + """ + self.vae.disable_slicing() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_tiling + def enable_vae_tiling(self): + r""" + Enable tiled VAE decoding. + + When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in + several steps. This is useful to save a large amount of memory and to allow the processing of larger images. + """ + self.vae.enable_tiling() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_tiling + def disable_vae_tiling(self): + r""" + Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to + computing decoding in one step. + """ + self.vae.disable_tiling() + + def enable_sequential_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet, + text_encoder, vae, controlnet, and safety checker have their state dicts saved to CPU and then are moved to a + `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called. + Note that offloading happens on a submodule basis. Memory savings are higher than with + `enable_model_cpu_offload`, but performance is lower. + """ + if is_accelerate_available(): + from accelerate import cpu_offload + else: + raise ImportError("Please install accelerate via `pip install accelerate`") + + device = torch.device(f"cuda:{gpu_id}") + + for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae, self.controlnet]: + cpu_offload(cpu_offloaded_model, device) + + if self.safety_checker is not None: + cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True) + + def enable_model_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared + to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward` + method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with + `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`. 
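+
+        A minimal usage sketch (assumes a CUDA device and `accelerate>=0.17.0`):
+
+        >>> pipe = StableDiffusionControlNetImg2ImgReferencePipeline.from_pretrained(
+        >>>     "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
+        >>> )
+        >>> pipe.enable_model_cpu_offload()
+        >>> # submodels now migrate to the GPU on demand, one whole model at a time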
+ """ + if is_accelerate_available() and is_accelerate_version(">=", "0.17.0.dev0"): + from accelerate import cpu_offload_with_hook + else: + raise ImportError("`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.") + + device = torch.device(f"cuda:{gpu_id}") + + hook = None + for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]: + _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook) + + if self.safety_checker is not None: + # the safety checker can offload the vae again + _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook) + + # control net hook has be manually offloaded as it alternates with unet + cpu_offload_with_hook(self.controlnet, device) + + # We'll offload the last model manually. + self.final_offload_hook = hook + + @property + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device + def _execution_device(self): + r""" + Returns the device on which the pipeline's models will be executed. After calling + `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module + hooks. + """ + if not hasattr(self.unet, "_hf_hook"): + return self.device + for module in self.unet.modules(): + if ( + hasattr(module, "_hf_hook") + and hasattr(module._hf_hook, "execution_device") + and module._hf_hook.execution_device is not None + ): + return torch.device(module._hf_hook.execution_device) + return self.device + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt + def _encode_prompt( + self, + prompt, + device, + num_images_per_prompt, + do_classifier_free_guidance, + negative_prompt=None, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + lora_scale: Optional[float] = None, + ): + r""" + Encodes the prompt into text encoder hidden states. + + Args: + prompt (`str` or `List[str]`, *optional*): + prompt to be encoded + device: (`torch.device`): + torch device + num_images_per_prompt (`int`): + number of images that should be generated per prompt + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + negative_prompt (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation. If not defined, one has to pass + `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is + less than `1`). + prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not + provided, text embeddings will be generated from `prompt` input argument. + negative_prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt + weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input + argument. + lora_scale (`float`, *optional*): + A lora scale that will be applied to all LoRA layers of the text encoder if LoRA layers are loaded. 
+ """ + # set lora scale so that monkey patched LoRA + # function of text encoder can correctly access it + if lora_scale is not None and isinstance(self, LoraLoaderMixin): + self._lora_scale = lora_scale + + if prompt is not None and isinstance(prompt, str): + batch_size = 1 + elif prompt is not None and isinstance(prompt, list): + batch_size = len(prompt) + else: + batch_size = prompt_embeds.shape[0] + + if prompt_embeds is None: + # textual inversion: procecss multi-vector tokens if necessary + if isinstance(self, TextualInversionLoaderMixin): + prompt = self.maybe_convert_prompt(prompt, self.tokenizer) + + text_inputs = self.tokenizer( + prompt, + padding="max_length", + max_length=self.tokenizer.model_max_length, + truncation=True, + return_tensors="pt", + ) + text_input_ids = text_inputs.input_ids + untruncated_ids = self.tokenizer(prompt, padding="longest", return_tensors="pt").input_ids + + if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal( + text_input_ids, untruncated_ids + ): + removed_text = self.tokenizer.batch_decode( + untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1] + ) + logger.warning( + "The following part of your input was truncated because CLIP can only handle sequences up to" + f" {self.tokenizer.model_max_length} tokens: {removed_text}" + ) + + if hasattr(self.text_encoder.config, "use_attention_mask") and self.text_encoder.config.use_attention_mask: + attention_mask = text_inputs.attention_mask.to(device) + else: + attention_mask = None + + prompt_embeds = self.text_encoder( + text_input_ids.to(device), + attention_mask=attention_mask, + ) + prompt_embeds = prompt_embeds[0] + + prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device) + + bs_embed, seq_len, _ = prompt_embeds.shape + # duplicate text embeddings for each generation per prompt, using mps friendly method + prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1) + prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1) + + # get unconditional embeddings for classifier free guidance + if do_classifier_free_guidance and negative_prompt_embeds is None: + uncond_tokens: List[str] + if negative_prompt is None: + uncond_tokens = [""] * batch_size + elif prompt is not None and type(prompt) is not type(negative_prompt): + raise TypeError( + f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=" + f" {type(prompt)}." + ) + elif isinstance(negative_prompt, str): + uncond_tokens = [negative_prompt] + elif batch_size != len(negative_prompt): + raise ValueError( + f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:" + f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches" + " the batch size of `prompt`." 
+ ) + else: + uncond_tokens = negative_prompt + + # textual inversion: procecss multi-vector tokens if necessary + if isinstance(self, TextualInversionLoaderMixin): + uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer) + + max_length = prompt_embeds.shape[1] + uncond_input = self.tokenizer( + uncond_tokens, + padding="max_length", + max_length=max_length, + truncation=True, + return_tensors="pt", + ) + + if hasattr(self.text_encoder.config, "use_attention_mask") and self.text_encoder.config.use_attention_mask: + attention_mask = uncond_input.attention_mask.to(device) + else: + attention_mask = None + + negative_prompt_embeds = self.text_encoder( + uncond_input.input_ids.to(device), + attention_mask=attention_mask, + ) + negative_prompt_embeds = negative_prompt_embeds[0] + + if do_classifier_free_guidance: + # duplicate unconditional embeddings for each generation per prompt, using mps friendly method + seq_len = negative_prompt_embeds.shape[1] + + negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device) + + negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1) + negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1) + + # For classifier free guidance, we need to do two forward passes. + # Here we concatenate the unconditional and text embeddings into a single batch + # to avoid doing two forward passes + prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds]) + + return prompt_embeds + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker + def run_safety_checker(self, image, device, dtype): + if self.safety_checker is None: + has_nsfw_concept = None + else: + if torch.is_tensor(image): + feature_extractor_input = self.image_processor.postprocess(image, output_type="pil") + else: + feature_extractor_input = self.image_processor.numpy_to_pil(image) + safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors="pt").to(device) + image, has_nsfw_concept = self.safety_checker( + images=image, clip_input=safety_checker_input.pixel_values.to(dtype) + ) + return image, has_nsfw_concept + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents + def decode_latents(self, latents): + warnings.warn( + "The decode_latents method is deprecated and will be removed in a future version. Please" + " use VaeImageProcessor instead", + FutureWarning, + ) + latents = 1 / self.vae.config.scaling_factor * latents + image = self.vae.decode(latents, return_dict=False)[0] + image = (image / 2 + 0.5).clamp(0, 1) + # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16 + image = image.cpu().permute(0, 2, 3, 1).float().numpy() + return image + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs + def prepare_extra_step_kwargs(self, generator, eta): + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. 
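+        # eta=0.0 yields deterministic DDIM sampling, while eta=1.0 recovers
+        # DDPM-like stochastic sampling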
+ # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 + # and should be between [0, 1] + + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + extra_step_kwargs = {} + if accepts_eta: + extra_step_kwargs["eta"] = eta + + # check if the scheduler accepts generator + accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) + if accepts_generator: + extra_step_kwargs["generator"] = generator + return extra_step_kwargs + + def check_inputs( + self, + prompt, + image, + callback_steps, + negative_prompt=None, + prompt_embeds=None, + negative_prompt_embeds=None, + controlnet_conditioning_scale=1.0, + ): + if (callback_steps is None) or ( + callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) + ): + raise ValueError( + f"`callback_steps` has to be a positive integer but is {callback_steps} of type" + f" {type(callback_steps)}." + ) + + if prompt is not None and prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to" + " only forward one of the two." + ) + elif prompt is None and prompt_embeds is None: + raise ValueError( + "Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined." + ) + elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)): + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + + if negative_prompt is not None and negative_prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:" + f" {negative_prompt_embeds}. Please make sure to only forward one of the two." + ) + + if prompt_embeds is not None and negative_prompt_embeds is not None: + if prompt_embeds.shape != negative_prompt_embeds.shape: + raise ValueError( + "`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but" + f" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`" + f" {negative_prompt_embeds.shape}." + ) + + # `prompt` needs more sophisticated handling when there are multiple + # conditionings. + if isinstance(self.controlnet, MultiControlNetModel): + if isinstance(prompt, list): + logger.warning( + f"You have {len(self.controlnet.nets)} ControlNets and you have passed {len(prompt)}" + " prompts. The conditionings will be fixed across the prompts." + ) + + # Check `image` + is_compiled = hasattr(F, "scaled_dot_product_attention") and isinstance( + self.controlnet, torch._dynamo.eval_frame.OptimizedModule + ) + if ( + isinstance(self.controlnet, ControlNetModel) + or is_compiled + and isinstance(self.controlnet._orig_mod, ControlNetModel) + ): + self.check_image(image, prompt, prompt_embeds) + elif ( + isinstance(self.controlnet, MultiControlNetModel) + or is_compiled + and isinstance(self.controlnet._orig_mod, MultiControlNetModel) + ): + if not isinstance(image, list): + raise TypeError("For multiple controlnets: `image` must be type `list`") + + # When `image` is a nested list: + # (e.g. 
[[canny_image_1, pose_image_1], [canny_image_2, pose_image_2]]) + elif any(isinstance(i, list) for i in image): + raise ValueError("A single batch of multiple conditionings are supported at the moment.") + elif len(image) != len(self.controlnet.nets): + raise ValueError( + f"For multiple controlnets: `image` must have the same length as the number of controlnets, but got {len(image)} images and {len(self.controlnet.nets)} ControlNets." + ) + + for image_ in image: + self.check_image(image_, prompt, prompt_embeds) + else: + if self.controlnet == None: + return + assert False + + # Check `controlnet_conditioning_scale` + if ( + isinstance(self.controlnet, ControlNetModel) + or is_compiled + and isinstance(self.controlnet._orig_mod, ControlNetModel) + ): + if not isinstance(controlnet_conditioning_scale, float): + raise TypeError("For single controlnet: `controlnet_conditioning_scale` must be type `float`.") + elif ( + isinstance(self.controlnet, MultiControlNetModel) + or is_compiled + and isinstance(self.controlnet._orig_mod, MultiControlNetModel) + ): + if isinstance(controlnet_conditioning_scale, list): + if any(isinstance(i, list) for i in controlnet_conditioning_scale): + raise ValueError("A single batch of multiple conditionings are supported at the moment.") + elif isinstance(controlnet_conditioning_scale, list) and len(controlnet_conditioning_scale) != len( + self.controlnet.nets + ): + raise ValueError( + "For multiple controlnets: When `controlnet_conditioning_scale` is specified as `list`, it must have" + " the same length as the number of controlnets" + ) + else: + assert False + + # Copied from diffusers.pipelines.controlnet.pipeline_controlnet.StableDiffusionControlNetPipeline.check_image + def check_image(self, image, prompt, prompt_embeds): + image_is_pil = isinstance(image, PIL.Image.Image) + image_is_tensor = isinstance(image, torch.Tensor) + image_is_np = isinstance(image, np.ndarray) + image_is_pil_list = isinstance(image, list) and isinstance(image[0], PIL.Image.Image) + image_is_tensor_list = isinstance(image, list) and isinstance(image[0], torch.Tensor) + image_is_np_list = isinstance(image, list) and isinstance(image[0], np.ndarray) + + if ( + not image_is_pil + and not image_is_tensor + and not image_is_np + and not image_is_pil_list + and not image_is_tensor_list + and not image_is_np_list + ): + raise TypeError( + f"image must be passed and be one of PIL image, numpy array, torch tensor, list of PIL images, list of numpy arrays or list of torch tensors, but is {type(image)}" + ) + + if image_is_pil: + image_batch_size = 1 + else: + image_batch_size = len(image) + + if prompt is not None and isinstance(prompt, str): + prompt_batch_size = 1 + elif prompt is not None and isinstance(prompt, list): + prompt_batch_size = len(prompt) + elif prompt_embeds is not None: + prompt_batch_size = prompt_embeds.shape[0] + + if image_batch_size != 1 and image_batch_size != prompt_batch_size: + raise ValueError( + f"If image batch size is not 1, image batch size must be same as prompt batch size. 
image batch size: {image_batch_size}, prompt batch size: {prompt_batch_size}" + ) + + # Copied from diffusers.pipelines.controlnet.pipeline_controlnet.StableDiffusionControlNetPipeline.prepare_image + def prepare_control_image( + self, + image, + width, + height, + batch_size, + num_images_per_prompt, + device, + dtype, + do_classifier_free_guidance=False, + guess_mode=False, + ): + image = self.control_image_processor.preprocess(image, height=height, width=width).to(dtype=torch.float32) + image_batch_size = image.shape[0] + + if image_batch_size == 1: + repeat_by = batch_size + else: + # image batch size is the same as prompt batch size + repeat_by = num_images_per_prompt + + image = image.repeat_interleave(repeat_by, dim=0) + + image = image.to(device=device, dtype=dtype) + + if do_classifier_free_guidance and not guess_mode: + image = torch.cat([image] * 2) + + return image + + def prepare_ref_image( + self, + image, + width, + height, + batch_size, + num_images_per_prompt, + device, + dtype, + do_classifier_free_guidance=False, + guess_mode=False, + ): + image = self.image_processor.preprocess(image, height=height, width=width).to(dtype=torch.float32) + image_batch_size = image.shape[0] + + if image_batch_size == 1: + repeat_by = batch_size + else: + # image batch size is the same as prompt batch size + repeat_by = num_images_per_prompt + + image = image.repeat_interleave(repeat_by, dim=0) + + image = image.to(device=device, dtype=dtype) + + if do_classifier_free_guidance and not guess_mode: + image = torch.cat([image] * 2) + + return image + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.get_timesteps + def get_timesteps(self, num_inference_steps, strength, device): + # get the original timestep using init_timestep + init_timestep = min(int(num_inference_steps * strength), num_inference_steps) + + t_start = max(num_inference_steps - init_timestep, 0) + timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :] + + return timesteps, num_inference_steps - t_start + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.prepare_latents + def prepare_latents(self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None): + if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)): + raise ValueError( + f"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}" + ) + + image = image.to(device=device, dtype=dtype) + + batch_size = batch_size * num_images_per_prompt + + if image.shape[1] == 4: + init_latents = image + + else: + if isinstance(generator, list) and len(generator) != batch_size: + raise ValueError( + f"You have passed a list of generators of length {len(generator)}, but requested an effective batch" + f" size of {batch_size}. Make sure the batch size matches the length of the generators." 
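+            # Worked example for `get_timesteps` above: num_inference_steps=50 and
+            # strength=0.8 give init_timestep = min(int(50 * 0.8), 50) = 40 and
+            # t_start = 10, so only the last 40 scheduled steps run; higher strength
+            # keeps less of the input image.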
+ ) + + elif isinstance(generator, list): + init_latents = [ + self.vae.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size) + ] + init_latents = torch.cat(init_latents, dim=0) + else: + init_latents = self.vae.encode(image).latent_dist.sample(generator) + + init_latents = self.vae.config.scaling_factor * init_latents + + if batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] == 0: + # expand init_latents for batch_size + deprecation_message = ( + f"You have passed {batch_size} text prompts (`prompt`), but only {init_latents.shape[0]} initial" + " images (`image`). Initial images are now duplicating to match the number of text prompts. Note" + " that this behavior is deprecated and will be removed in a version 1.0.0. Please make sure to update" + " your script to pass as many initial images as text prompts to suppress this warning." + ) + deprecate("len(prompt) != len(image)", "1.0.0", deprecation_message, standard_warn=False) + additional_image_per_prompt = batch_size // init_latents.shape[0] + init_latents = torch.cat([init_latents] * additional_image_per_prompt, dim=0) + elif batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] != 0: + raise ValueError( + f"Cannot duplicate `image` of batch size {init_latents.shape[0]} to {batch_size} text prompts." + ) + else: + init_latents = torch.cat([init_latents], dim=0) + + shape = init_latents.shape + noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype) + + # get latents + init_latents = self.scheduler.add_noise(init_latents, noise, timestep) + latents = init_latents + + return latents + + @torch.no_grad() + @replace_example_docstring(EXAMPLE_DOC_STRING) + def __call__( + self, + prompt: Union[str, List[str]] = None, + image: Union[ + torch.FloatTensor, + PIL.Image.Image, + np.ndarray, + List[torch.FloatTensor], + List[PIL.Image.Image], + List[np.ndarray], + ] = None, + control_image: Union[ + torch.FloatTensor, + PIL.Image.Image, + np.ndarray, + List[torch.FloatTensor], + List[PIL.Image.Image], + List[np.ndarray], + ] = None, + ref_image: Union[torch.FloatTensor, PIL.Image.Image] = None, + height: Optional[int] = None, + width: Optional[int] = None, + strength: float = 0.8, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + negative_prompt: Optional[Union[str, List[str]]] = None, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, + generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None, + latents: Optional[torch.FloatTensor] = None, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + output_type: Optional[str] = "pil", + return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: int = 1, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + controlnet_conditioning_scale: Union[float, List[float]] = 0.8, + guess_mode: bool = False, + attention_auto_machine_weight: float = 1.0, + gn_auto_machine_weight: float = 1.0, + style_fidelity: float = 0.5, + reference_attn: bool = True, + reference_adain: bool = True, + ): + r""" + Function invoked when calling the pipeline for generation. + + Args: + prompt (`str` or `List[str]`, *optional*): + The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`. + instead. 
+            image (`torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, `List[torch.FloatTensor]`, `List[PIL.Image.Image]`,
+                `List[np.ndarray]`, `List[List[torch.FloatTensor]]`, `List[List[np.ndarray]]` or `List[List[PIL.Image.Image]]`):
+                The initial image to be used as the starting point for the image generation process. Can also accept
+                image latents as `image`; if latents are passed directly, they will not be encoded again.
+            control_image (`torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, `List[torch.FloatTensor]`, `List[PIL.Image.Image]`,
+                `List[np.ndarray]`, `List[List[torch.FloatTensor]]`, `List[List[np.ndarray]]` or `List[List[PIL.Image.Image]]`):
+                The ControlNet input condition. ControlNet uses this input condition to generate guidance for the UNet. If
+                the type is specified as `torch.FloatTensor`, it is passed to ControlNet as is. `PIL.Image.Image` can
+                also be accepted as an image. The dimensions of the output image default to `image`'s dimensions. If
+                height and/or width are passed, `image` is resized accordingly. If multiple ControlNets are
+                specified in init, images must be passed as a list such that each element of the list can be correctly
+                batched for input to a single ControlNet.
+            ref_image (`torch.FloatTensor`, `PIL.Image.Image`):
+                The Reference Control input condition. Reference Control uses this input condition to generate guidance
+                for the UNet. If the type is specified as `torch.FloatTensor`, it is passed to Reference Control as is.
+                `PIL.Image.Image` can also be accepted as an image.
+            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
+                The height in pixels of the generated image.
+            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
+                The width in pixels of the generated image.
+            strength (`float`, *optional*, defaults to 0.8):
+                Indicates how much to transform the initial `image`. Must be between 0 and 1; the larger the
+                `strength`, the more noise is added and the larger the portion of the denoising schedule that is run
+                (see `get_timesteps`). A value of 1 essentially ignores `image`.
+            num_inference_steps (`int`, *optional*, defaults to 50):
+                The number of denoising steps. More denoising steps usually lead to a higher quality image at the
+                expense of slower inference.
+            guidance_scale (`float`, *optional*, defaults to 7.5):
+                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
+                `guidance_scale` is defined as `w` of equation 2. of [Imagen
+                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
+                1`. A higher guidance scale encourages the model to generate images closely linked to the text `prompt`,
+                usually at the expense of lower image quality.
+            negative_prompt (`str` or `List[str]`, *optional*):
+                The prompt or prompts not to guide the image generation. If not defined, one has to pass
+                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is
+                less than `1`).
+            num_images_per_prompt (`int`, *optional*, defaults to 1):
+                The number of images to generate per prompt.
+            eta (`float`, *optional*, defaults to 0.0):
+                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
+                [`schedulers.DDIMScheduler`], will be ignored for others.
+            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
+                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
+                to make generation deterministic.
+            latents (`torch.FloatTensor`, *optional*):
+                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image
+                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents
+                tensor will be generated by sampling using the supplied random `generator`.
+            prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
+                provided, text embeddings will be generated from the `prompt` input argument.
+            negative_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, `negative_prompt_embeds` will be generated from the `negative_prompt` input
+                argument.
+            output_type (`str`, *optional*, defaults to `"pil"`):
+                The output format of the generated image. Choose between
+                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
+            return_dict (`bool`, *optional*, defaults to `True`):
+                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a
+                plain tuple.
+            callback (`Callable`, *optional*):
+                A function that will be called every `callback_steps` steps during inference. The function will be
+                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.
+            callback_steps (`int`, *optional*, defaults to 1):
+                The frequency at which the `callback` function will be called. If not specified, the callback will be
+                called at every step.
+            cross_attention_kwargs (`dict`, *optional*):
+                A kwargs dictionary that, if specified, is passed along to the `AttentionProcessor` as defined under
+                `self.processor` in
+                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+            controlnet_conditioning_scale (`float` or `List[float]`, *optional*, defaults to 1.0):
+                The outputs of the ControlNet are multiplied by `controlnet_conditioning_scale` before they are added
+                to the residual in the original UNet. If multiple ControlNets are specified in init, you can set the
+                corresponding scale as a list. Note that by default, a smaller conditioning scale is used for inpainting
+                than for [`~StableDiffusionControlNetPipeline.__call__`].
+            guess_mode (`bool`, *optional*, defaults to `False`):
+                In this mode, the ControlNet encoder tries its best to recognize the content of the input image even if
+                you remove all prompts. A `guidance_scale` between 3.0 and 5.0 is recommended.
+            attention_auto_machine_weight (`float`):
+                Weight for using the reference query as the self-attention context.
+                If `attention_auto_machine_weight=1.0`, the reference query is used for all self-attention contexts.
+            gn_auto_machine_weight (`float`):
+                Weight for using reference AdaIN. If `gn_auto_machine_weight=2.0`, all reference AdaIN plugins are used.
+            style_fidelity (`float`):
+                Style fidelity of `ref_uncond_xt`. If `style_fidelity=1.0`, the reference control dominates;
+                if `style_fidelity=0.0`, the prompt dominates; values in between balance the two.
+            reference_attn (`bool`):
+                Whether to use the reference query as the self-attention context.
+            reference_adain (`bool`):
+                Whether to use reference AdaIN.
+
+        Examples:
+
+        Returns:
+            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:
+            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.
+            When returning a tuple, the first element is a list with the generated images, and the second element is a
+            list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work"
+            (nsfw) content, according to the `safety_checker`.
+ """ + assert reference_attn or reference_adain, "`reference_attn` or `reference_adain` must be True." + + # 1. Check inputs. Raise error if not correct + self.check_inputs( + prompt, + control_image, + callback_steps, + negative_prompt, + prompt_embeds, + negative_prompt_embeds, + controlnet_conditioning_scale, + ) + + # 2. Define call parameters + if prompt is not None and isinstance(prompt, str): + batch_size = 1 + elif prompt is not None and isinstance(prompt, list): + batch_size = len(prompt) + else: + batch_size = prompt_embeds.shape[0] + + device = self._execution_device + # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) + # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` + # corresponds to doing no classifier free guidance. + do_classifier_free_guidance = guidance_scale > 1.0 + + if self.controlnet == None: + pass + else: + controlnet = self.controlnet._orig_mod if is_compiled_module(self.controlnet) else self.controlnet + + if isinstance(controlnet, MultiControlNetModel) and isinstance(controlnet_conditioning_scale, float): + controlnet_conditioning_scale = [controlnet_conditioning_scale] * len(controlnet.nets) + + global_pool_conditions = ( + controlnet.config.global_pool_conditions + if isinstance(controlnet, ControlNetModel) + else controlnet.nets[0].config.global_pool_conditions + ) + guess_mode = guess_mode or global_pool_conditions + + # 3. Encode input prompt + text_encoder_lora_scale = ( + cross_attention_kwargs.get("scale", None) if cross_attention_kwargs is not None else None + ) + prompt_embeds = self._encode_prompt( + prompt, + device, + num_images_per_prompt, + do_classifier_free_guidance, + negative_prompt, + prompt_embeds=prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + lora_scale=text_encoder_lora_scale, + ) + + # 4. Prepare image + image = self.image_processor.preprocess(image).to(dtype=torch.float32) + + # 5. Prepare controlnet_conditioning_image + if self.controlnet == None: + pass + else: + if isinstance(controlnet, ControlNetModel): + control_image = self.prepare_control_image( + image=control_image, + width=width, + height=height, + batch_size=batch_size * num_images_per_prompt, + num_images_per_prompt=num_images_per_prompt, + device=device, + dtype=controlnet.dtype, + do_classifier_free_guidance=do_classifier_free_guidance, + guess_mode=guess_mode, + ) + elif isinstance(controlnet, MultiControlNetModel): + control_images = [] + + for control_image_ in control_image: + control_image_ = self.prepare_control_image( + image=control_image_, + width=width, + height=height, + batch_size=batch_size * num_images_per_prompt, + num_images_per_prompt=num_images_per_prompt, + device=device, + dtype=controlnet.dtype, + do_classifier_free_guidance=do_classifier_free_guidance, + guess_mode=guess_mode, + ) + + control_images.append(control_image_) + + control_image = control_images + else: + assert False + + # 5. Preprocess reference image + ref_image = self.prepare_ref_image( + image=ref_image, + width=width, + height=height, + batch_size=batch_size * num_images_per_prompt, + num_images_per_prompt=num_images_per_prompt, + device=device, + dtype=prompt_embeds.dtype + ) + + # 6. Prepare timesteps + self.scheduler.set_timesteps(num_inference_steps, device=device) + timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device) + latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt) + + # 7. 
Prepare latent variables + latents = self.prepare_latents( + image, + latent_timestep, + batch_size, + num_images_per_prompt, + prompt_embeds.dtype, + device, + generator, + ) + + # 8. Prepare reference latent variables + ref_image_latents = self.prepare_ref_latents( + ref_image, + batch_size * num_images_per_prompt, + prompt_embeds.dtype, + device, + generator, + do_classifier_free_guidance, + ) + + # 9. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline + extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) + + # 9. Modify self attention and group norm + MODE = "write" + uc_mask = ( + torch.Tensor([1] * batch_size * num_images_per_prompt + [0] * batch_size * num_images_per_prompt) + .type_as(ref_image_latents) + .bool() + ) + + def hacked_basic_transformer_inner_forward( + self, + hidden_states: torch.FloatTensor, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + timestep: Optional[torch.LongTensor] = None, + cross_attention_kwargs: Dict[str, Any] = None, + class_labels: Optional[torch.LongTensor] = None, + ): + if self.use_ada_layer_norm: + norm_hidden_states = self.norm1(hidden_states, timestep) + elif self.use_ada_layer_norm_zero: + norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1( + hidden_states, timestep, class_labels, hidden_dtype=hidden_states.dtype + ) + else: + norm_hidden_states = self.norm1(hidden_states) + + # 1. Self-Attention + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if self.only_cross_attention: + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + else: + if MODE == "write": + self.bank.append(norm_hidden_states.detach().clone()) + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + if MODE == "read": + if attention_auto_machine_weight > self.attn_weight: + attn_output_uc = self.attn1( + norm_hidden_states, + encoder_hidden_states=torch.cat([norm_hidden_states] + self.bank, dim=1), + # attention_mask=attention_mask, + **cross_attention_kwargs, + ) + attn_output_c = attn_output_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + attn_output_c[uc_mask] = self.attn1( + norm_hidden_states[uc_mask], + encoder_hidden_states=norm_hidden_states[uc_mask], + **cross_attention_kwargs, + ) + attn_output = style_fidelity * attn_output_c + (1.0 - style_fidelity) * attn_output_uc + else: + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + self.bank.clear() + if self.use_ada_layer_norm_zero: + attn_output = gate_msa.unsqueeze(1) * attn_output + hidden_states = attn_output + hidden_states + + if self.attn2 is not None: + norm_hidden_states = ( + self.norm2(hidden_states, timestep) if self.use_ada_layer_norm else self.norm2(hidden_states) + ) + + # 2. 
Cross-Attention + attn_output = self.attn2( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=encoder_attention_mask, + **cross_attention_kwargs, + ) + hidden_states = attn_output + hidden_states + + # 3. Feed-forward + norm_hidden_states = self.norm3(hidden_states) + + if self.use_ada_layer_norm_zero: + norm_hidden_states = norm_hidden_states * (1 + scale_mlp[:, None]) + shift_mlp[:, None] + + ff_output = self.ff(norm_hidden_states) + + if self.use_ada_layer_norm_zero: + ff_output = gate_mlp.unsqueeze(1) * ff_output + + hidden_states = ff_output + hidden_states + + return hidden_states + + def hacked_mid_forward(self, *args, **kwargs): + eps = 1e-6 + x = self.original_forward(*args, **kwargs) + if MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(x, dim=(2, 3), keepdim=True, correction=0) + self.mean_bank.append(mean) + self.var_bank.append(var) + if MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(x, dim=(2, 3), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank) / float(len(self.mean_bank)) + var_acc = sum(self.var_bank) / float(len(self.var_bank)) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + x_uc = (((x - mean) / std) * std_acc) + mean_acc + x_c = x_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + x_c[uc_mask] = x[uc_mask] + x = style_fidelity * x_c + (1.0 - style_fidelity) * x_uc + self.mean_bank = [] + self.var_bank = [] + return x + + def hack_CrossAttnDownBlock2D_forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ): + eps = 1e-6 + + # TODO(Patrick, William) - attention mask is not used + output_states = () + + for i, (resnet, attn) in enumerate(zip(self.resnets, self.attentions)): + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + if MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(2, 3), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(2, 3), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + hidden_states_c[uc_mask] = hidden_states[uc_mask] + hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + output_states = output_states + (hidden_states,) + + if MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if 
self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + def hacked_DownBlock2D_forward(self, hidden_states, temb=None): + eps = 1e-6 + + output_states = () + + for i, resnet in enumerate(self.resnets): + hidden_states = resnet(hidden_states, temb) + + if MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(2, 3), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(2, 3), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + hidden_states_c[uc_mask] = hidden_states[uc_mask] + hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + output_states = output_states + (hidden_states,) + + if MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + def hacked_CrossAttnUpBlock2D_forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ): + eps = 1e-6 + # TODO(Patrick, William) - attention mask is not used + for i, (resnet, attn) in enumerate(zip(self.resnets, self.attentions)): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + + if MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(2, 3), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(2, 3), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + 
hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + hidden_states_c[uc_mask] = hidden_states[uc_mask] + hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + if MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + def hacked_UpBlock2D_forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None): + eps = 1e-6 + for i, resnet in enumerate(self.resnets): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + hidden_states = resnet(hidden_states, temb) + + if MODE == "write": + if gn_auto_machine_weight >= self.gn_weight: + var, mean = torch.var_mean(hidden_states, dim=(2, 3), keepdim=True, correction=0) + self.mean_bank.append([mean]) + self.var_bank.append([var]) + if MODE == "read": + if len(self.mean_bank) > 0 and len(self.var_bank) > 0: + var, mean = torch.var_mean(hidden_states, dim=(2, 3), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + mean_acc = sum(self.mean_bank[i]) / float(len(self.mean_bank[i])) + var_acc = sum(self.var_bank[i]) / float(len(self.var_bank[i])) + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + hidden_states_uc = (((hidden_states - mean) / std) * std_acc) + mean_acc + hidden_states_c = hidden_states_uc.clone() + if do_classifier_free_guidance and style_fidelity > 0: + hidden_states_c[uc_mask] = hidden_states[uc_mask] + hidden_states = style_fidelity * hidden_states_c + (1.0 - style_fidelity) * hidden_states_uc + + if MODE == "read": + self.mean_bank = [] + self.var_bank = [] + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + if reference_attn: + attn_modules = [module for module in torch_dfs(self.unet) if isinstance(module, BasicTransformerBlock)] + attn_modules = sorted(attn_modules, key=lambda x: -x.norm1.normalized_shape[0]) + + for i, module in enumerate(attn_modules): + module._original_inner_forward = module.forward + module.forward = hacked_basic_transformer_inner_forward.__get__(module, BasicTransformerBlock) + module.bank = [] + module.attn_weight = float(i) / float(len(attn_modules)) + + if reference_adain: + gn_modules = [self.unet.mid_block] + self.unet.mid_block.gn_weight = 0 + + down_blocks = self.unet.down_blocks + for w, module in enumerate(down_blocks): + module.gn_weight = 1.0 - float(w) / float(len(down_blocks)) + gn_modules.append(module) + + up_blocks = self.unet.up_blocks + for w, module in enumerate(up_blocks): + module.gn_weight = float(w) / float(len(up_blocks)) + gn_modules.append(module) + + for i, module in enumerate(gn_modules): + if getattr(module, "original_forward", None) is None: + module.original_forward = module.forward + if i == 0: + # mid_block + module.forward = hacked_mid_forward.__get__(module, torch.nn.Module) + elif isinstance(module, CrossAttnDownBlock2D): + module.forward = hack_CrossAttnDownBlock2D_forward.__get__(module, CrossAttnDownBlock2D) + elif isinstance(module, DownBlock2D): + module.forward = hacked_DownBlock2D_forward.__get__(module, DownBlock2D) + elif isinstance(module, CrossAttnUpBlock2D): + module.forward = 
hacked_CrossAttnUpBlock2D_forward.__get__(module, CrossAttnUpBlock2D)
+                elif isinstance(module, UpBlock2D):
+                    module.forward = hacked_UpBlock2D_forward.__get__(module, UpBlock2D)
+                module.mean_bank = []
+                module.var_bank = []
+                module.gn_weight *= 2
+
+        # 12. Denoising loop
+        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order
+        with self.progress_bar(total=num_inference_steps) as progress_bar:
+            for i, t in enumerate(timesteps):
+                # expand the latents if we are doing classifier free guidance
+                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents
+                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)
+
+                if self.controlnet is None:
+                    down_block_res_samples = None
+                    mid_block_res_sample = None
+                else:
+                    # controlnet(s) inference
+                    if guess_mode and do_classifier_free_guidance:
+                        # Infer ControlNet only for the conditional batch.
+                        control_model_input = latents
+                        control_model_input = self.scheduler.scale_model_input(control_model_input, t)
+                        controlnet_prompt_embeds = prompt_embeds.chunk(2)[1]
+                    else:
+                        control_model_input = latent_model_input
+                        controlnet_prompt_embeds = prompt_embeds
+
+                    down_block_res_samples, mid_block_res_sample = self.controlnet(
+                        control_model_input,
+                        t,
+                        encoder_hidden_states=controlnet_prompt_embeds,
+                        controlnet_cond=control_image,
+                        conditioning_scale=controlnet_conditioning_scale,
+                        guess_mode=guess_mode,
+                        return_dict=False,
+                    )
+
+                    if guess_mode and do_classifier_free_guidance:
+                        # Inferred ControlNet only for the conditional batch.
+                        # To apply the output of ControlNet to both the unconditional and conditional batches,
+                        # add 0 to the unconditional batch to keep it unchanged.
+                        down_block_res_samples = [torch.cat([torch.zeros_like(d), d]) for d in down_block_res_samples]
+                        mid_block_res_sample = torch.cat([torch.zeros_like(mid_block_res_sample), mid_block_res_sample])
+
+                # ref only part: noise the reference latents to the current timestep
+                noise = randn_tensor(
+                    ref_image_latents.shape, generator=generator, device=device, dtype=ref_image_latents.dtype
+                )
+                ref_xt = self.scheduler.add_noise(ref_image_latents, noise, t.reshape(1))
+                ref_xt = self.scheduler.scale_model_input(ref_xt, t)
+
+                # "write" pass: cache the reference features inside the hacked blocks
+                MODE = "write"
+                self.unet(
+                    ref_xt,
+                    t,
+                    encoder_hidden_states=prompt_embeds,
+                    cross_attention_kwargs=cross_attention_kwargs,
+                    return_dict=False,
+                )
+
+                # predict the noise residual ("read" pass consumes the cached reference features)
+                MODE = "read"
+                noise_pred = self.unet(
+                    latent_model_input,
+                    t,
+                    encoder_hidden_states=prompt_embeds,
+                    cross_attention_kwargs=cross_attention_kwargs,
+                    down_block_additional_residuals=down_block_res_samples,
+                    mid_block_additional_residual=mid_block_res_sample,
+                    return_dict=False,
+                )[0]
+
+                # perform guidance
+                if do_classifier_free_guidance:
+                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
+                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)
+
+                # compute the previous noisy sample x_t -> x_t-1
+                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]
+
+                # call the callback, if provided
+                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):
+                    progress_bar.update()
+                    if callback is not None and i % callback_steps == 0:
+                        callback(i, t, latents)
+
+        # If we do sequential model offloading, let's offload unet and controlnet
+        # manually for max memory savings
+        if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None:
+            self.unet.to("cpu")
+            if self.controlnet is not None:
+                self.controlnet.to("cpu")
+            torch.cuda.empty_cache()
+
+        if output_type != "latent":
+            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]
+            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)
+        else:
+            image = latents
+            has_nsfw_concept = None
+
+        if has_nsfw_concept is None:
+            do_denormalize = [True] * image.shape[0]
+        else:
+            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]
+
+        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)
+
+        # Offload last model to CPU
+        if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None:
+            self.final_offload_hook.offload()
+
+        if not return_dict:
+            return (image, has_nsfw_concept)
+
+        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)
\ No newline at end of file
diff --git a/src/animatediff/pipelines/sdxl_animation.py b/src/animatediff/pipelines/sdxl_animation.py
new file mode 100644
index 0000000000000000000000000000000000000000..2329a0e423476858105f4b5708e07a5a4aa0f023
--- /dev/null
+++ b/src/animatediff/pipelines/sdxl_animation.py
@@ -0,0 +1,2229 @@
+# Copyright 2023 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
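+#
+# NOTE (editor's summary, inferred from the imports and class definitions below):
+# this module adapts diffusers' StableDiffusionXLPipeline for AnimateDiff-style
+# video generation, swapping in a temporal 3D UNet and adding region/prompt-map
+# conditioning, IP-Adapter image prompts, and ControlNet scheduling.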
+ +import inspect +import os +from dataclasses import dataclass +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +import numpy as np +import torch +from diffusers import LCMScheduler +from diffusers.image_processor import VaeImageProcessor +from diffusers.loaders import (FromSingleFileMixin, LoraLoaderMixin, + TextualInversionLoaderMixin) +from diffusers.models import AutoencoderKL, ControlNetModel +from diffusers.models.attention_processor import (AttnProcessor2_0, + LoRAAttnProcessor2_0, + LoRAXFormersAttnProcessor, + XFormersAttnProcessor) +from diffusers.pipelines.pipeline_utils import DiffusionPipeline +from diffusers.schedulers import KarrasDiffusionSchedulers +from diffusers.utils import (BaseOutput, is_accelerate_available, + is_accelerate_version, logging, + replace_example_docstring) +from diffusers.utils.torch_utils import randn_tensor +from einops import rearrange +from transformers import (CLIPTextModel, CLIPTextModelWithProjection, + CLIPTokenizer) + +from animatediff.ip_adapter import IPAdapterPlusXL, IPAdapterXL +from animatediff.pipelines.animation import PromptEncoder, RegionMask +from animatediff.pipelines.context import (get_context_scheduler, + get_total_steps) +from animatediff.sdxl_models.unet import UNet3DConditionModel +from animatediff.utils.control_net_lllite import ControlNetLLLite +from animatediff.utils.lpw_stable_diffusion_xl import \ + get_weighted_text_embeddings_sdxl2 +from animatediff.utils.util import (get_tensor_interpolation_method, show_gpu, + stopwatch_record, stopwatch_start, + stopwatch_stop) + + +class PromptEncoderSDXL(PromptEncoder): + def __init__( + self, + pipe, + device, + latents_device, + num_videos_per_prompt, + do_classifier_free_guidance, + region_condi_list, + negative_prompt, + is_signle_prompt_mode, + clip_skip, + multi_uncond_mode + ): + self.pipe = pipe + self.is_single_prompt_mode=is_signle_prompt_mode + self.do_classifier_free_guidance = do_classifier_free_guidance + + uncond_num = 0 + if do_classifier_free_guidance: + if multi_uncond_mode: + uncond_num = len(region_condi_list) + else: + uncond_num = 1 + + self.uncond_num = uncond_num + + ### text + + prompt_nums = [] + prompt_map_list = [] + prompt_list = [] + + for condi in region_condi_list: + _prompt_map = condi["prompt_map"] + prompt_map_list.append(_prompt_map) + _prompt_map = dict(sorted(_prompt_map.items())) + _prompt_list = [_prompt_map[key_frame] for key_frame in _prompt_map.keys()] + prompt_nums.append( len(_prompt_list) ) + prompt_list += _prompt_list + + (prompt_embeds_list, negative_prompt_embeds_list, + pooled_prompt_embeds_list, negative_pooled_prompt_embeds_list) = get_weighted_text_embeddings_sdxl2( + pipe, prompt_list, [negative_prompt], latents_device + ) + + self.prompt_embeds_dtype = prompt_embeds_list[0].dtype + + + if do_classifier_free_guidance: + negative = negative_prompt_embeds_list + negative_pooled = negative_pooled_prompt_embeds_list + positive = prompt_embeds_list + positive_pooled = pooled_prompt_embeds_list + else: + positive = prompt_embeds_list + positive_pooled = pooled_prompt_embeds_list + + if pipe.ip_adapter: + pipe.ip_adapter.set_text_length(positive[0].shape[1]) + + prompt_embeds_region_list = [] + pooled_embeds_region_list = [] + + if do_classifier_free_guidance: + prompt_embeds_region_list = [ + { + 0:negative[0] + } + ] * uncond_num + prompt_embeds_region_list + pooled_embeds_region_list = [ + { + 0:negative_pooled[0] + } + ] * uncond_num + pooled_embeds_region_list + + pos_index = 0 + for prompt_map, num in 
zip(prompt_map_list, prompt_nums): + prompt_embeds_map={} + pooled_embeds_map={} + pos = positive[pos_index:pos_index+num] + pos_pooled = positive_pooled[pos_index:pos_index+num] + + for i, key_frame in enumerate(prompt_map): + prompt_embeds_map[key_frame] = pos[i] + pooled_embeds_map[key_frame] = pos_pooled[i] + + prompt_embeds_region_list.append( prompt_embeds_map ) + pooled_embeds_region_list.append( pooled_embeds_map ) + pos_index += num + + if do_classifier_free_guidance: + prompt_map_list = [ + { + 0:negative_prompt + } + ] * uncond_num + prompt_map_list + + self.prompt_map_list = prompt_map_list + self.prompt_embeds_region_list = prompt_embeds_region_list + self.pooled_embeds_region_list = pooled_embeds_region_list + + ### image + if pipe.ip_adapter: + + ip_im_nums = [] + ip_im_map_list = [] + ip_im_list = [] + + for condi in region_condi_list: + _ip_im_map = condi["ip_adapter_map"]["images"] + ip_im_map_list.append(_ip_im_map) + _ip_im_map = dict(sorted(_ip_im_map.items())) + _ip_im_list = [_ip_im_map[key_frame] for key_frame in _ip_im_map.keys()] + ip_im_nums.append( len(_ip_im_list) ) + ip_im_list += _ip_im_list + + positive, negative = pipe.ip_adapter.get_image_embeds(ip_im_list) + + positive = positive.to(device=latents_device) + negative = negative.to(device=latents_device) + + bs_embed, seq_len, _ = positive.shape + positive = positive.repeat(1, 1, 1) + positive = positive.view(bs_embed * 1, seq_len, -1) + + bs_embed, seq_len, _ = negative.shape + negative = negative.repeat(1, 1, 1) + negative = negative.view(bs_embed * 1, seq_len, -1) + + if do_classifier_free_guidance: + negative = negative.chunk(negative.shape[0], 0) + positive = positive.chunk(positive.shape[0], 0) + else: + positive = positive.chunk(positive.shape[0], 0) + + im_prompt_embeds_region_list = [] + + if do_classifier_free_guidance: + im_prompt_embeds_region_list = [ + { + 0:negative[0] + } + ] * uncond_num + im_prompt_embeds_region_list + + pos_index = 0 + for ip_im_map, num in zip(ip_im_map_list, ip_im_nums): + im_prompt_embeds_map={} + pos = positive[pos_index:pos_index+num] + + for i, key_frame in enumerate(ip_im_map): + im_prompt_embeds_map[key_frame] = pos[i] + + im_prompt_embeds_region_list.append( im_prompt_embeds_map ) + pos_index += num + + + if do_classifier_free_guidance: + ip_im_map_list = [ + { + 0:None + } + ] * uncond_num + ip_im_map_list + + + self.ip_im_map_list = ip_im_map_list + self.im_prompt_embeds_region_list = im_prompt_embeds_region_list + + def is_uncond_layer(self, layer_index): + return self.uncond_num > layer_index + + + def _get_current_prompt_embeds_from_text( + self, + prompt_map, + prompt_embeds_map, + pooled_embeds_map, + center_frame = None, + video_length : int = 0 + ): + + key_prev = list(prompt_map.keys())[-1] + key_next = list(prompt_map.keys())[0] + + for p in prompt_map.keys(): + if p > center_frame: + key_next = p + break + key_prev = p + + dist_prev = center_frame - key_prev + if dist_prev < 0: + dist_prev += video_length + dist_next = key_next - center_frame + if dist_next < 0: + dist_next += video_length + + if key_prev == key_next or dist_prev + dist_next == 0: + return prompt_embeds_map[key_prev], pooled_embeds_map[key_prev] + + rate = dist_prev / (dist_prev + dist_next) + + return (get_tensor_interpolation_method()( prompt_embeds_map[key_prev], prompt_embeds_map[key_next], rate ), + get_tensor_interpolation_method()( pooled_embeds_map[key_prev], pooled_embeds_map[key_next], rate )) + + def get_current_prompt_embeds_from_text( + self, + center_frame = None, + 
video_length: int = 0,
+    ):
+        outputs = ()
+        outputs2 = ()
+        for prompt_map, prompt_embeds_map, pooled_embeds_map in zip(
+            self.prompt_map_list, self.prompt_embeds_region_list, self.pooled_embeds_region_list
+        ):
+            embs, embs2 = self._get_current_prompt_embeds_from_text(
+                prompt_map,
+                prompt_embeds_map,
+                pooled_embeds_map,
+                center_frame,
+                video_length,
+            )
+            outputs += (embs,)
+            outputs2 += (embs2,)
+
+        return outputs, outputs2
+
+    def get_current_prompt_embeds_single(
+        self,
+        context: Optional[List[int]] = None,
+        video_length: int = 0,
+    ):
+        center_frame = context[len(context) // 2]
+        text_emb, pooled_emb = self.get_current_prompt_embeds_from_text(center_frame, video_length)
+        text_emb = torch.cat(text_emb)
+        pooled_emb = torch.cat(pooled_emb)
+        if self.pipe.ip_adapter:
+            image_emb = self.get_current_prompt_embeds_from_image(center_frame, video_length)
+            image_emb = torch.cat(image_emb)
+            return torch.cat([text_emb, image_emb], dim=1), pooled_emb
+        else:
+            return text_emb, pooled_emb
+
+    def get_current_prompt_embeds_multi(
+        self,
+        context: Optional[List[int]] = None,
+        video_length: int = 0,
+    ):
+        emb_list = []
+        pooled_emb_list = []
+        for c in context:
+            t, p = self.get_current_prompt_embeds_from_text(c, video_length)
+            for i, (emb, pooled) in enumerate(zip(t, p)):
+                if i >= len(emb_list):
+                    emb_list.append([])
+                    pooled_emb_list.append([])
+                emb_list[i].append(emb)
+                pooled_emb_list[i].append(pooled)
+
+        text_emb = []
+        for emb in emb_list:
+            emb = torch.cat(emb)
+            text_emb.append(emb)
+        text_emb = torch.cat(text_emb)
+
+        pooled_emb = []
+        for emb in pooled_emb_list:
+            emb = torch.cat(emb)
+            pooled_emb.append(emb)
+        pooled_emb = torch.cat(pooled_emb)
+
+        if self.pipe.ip_adapter is None:
+            return text_emb, pooled_emb
+
+        emb_list = []
+        for c in context:
+            t = self.get_current_prompt_embeds_from_image(c, video_length)
+            for i, emb in enumerate(t):
+                if i >= len(emb_list):
+                    emb_list.append([])
+                emb_list[i].append(emb)
+
+        image_emb = []
+        for emb in emb_list:
+            emb = torch.cat(emb)
+            image_emb.append(emb)
+        image_emb = torch.cat(image_emb)
+
+        return torch.cat([text_emb, image_emb], dim=1), pooled_emb
+
+    '''
+    def get_current_prompt_embeds(
+        self,
+        context: List[int] = None,
+        video_length : int = 0
+    ):
+        return self.get_current_prompt_embeds_single(context,video_length) if self.is_single_prompt_mode else self.get_current_prompt_embeds_multi(context,video_length)
+
+    def get_prompt_embeds_dtype(self):
+        return self.prompt_embeds_dtype
+
+    def get_condi_size(self):
+        return len(self.prompt_embeds_region_list)
+    '''
+
+
+@dataclass
+class AnimatePipelineOutput(BaseOutput):
+    """
+    Output class for AnimateDiff video pipelines.
+
+    Args:
+        videos (`torch.Tensor` or `np.ndarray`):
+            Denoised video frames of the diffusion pipeline, as a tensor of shape
+            `(batch_size, num_channels, num_frames, height, width)` or an equivalent numpy array.
+    """
+
+    videos: Union[torch.Tensor, np.ndarray]
+
+
+logger = logging.get_logger(__name__)  # pylint: disable=invalid-name
+
+EXAMPLE_DOC_STRING = """
+    Examples:
+        ```py
+        >>> import torch
+        >>> from diffusers import StableDiffusionXLPipeline
+
+        >>> pipe = StableDiffusionXLPipeline.from_pretrained(
+        ...     "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
+        ... )
+        >>> pipe = pipe.to("cuda")
+
+        >>> prompt = "a photo of an astronaut riding a horse on mars"
+        >>> image = pipe(prompt).images[0]
+        ```
+"""
+
+
+# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.rescale_noise_cfg
+def rescale_noise_cfg(noise_cfg, noise_pred_text, guidance_rescale=0.0):
+    """
+    Rescale `noise_cfg` according to `guidance_rescale`. Based on findings of [Common Diffusion Noise Schedules and
+    Sample Steps are Flawed](https://arxiv.org/pdf/2305.08891.pdf). See Section 3.4
+    """
+    std_text = noise_pred_text.std(dim=list(range(1, noise_pred_text.ndim)), keepdim=True)
+    std_cfg = noise_cfg.std(dim=list(range(1, noise_cfg.ndim)), keepdim=True)
+    # rescale the results from guidance (fixes overexposure)
+    noise_pred_rescaled = noise_cfg * (std_text / std_cfg)
+    # mix with the original results from guidance by factor guidance_rescale to avoid "plain looking" images
+    noise_cfg = guidance_rescale * noise_pred_rescaled + (1 - guidance_rescale) * noise_cfg
+    return noise_cfg
+
+
+class AnimationPipeline(DiffusionPipeline, FromSingleFileMixin, LoraLoaderMixin, TextualInversionLoaderMixin):
+    r"""
+    Pipeline for AnimateDiff text-to-video generation using Stable Diffusion XL.
+
+    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the
+    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)
+
+    In addition, the pipeline inherits the following loading methods:
+        - *LoRA*: [`StableDiffusionXLPipeline.load_lora_weights`]
+        - *Ckpt*: [`loaders.FromSingleFileMixin.from_single_file`]
+
+    as well as the following saving methods:
+        - *LoRA*: [`loaders.StableDiffusionXLPipeline.save_lora_weights`]
+
+    Args:
+        vae ([`AutoencoderKL`]):
+            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.
+        text_encoder ([`CLIPTextModel`]):
+            Frozen text-encoder. Stable Diffusion XL uses the text portion of
+            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically
+            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.
+        text_encoder_2 ([`CLIPTextModelWithProjection`]):
+            Second frozen text-encoder. Stable Diffusion XL uses the text and pool portion of
+            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModelWithProjection),
+            specifically the
+            [laion/CLIP-ViT-bigG-14-laion2B-39B-b160k](https://huggingface.co/laion/CLIP-ViT-bigG-14-laion2B-39B-b160k)
+            variant.
+        tokenizer (`CLIPTokenizer`):
+            Tokenizer of class
+            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).
+        tokenizer_2 (`CLIPTokenizer`):
+            Second Tokenizer of class
+            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).
+        unet ([`UNet3DConditionModel`]): Conditional 3D U-Net architecture to denoise the encoded video latents.
+        scheduler ([`SchedulerMixin`]):
+            A scheduler to be used in combination with `unet` to denoise the encoded latents. Can be one of
+            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. 
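+        force_zeros_for_empty_prompt (`bool`, *optional*, defaults to `True`):
+            Whether the negative prompt embeddings should be forced to zeros when no negative prompt is given
+            (registered to the pipeline config in `__init__` below).
+        controlnet_map (`Dict[str, ControlNetModel]`, *optional*):
+            Optional mapping of named [`ControlNetModel`]s used by this pipeline's ControlNet conditioning.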
+ """ + model_cpu_offload_seq = "text_encoder->text_encoder_2->unet->vae" + + def __init__( + self, + vae: AutoencoderKL, + text_encoder: CLIPTextModel, + text_encoder_2: CLIPTextModelWithProjection, + tokenizer: CLIPTokenizer, + tokenizer_2: CLIPTokenizer, + unet: UNet3DConditionModel, + scheduler: KarrasDiffusionSchedulers, + force_zeros_for_empty_prompt: bool = True, + add_watermarker: Optional[bool] = None, + controlnet_map: Dict[ str , ControlNetModel ]=None, + ): + super().__init__() + + self.register_modules( + vae=vae, + text_encoder=text_encoder, + text_encoder_2=text_encoder_2, + tokenizer=tokenizer, + tokenizer_2=tokenizer_2, + unet=unet, + scheduler=scheduler, + ) + self.register_to_config(force_zeros_for_empty_prompt=force_zeros_for_empty_prompt) + self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1) + self.default_sample_size = self.unet.config.sample_size + + self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor) + self.control_image_processor = VaeImageProcessor( + vae_scale_factor=self.vae_scale_factor, do_convert_rgb=True, do_normalize=False + ) + self.controlnet_map = controlnet_map + + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing + def enable_vae_slicing(self): + r""" + Enable sliced VAE decoding. When this option is enabled, the VAE will split the input tensor in slices to + compute decoding in several steps. This is useful to save some memory and allow larger batch sizes. + """ + self.vae.enable_slicing() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing + def disable_vae_slicing(self): + r""" + Disable sliced VAE decoding. If `enable_vae_slicing` was previously enabled, this method will go back to + computing decoding in one step. + """ + self.vae.disable_slicing() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_tiling + def enable_vae_tiling(self): + r""" + Enable tiled VAE decoding. When this option is enabled, the VAE will split the input tensor into tiles to + compute decoding and encoding in several steps. This is useful for saving a large amount of memory and to allow + processing larger images. + """ + self.vae.enable_tiling() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_tiling + def disable_vae_tiling(self): + r""" + Disable tiled VAE decoding. If `enable_vae_tiling` was previously enabled, this method will go back to + computing decoding in one step. + """ + self.vae.disable_tiling() + + def __enable_model_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared + to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward` + method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with + `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`. 
+ """ + if is_accelerate_available() and is_accelerate_version(">=", "0.17.0.dev0"): + from accelerate import cpu_offload_with_hook + else: + raise ImportError("`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.") + + device = torch.device(f"cuda:{gpu_id}") + + if self.device.type != "cpu": + self.to("cpu", silence_dtype_warnings=True) + torch.cuda.empty_cache() # otherwise we don't see the memory savings (but they probably exist) + + model_sequence = ( + [self.text_encoder, self.text_encoder_2] if self.text_encoder is not None else [self.text_encoder_2] + ) + model_sequence.extend([self.unet, self.vae]) + + hook = None + for cpu_offloaded_model in model_sequence: + _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook) + + # We'll offload the last model manually. + self.final_offload_hook = hook + + def encode_prompt( + self, + prompt: str, + prompt_2: Optional[str] = None, + device: Optional[torch.device] = None, + num_videos_per_prompt: int = 1, + do_classifier_free_guidance: bool = True, + negative_prompt: Optional[str] = None, + negative_prompt_2: Optional[str] = None, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + pooled_prompt_embeds: Optional[torch.FloatTensor] = None, + negative_pooled_prompt_embeds: Optional[torch.FloatTensor] = None, + lora_scale: Optional[float] = None, + ): + r""" + Encodes the prompt into text encoder hidden states. + + Args: + prompt (`str` or `List[str]`, *optional*): + prompt to be encoded + prompt_2 (`str` or `List[str]`, *optional*): + The prompt or prompts to be sent to the `tokenizer_2` and `text_encoder_2`. If not defined, `prompt` is + used in both text-encoders + device: (`torch.device`): + torch device + num_videos_per_prompt (`int`): + number of images that should be generated per prompt + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + negative_prompt (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation. If not defined, one has to pass + `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is + less than `1`). + negative_prompt_2 (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation to be sent to `tokenizer_2` and + `text_encoder_2`. If not defined, `negative_prompt` is used in both text-encoders + prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not + provided, text embeddings will be generated from `prompt` input argument. + negative_prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt + weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input + argument. + pooled_prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. + If not provided, pooled text embeddings will be generated from `prompt` input argument. + negative_pooled_prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated negative pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt + weighting. If not provided, pooled negative_prompt_embeds will be generated from `negative_prompt` + input argument. 
+            lora_scale (`float`, *optional*):
+                A lora scale that will be applied to all LoRA layers of the text encoder if LoRA layers are loaded.
+        """
+        device = device or self._execution_device
+
+        # set lora scale so that monkey patched LoRA
+        # function of text encoder can correctly access it
+        if lora_scale is not None and isinstance(self, LoraLoaderMixin):
+            self._lora_scale = lora_scale
+
+        if prompt is not None and isinstance(prompt, str):
+            batch_size = 1
+        elif prompt is not None and isinstance(prompt, list):
+            batch_size = len(prompt)
+        else:
+            batch_size = prompt_embeds.shape[0]
+
+        # Define tokenizers and text encoders
+        tokenizers = [self.tokenizer, self.tokenizer_2] if self.tokenizer is not None else [self.tokenizer_2]
+        text_encoders = (
+            [self.text_encoder, self.text_encoder_2] if self.text_encoder is not None else [self.text_encoder_2]
+        )
+
+        if prompt_embeds is None:
+            prompt_2 = prompt_2 or prompt
+            # textual inversion: process multi-vector tokens if necessary
+            prompt_embeds_list = []
+            prompts = [prompt, prompt_2]
+            for prompt, tokenizer, text_encoder in zip(prompts, tokenizers, text_encoders):
+                if isinstance(self, TextualInversionLoaderMixin):
+                    prompt = self.maybe_convert_prompt(prompt, tokenizer)
+
+                text_inputs = tokenizer(
+                    prompt,
+                    padding="max_length",
+                    max_length=tokenizer.model_max_length,
+                    truncation=True,
+                    return_tensors="pt",
+                )
+
+                text_input_ids = text_inputs.input_ids
+                untruncated_ids = tokenizer(prompt, padding="longest", return_tensors="pt").input_ids
+
+                if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(
+                    text_input_ids, untruncated_ids
+                ):
+                    removed_text = tokenizer.batch_decode(untruncated_ids[:, tokenizer.model_max_length - 1 : -1])
+                    logger.warning(
+                        "The following part of your input was truncated because CLIP can only handle sequences up to"
+                        f" {tokenizer.model_max_length} tokens: {removed_text}"
+                    )
+
+                prompt_embeds = text_encoder(
+                    text_input_ids.to(device),
+                    output_hidden_states=True,
+                )
+
+                # We are only ALWAYS interested in the pooled output of the final text encoder
+                pooled_prompt_embeds = prompt_embeds[0]
+                prompt_embeds = prompt_embeds.hidden_states[-2]
+
+                prompt_embeds_list.append(prompt_embeds)
+
+            prompt_embeds = torch.concat(prompt_embeds_list, dim=-1)
+
+        # get unconditional embeddings for classifier free guidance
+        zero_out_negative_prompt = negative_prompt is None and self.config.force_zeros_for_empty_prompt
+        if do_classifier_free_guidance and negative_prompt_embeds is None and zero_out_negative_prompt:
+            negative_prompt_embeds = torch.zeros_like(prompt_embeds)
+            negative_pooled_prompt_embeds = torch.zeros_like(pooled_prompt_embeds)
+        elif do_classifier_free_guidance and negative_prompt_embeds is None:
+            negative_prompt = negative_prompt or ""
+            negative_prompt_2 = negative_prompt_2 or negative_prompt
+
+            uncond_tokens: List[str]
+            if prompt is not None and type(prompt) is not type(negative_prompt):
+                raise TypeError(
+                    f"`negative_prompt` should be the same type as `prompt`, but got {type(negative_prompt)} !="
+                    f" {type(prompt)}."
+                )
+            elif isinstance(negative_prompt, str):
+                uncond_tokens = [negative_prompt, negative_prompt_2]
+            elif batch_size != len(negative_prompt):
+                raise ValueError(
+                    f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:"
+                    f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches"
+                    " the batch size of `prompt`."
+ ) + else: + uncond_tokens = [negative_prompt, negative_prompt_2] + + negative_prompt_embeds_list = [] + for negative_prompt, tokenizer, text_encoder in zip(uncond_tokens, tokenizers, text_encoders): + if isinstance(self, TextualInversionLoaderMixin): + negative_prompt = self.maybe_convert_prompt(negative_prompt, tokenizer) + + max_length = prompt_embeds.shape[1] + uncond_input = tokenizer( + negative_prompt, + padding="max_length", + max_length=max_length, + truncation=True, + return_tensors="pt", + ) + + negative_prompt_embeds = text_encoder( + uncond_input.input_ids.to(device), + output_hidden_states=True, + ) + # We are only ALWAYS interested in the pooled output of the final text encoder + negative_pooled_prompt_embeds = negative_prompt_embeds[0] + negative_prompt_embeds = negative_prompt_embeds.hidden_states[-2] + + negative_prompt_embeds_list.append(negative_prompt_embeds) + + negative_prompt_embeds = torch.concat(negative_prompt_embeds_list, dim=-1) + + prompt_embeds = prompt_embeds.to(dtype=self.text_encoder_2.dtype, device=device) + + bs_embed, seq_len, _ = prompt_embeds.shape + # duplicate text embeddings for each generation per prompt, using mps friendly method + prompt_embeds = prompt_embeds.repeat(1, num_videos_per_prompt, 1) + prompt_embeds = prompt_embeds.view(bs_embed * num_videos_per_prompt, seq_len, -1) + + if do_classifier_free_guidance: + # duplicate unconditional embeddings for each generation per prompt, using mps friendly method + seq_len = negative_prompt_embeds.shape[1] + negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder_2.dtype, device=device) + negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_videos_per_prompt, 1) + negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_videos_per_prompt, seq_len, -1) + + pooled_prompt_embeds = pooled_prompt_embeds.repeat(1, num_videos_per_prompt).view( + bs_embed * num_videos_per_prompt, -1 + ) + if do_classifier_free_guidance: + negative_pooled_prompt_embeds = negative_pooled_prompt_embeds.repeat(1, num_videos_per_prompt).view( + bs_embed * num_videos_per_prompt, -1 + ) + + return prompt_embeds, negative_prompt_embeds, pooled_prompt_embeds, negative_pooled_prompt_embeds + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs + def prepare_extra_step_kwargs(self, generator, eta): + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. 
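+        # (the checks below only inspect the signature of `self.scheduler.step`,
+        # so unsupported kwargs are silently dropped rather than raising)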
+ # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 + # and should be between [0, 1] + + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + extra_step_kwargs = {} + if accepts_eta: + extra_step_kwargs["eta"] = eta + + # check if the scheduler accepts generator + accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) + if accepts_generator: + extra_step_kwargs["generator"] = generator + return extra_step_kwargs + + + def prepare_image( + self, + image, + width, + height, + batch_size, + num_images_per_prompt, + device, + dtype, + do_classifier_free_guidance=False, + guess_mode=False, + do_normalize=False, + ): + if do_normalize == False: + image = self.control_image_processor.preprocess(image, height=height, width=width).to(dtype=torch.float32) + else: + image = self.image_processor.preprocess(image, height=height, width=width).to(dtype=torch.float32) + + image_batch_size = image.shape[0] + + if image_batch_size == 1: + repeat_by = batch_size + else: + # image batch size is the same as prompt batch size + repeat_by = num_images_per_prompt + + image = image.repeat_interleave(repeat_by, dim=0) + + image = image.to(device=device, dtype=dtype) + + #if do_classifier_free_guidance and not guess_mode: + # image = torch.cat([image] * 2) + + return image + + + def check_inputs( + self, + prompt, + prompt_2, + height, + width, + callback_steps, + negative_prompt=None, + negative_prompt_2=None, + prompt_embeds=None, + negative_prompt_embeds=None, + pooled_prompt_embeds=None, + negative_pooled_prompt_embeds=None, + ): + if height % 8 != 0 or width % 8 != 0: + raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") + + ''' + if (callback_steps is None) or ( + callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) + ): + raise ValueError( + f"`callback_steps` has to be a positive integer but is {callback_steps} of type" + f" {type(callback_steps)}." + ) + ''' + if callback_steps is not None: + if not isinstance(callback_steps, list): + raise ValueError("`callback_steps` has to be a list of positive integers.") + for callback_step in callback_steps: + if not isinstance(callback_step, int) or callback_step <= 0: + raise ValueError("`callback_steps` has to be a list of positive integers.") + + if prompt is not None and prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to" + " only forward one of the two." + ) + elif prompt_2 is not None and prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `prompt_2`: {prompt_2} and `prompt_embeds`: {prompt_embeds}. Please make sure to" + " only forward one of the two." + ) + elif prompt is None and prompt_embeds is None: + raise ValueError( + "Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined." 
+ ) + elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)): + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + elif prompt_2 is not None and (not isinstance(prompt_2, str) and not isinstance(prompt_2, list)): + raise ValueError(f"`prompt_2` has to be of type `str` or `list` but is {type(prompt_2)}") + + if negative_prompt is not None and negative_prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:" + f" {negative_prompt_embeds}. Please make sure to only forward one of the two." + ) + elif negative_prompt_2 is not None and negative_prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `negative_prompt_2`: {negative_prompt_2} and `negative_prompt_embeds`:" + f" {negative_prompt_embeds}. Please make sure to only forward one of the two." + ) + + if prompt_embeds is not None and negative_prompt_embeds is not None: + if prompt_embeds.shape != negative_prompt_embeds.shape: + raise ValueError( + "`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but" + f" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`" + f" {negative_prompt_embeds.shape}." + ) + + if prompt_embeds is not None and pooled_prompt_embeds is None: + raise ValueError( + "If `prompt_embeds` are provided, `pooled_prompt_embeds` also have to be passed. Make sure to generate `pooled_prompt_embeds` from the same text encoder that was used to generate `prompt_embeds`." + ) + + if negative_prompt_embeds is not None and negative_pooled_prompt_embeds is None: + raise ValueError( + "If `negative_prompt_embeds` are provided, `negative_pooled_prompt_embeds` also have to be passed. Make sure to generate `negative_pooled_prompt_embeds` from the same text encoder that was used to generate `negative_prompt_embeds`." + ) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents + def __prepare_latents(self, batch_size, single_model_length, num_channels_latents, height, width, dtype, device, generator, latents=None): + shape = (batch_size, num_channels_latents, single_model_length, height // self.vae_scale_factor, width // self.vae_scale_factor) + if isinstance(generator, list) and len(generator) != batch_size: + raise ValueError( + f"You have passed a list of generators of length {len(generator)}, but requested an effective batch" + f" size of {batch_size}. Make sure the batch size matches the length of the generators." + ) + + if latents is None: + latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype) + else: + latents = latents.to(device) + + # scale the initial noise by the standard deviation required by the scheduler + latents = latents * self.scheduler.init_noise_sigma + return latents + + def prepare_latents( + self, + batch_size, + num_channels_latents, + video_length, + height, + width, + dtype, + device, + generator, + img2img_map, + timestep, + latents=None, + is_strength_max=True, + return_noise=True, + return_image_latents=True, + ): + shape = ( + batch_size, + num_channels_latents, + video_length, + height // self.vae_scale_factor, + width // self.vae_scale_factor, + ) + if isinstance(generator, list) and len(generator) != batch_size: + raise ValueError( + f"You have passed a list of generators of length {len(generator)}, but requested an effective batch" + f" size of {batch_size}. 
Make sure the batch size matches the length of the generators." + ) + + # Offload text encoder if `enable_model_cpu_offload` was enabled + if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None: + self.text_encoder_2.to("cpu") + torch.cuda.empty_cache() + + + image_latents = None + + if img2img_map: + + image_latents = torch.zeros(shape, device=device, dtype=dtype) + for frame_no in img2img_map["images"]: + img = img2img_map["images"][frame_no] + img = self.image_processor.preprocess(img) + img = img.to(device="cuda", dtype=self.vae.dtype) + img = self.vae.encode(img).latent_dist.sample(generator) + img = self.vae.config.scaling_factor * img + img = torch.cat([img], dim=0) + image_latents[:,:,frame_no,:,:] = img.to(device=device, dtype=dtype) + + else: + is_strength_max = True + + + if latents is None: + noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype) + latents = noise if is_strength_max else self.scheduler.add_noise(image_latents, noise, timestep) + latents = latents * self.scheduler.init_noise_sigma if is_strength_max else latents + else: + noise = latents.to(device) + latents = noise * self.scheduler.init_noise_sigma + + + outputs = (latents.to(device, dtype),) + + if return_noise: + outputs += (noise.to(device, dtype),) + + if return_image_latents: + if image_latents is not None: + outputs += (image_latents.to(device, dtype),) + else: + outputs += (None,) + + return outputs + + + def __prepare_latents( + self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None, add_noise=True + ): + + image = image.to(device=device, dtype=dtype) + + batch_size = batch_size * num_images_per_prompt + + if image.shape[1] == 4: + init_latents = image + + else: + # make sure the VAE is in float32 mode, as it overflows in float16 + if self.vae.config.force_upcast: + image = image.float() + self.vae.to(dtype=torch.float32) + + if isinstance(generator, list) and len(generator) != batch_size: + raise ValueError( + f"You have passed a list of generators of length {len(generator)}, but requested an effective batch" + f" size of {batch_size}. Make sure the batch size matches the length of the generators." + ) + + elif isinstance(generator, list): + init_latents = [ + self.vae.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size) + ] + init_latents = torch.cat(init_latents, dim=0) + else: + init_latents = self.vae.encode(image).latent_dist.sample(generator) + + if self.vae.config.force_upcast: + self.vae.to(dtype) + + init_latents = init_latents.to(dtype) + init_latents = self.vae.config.scaling_factor * init_latents + + if batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] == 0: + # expand init_latents for batch_size + additional_image_per_prompt = batch_size // init_latents.shape[0] + init_latents = torch.cat([init_latents] * additional_image_per_prompt, dim=0) + elif batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] != 0: + raise ValueError( + f"Cannot duplicate `image` of batch size {init_latents.shape[0]} to {batch_size} text prompts." 
+            )
+        else:
+            init_latents = torch.cat([init_latents], dim=0)
+
+        if add_noise:
+            shape = init_latents.shape
+            noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)
+            # get latents
+            init_latents = self.scheduler.add_noise(init_latents, noise, timestep)
+
+        latents = init_latents
+
+        return latents
+
+    def _get_add_time_ids(self, original_size, crops_coords_top_left, target_size, dtype):
+        add_time_ids = list(original_size + crops_coords_top_left + target_size)
+
+        passed_add_embed_dim = (
+            self.unet.config.addition_time_embed_dim * len(add_time_ids) + self.text_encoder_2.config.projection_dim
+        )
+        expected_add_embed_dim = self.unet.add_embedding.linear_1.in_features
+
+        if expected_add_embed_dim != passed_add_embed_dim:
+            raise ValueError(
+                f"Model expects an added time embedding vector of length {expected_add_embed_dim}, but a vector of {passed_add_embed_dim} was created. The model has an incorrect config. Please check `unet.config.time_embedding_type` and `text_encoder_2.config.projection_dim`."
+            )
+
+        add_time_ids = torch.tensor([add_time_ids], dtype=dtype)
+        return add_time_ids
+
+    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_upscale.StableDiffusionUpscalePipeline.upcast_vae
+    def upcast_vae(self):
+        dtype = self.vae.dtype
+        self.vae.to(dtype=torch.float32)
+        use_torch_2_0_or_xformers = isinstance(
+            self.vae.decoder.mid_block.attentions[0].processor,
+            (
+                AttnProcessor2_0,
+                XFormersAttnProcessor,
+                LoRAXFormersAttnProcessor,
+                LoRAAttnProcessor2_0,
+            ),
+        )
+        # if xformers or torch_2_0 is used attention block does not need
+        # to be in float32 which can save lots of memory
+        if use_torch_2_0_or_xformers:
+            self.vae.post_quant_conv.to(dtype)
+            self.vae.decoder.conv_in.to(dtype)
+            self.vae.decoder.mid_block.to(dtype)
+
+    def decode_latents(self, latents: torch.Tensor):
+        video_length = latents.shape[2]
+        latents = 1 / self.vae.config.scaling_factor * latents
+        latents = rearrange(latents, "b c f h w -> (b f) c h w")
+        # video = self.vae.decode(latents).sample
+        video = []
+        for frame_idx in range(latents.shape[0]):
+            video.append(
+#                self.vae.decode(latents[frame_idx : frame_idx + 1].to(self.vae.device, self.vae.dtype)).sample.cpu()
+                self.vae.decode(latents[frame_idx : frame_idx + 1].to("cuda", self.vae.dtype)).sample.cpu()
+            )
+        video = torch.cat(video)
+        video = rearrange(video, "(b f) c h w -> b c f h w", f=video_length)
+        video = (video / 2 + 0.5).clamp(0, 1)
+        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16
+        video = video.float().numpy()
+        return video
+
+    def get_img2img_timesteps(self, num_inference_steps, strength, device):
+        strength = min(1, max(0, strength))
+        # get the original timestep using init_timestep
+        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)
+
+        t_start = max(num_inference_steps - init_timestep, 0)
+        timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :]
+
+        return timesteps, num_inference_steps - t_start
+
+    @torch.no_grad()
+    @replace_example_docstring(EXAMPLE_DOC_STRING)
+    def __call__(
+        self,
+        prompt: Union[str, List[str]] = None,
+        prompt_2: Optional[Union[str, List[str]]] = None,
+        single_model_length: Optional[int] = 16,
+        height: Optional[int] = None,
+        width: Optional[int] = None,
+        num_inference_steps: int = 50,
+        denoising_end: Optional[float] = None,
+        guidance_scale: float = 5.0,
+        negative_prompt: Optional[Union[str, List[str]]] = None,
+        negative_prompt_2: Optional[Union[str, List[str]]] = None,
+        num_videos_per_prompt: Optional[int] = 1,
+        eta: float = 0.0,
+        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
+        latents: Optional[torch.FloatTensor] = None,
+        prompt_embeds: Optional[torch.FloatTensor] = None,
+        negative_prompt_embeds: Optional[torch.FloatTensor] = None,
+        pooled_prompt_embeds: Optional[torch.FloatTensor] = None,
+        negative_pooled_prompt_embeds: Optional[torch.FloatTensor] = None,
+        output_type: Optional[str] = "tensor",
+        return_dict: bool = True,
+        callback: Optional[Callable[[int, torch.FloatTensor], None]] = None,
+        callback_steps: Optional[List[int]] = None,
+        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
+        guidance_rescale: float = 0.0,
+        original_size: Optional[Tuple[int, int]] = None,
+        crops_coords_top_left: Tuple[int, int] = (0, 0),
+        target_size: Optional[Tuple[int, int]] = None,
+
+        unet_batch_size: int = 1,
+        video_length: Optional[int] = None,
+        context_frames: int = -1,
+        context_stride: int = 3,
+        context_overlap: int = 4,
+        context_schedule: str = "uniform",
+        clip_skip: int = 1,
+        controlnet_type_map: Dict[str, Dict[str, float]] = None,
+        controlnet_image_map: Dict[int, Dict[str, Any]] = None,
+        controlnet_ref_map: Dict[str, Any] = None,
+        controlnet_max_samples_on_vram: int = 999,
+        controlnet_max_models_on_vram: int = 99,
+        controlnet_is_loop: bool = True,
+        img2img_map: Dict[str, Any] = None,
+        ip_adapter_config_map: Dict[str, Any] = None,
+        region_list: List[Any] = None,
+        region_condi_list: List[Any] = None,
+        interpolation_factor: int = 1,
+        is_single_prompt_mode: bool = False,
+        apply_lcm_lora: bool = False,
+        gradual_latent_map: Dict[str, Any] = None,
+        **kwargs,
+    ):
+        r"""
+        Function invoked when calling the pipeline for generation.
+
+        Args:
+            prompt (`str` or `List[str]`, *optional*):
+                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`
+                instead.
+            prompt_2 (`str` or `List[str]`, *optional*):
+                The prompt or prompts to be sent to the `tokenizer_2` and `text_encoder_2`. If not defined, `prompt` is
+                used in both text-encoders
+            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
+                The height in pixels of the generated image.
+            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
+                The width in pixels of the generated image.
+            num_inference_steps (`int`, *optional*, defaults to 50):
+                The number of denoising steps. More denoising steps usually lead to a higher quality image at the
+                expense of slower inference.
+            denoising_end (`float`, *optional*):
+                When specified, determines the fraction (between 0.0 and 1.0) of the total denoising process to be
+                completed before it is intentionally prematurely terminated. As a result, the returned sample will
+                still retain a substantial amount of noise as determined by the discrete timesteps selected by the
+                scheduler. The denoising_end parameter should ideally be utilized when this pipeline forms a part of a
+                "Mixture of Denoisers" multi-pipeline setup, as elaborated in [**Refining the Image
+                Output**](https://huggingface.co/docs/diffusers/api/pipelines/stable_diffusion/stable_diffusion_xl#refining-the-image-output)
+            guidance_scale (`float`, *optional*, defaults to 5.0):
+                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
+                `guidance_scale` is defined as `w` of equation 2. of [Imagen
+                Paper](https://arxiv.org/pdf/2205.11487.pdf). 
Guidance scale is enabled by setting `guidance_scale >
+                1`. Higher guidance scale encourages generating images that are closely linked to the text `prompt`,
+                usually at the expense of lower image quality.
+            negative_prompt (`str` or `List[str]`, *optional*):
+                The prompt or prompts not to guide the image generation. If not defined, one has to pass
+                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is
+                less than `1`).
+            negative_prompt_2 (`str` or `List[str]`, *optional*):
+                The prompt or prompts not to guide the image generation to be sent to `tokenizer_2` and
+                `text_encoder_2`. If not defined, `negative_prompt` is used in both text-encoders.
+            num_videos_per_prompt (`int`, *optional*, defaults to 1):
+                The number of videos to generate per prompt.
+            eta (`float`, *optional*, defaults to 0.0):
+                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
+                [`schedulers.DDIMScheduler`], will be ignored for others.
+            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
+                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
+                to make generation deterministic.
+            latents (`torch.FloatTensor`, *optional*):
+                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image
+                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents
+                tensor will be generated by sampling using the supplied random `generator`.
+            prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
+                provided, text embeddings will be generated from `prompt` input argument.
+            negative_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
+                argument.
+            pooled_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting.
+                If not provided, pooled text embeddings will be generated from `prompt` input argument.
+            negative_pooled_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, pooled negative_prompt_embeds will be generated from `negative_prompt`
+                input argument.
+            output_type (`str`, *optional*, defaults to `"tensor"`):
+                The output format of the generated video: `"tensor"` for a `torch.Tensor`, `"latent"` for the raw
+                latents, anything else for a `numpy` array.
+            return_dict (`bool`, *optional*, defaults to `True`):
+                Whether or not to return an [`AnimatePipelineOutput`] instead of a plain value.
+            callback (`Callable`, *optional*):
+                A function called at the inference steps listed in `callback_steps` with the arguments
+                `callback(step: int, video: torch.Tensor)`, where `video` is a preview decoded from the current
+                denoised estimate of the latents.
+            callback_steps (`List[int]`, *optional*):
+                The inference steps at which `callback` is invoked. If not specified, the callback is never
+                called.
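+            video_length (`int`, *optional*):
+                Total number of frames to generate. When this is larger than `context_frames`, the video is
+                denoised in overlapping sliding windows over the frame axis (sequential mode, latents kept on CPU).
+            context_frames (`int`, *optional*, defaults to -1):
+                Number of frames the motion module denoises together in a single context window.
+            context_stride (`int`, *optional*, defaults to 3):
+                Stride used by the context scheduler when laying out windows over the frame axis.
+            context_overlap (`int`, *optional*, defaults to 4):
+                Number of frames shared between adjacent context windows.
+            context_schedule (`str`, *optional*, defaults to `"uniform"`):
+                Name of the context scheduler (see `get_context_scheduler`) that decides which frames are grouped
+                into each window.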
+            cross_attention_kwargs (`dict`, *optional*):
+                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
+                `self.processor` in
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
+            guidance_rescale (`float`, *optional*, defaults to 0.0):
+                Guidance rescale factor proposed by [Common Diffusion Noise Schedules and Sample Steps are
+                Flawed](https://arxiv.org/pdf/2305.08891.pdf). `guidance_rescale` is defined as `φ` in equation 16. of
+                [Common Diffusion Noise Schedules and Sample Steps are Flawed](https://arxiv.org/pdf/2305.08891.pdf).
+                Guidance rescale factor should fix overexposure when using zero terminal SNR.
+            original_size (`Tuple[int]`, *optional*, defaults to (1024, 1024)):
+                If `original_size` is not the same as `target_size` the image will appear to be down- or upsampled.
+                `original_size` defaults to `(width, height)` if not specified. Part of SDXL's micro-conditioning as
+                explained in section 2.2 of
+                [https://huggingface.co/papers/2307.01952](https://huggingface.co/papers/2307.01952).
+            crops_coords_top_left (`Tuple[int]`, *optional*, defaults to (0, 0)):
+                `crops_coords_top_left` can be used to generate an image that appears to be "cropped" from the position
+                `crops_coords_top_left` downwards. Favorable, well-centered images are usually achieved by setting
+                `crops_coords_top_left` to (0, 0). Part of SDXL's micro-conditioning as explained in section 2.2 of
+                [https://huggingface.co/papers/2307.01952](https://huggingface.co/papers/2307.01952).
+            target_size (`Tuple[int]`, *optional*, defaults to (1024, 1024)):
+                For most cases, `target_size` should be set to the desired height and width of the generated image. If
+                not specified it will default to `(width, height)`. Part of SDXL's micro-conditioning as explained in
+                section 2.2 of [https://huggingface.co/papers/2307.01952](https://huggingface.co/papers/2307.01952).
+
+        Examples:
+
+        Returns:
+            [`AnimatePipelineOutput`] or `torch.Tensor`/`np.ndarray`:
+            [`AnimatePipelineOutput`] if `return_dict` is True, otherwise the generated video in the format
+            selected by `output_type`.
+        """
+
+        gradual_latent = False
+        if gradual_latent_map:
+            gradual_latent = gradual_latent_map["enable"]
+
+        logger.info(f"{apply_lcm_lora=}")
+        if apply_lcm_lora:
+            self.scheduler = LCMScheduler.from_config(self.scheduler.config)
+
+        controlnet_image_map_org = controlnet_image_map
+
+        # keep ControlNet weights and prepared samples out of VRAM; they are moved over on demand
+        controlnet_max_models_on_vram = 0
+        controlnet_max_samples_on_vram = 0
+
+        multi_uncond_mode = self.lora_map is not None
+        logger.info(f"{multi_uncond_mode=}")
+
+        # 0. Default height and width to unet
+        height = height or self.default_sample_size * self.vae_scale_factor
+        width = width or self.default_sample_size * self.vae_scale_factor
+
+        original_size = original_size or (height, width)
+        target_size = target_size or (height, width)
+
+        # 1. Check inputs. Raise error if not correct
+        # (`prompt` may legitimately be None here, so pass a placeholder string to satisfy the base check)
+        self.check_inputs(
+            "dummy_str",
+            prompt_2,
+            height,
+            width,
+            callback_steps,
+            negative_prompt,
+            negative_prompt_2,
+            prompt_embeds,
+            negative_prompt_embeds,
+            pooled_prompt_embeds,
+            negative_pooled_prompt_embeds,
+        )
+
+        # 2. 
Define call parameters
+        # this pipeline currently always runs with a batch size of 1
+        batch_size = 1
+
+        sequential_mode = video_length is not None and video_length > context_frames
+
+        device = self._execution_device
+        latents_device = torch.device("cpu") if sequential_mode else device
+
+        if ip_adapter_config_map:
+            img_enc_path = "data/models/ip_adapter/models/image_encoder/"
+            if ip_adapter_config_map["is_plus"]:
+                self.ip_adapter = IPAdapterPlusXL(self, img_enc_path, "data/models/ip_adapter/sdxl_models/ip-adapter-plus_sdxl_vit-h.bin", device, 16)
+            elif ip_adapter_config_map["is_plus_face"]:
+                self.ip_adapter = IPAdapterPlusXL(self, img_enc_path, "data/models/ip_adapter/sdxl_models/ip-adapter-plus-face_sdxl_vit-h.bin", device, 16)
+            else:
+                self.ip_adapter = IPAdapterXL(self, img_enc_path, "data/models/ip_adapter/sdxl_models/ip-adapter_sdxl_vit-h.bin", device, 4)
+            self.ip_adapter.set_scale(ip_adapter_config_map["scale"])
+        else:
+            self.ip_adapter = None
+
+        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)
+        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`
+        # corresponds to doing no classifier free guidance.
+        do_classifier_free_guidance = guidance_scale > 1.0
+
+        # 3. Encode input prompt
+        text_encoder_lora_scale = (
+            cross_attention_kwargs.get("scale", None) if cross_attention_kwargs is not None else None
+        )
+
+        prompt_encoder = PromptEncoderSDXL(
+            self,
+            device,
+            device,  # latents_device
+            num_videos_per_prompt,
+            do_classifier_free_guidance,
+            region_condi_list,
+            negative_prompt,
+            is_single_prompt_mode,
+            clip_skip,
+            multi_uncond_mode=multi_uncond_mode,
+        )
+
+        if self.ip_adapter:
+            self.ip_adapter.delete_encoder()
+
+        condi_size = prompt_encoder.get_condi_size()
+
+        # 3.5 Prepare controlnet variables
+        if self.controlnet_map:
+            for i, type_str in enumerate(self.controlnet_map):
+                if i < controlnet_max_models_on_vram:
+                    self.controlnet_map[type_str].to(device=device, non_blocking=True)
+
+        # controlnet_image_map
+        # input:  { 0 : { "type_str" : IMAGE, "type_str2" : IMAGE } }
+        # output: { "type_str" : { 0 : IMAGE, 15 : IMAGE } }
+        controlnet_image_map = None
+
+        if controlnet_image_map_org:
+            controlnet_image_map = {key: {} for key in controlnet_type_map}
+            for key_frame_no in controlnet_image_map_org:
+                for t, img in controlnet_image_map_org[key_frame_no].items():
+                    if isinstance(self.controlnet_map[t], ControlNetLLLite):
+                        img_size = 1
+                        do_normalize = True
+                    else:
+                        img_size = prompt_encoder.get_condi_size()
+                        do_normalize = False
+                    c_dtype = torch.float16  # self.controlnet_map[t].dtype
+                    tmp = self.prepare_image(
+                        image=img,
+                        width=width,
+                        height=height,
+                        batch_size=1,
+                        num_images_per_prompt=1,
+                        device=latents_device,  # keep prepared images on the latents device (CPU in sequential mode)
+                        dtype=c_dtype,
+                        do_classifier_free_guidance=False,
+                        guess_mode=False,
+                        do_normalize=do_normalize,
+                    )
+                    controlnet_image_map[t][key_frame_no] = torch.cat([tmp] * img_size)
+
+        del controlnet_image_map_org
+        torch.cuda.empty_cache()
+
+        # { "0_type_str" : { "scales" = [0.1, 0.3, 0.5, 1.0, 0.5, 0.3, 0.1], "frames"=[125, 126, 127, 0, 1, 2, 3] }}
+        controlnet_scale_map = {}
+        controlnet_affected_list = np.zeros(video_length, dtype=int)
+
+        is_v2v = True
+
+        if controlnet_image_map:
+            for type_str in controlnet_image_map:
+                for key_frame_no in controlnet_image_map[type_str]:
+                    scale_list = 
controlnet_type_map[type_str]["control_scale_list"] + if len(scale_list) > 0: + is_v2v = False + scale_list = scale_list[0: context_frames] + scale_len = len(scale_list) + + if controlnet_is_loop: + frames = [ i%video_length for i in range(key_frame_no-scale_len, key_frame_no+scale_len+1)] + + controlnet_scale_map[str(key_frame_no) + "_" + type_str] = { + "scales" : scale_list[::-1] + [1.0] + scale_list, + "frames" : frames, + } + else: + frames = [ i for i in range(max(0, key_frame_no-scale_len), min(key_frame_no+scale_len+1, video_length))] + + controlnet_scale_map[str(key_frame_no) + "_" + type_str] = { + "scales" : scale_list[:key_frame_no][::-1] + [1.0] + scale_list[:video_length-key_frame_no-1], + "frames" : frames, + } + + controlnet_affected_list[frames] = 1 + + def controlnet_is_affected( frame_index:int): + return controlnet_affected_list[frame_index] + + def get_controlnet_scale( + type: str, + cur_step: int, + step_length: int, + ): + s = controlnet_type_map[type]["control_guidance_start"] + e = controlnet_type_map[type]["control_guidance_end"] + keep = 1.0 - float(cur_step / len(timesteps) < s or (cur_step + 1) / step_length > e) + + scale = controlnet_type_map[type]["controlnet_conditioning_scale"] + + return keep * scale + + def get_controlnet_variable( + type_str: str, + cur_step: int, + step_length: int, + target_frames: List[int], + ): + cont_vars = [] + + if not controlnet_image_map: + return None + + if type_str not in controlnet_image_map: + return None + + for fr, img in controlnet_image_map[type_str].items(): + + if fr in target_frames: + cont_vars.append( { + "frame_no" : fr, + "image" : img, + "cond_scale" : get_controlnet_scale(type_str, cur_step, step_length), + "guess_mode" : controlnet_type_map[type_str]["guess_mode"] + } ) + + return cont_vars + + + + # 4. Prepare timesteps + self.scheduler.set_timesteps(num_inference_steps, device=latents_device) + if img2img_map: + timesteps, num_inference_steps = self.get_img2img_timesteps(num_inference_steps, img2img_map["denoising_strength"], latents_device) + latent_timestep = timesteps[:1].repeat(batch_size * 1) + else: + timesteps = self.scheduler.timesteps + latent_timestep = None + + is_strength_max = True + if img2img_map: + is_strength_max = img2img_map["denoising_strength"] == 1.0 + + + # 5. Prepare latent variables + num_channels_latents = self.unet.config.in_channels + latents_outputs = self.prepare_latents( + batch_size = 1, + num_channels_latents=num_channels_latents, + video_length=video_length, + height=height, + width=width, + dtype=prompt_encoder.get_prompt_embeds_dtype(), + device=latents_device, + generator=generator, + img2img_map=img2img_map, + timestep=latent_timestep, + latents=latents, + is_strength_max=is_strength_max, + return_noise=True, + return_image_latents=True, + ) + + latents, noise, image_latents = latents_outputs + + del img2img_map + torch.cuda.empty_cache() + + # 5.5 Prepare region mask + region_mask = RegionMask( + region_list, + batch_size, + num_channels_latents, + video_length, + height, + width, + self.vae_scale_factor, + prompt_encoder.get_prompt_embeds_dtype(), + latents_device, + multi_uncond_mode + ) + + torch.cuda.empty_cache() + + + # 6. Prepare extra step kwargs. 
TODO: Logic should ideally just be moved out of the pipeline
+        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)
+
+        # 6.5 - Infinite context loop shenanigans
+        context_scheduler = get_context_scheduler(context_schedule)
+        total_steps = get_total_steps(
+            context_scheduler,
+            timesteps,
+            num_inference_steps,
+            latents.shape[2],
+            context_frames,
+            context_stride,
+            context_overlap,
+        )
+
+        # 7. Prepare added time ids & embeddings
+        add_time_ids = self._get_add_time_ids(
+            original_size, crops_coords_top_left, target_size, dtype=prompt_encoder.get_prompt_embeds_dtype(),
+        )
+
+        add_time_ids = torch.cat([add_time_ids for _ in range(condi_size)], dim=0)
+        add_time_ids = add_time_ids.to(device)
+
+        # 8. Denoising loop
+        num_warmup_steps = max(len(timesteps) - num_inference_steps * self.scheduler.order, 0)
+
+        # 7.1 Apply denoising_end (left disabled here; the context-window step count above is
+        # computed from the full timestep list)
+        if False:
+            if denoising_end is not None and type(denoising_end) == float and denoising_end > 0 and denoising_end < 1:
+                discrete_timestep_cutoff = int(
+                    round(
+                        self.scheduler.config.num_train_timesteps
+                        - (denoising_end * self.scheduler.config.num_train_timesteps)
+                    )
+                )
+                num_inference_steps = len(list(filter(lambda ts: ts >= discrete_timestep_cutoff, timesteps)))
+                timesteps = timesteps[:num_inference_steps]
+
+        logger.info(f"{do_classifier_free_guidance=}")
+        logger.info(f"{condi_size=}")
+
+        if self.lora_map:
+            self.lora_map.to(device, self.unet.dtype)
+        if self.lcm:
+            self.lcm.to(device, self.unet.dtype)
+
+        lat_height, lat_width = latents.shape[-2:]
+
+        def gradual_latent_scale(progress):
+            # walk the {progress: scale} map and return the scale for the current progress
+            if gradual_latent:
+                cur = 0.5
+                for s in gradual_latent_map["scale"]:
+                    v = gradual_latent_map["scale"][s]
+                    if float(s) > progress:
+                        return cur
+                    cur = v
+                return cur
+            else:
+                return 1.0
+
+        def gradual_latent_size(progress):
+            # latent height/width for the current progress, snapped down to a multiple of 8
+            if gradual_latent:
+                current_ratio = gradual_latent_scale(progress)
+                h = int(lat_height * current_ratio) // 8 * 8
+                w = int(lat_width * current_ratio) // 8 * 8
+                return (h, w)
+            else:
+                return (lat_height, lat_width)
+
+        def unsharp_mask(img):
+            imgf = img.float()
+            k = 0.05  # strength
+            kernel = torch.FloatTensor([[0, -k, 0],
+                                        [-k, 1 + 4 * k, -k],
+                                        [0, -k, 0]])
+
+            conv_kernel = torch.eye(4)[..., None, None] * kernel[None, None, ...]
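+            # torch.eye(4)[..., None, None] is (4, 4, 1, 1) and kernel[None, None, ...] is
+            # (1, 1, 3, 3), so conv_kernel is a (4, 4, 3, 3) conv2d weight with the 3x3
+            # sharpening filter on the diagonal only: each of the 4 latent channels is
+            # sharpened independently, with no cross-channel mixing.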
+ imgf = torch.nn.functional.conv2d(imgf, conv_kernel.to(img.device), padding=1) + return imgf.to(img.dtype) + + def resize_tensor(ten, size, do_unsharp_mask=False): + ten = rearrange(ten, "b c f h w -> (b f) c h w") + ten = torch.nn.functional.interpolate( + ten.float(), size=size, mode="bicubic", align_corners=False + ).to(ten.dtype) + if do_unsharp_mask: + ten = unsharp_mask(ten) + return rearrange(ten, "(b f) c h w -> b c f h w", f=video_length) + + if gradual_latent: + latents = resize_tensor(latents, gradual_latent_size(0)) + reverse_steps = gradual_latent_map["reverse_steps"] + noise_add_count = gradual_latent_map["noise_add_count"] + total_steps = ((total_steps/num_inference_steps) * (reverse_steps* (len(gradual_latent_map["scale"].keys()) - 1) )) + total_steps + total_steps = int(total_steps) + + prev_gradient_latent_size = gradual_latent_size(0) + + + + with self.progress_bar(total=total_steps) as progress_bar: + + i = 0 + real_i = 0 +# for i, t in enumerate(timesteps): + while i < len(timesteps): + t = timesteps[i] + + cur_gradient_latent_size = gradual_latent_size((real_i+1) / len(timesteps)) + + if self.lcm: + self.lcm.apply(i, len(timesteps)) + + + noise_pred = torch.zeros( + (latents.shape[0] * condi_size, *latents.shape[1:]), + device=latents.device, + dtype=latents.dtype, + ) + counter = torch.zeros( + (1, 1, latents.shape[2], 1, 1), device=latents.device, dtype=latents.dtype + ) + + # { "0_type_str" : (down_samples, mid_sample) } + controlnet_result={} + + def apply_lllite(context: List[int]): + for type_str in controlnet_type_map: + if not isinstance( self.controlnet_map[type_str] , ControlNetLLLite): + continue + + cont_vars = get_controlnet_variable(type_str, i, len(timesteps), context) + if not cont_vars: + self.controlnet_map[type_str].set_multiplier(0.0) + continue + + def get_index(l, x): + return l.index(x) if x in l else -1 + + zero_img = torch.zeros_like(cont_vars[0]["image"]) + + scales=[0.0 for fr in context] + imgs=[zero_img for fr in context] + + for cont_var in cont_vars: + c_fr = cont_var["frame_no"] + scale_index = str(c_fr) + "_" + type_str + + for s_i, fr in enumerate(controlnet_scale_map[scale_index]["frames"]): + index = get_index(context, fr) + if index != -1: + scales[index] = controlnet_scale_map[scale_index]["scales"][s_i] + imgs[index] = cont_var["image"] + + scales = [ s * cont_var["cond_scale"] for s in scales ] + + + imgs = torch.cat(imgs).to(device=device, non_blocking=True) + + key= ".".join(map(str, context)) + key= type_str + "." 
+ key + + self.controlnet_map[type_str].to(device=device) + self.controlnet_map[type_str].set_cond_image(imgs,key) + self.controlnet_map[type_str].set_multiplier(scales) + + def get_controlnet_result(context: List[int] = None): + #logger.info(f"get_controlnet_result called {context=}") + + if controlnet_image_map is None: + return None, None + + hit = False + for n in context: + if controlnet_is_affected(n): + hit=True + break + if hit == False: + return None, None + + apply_lllite(context) + + if len(controlnet_result) == 0: + return None, None + + _down_block_res_samples=[] + + first_down = list(list(controlnet_result.values())[0].values())[0][0] + first_mid = list(list(controlnet_result.values())[0].values())[0][1] + for ii in range(len(first_down)): + _down_block_res_samples.append( + torch.zeros( + (first_down[ii].shape[0], first_down[ii].shape[1], len(context) ,*first_down[ii].shape[3:]), + device=device, + dtype=first_down[ii].dtype, + )) + _mid_block_res_samples = torch.zeros( + (first_mid.shape[0], first_mid.shape[1], len(context) ,*first_mid.shape[3:]), + device=device, + dtype=first_mid.dtype, + ) + + for fr in controlnet_result: + for type_str in controlnet_result[fr]: + result = str(fr) + "_" + type_str + + val = controlnet_result[fr][type_str] + cur_down = [ + v.to(device = device, dtype=first_down[0].dtype, non_blocking=True) if v.device != device else v + for v in val[0] + ] + cur_mid =val[1].to(device = device, dtype=first_mid.dtype, non_blocking=True) if val[1].device != device else val[1] + loc = list(set(context) & set(controlnet_scale_map[result]["frames"])) + scales = [] + + for o in loc: + for j, f in enumerate(controlnet_scale_map[result]["frames"]): + if o == f: + scales.append(controlnet_scale_map[result]["scales"][j]) + break + loc_index=[] + + for o in loc: + for j, f in enumerate( context ): + if o==f: + loc_index.append(j) + break + + mod = torch.tensor(scales).to(device, dtype=cur_mid.dtype) + + add = cur_mid * mod[None,None,:,None,None] + _mid_block_res_samples[:, :, loc_index, :, :] = _mid_block_res_samples[:, :, loc_index, :, :] + add + + for ii in range(len(cur_down)): + add = cur_down[ii] * mod[None,None,:,None,None] + _down_block_res_samples[ii][:, :, loc_index, :, :] = _down_block_res_samples[ii][:, :, loc_index, :, :] + add + + return _down_block_res_samples, _mid_block_res_samples + + def process_controlnet( target_frames: List[int] = None ): + #logger.info(f"process_controlnet called {target_frames=}") + nonlocal controlnet_result + + controlnet_samples_on_vram = 0 + + loc = list(set(target_frames) & set(controlnet_result.keys())) + + controlnet_result = {key: controlnet_result[key] for key in loc} + + target_frames = list(set(target_frames) - set(loc)) + #logger.info(f"-> {target_frames=}") + if len(target_frames) == 0: + return + + def sample_to_device( sample ): + nonlocal controlnet_samples_on_vram + + if controlnet_max_samples_on_vram <= controlnet_samples_on_vram: + down_samples = [ + v.to(device = torch.device("cpu"), non_blocking=True) if v.device != torch.device("cpu") else v + for v in sample[0] ] + mid_sample = sample[1].to(device = torch.device("cpu"), non_blocking=True) if sample[1].device != torch.device("cpu") else sample[1] + else: + if sample[0][0].device != device: + down_samples = [ v.to(device = device, non_blocking=True) for v in sample[0] ] + mid_sample = sample[1].to(device = device, non_blocking=True) + else: + down_samples = sample[0] + mid_sample = sample[1] + controlnet_samples_on_vram += 1 + return down_samples, 
mid_sample + + + for fr in controlnet_result: + for type_str in controlnet_result[fr]: + controlnet_result[fr][type_str] = sample_to_device(controlnet_result[fr][type_str]) + + for type_str in controlnet_type_map: + + if isinstance( self.controlnet_map[type_str] , ControlNetLLLite): + continue + + cont_vars = get_controlnet_variable(type_str, i, len(timesteps), target_frames) + if not cont_vars: + continue + + org_device = self.controlnet_map[type_str].device + if org_device != device: + self.controlnet_map[type_str] = self.controlnet_map[type_str].to(device=device, non_blocking=True) + + for cont_var in cont_vars: + frame_no = cont_var["frame_no"] + + latent_model_input = ( + latents[:, :, [frame_no]] + .to(device) + .repeat( prompt_encoder.get_condi_size(), 1, 1, 1, 1) + ) + control_model_input = self.scheduler.scale_model_input(latent_model_input, t)[:, :, 0] + controlnet_prompt_embeds, controlnet_add_text_embeds = prompt_encoder.get_current_prompt_embeds([frame_no], latents.shape[2]) + + controlnet_added_cond_kwargs = {"text_embeds": controlnet_add_text_embeds.to(device=device), "time_ids": add_time_ids} + + cont_var_img = cont_var["image"].to(device=device) + + if gradual_latent: + cur_lat_height, cur_lat_width = latents.shape[-2:] + cont_var_img = torch.nn.functional.interpolate( + cont_var_img.float(), size=(cur_lat_height*8, cur_lat_width*8), mode="bicubic", align_corners=False + ).to(cont_var_img.dtype) + + + down_samples, mid_sample = self.controlnet_map[type_str]( + control_model_input, + t, + encoder_hidden_states=controlnet_prompt_embeds.to(device=device), + controlnet_cond=cont_var_img, + conditioning_scale=cont_var["cond_scale"], + guess_mode=cont_var["guess_mode"], + added_cond_kwargs=controlnet_added_cond_kwargs, + return_dict=False, + ) + + for ii in range(len(down_samples)): + down_samples[ii] = rearrange(down_samples[ii], "(b f) c h w -> b c f h w", f=1) + mid_sample = rearrange(mid_sample, "(b f) c h w -> b c f h w", f=1) + + if frame_no not in controlnet_result: + controlnet_result[frame_no] = {} + + controlnet_result[frame_no][type_str] = sample_to_device((down_samples, mid_sample)) + + if org_device != device: + self.controlnet_map[type_str] = self.controlnet_map[type_str].to(device=org_device, non_blocking=True) + + + + for context in context_scheduler( + i, num_inference_steps, latents.shape[2], context_frames, context_stride, context_overlap + ): + + if self.lora_map: + self.lora_map.unapply() + + + if controlnet_image_map: + if is_v2v: + controlnet_target = context + else: + controlnet_target = list(range(context[0]-context_frames, context[0])) + context + list(range(context[-1]+1, context[-1]+1+context_frames)) + controlnet_target = [f%video_length for f in controlnet_target] + controlnet_target = list(set(controlnet_target)) + + process_controlnet(controlnet_target) + + # expand the latents if we are doing classifier free guidance + latent_model_input = ( + latents[:, :, context] + .to(device) + .repeat(condi_size, 1, 1, 1, 1) + ) + latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) + + cur_prompt, add_text_embeds = prompt_encoder.get_current_prompt_embeds(context, latents.shape[2]) + down_block_res_samples,mid_block_res_sample = get_controlnet_result(context) + + cur_prompt = cur_prompt.to(device=device) + add_text_embeds = add_text_embeds.to(device=device) + + # predict the noise residual + #added_cond_kwargs = {"text_embeds": add_text_embeds, "time_ids": add_time_ids} + ts = torch.tensor([t], dtype=latent_model_input.dtype, 
device=latent_model_input.device) + if condi_size > 1: + ts = ts.repeat(condi_size) + + + __pred = [] + + for layer_index in range(0, latent_model_input.shape[0], unet_batch_size): + + if self.lora_map: + self.lora_map.apply(layer_index, latent_model_input.shape[0], context[len(context)//2]) + + layer_width = 1 if is_single_prompt_mode else context_frames + + __lat = latent_model_input[layer_index:layer_index+unet_batch_size] + __cur_prompt = cur_prompt[layer_index * layer_width:(layer_index + unet_batch_size)*layer_width] + __added_cond_kwargs = {"text_embeds": add_text_embeds[layer_index * layer_width:(layer_index + unet_batch_size)*layer_width], "time_ids": add_time_ids[layer_index:layer_index+unet_batch_size]} + + __do = [] + if down_block_res_samples is not None: + for do in down_block_res_samples: + __do.append(do[layer_index:layer_index+unet_batch_size]) + else: + __do = None + + __mid = None + if mid_block_res_sample is not None: + __mid = mid_block_res_sample[layer_index:layer_index+unet_batch_size] + + pred_layer = self.unet( + __lat, + ts[layer_index:layer_index+unet_batch_size], + encoder_hidden_states=__cur_prompt, + cross_attention_kwargs=cross_attention_kwargs, + added_cond_kwargs=__added_cond_kwargs, + down_block_additional_residuals=__do, + mid_block_additional_residual=__mid, + return_dict=False, + )[0] + + wh = None + + if i < len(timesteps) * region_mask.get_crop_generation_rate(layer_index, latent_model_input.shape[0]): + #TODO lllite + wh, xy_list = region_mask.get_area(layer_index, latent_model_input.shape[0], context) + if wh: + a_w, a_h = wh + __lat_list = [] + for c_index, xy in enumerate( xy_list ): + a_x, a_y = xy + __lat_list.append( __lat[:,:,[c_index],a_y:a_y+a_h, a_x:a_x+a_w ] ) + + __lat = torch.cat(__lat_list, dim=2) + + if __do is not None: + __tmp_do = [] + for _d, rate in zip(__do, (1,1,1,2,2,2,4,4,4,8,8,8)): + _inner_do_list = [] + for c_index, xy in enumerate( xy_list ): + a_x, a_y = xy + _inner_do_list.append(_d[:,:,[c_index],a_y//rate:(a_y+a_h)//rate, a_x//rate:(a_x+a_w)//rate ] ) + + __tmp_do.append( torch.cat(_inner_do_list, dim=2) ) + __do = __tmp_do + + if __mid is not None: + rate = 8 + _mid_list = [] + for c_index, xy in enumerate( xy_list ): + a_x, a_y = xy + _mid_list.append( __mid[:,:,[c_index],a_y//rate:(a_y+a_h)//rate, a_x//rate:(a_x+a_w)//rate ] ) + __mid = torch.cat(_mid_list, dim=2) + + crop_pred_layer = self.unet( + __lat, + ts[layer_index:layer_index+unet_batch_size], + encoder_hidden_states=__cur_prompt, + cross_attention_kwargs=cross_attention_kwargs, + added_cond_kwargs=__added_cond_kwargs, + down_block_additional_residuals=__do, + mid_block_additional_residual=__mid, + return_dict=False, + )[0] + + if wh: + a_w, a_h = wh + for c_index, xy in enumerate( xy_list ): + a_x, a_y = xy + pred_layer[:,:,[c_index],a_y:a_y+a_h, a_x:a_x+a_w] = crop_pred_layer[:,:,[c_index],:,:] + + + __pred.append( pred_layer ) + + down_block_res_samples = None + mid_block_res_sample = None + + pred = torch.cat(__pred) + + pred = pred.to(dtype=latents.dtype, device=latents.device) + noise_pred[:, :, context] = noise_pred[:, :, context] + pred + counter[:, :, context] = counter[:, :, context] + 1 + progress_bar.update() + + + # perform guidance + noise_size = condi_size + if do_classifier_free_guidance: + noise_pred = (noise_pred / counter) + noise_list = list(noise_pred.chunk( noise_size )) + + if multi_uncond_mode: + uc_noise_list = noise_list[:len(noise_list)//2] + noise_list = noise_list[len(noise_list)//2:] + for n in range(len(noise_list)): + 
noise_list[n] = uc_noise_list[n] + guidance_scale * (noise_list[n] - uc_noise_list[n]) + else: + noise_pred_uncond = noise_list.pop(0) + for n in range(len(noise_list)): + noise_list[n] = noise_pred_uncond + guidance_scale * (noise_list[n] - noise_pred_uncond) + + noise_size = len(noise_list) + noise_pred = torch.cat(noise_list) + + + if gradual_latent: + if prev_gradient_latent_size != cur_gradient_latent_size: + noise_pred = resize_tensor(noise_pred, cur_gradient_latent_size, True) + latents = resize_tensor(latents, cur_gradient_latent_size, True) + + # compute the previous noisy sample x_t -> x_t-1 + latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0] + + # call the callback, if provided + if (i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0)) and ( + callback is not None and (callback_steps is not None and i in callback_steps) + ): + denoised = latents - noise_pred + #denoised = self.interpolate_latents(denoised, interpolation_factor, device) + video = torch.from_numpy(self.decode_latents(denoised)) + callback(i, video) + + latents_list = latents.chunk( noise_size ) + + tmp_latent = torch.zeros( + latents_list[0].shape, device=latents.device, dtype=latents.dtype + ) + + for r_no in range(len(region_list)): + mask = region_mask.get_mask( r_no ) + if gradual_latent: + mask = resize_tensor(mask, cur_gradient_latent_size) + src = region_list[r_no]["src"] + if src == -1: + init_latents_proper = image_latents[:1] + + if i < len(timesteps) - 1: + noise_timestep = timesteps[i + 1] + init_latents_proper = self.scheduler.add_noise( + init_latents_proper, noise, torch.tensor([noise_timestep]) + ) + + if gradual_latent: + lat = resize_tensor(init_latents_proper, cur_gradient_latent_size) + else: + lat = init_latents_proper + else: + lat = latents_list[src] + + tmp_latent = tmp_latent * (1-mask) + lat * mask + + latents = tmp_latent + + init_latents_proper = None + lat = None + latents_list = None + tmp_latent = None + + i+=1 + real_i = max(i, real_i) + if gradual_latent: + if prev_gradient_latent_size != cur_gradient_latent_size: + reverse = min(i, reverse_steps) + self.scheduler._step_index -= reverse + _noise = resize_tensor(noise, cur_gradient_latent_size) + for count in range(i, i+noise_add_count): + count = min(count,len(timesteps)-1) + latents = self.scheduler.add_noise( + latents, _noise, torch.tensor([timesteps[count]]) + ) + i -= reverse + torch.cuda.empty_cache() + + prev_gradient_latent_size = cur_gradient_latent_size + + + controlnet_result = None + torch.cuda.empty_cache() + + # make sure the VAE is in float32 mode, as it overflows in float16 + if self.vae.dtype == torch.float32 and latents.dtype == torch.float16: + self.upcast_vae() + latents = latents.to(next(iter(self.vae.post_quant_conv.parameters())).dtype) + + if self.ip_adapter: + show_gpu("before unload ip_adapter") + self.ip_adapter.unload() + self.ip_adapter = None + torch.cuda.empty_cache() + show_gpu("after unload ip_adapter") + + self.maybe_free_model_hooks() + torch.cuda.empty_cache() + + if False: + if not output_type == "latent": + latents = rearrange(latents, "b c f h w -> (b f) c h w") + image = self.vae.decode((latents / self.vae.config.scaling_factor).to(self.vae.device, self.vae.dtype), return_dict=False)[0] + else: + raise ValueError(f"{output_type=} not supported") + image = latents + return StableDiffusionXLPipelineOutput(images=image) + + #image = self.image_processor.postprocess(image, output_type=output_type) + + # 
Offload last model to CPU
+        if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None:
+            self.final_offload_hook.offload()
+
+        # decode the denoised latents into frames unless the caller asked for raw latents
+        if not output_type == "latent":
+            video = self.decode_latents(latents)
+        else:
+            video = latents
+
+        # Convert to tensor
+        if output_type == "tensor":
+            video = torch.from_numpy(video)
+
+        if not return_dict:
+            return video
+
+        return AnimatePipelineOutput(videos=video)
+
+    # Override to properly handle the loading and unloading of the additional text encoder.
+    def load_lora_weights(self, pretrained_model_name_or_path_or_dict: Union[str, Dict[str, torch.Tensor]], **kwargs):
+        # We could have accessed the unet config from `lora_state_dict()` too. We pass
+        # it here explicitly to be able to tell that it's coming from an SDXL
+        # pipeline.
+        state_dict, network_alphas = self.lora_state_dict(
+            pretrained_model_name_or_path_or_dict,
+            unet_config=self.unet.config,
+            **kwargs,
+        )
+        self.load_lora_into_unet(state_dict, network_alphas=network_alphas, unet=self.unet)
+
+        text_encoder_state_dict = {k: v for k, v in state_dict.items() if "text_encoder." in k}
+        if len(text_encoder_state_dict) > 0:
+            self.load_lora_into_text_encoder(
+                text_encoder_state_dict,
+                network_alphas=network_alphas,
+                text_encoder=self.text_encoder,
+                prefix="text_encoder",
+                lora_scale=self.lora_scale,
+            )
+
+        text_encoder_2_state_dict = {k: v for k, v in state_dict.items() if "text_encoder_2." in k}
+        if len(text_encoder_2_state_dict) > 0:
+            self.load_lora_into_text_encoder(
+                text_encoder_2_state_dict,
+                network_alphas=network_alphas,
+                text_encoder=self.text_encoder_2,
+                prefix="text_encoder_2",
+                lora_scale=self.lora_scale,
+            )
+
+    @classmethod
+    def save_lora_weights(
+        cls,
+        save_directory: Union[str, os.PathLike],
+        unet_lora_layers: Dict[str, Union[torch.nn.Module, torch.Tensor]] = None,
+        text_encoder_lora_layers: Dict[str, Union[torch.nn.Module, torch.Tensor]] = None,
+        text_encoder_2_lora_layers: Dict[str, Union[torch.nn.Module, torch.Tensor]] = None,
+        is_main_process: bool = True,
+        weight_name: str = None,
+        save_function: Callable = None,
+        safe_serialization: bool = True,
+    ):
+        state_dict = {}
+
+        def pack_weights(layers, prefix):
+            layers_weights = layers.state_dict() if isinstance(layers, torch.nn.Module) else layers
+            layers_state_dict = {f"{prefix}.{module_name}": param for module_name, param in layers_weights.items()}
+            return layers_state_dict
+
+        state_dict.update(pack_weights(unet_lora_layers, "unet"))
+
+        if text_encoder_lora_layers and text_encoder_2_lora_layers:
+            state_dict.update(pack_weights(text_encoder_lora_layers, "text_encoder"))
+            state_dict.update(pack_weights(text_encoder_2_lora_layers, "text_encoder_2"))
+
+        cls.write_lora_layers(
+            state_dict=state_dict,
+            save_directory=save_directory,
+            is_main_process=is_main_process,
+            weight_name=weight_name,
+            save_function=save_function,
+            safe_serialization=safe_serialization,
+        )
+
+    def _remove_text_encoder_monkey_patch(self):
+        self._remove_text_encoder_monkey_patch_classmethod(self.text_encoder)
+        self._remove_text_encoder_monkey_patch_classmethod(self.text_encoder_2)
\ No newline at end of file
diff --git a/src/animatediff/pipelines/ti.py b/src/animatediff/pipelines/ti.py
new file mode 100644
index 
0000000000000000000000000000000000000000..6157f9e06e95a7ae4b67133eae711433bb782259 --- /dev/null +++ b/src/animatediff/pipelines/ti.py @@ -0,0 +1,155 @@ +import logging +from pathlib import Path +from typing import Optional, Union + +import torch +from diffusers import DiffusionPipeline +from safetensors.torch import load_file +from torch import Tensor + +from animatediff import get_dir + +EMBED_DIR = get_dir("data").joinpath("embeddings") +EMBED_DIR_SDXL = get_dir("data").joinpath("sdxl_embeddings") +EMBED_EXTS = [".pt", ".pth", ".bin", ".safetensors"] + +logger = logging.getLogger(__name__) + + +def scan_text_embeddings(is_sdxl=False) -> list[Path]: + embed_dir=EMBED_DIR_SDXL if is_sdxl else EMBED_DIR + return [x for x in embed_dir.rglob("**/*") if x.is_file() and x.suffix.lower() in EMBED_EXTS] + + +def get_text_embeddings(return_tensors: bool = True, is_sdxl:bool = False) -> dict[str, Union[Tensor, Path]]: + embed_dir=EMBED_DIR_SDXL if is_sdxl else EMBED_DIR + embeds = {} + skipped = {} + path: Path + for path in scan_text_embeddings(is_sdxl): + if path.stem not in embeds: + # new token/name, add it + logger.debug(f"Found embedding token {path.stem} at {path.relative_to(embed_dir)}") + embeds[path.stem] = path + else: + # duplicate token/name, skip it + skipped[path.stem] = path + logger.debug(f"Duplicate embedding token {path.stem} at {path.relative_to(embed_dir)}") + + # warn the user if there are duplicates we skipped + if skipped: + logger.warn(f"Skipped {len(skipped)} embeddings with duplicate tokens!") + logger.warn(f"Skipped paths: {[x.relative_to(embed_dir) for x in skipped.values()]}") + logger.warn("Rename these files to avoid collisions!") + + # we can optionally return the tensors instead of the paths + if return_tensors: + # load the embeddings + embeds = {k: load_embed_weights(v) for k, v in embeds.items()} + # filter out the ones that failed to load + loaded_embeds = {k: v for k, v in embeds.items() if v is not None} + if len(loaded_embeds) != len(embeds): + logger.warn(f"Failed to load {len(embeds) - len(loaded_embeds)} embeddings!") + logger.warn(f"Skipped embeddings: {[x for x in embeds.keys() if x not in loaded_embeds]}") + + # return a dict of {token: path | embedding} + return embeds + + +def load_embed_weights(path: Path, key: Optional[str] = None) -> Optional[Tensor]: + """Load an embedding from a file. + Accepts an optional key to load a specific embedding from a file with multiple embeddings, otherwise + it will try to load the first one it finds. 
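+
+    Lookup order: a lone key in the state dict, then the explicitly passed `key`, then an
+    A1111-style "string_to_param" entry, and finally the first Tensor value found.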
+ """ + if not path.exists() and path.is_file(): + raise ValueError(f"Embedding path {path} does not exist or is not a file!") + try: + if path.suffix.lower() == ".safetensors": + state_dict = load_file(path, device="cpu") + elif path.suffix.lower() in EMBED_EXTS: + state_dict = torch.load(path, weights_only=True, map_location="cpu") + except Exception: + logger.error(f"Failed to load embedding {path}", exc_info=True) + return None + + embedding = None + if len(state_dict) == 1: + logger.debug(f"Found single key in {path.stem}, using it") + embedding = next(iter(state_dict.values())) + elif key is not None and key in state_dict: + logger.debug(f"Using passed key {key} for {path.stem}") + embedding = state_dict[key] + elif "string_to_param" in state_dict: + logger.debug(f"A1111 style embedding found for {path.stem}") + embedding = next(iter(state_dict["string_to_param"].values())) + else: + # we couldn't find the embedding key, warn the user and just use the first key that's a Tensor + logger.warn(f"Could not find embedding key in {path.stem}!") + logger.warn("Taking a wild guess and using the first Tensor we find...") + for key, value in state_dict.items(): + if torch.is_tensor(value): + embedding = value + logger.warn(f"Using key: {key}") + break + + return embedding + + +def load_text_embeddings( + pipeline: DiffusionPipeline, text_embeds: Optional[tuple[str, torch.Tensor]] = None, is_sdxl = False +) -> None: + if text_embeds is None: + text_embeds = get_text_embeddings(False, is_sdxl) + if len(text_embeds) < 1: + logger.info("No TI embeddings found") + return + + logger.info(f"Loading {len(text_embeds)} TI embeddings...") + loaded, skipped, failed = [], [], [] + + if True: + vocab = pipeline.tokenizer.get_vocab() # get the tokenizer vocab so we can skip loaded embeddings + for token, emb_path in text_embeds.items(): + try: + if token not in vocab: + if is_sdxl: + embed = load_embed_weights(emb_path, "clip_g").to(pipeline.text_encoder_2.device) + pipeline.load_textual_inversion(embed, token=token, text_encoder=pipeline.text_encoder_2, tokenizer=pipeline.tokenizer_2) + embed = load_embed_weights(emb_path, "clip_l").to(pipeline.text_encoder.device) + pipeline.load_textual_inversion(embed, token=token, text_encoder=pipeline.text_encoder, tokenizer=pipeline.tokenizer) + else: + embed = load_embed_weights(emb_path).to(pipeline.text_encoder.device) + pipeline.load_textual_inversion({token: embed}) + logger.debug(f"Loaded embedding '{token}'") + loaded.append(token) + else: + logger.debug(f"Skipping embedding '{token}' (already loaded)") + skipped.append(token) + except Exception: + logger.error(f"Failed to load TI embedding: {token}", exc_info=True) + failed.append(token) + + else: + vocab = pipeline.tokenizer.get_vocab() # get the tokenizer vocab so we can skip loaded embeddings + for token, embed in text_embeds.items(): + try: + if token not in vocab: + if is_sdxl: + pipeline.load_textual_inversion(text_encoder_sd, token=token, text_encoder=pipe.text_encoder, tokenizer=pipe.tokenizer) + else: + pipeline.load_textual_inversion({token: embed}) + logger.debug(f"Loaded embedding '{token}'") + loaded.append(token) + else: + logger.debug(f"Skipping embedding '{token}' (already loaded)") + skipped.append(token) + except Exception: + logger.error(f"Failed to load TI embedding: {token}", exc_info=True) + failed.append(token) + + # Print a summary of what we loaded + logger.info(f"Loaded {len(loaded)} embeddings, {len(skipped)} existing, {len(failed)} failed") + logger.info(f"Available embeddings: {', 
'.join(loaded + skipped)}")
+    if len(failed) > 0:
+        # only print failed if there were failures
+        logger.warn(f"Failed to load embeddings: {', '.join(failed)}")
diff --git a/src/animatediff/repo/.gitignore b/src/animatediff/repo/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..d6b7ef32c8478a48c3994dcadc86837f4371184d
--- /dev/null
+++ b/src/animatediff/repo/.gitignore
@@ -0,0 +1,2 @@
+*
+!.gitignore
diff --git a/src/animatediff/rife/__init__.py b/src/animatediff/rife/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae2d868cc0ef3284e7f4464bd720dd03c065ddee
--- /dev/null
+++ b/src/animatediff/rife/__init__.py
@@ -0,0 +1,5 @@
+from .rife import app
+
+__all__ = [
+    "app",
+]
diff --git a/src/animatediff/rife/ffmpeg.py b/src/animatediff/rife/ffmpeg.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3b825db1c45877daa21b84bc9f23aee4ffef4fe
--- /dev/null
+++ b/src/animatediff/rife/ffmpeg.py
@@ -0,0 +1,231 @@
+from enum import Enum
+from pathlib import Path
+from typing import Optional
+
+import ffmpeg
+from ffmpeg.nodes import FilterNode, InputNode
+
+
+class VideoCodec(str, Enum):
+    gif = "gif"
+    vp9 = "vp9"
+    webm = "webm"
+    webp = "webp"
+    h264 = "h264"
+    hevc = "hevc"
+
+
+def codec_extn(codec: VideoCodec):
+    match codec:
+        case VideoCodec.gif:
+            return "gif"
+        case VideoCodec.vp9:
+            return "webm"
+        case VideoCodec.webm:
+            return "webm"
+        case VideoCodec.webp:
+            return "webp"
+        case VideoCodec.h264:
+            return "mp4"
+        case VideoCodec.hevc:
+            return "mp4"
+        case _:
+            raise ValueError(f"Unknown codec {codec}")
+
+
+def clamp_gif_fps(fps: int):
+    """Clamp FPS to a value that is supported by GIFs.
+
+    GIF frame duration is measured in 1/100ths of a second, so we need to clamp the
+    FPS to a value that divides evenly into 100.
+    """
+    # the sky is not the limit, sadly...
+    if fps > 100:
+        return 100
+
+    # if 100/fps is an integer, we're good
+    if 100 % fps == 0:
+        return fps
+
+    # but of course, it was never going to be that easy.
+    match fps:
+        case x if x > 50:
+            # 50 is the highest FPS that divides evenly into 100.
+            # people will ask for 60. they will get 50, and they will like it.
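+            # (100 / 50 = 2 whole centiseconds per frame; nothing between 51 and 99
+            # divides evenly into 100)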
+ return 50 + case x if x >= 30: + return 33 + case x if x >= 24: + return 25 + case x if x >= 20: + return 20 + case x if x >= 15: + # ffmpeg will pad a few frames to make this work + return 16 + case x if x >= 12: + return 12 + case x if x >= 10: + # idk why anyone would request 11fps, but they're getting 10 + return 10 + case x if x >= 6: + # also invalid but ffmpeg will pad it + return 6 + case 4: + return 4 # FINE, I GUESS + case _: + return 1 # I don't know why you would want this, but here you go + + +class FfmpegEncoder: + def __init__( + self, + frames_dir: Path, + out_file: Path, + codec: VideoCodec, + in_fps: int = 60, + out_fps: int = 60, + lossless: bool = False, + param={}, + ): + self.frames_dir = frames_dir + self.out_file = out_file + self.codec = codec + self.in_fps = in_fps + self.out_fps = out_fps + self.lossless = lossless + self.param = param + + self.input: Optional[InputNode] = None + + def encode(self) -> tuple: + self.input: InputNode = ffmpeg.input( + str(self.frames_dir.resolve().joinpath("%08d.png")), framerate=self.in_fps + ).filter("fps", fps=self.in_fps) + match self.codec: + case VideoCodec.gif: + return self._encode_gif() + case VideoCodec.webm: + return self._encode_webm() + case VideoCodec.webp: + return self._encode_webp() + case VideoCodec.h264: + return self._encode_h264() + case VideoCodec.hevc: + return self._encode_hevc() + case _: + raise ValueError(f"Unknown codec {self.codec}") + + @property + def _out_file(self) -> Path: + return str(self.out_file.resolve()) + + @staticmethod + def _interpolate(stream, out_fps: int) -> FilterNode: + return stream.filter( + "minterpolate", fps=out_fps, mi_mode="mci", mc_mode="aobmc", me_mode="bidir", vsbmc=1 + ) + + def _encode_gif(self) -> tuple: + stream: FilterNode = self.input + + # Output FPS must be divisible by 100 for GIFs, so we clamp it + out_fps = clamp_gif_fps(self.out_fps) + if self.in_fps != out_fps: + stream = self._interpolate(stream, out_fps) + + # split into two streams for palettegen and paletteuse + split_stream = stream.split() + + # generate the palette, then use it to encode the GIF + palette = split_stream[0].filter("palettegen") + stream = ffmpeg.filter([split_stream[1], palette], "paletteuse").output( + self._out_file, vcodec="gif", loop=0 + ) + return stream.run() + + def _encode_webm(self) -> tuple: + stream: FilterNode = self.input + if self.in_fps != self.out_fps: + stream = self._interpolate(stream, self.out_fps) + param = { + "pix_fmt":"yuv420p", + "vcodec":"libvpx-vp9", + "video_bitrate":0, + "crf":24, + } + param.update(**self.param) + stream = stream.output( + self._out_file, **param + ) + return stream.run() + + def _encode_webp(self) -> tuple: + stream: FilterNode = self.input + if self.in_fps != self.out_fps: + stream = self._interpolate(stream, self.out_fps) + + if self.lossless: + param = { + "pix_fmt":"bgra", + "vcodec":"libwebp_anim", + "lossless":1, + "compression_level":5, + "qscale":75, + "loop":0, + } + param.update(**self.param) + stream = stream.output( + self._out_file, + **param + ) + else: + param = { + "pix_fmt":"yuv420p", + "vcodec":"libwebp_anim", + "lossless":0, + "compression_level":5, + "qscale":90, + "loop":0, + } + param.update(**self.param) + stream = stream.output( + self._out_file, + **param + ) + return stream.run() + + def _encode_h264(self) -> tuple: + stream: FilterNode = self.input + if self.in_fps != self.out_fps: + stream = self._interpolate(stream, self.out_fps) + + param = { + "pix_fmt":"yuv420p", + "vcodec":"libx264", + "crf":21, + 
"tune":"animation", + } + param.update(**self.param) + + stream = stream.output( + self._out_file, **param + ) + return stream.run() + + def _encode_hevc(self) -> tuple: + stream: FilterNode = self.input + if self.in_fps != self.out_fps: + stream = self._interpolate(stream, self.out_fps) + + param = { + "pix_fmt":"yuv420p", + "vcodec":"libx264", + "crf":21, + "tune":"animation", + } + param.update(**self.param) + + stream = stream.output(self._out_file, **param) + return stream.run() diff --git a/src/animatediff/rife/ncnn.py b/src/animatediff/rife/ncnn.py new file mode 100644 index 0000000000000000000000000000000000000000..8f544a649aa15f4ae54ac113625dc25592d1b5d3 --- /dev/null +++ b/src/animatediff/rife/ncnn.py @@ -0,0 +1,84 @@ +import logging +from pathlib import Path +from typing import Optional + +from pydantic import BaseModel, Field + +logger = logging.getLogger(__name__) + + +class RifeNCNNOptions(BaseModel): + model_path: Path = Field(..., description="Path to RIFE model directory") + input_path: Path = Field(..., description="Path to source frames directory") + output_path: Optional[Path] = Field(None, description="Path to output frames directory") + num_frame: Optional[int] = Field(None, description="Number of frames to generate (default N*2)") + time_step: float = Field(0.5, description="Time step for interpolation (default 0.5)", gt=0.0, le=1.0) + gpu_id: Optional[int | list[int]] = Field( + None, description="GPU ID(s) to use (default: auto, -1 for CPU)" + ) + load_threads: int = Field(1, description="Number of threads for frame loading", gt=0) + process_threads: int = Field(2, description="Number of threads used for frame processing", gt=0) + save_threads: int = Field(2, description="Number of threads for frame saving", gt=0) + spatial_tta: bool = Field(False, description="Enable spatial TTA mode") + temporal_tta: bool = Field(False, description="Enable temporal TTA mode") + uhd: bool = Field(False, description="Enable UHD mode") + verbose: bool = Field(False, description="Enable verbose logging") + + def get_args(self, frame_multiplier: int = 7) -> list[str]: + """Generate arguments to pass to rife-ncnn-vulkan. + + Frame multiplier is used to calculate the number of frames to generate, if num_frame is not set. 
+ """ + if self.output_path is None: + self.output_path = self.input_path.joinpath("out") + + # calc num frames + if self.num_frame is None: + num_src_frames = len([x for x in self.input_path.glob("*.png") if x.is_file()]) + logger.info(f"Found {num_src_frames} source frames, using multiplier {frame_multiplier}") + num_frame = num_src_frames * frame_multiplier + logger.info(f"We will generate {num_frame} frames") + else: + num_frame = self.num_frame + + # GPU ID and process threads are comma-separated lists, so we need to convert them to strings + if self.gpu_id is None: + gpu_id = "auto" + process_threads = self.process_threads + elif isinstance(self.gpu_id, list): + gpu_id = ",".join([str(x) for x in self.gpu_id]) + process_threads = ",".join([str(self.process_threads) for _ in self.gpu_id]) + else: + gpu_id = str(self.gpu_id) + process_threads = str(self.process_threads) + + # Build args list + args_list = [ + "-i", + f"{self.input_path.resolve()}/", + "-o", + f"{self.output_path.resolve()}/", + "-m", + f"{self.model_path.resolve()}/", + "-n", + num_frame, + "-s", + f"{self.time_step:.5f}", + "-g", + gpu_id, + "-j", + f"{self.load_threads}:{process_threads}:{self.save_threads}", + ] + + # Add flags if set + if self.spatial_tta: + args_list.append("-x") + if self.temporal_tta: + args_list.append("-z") + if self.uhd: + args_list.append("-u") + if self.verbose: + args_list.append("-v") + + # Convert all args to strings and return + return [str(x) for x in args_list] diff --git a/src/animatediff/rife/rife.py b/src/animatediff/rife/rife.py new file mode 100644 index 0000000000000000000000000000000000000000..fbb4cfdaf6531a4a8024a0cc8960b9100c77b47e --- /dev/null +++ b/src/animatediff/rife/rife.py @@ -0,0 +1,195 @@ +import logging +import subprocess +from math import ceil +from pathlib import Path +from typing import Annotated, Optional + +import typer + +from animatediff import get_dir + +from .ffmpeg import FfmpegEncoder, VideoCodec, codec_extn +from .ncnn import RifeNCNNOptions + +rife_dir = get_dir("data/rife") +rife_ncnn_vulkan = rife_dir.joinpath("rife-ncnn-vulkan") + +logger = logging.getLogger(__name__) + +app: typer.Typer = typer.Typer( + name="rife", + context_settings=dict(help_option_names=["-h", "--help"]), + rich_markup_mode="rich", + pretty_exceptions_show_locals=False, + help="RIFE motion flow interpolation (MORE FPS!)", +) + +def rife_interpolate( + input_frames_dir:str, + output_frames_dir:str, + frame_multiplier:int = 2, + rife_model:str = "rife-v4.6", + spatial_tta:bool = False, + temporal_tta:bool = False, + uhd:bool = False, +): + + rife_model_dir = rife_dir.joinpath(rife_model) + if not rife_model_dir.joinpath("flownet.bin").exists(): + raise FileNotFoundError(f"RIFE model dir {rife_model_dir} does not have a model in it!") + + + rife_opts = RifeNCNNOptions( + model_path=rife_model_dir, + input_path=input_frames_dir, + output_path=output_frames_dir, + time_step=1 / frame_multiplier, + spatial_tta=spatial_tta, + temporal_tta=temporal_tta, + uhd=uhd, + ) + rife_args = rife_opts.get_args(frame_multiplier=frame_multiplier) + + # actually run RIFE + logger.info("Running RIFE, this may take a little while...") + with subprocess.Popen( + [rife_ncnn_vulkan, *rife_args], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) as proc: + errs = [] + for line in proc.stderr: + line = line.decode("utf-8").strip() + if line: + logger.debug(line) + stdout, _ = proc.communicate() + if proc.returncode != 0: + raise RuntimeError(f"RIFE failed with code {proc.returncode}:\n" + "\n".join(errs)) 
+ + import glob + import os + org_images = sorted(glob.glob( os.path.join(output_frames_dir, "[0-9]*.png"), recursive=False)) + for o in org_images: + p = Path(o) + new_no = int(p.stem) - 1 + new_p = p.with_stem(f"{new_no:08d}") + p.rename(new_p) + + + +@app.command(no_args_is_help=True) +def interpolate( + rife_model: Annotated[ + str, + typer.Option("--rife-model", "-m", help="RIFE model to use (subdirectory of data/rife/)"), + ] = "rife-v4.6", + in_fps: Annotated[ + int, + typer.Option("--in-fps", "-I", help="Input frame FPS (8 for AnimateDiff)", show_default=True), + ] = 8, + frame_multiplier: Annotated[ + int, + typer.Option( + "--frame-multiplier", "-M", help="Multiply total frame count by this", show_default=True + ), + ] = 8, + out_fps: Annotated[ + int, + typer.Option("--out-fps", "-F", help="Target FPS", show_default=True), + ] = 50, + codec: Annotated[ + VideoCodec, + typer.Option("--codec", "-c", help="Output video codec", show_default=True), + ] = VideoCodec.webm, + lossless: Annotated[ + bool, + typer.Option("--lossless", "-L", is_flag=True, help="Use lossless encoding (WebP only)"), + ] = False, + spatial_tta: Annotated[ + bool, + typer.Option("--spatial-tta", "-x", is_flag=True, help="Enable RIFE Spatial TTA mode"), + ] = False, + temporal_tta: Annotated[ + bool, + typer.Option("--temporal-tta", "-z", is_flag=True, help="Enable RIFE Temporal TTA mode"), + ] = False, + uhd: Annotated[ + bool, + typer.Option("--uhd", "-u", is_flag=True, help="Enable RIFE UHD mode"), + ] = False, + frames_dir: Annotated[ + Path, + typer.Argument(path_type=Path, file_okay=False, exists=True, help="Path to source frames directory"), + ] = ..., + out_file: Annotated[ + Optional[Path], + typer.Argument( + dir_okay=False, + help="Path to output file (default: frames_dir/rife-output.)", + show_default=False, + ), + ] = None, +): + rife_model_dir = rife_dir.joinpath(rife_model) + if not rife_model_dir.joinpath("flownet.bin").exists(): + raise FileNotFoundError(f"RIFE model dir {rife_model_dir} does not have a model in it!") + + if not frames_dir.exists(): + raise FileNotFoundError(f"Frames directory {frames_dir} does not exist!") + + # where to put the RIFE interpolated frames (default: frames_dir/../-rife) + # TODO: make this configurable? + rife_frames_dir = frames_dir.parent.joinpath(f"{frames_dir.name}-rife") + rife_frames_dir.mkdir(exist_ok=True, parents=True) + + # build output file path + file_extn = codec_extn(codec) + if out_file is None: + out_file = frames_dir.parent.joinpath(f"{frames_dir.name}-rife.{file_extn}") + elif out_file.suffix != file_extn: + logger.warn("Output file extension does not match codec, changing extension") + out_file = out_file.with_suffix(file_extn) + + # build RIFE command and get args + # This doesn't need to be a Pydantic model tbh. It could just be a function/class. + rife_opts = RifeNCNNOptions( + model_path=rife_model_dir, + input_path=frames_dir, + output_path=rife_frames_dir, + time_step=1 / in_fps, # TODO: make this configurable? 
+ spatial_tta=spatial_tta, + temporal_tta=temporal_tta, + uhd=uhd, + ) + rife_args = rife_opts.get_args(frame_multiplier=frame_multiplier) + + # actually run RIFE + logger.info("Running RIFE, this may take a little while...") + with subprocess.Popen( + [rife_ncnn_vulkan, *rife_args], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) as proc: + errs = [] + for line in proc.stderr: + line = line.decode("utf-8").strip() + if line: + logger.debug(line) + stdout, _ = proc.communicate() + if proc.returncode != 0: + raise RuntimeError(f"RIFE failed with code {proc.returncode}:\n" + "\n".join(errs)) + + # now it is ffmpeg time + logger.info("Creating ffmpeg encoder...") + encoder = FfmpegEncoder( + frames_dir=rife_frames_dir, + out_file=out_file, + codec=codec, + in_fps=min(out_fps, in_fps * frame_multiplier), + out_fps=out_fps, + lossless=lossless, + ) + logger.info("Encoding interpolated frames with ffmpeg...") + result = encoder.encode() + + logger.debug(f"ffmpeg result: {result}") + + logger.info(f"Find the RIFE frames at: {rife_frames_dir.absolute().relative_to(Path.cwd())}") + logger.info(f"Find the output file at: {out_file.absolute().relative_to(Path.cwd())}") + logger.info("Done!") diff --git a/src/animatediff/schedulers.py b/src/animatediff/schedulers.py new file mode 100644 index 0000000000000000000000000000000000000000..b2935a52b09893a4027890a06d8de89502c26342 --- /dev/null +++ b/src/animatediff/schedulers.py @@ -0,0 +1,91 @@ +import logging +from enum import Enum + +from diffusers.schedulers import (DDIMScheduler, DPMSolverMultistepScheduler, + DPMSolverSinglestepScheduler, + EulerAncestralDiscreteScheduler, + EulerDiscreteScheduler, + HeunDiscreteScheduler, + KDPM2AncestralDiscreteScheduler, + KDPM2DiscreteScheduler, LCMScheduler, + LMSDiscreteScheduler, PNDMScheduler, + UniPCMultistepScheduler) + +logger = logging.getLogger(__name__) + + +# See https://github.com/huggingface/diffusers/issues/4167 for more details on sched mapping from A1111 +class DiffusionScheduler(str, Enum): + lcm = "lcm" # LCM + ddim = "ddim" # DDIM + pndm = "pndm" # PNDM + heun = "heun" # Heun + unipc = "unipc" # UniPC + euler = "euler" # Euler + euler_a = "euler_a" # Euler a + + lms = "lms" # LMS + k_lms = "k_lms" # LMS Karras + + dpm_2 = "dpm_2" # DPM2 + k_dpm_2 = "k_dpm_2" # DPM2 Karras + + dpm_2_a = "dpm_2_a" # DPM2 a + k_dpm_2_a = "k_dpm_2_a" # DPM2 a Karras + + dpmpp_2m = "dpmpp_2m" # DPM++ 2M + k_dpmpp_2m = "k_dpmpp_2m" # DPM++ 2M Karras + + dpmpp_sde = "dpmpp_sde" # DPM++ SDE + k_dpmpp_sde = "k_dpmpp_sde" # DPM++ SDE Karras + + dpmpp_2m_sde = "dpmpp_2m_sde" # DPM++ 2M SDE + k_dpmpp_2m_sde = "k_dpmpp_2m_sde" # DPM++ 2M SDE Karras + + +def get_scheduler(name: str, config: dict = {}): + is_karras = name.startswith("k_") + if is_karras: + # strip the k_ prefix and add the karras sigma flag to config + name = name.lstrip("k_") + config["use_karras_sigmas"] = True + + match name: + case DiffusionScheduler.lcm: + sched_class = LCMScheduler + case DiffusionScheduler.ddim: + sched_class = DDIMScheduler + case DiffusionScheduler.pndm: + sched_class = PNDMScheduler + case DiffusionScheduler.heun: + sched_class = HeunDiscreteScheduler + case DiffusionScheduler.unipc: + sched_class = UniPCMultistepScheduler + case DiffusionScheduler.euler: + sched_class = EulerDiscreteScheduler + case DiffusionScheduler.euler_a: + sched_class = EulerAncestralDiscreteScheduler + case DiffusionScheduler.lms: + sched_class = LMSDiscreteScheduler + case DiffusionScheduler.dpm_2: + # Equivalent to DPM2 in K-Diffusion + sched_class = 
diff --git a/src/animatediff/sdxl_models/motion_module.py b/src/animatediff/sdxl_models/motion_module.py
new file mode 100644
index 0000000000000000000000000000000000000000..6745f6104f15621006457987563d6f50b27a0eee
--- /dev/null
+++ b/src/animatediff/sdxl_models/motion_module.py
@@ -0,0 +1,431 @@
+import math
+import random
+from dataclasses import dataclass
+from typing import Callable, List, Optional, Tuple, Union
+
+import numpy as np
+import torch
+import torch.nn.functional as F
+from diffusers.configuration_utils import ConfigMixin, register_to_config
+from diffusers.models.attention import FeedForward
+from diffusers.models.attention_processor import Attention
+from diffusers.models.modeling_utils import ModelMixin
+from diffusers.utils import BaseOutput, logging
+from diffusers.utils.import_utils import is_xformers_available
+from einops import rearrange, repeat
+
+from animatediff.utils.util import zero_rank_print
+
+logger = logging.get_logger(__name__)  # pylint: disable=invalid-name
+
+
+def zero_module(module):
+    # Zero out the parameters of a module and return it.
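+    # A zeroed module contributes nothing to its residual path, so a freshly added
+    # motion module leaves the pretrained UNet's output unchanged until training
+    # moves the weights (applied to proj_out via zero_initialize below).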
+ for p in module.parameters(): + p.detach().zero_() + return module + + +@dataclass +class TemporalTransformer3DModelOutput(BaseOutput): + sample: torch.FloatTensor + + +def get_motion_module( + in_channels, + motion_module_type: str, + motion_module_kwargs: dict +): + if motion_module_type == "Vanilla": + return VanillaTemporalModule(in_channels=in_channels, **motion_module_kwargs) + elif motion_module_type == "Conv": + return ConvTemporalModule(in_channels=in_channels, **motion_module_kwargs) + else: + raise ValueError + +class VanillaTemporalModule(nn.Module): + def __init__( + self, + in_channels, + num_attention_heads = 8, + num_transformer_block = 2, + attention_block_types =( "Temporal_Self", ), + spatial_position_encoding = False, + temporal_position_encoding = True, + temporal_position_encoding_max_len = 32, + temporal_attention_dim_div = 1, + zero_initialize = True, + + causal_temporal_attention = False, + causal_temporal_attention_mask_type = "", + ): + super().__init__() + + self.temporal_transformer = TemporalTransformer3DModel( + in_channels=in_channels, + num_attention_heads=num_attention_heads, + attention_head_dim=in_channels // num_attention_heads // temporal_attention_dim_div, + num_layers=num_transformer_block, + attention_block_types=attention_block_types, + temporal_position_encoding=temporal_position_encoding, + temporal_position_encoding_max_len=temporal_position_encoding_max_len, + spatial_position_encoding = spatial_position_encoding, + causal_temporal_attention=causal_temporal_attention, + causal_temporal_attention_mask_type=causal_temporal_attention_mask_type, + ) + + if zero_initialize: + self.temporal_transformer.proj_out = zero_module(self.temporal_transformer.proj_out) + + def forward(self, input_tensor, temb=None, encoder_hidden_states=None, attention_mask=None): + + hidden_states = input_tensor + hidden_states = self.temporal_transformer(hidden_states, encoder_hidden_states, attention_mask) + + output = hidden_states + + return output + + +class TemporalTransformer3DModel(nn.Module): + def __init__( + self, + in_channels, + num_attention_heads, + attention_head_dim, + num_layers, + attention_block_types = ( "Temporal_Self", "Temporal_Self", ), + dropout = 0.0, + norm_num_groups = 32, + cross_attention_dim = 768, + activation_fn = "geglu", + attention_bias = False, + upcast_attention = False, + temporal_position_encoding = False, + temporal_position_encoding_max_len = 32, + spatial_position_encoding = False, + + causal_temporal_attention = None, + causal_temporal_attention_mask_type = "", + ): + super().__init__() + assert causal_temporal_attention is not None + self.causal_temporal_attention = causal_temporal_attention + + assert (not causal_temporal_attention) or (causal_temporal_attention_mask_type != "") + self.causal_temporal_attention_mask_type = causal_temporal_attention_mask_type + self.causal_temporal_attention_mask = None + self.spatial_position_encoding = spatial_position_encoding + inner_dim = num_attention_heads * attention_head_dim + + self.norm = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True) + self.proj_in = nn.Linear(in_channels, inner_dim) + if spatial_position_encoding: + self.pos_encoder_2d = PositionalEncoding2D(inner_dim) + + + self.transformer_blocks = nn.ModuleList( + [ + TemporalTransformerBlock( + dim=inner_dim, + num_attention_heads=num_attention_heads, + attention_head_dim=attention_head_dim, + attention_block_types=attention_block_types, + dropout=dropout, + 
+                    norm_num_groups=norm_num_groups,
+                    cross_attention_dim=cross_attention_dim,
+                    activation_fn=activation_fn,
+                    attention_bias=attention_bias,
+                    upcast_attention=upcast_attention,
+                    temporal_position_encoding=temporal_position_encoding,
+                    temporal_position_encoding_max_len=temporal_position_encoding_max_len,
+                )
+                for d in range(num_layers)
+            ]
+        )
+        self.proj_out = nn.Linear(inner_dim, in_channels)
+
+    def get_causal_temporal_attention_mask(self, hidden_states):
+        batch_size, sequence_length, dim = hidden_states.shape
+
+        if self.causal_temporal_attention_mask is None or self.causal_temporal_attention_mask.shape != (batch_size, sequence_length, sequence_length):
+            zero_rank_print(f"build attn mask of type {self.causal_temporal_attention_mask_type}")
+            if self.causal_temporal_attention_mask_type == "causal":
+                # 1. vanilla causal mask
+                mask = torch.tril(torch.ones(sequence_length, sequence_length))
+
+            elif self.causal_temporal_attention_mask_type == "2-seq":
+                # 2. each half of the sequence attends only within its own half
+                mask = torch.zeros(sequence_length, sequence_length)
+                mask[:sequence_length // 2, :sequence_length // 2] = 1
+                mask[-sequence_length // 2:, -sequence_length // 2:] = 1
+
+            elif self.causal_temporal_attention_mask_type == "0-prev":
+                # attend to the first frame and the previous frame
+                indices = torch.arange(sequence_length)
+                indices_prev = indices - 1
+                indices_prev[0] = 0
+                mask = torch.zeros(sequence_length, sequence_length)
+                mask[:, 0] = 1.
+                mask[indices, indices_prev] = 1.
+
+            elif self.causal_temporal_attention_mask_type == "0":
+                # only attend to the first frame
+                mask = torch.zeros(sequence_length, sequence_length)
+                mask[:, 0] = 1
+
+            elif self.causal_temporal_attention_mask_type == "wo-self":
+                # attend to every frame except itself
+                indices = torch.arange(sequence_length)
+                mask = torch.ones(sequence_length, sequence_length)
+                mask[indices, indices] = 0
+
+            elif self.causal_temporal_attention_mask_type == "circle":
+                # attend to self and the previous frame, with frame 0 wrapping to the last frame
+                indices = torch.arange(sequence_length)
+                indices_prev = indices - 1
+                indices_prev[0] = 0
+
+                mask = torch.eye(sequence_length)
+                mask[indices, indices_prev] = 1
+                mask[0, -1] = 1
+
+            else:
+                raise ValueError(f"Unknown causal_temporal_attention_mask_type: {self.causal_temporal_attention_mask_type}")
+
+            # for sanity check
+            if dim == 320:
+                zero_rank_print(mask)
+
+            # generate attention mask from binary values
+            mask = mask.masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
+            mask = mask.unsqueeze(0)
+            mask = mask.repeat(batch_size, 1, 1)
+
+            self.causal_temporal_attention_mask = mask.to(hidden_states.device)
+
+        return self.causal_temporal_attention_mask
+
+    def forward(self, hidden_states, encoder_hidden_states=None, attention_mask=None):
+
+        residual = hidden_states
+        assert hidden_states.dim() == 5, f"Expected hidden_states to have ndim=5, but got ndim={hidden_states.dim()}."
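+        # hidden_states is (batch, channel, frame, height, width) video latents; the
+        # temporal transformer attends over the frame axis at each spatial location.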
+ height, width = hidden_states.shape[-2:] + + hidden_states = self.norm(hidden_states) + + hidden_states = rearrange(hidden_states, "b c f h w -> (b h w) f c") + hidden_states = self.proj_in(hidden_states) + + if self.spatial_position_encoding: + + video_length = hidden_states.shape[1] + hidden_states = rearrange(hidden_states, "(b h w) f c -> (b f) h w c", h=height, w=width) + pos_encoding = self.pos_encoder_2d(hidden_states) + pos_encoding = rearrange(pos_encoding, "(b f) h w c -> (b h w) f c", f = video_length) + hidden_states = rearrange(hidden_states, "(b f) h w c -> (b h w) f c", f=video_length) + + attention_mask = self.get_causal_temporal_attention_mask(hidden_states) if self.causal_temporal_attention else attention_mask + + # Transformer Blocks + for block in self.transformer_blocks: + if not self.spatial_position_encoding : + pos_encoding = None + + hidden_states = block(hidden_states, pos_encoding=pos_encoding, encoder_hidden_states=encoder_hidden_states, attention_mask=attention_mask) + + hidden_states = self.proj_out(hidden_states) + + hidden_states = rearrange(hidden_states, "(b h w) f c -> b c f h w", h=height, w=width) + + output = hidden_states + residual + # output = hidden_states + + return output + + +class TemporalTransformerBlock(nn.Module): + def __init__( + self, + dim, + num_attention_heads, + attention_head_dim, + attention_block_types = ( "Temporal_Self", "Temporal_Self", ), + dropout = 0.0, + norm_num_groups = 32, + cross_attention_dim = 768, + activation_fn = "geglu", + attention_bias = False, + upcast_attention = False, + temporal_position_encoding = False, + temporal_position_encoding_max_len = 32, + ): + super().__init__() + + attention_blocks = [] + norms = [] + + for block_name in attention_block_types: + attention_blocks.append( + TemporalSelfAttention( + attention_mode=block_name.split("_")[0], + cross_attention_dim=cross_attention_dim if block_name.endswith("_Cross") else None, + + query_dim=dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + upcast_attention=upcast_attention, + + temporal_position_encoding=temporal_position_encoding, + temporal_position_encoding_max_len=temporal_position_encoding_max_len, + ) + ) + norms.append(nn.LayerNorm(dim)) + + self.attention_blocks = nn.ModuleList(attention_blocks) + self.norms = nn.ModuleList(norms) + + self.ff = FeedForward(dim, dropout=dropout, activation_fn=activation_fn) + self.ff_norm = nn.LayerNorm(dim) + + + def forward(self, hidden_states, pos_encoding=None, encoder_hidden_states=None, attention_mask=None): + for attention_block, norm in zip(self.attention_blocks, self.norms): + if pos_encoding is not None: + hidden_states += pos_encoding + norm_hidden_states = norm(hidden_states) + hidden_states = attention_block( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + ) + hidden_states + + hidden_states = self.ff(self.ff_norm(hidden_states)) + hidden_states + + output = hidden_states + return output + + +def get_emb(sin_inp): + """ + Gets a base embedding for one dimension with sin and cos intertwined + """ + emb = torch.stack((sin_inp.sin(), sin_inp.cos()), dim=-1) + return torch.flatten(emb, -2, -1) + +class PositionalEncoding2D(nn.Module): + def __init__(self, channels): + """ + :param channels: The last dimension of the tensor you want to apply pos emb to. 
+ """ + super(PositionalEncoding2D, self).__init__() + self.org_channels = channels + channels = int(np.ceil(channels / 4) * 2) + self.channels = channels + inv_freq = 1.0 / (10000 ** (torch.arange(0, channels, 2).float() / channels)) + self.register_buffer("inv_freq", inv_freq) + self.register_buffer("cached_penc", None) + + def forward(self, tensor): + """ + :param tensor: A 4d tensor of size (batch_size, x, y, ch) + :return: Positional Encoding Matrix of size (batch_size, x, y, ch) + """ + if len(tensor.shape) != 4: + raise RuntimeError("The input tensor has to be 4d!") + + if self.cached_penc is not None and self.cached_penc.shape == tensor.shape: + return self.cached_penc + + self.cached_penc = None + batch_size, x, y, orig_ch = tensor.shape + pos_x = torch.arange(x, device=tensor.device).type(self.inv_freq.type()) + pos_y = torch.arange(y, device=tensor.device).type(self.inv_freq.type()) + sin_inp_x = torch.einsum("i,j->ij", pos_x, self.inv_freq) + sin_inp_y = torch.einsum("i,j->ij", pos_y, self.inv_freq) + emb_x = get_emb(sin_inp_x).unsqueeze(1) + emb_y = get_emb(sin_inp_y) + emb = torch.zeros((x, y, self.channels * 2), device=tensor.device).type( + tensor.type() + ) + emb[:, :, : self.channels] = emb_x + emb[:, :, self.channels : 2 * self.channels] = emb_y + + self.cached_penc = emb[None, :, :, :orig_ch].repeat(tensor.shape[0], 1, 1, 1) + return self.cached_penc + +class PositionalEncoding(nn.Module): + def __init__( + self, + d_model, + dropout = 0., + max_len = 32, + ): + super().__init__() + self.dropout = nn.Dropout(p=dropout) + position = torch.arange(max_len).unsqueeze(1) + div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model)) + pe = torch.zeros(1, max_len, d_model) + pe[0, :, 0::2] = torch.sin(position * div_term) + pe[0, :, 1::2] = torch.cos(position * div_term) + self.register_buffer('pe', pe) + + def forward(self, x): + # if x.size(1) < 16: + # start_idx = random.randint(0, 12) + # else: + # start_idx = 0 + + x = x + self.pe[:, :x.size(1)] + return self.dropout(x) + + +class TemporalSelfAttention(Attention): + def __init__( + self, + attention_mode = None, + temporal_position_encoding = False, + temporal_position_encoding_max_len = 32, + *args, **kwargs + ): + super().__init__(*args, **kwargs) + assert attention_mode == "Temporal" + + self.pos_encoder = PositionalEncoding( + kwargs["query_dim"], + max_len=temporal_position_encoding_max_len + ) if temporal_position_encoding else None + + def set_use_memory_efficient_attention_xformers( + self, use_memory_efficient_attention_xformers: bool, attention_op: Optional[Callable] = None + ): + # disable motion module efficient xformers to avoid bad results, don't know why + # TODO: fix this bug + pass + + def forward(self, hidden_states, encoder_hidden_states=None, attention_mask=None, **cross_attention_kwargs): + # The `Attention` class can call different attention processors / attention functions + # here we simply pass along all tensors to the selected processor class + # For standard processors that are defined here, `**cross_attention_kwargs` is empty + + # add position encoding + hidden_states = self.pos_encoder(hidden_states) + + if hasattr(self.processor, "__call__"): + return self.processor.__call__( + self, + hidden_states, + encoder_hidden_states=None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + + else: + return self.processor( + self, + hidden_states, + encoder_hidden_states=None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) diff --git 
a/src/animatediff/sdxl_models/unet.py b/src/animatediff/sdxl_models/unet.py new file mode 100644 index 0000000000000000000000000000000000000000..19e08fc4fd107b754c4971c54306c6129d939452 --- /dev/null +++ b/src/animatediff/sdxl_models/unet.py @@ -0,0 +1,1260 @@ +# Copyright 2023 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import json +import os +from dataclasses import dataclass +from os import PathLike +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +import torch.nn as nn +import torch.utils.checkpoint +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.loaders import AttnProcsLayers, UNet2DConditionLoadersMixin +from diffusers.models.activations import get_activation +from diffusers.models.attention_processor import (AttentionProcessor, + AttnProcessor, + LoRAAttnProcessor) +from diffusers.models.embeddings import (GaussianFourierProjection, + ImageHintTimeEmbedding, + ImageProjection, ImageTimeEmbedding, + PositionNet, TextImageProjection, + TextImageTimeEmbedding, + TextTimeEmbedding, TimestepEmbedding, + Timesteps) +from diffusers.models.modeling_utils import ModelMixin +from diffusers.utils import BaseOutput, logging +from einops import rearrange, repeat + +from animatediff.utils.util import zero_rank_print + +from .unet_blocks import UNetMidBlock3DCrossAttn, get_down_block, get_up_block + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +@dataclass +class UNet3DConditionOutput(BaseOutput): + """ + The output of [`UNet3DConditionModel`]. + + Args: + sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`): + The hidden states output conditioned on `encoder_hidden_states` input. Output of last layer of model. + """ + + sample: torch.FloatTensor = None + + +class UNet3DConditionModel(ModelMixin, ConfigMixin, UNet2DConditionLoadersMixin): + r""" + A conditional 3D UNet model that takes a noisy sample, conditional state, and a timestep and returns a sample + shaped output. + + This model inherits from [`ModelMixin`]. Check the superclass documentation for it's generic methods implemented + for all models (such as downloading or saving). + + Parameters: + sample_size (`int` or `Tuple[int, int]`, *optional*, defaults to `None`): + Height and width of input/output sample. + in_channels (`int`, *optional*, defaults to 4): Number of channels in the input sample. + out_channels (`int`, *optional*, defaults to 4): Number of channels in the output. + center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. + flip_sin_to_cos (`bool`, *optional*, defaults to `False`): + Whether to flip the sin to cos in the time embedding. + freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. 
+ down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): + The tuple of downsample blocks to use. + mid_block_type (`str`, *optional*, defaults to `"UNetMidBlock2DCrossAttn"`): + Block type for middle of UNet, it can be either `UNetMidBlock2DCrossAttn` or + `UNetMidBlock2DSimpleCrossAttn`. If `None`, the mid block layer is skipped. + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D")`): + The tuple of upsample blocks to use. + only_cross_attention(`bool` or `Tuple[bool]`, *optional*, default to `False`): + Whether to include self-attention in the basic transformer blocks, see + [`~models.attention.BasicTransformerBlock`]. + block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): + The tuple of output channels for each block. + layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. + downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. + mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. + act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. + norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. + If `None`, normalization and activation layers is skipped in post-processing. + norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. + cross_attention_dim (`int` or `Tuple[int]`, *optional*, defaults to 1280): + The dimension of the cross attention features. + transformer_layers_per_block (`int` or `Tuple[int]`, *optional*, defaults to 1): + The number of transformer blocks of type [`~models.attention.BasicTransformerBlock`]. Only relevant for + [`~models.unet_2d_blocks.CrossAttnDownBlock2D`], [`~models.unet_2d_blocks.CrossAttnUpBlock2D`], + [`~models.unet_2d_blocks.UNetMidBlock2DCrossAttn`]. + encoder_hid_dim (`int`, *optional*, defaults to None): + If `encoder_hid_dim_type` is defined, `encoder_hidden_states` will be projected from `encoder_hid_dim` + dimension to `cross_attention_dim`. + encoder_hid_dim_type (`str`, *optional*, defaults to `None`): + If given, the `encoder_hidden_states` and potentially other embeddings are down-projected to text + embeddings of dimension `cross_attention` according to `encoder_hid_dim_type`. + attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. + num_attention_heads (`int`, *optional*): + The number of attention heads. If not defined, defaults to `attention_head_dim` + resnet_time_scale_shift (`str`, *optional*, defaults to `"default"`): Time scale shift config + for ResNet blocks (see [`~models.resnet.ResnetBlock2D`]). Choose from `default` or `scale_shift`. + class_embed_type (`str`, *optional*, defaults to `None`): + The type of class embedding to use which is ultimately summed with the time embeddings. Choose from `None`, + `"timestep"`, `"identity"`, `"projection"`, or `"simple_projection"`. + addition_embed_type (`str`, *optional*, defaults to `None`): + Configures an optional embedding which will be summed with the time embeddings. Choose from `None` or + "text". "text" will use the `TextTimeEmbedding` layer. + addition_time_embed_dim: (`int`, *optional*, defaults to `None`): + Dimension for the timestep embeddings. 
+ num_class_embeds (`int`, *optional*, defaults to `None`): + Input dimension of the learnable embedding matrix to be projected to `time_embed_dim`, when performing + class conditioning with `class_embed_type` equal to `None`. + time_embedding_type (`str`, *optional*, defaults to `positional`): + The type of position embedding to use for timesteps. Choose from `positional` or `fourier`. + time_embedding_dim (`int`, *optional*, defaults to `None`): + An optional override for the dimension of the projected time embedding. + time_embedding_act_fn (`str`, *optional*, defaults to `None`): + Optional activation function to use only once on the time embeddings before they are passed to the rest of + the UNet. Choose from `silu`, `mish`, `gelu`, and `swish`. + timestep_post_act (`str`, *optional*, defaults to `None`): + The second activation function to use in timestep embedding. Choose from `silu`, `mish` and `gelu`. + time_cond_proj_dim (`int`, *optional*, defaults to `None`): + The dimension of `cond_proj` layer in the timestep embedding. + conv_in_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_in` layer. + conv_out_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_out` layer. + projection_class_embeddings_input_dim (`int`, *optional*): The dimension of the `class_labels` input when + `class_embed_type="projection"`. Required when `class_embed_type="projection"`. + class_embeddings_concat (`bool`, *optional*, defaults to `False`): Whether to concatenate the time + embeddings with the class embeddings. + mid_block_only_cross_attention (`bool`, *optional*, defaults to `None`): + Whether to use cross attention with the mid block when using the `UNetMidBlock2DSimpleCrossAttn`. If + `only_cross_attention` is given as a single boolean and `mid_block_only_cross_attention` is `None`, the + `only_cross_attention` value is used as the value for `mid_block_only_cross_attention`. Default to `False` + otherwise. 
+ """ + + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + sample_size: Optional[int] = None, + in_channels: int = 4, + out_channels: int = 4, + center_input_sample: bool = False, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str] = ( + "CrossAttnDownBlock3D", + "CrossAttnDownBlock3D", + "CrossAttnDownBlock3D", + "DownBlock3D", + ), + mid_block_type: Optional[str] = "UNetMidBlock3DCrossAttn", + up_block_types: Tuple[str] = ("UpBlock3D", "CrossAttnUpBlock3D", "CrossAttnUpBlock3D", "CrossAttnUpBlock3D"), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int] = (320, 640, 1280, 1280), + layers_per_block: Union[int, Tuple[int]] = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: Union[int, Tuple[int]] = 1280, + transformer_layers_per_block: Union[int, Tuple[int]] = 1, + encoder_hid_dim: Optional[int] = None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int]]] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + resnet_skip_time_act: bool = False, + resnet_out_scale_factor: int = 1.0, + time_embedding_type: str = "positional", + time_embedding_dim: Optional[int] = None, + time_embedding_act_fn: Optional[str] = None, + timestep_post_act: Optional[str] = None, + time_cond_proj_dim: Optional[int] = None, + conv_in_kernel: int = 3, + conv_out_kernel: int = 3, + projection_class_embeddings_input_dim: Optional[int] = None, + attention_type: str = "default", + class_embeddings_concat: bool = False, + mid_block_only_cross_attention: Optional[bool] = None, + cross_attention_norm: Optional[str] = None, + addition_embed_type_num_heads=64, + + # motion module + use_motion_module=False, + motion_module_resolutions = (1,2,4,8), + motion_module_mid_block = False, + motion_module_decoder_only = False, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + self.sample_size = sample_size + + if num_attention_heads is not None: + raise ValueError( + "At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19." + ) + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. 
+ num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." + ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(attention_head_dim, int) and len(attention_head_dim) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `attention_head_dim` as `down_block_types`. `attention_head_dim`: {attention_head_dim}. `down_block_types`: {down_block_types}." + ) + + if isinstance(cross_attention_dim, list) and len(cross_attention_dim) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `cross_attention_dim` as `down_block_types`. `cross_attention_dim`: {cross_attention_dim}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(layers_per_block, int) and len(layers_per_block) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `layers_per_block` as `down_block_types`. `layers_per_block`: {layers_per_block}. `down_block_types`: {down_block_types}." + ) + + # input + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in = nn.Conv2d( + in_channels, block_out_channels[0], kernel_size=conv_in_kernel, padding=conv_in_padding + ) + + # time + if time_embedding_type == "fourier": + time_embed_dim = time_embedding_dim or block_out_channels[0] * 2 + if time_embed_dim % 2 != 0: + raise ValueError(f"`time_embed_dim` should be divisible by 2, but is {time_embed_dim}.") + self.time_proj = GaussianFourierProjection( + time_embed_dim // 2, set_W_to_weight=False, log=False, flip_sin_to_cos=flip_sin_to_cos + ) + timestep_input_dim = time_embed_dim + elif time_embedding_type == "positional": + time_embed_dim = time_embedding_dim or block_out_channels[0] * 4 + + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + else: + raise ValueError( + f"{time_embedding_type} does not exist. Please make sure to use one of `fourier` or `positional`." 
+ ) + + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + post_act_fn=timestep_post_act, + cond_proj_dim=time_cond_proj_dim, + ) + + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + logger.info("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when `encoder_hid_dim_type` is set to {encoder_hid_dim_type}." + ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kadinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + elif encoder_hid_dim_type == "image_proj": + # Kandinsky 2.2 + self.encoder_hid_proj = ImageProjection( + image_embed_dim=encoder_hid_dim, + cross_attention_dim=cross_attention_dim, + ) + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." + ) + else: + self.encoder_hid_proj = None + + # class embedding + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim, act_fn=act_fn) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. 
+ self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + elif class_embed_type == "simple_projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'simple_projection' requires `projection_class_embeddings_input_dim` be set" + ) + self.class_embedding = nn.Linear(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kadinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + elif addition_embed_type == "image": + # Kandinsky 2.2 + self.add_embedding = ImageTimeEmbedding(image_embed_dim=encoder_hid_dim, time_embed_dim=time_embed_dim) + elif addition_embed_type == "image_hint": + # Kandinsky 2.2 ControlNet + self.add_embedding = ImageHintTimeEmbedding(image_embed_dim=encoder_hid_dim, time_embed_dim=time_embed_dim) + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + if time_embedding_act_fn is None: + self.time_embed_act = None + else: + self.time_embed_act = get_activation(time_embedding_act_fn) + + self.down_blocks = nn.ModuleList([]) + self.up_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + if mid_block_only_cross_attention is None: + mid_block_only_cross_attention = only_cross_attention + + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if mid_block_only_cross_attention is None: + mid_block_only_cross_attention = False + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(cross_attention_dim, int): + cross_attention_dim = (cross_attention_dim,) * len(down_block_types) + + if isinstance(layers_per_block, int): + layers_per_block = [layers_per_block] * len(down_block_types) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + if class_embeddings_concat: + # The time embeddings are concatenated with the class embeddings. 
The dimension of the + # time embeddings passed to the down, middle, and up blocks is twice the dimension of the + # regular time embeddings + blocks_time_embed_dim = time_embed_dim * 2 + else: + blocks_time_embed_dim = time_embed_dim + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + res = 2 ** i + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block[i], + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=blocks_time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim[i], + num_attention_heads=num_attention_heads[i], + downsample_padding=downsample_padding, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + resnet_out_scale_factor=resnet_out_scale_factor, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + use_motion_module=use_motion_module and (res in motion_module_resolutions) and (not motion_module_decoder_only), + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + self.down_blocks.append(down_block) + + # mid + if mid_block_type == "UNetMidBlock3DCrossAttn": + self.mid_block = UNetMidBlock3DCrossAttn( + transformer_layers_per_block=transformer_layers_per_block[-1], + in_channels=block_out_channels[-1], + temb_channels=blocks_time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + cross_attention_dim=cross_attention_dim[-1], + num_attention_heads=num_attention_heads[-1], + resnet_groups=norm_num_groups, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + attention_type=attention_type, + use_motion_module=use_motion_module and motion_module_mid_block, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + elif mid_block_type is None: + self.mid_block = None + else: + raise ValueError(f"unknown mid_block_type : {mid_block_type}") + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_layers_per_block = list(reversed(layers_per_block)) + reversed_cross_attention_dim = list(reversed(cross_attention_dim)) + reversed_transformer_layers_per_block = list(reversed(transformer_layers_per_block)) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + res = 2 ** (2 - i) + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # 
add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=reversed_layers_per_block[i] + 1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=blocks_time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=reversed_cross_attention_dim[i], + num_attention_heads=reversed_num_attention_heads[i], + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + resnet_out_scale_factor=resnet_out_scale_factor, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + use_motion_module=use_motion_module and (res in motion_module_resolutions), + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + if norm_num_groups is not None: + self.conv_norm_out = nn.GroupNorm( + num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps + ) + + self.conv_act = get_activation(act_fn) + + else: + self.conv_norm_out = None + self.conv_act = None + + conv_out_padding = (conv_out_kernel - 1) // 2 + self.conv_out = nn.Conv2d( + block_out_channels[0], out_channels, kernel_size=conv_out_kernel, padding=conv_out_padding + ) + + if attention_type == "gated": + positive_len = 768 + if isinstance(cross_attention_dim, int): + positive_len = cross_attention_dim + elif isinstance(cross_attention_dim, tuple) or isinstance(cross_attention_dim, list): + positive_len = cross_attention_dim[0] + self.position_net = PositionNet(positive_len=positive_len, out_dim=cross_attention_dim) + + def set_image_layer_lora(self, image_layer_lora_rank: int = 128): + lora_attn_procs = {} + for name in self.attn_processors.keys(): + zero_rank_print(f"(add lora) {name}") + cross_attention_dim = None if name.endswith("attn1.processor") else self.config.cross_attention_dim + if name.startswith("mid_block"): + hidden_size = self.config.block_out_channels[-1] + elif name.startswith("up_blocks"): + block_id = int(name[len("up_blocks.")]) + hidden_size = list(reversed(self.config.block_out_channels))[block_id] + elif name.startswith("down_blocks"): + block_id = int(name[len("down_blocks.")]) + hidden_size = self.config.block_out_channels[block_id] + + lora_attn_procs[name] = LoRAAttnProcessor( + hidden_size=hidden_size, + cross_attention_dim=cross_attention_dim, + rank=image_layer_lora_rank if image_layer_lora_rank > 16 else hidden_size // image_layer_lora_rank, + ) + self.set_attn_processor(lora_attn_procs) + + lora_layers = AttnProcsLayers(self.attn_processors) + zero_rank_print(f"(lora parameters): {sum(p.numel() for p in lora_layers.parameters()) / 1e6:.3f} M") + del lora_layers + + def set_image_layer_lora_scale(self, lora_scale: float = 1.0): + for block in self.down_blocks: setattr(block, "lora_scale", lora_scale) + for block in self.up_blocks: setattr(block, "lora_scale", lora_scale) + setattr(self.mid_block, 
"lora_scale", lora_scale) + + @property + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. + """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]): + if hasattr(module, "set_processor"): + if not "motion_modules." in name: + processors[f"{name}.processor"] = module.processor + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]], is_motion_module=False): + r""" + Sets the attention processor to use to compute attention. + + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) if not is_motion_module else len(self.motion_module_attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." + ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if ((not is_motion_module) and (not "motion_modules." in name)) or (is_motion_module and ("motion_modules." in name)): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. + """ + self.set_attn_processor(AttnProcessor()) + + @property + def motion_module_attn_processors(self): + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]): + # filter out processors in motion module + if hasattr(module, "set_processor"): + if "motion_modules." 
in name: + processors[f"{name}.processor"] = module.processor + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + def set_motion_module_lora(self, motion_module_lora_rank: int = 256, motion_lora_resolution=[32, 64, 128]): + lora_attn_procs = {} + #motion_name = [] + #if 32 in motion_lora_resolution: + # motion_name.append('up_blocks.0') + # motion_name.append('down_blocks.2') + # if 64 in motion_lora_resolution: + # motion_name.append('up_blocks.1') + # motion_name.append('down_blocks.1') + # if 128 in motion_lora_resolution: + # motion_name.append('up_blocks.2') + # motion_name.append('down_blocks.0') + for name in self.motion_module_attn_processors.keys(): + #prefix = '.'.join(name.split('.')[:2]) + #if prefix not in motion_name: + # continue + print(f"(add motion lora) {name}") + + if name.startswith("mid_block"): + hidden_size = self.config.block_out_channels[-1] + elif name.startswith("up_blocks"): + block_id = int(name[len("up_blocks.")]) + hidden_size = list(reversed(self.config.block_out_channels))[block_id] + elif name.startswith("down_blocks"): + block_id = int(name[len("down_blocks.")]) + hidden_size = self.config.block_out_channels[block_id] + + lora_attn_procs[name] = LoRAAttnProcessor( + hidden_size=hidden_size, + cross_attention_dim=None, + rank=motion_module_lora_rank, + ) + self.set_attn_processor(lora_attn_procs, is_motion_module=True) + + lora_layers = AttnProcsLayers(self.motion_module_attn_processors) + print(f"(motion lora parameters): {sum(p.numel() for p in lora_layers.parameters()) / 1e6:.3f} M") + del lora_layers + + + + def set_attention_slice(self, slice_size): + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. 
+ """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. + # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value=False): + if hasattr(module, "gradient_checkpointing"): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + down_block_additional_residuals: Optional[Tuple[torch.Tensor]] = None, + mid_block_additional_residual: Optional[torch.Tensor] = None, + encoder_attention_mask: Optional[torch.Tensor] = None, + return_dict: bool = True, + ) -> Union[UNet3DConditionOutput, Tuple]: + r""" + The [`UNet2DConditionModel`] forward method. + + Args: + sample (`torch.FloatTensor`): + The noisy input tensor with the following shape `(batch, channel, height, width)`. + timestep (`torch.FloatTensor` or `float` or `int`): The number of timesteps to denoise an input. + encoder_hidden_states (`torch.FloatTensor`): + The encoder hidden states with shape `(batch, sequence_length, feature_dim)`. + encoder_attention_mask (`torch.Tensor`): + A cross-attention mask of shape `(batch, sequence_length)` is applied to `encoder_hidden_states`. If + `True` the mask is kept, otherwise if `False` it is discarded. Mask will be converted into a bias, + which adds large negative values to the attention scores corresponding to "discard" tokens. 
+            return_dict (`bool`, *optional*, defaults to `True`):
+                Whether or not to return a [`~models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain
+                tuple.
+            cross_attention_kwargs (`dict`, *optional*):
+                A kwargs dictionary that if specified is passed along to the [`AttnProcessor`].
+            added_cond_kwargs: (`dict`, *optional*):
+                A kwargs dictionary containing additional embeddings that if specified are added to the embeddings that
+                are passed along to the UNet blocks.
+
+        Returns:
+            [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`:
+                If `return_dict` is True, an [`~models.unet_2d_condition.UNet2DConditionOutput`] is returned, otherwise
+                a `tuple` is returned where the first element is the sample tensor.
+        """
+
+        # By default samples have to be at least a multiple of the overall upsampling factor.
+        # The overall upsampling factor is equal to 2 ** (# num of upsampling layers).
+        # However, the upsampling interpolation output size can be forced to fit any upsampling size
+        # on the fly if necessary.
+        default_overall_up_factor = 2**self.num_upsamplers
+
+        # broadcast the timestep and the SDXL size/text embeddings across the frame axis
+        video_length = sample.shape[2]
+        timestep = repeat(timestep, "b -> (b f)", f=video_length)
+        if encoder_hidden_states.shape[0] < video_length:
+            encoder_hidden_states = repeat(encoder_hidden_states, "b n c -> (b f) n c", f=video_length)
+        added_cond_kwargs['time_ids'] = repeat(added_cond_kwargs['time_ids'], "b c -> (b f) c", f=video_length)
+        if added_cond_kwargs['text_embeds'].shape[0] < video_length:
+            added_cond_kwargs['text_embeds'] = repeat(added_cond_kwargs['text_embeds'], "b c -> (b f) c", f=video_length)
+
+        # sample = rearrange(sample, "b c f h w -> (b f) c h w")
+
+        # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor`
+        forward_upsample_size = False
+        upsample_size = None
+
+        if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]):
+            logger.info("Forward upsample size to force interpolation output size.")
+            forward_upsample_size = True
+
+        # ensure attention_mask is a bias, and give it a singleton query_tokens dimension
+        # expects mask of shape:
+        #   [batch, key_tokens]
+        # adds singleton query_tokens dimension:
+        #   [batch,                    1, key_tokens]
+        # this helps to broadcast it as a bias over attention scores, which will be in one of the following shapes:
+        #   [batch,  heads, query_tokens, key_tokens] (e.g. torch sdp attn)
+        #   [batch * heads, query_tokens, key_tokens] (e.g. xformers or classic attn)
+        if attention_mask is not None:
+            # assume that mask is expressed as:
+            #   (1 = keep,      0 = discard)
+            # convert mask into a bias that can be added to attention scores:
+            #   (keep = +0,     discard = -10000.0)
+            attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0
+            attention_mask = attention_mask.unsqueeze(1)
+
+        # convert encoder_attention_mask to a bias the same way we do for attention_mask
+        if encoder_attention_mask is not None:
+            encoder_attention_mask = (1 - encoder_attention_mask.to(sample.dtype)) * -10000.0
+            encoder_attention_mask = encoder_attention_mask.unsqueeze(1)
+
+        # 0. center input if necessary
+        if self.config.center_input_sample:
+            sample = 2 * sample - 1.0
+
+        # 1. time
+        timesteps = timestep
+        if not torch.is_tensor(timesteps):
+            # TODO: this requires sync between CPU and GPU. 
So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + # timesteps = timesteps + + + t_emb = self.time_proj(timesteps) + + # `Timesteps` does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=sample.dtype) + + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + # `Timesteps` does not contain any weights and will always return f32 tensors + # there might be better ways to encapsulate this. + class_labels = class_labels.to(dtype=sample.dtype) + + class_emb = self.class_embedding(class_labels).to(dtype=sample.dtype) + + if self.config.class_embeddings_concat: + emb = torch.cat([emb, class_emb], dim=-1) + else: + emb = emb + class_emb + + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + elif self.config.addition_embed_type == "text_image": + # Kandinsky 2.1 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_image' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`" + ) + + image_embs = added_cond_kwargs.get("image_embeds") + text_embs = added_cond_kwargs.get("text_embeds", encoder_hidden_states) + aug_emb = self.add_embedding(text_embs, image_embs) + elif self.config.addition_embed_type == "text_time": + + # SDXL - style + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + elif self.config.addition_embed_type == "image": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'image' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`" + ) + image_embs = added_cond_kwargs.get("image_embeds") + aug_emb = self.add_embedding(image_embs) + elif self.config.addition_embed_type == "image_hint": + # 
Kandinsky 2.2 - style
+            if "image_embeds" not in added_cond_kwargs or "hint" not in added_cond_kwargs:
+                raise ValueError(
+                    f"{self.__class__} has the config param `addition_embed_type` set to 'image_hint' which requires the keyword arguments `image_embeds` and `hint` to be passed in `added_cond_kwargs`"
+                )
+            image_embs = added_cond_kwargs.get("image_embeds")
+            hint = added_cond_kwargs.get("hint")
+            aug_emb, hint = self.add_embedding(image_embs, hint)
+            sample = torch.cat([sample, hint], dim=1)
+
+        emb = emb + aug_emb if aug_emb is not None else emb
+
+        if self.time_embed_act is not None:
+            emb = self.time_embed_act(emb)
+
+        if self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "text_proj":
+            encoder_hidden_states = self.encoder_hid_proj(encoder_hidden_states)
+        elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "text_image_proj":
+            # Kandinsky 2.1 - style
+            if "image_embeds" not in added_cond_kwargs:
+                raise ValueError(
+                    f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'text_image_proj' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`"
+                )
+            image_embeds = added_cond_kwargs.get("image_embeds")
+            encoder_hidden_states = self.encoder_hid_proj(encoder_hidden_states, image_embeds)
+        elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "image_proj":
+            # Kandinsky 2.2 - style
+            if "image_embeds" not in added_cond_kwargs:
+                raise ValueError(
+                    f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'image_proj' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`"
+                )
+            image_embeds = added_cond_kwargs.get("image_embeds")
+            encoder_hidden_states = self.encoder_hid_proj(image_embeds)
+
+        # 2. pre-process
+        video_length = sample.shape[2]
+        sample = rearrange(sample, "b c f h w -> (b f) c h w")
+        sample = self.conv_in(sample)
+        sample = rearrange(sample, "(b f) c h w -> b c f h w", f=video_length)
+
+        # 2.5 GLIGEN position net
+        if cross_attention_kwargs is not None and cross_attention_kwargs.get("gligen", None) is not None:
+            cross_attention_kwargs = cross_attention_kwargs.copy()
+            gligen_args = cross_attention_kwargs.pop("gligen")
+            cross_attention_kwargs["gligen"] = {"objs": self.position_net(**gligen_args)}
+
+        # 3.
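+        # (The (b f) folding used throughout treats frames as extra batch items so the
+        #  2D blocks run unchanged; a self-contained einops sketch:
+        #      x = torch.randn(2, 320, 16, 64, 64)                  # b c f h w
+        #      y = rearrange(x, "b c f h w -> (b f) c h w")         # -> (32, 320, 64, 64)
+        #      x2 = rearrange(y, "(b f) c h w -> b c f h w", f=16)  # round-trips exactly
+        #  )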
down + + is_controlnet = mid_block_additional_residual is not None and down_block_additional_residuals is not None + is_adapter = mid_block_additional_residual is None and down_block_additional_residuals is not None + + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + # For t2i-adapter CrossAttnDownBlock2D + additional_residuals = {} + if is_adapter and len(down_block_additional_residuals) > 0: + additional_residuals["additional_residuals"] = down_block_additional_residuals.pop(0) + + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + **additional_residuals, + ) + else: + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + + if is_adapter and len(down_block_additional_residuals) > 0: + sample += down_block_additional_residuals.pop(0) + + down_block_res_samples += res_samples + + if is_controlnet: + new_down_block_res_samples = () + + for down_block_res_sample, down_block_additional_residual in zip( + down_block_res_samples, down_block_additional_residuals + ): + down_block_res_sample = down_block_res_sample + down_block_additional_residual + new_down_block_res_samples = new_down_block_res_samples + (down_block_res_sample,) + + down_block_res_samples = new_down_block_res_samples + + + # 4. mid + if self.mid_block is not None: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + # To support T2I-Adapter-XL + if ( + is_adapter + and len(down_block_additional_residuals) > 0 + and sample.shape == down_block_additional_residuals[0].shape + ): + sample += down_block_additional_residuals.pop(0) + + if is_controlnet: + sample = sample + mid_block_additional_residual + + # 5. up + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block and forward_upsample_size: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + ) + else: + sample = upsample_block( + hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size + ) + + video_length = sample.shape[2] + sample = rearrange(sample, "b c f h w -> (b f) c h w") + + + # 6. 
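+        # (In steps 3-4 above, ControlNet residuals pair one-to-one with the skip
+        #  tensors; a hedged caller-side sketch, names hypothetical:
+        #      down_res, mid_res = controlnet(sample, t, encoder_hidden_states, cond_image)
+        #      out = unet(sample, t, encoder_hidden_states,
+        #                 down_block_additional_residuals=down_res,
+        #                 mid_block_additional_residual=mid_res)
+        #  )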
post-process
+        if self.conv_norm_out:
+            sample = self.conv_norm_out(sample)
+            sample = self.conv_act(sample)
+        sample = self.conv_out(sample)
+
+        sample = rearrange(sample, "(b f) c h w -> b c f h w", f=video_length)
+
+        if not return_dict:
+            return (sample,)
+
+        return UNet3DConditionOutput(sample=sample)
+
+    @classmethod
+    def from_pretrained_2d(cls, pretrained_model_path, motion_module_path, subfolder=None, unet_additional_kwargs=None):
+        from safetensors.torch import load_file
+
+        if subfolder is not None:
+            pretrained_model_path = os.path.join(pretrained_model_path, subfolder)
+        logger.info(f"loading temporal unet's pretrained weights from {pretrained_model_path} ...")
+
+        config_file = os.path.join(pretrained_model_path, 'config.json')
+        if not os.path.isfile(config_file):
+            raise RuntimeError(f"{config_file} does not exist")
+        with open(config_file, "r") as f:
+            config = json.load(f)
+        config["_class_name"] = cls.__name__
+        config["down_block_types"] = [
+            "DownBlock3D",
+            "CrossAttnDownBlock3D",
+            "CrossAttnDownBlock3D",
+        ]
+        config["up_block_types"] = [
+            "CrossAttnUpBlock3D",
+            "CrossAttnUpBlock3D",
+            "UpBlock3D",
+        ]
+        config["mid_block_type"] = "UNetMidBlock3DCrossAttn"
+
+        from diffusers.utils import SAFETENSORS_WEIGHTS_NAME
+
+        model = cls.from_config(config, **unet_additional_kwargs)
+        model.to(torch.float16)
+        model_file = os.path.join(pretrained_model_path, SAFETENSORS_WEIGHTS_NAME)
+        if not os.path.isfile(model_file):
+            raise RuntimeError(f"{model_file} does not exist")
+
+        # load the vanilla weights
+        logger.debug(f"loading safetensors weights from {pretrained_model_path} ...")
+        state_dict = load_file(model_file, device="cpu")
+
+        # load the motion module weights
+        if motion_module_path.exists() and motion_module_path.is_file():
+            if motion_module_path.suffix.lower() in [".pth", ".pt", ".ckpt"]:
+                motion_state_dict = torch.load(motion_module_path, map_location="cpu", weights_only=True)
+            elif motion_module_path.suffix.lower() == ".safetensors":
+                motion_state_dict = load_file(motion_module_path, device="cpu")
+            else:
+                raise RuntimeError(
+                    f"unknown file format for motion module weights: {motion_module_path.suffix}"
+                )
+        else:
+            raise FileNotFoundError(f"no motion module weights found in {motion_module_path}")
+
+        # merge the state dicts
+        state_dict.update(motion_state_dict)
+
+        # load the weights into the model
+        m, u = model.load_state_dict(state_dict, strict=False)
+        logger.debug(f"### missing keys: {len(m)}; \n### unexpected keys: {len(u)};")
+
+        params = [p.numel() if "temporal" in n else 0 for n, p in model.named_parameters()]
+        logger.info(f"### Temporal Module Parameters: {sum(params) / 1e6} M")
+
+        return model
+
+    @classmethod
+    def from_pretrained_2d_org(cls, pretrained_model_path, subfolder=None, unet_additional_kwargs=None):
+        if subfolder is not None:
+            pretrained_model_path = os.path.join(pretrained_model_path, subfolder)
+        print(f"loading temporal unet's pretrained weights from {pretrained_model_path} ...")
+
+        config_file = os.path.join(pretrained_model_path, 'config.json')
+        if not os.path.isfile(config_file):
+            raise RuntimeError(f"{config_file} does not exist")
+        with open(config_file, "r") as f:
+            config = json.load(f)
+        config["_class_name"] = cls.__name__
+        config["down_block_types"] = [
+            "DownBlock3D",
+            "CrossAttnDownBlock3D",
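+            # (Usage sketch for the loaders above; paths are hypothetical:
+            #      unet = UNet3DConditionModel.from_pretrained_2d(
+            #          "models/stable-diffusion", Path("models/motion/mm.safetensors"),
+            #          subfolder="unet", unet_additional_kwargs={},
+            #      )
+            #  )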
"CrossAttnDownBlock3D", + + ] + config["up_block_types"] = [ + "CrossAttnUpBlock3D", + "CrossAttnUpBlock3D", + "UpBlock3D", + ] + + config["mid_block_type"] = "UNetMidBlock3DCrossAttn" + from diffusers.utils import SAFETENSORS_WEIGHTS_NAME + model = cls.from_config(config, **unet_additional_kwargs) + model_file = os.path.join(pretrained_model_path, SAFETENSORS_WEIGHTS_NAME) + if not os.path.isfile(model_file): + raise RuntimeError(f"{model_file} does not exist") + + state_dict = {} + from safetensors import safe_open + with safe_open(model_file, framework='pt', device="cpu") as f: + for k in f.keys(): + state_dict[k] = f.get_tensor(k) + + m, u = model.load_state_dict(state_dict, strict=False) + print(f"### missing keys: {len(m)}; \n### unexpected keys: {len(u)};") + # print(f"### missing keys:\n{m}\n### unexpected keys:\n{u}\n") + + del state_dict + + params = [p.numel() if "temporal" in n else 0 for n, p in model.named_parameters()] + print(f"### Temporal Module Parameters: {sum(params) / 1e6} M") + + return model diff --git a/src/animatediff/sdxl_models/unet_blocks.py b/src/animatediff/sdxl_models/unet_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..d5f34276cf3c9e52dab686d46075f4442a168b14 --- /dev/null +++ b/src/animatediff/sdxl_models/unet_blocks.py @@ -0,0 +1,942 @@ +from typing import Any, Dict, Optional, Tuple + +import numpy as np +import torch +import torch.nn.functional as F +from diffusers.models.activations import get_activation +from diffusers.models.attention_processor import (Attention, + AttnAddedKVProcessor, + AttnAddedKVProcessor2_0) +from diffusers.models.normalization import AdaGroupNorm +from diffusers.models.resnet import (Downsample2D, FirDownsample2D, + FirUpsample2D, KDownsample2D, KUpsample2D, + ResnetBlock2D, Upsample2D) +from diffusers.models.transformer_2d import Transformer2DModel +from diffusers.utils import is_torch_version, logging +from einops import rearrange +from torch import nn + +from .motion_module import get_motion_module + +TORCH_CLEAR_CACHE = False + + + +def clear_cache(): + if TORCH_CLEAR_CACHE: + + import inspect + callerframerecord = inspect.stack()[1] + frame = callerframerecord[0] + info = inspect.getframeinfo(frame) + + import time + + import GPUtil + torch.cuda.synchronize() + + logger.info(f"{info.filename}/{info.lineno}/before clear cache") + GPUtil.showUtilization() + + torch.cuda.empty_cache() + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + +def get_down_block( + down_block_type, + num_layers, + in_channels, + out_channels, + temb_channels, + add_downsample, + resnet_eps, + resnet_act_fn, + transformer_layers_per_block=1, + num_attention_heads=None, + resnet_groups=None, + cross_attention_dim=None, + downsample_padding=None, + dual_cross_attention=False, + use_linear_projection=False, + only_cross_attention=False, + upcast_attention=False, + resnet_time_scale_shift="default", + attention_type="default", + resnet_skip_time_act=False, + resnet_out_scale_factor=1.0, + cross_attention_norm=None, + attention_head_dim=None, + downsample_type=None, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, +): + # If attn head dim is not defined, we default it to the number of heads + if attention_head_dim is None: + logger.warn( + f"It is recommended to provide `attention_head_dim` when calling `get_down_block`. Defaulting `attention_head_dim` to {num_attention_heads}." 
+ ) + attention_head_dim = num_attention_heads + + down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type + if down_block_type == "DownBlock3D": + return DownBlock3D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + use_motion_module=use_motion_module, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + elif down_block_type == "CrossAttnDownBlock3D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") + return CrossAttnDownBlock3D( + num_layers=num_layers, + transformer_layers_per_block=transformer_layers_per_block, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + use_motion_module=use_motion_module, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + raise ValueError(f"{down_block_type} does not exist.") + + +def get_up_block( + up_block_type, + num_layers, + in_channels, + out_channels, + prev_output_channel, + temb_channels, + add_upsample, + resnet_eps, + resnet_act_fn, + transformer_layers_per_block=1, + num_attention_heads=None, + resnet_groups=None, + cross_attention_dim=None, + dual_cross_attention=False, + use_linear_projection=False, + only_cross_attention=False, + upcast_attention=False, + resnet_time_scale_shift="default", + attention_type="default", + resnet_skip_time_act=False, + resnet_out_scale_factor=1.0, + cross_attention_norm=None, + attention_head_dim=None, + upsample_type=None, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, +): + # If attn head dim is not defined, we default it to the number of heads + if attention_head_dim is None: + logger.warn( + f"It is recommended to provide `attention_head_dim` when calling `get_up_block`. Defaulting `attention_head_dim` to {num_attention_heads}." 
+ ) + attention_head_dim = num_attention_heads + + up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type + if up_block_type == "UpBlock3D": + return UpBlock3D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + use_motion_module=use_motion_module, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + elif up_block_type == "CrossAttnUpBlock3D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlock2D") + return CrossAttnUpBlock3D( + num_layers=num_layers, + transformer_layers_per_block=transformer_layers_per_block, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + use_motion_module=use_motion_module, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) + + raise ValueError(f"{up_block_type} does not exist.") + +class UNetMidBlock3DCrossAttn(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads=1, + output_scale_factor=1.0, + cross_attention_dim=1280, + dual_cross_attention=False, + use_linear_projection=False, + upcast_attention=False, + attention_type="default", + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + # there is always at least one resnet + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ] + attentions = [] + motion_modules = [] + + for _ in range(num_layers): + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + in_channels // num_attention_heads, + in_channels=in_channels, + num_layers=transformer_layers_per_block, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + in_channels // num_attention_heads, + in_channels=in_channels, + 
num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=in_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) if use_motion_module else None + ) + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = self.resnets[0](hidden_states, temb) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + for attn, motion_module, resnet in zip(self.attentions, self.motion_modules, self.resnets[1:]): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(motion_module), hidden_states, temb, + encoder_hidden_states) if motion_module is not None else hidden_states + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + else: + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = motion_module(hidden_states, temb, encoder_hidden_states=encoder_hidden_states) if motion_module is not None else hidden_states + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = resnet(hidden_states, temb) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", 
f=video_length) + + return hidden_states + +class DownBlock3D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor=1.0, + add_downsample=True, + downsample_padding=1, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + resnets = [] + motion_modules = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=out_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) if use_motion_module else None + ) + + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward(self, hidden_states, temb=None, encoder_hidden_states=None): + + output_states = () + + for resnet, motion_module in zip(self.resnets, self.motion_modules): + if self.training and self.gradient_checkpointing: + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(motion_module), hidden_states, temb, encoder_hidden_states, use_reentrant=False) if motion_module is not None else hidden_states + else: + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = resnet(hidden_states, temb) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = motion_module(hidden_states, temb, encoder_hidden_states) if motion_module is not None else hidden_states + + output_states = output_states + (hidden_states,) + + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = downsampler(hidden_states) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + + output_states = output_states + (hidden_states,) + + + return hidden_states, output_states + + +class UpBlock3D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + 
temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor=1.0, + add_upsample=True, + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + resnets = [] + motion_modules = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=out_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) if use_motion_module else None + ) + + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + + def forward(self, hidden_states, res_hidden_states_tuple, encoder_hidden_states=None, temb=None, upsample_size=None): + for (resnet, motion_module) in zip(self.resnets, self.motion_modules): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(motion_module), hidden_states, + temb, encoder_hidden_states, use_reentrant=False) if motion_module is not None else hidden_states + else: + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = resnet(hidden_states, temb) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = motion_module(hidden_states, temb, encoder_hidden_states) if motion_module is not None else hidden_states + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = upsampler(hidden_states, upsample_size) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + + return hidden_states + + +class CrossAttnDownBlock3D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + 
dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads=1, + cross_attention_dim=1280, + output_scale_factor=1.0, + downsample_padding=1, + add_downsample=True, + dual_cross_attention=False, + use_linear_projection=False, + only_cross_attention=False, + upcast_attention=False, + attention_type="default", + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + resnets = [] + attentions = [] + motion_modules = [] + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=transformer_layers_per_block, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=out_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) if use_motion_module else None + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + additional_residuals=None, + ): + output_states = () + + blocks = list(zip(self.resnets, self.attentions, self.motion_modules)) + + for i, (resnet, attn, motion_module) in enumerate(blocks): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = 
torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(motion_module), + hidden_states, temb, + encoder_hidden_states, use_reentrant=False) if motion_module is not None else hidden_states + else: + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = motion_module(hidden_states, temb, encoder_hidden_states=encoder_hidden_states) if motion_module is not None else hidden_states + + # apply additional residuals to the output of the last pair of resnet and attention blocks + if i == len(blocks) - 1 and additional_residuals is not None: + hidden_states = hidden_states + additional_residuals + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = downsampler(hidden_states) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + +class CrossAttnUpBlock3D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads=1, + cross_attention_dim=1280, + output_scale_factor=1.0, + add_upsample=True, + dual_cross_attention=False, + use_linear_projection=False, + only_cross_attention=False, + upcast_attention=False, + attention_type="default", + use_motion_module=None, + motion_module_type=None, + motion_module_kwargs=None, + ): + super().__init__() + resnets = [] + attentions = [] + motion_modules = [] + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + 
in_channels=out_channels, + num_layers=transformer_layers_per_block, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + motion_modules.append( + get_motion_module( + in_channels=out_channels, + motion_module_type=motion_module_type, + motion_module_kwargs=motion_module_kwargs, + ) if use_motion_module else None + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + self.motion_modules = nn.ModuleList(motion_modules) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ): + for resnet, attn, motion_module in zip(self.resnets, self.attentions, self.motion_modules): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(motion_module), + hidden_states, temb, encoder_hidden_states, + use_reentrant=False) if motion_module is not None else hidden_states + else: + video_length = hidden_states.shape[2] + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + hidden_states = motion_module(hidden_states, 
temb, encoder_hidden_states=encoder_hidden_states) if motion_module is not None else hidden_states + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = rearrange(hidden_states, "b c f h w -> (b f) c h w") + hidden_states = upsampler(hidden_states, upsample_size) + hidden_states = rearrange(hidden_states, "(b f) c h w -> b c f h w", f=video_length) + + return hidden_states \ No newline at end of file diff --git a/src/animatediff/settings.py b/src/animatediff/settings.py new file mode 100644 index 0000000000000000000000000000000000000000..2a5e7ed72c99bffaf4b2e019b953e4ffc85b2afb --- /dev/null +++ b/src/animatediff/settings.py @@ -0,0 +1,143 @@ +import json +import logging +from os import PathLike +from pathlib import Path +from typing import Any, Dict, Optional, Tuple, Union + +from pydantic.v1 import BaseConfig, BaseSettings, Field +from pydantic.env_settings import (EnvSettingsSource, InitSettingsSource, + SecretsSettingsSource, + SettingsSourceCallable) + +from animatediff import get_dir +from animatediff.schedulers import DiffusionScheduler + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +CKPT_EXTENSIONS = [".pt", ".ckpt", ".pth", ".safetensors"] + + +class JsonSettingsSource: + __slots__ = ["json_config_path"] + + def __init__( + self, + json_config_path: Optional[Union[PathLike, list[PathLike]]] = list(), + ) -> None: + if isinstance(json_config_path, list): + self.json_config_path = [Path(path) for path in json_config_path] + else: + self.json_config_path = [Path(json_config_path)] if json_config_path is not None else [] + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: # noqa C901 + classname = settings.__class__.__name__ + encoding = settings.__config__.env_file_encoding + if len(self.json_config_path) == 0: + pass # no json config provided + + merged_config = dict() # create an empty dict to merge configs into + for idx, path in enumerate(self.json_config_path): + if path.exists() and path.is_file(): # check if the path exists and is a file + logger.debug(f"{classname}: loading config #{idx+1} from {path}") + merged_config.update(json.loads(path.read_text(encoding=encoding))) + logger.debug(f"{classname}: config state #{idx+1}: {merged_config}") + else: + raise FileNotFoundError(f"{classname}: config #{idx+1} at {path} not found or not a file") + + logger.debug(f"{classname}: loaded config: {merged_config}") + return merged_config # return the merged config + + def __repr__(self) -> str: + return f"JsonSettingsSource(json_config_path={repr(self.json_config_path)})" + + +class JsonConfig(BaseConfig): + json_config_path: Optional[Union[Path, list[Path]]] = None + env_file_encoding: str = "utf-8" + + @classmethod + def customise_sources( + cls, + init_settings: InitSettingsSource, + env_settings: EnvSettingsSource, + file_secret_settings: SecretsSettingsSource, + ) -> Tuple[SettingsSourceCallable, ...]: + # pull json_config_path from init_settings if passed, otherwise use the class var + json_config_path = init_settings.init_kwargs.pop("json_config_path", cls.json_config_path) + + logger.debug(f"Using JsonSettingsSource for {cls.__name__}") + json_settings = JsonSettingsSource(json_config_path=json_config_path) + + # return the new settings sources + return ( + init_settings, + json_settings, + ) + + +class InferenceConfig(BaseSettings): + unet_additional_kwargs: dict[str, Any] + noise_scheduler_kwargs: dict[str, Any] + + class Config(JsonConfig): + json_config_path: Path + + +def get_infer_config( 
+ is_v2:bool, + is_sdxl:bool, +) -> InferenceConfig: + config_path: Path = get_dir("config").joinpath("inference/default.json" if not is_v2 else "inference/motion_v2.json") + + if is_sdxl: + config_path = get_dir("config").joinpath("inference/motion_sdxl.json") + + settings = InferenceConfig(json_config_path=config_path) + return settings + + +class ModelConfig(BaseSettings): + name: str = Field(...) # Config name, not actually used for much of anything + path: Path = Field(...) # Path to the model + vae_path: str = "" # Path to the model + motion_module: Path = Field(...) # Path to the motion module + context_schedule: str = "uniform" + lcm_map: Dict[str,Any]= Field({}) + gradual_latent_hires_fix_map: Dict[str,Any]= Field({}) + compile: bool = Field(False) # whether to compile the model with TorchDynamo + tensor_interpolation_slerp: bool = Field(True) + seed: list[int] = Field([]) # Seed(s) for the random number generators + scheduler: DiffusionScheduler = Field(DiffusionScheduler.k_dpmpp_2m) # Scheduler to use + steps: int = 25 # Number of inference steps to run + guidance_scale: float = 7.5 # CFG scale to use + unet_batch_size: int = 1 + clip_skip: int = 1 # skip the last N-1 layers of the CLIP text encoder + prompt_fixed_ratio: float = 0.5 + head_prompt: str = "" + prompt_map: Dict[str,str]= Field({}) + tail_prompt: str = "" + n_prompt: list[str] = Field([]) # Anti-prompt(s) to use + is_single_prompt_mode : bool = Field(False) + lora_map: Dict[str,Any]= Field({}) + motion_lora_map: Dict[str,float]= Field({}) + ip_adapter_map: Dict[str,Any]= Field({}) + img2img_map: Dict[str,Any]= Field({}) + region_map: Dict[str,Any]= Field({}) + controlnet_map: Dict[str,Any]= Field({}) + upscale_config: Dict[str,Any]= Field({}) + stylize_config: Dict[str,Any]= Field({}) + output: Dict[str,Any]= Field({}) + result: Dict[str,Any]= Field({}) + + class Config(JsonConfig): + json_config_path: Path + + @property + def save_name(self): + return f"{self.name.lower()}-{self.path.stem.lower()}" + + +def get_model_config(config_path: Path) -> ModelConfig: + settings = ModelConfig(json_config_path=config_path) + return settings diff --git a/src/animatediff/softmax_splatting/README.md b/src/animatediff/softmax_splatting/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b01d39145603de4279a6719bea71bdce8211a517 --- /dev/null +++ b/src/animatediff/softmax_splatting/README.md @@ -0,0 +1,90 @@ +# softmax-splatting +This is a reference implementation of the softmax splatting operator, which has been proposed in Softmax Splatting for Video Frame Interpolation [1], using PyTorch. Softmax splatting is a well-motivated approach for differentiable forward warping. It uses a translational invariant importance metric to disambiguate cases where multiple source pixels map to the same target pixel. Should you be making use of our work, please cite our paper [1]. + +Paper + +For our previous work on SepConv, see: https://github.com/sniklaus/revisiting-sepconv + +## setup +The softmax splatting is implemented in CUDA using CuPy, which is why CuPy is a required dependency. It can be installed using `pip install cupy` or alternatively using one of the provided [binary packages](https://docs.cupy.dev/en/stable/install.html#installing-cupy) as outlined in the CuPy repository. + +If you plan to process videos, then please also make sure to have `pip install moviepy` installed. + +## usage +To run it on your own pair of frames, use the following command. 
+ +``` +python run.py --model lf --one ./images/one.png --two ./images/two.png --out ./out.png +``` + +To run in on a video, use the following command. + +``` +python run.py --model lf --video ./videos/car-turn.mp4 --out ./out.mp4 +``` + +For a quick benchmark using examples from the Middlebury benchmark for optical flow, run `python benchmark_middlebury.py`. You can use it to easily verify that the provided implementation runs as expected. + +## warping +We provide a small script to replicate the third figure of our paper [1]. You can simply run the following to obtain the comparison between summation splatting, average splatting, linear splatting, and softmax splatting. + +The example script is using OpenCV to load and display images, as well as to read the provided optical flow file. An easy way to install OpenCV for Python is using the `pip install opencv-contrib-python` package. + +``` +import cv2 +import numpy +import torch + +import run + +import softsplat # the custom softmax splatting layer + +########################################################## + +torch.set_grad_enabled(False) # make sure to not compute gradients for computational performance + +torch.backends.cudnn.enabled = True # make sure to use cudnn for computational performance + +########################################################## + +tenOne = torch.FloatTensor(numpy.ascontiguousarray(cv2.imread(filename='./images/one.png', flags=-1).transpose(2, 0, 1)[None, :, :, :].astype(numpy.float32) * (1.0 / 255.0))).cuda() +tenTwo = torch.FloatTensor(numpy.ascontiguousarray(cv2.imread(filename='./images/two.png', flags=-1).transpose(2, 0, 1)[None, :, :, :].astype(numpy.float32) * (1.0 / 255.0))).cuda() +tenFlow = torch.FloatTensor(numpy.ascontiguousarray(run.read_flo('./images/flow.flo').transpose(2, 0, 1)[None, :, :, :])).cuda() + +tenMetric = torch.nn.functional.l1_loss(input=tenOne, target=run.backwarp(tenIn=tenTwo, tenFlow=tenFlow), reduction='none').mean([1], True) + +for intTime, fltTime in enumerate(numpy.linspace(0.0, 1.0, 11).tolist()): + tenSummation = softsplat.softsplat(tenIn=tenOne, tenFlow=tenFlow * fltTime, tenMetric=None, strMode='sum') + tenAverage = softsplat.softsplat(tenIn=tenOne, tenFlow=tenFlow * fltTime, tenMetric=None, strMode='avg') + tenLinear = softsplat.softsplat(tenIn=tenOne, tenFlow=tenFlow * fltTime, tenMetric=(0.3 - tenMetric).clip(0.001, 1.0), strMode='linear') # finding a good linearly metric is difficult, and it is not invariant to translations + tenSoftmax = softsplat.softsplat(tenIn=tenOne, tenFlow=tenFlow * fltTime, tenMetric=(-20.0 * tenMetric).clip(-20.0, 20.0), strMode='soft') # -20.0 is a hyperparameter, called 'alpha' in the paper, that could be learned using a torch.Parameter + + cv2.imshow(winname='summation', mat=tenSummation[0, :, :, :].cpu().numpy().transpose(1, 2, 0)) + cv2.imshow(winname='average', mat=tenAverage[0, :, :, :].cpu().numpy().transpose(1, 2, 0)) + cv2.imshow(winname='linear', mat=tenLinear[0, :, :, :].cpu().numpy().transpose(1, 2, 0)) + cv2.imshow(winname='softmax', mat=tenSoftmax[0, :, :, :].cpu().numpy().transpose(1, 2, 0)) + cv2.waitKey(delay=0) +# end +``` + +## xiph +In our paper, we propose to use 4K video clips from Xiph to evaluate video frame interpolation on high-resolution footage. Please see the supplementary `benchmark_xiph.py` on how to reproduce the shown metrics. + +## video +Video + +## license +The provided implementation is strictly for academic purposes only. 
Should you be interested in using our technology for any commercial use, please feel free to contact us. + +## references +``` +[1] @inproceedings{Niklaus_CVPR_2020, + author = {Simon Niklaus and Feng Liu}, + title = {Softmax Splatting for Video Frame Interpolation}, + booktitle = {IEEE Conference on Computer Vision and Pattern Recognition}, + year = {2020} + } +``` + +## acknowledgment +The video above uses materials under a Creative Common license as detailed at the end. \ No newline at end of file diff --git a/src/animatediff/softmax_splatting/correlation/README.md b/src/animatediff/softmax_splatting/correlation/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a8e0ca529d50b7e09d521cc288daae7771514188 --- /dev/null +++ b/src/animatediff/softmax_splatting/correlation/README.md @@ -0,0 +1 @@ +This is an adaptation of the FlowNet2 implementation in order to compute cost volumes. Should you be making use of this work, please make sure to adhere to the licensing terms of the original authors. Should you be making use or modify this particular implementation, please acknowledge it appropriately. \ No newline at end of file diff --git a/src/animatediff/softmax_splatting/correlation/correlation.py b/src/animatediff/softmax_splatting/correlation/correlation.py new file mode 100644 index 0000000000000000000000000000000000000000..5b560d41d55655945d0cf7a81de39c8d2678f0e1 --- /dev/null +++ b/src/animatediff/softmax_splatting/correlation/correlation.py @@ -0,0 +1,395 @@ +#!/usr/bin/env python + +import cupy +import re +import torch + +kernel_Correlation_rearrange = ''' + extern "C" __global__ void kernel_Correlation_rearrange( + const int n, + const float* input, + float* output + ) { + int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; + + if (intIndex >= n) { + return; + } + + int intSample = blockIdx.z; + int intChannel = blockIdx.y; + + float fltValue = input[(((intSample * SIZE_1(input)) + intChannel) * SIZE_2(input) * SIZE_3(input)) + intIndex]; + + __syncthreads(); + + int intPaddedY = (intIndex / SIZE_3(input)) + 4; + int intPaddedX = (intIndex % SIZE_3(input)) + 4; + int intRearrange = ((SIZE_3(input) + 8) * intPaddedY) + intPaddedX; + + output[(((intSample * SIZE_1(output) * SIZE_2(output)) + intRearrange) * SIZE_1(input)) + intChannel] = fltValue; + } +''' + +kernel_Correlation_updateOutput = ''' + extern "C" __global__ void kernel_Correlation_updateOutput( + const int n, + const float* rbot0, + const float* rbot1, + float* top + ) { + extern __shared__ char patch_data_char[]; + + float *patch_data = (float *)patch_data_char; + + // First (upper left) position of kernel upper-left corner in current center position of neighborhood in image 1 + int x1 = blockIdx.x + 4; + int y1 = blockIdx.y + 4; + int item = blockIdx.z; + int ch_off = threadIdx.x; + + // Load 3D patch into shared shared memory + for (int j = 0; j < 1; j++) { // HEIGHT + for (int i = 0; i < 1; i++) { // WIDTH + int ji_off = (j + i) * SIZE_3(rbot0); + for (int ch = ch_off; ch < SIZE_3(rbot0); ch += 32) { // CHANNELS + int idx1 = ((item * SIZE_1(rbot0) + y1+j) * SIZE_2(rbot0) + x1+i) * SIZE_3(rbot0) + ch; + int idxPatchData = ji_off + ch; + patch_data[idxPatchData] = rbot0[idx1]; + } + } + } + + __syncthreads(); + + __shared__ float sum[32]; + + // Compute correlation + for (int top_channel = 0; top_channel < SIZE_1(top); top_channel++) { + sum[ch_off] = 0; + + int s2o = top_channel % 9 - 4; + int s2p = top_channel / 9 - 4; + + for (int j = 0; j < 1; j++) { // HEIGHT + for (int i = 0; i < 1; 
i++) { // WIDTH + int ji_off = (j + i) * SIZE_3(rbot0); + for (int ch = ch_off; ch < SIZE_3(rbot0); ch += 32) { // CHANNELS + int x2 = x1 + s2o; + int y2 = y1 + s2p; + + int idxPatchData = ji_off + ch; + int idx2 = ((item * SIZE_1(rbot0) + y2+j) * SIZE_2(rbot0) + x2+i) * SIZE_3(rbot0) + ch; + + sum[ch_off] += patch_data[idxPatchData] * rbot1[idx2]; + } + } + } + + __syncthreads(); + + if (ch_off == 0) { + float total_sum = 0; + for (int idx = 0; idx < 32; idx++) { + total_sum += sum[idx]; + } + const int sumelems = SIZE_3(rbot0); + const int index = ((top_channel*SIZE_2(top) + blockIdx.y)*SIZE_3(top))+blockIdx.x; + top[index + item*SIZE_1(top)*SIZE_2(top)*SIZE_3(top)] = total_sum / (float)sumelems; + } + } + } +''' + +kernel_Correlation_updateGradOne = ''' + #define ROUND_OFF 50000 + + extern "C" __global__ void kernel_Correlation_updateGradOne( + const int n, + const int intSample, + const float* rbot0, + const float* rbot1, + const float* gradOutput, + float* gradOne, + float* gradTwo + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + int n = intIndex % SIZE_1(gradOne); // channels + int l = (intIndex / SIZE_1(gradOne)) % SIZE_3(gradOne) + 4; // w-pos + int m = (intIndex / SIZE_1(gradOne) / SIZE_3(gradOne)) % SIZE_2(gradOne) + 4; // h-pos + + // round_off is a trick to enable integer division with ceil, even for negative numbers + // We use a large offset, for the inner part not to become negative. + const int round_off = ROUND_OFF; + const int round_off_s1 = round_off; + + // We add round_off before_s1 the int division and subtract round_off after it, to ensure the formula matches ceil behavior: + int xmin = (l - 4 + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4) + int ymin = (m - 4 + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4) + + // Same here: + int xmax = (l - 4 + round_off_s1) - round_off; // floor (l - 4) + int ymax = (m - 4 + round_off_s1) - round_off; // floor (m - 4) + + float sum = 0; + if (xmax>=0 && ymax>=0 && (xmin<=SIZE_3(gradOutput)-1) && (ymin<=SIZE_2(gradOutput)-1)) { + xmin = max(0,xmin); + xmax = min(SIZE_3(gradOutput)-1,xmax); + + ymin = max(0,ymin); + ymax = min(SIZE_2(gradOutput)-1,ymax); + + for (int p = -4; p <= 4; p++) { + for (int o = -4; o <= 4; o++) { + // Get rbot1 data: + int s2o = o; + int s2p = p; + int idxbot1 = ((intSample * SIZE_1(rbot0) + (m+s2p)) * SIZE_2(rbot0) + (l+s2o)) * SIZE_3(rbot0) + n; + float bot1tmp = rbot1[idxbot1]; // rbot1[l+s2o,m+s2p,n] + + // Index offset for gradOutput in following loops: + int op = (p+4) * 9 + (o+4); // index[o,p] + int idxopoffset = (intSample * SIZE_1(gradOutput) + op); + + for (int y = ymin; y <= ymax; y++) { + for (int x = xmin; x <= xmax; x++) { + int idxgradOutput = (idxopoffset * SIZE_2(gradOutput) + y) * SIZE_3(gradOutput) + x; // gradOutput[x,y,o,p] + sum += gradOutput[idxgradOutput] * bot1tmp; + } + } + } + } + } + const int sumelems = SIZE_1(gradOne); + const int bot0index = ((n * SIZE_2(gradOne)) + (m-4)) * SIZE_3(gradOne) + (l-4); + gradOne[bot0index + intSample*SIZE_1(gradOne)*SIZE_2(gradOne)*SIZE_3(gradOne)] = sum / (float)sumelems; + } } +''' + +kernel_Correlation_updateGradTwo = ''' + #define ROUND_OFF 50000 + + extern "C" __global__ void kernel_Correlation_updateGradTwo( + const int n, + const int intSample, + const float* rbot0, + const float* rbot1, + const float* gradOutput, + float* gradOne, + float* gradTwo + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * 
gridDim.x) { + int n = intIndex % SIZE_1(gradTwo); // channels + int l = (intIndex / SIZE_1(gradTwo)) % SIZE_3(gradTwo) + 4; // w-pos + int m = (intIndex / SIZE_1(gradTwo) / SIZE_3(gradTwo)) % SIZE_2(gradTwo) + 4; // h-pos + + // round_off is a trick to enable integer division with ceil, even for negative numbers + // We use a large offset, for the inner part not to become negative. + const int round_off = ROUND_OFF; + const int round_off_s1 = round_off; + + float sum = 0; + for (int p = -4; p <= 4; p++) { + for (int o = -4; o <= 4; o++) { + int s2o = o; + int s2p = p; + + //Get X,Y ranges and clamp + // We add round_off before_s1 the int division and subtract round_off after it, to ensure the formula matches ceil behavior: + int xmin = (l - 4 - s2o + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4 - s2o) + int ymin = (m - 4 - s2p + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4 - s2o) + + // Same here: + int xmax = (l - 4 - s2o + round_off_s1) - round_off; // floor (l - 4 - s2o) + int ymax = (m - 4 - s2p + round_off_s1) - round_off; // floor (m - 4 - s2p) + + if (xmax>=0 && ymax>=0 && (xmin<=SIZE_3(gradOutput)-1) && (ymin<=SIZE_2(gradOutput)-1)) { + xmin = max(0,xmin); + xmax = min(SIZE_3(gradOutput)-1,xmax); + + ymin = max(0,ymin); + ymax = min(SIZE_2(gradOutput)-1,ymax); + + // Get rbot0 data: + int idxbot0 = ((intSample * SIZE_1(rbot0) + (m-s2p)) * SIZE_2(rbot0) + (l-s2o)) * SIZE_3(rbot0) + n; + float bot0tmp = rbot0[idxbot0]; // rbot1[l+s2o,m+s2p,n] + + // Index offset for gradOutput in following loops: + int op = (p+4) * 9 + (o+4); // index[o,p] + int idxopoffset = (intSample * SIZE_1(gradOutput) + op); + + for (int y = ymin; y <= ymax; y++) { + for (int x = xmin; x <= xmax; x++) { + int idxgradOutput = (idxopoffset * SIZE_2(gradOutput) + y) * SIZE_3(gradOutput) + x; // gradOutput[x,y,o,p] + sum += gradOutput[idxgradOutput] * bot0tmp; + } + } + } + } + } + const int sumelems = SIZE_1(gradTwo); + const int bot1index = ((n * SIZE_2(gradTwo)) + (m-4)) * SIZE_3(gradTwo) + (l-4); + gradTwo[bot1index + intSample*SIZE_1(gradTwo)*SIZE_2(gradTwo)*SIZE_3(gradTwo)] = sum / (float)sumelems; + } } +''' + +def cupy_kernel(strFunction, objVariables): + strKernel = globals()[strFunction] + + while True: + objMatch = re.search('(SIZE_)([0-4])(\()([^\)]*)(\))', strKernel) + + if objMatch is None: + break + # end + + intArg = int(objMatch.group(2)) + + strTensor = objMatch.group(4) + intSizes = objVariables[strTensor].size() + + strKernel = strKernel.replace(objMatch.group(), str(intSizes[intArg] if torch.is_tensor(intSizes[intArg]) == False else intSizes[intArg].item())) + + while True: + objMatch = re.search('(VALUE_)([0-4])(\()([^\)]+)(\))', strKernel) + + if objMatch is None: + break + # end + + intArgs = int(objMatch.group(2)) + strArgs = objMatch.group(4).split(',') + + strTensor = strArgs[0] + intStrides = objVariables[strTensor].stride() + strIndex = [ '((' + strArgs[intArg + 1].replace('{', '(').replace('}', ')').strip() + ')*' + str(intStrides[intArg] if torch.is_tensor(intStrides[intArg]) == False else intStrides[intArg].item()) + ')' for intArg in range(intArgs) ] + + strKernel = strKernel.replace(objMatch.group(0), strTensor + '[' + str.join('+', strIndex) + ']') + # end + + return strKernel +# end + +@cupy.memoize(for_each_device=True) +def cupy_launch(strFunction, strKernel): + return cupy.cuda.compile_with_cache(strKernel).get_function(strFunction) +# end + +class _FunctionCorrelation(torch.autograd.Function): + @staticmethod + def forward(self, one, two): + rbot0 = one.new_zeros([ 
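+        # note (added comment): rbot0/rbot1 hold the two inputs rearranged from NCHW
+        # to NHWC with a 4-pixel border on every side (hence the +8), as produced by
+        # kernel_Correlation_rearrange above.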
one.shape[0], one.shape[2] + 8, one.shape[3] + 8, one.shape[1] ]) + rbot1 = one.new_zeros([ one.shape[0], one.shape[2] + 8, one.shape[3] + 8, one.shape[1] ]) + + one = one.contiguous(); assert(one.is_cuda == True) + two = two.contiguous(); assert(two.is_cuda == True) + + output = one.new_zeros([ one.shape[0], 81, one.shape[2], one.shape[3] ]) + + if one.is_cuda == True: + n = one.shape[2] * one.shape[3] + cupy_launch('kernel_Correlation_rearrange', cupy_kernel('kernel_Correlation_rearrange', { + 'input': one, + 'output': rbot0 + }))( + grid=tuple([ int((n + 16 - 1) / 16), one.shape[1], one.shape[0] ]), + block=tuple([ 16, 1, 1 ]), + args=[ cupy.int32(n), one.data_ptr(), rbot0.data_ptr() ] + ) + + n = two.shape[2] * two.shape[3] + cupy_launch('kernel_Correlation_rearrange', cupy_kernel('kernel_Correlation_rearrange', { + 'input': two, + 'output': rbot1 + }))( + grid=tuple([ int((n + 16 - 1) / 16), two.shape[1], two.shape[0] ]), + block=tuple([ 16, 1, 1 ]), + args=[ cupy.int32(n), two.data_ptr(), rbot1.data_ptr() ] + ) + + n = output.shape[1] * output.shape[2] * output.shape[3] + cupy_launch('kernel_Correlation_updateOutput', cupy_kernel('kernel_Correlation_updateOutput', { + 'rbot0': rbot0, + 'rbot1': rbot1, + 'top': output + }))( + grid=tuple([ output.shape[3], output.shape[2], output.shape[0] ]), + block=tuple([ 32, 1, 1 ]), + shared_mem=one.shape[1] * 4, + args=[ cupy.int32(n), rbot0.data_ptr(), rbot1.data_ptr(), output.data_ptr() ] + ) + + elif one.is_cuda == False: + raise NotImplementedError() + + # end + + self.save_for_backward(one, two, rbot0, rbot1) + + return output + # end + + @staticmethod + def backward(self, gradOutput): + one, two, rbot0, rbot1 = self.saved_tensors + + gradOutput = gradOutput.contiguous(); assert(gradOutput.is_cuda == True) + + gradOne = one.new_zeros([ one.shape[0], one.shape[1], one.shape[2], one.shape[3] ]) if self.needs_input_grad[0] == True else None + gradTwo = one.new_zeros([ one.shape[0], one.shape[1], one.shape[2], one.shape[3] ]) if self.needs_input_grad[1] == True else None + + if one.is_cuda == True: + if gradOne is not None: + for intSample in range(one.shape[0]): + n = one.shape[1] * one.shape[2] * one.shape[3] + cupy_launch('kernel_Correlation_updateGradOne', cupy_kernel('kernel_Correlation_updateGradOne', { + 'rbot0': rbot0, + 'rbot1': rbot1, + 'gradOutput': gradOutput, + 'gradOne': gradOne, + 'gradTwo': None + }))( + grid=tuple([ int((n + 512 - 1) / 512), 1, 1 ]), + block=tuple([ 512, 1, 1 ]), + args=[ cupy.int32(n), intSample, rbot0.data_ptr(), rbot1.data_ptr(), gradOutput.data_ptr(), gradOne.data_ptr(), None ] + ) + # end + # end + + if gradTwo is not None: + for intSample in range(one.shape[0]): + n = one.shape[1] * one.shape[2] * one.shape[3] + cupy_launch('kernel_Correlation_updateGradTwo', cupy_kernel('kernel_Correlation_updateGradTwo', { + 'rbot0': rbot0, + 'rbot1': rbot1, + 'gradOutput': gradOutput, + 'gradOne': None, + 'gradTwo': gradTwo + }))( + grid=tuple([ int((n + 512 - 1) / 512), 1, 1 ]), + block=tuple([ 512, 1, 1 ]), + args=[ cupy.int32(n), intSample, rbot0.data_ptr(), rbot1.data_ptr(), gradOutput.data_ptr(), None, gradTwo.data_ptr() ] + ) + # end + # end + + elif one.is_cuda == False: + raise NotImplementedError() + + # end + + return gradOne, gradTwo + # end +# end + +def FunctionCorrelation(tenOne, tenTwo): + return _FunctionCorrelation.apply(tenOne, tenTwo) +# end + +class ModuleCorrelation(torch.nn.Module): + def __init__(self): + super().__init__() + # end + + def forward(self, tenOne, tenTwo): + return 
_FunctionCorrelation.apply(tenOne, tenTwo) + # end +# end diff --git a/src/animatediff/softmax_splatting/run.py b/src/animatediff/softmax_splatting/run.py new file mode 100644 index 0000000000000000000000000000000000000000..22313f889dabafb31c675f199029713332751952 --- /dev/null +++ b/src/animatediff/softmax_splatting/run.py @@ -0,0 +1,871 @@ +#!/usr/bin/env python + +import getopt +import math +import sys +import typing + +import numpy +import PIL +import PIL.Image +import torch + +from . import softsplat # the custom softmax splatting layer + +try: + from .correlation import correlation # the custom cost volume layer +except: + sys.path.insert(0, './correlation'); import correlation # you should consider upgrading python +# end + +########################################################## + +torch.set_grad_enabled(False) # make sure to not compute gradients for computational performance + +torch.backends.cudnn.enabled = True # make sure to use cudnn for computational performance + +########################################################## + +arguments_strModel = 'lf' +arguments_strOne = './images/one.png' +arguments_strTwo = './images/two.png' +arguments_strVideo = './videos/car-turn.mp4' +arguments_strOut = './out.png' +arguments_strVideo2 = '' + +for strOption, strArgument in getopt.getopt(sys.argv[1:], '', [strParameter[2:] + '=' for strParameter in sys.argv[1::2]])[0]: + if strOption == '--model' and strArgument != '': arguments_strModel = strArgument # which model to use + if strOption == '--one' and strArgument != '': arguments_strOne = strArgument # path to the first frame + if strOption == '--two' and strArgument != '': arguments_strTwo = strArgument # path to the second frame + if strOption == '--video' and strArgument != '': arguments_strVideo = strArgument # path to a video + if strOption == '--video2' and strArgument != '': arguments_strVideo2 = strArgument # path to a video + if strOption == '--out' and strArgument != '': arguments_strOut = strArgument # path to where the output should be stored +# end + +########################################################## + +def read_flo(strFile): + with open(strFile, 'rb') as objFile: + strFlow = objFile.read() + # end + + assert(numpy.frombuffer(buffer=strFlow, dtype=numpy.float32, count=1, offset=0) == 202021.25) + + intWidth = numpy.frombuffer(buffer=strFlow, dtype=numpy.int32, count=1, offset=4)[0] + intHeight = numpy.frombuffer(buffer=strFlow, dtype=numpy.int32, count=1, offset=8)[0] + + return numpy.frombuffer(buffer=strFlow, dtype=numpy.float32, count=intHeight * intWidth * 2, offset=12).reshape(intHeight, intWidth, 2) +# end + +########################################################## + +backwarp_tenGrid = {} + +def backwarp(tenIn, tenFlow): + if str(tenFlow.shape) not in backwarp_tenGrid: + tenHor = torch.linspace(start=-1.0, end=1.0, steps=tenFlow.shape[3], dtype=tenFlow.dtype, device=tenFlow.device).view(1, 1, 1, -1).repeat(1, 1, tenFlow.shape[2], 1) + tenVer = torch.linspace(start=-1.0, end=1.0, steps=tenFlow.shape[2], dtype=tenFlow.dtype, device=tenFlow.device).view(1, 1, -1, 1).repeat(1, 1, 1, tenFlow.shape[3]) + + backwarp_tenGrid[str(tenFlow.shape)] = torch.cat([tenHor, tenVer], 1).cuda() + # end + + tenFlow = torch.cat([tenFlow[:, 0:1, :, :] / ((tenIn.shape[3] - 1.0) / 2.0), tenFlow[:, 1:2, :, :] / ((tenIn.shape[2] - 1.0) / 2.0)], 1) + + return torch.nn.functional.grid_sample(input=tenIn, grid=(backwarp_tenGrid[str(tenFlow.shape)] + tenFlow).permute(0, 2, 3, 1), mode='bilinear', padding_mode='zeros', 
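+    # note (added comment): tenFlow was normalized to [-1, 1] grid units above, so
+    # this call performs a standard bilinear backward warp of tenIn along the flow.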
align_corners=True) +# end + +########################################################## + +class Flow(torch.nn.Module): + def __init__(self): + super().__init__() + + class Extractor(torch.nn.Module): + def __init__(self): + super().__init__() + + self.netFirst = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3, stride=2, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=16, out_channels=16, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1) + ) + + self.netSecond = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=16, out_channels=32, kernel_size=3, stride=2, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=32, out_channels=32, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1) + ) + + self.netThird = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, stride=2, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1) + ) + + self.netFourth = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=64, out_channels=96, kernel_size=3, stride=2, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=96, out_channels=96, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1) + ) + + self.netFifth = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=96, out_channels=128, kernel_size=3, stride=2, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=128, out_channels=128, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1) + ) + + self.netSixth = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=128, out_channels=192, kernel_size=3, stride=2, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=192, out_channels=192, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1) + ) + # end + + def forward(self, tenInput): + tenFirst = self.netFirst(tenInput) + tenSecond = self.netSecond(tenFirst) + tenThird = self.netThird(tenSecond) + tenFourth = self.netFourth(tenThird) + tenFifth = self.netFifth(tenFourth) + tenSixth = self.netSixth(tenFifth) + + return [tenFirst, tenSecond, tenThird, tenFourth, tenFifth, tenSixth] + # end + # end + + class Decoder(torch.nn.Module): + def __init__(self, intChannels): + super().__init__() + + self.netMain = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=intChannels, out_channels=128, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=128, out_channels=128, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=128, out_channels=96, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=96, out_channels=64, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=64, out_channels=32, kernel_size=3, stride=1, padding=1), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d(in_channels=32, out_channels=2, kernel_size=3, 
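+                    # note (added comment): the decoder head ends in 2 channels,
+                    # the (dx, dy) flow field returned below as 'tenForward'.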
stride=1, padding=1) + ) + # end + + def forward(self, tenOne, tenTwo, objPrevious): + intWidth = tenOne.shape[3] and tenTwo.shape[3] + intHeight = tenOne.shape[2] and tenTwo.shape[2] + + tenMain = None + + if objPrevious is None: + tenVolume = correlation.FunctionCorrelation(tenOne=tenOne, tenTwo=tenTwo) + + tenMain = torch.cat([tenOne, tenVolume], 1) + + elif objPrevious is not None: + tenForward = torch.nn.functional.interpolate(input=objPrevious['tenForward'], size=(intHeight, intWidth), mode='bilinear', align_corners=False) / float(objPrevious['tenForward'].shape[3]) * float(intWidth) + + tenVolume = correlation.FunctionCorrelation(tenOne=tenOne, tenTwo=backwarp(tenTwo, tenForward)) + + tenMain = torch.cat([tenOne, tenVolume, tenForward], 1) + + # end + + return { + 'tenForward': self.netMain(tenMain) + } + # end + # end + + self.netExtractor = Extractor() + + self.netFirst = Decoder(16 + 81 + 2) + self.netSecond = Decoder(32 + 81 + 2) + self.netThird = Decoder(64 + 81 + 2) + self.netFourth = Decoder(96 + 81 + 2) + self.netFifth = Decoder(128 + 81 + 2) + self.netSixth = Decoder(192 + 81) + # end + + def forward(self, tenOne, tenTwo): + intWidth = tenOne.shape[3] and tenTwo.shape[3] + intHeight = tenOne.shape[2] and tenTwo.shape[2] + + tenOne = self.netExtractor(tenOne) + tenTwo = self.netExtractor(tenTwo) + + objForward = None + objBackward = None + + objForward = self.netSixth(tenOne[-1], tenTwo[-1], objForward) + objBackward = self.netSixth(tenTwo[-1], tenOne[-1], objBackward) + + objForward = self.netFifth(tenOne[-2], tenTwo[-2], objForward) + objBackward = self.netFifth(tenTwo[-2], tenOne[-2], objBackward) + + objForward = self.netFourth(tenOne[-3], tenTwo[-3], objForward) + objBackward = self.netFourth(tenTwo[-3], tenOne[-3], objBackward) + + objForward = self.netThird(tenOne[-4], tenTwo[-4], objForward) + objBackward = self.netThird(tenTwo[-4], tenOne[-4], objBackward) + + objForward = self.netSecond(tenOne[-5], tenTwo[-5], objForward) + objBackward = self.netSecond(tenTwo[-5], tenOne[-5], objBackward) + + objForward = self.netFirst(tenOne[-6], tenTwo[-6], objForward) + objBackward = self.netFirst(tenTwo[-6], tenOne[-6], objBackward) + + return { + 'tenForward': torch.nn.functional.interpolate(input=objForward['tenForward'], size=(intHeight, intWidth), mode='bilinear', align_corners=False) * (float(intWidth) / float(objForward['tenForward'].shape[3])), + 'tenBackward': torch.nn.functional.interpolate(input=objBackward['tenForward'], size=(intHeight, intWidth), mode='bilinear', align_corners=False) * (float(intWidth) / float(objBackward['tenForward'].shape[3])) + } + # end +# end + +########################################################## + +class Synthesis(torch.nn.Module): + def __init__(self): + super().__init__() + + class Basic(torch.nn.Module): + def __init__(self, strType, intChannels, boolSkip): + super().__init__() + + if strType == 'relu-conv-relu-conv': + self.netMain = torch.nn.Sequential( + torch.nn.PReLU(num_parameters=intChannels[0], init=0.25), + torch.nn.Conv2d(in_channels=intChannels[0], out_channels=intChannels[1], kernel_size=3, stride=1, padding=1, bias=False), + torch.nn.PReLU(num_parameters=intChannels[1], init=0.25), + torch.nn.Conv2d(in_channels=intChannels[1], out_channels=intChannels[2], kernel_size=3, stride=1, padding=1, bias=False) + ) + + elif strType == 'conv-relu-conv': + self.netMain = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=intChannels[0], out_channels=intChannels[1], kernel_size=3, stride=1, padding=1, bias=False), + 
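+                        # note (added comment): 'conv-relu-conv' preserves resolution;
+                        # with boolSkip the block acts as a residual unit, using an
+                        # identity or 1x1-conv shortcut (see netShortcut below).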
torch.nn.PReLU(num_parameters=intChannels[1], init=0.25), + torch.nn.Conv2d(in_channels=intChannels[1], out_channels=intChannels[2], kernel_size=3, stride=1, padding=1, bias=False) + ) + + # end + + self.boolSkip = boolSkip + + if boolSkip == True: + if intChannels[0] == intChannels[2]: + self.netShortcut = None + + elif intChannels[0] != intChannels[2]: + self.netShortcut = torch.nn.Conv2d(in_channels=intChannels[0], out_channels=intChannels[2], kernel_size=1, stride=1, padding=0, bias=False) + + # end + # end + # end + + def forward(self, tenInput): + if self.boolSkip == False: + return self.netMain(tenInput) + # end + + if self.netShortcut is None: + return self.netMain(tenInput) + tenInput + + elif self.netShortcut is not None: + return self.netMain(tenInput) + self.netShortcut(tenInput) + + # end + # end + # end + + class Downsample(torch.nn.Module): + def __init__(self, intChannels): + super().__init__() + + self.netMain = torch.nn.Sequential( + torch.nn.PReLU(num_parameters=intChannels[0], init=0.25), + torch.nn.Conv2d(in_channels=intChannels[0], out_channels=intChannels[1], kernel_size=3, stride=2, padding=1, bias=False), + torch.nn.PReLU(num_parameters=intChannels[1], init=0.25), + torch.nn.Conv2d(in_channels=intChannels[1], out_channels=intChannels[2], kernel_size=3, stride=1, padding=1, bias=False) + ) + # end + + def forward(self, tenInput): + return self.netMain(tenInput) + # end + # end + + class Upsample(torch.nn.Module): + def __init__(self, intChannels): + super().__init__() + + self.netMain = torch.nn.Sequential( + torch.nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False), + torch.nn.PReLU(num_parameters=intChannels[0], init=0.25), + torch.nn.Conv2d(in_channels=intChannels[0], out_channels=intChannels[1], kernel_size=3, stride=1, padding=1, bias=False), + torch.nn.PReLU(num_parameters=intChannels[1], init=0.25), + torch.nn.Conv2d(in_channels=intChannels[1], out_channels=intChannels[2], kernel_size=3, stride=1, padding=1, bias=False) + ) + # end + + def forward(self, tenInput): + return self.netMain(tenInput) + # end + # end + + class Encode(torch.nn.Module): + def __init__(self): + super().__init__() + + self.netOne = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=3, out_channels=32, kernel_size=3, stride=1, padding=1, bias=False), + torch.nn.PReLU(num_parameters=32, init=0.25), + torch.nn.Conv2d(in_channels=32, out_channels=32, kernel_size=3, stride=1, padding=1, bias=False), + torch.nn.PReLU(num_parameters=32, init=0.25) + ) + + self.netTwo = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, stride=2, padding=1, bias=False), + torch.nn.PReLU(num_parameters=64, init=0.25), + torch.nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, bias=False), + torch.nn.PReLU(num_parameters=64, init=0.25) + ) + + self.netThr = torch.nn.Sequential( + torch.nn.Conv2d(in_channels=64, out_channels=96, kernel_size=3, stride=2, padding=1, bias=False), + torch.nn.PReLU(num_parameters=96, init=0.25), + torch.nn.Conv2d(in_channels=96, out_channels=96, kernel_size=3, stride=1, padding=1, bias=False), + torch.nn.PReLU(num_parameters=96, init=0.25) + ) + # end + + def forward(self, tenInput): + tenOutput = [] + + tenOutput.append(self.netOne(tenInput)) + tenOutput.append(self.netTwo(tenOutput[-1])) + tenOutput.append(self.netThr(tenOutput[-1])) + + return [torch.cat([tenInput, tenOutput[0]], 1)] + tenOutput[1:] + # end + # end + + class Softmetric(torch.nn.Module): + def __init__(self): + super().__init__() + + 
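+            # note (added comment): submodules are registered under grid-style names
+            # such as '0x0 - 1x0' (row x column -> row x column) and looked up via
+            # self._modules in forward(); rows are resolution levels, columns are
+            # processing stages.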
self.netInput = torch.nn.Conv2d(in_channels=3, out_channels=12, kernel_size=3, stride=1, padding=1, bias=False) + self.netError = torch.nn.Conv2d(in_channels=1, out_channels=4, kernel_size=3, stride=1, padding=1, bias=False) + + for intRow, intFeatures in [(0, 16), (1, 32), (2, 64), (3, 96)]: + self.add_module(str(intRow) + 'x0' + ' - ' + str(intRow) + 'x1', Basic('relu-conv-relu-conv', [intFeatures, intFeatures, intFeatures], True)) + # end + + for intCol in [0]: + self.add_module('0x' + str(intCol) + ' - ' + '1x' + str(intCol), Downsample([16, 32, 32])) + self.add_module('1x' + str(intCol) + ' - ' + '2x' + str(intCol), Downsample([32, 64, 64])) + self.add_module('2x' + str(intCol) + ' - ' + '3x' + str(intCol), Downsample([64, 96, 96])) + # end + + for intCol in [1]: + self.add_module('3x' + str(intCol) + ' - ' + '2x' + str(intCol), Upsample([96, 64, 64])) + self.add_module('2x' + str(intCol) + ' - ' + '1x' + str(intCol), Upsample([64, 32, 32])) + self.add_module('1x' + str(intCol) + ' - ' + '0x' + str(intCol), Upsample([32, 16, 16])) + # end + + self.netOutput = Basic('conv-relu-conv', [16, 16, 1], True) + # end + + def forward(self, tenEncone, tenEnctwo, tenFlow): + tenColumn = [None, None, None, None] + + tenColumn[0] = torch.cat([self.netInput(tenEncone[0][:, 0:3, :, :]), self.netError(torch.nn.functional.l1_loss(input=tenEncone[0], target=backwarp(tenEnctwo[0], tenFlow), reduction='none').mean([1], True))], 1) + tenColumn[1] = self._modules['0x0 - 1x0'](tenColumn[0]) + tenColumn[2] = self._modules['1x0 - 2x0'](tenColumn[1]) + tenColumn[3] = self._modules['2x0 - 3x0'](tenColumn[2]) + + intColumn = 1 + for intRow in range(len(tenColumn) -1, -1, -1): + tenColumn[intRow] = self._modules[str(intRow) + 'x' + str(intColumn - 1) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow]) + if intRow != len(tenColumn) - 1: + tenUp = self._modules[str(intRow + 1) + 'x' + str(intColumn) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow + 1]) + + if tenUp.shape[2] != tenColumn[intRow].shape[2]: tenUp = torch.nn.functional.pad(input=tenUp, pad=[0, 0, 0, -1], mode='constant', value=0.0) + if tenUp.shape[3] != tenColumn[intRow].shape[3]: tenUp = torch.nn.functional.pad(input=tenUp, pad=[0, -1, 0, 0], mode='constant', value=0.0) + + tenColumn[intRow] = tenColumn[intRow] + tenUp + # end + # end + + return self.netOutput(tenColumn[0]) + # end + # end + + class Warp(torch.nn.Module): + def __init__(self): + super().__init__() + + self.netOne = Basic('conv-relu-conv', [3 + 3 + 32 + 32 + 1 + 1, 32, 32], True) + self.netTwo = Basic('conv-relu-conv', [0 + 0 + 64 + 64 + 1 + 1, 64, 64], True) + self.netThr = Basic('conv-relu-conv', [0 + 0 + 96 + 96 + 1 + 1, 96, 96], True) + # end + + def forward(self, tenEncone, tenEnctwo, tenMetricone, tenMetrictwo, tenForward, tenBackward): + tenOutput = [] + + for intLevel in range(3): + if intLevel != 0: + tenMetricone = torch.nn.functional.interpolate(input=tenMetricone, size=(tenEncone[intLevel].shape[2], tenEncone[intLevel].shape[3]), mode='bilinear', align_corners=False) + tenMetrictwo = torch.nn.functional.interpolate(input=tenMetrictwo, size=(tenEnctwo[intLevel].shape[2], tenEnctwo[intLevel].shape[3]), mode='bilinear', align_corners=False) + + tenForward = torch.nn.functional.interpolate(input=tenForward, size=(tenEncone[intLevel].shape[2], tenEncone[intLevel].shape[3]), mode='bilinear', align_corners=False) * (float(tenEncone[intLevel].shape[3]) / float(tenForward.shape[3])) + tenBackward = torch.nn.functional.interpolate(input=tenBackward, 
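+                    # note (added comment): whenever a flow field is resized, its values
+                    # are also rescaled by the width ratio so that displacements stay in
+                    # pixel units of the new resolution.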
size=(tenEnctwo[intLevel].shape[2], tenEnctwo[intLevel].shape[3]), mode='bilinear', align_corners=False) * (float(tenEnctwo[intLevel].shape[3]) / float(tenBackward.shape[3])) + # end + + tenOutput.append([self.netOne, self.netTwo, self.netThr][intLevel](torch.cat([ + softsplat.softsplat(tenIn=torch.cat([tenEncone[intLevel], tenMetricone], 1), tenFlow=tenForward, tenMetric=tenMetricone.neg().clip(-20.0, 20.0), strMode='soft'), + softsplat.softsplat(tenIn=torch.cat([tenEnctwo[intLevel], tenMetrictwo], 1), tenFlow=tenBackward, tenMetric=tenMetrictwo.neg().clip(-20.0, 20.0), strMode='soft') + ], 1))) + # end + + return tenOutput + # end + # end + + self.netEncode = Encode() + + self.netSoftmetric = Softmetric() + + self.netWarp = Warp() + + for intRow, intFeatures in [(0, 32), (1, 64), (2, 96)]: + self.add_module(str(intRow) + 'x0' + ' - ' + str(intRow) + 'x1', Basic('relu-conv-relu-conv', [intFeatures, intFeatures, intFeatures], True)) + self.add_module(str(intRow) + 'x1' + ' - ' + str(intRow) + 'x2', Basic('relu-conv-relu-conv', [intFeatures, intFeatures, intFeatures], True)) + self.add_module(str(intRow) + 'x2' + ' - ' + str(intRow) + 'x3', Basic('relu-conv-relu-conv', [intFeatures, intFeatures, intFeatures], True)) + self.add_module(str(intRow) + 'x3' + ' - ' + str(intRow) + 'x4', Basic('relu-conv-relu-conv', [intFeatures, intFeatures, intFeatures], True)) + self.add_module(str(intRow) + 'x4' + ' - ' + str(intRow) + 'x5', Basic('relu-conv-relu-conv', [intFeatures, intFeatures, intFeatures], True)) + # end + + for intCol in [0, 1, 2]: + self.add_module('0x' + str(intCol) + ' - ' + '1x' + str(intCol), Downsample([32, 64, 64])) + self.add_module('1x' + str(intCol) + ' - ' + '2x' + str(intCol), Downsample([64, 96, 96])) + # end + + for intCol in [3, 4, 5]: + self.add_module('2x' + str(intCol) + ' - ' + '1x' + str(intCol), Upsample([96, 64, 64])) + self.add_module('1x' + str(intCol) + ' - ' + '0x' + str(intCol), Upsample([64, 32, 32])) + # end + + self.netOutput = Basic('conv-relu-conv', [32, 32, 3], True) + # end + + def forward(self, tenOne, tenTwo, tenForward, tenBackward, fltTime): + tenEncone = self.netEncode(tenOne) + tenEnctwo = self.netEncode(tenTwo) + + tenMetricone = self.netSoftmetric(tenEncone, tenEnctwo, tenForward) * 2.0 * fltTime + tenMetrictwo = self.netSoftmetric(tenEnctwo, tenEncone, tenBackward) * 2.0 * (1.0 - fltTime) + + tenForward = tenForward * fltTime + tenBackward = tenBackward * (1.0 - fltTime) + + tenWarp = self.netWarp(tenEncone, tenEnctwo, tenMetricone, tenMetrictwo, tenForward, tenBackward) + + tenColumn = [None, None, None] + + tenColumn[0] = tenWarp[0] + tenColumn[1] = tenWarp[1] + self._modules['0x0 - 1x0'](tenColumn[0]) + tenColumn[2] = tenWarp[2] + self._modules['1x0 - 2x0'](tenColumn[1]) + + intColumn = 1 + for intRow in range(len(tenColumn)): + tenColumn[intRow] = self._modules[str(intRow) + 'x' + str(intColumn - 1) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow]) + if intRow != 0: + tenColumn[intRow] = tenColumn[intRow] + self._modules[str(intRow - 1) + 'x' + str(intColumn) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow - 1]) + # end + # end + + intColumn = 2 + for intRow in range(len(tenColumn)): + tenColumn[intRow] = self._modules[str(intRow) + 'x' + str(intColumn - 1) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow]) + if intRow != 0: + tenColumn[intRow] = tenColumn[intRow] + self._modules[str(intRow - 1) + 'x' + str(intColumn) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow - 1]) + # end + # 
end + + intColumn = 3 + for intRow in range(len(tenColumn) -1, -1, -1): + tenColumn[intRow] = self._modules[str(intRow) + 'x' + str(intColumn - 1) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow]) + if intRow != len(tenColumn) - 1: + tenUp = self._modules[str(intRow + 1) + 'x' + str(intColumn) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow + 1]) + + if tenUp.shape[2] != tenColumn[intRow].shape[2]: tenUp = torch.nn.functional.pad(input=tenUp, pad=[0, 0, 0, -1], mode='constant', value=0.0) + if tenUp.shape[3] != tenColumn[intRow].shape[3]: tenUp = torch.nn.functional.pad(input=tenUp, pad=[0, -1, 0, 0], mode='constant', value=0.0) + + tenColumn[intRow] = tenColumn[intRow] + tenUp + # end + # end + + intColumn = 4 + for intRow in range(len(tenColumn) -1, -1, -1): + tenColumn[intRow] = self._modules[str(intRow) + 'x' + str(intColumn - 1) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow]) + if intRow != len(tenColumn) - 1: + tenUp = self._modules[str(intRow + 1) + 'x' + str(intColumn) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow + 1]) + + if tenUp.shape[2] != tenColumn[intRow].shape[2]: tenUp = torch.nn.functional.pad(input=tenUp, pad=[0, 0, 0, -1], mode='constant', value=0.0) + if tenUp.shape[3] != tenColumn[intRow].shape[3]: tenUp = torch.nn.functional.pad(input=tenUp, pad=[0, -1, 0, 0], mode='constant', value=0.0) + + tenColumn[intRow] = tenColumn[intRow] + tenUp + # end + # end + + intColumn = 5 + for intRow in range(len(tenColumn) -1, -1, -1): + tenColumn[intRow] = self._modules[str(intRow) + 'x' + str(intColumn - 1) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow]) + if intRow != len(tenColumn) - 1: + tenUp = self._modules[str(intRow + 1) + 'x' + str(intColumn) + ' - ' + str(intRow) + 'x' + str(intColumn)](tenColumn[intRow + 1]) + + if tenUp.shape[2] != tenColumn[intRow].shape[2]: tenUp = torch.nn.functional.pad(input=tenUp, pad=[0, 0, 0, -1], mode='constant', value=0.0) + if tenUp.shape[3] != tenColumn[intRow].shape[3]: tenUp = torch.nn.functional.pad(input=tenUp, pad=[0, -1, 0, 0], mode='constant', value=0.0) + + tenColumn[intRow] = tenColumn[intRow] + tenUp + # end + # end + + return self.netOutput(tenColumn[0]) + # end +# end + +########################################################## + +class Network(torch.nn.Module): + def __init__(self): + super().__init__() + + self.netFlow = Flow() + + self.netSynthesis = Synthesis() + + self.load_state_dict({strKey.replace('module', 'net'): tenWeight for strKey, tenWeight in torch.hub.load_state_dict_from_url(url='http://content.sniklaus.com/softsplat/network-' + arguments_strModel + '.pytorch', file_name='softsplat-' + arguments_strModel).items()}) + # end + + def forward(self, tenOne, tenTwo, fltTimes): + with torch.set_grad_enabled(False): + tenStats = [tenOne, tenTwo] + tenMean = sum([tenIn.mean([1, 2, 3], True) for tenIn in tenStats]) / len(tenStats) + tenStd = (sum([tenIn.std([1, 2, 3], False, True).square() + (tenMean - tenIn.mean([1, 2, 3], True)).square() for tenIn in tenStats]) / len(tenStats)).sqrt() + tenOne = ((tenOne - tenMean) / (tenStd + 0.0000001)).detach() + tenTwo = ((tenTwo - tenMean) / (tenStd + 0.0000001)).detach() + # end + + objFlow = self.netFlow(tenOne, tenTwo) + + tenImages = [self.netSynthesis(tenOne, tenTwo, objFlow['tenForward'], objFlow['tenBackward'], fltTime) for fltTime in fltTimes] + + return [(tenImage * tenStd) + tenMean for tenImage in tenImages] + # end +# end + +netNetwork = None + 
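+# note (added): minimal usage sketch for estimate() below, assuming a CUDA device
+# and two 3 x H x W float tensors in [0.0, 1.0]; it mirrors the commented-out
+# __main__ block at the end of this file, with to_tensor() as a hypothetical
+# stand-in for the PIL/numpy conversion used there:
+#
+#   tenOne = to_tensor(PIL.Image.open('./images/one.png'))
+#   tenTwo = to_tensor(PIL.Image.open('./images/two.png'))
+#   tenMiddle = estimate(tenOne, tenTwo, [0.5])[0]  # interpolated frame at t=0.5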
+########################################################## + +def estimate(tenOne, tenTwo, fltTimes): + global netNetwork + + if netNetwork is None: + netNetwork = Network().cuda().eval() + # end + + assert(tenOne.shape[1] == tenTwo.shape[1]) + assert(tenOne.shape[2] == tenTwo.shape[2]) + + intWidth = tenOne.shape[2] + intHeight = tenOne.shape[1] + + tenPreprocessedOne = tenOne.cuda().view(1, 3, intHeight, intWidth) + tenPreprocessedTwo = tenTwo.cuda().view(1, 3, intHeight, intWidth) + + intPadr = (2 - (intWidth % 2)) % 2 + intPadb = (2 - (intHeight % 2)) % 2 + + tenPreprocessedOne = torch.nn.functional.pad(input=tenPreprocessedOne, pad=[0, intPadr, 0, intPadb], mode='replicate') + tenPreprocessedTwo = torch.nn.functional.pad(input=tenPreprocessedTwo, pad=[0, intPadr, 0, intPadb], mode='replicate') + + return [tenImage[0, :, :intHeight, :intWidth].cpu() for tenImage in netNetwork(tenPreprocessedOne, tenPreprocessedTwo, fltTimes)] +# end +########################################################## +import logging + +logger = logging.getLogger(__name__) + +raft = None + +class Raft: + def __init__(self): + from torchvision.models.optical_flow import (Raft_Large_Weights, + raft_large) + + weights = Raft_Large_Weights.DEFAULT + self.device = "cuda" if torch.cuda.is_available() else "cpu" + model = raft_large(weights=weights, progress=False).to(self.device) + self.model = model.eval() + + def __call__(self,img1,img2): + with torch.no_grad(): + img1 = img1.to(self.device) + img2 = img2.to(self.device) + i1 = torch.vstack([img1,img2]) + i2 = torch.vstack([img2,img1]) + list_of_flows = self.model(i1, i2) + + predicted_flows = list_of_flows[-1] + return { 'tenForward' : predicted_flows[0].unsqueeze(dim=0) , 'tenBackward' : predicted_flows[1].unsqueeze(dim=0) } + +img_count = 0 +def debug_save_img(img, comment, inc=False): + return + global img_count + from torchvision.utils import save_image + + save_image(img, f"debug0/{img_count:04d}_{comment}.png") + + if inc: + img_count += 1 + + +class Network2(torch.nn.Module): + def __init__(self, model_file_path): + super().__init__() + + self.netFlow = Flow() + + self.netSynthesis = Synthesis() + + d = torch.load(model_file_path) + + d = {strKey.replace('module', 'net'): tenWeight for strKey, tenWeight in d.items()} + + self.load_state_dict(d) + # end + + def forward(self, tenOne, tenTwo, guideFrameList): + global raft + + do_composite = True + use_raft = True + + if use_raft: + if raft is None: + raft = Raft() + + + with torch.set_grad_enabled(False): + tenStats = [tenOne, tenTwo] + tenMean = sum([tenIn.mean([1, 2, 3], True) for tenIn in tenStats]) / len(tenStats) + tenStd = (sum([tenIn.std([1, 2, 3], False, True).square() + (tenMean - tenIn.mean([1, 2, 3], True)).square() for tenIn in tenStats]) / len(tenStats)).sqrt() + tenOne = ((tenOne - tenMean) / (tenStd + 0.0000001)).detach() + tenTwo = ((tenTwo - tenMean) / (tenStd + 0.0000001)).detach() + + gtenStats = guideFrameList + gtenMean = sum([tenIn.mean([1, 2, 3], True) for tenIn in gtenStats]) / len(gtenStats) + gtenStd = (sum([tenIn.std([1, 2, 3], False, True).square() + (gtenMean - tenIn.mean([1, 2, 3], True)).square() for tenIn in gtenStats]) / len(gtenStats)).sqrt() + guideFrameList = [((g - gtenMean) / (gtenStd + 0.0000001)).detach() for g in guideFrameList] + + # end + + tenImages =[] + l = len(guideFrameList) + i = 1 + g1 = guideFrameList.pop(0) + + if use_raft: + styleFlow = raft(tenOne, tenTwo) + else: + styleFlow = self.netFlow(tenOne, tenTwo) + + def composite1(fA, fB, nA, nB): + # 
1,2,768,512 + A = fA[:,0,:,:] + B = fA[:,1,:,:] + Z = nA + + UA = A / Z + UB = B / Z + + A2 = fB[:,0,:,:] + B2 = fB[:,1,:,:] + Z2 = nB + fB[:,0,:,:] = Z2 * UA + fB[:,1,:,:] = Z2 * UB + return fB + + def mask_dilate(ten, kernel_size=3): + ten = ten.to(torch.float32) + k=torch.ones(1, 1, kernel_size, kernel_size, dtype=torch.float32).cuda() + ten = torch.nn.functional.conv2d(ten, k, padding=(kernel_size//2, kernel_size// 2)) + result = torch.clamp(ten, 0, 1) + return result.to(torch.bool) + + def composite2(fA, fB, nA, nB): + Z = nA + Z2 = nB + + mean2 = torch.mean(Z2) + max2 = torch.max(Z2) + mask2 = (Z2 > (mean2+max2)/2) + debug_save_img(mask2.to(torch.float), "mask2_0") + mask2 = mask_dilate(mask2, 9) + debug_save_img(mask2.to(torch.float), "mask2_1") + mask2 = ~mask2 + + debug_save_img(mask2.to(torch.float), "mask2") + + mean1 = torch.mean(Z) + max1 = torch.max(Z) + mask1 = (Z > (mean1+max1)/2) + + debug_save_img(mask1.to(torch.float), "mask1") + + mask = mask1 & mask2 + mask = mask.squeeze() + + debug_save_img(mask.to(torch.float), "cmask", True) + + fB[:,:,mask] = fA[:,:,mask] + + return fB + + + def composite(fA, fB): + A = fA[:,0,:,:] + B = fA[:,1,:,:] + Z = (A*A + B*B)**0.5 + A2 = fB[:,0,:,:] + B2 = fB[:,1,:,:] + Z2 = (A2*A2 + B2*B2)**0.5 + + fB = composite1(fA, fB, Z, Z2) + fB = composite2(fA, fB, Z, Z2) + return fB + + for g2 in guideFrameList: + if use_raft: + objFlow = raft(g1, g2) + else: + objFlow = self.netFlow(g1, g2) + + + objFlow['tenForward'] = objFlow['tenForward'] * (l/i) + objFlow['tenBackward'] = objFlow['tenBackward'] * (l/i) + + if do_composite: + objFlow['tenForward'] = composite(objFlow['tenForward'], styleFlow['tenForward']) + objFlow['tenBackward'] = composite(objFlow['tenBackward'], styleFlow['tenBackward']) + + img = self.netSynthesis(tenOne, tenTwo, objFlow['tenForward'], objFlow['tenBackward'], i/l) + tenImages.append(img) + i += 1 + + return [(tenImage * tenStd) + tenMean for tenImage in tenImages] + + +# end + +netNetwork = None + +########################################################## + +def estimate2(img1: PIL.Image, img2:PIL.Image, guideFrames, model_file_path): + global netNetwork + + if netNetwork is None: + netNetwork = Network2(model_file_path).cuda().eval() + # end + + def forTensor(im): + return torch.FloatTensor(numpy.ascontiguousarray(numpy.array(im)[:, :, ::-1].transpose(2, 0, 1).astype(numpy.float32) * (1.0 / 255.0))) + + tenOne = forTensor(img1) + tenTwo = forTensor(img2) + + tenGuideFrames=[] + for g in guideFrames: + tenGuideFrames.append(forTensor(g)) + + assert(tenOne.shape[1] == tenTwo.shape[1]) + assert(tenOne.shape[2] == tenTwo.shape[2]) + + intWidth = tenOne.shape[2] + intHeight = tenOne.shape[1] + + tenPreprocessedOne = tenOne.cuda().view(1, 3, intHeight, intWidth) + tenPreprocessedTwo = tenTwo.cuda().view(1, 3, intHeight, intWidth) + tenGuideFrames = [ ten.cuda().view(1, 3, intHeight, intWidth) for ten in tenGuideFrames] + + intPadr = (2 - (intWidth % 2)) % 2 + intPadb = (2 - (intHeight % 2)) % 2 + + tenPreprocessedOne = torch.nn.functional.pad(input=tenPreprocessedOne, pad=[0, intPadr, 0, intPadb], mode='replicate') + tenPreprocessedTwo = torch.nn.functional.pad(input=tenPreprocessedTwo, pad=[0, intPadr, 0, intPadb], mode='replicate') + tenGuideFrames = [ torch.nn.functional.pad(input=ten, pad=[0, intPadr, 0, intPadb], mode='replicate') for ten in tenGuideFrames] + + result = [tenImage[0, :, :intHeight, :intWidth].cpu() for tenImage in netNetwork(tenPreprocessedOne, tenPreprocessedTwo, tenGuideFrames)] + result = [ 
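+    # note (added comment): undo the preprocessing: clip to [0, 1], transpose
+    # CHW -> HWC, reverse the channel order back to RGB, and rescale to uint8
+    # before handing the frames back as PIL images.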
PIL.Image.fromarray((r.clip(0.0, 1.0).numpy().transpose(1, 2, 0)[:, :, ::-1] * 255.0).astype(numpy.uint8)) for r in result] + + return result +# end + +########################################################## +''' +if __name__ == '__main__': + if arguments_strOut.split('.')[-1] in ['bmp', 'jpg', 'jpeg', 'png']: + tenOne = torch.FloatTensor(numpy.ascontiguousarray(numpy.array(PIL.Image.open(arguments_strOne))[:, :, ::-1].transpose(2, 0, 1).astype(numpy.float32) * (1.0 / 255.0))) + tenTwo = torch.FloatTensor(numpy.ascontiguousarray(numpy.array(PIL.Image.open(arguments_strTwo))[:, :, ::-1].transpose(2, 0, 1).astype(numpy.float32) * (1.0 / 255.0))) + + tenOutput = estimate(tenOne, tenTwo, [0.5])[0] + + PIL.Image.fromarray((tenOutput.clip(0.0, 1.0).numpy().transpose(1, 2, 0)[:, :, ::-1] * 255.0).astype(numpy.uint8)).save(arguments_strOut) + + elif arguments_strOut.split('.')[-1] in ['avi', 'mp4', 'webm', 'wmv']: + import moviepy + import moviepy.editor + import moviepy.video.io.ffmpeg_writer + + objVideoreader = moviepy.editor.VideoFileClip(filename=arguments_strVideo) + objVideoreader2 = moviepy.editor.VideoFileClip(filename=arguments_strVideo2) + + from moviepy.video.fx.resize import resize + objVideoreader2 = resize(objVideoreader2, (objVideoreader.w, objVideoreader.h)) + + intWidth = objVideoreader.w + intHeight = objVideoreader.h + + tenFrames = [None, None, None, None] + + with moviepy.video.io.ffmpeg_writer.FFMPEG_VideoWriter(filename=arguments_strOut, size=(intWidth, intHeight), fps=objVideoreader.fps) as objVideowriter: + for npyFrame in objVideoreader.iter_frames(): + tenFrames[3] = torch.FloatTensor(numpy.ascontiguousarray(npyFrame[:, :, ::-1].transpose(2, 0, 1).astype(numpy.float32) * (1.0 / 255.0))) + + if tenFrames[0] is not None: + tenFrames[1:3] = estimate(tenFrames[0], tenFrames[3], [0.333, 0.666]) + + objVideowriter.write_frame((tenFrames[0].clip(0.0, 1.0).numpy().transpose(1, 2, 0)[:, :, ::-1] * 255.0).astype(numpy.uint8)) + objVideowriter.write_frame((tenFrames[1].clip(0.0, 1.0).numpy().transpose(1, 2, 0)[:, :, ::-1] * 255.0).astype(numpy.uint8)) + objVideowriter.write_frame((tenFrames[2].clip(0.0, 1.0).numpy().transpose(1, 2, 0)[:, :, ::-1] * 255.0).astype(numpy.uint8)) +# objVideowriter.write_frame((tenFrames[3].clip(0.0, 1.0).numpy().transpose(1, 2, 0)[:, :, ::-1] * 255.0).astype(numpy.uint8)) + # end + + tenFrames[0] = torch.FloatTensor(numpy.ascontiguousarray(npyFrame[:, :, ::-1].transpose(2, 0, 1).astype(numpy.float32) * (1.0 / 255.0))) + # end + # end + + # end +# end +''' \ No newline at end of file diff --git a/src/animatediff/softmax_splatting/softsplat.py b/src/animatediff/softmax_splatting/softsplat.py new file mode 100644 index 0000000000000000000000000000000000000000..f35ccc21604479940c2c86580c287e73f3dc327d --- /dev/null +++ b/src/animatediff/softmax_splatting/softsplat.py @@ -0,0 +1,529 @@ +#!/usr/bin/env python + +import collections +import cupy +import os +import re +import torch +import typing + + +########################################################## + + +objCudacache = {} + + +def cuda_int32(intIn:int): + return cupy.int32(intIn) +# end + + +def cuda_float32(fltIn:float): + return cupy.float32(fltIn) +# end + + +def cuda_kernel(strFunction:str, strKernel:str, objVariables:typing.Dict): + if 'device' not in objCudacache: + objCudacache['device'] = torch.cuda.get_device_name() + # end + + strKey = strFunction + + for strVariable in objVariables: + objValue = objVariables[strVariable] + + strKey += strVariable + + if objValue is None: + continue + + 
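+        # note (added comment): the cache key concatenates the kernel name with each
+        # argument's value (for scalars) or dtype/shape/stride (for tensors), so every
+        # distinct call signature is specialized and compiled only once per device.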
elif type(objValue) == int: + strKey += str(objValue) + + elif type(objValue) == float: + strKey += str(objValue) + + elif type(objValue) == bool: + strKey += str(objValue) + + elif type(objValue) == str: + strKey += objValue + + elif type(objValue) == torch.Tensor: + strKey += str(objValue.dtype) + strKey += str(objValue.shape) + strKey += str(objValue.stride()) + + elif True: + print(strVariable, type(objValue)) + assert(False) + + # end + # end + + strKey += objCudacache['device'] + + if strKey not in objCudacache: + for strVariable in objVariables: + objValue = objVariables[strVariable] + + if objValue is None: + continue + + elif type(objValue) == int: + strKernel = strKernel.replace('{{' + strVariable + '}}', str(objValue)) + + elif type(objValue) == float: + strKernel = strKernel.replace('{{' + strVariable + '}}', str(objValue)) + + elif type(objValue) == bool: + strKernel = strKernel.replace('{{' + strVariable + '}}', str(objValue)) + + elif type(objValue) == str: + strKernel = strKernel.replace('{{' + strVariable + '}}', objValue) + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.uint8: + strKernel = strKernel.replace('{{type}}', 'unsigned char') + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.float16: + strKernel = strKernel.replace('{{type}}', 'half') + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.float32: + strKernel = strKernel.replace('{{type}}', 'float') + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.float64: + strKernel = strKernel.replace('{{type}}', 'double') + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.int32: + strKernel = strKernel.replace('{{type}}', 'int') + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.int64: + strKernel = strKernel.replace('{{type}}', 'long') + + elif type(objValue) == torch.Tensor: + print(strVariable, objValue.dtype) + assert(False) + + elif True: + print(strVariable, type(objValue)) + assert(False) + + # end + # end + + while True: + objMatch = re.search('(SIZE_)([0-4])(\()([^\)]*)(\))', strKernel) + + if objMatch is None: + break + # end + + intArg = int(objMatch.group(2)) + + strTensor = objMatch.group(4) + intSizes = objVariables[strTensor].size() + + strKernel = strKernel.replace(objMatch.group(), str(intSizes[intArg] if torch.is_tensor(intSizes[intArg]) == False else intSizes[intArg].item())) + # end + + while True: + objMatch = re.search('(OFFSET_)([0-4])(\()', strKernel) + + if objMatch is None: + break + # end + + intStart = objMatch.span()[1] + intStop = objMatch.span()[1] + intParentheses = 1 + + while True: + intParentheses += 1 if strKernel[intStop] == '(' else 0 + intParentheses -= 1 if strKernel[intStop] == ')' else 0 + + if intParentheses == 0: + break + # end + + intStop += 1 + # end + + intArgs = int(objMatch.group(2)) + strArgs = strKernel[intStart:intStop].split(',') + + assert(intArgs == len(strArgs) - 1) + + strTensor = strArgs[0] + intStrides = objVariables[strTensor].stride() + + strIndex = [] + + for intArg in range(intArgs): + strIndex.append('((' + strArgs[intArg + 1].replace('{', '(').replace('}', ')').strip() + ')*' + str(intStrides[intArg] if torch.is_tensor(intStrides[intArg]) == False else intStrides[intArg].item()) + ')') + # end + + strKernel = strKernel.replace('OFFSET_' + str(intArgs) + '(' + strKernel[intStart:intStop] + ')', '(' + str.join('+', strIndex) + ')') + # end + + while True: + objMatch = re.search('(VALUE_)([0-4])(\()', strKernel) + + if objMatch is None: + break + # end + + 
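+    # note (added comment): this loop expands VALUE_n(tensor, i0, ..., i{n-1}) macros
+    # into strided element accesses, e.g. VALUE_4(tenIn, intN, intC, intY, intX)
+    # becomes tenIn[(intN*s0)+(intC*s1)+(intY*s2)+(intX*s3)] with s* the strides.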
intStart = objMatch.span()[1] + intStop = objMatch.span()[1] + intParentheses = 1 + + while True: + intParentheses += 1 if strKernel[intStop] == '(' else 0 + intParentheses -= 1 if strKernel[intStop] == ')' else 0 + + if intParentheses == 0: + break + # end + + intStop += 1 + # end + + intArgs = int(objMatch.group(2)) + strArgs = strKernel[intStart:intStop].split(',') + + assert(intArgs == len(strArgs) - 1) + + strTensor = strArgs[0] + intStrides = objVariables[strTensor].stride() + + strIndex = [] + + for intArg in range(intArgs): + strIndex.append('((' + strArgs[intArg + 1].replace('{', '(').replace('}', ')').strip() + ')*' + str(intStrides[intArg] if torch.is_tensor(intStrides[intArg]) == False else intStrides[intArg].item()) + ')') + # end + + strKernel = strKernel.replace('VALUE_' + str(intArgs) + '(' + strKernel[intStart:intStop] + ')', strTensor + '[' + str.join('+', strIndex) + ']') + # end + + objCudacache[strKey] = { + 'strFunction': strFunction, + 'strKernel': strKernel + } + # end + + return strKey +# end + + +@cupy.memoize(for_each_device=True) +def cuda_launch(strKey:str): + if 'CUDA_HOME' not in os.environ: + os.environ['CUDA_HOME'] = cupy.cuda.get_cuda_path() + # end + + return cupy.cuda.compile_with_cache(objCudacache[strKey]['strKernel'], tuple(['-I ' + os.environ['CUDA_HOME'], '-I ' + os.environ['CUDA_HOME'] + '/include'])).get_function(objCudacache[strKey]['strFunction']) +# end + + +########################################################## + + +def softsplat(tenIn:torch.Tensor, tenFlow:torch.Tensor, tenMetric:torch.Tensor, strMode:str): + assert(strMode.split('-')[0] in ['sum', 'avg', 'linear', 'soft']) + + if strMode == 'sum': assert(tenMetric is None) + if strMode == 'avg': assert(tenMetric is None) + if strMode.split('-')[0] == 'linear': assert(tenMetric is not None) + if strMode.split('-')[0] == 'soft': assert(tenMetric is not None) + + if strMode == 'avg': + tenIn = torch.cat([tenIn, tenIn.new_ones([tenIn.shape[0], 1, tenIn.shape[2], tenIn.shape[3]])], 1) + + elif strMode.split('-')[0] == 'linear': + tenIn = torch.cat([tenIn * tenMetric, tenMetric], 1) + + elif strMode.split('-')[0] == 'soft': + tenIn = torch.cat([tenIn * tenMetric.exp(), tenMetric.exp()], 1) + + # end + + tenOut = softsplat_func.apply(tenIn, tenFlow) + + if strMode.split('-')[0] in ['avg', 'linear', 'soft']: + tenNormalize = tenOut[:, -1:, :, :] + + if len(strMode.split('-')) == 1: + tenNormalize = tenNormalize + 0.0000001 + + elif strMode.split('-')[1] == 'addeps': + tenNormalize = tenNormalize + 0.0000001 + + elif strMode.split('-')[1] == 'zeroeps': + tenNormalize[tenNormalize == 0.0] = 1.0 + + elif strMode.split('-')[1] == 'clipeps': + tenNormalize = tenNormalize.clip(0.0000001, None) + + # end + + tenOut = tenOut[:, :-1, :, :] / tenNormalize + # end + + return tenOut +# end + + +class softsplat_func(torch.autograd.Function): + @staticmethod + @torch.cuda.amp.custom_fwd(cast_inputs=torch.float32) + def forward(self, tenIn, tenFlow): + tenOut = tenIn.new_zeros([tenIn.shape[0], tenIn.shape[1], tenIn.shape[2], tenIn.shape[3]]) + + if tenIn.is_cuda == True: + cuda_launch(cuda_kernel('softsplat_out', ''' + extern "C" __global__ void __launch_bounds__(512) softsplat_out( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ tenFlow, + {{type}}* __restrict__ tenOut + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenOut) / SIZE_2(tenOut) / SIZE_1(tenOut) ) % 
SIZE_0(tenOut); + const int intC = ( intIndex / SIZE_3(tenOut) / SIZE_2(tenOut) ) % SIZE_1(tenOut); + const int intY = ( intIndex / SIZE_3(tenOut) ) % SIZE_2(tenOut); + const int intX = ( intIndex ) % SIZE_3(tenOut); + + assert(SIZE_1(tenFlow) == 2); + + {{type}} fltX = ({{type}}) (intX) + VALUE_4(tenFlow, intN, 0, intY, intX); + {{type}} fltY = ({{type}}) (intY) + VALUE_4(tenFlow, intN, 1, intY, intX); + + if (isfinite(fltX) == false) { return; } + if (isfinite(fltY) == false) { return; } + + {{type}} fltIn = VALUE_4(tenIn, intN, intC, intY, intX); + + int intNorthwestX = (int) (floor(fltX)); + int intNorthwestY = (int) (floor(fltY)); + int intNortheastX = intNorthwestX + 1; + int intNortheastY = intNorthwestY; + int intSouthwestX = intNorthwestX; + int intSouthwestY = intNorthwestY + 1; + int intSoutheastX = intNorthwestX + 1; + int intSoutheastY = intNorthwestY + 1; + + {{type}} fltNorthwest = (({{type}}) (intSoutheastX) - fltX) * (({{type}}) (intSoutheastY) - fltY); + {{type}} fltNortheast = (fltX - ({{type}}) (intSouthwestX)) * (({{type}}) (intSouthwestY) - fltY); + {{type}} fltSouthwest = (({{type}}) (intNortheastX) - fltX) * (fltY - ({{type}}) (intNortheastY)); + {{type}} fltSoutheast = (fltX - ({{type}}) (intNorthwestX)) * (fltY - ({{type}}) (intNorthwestY)); + + if ((intNorthwestX >= 0) && (intNorthwestX < SIZE_3(tenOut)) && (intNorthwestY >= 0) && (intNorthwestY < SIZE_2(tenOut))) { + atomicAdd(&tenOut[OFFSET_4(tenOut, intN, intC, intNorthwestY, intNorthwestX)], fltIn * fltNorthwest); + } + + if ((intNortheastX >= 0) && (intNortheastX < SIZE_3(tenOut)) && (intNortheastY >= 0) && (intNortheastY < SIZE_2(tenOut))) { + atomicAdd(&tenOut[OFFSET_4(tenOut, intN, intC, intNortheastY, intNortheastX)], fltIn * fltNortheast); + } + + if ((intSouthwestX >= 0) && (intSouthwestX < SIZE_3(tenOut)) && (intSouthwestY >= 0) && (intSouthwestY < SIZE_2(tenOut))) { + atomicAdd(&tenOut[OFFSET_4(tenOut, intN, intC, intSouthwestY, intSouthwestX)], fltIn * fltSouthwest); + } + + if ((intSoutheastX >= 0) && (intSoutheastX < SIZE_3(tenOut)) && (intSoutheastY >= 0) && (intSoutheastY < SIZE_2(tenOut))) { + atomicAdd(&tenOut[OFFSET_4(tenOut, intN, intC, intSoutheastY, intSoutheastX)], fltIn * fltSoutheast); + } + } } + ''', { + 'tenIn': tenIn, + 'tenFlow': tenFlow, + 'tenOut': tenOut + }))( + grid=tuple([int((tenOut.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[cuda_int32(tenOut.nelement()), tenIn.data_ptr(), tenFlow.data_ptr(), tenOut.data_ptr()], + stream=collections.namedtuple('Stream', 'ptr')(torch.cuda.current_stream().cuda_stream) + ) + + elif tenIn.is_cuda != True: + assert(False) + + # end + + self.save_for_backward(tenIn, tenFlow) + + return tenOut + # end + + @staticmethod + @torch.cuda.amp.custom_bwd + def backward(self, tenOutgrad): + tenIn, tenFlow = self.saved_tensors + + tenOutgrad = tenOutgrad.contiguous(); assert(tenOutgrad.is_cuda == True) + + tenIngrad = tenIn.new_zeros([tenIn.shape[0], tenIn.shape[1], tenIn.shape[2], tenIn.shape[3]]) if self.needs_input_grad[0] == True else None + tenFlowgrad = tenFlow.new_zeros([tenFlow.shape[0], tenFlow.shape[1], tenFlow.shape[2], tenFlow.shape[3]]) if self.needs_input_grad[1] == True else None + + if tenIngrad is not None: + cuda_launch(cuda_kernel('softsplat_ingrad', ''' + extern "C" __global__ void __launch_bounds__(512) softsplat_ingrad( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ tenFlow, + const {{type}}* __restrict__ tenOutgrad, + {{type}}* __restrict__ tenIngrad, + {{type}}* 
__restrict__ tenFlowgrad + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenIngrad) / SIZE_2(tenIngrad) / SIZE_1(tenIngrad) ) % SIZE_0(tenIngrad); + const int intC = ( intIndex / SIZE_3(tenIngrad) / SIZE_2(tenIngrad) ) % SIZE_1(tenIngrad); + const int intY = ( intIndex / SIZE_3(tenIngrad) ) % SIZE_2(tenIngrad); + const int intX = ( intIndex ) % SIZE_3(tenIngrad); + + assert(SIZE_1(tenFlow) == 2); + + {{type}} fltIngrad = 0.0f; + + {{type}} fltX = ({{type}}) (intX) + VALUE_4(tenFlow, intN, 0, intY, intX); + {{type}} fltY = ({{type}}) (intY) + VALUE_4(tenFlow, intN, 1, intY, intX); + + if (isfinite(fltX) == false) { return; } + if (isfinite(fltY) == false) { return; } + + int intNorthwestX = (int) (floor(fltX)); + int intNorthwestY = (int) (floor(fltY)); + int intNortheastX = intNorthwestX + 1; + int intNortheastY = intNorthwestY; + int intSouthwestX = intNorthwestX; + int intSouthwestY = intNorthwestY + 1; + int intSoutheastX = intNorthwestX + 1; + int intSoutheastY = intNorthwestY + 1; + + {{type}} fltNorthwest = (({{type}}) (intSoutheastX) - fltX) * (({{type}}) (intSoutheastY) - fltY); + {{type}} fltNortheast = (fltX - ({{type}}) (intSouthwestX)) * (({{type}}) (intSouthwestY) - fltY); + {{type}} fltSouthwest = (({{type}}) (intNortheastX) - fltX) * (fltY - ({{type}}) (intNortheastY)); + {{type}} fltSoutheast = (fltX - ({{type}}) (intNorthwestX)) * (fltY - ({{type}}) (intNorthwestY)); + + if ((intNorthwestX >= 0) && (intNorthwestX < SIZE_3(tenOutgrad)) && (intNorthwestY >= 0) && (intNorthwestY < SIZE_2(tenOutgrad))) { + fltIngrad += VALUE_4(tenOutgrad, intN, intC, intNorthwestY, intNorthwestX) * fltNorthwest; + } + + if ((intNortheastX >= 0) && (intNortheastX < SIZE_3(tenOutgrad)) && (intNortheastY >= 0) && (intNortheastY < SIZE_2(tenOutgrad))) { + fltIngrad += VALUE_4(tenOutgrad, intN, intC, intNortheastY, intNortheastX) * fltNortheast; + } + + if ((intSouthwestX >= 0) && (intSouthwestX < SIZE_3(tenOutgrad)) && (intSouthwestY >= 0) && (intSouthwestY < SIZE_2(tenOutgrad))) { + fltIngrad += VALUE_4(tenOutgrad, intN, intC, intSouthwestY, intSouthwestX) * fltSouthwest; + } + + if ((intSoutheastX >= 0) && (intSoutheastX < SIZE_3(tenOutgrad)) && (intSoutheastY >= 0) && (intSoutheastY < SIZE_2(tenOutgrad))) { + fltIngrad += VALUE_4(tenOutgrad, intN, intC, intSoutheastY, intSoutheastX) * fltSoutheast; + } + + tenIngrad[intIndex] = fltIngrad; + } } + ''', { + 'tenIn': tenIn, + 'tenFlow': tenFlow, + 'tenOutgrad': tenOutgrad, + 'tenIngrad': tenIngrad, + 'tenFlowgrad': tenFlowgrad + }))( + grid=tuple([int((tenIngrad.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[cuda_int32(tenIngrad.nelement()), tenIn.data_ptr(), tenFlow.data_ptr(), tenOutgrad.data_ptr(), tenIngrad.data_ptr(), None], + stream=collections.namedtuple('Stream', 'ptr')(torch.cuda.current_stream().cuda_stream) + ) + # end + + if tenFlowgrad is not None: + cuda_launch(cuda_kernel('softsplat_flowgrad', ''' + extern "C" __global__ void __launch_bounds__(512) softsplat_flowgrad( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ tenFlow, + const {{type}}* __restrict__ tenOutgrad, + {{type}}* __restrict__ tenIngrad, + {{type}}* __restrict__ tenFlowgrad + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenFlowgrad) / SIZE_2(tenFlowgrad) / SIZE_1(tenFlowgrad) ) % 
SIZE_0(tenFlowgrad); + const int intC = ( intIndex / SIZE_3(tenFlowgrad) / SIZE_2(tenFlowgrad) ) % SIZE_1(tenFlowgrad); + const int intY = ( intIndex / SIZE_3(tenFlowgrad) ) % SIZE_2(tenFlowgrad); + const int intX = ( intIndex ) % SIZE_3(tenFlowgrad); + + assert(SIZE_1(tenFlow) == 2); + + {{type}} fltFlowgrad = 0.0f; + + {{type}} fltX = ({{type}}) (intX) + VALUE_4(tenFlow, intN, 0, intY, intX); + {{type}} fltY = ({{type}}) (intY) + VALUE_4(tenFlow, intN, 1, intY, intX); + + if (isfinite(fltX) == false) { return; } + if (isfinite(fltY) == false) { return; } + + int intNorthwestX = (int) (floor(fltX)); + int intNorthwestY = (int) (floor(fltY)); + int intNortheastX = intNorthwestX + 1; + int intNortheastY = intNorthwestY; + int intSouthwestX = intNorthwestX; + int intSouthwestY = intNorthwestY + 1; + int intSoutheastX = intNorthwestX + 1; + int intSoutheastY = intNorthwestY + 1; + + {{type}} fltNorthwest = 0.0f; + {{type}} fltNortheast = 0.0f; + {{type}} fltSouthwest = 0.0f; + {{type}} fltSoutheast = 0.0f; + + if (intC == 0) { + fltNorthwest = (({{type}}) (-1.0f)) * (({{type}}) (intSoutheastY) - fltY); + fltNortheast = (({{type}}) (+1.0f)) * (({{type}}) (intSouthwestY) - fltY); + fltSouthwest = (({{type}}) (-1.0f)) * (fltY - ({{type}}) (intNortheastY)); + fltSoutheast = (({{type}}) (+1.0f)) * (fltY - ({{type}}) (intNorthwestY)); + + } else if (intC == 1) { + fltNorthwest = (({{type}}) (intSoutheastX) - fltX) * (({{type}}) (-1.0f)); + fltNortheast = (fltX - ({{type}}) (intSouthwestX)) * (({{type}}) (-1.0f)); + fltSouthwest = (({{type}}) (intNortheastX) - fltX) * (({{type}}) (+1.0f)); + fltSoutheast = (fltX - ({{type}}) (intNorthwestX)) * (({{type}}) (+1.0f)); + + } + + for (int intChannel = 0; intChannel < SIZE_1(tenOutgrad); intChannel += 1) { + {{type}} fltIn = VALUE_4(tenIn, intN, intChannel, intY, intX); + + if ((intNorthwestX >= 0) && (intNorthwestX < SIZE_3(tenOutgrad)) && (intNorthwestY >= 0) && (intNorthwestY < SIZE_2(tenOutgrad))) { + fltFlowgrad += VALUE_4(tenOutgrad, intN, intChannel, intNorthwestY, intNorthwestX) * fltIn * fltNorthwest; + } + + if ((intNortheastX >= 0) && (intNortheastX < SIZE_3(tenOutgrad)) && (intNortheastY >= 0) && (intNortheastY < SIZE_2(tenOutgrad))) { + fltFlowgrad += VALUE_4(tenOutgrad, intN, intChannel, intNortheastY, intNortheastX) * fltIn * fltNortheast; + } + + if ((intSouthwestX >= 0) && (intSouthwestX < SIZE_3(tenOutgrad)) && (intSouthwestY >= 0) && (intSouthwestY < SIZE_2(tenOutgrad))) { + fltFlowgrad += VALUE_4(tenOutgrad, intN, intChannel, intSouthwestY, intSouthwestX) * fltIn * fltSouthwest; + } + + if ((intSoutheastX >= 0) && (intSoutheastX < SIZE_3(tenOutgrad)) && (intSoutheastY >= 0) && (intSoutheastY < SIZE_2(tenOutgrad))) { + fltFlowgrad += VALUE_4(tenOutgrad, intN, intChannel, intSoutheastY, intSoutheastX) * fltIn * fltSoutheast; + } + } + + tenFlowgrad[intIndex] = fltFlowgrad; + } } + ''', { + 'tenIn': tenIn, + 'tenFlow': tenFlow, + 'tenOutgrad': tenOutgrad, + 'tenIngrad': tenIngrad, + 'tenFlowgrad': tenFlowgrad + }))( + grid=tuple([int((tenFlowgrad.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[cuda_int32(tenFlowgrad.nelement()), tenIn.data_ptr(), tenFlow.data_ptr(), tenOutgrad.data_ptr(), None, tenFlowgrad.data_ptr()], + stream=collections.namedtuple('Stream', 'ptr')(torch.cuda.current_stream().cuda_stream) + ) + # end + + return tenIngrad, tenFlowgrad + # end +# end diff --git a/src/animatediff/stylize.py b/src/animatediff/stylize.py new file mode 100644 index 
0000000000000000000000000000000000000000..edd6879be55911e509efa0db97295dfa664377c6 --- /dev/null +++ b/src/animatediff/stylize.py @@ -0,0 +1,1716 @@ +import glob +import json +import logging +import os.path +import shutil +from datetime import datetime +from pathlib import Path +from typing import Annotated, Optional + +import torch +import typer +from PIL import Image +from tqdm.rich import tqdm + +from animatediff import __version__, get_dir +from animatediff.settings import ModelConfig, get_model_config +from animatediff.utils.tagger import get_labels +from animatediff.utils.util import (extract_frames, get_resized_image, + path_from_cwd, prepare_anime_seg, + prepare_groundingDINO, prepare_propainter, + prepare_sam_hq, prepare_softsplat) + +logger = logging.getLogger(__name__) + + + +stylize: typer.Typer = typer.Typer( + name="stylize", + context_settings=dict(help_option_names=["-h", "--help"]), + rich_markup_mode="rich", + pretty_exceptions_show_locals=False, + help="stylize video", +) + +data_dir = get_dir("data") + +controlnet_dirs = [ + "controlnet_canny", + "controlnet_depth", + "controlnet_inpaint", + "controlnet_ip2p", + "controlnet_lineart", + "controlnet_lineart_anime", + "controlnet_mlsd", + "controlnet_normalbae", + "controlnet_openpose", + "controlnet_scribble", + "controlnet_seg", + "controlnet_shuffle", + "controlnet_softedge", + "controlnet_tile", + "qr_code_monster_v1", + "qr_code_monster_v2", + "controlnet_mediapipe_face", + "animatediff_controlnet", + ] + +def create_controlnet_dir(controlnet_root): + for c in controlnet_dirs: + c_dir = controlnet_root.joinpath(c) + c_dir.mkdir(parents=True, exist_ok=True) + +@stylize.command(no_args_is_help=True) +def create_config( + org_movie: Annotated[ + Path, + typer.Argument(path_type=Path, file_okay=True, dir_okay=False, exists=True, help="Path to movie file"), + ] = ..., + config_org: Annotated[ + Path, + typer.Option( + "--config-org", + "-c", + path_type=Path, + dir_okay=False, + exists=True, + help="Path to original config file", + ), + ] = Path("config/prompts/prompt_travel.json"), + ignore_list: Annotated[ + Path, + typer.Option( + "--ignore-list", + "-g", + path_type=Path, + dir_okay=False, + exists=True, + help="path to ignore token list file", + ), + ] = Path("config/prompts/ignore_tokens.txt"), + out_dir: Annotated[ + Optional[Path], + typer.Option( + "--out-dir", + "-o", + path_type=Path, + file_okay=False, + help="output directory", + ), + ] = Path("stylize/"), + fps: Annotated[ + int, + typer.Option( + "--fps", + "-f", + min=1, + max=120, + help="fps", + ), + ] = 8, + duration: Annotated[ + int, + typer.Option( + "--duration", + "-d", + min=-1, + max=3600, + help="Video duration in seconds. -1 means that the duration of the input video is used as is", + ), + ] = -1, + offset: Annotated[ + int, + typer.Option( + "--offset", + "-of", + min=0, + max=3600, + help="offset in seconds. '-d 30 -of 1200' means to use 1200-1230 seconds of the input video", + ), + ] = 0, + aspect_ratio: Annotated[ + float, + typer.Option( + "--aspect-ratio", + "-a", + min=-1, + max=5.0, + help="aspect ratio (width / height). (ex. 
512 / 512 = 1.0 , 512 / 768 = 0.6666 , 768 / 512 = 1.5) -1 means that the aspect ratio of the input video is used as is.", + ), + ] = -1, + size_of_short_edge: Annotated[ + int, + typer.Option( + "--short-edge", + "-sh", + min=100, + max=1024, + help="size of short edge", + ), + ] = 512, + predicte_interval: Annotated[ + int, + typer.Option( + "--predicte-interval", + "-p", + min=1, + max=120, + help="Interval of frames to be predicted", + ), + ] = 1, + general_threshold: Annotated[ + float, + typer.Option( + "--threshold", + "-th", + min=0.0, + max=1.0, + help="threshold for general token confidence", + ), + ] = 0.35, + character_threshold: Annotated[ + float, + typer.Option( + "--threshold2", + "-th2", + min=0.0, + max=1.0, + help="threshold for character token confidence", + ), + ] = 0.85, + without_confidence: Annotated[ + bool, + typer.Option( + "--no-confidence-format", + "-ncf", + is_flag=True, + help="confidence token format or not. ex. '(close-up:0.57), (monochrome:1.1)' -> 'close-up, monochrome'", + ), + ] = False, + is_no_danbooru_format: Annotated[ + bool, + typer.Option( + "--no-danbooru-format", + "-ndf", + is_flag=True, + help="danbooru token format or not. ex. 'bandaid_on_leg, short_hair' -> 'bandaid on leg, short hair'", + ), + ] = False, + is_img2img: Annotated[ + bool, + typer.Option( + "--img2img", + "-i2i", + is_flag=True, + help="img2img or not(txt2img).", + ), + ] = False, + low_vram: Annotated[ + bool, + typer.Option( + "--low_vram", + "-lo", + is_flag=True, + help="low vram mode", + ), + ] = False, + gradual_latent_hires_fix: Annotated[ + bool, + typer.Option( + "--gradual_latent_hires_fix", + "-gh", + is_flag=True, + help="gradual latent hires fix", + ), + ] = False, +): + """Create a config file for video stylization""" + is_danbooru_format = not is_no_danbooru_format + with_confidence = not without_confidence + logger.info(f"{org_movie=}") + logger.info(f"{config_org=}") + logger.info(f"{ignore_list=}") + logger.info(f"{out_dir=}") + logger.info(f"{fps=}") + logger.info(f"{duration=}") + logger.info(f"{offset=}") + logger.info(f"{aspect_ratio=}") + logger.info(f"{size_of_short_edge=}") + logger.info(f"{predicte_interval=}") + logger.info(f"{general_threshold=}") + logger.info(f"{character_threshold=}") + logger.info(f"{with_confidence=}") + logger.info(f"{is_danbooru_format=}") + logger.info(f"{is_img2img=}") + logger.info(f"{low_vram=}") + logger.info(f"{gradual_latent_hires_fix=}") + + model_config: ModelConfig = get_model_config(config_org) + + # get a timestamp for the output directory + time_str = datetime.now().strftime("%Y-%m-%dT%H-%M-%S") + # make the output directory + save_dir = out_dir.joinpath(f"{time_str}-{model_config.save_name}") + save_dir.mkdir(parents=True, exist_ok=True) + logger.info(f"Will save outputs to ./{path_from_cwd(save_dir)}") + + img2img_dir = save_dir.joinpath("00_img2img") + img2img_dir.mkdir(parents=True, exist_ok=True) + extract_frames(org_movie, fps, img2img_dir, aspect_ratio, duration, offset, size_of_short_edge, low_vram) + + controlnet_img_dir = save_dir.joinpath("00_controlnet_image") + + create_controlnet_dir(controlnet_img_dir) + + shutil.copytree(img2img_dir, controlnet_img_dir.joinpath("controlnet_openpose"), dirs_exist_ok=True) + + #shutil.copytree(img2img_dir, controlnet_img_dir.joinpath("controlnet_ip2p"), dirs_exist_ok=True) + + + black_list = [] + if ignore_list.is_file(): + with open(ignore_list) as f: + black_list = [s.strip() for s in f.readlines()] + + model_config.prompt_map = get_labels( + 
frame_dir=img2img_dir, + interval=predicte_interval, + general_threshold=general_threshold, + character_threshold=character_threshold, + ignore_tokens=black_list, + with_confidence=with_confidence, + is_danbooru_format=is_danbooru_format, + is_cpu = False, + ) + + + model_config.head_prompt = "" + model_config.tail_prompt = "" + model_config.controlnet_map["input_image_dir"] = os.path.relpath(controlnet_img_dir.absolute(), data_dir) + model_config.controlnet_map["is_loop"] = False + + model_config.lora_map={} + model_config.motion_lora_map={} + + model_config.controlnet_map["max_samples_on_vram"] = 0 + model_config.controlnet_map["max_models_on_vram"] = 0 + + + model_config.controlnet_map["controlnet_openpose"] = { + "enable": True, + "use_preprocessor":True, + "guess_mode":False, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[], + "control_region_list":[] + } + + + model_config.controlnet_map["controlnet_ip2p"] = { + "enable": True, + "use_preprocessor":True, + "guess_mode":False, + "controlnet_conditioning_scale": 0.5, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[], + "control_region_list":[] + } + + for m in model_config.controlnet_map: + if isinstance(model_config.controlnet_map[m] ,dict): + if "control_scale_list" in model_config.controlnet_map[m]: + model_config.controlnet_map[m]["control_scale_list"]=[] + + ip_adapter_dir = save_dir.joinpath("00_ipadapter") + ip_adapter_dir.mkdir(parents=True, exist_ok=True) + + model_config.ip_adapter_map = { + "enable": True, + "input_image_dir": os.path.relpath(ip_adapter_dir.absolute(), data_dir), + "prompt_fixed_ratio": 0.5, + "save_input_image": True, + "resized_to_square": False, + "scale": 0.5, + "is_full_face": False, + "is_plus_face": False, + "is_plus": True, + "is_light": False + } + + model_config.img2img_map = { + "enable": is_img2img, + "init_img_dir" : os.path.relpath(img2img_dir.absolute(), data_dir), + "save_init_image": True, + "denoising_strength" : 0.7 + } + + model_config.region_map = { + + } + + model_config.gradual_latent_hires_fix_map = { + "enable" : True, + "scale" : { + "0": 0.5, + "0.7": 1.0 + }, + "reverse_steps": 5, + "noise_add_count": 3 + } + + model_config.output = { + "format" : "mp4", + "fps" : fps, + "encode_param":{ + "crf": 10 + } + } + + img = Image.open( img2img_dir.joinpath("00000000.png") ) + W, H = img.size + + base_size = 768 if gradual_latent_hires_fix else 512 + + if W < H: + width = base_size + height = int(base_size * H/W) + else: + width = int(base_size * W/H) + height = base_size + + width = int(width//8*8) + height = int(height//8*8) + + length = len(glob.glob( os.path.join(img2img_dir, "[0-9]*.png"), recursive=False)) + + model_config.stylize_config={ + "original_video":{ + "path":org_movie, + "aspect_ratio":aspect_ratio, + "offset":offset, + }, + "create_mask": [ + "person" + ], + "composite": { + "fg_list": [ + { + "path" : " absolute path to frame dir ", + "mask_path" : " absolute path to mask dir (this is optional) ", + "mask_prompt" : "person" + }, + { + "path" : " absolute path to frame dir ", + "mask_path" : " absolute path to mask dir (this is optional) ", + "mask_prompt" : "cat" + }, + ], + "bg_frame_dir": "Absolute path to the BG frame directory", + "hint": "" + }, + "0":{ + "width": width, + "height": height, + "length": length, + "context": 16, + "overlap": 16//4, + "stride": 0, + }, + "1":{ + "steps": model_config.steps, + "guidance_scale": 
model_config.guidance_scale, + "width": int(width * 1.5 //8*8), + "height": int(height * 1.5 //8*8), + "length": length, + "context": 8, + "overlap": 8//4, + "stride": 0, + "controlnet_tile":{ + "enable": True, + "use_preprocessor":True, + "guess_mode":False, + "controlnet_conditioning_scale": 1.0, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[] + }, + "controlnet_ip2p": { + "enable": False, + "use_preprocessor":True, + "guess_mode":False, + "controlnet_conditioning_scale": 0.5, + "control_guidance_start": 0.0, + "control_guidance_end": 1.0, + "control_scale_list":[] + }, + "ip_adapter": False, + "reference": False, + "img2img": False, + "interpolation_multiplier": 1 + } + } + + if gradual_latent_hires_fix: + model_config.stylize_config.pop("1") + + + save_config_path = save_dir.joinpath("prompt.json") + save_config_path.write_text(model_config.json(indent=4), encoding="utf-8") + + logger.info(f"config = { save_config_path }") + logger.info(f"stylize_dir = { save_dir }") + + logger.info(f"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") + logger.info(f"Hint. Edit the config file before starting the generation") + logger.info(f"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") + logger.info(f"1. Change 'path' and 'motion_module' as needed") + logger.info(f"2. Enter the 'head_prompt' or 'tail_prompt' with your preferred prompt, quality prompt, lora trigger word, or any other prompt you wish to add.") + logger.info(f"3. Change 'n_prompt' as needed") + logger.info(f"4. Add the lora you need to 'lora_map'") + logger.info(f"5. If you do not like the default settings, edit 'ip_adapter_map' or 'controlnet_map'. \nIf you want to change the controlnet type, you need to replace the input image.") + logger.info(f"6. Change 'stylize_config' as needed. By default, it is generated twice: once for normal generation and once for upscaling.\nIf you don't need upscaling, delete the whole '1'.") + logger.info(f"7. Change 'output' as needed. Changing the 'fps' at this timing is not recommended as it will change the playback speed.\nIf you want to change the fps, specify it with the create-config option") + + +@stylize.command(no_args_is_help=True) +def generate( + stylize_dir: Annotated[ + Path, + typer.Argument(path_type=Path, file_okay=False, dir_okay=True, exists=True, help="Path to stylize dir"), + ] = ..., + length: Annotated[ + int, + typer.Option( + "--length", + "-L", + min=-1, + max=9999, + help="Number of frames to generate. 
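# --- Note: worked example (not part of the diff) ----------------------------------
# How create_config above picks the pass-0 resolution, for an assumed 720x1280
# portrait input; the short edge gets base_size and both edges snap down to
# multiples of 8:
W, H = 720, 1280
base_size = 512  # 768 when --gradual_latent_hires_fix is set
if W < H:
    width, height = base_size, int(base_size * H / W)
else:
    width, height = int(base_size * W / H), base_size
width, height = int(width // 8 * 8), int(height // 8 * 8)
print(width, height)  # 512 904
# ----------------------------------------------------------------------------------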
-1 means that the value in the config file is referenced.", + rich_help_panel="Generation", + ), + ] = -1, + frame_offset: Annotated[ + int, + typer.Option( + "--frame-offset", + "-FO", + min=0, + max=999999, + help="Frame offset at generation.", + rich_help_panel="Generation", + ), + ] = 0, +): + """Run video stylization""" + from animatediff.cli import generate + + time_str = datetime.now().strftime("%Y-%m-%dT%H-%M-%S") + + + config_org = stylize_dir.joinpath("prompt.json") + + model_config: ModelConfig = get_model_config(config_org) + + if length == -1: + length = model_config.stylize_config["0"]["length"] + + model_config.stylize_config["0"]["length"] = min(model_config.stylize_config["0"]["length"] - frame_offset, length) + if "1" in model_config.stylize_config: + model_config.stylize_config["1"]["length"] = min(model_config.stylize_config["1"]["length"] - frame_offset, length) + + if frame_offset > 0: + #controlnet + org_controlnet_img_dir = data_dir.joinpath( model_config.controlnet_map["input_image_dir"] ) + new_controlnet_img_dir = org_controlnet_img_dir.parent / "00_tmp_controlnet_image" + if new_controlnet_img_dir.is_dir(): + shutil.rmtree(new_controlnet_img_dir) + new_controlnet_img_dir.mkdir(parents=True, exist_ok=True) + + for c in controlnet_dirs: + src_dir = org_controlnet_img_dir.joinpath(c) + dst_dir = new_controlnet_img_dir.joinpath(c) + if src_dir.is_dir(): + dst_dir.mkdir(parents=True, exist_ok=True) + + frame_length = model_config.stylize_config["0"]["length"] + + src_imgs = sorted(glob.glob( os.path.join(src_dir, "[0-9]*.png"), recursive=False)) + for img in src_imgs: + n = int(Path(img).stem) + if n in range(frame_offset, frame_offset + frame_length): + dst_img_path = dst_dir.joinpath( f"{n-frame_offset:08d}.png" ) + shutil.copy(img, dst_img_path) + #img2img + org_img2img_img_dir = data_dir.joinpath( model_config.img2img_map["init_img_dir"] ) + new_img2img_img_dir = org_img2img_img_dir.parent / "00_tmp_init_img_dir" + if new_img2img_img_dir.is_dir(): + shutil.rmtree(new_img2img_img_dir) + new_img2img_img_dir.mkdir(parents=True, exist_ok=True) + + src_dir = org_img2img_img_dir + dst_dir = new_img2img_img_dir + if src_dir.is_dir(): + dst_dir.mkdir(parents=True, exist_ok=True) + + frame_length = model_config.stylize_config["0"]["length"] + + src_imgs = sorted(glob.glob( os.path.join(src_dir, "[0-9]*.png"), recursive=False)) + for img in src_imgs: + n = int(Path(img).stem) + if n in range(frame_offset, frame_offset + frame_length): + dst_img_path = dst_dir.joinpath( f"{n-frame_offset:08d}.png" ) + shutil.copy(img, dst_img_path) + + new_prompt_map = {} + for p in model_config.prompt_map: + n = int(p) + if n in range(frame_offset, frame_offset + frame_length): + new_prompt_map[str(n-frame_offset)]=model_config.prompt_map[p] + + model_config.prompt_map = new_prompt_map + + model_config.controlnet_map["input_image_dir"] = os.path.relpath(new_controlnet_img_dir.absolute(), data_dir) + model_config.img2img_map["init_img_dir"] = os.path.relpath(new_img2img_img_dir.absolute(), data_dir) + + tmp_config_path = stylize_dir.joinpath("prompt_tmp.json") + tmp_config_path.write_text(model_config.json(indent=4), encoding="utf-8") + config_org = tmp_config_path + + + output_0_dir = generate( + config_path=config_org, + width=model_config.stylize_config["0"]["width"], + height=model_config.stylize_config["0"]["height"], + length=model_config.stylize_config["0"]["length"], + context=model_config.stylize_config["0"]["context"], + overlap=model_config.stylize_config["0"]["overlap"], + 
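# --- Note: worked example (not part of the diff) ----------------------------------
# The frame_offset branch above renumbers the kept window so it starts at frame 0.
# The same rule is applied to controlnet images, init images and prompt_map keys;
# illustrated here for prompt_map with invented values:
frame_offset, frame_length = 16, 48
keep = range(frame_offset, frame_offset + frame_length)
old_prompt_map = {"0": "sunrise", "16": "noon", "40": "dusk", "72": "night"}
new_prompt_map = {str(int(k) - frame_offset): v for k, v in old_prompt_map.items() if int(k) in keep}
print(new_prompt_map)  # {'0': 'noon', '24': 'dusk'} -- keys outside the window are dropped
# ----------------------------------------------------------------------------------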
stride=model_config.stylize_config["0"]["stride"], + out_dir=stylize_dir + ) + + torch.cuda.empty_cache() + + output_0_dir = output_0_dir.rename(output_0_dir.parent / f"{time_str}_{0:02d}") + + + if "1" not in model_config.stylize_config: + logger.info(f"Stylized results are output to {output_0_dir}") + return + + logger.info(f"Intermediate files have been output to {output_0_dir}") + + output_0_img_dir = glob.glob( os.path.join(output_0_dir, "00-[0-9]*"), recursive=False)[0] + + interpolation_multiplier = 1 + if "interpolation_multiplier" in model_config.stylize_config["1"]: + interpolation_multiplier = model_config.stylize_config["1"]["interpolation_multiplier"] + + if interpolation_multiplier > 1: + from animatediff.rife.rife import rife_interpolate + + rife_img_dir = stylize_dir.joinpath(f"{1:02d}_rife_frame") + if rife_img_dir.is_dir(): + shutil.rmtree(rife_img_dir) + rife_img_dir.mkdir(parents=True, exist_ok=True) + + rife_interpolate(output_0_img_dir, rife_img_dir, interpolation_multiplier) + model_config.stylize_config["1"]["length"] *= interpolation_multiplier + + if model_config.output: + model_config.output["fps"] *= interpolation_multiplier + if model_config.prompt_map: + model_config.prompt_map = { str(int(i)*interpolation_multiplier): model_config.prompt_map[i] for i in model_config.prompt_map } + + output_0_img_dir = rife_img_dir + + + controlnet_img_dir = stylize_dir.joinpath("01_controlnet_image") + img2img_dir = stylize_dir.joinpath("01_img2img") + img2img_dir.mkdir(parents=True, exist_ok=True) + + create_controlnet_dir(controlnet_img_dir) + + ip2p_for_upscale = model_config.stylize_config["1"]["controlnet_ip2p"]["enable"] + ip_adapter_for_upscale = model_config.stylize_config["1"]["ip_adapter"] + ref_for_upscale = model_config.stylize_config["1"]["reference"] + + shutil.copytree(output_0_img_dir, controlnet_img_dir.joinpath("controlnet_tile"), dirs_exist_ok=True) + if ip2p_for_upscale: + shutil.copytree(controlnet_img_dir.joinpath("controlnet_tile"), controlnet_img_dir.joinpath("controlnet_ip2p"), dirs_exist_ok=True) + + shutil.copytree(controlnet_img_dir.joinpath("controlnet_tile"), img2img_dir, dirs_exist_ok=True) + + model_config.controlnet_map["input_image_dir"] = os.path.relpath(controlnet_img_dir.absolute(), data_dir) + + model_config.controlnet_map["controlnet_tile"] = model_config.stylize_config["1"]["controlnet_tile"] + model_config.controlnet_map["controlnet_ip2p"] = model_config.stylize_config["1"]["controlnet_ip2p"] + + if "controlnet_ref" in model_config.controlnet_map: + model_config.controlnet_map["controlnet_ref"]["enable"] = ref_for_upscale + + model_config.ip_adapter_map["enable"] = ip_adapter_for_upscale + for r in model_config.region_map: + reg = model_config.region_map[r] + if "condition" in reg: + if "ip_adapter_map" in reg["condition"]: + reg["condition"]["ip_adapter_map"]["enable"] = ip_adapter_for_upscale + + model_config.steps = model_config.stylize_config["1"]["steps"] if "steps" in model_config.stylize_config["1"] else model_config.steps + model_config.guidance_scale = model_config.stylize_config["1"]["guidance_scale"] if "guidance_scale" in model_config.stylize_config["1"] else model_config.guidance_scale + + model_config.img2img_map["enable"] = model_config.stylize_config["1"]["img2img"] + + if model_config.img2img_map["enable"]: + model_config.img2img_map["init_img_dir"] = os.path.relpath(Path(output_0_img_dir).absolute(), data_dir) + + save_config_path = stylize_dir.joinpath("prompt_01.json") + 
save_config_path.write_text(model_config.json(indent=4), encoding="utf-8") + + output_1_dir = generate( + config_path=save_config_path, + width=model_config.stylize_config["1"]["width"], + height=model_config.stylize_config["1"]["height"], + length=model_config.stylize_config["1"]["length"], + context=model_config.stylize_config["1"]["context"], + overlap=model_config.stylize_config["1"]["overlap"], + stride=model_config.stylize_config["1"]["stride"], + out_dir=stylize_dir + ) + + output_1_dir = output_1_dir.rename(output_1_dir.parent / f"{time_str}_{1:02d}") + + logger.info(f"Stylized results are output to {output_1_dir}") + + + + +@stylize.command(no_args_is_help=True) +def interpolate( + frame_dir: Annotated[ + Path, + typer.Argument(path_type=Path, file_okay=False, dir_okay=True, exists=True, help="Path to frame dir"), + ] = ..., + interpolation_multiplier: Annotated[ + int, + typer.Option( + "--interpolation_multiplier", + "-m", + min=1, + max=10, + help="interpolation_multiplier", + ), + ] = 1, +): + """Interpolation with original frames. This function does not work well if the shape of the subject is changed from the original video. Large movements can also ruin the picture. (Since this command is experimental, it is better to use other interpolation methods in most cases.)""" + + try: + import cupy + except ImportError: + logger.info("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") + logger.info("cupy is required to run interpolate") + logger.info(f"Your CUDA version is {torch.version.cuda}") + logger.info("Please find the installation method of cupy for your CUDA version at the following URL") + logger.info("https://docs.cupy.dev/en/latest/install.html#installing-cupy-from-pypi") + logger.info("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") + return + + prepare_softsplat() + + time_str = datetime.now().strftime("%Y-%m-%dT%H-%M-%S") + + config_org = frame_dir.parent.joinpath("prompt.json") + + model_config: ModelConfig = get_model_config(config_org) + + if "original_video" in model_config.stylize_config: + org_video = Path(model_config.stylize_config["original_video"]["path"]) + offset = model_config.stylize_config["original_video"]["offset"] + aspect_ratio = model_config.stylize_config["original_video"]["aspect_ratio"] + else: + logger.warning('!!! The following parameters are required !!!') + logger.warning('"stylize_config": {') + logger.warning(' "original_video": {') + logger.warning(' "path": "C:\\my_movie\\test.mp4",') + logger.warning(' "aspect_ratio": 0.6666,') + logger.warning(' "offset": 0') + logger.warning(' },') + raise ValueError('model_config.stylize_config["original_video"] not found') + + + save_dir = frame_dir.parent.joinpath(f"optflow_{time_str}") + + org_frame_dir = save_dir.joinpath("org_frame") + org_frame_dir.mkdir(parents=True, exist_ok=True) + + stylize_frame = sorted(glob.glob( os.path.join(frame_dir, "[0-9]*.png"), recursive=False)) + stylize_frame_num = len(stylize_frame) + + duration = int(stylize_frame_num / model_config.output["fps"]) + 1 + + extract_frames(org_video, model_config.output["fps"] * interpolation_multiplier, org_frame_dir, aspect_ratio, duration, offset) + + W, H = Image.open(stylize_frame[0]).size + + org_frame = sorted(glob.glob( os.path.join(org_frame_dir, "[0-9]*.png"), recursive=False)) + + for org in tqdm(org_frame): + img = get_resized_image(org, W, H) + img.save(org) + + output_dir = save_dir.joinpath("warp_img") + output_dir.mkdir(parents=True, exist_ok=True) + + from animatediff.softmax_splatting.run import estimate2 + + for sty1, sty2 in tqdm(zip(stylize_frame, stylize_frame[1:]), total=len(stylize_frame[1:])): + sty1 = Path(sty1) + sty2 = Path(sty2) + + head = int(sty1.stem) + + sty1_img = Image.open(sty1) + sty2_img = Image.open(sty2) + + guide_frames = [org_frame_dir.joinpath(f"{g:08d}.png") for g in range(head*interpolation_multiplier, (head+1)*interpolation_multiplier)] + + guide_frames = [Image.open(g) for g in guide_frames] + + result = estimate2(sty1_img, sty2_img, guide_frames, "data/models/softsplat/softsplat-lf") + + shutil.copy( frame_dir.joinpath(f"{head:08d}.png"), output_dir.joinpath(f"{head*interpolation_multiplier:08d}.png")) + + offset = head*interpolation_multiplier + 1 + for i, r in enumerate(result): + r.save( output_dir.joinpath(f"{offset+i:08d}.png") ) + + + from animatediff.generate import save_output + + + frames = sorted(glob.glob( os.path.join(output_dir, "[0-9]*.png"), recursive=False)) + out_images = [] + for f in frames: + out_images.append(Image.open(f)) + + model_config.output["fps"] *= interpolation_multiplier + + out_file = save_dir.joinpath(f"01_{model_config.output['fps']}fps") + save_output(out_images, output_dir, out_file, model_config.output, True, save_frames=None, save_video=None) + + out_file = save_dir.joinpath("00_original") + save_output(out_images, org_frame_dir, out_file, model_config.output, True, save_frames=None, save_video=None) + + +@stylize.command(no_args_is_help=True) +def create_mask( + stylize_dir: Annotated[ + Path, + typer.Argument(path_type=Path, file_okay=False, dir_okay=True, exists=True, help="Path to stylize dir"), + ] = ..., + frame_dir: Annotated[ + Path, + typer.Option( + "--frame_dir", + "-f", + path_type=Path, + file_okay=False, + help="Path to source frames directory.
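# --- Note: worked example (not part of the diff) ----------------------------------
# Index bookkeeping in the warping loop above, for an assumed
# interpolation_multiplier m = 3 and the stylized frame pair (5, 6):
m, head = 3, 5
guides = [f"{g:08d}.png" for g in range(head * m, (head + 1) * m)]
print(guides)  # ['00000015.png', '00000016.png', '00000017.png']
# stylized frame 00000005.png is copied to output slot 15 and the warped results
# are written from slot 16 onward; frame 6 then starts the next block.
# ----------------------------------------------------------------------------------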
default is 'STYLIZE_DIR/00_img2img'", + ), + ] = None, + box_threshold: Annotated[ + float, + typer.Option( + "--box_threshold", + "-b", + min=0.0, + max=1.0, + help="box_threshold", + rich_help_panel="create mask", + ), + ] = 0.3, + text_threshold: Annotated[ + float, + typer.Option( + "--text_threshold", + "-t", + min=0.0, + max=1.0, + help="text_threshold", + rich_help_panel="create mask", + ), + ] = 0.25, + mask_padding: Annotated[ + int, + typer.Option( + "--mask_padding", + "-mp", + min=-100, + max=100, + help="padding pixel value", + rich_help_panel="create mask", + ), + ] = 0, + no_gb: Annotated[ + bool, + typer.Option( + "--no_gb", + "-ng", + is_flag=True, + help="no green back", + rich_help_panel="create mask", + ), + ] = False, + no_crop: Annotated[ + bool, + typer.Option( + "--no_crop", + "-nc", + is_flag=True, + help="no crop", + rich_help_panel="create mask", + ), + ] = False, + use_rembg: Annotated[ + bool, + typer.Option( + "--use_rembg", + "-rem", + is_flag=True, + help="use [rembg] instead of [Sam+GroundingDINO]", + rich_help_panel="create mask", + ), + ] = False, + use_animeseg: Annotated[ + bool, + typer.Option( + "--use_animeseg", + "-anim", + is_flag=True, + help="use [anime-segmentation] instead of [Sam+GroundingDINO]", + rich_help_panel="create mask", + ), + ] = False, + low_vram: Annotated[ + bool, + typer.Option( + "--low_vram", + "-lo", + is_flag=True, + help="low vram mode", + rich_help_panel="create mask/tag", + ), + ] = False, + ignore_list: Annotated[ + Path, + typer.Option( + "--ignore-list", + "-g", + path_type=Path, + dir_okay=False, + exists=True, + help="path to ignore token list file", + rich_help_panel="create tag", + ), + ] = Path("config/prompts/ignore_tokens.txt"), + predicte_interval: Annotated[ + int, + typer.Option( + "--predicte-interval", + "-p", + min=1, + max=120, + help="Interval of frames to be predicted", + rich_help_panel="create tag", + ), + ] = 1, + general_threshold: Annotated[ + float, + typer.Option( + "--threshold", + "-th", + min=0.0, + max=1.0, + help="threshold for general token confidence", + rich_help_panel="create tag", + ), + ] = 0.35, + character_threshold: Annotated[ + float, + typer.Option( + "--threshold2", + "-th2", + min=0.0, + max=1.0, + help="threshold for character token confidence", + rich_help_panel="create tag", + ), + ] = 0.85, + without_confidence: Annotated[ + bool, + typer.Option( + "--no-confidence-format", + "-ncf", + is_flag=True, + help="confidence token format or not. ex. '(close-up:0.57), (monochrome:1.1)' -> 'close-up, monochrome'", + rich_help_panel="create tag", + ), + ] = False, + is_no_danbooru_format: Annotated[ + bool, + typer.Option( + "--no-danbooru-format", + "-ndf", + is_flag=True, + help="danbooru token format or not. ex. 
'bandaid_on_leg, short_hair' -> 'bandaid on leg, short hair'", + rich_help_panel="create tag", + ), + ] = False, +): + """Create mask from prompt""" + from animatediff.utils.mask import (create_bg, create_fg, crop_frames, + crop_mask_list, save_crop_info) + from animatediff.utils.mask_animseg import animseg_create_fg + from animatediff.utils.mask_rembg import rembg_create_fg + + is_danbooru_format = not is_no_danbooru_format + with_confidence = not without_confidence + + if use_animeseg and use_rembg: + raise ValueError("use_animeseg and use_rembg cannot be enabled at the same time") + + prepare_sam_hq(low_vram) + prepare_groundingDINO() + prepare_propainter() + + if use_animeseg: + prepare_anime_seg() + + time_str = datetime.now().strftime("%Y-%m-%dT%H-%M-%S") + + config_org = stylize_dir.joinpath("prompt.json") + + model_config: ModelConfig = get_model_config(config_org) + + if frame_dir is None: + frame_dir = stylize_dir / "00_img2img" + + if not frame_dir.is_dir(): + raise ValueError(f'{frame_dir=} does not exist.') + + is_img2img = model_config.img2img_map["enable"] if "enable" in model_config.img2img_map else False + + + create_mask_list = [] + if "create_mask" in model_config.stylize_config: + create_mask_list = model_config.stylize_config["create_mask"] + else: + raise ValueError('model_config.stylize_config["create_mask"] not found') + + output_list = [] + + stylize_frame = sorted(glob.glob( os.path.join(frame_dir, "[0-9]*.png"), recursive=False)) + frame_len = len(stylize_frame) + + W, H = Image.open(stylize_frame[0]).size + org_frame_size = (H,W) + + masked_area = [None for f in range(frame_len)] + + if use_rembg: + create_mask_list = ["rembg"] + elif use_animeseg: + create_mask_list = ["anime-segmentation"] + + + for i,mask_token in enumerate(create_mask_list): + fg_dir = stylize_dir.joinpath(f"fg_{i:02d}_{time_str}") + fg_dir.mkdir(parents=True, exist_ok=True) + + create_controlnet_dir( fg_dir / "00_controlnet_image" ) + + fg_masked_dir = fg_dir / "00_img2img" + fg_masked_dir.mkdir(parents=True, exist_ok=True) + + fg_mask_dir = fg_dir / "00_mask" + fg_mask_dir.mkdir(parents=True, exist_ok=True) + + if use_animeseg: + masked_area = animseg_create_fg( + frame_dir=frame_dir, + output_dir=fg_masked_dir, + output_mask_dir=fg_mask_dir, + masked_area_list=masked_area, + mask_padding=mask_padding, + bg_color=None if no_gb else (0,255,0), + ) + elif use_rembg: + masked_area = rembg_create_fg( + frame_dir=frame_dir, + output_dir=fg_masked_dir, + output_mask_dir=fg_mask_dir, + masked_area_list=masked_area, + mask_padding=mask_padding, + bg_color=None if no_gb else (0,255,0), + ) + else: + masked_area = create_fg( + mask_token=mask_token, + frame_dir=frame_dir, + output_dir=fg_masked_dir, + output_mask_dir=fg_mask_dir, + masked_area_list=masked_area, + box_threshold=box_threshold, + text_threshold=text_threshold, + mask_padding=mask_padding, + sam_checkpoint= "data/models/SAM/sam_hq_vit_h.pth" if not low_vram else "data/models/SAM/sam_hq_vit_b.pth", + bg_color=None if no_gb else (0,255,0), + ) + + if not no_crop: + frame_size_hw = (masked_area[0].shape[1],masked_area[0].shape[2]) + cropped_mask_list, mask_pos_list, crop_size_hw = crop_mask_list(masked_area) + + logger.info(f"crop fg_masked_dir") + crop_frames(mask_pos_list, crop_size_hw, fg_masked_dir) + logger.info(f"crop fg_mask_dir") + crop_frames(mask_pos_list, crop_size_hw, fg_mask_dir) + save_crop_info(mask_pos_list, crop_size_hw, frame_size_hw, fg_dir / "crop_info.json") + else: + crop_size_hw = None + + logger.info(f"mask from 
[{mask_token}] are output to {fg_dir}") + + shutil.copytree(fg_masked_dir, fg_dir / "00_controlnet_image/controlnet_openpose", dirs_exist_ok=True) + + #shutil.copytree(fg_masked_dir, fg_dir / "00_controlnet_image/controlnet_ip2p", dirs_exist_ok=True) + + if crop_size_hw: + if crop_size_hw[0] == 0 or crop_size_hw[1] == 0: + crop_size_hw = None + + output_list.append((fg_dir, crop_size_hw)) + + torch.cuda.empty_cache() + + bg_dir = stylize_dir.joinpath(f"bg_{time_str}") + bg_dir.mkdir(parents=True, exist_ok=True) + create_controlnet_dir( bg_dir / "00_controlnet_image" ) + bg_inpaint_dir = bg_dir / "00_img2img" + bg_inpaint_dir.mkdir(parents=True, exist_ok=True) + + + create_bg(frame_dir, bg_inpaint_dir, masked_area, + use_half = True, + raft_iter = 20, + subvideo_length=80 if not low_vram else 50, + neighbor_length=10 if not low_vram else 8, + ref_stride=10 if not low_vram else 8, + low_vram = low_vram, + ) + + logger.info(f"background are output to {bg_dir}") + + shutil.copytree(bg_inpaint_dir, bg_dir / "00_controlnet_image/controlnet_tile", dirs_exist_ok=True) + + shutil.copytree(bg_inpaint_dir, bg_dir / "00_controlnet_image/controlnet_ip2p", dirs_exist_ok=True) + + output_list.append((bg_dir,None)) + + torch.cuda.empty_cache() + + black_list = [] + if ignore_list.is_file(): + with open(ignore_list) as f: + black_list = [s.strip() for s in f.readlines()] + + for output, size in output_list: + + model_config.prompt_map = get_labels( + frame_dir= output / "00_img2img", + interval=predicte_interval, + general_threshold=general_threshold, + character_threshold=character_threshold, + ignore_tokens=black_list, + with_confidence=with_confidence, + is_danbooru_format=is_danbooru_format, + is_cpu = False, + ) + + model_config.controlnet_map["input_image_dir"] = os.path.relpath((output / "00_controlnet_image" ).absolute(), data_dir) + model_config.img2img_map["init_img_dir"] = os.path.relpath((output / "00_img2img" ).absolute(), data_dir) + + if size is not None: + h, w = size + height = 1024 * (h/(h+w)) + width = 1024 * (w/(h+w)) + height = int(height//8 * 8) + width = int(width//8 * 8) + + model_config.stylize_config["0"]["width"]=width + model_config.stylize_config["0"]["height"]=height + if "1" in model_config.stylize_config: + model_config.stylize_config["1"]["width"]=int(width * 1.25 //8*8) + model_config.stylize_config["1"]["height"]=int(height * 1.25 //8*8) + else: + height, width = org_frame_size + model_config.stylize_config["0"]["width"]=width + model_config.stylize_config["0"]["height"]=height + if "1" in model_config.stylize_config: + model_config.stylize_config["1"]["width"]=int(width * 1.25 //8*8) + model_config.stylize_config["1"]["height"]=int(height * 1.25 //8*8) + + + + save_config_path = output.joinpath("prompt.json") + save_config_path.write_text(model_config.json(indent=4), encoding="utf-8") + + + + +@stylize.command(no_args_is_help=True) +def composite( + stylize_dir: Annotated[ + Path, + typer.Argument(path_type=Path, file_okay=False, dir_okay=True, exists=True, help="Path to stylize dir"), + ] = ..., + box_threshold: Annotated[ + float, + typer.Option( + "--box_threshold", + "-b", + min=0.0, + max=1.0, + help="box_threshold", + rich_help_panel="create mask", + ), + ] = 0.3, + text_threshold: Annotated[ + float, + typer.Option( + "--text_threshold", + "-t", + min=0.0, + max=1.0, + help="text_threshold", + rich_help_panel="create mask", + ), + ] = 0.25, + mask_padding: Annotated[ + int, + typer.Option( + "--mask_padding", + "-mp", + min=-100, + max=100, + help="padding pixel 
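# --- Note: worked example (not part of the diff) ----------------------------------
# The crop-to-generation-size rule above gives the cropped foreground a shared
# 1024-pixel budget across both edges, snapped to multiples of 8 (assumed crop
# of h=300, w=500):
h, w = 300, 500
height = int(1024 * (h / (h + w)) // 8 * 8)  # 384
width = int(1024 * (w / (h + w)) // 8 * 8)   # 640
up_w = int(width * 1.25 // 8 * 8)            # 800, pass-1 upscale width
up_h = int(height * 1.25 // 8 * 8)           # 480, pass-1 upscale height
# ----------------------------------------------------------------------------------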
value", + rich_help_panel="create mask", + ), + ] = 0, + use_rembg: Annotated[ + bool, + typer.Option( + "--use_rembg", + "-rem", + is_flag=True, + help="use \[rembg] instead of \[Sam+GroundingDINO]", + rich_help_panel="create mask", + ), + ] = False, + use_animeseg: Annotated[ + bool, + typer.Option( + "--use_animeseg", + "-anim", + is_flag=True, + help="use \[anime-segmentation] instead of \[Sam+GroundingDINO]", + rich_help_panel="create mask", + ), + ] = False, + low_vram: Annotated[ + bool, + typer.Option( + "--low_vram", + "-lo", + is_flag=True, + help="low vram mode", + rich_help_panel="create mask/tag", + ), + ] = False, + is_simple_composite: Annotated[ + bool, + typer.Option( + "--simple_composite", + "-si", + is_flag=True, + help="simple composite", + rich_help_panel="composite", + ), + ] = False, +): + """composite FG and BG""" + + from animatediff.utils.composite import composite, simple_composite + from animatediff.utils.mask import (create_fg, load_frame_list, + load_mask_list, restore_position) + from animatediff.utils.mask_animseg import animseg_create_fg + from animatediff.utils.mask_rembg import rembg_create_fg + + if use_animeseg and use_rembg: + raise ValueError("use_animeseg and use_rembg cannot be enabled at the same time") + + prepare_sam_hq(low_vram) + if use_animeseg: + prepare_anime_seg() + + time_str = datetime.now().strftime("%Y-%m-%dT%H-%M-%S") + + config_org = stylize_dir.joinpath("prompt.json") + + model_config: ModelConfig = get_model_config(config_org) + + + composite_config = {} + if "composite" in model_config.stylize_config: + composite_config = model_config.stylize_config["composite"] + else: + raise ValueError('model_config.stylize_config["composite"] not found') + + save_dir = stylize_dir.joinpath(f"cp_{time_str}") + save_dir.mkdir(parents=True, exist_ok=True) + + save_config_path = save_dir.joinpath("prompt.json") + save_config_path.write_text(model_config.json(indent=4), encoding="utf-8") + + + bg_dir = composite_config["bg_frame_dir"] + bg_dir = Path(bg_dir) + if not bg_dir.is_dir(): + raise ValueError('model_config.stylize_config["composite"]["bg_frame_dir"] not valid') + + frame_len = len(sorted(glob.glob( os.path.join(bg_dir, "[0-9]*.png"), recursive=False))) + + fg_list = composite_config["fg_list"] + + for i, fg_param in enumerate(fg_list): + mask_token = fg_param["mask_prompt"] + frame_dir = Path(fg_param["path"]) + if not frame_dir.is_dir(): + logger.warn(f"{frame_dir=} not valid -> skip") + continue + + mask_dir = Path(fg_param["mask_path"]) + if not mask_dir.is_dir(): + logger.info(f"{mask_dir=} not valid -> create mask") + + fg_tmp_dir = save_dir.joinpath(f"fg_{i:02d}_{time_str}") + fg_tmp_dir.mkdir(parents=True, exist_ok=True) + + masked_area_list = [None for f in range(frame_len)] + + if use_animeseg: + mask_list = animseg_create_fg( + frame_dir=frame_dir, + output_dir=fg_tmp_dir, + output_mask_dir=None, + masked_area_list=masked_area_list, + mask_padding=mask_padding, + ) + elif use_rembg: + mask_list = rembg_create_fg( + frame_dir=frame_dir, + output_dir=fg_tmp_dir, + output_mask_dir=None, + masked_area_list=masked_area_list, + mask_padding=mask_padding, + ) + else: + mask_list = create_fg( + mask_token=mask_token, + frame_dir=frame_dir, + output_dir=fg_tmp_dir, + output_mask_dir=None, + masked_area_list=masked_area_list, + box_threshold=box_threshold, + text_threshold=text_threshold, + mask_padding=mask_padding, + sam_checkpoint= "data/models/SAM/sam_hq_vit_h.pth" if not low_vram else "data/models/SAM/sam_hq_vit_b.pth", + ) + + else: 
+ logger.info(f"use {mask_dir=} as mask") + + masked_area_list = [None for f in range(frame_len)] + + mask_list = load_mask_list(mask_dir, masked_area_list, mask_padding) + + mask_list = [ m.transpose([1,2,0]) if m is not None else m for m in mask_list] + + crop_info_path = frame_dir.parent.parent / "crop_info.json" + crop_info={} + if crop_info_path.is_file(): + with open(crop_info_path, mode="rt", encoding="utf-8") as f: + crop_info = json.load(f) + mask_list = restore_position(mask_list, crop_info) + + + fg_list = [None for f in range(frame_len)] + fg_list = load_frame_list(frame_dir, fg_list, crop_info) + + output_dir = save_dir.joinpath(f"bg_{i:02d}_{time_str}") + output_dir.mkdir(parents=True, exist_ok=True) + + if is_simple_composite: + simple_composite(bg_dir, fg_list, output_dir, mask_list) + else: + composite(bg_dir, fg_list, output_dir, mask_list) + + bg_dir = output_dir + + + from animatediff.generate import save_output + + frames = sorted(glob.glob( os.path.join(bg_dir, "[0-9]*.png"), recursive=False)) + out_images = [] + for f in frames: + out_images.append(Image.open(f)) + + out_file = save_dir.joinpath(f"composite") + save_output(out_images,bg_dir,out_file,model_config.output,True,save_frames=None,save_video=None) + + logger.info(f"output to {out_file}") + + + + +@stylize.command(no_args_is_help=True) +def create_region( + stylize_dir: Annotated[ + Path, + typer.Argument(path_type=Path, file_okay=False, dir_okay=True, exists=True, help="Path to stylize dir"), + ] = ..., + frame_dir: Annotated[ + Path, + typer.Option( + "--frame_dir", + "-f", + path_type=Path, + file_okay=False, + help="Path to source frames directory. default is 'STYLIZE_DIR/00_img2img'", + ), + ] = None, + box_threshold: Annotated[ + float, + typer.Option( + "--box_threshold", + "-b", + min=0.0, + max=1.0, + help="box_threshold", + rich_help_panel="create mask", + ), + ] = 0.3, + text_threshold: Annotated[ + float, + typer.Option( + "--text_threshold", + "-t", + min=0.0, + max=1.0, + help="text_threshold", + rich_help_panel="create mask", + ), + ] = 0.25, + mask_padding: Annotated[ + int, + typer.Option( + "--mask_padding", + "-mp", + min=-100, + max=100, + help="padding pixel value", + rich_help_panel="create mask", + ), + ] = 0, + use_rembg: Annotated[ + bool, + typer.Option( + "--use_rembg", + "-rem", + is_flag=True, + help="use [rembg] instead of [Sam+GroundingDINO]", + rich_help_panel="create mask", + ), + ] = False, + use_animeseg: Annotated[ + bool, + typer.Option( + "--use_animeseg", + "-anim", + is_flag=True, + help="use [anime-segmentation] instead of [Sam+GroundingDINO]", + rich_help_panel="create mask", + ), + ] = False, + low_vram: Annotated[ + bool, + typer.Option( + "--low_vram", + "-lo", + is_flag=True, + help="low vram mode", + rich_help_panel="create mask/tag", + ), + ] = False, + ignore_list: Annotated[ + Path, + typer.Option( + "--ignore-list", + "-g", + path_type=Path, + dir_okay=False, + exists=True, + help="path to ignore token list file", + rich_help_panel="create tag", + ), + ] = Path("config/prompts/ignore_tokens.txt"), + predicte_interval: Annotated[ + int, + typer.Option( + "--predicte-interval", + "-p", + min=1, + max=120, + help="Interval of frames to be predicted", + rich_help_panel="create tag", + ), + ] = 1, + general_threshold: Annotated[ + float, + typer.Option( + "--threshold", + "-th", + min=0.0, + max=1.0, + help="threshold for general token confidence", + rich_help_panel="create tag", + ), + ] = 0.35, + character_threshold: Annotated[ + float, + typer.Option( + 
"--threshold2", + "-th2", + min=0.0, + max=1.0, + help="threshold for character token confidence", + rich_help_panel="create tag", + ), + ] = 0.85, + without_confidence: Annotated[ + bool, + typer.Option( + "--no-confidence-format", + "-ncf", + is_flag=True, + help="confidence token format or not. ex. '(close-up:0.57), (monochrome:1.1)' -> 'close-up, monochrome'", + rich_help_panel="create tag", + ), + ] = False, + is_no_danbooru_format: Annotated[ + bool, + typer.Option( + "--no-danbooru-format", + "-ndf", + is_flag=True, + help="danbooru token format or not. ex. 'bandaid_on_leg, short_hair' -> 'bandaid on leg, short hair'", + rich_help_panel="create tag", + ), + ] = False, +): + """Create region from prompt""" + from animatediff.utils.mask import create_bg, create_fg + from animatediff.utils.mask_animseg import animseg_create_fg + from animatediff.utils.mask_rembg import rembg_create_fg + + is_danbooru_format = not is_no_danbooru_format + with_confidence = not without_confidence + + if use_animeseg and use_rembg: + raise ValueError("use_animeseg and use_rembg cannot be enabled at the same time") + + prepare_sam_hq(low_vram) + prepare_groundingDINO() + prepare_propainter() + + if use_animeseg: + prepare_anime_seg() + + time_str = datetime.now().strftime("%Y-%m-%dT%H-%M-%S") + + config_org = stylize_dir.joinpath("prompt.json") + + model_config: ModelConfig = get_model_config(config_org) + + if frame_dir is None: + frame_dir = stylize_dir / "00_img2img" + + if not frame_dir.is_dir(): + raise ValueError(f'{frame_dir=} does not exist.') + + + create_mask_list = [] + if "create_mask" in model_config.stylize_config: + create_mask_list = model_config.stylize_config["create_mask"] + else: + raise ValueError('model_config.stylize_config["create_mask"] not found') + + output_list = [] + + stylize_frame = sorted(glob.glob( os.path.join(frame_dir, "[0-9]*.png"), recursive=False)) + frame_len = len(stylize_frame) + + masked_area = [None for f in range(frame_len)] + + if use_rembg: + create_mask_list = ["rembg"] + elif use_animeseg: + create_mask_list = ["anime-segmentation"] + + + for i,mask_token in enumerate(create_mask_list): + fg_dir = stylize_dir.joinpath(f"r_fg_{i:02d}_{time_str}") + fg_dir.mkdir(parents=True, exist_ok=True) + + fg_masked_dir = fg_dir / "00_tmp_masked" + fg_masked_dir.mkdir(parents=True, exist_ok=True) + + fg_mask_dir = fg_dir / "00_mask" + fg_mask_dir.mkdir(parents=True, exist_ok=True) + + if use_animeseg: + masked_area = animseg_create_fg( + frame_dir=frame_dir, + output_dir=fg_masked_dir, + output_mask_dir=fg_mask_dir, + masked_area_list=masked_area, + mask_padding=mask_padding, + bg_color=(0,255,0), + ) + elif use_rembg: + masked_area = rembg_create_fg( + frame_dir=frame_dir, + output_dir=fg_masked_dir, + output_mask_dir=fg_mask_dir, + masked_area_list=masked_area, + mask_padding=mask_padding, + bg_color=(0,255,0), + ) + else: + masked_area = create_fg( + mask_token=mask_token, + frame_dir=frame_dir, + output_dir=fg_masked_dir, + output_mask_dir=fg_mask_dir, + masked_area_list=masked_area, + box_threshold=box_threshold, + text_threshold=text_threshold, + mask_padding=mask_padding, + sam_checkpoint= "data/models/SAM/sam_hq_vit_h.pth" if not low_vram else "data/models/SAM/sam_hq_vit_b.pth", + bg_color=(0,255,0), + ) + + logger.info(f"mask from [{mask_token}] are output to {fg_dir}") + + output_list.append((fg_dir, fg_masked_dir, fg_mask_dir)) + + torch.cuda.empty_cache() + + bg_dir = stylize_dir.joinpath(f"r_bg_{time_str}") + bg_dir.mkdir(parents=True, exist_ok=True) + + 
bg_inpaint_dir = bg_dir / "00_tmp_inpainted" + bg_inpaint_dir.mkdir(parents=True, exist_ok=True) + + + create_bg(frame_dir, bg_inpaint_dir, masked_area, + use_half = True, + raft_iter = 20, + subvideo_length=80 if not low_vram else 50, + neighbor_length=10 if not low_vram else 8, + ref_stride=10 if not low_vram else 8, + low_vram = low_vram, + ) + + logger.info(f"background are output to {bg_dir}") + + + output_list.append((bg_dir,bg_inpaint_dir,None)) + + torch.cuda.empty_cache() + + black_list = [] + if ignore_list.is_file(): + with open(ignore_list) as f: + black_list = [s.strip() for s in f.readlines()] + + black_list.append("simple_background") + black_list.append("green_background") + + region_map = {} + + for i, (output_root, masked_dir, mask_dir) in enumerate(output_list): + + prompt_map = get_labels( + frame_dir= masked_dir, + interval=predicte_interval, + general_threshold=general_threshold, + character_threshold=character_threshold, + ignore_tokens=black_list, + with_confidence=with_confidence, + is_danbooru_format=is_danbooru_format, + is_cpu = False, + ) + + if mask_dir: + + ipadapter_dir = output_root / "00_ipadapter" + ipadapter_dir.mkdir(parents=True, exist_ok=True) + + region_map[str(i)]={ + "enable": True, + "crop_generation_rate": 0.0, + "mask_dir" : os.path.relpath(mask_dir.absolute(), data_dir), + "save_mask": True, + "is_init_img" : False, + "condition" : { + "prompt_fixed_ratio": 0.5, + "head_prompt": "", + "prompt_map": prompt_map, + "tail_prompt": "", + "ip_adapter_map": { + "enable": True, + "input_image_dir": os.path.relpath(ipadapter_dir.absolute(), data_dir), + "prompt_fixed_ratio": 0.5, + "save_input_image": True, + "resized_to_square": False + } + } + } + else: + region_map["background"]={ + "is_init_img" : False, + "hint" : "background's condition refers to the one in root" + } + + model_config.prompt_map = prompt_map + + + model_config.region_map =region_map + + + config_org.write_text(model_config.json(indent=4), encoding="utf-8") + + diff --git a/src/animatediff/utils/__init__.py b/src/animatediff/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/animatediff/utils/civitai2config.py b/src/animatediff/utils/civitai2config.py new file mode 100644 index 0000000000000000000000000000000000000000..c1ee6a4a84a2068478c4654858856e2fa2339c08 --- /dev/null +++ b/src/animatediff/utils/civitai2config.py @@ -0,0 +1,122 @@ +import glob +import json +import logging +import os +import re +import shutil +from pathlib import Path + +from animatediff import get_dir + +logger = logging.getLogger(__name__) + +data_dir = get_dir("data") + +extra_loading_regex = r'(<[^>]+?>)' + +def generate_config_from_civitai_info( + lora_dir:Path, + config_org:Path, + out_dir:Path, + lora_weight:float, +): + lora_abs_dir = lora_dir.absolute() + config_org = config_org.absolute() + out_dir = out_dir.absolute() + + civitais = sorted(glob.glob( os.path.join(lora_abs_dir, "*.civitai.info"), recursive=False)) + + with open(config_org, "r") as cf: + org_config = json.load(cf) + + for civ in civitais: + + logger.info(f"convert {civ}") + + with open(civ, "r") as f: + # trim .civitai.info + name = os.path.splitext(os.path.splitext(os.path.basename(civ))[0])[0] + + output_path = out_dir.joinpath(name + ".json") + + if os.path.isfile(output_path): + logger.info("already converted -> skip") + continue + + if os.path.isfile( lora_abs_dir.joinpath(name + ".safetensors")): + lora_path = 
os.path.relpath(lora_abs_dir.joinpath(name + ".safetensors"), data_dir) + elif os.path.isfile( lora_abs_dir.joinpath(name + ".ckpt")): + lora_path = os.path.relpath(lora_abs_dir.joinpath(name + ".ckpt"), data_dir) + else: + logger.info("lora file not found -> skip") + continue + + info = json.load(f) + + if not info: + logger.info(f"empty civitai info -> skip") + continue + + if info["model"]["type"] not in ("LORA","lora"): + logger.info(f"unsupported type {info['model']['type']} -> skip") + continue + + new_config = org_config.copy() + + new_config["name"] = name + + new_prompt_map = {} + new_n_prompt = "" + new_seed = -1 + + + raw_prompt_map = {} + + i = 0 + for img_info in info["images"]: + if img_info["meta"]: + try: + raw_prompt = img_info["meta"]["prompt"] + except Exception as e: + logger.info("missing prompt") + continue + + raw_prompt_map[str(10000 + i*32)] = raw_prompt + + new_prompt_map[str(i*32)] = re.sub(extra_loading_regex, '', raw_prompt) + + if not new_n_prompt: + try: + new_n_prompt = img_info["meta"]["negativePrompt"] + except Exception as e: + new_n_prompt = "" + if new_seed == -1: + try: + new_seed = img_info["meta"]["seed"] + except Exception as e: + new_seed = -1 + + i += 1 + + if not new_prompt_map: + new_prompt_map[str(0)] = "" + + for k in raw_prompt_map: + # comment + new_prompt_map[k] = raw_prompt_map[k] + + new_config["prompt_map"] = new_prompt_map + new_config["n_prompt"] = [new_n_prompt] + new_config["seed"] = [new_seed] + + new_config["lora_map"] = {lora_path.replace(os.sep,'/'):lora_weight} + + with open( out_dir.joinpath(name + ".json"), 'w') as wf: + json.dump(new_config, wf, indent=4) + logger.info("converted!") + + preview = lora_abs_dir.joinpath(name + ".preview.png") + if preview.is_file(): + shutil.copy(preview, out_dir.joinpath(name + ".preview.png")) + + diff --git a/src/animatediff/utils/composite.py b/src/animatediff/utils/composite.py new file mode 100644 index 0000000000000000000000000000000000000000..191a8553dcc9e512ecd4e2e5e5740e6622634475 --- /dev/null +++ b/src/animatediff/utils/composite.py @@ -0,0 +1,202 @@ +import glob +import logging +import os +import shutil +from pathlib import Path + +import cv2 +import numpy as np +import torch +import torch.nn.functional as F +from PIL import Image +from tqdm.rich import tqdm + +logger = logging.getLogger(__name__) + + +#https://github.com/jinwonkim93/laplacian-pyramid-blend +#https://blog.shikoan.com/pytorch-laplacian-pyramid/ +class LaplacianPyramidBlender: + + device = None + + def get_gaussian_kernel(self): + kernel = np.array([ + [1, 4, 6, 4, 1], + [4, 16, 24, 16, 4], + [6, 24, 36, 24, 6], + [4, 16, 24, 16, 4], + [1, 4, 6, 4, 1]], np.float32) / 256.0 + gaussian_k = torch.as_tensor(kernel.reshape(1, 1, 5, 5),device=self.device) + return gaussian_k + + def pyramid_down(self, image): + with torch.no_grad(): + gaussian_k = self.get_gaussian_kernel() + multiband = [F.conv2d(image[:, i:i + 1,:,:], gaussian_k, padding=2, stride=2) for i in range(3)] + down_image = torch.cat(multiband, dim=1) + return down_image + + def pyramid_up(self, image, size = None): + with torch.no_grad(): + gaussian_k = self.get_gaussian_kernel() + if size is None: + upsample = F.interpolate(image, scale_factor=2) + else: + upsample = F.interpolate(image, size=size) + multiband = [F.conv2d(upsample[:, i:i + 1,:,:], gaussian_k, padding=2) for i in range(3)] + up_image = torch.cat(multiband, dim=1) + return up_image + + def gaussian_pyramid(self, original, n_pyramids): + x = original + # pyramid down + pyramids = [original] + 
for i in range(n_pyramids): + x = self.pyramid_down(x) + pyramids.append(x) + return pyramids + + def laplacian_pyramid(self, original, n_pyramids): + pyramids = self.gaussian_pyramid(original, n_pyramids) + + # pyramid up - diff + laplacian = [] + for i in range(len(pyramids) - 1): + diff = pyramids[i] - self.pyramid_up(pyramids[i + 1], pyramids[i].shape[2:]) + laplacian.append(diff) + + laplacian.append(pyramids[-1]) + return laplacian + + def laplacian_pyramid_blending_with_mask(self, src, target, mask, num_levels = 9): + # assume mask is float32 [0,1] + + # generate Gaussian pyramid for src,target and mask + + Gsrc = torch.as_tensor(np.expand_dims(src, axis=0), device=self.device) + Gtarget = torch.as_tensor(np.expand_dims(target, axis=0), device=self.device) + Gmask = torch.as_tensor(np.expand_dims(mask, axis=0), device=self.device) + + lpA = self.laplacian_pyramid(Gsrc,num_levels)[::-1] + lpB = self.laplacian_pyramid(Gtarget,num_levels)[::-1] + gpMr = self.gaussian_pyramid(Gmask,num_levels)[::-1] + + # Now blend images according to mask in each level + LS = [] + for idx, (la,lb,Gmask) in enumerate(zip(lpA,lpB,gpMr)): + lo = lb * (1.0 - Gmask) + if idx <= 2: + lo += lb * Gmask + else: + lo += la * Gmask + LS.append(lo) + + # now reconstruct + ls_ = LS.pop(0) + for lap in LS: + ls_ = self.pyramid_up(ls_, lap.shape[2:]) + lap + + result = ls_.squeeze(dim=0).to('cpu').detach().numpy().copy() + + return result + + def __call__(self, + src_image: np.ndarray, + target_image: np.ndarray, + mask_image: np.ndarray, + device + ): + + self.device = device + + num_levels = int(np.log2(src_image.shape[0])) + #normalize image to 0, 1 + mask_image = np.clip(mask_image, 0, 1).transpose([2, 0, 1]) + + src_image = src_image.transpose([2, 0, 1]).astype(np.float32) / 255.0 + target_image = target_image.transpose([2, 0, 1]).astype(np.float32) / 255.0 + composite_image = self.laplacian_pyramid_blending_with_mask(src_image, target_image, mask_image, num_levels) + composite_image = np.clip(composite_image*255, 0 , 255).astype(np.uint8) + composite_image=composite_image.transpose([1, 2, 0]) + return composite_image + + +def composite(bg_dir, fg_list, output_dir, masked_area_list, device="cuda"): + bg_list = sorted(glob.glob( os.path.join(bg_dir ,"[0-9]*.png"), recursive=False)) + + blender = LaplacianPyramidBlender() + + for bg, fg_array, mask in tqdm(zip(bg_list, fg_list, masked_area_list),total=len(bg_list), desc="compositing"): + name = Path(bg).name + save_path = output_dir / name + + if fg_array is None: + logger.info(f"composite fg_array is None -> skip") + shutil.copy(bg, save_path) + continue + + if mask is None: + logger.info(f"mask is None -> skip") + shutil.copy(bg, save_path) + continue + + bg = np.asarray(Image.open(bg)).copy() + fg = fg_array + mask = np.concatenate([mask, mask, mask], 2) + + h, w, _ = bg.shape + + fg = cv2.resize(fg, dsize=(w,h)) + mask = cv2.resize(mask, dsize=(w,h)) + + + mask = mask.astype(np.float32) +# mask = mask * 255 + mask = cv2.GaussianBlur(mask, (15, 15), 0) + mask = mask / 255 + + fg = fg * mask + bg * (1-mask) + + img = blender(fg, bg, mask,device) + + + img = Image.fromarray(img) + img.save(save_path) + +def simple_composite(bg_dir, fg_list, output_dir, masked_area_list, device="cuda"): + bg_list = sorted(glob.glob( os.path.join(bg_dir ,"[0-9]*.png"), recursive=False)) + + for bg, fg_array, mask in tqdm(zip(bg_list, fg_list, masked_area_list),total=len(bg_list), desc="compositing"): + name = Path(bg).name + save_path = output_dir / name + + if fg_array is None: + 
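# --- Note: usage sketch (not part of the diff) ------------------------------------
# Minimal standalone use of LaplacianPyramidBlender, as composite() calls it in
# this file; inputs here are synthetic (the real caller passes a stylized
# foreground frame, an inpainted background frame and a blurred float mask):
import numpy as np

blender = LaplacianPyramidBlender()
fg = np.zeros((256, 256, 3), dtype=np.uint8)      # "foreground" frame
bg = np.full((256, 256, 3), 255, dtype=np.uint8)  # "background" frame
mask = np.zeros((256, 256, 3), dtype=np.float32)
mask[64:192, 64:192] = 1.0                        # region taken from fg
out = blender(fg, bg, mask, device="cpu")         # HxWx3 uint8 result
# Note the level rule in laplacian_pyramid_blending_with_mask: the three coarsest
# bands come entirely from the target image, which keeps low-frequency colour stable.
# ----------------------------------------------------------------------------------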
logger.info(f"composite fg_array is None -> skip") + shutil.copy(bg, save_path) + continue + + if mask is None: + logger.info(f"mask is None -> skip") + shutil.copy(bg, save_path) + continue + + bg = np.asarray(Image.open(bg)).copy() + fg = fg_array + mask = np.concatenate([mask, mask, mask], 2) + + h, w, _ = bg.shape + + fg = cv2.resize(fg, dsize=(w,h)) + mask = cv2.resize(mask, dsize=(w,h)) + + + mask = mask.astype(np.float32) + mask = cv2.GaussianBlur(mask, (15, 15), 0) + mask = mask / 255 + + img = fg * mask + bg * (1-mask) + img = img.clip(0 , 255).astype(np.uint8) + + img = Image.fromarray(img) + img.save(save_path) \ No newline at end of file diff --git a/src/animatediff/utils/control_net_lllite.py b/src/animatediff/utils/control_net_lllite.py new file mode 100644 index 0000000000000000000000000000000000000000..1c83cd4d75b44a48ea8dfa7b20e7bf7fb95957cf --- /dev/null +++ b/src/animatediff/utils/control_net_lllite.py @@ -0,0 +1,526 @@ +# https://github.com/kohya-ss/sd-scripts/blob/main/networks/control_net_lllite.py + +import bisect +import os +from typing import Any, List, Mapping, Optional, Type + +import torch + +from animatediff.utils.util import show_bytes + +# input_blocksに適用するかどうか / if True, input_blocks are not applied +SKIP_INPUT_BLOCKS = False + +# output_blocksに適用するかどうか / if True, output_blocks are not applied +SKIP_OUTPUT_BLOCKS = True + +# conv2dに適用するかどうか / if True, conv2d are not applied +SKIP_CONV2D = False + +# transformer_blocksのみに適用するかどうか。Trueの場合、ResBlockには適用されない +# if True, only transformer_blocks are applied, and ResBlocks are not applied +TRANSFORMER_ONLY = True # if True, SKIP_CONV2D is ignored because conv2d is not used in transformer_blocks + +# Trueならattn1とattn2にのみ適用し、ffなどには適用しない / if True, apply only to attn1 and attn2, not to ff etc. +ATTN1_2_ONLY = True + +# Trueならattn1のQKV、attn2のQにのみ適用する、ATTN1_2_ONLY指定時のみ有効 / if True, apply only to attn1 QKV and attn2 Q, only valid when ATTN1_2_ONLY is specified +ATTN_QKV_ONLY = True + +# Trueならattn1やffなどにのみ適用し、attn2などには適用しない / if True, apply only to attn1 and ff, not to attn2 +# ATTN1_2_ONLYと同時にTrueにできない / cannot be True at the same time as ATTN1_2_ONLY +ATTN1_ETC_ONLY = False # True + +# transformer_blocksの最大インデックス。Noneなら全てのtransformer_blocksに適用 +# max index of transformer_blocks. if None, apply to all transformer_blocks +TRANSFORMER_MAX_BLOCK_INDEX = None + + +class LLLiteModule(torch.nn.Module): + def __init__(self, depth, cond_emb_dim, name, org_module, mlp_dim, dropout=None, multiplier=1.0): + super().__init__() + self.cond_cache ={} + + self.is_conv2d = org_module.__class__.__name__ == "Conv2d" or org_module.__class__.__name__ == "LoRACompatibleConv" + self.lllite_name = name + self.cond_emb_dim = cond_emb_dim + self.org_module = [org_module] + self.dropout = dropout + self.multiplier = multiplier + + if self.is_conv2d: + in_dim = org_module.in_channels + else: + in_dim = org_module.in_features + + # conditioning1はconditioning imageを embedding する。timestepごとに呼ばれない + # conditioning1 embeds conditioning image. 
it is not called for each timestep + modules = [] + modules.append(torch.nn.Conv2d(3, cond_emb_dim // 2, kernel_size=4, stride=4, padding=0)) # to latent (from VAE) size + if depth == 1: + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim, kernel_size=2, stride=2, padding=0)) + elif depth == 2: + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim, kernel_size=4, stride=4, padding=0)) + elif depth == 3: + # kernel size 8は大きすぎるので、4にする / kernel size 8 is too large, so set it to 4 + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim // 2, kernel_size=4, stride=4, padding=0)) + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim, kernel_size=2, stride=2, padding=0)) + + self.conditioning1 = torch.nn.Sequential(*modules) + + # downで入力の次元数を削減する。LoRAにヒントを得ていることにする + # midでconditioning image embeddingと入力を結合する + # upで元の次元数に戻す + # これらはtimestepごとに呼ばれる + # reduce the number of input dimensions with down. inspired by LoRA + # combine conditioning image embedding and input with mid + # restore to the original dimension with up + # these are called for each timestep + + if self.is_conv2d: + self.down = torch.nn.Sequential( + torch.nn.Conv2d(in_dim, mlp_dim, kernel_size=1, stride=1, padding=0), + torch.nn.ReLU(inplace=True), + ) + self.mid = torch.nn.Sequential( + torch.nn.Conv2d(mlp_dim + cond_emb_dim, mlp_dim, kernel_size=1, stride=1, padding=0), + torch.nn.ReLU(inplace=True), + ) + self.up = torch.nn.Sequential( + torch.nn.Conv2d(mlp_dim, in_dim, kernel_size=1, stride=1, padding=0), + ) + else: + # midの前にconditioningをreshapeすること / reshape conditioning before mid + self.down = torch.nn.Sequential( + torch.nn.Linear(in_dim, mlp_dim), + torch.nn.ReLU(inplace=True), + ) + self.mid = torch.nn.Sequential( + torch.nn.Linear(mlp_dim + cond_emb_dim, mlp_dim), + torch.nn.ReLU(inplace=True), + ) + self.up = torch.nn.Sequential( + torch.nn.Linear(mlp_dim, in_dim), + ) + + # Zero-Convにする / set to Zero-Conv + torch.nn.init.zeros_(self.up[0].weight) # zero conv + + self.depth = depth # 1~3 + self.cond_emb = None + self.batch_cond_only = False # Trueなら推論時のcondにのみ適用する / if True, apply only to cond at inference + self.use_zeros_for_batch_uncond = False # Trueならuncondのconditioningを0にする / if True, set uncond conditioning to 0 + + # batch_cond_onlyとuse_zeros_for_batch_uncondはどちらも適用すると生成画像の色味がおかしくなるので実際には使えそうにない + # Controlの種類によっては使えるかも + # both batch_cond_only and use_zeros_for_batch_uncond make the color of the generated image strange, so it doesn't seem to be usable in practice + # it may be available depending on the type of Control + + def _set_cond_image(self, cond_image): + r""" + 中でモデルを呼び出すので必要ならwith torch.no_grad()で囲む + / call the model inside, so if necessary, surround it with torch.no_grad() + """ + if cond_image is None: + self.cond_emb = None + return + + # timestepごとに呼ばれないので、あらかじめ計算しておく / it is not called for each timestep, so calculate it in advance + # print(f"C {self.lllite_name}, cond_image.shape={cond_image.shape}") + cx = self.conditioning1(cond_image) + if not self.is_conv2d: + # reshape / b,c,h,w -> b,h*w,c + n, c, h, w = cx.shape + cx = cx.view(n, c, h * w).permute(0, 2, 1) + self.cond_emb = cx + + def set_cond_image(self, cond_image, cond_key): + self.cond_image = cond_image + self.cond_key = cond_key + #self.cond_emb = None + self.cond_emb = 
self.get_cond_emb(self.cond_key, "cuda", torch.float16) + + def set_batch_cond_only(self, cond_only, zeros): + self.batch_cond_only = cond_only + self.use_zeros_for_batch_uncond = zeros + + def apply_to(self): + self.org_forward = self.org_module[0].forward + self.org_module[0].forward = self.forward + + def unapply_to(self): + self.org_module[0].forward = self.org_forward + self.cond_cache ={} + + def get_cond_emb(self, key, device, dtype): + #if key in self.cond_cache: + # return self.cond_cache[key].to(device, dtype=dtype, non_blocking=True) + cx = self.conditioning1(self.cond_image.to(device, dtype=dtype)) + if not self.is_conv2d: + # reshape / b,c,h,w -> b,h*w,c + n, c, h, w = cx.shape + cx = cx.view(n, c, h * w).permute(0, 2, 1) + #self.cond_cache[key] = cx.to("cpu", non_blocking=True) + return cx + + + def forward(self, x, scale=1.0): + r""" + 学習用の便利forward。元のモジュールのforwardを呼び出す + / convenient forward for training. call the forward of the original module + """ +# if self.multiplier == 0.0 or self.cond_emb is None: + if (type(self.multiplier) is int and self.multiplier == 0.0) or self.cond_emb is None: + return self.org_forward(x) + + if self.cond_emb is None: + # print(f"cond_emb is None, {self.name}") + ''' + cx = self.conditioning1(self.cond_image.to(x.device, dtype=x.dtype)) + if not self.is_conv2d: + # reshape / b,c,h,w -> b,h*w,c + n, c, h, w = cx.shape + cx = cx.view(n, c, h * w).permute(0, 2, 1) + #show_bytes("self.conditioning1", self.conditioning1) + #show_bytes("cx", cx) + ''' + self.cond_emb = self.get_cond_emb(self.cond_key, x.device, x.dtype) + + + cx = self.cond_emb + + if not self.batch_cond_only and x.shape[0] // 2 == cx.shape[0]: # inference only + cx = cx.repeat(2, 1, 1, 1) if self.is_conv2d else cx.repeat(2, 1, 1) + if self.use_zeros_for_batch_uncond: + cx[0::2] = 0.0 # uncond is zero + # print(f"C {self.lllite_name}, x.shape={x.shape}, cx.shape={cx.shape}") + + # downで入力の次元数を削減し、conditioning image embeddingと結合する + # 加算ではなくchannel方向に結合することで、うまいこと混ぜてくれることを期待している + # down reduces the number of input dimensions and combines it with conditioning image embedding + # we expect that it will mix well by combining in the channel direction instead of adding + + cx = torch.cat([cx, self.down(x if not self.batch_cond_only else x[1::2])], dim=1 if self.is_conv2d else 2) + cx = self.mid(cx) + + if self.dropout is not None and self.training: + cx = torch.nn.functional.dropout(cx, p=self.dropout) + + cx = self.up(cx) * self.multiplier + + #print(f"{self.multiplier=}") + #print(f"{cx.shape=}") + + #mul = torch.tensor(self.multiplier).to(x.device, dtype=x.dtype) + #cx = cx * mul[:,None,None] + + # residual (x) を加算して元のforwardを呼び出す / add residual (x) and call the original forward + if self.batch_cond_only: + zx = torch.zeros_like(x) + zx[1::2] += cx + cx = zx + + x = self.org_forward(x + cx) # ここで元のモジュールを呼び出す / call the original module here + return x + + + + +class ControlNetLLLite(torch.nn.Module): + UNET_TARGET_REPLACE_MODULE = ["Transformer2DModel"] + UNET_TARGET_REPLACE_MODULE_CONV2D_3X3 = ["ResnetBlock2D", "Downsample2D", "Upsample2D"] + + def __init__( + self, + unet, + cond_emb_dim: int = 16, + mlp_dim: int = 16, + dropout: Optional[float] = None, + varbose: Optional[bool] = False, + multiplier: Optional[float] = 1.0, + ) -> None: + super().__init__() + # self.unets = [unet] + + def create_modules( + root_module: torch.nn.Module, + target_replace_modules: List[torch.nn.Module], + module_class: Type[object], + ) -> List[torch.nn.Module]: + prefix = "lllite_unet" + + modules = 
[] + for name, module in root_module.named_modules(): + if module.__class__.__name__ in target_replace_modules: + for child_name, child_module in module.named_modules(): + is_linear = child_module.__class__.__name__ == "Linear" or child_module.__class__.__name__ == "LoRACompatibleLinear" + is_conv2d = child_module.__class__.__name__ == "Conv2d" or child_module.__class__.__name__ == "LoRACompatibleConv" + + if is_linear or (is_conv2d and not SKIP_CONV2D): + # block indexからdepthを計算: depthはconditioningのサイズやチャネルを計算するのに使う + # block index to depth: depth is using to calculate conditioning size and channels + #print(f"{name=} {child_name=}") + + #block_name, index1, index2 = (name + "." + child_name).split(".")[:3] + #index1 = int(index1) + block_name, num1, block_name2 ,num2 = (name + "." + child_name).split(".")[:4] + + #if block_name == "input_blocks": + """ + hf_down_res_prefix = f"down_blocks.{i}.resnets.{j}." + sd_down_res_prefix = f"input_blocks.{3*i + j + 1}.0." + + hf_downsample_prefix = f"down_blocks.{i}.downsamplers.0.conv." + sd_downsample_prefix = f"input_blocks.{3*(i+1)}.0.op." + """ + if block_name == "down_blocks" and block_name2=="downsamplers": + index1 = 3*(int(num1)+1) + if SKIP_INPUT_BLOCKS: + continue + depth = 1 if index1 <= 2 else (2 if index1 <= 5 else 3) + elif block_name == "down_blocks": + index1 = 3*int(num1)+int(num2)+1 + if SKIP_INPUT_BLOCKS: + continue + depth = 1 if index1 <= 2 else (2 if index1 <= 5 else 3) + + #elif block_name == "middle_block": + elif block_name == "mid_block": + depth = 3 + + #elif block_name == "output_blocks": + """ + hf_up_res_prefix = f"up_blocks.{i}.resnets.{j}." + sd_up_res_prefix = f"output_blocks.{3*i + j}.0." + + hf_upsample_prefix = f"up_blocks.{i}.upsamplers.0." + sd_upsample_prefix = f"output_blocks.{3*i + 2}.{2}." # change for sdxl + """ + elif block_name == "up_blocks" and block_name2=="upsamplers": + + index1 = 3*int(num1)+2 + if SKIP_OUTPUT_BLOCKS: + continue + depth = 3 if index1 <= 2 else (2 if index1 <= 5 else 1) + #if int(index2) >= 2: + if block_name2 == "upsamplers": + depth -= 1 + elif block_name == "up_blocks": + index1 = 3*int(num1)+int(num2) + if SKIP_OUTPUT_BLOCKS: + continue + depth = 3 if index1 <= 2 else (2 if index1 <= 5 else 1) + #if int(index2) >= 2: + if block_name2 == "upsamplers": + depth -= 1 + else: + raise NotImplementedError() + + lllite_name = prefix + "." + name + "." 
+ child_name + lllite_name = lllite_name.replace(".", "_") + + if TRANSFORMER_MAX_BLOCK_INDEX is not None: + p = lllite_name.find("transformer_blocks") + if p >= 0: + tf_index = int(lllite_name[p:].split("_")[2]) + if tf_index > TRANSFORMER_MAX_BLOCK_INDEX: + continue + + # time embは適用外とする + # attn2のconditioning (CLIPからの入力) はshapeが違うので適用できない + # time emb is not applied + # attn2 conditioning (input from CLIP) cannot be applied because the shape is different + ''' + if "emb_layers" in lllite_name or ( + "attn2" in lllite_name and ("to_k" in lllite_name or "to_v" in lllite_name) + ): + continue + ''' + #("emb_layers.1.", "time_emb_proj."), + if "time_emb_proj" in lllite_name or ( + "attn2" in lllite_name and ("to_k" in lllite_name or "to_v" in lllite_name) + ): + continue + + if ATTN1_2_ONLY: + if not ("attn1" in lllite_name or "attn2" in lllite_name): + continue + if ATTN_QKV_ONLY: + if "to_out" in lllite_name: + continue + + if ATTN1_ETC_ONLY: + if "proj_out" in lllite_name: + pass + elif "attn1" in lllite_name and ( + "to_k" in lllite_name or "to_v" in lllite_name or "to_out" in lllite_name + ): + pass + elif "ff_net_2" in lllite_name: + pass + else: + continue + + module = module_class( + depth, + cond_emb_dim, + lllite_name, + child_module, + mlp_dim, + dropout=dropout, + multiplier=multiplier, + ) + modules.append(module) + return modules + + target_modules = ControlNetLLLite.UNET_TARGET_REPLACE_MODULE + if not TRANSFORMER_ONLY: + target_modules = target_modules + ControlNetLLLite.UNET_TARGET_REPLACE_MODULE_CONV2D_3X3 + + # create module instances + self.unet_modules: List[LLLiteModule] = create_modules(unet, target_modules, LLLiteModule) + print(f"create ControlNet LLLite for U-Net: {len(self.unet_modules)} modules.") + + def forward(self, x): + return x # dummy + + def set_cond_image(self, cond_image, cond_key): + r""" + 中でモデルを呼び出すので必要ならwith torch.no_grad()で囲む + / call the model inside, so if necessary, surround it with torch.no_grad() + """ + for module in self.unet_modules: + module.set_cond_image(cond_image,cond_key) + + def set_batch_cond_only(self, cond_only, zeros): + for module in self.unet_modules: + module.set_batch_cond_only(cond_only, zeros) + + def set_multiplier(self, multiplier): + if isinstance(multiplier, list): + multiplier = torch.tensor(multiplier).to("cuda", dtype=torch.float16, non_blocking=True) + multiplier = multiplier[:,None,None] + + for module in self.unet_modules: + module.multiplier = multiplier + + def load_weights(self, file): + if os.path.splitext(file)[1] == ".safetensors": + from safetensors.torch import load_file + + weights_sd = load_file(file) + else: + weights_sd = torch.load(file, map_location="cpu") + + info = self.load_state_dict(weights_sd, False) + return info + + def apply_to(self): + print("applying LLLite for U-Net...") + for module in self.unet_modules: + module.apply_to() + self.add_module(module.lllite_name, module) + + def unapply_to(self): + for module in self.unet_modules: + module.unapply_to() + + # マージできるかどうかを返す + def is_mergeable(self): + return False + + def merge_to(self, text_encoder, unet, weights_sd, dtype, device): + raise NotImplementedError() + + def enable_gradient_checkpointing(self): + # not supported + pass + + def prepare_optimizer_params(self): + self.requires_grad_(True) + return self.parameters() + + def prepare_grad_etc(self): + self.requires_grad_(True) + + def on_epoch_start(self): + self.train() + + def get_trainable_params(self): + return self.parameters() + + def save_weights(self, file, dtype, metadata): + 
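+        # mirror kohya-ss behaviour: an empty metadata dict is treated as no metadata at all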
if metadata is not None and len(metadata) == 0: + metadata = None + + state_dict = self.state_dict() + + if dtype is not None: + for key in list(state_dict.keys()): + v = state_dict[key] + v = v.detach().clone().to("cpu").to(dtype) + state_dict[key] = v + + if os.path.splitext(file)[1] == ".safetensors": + from safetensors.torch import save_file + + save_file(state_dict, file, metadata) + else: + torch.save(state_dict, file) + + def load_state_dict(self, state_dict: Mapping[str, Any], strict: bool = True): + from animatediff.utils.lora_diffusers import UNET_CONVERSION_MAP + + # convert SDXL Stability AI's state dict to Diffusers' based state dict + map_keys = list(UNET_CONVERSION_MAP.keys()) # prefix of U-Net modules + map_keys.sort() + for key in list(state_dict.keys()): + if key.startswith("lllite_unet" + "_"): + search_key = key.replace("lllite_unet" + "_", "") + position = bisect.bisect_right(map_keys, search_key) + map_key = map_keys[position - 1] + if search_key.startswith(map_key): + new_key = key.replace(map_key, UNET_CONVERSION_MAP[map_key]) + state_dict[new_key] = state_dict[key] + del state_dict[key] + + # in case of V2, some weights have different shape, so we need to convert them + # because V2 LoRA is based on U-Net created by use_linear_projection=False + my_state_dict = self.state_dict() + for key in state_dict.keys(): + if state_dict[key].size() != my_state_dict[key].size(): + # print(f"convert {key} from {state_dict[key].size()} to {my_state_dict[key].size()}") + state_dict[key] = state_dict[key].view(my_state_dict[key].size()) + + return super().load_state_dict(state_dict, strict) + + +def load_controlnet_lllite(model_file, pipe, torch_dtype=torch.float16): + print(f"loading ControlNet-LLLite: {model_file}") + + from safetensors.torch import load_file + + state_dict = load_file(model_file) + mlp_dim = None + cond_emb_dim = None + for key, value in state_dict.items(): + if mlp_dim is None and "down.0.weight" in key: + mlp_dim = value.shape[0] + elif cond_emb_dim is None and "conditioning1.0" in key: + cond_emb_dim = value.shape[0] * 2 + if mlp_dim is not None and cond_emb_dim is not None: + break + assert mlp_dim is not None and cond_emb_dim is not None, f"invalid control net: {model_file}" + + control_net = ControlNetLLLite(pipe.unet, cond_emb_dim, mlp_dim, multiplier=1.0) + control_net.apply_to() + info = control_net.load_state_dict(state_dict, False) + print(info) + #control_net.to(dtype).to(device) + control_net.to(torch_dtype) + control_net.set_batch_cond_only(False, False) + return control_net diff --git a/src/animatediff/utils/convert_from_ckpt.py b/src/animatediff/utils/convert_from_ckpt.py new file mode 100644 index 0000000000000000000000000000000000000000..930e8fe084d6abdd051acc7e6ac907c412a589ff --- /dev/null +++ b/src/animatediff/utils/convert_from_ckpt.py @@ -0,0 +1,794 @@ +# coding=utf-8 +# Copyright 2023 The HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
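+
+# Rough usage sketch (an assumption, not part of the original script): `checkpoint` is a
+# state dict loaded with torch.load()/safetensors, and `original_config` an OmegaConf
+# object read from the model's .yaml config:
+#
+#   unet_config = create_unet_diffusers_config(original_config, image_size=512)
+#   unet_sd = convert_ldm_unet_checkpoint(checkpoint, unet_config)
+#   unet = UNet2DConditionModel(**unet_config)
+#   unet.load_state_dict(unet_sd)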
+""" Conversion script for the Stable Diffusion checkpoints.""" + +import re +from io import BytesIO +from typing import Optional + +import requests +import torch +from diffusers.models import (AutoencoderKL, ControlNetModel, PriorTransformer, + UNet2DConditionModel) +from diffusers.schedulers import DDIMScheduler +from diffusers.utils import (is_accelerate_available, is_omegaconf_available, + logging) +from transformers import CLIPTextConfig, CLIPTextModel + +if is_accelerate_available(): + from accelerate import init_empty_weights + from accelerate.utils import set_module_tensor_to_device + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +def is_safetensors_available(): + return True + +def shave_segments(path, n_shave_prefix_segments=1): + """ + Removes segments. Positive values shave the first segments, negative shave the last segments. + """ + if n_shave_prefix_segments >= 0: + return ".".join(path.split(".")[n_shave_prefix_segments:]) + else: + return ".".join(path.split(".")[:n_shave_prefix_segments]) + + +def renew_resnet_paths(old_list, n_shave_prefix_segments=0): + """ + Updates paths inside resnets to the new naming scheme (local renaming) + """ + mapping = [] + for old_item in old_list: + new_item = old_item.replace("in_layers.0", "norm1") + new_item = new_item.replace("in_layers.2", "conv1") + + new_item = new_item.replace("out_layers.0", "norm2") + new_item = new_item.replace("out_layers.3", "conv2") + + new_item = new_item.replace("emb_layers.1", "time_emb_proj") + new_item = new_item.replace("skip_connection", "conv_shortcut") + + new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments) + + mapping.append({"old": old_item, "new": new_item}) + + return mapping + + +def renew_vae_resnet_paths(old_list, n_shave_prefix_segments=0): + """ + Updates paths inside resnets to the new naming scheme (local renaming) + """ + mapping = [] + for old_item in old_list: + new_item = old_item + + new_item = new_item.replace("nin_shortcut", "conv_shortcut") + new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments) + + mapping.append({"old": old_item, "new": new_item}) + + return mapping + + +def renew_attention_paths(old_list, n_shave_prefix_segments=0): + """ + Updates paths inside attentions to the new naming scheme (local renaming) + """ + mapping = [] + for old_item in old_list: + new_item = old_item + + # new_item = new_item.replace('norm.weight', 'group_norm.weight') + # new_item = new_item.replace('norm.bias', 'group_norm.bias') + + # new_item = new_item.replace('proj_out.weight', 'proj_attn.weight') + # new_item = new_item.replace('proj_out.bias', 'proj_attn.bias') + + # new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments) + + mapping.append({"old": old_item, "new": new_item}) + + return mapping + + +def renew_vae_attention_paths(old_list, n_shave_prefix_segments=0): + """ + Updates paths inside attentions to the new naming scheme (local renaming) + """ + mapping = [] + for old_item in old_list: + new_item = old_item + + new_item = new_item.replace("norm.weight", "group_norm.weight") + new_item = new_item.replace("norm.bias", "group_norm.bias") + + new_item = new_item.replace("q.weight", "to_q.weight") + new_item = new_item.replace("q.bias", "to_q.bias") + + new_item = new_item.replace("k.weight", "to_k.weight") + new_item = new_item.replace("k.bias", "to_k.bias") + + new_item = new_item.replace("v.weight", "to_v.weight") + new_item = new_item.replace("v.bias", 
"to_v.bias") + + new_item = new_item.replace("proj_out.weight", "to_out.0.weight") + new_item = new_item.replace("proj_out.bias", "to_out.0.bias") + + new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments) + + mapping.append({"old": old_item, "new": new_item}) + + return mapping + + +def assign_to_checkpoint( + paths, + checkpoint, + old_checkpoint, + attention_paths_to_split=None, + additional_replacements=None, + config=None, +): + """ + This does the final conversion step: take locally converted weights and apply a global renaming to them. It splits + attention layers, and takes into account additional replacements that may arise. + + Assigns the weights to the new checkpoint. + """ + assert isinstance(paths, list), "Paths should be a list of dicts containing 'old' and 'new' keys." + + # Splits the attention layers into three variables. + if attention_paths_to_split is not None: + for path, path_map in attention_paths_to_split.items(): + old_tensor = old_checkpoint[path] + channels = old_tensor.shape[0] // 3 + + target_shape = (-1, channels) if len(old_tensor.shape) == 3 else (-1) + + num_heads = old_tensor.shape[0] // config["num_head_channels"] // 3 + + old_tensor = old_tensor.reshape((num_heads, 3 * channels // num_heads) + old_tensor.shape[1:]) + query, key, value = old_tensor.split(channels // num_heads, dim=1) + + checkpoint[path_map["query"]] = query.reshape(target_shape) + checkpoint[path_map["key"]] = key.reshape(target_shape) + checkpoint[path_map["value"]] = value.reshape(target_shape) + + for path in paths: + new_path = path["new"] + + # These have already been assigned + if attention_paths_to_split is not None and new_path in attention_paths_to_split: + continue + + # Global renaming happens here + new_path = new_path.replace("middle_block.0", "mid_block.resnets.0") + new_path = new_path.replace("middle_block.1", "mid_block.attentions.0") + new_path = new_path.replace("middle_block.2", "mid_block.resnets.1") + + if additional_replacements is not None: + for replacement in additional_replacements: + new_path = new_path.replace(replacement["old"], replacement["new"]) + + # proj_attn.weight has to be converted from conv 1D to linear + is_attn_weight = "proj_attn.weight" in new_path or ("attentions" in new_path and "to_" in new_path) + shape = old_checkpoint[path["old"]].shape + if is_attn_weight and len(shape) == 3: + checkpoint[new_path] = old_checkpoint[path["old"]][:, :, 0] + elif is_attn_weight and len(shape) == 4: + checkpoint[new_path] = old_checkpoint[path["old"]][:, :, 0, 0] + else: + checkpoint[new_path] = old_checkpoint[path["old"]] + + +def conv_attn_to_linear(checkpoint): + keys = list(checkpoint.keys()) + attn_keys = ["query.weight", "key.weight", "value.weight"] + for key in keys: + if ".".join(key.split(".")[-2:]) in attn_keys: + if checkpoint[key].ndim > 2: + checkpoint[key] = checkpoint[key][:, :, 0, 0] + elif "proj_attn.weight" in key: + if checkpoint[key].ndim > 2: + checkpoint[key] = checkpoint[key][:, :, 0] + + +def create_unet_diffusers_config(original_config, image_size: int, controlnet=False): + """ + Creates a config for the diffusers based on the config of the LDM model. 
+ """ + if controlnet: + unet_params = original_config.model.params.control_stage_config.params + else: + if ( + "unet_config" in original_config.model.params + and original_config.model.params.unet_config is not None + ): + unet_params = original_config.model.params.unet_config.params + else: + unet_params = original_config.model.params.network_config.params + + vae_params = original_config.model.params.first_stage_config.params.ddconfig + + block_out_channels = [unet_params.model_channels * mult for mult in unet_params.channel_mult] + + down_block_types = [] + resolution = 1 + for i in range(len(block_out_channels)): + block_type = ( + "CrossAttnDownBlock2D" if resolution in unet_params.attention_resolutions else "DownBlock2D" + ) + down_block_types.append(block_type) + if i != len(block_out_channels) - 1: + resolution *= 2 + + up_block_types = [] + for i in range(len(block_out_channels)): + block_type = "CrossAttnUpBlock2D" if resolution in unet_params.attention_resolutions else "UpBlock2D" + up_block_types.append(block_type) + resolution //= 2 + + if unet_params.transformer_depth is not None: + transformer_layers_per_block = ( + unet_params.transformer_depth + if isinstance(unet_params.transformer_depth, int) + else list(unet_params.transformer_depth) + ) + else: + transformer_layers_per_block = 1 + + vae_scale_factor = 2 ** (len(vae_params.ch_mult) - 1) + + head_dim = unet_params.num_heads if "num_heads" in unet_params else None + use_linear_projection = ( + unet_params.use_linear_in_transformer if "use_linear_in_transformer" in unet_params else False + ) + if use_linear_projection: + # stable diffusion 2-base-512 and 2-768 + if head_dim is None: + head_dim_mult = unet_params.model_channels // unet_params.num_head_channels + head_dim = [head_dim_mult * c for c in list(unet_params.channel_mult)] + + class_embed_type = None + addition_embed_type = None + addition_time_embed_dim = None + projection_class_embeddings_input_dim = None + context_dim = None + + if unet_params.context_dim is not None: + context_dim = ( + unet_params.context_dim + if isinstance(unet_params.context_dim, int) + else unet_params.context_dim[0] + ) + + if "num_classes" in unet_params: + if unet_params.num_classes == "sequential": + if context_dim in [2048, 1280]: + # SDXL + addition_embed_type = "text_time" + addition_time_embed_dim = 256 + else: + class_embed_type = "projection" + assert "adm_in_channels" in unet_params + projection_class_embeddings_input_dim = unet_params.adm_in_channels + else: + raise NotImplementedError( + f"Unknown conditional unet num_classes config: {unet_params.num_classes}" + ) + + config = { + "sample_size": image_size // vae_scale_factor, + "in_channels": unet_params.in_channels, + "down_block_types": tuple(down_block_types), + "block_out_channels": tuple(block_out_channels), + "layers_per_block": unet_params.num_res_blocks, + "cross_attention_dim": context_dim, + "attention_head_dim": head_dim, + "use_linear_projection": use_linear_projection, + "class_embed_type": class_embed_type, + "addition_embed_type": addition_embed_type, + "addition_time_embed_dim": addition_time_embed_dim, + "projection_class_embeddings_input_dim": projection_class_embeddings_input_dim, + "transformer_layers_per_block": transformer_layers_per_block, + } + + if controlnet: + config["conditioning_channels"] = unet_params.hint_channels + else: + config["out_channels"] = unet_params.out_channels + config["up_block_types"] = tuple(up_block_types) + + return config + + +def 
create_vae_diffusers_config(original_config, image_size: int): + """ + Creates a config for the diffusers based on the config of the LDM model. + """ + vae_params = original_config.model.params.first_stage_config.params.ddconfig + _ = original_config.model.params.first_stage_config.params.embed_dim + + block_out_channels = [vae_params.ch * mult for mult in vae_params.ch_mult] + down_block_types = ["DownEncoderBlock2D"] * len(block_out_channels) + up_block_types = ["UpDecoderBlock2D"] * len(block_out_channels) + + config = { + "sample_size": image_size, + "in_channels": vae_params.in_channels, + "out_channels": vae_params.out_ch, + "down_block_types": tuple(down_block_types), + "up_block_types": tuple(up_block_types), + "block_out_channels": tuple(block_out_channels), + "latent_channels": vae_params.z_channels, + "layers_per_block": vae_params.num_res_blocks, + } + return config + + +def create_diffusers_schedular(original_config): + schedular = DDIMScheduler( + num_train_timesteps=original_config.model.params.timesteps, + beta_start=original_config.model.params.linear_start, + beta_end=original_config.model.params.linear_end, + beta_schedule="scaled_linear", + ) + return schedular + + +def convert_ldm_unet_checkpoint( + checkpoint, config, path=None, extract_ema=False, controlnet=False, skip_extract_state_dict=False +): + """ + Takes a state dict and a config, and returns a converted checkpoint. + """ + + if skip_extract_state_dict: + unet_state_dict = checkpoint + else: + # extract state_dict for UNet + unet_state_dict = {} + keys = list(checkpoint.keys()) + + if controlnet: + unet_key = "control_model." + else: + unet_key = "model.diffusion_model." + + # at least a 100 parameters have to start with `model_ema` in order for the checkpoint to be EMA + if sum(k.startswith("model_ema") for k in keys) > 100 and extract_ema: + logger.warning(f"Checkpoint {path} has both EMA and non-EMA weights.") + logger.warning( + "In this conversion only the EMA weights are extracted. If you want to instead extract the non-EMA" + " weights (useful to continue fine-tuning), please make sure to remove the `--extract_ema` flag." + ) + for key in keys: + if key.startswith("model.diffusion_model"): + flat_ema_key = "model_ema." + "".join(key.split(".")[1:]) + unet_state_dict[key.replace(unet_key, "")] = checkpoint.pop(flat_ema_key) + else: + if sum(k.startswith("model_ema") for k in keys) > 100: + logger.warning( + "In this conversion only the non-EMA weights are extracted. If you want to instead extract the EMA" + " weights (usually better for inference), please make sure to add the `--extract_ema` flag." + ) + + for key in keys: + if key.startswith(unet_key): + unet_state_dict[key.replace(unet_key, "")] = checkpoint.pop(key) + + new_checkpoint = {} + + new_checkpoint["time_embedding.linear_1.weight"] = unet_state_dict["time_embed.0.weight"] + new_checkpoint["time_embedding.linear_1.bias"] = unet_state_dict["time_embed.0.bias"] + new_checkpoint["time_embedding.linear_2.weight"] = unet_state_dict["time_embed.2.weight"] + new_checkpoint["time_embedding.linear_2.bias"] = unet_state_dict["time_embed.2.bias"] + + if config["class_embed_type"] is None: + # No parameters to port + ... 
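+    # the LDM checkpoint stores these embeddings under label_emb.0.*; the branches below
+    # map them onto Diffusers' class_embedding / add_embedding linear layers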
+ elif config["class_embed_type"] == "timestep" or config["class_embed_type"] == "projection": + new_checkpoint["class_embedding.linear_1.weight"] = unet_state_dict["label_emb.0.0.weight"] + new_checkpoint["class_embedding.linear_1.bias"] = unet_state_dict["label_emb.0.0.bias"] + new_checkpoint["class_embedding.linear_2.weight"] = unet_state_dict["label_emb.0.2.weight"] + new_checkpoint["class_embedding.linear_2.bias"] = unet_state_dict["label_emb.0.2.bias"] + else: + raise NotImplementedError(f"Not implemented `class_embed_type`: {config['class_embed_type']}") + + if config["addition_embed_type"] == "text_time": + new_checkpoint["add_embedding.linear_1.weight"] = unet_state_dict["label_emb.0.0.weight"] + new_checkpoint["add_embedding.linear_1.bias"] = unet_state_dict["label_emb.0.0.bias"] + new_checkpoint["add_embedding.linear_2.weight"] = unet_state_dict["label_emb.0.2.weight"] + new_checkpoint["add_embedding.linear_2.bias"] = unet_state_dict["label_emb.0.2.bias"] + + new_checkpoint["conv_in.weight"] = unet_state_dict["input_blocks.0.0.weight"] + new_checkpoint["conv_in.bias"] = unet_state_dict["input_blocks.0.0.bias"] + + if not controlnet: + new_checkpoint["conv_norm_out.weight"] = unet_state_dict["out.0.weight"] + new_checkpoint["conv_norm_out.bias"] = unet_state_dict["out.0.bias"] + new_checkpoint["conv_out.weight"] = unet_state_dict["out.2.weight"] + new_checkpoint["conv_out.bias"] = unet_state_dict["out.2.bias"] + + # Retrieves the keys for the input blocks only + num_input_blocks = len( + {".".join(layer.split(".")[:2]) for layer in unet_state_dict if "input_blocks" in layer} + ) + input_blocks = { + layer_id: [key for key in unet_state_dict if f"input_blocks.{layer_id}" in key] + for layer_id in range(num_input_blocks) + } + + # Retrieves the keys for the middle blocks only + num_middle_blocks = len( + {".".join(layer.split(".")[:2]) for layer in unet_state_dict if "middle_block" in layer} + ) + middle_blocks = { + layer_id: [key for key in unet_state_dict if f"middle_block.{layer_id}" in key] + for layer_id in range(num_middle_blocks) + } + + # Retrieves the keys for the output blocks only + num_output_blocks = len( + {".".join(layer.split(".")[:2]) for layer in unet_state_dict if "output_blocks" in layer} + ) + output_blocks = { + layer_id: [key for key in unet_state_dict if f"output_blocks.{layer_id}" in key] + for layer_id in range(num_output_blocks) + } + + for i in range(1, num_input_blocks): + block_id = (i - 1) // (config["layers_per_block"] + 1) + layer_in_block_id = (i - 1) % (config["layers_per_block"] + 1) + + resnets = [ + key + for key in input_blocks[i] + if f"input_blocks.{i}.0" in key and f"input_blocks.{i}.0.op" not in key + ] + attentions = [key for key in input_blocks[i] if f"input_blocks.{i}.1" in key] + + if f"input_blocks.{i}.0.op.weight" in unet_state_dict: + new_checkpoint[f"down_blocks.{block_id}.downsamplers.0.conv.weight"] = unet_state_dict.pop( + f"input_blocks.{i}.0.op.weight" + ) + new_checkpoint[f"down_blocks.{block_id}.downsamplers.0.conv.bias"] = unet_state_dict.pop( + f"input_blocks.{i}.0.op.bias" + ) + + paths = renew_resnet_paths(resnets) + meta_path = { + "old": f"input_blocks.{i}.0", + "new": f"down_blocks.{block_id}.resnets.{layer_in_block_id}", + } + assign_to_checkpoint( + paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config + ) + + if len(attentions): + paths = renew_attention_paths(attentions) + meta_path = { + "old": f"input_blocks.{i}.1", + "new": 
f"down_blocks.{block_id}.attentions.{layer_in_block_id}", + } + assign_to_checkpoint( + paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config + ) + + resnet_0 = middle_blocks[0] + attentions = middle_blocks[1] + resnet_1 = middle_blocks[2] + + resnet_0_paths = renew_resnet_paths(resnet_0) + assign_to_checkpoint(resnet_0_paths, new_checkpoint, unet_state_dict, config=config) + + resnet_1_paths = renew_resnet_paths(resnet_1) + assign_to_checkpoint(resnet_1_paths, new_checkpoint, unet_state_dict, config=config) + + attentions_paths = renew_attention_paths(attentions) + meta_path = {"old": "middle_block.1", "new": "mid_block.attentions.0"} + assign_to_checkpoint( + attentions_paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config + ) + + for i in range(num_output_blocks): + block_id = i // (config["layers_per_block"] + 1) + layer_in_block_id = i % (config["layers_per_block"] + 1) + output_block_layers = [shave_segments(name, 2) for name in output_blocks[i]] + output_block_list = {} + + for layer in output_block_layers: + layer_id, layer_name = layer.split(".")[0], shave_segments(layer, 1) + if layer_id in output_block_list: + output_block_list[layer_id].append(layer_name) + else: + output_block_list[layer_id] = [layer_name] + + if len(output_block_list) > 1: + resnets = [key for key in output_blocks[i] if f"output_blocks.{i}.0" in key] + attentions = [key for key in output_blocks[i] if f"output_blocks.{i}.1" in key] + + resnet_0_paths = renew_resnet_paths(resnets) + paths = renew_resnet_paths(resnets) + + meta_path = { + "old": f"output_blocks.{i}.0", + "new": f"up_blocks.{block_id}.resnets.{layer_in_block_id}", + } + assign_to_checkpoint( + paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config + ) + + output_block_list = {k: sorted(v) for k, v in output_block_list.items()} + if ["conv.bias", "conv.weight"] in output_block_list.values(): + index = list(output_block_list.values()).index(["conv.bias", "conv.weight"]) + new_checkpoint[f"up_blocks.{block_id}.upsamplers.0.conv.weight"] = unet_state_dict[ + f"output_blocks.{i}.{index}.conv.weight" + ] + new_checkpoint[f"up_blocks.{block_id}.upsamplers.0.conv.bias"] = unet_state_dict[ + f"output_blocks.{i}.{index}.conv.bias" + ] + + # Clear attentions as they have been attributed above. 
+ if len(attentions) == 2: + attentions = [] + + if len(attentions): + paths = renew_attention_paths(attentions) + meta_path = { + "old": f"output_blocks.{i}.1", + "new": f"up_blocks.{block_id}.attentions.{layer_in_block_id}", + } + assign_to_checkpoint( + paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config + ) + else: + resnet_0_paths = renew_resnet_paths(output_block_layers, n_shave_prefix_segments=1) + for path in resnet_0_paths: + old_path = ".".join(["output_blocks", str(i), path["old"]]) + new_path = ".".join( + ["up_blocks", str(block_id), "resnets", str(layer_in_block_id), path["new"]] + ) + + new_checkpoint[new_path] = unet_state_dict[old_path] + + if controlnet: + # conditioning embedding + + orig_index = 0 + + new_checkpoint["controlnet_cond_embedding.conv_in.weight"] = unet_state_dict.pop( + f"input_hint_block.{orig_index}.weight" + ) + new_checkpoint["controlnet_cond_embedding.conv_in.bias"] = unet_state_dict.pop( + f"input_hint_block.{orig_index}.bias" + ) + + orig_index += 2 + + diffusers_index = 0 + + while diffusers_index < 6: + new_checkpoint[ + f"controlnet_cond_embedding.blocks.{diffusers_index}.weight" + ] = unet_state_dict.pop(f"input_hint_block.{orig_index}.weight") + new_checkpoint[f"controlnet_cond_embedding.blocks.{diffusers_index}.bias"] = unet_state_dict.pop( + f"input_hint_block.{orig_index}.bias" + ) + diffusers_index += 1 + orig_index += 2 + + new_checkpoint["controlnet_cond_embedding.conv_out.weight"] = unet_state_dict.pop( + f"input_hint_block.{orig_index}.weight" + ) + new_checkpoint["controlnet_cond_embedding.conv_out.bias"] = unet_state_dict.pop( + f"input_hint_block.{orig_index}.bias" + ) + + # down blocks + for i in range(num_input_blocks): + new_checkpoint[f"controlnet_down_blocks.{i}.weight"] = unet_state_dict.pop( + f"zero_convs.{i}.0.weight" + ) + new_checkpoint[f"controlnet_down_blocks.{i}.bias"] = unet_state_dict.pop(f"zero_convs.{i}.0.bias") + + # mid block + new_checkpoint["controlnet_mid_block.weight"] = unet_state_dict.pop("middle_block_out.0.weight") + new_checkpoint["controlnet_mid_block.bias"] = unet_state_dict.pop("middle_block_out.0.bias") + + return new_checkpoint + + +def convert_ldm_vae_checkpoint(checkpoint, config, is_extract=False): + # extract state dict for VAE + vae_state_dict = {} + if is_extract: + vae_key = "first_stage_model." 
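+    # strip the first_stage_model. prefix when pulling VAE tensors out of a full checkpoint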
+ keys = list(checkpoint.keys()) + for key in keys: + if is_extract: + if key.startswith(vae_key): + vae_state_dict[key.replace(vae_key, "")] = checkpoint.get(key) + else: + vae_state_dict = checkpoint + + + new_checkpoint = {} + + new_checkpoint["encoder.conv_in.weight"] = vae_state_dict["encoder.conv_in.weight"] + new_checkpoint["encoder.conv_in.bias"] = vae_state_dict["encoder.conv_in.bias"] + new_checkpoint["encoder.conv_out.weight"] = vae_state_dict["encoder.conv_out.weight"] + new_checkpoint["encoder.conv_out.bias"] = vae_state_dict["encoder.conv_out.bias"] + new_checkpoint["encoder.conv_norm_out.weight"] = vae_state_dict["encoder.norm_out.weight"] + new_checkpoint["encoder.conv_norm_out.bias"] = vae_state_dict["encoder.norm_out.bias"] + + new_checkpoint["decoder.conv_in.weight"] = vae_state_dict["decoder.conv_in.weight"] + new_checkpoint["decoder.conv_in.bias"] = vae_state_dict["decoder.conv_in.bias"] + new_checkpoint["decoder.conv_out.weight"] = vae_state_dict["decoder.conv_out.weight"] + new_checkpoint["decoder.conv_out.bias"] = vae_state_dict["decoder.conv_out.bias"] + new_checkpoint["decoder.conv_norm_out.weight"] = vae_state_dict["decoder.norm_out.weight"] + new_checkpoint["decoder.conv_norm_out.bias"] = vae_state_dict["decoder.norm_out.bias"] + + new_checkpoint["quant_conv.weight"] = vae_state_dict["quant_conv.weight"] + new_checkpoint["quant_conv.bias"] = vae_state_dict["quant_conv.bias"] + new_checkpoint["post_quant_conv.weight"] = vae_state_dict["post_quant_conv.weight"] + new_checkpoint["post_quant_conv.bias"] = vae_state_dict["post_quant_conv.bias"] + + # Retrieves the keys for the encoder down blocks only + num_down_blocks = len( + {".".join(layer.split(".")[:3]) for layer in vae_state_dict if "encoder.down" in layer} + ) + down_blocks = { + layer_id: [key for key in vae_state_dict if f"down.{layer_id}" in key] + for layer_id in range(num_down_blocks) + } + + # Retrieves the keys for the decoder up blocks only + num_up_blocks = len({".".join(layer.split(".")[:3]) for layer in vae_state_dict if "decoder.up" in layer}) + up_blocks = { + layer_id: [key for key in vae_state_dict if f"up.{layer_id}" in key] + for layer_id in range(num_up_blocks) + } + + for i in range(num_down_blocks): + resnets = [key for key in down_blocks[i] if f"down.{i}" in key and f"down.{i}.downsample" not in key] + + if f"encoder.down.{i}.downsample.conv.weight" in vae_state_dict: + new_checkpoint[f"encoder.down_blocks.{i}.downsamplers.0.conv.weight"] = vae_state_dict.pop( + f"encoder.down.{i}.downsample.conv.weight" + ) + new_checkpoint[f"encoder.down_blocks.{i}.downsamplers.0.conv.bias"] = vae_state_dict.pop( + f"encoder.down.{i}.downsample.conv.bias" + ) + + paths = renew_vae_resnet_paths(resnets) + meta_path = {"old": f"down.{i}.block", "new": f"down_blocks.{i}.resnets"} + assign_to_checkpoint( + paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config + ) + + mid_resnets = [key for key in vae_state_dict if "encoder.mid.block" in key] + num_mid_res_blocks = 2 + for i in range(1, num_mid_res_blocks + 1): + resnets = [key for key in mid_resnets if f"encoder.mid.block_{i}" in key] + + paths = renew_vae_resnet_paths(resnets) + meta_path = {"old": f"mid.block_{i}", "new": f"mid_block.resnets.{i - 1}"} + assign_to_checkpoint( + paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config + ) + + mid_attentions = [key for key in vae_state_dict if "encoder.mid.attn" in key] + paths = renew_vae_attention_paths(mid_attentions) + meta_path = 
{"old": "mid.attn_1", "new": "mid_block.attentions.0"} + assign_to_checkpoint( + paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config + ) + conv_attn_to_linear(new_checkpoint) + + for i in range(num_up_blocks): + block_id = num_up_blocks - 1 - i + resnets = [ + key + for key in up_blocks[block_id] + if f"up.{block_id}" in key and f"up.{block_id}.upsample" not in key + ] + + if f"decoder.up.{block_id}.upsample.conv.weight" in vae_state_dict: + new_checkpoint[f"decoder.up_blocks.{i}.upsamplers.0.conv.weight"] = vae_state_dict[ + f"decoder.up.{block_id}.upsample.conv.weight" + ] + new_checkpoint[f"decoder.up_blocks.{i}.upsamplers.0.conv.bias"] = vae_state_dict[ + f"decoder.up.{block_id}.upsample.conv.bias" + ] + + paths = renew_vae_resnet_paths(resnets) + meta_path = {"old": f"up.{block_id}.block", "new": f"up_blocks.{i}.resnets"} + assign_to_checkpoint( + paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config + ) + + mid_resnets = [key for key in vae_state_dict if "decoder.mid.block" in key] + num_mid_res_blocks = 2 + for i in range(1, num_mid_res_blocks + 1): + resnets = [key for key in mid_resnets if f"decoder.mid.block_{i}" in key] + + paths = renew_vae_resnet_paths(resnets) + meta_path = {"old": f"mid.block_{i}", "new": f"mid_block.resnets.{i - 1}"} + assign_to_checkpoint( + paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config + ) + + mid_attentions = [key for key in vae_state_dict if "decoder.mid.attn" in key] + paths = renew_vae_attention_paths(mid_attentions) + meta_path = {"old": "mid.attn_1", "new": "mid_block.attentions.0"} + assign_to_checkpoint( + paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config + ) + conv_attn_to_linear(new_checkpoint) + return new_checkpoint + + +def convert_ldm_clip_checkpoint(checkpoint, local_files_only=False, text_model=None): + if text_model is None: + config_name = "openai/clip-vit-large-patch14" + config = CLIPTextConfig.from_pretrained(config_name) + + with init_empty_weights(): + text_model = CLIPTextModel(config) + + keys = list(checkpoint.keys()) + + text_model_dict = {} + + remove_prefixes = ["cond_stage_model.transformer", "conditioner.embedders.0.transformer"] + + for key in keys: + for prefix in remove_prefixes: + if key.startswith(prefix): + text_model_dict[key[len(prefix + ".") :]] = checkpoint[key] + + for param_name, param in text_model_dict.items(): + set_module_tensor_to_device(text_model, param_name, "cpu", value=param) + + return text_model + + +textenc_conversion_lst = [ + ("positional_embedding", "text_model.embeddings.position_embedding.weight"), + ("token_embedding.weight", "text_model.embeddings.token_embedding.weight"), + ("ln_final.weight", "text_model.final_layer_norm.weight"), + ("ln_final.bias", "text_model.final_layer_norm.bias"), + ("text_projection", "text_projection.weight"), +] +textenc_conversion_map = {x[0]: x[1] for x in textenc_conversion_lst} + +textenc_transformer_conversion_lst = [ + # (stable-diffusion, HF Diffusers) + ("resblocks.", "text_model.encoder.layers."), + ("ln_1", "layer_norm1"), + ("ln_2", "layer_norm2"), + (".c_fc.", ".fc1."), + (".c_proj.", ".fc2."), + (".attn", ".self_attn"), + ("ln_final.", "transformer.text_model.final_layer_norm."), + ("token_embedding.weight", "transformer.text_model.embeddings.token_embedding.weight"), + ("positional_embedding", "transformer.text_model.embeddings.position_embedding.weight"), +] +protected = {re.escape(x[0]): x[1] 
for x in textenc_transformer_conversion_lst} +textenc_pattern = re.compile("|".join(protected.keys())) diff --git a/src/animatediff/utils/convert_lora_safetensor_to_diffusers.py b/src/animatediff/utils/convert_lora_safetensor_to_diffusers.py new file mode 100644 index 0000000000000000000000000000000000000000..cc38ed8a2c8d9f8ede894a9ff4149905f991d5da --- /dev/null +++ b/src/animatediff/utils/convert_lora_safetensor_to_diffusers.py @@ -0,0 +1,141 @@ +# coding=utf-8 +# Copyright 2023, Haofan Wang, Qixun Wang, All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" Conversion script for the LoRA's safetensors checkpoints. """ + +import argparse + +import torch + + +def convert_lora( + pipeline, state_dict, LORA_PREFIX_UNET="lora_unet", LORA_PREFIX_TEXT_ENCODER="lora_te", alpha=0.6 +): + # load base model + # pipeline = StableDiffusionPipeline.from_pretrained(base_model_path, torch_dtype=torch.float32) + + # load LoRA weight from .safetensors + # state_dict = load_file(checkpoint_path) + + visited = [] + + # directly update weight in diffusers model + for key in state_dict: + # it is suggested to print out the key, it usually will be something like below + # "lora_te_text_model_encoder_layers_0_self_attn_k_proj.lora_down.weight" + + # as we have set the alpha beforehand, so just skip + if ".alpha" in key or key in visited: + continue + + if "text" in key: + layer_infos = key.split(".")[0].split(LORA_PREFIX_TEXT_ENCODER + "_")[-1].split("_") + curr_layer = pipeline.text_encoder + else: + layer_infos = key.split(".")[0].split(LORA_PREFIX_UNET + "_")[-1].split("_") + curr_layer = pipeline.unet + + # find the target layer + temp_name = layer_infos.pop(0) + while len(layer_infos) > -1: + try: + curr_layer = curr_layer.__getattr__(temp_name) + if len(layer_infos) > 0: + temp_name = layer_infos.pop(0) + elif len(layer_infos) == 0: + break + except Exception: + if len(temp_name) > 0: + temp_name += "_" + layer_infos.pop(0) + else: + temp_name = layer_infos.pop(0) + + pair_keys = [] + if "lora_down" in key: + pair_keys.append(key.replace("lora_down", "lora_up")) + pair_keys.append(key) + else: + pair_keys.append(key) + pair_keys.append(key.replace("lora_up", "lora_down")) + + # update weight + if len(state_dict[pair_keys[0]].shape) == 4: + weight_up = state_dict[pair_keys[0]].squeeze(3).squeeze(2).to(torch.float32) + weight_down = state_dict[pair_keys[1]].squeeze(3).squeeze(2).to(torch.float32) + curr_layer.weight.data += alpha * torch.mm(weight_up, weight_down).unsqueeze(2).unsqueeze(3).to( + curr_layer.weight.data.device + ) + else: + weight_up = state_dict[pair_keys[0]].to(torch.float32) + weight_down = state_dict[pair_keys[1]].to(torch.float32) + curr_layer.weight.data += alpha * torch.mm(weight_up, weight_down).to( + curr_layer.weight.data.device + ) + + # update visited list + for item in pair_keys: + visited.append(item) + + return pipeline + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + + parser.add_argument( + "--base_model_path", + default=None, + 
type=str,
+        required=True,
+        help="Path to the base model in diffusers format.",
+    )
+    parser.add_argument(
+        "--checkpoint_path", default=None, type=str, required=True, help="Path to the checkpoint to convert."
+    )
+    parser.add_argument(
+        "--dump_path", default=None, type=str, required=True, help="Path to the output model."
+    )
+    parser.add_argument(
+        "--lora_prefix_unet", default="lora_unet", type=str, help="The prefix of UNet weight in safetensors"
+    )
+    parser.add_argument(
+        "--lora_prefix_text_encoder",
+        default="lora_te",
+        type=str,
+        help="The prefix of text encoder weight in safetensors",
+    )
+    parser.add_argument(
+        "--alpha", default=0.75, type=float, help="The merging ratio in W = W0 + alpha * deltaW"
+    )
+    parser.add_argument(
+        "--to_safetensors",
+        action="store_true",
+        help="Whether to store pipeline in safetensors format or not.",
+    )
+    parser.add_argument("--device", type=str, help="Device to use (e.g. cpu, cuda:0, cuda:1, etc.)")
+
+    args = parser.parse_args()
+
+    base_model_path = args.base_model_path
+    checkpoint_path = args.checkpoint_path
+    dump_path = args.dump_path
+    lora_prefix_unet = args.lora_prefix_unet
+    lora_prefix_text_encoder = args.lora_prefix_text_encoder
+    alpha = args.alpha
+
+    # convert_lora() expects an already-loaded pipeline and LoRA state dict (see the
+    # commented-out lines at the top of the function), so load both here instead of
+    # passing the raw paths through.
+    from diffusers import StableDiffusionPipeline
+    from safetensors.torch import load_file
+
+    pipeline = StableDiffusionPipeline.from_pretrained(base_model_path, torch_dtype=torch.float32)
+    state_dict = load_file(checkpoint_path)
+
+    pipe = convert_lora(pipeline, state_dict, lora_prefix_unet, lora_prefix_text_encoder, alpha)
+
+    pipe = pipe.to(args.device)
+    pipe.save_pretrained(dump_path, safe_serialization=args.to_safetensors)
diff --git a/src/animatediff/utils/device.py b/src/animatediff/utils/device.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a481521dd7c3dd1a4494638b8a59f2b830633c6
--- /dev/null
+++ b/src/animatediff/utils/device.py
@@ -0,0 +1,112 @@
+import logging
+from functools import lru_cache
+from math import ceil
+from typing import Union
+
+import torch
+
+logger = logging.getLogger(__name__)
+
+
+def device_info_str(device: torch.device) -> str:
+    device_info = torch.cuda.get_device_properties(device)
+    return (
+        f"{device_info.name} {ceil(device_info.total_memory / 1024 ** 3)}GB, "
+        + f"CC {device_info.major}.{device_info.minor}, {device_info.multi_processor_count} SM(s)"
+    )
+
+
+@lru_cache(maxsize=4)
+def supports_bfloat16(device: Union[str, torch.device]) -> bool:
+    """A non-exhaustive check for bfloat16 support on a given device.
+    Weird that torch doesn't have a global function for this. If your device
+    does support bfloat16 and it's not listed here, go ahead and add it.
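+    (MPS bfloat16 support in particular varies with the torch build, so treat that case as best-effort.)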
+ """ + device = torch.device(device) # make sure device is a torch.device + match device.type: + case "cpu": + ret = False + case "cuda": + with device: + ret = torch.cuda.is_bf16_supported() + case "xla": + ret = True + case "mps": + ret = True + case _: + ret = False + return ret + + +@lru_cache(maxsize=4) +def maybe_bfloat16( + device: Union[str, torch.device], + fallback: torch.dtype = torch.float32, +) -> torch.dtype: + """Returns torch.bfloat16 if available, otherwise the fallback dtype (default float32)""" + device = torch.device(device) # make sure device is a torch.device + return torch.bfloat16 if supports_bfloat16(device) else fallback + + +def dtype_for_model(model: str, device: torch.device) -> torch.dtype: + match model: + case "unet": + return torch.float32 if device.type == "cpu" else torch.float16 + case "tenc": + return torch.float32 if device.type == "cpu" else torch.float16 + case "vae": + return maybe_bfloat16(device, fallback=torch.float32) + case unknown: + raise ValueError(f"Invalid model {unknown}") + + +def get_model_dtypes( + device: Union[str, torch.device], + force_half_vae: bool = False, +) -> tuple[torch.dtype, torch.dtype, torch.dtype]: + device = torch.device(device) # make sure device is a torch.device + unet_dtype = dtype_for_model("unet", device) + tenc_dtype = dtype_for_model("tenc", device) + vae_dtype = dtype_for_model("vae", device) + + if device.type == "cpu": + logger.warn("Device explicitly set to CPU, will run everything in fp32") + logger.warn("This is likely to be *incredibly* slow, but I don't tell you how to live.") + + if force_half_vae: + if device.type == "cpu": + logger.critical("Can't force VAE to fp16 mode on CPU! Exiting...") + raise RuntimeError("Can't force VAE to fp16 mode on CPU!") + if vae_dtype == torch.bfloat16: + logger.warn("Forcing VAE to use fp16 despite bfloat16 support! This is a bad idea!") + logger.warn("If you're not sure why you're doing this, you probably shouldn't be.") + vae_dtype = torch.float16 + else: + logger.warn("Forcing VAE to use fp16 instead of fp32 on CUDA! This may result in black outputs!") + logger.warn("Running a VAE in fp16 can result in black images or poor output quality.") + logger.warn("I don't tell you how to live, but you probably shouldn't do this.") + vae_dtype = torch.float16 + + logger.info(f"Selected data types: {unet_dtype=}, {tenc_dtype=}, {vae_dtype=}") + return unet_dtype, tenc_dtype, vae_dtype + + +def get_memory_format(device: Union[str, torch.device]) -> torch.memory_format: + device = torch.device(device) # make sure device is a torch.device + # if we have a cuda device + if device.type == "cuda": + device_info = torch.cuda.get_device_properties(device) + # Volta and newer seem to like channels_last. This will probably bite me on TU11x cards. + if device_info.major >= 7: + ret = torch.channels_last + else: + ret = torch.contiguous_format + elif device.type == "xpu": + # Intel ARC GPUs/XPUs like channels_last + ret = torch.channels_last + else: + # TODO: Does MPS like channels_last? do other devices? 
+ ret = torch.contiguous_format + if ret == torch.channels_last: + logger.info("Using channels_last memory format for UNet and VAE") + return ret \ No newline at end of file diff --git a/src/animatediff/utils/huggingface.py b/src/animatediff/utils/huggingface.py new file mode 100644 index 0000000000000000000000000000000000000000..89c90e131ff36d09c5e6e00797cf4c643287d850 --- /dev/null +++ b/src/animatediff/utils/huggingface.py @@ -0,0 +1,149 @@ +import logging +from os import PathLike +from pathlib import Path +from typing import Optional + +from diffusers import StableDiffusionPipeline, StableDiffusionXLPipeline +from huggingface_hub import hf_hub_download, snapshot_download +from tqdm.rich import tqdm + +from animatediff import HF_HUB_CACHE, HF_LIB_NAME, HF_LIB_VER, get_dir +from animatediff.utils.util import path_from_cwd + +logger = logging.getLogger(__name__) + +data_dir = get_dir("data") +checkpoint_dir = data_dir.joinpath("models/sd") +pipeline_dir = data_dir.joinpath("models/huggingface") + +IGNORE_TF = ["*.git*", "*.h5", "tf_*"] +IGNORE_FLAX = ["*.git*", "flax_*", "*.msgpack"] +IGNORE_TF_FLAX = IGNORE_TF + IGNORE_FLAX + + +class DownloadTqdm(tqdm): + def __init__(self, *args, **kwargs): + kwargs.update( + { + "ncols": 100, + "dynamic_ncols": False, + "disable": None, + } + ) + super().__init__(*args, **kwargs) + + +def get_hf_file( + repo_id: Path, + filename: str, + target_dir: Path, + subfolder: Optional[PathLike] = None, + revision: Optional[str] = None, + force: bool = False, +) -> Path: + target_path = target_dir.joinpath(filename) + if target_path.exists() and force is not True: + raise FileExistsError( + f"File {path_from_cwd(target_path)} already exists! Pass force=True to overwrite" + ) + + target_dir.mkdir(exist_ok=True, parents=True) + save_path = hf_hub_download( + repo_id=str(repo_id), + filename=filename, + revision=revision or "main", + subfolder=subfolder, + local_dir=target_dir, + local_dir_use_symlinks=False, + cache_dir=HF_HUB_CACHE, + resume_download=True, + ) + return Path(save_path) + + +def get_hf_repo( + repo_id: Path, + target_dir: Path, + subfolder: Optional[PathLike] = None, + revision: Optional[str] = None, + force: bool = False, +) -> Path: + if target_dir.exists() and force is not True: + raise FileExistsError( + f"Target dir {path_from_cwd(target_dir)} already exists! 
Pass force=True to overwrite" + ) + + target_dir.mkdir(exist_ok=True, parents=True) + save_path = snapshot_download( + repo_id=str(repo_id), + revision=revision or "main", + subfolder=subfolder, + library_name=HF_LIB_NAME, + library_version=HF_LIB_VER, + local_dir=target_dir, + local_dir_use_symlinks=False, + ignore_patterns=IGNORE_TF_FLAX, + cache_dir=HF_HUB_CACHE, + tqdm_class=DownloadTqdm, + max_workers=2, + resume_download=True, + ) + return Path(save_path) + + +def get_hf_pipeline( + repo_id: Path, + target_dir: Path, + save: bool = True, + force_download: bool = False, +) -> StableDiffusionPipeline: + pipeline_exists = target_dir.joinpath("model_index.json").exists() + if pipeline_exists and force_download is not True: + pipeline = StableDiffusionPipeline.from_pretrained( + pretrained_model_name_or_path=target_dir, + local_files_only=True, + ) + else: + target_dir.mkdir(exist_ok=True, parents=True) + pipeline = StableDiffusionPipeline.from_pretrained( + pretrained_model_name_or_path=str(repo_id).lstrip("./").replace("\\", "/"), + cache_dir=HF_HUB_CACHE, + resume_download=True, + ) + if save and force_download: + logger.warning(f"Pipeline already exists at {path_from_cwd(target_dir)}. Overwriting!") + pipeline.save_pretrained(target_dir, safe_serialization=True) + elif save and not pipeline_exists: + logger.info(f"Saving pipeline to {path_from_cwd(target_dir)}") + pipeline.save_pretrained(target_dir, safe_serialization=True) + return pipeline + +def get_hf_pipeline_sdxl( + repo_id: Path, + target_dir: Path, + save: bool = True, + force_download: bool = False, +) -> StableDiffusionXLPipeline: + import torch + pipeline_exists = target_dir.joinpath("model_index.json").exists() + if pipeline_exists and force_download is not True: + pipeline = StableDiffusionXLPipeline.from_pretrained( + pretrained_model_name_or_path=target_dir, + local_files_only=True, + torch_dtype=torch.float16, use_safetensors=True, variant="fp16" + ) + else: + target_dir.mkdir(exist_ok=True, parents=True) + pipeline = StableDiffusionXLPipeline.from_pretrained( + pretrained_model_name_or_path=str(repo_id).lstrip("./").replace("\\", "/"), + cache_dir=HF_HUB_CACHE, + resume_download=True, + torch_dtype=torch.float16, use_safetensors=True, variant="fp16" + ) + if save and force_download: + logger.warning(f"Pipeline already exists at {path_from_cwd(target_dir)}. Overwriting!") + pipeline.save_pretrained(target_dir, safe_serialization=True) + elif save and not pipeline_exists: + logger.info(f"Saving pipeline to {path_from_cwd(target_dir)}") + pipeline.save_pretrained(target_dir, safe_serialization=True) + return pipeline diff --git a/src/animatediff/utils/lora_diffusers.py b/src/animatediff/utils/lora_diffusers.py new file mode 100644 index 0000000000000000000000000000000000000000..2576fa78154e4461381b4514266076177e070f98 --- /dev/null +++ b/src/animatediff/utils/lora_diffusers.py @@ -0,0 +1,649 @@ +# https://github.com/kohya-ss/sd-scripts/blob/dev/networks/lora_diffusers.py + +# Diffusersで動くLoRA。このファイル単独で完結する。 +# LoRA module for Diffusers. This file works independently. 
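+#
+# Rough usage sketch (an assumption, following the upstream kohya-ss script this file is
+# taken from; the names below come from that script, not from the code shown here):
+#
+#   network = create_network_from_weights(text_encoder, unet, weights_sd)
+#   network.apply_to(multiplier=1.0)  # hook the original forward methods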
+ +import bisect +import math +import random +from typing import Any, Dict, List, Mapping, Optional, Union + +import numpy as np +import torch +from diffusers import UNet2DConditionModel +from tqdm import tqdm +from transformers import CLIPTextModel + + +def make_unet_conversion_map() -> Dict[str, str]: + unet_conversion_map_layer = [] + + for i in range(3): # num_blocks is 3 in sdxl + # loop over downblocks/upblocks + for j in range(2): + # loop over resnets/attentions for downblocks + hf_down_res_prefix = f"down_blocks.{i}.resnets.{j}." + sd_down_res_prefix = f"input_blocks.{3*i + j + 1}.0." + unet_conversion_map_layer.append((sd_down_res_prefix, hf_down_res_prefix)) + + if i < 3: + # no attention layers in down_blocks.3 + hf_down_atn_prefix = f"down_blocks.{i}.attentions.{j}." + sd_down_atn_prefix = f"input_blocks.{3*i + j + 1}.1." + unet_conversion_map_layer.append((sd_down_atn_prefix, hf_down_atn_prefix)) + + for j in range(3): + # loop over resnets/attentions for upblocks + hf_up_res_prefix = f"up_blocks.{i}.resnets.{j}." + sd_up_res_prefix = f"output_blocks.{3*i + j}.0." + unet_conversion_map_layer.append((sd_up_res_prefix, hf_up_res_prefix)) + + # if i > 0: commentout for sdxl + # no attention layers in up_blocks.0 + hf_up_atn_prefix = f"up_blocks.{i}.attentions.{j}." + sd_up_atn_prefix = f"output_blocks.{3*i + j}.1." + unet_conversion_map_layer.append((sd_up_atn_prefix, hf_up_atn_prefix)) + + if i < 3: + # no downsample in down_blocks.3 + hf_downsample_prefix = f"down_blocks.{i}.downsamplers.0.conv." + sd_downsample_prefix = f"input_blocks.{3*(i+1)}.0.op." + unet_conversion_map_layer.append((sd_downsample_prefix, hf_downsample_prefix)) + + # no upsample in up_blocks.3 + hf_upsample_prefix = f"up_blocks.{i}.upsamplers.0." + sd_upsample_prefix = f"output_blocks.{3*i + 2}.{2}." # change for sdxl + unet_conversion_map_layer.append((sd_upsample_prefix, hf_upsample_prefix)) + + hf_mid_atn_prefix = "mid_block.attentions.0." + sd_mid_atn_prefix = "middle_block.1." + unet_conversion_map_layer.append((sd_mid_atn_prefix, hf_mid_atn_prefix)) + + for j in range(2): + hf_mid_res_prefix = f"mid_block.resnets.{j}." + sd_mid_res_prefix = f"middle_block.{2*j}." + unet_conversion_map_layer.append((sd_mid_res_prefix, hf_mid_res_prefix)) + + unet_conversion_map_resnet = [ + # (stable-diffusion, HF Diffusers) + ("in_layers.0.", "norm1."), + ("in_layers.2.", "conv1."), + ("out_layers.0.", "norm2."), + ("out_layers.3.", "conv2."), + ("emb_layers.1.", "time_emb_proj."), + ("skip_connection.", "conv_shortcut."), + ] + + unet_conversion_map = [] + for sd, hf in unet_conversion_map_layer: + if "resnets" in hf: + for sd_res, hf_res in unet_conversion_map_resnet: + unet_conversion_map.append((sd + sd_res, hf + hf_res)) + else: + unet_conversion_map.append((sd, hf)) + + for j in range(2): + hf_time_embed_prefix = f"time_embedding.linear_{j+1}." + sd_time_embed_prefix = f"time_embed.{j*2}." + unet_conversion_map.append((sd_time_embed_prefix, hf_time_embed_prefix)) + + for j in range(2): + hf_label_embed_prefix = f"add_embedding.linear_{j+1}." + sd_label_embed_prefix = f"label_emb.0.{j*2}." 
+ unet_conversion_map.append((sd_label_embed_prefix, hf_label_embed_prefix)) + + unet_conversion_map.append(("input_blocks.0.0.", "conv_in.")) + unet_conversion_map.append(("out.0.", "conv_norm_out.")) + unet_conversion_map.append(("out.2.", "conv_out.")) + + sd_hf_conversion_map = {sd.replace(".", "_")[:-1]: hf.replace(".", "_")[:-1] for sd, hf in unet_conversion_map} + return sd_hf_conversion_map + + +UNET_CONVERSION_MAP = make_unet_conversion_map() + + +class LoRAModule(torch.nn.Module): + """ + replaces forward method of the original Linear, instead of replacing the original Linear module. + """ + + def __init__( + self, + lora_name, + org_module: torch.nn.Module, + multiplier=1.0, + lora_dim=4, + alpha=1, + ): + """if alpha == 0 or None, alpha is rank (no scaling).""" + super().__init__() + self.lora_name = lora_name + + if org_module.__class__.__name__ == "Conv2d" or org_module.__class__.__name__ == "LoRACompatibleConv" or org_module.__class__.__name__ == "InflatedConv3d": + in_dim = org_module.in_channels + out_dim = org_module.out_channels + else: + in_dim = org_module.in_features + out_dim = org_module.out_features + + self.lora_dim = lora_dim + + self.need_rearrange = False + if org_module.__class__.__name__ == "InflatedConv3d": + self.need_rearrange = True + + if org_module.__class__.__name__ == "Conv2d" or org_module.__class__.__name__ == "LoRACompatibleConv" or org_module.__class__.__name__ == "InflatedConv3d": + kernel_size = org_module.kernel_size + stride = org_module.stride + padding = org_module.padding + self.lora_down = torch.nn.Conv2d(in_dim, self.lora_dim, kernel_size, stride, padding, bias=False) + self.lora_up = torch.nn.Conv2d(self.lora_dim, out_dim, (1, 1), (1, 1), bias=False) + else: + self.lora_down = torch.nn.Linear(in_dim, self.lora_dim, bias=False) + self.lora_up = torch.nn.Linear(self.lora_dim, out_dim, bias=False) + + if type(alpha) == torch.Tensor: + alpha = alpha.detach().float().numpy() # without casting, bf16 causes error + alpha = self.lora_dim if alpha is None or alpha == 0 else alpha + self.scale = alpha / self.lora_dim + self.register_buffer("alpha", torch.tensor(alpha)) # 勾配計算に含めない / not included in gradient calculation + + # same as microsoft's + torch.nn.init.kaiming_uniform_(self.lora_down.weight, a=math.sqrt(5)) + torch.nn.init.zeros_(self.lora_up.weight) + + self.multiplier = multiplier + self.org_module = [org_module] + self.enabled = True + self.network: LoRANetwork = None + self.org_forward = None + + # override org_module's forward method + def apply_to(self, multiplier=None): + if multiplier is not None: + self.multiplier = multiplier + if self.org_forward is None: + self.org_forward = self.org_module[0].forward + self.org_module[0].forward = self.forward + + # restore org_module's forward method + def unapply_to(self): + if self.org_forward is not None: + self.org_module[0].forward = self.org_forward + + # forward with lora + # scale is used LoRACompatibleConv, but we ignore it because we have multiplier + def forward(self, x, scale=1.0): + from einops import rearrange + if not self.enabled: + return self.org_forward(x) + + if self.need_rearrange: + org = self.org_forward(x) + frames = x.shape[2] + x = rearrange(x, "b c f h w -> (b f) c h w") + x = self.lora_up(self.lora_down(x)) * self.multiplier * self.scale + x = rearrange(x, "(b f) c h w -> b c f h w", f=frames) + return org + x + else: + return self.org_forward(x) + self.lora_up(self.lora_down(x)) * self.multiplier * self.scale + + def set_network(self, network): + self.network = 
network
+
+    # merge lora weight to org weight
+    def merge_to(self, multiplier=1.0):
+        # get lora weight
+        lora_weight = self.get_weight(multiplier)
+
+        # get org weight
+        org_sd = self.org_module[0].state_dict()
+        org_weight = org_sd["weight"]
+        weight = org_weight + lora_weight.to(org_weight.device, dtype=org_weight.dtype)
+
+        # set weight to org_module
+        org_sd["weight"] = weight
+        self.org_module[0].load_state_dict(org_sd)
+
+    # restore org weight from lora weight
+    def restore_from(self, multiplier=1.0):
+        # get lora weight
+        lora_weight = self.get_weight(multiplier)
+
+        # get org weight
+        org_sd = self.org_module[0].state_dict()
+        org_weight = org_sd["weight"]
+        weight = org_weight - lora_weight.to(org_weight.device, dtype=org_weight.dtype)
+
+        # set weight to org_module
+        org_sd["weight"] = weight
+        self.org_module[0].load_state_dict(org_sd)
+
+    # return lora weight
+    def get_weight(self, multiplier=None):
+        if multiplier is None:
+            multiplier = self.multiplier
+
+        # get up/down weight from module
+        up_weight = self.lora_up.weight.to(torch.float)
+        down_weight = self.lora_down.weight.to(torch.float)
+
+        # pre-calculated weight (use the passed-in multiplier; this previously
+        # read self.multiplier, which silently ignored the argument)
+        if len(down_weight.size()) == 2:
+            # linear
+            weight = multiplier * (up_weight @ down_weight) * self.scale
+        elif down_weight.size()[2:4] == (1, 1):
+            # conv2d 1x1
+            weight = (
+                multiplier
+                * (up_weight.squeeze(3).squeeze(2) @ down_weight.squeeze(3).squeeze(2)).unsqueeze(2).unsqueeze(3)
+                * self.scale
+            )
+        else:
+            # conv2d 3x3
+            conved = torch.nn.functional.conv2d(down_weight.permute(1, 0, 2, 3), up_weight).permute(1, 0, 2, 3)
+            weight = multiplier * conved * self.scale
+
+        return weight
+
+
+# Create network from weights for inference, weights are not loaded here
+def create_network_from_weights(
+    text_encoder: Union[CLIPTextModel, List[CLIPTextModel]],
+    unet: UNet2DConditionModel,
+    weights_sd: Dict,
+    multiplier: float = 1.0,
+    is_animatediff: bool = True,
+):
+    # get dim/alpha mapping
+    modules_dim = {}
+    modules_alpha = {}
+
+    for key, value in weights_sd.items():
+        if "."
not in key: + #print(f"skip {key}") + continue + + lora_name = key.split(".")[0] + if "alpha" in key: + #print(f"{key} have alpha -> modules_alpha") + modules_alpha[lora_name] = value + elif "lora_down" in key: + #print(f"{key} have lora_down -> modules_dim") + dim = value.size()[0] + modules_dim[lora_name] = dim + #print(lora_name, value.size(), dim) + + # support old LoRA without alpha + for key in modules_dim.keys(): + if key not in modules_alpha: + modules_alpha[key] = modules_dim[key] + + return LoRANetwork(text_encoder, unet, multiplier=multiplier, modules_dim=modules_dim, modules_alpha=modules_alpha, is_animatediff=is_animatediff) + + +def merge_lora_weights(pipe, weights_sd: Dict, multiplier: float = 1.0): + text_encoders = [pipe.text_encoder, pipe.text_encoder_2] if hasattr(pipe, "text_encoder_2") else [pipe.text_encoder] + unet = pipe.unet + + lora_network = create_network_from_weights(text_encoders, unet, weights_sd, multiplier=multiplier) + lora_network.load_state_dict(weights_sd) + lora_network.merge_to(multiplier=multiplier) + + +# block weightや学習に対応しない簡易版 / simple version without block weight and training +class LoRANetwork(torch.nn.Module): + UNET_TARGET_REPLACE_MODULE_TYPE1 = ["Transformer3DModel"] + UNET_TARGET_REPLACE_MODULE_CONV2D_3X3_TYPE1 = ["ResnetBlock3D", "Downsample3D", "Upsample3D"] + UNET_TARGET_REPLACE_MODULE_TYPE2 = ["Transformer2DModel"] + UNET_TARGET_REPLACE_MODULE_CONV2D_3X3_TYPE2 = ["ResnetBlock2D", "Downsample2D", "Upsample2D"] + TEXT_ENCODER_TARGET_REPLACE_MODULE = ["CLIPAttention", "CLIPMLP"] + LORA_PREFIX_UNET = "lora_unet" + LORA_PREFIX_TEXT_ENCODER = "lora_te" + + # SDXL: must starts with LORA_PREFIX_TEXT_ENCODER + LORA_PREFIX_TEXT_ENCODER1 = "lora_te1" + LORA_PREFIX_TEXT_ENCODER2 = "lora_te2" + + def __init__( + self, + text_encoder: Union[List[CLIPTextModel], CLIPTextModel], + unet: UNet2DConditionModel, + multiplier: float = 1.0, + modules_dim: Optional[Dict[str, int]] = None, + modules_alpha: Optional[Dict[str, int]] = None, + varbose: Optional[bool] = False, + is_animatediff: bool = True, + ) -> None: + super().__init__() + self.multiplier = multiplier + + print(f"create LoRA network from weights") + + # convert SDXL Stability AI's U-Net modules to Diffusers + converted = self.convert_unet_modules(modules_dim, modules_alpha) + if converted: + print(f"converted {converted} Stability AI's U-Net LoRA modules to Diffusers (SDXL)") + + # create module instances + def create_modules( + is_unet: bool, + text_encoder_idx: Optional[int], # None, 1, 2 + root_module: torch.nn.Module, + target_replace_modules: List[torch.nn.Module], + ) -> List[LoRAModule]: + prefix = ( + self.LORA_PREFIX_UNET + if is_unet + else ( + self.LORA_PREFIX_TEXT_ENCODER + if text_encoder_idx is None + else (self.LORA_PREFIX_TEXT_ENCODER1 if text_encoder_idx == 1 else self.LORA_PREFIX_TEXT_ENCODER2) + ) + ) + loras = [] + skipped = [] + for name, module in root_module.named_modules(): + if module.__class__.__name__ in target_replace_modules: + for child_name, child_module in module.named_modules(): + #print(f"{name=} / {child_name=} / {child_module.__class__.__name__}") + is_linear = ( + child_module.__class__.__name__ == "Linear" or child_module.__class__.__name__ == "LoRACompatibleLinear" + ) + is_conv2d = ( + child_module.__class__.__name__ == "Conv2d" or child_module.__class__.__name__ == "LoRACompatibleConv" or child_module.__class__.__name__ == "InflatedConv3d" + ) + + if is_linear or is_conv2d: + lora_name = prefix + "." + name + "." 
+ child_name + lora_name = lora_name.replace(".", "_") + + if lora_name not in modules_dim: + print(f"skipped {lora_name} (not found in modules_dim)") + skipped.append(lora_name) + continue + + dim = modules_dim[lora_name] + alpha = modules_alpha[lora_name] + lora = LoRAModule( + lora_name, + child_module, + self.multiplier, + dim, + alpha, + ) + #print(f"{lora_name=}") + loras.append(lora) + return loras, skipped + + text_encoders = text_encoder if type(text_encoder) == list else [text_encoder] + + # create LoRA for text encoder + # 毎回すべてのモジュールを作るのは無駄なので要検討 / it is wasteful to create all modules every time, need to consider + self.text_encoder_loras: List[LoRAModule] = [] + skipped_te = [] + for i, text_encoder in enumerate(text_encoders): + if len(text_encoders) > 1: + index = i + 1 + else: + index = None + + text_encoder_loras, skipped = create_modules(False, index, text_encoder, LoRANetwork.TEXT_ENCODER_TARGET_REPLACE_MODULE) + self.text_encoder_loras.extend(text_encoder_loras) + skipped_te += skipped + print(f"create LoRA for Text Encoder: {len(self.text_encoder_loras)} modules.") + if len(skipped_te) > 0: + print(f"skipped {len(skipped_te)} modules because of missing weight for text encoder.") + + # extend U-Net target modules to include Conv2d 3x3 + if is_animatediff: + target_modules = LoRANetwork.UNET_TARGET_REPLACE_MODULE_TYPE1 + LoRANetwork.UNET_TARGET_REPLACE_MODULE_CONV2D_3X3_TYPE1 + else: + target_modules = LoRANetwork.UNET_TARGET_REPLACE_MODULE_TYPE2 + LoRANetwork.UNET_TARGET_REPLACE_MODULE_CONV2D_3X3_TYPE2 + + self.unet_loras: List[LoRAModule] + self.unet_loras, skipped_un = create_modules(True, None, unet, target_modules) + print(f"create LoRA for U-Net: {len(self.unet_loras)} modules.") + if len(skipped_un) > 0: + print(f"skipped {len(skipped_un)} modules because of missing weight for U-Net.") + + # assertion + names = set() + for lora in self.text_encoder_loras + self.unet_loras: + names.add(lora.lora_name) + for lora_name in modules_dim.keys(): + assert lora_name in names, f"{lora_name} is not found in created LoRA modules." 
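+
+        # Naming convention (illustrative example, not taken from a real checkpoint):
+        # a weight key such as
+        #   "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight"
+        # pairs with a module named prefix + "." + <module path> + "." + <child>,
+        # with every "." flattened to "_". add_module() below registers each
+        # LoRAModule under exactly that name so load_state_dict() can route
+        # checkpoint tensors to it.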
+ + # make to work load_state_dict + for lora in self.text_encoder_loras + self.unet_loras: + self.add_module(lora.lora_name, lora) + + # SDXL: convert SDXL Stability AI's U-Net modules to Diffusers + def convert_unet_modules(self, modules_dim, modules_alpha): + converted_count = 0 + not_converted_count = 0 + + map_keys = list(UNET_CONVERSION_MAP.keys()) + map_keys.sort() + + for key in list(modules_dim.keys()): + if key.startswith(LoRANetwork.LORA_PREFIX_UNET + "_"): + search_key = key.replace(LoRANetwork.LORA_PREFIX_UNET + "_", "") + position = bisect.bisect_right(map_keys, search_key) + map_key = map_keys[position - 1] + if search_key.startswith(map_key): + new_key = key.replace(map_key, UNET_CONVERSION_MAP[map_key]) + modules_dim[new_key] = modules_dim[key] + modules_alpha[new_key] = modules_alpha[key] + del modules_dim[key] + del modules_alpha[key] + converted_count += 1 + else: + not_converted_count += 1 + assert ( + converted_count == 0 or not_converted_count == 0 + ), f"some modules are not converted: {converted_count} converted, {not_converted_count} not converted" + return converted_count + + def set_multiplier(self, multiplier): + self.multiplier = multiplier + for lora in self.text_encoder_loras + self.unet_loras: + lora.multiplier = self.multiplier + + def active(self, multiplier): + self.multiplier = multiplier + for lora in self.text_encoder_loras + self.unet_loras: + lora.multiplier = self.multiplier + lora.enabled = True + + def deactive(self): + for lora in self.text_encoder_loras + self.unet_loras: + lora.enabled = False + + def apply_to(self, multiplier=1.0, apply_text_encoder=True, apply_unet=True): + if apply_text_encoder: + print("enable LoRA for text encoder") + for lora in self.text_encoder_loras: + lora.apply_to(multiplier) + if apply_unet: + print("enable LoRA for U-Net") + for lora in self.unet_loras: + lora.apply_to(multiplier) + + def unapply_to(self): + for lora in self.text_encoder_loras + self.unet_loras: + lora.unapply_to() + + def merge_to(self, multiplier=1.0): + print("merge LoRA weights to original weights") + for lora in tqdm(self.text_encoder_loras + self.unet_loras): + lora.merge_to(multiplier) + print(f"weights are merged") + + def restore_from(self, multiplier=1.0): + print("restore LoRA weights from original weights") + for lora in tqdm(self.text_encoder_loras + self.unet_loras): + lora.restore_from(multiplier) + print(f"weights are restored") + + def load_state_dict(self, state_dict: Mapping[str, Any], strict: bool = True): + # convert SDXL Stability AI's state dict to Diffusers' based state dict + map_keys = list(UNET_CONVERSION_MAP.keys()) # prefix of U-Net modules + map_keys.sort() + for key in list(state_dict.keys()): + if key.startswith(LoRANetwork.LORA_PREFIX_UNET + "_"): + search_key = key.replace(LoRANetwork.LORA_PREFIX_UNET + "_", "") + position = bisect.bisect_right(map_keys, search_key) + map_key = map_keys[position - 1] + if search_key.startswith(map_key): + new_key = key.replace(map_key, UNET_CONVERSION_MAP[map_key]) + state_dict[new_key] = state_dict[key] + del state_dict[key] + + # in case of V2, some weights have different shape, so we need to convert them + # because V2 LoRA is based on U-Net created by use_linear_projection=False + my_state_dict = self.state_dict() + for key in state_dict.keys(): + if state_dict[key].size() != my_state_dict[key].size(): + # print(f"convert {key} from {state_dict[key].size()} to {my_state_dict[key].size()}") + state_dict[key] = state_dict[key].view(my_state_dict[key].size()) + + return 
super().load_state_dict(state_dict, strict) + + +if __name__ == "__main__": + # sample code to use LoRANetwork + import argparse + import os + + import torch + from diffusers import StableDiffusionPipeline, StableDiffusionXLPipeline + + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + + parser = argparse.ArgumentParser() + parser.add_argument("--model_id", type=str, default=None, help="model id for huggingface") + parser.add_argument("--lora_weights", type=str, default=None, help="path to LoRA weights") + parser.add_argument("--sdxl", action="store_true", help="use SDXL model") + parser.add_argument("--prompt", type=str, default="A photo of cat", help="prompt text") + parser.add_argument("--negative_prompt", type=str, default="", help="negative prompt text") + parser.add_argument("--seed", type=int, default=0, help="random seed") + args = parser.parse_args() + + image_prefix = args.model_id.replace("/", "_") + "_" + + # load Diffusers model + print(f"load model from {args.model_id}") + pipe: Union[StableDiffusionPipeline, StableDiffusionXLPipeline] + if args.sdxl: + # use_safetensors=True does not work with 0.18.2 + pipe = StableDiffusionXLPipeline.from_pretrained(args.model_id, variant="fp16", torch_dtype=torch.float16) + else: + pipe = StableDiffusionPipeline.from_pretrained(args.model_id, variant="fp16", torch_dtype=torch.float16) + pipe.to(device) + pipe.set_use_memory_efficient_attention_xformers(True) + + text_encoders = [pipe.text_encoder, pipe.text_encoder_2] if args.sdxl else [pipe.text_encoder] + + # load LoRA weights + print(f"load LoRA weights from {args.lora_weights}") + if os.path.splitext(args.lora_weights)[1] == ".safetensors": + from safetensors.torch import load_file + + lora_sd = load_file(args.lora_weights) + else: + lora_sd = torch.load(args.lora_weights) + + # create by LoRA weights and load weights + print(f"create LoRA network") + lora_network: LoRANetwork = create_network_from_weights(text_encoders, pipe.unet, lora_sd, multiplier=1.0) + + print(f"load LoRA network weights") + lora_network.load_state_dict(lora_sd) + + lora_network.to(device, dtype=pipe.unet.dtype) # required to apply_to. 
merge_to works without this
+
+    # back up unet/text encoder weights if necessary
+    def detach_and_move_to_cpu(state_dict):
+        for k, v in state_dict.items():
+            state_dict[k] = v.detach().cpu()
+        return state_dict
+
+    org_unet_sd = pipe.unet.state_dict()
+    detach_and_move_to_cpu(org_unet_sd)
+
+    org_text_encoder_sd = pipe.text_encoder.state_dict()
+    detach_and_move_to_cpu(org_text_encoder_sd)
+
+    if args.sdxl:
+        org_text_encoder_2_sd = pipe.text_encoder_2.state_dict()
+        detach_and_move_to_cpu(org_text_encoder_2_sd)
+
+    def seed_everything(seed):
+        torch.manual_seed(seed)
+        torch.cuda.manual_seed_all(seed)
+        np.random.seed(seed)
+        random.seed(seed)
+
+    # create image with original weights
+    print("create image with original weights")
+    seed_everything(args.seed)
+    image = pipe(args.prompt, negative_prompt=args.negative_prompt).images[0]
+    image.save(image_prefix + "original.png")
+
+    # apply LoRA network to the model: slower than merge_to, but can be reverted easily
+    print("apply LoRA network to the model")
+    lora_network.apply_to(multiplier=1.0)
+
+    print("create image with applied LoRA")
+    seed_everything(args.seed)
+    image = pipe(args.prompt, negative_prompt=args.negative_prompt).images[0]
+    image.save(image_prefix + "applied_lora.png")
+
+    # unapply LoRA network from the model
+    print("unapply LoRA network from the model")
+    lora_network.unapply_to()
+
+    print("create image with unapplied LoRA")
+    seed_everything(args.seed)
+    image = pipe(args.prompt, negative_prompt=args.negative_prompt).images[0]
+    image.save(image_prefix + "unapplied_lora.png")
+
+    # merge LoRA network into the model: faster than apply_to, but requires a back-up
+    # of the original weights (or restore_from)
+    print("merge LoRA network to the model")
+    lora_network.merge_to(multiplier=1.0)
+
+    print("create image with LoRA")
+    seed_everything(args.seed)
+    image = pipe(args.prompt, negative_prompt=args.negative_prompt).images[0]
+    image.save(image_prefix + "merged_lora.png")
+
+    # restore (unmerge) LoRA weights: numerically unstable.
+    # Due to floating-point error the result may not exactly match the original weights;
+    # restoring from a saved state_dict is the reliable way.
+    print("restore (unmerge) LoRA weights")
+    lora_network.restore_from(multiplier=1.0)
+
+    print("create image without LoRA")
+    seed_everything(args.seed)
+    image = pipe(args.prompt, negative_prompt=args.negative_prompt).images[0]
+    image.save(image_prefix + "unmerged_lora.png")
+
+    # restore original weights
+    print("restore original weights")
+    pipe.unet.load_state_dict(org_unet_sd)
+    pipe.text_encoder.load_state_dict(org_text_encoder_sd)
+    if args.sdxl:
+        pipe.text_encoder_2.load_state_dict(org_text_encoder_2_sd)
+
+    print("create image with restored original weights")
+    seed_everything(args.seed)
+    image = pipe(args.prompt, negative_prompt=args.negative_prompt).images[0]
+    image.save(image_prefix + "restore_original.png")
+
+    # use convenience function to merge LoRA weights
+    print("merge LoRA weights with convenience function")
+    merge_lora_weights(pipe, lora_sd, multiplier=1.0)
+
+    print("create image with merged LoRA weights")
+    seed_everything(args.seed)
+    image = pipe(args.prompt, negative_prompt=args.negative_prompt).images[0]
+    image.save(image_prefix + "convenience_merged_lora.png")
diff --git a/src/animatediff/utils/lpw_stable_diffusion.py b/src/animatediff/utils/lpw_stable_diffusion.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f7cecbe03bda5bb4b1433afba66288f7c9ce845
--- /dev/null
+++ b/src/animatediff/utils/lpw_stable_diffusion.py
@@ -0,0
+1,1520 @@ +# https://github.com/huggingface/diffusers/blob/main/examples/community/lpw_stable_diffusion.py + +import inspect +import re +from typing import Any, Callable, Dict, List, Optional, Union + +import numpy as np +import PIL +import torch +from diffusers import DiffusionPipeline +from diffusers.configuration_utils import FrozenDict +from diffusers.image_processor import VaeImageProcessor +from diffusers.loaders import (FromSingleFileMixin, LoraLoaderMixin, + TextualInversionLoaderMixin) +from diffusers.models import AutoencoderKL, UNet2DConditionModel +from diffusers.pipelines.stable_diffusion import ( + StableDiffusionPipelineOutput, StableDiffusionSafetyChecker) +from diffusers.schedulers import KarrasDiffusionSchedulers +from diffusers.utils import (PIL_INTERPOLATION, deprecate, + is_accelerate_available, is_accelerate_version, + logging) +from diffusers.utils.torch_utils import randn_tensor +from packaging import version +from transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer + +# ------------------------------------------------------------------------------ + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + +re_attention = re.compile( + r""" +\\\(| +\\\)| +\\\[| +\\]| +\\\\| +\\| +\(| +\[| +:([+-]?[.\d]+)\)| +\)| +]| +[^\\()\[\]:]+| +: +""", + re.X, +) + + +def parse_prompt_attention(text): + """ + Parses a string with attention tokens and returns a list of pairs: text and its associated weight. + Accepted tokens are: + (abc) - increases attention to abc by a multiplier of 1.1 + (abc:3.12) - increases attention to abc by a multiplier of 3.12 + [abc] - decreases attention to abc by a multiplier of 1.1 + \( - literal character '(' + \[ - literal character '[' + \) - literal character ')' + \] - literal character ']' + \\ - literal character '\' + anything else - just text + >>> parse_prompt_attention('normal text') + [['normal text', 1.0]] + >>> parse_prompt_attention('an (important) word') + [['an ', 1.0], ['important', 1.1], [' word', 1.0]] + >>> parse_prompt_attention('(unbalanced') + [['unbalanced', 1.1]] + >>> parse_prompt_attention('\(literal\]') + [['(literal]', 1.0]] + >>> parse_prompt_attention('(unnecessary)(parens)') + [['unnecessaryparens', 1.1]] + >>> parse_prompt_attention('a (((house:1.3)) [on] a (hill:0.5), sun, (((sky))).') + [['a ', 1.0], + ['house', 1.5730000000000004], + [' ', 1.1], + ['on', 1.0], + [' a ', 1.1], + ['hill', 0.55], + [', sun, ', 1.1], + ['sky', 1.4641000000000006], + ['.', 1.1]] + """ + + res = [] + round_brackets = [] + square_brackets = [] + + round_bracket_multiplier = 1.1 + square_bracket_multiplier = 1 / 1.1 + + def multiply_range(start_position, multiplier): + for p in range(start_position, len(res)): + res[p][1] *= multiplier + + for m in re_attention.finditer(text): + text = m.group(0) + weight = m.group(1) + + if text.startswith("\\"): + res.append([text[1:], 1.0]) + elif text == "(": + round_brackets.append(len(res)) + elif text == "[": + square_brackets.append(len(res)) + elif weight is not None and len(round_brackets) > 0: + multiply_range(round_brackets.pop(), float(weight)) + elif text == ")" and len(round_brackets) > 0: + multiply_range(round_brackets.pop(), round_bracket_multiplier) + elif text == "]" and len(square_brackets) > 0: + multiply_range(square_brackets.pop(), square_bracket_multiplier) + else: + res.append([text, 1.0]) + + for pos in round_brackets: + multiply_range(pos, round_bracket_multiplier) + + for pos in square_brackets: + multiply_range(pos, 
square_bracket_multiplier) + + if len(res) == 0: + res = [["", 1.0]] + + # merge runs of identical weights + i = 0 + while i + 1 < len(res): + if res[i][1] == res[i + 1][1]: + res[i][0] += res[i + 1][0] + res.pop(i + 1) + else: + i += 1 + + return res + + +def get_prompts_with_weights(pipe: DiffusionPipeline, prompt: List[str], max_length: int): + r""" + Tokenize a list of prompts and return its tokens with weights of each token. + + No padding, starting or ending token is included. + """ + tokens = [] + weights = [] + truncated = False + for text in prompt: + texts_and_weights = parse_prompt_attention(text) + text_token = [] + text_weight = [] + for word, weight in texts_and_weights: + # tokenize and discard the starting and the ending token + token = pipe.tokenizer(word).input_ids[1:-1] + text_token += token + # copy the weight by length of token + text_weight += [weight] * len(token) + # stop if the text is too long (longer than truncation limit) + if len(text_token) > max_length: + truncated = True + break + # truncate + if len(text_token) > max_length: + truncated = True + text_token = text_token[:max_length] + text_weight = text_weight[:max_length] + tokens.append(text_token) + weights.append(text_weight) + if truncated: + logger.warning("Prompt was truncated. Try to shorten the prompt or increase max_embeddings_multiples") + return tokens, weights + + +def pad_tokens_and_weights(tokens, weights, max_length, bos, eos, pad, no_boseos_middle=True, chunk_length=77): + r""" + Pad the tokens (with starting and ending tokens) and weights (with 1.0) to max_length. + """ + max_embeddings_multiples = (max_length - 2) // (chunk_length - 2) + weights_length = max_length if no_boseos_middle else max_embeddings_multiples * chunk_length + for i in range(len(tokens)): + tokens[i] = [bos] + tokens[i] + [pad] * (max_length - 1 - len(tokens[i]) - 1) + [eos] + if no_boseos_middle: + weights[i] = [1.0] + weights[i] + [1.0] * (max_length - 1 - len(weights[i])) + else: + w = [] + if len(weights[i]) == 0: + w = [1.0] * weights_length + else: + for j in range(max_embeddings_multiples): + w.append(1.0) # weight for starting token in this chunk + w += weights[i][j * (chunk_length - 2) : min(len(weights[i]), (j + 1) * (chunk_length - 2))] + w.append(1.0) # weight for ending token in this chunk + w += [1.0] * (weights_length - len(w)) + weights[i] = w[:] + + return tokens, weights + +def get_unweighted_text_embeddings( + pipe: DiffusionPipeline, + text_input: torch.Tensor, + chunk_length: int, + no_boseos_middle: Optional[bool] = True, + clip_skip: int = 1 +): + """ + When the length of tokens is a multiple of the capacity of the text encoder, + it should be split into chunks and sent to the text encoder individually. 
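+
+    For example, with chunk_length=77 (CLIP's window) and a padded input of
+    227 tokens, max_embeddings_multiples = (227 - 2) // (77 - 2) = 3, so the
+    encoder is run on three 77-token chunks, each re-wrapped with the BOS/EOS
+    tokens, and the per-chunk embeddings are concatenated along the sequence
+    axis.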
+ """ + from ..models.clip import CLIPSkipTextModel + max_embeddings_multiples = (text_input.shape[1] - 2) // (chunk_length - 2) + if max_embeddings_multiples > 1: + text_embeddings = [] + for i in range(max_embeddings_multiples): + # extract the i-th chunk + text_input_chunk = text_input[:, i * (chunk_length - 2) : (i + 1) * (chunk_length - 2) + 2].clone() + + # cover the head and the tail by the starting and the ending tokens + text_input_chunk[:, 0] = text_input[0, 0] + text_input_chunk[:, -1] = text_input[0, -1] + if isinstance(pipe.text_encoder, CLIPSkipTextModel): + text_embedding = pipe.text_encoder(text_input_chunk, clip_skip=clip_skip)[0] + else: + text_embedding = pipe.text_encoder(text_input_chunk)[0] + + if no_boseos_middle: + if i == 0: + # discard the ending token + text_embedding = text_embedding[:, :-1] + elif i == max_embeddings_multiples - 1: + # discard the starting token + text_embedding = text_embedding[:, 1:] + else: + # discard both starting and ending tokens + text_embedding = text_embedding[:, 1:-1] + + text_embeddings.append(text_embedding) + text_embeddings = torch.concat(text_embeddings, axis=1) + else: + if isinstance(pipe.text_encoder, CLIPSkipTextModel): + text_embeddings = pipe.text_encoder(text_input, clip_skip=clip_skip)[0] + else: + text_embeddings = pipe.text_encoder(text_input)[0] + return text_embeddings + + +def get_weighted_text_embeddings( + pipe: DiffusionPipeline, + prompt: Union[str, List[str]], + uncond_prompt: Optional[Union[str, List[str]]] = None, + max_embeddings_multiples: Optional[int] = 3, + no_boseos_middle: Optional[bool] = False, + skip_parsing: Optional[bool] = False, + skip_weighting: Optional[bool] = False, + clip_skip:int=1 +): + r""" + Prompts can be assigned with local weights using brackets. For example, + prompt 'A (very beautiful) masterpiece' highlights the words 'very beautiful', + and the embedding tokens corresponding to the words get multiplied by a constant, 1.1. + + Also, to regularize of the embedding, the weighted embedding would be scaled to preserve the original mean. + + Args: + pipe (`DiffusionPipeline`): + Pipe to provide access to the tokenizer and the text encoder. + prompt (`str` or `List[str]`): + The prompt or prompts to guide the image generation. + uncond_prompt (`str` or `List[str]`): + The unconditional prompt or prompts for guide the image generation. If unconditional prompt + is provided, the embeddings of prompt and uncond_prompt are concatenated. + max_embeddings_multiples (`int`, *optional*, defaults to `3`): + The max multiple length of prompt embeddings compared to the max output length of text encoder. + no_boseos_middle (`bool`, *optional*, defaults to `False`): + If the length of text token is multiples of the capacity of text encoder, whether reserve the starting and + ending token in each of the chunk in the middle. + skip_parsing (`bool`, *optional*, defaults to `False`): + Skip the parsing of brackets. + skip_weighting (`bool`, *optional*, defaults to `False`): + Skip the weighting. When the parsing is skipped, it is forced True. 
+ """ + max_length = (pipe.tokenizer.model_max_length - 2) * max_embeddings_multiples + 2 + if isinstance(prompt, str): + prompt = [prompt] + + if not skip_parsing: + prompt_tokens, prompt_weights = get_prompts_with_weights(pipe, prompt, max_length - 2) + if uncond_prompt is not None: + if isinstance(uncond_prompt, str): + uncond_prompt = [uncond_prompt] + uncond_tokens, uncond_weights = get_prompts_with_weights(pipe, uncond_prompt, max_length - 2) + else: + prompt_tokens = [ + token[1:-1] for token in pipe.tokenizer(prompt, max_length=max_length, truncation=True).input_ids + ] + prompt_weights = [[1.0] * len(token) for token in prompt_tokens] + if uncond_prompt is not None: + if isinstance(uncond_prompt, str): + uncond_prompt = [uncond_prompt] + uncond_tokens = [ + token[1:-1] + for token in pipe.tokenizer(uncond_prompt, max_length=max_length, truncation=True).input_ids + ] + uncond_weights = [[1.0] * len(token) for token in uncond_tokens] + + # round up the longest length of tokens to a multiple of (model_max_length - 2) + max_length = max([len(token) for token in prompt_tokens]) + if uncond_prompt is not None: + max_length = max(max_length, max([len(token) for token in uncond_tokens])) + + max_embeddings_multiples = min( + max_embeddings_multiples, + (max_length - 1) // (pipe.tokenizer.model_max_length - 2) + 1, + ) + max_embeddings_multiples = max(1, max_embeddings_multiples) + max_length = (pipe.tokenizer.model_max_length - 2) * max_embeddings_multiples + 2 + + # pad the length of tokens and weights + bos = pipe.tokenizer.bos_token_id + eos = pipe.tokenizer.eos_token_id + pad = getattr(pipe.tokenizer, "pad_token_id", eos) + prompt_tokens, prompt_weights = pad_tokens_and_weights( + prompt_tokens, + prompt_weights, + max_length, + bos, + eos, + pad, + no_boseos_middle=no_boseos_middle, + chunk_length=pipe.tokenizer.model_max_length, + ) + prompt_tokens = torch.tensor(prompt_tokens, dtype=torch.long, device=pipe.device) + if uncond_prompt is not None: + uncond_tokens, uncond_weights = pad_tokens_and_weights( + uncond_tokens, + uncond_weights, + max_length, + bos, + eos, + pad, + no_boseos_middle=no_boseos_middle, + chunk_length=pipe.tokenizer.model_max_length, + ) + uncond_tokens = torch.tensor(uncond_tokens, dtype=torch.long, device=pipe.device) + + # get the embeddings + text_embeddings = get_unweighted_text_embeddings( + pipe, + prompt_tokens, + pipe.tokenizer.model_max_length, + no_boseos_middle=no_boseos_middle, + clip_skip=clip_skip + ) + prompt_weights = torch.tensor(prompt_weights, dtype=text_embeddings.dtype, device=text_embeddings.device) + if uncond_prompt is not None: + uncond_embeddings = get_unweighted_text_embeddings( + pipe, + uncond_tokens, + pipe.tokenizer.model_max_length, + no_boseos_middle=no_boseos_middle, + clip_skip=clip_skip + ) + uncond_weights = torch.tensor(uncond_weights, dtype=uncond_embeddings.dtype, device=uncond_embeddings.device) + + # assign weights to the prompts and normalize in the sense of mean + # TODO: should we normalize by chunk or in a whole (current implementation)? 
+ if (not skip_parsing) and (not skip_weighting): + previous_mean = text_embeddings.float().mean(axis=[-2, -1]).to(text_embeddings.dtype) + text_embeddings *= prompt_weights.unsqueeze(-1) + current_mean = text_embeddings.float().mean(axis=[-2, -1]).to(text_embeddings.dtype) + text_embeddings *= (previous_mean / current_mean).unsqueeze(-1).unsqueeze(-1) + if uncond_prompt is not None: + previous_mean = uncond_embeddings.float().mean(axis=[-2, -1]).to(uncond_embeddings.dtype) + uncond_embeddings *= uncond_weights.unsqueeze(-1) + current_mean = uncond_embeddings.float().mean(axis=[-2, -1]).to(uncond_embeddings.dtype) + uncond_embeddings *= (previous_mean / current_mean).unsqueeze(-1).unsqueeze(-1) + + if uncond_prompt is not None: + return text_embeddings, uncond_embeddings + return text_embeddings, None + + +def lpw_encode_prompt( + pipe: DiffusionPipeline, + prompt:str, + do_classifier_free_guidance:bool, + negative_prompt:Optional[str] = None, + max_embeddings_multiples:Optional[int] = 3, +): + r""" + Encodes the prompt into text encoder hidden states. + + Args: + prompt (`str` or `list(int)`): + prompt to be encoded + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + negative_prompt (`str` or `List[str]`): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). + max_embeddings_multiples (`int`, *optional*, defaults to `3`): + The max multiple length of prompt embeddings compared to the max output length of text encoder. + """ + + if negative_prompt is None: + negative_prompt = [""] + elif isinstance(negative_prompt, str): + negative_prompt = [negative_prompt] + + if isinstance(pipe, TextualInversionLoaderMixin): + prompt = pipe.maybe_convert_prompt(prompt, pipe.tokenizer) + if do_classifier_free_guidance: + negative_prompt = pipe.maybe_convert_prompt(negative_prompt, pipe.tokenizer) + + prompt_embeds1, negative_prompt_embeds1 = get_weighted_text_embeddings( + pipe=pipe, + prompt=prompt, + uncond_prompt=negative_prompt if do_classifier_free_guidance else None, + max_embeddings_multiples=max_embeddings_multiples, + ) + + return prompt_embeds1, negative_prompt_embeds1 + + +def preprocess_image(image, batch_size): + w, h = image.size + w, h = (x - x % 8 for x in (w, h)) # resize to integer multiple of 8 + image = image.resize((w, h), resample=PIL_INTERPOLATION["lanczos"]) + image = np.array(image).astype(np.float32) / 255.0 + image = np.vstack([image[None].transpose(0, 3, 1, 2)] * batch_size) + image = torch.from_numpy(image) + return 2.0 * image - 1.0 + + +def preprocess_mask(mask, batch_size, scale_factor=8): + if not isinstance(mask, torch.FloatTensor): + mask = mask.convert("L") + w, h = mask.size + w, h = (x - x % 8 for x in (w, h)) # resize to integer multiple of 8 + mask = mask.resize((w // scale_factor, h // scale_factor), resample=PIL_INTERPOLATION["nearest"]) + mask = np.array(mask).astype(np.float32) / 255.0 + mask = np.tile(mask, (4, 1, 1)) + mask = np.vstack([mask[None]] * batch_size) + mask = 1 - mask # repaint white, keep black + mask = torch.from_numpy(mask) + return mask + + else: + valid_mask_channel_sizes = [1, 3] + # if mask channel is fourth tensor dimension, permute dimensions to pytorch standard (B, C, H, W) + if mask.shape[3] in valid_mask_channel_sizes: + mask = mask.permute(0, 3, 1, 2) + elif mask.shape[1] not in valid_mask_channel_sizes: + raise ValueError( + f"Mask channel dimension of size in {valid_mask_channel_sizes} should be second 
or fourth dimension," + f" but received mask of shape {tuple(mask.shape)}" + ) + # (potentially) reduce mask channel dimension from 3 to 1 for broadcasting to latent shape + mask = mask.mean(dim=1, keepdim=True) + h, w = mask.shape[-2:] + h, w = (x - x % 8 for x in (h, w)) # resize to integer multiple of 8 + mask = torch.nn.functional.interpolate(mask, (h // scale_factor, w // scale_factor)) + return mask + + +class StableDiffusionLongPromptWeightingPipeline( + DiffusionPipeline, TextualInversionLoaderMixin, LoraLoaderMixin, FromSingleFileMixin +): + r""" + Pipeline for text-to-image generation using Stable Diffusion without tokens length limit, and support parsing + weighting in prompt. + + This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the + library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) + + Args: + vae ([`AutoencoderKL`]): + Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations. + text_encoder ([`CLIPTextModel`]): + Frozen text-encoder. Stable Diffusion uses the text portion of + [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically + the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant. + tokenizer (`CLIPTokenizer`): + Tokenizer of class + [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer). + unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents. + scheduler ([`SchedulerMixin`]): + A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of + [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. + safety_checker ([`StableDiffusionSafetyChecker`]): + Classification module that estimates whether generated images could be considered offensive or harmful. + Please, refer to the [model card](https://huggingface.co/CompVis/stable-diffusion-v1-4) for details. + feature_extractor ([`CLIPImageProcessor`]): + Model that extracts features from generated images to be used as inputs for the `safety_checker`. + """ + + _optional_components = ["safety_checker", "feature_extractor"] + + def __init__( + self, + vae: AutoencoderKL, + text_encoder: CLIPTextModel, + tokenizer: CLIPTokenizer, + unet: UNet2DConditionModel, + scheduler: KarrasDiffusionSchedulers, + safety_checker: StableDiffusionSafetyChecker, + feature_extractor: CLIPImageProcessor, + requires_safety_checker: bool = True, + ): + super().__init__() + + if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1: + deprecation_message = ( + f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`" + f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure " + "to update the config accordingly as leaving `steps_offset` might led to incorrect results" + " in future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub," + " it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`" + " file" + ) + deprecate("steps_offset!=1", "1.0.0", deprecation_message, standard_warn=False) + new_config = dict(scheduler.config) + new_config["steps_offset"] = 1 + scheduler._internal_dict = FrozenDict(new_config) + + if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True: + deprecation_message = ( + f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`." + " `clip_sample` should be set to False in the configuration file. Please make sure to update the" + " config accordingly as not setting `clip_sample` in the config might lead to incorrect results in" + " future versions. If you have downloaded this checkpoint from the Hugging Face Hub, it would be very" + " nice if you could open a Pull request for the `scheduler/scheduler_config.json` file" + ) + deprecate("clip_sample not set", "1.0.0", deprecation_message, standard_warn=False) + new_config = dict(scheduler.config) + new_config["clip_sample"] = False + scheduler._internal_dict = FrozenDict(new_config) + + if safety_checker is None and requires_safety_checker: + logger.warning( + f"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure" + " that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered" + " results in services or applications open to the public. Both the diffusers team and Hugging Face" + " strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling" + " it only for use-cases that involve analyzing network behavior or auditing its results. For more" + " information, please have a look at https://github.com/huggingface/diffusers/pull/254 ." + ) + + if safety_checker is not None and feature_extractor is None: + raise ValueError( + "Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety" + " checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead." + ) + + is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse( + version.parse(unet.config._diffusers_version).base_version + ) < version.parse("0.9.0.dev0") + is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64 + if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64: + deprecation_message = ( + "The configuration file of the unet has set the default `sample_size` to smaller than" + " 64 which seems highly unlikely. If your checkpoint is a fine-tuned version of any of the" + " following: \n- CompVis/stable-diffusion-v1-4 \n- CompVis/stable-diffusion-v1-3 \n-" + " CompVis/stable-diffusion-v1-2 \n- CompVis/stable-diffusion-v1-1 \n- runwayml/stable-diffusion-v1-5" + " \n- runwayml/stable-diffusion-inpainting \n you should change 'sample_size' to 64 in the" + " configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`" + " in the config might lead to incorrect results in future versions. 
If you have downloaded this" + " checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for" + " the `unet/config.json` file" + ) + deprecate("sample_size<64", "1.0.0", deprecation_message, standard_warn=False) + new_config = dict(unet.config) + new_config["sample_size"] = 64 + unet._internal_dict = FrozenDict(new_config) + self.register_modules( + vae=vae, + text_encoder=text_encoder, + tokenizer=tokenizer, + unet=unet, + scheduler=scheduler, + safety_checker=safety_checker, + feature_extractor=feature_extractor, + ) + self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1) + + self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor) + self.register_to_config( + requires_safety_checker=requires_safety_checker, + ) + + def enable_vae_slicing(self): + r""" + Enable sliced VAE decoding. + + When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several + steps. This is useful to save some memory and allow larger batch sizes. + """ + self.vae.enable_slicing() + + def disable_vae_slicing(self): + r""" + Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to + computing decoding in one step. + """ + self.vae.disable_slicing() + + def enable_vae_tiling(self): + r""" + Enable tiled VAE decoding. + + When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in + several steps. This is useful to save a large amount of memory and to allow the processing of larger images. + """ + self.vae.enable_tiling() + + def disable_vae_tiling(self): + r""" + Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to + computing decoding in one step. + """ + self.vae.disable_tiling() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload + def enable_sequential_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet, + text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a + `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called. + Note that offloading happens on a submodule basis. Memory savings are higher than with + `enable_model_cpu_offload`, but performance is lower. + """ + if is_accelerate_available() and is_accelerate_version(">=", "0.14.0"): + from accelerate import cpu_offload + else: + raise ImportError("`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher") + + device = torch.device(f"cuda:{gpu_id}") + + if self.device.type != "cpu": + self.to("cpu", silence_dtype_warnings=True) + torch.cuda.empty_cache() # otherwise we don't see the memory savings (but they probably exist) + + for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]: + cpu_offload(cpu_offloaded_model, device) + + if self.safety_checker is not None: + cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_model_cpu_offload + def enable_model_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. 
Compared + to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward` + method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with + `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`. + """ + if is_accelerate_available() and is_accelerate_version(">=", "0.17.0.dev0"): + from accelerate import cpu_offload_with_hook + else: + raise ImportError("`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.") + + device = torch.device(f"cuda:{gpu_id}") + + if self.device.type != "cpu": + self.to("cpu", silence_dtype_warnings=True) + torch.cuda.empty_cache() # otherwise we don't see the memory savings (but they probably exist) + + hook = None + for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]: + _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook) + + if self.safety_checker is not None: + _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook) + + # We'll offload the last model manually. + self.final_offload_hook = hook + + @property + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device + def _execution_device(self): + r""" + Returns the device on which the pipeline's models will be executed. After calling + `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module + hooks. + """ + if not hasattr(self.unet, "_hf_hook"): + return self.device + for module in self.unet.modules(): + if ( + hasattr(module, "_hf_hook") + and hasattr(module._hf_hook, "execution_device") + and module._hf_hook.execution_device is not None + ): + return torch.device(module._hf_hook.execution_device) + return self.device + + def _encode_prompt( + self, + prompt, + device, + num_images_per_prompt, + do_classifier_free_guidance, + negative_prompt=None, + max_embeddings_multiples=3, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + ): + r""" + Encodes the prompt into text encoder hidden states. + + Args: + prompt (`str` or `list(int)`): + prompt to be encoded + device: (`torch.device`): + torch device + num_images_per_prompt (`int`): + number of images that should be generated per prompt + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + negative_prompt (`str` or `List[str]`): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). + max_embeddings_multiples (`int`, *optional*, defaults to `3`): + The max multiple length of prompt embeddings compared to the max output length of text encoder. + """ + if prompt is not None and isinstance(prompt, str): + batch_size = 1 + elif prompt is not None and isinstance(prompt, list): + batch_size = len(prompt) + else: + batch_size = prompt_embeds.shape[0] + + if negative_prompt_embeds is None: + if negative_prompt is None: + negative_prompt = [""] * batch_size + elif isinstance(negative_prompt, str): + negative_prompt = [negative_prompt] * batch_size + if batch_size != len(negative_prompt): + raise ValueError( + f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:" + f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches" + " the batch size of `prompt`." 
+ ) + if prompt_embeds is None or negative_prompt_embeds is None: + if isinstance(self, TextualInversionLoaderMixin): + prompt = self.maybe_convert_prompt(prompt, self.tokenizer) + if do_classifier_free_guidance and negative_prompt_embeds is None: + negative_prompt = self.maybe_convert_prompt(negative_prompt, self.tokenizer) + + prompt_embeds1, negative_prompt_embeds1 = get_weighted_text_embeddings( + pipe=self, + prompt=prompt, + uncond_prompt=negative_prompt if do_classifier_free_guidance else None, + max_embeddings_multiples=max_embeddings_multiples, + ) + if prompt_embeds is None: + prompt_embeds = prompt_embeds1 + if negative_prompt_embeds is None: + negative_prompt_embeds = negative_prompt_embeds1 + + bs_embed, seq_len, _ = prompt_embeds.shape + # duplicate text embeddings for each generation per prompt, using mps friendly method + prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1) + prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1) + + if do_classifier_free_guidance: + bs_embed, seq_len, _ = negative_prompt_embeds.shape + negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1) + negative_prompt_embeds = negative_prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1) + prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds]) + + return prompt_embeds + + def check_inputs( + self, + prompt, + height, + width, + strength, + callback_steps, + negative_prompt=None, + prompt_embeds=None, + negative_prompt_embeds=None, + ): + if height % 8 != 0 or width % 8 != 0: + raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") + + if strength < 0 or strength > 1: + raise ValueError(f"The value of strength should in [0.0, 1.0] but is {strength}") + + if (callback_steps is None) or ( + callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) + ): + raise ValueError( + f"`callback_steps` has to be a positive integer but is {callback_steps} of type" + f" {type(callback_steps)}." + ) + + if prompt is not None and prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to" + " only forward one of the two." + ) + elif prompt is None and prompt_embeds is None: + raise ValueError( + "Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined." + ) + elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)): + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + + if negative_prompt is not None and negative_prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:" + f" {negative_prompt_embeds}. Please make sure to only forward one of the two." + ) + + if prompt_embeds is not None and negative_prompt_embeds is not None: + if prompt_embeds.shape != negative_prompt_embeds.shape: + raise ValueError( + "`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but" + f" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`" + f" {negative_prompt_embeds.shape}." 
+ ) + + def get_timesteps(self, num_inference_steps, strength, device, is_text2img): + if is_text2img: + return self.scheduler.timesteps.to(device), num_inference_steps + else: + # get the original timestep using init_timestep + init_timestep = min(int(num_inference_steps * strength), num_inference_steps) + + t_start = max(num_inference_steps - init_timestep, 0) + timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :] + + return timesteps, num_inference_steps - t_start + + def run_safety_checker(self, image, device, dtype): + if self.safety_checker is not None: + safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors="pt").to(device) + image, has_nsfw_concept = self.safety_checker( + images=image, clip_input=safety_checker_input.pixel_values.to(dtype) + ) + else: + has_nsfw_concept = None + return image, has_nsfw_concept + + def decode_latents(self, latents): + latents = 1 / self.vae.config.scaling_factor * latents + image = self.vae.decode(latents).sample + image = (image / 2 + 0.5).clamp(0, 1) + # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16 + image = image.cpu().permute(0, 2, 3, 1).float().numpy() + return image + + def prepare_extra_step_kwargs(self, generator, eta): + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. + # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 + # and should be between [0, 1] + + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + extra_step_kwargs = {} + if accepts_eta: + extra_step_kwargs["eta"] = eta + + # check if the scheduler accepts generator + accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) + if accepts_generator: + extra_step_kwargs["generator"] = generator + return extra_step_kwargs + + def prepare_latents( + self, + image, + timestep, + num_images_per_prompt, + batch_size, + num_channels_latents, + height, + width, + dtype, + device, + generator, + latents=None, + ): + if image is None: + batch_size = batch_size * num_images_per_prompt + shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor) + if isinstance(generator, list) and len(generator) != batch_size: + raise ValueError( + f"You have passed a list of generators of length {len(generator)}, but requested an effective batch" + f" size of {batch_size}. Make sure the batch size matches the length of the generators." 
+ ) + + if latents is None: + latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype) + else: + latents = latents.to(device) + + # scale the initial noise by the standard deviation required by the scheduler + latents = latents * self.scheduler.init_noise_sigma + return latents, None, None + else: + image = image.to(device=self.device, dtype=dtype) + init_latent_dist = self.vae.encode(image).latent_dist + init_latents = init_latent_dist.sample(generator=generator) + init_latents = self.vae.config.scaling_factor * init_latents + + # Expand init_latents for batch_size and num_images_per_prompt + init_latents = torch.cat([init_latents] * num_images_per_prompt, dim=0) + init_latents_orig = init_latents + + # add noise to latents using the timesteps + noise = randn_tensor(init_latents.shape, generator=generator, device=self.device, dtype=dtype) + init_latents = self.scheduler.add_noise(init_latents, noise, timestep) + latents = init_latents + return latents, init_latents_orig, noise + + @torch.no_grad() + def __call__( + self, + prompt: Union[str, List[str]], + negative_prompt: Optional[Union[str, List[str]]] = None, + image: Union[torch.FloatTensor, PIL.Image.Image] = None, + mask_image: Union[torch.FloatTensor, PIL.Image.Image] = None, + height: int = 512, + width: int = 512, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + strength: float = 0.8, + num_images_per_prompt: Optional[int] = 1, + add_predicted_noise: Optional[bool] = False, + eta: float = 0.0, + generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None, + latents: Optional[torch.FloatTensor] = None, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + max_embeddings_multiples: Optional[int] = 3, + output_type: Optional[str] = "pil", + return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + is_cancelled_callback: Optional[Callable[[], bool]] = None, + callback_steps: int = 1, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + ): + r""" + Function invoked when calling the pipeline for generation. + + Args: + prompt (`str` or `List[str]`): + The prompt or prompts to guide the image generation. + negative_prompt (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). + image (`torch.FloatTensor` or `PIL.Image.Image`): + `Image`, or tensor representing an image batch, that will be used as the starting point for the + process. + mask_image (`torch.FloatTensor` or `PIL.Image.Image`): + `Image`, or tensor representing an image batch, to mask `image`. White pixels in the mask will be + replaced by noise and therefore repainted, while black pixels will be preserved. If `mask_image` is a + PIL image, it will be converted to a single channel (luminance) before use. If it's a tensor, it should + contain one color channel (L) instead of 3, so the expected shape would be `(B, H, W, 1)`. + height (`int`, *optional*, defaults to 512): + The height in pixels of the generated image. + width (`int`, *optional*, defaults to 512): + The width in pixels of the generated image. + num_inference_steps (`int`, *optional*, defaults to 50): + The number of denoising steps. More denoising steps usually lead to a higher quality image at the + expense of slower inference. 
+            guidance_scale (`float`, *optional*, defaults to 7.5):
+                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
+                `guidance_scale` is defined as `w` of equation 2. of [Imagen
+                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
+                1`. A higher guidance scale encourages the model to generate images closely linked to the text
+                `prompt`, usually at the expense of lower image quality.
+            strength (`float`, *optional*, defaults to 0.8):
+                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1.
+                `image` will be used as a starting point, adding more noise to it the larger the `strength`. The
+                number of denoising steps depends on the amount of noise initially added. When `strength` is 1, added
+                noise will be maximum and the denoising process will run for the full number of iterations specified in
+                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.
+            num_images_per_prompt (`int`, *optional*, defaults to 1):
+                The number of images to generate per prompt.
+            add_predicted_noise (`bool`, *optional*, defaults to False):
+                Use predicted noise instead of random noise when constructing noisy versions of the original image in
+                the reverse diffusion process.
+            eta (`float`, *optional*, defaults to 0.0):
+                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
+                [`schedulers.DDIMScheduler`], will be ignored for others.
+            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
+                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
+                to make generation deterministic.
+            latents (`torch.FloatTensor`, *optional*):
+                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image
+                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents
+                tensor will be generated by sampling using the supplied random `generator`.
+            prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
+                provided, text embeddings will be generated from `prompt` input argument.
+            negative_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
+                argument.
+            max_embeddings_multiples (`int`, *optional*, defaults to `3`):
+                The max multiple length of prompt embeddings compared to the max output length of text encoder.
+            output_type (`str`, *optional*, defaults to `"pil"`):
+                The output format of the generated image. Choose between
+                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
+            return_dict (`bool`, *optional*, defaults to `True`):
+                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a
+                plain tuple.
+            callback (`Callable`, *optional*):
+                A function that will be called every `callback_steps` steps during inference. The function will be
+                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.
+            is_cancelled_callback (`Callable`, *optional*):
+                A function that will be called every `callback_steps` steps during inference. If the function returns
+                `True`, the inference will be cancelled.
+            callback_steps (`int`, *optional*, defaults to 1):
+                The frequency at which the `callback` function will be called. If not specified, the callback will be
+                called at every step.
+            cross_attention_kwargs (`dict`, *optional*):
+                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
+                `self.processor` in
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
+
+        Returns:
+            `None` if cancelled by `is_cancelled_callback`,
+            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:
+            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.
+            When returning a tuple, the first element is a list with the generated images, and the second element is a
+            list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work"
+            (nsfw) content, according to the `safety_checker`.
+        """
+        # 0. Default height and width to unet
+        height = height or self.unet.config.sample_size * self.vae_scale_factor
+        width = width or self.unet.config.sample_size * self.vae_scale_factor
+
+        # 1. Check inputs. Raise error if not correct
+        self.check_inputs(
+            prompt, height, width, strength, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds
+        )
+
+        # 2. Define call parameters
+        if prompt is not None and isinstance(prompt, str):
+            batch_size = 1
+        elif prompt is not None and isinstance(prompt, list):
+            batch_size = len(prompt)
+        else:
+            batch_size = prompt_embeds.shape[0]
+
+        device = self._execution_device
+        # here `guidance_scale` is defined analogous to the guidance weight `w` of equation (2)
+        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`
+        # corresponds to doing no classifier free guidance.
+        do_classifier_free_guidance = guidance_scale > 1.0
+
+        # 3. Encode input prompt
+        prompt_embeds = self._encode_prompt(
+            prompt,
+            device,
+            num_images_per_prompt,
+            do_classifier_free_guidance,
+            negative_prompt,
+            max_embeddings_multiples,
+            prompt_embeds=prompt_embeds,
+            negative_prompt_embeds=negative_prompt_embeds,
+        )
+        dtype = prompt_embeds.dtype
+
+        # 4. Preprocess image and mask
+        if isinstance(image, PIL.Image.Image):
+            image = preprocess_image(image, batch_size)
+        if image is not None:
+            image = image.to(device=self.device, dtype=dtype)
+        if isinstance(mask_image, PIL.Image.Image):
+            mask_image = preprocess_mask(mask_image, batch_size, self.vae_scale_factor)
+        if mask_image is not None:
+            mask = mask_image.to(device=self.device, dtype=dtype)
+            mask = torch.cat([mask] * num_images_per_prompt)
+        else:
+            mask = None
+
+        # 5. set timesteps
+        self.scheduler.set_timesteps(num_inference_steps, device=device)
+        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device, image is None)
+        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)
+
+        # 6. Prepare latent variables
+        latents, init_latents_orig, noise = self.prepare_latents(
+            image,
+            latent_timestep,
+            num_images_per_prompt,
+            batch_size,
+            self.unet.config.in_channels,
+            height,
+            width,
+            dtype,
+            device,
+            generator,
+            latents,
+        )
+
+        # 7. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline
+        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)
+
+        # 8. Denoising loop
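+        # The loop below runs one scheduler step per iteration: the latents are
+        # duplicated when classifier-free guidance is active, the UNet predicts the
+        # noise residual, the two predictions are combined, and the scheduler computes
+        # x_t -> x_{t-1}. For masked (inpaint) runs, the known region is re-imposed
+        # from the noised original latents after every step. `num_warmup_steps`
+        # accounts for higher-order schedulers that consume `scheduler.order`
+        # timesteps per denoising step.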
+        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order
+        with self.progress_bar(total=num_inference_steps) as progress_bar:
+            for i, t in enumerate(timesteps):
+                # expand the latents if we are doing classifier free guidance
+                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents
+                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)
+
+                # predict the noise residual
+                noise_pred = self.unet(
+                    latent_model_input,
+                    t,
+                    encoder_hidden_states=prompt_embeds,
+                    cross_attention_kwargs=cross_attention_kwargs,
+                ).sample
+
+                # perform guidance
+                if do_classifier_free_guidance:
+                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
+                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)
+
+                # compute the previous noisy sample x_t -> x_t-1
+                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample
+
+                if mask is not None:
+                    # masking
+                    if add_predicted_noise:
+                        init_latents_proper = self.scheduler.add_noise(
+                            init_latents_orig, noise_pred_uncond, torch.tensor([t])
+                        )
+                    else:
+                        init_latents_proper = self.scheduler.add_noise(init_latents_orig, noise, torch.tensor([t]))
+                    latents = (init_latents_proper * mask) + (latents * (1 - mask))
+
+                # call the callback, if provided
+                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):
+                    progress_bar.update()
+                    if i % callback_steps == 0:
+                        if callback is not None:
+                            callback(i, t, latents)
+                        if is_cancelled_callback is not None and is_cancelled_callback():
+                            return None
+
+        if output_type == "latent":
+            image = latents
+            has_nsfw_concept = None
+        elif output_type == "pil":
+            # 9. Post-processing
+            image = self.decode_latents(latents)
+
+            # 10. Run safety checker
+            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)
+
+            # 11. Convert to PIL
+            image = self.numpy_to_pil(image)
+        else:
+            # 9. Post-processing
+            image = self.decode_latents(latents)
+
+            # 10. Run safety checker
+            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)
+
+        # Offload last model to CPU
+        if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None:
+            self.final_offload_hook.offload()
+
+        if not return_dict:
+            return image, has_nsfw_concept
+
+        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)
+
+    def text2img(
+        self,
+        prompt: Union[str, List[str]],
+        negative_prompt: Optional[Union[str, List[str]]] = None,
+        height: int = 512,
+        width: int = 512,
+        num_inference_steps: int = 50,
+        guidance_scale: float = 7.5,
+        num_images_per_prompt: Optional[int] = 1,
+        eta: float = 0.0,
+        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
+        latents: Optional[torch.FloatTensor] = None,
+        prompt_embeds: Optional[torch.FloatTensor] = None,
+        negative_prompt_embeds: Optional[torch.FloatTensor] = None,
+        max_embeddings_multiples: Optional[int] = 3,
+        output_type: Optional[str] = "pil",
+        return_dict: bool = True,
+        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,
+        is_cancelled_callback: Optional[Callable[[], bool]] = None,
+        callback_steps: int = 1,
+        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
+    ):
+        r"""
+        Function for text-to-image generation.
+        Args:
+            prompt (`str` or `List[str]`):
+                The prompt or prompts to guide the image generation.
+            negative_prompt (`str` or `List[str]`, *optional*):
+                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored
+                if `guidance_scale` is less than `1`).
+            height (`int`, *optional*, defaults to 512):
+                The height in pixels of the generated image.
+            width (`int`, *optional*, defaults to 512):
+                The width in pixels of the generated image.
+            num_inference_steps (`int`, *optional*, defaults to 50):
+                The number of denoising steps. More denoising steps usually lead to a higher quality image at the
+                expense of slower inference.
+            guidance_scale (`float`, *optional*, defaults to 7.5):
+                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
+                `guidance_scale` is defined as `w` of equation 2. of [Imagen
+                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
+                1`. A higher guidance scale encourages the model to generate images closely linked to the text
+                `prompt`, usually at the expense of lower image quality.
+            num_images_per_prompt (`int`, *optional*, defaults to 1):
+                The number of images to generate per prompt.
+            eta (`float`, *optional*, defaults to 0.0):
+                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
+                [`schedulers.DDIMScheduler`], will be ignored for others.
+            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
+                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
+                to make generation deterministic.
+            latents (`torch.FloatTensor`, *optional*):
+                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image
+                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents
+                tensor will be generated by sampling using the supplied random `generator`.
+            prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
+                provided, text embeddings will be generated from `prompt` input argument.
+            negative_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
+                argument.
+            max_embeddings_multiples (`int`, *optional*, defaults to `3`):
+                The max multiple length of prompt embeddings compared to the max output length of text encoder.
+            output_type (`str`, *optional*, defaults to `"pil"`):
+                The output format of the generated image. Choose between
+                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
+            return_dict (`bool`, *optional*, defaults to `True`):
+                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a
+                plain tuple.
+            callback (`Callable`, *optional*):
+                A function that will be called every `callback_steps` steps during inference. The function will be
+                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.
+            is_cancelled_callback (`Callable`, *optional*):
+                A function that will be called every `callback_steps` steps during inference. If the function returns
+                `True`, the inference will be cancelled.
+            callback_steps (`int`, *optional*, defaults to 1):
+                The frequency at which the `callback` function will be called. If not specified, the callback will be
+                called at every step.
+            cross_attention_kwargs (`dict`, *optional*):
+                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
+                `self.processor` in
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
+
+        Returns:
+            `None` if cancelled by `is_cancelled_callback`,
+            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:
+            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.
+            When returning a tuple, the first element is a list with the generated images, and the second element is a
+            list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work"
+            (nsfw) content, according to the `safety_checker`.
+        """
+        return self.__call__(
+            prompt=prompt,
+            negative_prompt=negative_prompt,
+            height=height,
+            width=width,
+            num_inference_steps=num_inference_steps,
+            guidance_scale=guidance_scale,
+            num_images_per_prompt=num_images_per_prompt,
+            eta=eta,
+            generator=generator,
+            latents=latents,
+            prompt_embeds=prompt_embeds,
+            negative_prompt_embeds=negative_prompt_embeds,
+            max_embeddings_multiples=max_embeddings_multiples,
+            output_type=output_type,
+            return_dict=return_dict,
+            callback=callback,
+            is_cancelled_callback=is_cancelled_callback,
+            callback_steps=callback_steps,
+            cross_attention_kwargs=cross_attention_kwargs,
+        )
+
+    def img2img(
+        self,
+        image: Union[torch.FloatTensor, PIL.Image.Image],
+        prompt: Union[str, List[str]],
+        negative_prompt: Optional[Union[str, List[str]]] = None,
+        strength: float = 0.8,
+        num_inference_steps: Optional[int] = 50,
+        guidance_scale: Optional[float] = 7.5,
+        num_images_per_prompt: Optional[int] = 1,
+        eta: Optional[float] = 0.0,
+        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
+        prompt_embeds: Optional[torch.FloatTensor] = None,
+        negative_prompt_embeds: Optional[torch.FloatTensor] = None,
+        max_embeddings_multiples: Optional[int] = 3,
+        output_type: Optional[str] = "pil",
+        return_dict: bool = True,
+        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,
+        is_cancelled_callback: Optional[Callable[[], bool]] = None,
+        callback_steps: int = 1,
+        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
+    ):
+        r"""
+        Function for image-to-image generation.
+        Args:
+            image (`torch.FloatTensor` or `PIL.Image.Image`):
+                `Image`, or tensor representing an image batch, that will be used as the starting point for the
+                process.
+            prompt (`str` or `List[str]`):
+                The prompt or prompts to guide the image generation.
+            negative_prompt (`str` or `List[str]`, *optional*):
+                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored
+                if `guidance_scale` is less than `1`).
+            strength (`float`, *optional*, defaults to 0.8):
+                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1.
+                `image` will be used as a starting point, adding more noise to it the larger the `strength`. The
+                number of denoising steps depends on the amount of noise initially added. When `strength` is 1, added
+                noise will be maximum and the denoising process will run for the full number of iterations specified in
+                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.
+            num_inference_steps (`int`, *optional*, defaults to 50):
+                The number of denoising steps. More denoising steps usually lead to a higher quality image at the
+                expense of slower inference. This parameter will be modulated by `strength`.
+            guidance_scale (`float`, *optional*, defaults to 7.5):
+                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
+                `guidance_scale` is defined as `w` of equation 2. of [Imagen
+                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
+                1`. A higher guidance scale encourages the model to generate images closely linked to the text
+                `prompt`, usually at the expense of lower image quality.
+            num_images_per_prompt (`int`, *optional*, defaults to 1):
+                The number of images to generate per prompt.
+            eta (`float`, *optional*, defaults to 0.0):
+                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
+                [`schedulers.DDIMScheduler`], will be ignored for others.
+            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
+                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
+                to make generation deterministic.
+            prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
+                provided, text embeddings will be generated from `prompt` input argument.
+            negative_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
+                argument.
+            max_embeddings_multiples (`int`, *optional*, defaults to `3`):
+                The max multiple length of prompt embeddings compared to the max output length of text encoder.
+            output_type (`str`, *optional*, defaults to `"pil"`):
+                The output format of the generated image. Choose between
+                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
+            return_dict (`bool`, *optional*, defaults to `True`):
+                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a
+                plain tuple.
+            callback (`Callable`, *optional*):
+                A function that will be called every `callback_steps` steps during inference. The function will be
+                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.
+            is_cancelled_callback (`Callable`, *optional*):
+                A function that will be called every `callback_steps` steps during inference. If the function returns
+                `True`, the inference will be cancelled.
+            callback_steps (`int`, *optional*, defaults to 1):
+                The frequency at which the `callback` function will be called. If not specified, the callback will be
+                called at every step.
+            cross_attention_kwargs (`dict`, *optional*):
+                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
+                `self.processor` in
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
+
+        Returns:
+            `None` if cancelled by `is_cancelled_callback`,
+            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.
+            When returning a tuple, the first element is a list with the generated images, and the second element is a
+            list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work"
+            (nsfw) content, according to the `safety_checker`.
+        """
+        return self.__call__(
+            prompt=prompt,
+            negative_prompt=negative_prompt,
+            image=image,
+            num_inference_steps=num_inference_steps,
+            guidance_scale=guidance_scale,
+            strength=strength,
+            num_images_per_prompt=num_images_per_prompt,
+            eta=eta,
+            generator=generator,
+            prompt_embeds=prompt_embeds,
+            negative_prompt_embeds=negative_prompt_embeds,
+            max_embeddings_multiples=max_embeddings_multiples,
+            output_type=output_type,
+            return_dict=return_dict,
+            callback=callback,
+            is_cancelled_callback=is_cancelled_callback,
+            callback_steps=callback_steps,
+            cross_attention_kwargs=cross_attention_kwargs,
+        )
+
+    def inpaint(
+        self,
+        image: Union[torch.FloatTensor, PIL.Image.Image],
+        mask_image: Union[torch.FloatTensor, PIL.Image.Image],
+        prompt: Union[str, List[str]],
+        negative_prompt: Optional[Union[str, List[str]]] = None,
+        strength: float = 0.8,
+        num_inference_steps: Optional[int] = 50,
+        guidance_scale: Optional[float] = 7.5,
+        num_images_per_prompt: Optional[int] = 1,
+        add_predicted_noise: Optional[bool] = False,
+        eta: Optional[float] = 0.0,
+        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
+        prompt_embeds: Optional[torch.FloatTensor] = None,
+        negative_prompt_embeds: Optional[torch.FloatTensor] = None,
+        max_embeddings_multiples: Optional[int] = 3,
+        output_type: Optional[str] = "pil",
+        return_dict: bool = True,
+        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,
+        is_cancelled_callback: Optional[Callable[[], bool]] = None,
+        callback_steps: int = 1,
+        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
+    ):
+        r"""
+        Function for inpainting.
+        Args:
+            image (`torch.FloatTensor` or `PIL.Image.Image`):
+                `Image`, or tensor representing an image batch, that will be used as the starting point for the
+                process. This is the image whose masked region will be inpainted.
+            mask_image (`torch.FloatTensor` or `PIL.Image.Image`):
+                `Image`, or tensor representing an image batch, to mask `image`. White pixels in the mask will be
+                replaced by noise and therefore repainted, while black pixels will be preserved. If `mask_image` is a
+                PIL image, it will be converted to a single channel (luminance) before use. If it's a tensor, it should
+                contain one color channel (L) instead of 3, so the expected shape would be `(B, H, W, 1)`.
+            prompt (`str` or `List[str]`):
+                The prompt or prompts to guide the image generation.
+            negative_prompt (`str` or `List[str]`, *optional*):
+                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored
+                if `guidance_scale` is less than `1`).
+            strength (`float`, *optional*, defaults to 0.8):
+                Conceptually, indicates how much to inpaint the masked area. Must be between 0 and 1. When `strength`
+                is 1, the denoising process will be run on the masked area for the full number of iterations specified
+                in `num_inference_steps`. `image` will be used as a reference for the masked area, adding more
+                noise to that region the larger the `strength`. If `strength` is 0, no inpainting will occur.
+            num_inference_steps (`int`, *optional*, defaults to 50):
+                The reference number of denoising steps. More denoising steps usually lead to a higher quality image at
+                the expense of slower inference. This parameter will be modulated by `strength`, as explained above.
+            guidance_scale (`float`, *optional*, defaults to 7.5):
+                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
+                `guidance_scale` is defined as `w` of equation 2. of [Imagen
+                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
+                1`. A higher guidance scale encourages the model to generate images closely linked to the text
+                `prompt`, usually at the expense of lower image quality.
+            num_images_per_prompt (`int`, *optional*, defaults to 1):
+                The number of images to generate per prompt.
+            add_predicted_noise (`bool`, *optional*, defaults to False):
+                Use predicted noise instead of random noise when constructing noisy versions of the original image in
+                the reverse diffusion process.
+            eta (`float`, *optional*, defaults to 0.0):
+                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
+                [`schedulers.DDIMScheduler`], will be ignored for others.
+            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
+                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
+                to make generation deterministic.
+            prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
+                provided, text embeddings will be generated from `prompt` input argument.
+            negative_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
+                argument.
+            max_embeddings_multiples (`int`, *optional*, defaults to `3`):
+                The max multiple length of prompt embeddings compared to the max output length of text encoder.
+            output_type (`str`, *optional*, defaults to `"pil"`):
+                The output format of the generated image. Choose between
+                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
+            return_dict (`bool`, *optional*, defaults to `True`):
+                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a
+                plain tuple.
+            callback (`Callable`, *optional*):
+                A function that will be called every `callback_steps` steps during inference. The function will be
+                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.
+            is_cancelled_callback (`Callable`, *optional*):
+                A function that will be called every `callback_steps` steps during inference. If the function returns
+                `True`, the inference will be cancelled.
+            callback_steps (`int`, *optional*, defaults to 1):
+                The frequency at which the `callback` function will be called. If not specified, the callback will be
+                called at every step.
+            cross_attention_kwargs (`dict`, *optional*):
+                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
+                `self.processor` in
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
+
+        Returns:
+            `None` if cancelled by `is_cancelled_callback`,
+            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.
+            When returning a tuple, the first element is a list with the generated images, and the second element is a
+            list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work"
+            (nsfw) content, according to the `safety_checker`.
+        """
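+        # Illustrative usage (hypothetical names): given a constructed pipeline
+        # `pipe`, an RGB `init_image`, and a single-channel `mask`,
+        #     out = pipe.inpaint(image=init_image, mask_image=mask,
+        #                        prompt="a red sofa", strength=0.75)
+        #     out.images[0]
+        # delegates to `__call__` above with both `image` and `mask_image` set.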
+        return self.__call__(
+            prompt=prompt,
+            negative_prompt=negative_prompt,
+            image=image,
+            mask_image=mask_image,
+            num_inference_steps=num_inference_steps,
+            guidance_scale=guidance_scale,
+            strength=strength,
+            num_images_per_prompt=num_images_per_prompt,
+            add_predicted_noise=add_predicted_noise,
+            eta=eta,
+            generator=generator,
+            prompt_embeds=prompt_embeds,
+            negative_prompt_embeds=negative_prompt_embeds,
+            max_embeddings_multiples=max_embeddings_multiples,
+            output_type=output_type,
+            return_dict=return_dict,
+            callback=callback,
+            is_cancelled_callback=is_cancelled_callback,
+            callback_steps=callback_steps,
+            cross_attention_kwargs=cross_attention_kwargs,
+        )
diff --git a/src/animatediff/utils/lpw_stable_diffusion_xl.py b/src/animatediff/utils/lpw_stable_diffusion_xl.py
new file mode 100644
index 0000000000000000000000000000000000000000..b71a370c3d32f87e621857a1894c4a8925c89ffc
--- /dev/null
+++ b/src/animatediff/utils/lpw_stable_diffusion_xl.py
@@ -0,0 +1,1463 @@
+# https://github.com/huggingface/diffusers/blob/main/examples/community/lpw_stable_diffusion_xl.py
+
+## -----------------------------------------------------------
+# An SDXL pipeline that can take unlimited-length weighted prompts
+#
+# Author: Andrew Zhu
+# Github: https://github.com/xhinker
+# Medium: https://medium.com/@xhinker
+## -----------------------------------------------------------
+
+import inspect
+import os
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+import torch
+from diffusers import DiffusionPipeline, StableDiffusionXLPipeline
+from diffusers.image_processor import VaeImageProcessor
+from diffusers.loaders import (FromSingleFileMixin, LoraLoaderMixin,
+                               TextualInversionLoaderMixin)
+from diffusers.models import AutoencoderKL, UNet2DConditionModel
+from diffusers.models.attention_processor import (AttnProcessor2_0,
+                                                  LoRAAttnProcessor2_0,
+                                                  LoRAXFormersAttnProcessor,
+                                                  XFormersAttnProcessor)
+from diffusers.pipelines.stable_diffusion_xl import \
+    StableDiffusionXLPipelineOutput
+from diffusers.schedulers import KarrasDiffusionSchedulers
+from diffusers.utils import (is_accelerate_available, is_accelerate_version,
+                             is_invisible_watermark_available, logging,
+                             replace_example_docstring)
+from diffusers.utils.torch_utils import randn_tensor
+from transformers import (CLIPTextModel, CLIPTextModelWithProjection,
+                          CLIPTokenizer)
+
+if is_invisible_watermark_available():
+    from diffusers.pipelines.stable_diffusion_xl.watermark import \
+        StableDiffusionXLWatermarker
+
+
+def parse_prompt_attention(text):
+    """
+    Parses a string with attention tokens and returns a list of pairs: text and its associated weight.
+    Accepted tokens are:
+      (abc) - increases attention to abc by a multiplier of 1.1
+      (abc:3.12) - increases attention to abc by a multiplier of 3.12
+      [abc] - decreases attention to abc by a multiplier of 1.1
+      \( - literal character '('
+      \[ - literal character '['
+      \) - literal character ')'
+      \] - literal character ']'
+      \\ - literal character '\'
+      anything else - just text
+
+    >>> parse_prompt_attention('normal text')
+    [['normal text', 1.0]]
+    >>> parse_prompt_attention('an (important) word')
+    [['an ', 1.0], ['important', 1.1], [' word', 1.0]]
+    >>> parse_prompt_attention('(unbalanced')
+    [['unbalanced', 1.1]]
+    >>> parse_prompt_attention('\(literal\]')
+    [['(literal]', 1.0]]
+    >>> parse_prompt_attention('(unnecessary)(parens)')
+    [['unnecessaryparens', 1.1]]
+    >>> parse_prompt_attention('a (((house:1.3)) [on] a (hill:0.5), sun, (((sky))).')
+    [['a ', 1.0],
+     ['house', 1.5730000000000004],
+     [' ', 1.1],
+     ['on', 1.0],
+     [' a ', 1.1],
+     ['hill', 0.55],
+     [', sun, ', 1.1],
+     ['sky', 1.4641000000000006],
+     ['.', 1.1]]
+    """
+    import re
+
+    re_attention = re.compile(
+        r"""
+            \\\(|\\\)|\\\[|\\]|\\\\|\\|\(|\[|:([+-]?[.\d]+)\)|
+            \)|]|[^\\()\[\]:]+|:
+        """,
+        re.X,
+    )
+
+    re_break = re.compile(r"\s*\bBREAK\b\s*", re.S)
+
+    res = []
+    round_brackets = []
+    square_brackets = []
+
+    round_bracket_multiplier = 1.1
+    square_bracket_multiplier = 1 / 1.1
+
+    def multiply_range(start_position, multiplier):
+        for p in range(start_position, len(res)):
+            res[p][1] *= multiplier
+
+    for m in re_attention.finditer(text):
+        text = m.group(0)
+        weight = m.group(1)
+
+        if text.startswith("\\"):
+            res.append([text[1:], 1.0])
+        elif text == "(":
+            round_brackets.append(len(res))
+        elif text == "[":
+            square_brackets.append(len(res))
+        elif weight is not None and len(round_brackets) > 0:
+            multiply_range(round_brackets.pop(), float(weight))
+        elif text == ")" and len(round_brackets) > 0:
+            multiply_range(round_brackets.pop(), round_bracket_multiplier)
+        elif text == "]" and len(square_brackets) > 0:
+            multiply_range(square_brackets.pop(), square_bracket_multiplier)
+        else:
+            parts = re.split(re_break, text)
+            for i, part in enumerate(parts):
+                if i > 0:
+                    res.append(["BREAK", -1])
+                res.append([part, 1.0])
+
+    for pos in round_brackets:
+        multiply_range(pos, round_bracket_multiplier)
+
+    for pos in square_brackets:
+        multiply_range(pos, square_bracket_multiplier)
+
+    if len(res) == 0:
+        res = [["", 1.0]]
+
+    # merge runs of identical weights
+    i = 0
+    while i + 1 < len(res):
+        if res[i][1] == res[i + 1][1]:
+            res[i][0] += res[i + 1][0]
+            res.pop(i + 1)
+        else:
+            i += 1
+
+    return res
+
+
+def get_prompts_tokens_with_weights(clip_tokenizer: CLIPTokenizer, prompt: str):
+    """
+    Get prompt token ids and weights; this works for both the prompt and the negative prompt.
+
+    Args:
+        clip_tokenizer (CLIPTokenizer)
+            A CLIPTokenizer
+        prompt (str)
+            A prompt string with weights
+
+    Returns:
+        text_tokens (list)
+            A list containing token ids
+        text_weight (list)
+            A list containing the corresponding weight for each token id
+
+    Example:
+        import torch
+        from transformers import CLIPTokenizer
+
+        clip_tokenizer = CLIPTokenizer.from_pretrained(
+            "stablediffusionapi/deliberate-v2"
+            , subfolder = "tokenizer"
+            , dtype = torch.float16
+        )
+
+        token_id_list, token_weight_list = get_prompts_tokens_with_weights(
+            clip_tokenizer = clip_tokenizer
+            ,prompt = "a (red:1.5) cat"*70
+        )
+    """
+    texts_and_weights = parse_prompt_attention(prompt)
+    text_tokens, text_weights = [], []
+    for word, weight in texts_and_weights:
+        # tokenize and discard the starting (BOS) and ending (EOS) tokens
+        token = clip_tokenizer(word, truncation=False).input_ids[1:-1]  # tokenize without truncation, whatever the prompt length
+        # the returned token ids form a flat 1-d list, e.g. [320, 1125, 539, 320]
+
+        # merge the new tokens into the running token holder: text_tokens
+        text_tokens = [*text_tokens, *token]
+
+        # each text chunk comes with a single weight, like ['red cat', 2.0],
+        # so the weight needs to be expanded to cover each of its tokens.
+        chunk_weights = [weight] * len(token)
+
+        # append the weights to the weight holder: text_weights
+        text_weights = [*text_weights, *chunk_weights]
+    return text_tokens, text_weights
+
+
+def group_tokens_and_weights(token_ids: list, weights: list, pad_last_block=False):
+    """
+    Produce tokens and weights in groups and pad the missing tokens
+
+    Args:
+        token_ids (list)
+            The token ids from tokenizer
+        weights (list)
+            The weights list from function get_prompts_tokens_with_weights
+        pad_last_block (bool)
+            Controls whether to pad the last token group to 75 tokens with eos
+    Returns:
+        new_token_ids (2d list)
+        new_weights (2d list)
+
+    Example:
+        token_groups, weight_groups = group_tokens_and_weights(
+            token_ids = token_id_list
+            , weights = token_weight_list
+        )
+    """
+    bos, eos = 49406, 49407
+
+    # this will be a 2d list
+    new_token_ids = []
+    new_weights = []
+    while len(token_ids) >= 75:
+        # get the first 75 tokens
+        head_75_tokens = [token_ids.pop(0) for _ in range(75)]
+        head_75_weights = [weights.pop(0) for _ in range(75)]
+
+        # extract token ids and weights
+        temp_77_token_ids = [bos] + head_75_tokens + [eos]
+        temp_77_weights = [1.0] + head_75_weights + [1.0]
+
+        # add 77 token and weights chunk to the holder list
+        new_token_ids.append(temp_77_token_ids)
+        new_weights.append(temp_77_weights)
+
+    # pad the remaining tokens (fewer than 75)
+    if len(token_ids) > 0:
+        padding_len = 75 - len(token_ids) if pad_last_block else 0
+
+        temp_77_token_ids = [bos] + token_ids + [eos] * padding_len + [eos]
+        new_token_ids.append(temp_77_token_ids)
+
+        temp_77_weights = [1.0] + weights + [1.0] * padding_len + [1.0]
+        new_weights.append(temp_77_weights)
+
+    return new_token_ids, new_weights
+
+
+def get_weighted_text_embeddings_sdxl(
+    pipe: StableDiffusionXLPipeline,
+    prompt: str = "",
+    prompt_2: str = None,
+    neg_prompt: str = "",
+    neg_prompt_2: str = None,
+):
+    """
+    Process a long prompt with weights for Stable Diffusion XL; there is no length limitation.
+
+    Args:
+        pipe (StableDiffusionXLPipeline)
+        prompt (str)
+        prompt_2 (str)
+        neg_prompt (str)
+        neg_prompt_2 (str)
+    Returns:
+        prompt_embeds (torch.Tensor)
+        neg_prompt_embeds (torch.Tensor)
+    """
+    if prompt_2:
+        prompt = f"{prompt} {prompt_2}"
+
+    if neg_prompt_2:
+        neg_prompt = f"{neg_prompt} {neg_prompt_2}"
+
+    eos = pipe.tokenizer.eos_token_id
+
+    # tokenizer 1
+    prompt_tokens, prompt_weights = get_prompts_tokens_with_weights(pipe.tokenizer, prompt)
+
+    neg_prompt_tokens, neg_prompt_weights = get_prompts_tokens_with_weights(pipe.tokenizer, neg_prompt)
+
+    # tokenizer 2
+    prompt_tokens_2, prompt_weights_2 = get_prompts_tokens_with_weights(pipe.tokenizer_2, prompt)
+
+    neg_prompt_tokens_2, neg_prompt_weights_2 = get_prompts_tokens_with_weights(pipe.tokenizer_2, neg_prompt)
+
+    # pad the shorter one for prompt set 1
+    prompt_token_len = len(prompt_tokens)
+    neg_prompt_token_len = len(neg_prompt_tokens)
+
+    if prompt_token_len > neg_prompt_token_len:
+        # pad the neg_prompt with eos token
+        neg_prompt_tokens = neg_prompt_tokens + [eos] * abs(prompt_token_len - neg_prompt_token_len)
+        neg_prompt_weights = neg_prompt_weights + [1.0] * abs(prompt_token_len - neg_prompt_token_len)
+    else:
+        # pad the prompt
+        prompt_tokens = prompt_tokens + [eos] * abs(prompt_token_len - neg_prompt_token_len)
+        prompt_weights = prompt_weights + [1.0] * abs(prompt_token_len - neg_prompt_token_len)
+
+    # pad the shorter one for token set 2
+    prompt_token_len_2 = len(prompt_tokens_2)
+    neg_prompt_token_len_2 = len(neg_prompt_tokens_2)
+
+    if prompt_token_len_2 > neg_prompt_token_len_2:
+        # pad the neg_prompt with eos token
+        neg_prompt_tokens_2 = neg_prompt_tokens_2 + [eos] * abs(prompt_token_len_2 - neg_prompt_token_len_2)
+        neg_prompt_weights_2 = neg_prompt_weights_2 + [1.0] * abs(prompt_token_len_2 - neg_prompt_token_len_2)
+    else:
+        # pad the prompt
+        prompt_tokens_2 = prompt_tokens_2 + [eos] * abs(prompt_token_len_2 - neg_prompt_token_len_2)
+        prompt_weights_2 = prompt_weights_2 + [1.0] * abs(prompt_token_len_2 - neg_prompt_token_len_2)
+
+    embeds = []
+    neg_embeds = []
+
+    prompt_token_groups, prompt_weight_groups = group_tokens_and_weights(prompt_tokens.copy(), prompt_weights.copy())
+
+    neg_prompt_token_groups, neg_prompt_weight_groups = group_tokens_and_weights(
+        neg_prompt_tokens.copy(), neg_prompt_weights.copy()
+    )
+
+    prompt_token_groups_2, prompt_weight_groups_2 = group_tokens_and_weights(
+        prompt_tokens_2.copy(), prompt_weights_2.copy()
+    )
+
+    neg_prompt_token_groups_2, neg_prompt_weight_groups_2 = group_tokens_and_weights(
+        neg_prompt_tokens_2.copy(), neg_prompt_weights_2.copy()
+    )
+
+    # encode the token groups one by one (a single batched call does not work here)
+    for i in range(len(prompt_token_groups)):
+        # get positive prompt embeddings with weights
+        token_tensor = torch.tensor([prompt_token_groups[i]], dtype=torch.long, device=pipe.device)
+        weight_tensor = torch.tensor(prompt_weight_groups[i], dtype=torch.float16, device=pipe.device)
+
+        token_tensor_2 = torch.tensor([prompt_token_groups_2[i]], dtype=torch.long, device=pipe.device)
+
+        # use first text encoder
+        prompt_embeds_1 = pipe.text_encoder(token_tensor.to(pipe.device), output_hidden_states=True)
+        prompt_embeds_1_hidden_states = prompt_embeds_1.hidden_states[-2]
+
+        # use second text encoder
+        prompt_embeds_2 = pipe.text_encoder_2(token_tensor_2.to(pipe.device), output_hidden_states=True)
+        prompt_embeds_2_hidden_states = prompt_embeds_2.hidden_states[-2]
+        pooled_prompt_embeds = prompt_embeds_2[0]
+
+        prompt_embeds_list = [prompt_embeds_1_hidden_states, prompt_embeds_2_hidden_states]
+        token_embedding = torch.concat(prompt_embeds_list, dim=-1).squeeze(0)
+
+        # scale each weighted token embedding relative to the embedding at the final
+        # (eos) position, so a weight of 1.0 leaves the token untouched
+        for j in range(len(weight_tensor)):
+            if weight_tensor[j] != 1.0:
+                token_embedding[j] = (
+                    token_embedding[-1] + (token_embedding[j] - token_embedding[-1]) * weight_tensor[j]
+                )
+
+        token_embedding = token_embedding.unsqueeze(0)
+        embeds.append(token_embedding)
+
+        # get negative prompt embeddings with weights
+        neg_token_tensor = torch.tensor([neg_prompt_token_groups[i]], dtype=torch.long, device=pipe.device)
+        neg_token_tensor_2 = torch.tensor([neg_prompt_token_groups_2[i]], dtype=torch.long, device=pipe.device)
+        neg_weight_tensor = torch.tensor(neg_prompt_weight_groups[i], dtype=torch.float16, device=pipe.device)
+
+        # use first text encoder
+        neg_prompt_embeds_1 = pipe.text_encoder(neg_token_tensor.to(pipe.device), output_hidden_states=True)
+        neg_prompt_embeds_1_hidden_states = neg_prompt_embeds_1.hidden_states[-2]
+
+        # use second text encoder
+        neg_prompt_embeds_2 = pipe.text_encoder_2(neg_token_tensor_2.to(pipe.device), output_hidden_states=True)
+        neg_prompt_embeds_2_hidden_states = neg_prompt_embeds_2.hidden_states[-2]
+        negative_pooled_prompt_embeds = neg_prompt_embeds_2[0]
+
+        neg_prompt_embeds_list = [neg_prompt_embeds_1_hidden_states, neg_prompt_embeds_2_hidden_states]
+        neg_token_embedding = torch.concat(neg_prompt_embeds_list, dim=-1).squeeze(0)
+
+        for z in range(len(neg_weight_tensor)):
+            if neg_weight_tensor[z] != 1.0:
+                neg_token_embedding[z] = (
+                    neg_token_embedding[-1] + (neg_token_embedding[z] - neg_token_embedding[-1]) * neg_weight_tensor[z]
+                )
+
+        neg_token_embedding = neg_token_embedding.unsqueeze(0)
+        neg_embeds.append(neg_token_embedding)
+
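+    # each 77-token group yields embeddings of shape (1, 77, 2048) -- the two text
+    # encoders' penultimate hidden states concatenated on the feature axis
+    # (768 + 1280). Concatenating the groups along dim=1 builds one long sequence,
+    # which is how prompts beyond CLIP's 77-token window are supported.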
+    prompt_embeds = torch.cat(embeds, dim=1)
+    negative_prompt_embeds = torch.cat(neg_embeds, dim=1)
+
+    return prompt_embeds, negative_prompt_embeds, pooled_prompt_embeds, negative_pooled_prompt_embeds
+
+
+def get_weighted_text_embeddings_sdxl2(
+    pipe: StableDiffusionXLPipeline,
+    prompt_list: List[str] = [],
+    neg_prompt_list: List[str] = [],
+    device: str = "",
+):
+    """
+    Process a list of long prompts with weights for Stable Diffusion XL; there is
+    no length limitation. Each prompt is embedded independently.
+
+    Args:
+        pipe (StableDiffusionXLPipeline)
+        prompt_list (List[str])
+        neg_prompt_list (List[str])
+        device (str)
+    Returns:
+        prompt_embeds_list (List[torch.Tensor])
+        neg_prompt_embeds_list (List[torch.Tensor])
+        pooled_prompt_embeds_list (List[torch.Tensor])
+        neg_pooled_prompt_embeds_list (List[torch.Tensor])
+    """
+
+    if len(neg_prompt_list) == 1:
+        neg_prompt_list = neg_prompt_list * len(prompt_list)
+
+    eos = pipe.tokenizer.eos_token_id
+
+    prompt_tokens_list = []
+    prompt_weights_list = []
+    neg_prompt_tokens_list = []
+    neg_prompt_weights_list = []
+
+    for prompt, neg_prompt in zip(prompt_list, neg_prompt_list):
+        # tokenizer 1
+        prompt_tokens, prompt_weights = get_prompts_tokens_with_weights(pipe.tokenizer, prompt)
+        prompt_tokens_list.append(prompt_tokens)
+        prompt_weights_list.append(prompt_weights)
+
+        neg_prompt_tokens, neg_prompt_weights = get_prompts_tokens_with_weights(pipe.tokenizer, neg_prompt)
+        neg_prompt_tokens_list.append(neg_prompt_tokens)
+        neg_prompt_weights_list.append(neg_prompt_weights)
+
+    prompt_tokens_2_list = []
+    prompt_weights_2_list = []
+    neg_prompt_tokens_2_list = []
+    neg_prompt_weights_2_list = []
+
+    for prompt, neg_prompt in zip(prompt_list, neg_prompt_list):
+        # tokenizer 2
+        prompt_tokens_2, prompt_weights_2 = get_prompts_tokens_with_weights(pipe.tokenizer_2, prompt)
+        prompt_tokens_2_list.append(prompt_tokens_2)
+        prompt_weights_2_list.append(prompt_weights_2)
+
+        neg_prompt_tokens_2, neg_prompt_weights_2 = get_prompts_tokens_with_weights(pipe.tokenizer_2, neg_prompt)
+        neg_prompt_tokens_2_list.append(neg_prompt_tokens_2)
+        neg_prompt_weights_2_list.append(neg_prompt_weights_2)
+
+    def padding(token_list, weight_list, neg_token_list, neg_weight_list):
+        max_length = len(max(token_list + neg_token_list, key=len))
+        token_list = [cur + [eos] * (max_length - len(cur)) for cur in token_list]
+        weight_list = [cur + [1.0] * (max_length - len(cur)) for cur in weight_list]
+        neg_token_list = [cur + [eos] * (max_length - len(cur)) for cur in neg_token_list]
+        neg_weight_list = [cur + [1.0] * (max_length - len(cur)) for cur in neg_weight_list]
+        return token_list, weight_list, neg_token_list, neg_weight_list
+
+    prompt_tokens_list, prompt_weights_list, neg_prompt_tokens_list, neg_prompt_weights_list = padding(
+        prompt_tokens_list, prompt_weights_list, neg_prompt_tokens_list, neg_prompt_weights_list
+    )
+
+    prompt_tokens_2_list, prompt_weights_2_list, neg_prompt_tokens_2_list, neg_prompt_weights_2_list = padding(
+        prompt_tokens_2_list, prompt_weights_2_list, neg_prompt_tokens_2_list, neg_prompt_weights_2_list
+    )
+
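+    # Inner helper mirroring `get_weighted_text_embeddings_sdxl` above, but with
+    # pad_last_block=True so every prompt in the (pre-padded) batch yields
+    # embeddings of identical shape.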
+    def get_embeddings(
+        prompt_tokens,
+        prompt_weights,
+        neg_prompt_tokens,
+        neg_prompt_weights,
+        prompt_tokens_2,
+        prompt_weights_2,
+        neg_prompt_tokens_2,
+        neg_prompt_weights_2,
+    ):
+        embeds = []
+        neg_embeds = []
+
+        prompt_token_groups, prompt_weight_groups = group_tokens_and_weights(
+            prompt_tokens.copy(), prompt_weights.copy(), True
+        )
+
+        neg_prompt_token_groups, neg_prompt_weight_groups = group_tokens_and_weights(
+            neg_prompt_tokens.copy(), neg_prompt_weights.copy(), True
+        )
+
+        prompt_token_groups_2, prompt_weight_groups_2 = group_tokens_and_weights(
+            prompt_tokens_2.copy(), prompt_weights_2.copy(), True
+        )
+
+        neg_prompt_token_groups_2, neg_prompt_weight_groups_2 = group_tokens_and_weights(
+            neg_prompt_tokens_2.copy(), neg_prompt_weights_2.copy(), True
+        )
+
+        # encode the token groups one by one (a single batched call does not work here)
+        for i in range(len(prompt_token_groups)):
+            # get positive prompt embeddings with weights
+            token_tensor = torch.tensor([prompt_token_groups[i]], dtype=torch.long, device=pipe.device)
+            weight_tensor = torch.tensor(prompt_weight_groups[i], dtype=torch.float16, device=pipe.device)
+
+            token_tensor_2 = torch.tensor([prompt_token_groups_2[i]], dtype=torch.long, device=pipe.device)
+
+            # use first text encoder
+            prompt_embeds_1 = pipe.text_encoder(token_tensor.to(pipe.device), output_hidden_states=True)
+            prompt_embeds_1_hidden_states = prompt_embeds_1.hidden_states[-2]
+
+            # use second text encoder
+            prompt_embeds_2 = pipe.text_encoder_2(token_tensor_2.to(pipe.device), output_hidden_states=True)
+            prompt_embeds_2_hidden_states = prompt_embeds_2.hidden_states[-2]
+            pooled_prompt_embeds = prompt_embeds_2[0]
+
+            prompt_embeds_list = [prompt_embeds_1_hidden_states, prompt_embeds_2_hidden_states]
+            token_embedding = torch.concat(prompt_embeds_list, dim=-1).squeeze(0)
+
+            for j in range(len(weight_tensor)):
+                if weight_tensor[j] != 1.0:
+                    token_embedding[j] = (
+                        token_embedding[-1] + (token_embedding[j] - token_embedding[-1]) * weight_tensor[j]
+                    )
+
+            token_embedding = token_embedding.unsqueeze(0)
+            embeds.append(token_embedding)
+
+            # get negative prompt embeddings with weights
+            neg_token_tensor = torch.tensor([neg_prompt_token_groups[i]], dtype=torch.long, device=pipe.device)
+            neg_token_tensor_2 = torch.tensor([neg_prompt_token_groups_2[i]], dtype=torch.long, device=pipe.device)
+            neg_weight_tensor = torch.tensor(neg_prompt_weight_groups[i], dtype=torch.float16, device=pipe.device)
+
+            # use first text encoder
+            neg_prompt_embeds_1 = pipe.text_encoder(neg_token_tensor.to(pipe.device), output_hidden_states=True)
+            neg_prompt_embeds_1_hidden_states = neg_prompt_embeds_1.hidden_states[-2]
+
+            # use second text encoder
+            neg_prompt_embeds_2 = pipe.text_encoder_2(neg_token_tensor_2.to(pipe.device), output_hidden_states=True)
+            neg_prompt_embeds_2_hidden_states = neg_prompt_embeds_2.hidden_states[-2]
+            negative_pooled_prompt_embeds = neg_prompt_embeds_2[0]
+
+            neg_prompt_embeds_list = [neg_prompt_embeds_1_hidden_states, neg_prompt_embeds_2_hidden_states]
+            neg_token_embedding = torch.concat(neg_prompt_embeds_list, dim=-1).squeeze(0)
+
+            for z in range(len(neg_weight_tensor)):
+                if neg_weight_tensor[z] != 1.0:
+                    neg_token_embedding[z] = (
+                        neg_token_embedding[-1]
+                        + (neg_token_embedding[z] - neg_token_embedding[-1]) * neg_weight_tensor[z]
+                    )
+
+            neg_token_embedding = neg_token_embedding.unsqueeze(0)
+            neg_embeds.append(neg_token_embedding)
+
+        prompt_embeds = torch.cat(embeds, dim=1)
+        negative_prompt_embeds = torch.cat(neg_embeds, dim=1)
+
+        return (
+            prompt_embeds.to(device),
+            negative_prompt_embeds.to(device),
+            pooled_prompt_embeds.to(device),
+            negative_pooled_prompt_embeds.to(device),
+        )
+
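+    # embed every prompt / negative-prompt pair independently; callers receive four
+    # parallel lists (token-level and pooled embeddings for both polarities), one
+    # entry per prompt in `prompt_list`.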
+    p_list, n_list, pp_list, np_list = [], [], [], []
+
+    for i in range(len(prompt_tokens_list)):
+        p, n, pp, np = get_embeddings(
+            prompt_tokens_list[i],
+            prompt_weights_list[i],
+            neg_prompt_tokens_list[i],
+            neg_prompt_weights_list[i],
+            prompt_tokens_2_list[i],
+            prompt_weights_2_list[i],
+            neg_prompt_tokens_2_list[i],
+            neg_prompt_weights_2_list[i],
+        )
+        p_list.append(p)
+        n_list.append(n)
+        pp_list.append(pp)
+        np_list.append(np)
+
+    return p_list, n_list, pp_list, np_list
+
+
+# -------------------------------------------------------------------------------------------------------------------------------
+# reuse the backbone code from StableDiffusionXLPipeline
+# -------------------------------------------------------------------------------------------------------------------------------
+
+logger = logging.get_logger(__name__)  # pylint: disable=invalid-name
+
+EXAMPLE_DOC_STRING = """
+    Examples:
+        ```py
+        from diffusers import DiffusionPipeline
+        import torch
+
+        pipe = DiffusionPipeline.from_pretrained(
+            "stabilityai/stable-diffusion-xl-base-1.0"
+            , torch_dtype = torch.float16
+            , use_safetensors = True
+            , variant = "fp16"
+            , custom_pipeline = "lpw_stable_diffusion_xl",
+        )
+
+        prompt = "a white cat running on the grass"*20
+        prompt2 = "play a football"*20
+        prompt = f"{prompt},{prompt2}"
+        neg_prompt = "blur, low quality"
+
+        pipe.to("cuda")
+        images = pipe(
+            prompt = prompt
+            , negative_prompt = neg_prompt
+        ).images[0]
+
+        pipe.to("cpu")
+        torch.cuda.empty_cache()
+        images
+        ```
+"""
+
+
+# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.rescale_noise_cfg
+def rescale_noise_cfg(noise_cfg, noise_pred_text, guidance_rescale=0.0):
+    """
+    Rescale `noise_cfg` according to `guidance_rescale`. Based on findings of [Common Diffusion Noise Schedules and
+    Sample Steps are Flawed](https://arxiv.org/pdf/2305.08891.pdf). See Section 3.4
+    """
+    std_text = noise_pred_text.std(dim=list(range(1, noise_pred_text.ndim)), keepdim=True)
+    std_cfg = noise_cfg.std(dim=list(range(1, noise_cfg.ndim)), keepdim=True)
+    # rescale the results from guidance (fixes overexposure)
+    noise_pred_rescaled = noise_cfg * (std_text / std_cfg)
+    # mix with the original results from guidance by factor guidance_rescale to avoid "plain looking" images
+    noise_cfg = guidance_rescale * noise_pred_rescaled + (1 - guidance_rescale) * noise_cfg
+    return noise_cfg
+
+
+class SDXLLongPromptWeightingPipeline(DiffusionPipeline, FromSingleFileMixin, LoraLoaderMixin):
+    r"""
+    Pipeline for text-to-image generation using Stable Diffusion XL.
+
+    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the
+    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)
+
+    In addition the pipeline inherits the following loading methods:
+        - *LoRA*: [`StableDiffusionXLPipeline.load_lora_weights`]
+        - *Ckpt*: [`loaders.FromSingleFileMixin.from_single_file`]
+
+    as well as the following saving methods:
+        - *LoRA*: [`loaders.StableDiffusionXLPipeline.save_lora_weights`]
+
+    Args:
+        vae ([`AutoencoderKL`]):
+            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.
+        text_encoder ([`CLIPTextModel`]):
+            Frozen text-encoder.
Stable Diffusion XL uses the text portion of + [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically + the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant. + text_encoder_2 ([` CLIPTextModelWithProjection`]): + Second frozen text-encoder. Stable Diffusion XL uses the text and pool portion of + [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModelWithProjection), + specifically the + [laion/CLIP-ViT-bigG-14-laion2B-39B-b160k](https://huggingface.co/laion/CLIP-ViT-bigG-14-laion2B-39B-b160k) + variant. + tokenizer (`CLIPTokenizer`): + Tokenizer of class + [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer). + tokenizer_2 (`CLIPTokenizer`): + Second Tokenizer of class + [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer). + unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents. + scheduler ([`SchedulerMixin`]): + A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of + [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. + """ + + def __init__( + self, + vae: AutoencoderKL, + text_encoder: CLIPTextModel, + text_encoder_2: CLIPTextModelWithProjection, + tokenizer: CLIPTokenizer, + tokenizer_2: CLIPTokenizer, + unet: UNet2DConditionModel, + scheduler: KarrasDiffusionSchedulers, + force_zeros_for_empty_prompt: bool = True, + add_watermarker: Optional[bool] = None, + ): + super().__init__() + + self.register_modules( + vae=vae, + text_encoder=text_encoder, + text_encoder_2=text_encoder_2, + tokenizer=tokenizer, + tokenizer_2=tokenizer_2, + unet=unet, + scheduler=scheduler, + ) + self.register_to_config(force_zeros_for_empty_prompt=force_zeros_for_empty_prompt) + self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1) + self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor) + self.default_sample_size = self.unet.config.sample_size + + add_watermarker = add_watermarker if add_watermarker is not None else is_invisible_watermark_available() + + if add_watermarker: + self.watermark = StableDiffusionXLWatermarker() + else: + self.watermark = None + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing + def enable_vae_slicing(self): + r""" + Enable sliced VAE decoding. When this option is enabled, the VAE will split the input tensor in slices to + compute decoding in several steps. This is useful to save some memory and allow larger batch sizes. + """ + self.vae.enable_slicing() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing + def disable_vae_slicing(self): + r""" + Disable sliced VAE decoding. If `enable_vae_slicing` was previously enabled, this method will go back to + computing decoding in one step. + """ + self.vae.disable_slicing() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_tiling + def enable_vae_tiling(self): + r""" + Enable tiled VAE decoding. When this option is enabled, the VAE will split the input tensor into tiles to + compute decoding and encoding in several steps. This is useful for saving a large amount of memory and to allow + processing larger images. 
+ """ + self.vae.enable_tiling() + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_tiling + def disable_vae_tiling(self): + r""" + Disable tiled VAE decoding. If `enable_vae_tiling` was previously enabled, this method will go back to + computing decoding in one step. + """ + self.vae.disable_tiling() + + def enable_model_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared + to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward` + method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with + `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`. + """ + if is_accelerate_available() and is_accelerate_version(">=", "0.17.0.dev0"): + from accelerate import cpu_offload_with_hook + else: + raise ImportError("`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.") + + device = torch.device(f"cuda:{gpu_id}") + + if self.device.type != "cpu": + self.to("cpu", silence_dtype_warnings=True) + torch.cuda.empty_cache() # otherwise we don't see the memory savings (but they probably exist) + + model_sequence = ( + [self.text_encoder, self.text_encoder_2] if self.text_encoder is not None else [self.text_encoder_2] + ) + model_sequence.extend([self.unet, self.vae]) + + hook = None + for cpu_offloaded_model in model_sequence: + _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook) + + # We'll offload the last model manually. + self.final_offload_hook = hook + + def encode_prompt( + self, + prompt: str, + prompt_2: Optional[str] = None, + device: Optional[torch.device] = None, + num_images_per_prompt: int = 1, + do_classifier_free_guidance: bool = True, + negative_prompt: Optional[str] = None, + negative_prompt_2: Optional[str] = None, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + pooled_prompt_embeds: Optional[torch.FloatTensor] = None, + negative_pooled_prompt_embeds: Optional[torch.FloatTensor] = None, + lora_scale: Optional[float] = None, + ): + r""" + Encodes the prompt into text encoder hidden states. + + Args: + prompt (`str` or `List[str]`, *optional*): + prompt to be encoded + prompt_2 (`str` or `List[str]`, *optional*): + The prompt or prompts to be sent to the `tokenizer_2` and `text_encoder_2`. If not defined, `prompt` is + used in both text-encoders + device: (`torch.device`): + torch device + num_images_per_prompt (`int`): + number of images that should be generated per prompt + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + negative_prompt (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation. If not defined, one has to pass + `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is + less than `1`). + negative_prompt_2 (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation to be sent to `tokenizer_2` and + `text_encoder_2`. If not defined, `negative_prompt` is used in both text-encoders + prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. 
If not
+                provided, text embeddings will be generated from `prompt` input argument.
+            negative_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
+                argument.
+            pooled_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting.
+                If not provided, pooled text embeddings will be generated from `prompt` input argument.
+            negative_pooled_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, pooled negative_prompt_embeds will be generated from `negative_prompt`
+                input argument.
+            lora_scale (`float`, *optional*):
+                A lora scale that will be applied to all LoRA layers of the text encoder if LoRA layers are loaded.
+        """
+        device = device or self._execution_device
+
+        # set lora scale so that monkey patched LoRA
+        # function of text encoder can correctly access it
+        if lora_scale is not None and isinstance(self, LoraLoaderMixin):
+            self._lora_scale = lora_scale
+
+        if prompt is not None and isinstance(prompt, str):
+            batch_size = 1
+        elif prompt is not None and isinstance(prompt, list):
+            batch_size = len(prompt)
+        else:
+            batch_size = prompt_embeds.shape[0]
+
+        # Define tokenizers and text encoders
+        tokenizers = [self.tokenizer, self.tokenizer_2] if self.tokenizer is not None else [self.tokenizer_2]
+        text_encoders = (
+            [self.text_encoder, self.text_encoder_2] if self.text_encoder is not None else [self.text_encoder_2]
+        )
+
+        if prompt_embeds is None:
+            prompt_2 = prompt_2 or prompt
+            # textual inversion: process multi-vector tokens if necessary
+            prompt_embeds_list = []
+            prompts = [prompt, prompt_2]
+            for prompt, tokenizer, text_encoder in zip(prompts, tokenizers, text_encoders):
+                if isinstance(self, TextualInversionLoaderMixin):
+                    prompt = self.maybe_convert_prompt(prompt, tokenizer)
+
+                text_inputs = tokenizer(
+                    prompt,
+                    padding="max_length",
+                    max_length=tokenizer.model_max_length,
+                    truncation=True,
+                    return_tensors="pt",
+                )
+
+                text_input_ids = text_inputs.input_ids
+                untruncated_ids = tokenizer(prompt, padding="longest", return_tensors="pt").input_ids
+
+                if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(
+                    text_input_ids, untruncated_ids
+                ):
+                    removed_text = tokenizer.batch_decode(untruncated_ids[:, tokenizer.model_max_length - 1 : -1])
+                    logger.warning(
+                        "The following part of your input was truncated because CLIP can only handle sequences up to"
+                        f" {tokenizer.model_max_length} tokens: {removed_text}"
+                    )
+
+                prompt_embeds = text_encoder(
+                    text_input_ids.to(device),
+                    output_hidden_states=True,
+                )
+
+                # we are always interested only in the pooled output of the final text encoder
+                pooled_prompt_embeds = prompt_embeds[0]
+                prompt_embeds = prompt_embeds.hidden_states[-2]
+
+                prompt_embeds_list.append(prompt_embeds)
+
+            prompt_embeds = torch.concat(prompt_embeds_list, dim=-1)
+
+        # get unconditional embeddings for classifier free guidance
+        zero_out_negative_prompt = negative_prompt is None and self.config.force_zeros_for_empty_prompt
+        if do_classifier_free_guidance and negative_prompt_embeds is None and zero_out_negative_prompt:
+            negative_prompt_embeds = torch.zeros_like(prompt_embeds)
+            negative_pooled_prompt_embeds = torch.zeros_like(pooled_prompt_embeds)
+        elif do_classifier_free_guidance and negative_prompt_embeds is None:
+            negative_prompt = negative_prompt or ""
+            negative_prompt_2 = negative_prompt_2 or negative_prompt
+
+            uncond_tokens: List[str]
+            if prompt is not None and type(prompt) is not type(negative_prompt):
+                raise TypeError(
+                    f"`negative_prompt` should be the same type as `prompt`, but got {type(negative_prompt)} !="
+                    f" {type(prompt)}."
+                )
+            elif isinstance(negative_prompt, str):
+                uncond_tokens = [negative_prompt, negative_prompt_2]
+            elif batch_size != len(negative_prompt):
+                raise ValueError(
+                    f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:"
+                    f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches"
+                    " the batch size of `prompt`."
+                )
+            else:
+                uncond_tokens = [negative_prompt, negative_prompt_2]
+
+            negative_prompt_embeds_list = []
+            for negative_prompt, tokenizer, text_encoder in zip(uncond_tokens, tokenizers, text_encoders):
+                if isinstance(self, TextualInversionLoaderMixin):
+                    negative_prompt = self.maybe_convert_prompt(negative_prompt, tokenizer)
+
+                max_length = prompt_embeds.shape[1]
+                uncond_input = tokenizer(
+                    negative_prompt,
+                    padding="max_length",
+                    max_length=max_length,
+                    truncation=True,
+                    return_tensors="pt",
+                )
+
+                negative_prompt_embeds = text_encoder(
+                    uncond_input.input_ids.to(device),
+                    output_hidden_states=True,
+                )
+                # we are always interested only in the pooled output of the final text encoder
+                negative_pooled_prompt_embeds = negative_prompt_embeds[0]
+                negative_prompt_embeds = negative_prompt_embeds.hidden_states[-2]
+
+                negative_prompt_embeds_list.append(negative_prompt_embeds)
+
+            negative_prompt_embeds = torch.concat(negative_prompt_embeds_list, dim=-1)
+
+        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder_2.dtype, device=device)
+        bs_embed, seq_len, _ = prompt_embeds.shape
+        # duplicate text embeddings for each generation per prompt, using mps friendly method
+        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)
+        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)
+
+        if do_classifier_free_guidance:
+            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method
+            seq_len = negative_prompt_embeds.shape[1]
+            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder_2.dtype, device=device)
+            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)
+            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)
+
+        pooled_prompt_embeds = pooled_prompt_embeds.repeat(1, num_images_per_prompt).view(
+            bs_embed * num_images_per_prompt, -1
+        )
+        if do_classifier_free_guidance:
+            negative_pooled_prompt_embeds = negative_pooled_prompt_embeds.repeat(1, num_images_per_prompt).view(
+                bs_embed * num_images_per_prompt, -1
+            )
+
+        return prompt_embeds, negative_prompt_embeds, pooled_prompt_embeds, negative_pooled_prompt_embeds
+
+    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs
+    def prepare_extra_step_kwargs(self, generator, eta):
+        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature
+        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.
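+        # (whether `eta` and `generator` are supported is detected below by inspecting
+        # the signature of `scheduler.step`, rather than by hard-coding scheduler types)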
+ # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 + # and should be between [0, 1] + + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + extra_step_kwargs = {} + if accepts_eta: + extra_step_kwargs["eta"] = eta + + # check if the scheduler accepts generator + accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) + if accepts_generator: + extra_step_kwargs["generator"] = generator + return extra_step_kwargs + + def check_inputs( + self, + prompt, + prompt_2, + height, + width, + callback_steps, + negative_prompt=None, + negative_prompt_2=None, + prompt_embeds=None, + negative_prompt_embeds=None, + pooled_prompt_embeds=None, + negative_pooled_prompt_embeds=None, + ): + if height % 8 != 0 or width % 8 != 0: + raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") + + if (callback_steps is None) or ( + callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) + ): + raise ValueError( + f"`callback_steps` has to be a positive integer but is {callback_steps} of type" + f" {type(callback_steps)}." + ) + + if prompt is not None and prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to" + " only forward one of the two." + ) + elif prompt_2 is not None and prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `prompt_2`: {prompt_2} and `prompt_embeds`: {prompt_embeds}. Please make sure to" + " only forward one of the two." + ) + elif prompt is None and prompt_embeds is None: + raise ValueError( + "Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined." + ) + elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)): + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + elif prompt_2 is not None and (not isinstance(prompt_2, str) and not isinstance(prompt_2, list)): + raise ValueError(f"`prompt_2` has to be of type `str` or `list` but is {type(prompt_2)}") + + if negative_prompt is not None and negative_prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:" + f" {negative_prompt_embeds}. Please make sure to only forward one of the two." + ) + elif negative_prompt_2 is not None and negative_prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `negative_prompt_2`: {negative_prompt_2} and `negative_prompt_embeds`:" + f" {negative_prompt_embeds}. Please make sure to only forward one of the two." + ) + + if prompt_embeds is not None and negative_prompt_embeds is not None: + if prompt_embeds.shape != negative_prompt_embeds.shape: + raise ValueError( + "`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but" + f" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`" + f" {negative_prompt_embeds.shape}." + ) + + if prompt_embeds is not None and pooled_prompt_embeds is None: + raise ValueError( + "If `prompt_embeds` are provided, `pooled_prompt_embeds` also have to be passed. Make sure to generate `pooled_prompt_embeds` from the same text encoder that was used to generate `prompt_embeds`." 
+ ) + + if negative_prompt_embeds is not None and negative_pooled_prompt_embeds is None: + raise ValueError( + "If `negative_prompt_embeds` are provided, `negative_pooled_prompt_embeds` also have to be passed. Make sure to generate `negative_pooled_prompt_embeds` from the same text encoder that was used to generate `negative_prompt_embeds`." + ) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents + def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None): + shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor) + if isinstance(generator, list) and len(generator) != batch_size: + raise ValueError( + f"You have passed a list of generators of length {len(generator)}, but requested an effective batch" + f" size of {batch_size}. Make sure the batch size matches the length of the generators." + ) + + if latents is None: + latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype) + else: + latents = latents.to(device) + + # scale the initial noise by the standard deviation required by the scheduler + latents = latents * self.scheduler.init_noise_sigma + return latents + + def _get_add_time_ids(self, original_size, crops_coords_top_left, target_size, dtype): + add_time_ids = list(original_size + crops_coords_top_left + target_size) + + passed_add_embed_dim = ( + self.unet.config.addition_time_embed_dim * len(add_time_ids) + self.text_encoder_2.config.projection_dim + ) + expected_add_embed_dim = self.unet.add_embedding.linear_1.in_features + + if expected_add_embed_dim != passed_add_embed_dim: + raise ValueError( + f"Model expects an added time embedding vector of length {expected_add_embed_dim}, but a vector of {passed_add_embed_dim} was created. The model has an incorrect config. Please check `unet.config.time_embedding_type` and `text_encoder_2.config.projection_dim`." 
+            )
+
+        add_time_ids = torch.tensor([add_time_ids], dtype=dtype)
+        return add_time_ids
+
+    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_upscale.StableDiffusionUpscalePipeline.upcast_vae
+    def upcast_vae(self):
+        dtype = self.vae.dtype
+        self.vae.to(dtype=torch.float32)
+        use_torch_2_0_or_xformers = isinstance(
+            self.vae.decoder.mid_block.attentions[0].processor,
+            (
+                AttnProcessor2_0,
+                XFormersAttnProcessor,
+                LoRAXFormersAttnProcessor,
+                LoRAAttnProcessor2_0,
+            ),
+        )
+        # if xformers or torch_2_0 is used, the attention block does not need
+        # to be in float32, which can save lots of memory
+        if use_torch_2_0_or_xformers:
+            self.vae.post_quant_conv.to(dtype)
+            self.vae.decoder.conv_in.to(dtype)
+            self.vae.decoder.mid_block.to(dtype)
+
+    @torch.no_grad()
+    @replace_example_docstring(EXAMPLE_DOC_STRING)
+    def __call__(
+        self,
+        prompt: str = None,
+        prompt_2: Optional[str] = None,
+        height: Optional[int] = None,
+        width: Optional[int] = None,
+        num_inference_steps: int = 50,
+        denoising_end: Optional[float] = None,
+        guidance_scale: float = 5.0,
+        negative_prompt: Optional[str] = None,
+        negative_prompt_2: Optional[str] = None,
+        num_images_per_prompt: Optional[int] = 1,
+        eta: float = 0.0,
+        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
+        latents: Optional[torch.FloatTensor] = None,
+        prompt_embeds: Optional[torch.FloatTensor] = None,
+        negative_prompt_embeds: Optional[torch.FloatTensor] = None,
+        pooled_prompt_embeds: Optional[torch.FloatTensor] = None,
+        negative_pooled_prompt_embeds: Optional[torch.FloatTensor] = None,
+        output_type: Optional[str] = "pil",
+        return_dict: bool = True,
+        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,
+        callback_steps: int = 1,
+        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
+        guidance_rescale: float = 0.0,
+        original_size: Optional[Tuple[int, int]] = None,
+        crops_coords_top_left: Tuple[int, int] = (0, 0),
+        target_size: Optional[Tuple[int, int]] = None,
+    ):
+        r"""
+        Function invoked when calling the pipeline for generation.
+
+        Args:
+            prompt (`str`):
+                The prompt to guide the image generation. If not defined, one has to pass `prompt_embeds`
+                instead.
+            prompt_2 (`str`):
+                The prompt to be sent to the `tokenizer_2` and `text_encoder_2`. If not defined, `prompt` is
+                used in both text-encoders.
+            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
+                The height in pixels of the generated image.
+            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
+                The width in pixels of the generated image.
+            num_inference_steps (`int`, *optional*, defaults to 50):
+                The number of denoising steps. More denoising steps usually lead to a higher quality image at the
+                expense of slower inference.
+            denoising_end (`float`, *optional*):
+                When specified, determines the fraction (between 0.0 and 1.0) of the total denoising process to be
+                completed before it is intentionally prematurely terminated. As a result, the returned sample will
+                still retain a substantial amount of noise as determined by the discrete timesteps selected by the
+                scheduler. The denoising_end parameter should ideally be utilized when this pipeline forms a part of a
+                "Mixture of Denoisers" multi-pipeline setup, as elaborated in [**Refining the Image
+                Output**](https://huggingface.co/docs/diffusers/api/pipelines/stable_diffusion/stable_diffusion_xl#refining-the-image-output)
+            guidance_scale (`float`, *optional*, defaults to 5.0):
+                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
+                `guidance_scale` is defined as `w` of equation 2 of [Imagen
+                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
+                1`. A higher guidance scale encourages the model to generate images that are closely linked to the
+                text `prompt`, usually at the expense of lower image quality.
+            negative_prompt (`str`):
+                The prompt not to guide the image generation. If not defined, one has to pass
+                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is
+                less than `1`).
+            negative_prompt_2 (`str`):
+                The prompt not to guide the image generation to be sent to `tokenizer_2` and
+                `text_encoder_2`. If not defined, `negative_prompt` is used in both text-encoders.
+            num_images_per_prompt (`int`, *optional*, defaults to 1):
+                The number of images to generate per prompt.
+            eta (`float`, *optional*, defaults to 0.0):
+                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
+                [`schedulers.DDIMScheduler`], will be ignored for others.
+            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
+                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
+                to make generation deterministic.
+            latents (`torch.FloatTensor`, *optional*):
+                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image
+                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents
+                tensor will be generated by sampling using the supplied random `generator`.
+            prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
+                provided, text embeddings will be generated from `prompt` input argument.
+            negative_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
+                argument.
+            pooled_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting.
+                If not provided, pooled text embeddings will be generated from `prompt` input argument.
+            negative_pooled_prompt_embeds (`torch.FloatTensor`, *optional*):
+                Pre-generated negative pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
+                weighting. If not provided, pooled negative_prompt_embeds will be generated from `negative_prompt`
+                input argument.
+            output_type (`str`, *optional*, defaults to `"pil"`):
+                The output format of the generated image. Choose between
+                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
+            return_dict (`bool`, *optional*, defaults to `True`):
+                Whether or not to return a [`~pipelines.stable_diffusion_xl.StableDiffusionXLPipelineOutput`] instead
+                of a plain tuple.
+ callback (`Callable`, *optional*): + A function that will be called every `callback_steps` steps during inference. The function will be + called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`. + callback_steps (`int`, *optional*, defaults to 1): + The frequency at which the `callback` function will be called. If not specified, the callback will be + called at every step. + cross_attention_kwargs (`dict`, *optional*): + A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under + `self.processor` in + [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py). + guidance_rescale (`float`, *optional*, defaults to 0.0): + Guidance rescale factor proposed by [Common Diffusion Noise Schedules and Sample Steps are + Flawed](https://arxiv.org/pdf/2305.08891.pdf) `guidance_scale` is defined as `φ` in equation 16. of + [Common Diffusion Noise Schedules and Sample Steps are Flawed](https://arxiv.org/pdf/2305.08891.pdf). + Guidance rescale factor should fix overexposure when using zero terminal SNR. + original_size (`Tuple[int]`, *optional*, defaults to (1024, 1024)): + If `original_size` is not the same as `target_size` the image will appear to be down- or upsampled. + `original_size` defaults to `(height, width)` if not specified. Part of SDXL's micro-conditioning as + explained in section 2.2 of + [https://huggingface.co/papers/2307.01952](https://huggingface.co/papers/2307.01952). + crops_coords_top_left (`Tuple[int]`, *optional*, defaults to (0, 0)): + `crops_coords_top_left` can be used to generate an image that appears to be "cropped" from the position + `crops_coords_top_left` downwards. Favorable, well-centered images are usually achieved by setting + `crops_coords_top_left` to (0, 0). Part of SDXL's micro-conditioning as explained in section 2.2 of + [https://huggingface.co/papers/2307.01952](https://huggingface.co/papers/2307.01952). + target_size (`Tuple[int]`, *optional*, defaults to (1024, 1024)): + For most cases, `target_size` should be set to the desired height and width of the generated image. If + not specified it will default to `(height, width)`. Part of SDXL's micro-conditioning as explained in + section 2.2 of [https://huggingface.co/papers/2307.01952](https://huggingface.co/papers/2307.01952). + + Examples: + + Returns: + [`~pipelines.stable_diffusion_xl.StableDiffusionXLPipelineOutput`] or `tuple`: + [`~pipelines.stable_diffusion_xl.StableDiffusionXLPipelineOutput`] if `return_dict` is True, otherwise a + `tuple`. When returning a tuple, the first element is a list with the generated images. + """ + # 0. Default height and width to unet + height = height or self.default_sample_size * self.vae_scale_factor + width = width or self.default_sample_size * self.vae_scale_factor + + original_size = original_size or (height, width) + target_size = target_size or (height, width) + + # 1. Check inputs. Raise error if not correct + self.check_inputs( + prompt, + prompt_2, + height, + width, + callback_steps, + negative_prompt, + negative_prompt_2, + prompt_embeds, + negative_prompt_embeds, + pooled_prompt_embeds, + negative_pooled_prompt_embeds, + ) + + # 2. 
Define call parameters
+        if prompt is not None and isinstance(prompt, str):
+            batch_size = 1
+        elif prompt is not None and isinstance(prompt, list):
+            batch_size = len(prompt)
+        else:
+            batch_size = prompt_embeds.shape[0]
+
+        device = self._execution_device
+
+        # here `guidance_scale` is defined analogously to the guidance weight `w` of equation (2)
+        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`
+        # corresponds to doing no classifier free guidance.
+        do_classifier_free_guidance = guidance_scale > 1.0
+
+        # 3. Encode input prompt
+        # note: prompt encoding is delegated to `get_weighted_text_embeddings_sdxl`, so any
+        # LoRA scale passed via `cross_attention_kwargs` is not applied to the text encoders here
+        negative_prompt = negative_prompt if negative_prompt is not None else ""
+
+        (
+            prompt_embeds,
+            negative_prompt_embeds,
+            pooled_prompt_embeds,
+            negative_pooled_prompt_embeds,
+        ) = get_weighted_text_embeddings_sdxl(pipe=self, prompt=prompt, neg_prompt=negative_prompt)
+
+        # 4. Prepare timesteps
+        self.scheduler.set_timesteps(num_inference_steps, device=device)
+
+        timesteps = self.scheduler.timesteps
+
+        # 5. Prepare latent variables
+        num_channels_latents = self.unet.config.in_channels
+        latents = self.prepare_latents(
+            batch_size * num_images_per_prompt,
+            num_channels_latents,
+            height,
+            width,
+            prompt_embeds.dtype,
+            device,
+            generator,
+            latents,
+        )
+
+        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline
+        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)
+
+        # 7. Prepare added time ids & embeddings
+        add_text_embeds = pooled_prompt_embeds
+        add_time_ids = self._get_add_time_ids(
+            original_size, crops_coords_top_left, target_size, dtype=prompt_embeds.dtype
+        )
+
+        if do_classifier_free_guidance:
+            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds], dim=0)
+            add_text_embeds = torch.cat([negative_pooled_prompt_embeds, add_text_embeds], dim=0)
+            add_time_ids = torch.cat([add_time_ids, add_time_ids], dim=0)
+
+        prompt_embeds = prompt_embeds.to(device)
+        add_text_embeds = add_text_embeds.to(device)
+        add_time_ids = add_time_ids.to(device).repeat(batch_size * num_images_per_prompt, 1)
+
+        # 8. Denoising loop
+        num_warmup_steps = max(len(timesteps) - num_inference_steps * self.scheduler.order, 0)
+
+        # 8.1 Apply denoising_end
+        if denoising_end is not None and isinstance(denoising_end, float) and denoising_end > 0 and denoising_end < 1:
+            discrete_timestep_cutoff = int(
+                round(
+                    self.scheduler.config.num_train_timesteps
+                    - (denoising_end * self.scheduler.config.num_train_timesteps)
+                )
+            )
+            num_inference_steps = len(list(filter(lambda ts: ts >= discrete_timestep_cutoff, timesteps)))
+            timesteps = timesteps[:num_inference_steps]
+
+        with self.progress_bar(total=num_inference_steps) as progress_bar:
+            for i, t in enumerate(timesteps):
+                # expand the latents if we are doing classifier free guidance
+                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents
+
+                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)
+
+                # predict the noise residual
+                added_cond_kwargs = {"text_embeds": add_text_embeds, "time_ids": add_time_ids}
+                noise_pred = self.unet(
+                    latent_model_input,
+                    t,
+                    encoder_hidden_states=prompt_embeds,
+                    cross_attention_kwargs=cross_attention_kwargs,
+                    added_cond_kwargs=added_cond_kwargs,
+                    return_dict=False,
+                )[0]
+
+                # perform guidance
+                if do_classifier_free_guidance:
+                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
+                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)
+
+                if do_classifier_free_guidance and guidance_rescale > 0.0:
+                    # Based on 3.4. in https://arxiv.org/pdf/2305.08891.pdf
+                    noise_pred = rescale_noise_cfg(noise_pred, noise_pred_text, guidance_rescale=guidance_rescale)
+
+                # compute the previous noisy sample x_t -> x_t-1
+                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]
+
+                # call the callback, if provided
+                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):
+                    progress_bar.update()
+                    if callback is not None and i % callback_steps == 0:
+                        step_idx = i // getattr(self.scheduler, "order", 1)
+                        callback(step_idx, t, latents)
+
+        if output_type != "latent":
+            # make sure the VAE is in float32 mode, as it overflows in float16
+            needs_upcasting = self.vae.dtype == torch.float16 and self.vae.config.force_upcast
+
+            if needs_upcasting:
+                self.upcast_vae()
+                latents = latents.to(next(iter(self.vae.post_quant_conv.parameters())).dtype)
+
+            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]
+
+            # cast back to fp16 if needed
+            if needs_upcasting:
+                self.vae.to(dtype=torch.float16)
+        else:
+            image = latents
+            return StableDiffusionXLPipelineOutput(images=image)
+
+        # apply watermark if available
+        if self.watermark is not None:
+            image = self.watermark.apply_watermark(image)
+
+        image = self.image_processor.postprocess(image, output_type=output_type)
+
+        # Offload last model to CPU
+        if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None:
+            self.final_offload_hook.offload()
+
+        if not return_dict:
+            return (image,)
+
+        return StableDiffusionXLPipelineOutput(images=image)
+
+    # Override to properly handle the loading and unloading of the additional text encoder.
+    def load_lora_weights(self, pretrained_model_name_or_path_or_dict: Union[str, Dict[str, torch.Tensor]], **kwargs):
+        # We could have accessed the unet config from `lora_state_dict()` too. We pass
+        # it here explicitly to be able to tell that it's coming from an SDXL
+        # pipeline.
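+        # A rough usage sketch (the file name below is hypothetical):
+        #   pipe.load_lora_weights("path/to/sdxl_lora.safetensors")
+        # `lora_state_dict` returns a flat state dict whose keys are prefixed with
+        # "unet.", "text_encoder." or "text_encoder_2."; each group is routed below to
+        # the matching module.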
+ state_dict, network_alphas = self.lora_state_dict( + pretrained_model_name_or_path_or_dict, + unet_config=self.unet.config, + **kwargs, + ) + self.load_lora_into_unet(state_dict, network_alphas=network_alphas, unet=self.unet) + + text_encoder_state_dict = {k: v for k, v in state_dict.items() if "text_encoder." in k} + if len(text_encoder_state_dict) > 0: + self.load_lora_into_text_encoder( + text_encoder_state_dict, + network_alphas=network_alphas, + text_encoder=self.text_encoder, + prefix="text_encoder", + lora_scale=self.lora_scale, + ) + + text_encoder_2_state_dict = {k: v for k, v in state_dict.items() if "text_encoder_2." in k} + if len(text_encoder_2_state_dict) > 0: + self.load_lora_into_text_encoder( + text_encoder_2_state_dict, + network_alphas=network_alphas, + text_encoder=self.text_encoder_2, + prefix="text_encoder_2", + lora_scale=self.lora_scale, + ) + + @classmethod + def save_lora_weights( + self, + save_directory: Union[str, os.PathLike], + unet_lora_layers: Dict[str, Union[torch.nn.Module, torch.Tensor]] = None, + text_encoder_lora_layers: Dict[str, Union[torch.nn.Module, torch.Tensor]] = None, + text_encoder_2_lora_layers: Dict[str, Union[torch.nn.Module, torch.Tensor]] = None, + is_main_process: bool = True, + weight_name: str = None, + save_function: Callable = None, + safe_serialization: bool = False, + ): + state_dict = {} + + def pack_weights(layers, prefix): + layers_weights = layers.state_dict() if isinstance(layers, torch.nn.Module) else layers + layers_state_dict = {f"{prefix}.{module_name}": param for module_name, param in layers_weights.items()} + return layers_state_dict + + state_dict.update(pack_weights(unet_lora_layers, "unet")) + + if text_encoder_lora_layers and text_encoder_2_lora_layers: + state_dict.update(pack_weights(text_encoder_lora_layers, "text_encoder")) + state_dict.update(pack_weights(text_encoder_2_lora_layers, "text_encoder_2")) + + self.write_lora_layers( + state_dict=state_dict, + save_directory=save_directory, + is_main_process=is_main_process, + weight_name=weight_name, + save_function=save_function, + safe_serialization=safe_serialization, + ) + + def _remove_text_encoder_monkey_patch(self): + self._remove_text_encoder_monkey_patch_classmethod(self.text_encoder) + self._remove_text_encoder_monkey_patch_classmethod(self.text_encoder_2) diff --git a/src/animatediff/utils/mask.py b/src/animatediff/utils/mask.py new file mode 100644 index 0000000000000000000000000000000000000000..4f4359ac1063187a60cb651159b7359a06045a91 --- /dev/null +++ b/src/animatediff/utils/mask.py @@ -0,0 +1,721 @@ +import glob +import logging +import os +from pathlib import Path + +import cv2 +import numpy as np +import torch +from groundingdino.models import build_model +from groundingdino.util.slconfig import SLConfig +from groundingdino.util.utils import clean_state_dict, get_phrases_from_posmap +from PIL import Image +from segment_anything_hq import (SamPredictor, build_sam_vit_b, + build_sam_vit_h, build_sam_vit_l) +from segment_anything_hq.build_sam import build_sam_vit_t +from tqdm.rich import tqdm + +logger = logging.getLogger(__name__) + +build_sam_table={ + "sam_hq_vit_l":build_sam_vit_l, + "sam_hq_vit_h":build_sam_vit_h, + "sam_hq_vit_b":build_sam_vit_b, + "sam_hq_vit_tiny":build_sam_vit_t, +} + +# adapted from https://github.com/IDEA-Research/Grounded-Segment-Anything/blob/main/grounded_sam_demo.py +class MaskPredictor: + def __init__(self,model_config_path, model_checkpoint_path,device, sam_checkpoint, box_threshold=0.3, text_threshold=0.25 ): + 
self.groundingdino_model = None
+        self.sam_predictor = None
+
+        self.model_config_path = model_config_path
+        self.model_checkpoint_path = model_checkpoint_path
+        self.device = device
+        self.sam_checkpoint = sam_checkpoint
+
+        self.box_threshold = box_threshold
+        self.text_threshold = text_threshold
+
+    def load_groundingdino_model(self):
+        args = SLConfig.fromfile(self.model_config_path)
+        args.device = self.device
+        model = build_model(args)
+        checkpoint = torch.load(self.model_checkpoint_path, map_location="cpu")
+        _ = model.load_state_dict(clean_state_dict(checkpoint["model"]), strict=False)
+        _ = model.eval()
+        self.groundingdino_model = model
+
+    def load_sam_predictor(self):
+        s = Path(self.sam_checkpoint)
+        self.sam_predictor = SamPredictor(build_sam_table[s.stem](checkpoint=self.sam_checkpoint).to(self.device))
+
+    def transform_image(self, image_pil):
+        import groundingdino.datasets.transforms as T
+        transform = T.Compose(
+            [
+                T.RandomResize([800], max_size=1333),
+                T.ToTensor(),
+                T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+            ]
+        )
+        image, _ = transform(image_pil, None)  # 3, h, w
+        return image
+
+    def get_grounding_output(self, image, caption, with_logits=True):
+        model = self.groundingdino_model
+        device = self.device
+
+        caption = caption.lower()
+        caption = caption.strip()
+        if not caption.endswith("."):
+            caption = caption + "."
+        model = model.to(device)
+        image = image.to(device)
+        with torch.no_grad():
+            outputs = model(image[None], captions=[caption])
+        logits = outputs["pred_logits"].cpu().sigmoid()[0]  # (nq, 256)
+        boxes = outputs["pred_boxes"].cpu()[0]  # (nq, 4)
+
+        # filter output
+        logits_filt = logits.clone()
+        boxes_filt = boxes.clone()
+        filt_mask = logits_filt.max(dim=1)[0] > self.box_threshold
+        logits_filt = logits_filt[filt_mask]  # num_filt, 256
+        boxes_filt = boxes_filt[filt_mask]  # num_filt, 4
+
+        # get phrase
+        tokenizer = model.tokenizer
+        tokenized = tokenizer(caption)
+        # build pred
+        pred_phrases = []
+        for logit, box in zip(logits_filt, boxes_filt):
+            pred_phrase = get_phrases_from_posmap(logit > self.text_threshold, tokenized, tokenizer)
+            if with_logits:
+                pred_phrases.append(pred_phrase + f"({str(logit.max().item())[:4]})")
+            else:
+                pred_phrases.append(pred_phrase)
+
+        return boxes_filt, pred_phrases
+
+    def __call__(self, image_pil: Image, text_prompt):
+        if self.groundingdino_model is None:
+            self.load_groundingdino_model()
+            self.load_sam_predictor()
+
+        transformed_img = self.transform_image(image_pil)
+
+        # run grounding dino model
+        boxes_filt, pred_phrases = self.get_grounding_output(
+            transformed_img, text_prompt
+        )
+
+        if boxes_filt.shape[0] == 0:
+            logger.info("object not found")
+            w, h = image_pil.size
+            return np.zeros(shape=(1, h, w), dtype=bool)
+
+        img_array = np.array(image_pil)
+        self.sam_predictor.set_image(img_array)
+
+        size = image_pil.size
+        H, W = size[1], size[0]
+        # convert boxes from normalized (cx, cy, w, h) to absolute (x1, y1, x2, y2) pixels
+        for i in range(boxes_filt.size(0)):
+            boxes_filt[i] = boxes_filt[i] * torch.Tensor([W, H, W, H])
+            boxes_filt[i][:2] -= boxes_filt[i][2:] / 2
+            boxes_filt[i][2:] += boxes_filt[i][:2]
+
+        boxes_filt = boxes_filt.cpu()
+        transformed_boxes = self.sam_predictor.transform.apply_boxes_torch(boxes_filt, img_array.shape[:2]).to(self.device)
+
+        masks, _, _ = self.sam_predictor.predict_torch(
+            point_coords=None,
+            point_labels=None,
+            boxes=transformed_boxes,
+            multimask_output=False,
+        )
+
+        result = None
+        for m in masks:
+            if result is None:
+                result = m
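+            # the first box's mask initializes `result`; masks for any further detected
+            # boxes are OR-merged into it in the `else` branch below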
+ else: + result |= m + + result = result.cpu().detach().numpy().copy() + + return result + +def load_mask_list(mask_dir, masked_area_list, mask_padding): + + mask_frame_list = sorted(glob.glob( os.path.join(mask_dir, "[0-9]*.png"), recursive=False)) + + kernel = np.ones((abs(mask_padding),abs(mask_padding)),np.uint8) + + for m in mask_frame_list: + cur = int(Path(m).stem) + tmp = np.asarray(Image.open(m)) + + if mask_padding < 0: + tmp = cv2.erode(tmp, kernel,iterations = 1) + elif mask_padding > 0: + tmp = cv2.dilate(tmp, kernel,iterations = 1) + + masked_area_list[cur] = tmp[None,...] + + return masked_area_list + +def crop_mask_list(mask_list): + area_list = [] + + max_h = 0 + max_w = 0 + + for m in mask_list: + if m is None: + area_list.append(None) + continue + m = m > 127 + area = np.where(m[0] == True) + if area[0].size == 0: + area_list.append(None) + continue + + ymin = min(area[0]) + ymax = max(area[0]) + xmin = min(area[1]) + xmax = max(area[1]) + h = ymax+1 - ymin + w = xmax+1 - xmin + max_h = max(max_h, h) + max_w = max(max_w, w) + area_list.append( (ymin, ymax, xmin, xmax) ) + #crop = m[ymin:ymax+1,xmin:xmax+1] + + logger.info(f"{max_h=}") + logger.info(f"{max_w=}") + + border_h = mask_list[0].shape[1] + border_w = mask_list[0].shape[2] + + mask_pos_list=[] + cropped_mask_list=[] + + for a, m in zip(area_list, mask_list): + if m is None or a is None: + mask_pos_list.append(None) + cropped_mask_list.append(None) + continue + + ymin,ymax,xmin,xmax = a + h = ymax+1 - ymin + w = xmax+1 - xmin + + # H + diff_h = max_h - h + dh1 = diff_h//2 + dh2 = diff_h - dh1 + y1 = ymin - dh1 + y2 = ymax + dh2 + if y1 < 0: + y1 = 0 + y2 = max_h-1 + elif y2 >= border_h: + y1 = (border_h-1) - (max_h - 1) + y2 = (border_h-1) + + # W + diff_w = max_w - w + dw1 = diff_w//2 + dw2 = diff_w - dw1 + x1 = xmin - dw1 + x2 = xmax + dw2 + if x1 < 0: + x1 = 0 + x2 = max_w-1 + elif x2 >= border_w: + x1 = (border_w-1) - (max_w - 1) + x2 = (border_w-1) + + mask_pos_list.append( (int(x1),int(y1)) ) + m = m[0][y1:y2+1,x1:x2+1] + cropped_mask_list.append( m[None,...] ) + + + return cropped_mask_list, mask_pos_list, (max_h,max_w) + +def crop_frames(pos_list, crop_size_hw, frame_dir): + h,w = crop_size_hw + + for i,pos in tqdm(enumerate(pos_list),total=len(pos_list)): + filename = f"{i:08d}.png" + frame_path = frame_dir / filename + if not frame_path.is_file(): + logger.info(f"{frame_path=} not found. skip") + continue + if pos is None: + continue + + x, y = pos + + tmp = np.asarray(Image.open(frame_path)) + tmp = tmp[y:y+h,x:x+w,...] 
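+        # overwrite the source frame in place with its cropped region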
+        Image.fromarray(tmp).save(frame_path)
+
+def save_crop_info(mask_pos_list, crop_size_hw, frame_size_hw, save_path):
+    import json
+
+    pos_map = {}
+
+    for i, pos in enumerate(mask_pos_list):
+        if pos is not None:
+            pos_map[str(i)] = pos
+
+    info = {
+        "frame_height": int(frame_size_hw[0]),
+        "frame_width": int(frame_size_hw[1]),
+        "height": int(crop_size_hw[0]),
+        "width": int(crop_size_hw[1]),
+        "pos_map": pos_map,
+    }
+
+    with open(save_path, mode="wt", encoding="utf-8") as f:
+        json.dump(info, f, ensure_ascii=False, indent=4)
+
+def restore_position(mask_list, crop_info):
+
+    f_h = crop_info["frame_height"]
+    f_w = crop_info["frame_width"]
+
+    h = crop_info["height"]
+    w = crop_info["width"]
+    pos_map = crop_info["pos_map"]
+
+    for i in pos_map:
+        x, y = pos_map[i]
+        i = int(i)
+
+        m = mask_list[i]
+
+        if m is None:
+            continue
+
+        m = cv2.resize( m, (w,h) )
+        if len(m.shape) == 2:
+            m = m[...,None]
+
+        frame = np.zeros(shape=(f_h,f_w,m.shape[2]), dtype=np.uint8)
+
+        frame[y:y+h,x:x+w,...] = m
+        mask_list[i] = frame
+
+    return mask_list
+
+def load_frame_list(frame_dir, frame_array_list, crop_info):
+    frame_list = sorted(glob.glob( os.path.join(frame_dir, "[0-9]*.png"), recursive=False))
+
+    for f in frame_list:
+        cur = int(Path(f).stem)
+        frame_array_list[cur] = np.asarray(Image.open(f))
+
+    if not crop_info:
+        logger.info("no crop_info found -> skipping restore")
+        return frame_array_list
+
+    frame_array_list = restore_position(frame_array_list, crop_info)
+
+    return frame_array_list
+
+
+def create_fg(mask_token, frame_dir, output_dir, output_mask_dir, masked_area_list,
+              box_threshold=0.3,
+              text_threshold=0.25,
+              bg_color=(0,255,0),
+              mask_padding=0,
+              groundingdino_config="config/GroundingDINO/GroundingDINO_SwinB_cfg.py",
+              groundingdino_checkpoint="data/models/GroundingDINO/groundingdino_swinb_cogcoor.pth",
+              sam_checkpoint="data/models/SAM/sam_hq_vit_l.pth",
+              device="cuda",
+              ):
+
+    frame_list = sorted(glob.glob( os.path.join(frame_dir, "[0-9]*.png"), recursive=False))
+
+    with torch.no_grad():
+        predictor = MaskPredictor(
+            model_config_path=groundingdino_config,
+            model_checkpoint_path=groundingdino_checkpoint,
+            device=device,
+            sam_checkpoint=sam_checkpoint,
+            box_threshold=box_threshold,
+            text_threshold=text_threshold,
+        )
+
+        # kernel2 is used for every frame below, so it must be defined even when
+        # mask_padding == 0 (defining it only inside the `if` raised a NameError)
+        kernel2 = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
+        if mask_padding != 0:
+            kernel = np.ones((abs(mask_padding),abs(mask_padding)),np.uint8)
+
+        for i, frame in tqdm(enumerate(frame_list), total=len(frame_list), desc=f"creating mask from {mask_token=}"):
+            frame = Path(frame)
+            file_name = frame.name
+
+            cur_frame_no = int(frame.stem)
+
+            img = Image.open(frame)
+
+            mask_array = predictor(img, mask_token)
+            mask_array = mask_array[0].astype(np.uint8) * 255
+
+            if mask_padding < 0:
+                mask_array = cv2.erode(mask_array.astype(np.uint8),kernel,iterations = 1)
+            elif mask_padding > 0:
+                mask_array = cv2.dilate(mask_array.astype(np.uint8),kernel,iterations = 1)
+
+            mask_array = cv2.morphologyEx(mask_array.astype(np.uint8), cv2.MORPH_OPEN, kernel2)
+            mask_array = cv2.GaussianBlur(mask_array, (7, 7), sigmaX=3, sigmaY=3, borderType=cv2.BORDER_DEFAULT)
+
+            if masked_area_list[cur_frame_no] is not None:
+                masked_area_list[cur_frame_no] = np.where(masked_area_list[cur_frame_no] > mask_array[None,...], masked_area_list[cur_frame_no], mask_array[None,...])
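+                # np.where keeps the per-pixel maximum of the existing mask and the new
+                # one (an elementwise max over uint8 values in [0, 255])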
+ else: + masked_area_list[cur_frame_no] = mask_array[None,...] + + + if output_mask_dir: + #mask_array2 = mask_array.astype(np.uint8).clip(0,1) + #mask_array2 *= 255 + Image.fromarray(mask_array).save( output_mask_dir / file_name ) + + img_array = np.asarray(img).copy() + if bg_color is not None: + img_array[mask_array == 0] = bg_color + + img = Image.fromarray(img_array) + + img.save( output_dir / file_name ) + + return masked_area_list + + +def dilate_mask(masked_area_list, flow_mask_dilates=8, mask_dilates=5): + kernel = np.ones((flow_mask_dilates,flow_mask_dilates),np.uint8) + flow_masks = [ cv2.dilate(mask[0].astype(np.uint8),kernel,iterations = 1) for mask in masked_area_list ] + flow_masks = [ Image.fromarray(mask * 255) for mask in flow_masks ] + + kernel = np.ones((mask_dilates,mask_dilates),np.uint8) + dilated_masks = [ cv2.dilate(mask[0].astype(np.uint8),kernel,iterations = 1) for mask in masked_area_list ] + dilated_masks = [ Image.fromarray(mask * 255) for mask in dilated_masks ] + + return flow_masks, dilated_masks + + +# adapted from https://github.com/sczhou/ProPainter/blob/main/inference_propainter.py +def resize_frames(frames, size=None): + if size is not None: + out_size = size + process_size = (out_size[0]-out_size[0]%8, out_size[1]-out_size[1]%8) + frames = [f.resize(process_size) for f in frames] + else: + out_size = frames[0].size + process_size = (out_size[0]-out_size[0]%8, out_size[1]-out_size[1]%8) + if not out_size == process_size: + frames = [f.resize(process_size) for f in frames] + + return frames, process_size, out_size + +def get_ref_index(mid_neighbor_id, neighbor_ids, length, ref_stride=10, ref_num=-1): + ref_index = [] + if ref_num == -1: + for i in range(0, length, ref_stride): + if i not in neighbor_ids: + ref_index.append(i) + else: + start_idx = max(0, mid_neighbor_id - ref_stride * (ref_num // 2)) + end_idx = min(length, mid_neighbor_id + ref_stride * (ref_num // 2)) + for i in range(start_idx, end_idx, ref_stride): + if i not in neighbor_ids: + if len(ref_index) > ref_num: + break + ref_index.append(i) + return ref_index + +def create_bg(frame_dir, output_dir, masked_area_list, + use_half = True, + raft_iter = 20, + subvideo_length=80, + neighbor_length=10, + ref_stride=10, + device="cuda", + low_vram = False, + ): + import sys + repo_path = Path("src/animatediff/repo/ProPainter").absolute() + repo_path = str(repo_path) + sys.path.append(repo_path) + + from animatediff.repo.ProPainter.core.utils import to_tensors + from animatediff.repo.ProPainter.model.modules.flow_comp_raft import \ + RAFT_bi + from animatediff.repo.ProPainter.model.propainter import InpaintGenerator + from animatediff.repo.ProPainter.model.recurrent_flow_completion import \ + RecurrentFlowCompleteNet + from animatediff.repo.ProPainter.utils.download_util import \ + load_file_from_url + + pretrain_model_url = 'https://github.com/sczhou/ProPainter/releases/download/v0.1.0/' + model_dir = Path("data/models/ProPainter") + model_dir.mkdir(parents=True, exist_ok=True) + + frame_list = sorted(glob.glob( os.path.join(frame_dir, "[0-9]*.png"), recursive=False)) + + frames = [Image.open(f) for f in frame_list] + + if low_vram: + org_size = frames[0].size + _w, _h = frames[0].size + if max(_w, _h) > 512: + _w = int(_w * 0.75) + _h = int(_h * 0.75) + + frames, size, out_size = resize_frames(frames, (_w, _h)) + out_size = org_size + + masked_area_list = [m[0] for m in masked_area_list] + masked_area_list = [cv2.resize(m.astype(np.uint8), dsize=size) for m in masked_area_list] + 
masked_area_list = [ m>127 for m in masked_area_list] + masked_area_list = [m[None,...] for m in masked_area_list] + + else: + frames, size, out_size = resize_frames(frames, None) + masked_area_list = [ m>127 for m in masked_area_list] + + w, h = size + + flow_masks,masks_dilated = dilate_mask(masked_area_list) + + frames_inp = [np.array(f).astype(np.uint8) for f in frames] + frames = to_tensors()(frames).unsqueeze(0) * 2 - 1 + flow_masks = to_tensors()(flow_masks).unsqueeze(0) + masks_dilated = to_tensors()(masks_dilated).unsqueeze(0) + frames, flow_masks, masks_dilated = frames.to(device), flow_masks.to(device), masks_dilated.to(device) + + + ############################################## + # set up RAFT and flow competition model + ############################################## + ckpt_path = load_file_from_url(url=os.path.join(pretrain_model_url, 'raft-things.pth'), + model_dir=model_dir, progress=True, file_name=None) + fix_raft = RAFT_bi(ckpt_path, device) + + ckpt_path = load_file_from_url(url=os.path.join(pretrain_model_url, 'recurrent_flow_completion.pth'), + model_dir=model_dir, progress=True, file_name=None) + fix_flow_complete = RecurrentFlowCompleteNet(ckpt_path) + for p in fix_flow_complete.parameters(): + p.requires_grad = False + fix_flow_complete.to(device) + fix_flow_complete.eval() + + ############################################## + # set up ProPainter model + ############################################## + ckpt_path = load_file_from_url(url=os.path.join(pretrain_model_url, 'ProPainter.pth'), + model_dir=model_dir, progress=True, file_name=None) + model = InpaintGenerator(model_path=ckpt_path).to(device) + model.eval() + + + + ############################################## + # ProPainter inference + ############################################## + video_length = frames.size(1) + logger.info(f'\nProcessing: [{video_length} frames]...') + with torch.no_grad(): + # ---- compute flow ---- + if max(w,h) <= 640: + short_clip_len = 12 + elif max(w,h) <= 720: + short_clip_len = 8 + elif max(w,h) <= 1280: + short_clip_len = 4 + else: + short_clip_len = 2 + + # use fp32 for RAFT + if frames.size(1) > short_clip_len: + gt_flows_f_list, gt_flows_b_list = [], [] + for f in range(0, video_length, short_clip_len): + end_f = min(video_length, f + short_clip_len) + if f == 0: + flows_f, flows_b = fix_raft(frames[:,f:end_f], iters=raft_iter) + else: + flows_f, flows_b = fix_raft(frames[:,f-1:end_f], iters=raft_iter) + + gt_flows_f_list.append(flows_f) + gt_flows_b_list.append(flows_b) + torch.cuda.empty_cache() + + gt_flows_f = torch.cat(gt_flows_f_list, dim=1) + gt_flows_b = torch.cat(gt_flows_b_list, dim=1) + gt_flows_bi = (gt_flows_f, gt_flows_b) + else: + gt_flows_bi = fix_raft(frames, iters=raft_iter) + torch.cuda.empty_cache() + + + if use_half: + frames, flow_masks, masks_dilated = frames.half(), flow_masks.half(), masks_dilated.half() + gt_flows_bi = (gt_flows_bi[0].half(), gt_flows_bi[1].half()) + fix_flow_complete = fix_flow_complete.half() + model = model.half() + + + # ---- complete flow ---- + flow_length = gt_flows_bi[0].size(1) + if flow_length > subvideo_length: + pred_flows_f, pred_flows_b = [], [] + pad_len = 5 + for f in range(0, flow_length, subvideo_length): + s_f = max(0, f - pad_len) + e_f = min(flow_length, f + subvideo_length + pad_len) + pad_len_s = max(0, f) - s_f + pad_len_e = e_f - min(flow_length, f + subvideo_length) + pred_flows_bi_sub, _ = fix_flow_complete.forward_bidirect_flow( + (gt_flows_bi[0][:, s_f:e_f], gt_flows_bi[1][:, s_f:e_f]), + flow_masks[:, 
s_f:e_f+1]) + pred_flows_bi_sub = fix_flow_complete.combine_flow( + (gt_flows_bi[0][:, s_f:e_f], gt_flows_bi[1][:, s_f:e_f]), + pred_flows_bi_sub, + flow_masks[:, s_f:e_f+1]) + + pred_flows_f.append(pred_flows_bi_sub[0][:, pad_len_s:e_f-s_f-pad_len_e]) + pred_flows_b.append(pred_flows_bi_sub[1][:, pad_len_s:e_f-s_f-pad_len_e]) + torch.cuda.empty_cache() + + pred_flows_f = torch.cat(pred_flows_f, dim=1) + pred_flows_b = torch.cat(pred_flows_b, dim=1) + pred_flows_bi = (pred_flows_f, pred_flows_b) + else: + pred_flows_bi, _ = fix_flow_complete.forward_bidirect_flow(gt_flows_bi, flow_masks) + pred_flows_bi = fix_flow_complete.combine_flow(gt_flows_bi, pred_flows_bi, flow_masks) + torch.cuda.empty_cache() + + + # ---- image propagation ---- + masked_frames = frames * (1 - masks_dilated) + subvideo_length_img_prop = min(100, subvideo_length) # ensure a minimum of 100 frames for image propagation + if video_length > subvideo_length_img_prop: + updated_frames, updated_masks = [], [] + pad_len = 10 + for f in range(0, video_length, subvideo_length_img_prop): + s_f = max(0, f - pad_len) + e_f = min(video_length, f + subvideo_length_img_prop + pad_len) + pad_len_s = max(0, f) - s_f + pad_len_e = e_f - min(video_length, f + subvideo_length_img_prop) + + b, t, _, _, _ = masks_dilated[:, s_f:e_f].size() + pred_flows_bi_sub = (pred_flows_bi[0][:, s_f:e_f-1], pred_flows_bi[1][:, s_f:e_f-1]) + prop_imgs_sub, updated_local_masks_sub = model.img_propagation(masked_frames[:, s_f:e_f], + pred_flows_bi_sub, + masks_dilated[:, s_f:e_f], + 'nearest') + updated_frames_sub = frames[:, s_f:e_f] * (1 - masks_dilated[:, s_f:e_f]) + \ + prop_imgs_sub.view(b, t, 3, h, w) * masks_dilated[:, s_f:e_f] + updated_masks_sub = updated_local_masks_sub.view(b, t, 1, h, w) + + updated_frames.append(updated_frames_sub[:, pad_len_s:e_f-s_f-pad_len_e]) + updated_masks.append(updated_masks_sub[:, pad_len_s:e_f-s_f-pad_len_e]) + torch.cuda.empty_cache() + + updated_frames = torch.cat(updated_frames, dim=1) + updated_masks = torch.cat(updated_masks, dim=1) + else: + b, t, _, _, _ = masks_dilated.size() + prop_imgs, updated_local_masks = model.img_propagation(masked_frames, pred_flows_bi, masks_dilated, 'nearest') + updated_frames = frames * (1 - masks_dilated) + prop_imgs.view(b, t, 3, h, w) * masks_dilated + updated_masks = updated_local_masks.view(b, t, 1, h, w) + torch.cuda.empty_cache() + + ori_frames = frames_inp + comp_frames = [None] * video_length + + neighbor_stride = neighbor_length // 2 + if video_length > subvideo_length: + ref_num = subvideo_length // ref_stride + else: + ref_num = -1 + + # ---- feature propagation + transformer ---- + for f in tqdm(range(0, video_length, neighbor_stride)): + neighbor_ids = [ + i for i in range(max(0, f - neighbor_stride), + min(video_length, f + neighbor_stride + 1)) + ] + ref_ids = get_ref_index(f, neighbor_ids, video_length, ref_stride, ref_num) + selected_imgs = updated_frames[:, neighbor_ids + ref_ids, :, :, :] + selected_masks = masks_dilated[:, neighbor_ids + ref_ids, :, :, :] + selected_update_masks = updated_masks[:, neighbor_ids + ref_ids, :, :, :] + selected_pred_flows_bi = (pred_flows_bi[0][:, neighbor_ids[:-1], :, :, :], pred_flows_bi[1][:, neighbor_ids[:-1], :, :, :]) + + with torch.no_grad(): + # 1.0 indicates mask + l_t = len(neighbor_ids) + + # pred_img = selected_imgs # results of image propagation + pred_img = model(selected_imgs, selected_pred_flows_bi, selected_masks, selected_update_masks, l_t) + + pred_img = pred_img.view(-1, 3, h, w) + + pred_img = (pred_img + 1) / 
2 + pred_img = pred_img.cpu().permute(0, 2, 3, 1).numpy() * 255 + binary_masks = masks_dilated[0, neighbor_ids, :, :, :].cpu().permute( + 0, 2, 3, 1).numpy().astype(np.uint8) + for i in range(len(neighbor_ids)): + idx = neighbor_ids[i] + img = np.array(pred_img[i]).astype(np.uint8) * binary_masks[i] \ + + ori_frames[idx] * (1 - binary_masks[i]) + if comp_frames[idx] is None: + comp_frames[idx] = img + else: + comp_frames[idx] = comp_frames[idx].astype(np.float32) * 0.5 + img.astype(np.float32) * 0.5 + + comp_frames[idx] = comp_frames[idx].astype(np.uint8) + + torch.cuda.empty_cache() + + # save each frame + for idx in range(video_length): + f = comp_frames[idx] + f = cv2.resize(f, out_size, interpolation = cv2.INTER_CUBIC) + f = cv2.cvtColor(f, cv2.COLOR_BGR2RGB) + dst_img_path = output_dir.joinpath( f"{idx:08d}.png" ) + cv2.imwrite(str(dst_img_path), f) + + sys.path.remove(repo_path) + + + + + + + + + + + + + + + diff --git a/src/animatediff/utils/mask_animseg.py b/src/animatediff/utils/mask_animseg.py new file mode 100644 index 0000000000000000000000000000000000000000..299c8ca0d836d8b2e3a0926276972708435392da --- /dev/null +++ b/src/animatediff/utils/mask_animseg.py @@ -0,0 +1,88 @@ +import glob +import logging +import os +from pathlib import Path + +import cv2 +import numpy as np +import onnxruntime as rt +import torch +from PIL import Image +from rembg import new_session, remove +from tqdm.rich import tqdm + +logger = logging.getLogger(__name__) + +def animseg_create_fg(frame_dir, output_dir, output_mask_dir, masked_area_list, + bg_color=(0,255,0), + mask_padding=0, + ): + + frame_list = sorted(glob.glob( os.path.join(frame_dir, "[0-9]*.png"), recursive=False)) + + if mask_padding != 0: + kernel = np.ones((abs(mask_padding),abs(mask_padding)),np.uint8) + kernel2 = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3)) + + + providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] + rmbg_model = rt.InferenceSession("data/models/anime_seg/isnetis.onnx", providers=providers) + + def get_mask(img, s=1024): + img = (img / 255).astype(np.float32) + h, w = h0, w0 = img.shape[:-1] + h, w = (s, int(s * w / h)) if h > w else (int(s * h / w), s) + ph, pw = s - h, s - w + img_input = np.zeros([s, s, 3], dtype=np.float32) + img_input[ph // 2:ph // 2 + h, pw // 2:pw // 2 + w] = cv2.resize(img, (w, h)) + img_input = np.transpose(img_input, (2, 0, 1)) + img_input = img_input[np.newaxis, :] + mask = rmbg_model.run(None, {'img': img_input})[0][0] + mask = np.transpose(mask, (1, 2, 0)) + mask = mask[ph // 2:ph // 2 + h, pw // 2:pw // 2 + w] + mask = cv2.resize(mask, (w0, h0)) + mask = (mask * 255).astype(np.uint8) + return mask + + + for i, frame in tqdm(enumerate(frame_list),total=len(frame_list), desc=f"creating mask"): + frame = Path(frame) + file_name = frame.name + + cur_frame_no = int(frame.stem) + + img = Image.open(frame) + img_array = np.asarray(img) + + mask_array = get_mask(img_array) + +# Image.fromarray(mask_array).save( output_dir / Path("raw_" + file_name)) + + if mask_padding < 0: + mask_array = cv2.erode(mask_array.astype(np.uint8),kernel,iterations = 1) + elif mask_padding > 0: + mask_array = cv2.dilate(mask_array.astype(np.uint8),kernel,iterations = 1) + + mask_array = cv2.morphologyEx(mask_array, cv2.MORPH_OPEN, kernel2) + mask_array = cv2.GaussianBlur(mask_array, (7, 7), sigmaX=3, sigmaY=3, borderType=cv2.BORDER_DEFAULT) + + if masked_area_list[cur_frame_no] is not None: + masked_area_list[cur_frame_no] = np.where(masked_area_list[cur_frame_no] > mask_array[None,...], 
masked_area_list[cur_frame_no], mask_array[None,...]) + else: + masked_area_list[cur_frame_no] = mask_array[None,...] + + if output_mask_dir: + Image.fromarray(mask_array).save( output_mask_dir / file_name ) + + img_array = np.asarray(img).copy() + if bg_color is not None: + img_array[mask_array == 0] = bg_color + + img = Image.fromarray(img_array) + + img.save( output_dir / file_name ) + + return masked_area_list + + + diff --git a/src/animatediff/utils/mask_rembg.py b/src/animatediff/utils/mask_rembg.py new file mode 100644 index 0000000000000000000000000000000000000000..78514a28dca01dc67fc07881ec98b50e247cec8d --- /dev/null +++ b/src/animatediff/utils/mask_rembg.py @@ -0,0 +1,68 @@ +import glob +import logging +import os +from pathlib import Path + +import cv2 +import numpy as np +import torch +from PIL import Image +from rembg import new_session, remove +from tqdm.rich import tqdm + +logger = logging.getLogger(__name__) + +def rembg_create_fg(frame_dir, output_dir, output_mask_dir, masked_area_list, + bg_color=(0,255,0), + mask_padding=0, + ): + + frame_list = sorted(glob.glob( os.path.join(frame_dir, "[0-9]*.png"), recursive=False)) + + if mask_padding != 0: + kernel = np.ones((abs(mask_padding),abs(mask_padding)),np.uint8) + kernel2 = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3)) + + session = new_session(providers=['CUDAExecutionProvider', 'CPUExecutionProvider']) + + for i, frame in tqdm(enumerate(frame_list),total=len(frame_list), desc=f"creating mask"): + frame = Path(frame) + file_name = frame.name + + cur_frame_no = int(frame.stem) + + img = Image.open(frame) + img_array = np.asarray(img) + + mask_array = remove(img_array, only_mask=True, session=session) + + #mask_array = mask_array[None,...] + + if mask_padding < 0: + mask_array = cv2.erode(mask_array.astype(np.uint8),kernel,iterations = 1) + elif mask_padding > 0: + mask_array = cv2.dilate(mask_array.astype(np.uint8),kernel,iterations = 1) + + mask_array = cv2.morphologyEx(mask_array, cv2.MORPH_OPEN, kernel2) + mask_array = cv2.GaussianBlur(mask_array, (7, 7), sigmaX=3, sigmaY=3, borderType=cv2.BORDER_DEFAULT) + + if masked_area_list[cur_frame_no] is not None: + masked_area_list[cur_frame_no] = np.where(masked_area_list[cur_frame_no] > mask_array[None,...], masked_area_list[cur_frame_no], mask_array[None,...]) + else: + masked_area_list[cur_frame_no] = mask_array[None,...] 
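+        # Note: the np.where(...) above is an element-wise maximum, so when several + #       masking passes touch the same frame the foreground masks are unioned + #       rather than overwritten.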
+ + if output_mask_dir: + Image.fromarray(mask_array).save( output_mask_dir / file_name ) + + img_array = np.asarray(img).copy() + if bg_color is not None: + img_array[mask_array == 0] = bg_color + + img = Image.fromarray(img_array) + + img.save( output_dir / file_name ) + + return masked_area_list + + + diff --git a/src/animatediff/utils/model.py b/src/animatediff/utils/model.py new file mode 100644 index 0000000000000000000000000000000000000000..9805160233cb598700e48d85af5ad07cdd614196 --- /dev/null +++ b/src/animatediff/utils/model.py @@ -0,0 +1,201 @@ +import logging +from functools import wraps +from pathlib import Path +from typing import Optional, TypeVar + +from diffusers import StableDiffusionPipeline, StableDiffusionXLPipeline +from huggingface_hub import hf_hub_download +from torch import nn + +from animatediff import HF_HUB_CACHE, HF_MODULE_REPO, get_dir +from animatediff.settings import CKPT_EXTENSIONS +from animatediff.utils.huggingface import get_hf_pipeline, get_hf_pipeline_sdxl +from animatediff.utils.util import path_from_cwd + +logger = logging.getLogger(__name__) + +data_dir = get_dir("data") +checkpoint_dir = data_dir.joinpath("models/sd") +pipeline_dir = data_dir.joinpath("models/huggingface") + +# for the nop_train() monkeypatch +T = TypeVar("T", bound=nn.Module) + + +def nop_train(self: T, mode: bool = True) -> T: + """No-op for monkeypatching train() call to prevent unfreezing module""" + return self + + +def get_base_model(model_name_or_path: str, local_dir: Path, force: bool = False, is_sdxl: bool = False) -> Path: + model_name_or_path = Path(model_name_or_path) + + model_save_dir = local_dir.joinpath(str(model_name_or_path).split("/")[-1]).resolve() + model_is_repo_id = not model_name_or_path.joinpath("model_index.json").exists() + + # if we have a HF repo ID, download it + if model_is_repo_id: + logger.debug("Base model is a HuggingFace repo ID") + if model_save_dir.joinpath("model_index.json").exists(): + logger.debug(f"Base model already downloaded to: {path_from_cwd(model_save_dir)}") + else: + logger.info(f"Downloading base model from {model_name_or_path}...") + if is_sdxl: + _ = get_hf_pipeline_sdxl(model_name_or_path, model_save_dir, save=True, force_download=force) + else: + _ = get_hf_pipeline(model_name_or_path, model_save_dir, save=True, force_download=force) + model_name_or_path = model_save_dir + + return Path(model_name_or_path) + + +def fix_checkpoint_if_needed(checkpoint: Path, debug: bool): + def dump(loaded): + for a in loaded: + logger.info(f"{a} {loaded[a].shape}") + + if debug: + from safetensors.torch import load_file, save_file + loaded = load_file(checkpoint, "cpu") + + dump(loaded) + + return + + try: + pipeline = StableDiffusionPipeline.from_single_file( + pretrained_model_link_or_path=str(checkpoint.absolute()), + local_files_only=False, + load_safety_checker=False, + ) + logger.info("This file works fine.") + return + except Exception: + # loading failed, most likely because the VAE attention layers use + # diffusers-style key names; remap them and save a "_fixed" copy + from safetensors.torch import load_file, save_file + + loaded = load_file(checkpoint, "cpu") + + convert_table_bias = { + "first_stage_model.decoder.mid.attn_1.to_k.bias":"first_stage_model.decoder.mid.attn_1.k.bias", + "first_stage_model.decoder.mid.attn_1.to_out.0.bias":"first_stage_model.decoder.mid.attn_1.proj_out.bias", + "first_stage_model.decoder.mid.attn_1.to_q.bias":"first_stage_model.decoder.mid.attn_1.q.bias", + "first_stage_model.decoder.mid.attn_1.to_v.bias":"first_stage_model.decoder.mid.attn_1.v.bias", +
"first_stage_model.encoder.mid.attn_1.to_k.bias":"first_stage_model.encoder.mid.attn_1.k.bias", + "first_stage_model.encoder.mid.attn_1.to_out.0.bias":"first_stage_model.encoder.mid.attn_1.proj_out.bias", + "first_stage_model.encoder.mid.attn_1.to_q.bias":"first_stage_model.encoder.mid.attn_1.q.bias", + "first_stage_model.encoder.mid.attn_1.to_v.bias":"first_stage_model.encoder.mid.attn_1.v.bias", + } + + convert_table_weight={ + "first_stage_model.decoder.mid.attn_1.to_k.weight":"first_stage_model.decoder.mid.attn_1.k.weight", + "first_stage_model.decoder.mid.attn_1.to_out.0.weight":"first_stage_model.decoder.mid.attn_1.proj_out.weight", + "first_stage_model.decoder.mid.attn_1.to_q.weight":"first_stage_model.decoder.mid.attn_1.q.weight", + "first_stage_model.decoder.mid.attn_1.to_v.weight":"first_stage_model.decoder.mid.attn_1.v.weight", + "first_stage_model.encoder.mid.attn_1.to_k.weight":"first_stage_model.encoder.mid.attn_1.k.weight", + "first_stage_model.encoder.mid.attn_1.to_out.0.weight":"first_stage_model.encoder.mid.attn_1.proj_out.weight", + "first_stage_model.encoder.mid.attn_1.to_q.weight":"first_stage_model.encoder.mid.attn_1.q.weight", + "first_stage_model.encoder.mid.attn_1.to_v.weight":"first_stage_model.encoder.mid.attn_1.v.weight", + } + + for a in list(loaded.keys()): + if a in convert_table_bias: + new_key = convert_table_bias[a] + loaded[new_key] = loaded.pop(a) + elif a in convert_table_weight: + new_key = convert_table_weight[a] + item = loaded.pop(a) + if len(item.shape) == 2: + item = item.unsqueeze(dim=-1).unsqueeze(dim=-1) + loaded[new_key] = item + + new_path = str(checkpoint.parent / checkpoint.stem) + "_fixed"+checkpoint.suffix + + logger.info(f"Saving file to {new_path}") + save_file(loaded, Path(new_path)) + + + +def checkpoint_to_pipeline( + checkpoint: Path, + target_dir: Optional[Path] = None, + save: bool = True, +) -> StableDiffusionPipeline: + logger.debug(f"Converting checkpoint {path_from_cwd(checkpoint)}") + if target_dir is None: + target_dir = pipeline_dir.joinpath(checkpoint.stem) + + pipeline = StableDiffusionPipeline.from_single_file( + pretrained_model_link_or_path=str(checkpoint.absolute()), + local_files_only=False, + load_safety_checker=False, + ) + + if save: + target_dir.mkdir(parents=True, exist_ok=True) + logger.info(f"Saving pipeline to {path_from_cwd(target_dir)}") + pipeline.save_pretrained(target_dir, safe_serialization=True) + return pipeline, target_dir + +def checkpoint_to_pipeline_sdxl( + checkpoint: Path, + target_dir: Optional[Path] = None, + save: bool = True, +) -> StableDiffusionXLPipeline: + logger.debug(f"Converting checkpoint {path_from_cwd(checkpoint)}") + if target_dir is None: + target_dir = pipeline_dir.joinpath(checkpoint.stem) + + pipeline = StableDiffusionXLPipeline.from_single_file( + pretrained_model_link_or_path=str(checkpoint.absolute()), + local_files_only=False, + load_safety_checker=False, + ) + + if save: + target_dir.mkdir(parents=True, exist_ok=True) + logger.info(f"Saving pipeline to {path_from_cwd(target_dir)}") + pipeline.save_pretrained(target_dir, safe_serialization=True) + return pipeline, target_dir + +def get_checkpoint_weights(checkpoint: Path): + temp_pipeline: StableDiffusionPipeline + temp_pipeline, _ = checkpoint_to_pipeline(checkpoint, save=False) + unet_state_dict = temp_pipeline.unet.state_dict() + tenc_state_dict = temp_pipeline.text_encoder.state_dict() + vae_state_dict = temp_pipeline.vae.state_dict() + return unet_state_dict, tenc_state_dict, vae_state_dict + +def 
get_checkpoint_weights_sdxl(checkpoint: Path): + temp_pipeline: StableDiffusionXLPipeline + temp_pipeline, _ = checkpoint_to_pipeline_sdxl(checkpoint, save=False) + unet_state_dict = temp_pipeline.unet.state_dict() + tenc_state_dict = temp_pipeline.text_encoder.state_dict() + tenc2_state_dict = temp_pipeline.text_encoder_2.state_dict() + vae_state_dict = temp_pipeline.vae.state_dict() + return unet_state_dict, tenc_state_dict, tenc2_state_dict, vae_state_dict + + +def ensure_motion_modules( + repo_id: str = HF_MODULE_REPO, + fp16: bool = False, + force: bool = False, +): + """Retrieve the motion modules from HuggingFace Hub.""" + module_files = ["mm_sd_v14.safetensors", "mm_sd_v15.safetensors"] + module_dir = get_dir("data/models/motion-module") + for file in module_files: + target_path = module_dir.joinpath(file) + if fp16: + target_path = target_path.with_suffix(".fp16.safetensors") + if target_path.exists() and force is not True: + logger.debug(f"File {path_from_cwd(target_path)} already exists, skipping download") + else: + result = hf_hub_download( + repo_id=repo_id, + filename=target_path.name, + cache_dir=HF_HUB_CACHE, + local_dir=module_dir, + local_dir_use_symlinks=False, + resume_download=True, + ) + logger.debug(f"Downloaded {path_from_cwd(result)}") diff --git a/src/animatediff/utils/pipeline.py b/src/animatediff/utils/pipeline.py new file mode 100644 index 0000000000000000000000000000000000000000..f24c2d445acfaf263862f4e5737134eab8cfda40 --- /dev/null +++ b/src/animatediff/utils/pipeline.py @@ -0,0 +1,123 @@ +import logging +from typing import Optional + +import torch +import torch._dynamo as dynamo +from diffusers import (DiffusionPipeline, StableDiffusionPipeline, + StableDiffusionXLPipeline) +from einops._torch_specific import allow_ops_in_compiled_graph + +from animatediff.utils.device import get_memory_format, get_model_dtypes +from animatediff.utils.model import nop_train + +logger = logging.getLogger(__name__) + + +def send_to_device( + pipeline: DiffusionPipeline, + device: torch.device, + freeze: bool = True, + force_half: bool = False, + compile: bool = False, + is_sdxl: bool = False, +) -> DiffusionPipeline: + if is_sdxl: + return send_to_device_sdxl( + pipeline=pipeline, + device=device, + freeze=freeze, + force_half=force_half, + compile=compile, + ) + + logger.info(f"Sending pipeline to device \"{device.type}{device.index if device.index else ''}\"") + + unet_dtype, tenc_dtype, vae_dtype = get_model_dtypes(device, force_half) + model_memory_format = get_memory_format(device) + + if hasattr(pipeline, 'controlnet'): + unet_dtype = tenc_dtype = vae_dtype + + logger.info(f"-> Selected data types: {unet_dtype=},{tenc_dtype=},{vae_dtype=}") + + if hasattr(pipeline.controlnet, 'nets'): + for i in range(len(pipeline.controlnet.nets)): + pipeline.controlnet.nets[i] = pipeline.controlnet.nets[i].to(device=device, dtype=vae_dtype, memory_format=model_memory_format) + else: + if pipeline.controlnet: + pipeline.controlnet = pipeline.controlnet.to(device=device, dtype=vae_dtype, memory_format=model_memory_format) + + if hasattr(pipeline, 'controlnet_map'): + if pipeline.controlnet_map: + for c in pipeline.controlnet_map: + #pipeline.controlnet_map[c] = pipeline.controlnet_map[c].to(device=device, dtype=unet_dtype, memory_format=model_memory_format) + pipeline.controlnet_map[c] = pipeline.controlnet_map[c].to(dtype=unet_dtype, memory_format=model_memory_format) + + if hasattr(pipeline, 'lora_map'): + if pipeline.lora_map: + pipeline.lora_map.to(device=device, dtype=unet_dtype) 
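+ + # Note: the controlnet_map entries above are cast to the UNet dtype but (per the + # commented-out device= variant) deliberately not moved to the device here; they + # are presumably transferred to the GPU on demand to keep VRAM usage down.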
+ + if hasattr(pipeline, 'lcm'): + if pipeline.lcm: + pipeline.lcm.to(device=device, dtype=unet_dtype) + + pipeline.unet = pipeline.unet.to(device=device, dtype=unet_dtype, memory_format=model_memory_format) + pipeline.text_encoder = pipeline.text_encoder.to(device=device, dtype=tenc_dtype) + pipeline.vae = pipeline.vae.to(device=device, dtype=vae_dtype, memory_format=model_memory_format) + + # Compile model if enabled + if compile: + if not isinstance(pipeline.unet, dynamo.OptimizedModule): + allow_ops_in_compiled_graph() # make einops behave + logger.warn("Enabling model compilation with TorchDynamo, this may take a while...") + logger.warn("Model compilation is experimental and may not work as expected!") + pipeline.unet = torch.compile( + pipeline.unet, + backend="inductor", + mode="reduce-overhead", + ) + else: + logger.debug("Skipping model compilation, already compiled!") + + return pipeline + + +def send_to_device_sdxl( + pipeline: StableDiffusionXLPipeline, + device: torch.device, + freeze: bool = True, + force_half: bool = False, + compile: bool = False, +) -> StableDiffusionXLPipeline: + logger.info(f"Sending pipeline to device \"{device.type}{device.index if device.index else ''}\"") + + pipeline.unet = pipeline.unet.half() + pipeline.text_encoder = pipeline.text_encoder.half() + pipeline.text_encoder_2 = pipeline.text_encoder_2.half() + + if False: + pipeline.to(device) + else: + pipeline.enable_model_cpu_offload() + + pipeline.enable_xformers_memory_efficient_attention() + pipeline.enable_vae_slicing() + pipeline.enable_vae_tiling() + + return pipeline + + + +def get_context_params( + length: int, + context: Optional[int] = None, + overlap: Optional[int] = None, + stride: Optional[int] = None, +): + if context is None: + context = min(length, 16) + if overlap is None: + overlap = context // 4 + if stride is None: + stride = 0 + return context, overlap, stride diff --git a/src/animatediff/utils/tagger.py b/src/animatediff/utils/tagger.py new file mode 100644 index 0000000000000000000000000000000000000000..3e63c42312aedbc9e852ca319bc5b7ca10abe4c0 --- /dev/null +++ b/src/animatediff/utils/tagger.py @@ -0,0 +1,161 @@ +# https://huggingface.co/spaces/SmilingWolf/wd-v1-4-tags/blob/main/app.py + +import glob +import logging +import os + +import cv2 +import numpy as np +import onnxruntime +import pandas as pd +from PIL import Image +from tqdm.rich import tqdm + +from animatediff.utils.util import prepare_wd14tagger + +logger = logging.getLogger(__name__) + + +def make_square(img, target_size): + old_size = img.shape[:2] + desired_size = max(old_size) + desired_size = max(desired_size, target_size) + + delta_w = desired_size - old_size[1] + delta_h = desired_size - old_size[0] + top, bottom = delta_h // 2, delta_h - (delta_h // 2) + left, right = delta_w // 2, delta_w - (delta_w // 2) + + color = [255, 255, 255] + new_im = cv2.copyMakeBorder( + img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color + ) + return new_im + +def smart_resize(img, size): + # Assumes the image has already gone through make_square + if img.shape[0] > size: + img = cv2.resize(img, (size, size), interpolation=cv2.INTER_AREA) + elif img.shape[0] < size: + img = cv2.resize(img, (size, size), interpolation=cv2.INTER_CUBIC) + return img + + +class Tagger: + def __init__(self, general_threshold, character_threshold, ignore_tokens, with_confidence, is_danbooru_format,is_cpu): + prepare_wd14tagger() +# self.model = onnxruntime.InferenceSession("data/models/WD14tagger/model.onnx", 
providers=['CUDAExecutionProvider','CPUExecutionProvider']) + if is_cpu: + self.model = onnxruntime.InferenceSession("data/models/WD14tagger/model.onnx", providers=['CPUExecutionProvider']) + else: + self.model = onnxruntime.InferenceSession("data/models/WD14tagger/model.onnx", providers=['CUDAExecutionProvider']) + df = pd.read_csv("data/models/WD14tagger/selected_tags.csv") + self.tag_names = df["name"].tolist() + self.rating_indexes = list(np.where(df["category"] == 9)[0]) + self.general_indexes = list(np.where(df["category"] == 0)[0]) + self.character_indexes = list(np.where(df["category"] == 4)[0]) + + self.general_threshold = general_threshold + self.character_threshold = character_threshold + self.ignore_tokens = ignore_tokens + self.with_confidence = with_confidence + self.is_danbooru_format = is_danbooru_format + + def __call__( + self, + image: Image, + ): + + _, height, width, _ = self.model.get_inputs()[0].shape + + # Alpha to white + image = image.convert("RGBA") + new_image = Image.new("RGBA", image.size, "WHITE") + new_image.paste(image, mask=image) + image = new_image.convert("RGB") + image = np.asarray(image) + + # PIL RGB to OpenCV BGR + image = image[:, :, ::-1] + + image = make_square(image, height) + image = smart_resize(image, height) + image = image.astype(np.float32) + image = np.expand_dims(image, 0) + + input_name = self.model.get_inputs()[0].name + label_name = self.model.get_outputs()[0].name + probs = self.model.run([label_name], {input_name: image})[0] + + labels = list(zip(self.tag_names, probs[0].astype(float))) + + # First 4 labels are actually ratings: pick one with argmax + ratings_names = [labels[i] for i in self.rating_indexes] + rating = dict(ratings_names) + + # Then we have general tags: pick any where prediction confidence > threshold + general_names = [labels[i] for i in self.general_indexes] + general_res = [x for x in general_names if x[1] > self.general_threshold] + general_res = dict(general_res) + + # Everything else is characters: pick any where prediction confidence > threshold + character_names = [labels[i] for i in self.character_indexes] + character_res = [x for x in character_names if x[1] > self.character_threshold] + character_res = dict(character_res) + + #logger.info(f"{rating=}") + #logger.info(f"{general_res=}") + #logger.info(f"{character_res=}") + + general_res = {k:general_res[k] for k in (general_res.keys() - set(self.ignore_tokens)) } + character_res = {k:character_res[k] for k in (character_res.keys() - set(self.ignore_tokens)) } + + prompt = "" + + if self.with_confidence: + prompt = [ f"({i}:{character_res[i]:.2f})" for i in (character_res.keys()) ] + prompt += [ f"({i}:{general_res[i]:.2f})" for i in (general_res.keys()) ] + else: + prompt = [ i for i in (character_res.keys()) ] + prompt += [ i for i in (general_res.keys()) ] + + prompt = ",".join(prompt) + + if not self.is_danbooru_format: + prompt = prompt.replace("_", " ") + + #logger.info(f"{prompt=}") + return prompt + + +def get_labels(frame_dir, interval, general_threshold, character_threshold, ignore_tokens, with_confidence, is_danbooru_format, is_cpu =False): + + import torch + + result = {} + if os.path.isdir(frame_dir): + png_list = sorted(glob.glob( os.path.join(frame_dir, "[0-9]*.png"), recursive=False)) + + png_map ={} + for png_path in png_list: + basename_without_ext = os.path.splitext(os.path.basename(png_path))[0] + png_map[int(basename_without_ext)] = png_path + + with torch.no_grad(): + tagger = Tagger(general_threshold, character_threshold, 
ignore_tokens, with_confidence, is_danbooru_format, is_cpu) + + for i in tqdm(range(0, len(png_list), interval ), desc=f"WD14tagger"): + path = png_map[i] + + #logger.info(f"{path=}") + + result[str(i)] = tagger( + image= Image.open(path) + ) + + tagger = None + + torch.cuda.empty_cache() + + return result + diff --git a/src/animatediff/utils/util.py b/src/animatediff/utils/util.py new file mode 100644 index 0000000000000000000000000000000000000000..a1ca2ef817e3c487e4eba3e51a48007cbb547a02 --- /dev/null +++ b/src/animatediff/utils/util.py @@ -0,0 +1,667 @@ +import logging +from os import PathLike +from pathlib import Path +from typing import List + +import torch +import torch.distributed as dist +from einops import rearrange +from PIL import Image +from torch import Tensor +from torchvision.utils import save_image +from tqdm.rich import tqdm + +logger = logging.getLogger(__name__) + +def zero_rank_print(s): + if not isinstance(s, str): s = repr(s) + if (not dist.is_initialized()) or (dist.is_initialized() and dist.get_rank() == 0): print("### " + s) + + +def save_frames(video: Tensor, frames_dir: PathLike, show_progress:bool=True): + frames_dir = Path(frames_dir) + frames_dir.mkdir(parents=True, exist_ok=True) + frames = rearrange(video, "b c t h w -> t b c h w") + if show_progress: + for idx, frame in enumerate(tqdm(frames, desc=f"Saving frames to {frames_dir.stem}")): + save_image(frame, frames_dir.joinpath(f"{idx:08d}.png")) + else: + for idx, frame in enumerate(frames): + save_image(frame, frames_dir.joinpath(f"{idx:08d}.png")) + + +def save_imgs(imgs:List[Image.Image], frames_dir: PathLike): + frames_dir = Path(frames_dir) + frames_dir.mkdir(parents=True, exist_ok=True) + for idx, img in enumerate(tqdm(imgs, desc=f"Saving frames to {frames_dir.stem}")): + img.save( frames_dir.joinpath(f"{idx:08d}.png") ) + +def save_video(video: Tensor, save_path: PathLike, fps: int = 8): + save_path = Path(save_path) + save_path.parent.mkdir(parents=True, exist_ok=True) + + if video.ndim == 5: + # batch, channels, frame, width, height -> frame, channels, width, height + frames = video.permute(0, 2, 1, 3, 4).squeeze(0) + elif video.ndim == 4: + # channels, frame, width, height -> frame, channels, width, height + frames = video.permute(1, 0, 2, 3) + else: + raise ValueError(f"video must be 4 or 5 dimensional, got {video.ndim}") + + # Add 0.5 after unnormalizing to [0, 255] to round to the nearest integer + frames = frames.mul(255).add_(0.5).clamp_(0, 255).permute(0, 2, 3, 1).to("cpu", torch.uint8).numpy() + + images = [Image.fromarray(frame) for frame in frames] + images[0].save( + fp=save_path, format="GIF", append_images=images[1:], save_all=True, duration=(1 / fps * 1000), loop=0 + ) + + +def path_from_cwd(path: PathLike) -> str: + path = Path(path) + return str(path.absolute().relative_to(Path.cwd())) + + +def resize_for_condition_image(input_image: Image, us_width: int, us_height: int): + input_image = input_image.convert("RGB") + H = int(round(us_height / 8.0)) * 8 + W = int(round(us_width / 8.0)) * 8 + img = input_image.resize((W, H), resample=Image.LANCZOS) + return img + +def get_resized_images(org_images_path: List[str], us_width: int, us_height: int): + + images = [Image.open( p ) for p in org_images_path] + + W, H = images[0].size + + if us_width == -1: + us_width = W/H * us_height + elif us_height == -1: + us_height = H/W * us_width + + return [resize_for_condition_image(img, us_width, us_height) for img in images] + +def get_resized_image(org_image_path: str, us_width: int, us_height: 
int): + + image = Image.open( org_image_path ) + + W, H = image.size + + if us_width == -1: + us_width = W/H * us_height + elif us_height == -1: + us_height = H/W * us_width + + return resize_for_condition_image(image, us_width, us_height) + +def get_resized_image2(org_image_path: str, size: int): + + image = Image.open( org_image_path ) + + W, H = image.size + + if size < 0: + return resize_for_condition_image(image, W, H) + + if W < H: + us_width = size + us_height = int(size * H/W) + else: + us_width = int(size * W/H) + us_height = size + + return resize_for_condition_image(image, us_width, us_height) + + +def show_bytes(comment, obj): + + import sys +# memory_size = sys.getsizeof(tensor) + torch.numel(tensor)*tensor.element_size() + + if torch.is_tensor(obj): + logger.info(f"{comment} : {obj.dtype=}") + + cpu_mem = sys.getsizeof(obj)/1024/1024 + cpu_mem = 0 if cpu_mem < 1 else cpu_mem + logger.info(f"{comment} : CPU {cpu_mem} MB") + + gpu_mem = torch.numel(obj)*obj.element_size()/1024/1024 + gpu_mem = 0 if gpu_mem < 1 else gpu_mem + logger.info(f"{comment} : GPU {gpu_mem} MB") + elif type(obj) is tuple: + logger.info(f"{comment} : {type(obj)}") + cpu_mem = 0 + gpu_mem = 0 + + for o in obj: + cpu_mem += sys.getsizeof(o)/1024/1024 + gpu_mem += torch.numel(o)*o.element_size()/1024/1024 + + cpu_mem = 0 if cpu_mem < 1 else cpu_mem + logger.info(f"{comment} : CPU {cpu_mem} MB") + + gpu_mem = 0 if gpu_mem < 1 else gpu_mem + logger.info(f"{comment} : GPU {gpu_mem} MB") + + else: + logger.info(f"{comment} : unknown type") + + + +def show_gpu(comment=""): + return + import inspect + callerframerecord = inspect.stack()[1] + frame = callerframerecord[0] + info = inspect.getframeinfo(frame) + + import time + + import GPUtil + torch.cuda.synchronize() + +# time.sleep(1.5) + + #logger.info(comment) + logger.info(f"{info.filename}/{info.lineno}/{comment}") + GPUtil.showUtilization() + + +PROFILE_ON = False + +def start_profile(): + if PROFILE_ON: + import cProfile + + pr = cProfile.Profile() + pr.enable() + return pr + else: + return None + +def end_profile(pr, file_name): + if PROFILE_ON: + import io + import pstats + + pr.disable() + s = io.StringIO() + ps = pstats.Stats(pr, stream=s).sort_stats('cumtime') + ps.print_stats() + + with open(file_name, 'w+') as f: + f.write(s.getvalue()) + +STOPWATCH_ON = False + +time_record = [] +start_time = 0 + +def stopwatch_start(): + global start_time,time_record + import time + + if STOPWATCH_ON: + time_record = [] + torch.cuda.synchronize() + start_time = time.time() + +def stopwatch_record(comment): + import time + + if STOPWATCH_ON: + torch.cuda.synchronize() + time_record.append(((time.time() - start_time) , comment)) + +def stopwatch_stop(comment): + + if STOPWATCH_ON: + stopwatch_record(comment) + + for rec in time_record: + logger.info(rec) + + +def prepare_ip_adapter(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/ip_adapter/models/image_encoder", exist_ok=True) + for hub_file in [ + "models/image_encoder/config.json", + "models/image_encoder/pytorch_model.bin", + "models/ip-adapter-plus_sd15.bin", + "models/ip-adapter_sd15.bin", + "models/ip-adapter_sd15_light.bin", + "models/ip-adapter-plus-face_sd15.bin", + "models/ip-adapter-full-face_sd15.bin", + ]: + path = Path(hub_file) + + saved_path = "data/models/ip_adapter" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="h94/IP-Adapter", subfolder=PurePosixPath(path.parent), 
filename=PurePosixPath(path.name), local_dir="data/models/ip_adapter" + ) + +def prepare_ip_adapter_sdxl(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/ip_adapter/sdxl_models/image_encoder", exist_ok=True) + for hub_file in [ + "models/image_encoder/config.json", + "models/image_encoder/pytorch_model.bin", + "sdxl_models/ip-adapter-plus_sdxl_vit-h.bin", + "sdxl_models/ip-adapter-plus-face_sdxl_vit-h.bin", + "sdxl_models/ip-adapter_sdxl_vit-h.bin", + ]: + path = Path(hub_file) + + saved_path = "data/models/ip_adapter" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="h94/IP-Adapter", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/ip_adapter" + ) + + +def prepare_lcm_lora(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/lcm_lora/sdxl", exist_ok=True) + for hub_file in [ + "pytorch_lora_weights.safetensors", + ]: + path = Path(hub_file) + + saved_path = "data/models/lcm_lora/sdxl" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="latent-consistency/lcm-lora-sdxl", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/lcm_lora/sdxl" + ) + + os.makedirs("data/models/lcm_lora/sd15", exist_ok=True) + for hub_file in [ + "pytorch_lora_weights.safetensors", + ]: + path = Path(hub_file) + + saved_path = "data/models/lcm_lora/sd15" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="latent-consistency/lcm-lora-sdv1-5", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/lcm_lora/sd15" + ) + +def prepare_lllite(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/lllite", exist_ok=True) + for hub_file in [ + "bdsqlsz_controlllite_xl_canny.safetensors", + "bdsqlsz_controlllite_xl_depth.safetensors", + "bdsqlsz_controlllite_xl_dw_openpose.safetensors", + "bdsqlsz_controlllite_xl_lineart_anime_denoise.safetensors", + "bdsqlsz_controlllite_xl_mlsd_V2.safetensors", + "bdsqlsz_controlllite_xl_normal.safetensors", + "bdsqlsz_controlllite_xl_recolor_luminance.safetensors", + "bdsqlsz_controlllite_xl_segment_animeface_V2.safetensors", + "bdsqlsz_controlllite_xl_sketch.safetensors", + "bdsqlsz_controlllite_xl_softedge.safetensors", + "bdsqlsz_controlllite_xl_t2i-adapter_color_shuffle.safetensors", + "bdsqlsz_controlllite_xl_tile_anime_α.safetensors", # alpha + "bdsqlsz_controlllite_xl_tile_anime_β.safetensors", # beta + ]: + path = Path(hub_file) + + saved_path = "data/models/lllite" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="bdsqlsz/qinglong_controlnet-lllite", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/lllite" + ) + + +def prepare_extra_controlnet(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/controlnet/animatediff_controlnet", exist_ok=True) + for hub_file in [ + "controlnet_checkpoint.ckpt" + ]: + path = Path(hub_file) + + saved_path = "data/models/controlnet/animatediff_controlnet" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="crishhh/animatediff_controlnet", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), 
local_dir="data/models/controlnet/animatediff_controlnet" + ) + + +def prepare_motion_module(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/motion-module", exist_ok=True) + for hub_file in [ + "mm_sd_v15_v2.ckpt", + "mm_sdxl_v10_beta.ckpt", + ]: + path = Path(hub_file) + + saved_path = "data/models/motion-module" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="guoyww/animatediff", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/motion-module" + ) + +def prepare_wd14tagger(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/WD14tagger", exist_ok=True) + for hub_file in [ + "model.onnx", + "selected_tags.csv", + ]: + path = Path(hub_file) + + saved_path = "data/models/WD14tagger" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="SmilingWolf/wd-v1-4-moat-tagger-v2", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/WD14tagger" + ) + +def prepare_dwpose(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/DWPose", exist_ok=True) + for hub_file in [ + "dw-ll_ucoco_384.onnx", + "yolox_l.onnx", + ]: + path = Path(hub_file) + + saved_path = "data/models/DWPose" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="yzd-v/DWPose", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/DWPose" + ) + + + +def prepare_softsplat(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/softsplat", exist_ok=True) + for hub_file in [ + "softsplat-lf", + ]: + path = Path(hub_file) + + saved_path = "data/models/softsplat" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="s9roll74/softsplat_mirror", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/softsplat" + ) + + +def extract_frames(movie_file_path, fps, out_dir, aspect_ratio, duration, offset, size_of_short_edge=-1, low_vram_mode=False): + import ffmpeg + + probe = ffmpeg.probe(movie_file_path) + video = next((stream for stream in probe['streams'] if stream['codec_type'] == 'video'), None) + width = int(video['width']) + height = int(video['height']) + + node = ffmpeg.input( str(movie_file_path.resolve()) ) + + node = node.filter( "fps", fps=fps ) + + + if duration > 0: + node = node.trim(start=offset,end=offset+duration).setpts('PTS-STARTPTS') + elif offset > 0: + node = node.trim(start=offset).setpts('PTS-STARTPTS') + + if size_of_short_edge != -1: + if width < height: + r = height / width + width = size_of_short_edge + height = int( (size_of_short_edge * r)//8 * 8) + node = node.filter('scale', size_of_short_edge, height) + else: + r = width / height + height = size_of_short_edge + width = int( (size_of_short_edge * r)//8 * 8) + node = node.filter('scale', width, size_of_short_edge) + + if low_vram_mode: + if aspect_ratio == -1: + aspect_ratio = width/height + logger.info(f"low {aspect_ratio=}") + aspect_ratio = max(min( aspect_ratio, 1.5 ), 0.6666) + logger.info(f"low {aspect_ratio=}") + + if aspect_ratio > 0: + # aspect ratio (width / height) + ww = round(height * aspect_ratio) + if ww < width: + x= (width - ww)//2 + y= 0 + w = ww + h = 
height + else: + hh = round(width/aspect_ratio) + x = 0 + y = (height - hh)//2 + w = width + h = hh + w = int(w // 8 * 8) + h = int(h // 8 * 8) + logger.info(f"crop to {w=},{h=}") + node = node.crop(x, y, w, h) + + node = node.output( str(out_dir.resolve().joinpath("%08d.png")), start_number=0 ) + + node.run(quiet=True, overwrite_output=True) + + + + + + +def is_v2_motion_module(motion_module_path:Path): + if motion_module_path.suffix == ".safetensors": + from safetensors.torch import load_file + loaded = load_file(motion_module_path, "cpu") + else: + from torch import load + loaded = load(motion_module_path, "cpu") + + is_v2 = "mid_block.motion_modules.0.temporal_transformer.norm.bias" in loaded + + loaded = None + torch.cuda.empty_cache() + + logger.info(f"{is_v2=}") + + return is_v2 + +def is_sdxl_checkpoint(checkpoint_path:Path): + if checkpoint_path.suffix == ".safetensors": + from safetensors.torch import load_file + loaded = load_file(checkpoint_path, "cpu") + else: + from torch import load + loaded = load(checkpoint_path, "cpu") + + is_sdxl = False + + if "conditioner.embedders.1.model.ln_final.weight" in loaded: + is_sdxl = True + if "conditioner.embedders.0.model.ln_final.weight" in loaded: + is_sdxl = True + + loaded = None + torch.cuda.empty_cache() + + logger.info(f"{is_sdxl=}") + return is_sdxl + + +tensor_interpolation = None + +def get_tensor_interpolation_method(): + return tensor_interpolation + +def set_tensor_interpolation_method(is_slerp): + global tensor_interpolation + tensor_interpolation = slerp if is_slerp else linear + +def linear(v1, v2, t): + return (1.0 - t) * v1 + t * v2 + +def slerp( + v0: torch.Tensor, v1: torch.Tensor, t: float, DOT_THRESHOLD: float = 0.9995 +) -> torch.Tensor: + u0 = v0 / v0.norm() + u1 = v1 / v1.norm() + dot = (u0 * u1).sum() + if dot.abs() > DOT_THRESHOLD: + #logger.info(f'warning: v0 and v1 close to parallel, using linear interpolation instead.') + return (1.0 - t) * v0 + t * v1 + omega = dot.acos() + return (((1.0 - t) * omega).sin() * v0 + (t * omega).sin() * v1) / omega.sin() + + + +def prepare_sam_hq(low_vram): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/SAM", exist_ok=True) + for hub_file in [ + "sam_hq_vit_h.pth" if not low_vram else "sam_hq_vit_b.pth" + ]: + path = Path(hub_file) + + saved_path = "data/models/SAM" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="lkeab/hq-sam", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/SAM" + ) + +def prepare_groundingDINO(): + import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/GroundingDINO", exist_ok=True) + for hub_file in [ + "groundingdino_swinb_cogcoor.pth", + ]: + path = Path(hub_file) + + saved_path = "data/models/GroundingDINO" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="ShilongLiu/GroundingDINO", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/GroundingDINO" + ) + + +def prepare_propainter(): + import os + + import git + + if os.path.isdir("src/animatediff/repo/ProPainter"): + if os.listdir("src/animatediff/repo/ProPainter"): + return + + repo = git.Repo.clone_from(url="https://github.com/sczhou/ProPainter", to_path="src/animatediff/repo/ProPainter", no_checkout=True ) + repo.git.checkout("a8a5827ca5e7e8c1b4c360ea77cbb2adb3c18370") + + +def prepare_anime_seg(): 
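+    """Fetch the anime-segmentation model (isnetis.onnx) from the skytnt/anime-seg Hub repo into data/models/anime_seg, skipping files that already exist."""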
+ import os + from pathlib import PurePosixPath + + from huggingface_hub import hf_hub_download + + os.makedirs("data/models/anime_seg", exist_ok=True) + for hub_file in [ + "isnetis.onnx", + ]: + path = Path(hub_file) + + saved_path = "data/models/anime_seg" / path + + if os.path.exists(saved_path): + continue + + hf_hub_download( + repo_id="skytnt/anime-seg", subfolder=PurePosixPath(path.parent), filename=PurePosixPath(path.name), local_dir="data/models/anime_seg" + ) diff --git a/src/animatediff/utils/wild_card.py b/src/animatediff/utils/wild_card.py new file mode 100644 index 0000000000000000000000000000000000000000..6e4ef1a75a8741437674409ebff80a60565e6ab4 --- /dev/null +++ b/src/animatediff/utils/wild_card.py @@ -0,0 +1,39 @@ +import glob +import os +import random +import re + +wild_card_regex = r'(\A|\W)__([\w-]+)__(\W|\Z)' + + +def create_wild_card_map(wild_card_dir): + result = {} + if os.path.isdir(wild_card_dir): + txt_list = glob.glob( os.path.join(wild_card_dir ,"**/*.txt"), recursive=True) + for txt in txt_list: + basename_without_ext = os.path.splitext(os.path.basename(txt))[0] + with open(txt, encoding='utf-8') as f: + try: + result[basename_without_ext] = [s.rstrip() for s in f.readlines()] + except Exception as e: + print(e) + print("can not read ", txt) + return result + +def replace_wild_card_token(match_obj, wild_card_map): + m1 = match_obj.group(1) + m3 = match_obj.group(3) + + dict_name = match_obj.group(2) + + if dict_name in wild_card_map: + token_list = wild_card_map[dict_name] + token = token_list[random.randint(0,len(token_list)-1)] + return m1+token+m3 + else: + return match_obj.group(0) + +def replace_wild_card(prompt, wild_card_dir): + wild_card_map = create_wild_card_map(wild_card_dir) + prompt = re.sub(wild_card_regex, lambda x: replace_wild_card_token(x, wild_card_map ), prompt) + return prompt diff --git a/test.py b/test.py new file mode 100644 index 0000000000000000000000000000000000000000..4bc1f10871166e70f3b19e3f77f607dd3730f0db --- /dev/null +++ b/test.py @@ -0,0 +1,69 @@ +import cv2 +import json +import os +import asyncio + +async def stylize(video): + command = f"animatediff stylize create-config {video}" + process = await asyncio.create_subprocess_shell( + command, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE + ) + stdout, stderr = await process.communicate() + if process.returncode == 0: + return stdout.decode() + else: + print(f"Error: {stderr.decode()}") + +async def start_video_edit(prompt_file): + command = f"animatediff stylize generate {prompt_file}" + process = await asyncio.create_subprocess_shell( + command, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE + ) + stdout, stderr = await process.communicate() + if process.returncode == 0: + return stdout.decode() + else: + print(f"Error: {stderr.decode()}") + +def edit_video(video, pos_prompt): + x = asyncio.run(stylize(video)) + x = x.split("stylize.py") + config = x[18].split("config =")[-1].strip() + d = x[19].split("stylize_dir = ")[-1].strip() + + with open(config, 'r+') as f: + data = json.load(f) + data['head_prompt'] = pos_prompt + data["path"] = "models/huggingface/xxmix9realistic_v40.safetensors" + + os.remove(config) + with open(config, 'w') as f: + json.dump(data, f, indent=4) + + out = asyncio.run(start_video_edit(d)) + out = out.split("Stylized results are output to ")[-1] + out = out.split("stylize.py")[0].strip() + + cwd = os.getcwd() + video_dir = cwd + "/" + out + + video_extensions = {'.mp4', '.avi', '.mkv', '.mov', '.flv', '.wmv'} + 
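# Walk the stylize output directory for the first rendered video file; +    # edit_video() returns its path, or None if no video file was found. +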
video_path = None + + for dirpath, dirnames, filenames in os.walk(video_dir): + for filename in filenames: + if os.path.splitext(filename)[1].lower() in video_extensions: + video_path = os.path.join(dirpath, filename) + break + if video_path: + break + + return video_path + +video_path = input("Enter the path to your video: ") +pos_prompt = input("Enter what you want to do with the video: ") +print("The video is stored at", edit_video(video_path, pos_prompt)) \ No newline at end of file